8000 fix: perfect gpustack compatibility, fix openai edge case · drivecore/mycoder@9359f62 · GitHub
[go: up one dir, main page]

Skip to content

Commit 9359f62

Browse files
committed
fix: perfect gpustack compatibility, fix openai edge case
1 parent 38f13b2 commit 9359f62

File tree

10 files changed

+36
-149
lines changed

10 files changed

+36
-149
lines changed

mycoder.config.js

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,9 @@ export default {
1818
//model: 'llama3.2:3b',
1919
//provider: 'xai',
2020
//model: 'grok-2-latest',
21+
//provider: 'openai',
22+
//model: 'qwen2.5-coder:14b',
23+
//baseUrl: 'http://192.168.2.66:80/v1-openai',
2124
maxTokens: 4096,
2225
temperature: 0.7,
2326

packages/agent/src/core/llm/__tests__/openai.test.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -177,7 +177,7 @@ describe('OpenAIProvider', () => {
177177
'role' in toolUseMessage
178178
) {
179179
expect(toolUseMessage.role).toBe('assistant');
180-
expect(toolUseMessage.content).toBe(null);
180+
expect(toolUseMessage.content).toBe(''); // required by gpustack's implementation of the openai SDK.
181181

182182
if (
183183
'tool_calls' in toolUseMessage &&

packages/agent/src/core/llm/provider.ts

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -57,12 +57,6 @@ export const providerConfig: Record<string, ProviderConfig> = {
5757
model: 'gpt-4o-2024-05-13',
5858
factory: (model, options) => new OpenAIProvider(model, options),
5959
},
60-
gpustack: {
61-
docsUrl: 'https://mycoder.ai/docs/provider/local-openai',
62-
model: 'llama3.2',
63-
baseUrl: 'http://localhost:80',
64-
factory: (model, options) => new OpenAIProvider(model, options),
65-
},
6660
ollama: {
6761
docsUrl: 'https://mycoder.ai/docs/provider/ollama',
6862
model: 'llama3.2',

packages/agent/src/core/llm/providers/openai.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -154,7 +154,7 @@ export class OpenAIProvider implements LLMProvider {
154154
// so we'll include it as a function call in an assistant message
155155
return {
156156
role: 'assistant',
157-
content: null,
157+
content: '',
158158
tool_calls: [
159159
{
160160
id: msg.id,

packages/cli/src/commands/$default.ts

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -114,18 +114,25 @@ export async function executePrompt(
114114
throw new Error(`Unknown provider: ${config.provider}`);
115115
}
116116

117-
const { keyName } = providerSettings;
117+
// only validate key if baseUrl is not set, otherwise we assume the user is using a local provider
118118
let apiKey: string | undefined = undefined;
119+
const { keyName } = providerSettings;
119120
if (keyName) {
120121
// Then fall back to environment variable
122+
logger.info(`Looking for API key in env: ${keyName}`);
121123
apiKey = process.env[keyName];
122-
if (!apiKey) {
123-
logger.error(getProviderApiKeyError(config.provider));
124-
throw new Error(`${config.provider} API key not found`);
124+
if (!config.baseUrl) {
125+
if (!apiKey) {
126+
logger.error(getProviderApiKeyError(config.provider));
127+
throw new Error(`${config.provider} API key not found`);
128+
}
125129
}
126130
}
127131

128132
logger.info(`LLM: ${config.provider}/${config.model}`);
133+
if (apiKey) {
134+
logger.info(`Using API key: ${apiKey.slice(0, 4)}...`);
135+
}
129136
if (config.baseUrl) {
130137
// For Ollama, we check if the base URL is set
131138
logger.info(`Using base url: ${config.baseUrl}`);

packages/docs/docs/providers/anthropic.md

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -32,9 +32,6 @@ export default {
3232
provider: 'anthropic',
3333
model: 'claude-3-7-sonnet-20250219',
3434

35-
// Optional: Set API key directly (environment variable is preferred)
36-
// anthropicApiKey: 'your_api_key_here',
37-
3835
// Other MyCoder settings
3936
maxTokens: 4096,
4037
temperature: 0.7,

packages/docs/docs/providers/index.mdx

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,9 +11,8 @@ MyCoder supports multiple Language Model (LLM) providers, giving you flexibility
1111
MyCoder currently supports the following LLM providers:
1212

1313
- [**Anthropic**](./anthropic.md) - Claude models from Anthropic
14-
- [**OpenAI**](./openai.md) - GPT models from OpenAI
14+
- [**OpenAI**](./openai.md) - GPT models from OpenAI (and OpenAI compatible providers)
1515
- [**Ollama**](./ollama.md) - Self-hosted open-source models via Ollama
16-
- [**Local OpenAI Compatible**](./local-openai.md) - GPUStack and other OpenAI-compatible servers
1716
- [**xAI**](./xai.md) - Grok models from xAI
1817

1918
## Configuring Providers
@@ -54,5 +53,4 @@ For detailed instructions on setting up each provider, see the provider-specific
5453
- [Anthropic Configuration](./anthropic.md)
5554
- [OpenAI Configuration](./openai.md)
5655
- [Ollama Configuration](./ollama.md)
57-
- [Local OpenAI Compatible Configuration](./local-openai.md)
5856
- [xAI Configuration](./xai.md)

packages/docs/docs/providers/local-openai.md

Lines changed: 0 additions & 123 deletions
This file was deleted.

packages/docs/docs/providers/openai.md

Lines changed: 18 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -38,10 +38,6 @@ export default {
3838
provider: 'openai',
3939
model: 'gpt-4o',
4040

41-
// Optional: Set API key directly (environment variable is preferred)
42-
// openaiApiKey: 'your_api_key_here',
43-
// openaiOrganization: 'your_organization_id',
44-
4541
// Other MyCoder settings
4642
maxTokens: 4096,
4743
temperature: 0.7,
@@ -60,6 +56,24 @@ MyCoder supports all OpenAI models that have tool/function calling capabilities.
6056

6157
You can use any other OpenAI model that supports function calling with MyCoder. The OpenAI provider is not limited to just these listed models.
6258

59+
## Using OpenAI Compatible Providers
60+
61+
A number of providers offer OpenAI compatible REST API endpoints, such as xAI and [GPUStack](https://gpustack.ai). To point the OpenAI provider to a different provider's REST API, set the `baseUrl`, and also, if applicable, set `OPENAI_API_KEY` to the key that provider requires. For example:
62+
63+
```javascript
64+
export default {
65+
// Provider selection
66+
provider: 'openai',
67+
model: 'qwen2.5',
68+
baseUrl: 'http://localhost/v1-openai',
69+
70+
// Other MyCoder settings
71+
maxTokens: 4096,
72+
temperature: 0.7,
73+
// ...
74+
};
75+
```
76+
6377
## Best Practices
6478

6579
- GPT-4o provides the best balance of performance and cost for most MyCoder tasks

packages/docs/docs/providers/xai.md

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -32,9 +32,6 @@ export default {
3232
provider: 'xai',
3333
model: 'grok-2-latest',
3434

35-
// Optional: Set API key directly (environment variable is preferred)
36-
// xaiApiKey: 'your_api_key_here',
37-
3835
// Other MyCoder settings
3936
maxTokens: 4096,
4037
temperature: 0.7,
@@ -77,4 +74,4 @@ If you encounter issues with xAI's Grok:
7774
- For tool-calling issues, ensure your functions are properly formatted
7875
- Monitor your token usage to avoid unexpected costs
7976

80-
For more information, visit the [xAI Documentation](https://x.ai/docs).
77+
For more information, visit the [xAI Documentation](https://x.ai/docs).

0 commit comments

Comments
 (0)
0