Fix: #69 local model server failure by Gerome-Elassaad · Pull Request #71 · codinit-dev/codinit-dev · GitHub
[go: up one dir, main page]

Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions app/components/ui/Button.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,7 @@ const buttonVariants = cva(
);

export interface ButtonProps
extends React.ButtonHTMLAttributes<HTMLButtonElement>,
VariantProps<typeof buttonVariants> {
extends React.ButtonHTMLAttributes<HTMLButtonElement>, VariantProps<typeof buttonVariants> {
_asChild?: boolean;
}

Expand Down
18 changes: 8 additions & 10 deletions app/lib/.server/llm/stream-text.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ import { allowedHTMLElements } from '~/utils/markdown';
import { LLMManager } from '~/lib/modules/llm/manager';
import { createScopedLogger } from '~/utils/logger';
import { createFilesContext, extractPropertiesFromMessage } from './utils';
import { BuiltInToolService } from '~/lib/services/builtInToolService';

export type Messages = Message[];

Expand Down Expand Up @@ -210,16 +209,16 @@ Use these preferences when creating UI components, styling code, or suggesting d

logger.info(`Sending llm call to ${provider.name} with model ${modelDetails.name}`);

let allTools = { ...options?.tools };
const builtInToolService = BuiltInToolService.getInstance();
const builtInTools = builtInToolService.toolsWithoutExecute;
/*
* Only pass tools that are properly implemented with valid Zod schemas.
* Built-in tools from JSON don't have Zod validation, so we don't pass any tools
* to avoid zod-to-json-schema conversion errors.
*/
const allTools = {};

if (Object.keys(builtInTools).length > 0) {
allTools = { ...allTools, ...builtInTools };
logger.info(`Added ${Object.keys(builtInTools).length} built-in tools:`, Object.keys(builtInTools));
}
logger.info(`Skipping all tool passing to AI SDK - tools are processed server-side only`);

const hasTools = Object.keys(allTools).length > 0;
const hasTools = false;

return await _streamText({
model: provider.getModelInstance({
Expand All @@ -232,6 +231,5 @@ Use these preferences when creating UI components, styling code, or suggesting d
maxTokens: dynamicMaxTokens,
messages: convertToCoreMessages(processedMessages as any),
...(hasTools ? { tools: allTools, toolChoice: 'auto' } : {}),
...options,
});
}
3 changes: 1 addition & 2 deletions app/lib/hooks/StickToBottom.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,7 @@ export interface StickToBottomContext {
const StickToBottomContext = createContext<StickToBottomContext | null>(null);

export interface StickToBottomProps
extends Omit<React.HTMLAttributes<HTMLDivElement>, 'children'>,
StickToBottomOptions {
extends Omit<React.HTMLAttributes<HTMLDivElement>, 'children'>, StickToBottomOptions {
contextRef?: React.Ref<StickToBottomContext>;
instance?: ReturnType<typeof useStickToBottom>;
children: ((context: StickToBottomContext) => ReactNode) | ReactNode;
Expand Down
107 changes: 98 additions & 9 deletions app/lib/hooks/useLocalProviders.ts
Original file line number Diff line number Diff line change
@@ -1,25 +1,114 @@
import { useCallback, useState } from 'react';
import { useCallback, useState, useEffect } from 'react';
import type { IProviderConfig } from '~/types/model';
import { updateProviderSettings } from '~/lib/stores/settings';

export interface UseLocalProvidersReturn {
localProviders: IProviderConfig[];
refreshLocalProviders: () => void;
isChecking: boolean;
}

export function useLocalProviders(): UseLocalProvidersReturn {
const [localProviders, setLocalProviders] = useState<IProviderConfig[]>([]);
const [isChecking, setIsChecking] = useState(false);

const refreshLocalProviders = useCallback(() => {
/*
* Refresh logic for local providers
* This would typically involve checking the status of Ollama and LMStudio
* For now, we'll just return an empty array
*/
setLocalProviders([]);
}, []);
const checkLocalProvider = useCallback(
async (name: string, baseUrl: string, icon: string): Promise<IProviderConfig | null> => {
try {
// Try to connect to the local provider
const response = await fetch(`${baseUrl}/api/tags`, {
method: 'GET',
signal: AbortSignal.timeout(5000), // 5 second timeout
});

if (response.ok) {
return {
name,
icon,
getApiKeyLink: `https://${name.toLowerCase()}.com/download`,
labelForGetApiKey: `Download ${name}`,
staticModels: [],
settings: {
enabled: true,
baseUrl,
},
};
}
} catch {
// Provider not available, return null
}
return null;
},
[],
);

const refreshLocalProviders = useCallback(async () => {
setIsChecking(true);

try {
const providers: IProviderConfig[] = [];

// Check Ollama

const ollamaProvider = await checkLocalProvider(
'Ollama',
'http://localhost:11434',
'/thirdparty/logos/ollama.svg',
);

if (ollamaProvider) {
providers.push(ollamaProvider);
}

// Check LMStudio

const lmStudioProvider = await checkLocalProvider(
'LMStudio',
'http://localhost:1234',
'/thirdparty/logos/lmstudio.svg',
);

if (lmStudioProvider) {
providers.push(lmStudioProvider);
}

setLocalProviders(providers);

// Enable detected providers in settings
providers.forEach((provider) => {
updateProviderSettings(provider.name, { enabled: true, baseUrl: provider.settings.baseUrl });
});

// Disable providers that are no longer available
const availableProviderNames = providers.map((p) => p.name);
const allLocalProviderNames = ['Ollama', 'LMStudio'];

allLocalProviderNames.forEach((providerName) => {
if (!availableProviderNames.includes(providerName)) {
updateProviderSettings(providerName, { enabled: false });
}
});
} catch (error) {
console.error('Error checking local providers:', error);
setLocalProviders([]);

// Disable all local providers if there's an error
['Ollama', 'LMStudio'].forEach((providerName) => {
updateProviderSettings(providerName, { enabled: false });
});
} finally {
setIsChecking(false);
}
}, [checkLocalProvider]);

// Auto-refresh on mount
useEffect(() => {
refreshLocalProviders();
}, [refreshLocalProviders]);

return {
localProviders,
refreshLocalProviders,
isChecking,
};
}
42 changes: 34 additions & 8 deletions app/lib/modules/llm/providers/ollama.ts
Original file line number Diff line number Diff line change
Expand Up @@ -109,14 +109,40 @@ export default class OllamaProvider extends BaseProvider {
throw new Error('Invalid response from Ollama API: missing models array');
}

return data.models.map((model: OllamaModel) => ({
name: model.name,
label: `${model.name} (${model.details?.parameter_size || 'unknown'})`,
provider: this.name,
maxTokenAllowed: 8000,
maxCompletionTokens: 8000,
icon: '/thirdparty/logos/ollama.svg',
}));
return data.models.map((model: OllamaModel) => {
// Use proper context window based on model family and parameter size
let contextWindow = 4096; // default

// Larger context windows for modern models
if (model.details?.parameter_size) {
const paramSize = parseInt(model.details.parameter_size.replace(/[^0-9]/g, ''));

// Models with larger parameter sizes generally support larger contexts
if (paramSize >= 70) {
contextWindow = 32768; // 32k for 70B+ models
} else if (paramSize >= 30) {
contextWindow = 16384; // 16k for 30B+ models
} else if (paramSize >= 7) {
contextWindow = 8192; // 8k for 7B+ models
}
}

// Special handling for specific model families
if (model.details?.family?.includes('llama') && model.details?.parameter_size?.includes('70')) {
contextWindow = 32768; // Llama 70B models
} else if (model.details?.family?.includes('llama') && model.details?.parameter_size?.includes('405')) {
contextWindow = 128000; // Llama 405B models
}

return {
name: model.name,
label: `${model.name} (${model.details?.parameter_size || 'unknown'}, ${contextWindow >= 1000 ? Math.floor(contextWindow / 1000) + 'k' : contextWindow} ctx)`,
provider: this.name,
maxTokenAllowed: contextWindow,
maxCompletionTokens: Math.min(contextWindow, 4096), // Cap completion tokens
icon: '/thirdparty/logos/ollama.svg',
};
});
}

getModelInstance: (options: {
Expand Down
42 changes: 12 additions & 30 deletions app/lib/modules/llm/providers/open-router.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,23 +33,23 @@ export default class OpenRouterProvider extends BaseProvider {
* Claude Opus 4.5 via OpenRouter: 200k context, maximum intelligence
*/
{
name: 'anthropic/claude-opus-4-5',
name: 'anthropic/claude-opus-4.5',
label: 'Claude Opus 4.5',
provider: 'OpenRouter',
maxTokenAllowed: 200000,
maxCompletionTokens: 64000,
},

// Claude Sonnet 4.5 via OpenRouter: 200k context
// Claude Sonnet 4.5 via OpenRouter: 1M context, highest intelligence
{
name: 'anthropic/claude-sonnet-4-5',
name: 'anthropic/claude-sonnet-4.5',
label: 'Claude Sonnet 4.5',
provider: 'OpenRouter',
maxTokenAllowed: 200000,
maxTokenAllowed: 1000000,
maxCompletionTokens: 64000,
},

// GPT-5.2 Pro via OpenRouter: 400k context, highest accuracy
// GPT-5.2 Pro via OpenRouter: 400k context, latest GPT
{
name: 'openai/gpt-5.2-pro',
label: 'GPT-5.2 Pro',
Expand All @@ -58,39 +58,21 @@ export default class OpenRouterProvider extends BaseProvider {
maxCompletionTokens: 128000,
},

// GPT-5.2 Thinking via OpenRouter: 400k context, complex reasoning
{
name: 'openai/gpt-5.2-thinking',
label: 'GPT-5.2 Thinking',
provider: 'OpenRouter',
maxTokenAllowed: 400000,
maxCompletionTokens: 128000,
},

// GPT-5.2 Instant via OpenRouter: 400k context, optimized for speed
{
name: 'openai/gpt-5.2-instant',
label: 'GPT-5.2 Instant',
provider: 'OpenRouter',
maxTokenAllowed: 400000,
maxCompletionTokens: 128000,
},

// GPT-5.1 via OpenRouter: 128k context
// GPT-4o via OpenRouter: 128k context, reliable fallback
{
name: 'openai/gpt-5.1',
label: 'GPT-5.1',
name: 'openai/gpt-4o',
label: 'GPT-4o',
provider: 'OpenRouter',
maxTokenAllowed: 128000,
maxCompletionTokens: 16384,
},

// DeepSeek-R1 via OpenRouter: 128k context
// DeepSeek R1 via OpenRouter: 163k context, free tier available
{
name: 'deepseek/deepseek-r1',
label: 'DeepSeek-R1',
name: 'deepseek/deepseek-r1-0528:free',
label: 'DeepSeek R1 (Free)',
provider: 'OpenRouter',
maxTokenAllowed: 131072,
maxTokenAllowed: 163840,
maxCompletionTokens: 32768,
},
];
Expand Down
4 changes: 2 additions & 2 deletions app/lib/stores/settings.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { atom, map } from 'nanostores';
import { PROVIDER_LIST } from '~/utils/constants';
import type { IProviderConfig } from '~/types/model';
import type { IProviderConfig, IProviderSetting } from '~/types/model';
import type {
TabVisibilityConfig,
TabWindowConfig,
Expand Down Expand Up @@ -101,7 +101,7 @@ const getInitialProviderSettings = (): ProviderSetting => {
export const providersStore = map<ProviderSetting>(getInitialProviderSettings());

// Create a function to update provider settings that handles both store and persistence
export const updateProviderSettings = (provider: string, settings: ProviderSetting) => {
export const updateProviderSettings = (provider: string, settings: Partial<IProviderSetting>) => {
const currentSettings = providersStore.get();

// Create new provider config with updated settings
Expand Down
Loading
0