cannot select a model from ollama by Gerome-Elassaad · Pull Request #54 · codinit-dev/codinit-dev · GitHub
Merged
4 changes: 0 additions & 4 deletions app/components/@settings/core/constants.ts
@@ -62,13 +62,11 @@ export const DEFAULT_TAB_CONFIG = [
{ id: 'local-providers', visible: true, window: 'user' as const, order: 3 },
{ id: 'connection', visible: true, window: 'user' as const, order: 4 },
{ id: 'notifications', visible: true, window: 'user' as const, order: 5 },
{ id: 'event-logs', visible: true, window: 'user' as const, order: 6 },

// User Window Tabs (In dropdown, initially hidden)
{ id: 'profile', visible: false, window: 'user' as const, order: 7 },
{ id: 'settings', visible: false, window: 'user' as const, order: 8 },
{ id: 'api-keys', visible: true, window: 'user' as const, order: 9 },
{ id: 'task-manager', visible: false, window: 'user' as const, order: 10 },
{ id: 'service-status', visible: false, window: 'user' as const, order: 11 },

// User Window Tabs (Hidden, controlled by TaskManagerTab)
@@ -82,11 +80,9 @@ export const DEFAULT_TAB_CONFIG = [
{ id: 'local-providers', visible: true, window: 'developer' as const, order: 3 },
{ id: 'connection', visible: true, window: 'developer' as const, order: 4 },
{ id: 'notifications', visible: true, window: 'developer' as const, order: 5 },
{ id: 'event-logs', visible: true, window: 'developer' as const, order: 6 },
{ id: 'profile', visible: true, window: 'developer' as const, order: 7 },
{ id: 'settings', visible: true, window: 'developer' as const, order: 8 },
{ id: 'api-keys', visible: true, window: 'developer' as const, order: 9 },
{ id: 'task-manager', visible: true, window: 'developer' as const, order: 10 },
{ id: 'service-status', visible: true, window: 'developer' as const, order: 11 },
{ id: 'debug', visible: true, window: 'developer' as const, order: 12 },
{ id: 'update', visible: true, window: 'developer' as const, order: 13 },
@@ -44,7 +44,7 @@ const PROVIDER_ICONS: Record<ProviderName, string> = {
// Update PROVIDER_DESCRIPTIONS to use the same type
const PROVIDER_DESCRIPTIONS: Partial<Record<ProviderName, string>> = {
Anthropic: 'Access Claude and other Anthropic models',
OpenAI: 'Use GPT-4, GPT-3.5, and other OpenAI models',
OpenAI: 'Use GPT-5.2, GPT-4.5, and other OpenAI models',
};

const CloudProvidersTab = () => {
@@ -148,7 +148,15 @@ export default function LocalProvidersTab() {
try {
setIsLoadingModels(true);

const response = await fetch('http://127.0.0.1:11434/api/tags');
const ollamaProvider = filteredProviders.find((p) => p.name === 'Ollama');
const baseUrl = ollamaProvider?.settings.baseUrl || OLLAMA_API_URL;

const response = await fetch(`${baseUrl}/api/tags`);

if (!response.ok) {
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
}

const data = (await response.json()) as { models: OllamaModel[] };

setOllamaModels(
@@ -159,14 +167,20 @@
);
} catch (error) {
console.error('Error fetching Ollama models:', error);

const errorMsg = error instanceof Error ? error.message : 'Unknown error occurred';
toast(`Failed to fetch Ollama models: ${errorMsg}`);
} finally {
setIsLoadingModels(false);
}
};

const updateOllamaModel = async (modelName: string): Promise<boolean> => {
try {
const response = await fetch(`${OLLAMA_API_URL}/api/pull`, {
const ollamaProvider = filteredProviders.find((p) => p.name === 'Ollama');
const baseUrl = ollamaProvider?.settings.baseUrl || OLLAMA_API_URL;

const response = await fetch(`${baseUrl}/api/pull`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ name: modelName }),
@@ -218,7 +232,7 @@ export default function LocalProvidersTab() {
}
}

const updatedResponse = await fetch('http://127.0.0.1:11434/api/tags');
const updatedResponse = await fetch(`${baseUrl}/api/tags`);
const updatedData = (await updatedResponse.json()) as { models: OllamaModel[] };
const updatedModel = updatedData.models.find((m) => m.name === modelName);

@@ -275,7 +289,10 @@ export default function LocalProvidersTab() {

const handleDeleteOllamaModel = async (modelName: string) => {
try {
const response = await fetch(`${OLLAMA_API_URL}/api/delete`, {
const ollamaProvider = filteredProviders.find((p) => p.name === 'Ollama');
const baseUrl = ollamaProvider?.settings.baseUrl || OLLAMA_API_URL;

const response = await fetch(`${baseUrl}/api/delete`, {
method: 'DELETE',
headers: {
'Content-Type': 'application/json',
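The same two-line provider lookup now appears in each of the three Ollama handlers in this file (the model-list fetch, updateOllamaModel, and handleDeleteOllamaModel). A minimal sketch of how it could be factored into one helper, assuming only what the diff shows — provider objects carrying name and settings.baseUrl, plus the existing OLLAMA_API_URL constant; the helper name and parameter type are illustrative, not part of this PR:

// Hypothetical helper: resolve the user-configured Ollama base URL,
// falling back to the default constant when none is set.
const resolveOllamaBaseUrl = (
  providers: Array<{ name: string; settings: { baseUrl?: string } }>,
): string => {
  const ollamaProvider = providers.find((p) => p.name === 'Ollama');

  return ollamaProvider?.settings.baseUrl || OLLAMA_API_URL;
};

Each handler would then open with const baseUrl = resolveOllamaBaseUrl(filteredProviders); instead of repeating the lookup.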
5 changes: 3 additions & 2 deletions app/lib/modules/llm/manager.ts
@@ -113,6 +113,7 @@ export class LLMManager {
provider: BaseProvider,
apiKeys?: Record<string, string>,
serverEnv?: Record<string, string>,
providerSettings?: Record<string, IProviderSetting>,
): boolean {
// Check if provider has API key configuration
const config = provider.config;
@@ -128,7 +129,7 @@
// For local providers like Ollama and LMStudio, check if baseUrl is configured
if (provider.name === 'Ollama' || provider.name === 'LMStudio') {
const baseUrlKey = provider.name === 'Ollama' ? 'OLLAMA_API_BASE_URL' : 'LMSTUDIO_API_BASE_URL';
const hasBaseUrl = apiKeys?.[baseUrlKey] || serverEnv?.[baseUrlKey];
const hasBaseUrl = providerSettings?.[provider.name]?.baseUrl || apiKeys?.[baseUrlKey] || serverEnv?.[baseUrlKey];

if (!hasBaseUrl) {
return false;
@@ -217,7 +218,7 @@

// Check if provider has required configuration before attempting fetch
const providerConfig = providerSettings?.[provider.name];
const hasRequiredConfig = this._hasRequiredConfiguration(provider, apiKeys, serverEnv);
const hasRequiredConfig = this._hasRequiredConfiguration(provider, apiKeys, serverEnv, providerSettings);

if (!hasRequiredConfig) {
logger.debug(`Skipping ${provider.name}: missing required configuration`);
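The practical effect of threading providerSettings through: a base URL entered in the settings UI now counts as required configuration, where previously only the key store or the server environment did. A sketch of the effective resolution order for Ollama after this change, using the key names from the diff; the standalone function is illustrative, not the PR's code:

// Illustrative only: the order in which the check now finds a base URL.
// UI settings win, then the per-user key store, then the server environment.
function hasOllamaBaseUrl(
  providerSettings?: Record<string, IProviderSetting>,
  apiKeys?: Record<string, string>,
  serverEnv?: Record<string, string>,
): boolean {
  return Boolean(
    providerSettings?.Ollama?.baseUrl || apiKeys?.OLLAMA_API_BASE_URL || serverEnv?.OLLAMA_API_BASE_URL,
  );
}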
37 changes: 32 additions & 5 deletions app/lib/modules/llm/providers/lmstudio.ts
@@ -42,13 +42,33 @@ export default class LMStudioProvider extends BaseProvider {
*/
const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';

baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
if (isDocker) {
try {
const url = new URL(baseUrl);
if (url.hostname === 'localhost' || url.hostname === '127.0.0.1') {
url.hostname = 'host.docker.internal';
baseUrl = url.toString().replace(/\/$/, '');
}
} catch (error) {
logger.warn('Failed to parse LMStudio baseUrl for Docker mapping:', error);
}
}
}

const response = await fetch(`${baseUrl}/v1/models`);

if (!response.ok) {
throw new Error(
`Failed to fetch LMStudio models: HTTP ${response.status} ${response.statusText}`,
);
}

const data = (await response.json()) as { data: Array<{ id: string }> };

if (!data || !Array.isArray(data.data)) {
throw new Error('Invalid response from LMStudio API: missing data array');
}

return data.data.map((model) => ({
name: model.id,
label: model.id,
@@ -78,9 +98,16 @@

const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';

if (typeof window === 'undefined') {
baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
if (typeof window === 'undefined' && isDocker) {
try {
const url = new URL(baseUrl);
if (url.hostname === 'localhost' || url.hostname === '127.0.0.1') {
url.hostname = 'host.docker.internal';
baseUrl = url.toString().replace(/\/$/, '');
}
} catch (error) {
logger.warn('Failed to parse LMStudio baseUrl for Docker mapping:', error);
}
}

logger.debug('LMStudio Base Url used: ', baseUrl);
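The URL-parsing block added above (and again in ollama.ts below) replaces the old substring replace(), which could rewrite hosts that merely contain 'localhost' in their name. Since the identical block now appears twice in this file and twice in ollama.ts, here is a minimal sketch of a shared helper under the same assumptions the diff makes — only localhost and 127.0.0.1 are remapped, and the trailing slash URL#toString() adds is stripped. The function name and export are hypothetical:

// Hypothetical shared helper: rewrite loopback hostnames to
// host.docker.internal so a containerized app can reach a model
// server running on the host machine.
export function mapToDockerHost(baseUrl: string): string {
  try {
    const url = new URL(baseUrl);

    if (url.hostname === 'localhost' || url.hostname === '127.0.0.1') {
      url.hostname = 'host.docker.internal';

      // URL#toString() appends a trailing slash to a bare origin; strip
      // it so concatenations like `${baseUrl}/api/tags` stay clean.
      return url.toString().replace(/\/$/, '');
    }

    return baseUrl;
  } catch {
    // Leave an unparseable URL untouched rather than breaking the request.
    return baseUrl;
  }
}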
42 changes: 35 additions & 7 deletions app/lib/modules/llm/providers/ollama.ts
@@ -83,18 +83,35 @@ export default class OllamaProvider extends BaseProvider {
*/
const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';

baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
if (isDocker) {
try {
const url = new URL(baseUrl);

if (url.hostname === 'localhost' || url.hostname === '127.0.0.1') {
url.hostname = 'host.docker.internal';
baseUrl = url.toString().replace(/\/$/, '');
}
} catch (error) {
logger.warn('Failed to parse Ollama baseUrl for Docker mapping:', error);
}
}
}

const response = await fetch(`${baseUrl}/api/tags`);

if (!response.ok) {
throw new Error(`Failed to fetch Ollama models: HTTP ${response.status} ${response.statusText}`);
}

const data = (await response.json()) as OllamaApiResponse;

// console.log({ ollamamodels: data.models });
if (!data || !Array.isArray(data.models)) {
throw new Error('Invalid response from Ollama API: missing models array');
}

return data.models.map((model: OllamaModel) => ({
name: model.name,
label: `${model.name} (${model.details.parameter_size})`,
label: `${model.name} (${model.details?.parameter_size || 'unknown'})`,
provider: this.name,
maxTokenAllowed: 8000,
maxCompletionTokens: 8000,
@@ -119,14 +136,25 @@
defaultApiTokenKey: '',
});

// Backend: Check if we're running in Docker
if (!baseUrl) {
throw new Error('No baseUrl found for OLLAMA provider');
}

// Backend: Check if we're running in Docker
const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || envRecord.RUNNING_IN_DOCKER === 'true';
baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;

if (isDocker) {
try {
const url = new URL(baseUrl);

if (url.hostname === 'localhost' || url.hostname === '127.0.0.1') {
url.hostname = 'host.docker.internal';
baseUrl = url.toString().replace(/\/$/, '');
}
} catch (error) {
logger.warn('Failed to parse Ollama baseUrl for Docker mapping:', error);
}
}

logger.debug('Ollama Base Url used: ', baseUrl);

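For reference, a quick probe of the payload these new guards validate, assuming a local Ollama instance on the default port — a sketch, not part of the PR. The diff's own fallback to 'unknown' suggests details.parameter_size can be absent for some models:

// Minimal check of the /api/tags shape the updated code defends against.
const res = await fetch('http://127.0.0.1:11434/api/tags');
const { models } = (await res.json()) as {
  models: Array<{ name: string; details?: { parameter_size?: string } }>;
};

for (const m of models) {
  console.log(`${m.name} (${m.details?.parameter_size || 'unknown'})`);
}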