diff --git a/src/libs/agent-runtime/lmstudio/index.test.ts b/src/libs/agent-runtime/lmstudio/index.test.ts
index 87c5ae529ccf7..b230d32e684b4 100644
--- a/src/libs/agent-runtime/lmstudio/index.test.ts
+++ b/src/libs/agent-runtime/lmstudio/index.test.ts
@@ -12,7 +12,7 @@ import * as debugStreamModule from '../utils/debugStream';
 import { LobeLMStudioAI } from './index';
 
 const provider = ModelProvider.LMStudio;
-const defaultBaseURL = 'http://localhost:1234/v1';
+const defaultBaseURL = 'http://127.0.0.1:1234/v1';
 
 const bizErrorType = 'ProviderBizError';
 const invalidErrorType = 'InvalidProviderAPIKey';
diff --git a/src/libs/agent-runtime/lmstudio/index.ts b/src/libs/agent-runtime/lmstudio/index.ts
index 4927bbf5ea150..9ef31ab793071 100644
--- a/src/libs/agent-runtime/lmstudio/index.ts
+++ b/src/libs/agent-runtime/lmstudio/index.ts
@@ -3,7 +3,7 @@ import { LobeOpenAICompatibleFactory } from '../utils/openaiCompatibleFactory';
 
 export const LobeLMStudioAI = LobeOpenAICompatibleFactory({
   apiKey: 'placeholder-to-avoid-error',
-  baseURL: 'http://localhost:1234/v1',
+  baseURL: 'http://127.0.0.1:1234/v1',
   debug: {
     chatCompletion: () => process.env.DEBUG_LMSTUDIO_CHAT_COMPLETION === '1',
   },
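
Context for the change (an assumption, not stated in the diff): on some hosts `localhost` resolves to the IPv6 loopback `::1` first, while the LM Studio server binds only the IPv4 loopback, so requests to `http://localhost:1234` can fail with a connection error; pinning the default base URL to `127.0.0.1` sidesteps that. A minimal TypeScript sketch to check which loopback address a locally running LM Studio instance answers on, assuming the default port 1234 and its OpenAI-compatible `/v1/models` endpoint:

```ts
// Probe both loopback addresses against LM Studio's /v1/models endpoint.
// Requires Node 18+ (global fetch) and an ESM context for top-level await.
const candidates = [
  'http://127.0.0.1:1234/v1/models',
  'http://localhost:1234/v1/models',
];

for (const url of candidates) {
  try {
    const res = await fetch(url);
    console.log(`${url} -> HTTP ${res.status}`);
  } catch (err) {
    console.log(`${url} -> unreachable (${(err as Error).message})`);
  }
}
```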