diff --git a/src/app/settings/llm/OpenAI/index.tsx b/src/app/settings/llm/OpenAI/index.tsx
index 8c452276285b..b67ec5d96f3c 100644
--- a/src/app/settings/llm/OpenAI/index.tsx
+++ b/src/app/settings/llm/OpenAI/index.tsx
@@ -4,12 +4,7 @@ import { memo } from 'react';
import ProviderConfig from '../components/ProviderConfig';
const OpenAIProvider = memo(() => (
- }
- />
+ } />
));
export default OpenAIProvider;
diff --git a/src/config/server/provider.ts b/src/config/server/provider.ts
index 5ac45109faa1..2888622e2315 100644
--- a/src/config/server/provider.ts
+++ b/src/config/server/provider.ts
@@ -83,6 +83,10 @@ declare global {
* @deprecated
*/
OLLAMA_CUSTOM_MODELS?: string;
+ /**
+ * @deprecated
+ */
+ OPENROUTER_CUSTOM_MODELS?: string;
}
}
}
@@ -119,6 +123,24 @@ export const getProviderConfig = () => {
regions = process.env.OPENAI_FUNCTION_REGIONS.split(',');
}
+ if (process.env.CUSTOM_MODELS) {
+ console.warn(
+ 'DEPRECATED: `CUSTOM_MODELS` is deprecated, please use `OPENAI_MODEL_LIST` instead, we will remove `CUSTOM_MODELS` in the LobeChat 1.0',
+ );
+ }
+
+ if (process.env.OLLAMA_CUSTOM_MODELS) {
+ console.warn(
+ 'DEPRECATED: `OLLAMA_CUSTOM_MODELS` is deprecated, please use `OLLAMA_MODEL_LIST` instead, we will remove `OLLAMA_CUSTOM_MODELS` in the LobeChat 1.0',
+ );
+ }
+
+ if (process.env.OPENROUTER_CUSTOM_MODELS) {
+ console.warn(
+ 'DEPRECATED: `OPENROUTER_CUSTOM_MODELS` is deprecated, please use `OPENROUTER_MODEL_LIST` instead, we will remove `OPENROUTER_CUSTOM_MODELS` in the LobeChat 1.0',
+ );
+ }
+
return {
API_KEY_SELECT_MODE: process.env.API_KEY_SELECT_MODE,
diff --git a/src/const/settings/index.ts b/src/const/settings/index.ts
index 1f9410afd2ca..3a3791e99a87 100644
--- a/src/const/settings/index.ts
+++ b/src/const/settings/index.ts
@@ -108,7 +108,7 @@ export const DEFAULT_LLM_CONFIG: GlobalLLMConfig = {
},
openai: {
apiKey: '',
- enabled: false,
+ enabled: true,
enabledModels: filterEnabledModels(OpenAIProvider),
},
openrouter: {
diff --git a/src/store/global/slices/settings/selectors/modelConfig.ts b/src/store/global/slices/settings/selectors/modelConfig.ts
index 0a9d916e5511..40b5da5b4068 100644
--- a/src/store/global/slices/settings/selectors/modelConfig.ts
+++ b/src/store/global/slices/settings/selectors/modelConfig.ts
@@ -14,13 +14,8 @@ const providerConfig = (provider: string) => (s: GlobalStore) =>
| GeneralModelProviderConfig
| undefined;
-const providerEnabled = (provider: GlobalLLMProviderKey) => (s: GlobalStore) => {
- // TODO: we need to migrate the 'openAI' key to 'openai'
- // @ts-ignore
- if (provider === 'openai') return true;
-
- return currentSettings(s).languageModel[provider]?.enabled || false;
-};
+const providerEnabled = (provider: GlobalLLMProviderKey) => (s: GlobalStore) =>
+ currentSettings(s).languageModel[provider]?.enabled || false;
const providerEnableModels = (provider: string) => (s: GlobalStore) => {
if (!providerConfig(provider)(s)?.enabledModels) return;
diff --git a/src/store/global/slices/settings/selectors/modelProvider.ts b/src/store/global/slices/settings/selectors/modelProvider.ts
index a94e14cbef93..e1ed88d9e05c 100644
--- a/src/store/global/slices/settings/selectors/modelProvider.ts
+++ b/src/store/global/slices/settings/selectors/modelProvider.ts
@@ -43,6 +43,7 @@ const serverProviderModelCards =
* define all the model list of providers
*/
const providerModelList = (s: GlobalStore): ModelProviderCard[] => {
+  // if the chat model is configured on the server side, use the server-side model cards
const openaiChatModels = serverProviderModelCards('openai')(s);
const ollamaChatModels = serverProviderModelCards('ollama')(s);
const openrouterChatModels = serverProviderModelCards('openrouter')(s);