diff --git a/src/const/fetch.ts b/src/const/fetch.ts
index 7d8ce9a3ca45..00e28f023d1c 100644
--- a/src/const/fetch.ts
+++ b/src/const/fetch.ts
@@ -1,3 +1,5 @@
+export const OPENAI_END_POINT = 'X-OPENAI-END_POINT';
+
 export const OPENAI_API_KEY_HEADER_KEY = 'X-OPENAI-API-KEY';
 
 export const LOBE_CHAT_ACCESS_CODE = 'X-LOBE_CHAT_ACCESS_CODE';
diff --git a/src/pages/api/openai.api.ts b/src/pages/api/openai.api.ts
index a33ee07f8dad..2390cd9e0cab 100644
--- a/src/pages/api/openai.api.ts
+++ b/src/pages/api/openai.api.ts
@@ -1,4 +1,4 @@
-import { LOBE_CHAT_ACCESS_CODE, OPENAI_API_KEY_HEADER_KEY } from '@/const/fetch';
+import { LOBE_CHAT_ACCESS_CODE, OPENAI_API_KEY_HEADER_KEY, OPENAI_END_POINT } from '@/const/fetch';
 import { ErrorType } from '@/types/fetch';
 import { OpenAIStreamPayload } from '@/types/openai';
 
@@ -12,6 +12,7 @@ export default async function handler(req: Request) {
   const payload = (await req.json()) as OpenAIStreamPayload;
   const apiKey = req.headers.get(OPENAI_API_KEY_HEADER_KEY);
   const accessCode = req.headers.get(LOBE_CHAT_ACCESS_CODE);
+  const endpoint = req.headers.get(OPENAI_END_POINT);
 
   const result = checkAuth({ accessCode, apiKey });
 
@@ -19,5 +20,5 @@
     return createErrorResponse(result.error as ErrorType);
   }
 
-  return createChatCompletion({ OPENAI_API_KEY: apiKey, payload });
+  return createChatCompletion({ OPENAI_API_KEY: apiKey, endpoint, payload });
 }
diff --git a/src/pages/api/openai.ts b/src/pages/api/openai.ts
index 0db797f08c11..86394a3f0039 100644
--- a/src/pages/api/openai.ts
+++ b/src/pages/api/openai.ts
@@ -11,19 +11,23 @@ import { OpenAIStreamPayload } from '@/types/openai';
 const isDev = process.env.NODE_ENV === 'development';
 
 // Create the OpenAI instance
-export const createOpenAI = (userApiKey: string | null) => {
+export const createOpenAI = (userApiKey: string | null, endpoint?: string | null) => {
   const { OPENAI_API_KEY, OPENAI_PROXY_URL } = getServerConfig();
 
   const config = new Configuration({
     apiKey: !userApiKey ? OPENAI_API_KEY : userApiKey,
   });
 
-  return new OpenAIApi(config, isDev && OPENAI_PROXY_URL ? OPENAI_PROXY_URL : undefined);
+  return new OpenAIApi(
+    config,
+    endpoint ? endpoint : isDev && OPENAI_PROXY_URL ? OPENAI_PROXY_URL : undefined,
+  );
 };
 
 interface CreateChatCompletionOptions {
   OPENAI_API_KEY: string | null;
   callbacks?: (payload: OpenAIStreamPayload) => OpenAIStreamCallbacks;
+  endpoint?: string | null;
   payload: OpenAIStreamPayload;
 }
 
@@ -31,10 +35,11 @@ export const createChatCompletion = async ({
   payload,
   callbacks,
   OPENAI_API_KEY,
+  endpoint,
 }: CreateChatCompletionOptions) => {
   // ============ 0. Create the OpenAI instance ============ //
 
-  const openai = createOpenAI(OPENAI_API_KEY);
+  const openai = createOpenAI(OPENAI_API_KEY, endpoint);
 
   const { messages, plugins: enabledPlugins, ...params } = payload;
 
diff --git a/src/services/chatModel.ts b/src/services/chatModel.ts
index bc20e9a5c704..5b37ac555748 100644
--- a/src/services/chatModel.ts
+++ b/src/services/chatModel.ts
@@ -1,6 +1,6 @@
 import { merge } from 'lodash-es';
 
-import { LOBE_CHAT_ACCESS_CODE, OPENAI_API_KEY_HEADER_KEY } from '@/const/fetch';
+import { LOBE_CHAT_ACCESS_CODE, OPENAI_API_KEY_HEADER_KEY, OPENAI_END_POINT } from '@/const/fetch';
 import { initialLobeAgentConfig } from '@/store/session';
 import { useSettings } from '@/store/settings';
 import type { OpenAIStreamPayload } from '@/types/openai';
@@ -33,6 +33,7 @@ export const fetchChatModel = (
       'Content-Type': 'application/json',
       [LOBE_CHAT_ACCESS_CODE]: useSettings.getState().settings.password || '',
       [OPENAI_API_KEY_HEADER_KEY]: useSettings.getState().settings.OPENAI_API_KEY || '',
+      [OPENAI_END_POINT]: useSettings.getState().settings.endpoint || '',
     },
     method: 'POST',
     signal: options?.signal,
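For context, the request side of this change looks roughly like the sketch below: the client forwards a user-configured endpoint in the X-OPENAI-END_POINT header, the edge handler reads it, and createOpenAI prefers it over OPENAI_PROXY_URL when constructing OpenAIApi. This is an illustrative sketch only, not part of the patch: the '/api/openai' path, the sendWithCustomEndpoint name, and the literal header values are assumptions for the example.

```ts
import { LOBE_CHAT_ACCESS_CODE, OPENAI_API_KEY_HEADER_KEY, OPENAI_END_POINT } from '@/const/fetch';
import type { OpenAIStreamPayload } from '@/types/openai';

// Hypothetical caller: posts a chat payload with the headers the patched route expects.
export const sendWithCustomEndpoint = async (payload: OpenAIStreamPayload) =>
  fetch('/api/openai', {
    body: JSON.stringify(payload),
    headers: {
      'Content-Type': 'application/json',
      [LOBE_CHAT_ACCESS_CODE]: 'my-access-code', // hypothetical access code
      [OPENAI_API_KEY_HEADER_KEY]: 'sk-xxxx', // hypothetical user API key
      [OPENAI_END_POINT]: 'https://my-proxy.example.com/v1', // hypothetical custom endpoint
    },
    method: 'POST',
  });
```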