✨ feat: support using a custom OpenAI API key
arvinxx committed Jul 25, 2023
1 parent 71311d3 commit fb454a0
Showing 8 changed files with 73 additions and 38 deletions.
2 changes: 2 additions & 0 deletions src/const/fetch.ts
@@ -1 +1,3 @@
 export const OPENAI_SERVICE_ERROR_CODE = 555;
+
+export const OPENAI_API_KEY_HEADER_KEY = 'X-OPENAI-API-KEY';
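With the header name exported from a shared constant, the browser-side request and the edge route stay in sync. A minimal sketch of the round trip this commit sets up (the `/api/openai` path and the `sendWithUserKey` / `readUserKey` helpers are illustrative, not part of the commit; the real call sites are in the files below):

```ts
import { OPENAI_API_KEY_HEADER_KEY } from '@/const/fetch';

// Client side: attach the user's key to the request
// (mirrors the change to src/services/chatModel.ts below).
const sendWithUserKey = (userKey: string) =>
  fetch('/api/openai', {
    body: JSON.stringify({ messages: [] }),
    headers: {
      'Content-Type': 'application/json',
      [OPENAI_API_KEY_HEADER_KEY]: userKey,
    },
    method: 'POST',
  });

// Server side: read the same header back out on the edge runtime
// (mirrors the change to src/pages/api/openai.api.ts below).
const readUserKey = (req: Request): string | null => req.headers.get(OPENAI_API_KEY_HEADER_KEY);
```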
4 changes: 3 additions & 1 deletion src/pages/api/openai.api.ts
@@ -1,5 +1,6 @@
 import { StreamingTextResponse } from 'ai';
 
+import { OPENAI_API_KEY_HEADER_KEY } from '@/const/fetch';
 import { OpenAIStreamPayload } from '@/types/openai';
 
 import { createChatCompletion } from './openai';
@@ -8,8 +9,9 @@ export const runtime = 'edge';
 
 export default async function handler(req: Request) {
   const payload = (await req.json()) as OpenAIStreamPayload;
+  const apiKey = req.headers.get(OPENAI_API_KEY_HEADER_KEY);
 
-  const stream = await createChatCompletion(payload);
+  const stream = await createChatCompletion({ OPENAI_API_KEY: apiKey, payload });
 
   return new StreamingTextResponse(stream);
 }
40 changes: 26 additions & 14 deletions src/pages/api/openai.ts
@@ -9,20 +9,30 @@ import pluginList from '../../plugins';
 const isDev = process.env.NODE_ENV === 'development';
 const OPENAI_PROXY_URL = process.env.OPENAI_PROXY_URL;
 
-// Create an OpenAI API client (that's edge friendly!)
-const config = new Configuration({
-  apiKey: process.env.OPENAI_API_KEY,
-});
-
-export const openai = new OpenAIApi(
-  config,
-  isDev && OPENAI_PROXY_URL ? OPENAI_PROXY_URL : undefined,
-);
-
-export const createChatCompletion = async (
-  payload: OpenAIStreamPayload,
-  callbacks?: (payload: OpenAIStreamPayload) => OpenAIStreamCallbacks,
-) => {
+// Create the OpenAI instance
+export const createOpenAI = (OPENAI_API_KEY: string | null) => {
+  const config = new Configuration({
+    apiKey: OPENAI_API_KEY ?? process.env.OPENAI_API_KEY,
+  });
+
+  return new OpenAIApi(config, isDev && OPENAI_PROXY_URL ? OPENAI_PROXY_URL : undefined);
+};
+
+interface CreateChatCompletionOptions {
+  OPENAI_API_KEY: string | null;
+  callbacks?: (payload: OpenAIStreamPayload) => OpenAIStreamCallbacks;
+  payload: OpenAIStreamPayload;
+}
+
+export const createChatCompletion = async ({
+  payload,
+  callbacks,
+  OPENAI_API_KEY,
+}: CreateChatCompletionOptions) => {
+  // ============ 0. Create the OpenAI instance ============ //
+
+  const openai = createOpenAI(OPENAI_API_KEY);
+
   const { messages, plugins: enabledPlugins, ...params } = payload;
 
   // ============ 1. Pre-process functions ============ //
@@ -42,6 +52,8 @@ export const createChatCompletion = async (
   // ============ 2. Pre-process messages ============ //
   const formatMessages = messages.map((m) => ({ content: m.content, role: m.role }));
 
+  // ============ 3. Send the request ============ //
+
   const requestParams = { functions, messages: formatMessages, stream: true, ...params };
 
   const response = await openai.createChatCompletion(requestParams);
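With this refactor, `createChatCompletion` takes an options object and builds a per-request client, so the user-supplied key (or `null`) travels with each call, and `createOpenAI` falls back to the server-side `process.env.OPENAI_API_KEY` when the incoming key is null or undefined. A hedged usage sketch — the `runWithOptionalKey` wrapper is hypothetical and assumes it sits next to src/pages/api/openai.ts:

```ts
import { OpenAIStreamPayload } from '@/types/openai';

import { createChatCompletion } from './openai';

// Hypothetical wrapper: forwards whatever key the request carried (possibly null);
// key resolution (OPENAI_API_KEY ?? process.env.OPENAI_API_KEY) happens inside createOpenAI.
const runWithOptionalKey = async (payload: OpenAIStreamPayload, apiKey: string | null) =>
  createChatCompletion({ OPENAI_API_KEY: apiKey, payload });
```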
43 changes: 27 additions & 16 deletions src/pages/api/plugins.api.ts
@@ -1,36 +1,47 @@
 import { StreamingTextResponse } from 'ai';
 import { ChatCompletionRequestMessage } from 'openai-edge';
 
+import { OPENAI_API_KEY_HEADER_KEY } from '@/const/fetch';
 import { OpenAIStreamPayload } from '@/types/openai';
 
 import pluginList from '../../plugins';
-import { createChatCompletion, openai } from './openai';
+import { createChatCompletion, createOpenAI } from './openai';
 
 export const runtime = 'edge';
 
 export default async function handler(req: Request) {
   const payload = (await req.json()) as OpenAIStreamPayload;
+  const apiKey = req.headers.get(OPENAI_API_KEY_HEADER_KEY);
 
-  const stream = await createChatCompletion(payload, (payload) => ({
-    experimental_onFunctionCall: async ({ name, arguments: args }, createFunctionCallMessages) => {
-      console.log(`检测到 functionCall: ${name}`);
+  const openai = createOpenAI(apiKey);
 
-      const func = pluginList.find((f) => f.name === name);
+  const stream = await createChatCompletion({
+    OPENAI_API_KEY: apiKey,
+    callbacks: (payload) => ({
+      experimental_onFunctionCall: async (
+        { name, arguments: args },
+        createFunctionCallMessages,
+      ) => {
+        console.log(`检测到 functionCall: ${name}`);
 
-      if (func) {
-        const result = await func.runner(args as any);
+        const func = pluginList.find((f) => f.name === name);
 
-        console.log(`[${name}]`, args, `result:`, JSON.stringify(result, null, 2));
+        if (func) {
+          const result = await func.runner(args as any);
 
-        const newMessages = createFunctionCallMessages(result) as ChatCompletionRequestMessage[];
+          console.log(`[${name}]`, args, `result:`, JSON.stringify(result, null, 2));
 
-        return openai.createChatCompletion({
-          ...payload,
-          messages: [...payload.messages, ...newMessages],
-        });
-      }
-    },
-  }));
+          const newMessages = createFunctionCallMessages(result) as ChatCompletionRequestMessage[];
+
+          return openai.createChatCompletion({
+            ...payload,
+            messages: [...payload.messages, ...newMessages],
+          });
+        }
+      },
+    }),
+    payload,
+  });
 
   return new StreamingTextResponse(stream);
 }
4 changes: 2 additions & 2 deletions src/pages/setting/SettingForm.tsx
@@ -107,7 +107,7 @@ const SettingForm = memo(() => {
       children: <Input.Password placeholder={t('settingOpenAI.token.placeholder')} />,
       desc: t('settingOpenAI.token.desc'),
       label: t('settingOpenAI.token.title'),
-      name: 'token',
+      name: 'OPENAI_API_KEY',
     },
     {
       children: <Input placeholder={t('settingOpenAI.endpoint.placeholder')} />,
@@ -236,7 +236,7 @@ const SettingForm = memo(() => {
       children: <Input.Password placeholder={t('settingSystem.accessCode.placeholder')} />,
       desc: t('settingSystem.accessCode.desc'),
       label: t('settingSystem.accessCode.title'),
-      name: 'accessCode',
+      name: 'password',
     },
     {
       children: (
10 changes: 9 additions & 1 deletion src/services/chatModel.ts
@@ -1,16 +1,23 @@
 import { merge } from 'lodash-es';
 
+import { OPENAI_API_KEY_HEADER_KEY } from '@/const/fetch';
 import { initialLobeAgentConfig } from '@/store/session';
+import { useSettings } from '@/store/settings';
 import type { OpenAIStreamPayload } from '@/types/openai';
 
 import { URLS } from './url';
 
+interface FetchChatModelOptions {
+  signal?: AbortSignal | undefined;
+  withPlugin?: boolean;
+}
+
 /**
  * fetch dedicated to chat conversations
  */
 export const fetchChatModel = (
   params: Partial<OpenAIStreamPayload>,
-  options?: { signal?: AbortSignal | undefined; withPlugin?: boolean },
+  options?: FetchChatModelOptions,
 ) => {
   const payload = merge(
     {
@@ -25,6 +32,7 @@ export const fetchChatModel = (
     body: JSON.stringify(payload),
     headers: {
       'Content-Type': 'application/json',
+      [OPENAI_API_KEY_HEADER_KEY]: useSettings.getState().settings.OPENAI_API_KEY || '',
     },
     method: 'POST',
     signal: options?.signal,
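On the client, `fetchChatModel` now injects the key header itself from the `useSettings` store, so existing call sites keep passing only the OpenAI payload fields. A usage sketch (the `streamCompletion` helper and the `@/services/chatModel` import path are illustrative, assuming the project's `@/` alias for `src/`):

```ts
import type { OpenAIStreamPayload } from '@/types/openai';

import { fetchChatModel } from '@/services/chatModel';

// Hypothetical call site: the X-OPENAI-API-KEY header is attached inside fetchChatModel,
// read from useSettings.getState().settings.OPENAI_API_KEY, so nothing extra is needed here.
const streamCompletion = (params: Partial<OpenAIStreamPayload>, signal?: AbortSignal) =>
  fetchChatModel(params, { signal, withPlugin: false });
```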
4 changes: 2 additions & 2 deletions src/store/settings/initialState.ts
@@ -4,7 +4,7 @@ import { LanguageModel } from '@/types/llm';
 export type SidebarTabKey = 'chat' | 'market';
 
 export const DEFAULT_SETTINGS: ConfigSettings = {
-  accessCode: '',
+  OPENAI_API_KEY: '',
   avatar: '',
   compressThreshold: 24,
   enableCompressThreshold: false,
@@ -18,11 +18,11 @@ export const DEFAULT_SETTINGS: ConfigSettings = {
   maxTokens: 2000,
   model: LanguageModel.GPT3_5,
   neutralColor: '',
+  password: '',
   presencePenalty: 0,
   primaryColor: '',
   temperature: 0.5,
   themeMode: 'auto',
-  token: '',
   topP: 1,
 };
 
4 changes: 2 additions & 2 deletions src/types/exportConfig.ts
@@ -9,7 +9,7 @@ import { Locales } from './locale';
  * Configuration settings
  */
 export interface ConfigSettings {
-  accessCode: string;
+  OPENAI_API_KEY: string;
   avatar: string;
   compressThreshold: number;
   enableCompressThreshold: boolean;
@@ -23,11 +23,11 @@ export interface ConfigSettings {
   maxTokens: number;
   model: LanguageModel;
   neutralColor: NeutralColors | '';
+  password: string;
   presencePenalty: number;
   primaryColor: PrimaryColors | '';
   temperature: number;
   themeMode: ThemeMode;
-  token: string;
   topP: number;
 }
 
