From 28ee40c1a1185c56857c80f9e86af3d8e64572a8 Mon Sep 17 00:00:00 2001
From: Rubu Jam
Date: Tue, 14 May 2024 06:24:31 +0000
Subject: [PATCH 1/2] =?UTF-8?q?=F0=9F=90=9B=20fix:=20Fix=20DeepSeek=20usin?=
 =?UTF-8?q?g=20wrong=20model=20ID?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../(main)/settings/llm/DeepSeek/index.tsx    |  2 +-
 src/app/api/chat/agentRuntime.ts              |  2 +
 src/libs/agent-runtime/deepseek/index.test.ts | 73 ++++++++++++++++++-
 3 files changed, 73 insertions(+), 4 deletions(-)

diff --git a/src/app/(main)/settings/llm/DeepSeek/index.tsx b/src/app/(main)/settings/llm/DeepSeek/index.tsx
index 48888b5f139e..0c663c0dfb1c 100644
--- a/src/app/(main)/settings/llm/DeepSeek/index.tsx
+++ b/src/app/(main)/settings/llm/DeepSeek/index.tsx
@@ -10,7 +10,7 @@ import ProviderConfig from '../components/ProviderConfig';
 
 const DeepSeekProvider = memo(() => {
   return (
     }

diff --git a/src/app/api/chat/agentRuntime.ts b/src/app/api/chat/agentRuntime.ts
index 84c5243ed202..4d2bdbde3b8a 100644
--- a/src/app/api/chat/agentRuntime.ts
+++ b/src/app/api/chat/agentRuntime.ts
@@ -139,7 +139,9 @@ const getLlmOptionsFromPayload = (provider: string, payload: JWTPayload) => {
     }
     case ModelProvider.DeepSeek: {
       const { DEEPSEEK_API_KEY } = getServerConfig();
+
       const apiKey = apiKeyManager.pick(payload?.apiKey || DEEPSEEK_API_KEY);
+
       return { apiKey };
     }
     case ModelProvider.TogetherAI: {

diff --git a/src/libs/agent-runtime/deepseek/index.test.ts b/src/libs/agent-runtime/deepseek/index.test.ts
index 0769a296c4dd..6b98f9fdfe27 100644
--- a/src/libs/agent-runtime/deepseek/index.test.ts
+++ b/src/libs/agent-runtime/deepseek/index.test.ts
@@ -44,6 +44,74 @@ describe('LobeDeepSeekAI', () => {
   });
 
   describe('chat', () => {
+    it('should call chat with corresponding options', async () => {
+      // Arrange
+      const mockStream = new ReadableStream();
+      const mockResponse = Promise.resolve(mockStream);
+
+      (instance['client'].chat.completions.create as Mock).mockResolvedValue(mockResponse);
+
+      // Act
+      const result = await instance.chat({
+        max_tokens: 1024,
+        messages: [{ content: 'Hello', role: 'user' }],
+        model: 'deepseek-chat',
+        temperature: 0.7,
+      });
+
+      // Assert
+      expect(instance['client'].chat.completions.create).toHaveBeenCalledWith(
+        {
+          max_tokens: 1024,
+          stream: true,
+          messages: [{ content: 'Hello', role: 'user' }],
+          model: 'deepseek-chat',
+          temperature: 0.7,
+        },
+        { headers: { Accept: '*/*' } },
+      );
+      expect(result).toBeInstanceOf(Response);
+    });
+
+    describe('handlePayload option', () => {
+      it('should set stream to false when payload contains tools', async () => {
+        const mockCreateMethod = vi
+          .spyOn(instance['client'].chat.completions, 'create')
+          .mockResolvedValue({
+            id: 'chatcmpl-8xDx5AETP8mESQN7UB30GxTN2H1SO',
+            object: 'chat.completion',
+            created: 1709125675,
+            model: 'deepseek-chat',
+            system_fingerprint: 'fp_86156a94a0',
+            choices: [
+              {
+                index: 0,
+                message: { role: 'assistant', content: 'hello' },
+                logprobs: null,
+                finish_reason: 'stop',
+              },
+            ],
+          });
+
+        await instance.chat({
+          messages: [{ content: 'Hello', role: 'user' }],
+          model: 'deepseek-chat',
+          temperature: 0,
+          tools: [
+            {
+              type: 'function',
+              function: { name: 'tool1', description: '', parameters: {} },
+            },
+          ],
+        });
+
+        expect(mockCreateMethod).toHaveBeenCalledWith(
+          expect.objectContaining({ stream: false }),
+          expect.anything(),
+        );
+      });
+    });
+
     describe('Error', () => {
       it('should return OpenAIBizError with an openai error response when OpenAI.APIError is thrown', async () => {
         // Arrange
@@ -81,7 +149,7 @@ describe('LobeDeepSeekAI', () => {
         }
       });
 
-      it('should throw AgentRuntimeError with NoOpenAIAPIKey if no apiKey is provided', async () => {
+      it('should throw AgentRuntimeError with InvalidOpenAIAPIKey if no apiKey is provided', async () => {
         try {
           new LobeDeepSeekAI({});
         } catch (e) {
@@ -210,7 +278,7 @@ describe('LobeDeepSeekAI', () => {
     });
 
     describe('DEBUG', () => {
-      it('should call debugStream and return StreamingTextResponse when DEBUG_DEEPSEEK_CHAT_COMPLETION is 1', async () => {
+      it('should call debugStream and return StreamingTextResponse when DEBUG_OPENROUTER_CHAT_COMPLETION is 1', async () => {
         // Arrange
         const mockProdStream = new ReadableStream() as any; // mocked prod stream
         const mockDebugStream = new ReadableStream({
@@ -239,7 +307,6 @@ describe('LobeDeepSeekAI', () => {
         await instance.chat({
           messages: [{ content: 'Hello', role: 'user' }],
           model: 'deepseek-chat',
-          stream: true,
           temperature: 0,
         });
 

From cb775175e88ec9d3461911b739a05a6eba42ae62 Mon Sep 17 00:00:00 2001
From: Rubu Jam
Date: Tue, 14 May 2024 06:35:01 +0000
Subject: [PATCH 2/2] =?UTF-8?q?=F0=9F=90=9B=20fix:=20Fix=20DeepSeek=20usin?=
 =?UTF-8?q?g=20wrong=20model=20ID?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/libs/agent-runtime/deepseek/index.test.ts | 73 +------------------
 1 file changed, 3 insertions(+), 70 deletions(-)

diff --git a/src/libs/agent-runtime/deepseek/index.test.ts b/src/libs/agent-runtime/deepseek/index.test.ts
index 6b98f9fdfe27..0769a296c4dd 100644
--- a/src/libs/agent-runtime/deepseek/index.test.ts
+++ b/src/libs/agent-runtime/deepseek/index.test.ts
@@ -44,74 +44,6 @@ describe('LobeDeepSeekAI', () => {
   });
 
   describe('chat', () => {
-    it('should call chat with corresponding options', async () => {
-      // Arrange
-      const mockStream = new ReadableStream();
-      const mockResponse = Promise.resolve(mockStream);
-
-      (instance['client'].chat.completions.create as Mock).mockResolvedValue(mockResponse);
-
-      // Act
-      const result = await instance.chat({
-        max_tokens: 1024,
-        messages: [{ content: 'Hello', role: 'user' }],
-        model: 'deepseek-chat',
-        temperature: 0.7,
-      });
-
-      // Assert
-      expect(instance['client'].chat.completions.create).toHaveBeenCalledWith(
-        {
-          max_tokens: 1024,
-          stream: true,
-          messages: [{ content: 'Hello', role: 'user' }],
-          model: 'deepseek-chat',
-          temperature: 0.7,
-        },
-        { headers: { Accept: '*/*' } },
-      );
-      expect(result).toBeInstanceOf(Response);
-    });
-
-    describe('handlePayload option', () => {
-      it('should set stream to false when payload contains tools', async () => {
-        const mockCreateMethod = vi
-          .spyOn(instance['client'].chat.completions, 'create')
-          .mockResolvedValue({
-            id: 'chatcmpl-8xDx5AETP8mESQN7UB30GxTN2H1SO',
-            object: 'chat.completion',
-            created: 1709125675,
-            model: 'deepseek-chat',
-            system_fingerprint: 'fp_86156a94a0',
-            choices: [
-              {
-                index: 0,
-                message: { role: 'assistant', content: 'hello' },
-                logprobs: null,
-                finish_reason: 'stop',
-              },
-            ],
-          });
-
-        await instance.chat({
-          messages: [{ content: 'Hello', role: 'user' }],
-          model: 'deepseek-chat',
-          temperature: 0,
-          tools: [
-            {
-              type: 'function',
-              function: { name: 'tool1', description: '', parameters: {} },
-            },
-          ],
-        });
-
-        expect(mockCreateMethod).toHaveBeenCalledWith(
-          expect.objectContaining({ stream: false }),
-          expect.anything(),
-        );
-      });
-    });
-
     describe('Error', () => {
       it('should return OpenAIBizError with an openai error response when OpenAI.APIError is thrown', async () => {
         // Arrange
@@ -149,7 +81,7 @@ describe('LobeDeepSeekAI', () => {
         }
       });
 
-      it('should throw AgentRuntimeError with InvalidOpenAIAPIKey if no apiKey is provided', async () => {
+      it('should throw AgentRuntimeError with NoOpenAIAPIKey if no apiKey is provided', async () => {
         try {
           new LobeDeepSeekAI({});
         } catch (e) {
@@ -278,7 +210,7 @@ describe('LobeDeepSeekAI', () => {
     });
 
     describe('DEBUG', () => {
-      it('should call debugStream and return StreamingTextResponse when DEBUG_OPENROUTER_CHAT_COMPLETION is 1', async () => {
+      it('should call debugStream and return StreamingTextResponse when DEBUG_DEEPSEEK_CHAT_COMPLETION is 1', async () => {
         // Arrange
         const mockProdStream = new ReadableStream() as any; // mocked prod stream
         const mockDebugStream = new ReadableStream({
@@ -307,6 +239,7 @@ describe('LobeDeepSeekAI', () => {
         await instance.chat({
           messages: [{ content: 'Hello', role: 'user' }],
          model: 'deepseek-chat',
+          stream: true,
           temperature: 0,
         });