Skip to content

Commit

Permalink
πŸ› fix: fix the max token of claude 3 (lobehub#1526)
Browse files Browse the repository at this point in the history
* Update index.ts

* ✅ test: fix test
  • Loading branch information
arvinxx authored Mar 10, 2024
1 parent 192688a commit 222fae3
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 18 deletions.
25 changes: 8 additions & 17 deletions src/libs/agent-runtime/anthropic/index.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@ describe('LobeAnthropicAI', () => {
});

describe('chat', () => {

it('should return a StreamingTextResponse on successful API call', async () => {
const result = await instance.chat({
messages: [{ content: 'Hello', role: 'user' }],
Expand Down Expand Up @@ -64,7 +63,7 @@ describe('LobeAnthropicAI', () => {

// Assert
expect(instance['client'].messages.create).toHaveBeenCalledWith({
max_tokens: 1024,
max_tokens: 4096,
messages: [{ content: 'Hello', role: 'user' }],
model: 'claude-instant-1.2',
stream: true,
Expand Down Expand Up @@ -97,7 +96,7 @@ describe('LobeAnthropicAI', () => {

// Assert
expect(instance['client'].messages.create).toHaveBeenCalledWith({
max_tokens: 1024,
max_tokens: 4096,
messages: [{ content: 'Hello', role: 'user' }],
model: 'claude-instant-1.2',
stream: true,
Expand All @@ -121,9 +120,7 @@ describe('LobeAnthropicAI', () => {
// Act
const result = await instance.chat({
max_tokens: 2048,
messages: [
{ content: 'Hello', role: 'user' },
],
messages: [{ content: 'Hello', role: 'user' }],
model: 'claude-instant-1.2',
temperature: 0.5,
top_p: 1,
Expand All @@ -132,14 +129,12 @@ describe('LobeAnthropicAI', () => {
// Assert
expect(instance['client'].messages.create).toHaveBeenCalledWith({
max_tokens: 2048,
messages: [
{ content: 'Hello', role: 'user' },
],
messages: [{ content: 'Hello', role: 'user' }],
model: 'claude-instant-1.2',
stream: true,
temperature: 0.5,
top_p: 1,
})
});
expect(result).toBeInstanceOf(Response);
});

Expand All @@ -158,9 +153,7 @@ describe('LobeAnthropicAI', () => {
const result = await instance.chat({
frequency_penalty: 0.5, // Unsupported option
max_tokens: 2048,
messages: [
{ content: 'Hello', role: 'user' },
],
messages: [{ content: 'Hello', role: 'user' }],
model: 'claude-instant-1.2',
presence_penalty: 0.5,
temperature: 0.5,
Expand All @@ -170,14 +163,12 @@ describe('LobeAnthropicAI', () => {
// Assert
expect(instance['client'].messages.create).toHaveBeenCalledWith({
max_tokens: 2048,
messages: [
{ content: 'Hello', role: 'user' },
],
messages: [{ content: 'Hello', role: 'user' }],
model: 'claude-instant-1.2',
stream: true,
temperature: 0.5,
top_p: 1,
})
});
expect(result).toBeInstanceOf(Response);
});

Expand Down
2 changes: 1 addition & 1 deletion src/libs/agent-runtime/anthropic/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ export class LobeAnthropicAI implements LobeRuntimeAI {
const user_messages = messages.filter((m) => m.role !== 'system');

const requestParams: Anthropic.MessageCreateParams = {
max_tokens: max_tokens || 1024,
max_tokens: max_tokens || 4096,
messages: this.buildAnthropicMessages(user_messages),
model: model,
stream: true,
Expand Down

0 comments on commit 222fae3

Please sign in to comment.