Skip to content

Commit

Permalink
fix(groq-adapter): update token size, example usage, and dependencies
Browse files Browse the repository at this point in the history
- Correct the llama-3.1 token limit (131K → 128K tokens)
- Update example usage to use the generate function
- Update dependencies
  • Loading branch information
Mahmoud Abughali committed Sep 12, 2024
1 parent 9e936bc commit 6f2cd70
Show file tree
Hide file tree
Showing 4 changed files with 6 additions and 4 deletions.
4 changes: 2 additions & 2 deletions examples/llms/providers/groq.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,10 @@ const llm = new GroqChatLLM({
});

console.info("Meta", await llm.meta());
const response = await llm.stream([
const response = await llm.generate([
BaseMessage.of({
role: "user",
text: "Hello world!",
}),
]);
console.info(response);
console.info(response.getTextContent());
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,6 @@
"dirty-json": "0.9.2",
"duck-duck-scrape": "^2.2.5",
"fast-xml-parser": "^4.4.1",
"groq-sdk": "^0.7.0",
"header-generator": "^2.1.54",
"joplin-turndown-plugin-gfm": "^1.0.12",
"mathjs": "^13.1.1",
Expand All @@ -111,6 +110,7 @@
"@langchain/community": "~0.2.28",
"@langchain/core": "~0.2.27",
"@langchain/langgraph": "~0.0.34",
"groq-sdk": "^0.7.0",
"ollama": "^0.5.8",
"openai": "^4.56.0",
"openai-chat-tokens": "^0.2.8"
Expand Down Expand Up @@ -140,6 +140,7 @@
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-unused-imports": "^4.1.3",
"glob": "^11.0.0",
"groq-sdk": "^0.7.0",
"husky": "^9.1.5",
"langchain": "~0.2.16",
"lint-staged": "^15.2.9",
Expand Down
2 changes: 1 addition & 1 deletion src/adapters/groq/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,7 @@ export class GroqChatLLM extends ChatLLM<ChatGroqOutput> {
} else if (this.modelId.includes("llava-v1.5")) {
return { tokenLimit: 4 * 1024 };
} else if (this.modelId.includes("llama-3.1-70b") || this.modelId.includes("llama-3.1-8b")) {
return { tokenLimit: 131 * 1024 };
return { tokenLimit: 128 * 1024 };
} else if (this.modelId.includes("mixtral-8x7b")) {
return { tokenLimit: 32 * 1024 };
}
Expand Down
1 change: 1 addition & 0 deletions yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -2590,6 +2590,7 @@ __metadata:
"@langchain/community": ~0.2.28
"@langchain/core": ~0.2.27
"@langchain/langgraph": ~0.0.34
groq-sdk: ^0.7.0
ollama: ^0.5.8
openai: ^4.56.0
openai-chat-tokens: ^0.2.8
Expand Down

0 comments on commit 6f2cd70

Please sign in to comment.