From a180fc37a1896ca8d2c0983ee4f9c920a106b6ce Mon Sep 17 00:00:00 2001
From: Shareef P
Date: Fri, 1 Dec 2023 19:36:39 +0530
Subject: [PATCH] onToken in llama_cpp llm

---
 langchain/src/llms/llama_cpp.ts | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/langchain/src/llms/llama_cpp.ts b/langchain/src/llms/llama_cpp.ts
index 5081bde04008..f2d6518ffd2b 100644
--- a/langchain/src/llms/llama_cpp.ts
+++ b/langchain/src/llms/llama_cpp.ts
@@ -72,10 +72,11 @@ export class LlamaCpp extends LLM {
   /** @ignore */
   async _call(
     prompt: string,
-    _options?: this["ParsedCallOptions"]
+    options?: this["ParsedCallOptions"]
   ): Promise<string> {
     try {
       const promptOptions = {
+        onToken: options?.onToken,
         maxTokens: this?.maxTokens,
         temperature: this?.temperature,
         topK: this?.topK,
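
With this change, a per-token callback supplied in the call options is forwarded into the prompt options passed to node-llama-cpp. Below is a minimal usage sketch, not part of the patch: the model path is a placeholder, and it assumes the call options accept `onToken` with node-llama-cpp's callback shape (an array of generated token IDs).

```typescript
import { LlamaCpp } from "langchain/llms/llama_cpp";

// Placeholder path to a local GGUF model file.
const llamaPath = "/path/to/your/model.gguf";

const model = new LlamaCpp({ modelPath: llamaPath, temperature: 0.5 });

const response = await model.call("Tell me a short joke.", {
  // Forwarded by _call into promptOptions; invoked as tokens are generated.
  onToken: (tokens: number[]) => {
    console.log(`generated ${tokens.length} token(s)`);
  },
});

console.log(response);
```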