From 99ace8fc7b96de7811c80f22158910d5f5d2b295 Mon Sep 17 00:00:00 2001
From: Matous Havlena
Date: Fri, 13 Sep 2024 22:31:33 +0200
Subject: [PATCH] fix(llm): llama3.1 add trailing new line (#22)

---
 src/adapters/shared/llmChatTemplates.ts | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/adapters/shared/llmChatTemplates.ts b/src/adapters/shared/llmChatTemplates.ts
index 0c60d510..d3ca4d59 100644
--- a/src/adapters/shared/llmChatTemplates.ts
+++ b/src/adapters/shared/llmChatTemplates.ts
@@ -70,6 +70,7 @@ const llama31: LLMChatTemplate = {
 {{assistant}}<|eot_id|>{{/assistant}}{{#ipython}}<|start_header_id|>ipython<|end_header_id|>
 
 {{ipython}}<|eot_id|>{{/ipython}}{{/messages}}<|start_header_id|>assistant<|end_header_id|>
+
 `,
   }),
   messagesToPrompt: messagesToPromptFactory({ ipython: "ipython" }),
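
For context, the one-line change appends an extra newline to the end of the llama31 chat template, so a rendered prompt now finishes with the assistant header followed by a blank line, which is what the Llama 3.1 prompt format expects before the model's reply begins. The TypeScript sketch below is a hypothetical, simplified renderer written for illustration only; renderLlama31Prompt and the ChatMessage shape are made up here and are not the framework's PromptTemplate API, which lives in llmChatTemplates.ts.

// Hypothetical, simplified renderer (illustration only, not the framework's PromptTemplate).
// It mirrors the Llama 3.1 layout used in llmChatTemplates.ts: each message is a header block,
// a blank line, the content, and <|eot_id|>.
type ChatMessage = { role: "system" | "user" | "assistant" | "ipython"; text: string };

function renderLlama31Prompt(messages: ChatMessage[]): string {
  const body = messages
    .map((m) => `<|start_header_id|>${m.role}<|end_header_id|>\n\n${m.text}<|eot_id|>`)
    .join("");
  // Before this patch the template ended with "<|end_header_id|>\n"; the added line makes it
  // "<|end_header_id|>\n\n", i.e. the assistant header plus a blank line before generation starts.
  return `<|begin_of_text|>${body}<|start_header_id|>assistant<|end_header_id|>\n\n`;
}

// Usage: the rendered prompt should now end with the assistant header and a blank line.
const prompt = renderLlama31Prompt([{ role: "user", text: "Hello!" }]);
console.log(prompt.endsWith("<|end_header_id|>\n\n")); // true with the trailing newline applied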