From ef09392f6510d4307332e7d8e9ecf77a4abf119a Mon Sep 17 00:00:00 2001
From: Alexandre Bulté
Date: Thu, 18 Jan 2024 10:03:04 +0100
Subject: [PATCH] Allow API key to be set via model.key

This makes the behavior consistent with the OpenAI plugin's, which is
documented at
https://llm.datasette.io/en/stable/python-api.html#basic-prompt-execution
---
 llm_mistral.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llm_mistral.py b/llm_mistral.py
index f66b6db..c5ff86c 100644
--- a/llm_mistral.py
+++ b/llm_mistral.py
@@ -81,7 +81,7 @@ def build_messages(self, prompt, conversation):
         return messages
 
     def execute(self, prompt, stream, response, conversation):
-        key = llm.get_key("", "mistral", "LLM_MISTRAL_KEY")
+        key = llm.get_key("", "mistral", "LLM_MISTRAL_KEY") or getattr(self, "key", None)
         messages = self.build_messages(prompt, conversation)
         response._prompt_json = {"messages": messages}
         body = {
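
For context, a minimal sketch of the Python usage this change is meant to enable, following the pattern documented at the URL above. The model ID "mistral-tiny" and the placeholder key value are illustrative assumptions, not part of the patch.

import llm

# Fetch a Mistral model registered by the plugin; the model ID
# "mistral-tiny" is assumed here purely for illustration.
model = llm.get_model("mistral-tiny")

# With this patch, execute() falls back to the model's key attribute when
# no key has been stored via `llm keys set mistral` and LLM_MISTRAL_KEY
# is unset.
model.key = "sk-..."  # placeholder API key, not a real value

response = model.prompt("Three short names for a pet flamingo")
print(response.text())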