From 4a5afb97a2b05b1a0a451ab15be895e15896a458 Mon Sep 17 00:00:00 2001
From: Michael Dawson
Date: Fri, 11 Oct 2024 10:25:35 -0400
Subject: [PATCH] fix: add info on running with remote ollama

I think running with a remote ollama instance will be a relatively
common use case. Having this little bit of extra info would have saved
me time and avoided me having to hunt through the code and other
examples to figure out how to use my remote instance.

Signed-off-by: Michael Dawson
---
 examples/llms/providers/ollama.ts | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)

diff --git a/examples/llms/providers/ollama.ts b/examples/llms/providers/ollama.ts
index 1d2fef77..cdbbd941 100644
--- a/examples/llms/providers/ollama.ts
+++ b/examples/llms/providers/ollama.ts
@@ -1,6 +1,7 @@
 import { OllamaLLM } from "bee-agent-framework/adapters/ollama/llm";
 import { OllamaChatLLM } from "bee-agent-framework/adapters/ollama/chat";
 import { BaseMessage } from "bee-agent-framework/llms/primitives/message";
+import { Ollama } from "ollama";
 
 {
   console.info("===RAW===");
@@ -40,3 +41,24 @@ import { BaseMessage } from "bee-agent-framework/llms/primitives/message";
   ]);
   console.info(response.finalResult);
 }
+
+{
+  console.info("===REMOTE OLLAMA===");
+  const llm = new OllamaChatLLM({
+    modelId: "llama3.1",
+    client: new Ollama({
+      // use the IP of the server you have ollama running on
+      host: "http://10.1.2.38:11434",
+    }),
+  });
+
+  console.info("Meta", await llm.meta());
+
+  const response = await llm.generate([
+    BaseMessage.of({
+      role: "user",
+      text: "Hello world!",
+    }),
+  ]);
+  console.info(response.finalResult);
+}