From a8fa111a88dbcdae304525d7a4ca6f8ed70bf928 Mon Sep 17 00:00:00 2001
From: MICHAEL DESMOND
Date: Thu, 12 Dec 2024 15:38:02 -0500
Subject: [PATCH] fix: resolve latest feedback

Signed-off-by: MICHAEL DESMOND
---
 examples/agents/granite/README.md           | 32 +++++++++------------
 examples/agents/granite/granite_bee.ts      | 20 ++++---------
 examples/agents/granite/granite_wiki_bee.ts | 24 ++++++----------
 src/agents/bee/runners/default/runner.ts    |  2 +-
 src/agents/bee/runners/granite/runner.ts    | 19 ++++++------
 5 files changed, 38 insertions(+), 59 deletions(-)

diff --git a/examples/agents/granite/README.md b/examples/agents/granite/README.md
index ce76999b..dda7a069 100644
--- a/examples/agents/granite/README.md
+++ b/examples/agents/granite/README.md
@@ -52,24 +52,22 @@ The [granite_bee](/examples/agents/granite/granite_bee.ts) example agent is set
    OLLAMA_HOST={http://0.0.0.0:11434}
    ```
 
-1. Run the [granite_bee](/examples/agents/granite/granite_bee.ts) example:
+1. Run the [granite_bee](/examples/agents/granite/granite_bee.ts) agent:
 
    ```shell
-   yarn run start examples/agents/granite/granite_bee.ts
+   yarn run start examples/agents/granite/granite_bee.ts <<< "What is the current weather in London?"
    ```
 
-   This will show the various stages of the agent running and ultimately deliver an answer similar to the following:
+   This will show the various stages of the agent running (reasoning, tool calling, etc.) and ultimately deliver an answer similar to the following:
 
    > Agent 🤖 : The current weather in London is 13.3°C with no rain. The maximum temperature for today is expected to be 13.5°C.
 
-   The default prompt is the question `What is the current weather in London?`
-
-   You can provide other prompts as follows:
+   You can try out other prompts as follows:
 
    ```shell
-   yarn run start examples/agents/granite/granite_bee.ts <<< 'Who is the president of the USA?'
-   yarn run start examples/agents/granite/granite_bee.ts <<< 'What is the spanish word for dog?'
-   yarn run start examples/agents/granite/granite_bee.ts <<< 'What does the alias command do in the BASH shell?'
+   yarn run start examples/agents/granite/granite_bee.ts <<< "Who is the current CEO of IBM?"
+   yarn run start examples/agents/granite/granite_bee.ts <<< "What is the Spanish and French word for dog?"
+   yarn run start examples/agents/granite/granite_bee.ts <<< "What does the alias command do in the BASH shell?"
    yarn run start examples/agents/granite/granite_bee.ts <<< "What’s the largest technology company by market capitalization right now?"
    yarn run start examples/agents/granite/granite_bee.ts <<< "What’s the weather going to be like tomorrow in Sydney?"
    ```
@@ -94,24 +92,22 @@ ollama pull nomic-embed-text
 ollama serve
 ```
 
-Run the [granite_wiki_bee](/examples/agents/granite/granite_wiki_bee.ts) example:
+Run the [granite_wiki_bee](/examples/agents/granite/granite_wiki_bee.ts) agent:
 
 ```shell
-yarn run start examples/agents/granite/granite_wiki_bee.ts
+yarn run start examples/agents/granite/granite_wiki_bee.ts <<< "Who were the authors of the paper 'Attention is all you need' and how many citations does it have?"
 ```
 
-You will see the agent running a react pattern and producing a final answer similar to the following:
+You will see the agent reasoning, calling the WikipediaTool and producing a final answer similar to the following:
 
 > Agent 🤖 : The authors of the paper 'Attention is all you need' are Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit, Llion Jones, Aidan Gomez, Lukasz Kaiser, and Illia Polosukhin. The paper has been cited more than 100,000 times as of 2024.
 
-The default prompt is the question `Who were the authors of the paper 'Attention is all you need' and how many citations does it have?`
-
-You can provide other prompts as follows:
+You can try out other prompts as follows:
 
 ```shell
-yarn run start examples/agents/granite/granite_wiki_bee.ts <<< 'When was the collapse of the Roman Empire?'
-yarn run start examples/agents/granite/granite_wiki_bee.ts <<< 'What is the Great Barrier Reef?'
-yarn run start examples/agents/granite/granite_wiki_bee.ts <<< 'Where is IBM headquartered?'
+yarn run start examples/agents/granite/granite_wiki_bee.ts <<< "When was the collapse of the Roman Empire?"
+yarn run start examples/agents/granite/granite_wiki_bee.ts <<< "What is the Great Barrier Reef?"
+yarn run start examples/agents/granite/granite_wiki_bee.ts <<< "Where is IBM headquartered?"
 ```
 
 > [!NOTE]
diff --git a/examples/agents/granite/granite_bee.ts b/examples/agents/granite/granite_bee.ts
index 112dfb1e..ac370482 100644
--- a/examples/agents/granite/granite_bee.ts
+++ b/examples/agents/granite/granite_bee.ts
@@ -15,8 +15,7 @@ import { Ollama } from "ollama";
 import OpenAI from "openai";
 import { z } from "zod";
 import * as process from "node:process";
-import fs from "node:fs";
-import pc from "picocolors";
+import { createConsoleReader } from "examples/helpers/io.js";
 
 const Providers = {
   WATSONX: "watsonx",
@@ -75,13 +74,6 @@ function getChatLLM(provider?: Provider): ChatLLM {
   return factory();
 }
 
-function getPrompt(fallback: string) {
-  if (process.stdin.isTTY) {
-    return fallback;
-  }
-  return fs.readFileSync(process.stdin.fd).toString().trim() || fallback;
-}
-
 const llm = getChatLLM();
 const agent = new BeeAgent({
   llm,
@@ -89,10 +81,10 @@ const agent = new BeeAgent({
   tools: [new OpenMeteoTool(), new DuckDuckGoSearchTool({ maxResults: 3 })],
 });
 
-try {
-  const prompt = getPrompt(`What is the current weather in London?`);
-  console.log(pc.blue(`User 👤:`), prompt);
+const reader = createConsoleReader();
 
+try {
+  const prompt = await reader.prompt();
   const response = await agent
     .run(
       { prompt },
@@ -106,10 +98,10 @@ try {
     )
     .observe((emitter) => {
       emitter.on("update", (data) => {
-        console.log(pc.gray(`Agent 🤖 (${data.update.key}): ${data.update.value.trim()}`));
+        reader.write(`Agent (${data.update.key}) 🤖 : `, data.update.value.trim());
       });
     });
-  console.log(pc.red(`Agent 🤖:`), response.result.text);
+  reader.write(`Agent 🤖: `, response.result.text);
 } catch (error) {
   console.error(FrameworkError.ensure(error).dump());
 } finally {
diff --git a/examples/agents/granite/granite_wiki_bee.ts b/examples/agents/granite/granite_wiki_bee.ts
index b593799f..41267b9a 100644
--- a/examples/agents/granite/granite_wiki_bee.ts
+++ b/examples/agents/granite/granite_wiki_bee.ts
@@ -4,15 +4,13 @@ import { FrameworkError } from "bee-agent-framework/errors";
 import { TokenMemory } from "bee-agent-framework/memory/tokenMemory";
 import { OllamaChatLLM } from "bee-agent-framework/adapters/ollama/chat";
 import { z } from "zod";
-import * as process from "node:process";
 import { OllamaLLM } from "bee-agent-framework/adapters/ollama/llm";
 import { SimilarityTool } from "bee-agent-framework/tools/similarity";
 import { cosineSimilarityMatrix } from "bee-agent-framework/internals/helpers/math";
 import { WikipediaTool } from "bee-agent-framework/tools/search/wikipedia";
 import { splitString } from "bee-agent-framework/internals/helpers/string";
 import { AnyTool } from "bee-agent-framework/tools/base";
-import pc from "picocolors";
-import fs from "node:fs";
+import { createConsoleReader } from "examples/helpers/io.js";
 
 // Creates a wikipedia tool that supports information retrieval
 function wikipediaRetrivalTool(passageSize: number, overlap: number, maxResults: number): AnyTool {
@@ -76,13 +74,6 @@ function wikipediaRetrivalTool(passageSize: number, overlap: number, maxResults:
   }));
 }
 
-function getPrompt(fallback: string) {
-  if (process.stdin.isTTY) {
-    return fallback;
-  }
-  return fs.readFileSync(process.stdin.fd).toString().trim() || fallback;
-}
-
 // Agent LLM
 const llm = new OllamaChatLLM({
   modelId: "granite3-dense:8b",
@@ -99,11 +90,10 @@ const agent = new BeeAgent({
   tools: [wikipediaRetrivalTool(200, 50, 3)],
 });
 
+const reader = createConsoleReader();
+
 try {
-  const prompt = getPrompt(
-    `Who were the authors of the paper 'Attention is all you need' and how many citations does it have?`,
-  );
-  console.log(pc.blue(`User 👤:`), prompt);
+  const prompt = await reader.prompt();
   const response = await agent
     .run(
       { prompt },
@@ -117,10 +107,12 @@ try {
     )
     .observe((emitter) => {
      emitter.on("update", (data) => {
-        console.log(pc.gray(`Agent 🤖 (${data.update.key}): ${data.update.value.trim()}`));
+        reader.write(`Agent (${data.update.key}) 🤖 : `, data.update.value.trim());
       });
     });
-  console.log(pc.red(`Agent 🤖:`), response.result.text);
+  reader.write(`Agent 🤖: `, response.result.text);
 } catch (error) {
   console.error(FrameworkError.ensure(error).dump());
+} finally {
+  reader.close();
 }
diff --git a/src/agents/bee/runners/default/runner.ts b/src/agents/bee/runners/default/runner.ts
index 10896149..0fb89975 100644
--- a/src/agents/bee/runners/default/runner.ts
+++ b/src/agents/bee/runners/default/runner.ts
@@ -295,7 +295,7 @@ export class DefaultRunner extends BaseRunner {
         text: this.templates.system.render({
           tools: await self.system.variables.tools(),
           instructions: undefined,
-          createdAt: undefined,
+          createdAt: new Date().toISOString(),
         }),
         meta: {
           createdAt: new Date(),
diff --git a/src/agents/bee/runners/granite/runner.ts b/src/agents/bee/runners/granite/runner.ts
index 09f6f70c..80022298 100644
--- a/src/agents/bee/runners/granite/runner.ts
+++ b/src/agents/bee/runners/granite/runner.ts
@@ -72,16 +72,15 @@ export class GraniteRunner extends DefaultRunner {
         await memory.add(
           BaseMessage.of({
             role: "available_tools",
-            text: await (async () =>
-              JSON.stringify(
-                (await this.renderers.system.variables.tools()).map((tool) => ({
-                  name: tool.name,
-                  description: tool.description,
-                  schema: JSON.parse(tool.schema),
-                })),
-                null,
-                4,
-              ))(),
+            text: JSON.stringify(
+              (await this.renderers.system.variables.tools()).map((tool) => ({
+                name: tool.name,
+                description: tool.description,
+                schema: JSON.parse(tool.schema),
+              })),
+              null,
+              4,
+            ),
           }),
           index,
         );
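
Reviewer note (not part of the patch): below is a minimal sketch of the console-reader loop that both examples converge on after this change. It assumes the `createConsoleReader` helper from `examples/helpers/io.js` exposes the `prompt()`, `write()`, and `close()` methods used in the diff; the `BeeAgent` and `OpenMeteoTool` import paths and the `TokenMemory` constructor shape are taken from the wider framework examples and are not shown in this diff.

```ts
// Illustrative sketch only; import paths for BeeAgent/OpenMeteoTool are assumed.
import { BeeAgent } from "bee-agent-framework/agents/bee/agent";
import { TokenMemory } from "bee-agent-framework/memory/tokenMemory";
import { OllamaChatLLM } from "bee-agent-framework/adapters/ollama/chat";
import { OpenMeteoTool } from "bee-agent-framework/tools/weather/openMeteo";
import { FrameworkError } from "bee-agent-framework/errors";
import { createConsoleReader } from "examples/helpers/io.js";

const llm = new OllamaChatLLM({ modelId: "granite3-dense:8b" });
const agent = new BeeAgent({
  llm,
  memory: new TokenMemory({ llm }),
  tools: [new OpenMeteoTool()],
});

// The reader replaces the old getPrompt()/picocolors logging in both examples.
const reader = createConsoleReader();

try {
  // Read the user prompt from stdin (the heredoc shown in the README, or interactive input).
  const prompt = await reader.prompt();
  const response = await agent
    .run({ prompt })
    .observe((emitter) => {
      // Stream intermediate update keys (thought, tool_name, tool_input, ...) as they arrive.
      emitter.on("update", (data) => {
        reader.write(`Agent (${data.update.key}) 🤖 : `, data.update.value.trim());
      });
    });
  reader.write(`Agent 🤖: `, response.result.text);
} catch (error) {
  console.error(FrameworkError.ensure(error).dump());
} finally {
  reader.close();
}
```

Centralizing stdin/stdout handling in the shared reader keeps the two granite examples identical in structure, and the `finally { reader.close(); }` block ensures the process exits cleanly once the run completes or fails.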