From 32b4efa1edaa52f377a50aca47f5020d25af3de6 Mon Sep 17 00:00:00 2001
From: Brace Sproul
Date: Mon, 27 Nov 2023 10:10:02 -0800
Subject: [PATCH 1/7] Add more tests for agent streaming

---
 langchain/src/agents/tests/agent.int.test.ts | 52 +++++++++++++++++++-
 1 file changed, 51 insertions(+), 1 deletion(-)

diff --git a/langchain/src/agents/tests/agent.int.test.ts b/langchain/src/agents/tests/agent.int.test.ts
index c600768cecc3..6ebdfb7f6cc6 100644
--- a/langchain/src/agents/tests/agent.int.test.ts
+++ b/langchain/src/agents/tests/agent.int.test.ts
@@ -306,7 +306,7 @@ test("Run tool web-browser", async () => {
   ];
 
   const executor = await initializeAgentExecutorWithOptions(tools, model, {
-    agentType: "zero-shot-react-description",
+    agentType: "chat-conversational-react-description",
     returnIntermediateSteps: true,
   });
   console.log("Loaded agent.");
@@ -377,3 +377,53 @@ test("Agent can stream", async () => {
   // the last tool used should be the web-browser
   expect(toolsUsed?.[toolsUsed.length - 1]).toEqual("web-browser");
 });
+
+test.only("Agent can stream with chat messages", async () => {
+  const model = new ChatOpenAI({
+    temperature: 0,
+    modelName: "gpt-4-1106-preview",
+    streaming: true,
+  });
+  const tools = [
+    new SerpAPI(process.env.SERPAPI_API_KEY, {
+      location: "Austin,Texas,United States",
+      hl: "en",
+      gl: "us",
+    }),
+    new Calculator(),
+    new WebBrowser({ model, embeddings: new OpenAIEmbeddings() }),
+  ];
+
+  const executor = await initializeAgentExecutorWithOptions(tools, model, {
+    agentType: "chat-conversational-react-description",
+    returnIntermediateSteps: true,
+  });
+  console.log("Loaded agent.");
+
+  const input = `What is the word of the day on merriam webster`;
+  console.log(`Executing with input "${input}"...`);
+
+  const result = await executor.stream({ input, chat_history: [] });
+  let streamIters = 0;
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  let finalResponse: any;
+  for await (const item of result) {
+    streamIters += 1;
+    console.log("Stream item:", item);
+    // each stream contains the previous steps,
+    // so we can overwrite on each stream.
+    finalResponse = item;
+  }
+
+  console.log("__finalResponse__", finalResponse);
+
+  expect("intermediateSteps" in finalResponse).toBeTruthy();
+  expect("output" in finalResponse).toBeTruthy();
+
+  expect(streamIters).toBeGreaterThan(1);
+  const toolsUsed: Array<string> = finalResponse.intermediateSteps.map(
+    (step: AgentStep) => step.action.tool
+  );
+  // the last tool used should be the web-browser
+  expect(toolsUsed?.[toolsUsed.length - 1]).toEqual("web-browser");
+});

From e39d79e0c45c0f0ab43c5443d01cd141aa84c83d Mon Sep 17 00:00:00 2001
From: Brace Sproul
Date: Mon, 27 Nov 2023 10:12:57 -0800
Subject: [PATCH 2/7] add memory

---
 langchain/src/agents/tests/agent.int.test.ts | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/langchain/src/agents/tests/agent.int.test.ts b/langchain/src/agents/tests/agent.int.test.ts
index 6ebdfb7f6cc6..2fcc88844437 100644
--- a/langchain/src/agents/tests/agent.int.test.ts
+++ b/langchain/src/agents/tests/agent.int.test.ts
@@ -13,6 +13,8 @@ import { ChatOpenAI } from "../../chat_models/openai.js";
 import { RunnableSequence } from "../../schema/runnable/base.js";
 import { OutputParserException } from "../../schema/output_parser.js";
 import { AIMessage, AgentStep } from "../../schema/index.js";
+import { BufferMemory } from "../../memory/buffer_memory.js";
+import { ChatMessageHistory } from "../../memory/index.js";
 
 test("Run agent from hub", async () => {
   const model = new OpenAI({ temperature: 0, modelName: "text-babbage-001" });
@@ -393,10 +395,18 @@ test.only("Agent can stream with chat messages", async () => {
     new Calculator(),
     new WebBrowser({ model, embeddings: new OpenAIEmbeddings() }),
   ];
+  const memory = new BufferMemory({
+    chatHistory: new ChatMessageHistory([]),
+    memoryKey: 'chat_history', // this is the key expected by https://github.com/langchain-ai/langchainjs/blob/a13a8969345b0f149c1ca4a120d63508b06c52a5/langchain/src/agents/initialize.ts#L166
+    inputKey: 'input',
+    outputKey: 'output',
+    returnMessages: true,
+  });
 
   const executor = await initializeAgentExecutorWithOptions(tools, model, {
     agentType: "chat-conversational-react-description",
     returnIntermediateSteps: true,
+    memory,
   });
   console.log("Loaded agent.");
 

From 8292b9fea1e70b9e3f20a718f85d8b12853caec0 Mon Sep 17 00:00:00 2001
From: Brace Sproul
Date: Mon, 27 Nov 2023 10:58:39 -0800
Subject: [PATCH 3/7] unfocus test

---
 langchain/src/agents/tests/agent.int.test.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/langchain/src/agents/tests/agent.int.test.ts b/langchain/src/agents/tests/agent.int.test.ts
index 2fcc88844437..b1c0e469d15c 100644
--- a/langchain/src/agents/tests/agent.int.test.ts
+++ b/langchain/src/agents/tests/agent.int.test.ts
@@ -380,7 +380,7 @@ test("Agent can stream", async () => {
   expect(toolsUsed?.[toolsUsed.length - 1]).toEqual("web-browser");
 });
 
-test.only("Agent can stream with chat messages", async () => {
+test("Agent can stream with chat messages", async () => {
   const model = new ChatOpenAI({
     temperature: 0,
     modelName: "gpt-4-1106-preview",
     streaming: true,

From 7655a8ae12c6708c81d3f6e2fe822a4ca39c98d4 Mon Sep 17 00:00:00 2001
From: Brace Sproul
Date: Mon, 27 Nov 2023 11:00:12 -0800
Subject: [PATCH 4/7] chore: lint files

---
 langchain/src/agents/tests/agent.int.test.ts | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/langchain/src/agents/tests/agent.int.test.ts b/langchain/src/agents/tests/agent.int.test.ts
index b1c0e469d15c..eaeada1022a3 100644
--- a/langchain/src/agents/tests/agent.int.test.ts
+++ b/langchain/src/agents/tests/agent.int.test.ts
@@ -397,9 +397,9 @@ test("Agent can stream with chat messages", async () => {
   ];
   const memory = new BufferMemory({
     chatHistory: new ChatMessageHistory([]),
-    memoryKey: 'chat_history', // this is the key expected by https://github.com/langchain-ai/langchainjs/blob/a13a8969345b0f149c1ca4a120d63508b06c52a5/langchain/src/agents/initialize.ts#L166
-    inputKey: 'input',
-    outputKey: 'output',
+    memoryKey: "chat_history", // this is the key expected by https://github.com/langchain-ai/langchainjs/blob/a13a8969345b0f149c1ca4a120d63508b06c52a5/langchain/src/agents/initialize.ts#L166
+    inputKey: "input",
+    outputKey: "output",
     returnMessages: true,
   });
 

From c0517249b9ffb6f30bbf1e21dfb3a70ed9f769a2 Mon Sep 17 00:00:00 2001
From: Brace Sproul
Date: Mon, 27 Nov 2023 12:51:32 -0800
Subject: [PATCH 5/7] cr

---
 langchain/src/agents/tests/agent.int.test.ts | 48 ++++++++++----------
 1 file changed, 24 insertions(+), 24 deletions(-)

diff --git a/langchain/src/agents/tests/agent.int.test.ts b/langchain/src/agents/tests/agent.int.test.ts
index eaeada1022a3..a37c0ef01bd3 100644
--- a/langchain/src/agents/tests/agent.int.test.ts
+++ b/langchain/src/agents/tests/agent.int.test.ts
@@ -337,18 +337,13 @@ test("Agent can stream", async () => {
     streaming: true,
   });
   const tools = [
-    new SerpAPI(process.env.SERPAPI_API_KEY, {
-      location: "Austin,Texas,United States",
-      hl: "en",
-      gl: "us",
-    }),
     new Calculator(),
     new WebBrowser({ model, embeddings: new OpenAIEmbeddings() }),
   ];
 
   const executor = await initializeAgentExecutorWithOptions(tools, model, {
     agentType: "zero-shot-react-description",
-    returnIntermediateSteps: true,
+    returnIntermediateSteps: false,
   });
   console.log("Loaded agent.");
 
@@ -358,22 +353,29 @@ test("Agent can stream", async () => {
   const result = await executor.stream({ input });
   let streamIters = 0;
   // eslint-disable-next-line @typescript-eslint/no-explicit-any
-  let finalResponse: any;
+  const finalResponse: any = [];
   for await (const item of result) {
     streamIters += 1;
     console.log("Stream item:", item);
-    // each stream contains the previous steps,
-    // so we can overwrite on each stream.
-    finalResponse = item;
+    // each stream does NOT contain the previous steps,
+    // because returnIntermediateSteps is false so we
+    // push each new stream item to the array.
+    finalResponse.push(item);
   }
 
-  console.log("__finalResponse__", finalResponse);
+  // The last item should contain "output"
+  expect("output" in finalResponse[finalResponse.length - 1]).toBeTruthy();
 
-  expect("intermediateSteps" in finalResponse).toBeTruthy();
-  expect("output" in finalResponse).toBeTruthy();
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  const intermediateSteps = finalResponse.flatMap((item: any) => {
+    if ("intermediateSteps" in item) {
+      return item.intermediateSteps;
+    }
+    return [];
+  })
 
   expect(streamIters).toBeGreaterThan(1);
-  const toolsUsed: Array<string> = finalResponse.intermediateSteps.map(
+  const toolsUsed: Array<string> = intermediateSteps.map(
     (step: AgentStep) => step.action.tool
   );
   // the last tool used should be the web-browser
@@ -387,17 +389,12 @@ test("Agent can stream with chat messages", async () => {
     streaming: true,
   });
   const tools = [
-    new SerpAPI(process.env.SERPAPI_API_KEY, {
-      location: "Austin,Texas,United States",
-      hl: "en",
-      gl: "us",
-    }),
     new Calculator(),
     new WebBrowser({ model, embeddings: new OpenAIEmbeddings() }),
   ];
   const memory = new BufferMemory({
     chatHistory: new ChatMessageHistory([]),
-    memoryKey: "chat_history", // this is the key expected by https://github.com/langchain-ai/langchainjs/blob/a13a8969345b0f149c1ca4a120d63508b06c52a5/langchain/src/agents/initialize.ts#L166
+    memoryKey: "chat_history",
     inputKey: "input",
     outputKey: "output",
     returnMessages: true,
@@ -410,7 +407,7 @@ test("Agent can stream with chat messages", async () => {
   });
   console.log("Loaded agent.");
 
-  const input = `What is the word of the day on merriam webster`;
+  const input = `What is the word of the day on merriam webster, and what is the sum of all letter indices (relative to the english alphabet) in the word?`;
   console.log(`Executing with input "${input}"...`);
 
   const result = await executor.stream({ input, chat_history: [] });
@@ -420,7 +417,8 @@ test("Agent can stream with chat messages", async () => {
   for await (const item of result) {
     streamIters += 1;
     console.log("Stream item:", item);
-    // each stream contains the previous steps,
+    // each stream contains the previous steps
+    // because returnIntermediateSteps is true),
     // so we can overwrite on each stream.
     finalResponse = item;
   }
@@ -434,6 +432,8 @@ test("Agent can stream with chat messages", async () => {
   const toolsUsed: Array<string> = finalResponse.intermediateSteps.map(
     (step: AgentStep) => step.action.tool
   );
-  // the last tool used should be the web-browser
-  expect(toolsUsed?.[toolsUsed.length - 1]).toEqual("web-browser");
+  // the first tool used should be web-browser, and last should be calculator.
+  // This can be flaky so if the test is failing, inspect these conditions first.
+  expect(toolsUsed?.[toolsUsed.length - 1]).toEqual("calculator");
+  expect(toolsUsed?.[0]).toEqual("web-browser");
 });

From c4e97096a86437ccfa1570e72a4dbbea54e9a8a4 Mon Sep 17 00:00:00 2001
From: Brace Sproul
Date: Mon, 27 Nov 2023 13:02:10 -0800
Subject: [PATCH 6/7] chore: lint files

---
 langchain/src/agents/tests/agent.int.test.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/langchain/src/agents/tests/agent.int.test.ts b/langchain/src/agents/tests/agent.int.test.ts
index a37c0ef01bd3..ad501539ccf2 100644
--- a/langchain/src/agents/tests/agent.int.test.ts
+++ b/langchain/src/agents/tests/agent.int.test.ts
@@ -372,7 +372,7 @@ test("Agent can stream", async () => {
       return item.intermediateSteps;
     }
     return [];
-  })
+  });
 
   expect(streamIters).toBeGreaterThan(1);
   const toolsUsed: Array<string> = intermediateSteps.map(

From 05f304c34dfc7d77eb967871b87a873dfc14066a Mon Sep 17 00:00:00 2001
From: Brace Sproul
Date: Mon, 27 Nov 2023 15:56:20 -0800
Subject: [PATCH 7/7] Update langchain/src/agents/tests/agent.int.test.ts

---
 langchain/src/agents/tests/agent.int.test.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/langchain/src/agents/tests/agent.int.test.ts b/langchain/src/agents/tests/agent.int.test.ts
index ad501539ccf2..5b37737f0649 100644
--- a/langchain/src/agents/tests/agent.int.test.ts
+++ b/langchain/src/agents/tests/agent.int.test.ts
@@ -308,7 +308,7 @@ test("Run tool web-browser", async () => {
   ];
 
   const executor = await initializeAgentExecutorWithOptions(tools, model, {
-    agentType: "chat-conversational-react-description",
+    agentType: "zero-shot-react-description",
     returnIntermediateSteps: true,
   });
   console.log("Loaded agent.");