From 62bb754d1d326120a4635f8a75a2d25574ace576 Mon Sep 17 00:00:00 2001
From: Qi
Date: Sat, 17 Jun 2023 13:55:43 -0700
Subject: [PATCH] [api] return response in a streaming way (#136)

* stream the result back

* add debug hack

* increase the stream interval

* every 5 letters

* refactor

* update system message

* clean up

* Revert "add debug hack"

This reverts commit e7c0dd73c9e5091df62986bc8a6f7d83151d474f.
---
 skyagi-web/src/lib/utils.ts                   | 35 ++++++++++++++++++-
 .../api/send-conversation-message/+server.ts  | 12 +++----
 .../routes/api/send-system-message/+server.ts | 15 +++++---
 3 files changed, 51 insertions(+), 11 deletions(-)

diff --git a/skyagi-web/src/lib/utils.ts b/skyagi-web/src/lib/utils.ts
index b57a02b1..c230b683 100644
--- a/skyagi-web/src/lib/utils.ts
+++ b/skyagi-web/src/lib/utils.ts
@@ -4,4 +4,37 @@ export const checkValidity = (data: any) => {
 	} else {
 		return true;
 	}
-};
\ No newline at end of file
+};
+
+export function sleep(ms: number): Promise<void> {
+	return new Promise((resolve) => setTimeout(resolve, ms));
+}
+
+export async function getResponseStream(metadata: object, respMsg: string) {
+	const encoder = new TextEncoder();
+	const stream = new ReadableStream({
+		async start(controller) {
+			for (const [key, value] of Object.entries(metadata)) {
+				const metaDataMsg = JSON.stringify({[key]: value});
+				controller.enqueue(encoder.encode(metaDataMsg));
+				await sleep(1000); // Sleep for 1 second
+			}
+
+			let totalLen = respMsg.length;
+			let chunkSize = 5, start = 0, end = chunkSize;
+			const interval = setInterval(() => {
+				const encodedData = encoder.encode(respMsg.slice(start, end));
+				controller.enqueue(encodedData);
+				start = end;
+				end = end + chunkSize >= totalLen ?
+					totalLen : end + chunkSize;
+				if (start >= end) {
+					clearInterval(interval);
+					controller.close();
+				}
+			}, 1000);
+		},
+	});
+
+	return stream;
+}
\ No newline at end of file
diff --git a/skyagi-web/src/routes/api/send-conversation-message/+server.ts b/skyagi-web/src/routes/api/send-conversation-message/+server.ts
index fdfd999b..cf61cfc3 100644
--- a/skyagi-web/src/routes/api/send-conversation-message/+server.ts
+++ b/skyagi-web/src/routes/api/send-conversation-message/+server.ts
@@ -1,6 +1,7 @@
 import type { RequestHandler } from './$types';
 import type { Config } from '@sveltejs/adapter-vercel';
 import { GenerativeAgent } from '$lib/agent';
+import { getResponseStream } from '$lib/utils';
 
 // Can switch to the edge func if serverless is not necessary
 export const config: Config = {
@@ -56,12 +57,11 @@ export const PUT = (async ({ request, locals }: { request: Request; locals: App.
 	await agent.addMemory(`${agent.name} observed ${newMessage} and said ${respMsg}`);
 
 	// return
-	const resp = {
+	const respMetaData = {
 		'success': 1,
-		'resp_msg': {
-			'if_continue': ifContinue,
-			'message': respMsg
-		}
+		'if_continue': ifContinue
 	}
-	return new Response(JSON.stringify(resp), { status: 200 });
+
+	const stream = await getResponseStream(respMetaData, respMsg);
+	return new Response(stream);
 }) satisfies RequestHandler;
diff --git a/skyagi-web/src/routes/api/send-system-message/+server.ts b/skyagi-web/src/routes/api/send-system-message/+server.ts
index 9b484f63..c8e34f58 100644
--- a/skyagi-web/src/routes/api/send-system-message/+server.ts
+++ b/skyagi-web/src/routes/api/send-system-message/+server.ts
@@ -1,6 +1,6 @@
 import type { RequestHandler } from './$types';
 import type { Config } from '@sveltejs/adapter-vercel';
-import { checkValidity } from '$lib/utils';
+import { checkValidity, getResponseStream } from '$lib/utils';
 import { AIMessagePromptTemplate, HumanMessagePromptTemplate, ChatPromptTemplate, SystemMessagePromptTemplate } from "langchain/prompts"
 import {
 	LLMChain } from "langchain/chains";
 import { load_llm_from_config } from '$lib/model/model';
@@ -89,7 +89,7 @@ export const PUT = (async ({ request, locals }: { request: Request; locals: App.
 	let msgResp = respChainRes.text.trim();
 
 	if (msgResp.includes('NOTHING')) {
-		return new Response(JSON.stringify({ 'success': 1, 'resp_msg': { 'is_valid': false, 'message': '' } }), { status: 200 });
+		return new Response(JSON.stringify({ 'success': 1, 'is_valid': false, 'message': '' }), { status: 200 });
 	}
 
 	chatMessages.push(AIMessagePromptTemplate.fromTemplate(msgResp));
@@ -111,10 +111,17 @@ export const PUT = (async ({ request, locals }: { request: Request; locals: App.
 	const validationResp = validationChainRes.text.trim();
 
 	if (validationResp.includes('no')) {
-		return new Response(JSON.stringify({ 'success': 1, 'resp_msg': { 'is_valid': false, 'message': '' } }), { status: 200 });
+		return new Response(JSON.stringify({ 'success': 1, 'is_valid': false, 'message': '' }), { status: 200 });
 	}
 
 	msgResp = msgResp.slice(msgResp.startsWith('"') ? 1 : 0, msgResp.endsWith('"') ? -1 : undefined);
 
-	return new Response(JSON.stringify({ 'success': 1, 'resp_msg': { 'is_valid': true, 'message': msgResp } }), { status: 200 });
+	const respMetaData = {
+		'success': 1,
+		'is_valid': true
+	}
+
+	const stream = await getResponseStream(respMetaData, msgResp);
+	return new Response(stream);
+
 }) satisfies RequestHandler;