From 5b6840967d363f70678922eb124a0456b610b621 Mon Sep 17 00:00:00 2001 From: Donnie Adams Date: Tue, 4 Jun 2024 16:36:05 -0400 Subject: [PATCH] chore: rename Client to GPTScript This change also defaults to the SDK server selecting a random available port. --- README.md | 68 +++++++++++++------------- src/gptscript.ts | 105 ++++++++++++++++++++++------------------ tests/gptscript.test.ts | 74 ++++++++++++++-------------- 3 files changed, 129 insertions(+), 118 deletions(-) diff --git a/README.md b/README.md index c2ebd49..9b3508c 100644 --- a/README.md +++ b/README.md @@ -29,12 +29,12 @@ npm exec -c "gptscript https://get.gptscript.ai/echo.gpt --input 'Hello, World!' You will see "Hello, World!" in the output of the command. -## Client +## GPTScript -The client allows the caller to run gptscript files, tools, and other operations (see below). There are currently no -options for this singleton client, so `new gptscript.Client()` is all you need. Although, the intention is that a -single client is all you need for the life of your application, you should call `close()` on the client when you are -done. +The GPTScript instance allows the caller to run gptscript files, tools, and other operations (see below). There are +currently no options for this class, so `new gptscript.GPTScript()` is all you need. Although, the intention is that a +single instance is all you need for the life of your application, you should call `close()` on the instance when you +are done. ## Options @@ -64,10 +64,10 @@ Lists all the available built-in tools. 
const gptscript = require('@gptscript-ai/gptscript'); async function listTools() { - const client = new gptscript.Client(); - const tools = await client.listTools(); + const g = new gptscript.GPTScript(); + const tools = await g.listTools(); console.log(tools); - client.close() + g.close(); } ``` @@ -82,13 +82,13 @@ const gptscript = require('@gptscript-ai/gptscript'); async function listModels() { let models = []; - const client = new gptscript.Client(); + const g = new gptscript.GPTScript(); try { - models = await client.listModels(); + models = await g.listModels(); } catch (error) { console.error(error); } - client.close() + g.close(); } ``` @@ -102,13 +102,13 @@ Get the first of the current `gptscript` binary being used for the calls. const gptscript = require('@gptscript-ai/gptscript'); async function version() { - const client = new gptscript.Client(); + const g = new gptscript.GPTScript(); try { - console.log(await client.version()); + console.log(await g.version()); } catch (error) { console.error(error); } - client.close() + g.close(); } ``` @@ -124,14 +124,14 @@ const t = { instructions: "Who was the president of the united states in 1928?" 
}; -const client = new gptscript.Client(); +const g = new gptscript.GPTScript(); try { - const run = await client.evaluate(t); + const run = await g.evaluate(t); console.log(await run.text()); } catch (error) { console.error(error); } -client.close(); +g.close(); ``` ### run @@ -147,14 +147,14 @@ const opts = { }; async function execFile() { - const client = new gptscript.Client(); + const g = new gptscript.GPTScript(); try { - const run = await client.run('./hello.gpt', opts); + const run = await g.run('./hello.gpt', opts); console.log(await run.text()); } catch (e) { console.error(e); } - client.close(); + g.close(); } ``` @@ -175,9 +175,9 @@ const opts = { }; async function streamExecFileWithEvents() { - const client = new gptscript.Client(); + const g = new gptscript.GPTScript(); try { - const run = await client.run('./test.gpt', opts); + const run = await g.run('./test.gpt', opts); run.on(gptscript.RunEventType.Event, data => { console.log(`event: ${JSON.stringify(data)}`); @@ -187,7 +187,7 @@ async function streamExecFileWithEvents() { } catch (e) { console.error(e); } - client.close(); + g.close(); } ``` @@ -206,15 +206,15 @@ const opts = { }; async function streamExecFileWithEvents() { - const client = new gptscript.Client(); + const g = new gptscript.GPTScript(); try { - const run = await client.run('./test.gpt', opts); + const run = await g.run('./test.gpt', opts); run.on(gptscript.RunEventType.CallConfirm, async (data: gptscript.CallFrame) => { // data.Tool has the information for the command being run. 
// data.Input has the input for this command - await client.confirm({ + await g.confirm({ id: data.id, accept: true, // false if the command should not be run message: "", // Explain the denial (ignored if accept is true) @@ -225,7 +225,7 @@ async function streamExecFileWithEvents() { } catch (e) { console.error(e); } - client.close(); + g.close(); } ``` @@ -245,14 +245,14 @@ const opts = { }; async function streamExecFileWithEvents() { - const client = new gptscript.Client(); + const g = new gptscript.GPTScript(); try { - const run = await client.run('./test.gpt', opts); + const run = await g.run('./test.gpt', opts); run.on(gptscript.RunEventType.Prompt, async (data: gptscript.PromptFrame) => { // data will have the information for what the gptscript is prompting. - await client.promptResponse({ + await g.promptResponse({ id: data.id, // response is a map of fields to values responses: {[data.fields[0]]: "Some Value"} @@ -263,7 +263,7 @@ async function streamExecFileWithEvents() { } catch (e) { console.error(e); } - client.close(); + g.close(); } ``` @@ -292,8 +292,8 @@ const t = { }; async function streamExecFileWithEvents() { - const client = new gptscript.Client(); - let run = await client.evaluate(t, opts); + const g = new gptscript.GPTScript(); + let run = await g.evaluate(t, opts); try { // Wait for the initial run to complete. await run.text(); @@ -312,7 +312,7 @@ async function streamExecFileWithEvents() { console.error(e); } - client.close(); + g.close(); // The state here should either be RunState.Finished (on success) or RunState.Error (on error). 
console.log(run.state) diff --git a/src/gptscript.ts b/src/gptscript.ts index f4a261b..33d4a37 100644 --- a/src/gptscript.ts +++ b/src/gptscript.ts @@ -2,6 +2,7 @@ import http from "http" import path from "path" import child_process from "child_process" import {fileURLToPath} from "url" +import net from "net" export interface RunOpts { input?: string @@ -31,35 +32,45 @@ export enum RunEventType { Prompt = "prompt" } -let serverProcess: child_process.ChildProcess -let clientCount: number = 0 +export class GPTScript { + private static serverURL: string = "" + private static serverProcess: child_process.ChildProcess + private static instanceCount: number = 0 -export class Client { - private readonly gptscriptURL: string - private clientReady: boolean - constructor() { - this.clientReady = false - this.gptscriptURL = "http://" + (process.env.GPTSCRIPT_URL || "127.0.0.1:9090") - clientCount++ - if (clientCount === 1 && process.env.GPTSCRIPT_DISABLE_SERVER !== "true") { - serverProcess = child_process.spawn(getCmdPath(), ["--listen-address", this.gptscriptURL.replace("http://", "").replace("https://", ""), "sdkserver"], { - env: process.env, - stdio: ["pipe"] - }) + private ready: boolean - process.on("exit", (code) => { - serverProcess.stdin?.end() - serverProcess.kill(code) - }) + constructor() { + this.ready = false + GPTScript.instanceCount++ + if (GPTScript.instanceCount === 1 && process.env.GPTSCRIPT_DISABLE_SERVER !== "true") { + GPTScript.serverURL = process.env.GPTSCRIPT_URL || "http://127.0.0.1:0" + const u = new URL(GPTScript.serverURL) + if (u.port === "0") { + const srv = net.createServer() + const s = srv.listen(0, () => { + GPTScript.serverURL = "http://" + u.hostname + ":" + String((s.address() as net.AddressInfo).port) + srv.close() + + GPTScript.serverProcess = child_process.spawn(getCmdPath(), ["--listen-address", GPTScript.serverURL.replace("http://", ""), "sdkserver"], { + env: process.env, + stdio: ["pipe"] + }) + + process.on("exit", (code) => { + 
GPTScript.serverProcess.stdin?.end() + GPTScript.serverProcess.kill(code) + }) + }) + } } } close(): void { - clientCount-- - if (clientCount === 0 && serverProcess) { - serverProcess.kill("SIGTERM") - serverProcess.stdin?.end() + GPTScript.instanceCount-- + if (GPTScript.instanceCount === 0 && GPTScript.serverProcess) { + GPTScript.serverProcess.kill("SIGTERM") + GPTScript.serverProcess.stdin?.end() } } @@ -76,10 +87,10 @@ export class Client { } async runBasicCommand(cmd: string): Promise { - if (!this.clientReady) { - this.clientReady = await this.testGPTScriptURL(20) + if (!this.ready) { + this.ready = await this.testGPTScriptURL(20) } - const r = new RunSubcommand(cmd, "", "", {}, this.gptscriptURL) + const r = new RunSubcommand(cmd, "", "", {}, GPTScript.serverURL) r.requestNoStream(null) return r.text() } @@ -92,10 +103,10 @@ export class Client { * @return {Run} The Run object representing the running tool. */ async run(toolName: string, opts: RunOpts = {}): Promise { - if (!this.clientReady) { - this.clientReady = await this.testGPTScriptURL(20) + if (!this.ready) { + this.ready = await this.testGPTScriptURL(20) } - return (new Run("run", toolName, "", opts, this.gptscriptURL)).nextChat(opts.input) + return (new Run("run", toolName, "", opts, GPTScript.serverURL)).nextChat(opts.input) } /** @@ -106,8 +117,8 @@ export class Client { * @return {Run} The Run object representing the evaluation. 
*/ async evaluate(tool: ToolDef | ToolDef[] | string, opts: RunOpts = {}): Promise { - if (!this.clientReady) { - this.clientReady = await this.testGPTScriptURL(20) + if (!this.ready) { + this.ready = await this.testGPTScriptURL(20) } let toolString: string = "" @@ -119,30 +130,30 @@ export class Client { toolString = toolDefToString(tool) } - return (new Run("evaluate", "", toolString, opts, this.gptscriptURL)).nextChat(opts.input) + return (new Run("evaluate", "", toolString, opts, GPTScript.serverURL)).nextChat(opts.input) } async parse(fileName: string): Promise { - if (!this.clientReady) { - this.clientReady = await this.testGPTScriptURL(20) + if (!this.ready) { + this.ready = await this.testGPTScriptURL(20) } - const r: Run = new RunSubcommand("parse", fileName, "", {}, this.gptscriptURL) + const r: Run = new RunSubcommand("parse", fileName, "", {}, GPTScript.serverURL) r.request({file: fileName}) return parseBlocksFromNodes((await r.json()).nodes) } async parseTool(toolContent: string): Promise { - if (!this.clientReady) { - this.clientReady = await this.testGPTScriptURL(20) + if (!this.ready) { + this.ready = await this.testGPTScriptURL(20) } - const r: Run = new RunSubcommand("parse", "", toolContent, {}, this.gptscriptURL) + const r: Run = new RunSubcommand("parse", "", toolContent, {}, GPTScript.serverURL) r.request({content: toolContent}) return parseBlocksFromNodes((await r.json()).nodes) } async stringify(blocks: Block[]): Promise { - if (!this.clientReady) { - this.clientReady = await this.testGPTScriptURL(20) + if (!this.ready) { + this.ready = await this.testGPTScriptURL(20) } const nodes: any[] = [] @@ -162,16 +173,16 @@ export class Client { } } - const r: Run = new RunSubcommand("fmt", "", JSON.stringify({nodes: nodes}), {}, this.gptscriptURL) + const r: Run = new RunSubcommand("fmt", "", JSON.stringify({nodes: nodes}), {}, GPTScript.serverURL) r.request({nodes: nodes}) return r.text() } async confirm(response: AuthResponse): Promise { - if 
(!this.clientReady) { - this.clientReady = await this.testGPTScriptURL(20) + if (!this.ready) { + this.ready = await this.testGPTScriptURL(20) } - const resp = await fetch(`${this.gptscriptURL}/confirm/${response.id}`, { + const resp = await fetch(`${GPTScript.serverURL}/confirm/${response.id}`, { method: "POST", body: JSON.stringify(response) }) @@ -182,10 +193,10 @@ export class Client { } async promptResponse(response: PromptResponse): Promise { - if (!this.clientReady) { - this.clientReady = await this.testGPTScriptURL(20) + if (!this.ready) { + this.ready = await this.testGPTScriptURL(20) } - const resp = await fetch(`${this.gptscriptURL}/prompt-response/${response.id}`, { + const resp = await fetch(`${GPTScript.serverURL}/prompt-response/${response.id}`, { method: "POST", body: JSON.stringify(response.responses) }) @@ -197,7 +208,7 @@ export class Client { private async testGPTScriptURL(count: number): Promise { try { - await fetch(`${this.gptscriptURL}/healthz`) + await fetch(`${GPTScript.serverURL}/healthz`) return true } catch { if (count === 0) { diff --git a/tests/gptscript.test.ts b/tests/gptscript.test.ts index ff35093..b166381 100644 --- a/tests/gptscript.test.ts +++ b/tests/gptscript.test.ts @@ -2,7 +2,7 @@ import * as gptscript from "../src/gptscript" import path from "path" import {fileURLToPath} from "url" -let client: gptscript.Client +let g: gptscript.GPTScript const __dirname = path.dirname(fileURLToPath(import.meta.url)) describe("gptscript module", () => { @@ -11,32 +11,32 @@ describe("gptscript module", () => { throw new Error("neither OPENAI_API_KEY nor GPTSCRIPT_URL is set") } - client = new gptscript.Client() + g = new gptscript.GPTScript() }) afterAll(() => { - client.close() + g.close() }) - test("creating an closing another client should work", async () => { - const other = new gptscript.Client() + test("creating and closing another instance should work", async () => { + const other = new gptscript.GPTScript() await other.version()
other.close() }) test("listTools returns available tools", async () => { - const tools = await client.listTools() + const tools = await g.listTools() expect(tools).toBeDefined() }) test("listModels returns a list of models", async () => { // Similar structure to listTools - let models = await client.listModels() + let models = await g.listModels() expect(models).toBeDefined() }) test("version returns a gptscript version", async () => { // Similar structure to listTools - let version = await client.version() + let version = await g.version() expect(version).toContain("gptscript version") }) @@ -45,7 +45,7 @@ describe("gptscript module", () => { instructions: "who was the president of the united states in 1928?" } - const run = await client.evaluate(t as any) + const run = await g.evaluate(t as any) expect(run).toBeDefined() expect(await run.text()).toContain("Calvin Coolidge") }) @@ -60,7 +60,7 @@ describe("gptscript module", () => { disableCache: true, } - const run = await client.evaluate(t as any, opts) + const run = await g.evaluate(t as any, opts) run.on(gptscript.RunEventType.CallProgress, (data: gptscript.CallFrame) => { for (let output of data.output) out += `system: ${output.content}` }) @@ -80,7 +80,7 @@ describe("gptscript module", () => { context: [path.join(__dirname, "fixtures", "acorn-labs-context.gpt")] } - const run = await client.evaluate(t as any, {disableCache: true}) + const run = await g.evaluate(t as any, {disableCache: true}) out = await run.text() err = run.err @@ -91,7 +91,7 @@ describe("gptscript module", () => { test("should execute test.gpt correctly", async () => { const testGptPath = path.join(__dirname, "fixtures", "test.gpt") - const result = await (await client.run(testGptPath)).text() + const result = await (await g.run(testGptPath)).text() expect(result).toBeDefined() expect(result).toContain("Calvin Coolidge") }) @@ -104,7 +104,7 @@ describe("gptscript module", () => { disableCache: true, } - const run = await 
client.run(testGptPath, opts) + const run = await g.run(testGptPath, opts) run.on(gptscript.RunEventType.CallProgress, data => { for (let output of data.output) out += `system: ${output.content}` }) @@ -123,7 +123,7 @@ describe("gptscript module", () => { disableCache: true, } - const run = await client.run(testGptPath, opts) + const run = await g.run(testGptPath, opts) run.on(gptscript.RunEventType.CallProgress, data => { for (let output of data.output) out += `system: ${output.content}` }) @@ -143,7 +143,7 @@ describe("gptscript module", () => { } try { - const run = await client.run(testGptPath, opts) + const run = await g.run(testGptPath, opts) run.on(gptscript.RunEventType.CallProgress, data => { run.close() }) @@ -173,7 +173,7 @@ describe("gptscript module", () => { instructions: "${question}" } - const response = await (await client.evaluate([t0 as any, t1 as any])).text() + const response = await (await g.evaluate([t0 as any, t1 as any])).text() expect(response).toBeDefined() expect(response).toContain("Calvin Coolidge") }, 30000) @@ -197,14 +197,14 @@ describe("gptscript module", () => { instructions: "${question}" } as any - const response = await (await client.evaluate([t0, t1, t2], {subTool: "other"})).text() + const response = await (await g.evaluate([t0, t1, t2], {subTool: "other"})).text() expect(response).toBeDefined() expect(response).toContain("Ronald Reagan") }, 30000) }) test("parse file", async () => { - const response = await client.parse(path.join(__dirname, "fixtures", "test.gpt")) + const response = await g.parse(path.join(__dirname, "fixtures", "test.gpt")) expect(response).toBeDefined() expect(response).toHaveLength(1) expect((response[0] as gptscript.Tool).instructions).toEqual("who was the president in 1928?") @@ -212,7 +212,7 @@ describe("gptscript module", () => { test("parse string tool", async () => { const tool = "How much wood would a woodchuck chuck if a woodchuck could chuck wood?" 
- const response = await client.parseTool(tool) + const response = await g.parseTool(tool) expect(response).toBeDefined() expect(response).toHaveLength(1) expect((response[0] as gptscript.Tool).instructions).toEqual(tool) @@ -220,7 +220,7 @@ describe("gptscript module", () => { test("parse string tool with text node", async () => { const tool = "How much wood would a woodchuck chuck if a woodchuck could chuck wood?\n---\n!markdown\nThis is a text node" - const response = await client.parseTool(tool) + const response = await g.parseTool(tool) expect(response).toBeDefined() expect(response).toHaveLength(2) expect((response[0] as gptscript.Tool).instructions).toEqual("How much wood would a woodchuck chuck if a woodchuck could chuck wood?") @@ -229,7 +229,7 @@ describe("gptscript module", () => { test("parse string tool global tools", async () => { const tool = "Global Tools: acorn, do-work\nHow much wood would a woodchuck chuck if a woodchuck could chuck wood?" - const response = await client.parseTool(tool) + const response = await g.parseTool(tool) expect(response).toBeDefined() expect(response).toHaveLength(1) expect((response[0] as gptscript.Tool).instructions).toEqual("How much wood would a woodchuck chuck if a woodchuck could chuck wood?") @@ -238,7 +238,7 @@ describe("gptscript module", () => { test("parse string tool first line shebang", async () => { const tool = "\n#!/usr/bin/env python\nHow much wood would a woodchuck chuck if a woodchuck could chuck wood?" 
- const response = await client.parseTool(tool) + const response = await g.parseTool(tool) expect(response).toBeDefined() expect(response).toHaveLength(1) expect((response[0] as gptscript.Tool).instructions).toEqual("#!/usr/bin/env python\nHow much wood would a woodchuck chuck if a woodchuck could chuck wood?") @@ -260,7 +260,7 @@ describe("gptscript module", () => { } } - const response = await client.stringify([tool as any]) + const response = await g.stringify([tool as any]) expect(response).toBeDefined() expect(response).toContain("Tools: sys.write, sys.read") expect(response).toContain("This is a test") @@ -277,7 +277,7 @@ describe("gptscript module", () => { const opts = { disableCache: true, } - let run = await client.evaluate(t as any, opts) + let run = await g.evaluate(t as any, opts) const inputs = [ "List the three largest states in the United States by area.", @@ -316,7 +316,7 @@ describe("gptscript module", () => { const opts = { disableCache: true } - let run = await client.run(path.join(__dirname, "fixtures", "chat.gpt"), opts) + let run = await g.run(path.join(__dirname, "fixtures", "chat.gpt"), opts) const inputs = [ "List the 3 largest of the Great Lakes by volume.", @@ -351,14 +351,14 @@ describe("gptscript module", () => { }, 60000) test("nextChat on file providing chat state", async () => { - let run = await client.run(path.join(__dirname, "fixtures", "chat.gpt"), {disableCache: true}) + let run = await g.run(path.join(__dirname, "fixtures", "chat.gpt"), {disableCache: true}) run = run.nextChat("List the 3 largest of the Great Lakes by volume.") expect(await run.text()).toContain("Lake Superior") expect(run.err).toEqual("") expect(run.state).toEqual(gptscript.RunState.Continue) - run = await client.run(path.join(__dirname, "fixtures", "chat.gpt"), { + run = await g.run(path.join(__dirname, "fixtures", "chat.gpt"), { disableCache: true, input: "What is the total area of the third one in square miles?", chatState: run.currentChatState() @@ 
-375,14 +375,14 @@ describe("gptscript module", () => { instructions: "You are a chat bot. Don't finish the conversation until I say 'bye'.", tools: ["sys.chat.finish"] } - let run = await client.evaluate(t as any, {disableCache: true}) + let run = await g.evaluate(t as any, {disableCache: true}) run = run.nextChat("List the three largest states in the United States by area.") expect(await run.text()).toContain("California") expect(run.err).toEqual("") expect(run.state).toEqual(gptscript.RunState.Continue) - run = await client.evaluate(t as any, { + run = await g.evaluate(t as any, { disableCache: true, input: "What is the capital of the second one?", chatState: run.currentChatState() @@ -399,11 +399,11 @@ describe("gptscript module", () => { instructions: "List the files in the current working directory.", tools: ["sys.exec"] } - const run = await client.evaluate(t as any, {confirm: true}) + const run = await g.evaluate(t as any, {confirm: true}) run.on(gptscript.RunEventType.CallConfirm, async (data: gptscript.CallFrame) => { expect(data.input).toContain(`"ls"`) confirmFound = true - await client.confirm({id: data.id, accept: true}) + await g.confirm({id: data.id, accept: true}) }) expect(await run.text()).toContain("README.md") @@ -417,11 +417,11 @@ describe("gptscript module", () => { instructions: "List the files in the current working directory.", tools: ["sys.exec"] } - const run = await client.evaluate(t as any, {confirm: true}) + const run = await g.evaluate(t as any, {confirm: true}) run.on(gptscript.RunEventType.CallConfirm, async (data: gptscript.CallFrame) => { expect(data.input).toContain(`"ls"`) confirmFound = true - await client.confirm({id: data.id, accept: false, message: "I will not allow it!"}) + await g.confirm({id: data.id, accept: false, message: "I will not allow it!"}) }) expect(await run.text()).toContain("authorization error") @@ -435,7 +435,7 @@ describe("gptscript module", () => { instructions: "Use the sys.prompt user to ask the user 
for 'first name' which is not sensitive. After you get their first name, say hello.", tools: ["sys.prompt"] } - const run = await client.evaluate(t as any, {prompt: true}) + const run = await g.evaluate(t as any, {prompt: true}) run.on(gptscript.RunEventType.Prompt, async (data: gptscript.PromptFrame) => { expect(data.message).toContain("first name") expect(data.fields.length).toEqual(1) @@ -443,7 +443,7 @@ describe("gptscript module", () => { expect(data.sensitive).toBeFalsy() promptFound = true - await client.promptResponse({id: data.id, responses: {[data.fields[0]]: "Clicky"}}) + await g.promptResponse({id: data.id, responses: {[data.fields[0]]: "Clicky"}}) }) expect(await run.text()).toContain("Clicky") @@ -457,7 +457,7 @@ describe("gptscript module", () => { instructions: "Use the sys.prompt user to ask the user for 'first name' which is not sensitive. After you get their first name, say hello.", tools: ["sys.prompt"] } - const run = await client.evaluate(t as any) + const run = await g.evaluate(t as any) run.on(gptscript.RunEventType.Prompt, async (data: gptscript.PromptFrame) => { promptFound = true }) @@ -485,7 +485,7 @@ describe("gptscript module", () => { instructions: `${shebang}\nexit \${EXIT_CODE}` } as gptscript.ToolDef - let run = await client.evaluate([t, contextTool], {disableCache: true, env: ["EXIT_CODE=1"]}) + let run = await g.evaluate([t, contextTool], {disableCache: true, env: ["EXIT_CODE=1"]}) try { await run.text() } catch {