From 2f41711ffb4e272b2bf5c338ab3d27bce6e35917 Mon Sep 17 00:00:00 2001 From: jeremyphilemon Date: Fri, 7 Jun 2024 00:44:12 +0300 Subject: [PATCH 1/5] Add next pages examples to folder --- examples/next-openai-pages/.env.local.example | 3 +- .../app/api/assistant-tools/route.ts | 71 +++++++++ .../app/api/assistant/route.ts | 35 +++++ .../app/api/call-tool/route.ts | 35 +++++ .../app/api/call-tools-in-parallel/route.ts | 41 ++++++ .../app/api/chat-app-route/route.ts | 19 --- .../app/api/generate-chat/route.ts | 14 ++ .../app/api/generate-object/route.ts | 24 ++++ .../app/api/generate-text/route.ts | 14 ++ .../app/api/generative-ui-route/route.ts | 50 +++++++ .../app/api/stream-chat/route.ts | 14 ++ .../app/api/stream-text/route.ts | 14 ++ examples/next-openai-pages/package.json | 7 +- examples/next-openai-pages/pages/_app.tsx | 9 +- .../next-openai-pages/pages/_document.tsx | 2 +- .../next-openai-pages/pages/api/chat-edge.ts | 15 +- examples/next-openai-pages/pages/api/chat.ts | 25 ---- .../pages/api/use-chat-tools-ui.ts | 53 ------- .../index.tsx | 30 ++++ .../stream-assistant-response/index.tsx | 30 ++++ .../pages/basics/generate-object/index.tsx | 38 +++++ .../pages/basics/generate-text/index.tsx | 33 +++++ .../pages/basics/stream-text/index.tsx | 24 ++++ .../pages/chat-app-route.tsx | 29 ---- .../next-openai-pages/pages/chat-edge.tsx | 29 ---- .../pages/chat/edge-runtime/index.tsx | 28 ++++ .../pages/chat/generate-chat/index.tsx | 63 ++++++++ .../pages/chat/stream-chat/index.tsx | 28 ++++ .../route-components/index.tsx | 134 +++++++++++++++++ examples/next-openai-pages/pages/index.tsx | 128 ++++++++++++++--- .../pages/tools/call-tool/index.tsx | 37 +++++ .../tools/call-tools-in-parallel/index.tsx | 39 +++++ .../pages/use-chat-tools-ui.tsx | 136 ------------------ .../{pages => styles}/globals.css | 0 pnpm-lock.yaml | 130 +++++++++++++++-- 35 files changed, 1035 insertions(+), 346 deletions(-) create mode 100644 examples/next-openai-pages/app/api/assistant-tools/route.ts create mode 100644 examples/next-openai-pages/app/api/assistant/route.ts create mode 100644 examples/next-openai-pages/app/api/call-tool/route.ts create mode 100644 examples/next-openai-pages/app/api/call-tools-in-parallel/route.ts delete mode 100644 examples/next-openai-pages/app/api/chat-app-route/route.ts create mode 100644 examples/next-openai-pages/app/api/generate-chat/route.ts create mode 100644 examples/next-openai-pages/app/api/generate-object/route.ts create mode 100644 examples/next-openai-pages/app/api/generate-text/route.ts create mode 100644 examples/next-openai-pages/app/api/generative-ui-route/route.ts create mode 100644 examples/next-openai-pages/app/api/stream-chat/route.ts create mode 100644 examples/next-openai-pages/app/api/stream-text/route.ts delete mode 100644 examples/next-openai-pages/pages/api/chat.ts delete mode 100644 examples/next-openai-pages/pages/api/use-chat-tools-ui.ts create mode 100644 examples/next-openai-pages/pages/assistants/stream-assistant-response-with-tools/index.tsx create mode 100644 examples/next-openai-pages/pages/assistants/stream-assistant-response/index.tsx create mode 100644 examples/next-openai-pages/pages/basics/generate-object/index.tsx create mode 100644 examples/next-openai-pages/pages/basics/generate-text/index.tsx create mode 100644 examples/next-openai-pages/pages/basics/stream-text/index.tsx delete mode 100644 examples/next-openai-pages/pages/chat-app-route.tsx delete mode 100644 examples/next-openai-pages/pages/chat-edge.tsx create mode 100644 
examples/next-openai-pages/pages/chat/edge-runtime/index.tsx create mode 100644 examples/next-openai-pages/pages/chat/generate-chat/index.tsx create mode 100644 examples/next-openai-pages/pages/chat/stream-chat/index.tsx create mode 100644 examples/next-openai-pages/pages/generative-user-interface/route-components/index.tsx create mode 100644 examples/next-openai-pages/pages/tools/call-tool/index.tsx create mode 100644 examples/next-openai-pages/pages/tools/call-tools-in-parallel/index.tsx delete mode 100644 examples/next-openai-pages/pages/use-chat-tools-ui.tsx rename examples/next-openai-pages/{pages => styles}/globals.css (100%) diff --git a/examples/next-openai-pages/.env.local.example b/examples/next-openai-pages/.env.local.example index 8198c84d5c38..6b4834b21161 100644 --- a/examples/next-openai-pages/.env.local.example +++ b/examples/next-openai-pages/.env.local.example @@ -1 +1,2 @@ -OPENAI_API_KEY=xxxxxxx \ No newline at end of file +OPENAI_API_KEY=xxxxxxx +ASSISTANT_ID=xxxxxxx \ No newline at end of file diff --git a/examples/next-openai-pages/app/api/assistant-tools/route.ts b/examples/next-openai-pages/app/api/assistant-tools/route.ts new file mode 100644 index 000000000000..170c97602d3f --- /dev/null +++ b/examples/next-openai-pages/app/api/assistant-tools/route.ts @@ -0,0 +1,71 @@ +import { AssistantResponse } from 'ai'; +import OpenAI from 'openai'; + +const openai = new OpenAI({ + apiKey: process.env.OPENAI_API_KEY || '', +}); + +export async function POST(req: Request) { + const input: { + threadId: string | null; + message: string; + } = await req.json(); + + const threadId = input.threadId ?? (await openai.beta.threads.create({})).id; + + const createdMessage = await openai.beta.threads.messages.create(threadId, { + role: 'user', + content: input.message, + }); + + return AssistantResponse( + { threadId, messageId: createdMessage.id }, + async ({ forwardStream }) => { + const runStream = openai.beta.threads.runs.stream(threadId, { + assistant_id: + process.env.ASSISTANT_ID ?? 
+ (() => { + throw new Error('ASSISTANT_ID is not set'); + })(), + }); + + let runResult = await forwardStream(runStream); + + while ( + runResult?.status === 'requires_action' && + runResult.required_action?.type === 'submit_tool_outputs' + ) { + const tool_outputs = + runResult.required_action.submit_tool_outputs.tool_calls.map( + (toolCall: any) => { + const parameters = JSON.parse(toolCall.function.arguments); + + switch (toolCall.function.name) { + case 'celsiusToFahrenheit': + const celsius = parseFloat(parameters.value); + const fahrenheit = celsius * (9 / 5) + 32; + + return { + tool_call_id: toolCall.id, + output: `${celsius}°C is ${fahrenheit.toFixed(2)}°F`, + }; + + default: + throw new Error( + `Unknown tool call function: ${toolCall.function.name}`, + ); + } + }, + ); + + runResult = await forwardStream( + openai.beta.threads.runs.submitToolOutputsStream( + threadId, + runResult.id, + { tool_outputs }, + ), + ); + } + }, + ); +} diff --git a/examples/next-openai-pages/app/api/assistant/route.ts b/examples/next-openai-pages/app/api/assistant/route.ts new file mode 100644 index 000000000000..747c8ac5ece5 --- /dev/null +++ b/examples/next-openai-pages/app/api/assistant/route.ts @@ -0,0 +1,35 @@ +import { AssistantResponse } from 'ai'; +import OpenAI from 'openai'; + +const openai = new OpenAI({ + apiKey: process.env.OPENAI_API_KEY || '', +}); + +export async function POST(req: Request) { + const input: { + threadId: string | null; + message: string; + } = await req.json(); + + const threadId = input.threadId ?? (await openai.beta.threads.create({})).id; + + const createdMessage = await openai.beta.threads.messages.create(threadId, { + role: 'user', + content: input.message, + }); + + return AssistantResponse( + { threadId, messageId: createdMessage.id }, + async ({ forwardStream }) => { + const runStream = openai.beta.threads.runs.stream(threadId, { + assistant_id: + process.env.ASSISTANT_ID ?? 
+ (() => { + throw new Error('ASSISTANT_ID environment variable is not set'); + })(), + }); + + await forwardStream(runStream); + }, + ); +} diff --git a/examples/next-openai-pages/app/api/call-tool/route.ts b/examples/next-openai-pages/app/api/call-tool/route.ts new file mode 100644 index 000000000000..e244ce33791a --- /dev/null +++ b/examples/next-openai-pages/app/api/call-tool/route.ts @@ -0,0 +1,35 @@ +import { ToolInvocation, convertToCoreMessages, streamText } from "ai"; +import { openai } from "@ai-sdk/openai"; +import { z } from "zod"; + +interface Message { + role: "user" | "assistant"; + content: string; + toolInvocations?: ToolInvocation[]; +} + +export async function POST(req: Request) { + const { messages }: { messages: Message[] } = await req.json(); + + const result = await streamText({ + model: openai("gpt-4"), + system: "You are a helpful assistant.", + // @ts-ignore + messages: convertToCoreMessages(messages), + tools: { + celsiusToFahrenheit: { + description: "Converts celsius to fahrenheit", + parameters: z.object({ + value: z.string().describe("The value in celsius"), + }), + execute: async ({ value }) => { + const celsius = parseFloat(value); + const fahrenheit = celsius * (9 / 5) + 32; + return `${celsius}°C is ${fahrenheit.toFixed(2)}°F`; + }, + }, + }, + }); + + return result.toAIStreamResponse(); +} diff --git a/examples/next-openai-pages/app/api/call-tools-in-parallel/route.ts b/examples/next-openai-pages/app/api/call-tools-in-parallel/route.ts new file mode 100644 index 000000000000..c096791bcffc --- /dev/null +++ b/examples/next-openai-pages/app/api/call-tools-in-parallel/route.ts @@ -0,0 +1,41 @@ +import { ToolInvocation, convertToCoreMessages, streamText } from "ai"; +import { openai } from "@ai-sdk/openai"; +import { z } from "zod"; + +interface Message { + role: "user" | "assistant"; + content: string; + toolInvocations?: ToolInvocation[]; +} + +function getWeather({ city, unit }: { city: string; unit: string }) { + return { value: 25, description: "Sunny" }; +} + +export async function POST(req: Request) { + const { messages }: { messages: Message[] } = await req.json(); + + const result = await streamText({ + model: openai("gpt-4o"), + system: "You are a helpful assistant.", + // @ts-ignore + messages: convertToCoreMessages(messages), + tools: { + getWeather: { + description: "Get the weather for a location", + parameters: z.object({ + city: z.string().describe("The city to get the weather for"), + unit: z + .enum(["C", "F"]) + .describe("The unit to display the temperature in"), + }), + execute: async ({ city, unit }) => { + const weather = getWeather({ city, unit }); + return `It is currently ${weather.value}°${unit} and ${weather.description} in ${city}!`; + }, + }, + }, + }); + + return result.toAIStreamResponse(); +} diff --git a/examples/next-openai-pages/app/api/chat-app-route/route.ts b/examples/next-openai-pages/app/api/chat-app-route/route.ts deleted file mode 100644 index daf8bb1e5281..000000000000 --- a/examples/next-openai-pages/app/api/chat-app-route/route.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { openai } from '@ai-sdk/openai'; -import { streamText } from 'ai'; - -// Allow streaming responses up to 30 seconds -export const maxDuration = 30; - -export async function POST(req: Request) { - // Extract the `messages` from the body of the request - const { messages } = await req.json(); - - // Call the language model - const result = await streamText({ - model: openai('gpt-4-turbo'), - messages, - }); - - // Respond with the stream - return
result.toAIStreamResponse(); -} diff --git a/examples/next-openai-pages/app/api/generate-chat/route.ts b/examples/next-openai-pages/app/api/generate-chat/route.ts new file mode 100644 index 000000000000..5500abd27490 --- /dev/null +++ b/examples/next-openai-pages/app/api/generate-chat/route.ts @@ -0,0 +1,14 @@ +import { CoreMessage, generateText } from 'ai'; +import { openai } from '@ai-sdk/openai'; + +export async function POST(req: Request) { + const { messages }: { messages: CoreMessage[] } = await req.json(); + + const { responseMessages } = await generateText({ + model: openai('gpt-4'), + system: 'You are a helpful assistant.', + messages, + }); + + return Response.json({ messages: responseMessages }); +} diff --git a/examples/next-openai-pages/app/api/generate-object/route.ts b/examples/next-openai-pages/app/api/generate-object/route.ts new file mode 100644 index 000000000000..d9c09dbc6466 --- /dev/null +++ b/examples/next-openai-pages/app/api/generate-object/route.ts @@ -0,0 +1,24 @@ +import { generateObject } from "ai"; +import { openai } from "@ai-sdk/openai"; +import { z } from "zod"; + +export async function POST(req: Request) { + const { prompt }: { prompt: string } = await req.json(); + + const { object } = await generateObject({ + model: openai("gpt-4"), + system: "You are a helpful assistant.", + prompt, + schema: z.object({ + notifications: z.array( + z.object({ + name: z.string().describe("Name of a fictional person."), + message: z.string().describe("Do not use emojis or links."), + minutesAgo: z.number(), + }) + ), + }), + }); + + return Response.json({ object }); +} diff --git a/examples/next-openai-pages/app/api/generate-text/route.ts b/examples/next-openai-pages/app/api/generate-text/route.ts new file mode 100644 index 000000000000..22fdba632807 --- /dev/null +++ b/examples/next-openai-pages/app/api/generate-text/route.ts @@ -0,0 +1,14 @@ +import { generateText } from "ai"; +import { openai } from "@ai-sdk/openai"; + +export async function POST(req: Request) { + const { prompt }: { prompt: string } = await req.json(); + + const { text } = await generateText({ + model: openai("gpt-4"), + system: "You are a helpful assistant.", + prompt, + }); + + return Response.json({ text }); +} diff --git a/examples/next-openai-pages/app/api/generative-ui-route/route.ts b/examples/next-openai-pages/app/api/generative-ui-route/route.ts new file mode 100644 index 000000000000..9338a0547615 --- /dev/null +++ b/examples/next-openai-pages/app/api/generative-ui-route/route.ts @@ -0,0 +1,50 @@ +import { CoreMessage, convertToCoreMessages, streamText } from "ai"; +import { openai } from "@ai-sdk/openai"; +import { z } from "zod"; + +export async function POST(req: Request) { + const { messages }: { messages: CoreMessage[] } = await req.json(); + + const result = await streamText({ + model: openai("gpt-4"), + system: "You are a helpful assistant.", + // @ts-expect-error TODO: fix messages type + messages: convertToCoreMessages(messages), + tools: { + getWeatherInformation: { + description: "show the weather in a given city to the user", + parameters: z.object({ city: z.string() }), + execute: async ({}: { city: string }) => { + return { + value: 24, + unit: "celsius", + weeklyForecast: [ + { day: "Mon", value: 24 }, + { day: "Tue", value: 25 }, + { day: "Wed", value: 26 }, + { day: "Thu", value: 27 }, + { day: "Fri", value: 28 }, + { day: "Sat", value: 29 }, + { day: "Sun", value: 30 }, + ], + }; + }, + }, + // client-side tool that starts user interaction: + askForConfirmation: { + 
description: "Ask the user for confirmation.", + parameters: z.object({ + message: z.string().describe("The message to ask for confirmation."), + }), + }, + // client-side tool that is automatically executed on the client: + getLocation: { + description: + "Get the user location. Always ask for confirmation before using this tool.", + parameters: z.object({}), + }, + }, + }); + + return result.toAIStreamResponse(); +} diff --git a/examples/next-openai-pages/app/api/stream-chat/route.ts b/examples/next-openai-pages/app/api/stream-chat/route.ts new file mode 100644 index 000000000000..b1ec08e7cb3f --- /dev/null +++ b/examples/next-openai-pages/app/api/stream-chat/route.ts @@ -0,0 +1,14 @@ +import { CoreMessage, streamText } from "ai"; +import { openai } from "@ai-sdk/openai"; + +export async function POST(req: Request) { + const { messages }: { messages: CoreMessage[] } = await req.json(); + + const result = await streamText({ + model: openai("gpt-4"), + system: "You are a helpful assistant.", + messages, + }); + + return result.toAIStreamResponse(); +} diff --git a/examples/next-openai-pages/app/api/stream-text/route.ts b/examples/next-openai-pages/app/api/stream-text/route.ts new file mode 100644 index 000000000000..d113ac190bcd --- /dev/null +++ b/examples/next-openai-pages/app/api/stream-text/route.ts @@ -0,0 +1,14 @@ +import { streamText } from "ai"; +import { openai } from "@ai-sdk/openai"; + +export async function POST(req: Request) { + const { prompt }: { prompt: string } = await req.json(); + + const result = await streamText({ + model: openai("gpt-4"), + system: "You are a helpful assistant.", + prompt, + }); + + return result.toAIStreamResponse(); +} diff --git a/examples/next-openai-pages/package.json b/examples/next-openai-pages/package.json index 8dd42540dcce..0e2af747eec8 100644 --- a/examples/next-openai-pages/package.json +++ b/examples/next-openai-pages/package.json @@ -12,19 +12,20 @@ "@ai-sdk/openai": "latest", "ai": "latest", "next": "latest", + "openai": "^4.33.0", "react": "^18", "react-dom": "^18", "zod": "3.23.8" }, "devDependencies": { - "@types/node": "^17.0.12", - "@types/react": "^18", + "@types/node": "^20.12.7", + "@types/react": "^18.3.3", "@types/react-dom": "^18", "autoprefixer": "^10.4.14", "eslint": "^7.32.0", "eslint-config-next": "14.2.3", "postcss": "^8.4.23", "tailwindcss": "^3.3.2", - "typescript": "5.1.3" + "typescript": "5.4.5" } } diff --git a/examples/next-openai-pages/pages/_app.tsx b/examples/next-openai-pages/pages/_app.tsx index fd347395fa2b..a7a790fba51f 100644 --- a/examples/next-openai-pages/pages/_app.tsx +++ b/examples/next-openai-pages/pages/_app.tsx @@ -1,9 +1,6 @@ -import './globals.css'; -import type { AppProps } from 'next/app'; +import "@/styles/globals.css"; +import type { AppProps } from "next/app"; -export default function ExampleApp({ - Component, - pageProps, -}: AppProps): JSX.Element { +export default function App({ Component, pageProps }: AppProps) { return ; } diff --git a/examples/next-openai-pages/pages/_document.tsx b/examples/next-openai-pages/pages/_document.tsx index e1e9cbbb75aa..b2fff8b4262d 100644 --- a/examples/next-openai-pages/pages/_document.tsx +++ b/examples/next-openai-pages/pages/_document.tsx @@ -1,4 +1,4 @@ -import { Html, Head, Main, NextScript } from 'next/document'; +import { Html, Head, Main, NextScript } from "next/document"; export default function Document() { return ( diff --git a/examples/next-openai-pages/pages/api/chat-edge.ts b/examples/next-openai-pages/pages/api/chat-edge.ts index 
be7da866440c..e2d1e68c0442 100644 --- a/examples/next-openai-pages/pages/api/chat-edge.ts +++ b/examples/next-openai-pages/pages/api/chat-edge.ts @@ -1,22 +1,15 @@ -import { createOpenAI } from '@ai-sdk/openai'; -import { streamText } from 'ai'; +import { openai } from "@ai-sdk/openai"; +import { streamText } from "ai"; -export const runtime = 'edge'; - -// Create an OpenAI Provider instance -const openai = createOpenAI({ - apiKey: process.env.OPENAI_API_KEY ?? '', -}); +export const runtime = "edge"; export default async function handler(req: Request) { const { messages } = await req.json(); - // Ask OpenAI for a streaming chat completion given the prompt const result = await streamText({ - model: openai('gpt-4-turbo-preview'), + model: openai("gpt-4-turbo-preview"), messages, }); - // Edge environment: return the AI stream as a single response return result.toAIStreamResponse(); } diff --git a/examples/next-openai-pages/pages/api/chat.ts b/examples/next-openai-pages/pages/api/chat.ts deleted file mode 100644 index d86e0719cf39..000000000000 --- a/examples/next-openai-pages/pages/api/chat.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { createOpenAI } from '@ai-sdk/openai'; -import { streamText } from 'ai'; -import { NextApiRequest, NextApiResponse } from 'next'; - -// Create an OpenAI Provider instance -const openai = createOpenAI({ - apiKey: process.env.OPENAI_API_KEY ?? '', -}); - -export default async function handler( - req: NextApiRequest, - res: NextApiResponse, -) { - const { messages } = await req.body; - - // Ask OpenAI for a streaming chat completion given the prompt - const result = await streamText({ - model: openai('gpt-4-turbo-preview'), - messages, - }); - - // write the AI stream to the response - // Note: this is sent as a single response, not a stream - result.pipeAIStreamToResponse(res); -} diff --git a/examples/next-openai-pages/pages/api/use-chat-tools-ui.ts b/examples/next-openai-pages/pages/api/use-chat-tools-ui.ts deleted file mode 100644 index 30f0d14d20d3..000000000000 --- a/examples/next-openai-pages/pages/api/use-chat-tools-ui.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { openai } from '@ai-sdk/openai'; -import { convertToCoreMessages, streamText } from 'ai'; -import { NextApiRequest, NextApiResponse } from 'next'; -import { z } from 'zod'; - -export default async function handler( - request: NextApiRequest, - response: NextApiResponse, -) { - const { messages } = await request.body; - - const result = await streamText({ - model: openai('gpt-4-turbo'), - messages: convertToCoreMessages(messages), - tools: { - // server-side tool with execute function: - getWeatherInformation: { - description: 'show the weather in a given city to the user', - parameters: z.object({ city: z.string() }), - execute: async ({}: { city: string }) => { - return { - value: 24, - unit: 'celsius', - weeklyForecast: [ - { day: 'Mon', value: 24 }, - { day: 'Tue', value: 25 }, - { day: 'Wed', value: 26 }, - { day: 'Thu', value: 27 }, - { day: 'Fri', value: 28 }, - { day: 'Sat', value: 29 }, - { day: 'Sun', value: 30 }, - ], - }; - }, - }, - // client-side tool that starts user interaction: - askForConfirmation: { - description: 'Ask the user for confirmation.', - parameters: z.object({ - message: z.string().describe('The message to ask for confirmation.'), - }), - }, - // client-side tool that is automatically executed on the client: - getLocation: { - description: - 'Get the user location. 
Always ask for confirmation before using this tool.', - parameters: z.object({}), - }, - }, - }); - - result.pipeAIStreamToResponse(response); -} diff --git a/examples/next-openai-pages/pages/assistants/stream-assistant-response-with-tools/index.tsx b/examples/next-openai-pages/pages/assistants/stream-assistant-response-with-tools/index.tsx new file mode 100644 index 000000000000..36d986cf8dea --- /dev/null +++ b/examples/next-openai-pages/pages/assistants/stream-assistant-response-with-tools/index.tsx @@ -0,0 +1,30 @@ +import { Message, useAssistant } from "ai/react"; + +export default function Page() { + const { status, messages, input, submitMessage, handleInputChange } = + useAssistant({ api: "/api/assistant-tools" }); + + return ( +
+
status: {status}
+ +
+ {messages.map((message: Message) => ( +
+
{`${message.role}: `}
+
{message.content}
+
+ ))} +
+ +
+ +
+
+ ); +} diff --git a/examples/next-openai-pages/pages/assistants/stream-assistant-response/index.tsx b/examples/next-openai-pages/pages/assistants/stream-assistant-response/index.tsx new file mode 100644 index 000000000000..b614f6c45f5f --- /dev/null +++ b/examples/next-openai-pages/pages/assistants/stream-assistant-response/index.tsx @@ -0,0 +1,30 @@ +import { Message, useAssistant } from "ai/react"; + +export default function Page() { + const { status, messages, input, submitMessage, handleInputChange } = + useAssistant({ api: "/api/assistant" }); + + return ( +
+
status: {status}
+ +
+ {messages.map((message: Message) => ( +
+
{`${message.role}: `}
+
{message.content}
+
+ ))} +
+ +
+ +
+
+ ); +} diff --git a/examples/next-openai-pages/pages/basics/generate-object/index.tsx b/examples/next-openai-pages/pages/basics/generate-object/index.tsx new file mode 100644 index 000000000000..ce4dc9c81888 --- /dev/null +++ b/examples/next-openai-pages/pages/basics/generate-object/index.tsx @@ -0,0 +1,38 @@ +import { useState } from "react"; + +export default function Page() { + const [generation, setGeneration] = useState(""); + const [isLoading, setIsLoading] = useState(false); + + return ( +
+
{ + setIsLoading(true); + + await fetch("/api/generate-object", { + method: "POST", + body: JSON.stringify({ + prompt: "Messages during finals week.", + }), + }).then((response) => { + response.json().then((json) => { + console.log(json); + setGeneration(JSON.stringify(json.object, null, 2)); + setIsLoading(false); + }); + }); + }} + > + Generate +
+ + {isLoading ? ( + "Loading..." + ) : ( +
{generation}
+ )} +
+ ); +} diff --git a/examples/next-openai-pages/pages/basics/generate-text/index.tsx b/examples/next-openai-pages/pages/basics/generate-text/index.tsx new file mode 100644 index 000000000000..42de2a8b21de --- /dev/null +++ b/examples/next-openai-pages/pages/basics/generate-text/index.tsx @@ -0,0 +1,33 @@ +import { useState } from "react"; + +export default function Page() { + const [generation, setGeneration] = useState(""); + const [isLoading, setIsLoading] = useState(false); + + return ( +
+
{ + setIsLoading(true); + + await fetch("/api/generate-text", { + method: "POST", + body: JSON.stringify({ + prompt: "Why is the sky blue?", + }), + }).then((response) => { + response.json().then((json) => { + setGeneration(json.text); + setIsLoading(false); + }); + }); + }} + > + Generate +
+ + {isLoading ? "Loading..." : generation} +
+ ); +} diff --git a/examples/next-openai-pages/pages/basics/stream-text/index.tsx b/examples/next-openai-pages/pages/basics/stream-text/index.tsx new file mode 100644 index 000000000000..df4d33c87557 --- /dev/null +++ b/examples/next-openai-pages/pages/basics/stream-text/index.tsx @@ -0,0 +1,24 @@ +"use client"; + +import { useCompletion } from "ai/react"; + +export default function Page() { + const { completion, complete } = useCompletion({ + api: "/api/stream-text", + }); + + return ( +
+
{ + await complete("Why is the sky blue?"); + }} + > + Generate +
+ + {completion} +
+ ); +} diff --git a/examples/next-openai-pages/pages/chat-app-route.tsx b/examples/next-openai-pages/pages/chat-app-route.tsx deleted file mode 100644 index 4982dc86daea..000000000000 --- a/examples/next-openai-pages/pages/chat-app-route.tsx +++ /dev/null @@ -1,29 +0,0 @@ -'use client'; - -import { useChat } from 'ai/react'; - -export default function Chat() { - const { messages, input, handleInputChange, handleSubmit } = useChat({ - api: '/api/chat-app-route', - }); - - return ( -
- {messages.map(m => ( -
- {m.role === 'user' ? 'User: ' : 'AI: '} - {m.content} -
- ))} - -
- -
-
- ); -} diff --git a/examples/next-openai-pages/pages/chat-edge.tsx b/examples/next-openai-pages/pages/chat-edge.tsx deleted file mode 100644 index aaadf39bfd44..000000000000 --- a/examples/next-openai-pages/pages/chat-edge.tsx +++ /dev/null @@ -1,29 +0,0 @@ -'use client'; - -import { useChat } from 'ai/react'; - -export default function Chat() { - const { messages, input, handleInputChange, handleSubmit } = useChat({ - api: '/api/chat-edge', - }); - - return ( -
- {messages.map(m => ( -
- {m.role === 'user' ? 'User: ' : 'AI: '} - {m.content} -
- ))} - -
- -
-
- ); -} diff --git a/examples/next-openai-pages/pages/chat/edge-runtime/index.tsx b/examples/next-openai-pages/pages/chat/edge-runtime/index.tsx new file mode 100644 index 000000000000..557e95040aec --- /dev/null +++ b/examples/next-openai-pages/pages/chat/edge-runtime/index.tsx @@ -0,0 +1,28 @@ +import { useChat } from "ai/react"; + +export default function Chat() { + const { messages, input, handleInputChange, handleSubmit } = useChat({ + api: "/api/chat-edge", + }); + + return ( +
+
+ {messages.map((message) => ( +
+
{`${message.role}: `}
+
{message.content}
+
+ ))} +
+ +
+ +
+
+ ); +} diff --git a/examples/next-openai-pages/pages/chat/generate-chat/index.tsx b/examples/next-openai-pages/pages/chat/generate-chat/index.tsx new file mode 100644 index 000000000000..20650c719624 --- /dev/null +++ b/examples/next-openai-pages/pages/chat/generate-chat/index.tsx @@ -0,0 +1,63 @@ +import { CoreMessage } from "ai"; +import { useState } from "react"; + +export default function Page() { + const [input, setInput] = useState(""); + const [messages, setMessages] = useState([]); + + return ( +
+
+ {messages.map((message, index) => ( +
+
{`${message.role}: `}
+
+ {typeof message.content === "string" + ? message.content + : message.content + .filter((part) => part.type === "text") + .map((part, partIndex) => ( + // @ts-ignore +
{part.text}
+ ))} +
+
+ ))} +
+ +
+ { + setInput(event.target.value); + }} + className="bg-zinc-100 w-full p-2" + onKeyDown={async (event) => { + if (event.key === "Enter") { + setInput(""); + + setMessages((currentMessages) => [ + ...currentMessages, + { role: "user", content: input }, + ]); + + const response = await fetch("/api/generate-chat", { + method: "POST", + body: JSON.stringify({ + messages: [...messages, { role: "user", content: input }], + }), + }); + + const { messages: newMessages } = await response.json(); + + setMessages((currentMessages) => [ + ...currentMessages, + ...newMessages, + ]); + } + }} + /> +
+
+ ); +} diff --git a/examples/next-openai-pages/pages/chat/stream-chat/index.tsx b/examples/next-openai-pages/pages/chat/stream-chat/index.tsx new file mode 100644 index 000000000000..c4d67672a25b --- /dev/null +++ b/examples/next-openai-pages/pages/chat/stream-chat/index.tsx @@ -0,0 +1,28 @@ +import { useChat } from "ai/react"; + +export default function Page() { + const { messages, input, handleSubmit, handleInputChange } = useChat({ + api: "/api/stream-chat", + }); + + return ( +
+
+ {messages.map((message) => ( +
+
{`${message.role}: `}
+
{message.content}
+
+ ))} +
+ +
+ +
+
+ ); +} diff --git a/examples/next-openai-pages/pages/generative-user-interface/route-components/index.tsx b/examples/next-openai-pages/pages/generative-user-interface/route-components/index.tsx new file mode 100644 index 000000000000..1ca574008c54 --- /dev/null +++ b/examples/next-openai-pages/pages/generative-user-interface/route-components/index.tsx @@ -0,0 +1,134 @@ +import { ToolInvocation } from "ai"; +import { useChat } from "ai/react"; + +export default function Page() { + const { messages, input, handleInputChange, handleSubmit, addToolResult } = + useChat({ + api: "/api/generative-ui-route", + maxToolRoundtrips: 5, + async onToolCall({ toolCall }) { + if (toolCall.toolName === "getLocation") { + const cities = [ + "New York", + "Los Angeles", + "Chicago", + "San Francisco", + ]; + + return cities[Math.floor(Math.random() * cities.length)]; + } + }, + }); + + const renderToolResult = (tool: ToolInvocation) => { + const toolCallId = tool.toolCallId; + + // render confirmation tool (client-side tool with user interaction) + if (tool.toolName === "askForConfirmation") { + return ( +
+ {tool.args.message} +
+ {"result" in tool ? ( +
{tool.result}
+ ) : ( + <> + + + + )} +
+
+ ); + } + + // other tools: + return "result" in tool ? ( + tool.toolName === "getWeatherInformation" ? ( +
+
+
+ {tool.result.value}°{tool.result.unit === "celsius" ? "C" : "F"} +
+ +
+
+
+ {tool.result.weeklyForecast.map((forecast: any) => ( +
+
{forecast.day}
+
{forecast.value}°
+
+ ))} +
+
+ ) : tool.toolName === "getLocation" ? ( +
+ User is in {tool.result}. +
+ ) : ( +
+ Tool call {`${tool.toolName}: `} + {tool.result} +
+ ) + ) : ( +
+ Calling {tool.toolName}... +
+ ); + }; + + return ( +
+
+ {messages.map((message) => ( +
+
{`${ + message.toolInvocations ? "tool" : message.role + }: `}
+
+ {message.toolInvocations + ? message.toolInvocations.map((tool) => renderToolResult(tool)) + : message.content} +
+
+ ))} +
+ +
+ +
+
+ ); +} diff --git a/examples/next-openai-pages/pages/index.tsx b/examples/next-openai-pages/pages/index.tsx index b3b33a897267..e5667d402175 100644 --- a/examples/next-openai-pages/pages/index.tsx +++ b/examples/next-openai-pages/pages/index.tsx @@ -1,26 +1,118 @@ -'use client'; +import Image from "next/image"; +import { Inter } from "next/font/google"; -import { useChat } from 'ai/react'; +const inter = Inter({ subsets: ["latin"] }); -export default function Chat() { - const { messages, input, handleInputChange, handleSubmit } = useChat(); +export default function Home() { return ( -
- {messages.map(m => ( -
- {m.role === 'user' ? 'User: ' : 'AI: '} - {m.content} +
+
+

+ Get started by editing  + pages/index.tsx +

+ - ))} +
-
- + Next.js Logo -
-
+
+ +
+ +

+ Docs{" "} + + -> + +

+

+ Find in-depth information about Next.js features and API. +

+
+ + +

+ Learn{" "} + + -> + +

+

+ Learn about Next.js in an interactive course with quizzes! +

+
+ + +

+ Templates{" "} + + -> + +

+

+ Discover and deploy boilerplate example Next.js projects. +

+
+ + +

+ Deploy{" "} + + -> + +

+

+ Instantly deploy your Next.js site to a shareable URL with Vercel. +

+
+
+ ); } diff --git a/examples/next-openai-pages/pages/tools/call-tool/index.tsx b/examples/next-openai-pages/pages/tools/call-tool/index.tsx new file mode 100644 index 000000000000..55dc62c44d16 --- /dev/null +++ b/examples/next-openai-pages/pages/tools/call-tool/index.tsx @@ -0,0 +1,37 @@ +import { useChat } from "ai/react"; + +export default function Page() { + const { messages, input, handleInputChange, handleSubmit } = useChat({ + api: "/api/call-tool", + maxToolRoundtrips: 1, + }); + + return ( +
+
+ {messages.map((message) => ( +
+
{`${ + message.toolInvocations ? "tool" : message.role + }: `}
+
+ {message.toolInvocations + ? message.toolInvocations.map( + (tool) => `${tool.toolName}(${JSON.stringify(tool.args)})` + ) + : message.content} +
+
+ ))} +
+ +
+ +
+
+ ); +} diff --git a/examples/next-openai-pages/pages/tools/call-tools-in-parallel/index.tsx b/examples/next-openai-pages/pages/tools/call-tools-in-parallel/index.tsx new file mode 100644 index 000000000000..967c2744fbca --- /dev/null +++ b/examples/next-openai-pages/pages/tools/call-tools-in-parallel/index.tsx @@ -0,0 +1,39 @@ +import { useChat } from "ai/react"; + +export default function Page() { + const { messages, input, handleInputChange, handleSubmit } = useChat({ + api: "/api/call-tools-in-parallel", + maxToolRoundtrips: 1, + }); + + return ( +
+
+ {messages.map((message) => ( +
+
{`${ + message.toolInvocations ? "tool" : message.role + }: `}
+
+ {message.toolInvocations + ? message.toolInvocations.map((tool) => ( +
{`${ + tool.toolName + }(${JSON.stringify(tool.args)})`}
+ )) + : message.content} +
+
+ ))} +
+ +
+ +
+
+ ); +} diff --git a/examples/next-openai-pages/pages/use-chat-tools-ui.tsx b/examples/next-openai-pages/pages/use-chat-tools-ui.tsx deleted file mode 100644 index 67df288a2cb0..000000000000 --- a/examples/next-openai-pages/pages/use-chat-tools-ui.tsx +++ /dev/null @@ -1,136 +0,0 @@ -import { ToolInvocation } from 'ai'; -import { Message, useChat } from 'ai/react'; - -export default function Chat() { - const { messages, input, handleInputChange, handleSubmit, addToolResult } = - useChat({ - api: '/api/use-chat-tools-ui', - maxAutomaticRoundtrips: 5, - - // run client-side tools that are automatically executed: - async onToolCall({ toolCall }) { - if (toolCall.toolName === 'getLocation') { - const cities = [ - 'New York', - 'Los Angeles', - 'Chicago', - 'San Francisco', - ]; - return cities[Math.floor(Math.random() * cities.length)]; - } - }, - }); - - return ( -
- {messages?.map((m: Message) => ( -
- {`${m.role}: `} - {m.content} - {m.toolInvocations?.map((toolInvocation: ToolInvocation) => { - const toolCallId = toolInvocation.toolCallId; - - // render confirmation tool (client-side tool with user interaction) - if (toolInvocation.toolName === 'askForConfirmation') { - return ( -
- {toolInvocation.args.message} -
- {'result' in toolInvocation ? ( - {toolInvocation.result} - ) : ( - <> - - - - )} -
-
- ); - } - - // other tools: - return 'result' in toolInvocation ? ( - toolInvocation.toolName === 'getWeatherInformation' ? ( -
-
-
- {toolInvocation.result.value}° - {toolInvocation.result.unit === 'celsius' ? 'C' : 'F'} -
- -
-
-
- {toolInvocation.result.weeklyForecast.map( - (forecast: any) => ( -
-
{forecast.day}
-
{forecast.value}°
-
- ), - )} -
-
- ) : toolInvocation.toolName === 'getLocation' ? ( -
- User is in {toolInvocation.result}. -
- ) : ( -
- Tool call {`${toolInvocation.toolName}: `} - {toolInvocation.result} -
- ) - ) : ( -
- Calling {toolInvocation.toolName}... -
- ); - })} -
- ))} - -
- -
-
- ); -} diff --git a/examples/next-openai-pages/pages/globals.css b/examples/next-openai-pages/styles/globals.css similarity index 100% rename from examples/next-openai-pages/pages/globals.css rename to examples/next-openai-pages/styles/globals.css diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 436522c02259..6fc2aa123edd 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -551,6 +551,9 @@ importers: next: specifier: latest version: 14.2.3(react-dom@18.2.0)(react@18.2.0) + openai: + specifier: ^4.33.0 + version: 4.47.1 react: specifier: ^18 version: 18.2.0 @@ -562,11 +565,11 @@ importers: version: 3.23.8 devDependencies: '@types/node': - specifier: ^17.0.12 - version: 17.0.45 + specifier: ^20.12.7 + version: 20.12.7 '@types/react': - specifier: ^18 - version: 18.2.8 + specifier: ^18.3.3 + version: 18.3.3 '@types/react-dom': specifier: ^18 version: 18.2.4 @@ -578,7 +581,7 @@ importers: version: 7.32.0 eslint-config-next: specifier: 14.2.3 - version: 14.2.3(eslint@7.32.0)(typescript@5.1.3) + version: 14.2.3(eslint@7.32.0)(typescript@5.4.5) postcss: specifier: ^8.4.23 version: 8.4.31 @@ -586,8 +589,8 @@ importers: specifier: ^3.3.2 version: 3.3.5 typescript: - specifier: 5.1.3 - version: 5.1.3 + specifier: 5.4.5 + version: 5.4.5 examples/next-openai-rate-limits: dependencies: @@ -1209,7 +1212,7 @@ importers: dependencies: eslint-config-next: specifier: ^14.2.3 - version: 14.2.3(eslint@7.32.0)(typescript@5.1.3) + version: 14.2.3(eslint@7.32.0)(typescript@5.4.5) eslint-config-prettier: specifier: ^8.3.0 version: 8.10.0(eslint@7.32.0) @@ -7028,7 +7031,7 @@ packages: /@types/http-proxy@1.17.14: resolution: {integrity: sha512-SSrD0c1OQzlFX7pGu1eXxSEjemej64aaNPRhhVYUGqXh0BtldAAx37MG8btcumvpgKyZp1F5Gn3JkktdxiFv6w==} dependencies: - '@types/node': 20.11.20 + '@types/node': 20.14.2 dev: true /@types/is-ci@3.0.4: @@ -7058,7 +7061,7 @@ packages: /@types/node-fetch@2.6.9: resolution: {integrity: sha512-bQVlnMLFJ2d35DkPNjEPmd9ueO/rh5EiaZt2bhqiSarPjZIuIV6bPQVqcrEyvNo+AfTrRGVazle1tl597w3gfA==} dependencies: - '@types/node': 20.11.20 + '@types/node': 20.14.2 form-data: 4.0.0 /@types/node@12.20.55: @@ -7077,6 +7080,18 @@ packages: resolution: {integrity: sha512-7/rR21OS+fq8IyHTgtLkDK949uzsa6n8BkziAKtPVpugIkO6D+/ooXMvzXxDnZrmtXVfjb1bKQafYpb8s89LOg==} dependencies: undici-types: 5.26.5 + dev: true + + /@types/node@20.12.7: + resolution: {integrity: sha512-wq0cICSkRLVaf3UGLMGItu/PtdY7oaXaI/RVU+xliKVOtRna3PRY57ZDfztpDL0n11vfymMUnXv8QwYCO7L1wg==} + dependencies: + undici-types: 5.26.5 + dev: true + + /@types/node@20.14.2: + resolution: {integrity: sha512-xyu6WAMVwv6AKFLB+e/7ySZVr/0zLCzOa7rSpq6jNwpqOrUbcACDWC+53d4n2QHOnDou0fbIsg8wZu/sxrnI4Q==} + dependencies: + undici-types: 5.26.5 /@types/node@20.9.0: resolution: {integrity: sha512-nekiGu2NDb1BcVofVcEKMIwzlx4NjHlcjhoxxKBNLtz15Y1z7MYf549DFvkHSId02Ax6kGwWntIBPC3l/JZcmw==} @@ -7111,6 +7126,13 @@ packages: '@types/scheduler': 0.16.6 csstype: 3.1.2 + /@types/react@18.3.3: + resolution: {integrity: sha512-hti/R0pS0q1/xx+TsI73XIqk26eBsISZ2R0wUijXIngRK9R/e7Xw/cXVxQK7R5JjW+SV4zGcn5hXjudkN/pLIw==} + dependencies: + '@types/prop-types': 15.7.10 + csstype: 3.1.2 + dev: true + /@types/resolve@1.20.2: resolution: {integrity: sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==} @@ -7150,6 +7172,25 @@ packages: transitivePeerDependencies: - supports-color + /@typescript-eslint/parser@5.62.0(eslint@7.32.0)(typescript@5.4.5): + resolution: {integrity: 
sha512-VlJEV0fOQ7BExOsHYAGrgbEiZoi8D+Bl2+f6V2RrXerRSylnp+ZBHmPvaIa8cz0Ajx7WO7Z5RqfgYg7ED1nRhA==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@typescript-eslint/scope-manager': 5.62.0 + '@typescript-eslint/types': 5.62.0 + '@typescript-eslint/typescript-estree': 5.62.0(typescript@5.4.5) + debug: 4.3.4 + eslint: 7.32.0 + typescript: 5.4.5 + transitivePeerDependencies: + - supports-color + /@typescript-eslint/parser@5.62.0(eslint@8.57.0)(typescript@5.1.6): resolution: {integrity: sha512-VlJEV0fOQ7BExOsHYAGrgbEiZoi8D+Bl2+f6V2RrXerRSylnp+ZBHmPvaIa8cz0Ajx7WO7Z5RqfgYg7ED1nRhA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -7222,6 +7263,26 @@ packages: - supports-color dev: true + /@typescript-eslint/typescript-estree@5.62.0(typescript@5.4.5): + resolution: {integrity: sha512-CmcQ6uY7b9y694lKdRB8FEel7JbU/40iSAPomu++SjLMntB+2Leay2LO6i8VnJk58MtE9/nQSFIH6jpyRWyYzA==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@typescript-eslint/types': 5.62.0 + '@typescript-eslint/visitor-keys': 5.62.0 + debug: 4.3.4 + globby: 11.1.0 + is-glob: 4.0.3 + semver: 7.5.4 + tsutils: 3.21.0(typescript@5.4.5) + typescript: 5.4.5 + transitivePeerDependencies: + - supports-color + /@typescript-eslint/visitor-keys@5.62.0: resolution: {integrity: sha512-07ny+LHRzQXepkGg6w0mFY41fVUNBrL2Roj/++7V1txKugfjm/Ci/qSND03r2RhlJhJYMcTn9AhhSSqQp0Ysyw==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -10110,6 +10171,31 @@ packages: transitivePeerDependencies: - eslint-import-resolver-webpack - supports-color + dev: true + + /eslint-config-next@14.2.3(eslint@7.32.0)(typescript@5.4.5): + resolution: {integrity: sha512-ZkNztm3Q7hjqvB1rRlOX8P9E/cXRL9ajRcs8jufEtwMfTVYRqnmtnaSu57QqHyBlovMuiB8LEzfLBkh5RYV6Fg==} + peerDependencies: + eslint: ^7.23.0 || ^8.0.0 + typescript: '>=3.3.1' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@next/eslint-plugin-next': 14.2.3 + '@rushstack/eslint-patch': 1.5.1 + '@typescript-eslint/parser': 5.62.0(eslint@7.32.0)(typescript@5.4.5) + eslint: 7.32.0 + eslint-import-resolver-node: 0.3.9 + eslint-import-resolver-typescript: 3.6.1(@typescript-eslint/parser@5.62.0)(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.0)(eslint@7.32.0) + eslint-plugin-import: 2.29.0(@typescript-eslint/parser@5.62.0)(eslint-import-resolver-typescript@3.6.1)(eslint@7.32.0) + eslint-plugin-jsx-a11y: 6.8.0(eslint@7.32.0) + eslint-plugin-react: 7.34.1(eslint@7.32.0) + eslint-plugin-react-hooks: 5.0.0-canary-7118f5dd7-20230705(eslint@7.32.0) + typescript: 5.4.5 + transitivePeerDependencies: + - eslint-import-resolver-webpack + - supports-color /eslint-config-next@14.2.3(eslint@8.57.0)(typescript@5.1.6): resolution: {integrity: sha512-ZkNztm3Q7hjqvB1rRlOX8P9E/cXRL9ajRcs8jufEtwMfTVYRqnmtnaSu57QqHyBlovMuiB8LEzfLBkh5RYV6Fg==} @@ -12139,7 +12225,7 @@ packages: resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==} engines: {node: '>= 10.13.0'} dependencies: - '@types/node': 20.11.20 + '@types/node': 20.14.2 merge-stream: 2.0.0 supports-color: 8.1.1 dev: true @@ -17025,8 +17111,8 @@ packages: picocolors: 1.0.0 sade: 1.8.1 svelte: 4.2.3 - svelte-preprocess: 5.1.4(svelte@4.2.3)(typescript@5.1.6) - typescript: 5.1.6 + svelte-preprocess: 
5.1.4(svelte@4.2.3)(typescript@5.4.5) + typescript: 5.4.5 transitivePeerDependencies: - '@babel/core' - coffeescript @@ -17048,7 +17134,7 @@ packages: svelte: 4.2.3 dev: true - /svelte-preprocess@5.1.4(svelte@4.2.3)(typescript@5.1.6): + /svelte-preprocess@5.1.4(svelte@4.2.3)(typescript@5.4.5): resolution: {integrity: sha512-IvnbQ6D6Ao3Gg6ftiM5tdbR6aAETwjhHV+UKGf5bHGYR69RQvF1ho0JKPcbUON4vy4R7zom13jPjgdOWCQ5hDA==} engines: {node: '>= 16.0.0'} requiresBuild: true @@ -17092,7 +17178,7 @@ packages: sorcery: 0.11.0 strip-indent: 3.0.0 svelte: 4.2.3 - typescript: 5.1.6 + typescript: 5.4.5 dev: true /svelte@4.2.3: @@ -17607,6 +17693,15 @@ packages: typescript: 5.1.6 dev: true + /tsutils@3.21.0(typescript@5.4.5): + resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==} + engines: {node: '>= 6'} + peerDependencies: + typescript: '>=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta' + dependencies: + tslib: 1.14.1 + typescript: 5.4.5 + /tsx@4.7.1: resolution: {integrity: sha512-8d6VuibXHtlN5E3zFkgY8u4DX7Y3Z27zvvPKVmLon/D4AjuKzarkUBTLDBgj9iTQ0hg5xM7c/mYiRVM+HETf0g==} engines: {node: '>=18.0.0'} @@ -17836,6 +17931,11 @@ packages: hasBin: true dev: true + /typescript@5.4.5: + resolution: {integrity: sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==} + engines: {node: '>=14.17'} + hasBin: true + /ufo@1.3.1: resolution: {integrity: sha512-uY/99gMLIOlJPwATcMVYfqDSxUR9//AUcgZMzwfSTJPDKzA1S8mX4VLqa+fiAtveraQUBCz4FFcwVZBGbwBXIw==} dev: true From 0540ec55a9749772b54c66228298ae4fdff2e046 Mon Sep 17 00:00:00 2001 From: jeremyphilemon Date: Fri, 7 Jun 2024 00:45:24 +0300 Subject: [PATCH 2/5] Apply prettier --- .../app/api/call-tool/route.ts | 16 ++++---- .../app/api/call-tools-in-parallel/route.ts | 22 +++++------ .../app/api/generate-object/route.ts | 16 ++++---- .../app/api/generate-text/route.ts | 8 ++-- .../app/api/generative-ui-route/route.ts | 34 ++++++++--------- .../app/api/stream-chat/route.ts | 8 ++-- .../app/api/stream-text/route.ts | 8 ++-- examples/next-openai-pages/pages/_app.tsx | 4 +- .../next-openai-pages/pages/_document.tsx | 2 +- .../next-openai-pages/pages/api/chat-edge.ts | 8 ++-- .../index.tsx | 6 +-- .../stream-assistant-response/index.tsx | 6 +-- .../pages/basics/generate-object/index.tsx | 16 ++++---- .../pages/basics/generate-text/index.tsx | 16 ++++---- .../pages/basics/stream-text/index.tsx | 8 ++-- .../pages/chat/edge-runtime/index.tsx | 6 +-- .../pages/chat/generate-chat/index.tsx | 30 +++++++-------- .../pages/chat/stream-chat/index.tsx | 6 +-- .../route-components/index.tsx | 38 +++++++++---------- examples/next-openai-pages/pages/index.tsx | 16 ++++---- .../pages/tools/call-tool/index.tsx | 10 ++--- .../tools/call-tools-in-parallel/index.tsx | 10 ++--- 22 files changed, 147 insertions(+), 147 deletions(-) diff --git a/examples/next-openai-pages/app/api/call-tool/route.ts b/examples/next-openai-pages/app/api/call-tool/route.ts index e244ce33791a..9826d9745b8f 100644 --- a/examples/next-openai-pages/app/api/call-tool/route.ts +++ b/examples/next-openai-pages/app/api/call-tool/route.ts @@ -1,9 +1,9 @@ -import { ToolInvocation, convertToCoreMessages, streamText } from "ai"; -import { openai } from "@ai-sdk/openai"; -import { z } from "zod"; +import { ToolInvocation, convertToCoreMessages, streamText } from 'ai'; +import { openai } from '@ai-sdk/openai'; +import { z } from 'zod'; interface 
Message { - role: "user" | "assistant"; + role: 'user' | 'assistant'; content: string; toolInvocations?: ToolInvocation[]; } @@ -12,15 +12,15 @@ export async function POST(req: Request) { const { messages }: { messages: Message[] } = await req.json(); const result = await streamText({ - model: openai("gpt-4"), - system: "You are a helpful assistant.", + model: openai('gpt-4'), + system: 'You are a helpful assistant.', // @ts-ignore messages: convertToCoreMessages(messages), tools: { celsiusToFahrenheit: { - description: "Converts celsius to fahrenheit", + description: 'Converts celsius to fahrenheit', parameters: z.object({ - value: z.string().describe("The value in celsius"), + value: z.string().describe('The value in celsius'), }), execute: async ({ value }) => { const celsius = parseFloat(value); diff --git a/examples/next-openai-pages/app/api/call-tools-in-parallel/route.ts b/examples/next-openai-pages/app/api/call-tools-in-parallel/route.ts index c096791bcffc..02c4ac379539 100644 --- a/examples/next-openai-pages/app/api/call-tools-in-parallel/route.ts +++ b/examples/next-openai-pages/app/api/call-tools-in-parallel/route.ts @@ -1,33 +1,33 @@ -import { ToolInvocation, convertToCoreMessages, streamText } from "ai"; -import { openai } from "@ai-sdk/openai"; -import { z } from "zod"; +import { ToolInvocation, convertToCoreMessages, streamText } from 'ai'; +import { openai } from '@ai-sdk/openai'; +import { z } from 'zod'; interface Message { - role: "user" | "assistant"; + role: 'user' | 'assistant'; content: string; toolInvocations?: ToolInvocation[]; } function getWeather({ city, unit }: { city: string; unit: string }) { - return { value: 25, description: "Sunny" }; + return { value: 25, description: 'Sunny' }; } export async function POST(req: Request) { const { messages }: { messages: Message[] } = await req.json(); const result = await streamText({ - model: openai("gpt-4o"), - system: "You are a helpful assistant.", + model: openai('gpt-4o'), + system: 'You are a helpful assistant.', // @ts-ignore messages: convertToCoreMessages(messages), tools: { getWeather: { - description: "Get the weather for a location", + description: 'Get the weather for a location', parameters: z.object({ - city: z.string().describe("The city to get the weather for"), + city: z.string().describe('The city to get the weather for'), unit: z - .enum(["C", "F"]) - .describe("The unit to display the temperature in"), + .enum(['C', 'F']) + .describe('The unit to display the temperature in'), }), execute: async ({ city, unit }) => { const weather = getWeather({ city, unit }); diff --git a/examples/next-openai-pages/app/api/generate-object/route.ts b/examples/next-openai-pages/app/api/generate-object/route.ts index d9c09dbc6466..cabff4128d87 100644 --- a/examples/next-openai-pages/app/api/generate-object/route.ts +++ b/examples/next-openai-pages/app/api/generate-object/route.ts @@ -1,21 +1,21 @@ -import { generateObject } from "ai"; -import { openai } from "@ai-sdk/openai"; -import { z } from "zod"; +import { generateObject } from 'ai'; +import { openai } from '@ai-sdk/openai'; +import { z } from 'zod'; export async function POST(req: Request) { const { prompt }: { prompt: string } = await req.json(); const { object } = await generateObject({ - model: openai("gpt-4"), - system: "You are a helpful assistant.", + model: openai('gpt-4'), + system: 'You are a helpful assistant.', prompt, schema: z.object({ notifications: z.array( z.object({ - name: z.string().describe("Name of a fictional person."), - message: 
z.string().describe("Do not use emojis or links."), + name: z.string().describe('Name of a fictional person.'), + message: z.string().describe('Do not use emojis or links.'), minutesAgo: z.number(), - }) + }), ), }), }); diff --git a/examples/next-openai-pages/app/api/generate-text/route.ts b/examples/next-openai-pages/app/api/generate-text/route.ts index 22fdba632807..907428e9b48f 100644 --- a/examples/next-openai-pages/app/api/generate-text/route.ts +++ b/examples/next-openai-pages/app/api/generate-text/route.ts @@ -1,12 +1,12 @@ -import { generateText } from "ai"; -import { openai } from "@ai-sdk/openai"; +import { generateText } from 'ai'; +import { openai } from '@ai-sdk/openai'; export async function POST(req: Request) { const { prompt }: { prompt: string } = await req.json(); const { text } = await generateText({ - model: openai("gpt-4"), - system: "You are a helpful assistant.", + model: openai('gpt-4'), + system: 'You are a helpful assistant.', prompt, }); diff --git a/examples/next-openai-pages/app/api/generative-ui-route/route.ts b/examples/next-openai-pages/app/api/generative-ui-route/route.ts index 9338a0547615..06f8a1599dd9 100644 --- a/examples/next-openai-pages/app/api/generative-ui-route/route.ts +++ b/examples/next-openai-pages/app/api/generative-ui-route/route.ts @@ -1,46 +1,46 @@ -import { CoreMessage, convertToCoreMessages, streamText } from "ai"; -import { openai } from "@ai-sdk/openai"; -import { z } from "zod"; +import { CoreMessage, convertToCoreMessages, streamText } from 'ai'; +import { openai } from '@ai-sdk/openai'; +import { z } from 'zod'; export async function POST(req: Request) { const { messages }: { messages: CoreMessage[] } = await req.json(); const result = await streamText({ - model: openai("gpt-4"), - system: "You are a helpful assistant.", + model: openai('gpt-4'), + system: 'You are a helpful assistant.', // @ts-expect-error TODO: fix messages type messages: convertToCoreMessages(messages), tools: { getWeatherInformation: { - description: "show the weather in a given city to the user", + description: 'show the weather in a given city to the user', parameters: z.object({ city: z.string() }), execute: async ({}: { city: string }) => { return { value: 24, - unit: "celsius", + unit: 'celsius', weeklyForecast: [ - { day: "Mon", value: 24 }, - { day: "Tue", value: 25 }, - { day: "Wed", value: 26 }, - { day: "Thu", value: 27 }, - { day: "Fri", value: 28 }, - { day: "Sat", value: 29 }, - { day: "Sun", value: 30 }, + { day: 'Mon', value: 24 }, + { day: 'Tue', value: 25 }, + { day: 'Wed', value: 26 }, + { day: 'Thu', value: 27 }, + { day: 'Fri', value: 28 }, + { day: 'Sat', value: 29 }, + { day: 'Sun', value: 30 }, ], }; }, }, // client-side tool that starts user interaction: askForConfirmation: { - description: "Ask the user for confirmation.", + description: 'Ask the user for confirmation.', parameters: z.object({ - message: z.string().describe("The message to ask for confirmation."), + message: z.string().describe('The message to ask for confirmation.'), }), }, // client-side tool that is automatically executed on the client: getLocation: { description: - "Get the user location. Always ask for confirmation before using this tool.", + 'Get the user location. 
Always ask for confirmation before using this tool.', parameters: z.object({}), }, }, diff --git a/examples/next-openai-pages/app/api/stream-chat/route.ts b/examples/next-openai-pages/app/api/stream-chat/route.ts index b1ec08e7cb3f..c05a7a044361 100644 --- a/examples/next-openai-pages/app/api/stream-chat/route.ts +++ b/examples/next-openai-pages/app/api/stream-chat/route.ts @@ -1,12 +1,12 @@ -import { CoreMessage, streamText } from "ai"; -import { openai } from "@ai-sdk/openai"; +import { CoreMessage, streamText } from 'ai'; +import { openai } from '@ai-sdk/openai'; export async function POST(req: Request) { const { messages }: { messages: CoreMessage[] } = await req.json(); const result = await streamText({ - model: openai("gpt-4"), - system: "You are a helpful assistant.", + model: openai('gpt-4'), + system: 'You are a helpful assistant.', messages, }); diff --git a/examples/next-openai-pages/app/api/stream-text/route.ts b/examples/next-openai-pages/app/api/stream-text/route.ts index d113ac190bcd..295cd9dadb8d 100644 --- a/examples/next-openai-pages/app/api/stream-text/route.ts +++ b/examples/next-openai-pages/app/api/stream-text/route.ts @@ -1,12 +1,12 @@ -import { streamText } from "ai"; -import { openai } from "@ai-sdk/openai"; +import { streamText } from 'ai'; +import { openai } from '@ai-sdk/openai'; export async function POST(req: Request) { const { prompt }: { prompt: string } = await req.json(); const result = await streamText({ - model: openai("gpt-4"), - system: "You are a helpful assistant.", + model: openai('gpt-4'), + system: 'You are a helpful assistant.', prompt, }); diff --git a/examples/next-openai-pages/pages/_app.tsx b/examples/next-openai-pages/pages/_app.tsx index a7a790fba51f..c14313e83e53 100644 --- a/examples/next-openai-pages/pages/_app.tsx +++ b/examples/next-openai-pages/pages/_app.tsx @@ -1,5 +1,5 @@ -import "@/styles/globals.css"; -import type { AppProps } from "next/app"; +import '@/styles/globals.css'; +import type { AppProps } from 'next/app'; export default function App({ Component, pageProps }: AppProps) { return ; diff --git a/examples/next-openai-pages/pages/_document.tsx b/examples/next-openai-pages/pages/_document.tsx index b2fff8b4262d..e1e9cbbb75aa 100644 --- a/examples/next-openai-pages/pages/_document.tsx +++ b/examples/next-openai-pages/pages/_document.tsx @@ -1,4 +1,4 @@ -import { Html, Head, Main, NextScript } from "next/document"; +import { Html, Head, Main, NextScript } from 'next/document'; export default function Document() { return ( diff --git a/examples/next-openai-pages/pages/api/chat-edge.ts b/examples/next-openai-pages/pages/api/chat-edge.ts index e2d1e68c0442..a6f82db7d9bd 100644 --- a/examples/next-openai-pages/pages/api/chat-edge.ts +++ b/examples/next-openai-pages/pages/api/chat-edge.ts @@ -1,13 +1,13 @@ -import { openai } from "@ai-sdk/openai"; -import { streamText } from "ai"; +import { openai } from '@ai-sdk/openai'; +import { streamText } from 'ai'; -export const runtime = "edge"; +export const runtime = 'edge'; export default async function handler(req: Request) { const { messages } = await req.json(); const result = await streamText({ - model: openai("gpt-4-turbo-preview"), + model: openai('gpt-4-turbo-preview'), messages, }); diff --git a/examples/next-openai-pages/pages/assistants/stream-assistant-response-with-tools/index.tsx b/examples/next-openai-pages/pages/assistants/stream-assistant-response-with-tools/index.tsx index 36d986cf8dea..acd8c1f4420b 100644 --- 
a/examples/next-openai-pages/pages/assistants/stream-assistant-response-with-tools/index.tsx +++ b/examples/next-openai-pages/pages/assistants/stream-assistant-response-with-tools/index.tsx @@ -1,8 +1,8 @@ -import { Message, useAssistant } from "ai/react"; +import { Message, useAssistant } from 'ai/react'; export default function Page() { const { status, messages, input, submitMessage, handleInputChange } = - useAssistant({ api: "/api/assistant-tools" }); + useAssistant({ api: '/api/assistant-tools' }); return (
@@ -19,7 +19,7 @@ export default function Page() {
@@ -19,7 +19,7 @@ export default function Page() { { setIsLoading(true); - await fetch("/api/generate-object", { - method: "POST", + await fetch('/api/generate-object', { + method: 'POST', body: JSON.stringify({ - prompt: "Messages during finals week.", + prompt: 'Messages during finals week.', }), - }).then((response) => { - response.json().then((json) => { + }).then(response => { + response.json().then(json => { console.log(json); setGeneration(JSON.stringify(json.object, null, 2)); setIsLoading(false); @@ -29,7 +29,7 @@ export default function Page() {
{isLoading ? ( - "Loading..." + 'Loading...' ) : (
{generation}
)} diff --git a/examples/next-openai-pages/pages/basics/generate-text/index.tsx b/examples/next-openai-pages/pages/basics/generate-text/index.tsx index 42de2a8b21de..b441ff0501ae 100644 --- a/examples/next-openai-pages/pages/basics/generate-text/index.tsx +++ b/examples/next-openai-pages/pages/basics/generate-text/index.tsx @@ -1,7 +1,7 @@ -import { useState } from "react"; +import { useState } from 'react'; export default function Page() { - const [generation, setGeneration] = useState(""); + const [generation, setGeneration] = useState(''); const [isLoading, setIsLoading] = useState(false); return ( @@ -11,13 +11,13 @@ export default function Page() { onClick={async () => { setIsLoading(true); - await fetch("/api/generate-text", { - method: "POST", + await fetch('/api/generate-text', { + method: 'POST', body: JSON.stringify({ - prompt: "Why is the sky blue?", + prompt: 'Why is the sky blue?', }), - }).then((response) => { - response.json().then((json) => { + }).then(response => { + response.json().then(json => { setGeneration(json.text); setIsLoading(false); }); @@ -27,7 +27,7 @@ export default function Page() { Generate
- {isLoading ? "Loading..." : generation} + {isLoading ? 'Loading...' : generation}
); } diff --git a/examples/next-openai-pages/pages/basics/stream-text/index.tsx b/examples/next-openai-pages/pages/basics/stream-text/index.tsx index df4d33c87557..db139d2089a5 100644 --- a/examples/next-openai-pages/pages/basics/stream-text/index.tsx +++ b/examples/next-openai-pages/pages/basics/stream-text/index.tsx @@ -1,10 +1,10 @@ -"use client"; +'use client'; -import { useCompletion } from "ai/react"; +import { useCompletion } from 'ai/react'; export default function Page() { const { completion, complete } = useCompletion({ - api: "/api/stream-text", + api: '/api/stream-text', }); return ( @@ -12,7 +12,7 @@ export default function Page() {
{ - await complete("Why is the sky blue?"); + await complete('Why is the sky blue?'); }} > Generate diff --git a/examples/next-openai-pages/pages/chat/edge-runtime/index.tsx b/examples/next-openai-pages/pages/chat/edge-runtime/index.tsx index 557e95040aec..8e545182bc6a 100644 --- a/examples/next-openai-pages/pages/chat/edge-runtime/index.tsx +++ b/examples/next-openai-pages/pages/chat/edge-runtime/index.tsx @@ -1,14 +1,14 @@ -import { useChat } from "ai/react"; +import { useChat } from 'ai/react'; export default function Chat() { const { messages, input, handleInputChange, handleSubmit } = useChat({ - api: "/api/chat-edge", + api: '/api/chat-edge', }); return (
- {messages.map((message) => ( + {messages.map(message => (
{`${message.role}: `}
{message.content}
diff --git a/examples/next-openai-pages/pages/chat/generate-chat/index.tsx b/examples/next-openai-pages/pages/chat/generate-chat/index.tsx index 20650c719624..9f4829084122 100644 --- a/examples/next-openai-pages/pages/chat/generate-chat/index.tsx +++ b/examples/next-openai-pages/pages/chat/generate-chat/index.tsx @@ -1,8 +1,8 @@ -import { CoreMessage } from "ai"; -import { useState } from "react"; +import { CoreMessage } from 'ai'; +import { useState } from 'react'; export default function Page() { - const [input, setInput] = useState(""); + const [input, setInput] = useState(''); const [messages, setMessages] = useState([]); return ( @@ -12,10 +12,10 @@ export default function Page() {
{`${message.role}: `}
- {typeof message.content === "string" + {typeof message.content === 'string' ? message.content : message.content - .filter((part) => part.type === "text") + .filter(part => part.type === 'text') .map((part, partIndex) => ( // @ts-ignore
{part.text}
@@ -28,29 +28,29 @@ export default function Page() {
{ + onChange={event => { setInput(event.target.value); }} className="bg-zinc-100 w-full p-2" - onKeyDown={async (event) => { - if (event.key === "Enter") { - setInput(""); + onKeyDown={async event => { + if (event.key === 'Enter') { + setInput(''); - setMessages((currentMessages) => [ + setMessages(currentMessages => [ ...currentMessages, - { role: "user", content: input }, + { role: 'user', content: input }, ]); - const response = await fetch("/api/generate-chat", { - method: "POST", + const response = await fetch('/api/generate-chat', { + method: 'POST', body: JSON.stringify({ - messages: [...messages, { role: "user", content: input }], + messages: [...messages, { role: 'user', content: input }], }), }); const { messages: newMessages } = await response.json(); - setMessages((currentMessages) => [ + setMessages(currentMessages => [ ...currentMessages, ...newMessages, ]); diff --git a/examples/next-openai-pages/pages/chat/stream-chat/index.tsx b/examples/next-openai-pages/pages/chat/stream-chat/index.tsx index c4d67672a25b..eeb591834f14 100644 --- a/examples/next-openai-pages/pages/chat/stream-chat/index.tsx +++ b/examples/next-openai-pages/pages/chat/stream-chat/index.tsx @@ -1,14 +1,14 @@ -import { useChat } from "ai/react"; +import { useChat } from 'ai/react'; export default function Page() { const { messages, input, handleSubmit, handleInputChange } = useChat({ - api: "/api/stream-chat", + api: '/api/stream-chat', }); return (
- {messages.map((message) => ( + {messages.map(message => (
{`${message.role}: `}
{message.content}
diff --git a/examples/next-openai-pages/pages/generative-user-interface/route-components/index.tsx b/examples/next-openai-pages/pages/generative-user-interface/route-components/index.tsx index 1ca574008c54..b673823f0ca5 100644 --- a/examples/next-openai-pages/pages/generative-user-interface/route-components/index.tsx +++ b/examples/next-openai-pages/pages/generative-user-interface/route-components/index.tsx @@ -1,18 +1,18 @@ -import { ToolInvocation } from "ai"; -import { useChat } from "ai/react"; +import { ToolInvocation } from 'ai'; +import { useChat } from 'ai/react'; export default function Page() { const { messages, input, handleInputChange, handleSubmit, addToolResult } = useChat({ - api: "/api/generative-ui-route", + api: '/api/generative-ui-route', maxToolRoundtrips: 5, async onToolCall({ toolCall }) { - if (toolCall.toolName === "getLocation") { + if (toolCall.toolName === 'getLocation') { const cities = [ - "New York", - "Los Angeles", - "Chicago", - "San Francisco", + 'New York', + 'Los Angeles', + 'Chicago', + 'San Francisco', ]; return cities[Math.floor(Math.random() * cities.length)]; @@ -24,12 +24,12 @@ export default function Page() { const toolCallId = tool.toolCallId; // render confirmation tool (client-side tool with user interaction) - if (tool.toolName === "askForConfirmation") { + if (tool.toolName === 'askForConfirmation') { return (
{tool.args.message}
- {"result" in tool ? ( + {'result' in tool ? (
{tool.result}
) : ( <> @@ -38,7 +38,7 @@ export default function Page() { onClick={() => addToolResult({ toolCallId, - result: "Yes, confirmed.", + result: 'Yes, confirmed.', }) } > @@ -49,7 +49,7 @@ export default function Page() { onClick={() => addToolResult({ toolCallId, - result: "No, denied", + result: 'No, denied', }) } > @@ -63,15 +63,15 @@ export default function Page() { } // other tools: - return "result" in tool ? ( - tool.toolName === "getWeatherInformation" ? ( + return 'result' in tool ? ( + tool.toolName === 'getWeatherInformation' ? (
- {tool.result.value}°{tool.result.unit === "celsius" ? "C" : "F"} + {tool.result.value}°{tool.result.unit === 'celsius' ? 'C' : 'F'}
@@ -85,7 +85,7 @@ export default function Page() { ))}
- ) : tool.toolName === "getLocation" ? ( + ) : tool.toolName === 'getLocation' ? (
- {messages.map((message) => ( + {messages.map(message => (
{`${ - message.toolInvocations ? "tool" : message.role + message.toolInvocations ? 'tool' : message.role }: `}
{message.toolInvocations - ? message.toolInvocations.map((tool) => renderToolResult(tool)) + ? message.toolInvocations.map(tool => renderToolResult(tool)) : message.content}
diff --git a/examples/next-openai-pages/pages/index.tsx b/examples/next-openai-pages/pages/index.tsx index e5667d402175..7bdfeafd7365 100644 --- a/examples/next-openai-pages/pages/index.tsx +++ b/examples/next-openai-pages/pages/index.tsx @@ -1,7 +1,7 @@ -import Image from "next/image"; -import { Inter } from "next/font/google"; +import Image from 'next/image'; +import { Inter } from 'next/font/google'; -const inter = Inter({ subsets: ["latin"] }); +const inter = Inter({ subsets: ['latin'] }); export default function Home() { return ( @@ -20,7 +20,7 @@ export default function Home() { target="_blank" rel="noopener noreferrer" > - By{" "} + By{' '} Vercel Logo

- Docs{" "} + Docs{' '} -> @@ -69,7 +69,7 @@ export default function Home() { rel="noopener noreferrer" >

- Learn{" "} + Learn{' '} -> @@ -86,7 +86,7 @@ export default function Home() { rel="noopener noreferrer" >

- Templates{" "} + Templates{' '} -> @@ -103,7 +103,7 @@ export default function Home() { rel="noopener noreferrer" >

- Deploy{" "} + Deploy{' '} -> diff --git a/examples/next-openai-pages/pages/tools/call-tool/index.tsx b/examples/next-openai-pages/pages/tools/call-tool/index.tsx index 55dc62c44d16..1ae75abd3246 100644 --- a/examples/next-openai-pages/pages/tools/call-tool/index.tsx +++ b/examples/next-openai-pages/pages/tools/call-tool/index.tsx @@ -1,23 +1,23 @@ -import { useChat } from "ai/react"; +import { useChat } from 'ai/react'; export default function Page() { const { messages, input, handleInputChange, handleSubmit } = useChat({ - api: "/api/call-tool", + api: '/api/call-tool', maxToolRoundtrips: 1, }); return (
- {messages.map((message) => ( + {messages.map(message => (
{`${ - message.toolInvocations ? "tool" : message.role + message.toolInvocations ? 'tool' : message.role }: `}
{message.toolInvocations ? message.toolInvocations.map( - (tool) => `${tool.toolName}(${JSON.stringify(tool.args)})` + tool => `${tool.toolName}(${JSON.stringify(tool.args)})`, ) : message.content}
diff --git a/examples/next-openai-pages/pages/tools/call-tools-in-parallel/index.tsx b/examples/next-openai-pages/pages/tools/call-tools-in-parallel/index.tsx index 967c2744fbca..1d60103b6cbc 100644 --- a/examples/next-openai-pages/pages/tools/call-tools-in-parallel/index.tsx +++ b/examples/next-openai-pages/pages/tools/call-tools-in-parallel/index.tsx @@ -1,22 +1,22 @@ -import { useChat } from "ai/react"; +import { useChat } from 'ai/react'; export default function Page() { const { messages, input, handleInputChange, handleSubmit } = useChat({ - api: "/api/call-tools-in-parallel", + api: '/api/call-tools-in-parallel', maxToolRoundtrips: 1, }); return (
- {messages.map((message) => ( + {messages.map(message => (
{`${ - message.toolInvocations ? "tool" : message.role + message.toolInvocations ? 'tool' : message.role }: `}
{message.toolInvocations - ? message.toolInvocations.map((tool) => ( + ? message.toolInvocations.map(tool => (
{`${ tool.toolName }(${JSON.stringify(tool.args)})`}
From 2a71c4ecf5d42c6c0e77fa27da709d1dabbd5301 Mon Sep 17 00:00:00 2001 From: jeremyphilemon Date: Fri, 7 Jun 2024 01:21:04 +0300 Subject: [PATCH 3/5] Update index page --- .../index.tsx | 0 examples/next-openai-pages/pages/index.tsx | 168 ++++++------------ 2 files changed, 59 insertions(+), 109 deletions(-) rename examples/next-openai-pages/pages/chat/{edge-runtime => stream-chat-edge}/index.tsx (100%) diff --git a/examples/next-openai-pages/pages/chat/edge-runtime/index.tsx b/examples/next-openai-pages/pages/chat/stream-chat-edge/index.tsx similarity index 100% rename from examples/next-openai-pages/pages/chat/edge-runtime/index.tsx rename to examples/next-openai-pages/pages/chat/stream-chat-edge/index.tsx diff --git a/examples/next-openai-pages/pages/index.tsx b/examples/next-openai-pages/pages/index.tsx index 7bdfeafd7365..f79b6945a81c 100644 --- a/examples/next-openai-pages/pages/index.tsx +++ b/examples/next-openai-pages/pages/index.tsx @@ -1,118 +1,68 @@ -import Image from 'next/image'; import { Inter } from 'next/font/google'; +import Link from 'next/link'; const inter = Inter({ subsets: ['latin'] }); +const examples = [ + { + title: 'Generate object', + link: '/basics/generate-object', + }, + { + title: 'Generate text', + link: '/basics/generate-text', + }, + { + title: 'Stream text', + link: '/basics/stream-text', + }, + { + title: 'Generate chat completion', + link: '/chat/generate-chat', + }, + { + title: 'Generate chat completion (edge)', + link: '/chat/stream-chat', + }, + { + title: 'Generate chat completion (edge)', + link: '/chat/stream-chat-edge', + }, + { + title: 'Stream chat completion', + link: '/chat/stream-chat', + }, + { + title: 'Call tools', + link: '/tools/call-tool', + }, + { + title: 'Call tools in parallel', + link: '/tools/call-tools-in-parallel', + }, + { + title: 'Route components using language model', + link: '/generative-user-interface/route-components', + }, + { + title: 'Stream OpenAI Assistant API response', + link: '/assistants/stream-assistant-response', + }, + { + title: 'Stream OpenAI Assistant API response with tool calls', + link: '/assistants/stream-assistant-response-with-tools', + }, +]; + export default function Home() { return ( -
-
-

- Get started by editing  - pages/index.tsx -

- -
- -
- Next.js Logo -
- - +
+ {examples.map((example, index) => ( + +
{index + 1}.
+
{example.title}
+ + ))}
); } From 2492db23a8fb17f9f228f7914a60de0d9a44672b Mon Sep 17 00:00:00 2001 From: jeremyphilemon Date: Fri, 7 Jun 2024 01:26:09 +0300 Subject: [PATCH 4/5] Fix build issue --- examples/next-openai-pages/pages/index.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/next-openai-pages/pages/index.tsx b/examples/next-openai-pages/pages/index.tsx index f79b6945a81c..5e34bbdd0950 100644 --- a/examples/next-openai-pages/pages/index.tsx +++ b/examples/next-openai-pages/pages/index.tsx @@ -58,7 +58,7 @@ export default function Home() { return (
{examples.map((example, index) => ( - +
{index + 1}.
{example.title}
From e475fd150e6e447aa4510049d326877df188096d Mon Sep 17 00:00:00 2001 From: jeremyphilemon Date: Fri, 7 Jun 2024 01:40:11 +0300 Subject: [PATCH 5/5] Add api route example --- .../pages/api/chat-api-route.ts | 19 +++++++++++++ .../chat/stream-chat-api-route/index.tsx | 28 +++++++++++++++++++ examples/next-openai-pages/pages/index.tsx | 10 +++++-- 3 files changed, 54 insertions(+), 3 deletions(-) create mode 100644 examples/next-openai-pages/pages/api/chat-api-route.ts create mode 100644 examples/next-openai-pages/pages/chat/stream-chat-api-route/index.tsx diff --git a/examples/next-openai-pages/pages/api/chat-api-route.ts b/examples/next-openai-pages/pages/api/chat-api-route.ts new file mode 100644 index 000000000000..9729540b8b56 --- /dev/null +++ b/examples/next-openai-pages/pages/api/chat-api-route.ts @@ -0,0 +1,19 @@ +import { openai } from '@ai-sdk/openai'; +import { streamText } from 'ai'; +import { NextApiRequest, NextApiResponse } from 'next'; + +export default async function handler( + request: NextApiRequest, + response: NextApiResponse, +) { + const { messages } = await request.body; + + const result = await streamText({ + model: openai('gpt-4-turbo-preview'), + messages, + }); + + // write the AI stream to the response + // Note: this is sent as a single response, not a stream + result.pipeAIStreamToResponse(response); +} diff --git a/examples/next-openai-pages/pages/chat/stream-chat-api-route/index.tsx b/examples/next-openai-pages/pages/chat/stream-chat-api-route/index.tsx new file mode 100644 index 000000000000..bec14121bcb8 --- /dev/null +++ b/examples/next-openai-pages/pages/chat/stream-chat-api-route/index.tsx @@ -0,0 +1,28 @@ +import { useChat } from 'ai/react'; + +export default function Chat() { + const { messages, input, handleInputChange, handleSubmit } = useChat({ + api: '/api/chat-api-route', + }); + + return ( +
+
+ {messages.map(message => ( +
+
{`${message.role}: `}
+
{message.content}
+
+ ))} +
+ + + + +
+ ); +} diff --git a/examples/next-openai-pages/pages/index.tsx b/examples/next-openai-pages/pages/index.tsx index 5e34bbdd0950..5b5fdf37d3e0 100644 --- a/examples/next-openai-pages/pages/index.tsx +++ b/examples/next-openai-pages/pages/index.tsx @@ -21,11 +21,15 @@ const examples = [ link: '/chat/generate-chat', }, { - title: 'Generate chat completion (edge)', + title: 'Generate chat completion', link: '/chat/stream-chat', }, { - title: 'Generate chat completion (edge)', + title: 'Generate chat completion (API route)', + link: '/chat/stream-chat-api-route', + }, + { + title: 'Generate chat completion (edge runtime)', link: '/chat/stream-chat-edge', }, { @@ -59,7 +63,7 @@ export default function Home() {
{examples.map((example, index) => ( -
{index + 1}.
+
{index + 1}.
{example.title}
))}
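
Editor's note (not part of the patch series): PATCH 5/5 adds a Pages Router API handler at /api/chat-api-route that streams the model output via result.pipeAIStreamToResponse(response), plus a page that consumes it through useChat. As a rough illustration of what that endpoint returns on the wire, the sketch below reads the response body directly with plain fetch and a stream reader. The route path and the { messages } request shape are taken from the handler in the patch; the exact chunk framing is whatever the AI SDK's stream protocol emits, so this snippet only logs the raw decoded text as it arrives and is a sketch, not the library's documented client API.

    // Minimal TypeScript sketch (assumes the /api/chat-api-route handler from
    // PATCH 5/5 is deployed). Uses only standard fetch / ReadableStream APIs.
    async function readChatStream(): Promise<void> {
      const response = await fetch('/api/chat-api-route', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          messages: [{ role: 'user', content: 'Why is the sky blue?' }],
        }),
      });

      if (!response.ok || !response.body) {
        throw new Error(`Request failed: ${response.status}`);
      }

      const reader = response.body.getReader();
      const decoder = new TextDecoder();

      // Log each chunk as the server flushes it; the framing of the chunks
      // follows the AI SDK stream protocol and is not parsed here.
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        console.log(decoder.decode(value, { stream: true }));
      }
    }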