diff --git a/examples/next-openai-pages/.env.local.example b/examples/next-openai-pages/.env.local.example
index 8198c84d5c38..6b4834b21161 100644
--- a/examples/next-openai-pages/.env.local.example
+++ b/examples/next-openai-pages/.env.local.example
@@ -1 +1,2 @@
-OPENAI_API_KEY=xxxxxxx
\ No newline at end of file
+OPENAI_API_KEY=xxxxxxx
+ASSISTANT_ID=xxxxxxx
\ No newline at end of file
diff --git a/examples/next-openai-pages/app/api/assistant-tools/route.ts b/examples/next-openai-pages/app/api/assistant-tools/route.ts
new file mode 100644
index 000000000000..170c97602d3f
--- /dev/null
+++ b/examples/next-openai-pages/app/api/assistant-tools/route.ts
@@ -0,0 +1,71 @@
+import { AssistantResponse } from 'ai';
+import OpenAI from 'openai';
+
+const openai = new OpenAI({
+ apiKey: process.env.OPENAI_API_KEY || '',
+});
+
+export async function POST(req: Request) {
+ const input: {
+ threadId: string | null;
+ message: string;
+ } = await req.json();
+
+ const threadId = input.threadId ?? (await openai.beta.threads.create({})).id;
+
+ const createdMessage = await openai.beta.threads.messages.create(threadId, {
+ role: 'user',
+ content: input.message,
+ });
+
+ return AssistantResponse(
+ { threadId, messageId: createdMessage.id },
+ async ({ forwardStream }) => {
+ const runStream = openai.beta.threads.runs.stream(threadId, {
+ assistant_id:
+ process.env.ASSISTANT_ID ??
+ (() => {
+ throw new Error('ASSISTANT_ID is not set');
+ })(),
+ });
+
+ let runResult = await forwardStream(runStream);
+
+ while (
+ runResult?.status === 'requires_action' &&
+ runResult.required_action?.type === 'submit_tool_outputs'
+ ) {
+ const tool_outputs =
+ runResult.required_action.submit_tool_outputs.tool_calls.map(
+ (toolCall: any) => {
+ const parameters = JSON.parse(toolCall.function.arguments);
+
+ switch (toolCall.function.name) {
+ case 'celsiusToFahrenheit':
+ const celsius = parseFloat(parameters.value);
+ const fahrenheit = celsius * (9 / 5) + 32;
+
+ return {
+ tool_call_id: toolCall.id,
+ output: `${celsius}°C is ${fahrenheit.toFixed(2)}°F`,
+ };
+
+ default:
+ throw new Error(
+ `Unknown tool call function: ${toolCall.function.name}`,
+ );
+ }
+ },
+ );
+
+ runResult = await forwardStream(
+ openai.beta.threads.runs.submitToolOutputsStream(
+ threadId,
+ runResult.id,
+ { tool_outputs },
+ ),
+ );
+ }
+ },
+ );
+}
diff --git a/examples/next-openai-pages/app/api/assistant/route.ts b/examples/next-openai-pages/app/api/assistant/route.ts
new file mode 100644
index 000000000000..747c8ac5ece5
--- /dev/null
+++ b/examples/next-openai-pages/app/api/assistant/route.ts
@@ -0,0 +1,35 @@
+import { AssistantResponse } from 'ai';
+import OpenAI from 'openai';
+
+const openai = new OpenAI({
+ apiKey: process.env.OPENAI_API_KEY || '',
+});
+
+export async function POST(req: Request) {
+ const input: {
+ threadId: string | null;
+ message: string;
+ } = await req.json();
+
+ const threadId = input.threadId ?? (await openai.beta.threads.create({})).id;
+
+ const createdMessage = await openai.beta.threads.messages.create(threadId, {
+ role: 'user',
+ content: input.message,
+ });
+
+ return AssistantResponse(
+ { threadId, messageId: createdMessage.id },
+ async ({ forwardStream }) => {
+ const runStream = openai.beta.threads.runs.stream(threadId, {
+ assistant_id:
+ process.env.ASSISTANT_ID ??
+ (() => {
+            throw new Error('ASSISTANT_ID environment variable is not set');
+ })(),
+ });
+
+ await forwardStream(runStream);
+ },
+ );
+}
diff --git a/examples/next-openai-pages/app/api/call-tool/route.ts b/examples/next-openai-pages/app/api/call-tool/route.ts
new file mode 100644
index 000000000000..9826d9745b8f
--- /dev/null
+++ b/examples/next-openai-pages/app/api/call-tool/route.ts
@@ -0,0 +1,35 @@
+import { ToolInvocation, convertToCoreMessages, streamText } from 'ai';
+import { openai } from '@ai-sdk/openai';
+import { z } from 'zod';
+
+interface Message {
+ role: 'user' | 'assistant';
+ content: string;
+ toolInvocations?: ToolInvocation[];
+}
+
+export async function POST(req: Request) {
+ const { messages }: { messages: Message[] } = await req.json();
+
+ const result = await streamText({
+ model: openai('gpt-4'),
+ system: 'You are a helpful assistant.',
+ // @ts-ignore
+ messages: convertToCoreMessages(messages),
+ tools: {
+ celsiusToFahrenheit: {
+ description: 'Converts celsius to fahrenheit',
+ parameters: z.object({
+ value: z.string().describe('The value in celsius'),
+ }),
+ execute: async ({ value }) => {
+ const celsius = parseFloat(value);
+ const fahrenheit = celsius * (9 / 5) + 32;
+ return `${celsius}°C is ${fahrenheit.toFixed(2)}°F`;
+ },
+ },
+ },
+ });
+
+ return result.toAIStreamResponse();
+}
diff --git a/examples/next-openai-pages/app/api/call-tools-in-parallel/route.ts b/examples/next-openai-pages/app/api/call-tools-in-parallel/route.ts
new file mode 100644
index 000000000000..02c4ac379539
--- /dev/null
+++ b/examples/next-openai-pages/app/api/call-tools-in-parallel/route.ts
@@ -0,0 +1,41 @@
+import { ToolInvocation, convertToCoreMessages, streamText } from 'ai';
+import { openai } from '@ai-sdk/openai';
+import { z } from 'zod';
+
+interface Message {
+ role: 'user' | 'assistant';
+ content: string;
+ toolInvocations?: ToolInvocation[];
+}
+
+function getWeather({ city, unit }: { city: string; unit: string }) {
+ return { value: 25, description: 'Sunny' };
+}
+
+export async function POST(req: Request) {
+ const { messages }: { messages: Message[] } = await req.json();
+
+ const result = await streamText({
+ model: openai('gpt-4o'),
+ system: 'You are a helpful assistant.',
+ // @ts-ignore
+ messages: convertToCoreMessages(messages),
+ tools: {
+ getWeather: {
+ description: 'Get the weather for a location',
+ parameters: z.object({
+ city: z.string().describe('The city to get the weather for'),
+ unit: z
+ .enum(['C', 'F'])
+ .describe('The unit to display the temperature in'),
+ }),
+ execute: async ({ city, unit }) => {
+ const weather = getWeather({ city, unit });
+        return `It is currently ${weather.value}°${unit} and ${weather.description} in ${city}!`;
+ },
+ },
+ },
+ });
+
+ return result.toAIStreamResponse();
+}
diff --git a/examples/next-openai-pages/app/api/chat-app-route/route.ts b/examples/next-openai-pages/app/api/chat-app-route/route.ts
deleted file mode 100644
index daf8bb1e5281..000000000000
--- a/examples/next-openai-pages/app/api/chat-app-route/route.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-import { openai } from '@ai-sdk/openai';
-import { streamText } from 'ai';
-
-// Allow streaming responses up to 30 seconds
-export const maxDuration = 30;
-
-export async function POST(req: Request) {
- // Extract the `messages` from the body of the request
- const { messages } = await req.json();
-
- // Call the language model
- const result = await streamText({
- model: openai('gpt-4-turbo'),
- messages,
- });
-
- // Respond with the stream
- return result.toAIStreamResponse();
-}
diff --git a/examples/next-openai-pages/app/api/generate-chat/route.ts b/examples/next-openai-pages/app/api/generate-chat/route.ts
new file mode 100644
index 000000000000..5500abd27490
--- /dev/null
+++ b/examples/next-openai-pages/app/api/generate-chat/route.ts
@@ -0,0 +1,14 @@
+import { CoreMessage, generateText } from 'ai';
+import { openai } from '@ai-sdk/openai';
+
+export async function POST(req: Request) {
+ const { messages }: { messages: CoreMessage[] } = await req.json();
+
+ const { responseMessages } = await generateText({
+ model: openai('gpt-4'),
+ system: 'You are a helpful assistant.',
+ messages,
+ });
+
+ return Response.json({ messages: responseMessages });
+}
diff --git a/examples/next-openai-pages/app/api/generate-object/route.ts b/examples/next-openai-pages/app/api/generate-object/route.ts
new file mode 100644
index 000000000000..cabff4128d87
--- /dev/null
+++ b/examples/next-openai-pages/app/api/generate-object/route.ts
@@ -0,0 +1,24 @@
+import { generateObject } from 'ai';
+import { openai } from '@ai-sdk/openai';
+import { z } from 'zod';
+
+export async function POST(req: Request) {
+ const { prompt }: { prompt: string } = await req.json();
+
+ const { object } = await generateObject({
+ model: openai('gpt-4'),
+ system: 'You are a helpful assistant.',
+ prompt,
+ schema: z.object({
+ notifications: z.array(
+ z.object({
+ name: z.string().describe('Name of a fictional person.'),
+ message: z.string().describe('Do not use emojis or links.'),
+ minutesAgo: z.number(),
+ }),
+ ),
+ }),
+ });
+
+ return Response.json({ object });
+}
diff --git a/examples/next-openai-pages/app/api/generate-text/route.ts b/examples/next-openai-pages/app/api/generate-text/route.ts
new file mode 100644
index 000000000000..907428e9b48f
--- /dev/null
+++ b/examples/next-openai-pages/app/api/generate-text/route.ts
@@ -0,0 +1,14 @@
+import { generateText } from 'ai';
+import { openai } from '@ai-sdk/openai';
+
+export async function POST(req: Request) {
+ const { prompt }: { prompt: string } = await req.json();
+
+ const { text } = await generateText({
+ model: openai('gpt-4'),
+ system: 'You are a helpful assistant.',
+ prompt,
+ });
+
+ return Response.json({ text });
+}
diff --git a/examples/next-openai-pages/pages/api/use-chat-tools-ui.ts b/examples/next-openai-pages/app/api/generative-ui-route/route.ts
similarity index 77%
rename from examples/next-openai-pages/pages/api/use-chat-tools-ui.ts
rename to examples/next-openai-pages/app/api/generative-ui-route/route.ts
index 30f0d14d20d3..06f8a1599dd9 100644
--- a/examples/next-openai-pages/pages/api/use-chat-tools-ui.ts
+++ b/examples/next-openai-pages/app/api/generative-ui-route/route.ts
@@ -1,19 +1,16 @@
+import { CoreMessage, convertToCoreMessages, streamText } from 'ai';
import { openai } from '@ai-sdk/openai';
-import { convertToCoreMessages, streamText } from 'ai';
-import { NextApiRequest, NextApiResponse } from 'next';
import { z } from 'zod';
-export default async function handler(
- request: NextApiRequest,
- response: NextApiResponse,
-) {
- const { messages } = await request.body;
+export async function POST(req: Request) {
+ const { messages }: { messages: CoreMessage[] } = await req.json();
const result = await streamText({
- model: openai('gpt-4-turbo'),
+ model: openai('gpt-4'),
+ system: 'You are a helpful assistant.',
+ // @ts-expect-error TODO: fix messages type
messages: convertToCoreMessages(messages),
tools: {
- // server-side tool with execute function:
getWeatherInformation: {
description: 'show the weather in a given city to the user',
parameters: z.object({ city: z.string() }),
@@ -49,5 +46,5 @@ export default async function handler(
},
});
- result.pipeAIStreamToResponse(response);
+ return result.toAIStreamResponse();
}
diff --git a/examples/next-openai-pages/app/api/stream-chat/route.ts b/examples/next-openai-pages/app/api/stream-chat/route.ts
new file mode 100644
index 000000000000..c05a7a044361
--- /dev/null
+++ b/examples/next-openai-pages/app/api/stream-chat/route.ts
@@ -0,0 +1,14 @@
+import { CoreMessage, streamText } from 'ai';
+import { openai } from '@ai-sdk/openai';
+
+export async function POST(req: Request) {
+ const { messages }: { messages: CoreMessage[] } = await req.json();
+
+ const result = await streamText({
+ model: openai('gpt-4'),
+ system: 'You are a helpful assistant.',
+ messages,
+ });
+
+ return result.toAIStreamResponse();
+}
diff --git a/examples/next-openai-pages/app/api/stream-text/route.ts b/examples/next-openai-pages/app/api/stream-text/route.ts
new file mode 100644
index 000000000000..295cd9dadb8d
--- /dev/null
+++ b/examples/next-openai-pages/app/api/stream-text/route.ts
@@ -0,0 +1,14 @@
+import { streamText } from 'ai';
+import { openai } from '@ai-sdk/openai';
+
+export async function POST(req: Request) {
+ const { prompt }: { prompt: string } = await req.json();
+
+ const result = await streamText({
+ model: openai('gpt-4'),
+ system: 'You are a helpful assistant.',
+ prompt,
+ });
+
+ return result.toAIStreamResponse();
+}
diff --git a/examples/next-openai-pages/package.json b/examples/next-openai-pages/package.json
index 8dd42540dcce..0e2af747eec8 100644
--- a/examples/next-openai-pages/package.json
+++ b/examples/next-openai-pages/package.json
@@ -12,19 +12,20 @@
"@ai-sdk/openai": "latest",
"ai": "latest",
"next": "latest",
+ "openai": "^4.33.0",
"react": "^18",
"react-dom": "^18",
"zod": "3.23.8"
},
"devDependencies": {
- "@types/node": "^17.0.12",
- "@types/react": "^18",
+ "@types/node": "^20.12.7",
+ "@types/react": "^18.3.3",
"@types/react-dom": "^18",
"autoprefixer": "^10.4.14",
"eslint": "^7.32.0",
"eslint-config-next": "14.2.3",
"postcss": "^8.4.23",
"tailwindcss": "^3.3.2",
- "typescript": "5.1.3"
+ "typescript": "5.4.5"
}
}
diff --git a/examples/next-openai-pages/pages/_app.tsx b/examples/next-openai-pages/pages/_app.tsx
index fd347395fa2b..c14313e83e53 100644
--- a/examples/next-openai-pages/pages/_app.tsx
+++ b/examples/next-openai-pages/pages/_app.tsx
@@ -1,9 +1,6 @@
-import './globals.css';
+import '@/styles/globals.css';
import type { AppProps } from 'next/app';
-export default function ExampleApp({
- Component,
- pageProps,
-}: AppProps): JSX.Element {
+export default function App({ Component, pageProps }: AppProps) {
   return <Component {...pageProps} />;
}
diff --git a/examples/next-openai-pages/pages/api/chat-api-route.ts b/examples/next-openai-pages/pages/api/chat-api-route.ts
new file mode 100644
index 000000000000..9729540b8b56
--- /dev/null
+++ b/examples/next-openai-pages/pages/api/chat-api-route.ts
@@ -0,0 +1,19 @@
+import { openai } from '@ai-sdk/openai';
+import { streamText } from 'ai';
+import { NextApiRequest, NextApiResponse } from 'next';
+
+export default async function handler(
+ request: NextApiRequest,
+ response: NextApiResponse,
+) {
+ const { messages } = await request.body;
+
+ const result = await streamText({
+ model: openai('gpt-4-turbo-preview'),
+ messages,
+ });
+
+ // write the AI stream to the response
+ // Note: this is sent as a single response, not a stream
+ result.pipeAIStreamToResponse(response);
+}
diff --git a/examples/next-openai-pages/pages/api/chat-edge.ts b/examples/next-openai-pages/pages/api/chat-edge.ts
index be7da866440c..a6f82db7d9bd 100644
--- a/examples/next-openai-pages/pages/api/chat-edge.ts
+++ b/examples/next-openai-pages/pages/api/chat-edge.ts
@@ -1,22 +1,15 @@
-import { createOpenAI } from '@ai-sdk/openai';
+import { openai } from '@ai-sdk/openai';
import { streamText } from 'ai';
export const runtime = 'edge';
-// Create an OpenAI Provider instance
-const openai = createOpenAI({
- apiKey: process.env.OPENAI_API_KEY ?? '',
-});
-
export default async function handler(req: Request) {
const { messages } = await req.json();
- // Ask OpenAI for a streaming chat completion given the prompt
const result = await streamText({
model: openai('gpt-4-turbo-preview'),
messages,
});
- // Edge environment: return the AI stream as a single response
return result.toAIStreamResponse();
}
diff --git a/examples/next-openai-pages/pages/api/chat.ts b/examples/next-openai-pages/pages/api/chat.ts
deleted file mode 100644
index d86e0719cf39..000000000000
--- a/examples/next-openai-pages/pages/api/chat.ts
+++ /dev/null
@@ -1,25 +0,0 @@
-import { createOpenAI } from '@ai-sdk/openai';
-import { streamText } from 'ai';
-import { NextApiRequest, NextApiResponse } from 'next';
-
-// Create an OpenAI Provider instance
-const openai = createOpenAI({
- apiKey: process.env.OPENAI_API_KEY ?? '',
-});
-
-export default async function handler(
- req: NextApiRequest,
- res: NextApiResponse,
-) {
- const { messages } = await req.body;
-
- // Ask OpenAI for a streaming chat completion given the prompt
- const result = await streamText({
- model: openai('gpt-4-turbo-preview'),
- messages,
- });
-
- // write the AI stream to the response
- // Note: this is sent as a single response, not a stream
- result.pipeAIStreamToResponse(res);
-}
diff --git a/examples/next-openai-pages/pages/assistants/stream-assistant-response-with-tools/index.tsx b/examples/next-openai-pages/pages/assistants/stream-assistant-response-with-tools/index.tsx
new file mode 100644
index 000000000000..acd8c1f4420b
--- /dev/null
+++ b/examples/next-openai-pages/pages/assistants/stream-assistant-response-with-tools/index.tsx
@@ -0,0 +1,30 @@
+import { Message, useAssistant } from 'ai/react';
+
+export default function Page() {
+ const { status, messages, input, submitMessage, handleInputChange } =
+ useAssistant({ api: '/api/assistant-tools' });
+
+ return (
+
+
status: {status}
+
+
+ {messages.map((message: Message) => (
+
+
{`${message.role}: `}
+
{message.content}
+
+ ))}
+
+
+
+
+ );
+}
diff --git a/examples/next-openai-pages/pages/assistants/stream-assistant-response/index.tsx b/examples/next-openai-pages/pages/assistants/stream-assistant-response/index.tsx
new file mode 100644
index 000000000000..083e9d9af87f
--- /dev/null
+++ b/examples/next-openai-pages/pages/assistants/stream-assistant-response/index.tsx
@@ -0,0 +1,30 @@
+import { Message, useAssistant } from 'ai/react';
+
+export default function Page() {
+ const { status, messages, input, submitMessage, handleInputChange } =
+ useAssistant({ api: '/api/assistant' });
+
+ return (
+
+
status: {status}
+
+
+ {messages.map((message: Message) => (
+
+
{`${message.role}: `}
+
{message.content}
+
+ ))}
+
+
+
+
+ );
+}
diff --git a/examples/next-openai-pages/pages/basics/generate-object/index.tsx b/examples/next-openai-pages/pages/basics/generate-object/index.tsx
new file mode 100644
index 000000000000..32de3c97608e
--- /dev/null
+++ b/examples/next-openai-pages/pages/basics/generate-object/index.tsx
@@ -0,0 +1,38 @@
+import { useState } from 'react';
+
+export default function Page() {
+ const [generation, setGeneration] = useState('');
+ const [isLoading, setIsLoading] = useState(false);
+
+ return (
+
+
{
+ setIsLoading(true);
+
+ await fetch('/api/generate-object', {
+ method: 'POST',
+ body: JSON.stringify({
+ prompt: 'Messages during finals week.',
+ }),
+ }).then(response => {
+ response.json().then(json => {
+ console.log(json);
+ setGeneration(JSON.stringify(json.object, null, 2));
+ setIsLoading(false);
+ });
+ });
+ }}
+ >
+ Generate
+
+
+ {isLoading ? (
+ 'Loading...'
+ ) : (
+
{generation}
+ )}
+
+ );
+}
diff --git a/examples/next-openai-pages/pages/basics/generate-text/index.tsx b/examples/next-openai-pages/pages/basics/generate-text/index.tsx
new file mode 100644
index 000000000000..b441ff0501ae
--- /dev/null
+++ b/examples/next-openai-pages/pages/basics/generate-text/index.tsx
@@ -0,0 +1,33 @@
+import { useState } from 'react';
+
+export default function Page() {
+ const [generation, setGeneration] = useState('');
+ const [isLoading, setIsLoading] = useState(false);
+
+ return (
+
+
{
+ setIsLoading(true);
+
+ await fetch('/api/generate-text', {
+ method: 'POST',
+ body: JSON.stringify({
+ prompt: 'Why is the sky blue?',
+ }),
+ }).then(response => {
+ response.json().then(json => {
+ setGeneration(json.text);
+ setIsLoading(false);
+ });
+ });
+ }}
+ >
+ Generate
+
+
+ {isLoading ? 'Loading...' : generation}
+
+ );
+}
diff --git a/examples/next-openai-pages/pages/basics/stream-text/index.tsx b/examples/next-openai-pages/pages/basics/stream-text/index.tsx
new file mode 100644
index 000000000000..db139d2089a5
--- /dev/null
+++ b/examples/next-openai-pages/pages/basics/stream-text/index.tsx
@@ -0,0 +1,24 @@
+'use client';
+
+import { useCompletion } from 'ai/react';
+
+export default function Page() {
+ const { completion, complete } = useCompletion({
+ api: '/api/stream-text',
+ });
+
+ return (
+
+
{
+ await complete('Why is the sky blue?');
+ }}
+ >
+ Generate
+
+
+ {completion}
+
+ );
+}
diff --git a/examples/next-openai-pages/pages/chat-app-route.tsx b/examples/next-openai-pages/pages/chat-app-route.tsx
deleted file mode 100644
index 4982dc86daea..000000000000
--- a/examples/next-openai-pages/pages/chat-app-route.tsx
+++ /dev/null
@@ -1,29 +0,0 @@
-'use client';
-
-import { useChat } from 'ai/react';
-
-export default function Chat() {
- const { messages, input, handleInputChange, handleSubmit } = useChat({
- api: '/api/chat-app-route',
- });
-
- return (
-
- {messages.map(m => (
-
- {m.role === 'user' ? 'User: ' : 'AI: '}
- {m.content}
-
- ))}
-
-
-
- );
-}
diff --git a/examples/next-openai-pages/pages/chat-edge.tsx b/examples/next-openai-pages/pages/chat-edge.tsx
deleted file mode 100644
index aaadf39bfd44..000000000000
--- a/examples/next-openai-pages/pages/chat-edge.tsx
+++ /dev/null
@@ -1,29 +0,0 @@
-'use client';
-
-import { useChat } from 'ai/react';
-
-export default function Chat() {
- const { messages, input, handleInputChange, handleSubmit } = useChat({
- api: '/api/chat-edge',
- });
-
- return (
-
- {messages.map(m => (
-
- {m.role === 'user' ? 'User: ' : 'AI: '}
- {m.content}
-
- ))}
-
-
-
- );
-}
diff --git a/examples/next-openai-pages/pages/chat/generate-chat/index.tsx b/examples/next-openai-pages/pages/chat/generate-chat/index.tsx
new file mode 100644
index 000000000000..9f4829084122
--- /dev/null
+++ b/examples/next-openai-pages/pages/chat/generate-chat/index.tsx
@@ -0,0 +1,63 @@
+import { CoreMessage } from 'ai';
+import { useState } from 'react';
+
+export default function Page() {
+ const [input, setInput] = useState('');
+ const [messages, setMessages] = useState([]);
+
+ return (
+
+
+ {messages.map((message, index) => (
+
+
{`${message.role}: `}
+
+ {typeof message.content === 'string'
+ ? message.content
+ : message.content
+ .filter(part => part.type === 'text')
+ .map((part, partIndex) => (
+ // @ts-ignore
+
{part.text}
+ ))}
+
+
+ ))}
+
+
+
+ {
+ setInput(event.target.value);
+ }}
+ className="bg-zinc-100 w-full p-2"
+ onKeyDown={async event => {
+ if (event.key === 'Enter') {
+ setInput('');
+
+ setMessages(currentMessages => [
+ ...currentMessages,
+ { role: 'user', content: input },
+ ]);
+
+ const response = await fetch('/api/generate-chat', {
+ method: 'POST',
+ body: JSON.stringify({
+ messages: [...messages, { role: 'user', content: input }],
+ }),
+ });
+
+ const { messages: newMessages } = await response.json();
+
+ setMessages(currentMessages => [
+ ...currentMessages,
+ ...newMessages,
+ ]);
+ }
+ }}
+ />
+
+
+ );
+}
diff --git a/examples/next-openai-pages/pages/chat/stream-chat-api-route/index.tsx b/examples/next-openai-pages/pages/chat/stream-chat-api-route/index.tsx
new file mode 100644
index 000000000000..bec14121bcb8
--- /dev/null
+++ b/examples/next-openai-pages/pages/chat/stream-chat-api-route/index.tsx
@@ -0,0 +1,28 @@
+import { useChat } from 'ai/react';
+
+export default function Chat() {
+ const { messages, input, handleInputChange, handleSubmit } = useChat({
+ api: '/api/chat-api-route',
+ });
+
+ return (
+
+
+ {messages.map(message => (
+
+
{`${message.role}: `}
+
{message.content}
+
+ ))}
+
+
+
+
+ );
+}
diff --git a/examples/next-openai-pages/pages/chat/stream-chat-edge/index.tsx b/examples/next-openai-pages/pages/chat/stream-chat-edge/index.tsx
new file mode 100644
index 000000000000..8e545182bc6a
--- /dev/null
+++ b/examples/next-openai-pages/pages/chat/stream-chat-edge/index.tsx
@@ -0,0 +1,28 @@
+import { useChat } from 'ai/react';
+
+export default function Chat() {
+ const { messages, input, handleInputChange, handleSubmit } = useChat({
+ api: '/api/chat-edge',
+ });
+
+ return (
+
+
+ {messages.map(message => (
+
+
{`${message.role}: `}
+
{message.content}
+
+ ))}
+
+
+
+
+ );
+}
diff --git a/examples/next-openai-pages/pages/chat/stream-chat/index.tsx b/examples/next-openai-pages/pages/chat/stream-chat/index.tsx
new file mode 100644
index 000000000000..eeb591834f14
--- /dev/null
+++ b/examples/next-openai-pages/pages/chat/stream-chat/index.tsx
@@ -0,0 +1,28 @@
+import { useChat } from 'ai/react';
+
+export default function Page() {
+ const { messages, input, handleSubmit, handleInputChange } = useChat({
+ api: '/api/stream-chat',
+ });
+
+ return (
+
+
+ {messages.map(message => (
+
+
{`${message.role}: `}
+
{message.content}
+
+ ))}
+
+
+
+
+ );
+}
diff --git a/examples/next-openai-pages/pages/generative-user-interface/route-components/index.tsx b/examples/next-openai-pages/pages/generative-user-interface/route-components/index.tsx
new file mode 100644
index 000000000000..b673823f0ca5
--- /dev/null
+++ b/examples/next-openai-pages/pages/generative-user-interface/route-components/index.tsx
@@ -0,0 +1,134 @@
+import { ToolInvocation } from 'ai';
+import { useChat } from 'ai/react';
+
+export default function Page() {
+ const { messages, input, handleInputChange, handleSubmit, addToolResult } =
+ useChat({
+ api: '/api/generative-ui-route',
+ maxToolRoundtrips: 5,
+ async onToolCall({ toolCall }) {
+ if (toolCall.toolName === 'getLocation') {
+ const cities = [
+ 'New York',
+ 'Los Angeles',
+ 'Chicago',
+ 'San Francisco',
+ ];
+
+ return cities[Math.floor(Math.random() * cities.length)];
+ }
+ },
+ });
+
+ const renderToolResult = (tool: ToolInvocation) => {
+ const toolCallId = tool.toolCallId;
+
+ // render confirmation tool (client-side tool with user interaction)
+ if (tool.toolName === 'askForConfirmation') {
+ return (
+
+ {tool.args.message}
+
+ {'result' in tool ? (
+
{tool.result}
+ ) : (
+ <>
+
+
+ >
+ )}
+
+
+ );
+ }
+
+ // other tools:
+ return 'result' in tool ? (
+ tool.toolName === 'getWeatherInformation' ? (
+
+
+
+ {tool.result.value}°{tool.result.unit === 'celsius' ? 'C' : 'F'}
+
+
+
+
+
+ {tool.result.weeklyForecast.map((forecast: any) => (
+
+
{forecast.day}
+
{forecast.value}°
+
+ ))}
+
+
+ ) : tool.toolName === 'getLocation' ? (
+
+ User is in {tool.result}.
+
+ ) : (
+
+ Tool call {`${tool.toolName}: `}
+ {tool.result}
+
+ )
+ ) : (
+
+ Calling {tool.toolName}...
+
+ );
+ };
+
+ return (
+
+
+ {messages.map(message => (
+
+
{`${
+ message.toolInvocations ? 'tool' : message.role
+ }: `}
+
+ {message.toolInvocations
+ ? message.toolInvocations.map(tool => renderToolResult(tool))
+ : message.content}
+
+
+ ))}
+
+
+
+
+ );
+}
diff --git a/examples/next-openai-pages/pages/index.tsx b/examples/next-openai-pages/pages/index.tsx
index b3b33a897267..5b5fdf37d3e0 100644
--- a/examples/next-openai-pages/pages/index.tsx
+++ b/examples/next-openai-pages/pages/index.tsx
@@ -1,26 +1,72 @@
-'use client';
+import { Inter } from 'next/font/google';
+import Link from 'next/link';
-import { useChat } from 'ai/react';
+const inter = Inter({ subsets: ['latin'] });
-export default function Chat() {
- const { messages, input, handleInputChange, handleSubmit } = useChat();
+const examples = [
+ {
+ title: 'Generate object',
+ link: '/basics/generate-object',
+ },
+ {
+ title: 'Generate text',
+ link: '/basics/generate-text',
+ },
+ {
+ title: 'Stream text',
+ link: '/basics/stream-text',
+ },
+ {
+ title: 'Generate chat completion',
+ link: '/chat/generate-chat',
+ },
+ {
+    title: 'Stream chat completion',
+ link: '/chat/stream-chat',
+ },
+ {
+    title: 'Stream chat completion (API route)',
+ link: '/chat/stream-chat-api-route',
+ },
+ {
+    title: 'Stream chat completion (edge runtime)',
+ link: '/chat/stream-chat-edge',
+ },
+ {
+ title: 'Stream chat completion',
+ link: '/chat/stream-chat',
+ },
+ {
+ title: 'Call tools',
+ link: '/tools/call-tool',
+ },
+ {
+ title: 'Call tools in parallel',
+ link: '/tools/call-tools-in-parallel',
+ },
+ {
+ title: 'Route components using language model',
+ link: '/generative-user-interface/route-components',
+ },
+ {
+ title: 'Stream OpenAI Assistant API response',
+ link: '/assistants/stream-assistant-response',
+ },
+ {
+ title: 'Stream OpenAI Assistant API response with tool calls',
+ link: '/assistants/stream-assistant-response-with-tools',
+ },
+];
+
+export default function Home() {
return (
-
- {messages.map(m => (
-
- {m.role === 'user' ? 'User: ' : 'AI: '}
- {m.content}
-
+
+ {examples.map((example, index) => (
+
+ {index + 1}.
+ {example.title}
+
))}
-
-
-
+
);
}
diff --git a/examples/next-openai-pages/pages/tools/call-tool/index.tsx b/examples/next-openai-pages/pages/tools/call-tool/index.tsx
new file mode 100644
index 000000000000..1ae75abd3246
--- /dev/null
+++ b/examples/next-openai-pages/pages/tools/call-tool/index.tsx
@@ -0,0 +1,37 @@
+import { useChat } from 'ai/react';
+
+export default function Page() {
+ const { messages, input, handleInputChange, handleSubmit } = useChat({
+ api: '/api/call-tool',
+ maxToolRoundtrips: 1,
+ });
+
+ return (
+
+
+ {messages.map(message => (
+
+
{`${
+ message.toolInvocations ? 'tool' : message.role
+ }: `}
+
+ {message.toolInvocations
+ ? message.toolInvocations.map(
+ tool => `${tool.toolName}(${JSON.stringify(tool.args)})`,
+ )
+ : message.content}
+
+
+ ))}
+
+
+
+
+ );
+}
diff --git a/examples/next-openai-pages/pages/tools/call-tools-in-parallel/index.tsx b/examples/next-openai-pages/pages/tools/call-tools-in-parallel/index.tsx
new file mode 100644
index 000000000000..1d60103b6cbc
--- /dev/null
+++ b/examples/next-openai-pages/pages/tools/call-tools-in-parallel/index.tsx
@@ -0,0 +1,39 @@
+import { useChat } from 'ai/react';
+
+export default function Page() {
+ const { messages, input, handleInputChange, handleSubmit } = useChat({
+ api: '/api/call-tools-in-parallel',
+ maxToolRoundtrips: 1,
+ });
+
+ return (
+
+
+ {messages.map(message => (
+
+
{`${
+ message.toolInvocations ? 'tool' : message.role
+ }: `}
+
+ {message.toolInvocations
+ ? message.toolInvocations.map(tool => (
+
{`${
+ tool.toolName
+ }(${JSON.stringify(tool.args)})`}
+ ))
+ : message.content}
+
+
+ ))}
+
+
+
+
+ );
+}
diff --git a/examples/next-openai-pages/pages/use-chat-tools-ui.tsx b/examples/next-openai-pages/pages/use-chat-tools-ui.tsx
deleted file mode 100644
index 67df288a2cb0..000000000000
--- a/examples/next-openai-pages/pages/use-chat-tools-ui.tsx
+++ /dev/null
@@ -1,136 +0,0 @@
-import { ToolInvocation } from 'ai';
-import { Message, useChat } from 'ai/react';
-
-export default function Chat() {
- const { messages, input, handleInputChange, handleSubmit, addToolResult } =
- useChat({
- api: '/api/use-chat-tools-ui',
- maxAutomaticRoundtrips: 5,
-
- // run client-side tools that are automatically executed:
- async onToolCall({ toolCall }) {
- if (toolCall.toolName === 'getLocation') {
- const cities = [
- 'New York',
- 'Los Angeles',
- 'Chicago',
- 'San Francisco',
- ];
- return cities[Math.floor(Math.random() * cities.length)];
- }
- },
- });
-
- return (
-
- {messages?.map((m: Message) => (
-
-
{`${m.role}: `}
- {m.content}
- {m.toolInvocations?.map((toolInvocation: ToolInvocation) => {
- const toolCallId = toolInvocation.toolCallId;
-
- // render confirmation tool (client-side tool with user interaction)
- if (toolInvocation.toolName === 'askForConfirmation') {
- return (
-
- {toolInvocation.args.message}
-
- {'result' in toolInvocation ? (
- {toolInvocation.result}
- ) : (
- <>
-
-
- >
- )}
-
-
- );
- }
-
- // other tools:
- return 'result' in toolInvocation ? (
- toolInvocation.toolName === 'getWeatherInformation' ? (
-
-
-
- {toolInvocation.result.value}°
- {toolInvocation.result.unit === 'celsius' ? 'C' : 'F'}
-
-
-
-
-
- {toolInvocation.result.weeklyForecast.map(
- (forecast: any) => (
-
-
{forecast.day}
-
{forecast.value}°
-
- ),
- )}
-
-
- ) : toolInvocation.toolName === 'getLocation' ? (
-
- User is in {toolInvocation.result}.
-
- ) : (
-
- Tool call {`${toolInvocation.toolName}: `}
- {toolInvocation.result}
-
- )
- ) : (
-
- Calling {toolInvocation.toolName}...
-
- );
- })}
-
- ))}
-
-
-
- );
-}
diff --git a/examples/next-openai-pages/pages/globals.css b/examples/next-openai-pages/styles/globals.css
similarity index 100%
rename from examples/next-openai-pages/pages/globals.css
rename to examples/next-openai-pages/styles/globals.css
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 436522c02259..6fc2aa123edd 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -551,6 +551,9 @@ importers:
next:
specifier: latest
version: 14.2.3(react-dom@18.2.0)(react@18.2.0)
+ openai:
+ specifier: ^4.33.0
+ version: 4.47.1
react:
specifier: ^18
version: 18.2.0
@@ -562,11 +565,11 @@ importers:
version: 3.23.8
devDependencies:
'@types/node':
- specifier: ^17.0.12
- version: 17.0.45
+ specifier: ^20.12.7
+ version: 20.12.7
'@types/react':
- specifier: ^18
- version: 18.2.8
+ specifier: ^18.3.3
+ version: 18.3.3
'@types/react-dom':
specifier: ^18
version: 18.2.4
@@ -578,7 +581,7 @@ importers:
version: 7.32.0
eslint-config-next:
specifier: 14.2.3
- version: 14.2.3(eslint@7.32.0)(typescript@5.1.3)
+ version: 14.2.3(eslint@7.32.0)(typescript@5.4.5)
postcss:
specifier: ^8.4.23
version: 8.4.31
@@ -586,8 +589,8 @@ importers:
specifier: ^3.3.2
version: 3.3.5
typescript:
- specifier: 5.1.3
- version: 5.1.3
+ specifier: 5.4.5
+ version: 5.4.5
examples/next-openai-rate-limits:
dependencies:
@@ -1209,7 +1212,7 @@ importers:
dependencies:
eslint-config-next:
specifier: ^14.2.3
- version: 14.2.3(eslint@7.32.0)(typescript@5.1.3)
+ version: 14.2.3(eslint@7.32.0)(typescript@5.4.5)
eslint-config-prettier:
specifier: ^8.3.0
version: 8.10.0(eslint@7.32.0)
@@ -7028,7 +7031,7 @@ packages:
/@types/http-proxy@1.17.14:
resolution: {integrity: sha512-SSrD0c1OQzlFX7pGu1eXxSEjemej64aaNPRhhVYUGqXh0BtldAAx37MG8btcumvpgKyZp1F5Gn3JkktdxiFv6w==}
dependencies:
- '@types/node': 20.11.20
+ '@types/node': 20.14.2
dev: true
/@types/is-ci@3.0.4:
@@ -7058,7 +7061,7 @@ packages:
/@types/node-fetch@2.6.9:
resolution: {integrity: sha512-bQVlnMLFJ2d35DkPNjEPmd9ueO/rh5EiaZt2bhqiSarPjZIuIV6bPQVqcrEyvNo+AfTrRGVazle1tl597w3gfA==}
dependencies:
- '@types/node': 20.11.20
+ '@types/node': 20.14.2
form-data: 4.0.0
/@types/node@12.20.55:
@@ -7077,6 +7080,18 @@ packages:
resolution: {integrity: sha512-7/rR21OS+fq8IyHTgtLkDK949uzsa6n8BkziAKtPVpugIkO6D+/ooXMvzXxDnZrmtXVfjb1bKQafYpb8s89LOg==}
dependencies:
undici-types: 5.26.5
+ dev: true
+
+ /@types/node@20.12.7:
+ resolution: {integrity: sha512-wq0cICSkRLVaf3UGLMGItu/PtdY7oaXaI/RVU+xliKVOtRna3PRY57ZDfztpDL0n11vfymMUnXv8QwYCO7L1wg==}
+ dependencies:
+ undici-types: 5.26.5
+ dev: true
+
+ /@types/node@20.14.2:
+ resolution: {integrity: sha512-xyu6WAMVwv6AKFLB+e/7ySZVr/0zLCzOa7rSpq6jNwpqOrUbcACDWC+53d4n2QHOnDou0fbIsg8wZu/sxrnI4Q==}
+ dependencies:
+ undici-types: 5.26.5
/@types/node@20.9.0:
resolution: {integrity: sha512-nekiGu2NDb1BcVofVcEKMIwzlx4NjHlcjhoxxKBNLtz15Y1z7MYf549DFvkHSId02Ax6kGwWntIBPC3l/JZcmw==}
@@ -7111,6 +7126,13 @@ packages:
'@types/scheduler': 0.16.6
csstype: 3.1.2
+ /@types/react@18.3.3:
+ resolution: {integrity: sha512-hti/R0pS0q1/xx+TsI73XIqk26eBsISZ2R0wUijXIngRK9R/e7Xw/cXVxQK7R5JjW+SV4zGcn5hXjudkN/pLIw==}
+ dependencies:
+ '@types/prop-types': 15.7.10
+ csstype: 3.1.2
+ dev: true
+
/@types/resolve@1.20.2:
resolution: {integrity: sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==}
@@ -7150,6 +7172,25 @@ packages:
transitivePeerDependencies:
- supports-color
+ /@typescript-eslint/parser@5.62.0(eslint@7.32.0)(typescript@5.4.5):
+ resolution: {integrity: sha512-VlJEV0fOQ7BExOsHYAGrgbEiZoi8D+Bl2+f6V2RrXerRSylnp+ZBHmPvaIa8cz0Ajx7WO7Z5RqfgYg7ED1nRhA==}
+ engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+ peerDependencies:
+ eslint: ^6.0.0 || ^7.0.0 || ^8.0.0
+ typescript: '*'
+ peerDependenciesMeta:
+ typescript:
+ optional: true
+ dependencies:
+ '@typescript-eslint/scope-manager': 5.62.0
+ '@typescript-eslint/types': 5.62.0
+ '@typescript-eslint/typescript-estree': 5.62.0(typescript@5.4.5)
+ debug: 4.3.4
+ eslint: 7.32.0
+ typescript: 5.4.5
+ transitivePeerDependencies:
+ - supports-color
+
/@typescript-eslint/parser@5.62.0(eslint@8.57.0)(typescript@5.1.6):
resolution: {integrity: sha512-VlJEV0fOQ7BExOsHYAGrgbEiZoi8D+Bl2+f6V2RrXerRSylnp+ZBHmPvaIa8cz0Ajx7WO7Z5RqfgYg7ED1nRhA==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
@@ -7222,6 +7263,26 @@ packages:
- supports-color
dev: true
+ /@typescript-eslint/typescript-estree@5.62.0(typescript@5.4.5):
+ resolution: {integrity: sha512-CmcQ6uY7b9y694lKdRB8FEel7JbU/40iSAPomu++SjLMntB+2Leay2LO6i8VnJk58MtE9/nQSFIH6jpyRWyYzA==}
+ engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+ peerDependencies:
+ typescript: '*'
+ peerDependenciesMeta:
+ typescript:
+ optional: true
+ dependencies:
+ '@typescript-eslint/types': 5.62.0
+ '@typescript-eslint/visitor-keys': 5.62.0
+ debug: 4.3.4
+ globby: 11.1.0
+ is-glob: 4.0.3
+ semver: 7.5.4
+ tsutils: 3.21.0(typescript@5.4.5)
+ typescript: 5.4.5
+ transitivePeerDependencies:
+ - supports-color
+
/@typescript-eslint/visitor-keys@5.62.0:
resolution: {integrity: sha512-07ny+LHRzQXepkGg6w0mFY41fVUNBrL2Roj/++7V1txKugfjm/Ci/qSND03r2RhlJhJYMcTn9AhhSSqQp0Ysyw==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
@@ -10110,6 +10171,31 @@ packages:
transitivePeerDependencies:
- eslint-import-resolver-webpack
- supports-color
+ dev: true
+
+ /eslint-config-next@14.2.3(eslint@7.32.0)(typescript@5.4.5):
+ resolution: {integrity: sha512-ZkNztm3Q7hjqvB1rRlOX8P9E/cXRL9ajRcs8jufEtwMfTVYRqnmtnaSu57QqHyBlovMuiB8LEzfLBkh5RYV6Fg==}
+ peerDependencies:
+ eslint: ^7.23.0 || ^8.0.0
+ typescript: '>=3.3.1'
+ peerDependenciesMeta:
+ typescript:
+ optional: true
+ dependencies:
+ '@next/eslint-plugin-next': 14.2.3
+ '@rushstack/eslint-patch': 1.5.1
+ '@typescript-eslint/parser': 5.62.0(eslint@7.32.0)(typescript@5.4.5)
+ eslint: 7.32.0
+ eslint-import-resolver-node: 0.3.9
+ eslint-import-resolver-typescript: 3.6.1(@typescript-eslint/parser@5.62.0)(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.0)(eslint@7.32.0)
+ eslint-plugin-import: 2.29.0(@typescript-eslint/parser@5.62.0)(eslint-import-resolver-typescript@3.6.1)(eslint@7.32.0)
+ eslint-plugin-jsx-a11y: 6.8.0(eslint@7.32.0)
+ eslint-plugin-react: 7.34.1(eslint@7.32.0)
+ eslint-plugin-react-hooks: 5.0.0-canary-7118f5dd7-20230705(eslint@7.32.0)
+ typescript: 5.4.5
+ transitivePeerDependencies:
+ - eslint-import-resolver-webpack
+ - supports-color
/eslint-config-next@14.2.3(eslint@8.57.0)(typescript@5.1.6):
resolution: {integrity: sha512-ZkNztm3Q7hjqvB1rRlOX8P9E/cXRL9ajRcs8jufEtwMfTVYRqnmtnaSu57QqHyBlovMuiB8LEzfLBkh5RYV6Fg==}
@@ -12139,7 +12225,7 @@ packages:
resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==}
engines: {node: '>= 10.13.0'}
dependencies:
- '@types/node': 20.11.20
+ '@types/node': 20.14.2
merge-stream: 2.0.0
supports-color: 8.1.1
dev: true
@@ -17025,8 +17111,8 @@ packages:
picocolors: 1.0.0
sade: 1.8.1
svelte: 4.2.3
- svelte-preprocess: 5.1.4(svelte@4.2.3)(typescript@5.1.6)
- typescript: 5.1.6
+ svelte-preprocess: 5.1.4(svelte@4.2.3)(typescript@5.4.5)
+ typescript: 5.4.5
transitivePeerDependencies:
- '@babel/core'
- coffeescript
@@ -17048,7 +17134,7 @@ packages:
svelte: 4.2.3
dev: true
- /svelte-preprocess@5.1.4(svelte@4.2.3)(typescript@5.1.6):
+ /svelte-preprocess@5.1.4(svelte@4.2.3)(typescript@5.4.5):
resolution: {integrity: sha512-IvnbQ6D6Ao3Gg6ftiM5tdbR6aAETwjhHV+UKGf5bHGYR69RQvF1ho0JKPcbUON4vy4R7zom13jPjgdOWCQ5hDA==}
engines: {node: '>= 16.0.0'}
requiresBuild: true
@@ -17092,7 +17178,7 @@ packages:
sorcery: 0.11.0
strip-indent: 3.0.0
svelte: 4.2.3
- typescript: 5.1.6
+ typescript: 5.4.5
dev: true
/svelte@4.2.3:
@@ -17607,6 +17693,15 @@ packages:
typescript: 5.1.6
dev: true
+ /tsutils@3.21.0(typescript@5.4.5):
+ resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==}
+ engines: {node: '>= 6'}
+ peerDependencies:
+ typescript: '>=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta'
+ dependencies:
+ tslib: 1.14.1
+ typescript: 5.4.5
+
/tsx@4.7.1:
resolution: {integrity: sha512-8d6VuibXHtlN5E3zFkgY8u4DX7Y3Z27zvvPKVmLon/D4AjuKzarkUBTLDBgj9iTQ0hg5xM7c/mYiRVM+HETf0g==}
engines: {node: '>=18.0.0'}
@@ -17836,6 +17931,11 @@ packages:
hasBin: true
dev: true
+ /typescript@5.4.5:
+ resolution: {integrity: sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==}
+ engines: {node: '>=14.17'}
+ hasBin: true
+
/ufo@1.3.1:
resolution: {integrity: sha512-uY/99gMLIOlJPwATcMVYfqDSxUR9//AUcgZMzwfSTJPDKzA1S8mX4VLqa+fiAtveraQUBCz4FFcwVZBGbwBXIw==}
dev: true