Add next pages examples to folder #1876

Merged (5 commits) on Jun 7, 2024
3 changes: 2 additions & 1 deletion examples/next-openai-pages/.env.local.example
@@ -1 +1,2 @@
-OPENAI_API_KEY=xxxxxxx
+OPENAI_API_KEY=xxxxxxx
+ASSISTANT_ID=xxxxxxx
71 changes: 71 additions & 0 deletions examples/next-openai-pages/app/api/assistant-tools/route.ts
@@ -0,0 +1,71 @@
import { AssistantResponse } from 'ai';
import OpenAI from 'openai';

const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY || '',
});

export async function POST(req: Request) {
  const input: {
    threadId: string | null;
    message: string;
  } = await req.json();

  const threadId = input.threadId ?? (await openai.beta.threads.create({})).id;

  const createdMessage = await openai.beta.threads.messages.create(threadId, {
    role: 'user',
    content: input.message,
  });

  return AssistantResponse(
    { threadId, messageId: createdMessage.id },
    async ({ forwardStream }) => {
      const runStream = openai.beta.threads.runs.stream(threadId, {
        assistant_id:
          process.env.ASSISTANT_ID ??
          (() => {
            throw new Error('ASSISTANT_ID is not set');
          })(),
      });

      let runResult = await forwardStream(runStream);

      while (
        runResult?.status === 'requires_action' &&
        runResult.required_action?.type === 'submit_tool_outputs'
      ) {
        const tool_outputs =
          runResult.required_action.submit_tool_outputs.tool_calls.map(
            (toolCall: any) => {
              const parameters = JSON.parse(toolCall.function.arguments);

              switch (toolCall.function.name) {
                case 'celsiusToFahrenheit':
                  const celsius = parseFloat(parameters.value);
                  const fahrenheit = celsius * (9 / 5) + 32;

                  return {
                    tool_call_id: toolCall.id,
                    output: `${celsius}°C is ${fahrenheit.toFixed(2)}°F`,
                  };

                default:
                  throw new Error(
                    `Unknown tool call function: ${toolCall.function.name}`,
                  );
              }
            },
          );

        runResult = await forwardStream(
          openai.beta.threads.runs.submitToolOutputsStream(
            threadId,
            runResult.id,
            { tool_outputs },
          ),
        );
      }
    },
  );
}
35 changes: 35 additions & 0 deletions examples/next-openai-pages/app/api/assistant/route.ts
@@ -0,0 +1,35 @@
import { AssistantResponse } from 'ai';
import OpenAI from 'openai';

const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY || '',
});

export async function POST(req: Request) {
  const input: {
    threadId: string | null;
    message: string;
  } = await req.json();

  const threadId = input.threadId ?? (await openai.beta.threads.create({})).id;

  const createdMessage = await openai.beta.threads.messages.create(threadId, {
    role: 'user',
    content: input.message,
  });

  return AssistantResponse(
    { threadId, messageId: createdMessage.id },
    async ({ forwardStream }) => {
      const runStream = openai.beta.threads.runs.stream(threadId, {
        assistant_id:
          process.env.ASSISTANT_ID ??
          (() => {
            throw new Error('ASSISTANT_ID environment variable is not set');
          })(),
      });

      await forwardStream(runStream);
    },
  );
}
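For context (not part of this diff): a minimal client page that could drive this assistant route with the useAssistant hook from 'ai/react'. The page path and markup are hypothetical sketches; only the '/api/assistant' endpoint comes from the route above.

// Hypothetical pages/assistant.tsx (sketch, not part of the PR)
import { useAssistant } from 'ai/react';

export default function AssistantPage() {
  const { status, messages, input, submitMessage, handleInputChange } =
    useAssistant({ api: '/api/assistant' });

  return (
    <div>
      {messages.map(message => (
        <div key={message.id}>{`${message.role}: ${message.content}`}</div>
      ))}

      <form onSubmit={submitMessage}>
        <input
          value={input}
          onChange={handleInputChange}
          disabled={status !== 'awaiting_message'}
        />
      </form>
    </div>
  );
}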
35 changes: 35 additions & 0 deletions examples/next-openai-pages/app/api/call-tool/route.ts
@@ -0,0 +1,35 @@
import { ToolInvocation, convertToCoreMessages, streamText } from 'ai';
import { openai } from '@ai-sdk/openai';
import { z } from 'zod';

interface Message {
  role: 'user' | 'assistant';
  content: string;
  toolInvocations?: ToolInvocation[];
}

export async function POST(req: Request) {
  const { messages }: { messages: Message[] } = await req.json();

  const result = await streamText({
    model: openai('gpt-4'),
    system: 'You are a helpful assistant.',
    // @ts-ignore
    messages: convertToCoreMessages(messages),
    tools: {
      celsiusToFahrenheit: {
        description: 'Converts celsius to fahrenheit',
        parameters: z.object({
          value: z.string().describe('The value in celsius'),
        }),
        execute: async ({ value }) => {
          const celsius = parseFloat(value);
          const fahrenheit = celsius * (9 / 5) + 32;
          return `${celsius}°C is ${fahrenheit.toFixed(2)}°F`;
        },
      },
    },
  });

  return result.toAIStreamResponse();
}
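For reference (not part of the diff): a hypothetical client page using useChat from 'ai/react' against this endpoint, rendering any tool invocations produced by the celsiusToFahrenheit tool. Only the route path is taken from the file above; the rest is an illustrative sketch.

// Hypothetical pages/call-tool.tsx (sketch, not part of the PR)
import { useChat } from 'ai/react';

export default function CallToolPage() {
  const { messages, input, handleInputChange, handleSubmit } = useChat({
    api: '/api/call-tool',
  });

  return (
    <div>
      {messages.map(message => (
        <div key={message.id}>
          {`${message.role}: ${message.content}`}
          {message.toolInvocations?.map(toolInvocation =>
            'result' in toolInvocation ? (
              <div key={toolInvocation.toolCallId}>
                {`${toolInvocation.toolName}: ${toolInvocation.result}`}
              </div>
            ) : null,
          )}
        </div>
      ))}

      <form onSubmit={handleSubmit}>
        <input value={input} onChange={handleInputChange} />
      </form>
    </div>
  );
}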
41 changes: 41 additions & 0 deletions examples/next-openai-pages/app/api/call-tools-in-parallel/route.ts
@@ -0,0 +1,41 @@
import { ToolInvocation, convertToCoreMessages, streamText } from 'ai';
import { openai } from '@ai-sdk/openai';
import { z } from 'zod';

interface Message {
  role: 'user' | 'assistant';
  content: string;
  toolInvocations?: ToolInvocation[];
}

function getWeather({ city, unit }: { city: string; unit: string }) {
  return { value: 25, description: 'Sunny' };
}

export async function POST(req: Request) {
  const { messages }: { messages: Message[] } = await req.json();

  const result = await streamText({
    model: openai('gpt-4o'),
    system: 'You are a helpful assistant.',
    // @ts-ignore
    messages: convertToCoreMessages(messages),
    tools: {
      getWeather: {
        description: 'Get the weather for a location',
        parameters: z.object({
          city: z.string().describe('The city to get the weather for'),
          unit: z
            .enum(['C', 'F'])
            .describe('The unit to display the temperature in'),
        }),
        execute: async ({ city, unit }) => {
          const weather = getWeather({ city, unit });
          return `It is currently ${weather.value}°${unit} and ${weather.description} in ${city}!`;
        },
      },
    },
  });

  return result.toAIStreamResponse();
}
19 changes: 0 additions & 19 deletions examples/next-openai-pages/app/api/chat-app-route/route.ts

This file was deleted.

14 changes: 14 additions & 0 deletions examples/next-openai-pages/app/api/generate-chat/route.ts
@@ -0,0 +1,14 @@
import { CoreMessage, generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

export async function POST(req: Request) {
  const { messages }: { messages: CoreMessage[] } = await req.json();

  const { responseMessages } = await generateText({
    model: openai('gpt-4'),
    system: 'You are a helpful assistant.',
    messages,
  });

  return Response.json({ messages: responseMessages });
}
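Since this route returns plain JSON rather than a stream, it can be called with a regular fetch. A rough sketch (the request body shape matches the route; the prompt text is made up):

// Hypothetical client-side call (sketch, not part of the PR)
const response = await fetch('/api/generate-chat', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    messages: [{ role: 'user', content: 'Why is the sky blue?' }],
  }),
});

// `messages` holds the assistant message(s) produced by generateText.
const { messages } = await response.json();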
24 changes: 24 additions & 0 deletions examples/next-openai-pages/app/api/generate-object/route.ts
@@ -0,0 +1,24 @@
import { generateObject } from 'ai';
import { openai } from '@ai-sdk/openai';
import { z } from 'zod';

export async function POST(req: Request) {
  const { prompt }: { prompt: string } = await req.json();

  const { object } = await generateObject({
    model: openai('gpt-4'),
    system: 'You are a helpful assistant.',
    prompt,
    schema: z.object({
      notifications: z.array(
        z.object({
          name: z.string().describe('Name of a fictional person.'),
          message: z.string().describe('Do not use emojis or links.'),
          minutesAgo: z.number(),
        }),
      ),
    }),
  });

  return Response.json({ object });
}
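Likewise, the generate-object route can be exercised with a plain fetch; the returned object follows the zod schema above. The prompt text here is illustrative:

// Hypothetical client-side call (sketch, not part of the PR)
const response = await fetch('/api/generate-object', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ prompt: 'Notifications during an exam week.' }),
});

// `object.notifications` is an array of { name, message, minutesAgo }
// entries, as constrained by the schema in the route.
const { object } = await response.json();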
14 changes: 14 additions & 0 deletions examples/next-openai-pages/app/api/generate-text/route.ts
@@ -0,0 +1,14 @@
import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

export async function POST(req: Request) {
  const { prompt }: { prompt: string } = await req.json();

  const { text } = await generateText({
    model: openai('gpt-4'),
    system: 'You are a helpful assistant.',
    prompt,
  });

  return Response.json({ text });
}
@@ -1,19 +1,16 @@
+import { CoreMessage, convertToCoreMessages, streamText } from 'ai';
 import { openai } from '@ai-sdk/openai';
-import { convertToCoreMessages, streamText } from 'ai';
-import { NextApiRequest, NextApiResponse } from 'next';
 import { z } from 'zod';

-export default async function handler(
-  request: NextApiRequest,
-  response: NextApiResponse,
-) {
-  const { messages } = await request.body;
+export async function POST(req: Request) {
+  const { messages }: { messages: CoreMessage[] } = await req.json();

   const result = await streamText({
-    model: openai('gpt-4-turbo'),
+    model: openai('gpt-4'),
     system: 'You are a helpful assistant.',
-    // @ts-expect-error TODO: fix messages type
     messages: convertToCoreMessages(messages),
     tools: {
       // server-side tool with execute function:
       getWeatherInformation: {
         description: 'show the weather in a given city to the user',
         parameters: z.object({ city: z.string() }),
@@ -49,5 +46,5 @@ export default async function handler(
       },
     });

-  result.pipeAIStreamToResponse(response);
+  return result.toAIStreamResponse();
 }
14 changes: 14 additions & 0 deletions examples/next-openai-pages/app/api/stream-chat/route.ts
@@ -0,0 +1,14 @@
import { CoreMessage, streamText } from 'ai';
import { openai } from '@ai-sdk/openai';

export async function POST(req: Request) {
  const { messages }: { messages: CoreMessage[] } = await req.json();

  const result = await streamText({
    model: openai('gpt-4'),
    system: 'You are a helpful assistant.',
    messages,
  });

  return result.toAIStreamResponse();
}
14 changes: 14 additions & 0 deletions examples/next-openai-pages/app/api/stream-text/route.ts
@@ -0,0 +1,14 @@
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai';

export async function POST(req: Request) {
  const { prompt }: { prompt: string } = await req.json();

  const result = await streamText({
    model: openai('gpt-4'),
    system: 'You are a helpful assistant.',
    prompt,
  });

  return result.toAIStreamResponse();
}
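For reference (not part of the diff): the stream-text route pairs naturally with the useCompletion hook from 'ai/react', which posts { prompt } to the configured endpoint and exposes the streamed text. A hypothetical page:

// Hypothetical pages/stream-text.tsx (sketch, not part of the PR)
import { useCompletion } from 'ai/react';

export default function StreamTextPage() {
  const { completion, input, handleInputChange, handleSubmit } = useCompletion({
    api: '/api/stream-text',
  });

  return (
    <div>
      <form onSubmit={handleSubmit}>
        <input value={input} onChange={handleInputChange} />
      </form>

      <div>{completion}</div>
    </div>
  );
}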
7 changes: 4 additions & 3 deletions examples/next-openai-pages/package.json
@@ -12,19 +12,20 @@
     "@ai-sdk/openai": "latest",
     "ai": "latest",
     "next": "latest",
+    "openai": "^4.33.0",
     "react": "^18",
     "react-dom": "^18",
     "zod": "3.23.8"
   },
   "devDependencies": {
-    "@types/node": "^17.0.12",
-    "@types/react": "^18",
+    "@types/node": "^20.12.7",
+    "@types/react": "^18.3.3",
     "@types/react-dom": "^18",
     "autoprefixer": "^10.4.14",
     "eslint": "^7.32.0",
     "eslint-config-next": "14.2.3",
     "postcss": "^8.4.23",
     "tailwindcss": "^3.3.2",
-    "typescript": "5.1.3"
+    "typescript": "5.4.5"
   }
 }
7 changes: 2 additions & 5 deletions examples/next-openai-pages/pages/_app.tsx
@@ -1,9 +1,6 @@
-import './globals.css';
+import '@/styles/globals.css';
 import type { AppProps } from 'next/app';

-export default function ExampleApp({
-  Component,
-  pageProps,
-}: AppProps): JSX.Element {
+export default function App({ Component, pageProps }: AppProps) {
   return <Component {...pageProps} />;
 }
19 changes: 19 additions & 0 deletions examples/next-openai-pages/pages/api/chat-api-route.ts
@@ -0,0 +1,19 @@
import { openai } from '@ai-sdk/openai';
import { streamText } from 'ai';
import { NextApiRequest, NextApiResponse } from 'next';

export default async function handler(
  request: NextApiRequest,
  response: NextApiResponse,
) {
  const { messages } = await request.body;

  const result = await streamText({
    model: openai('gpt-4-turbo-preview'),
    messages,
  });

  // write the AI stream to the response
  // Note: this is sent as a single response, not a stream
  result.pipeAIStreamToResponse(response);
}