Skip to content

Commit

Permalink
feat(nx-dev): moved openai calls to edge function
Browse files Browse the repository at this point in the history
  • Loading branch information
mandarini committed Aug 23, 2023
1 parent 6bf3d97 commit dfcb380
Show file tree
Hide file tree
Showing 3 changed files with 105 additions and 48 deletions.
75 changes: 50 additions & 25 deletions nx-dev/data-access-ai/src/lib/data-access-ai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,7 @@ import {
createClient,
} from '@supabase/supabase-js';
import GPT3Tokenizer from 'gpt3-tokenizer';
import {
Configuration,
OpenAIApi,
CreateModerationResponse,
CreateEmbeddingResponse,
CreateCompletionResponseUsage,
} from 'openai';
import { CreateEmbeddingResponse, CreateCompletionResponseUsage } from 'openai';
import {
ApplicationError,
ChatItem,
Expand All @@ -37,13 +31,8 @@ const MIN_CONTENT_LENGTH = 50;
// This is a temporary solution
const MAX_HISTORY_LENGTH = 30;

const openAiKey = process.env['NX_OPENAI_KEY'];
const supabaseUrl = process.env['NX_NEXT_PUBLIC_SUPABASE_URL'];
const supabaseServiceKey = process.env['NX_SUPABASE_SERVICE_ROLE_KEY'];
const config = new Configuration({
apiKey: openAiKey,
});
const openai = new OpenAIApi(config);

let chatFullHistory: ChatItem[] = [];

Expand Down Expand Up @@ -72,17 +61,25 @@ export async function queryAi(
}

try {
checkEnvVariables(openAiKey, supabaseUrl, supabaseServiceKey);
checkEnvVariables(supabaseUrl, supabaseServiceKey);

if (!query) {
throw new UserError('Missing query in request data');
}

// Moderate the content to comply with OpenAI T&C
const sanitizedQuery = query.trim();
const moderationResponse: CreateModerationResponse = await openai
.createModeration({ input: sanitizedQuery })
.then((res) => res.data);
const moderationResponseObj = await fetch('/api/openai-handler', {
method: 'POST',
body: JSON.stringify({
input: sanitizedQuery,
}),
headers: {
'Content-Type': 'application/json',
},
});

const moderationResponse = await moderationResponseObj.json();

const [results] = moderationResponse.results;

Expand All @@ -109,14 +106,29 @@ export async function queryAi(
* input: sanitizedQuery + aiResponse,
* });
*
* This costs more tokens, so if we see conts skyrocket we remove it.
* This costs more tokens, so if we see costs skyrocket we remove it.
* As it says in the docs, it's a design decision, and it may or may not really improve results.
*/
const embeddingResponse = await openai.createEmbedding({
model: 'text-embedding-ada-002',
input: sanitizedQuery + aiResponse,
const embeddingResponseObj = await fetch('/api/openai-handler', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
action: 'embedding',
input: {
model: 'text-embedding-ada-002',
input: sanitizedQuery + aiResponse,
},
}),
});

if (!embeddingResponseObj.ok) {
throw new Error(
`API call failed with status ${embeddingResponseObj.status}`
);
}

const embeddingResponse = await embeddingResponseObj.json();

if (embeddingResponse.status !== 200) {
throw new ApplicationError(
'Failed to create embedding for question',
Expand Down Expand Up @@ -196,13 +208,26 @@ export async function queryAi(

chatFullHistory = chatHistory;

const response = await openai.createChatCompletion({
model: 'gpt-3.5-turbo-16k',
messages: chatGptMessages,
temperature: 0,
stream: false,
const responseObj = await fetch('/api/openai-handler', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
action: 'chatCompletion',
input: {
model: 'gpt-3.5-turbo-16k',
messages: chatGptMessages,
temperature: 0,
stream: false,
},
}),
});

if (!responseObj.ok) {
throw new Error(`API call failed with status ${responseObj.status}`);
}

const response = await responseObj.json();

if (response.status !== 200) {
const error = response.data;
throw new ApplicationError('Failed to generate completion', error);
Expand Down
55 changes: 55 additions & 0 deletions nx-dev/nx-dev/pages/api/openai-handler.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
import { NextRequest } from 'next/server';
import { Configuration, OpenAIApi } from 'openai';

// Server-side only: this route exists so the OpenAI key never reaches the
// browser (the client code in data-access-ai.ts calls this endpoint instead).
// NOTE(review): openAiKey may be undefined when NX_OPENAI_KEY is unset — the
// first OpenAI call will then fail at runtime; consider failing fast here.
const openAiKey = process.env['NX_OPENAI_KEY'];
const config = new Configuration({
  apiKey: openAiKey,
});
// Single shared client instance, reused across invocations of this route.
const openai = new OpenAIApi(config);

/**
 * Edge proxy for OpenAI calls.
 *
 * Accepts a JSON body of `{ action, input }` where `action` is one of
 * `'embedding' | 'chatCompletion' | 'moderation'` and `input` is the request
 * payload forwarded verbatim to the corresponding OpenAI SDK call.
 *
 * Backward-compatible fallback: the moderation caller in data-access-ai.ts
 * posts `{ input: sanitizedQuery }` with no `action`, so a missing action is
 * treated as a moderation request for that raw string input.
 *
 * Responds with the upstream JSON and status on success, 400 for an unknown
 * action, and 500 with the error message on failure.
 */
export default async function handler(request: NextRequest) {
  const { action, input } = await request.json();

  try {
    if (action === 'embedding') {
      return forwardResponse(await openai.createEmbedding(input));
    }
    if (action === 'chatCompletion') {
      return forwardResponse(await openai.createChatCompletion(input));
    }
    if (action === 'moderation' || action === undefined) {
      // Explicit action: `input` is already a full createModeration request.
      // Missing action (legacy caller): `input` is the bare string to moderate.
      const moderationInput = action === 'moderation' ? input : { input };
      return forwardResponse(await openai.createModeration(moderationInput));
    }
    return new Response('Invalid action', { status: 400 });
  } catch (e: unknown) {
    // Narrow before touching .message — catch variables are `unknown`.
    const message = e instanceof Error ? e.message : String(e);
    console.error('Error processing the request:', message);
    return new Response(message, { status: 500 });
  }
}

// Serialize an axios-style OpenAI SDK response, preserving the upstream status.
function forwardResponse(res: { data: unknown; status: number }): Response {
  return new Response(JSON.stringify(res.data), {
    status: res.status,
    headers: {
      'content-type': 'application/json',
    },
  });
}
23 changes: 0 additions & 23 deletions nx-dev/nx-dev/pages/api/openai.ts

This file was deleted.

0 comments on commit dfcb380

Please sign in to comment.