Skip to content

Commit

Permalink
feat(nx-dev): move openai call to edge function
Browse files Browse the repository at this point in the history
  • Loading branch information
mandarini committed Aug 23, 2023
1 parent d1da057 commit de57d37
Show file tree
Hide file tree
Showing 3 changed files with 96 additions and 41 deletions.
88 changes: 51 additions & 37 deletions nx-dev/data-access-ai/src/lib/data-access-ai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,7 @@ import {
createClient,
} from '@supabase/supabase-js';
import GPT3Tokenizer from 'gpt3-tokenizer';
import {
Configuration,
OpenAIApi,
CreateModerationResponse,
CreateEmbeddingResponse,
CreateCompletionResponseUsage,
} from 'openai';
import { CreateEmbeddingResponse, CreateCompletionResponseUsage } from 'openai';
import {
ApplicationError,
ChatItem,
Expand All @@ -37,13 +31,8 @@ const MIN_CONTENT_LENGTH = 50;
// This is a temporary solution
const MAX_HISTORY_LENGTH = 30;

const openAiKey = process.env['NX_OPENAI_KEY'];
const supabaseUrl = process.env['NX_NEXT_PUBLIC_SUPABASE_URL'];
const supabaseServiceKey = process.env['NX_SUPABASE_SERVICE_ROLE_KEY'];
const config = new Configuration({
apiKey: openAiKey,
});
const openai = new OpenAIApi(config);

let chatFullHistory: ChatItem[] = [];

Expand Down Expand Up @@ -72,18 +61,26 @@ export async function queryAi(
}

try {
checkEnvVariables(openAiKey, supabaseUrl, supabaseServiceKey);
checkEnvVariables(supabaseUrl, supabaseServiceKey);

if (!query) {
throw new UserError('Missing query in request data');
}

// Moderate the content to comply with OpenAI T&C
const sanitizedQuery = query.trim();
const moderationResponse: CreateModerationResponse = await openai
.createModeration({ input: sanitizedQuery })
.then((res) => res.data);
const moderationResponseObj = await fetch('/api/openai-handler', {
method: 'POST',
body: JSON.stringify({
action: 'moderation',
input: { input: sanitizedQuery },
}),
headers: {
'Content-Type': 'application/json',
},
});

const moderationResponse = await moderationResponseObj.json();
const [results] = moderationResponse.results;

if (results.flagged) {
Expand All @@ -109,24 +106,31 @@ export async function queryAi(
* input: sanitizedQuery + aiResponse,
* });
*
* This costs more tokens, so if we see conts skyrocket we remove it.
* This costs more tokens, so if we see costs skyrocket we remove it.
* As it says in the docs, it's a design decision, and it may or may not really improve results.
*/
const embeddingResponse = await openai.createEmbedding({
model: 'text-embedding-ada-002',
input: sanitizedQuery + aiResponse,
const embeddingResponseObj = await fetch('/api/openai-handler', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
action: 'embedding',
input: {
model: 'text-embedding-ada-002',
input: sanitizedQuery + aiResponse,
},
}),
});

if (embeddingResponse.status !== 200) {
throw new ApplicationError(
'Failed to create embedding for question',
embeddingResponse
);
if (!embeddingResponseObj.ok) {
throw new ApplicationError('Failed to create embedding for question', {
data: embeddingResponseObj.status,
});
}

const embeddingResponse = await embeddingResponseObj.json();
const {
data: [{ embedding }],
}: CreateEmbeddingResponse = embeddingResponse.data;
}: CreateEmbeddingResponse = embeddingResponse;

const { error: matchError, data: pageSections } = await supabaseClient.rpc(
'match_page_sections_2',
Expand Down Expand Up @@ -196,33 +200,43 @@ export async function queryAi(

chatFullHistory = chatHistory;

const response = await openai.createChatCompletion({
model: 'gpt-3.5-turbo-16k',
messages: chatGptMessages,
temperature: 0,
stream: false,
const responseObj = await fetch('/api/openai-handler', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
action: 'chatCompletion',
input: {
model: 'gpt-3.5-turbo-16k',
messages: chatGptMessages,
temperature: 0,
stream: false,
},
}),
});

if (response.status !== 200) {
const error = response.data;
throw new ApplicationError('Failed to generate completion', error);
if (!responseObj.ok) {
throw new ApplicationError('Failed to generate completion', {
data: responseObj.status,
});
}

const response = await responseObj.json();

// Message asking to double-check
const callout: string =
'{% callout type="warning" title="Always double-check!" %}The results may not be accurate, so please always double check with our documentation.{% /callout %}\n';
// Append the warning message asking to double-check!
const message = [callout, getMessageFromResponse(response.data)].join('');
const message = [callout, getMessageFromResponse(response)].join('');

const responseWithoutBadLinks = await sanitizeLinksInResponse(message);

const sources = getListOfSources(pageSections);

totalTokensSoFar += response.data.usage?.total_tokens ?? 0;
totalTokensSoFar += response.usage?.total_tokens ?? 0;

return {
textResponse: responseWithoutBadLinks,
usage: response.data.usage as CreateCompletionResponseUsage,
usage: response.usage as CreateCompletionResponseUsage,
sources,
sourcesMarkdown: toMarkdownList(sources),
};
Expand Down
4 changes: 0 additions & 4 deletions nx-dev/data-access-ai/src/lib/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -93,13 +93,9 @@ async function is404(url: string): Promise<boolean> {
}

export function checkEnvVariables(
openAiKey?: string,
supabaseUrl?: string,
supabaseServiceKey?: string
) {
if (!openAiKey) {
throw new ApplicationError('Missing environment variable NX_OPENAI_KEY');
}
if (!supabaseUrl) {
throw new ApplicationError(
'Missing environment variable NX_NEXT_PUBLIC_SUPABASE_URL'
Expand Down
45 changes: 45 additions & 0 deletions nx-dev/nx-dev/pages/api/openai-handler.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
import { NextRequest } from 'next/server';

// Server-side only: the OpenAI key is read from the environment here and
// never shipped to the browser — that is the point of this proxy route.
const openAiKey = process.env['NX_OPENAI_KEY'];
// Run this API route on the Next.js Edge runtime (fetch-based, low latency).
export const config = {
runtime: 'edge',
};

/**
 * Edge proxy for OpenAI calls made by the docs AI assistant.
 *
 * Accepts `{ action, input }` where `action` selects the OpenAI endpoint
 * (`embedding` | `chatCompletion` | `moderation`) and `input` is forwarded
 * verbatim as the OpenAI request body. Keeps the API key server-side.
 *
 * Responses: 400 for an unknown action, OpenAI's own status (with its JSON
 * body) on a forwarded call, 500 if the forwarding itself throws.
 */
export default async function handler(request: NextRequest) {
  const { action, input } = await request.json();

  // Whitelist of supported endpoints so this route cannot be abused as an
  // open proxy to arbitrary OpenAI API paths.
  const endpoints: Record<string, string> = {
    embedding: 'embeddings',
    chatCompletion: 'chat/completions',
    moderation: 'moderations',
  };

  const endpoint = endpoints[action as string];
  if (!endpoint) {
    return new Response('Invalid action', { status: 400 });
  }

  try {
    const response = await fetch(`https://api.openai.com/v1/${endpoint}`, {
      method: 'POST',
      headers: {
        Authorization: `Bearer ${openAiKey}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(input),
    });

    const responseData = await response.json();

    // Mirror OpenAI's status code so callers can distinguish upstream
    // failures (e.g. 429, 401) from this route's own errors.
    return new Response(JSON.stringify(responseData), {
      status: response.status,
      headers: {
        'content-type': 'application/json',
      },
    });
  } catch (e) {
    // `e` is `unknown` in a strict-mode catch clause — narrow before
    // reading `.message` (original code would not compile under strict TS
    // and could throw again on non-Error values).
    const message = e instanceof Error ? e.message : 'Unknown error';
    console.error('Error processing the request:', message);
    return new Response(message, { status: 500 });
  }
}

0 comments on commit de57d37

Please sign in to comment.