
Commit

trying out openai function calling
PrinceBaghel258025 committed Nov 4, 2023
1 parent fb88673 commit 3bfea39
Showing 7 changed files with 295 additions and 84 deletions.
33 changes: 17 additions & 16 deletions next.config.mjs
@@ -1,9 +1,8 @@
import "./src/app/env.mjs";
import * as Sentry from "@sentry/nextjs"
/** @type {import('next').NextConfig} */
const nextConfig = {
experimental: {
appDir: true,
// appDir: true,
serverActions: true
},
redirects: async () => {
@@ -17,22 +16,24 @@ const nextConfig = {
},
};

export default Sentry.withSentryConfig(nextConfig, {authToken: process.env.SENTRY_AUTH_TOKEN, org: process.env.SENTRY_ORG, project: process.env.SENTRY_PROJECT, hideSourceMaps: true}, {
// For all available options, see:
// https://docs.sentry.io/platforms/javascript/guides/nextjs/manual-setup/
export default nextConfig;

// Upload a larger set of source maps for prettier stack traces (increases build time)
widenClientFileUpload: true,
// export default Sentry.withSentryConfig(nextConfig, {authToken: process.env.SENTRY_AUTH_TOKEN, org: process.env.SENTRY_ORG, project: process.env.SENTRY_PROJECT, hideSourceMaps: true}, {
// // For all available options, see:
// // https://docs.sentry.io/platforms/javascript/guides/nextjs/manual-setup/

// Transpiles SDK to be compatible with IE11 (increases bundle size)
transpileClientSDK: true,
// // Upload a larger set of source maps for prettier stack traces (increases build time)
// widenClientFileUpload: true,

// Routes browser requests to Sentry through a Next.js rewrite to circumvent ad-blockers (increases server load)
tunnelRoute: "/monitoring",
// // Transpiles SDK to be compatible with IE11 (increases bundle size)
// transpileClientSDK: true,

// Hides source maps from generated client bundles
hideSourceMaps: true,
// // Routes browser requests to Sentry through a Next.js rewrite to circumvent ad-blockers (increases server load)
// tunnelRoute: "/monitoring",

// Automatically tree-shake Sentry logger statements to reduce bundle size
disableLogger: true,
});
// // Hides source maps from generated client bundles
// hideSourceMaps: true,

// // Automatically tree-shake Sentry logger statements to reduce bundle size
// disableLogger: true,
// });
128 changes: 64 additions & 64 deletions package.json
@@ -17,80 +17,80 @@
"pre-commit": "yarn format && yarn lint:fix && yarn type-check"
},
"dependencies": {
"@clerk/nextjs": "^4.19.0",
"@clerk/themes": "^1.7.4",
"@dqbd/tiktoken": "^1.0.3",
"@libsql/client": "^0.3.1",
"@liveblocks/client": "^1.1.6",
"@liveblocks/react": "^1.1.6",
"@mantine/hooks": "^6.0.17",
"@phosphor-icons/react": "^2.0.10",
"@planetscale/database": "^1.7.0",
"@radix-ui/react-avatar": "^1.0.3",
"@radix-ui/react-context-menu": "^2.1.4",
"@radix-ui/react-dialog": "^1.0.4",
"@radix-ui/react-dropdown-menu": "^2.0.5",
"@radix-ui/react-icons": "^1.3.0",
"@radix-ui/react-popover": "^1.0.6",
"@radix-ui/react-scroll-area": "^1.0.4",
"@radix-ui/react-slot": "^1.0.2",
"@radix-ui/react-switch": "^1.0.3",
"@radix-ui/react-toggle": "^1.0.3",
"@radix-ui/react-tooltip": "^1.0.6",
"@clerk/nextjs": "4.19.0",
"@clerk/themes": "1.7.4",
"@dqbd/tiktoken": "1.0.3",
"@libsql/client": "0.3.1",
"@liveblocks/client": "1.1.6",
"@liveblocks/react": "1.1.6",
"@mantine/hooks": "6.0.17",
"@phosphor-icons/react": "2.0.10",
"@planetscale/database": "1.7.0",
"@radix-ui/react-avatar": "1.0.3",
"@radix-ui/react-context-menu": "2.1.4",
"@radix-ui/react-dialog": "1.0.4",
"@radix-ui/react-dropdown-menu": "2.0.5",
"@radix-ui/react-icons": "1.3.0",
"@radix-ui/react-popover": "1.0.6",
"@radix-ui/react-scroll-area": "1.0.4",
"@radix-ui/react-slot": "1.0.2",
"@radix-ui/react-switch": "1.0.3",
"@radix-ui/react-toggle": "1.0.3",
"@radix-ui/react-tooltip": "1.0.6",
"@sentry/nextjs": "7.61.0",
"@t3-oss/env-core": "^0.3.1",
"@t3-oss/env-nextjs": "^0.3.1",
"@tanstack/react-query": "^4.29.25",
"@typeform/embed-react": "^2.31.0",
"@t3-oss/env-core": "0.3.1",
"@t3-oss/env-nextjs": "0.3.1",
"@tanstack/react-query": "4.29.25",
"@typeform/embed-react": "2.31.0",
"@types/node": "18.15.11",
"@types/react": "18.0.33",
"@types/react-dom": "18.0.11",
"@uploadthing/react": "^5.2.0",
"@upstash/redis": "^1.20.2",
"ai": "^2.1.13",
"class-variance-authority": "^0.6.0",
"clsx": "^1.2.1",
"drizzle-orm": "^0.26.5",
"drizzle-zod": "^0.4.2",
"@uploadthing/react": "5.2.0",
"@upstash/redis": "1.20.2",
"ai": "2.2.20",
"class-variance-authority": "0.6.0",
"clsx": "1.2.1",
"drizzle-orm": "0.26.5",
"drizzle-zod": "0.4.2",
"eslint": "8.37.0",
"eslint-config-next": "13.2.4",
"framer-motion": "^10.16.2",
"iconoir-react": "^6.8.0",
"js-tiktoken": "^1.0.7",
"langchain": "0.0.136",
"langsmith": "0.0.27",
"lucide-react": "^0.224.0",
"next": "^13.4.4",
"prettier": "^3.0.0",
"framer-motion": "10.16.2",
"iconoir-react": "6.8.0",
"js-tiktoken": "1.0.7",
"langchain": "0.0.180",
"langsmith": "0.0.48",
"lucide-react": "0.224.0",
"next": "13.5.5",
"prettier": "3.0.0",
"react": "18.2.0",
"react-div-100vh": "^0.7.0",
"react-div-100vh": "0.7.0",
"react-dom": "18.2.0",
"react-dropzone": "^14.2.3",
"react-intersection-observer": "^9.5.2",
"react-markdown": "^8.0.7",
"react-mic": "^12.4.6",
"react-syntax-highlighter": "^15.5.0",
"react-textarea-autosize": "^8.4.1",
"remark-gfm": "^3.0.1",
"remark-rehype": "^10.1.0",
"tailwind-merge": "^1.12.0",
"tailwindcss-animate": "^1.0.5",
"react-dropzone": "14.2.3",
"react-intersection-observer": "9.5.2",
"react-markdown": "8.0.7",
"react-mic": "12.4.6",
"react-syntax-highlighter": "15.5.0",
"react-textarea-autosize": "8.4.1",
"remark-gfm": "3.0.1",
"remark-rehype": "10.1.0",
"tailwind-merge": "1.12.0",
"tailwindcss-animate": "1.0.5",
"typescript": "5.0.3",
"zod": "^3.21.4"
"zod": "3.21.4"
},
"devDependencies": {
"@types/react-mic": "^12.4.3",
"@types/react-syntax-highlighter": "^15.5.7",
"autoprefixer": "^10.4.14",
"cross-env": "^7.0.3",
"dotenv": "^16.1.4",
"drizzle-kit": "^0.18.1",
"esbuild-register": "^3.4.2",
"eslint-config-prettier": "^8.8.0",
"eslint-plugin-unused-imports": "^3.0.0",
"path": "^0.12.7",
"postcss": "^8.4.21",
"tailwindcss": "^3.3.1",
"ts-node": "^10.9.1"
"@types/react-mic": "12.4.3",
"@types/react-syntax-highlighter": "15.5.7",
"autoprefixer": "10.4.14",
"cross-env": "7.0.3",
"dotenv": "16.1.4",
"drizzle-kit": "0.18.1",
"esbuild-register": "3.4.2",
"eslint-config-prettier": "8.8.0",
"eslint-plugin-unused-imports": "3.0.0",
"path": "0.12.7",
"postcss": "8.4.21",
"tailwindcss": "3.3.1",
"ts-node": "10.9.1"
}
}
180 changes: 180 additions & 0 deletions src/app/api/chatagent/[chatid]/route.ts
@@ -0,0 +1,180 @@
import { StreamingTextResponse, LangChainStream } from "ai";
import { eq } from "drizzle-orm";
import { db } from "@/lib/db";
import { chats } from "@/lib/db/schema";
import { CHAT_COMPLETION_CONTENT, ChatEntry, ChatLog } from "@/lib/types";
import { systemPrompt } from "@/utils/prompts";
import {
chooseModel,
jsonToLangchain,
generateTitle,
// azureOpenAiChatModel,
// OPEN_AI_MODELS,
openAIChatModel,
} from "@/utils/apiHelper";
import { Calculator } from "langchain/tools/calculator";
import { NextResponse } from "next/server";
import { SerpAPI } from "langchain/tools";
import { initializeAgentExecutorWithOptions } from "langchain/agents";
import { BufferMemory, ChatMessageHistory } from "langchain/memory";
// SerpAPI key — read from the environment (SERPAPI_API_KEY is the variable langchain's SerpAPI tool reads by default) rather than committing the secret
const apiKey = process.env.SERPAPI_API_KEY;

export const revalidate = 0; // disable cache

export async function POST(
request: Request,
params: { params: { chatid: string } },
) {
const body = await request.json();

const _chat = body.messages;
const isFast = body.isFast ?? true; // "??" keeps an explicit false; "||" would always yield true
const orgId = body.orgId ?? "";

let id = params.params.chatid as any;
// guard: the client should validate message length before calling this API
if (_chat.length === 0) {
console.error(
"received an empty messages array; this should not happen if the caller validates before sending the request",
);
return NextResponse.json({ error: "no messages provided" }, { status: 400 });
}
const msgs = jsonToLangchain(_chat, systemPrompt);
console.log("msgs", msgs[0]);

const { error, model } = chooseModel(isFast, msgs, systemPrompt);

if (error) {
const msg = {
content: CHAT_COMPLETION_CONTENT,
role: "assistant",
};
_chat.push(msg); // pushing the final message to identify that the chat is completed
await db
.update(chats)
.set({
messages: JSON.stringify({ log: _chat }),
updatedAt: new Date(),
})
.where(eq(chats.id, Number(id)))
.run();
return NextResponse.json(
{ ...msg },
{
status: 400,
},
);
}

const { stream, handlers } = LangChainStream({
onCompletion: async (fullResponse: string) => {
const latestResponse = { role: "assistant", content: fullResponse };
if (orgId !== "") {
// organization chat: a chat of length 1 means this is the first message for this chat id
if (_chat.length === 1) {
console.log("got in 1 length case");
_chat.push(latestResponse);
const title = await generateTitle(_chat as ChatEntry[]);
// generateTitle pushed a title prompt onto the chat, so pop it off before persisting
_chat.pop();
console.log("generated title", title);
await db
.update(chats)
.set({
messages: JSON.stringify({ log: _chat } as ChatLog),
title: title,
})
.where(eq(chats.id, Number(id)))
.run();
} else {
_chat.push(latestResponse);
await db
.update(chats)
.set({
messages: JSON.stringify({ log: _chat }),
updatedAt: new Date(),
})
.where(eq(chats.id, Number(id)))
.run();
}
}
// handling user's personal chat
// else {
// // it means it is the first message in a specific chat id
// if (_chat.length === 1) {
// _chat.push(latestResponse);
// await db.insert(chats).values({
// user_id: String(userId),
// messages: JSON.stringify({ log: _chat } as ChatLog),
// });
// } else {
// _chat.push(latestResponse);
// await db
// .update(chats)
// .set({ messages: JSON.stringify({ log: _chat }) })
// .where(eq(chats.id, Number(id)));
// }
// }
},
onFinal(completion) {
console.log(
"this is the data on the completion of function call",
completion,
);
},
onToken(token) {
console.log("this is onToken", token);
},
onStart() {
console.log("this is on start");
},
});
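// note: `handlers` is a LangChain callback handler; these hooks only fire if it is attached as a callback to the model or to the executor call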

// const azure_chat_model = azureOpenAiChatModel(
// OPEN_AI_MODELS.gptTurbo16k,
// true,
// handlers,
// ); // here it is type unsafe
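// tools the agent can invoke through OpenAI function calling: SerpAPI web search and a Calculator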
const tools = [
new SerpAPI(
apiKey,
// {
// location: "Austin,Texas,United States",
// hl: "en",
// gl: "us",
// }
),
new Calculator(),
];
const openai_chat_model = openAIChatModel(model, false);
const memory = new BufferMemory({ memoryKey: "chat_history" });
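// "openai-functions" agent: the model decides when to call the tools above; the incoming messages seed its chat_history memory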
const executor = await initializeAgentExecutorWithOptions(
tools,
openai_chat_model,
{
// agentType: "chat-conversational-react-description",
agentType: "openai-functions",
memory: new BufferMemory({
memoryKey: "chat_history",
chatHistory: new ChatMessageHistory(msgs),
returnMessages: true,
outputKey: "output",
}),
returnIntermediateSteps: true,
// verbose: true,
},
);

// const modelWithFallback = openai_chat_model.withFallbacks({
// fallbacks: [azure_chat_model],
// });
// modelWithFallback.invoke(msgs);
// openai_chat_model.call(msgs, { tools : [new SerpAPI(apiKey), new Calculator()] });
const data = await executor.call({ input: msgs[msgs.length - 1].content });
console.log("this is data", data);
console.info("info", openai_chat_model.lc_kwargs);
return new Response(JSON.stringify(data));
}
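
For reference, a minimal sketch of how a client might call this new route. The URL shape follows the file path and the body fields (messages, orgId, isFast) mirror what the handler reads; the helper name and error handling are illustrative, not part of this commit.

// hypothetical client-side helper for the /api/chatagent/[chatid] route
async function askChatAgent(
  chatId: string,
  messages: { role: string; content: string }[],
  orgId: string,
) {
  const res = await fetch(`/api/chatagent/${chatId}`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ messages, orgId, isFast: true }),
  });
  if (!res.ok) throw new Error(`chat agent request failed: ${res.status}`);
  // the handler returns the agent result as JSON, including intermediateSteps (returnIntermediateSteps: true)
  return res.json();
}
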
4 changes: 4 additions & 0 deletions src/components/chat.tsx
@@ -135,6 +135,10 @@ export default function Chat(props: ChatProps) {
</div>
)}
<InputBar
chatId={props.chatId}
orgId={props.orgId}
messages={messages}
setMessages={setMessages}
username={props.username}
userId={props.uid}
choosenAI={choosenAI}
