From 6513171413bea49170dce34f734e22d47cc14e5a Mon Sep 17 00:00:00 2001
From: Prince Baghel
Date: Wed, 25 Sep 2024 12:23:17 +0530
Subject: [PATCH] patch: image-chat

---
 src/app/page.tsx            |   2 +-
 src/components/chat.tsx     | 153 +----------------------------
 src/components/inputBar.tsx | 186 +++++++++++++++++++++++++++++++-----
 3 files changed, 163 insertions(+), 178 deletions(-)

diff --git a/src/app/page.tsx b/src/app/page.tsx
index acc9615..cf9f85a 100644
--- a/src/app/page.tsx
+++ b/src/app/page.tsx
@@ -97,7 +97,7 @@ export default function Home() {
     try {
       const res = await fetch(`/api/generateNewChatId/${orgId}`, {
         method: "POST",
-        body: JSON.stringify({ type: "chat" }),
+        body: JSON.stringify({ type: chatType || "chat" }),
       });
 
       const data = await res.json();
diff --git a/src/components/chat.tsx b/src/components/chat.tsx
index 1fc3fec..7306588 100644
--- a/src/components/chat.tsx
+++ b/src/components/chat.tsx
@@ -1,7 +1,7 @@
 "use client";
 import { useState, useEffect, useCallback } from "react";
 import { ChatType } from "@/lib/types";
-import InputBar, { Schema } from "@/components/inputBar";
+import InputBar from "@/components/inputBar";
 import { Message, useChat } from "ai/react";
 import Startnewchatbutton from "@/components/startnewchatbutton";
 import ChatMessageCombinator from "@/components/chatmessagecombinator";
@@ -15,7 +15,6 @@ import { useDropzone } from "react-dropzone";
 import { X } from "lucide-react";
 import { useImageState } from "@/store/tlDrawImage";
 import { useQueryState } from "next-usequerystate";
-import { nanoid } from "ai";
 
 interface ChatProps {
   orgId: string;
@@ -49,10 +48,7 @@ export default function Chat(props: ChatProps) {
   const [imageUrl, setImageUrl] = useState("");
   const [imageName, setImageName] = useState("");
   const queryClient = useQueryClient();
-  const [isNewChat, setIsNewChat] = useQueryState("new");
-  const [isFromClipboard, setIsFromClipboard] = useQueryState("clipboard");
   const [incomingModel] = useQueryState("model");
-  const [incomingInput] = useQueryState("input");
   const [chattype, setChattype] = useState(
     props?.type || incomingModel || "chat",
   );
@@ -165,153 +161,6 @@ export default function Chat(props: ChatProps) {
     sendExtraMessageFields: true,
   });
 
-  const handleFirstImageMessage = useCallback(async () => {
-    const params = new URLSearchParams(window.location.search);
-    if (
-      params.get("imageUrl") &&
-      params.get("imageName") &&
-      params.get("imageType") &&
-      params.get("imageSize")
-    ) {
-      const queryParams: { [key: string]: string } = {};
-      params.forEach((value, key) => {
-        queryParams[key] = value;
-      });
-      const ID = nanoid();
-      const imageMessasgeId = nanoid();
-      const message: Message = {
-        id: ID,
-        role: "user",
-        content: incomingInput || "",
-        name: `${props.username},${props.uid}`,
-      };
-      const createFileFromBlobUrl = async (
-        blobUrl: string,
-        fileName: string,
-      ) => {
-        const response = await fetch(blobUrl);
-        const blob = await response.blob();
-        return new File([blob], fileName, { type: blob.type });
-      };
-
-      const imageUrl = params.get("imageUrl")!;
-      const imageName = params.get("imageName")!;
-      const imageExtension = params.get("imageExtension")!;
-
-      const file = await createFileFromBlobUrl(
-        imageUrl,
-        `image.${imageExtension}`,
-      );
-      console.log("Created file from blob URL:", file);
-      const zodMessage: any = Schema.safeParse({
-        imageName: params.get("imageName"),
-        imageType: params.get("imageType"),
-        imageSize: Number(params.get("imageSize")),
-        file: file,
-        value: input,
-        userId: props.uid,
-        orgId: props.orgId,
-        chatId: props.chatId,
-        message: [message],
-        id: ID,
-        chattype: chattype,
-      });
-      console.log("zodMessageImage Extension:", imageExtension);
-      // console.log("zodmessage", zodMessage);
-      // console.log("dropzone", props.dropZoneActive);
-      console.log("zodMessage", zodMessage, imageExtension);
-      if (zodMessage.success) {
-        const zodMSG = JSON.stringify(zodMessage);
-        const formData = new FormData();
-        formData.append("zodMessage", zodMSG);
-        formData.append("file", file);
-        const response = await fetch("/api/imageInput", {
-          method: "POST",
-          body: formData,
-        });
-        if (response) {
-          console.log("responce", response);
-          let assistantMsg = "";
-          const reader = response.body?.getReader();
-          console.log("reader", reader);
-          const decoder = new TextDecoder();
-          let charsReceived = 0;
-          let content = "";
-          reader
-            ?.read()
-            .then(async function processText({ done, value }) {
-              if (done) {
-                console.log("Stream complete");
-                return;
-              }
-              charsReceived += value.length;
-              const chunk = decoder.decode(value, { stream: true });
-              assistantMsg += chunk === "" ? `${chunk} \n` : chunk;
-              content += chunk === "" ? `${chunk} \n` : chunk;
-              // console.log("assistMsg", assistantMsg);
-              setMessages([
-                ...messages,
-                awsImageMessage,
-                message,
-                {
-                  ...assistantMessage,
-                  content: assistantMsg,
-                },
-              ]);
-              reader.read().then(processText);
-            })
-            .then((e) => {
-              console.error("error", e);
-            });
-          const awsImageMessage = {
-            role: "user",
-            subRole: "input-image",
-            content: `${process.env.NEXT_PUBLIC_IMAGE_PREFIX_URL}imagefolder/${props.chatId}/${ID}.${imageExtension}`,
-            id: ID,
-          } as Message;
-          const assistantMessage: Message = {
-            id: ID,
-            role: "assistant",
-            content: content,
-          };
-
-          console.log("image chat", queryParams);
-          // image chat
-        }
-      }
-    }
-  }, []);
-
-  //TODO: handle user incoming from dashboard when invoked a chat
-  useEffect(() => {
-    if (isNewChat === "true" && incomingInput) {
-      //TODO: use types for useQueryState
-      if (incomingInput && chattype !== "tldraw") {
-        const params = new URLSearchParams(window.location.search);
-        if (
-          params.get("imageUrl") &&
-          params.get("imageName") &&
-          params.get("imageType") &&
-          params.get("imageSize")
-        ) {
-          console.log("zodMessage", "we made to here", params);
-          handleFirstImageMessage();
-        } else {
-          const newMessage = {
-            id: nanoid(),
-            role: "user",
-            content: incomingInput,
-            name: `${props.username},${props.uid}`,
-            audio: "",
-          } as Message;
-          append(newMessage);
-        }
-      }
-    }
-    setIsFromClipboard("false");
-    setIsNewChat("false");
-  }, [isFromClipboard, isNewChat]);
-
   useEffect(() => {
     let mainArray: Message[][] = [];
     let subarray: Message[] = [];
diff --git a/src/components/inputBar.tsx b/src/components/inputBar.tsx
index d8a79eb..052b64e 100644
--- a/src/components/inputBar.tsx
+++ b/src/components/inputBar.tsx
@@ -6,6 +6,7 @@ import {
   Dispatch,
   FormEvent,
   SetStateAction,
+  useCallback,
   useEffect,
   useState,
 } from "react";
@@ -24,6 +25,7 @@ import { useImageState } from "@/store/tlDrawImage";
 // import ModelSwitcher from "./modelswitcher";
 // import VadAudio from "./vadAudio";
 import VadAudio from "./VadAudio";
+import { useQueryState } from "next-usequerystate";
 
 const isValidImageType = (value: string) =>
   /^image\/(jpeg|png|jpg|webp)$/.test(value);
@@ -100,7 +102,163 @@ const InputBar = (props: InputBarProps) => {
   const [disableInputs, setDisableInputs] = useState(false);
   const [isRagLoading, setIsRagLoading] = useState(false);
   const queryClient = useQueryClient();
+  const [isNewChat, setIsNewChat] = useQueryState("new");
+  const [isFromClipboard, setIsFromClipboard] = useQueryState("clipboard");
+  const [incomingModel] = useQueryState("model");
+  const [incomingInput] = useQueryState("input");
+  const [chattype, setChattype] = useState(
+    props?.chattype || incomingModel || "chat",
+  );
+
+  const handleFirstImageMessage = useCallback(async () => {
+    const params = new URLSearchParams(window.location.search);
+    if (
+      params.get("imageUrl") &&
+      params.get("imageName") &&
+      params.get("imageType") &&
+      params.get("imageSize")
+    ) {
+      const queryParams: { [key: string]: string } = {};
+      params.forEach((value, key) => {
+        queryParams[key] = value;
+      });
+      const ID = nanoid();
+      const message: Message = {
+        id: ID,
+        role: "user",
+        content: incomingInput || "",
+        name: `${props.username},${props.userId}`,
+      };
+      const createFileFromBlobUrl = async (
+        blobUrl: string,
+        fileName: string,
+      ) => {
+        const response = await fetch(blobUrl);
+        const blob = await response.blob();
+        return new File([blob], fileName, { type: blob.type });
+      };
+
+      const imageUrl = params.get("imageUrl")!;
+      const imageExtension = params.get("imageExtension")!;
+
+      const file = await createFileFromBlobUrl(
+        imageUrl,
+        `image.${imageExtension}`,
+      );
+      console.log("Created file from blob URL:", file);
+      const zodMessage: any = Schema.safeParse({
+        imageName: params.get("imageName"),
+        imageType: params.get("imageType"),
+        imageSize: Number(params.get("imageSize")),
+        file: file,
+        value: incomingInput || "",
+        userId: props.userId,
+        orgId: props.orgId,
+        chatId: props.chatId,
+        message: [message],
+        id: ID,
+        chattype: chattype,
+      });
+      console.log("zodMessageImage Extension:", imageExtension);
+      // console.log("zodmessage", zodMessage);
+      // console.log("dropzone", props.dropZoneActive);
+      console.log("zodMessage", zodMessage, imageExtension);
+      if (zodMessage.success) {
+        const zodMSG = JSON.stringify(zodMessage);
+        const formData = new FormData();
+        formData.append("zodMessage", zodMSG);
+        formData.append("file", file);
+        setIsRagLoading(true);
+        const response = await fetch("/api/imageInput", {
+          method: "POST",
+          body: formData,
+        });
+        if (response && response.status.toString().startsWith("2")) {
+          console.log("responce", response);
+          let assistantMsg = "";
+          const reader = response.body?.getReader();
+          console.log("reader", reader);
+          const decoder = new TextDecoder();
+          let charsReceived = 0;
+          let content = "";
+          reader
+            ?.read()
+            .then(async function processText({ done, value }) {
+              if (done) {
+                setIsRagLoading(false);
+                console.log("Stream complete");
+                return;
+              }
+              charsReceived += value.length;
+              const chunk = decoder.decode(value, { stream: true });
+              assistantMsg += chunk === "" ? `${chunk} \n` : chunk;
+              content += chunk === "" ? `${chunk} \n` : chunk;
+              // console.log("assistMsg", assistantMsg);
+              props.setMessages([
+                ...props.messages,
+                awsImageMessage,
+                message,
+                {
+                  ...assistantMessage,
+                  content: assistantMsg,
+                },
+              ]);
+              reader.read().then(processText);
+            })
+            .then((e) => {
+              setIsRagLoading(false);
+              console.error("error", e);
+            });
+          const awsImageMessage = {
+            role: "user",
+            subRole: "input-image",
+            content: `${process.env.NEXT_PUBLIC_IMAGE_PREFIX_URL}imagefolder/${props.chatId}/${ID}.${imageExtension}`,
+            id: ID,
+          } as Message;
+          const assistantMessage: Message = {
+            id: ID,
+            role: "assistant",
+            content: content,
+          };
+          console.log("image chat", queryParams);
+          // image chat
+        } else {
+          //TODO: api thrown some error
+          setIsRagLoading(false);
+        }
+      }
+    }
+  }, []);
+
+  useEffect(() => {
+    if (isNewChat === "true" && incomingInput) {
+      //TODO: use types for useQueryState
+      if (incomingInput && chattype !== "tldraw") {
+        const params = new URLSearchParams(window.location.search);
+        if (
+          params.get("imageUrl") &&
+          params.get("imageName") &&
+          params.get("imageType") &&
+          params.get("imageSize")
+        ) {
+          console.log("zodMessage", "we made to here", params);
+          handleFirstImageMessage();
+        } else {
+          const newMessage = {
+            id: nanoid(),
+            role: "user",
+            content: incomingInput,
+            name: `${props.username},${props.userId}`,
+            audio: "",
+          } as Message;
+          props.append(newMessage);
+        }
+      }
+    }
+    setIsFromClipboard("false");
+    setIsNewChat("false");
+  }, [isFromClipboard, isNewChat]);
 
   // const ably = useAbly();
 
   // console.log(
@@ -319,30 +477,6 @@ const InputBar = (props: InputBarProps) => {
     props.setInput("");
   };
 
-  const handleAudio = async (audioFile: File) => {
-    setIsAudioWaveVisible(false);
-    setIsTranscribing(true);
-    const f = new FormData();
-    f.append("file", audioFile);
-    // Buffer.from(audioFile)
-    console.log(audioFile);
-    try {
-      const res = await fetch("/api/transcript", {
-        method: "POST",
-        body: f,
-      });
-
-      // console.log('data', await data.json());
-      const data = await res.json();
-      console.log("got the data", data);
-      props.setInput(data.text);
-      setIsTranscribing(false);
-    } catch (err) {
-      console.error("got in error", err);
-      setIsTranscribing(false);
-    }
-  };
-
   const [audioId, setAudioId] = useState(0);
   const [transcriptHashTable, setTranscriptHashTable] = useState<{
     [key: number]: string;
@@ -417,7 +551,9 @@ const InputBar = (props: InputBarProps) => {
       if (countdown > 0) {
         updateStatus({
           isTyping: true,
-          username: `Echoes is thinking (${countdown--} secs)`,
+          username: props?.isLoading
+            ? `Echoes is thinking (${countdown--} secs)`
+            : `Echoes is typing (${countdown--} secs)`,
           id: props.userId,
         });
       } else {