Skip to content

Commit

Permalink
Merge pull request #249 from satrong/fix-041011
Browse files Browse the repository at this point in the history
存储消息的关联知识库信息
  • Loading branch information
sugarforever authored Apr 10, 2024
2 parents 9f58451 + 1fc6c82 commit cb1e7cf
Show file tree
Hide file tree
Showing 5 changed files with 78 additions and 42 deletions.
2 changes: 2 additions & 0 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,9 @@
"html.format.wrapAttributes": "preserve-aligned",
"vue.format.wrapAttributes": "preserve-aligned",
"cSpell.words": [
"dexie",
"groq",
"knowledgebase",
"nuxt"
],
}
89 changes: 53 additions & 36 deletions components/Chat.vue
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,16 @@ import { type ChatBoxFormData } from '@/components/ChatInputBox.vue'
import { type ChatSessionSettings } from '~/pages/chat/index.vue'
import { ChatSettings } from '#components'
// One retrieved document entry, derived from the persisted ChatHistory record
// (Required<…> strips the optionality of `relevantDocs` before indexing into it).
type RelevantDocument = Required<ChatHistory>['relevantDocs'][number]
// Server-streamed chunk carrying retrieval results; the wire format uses
// snake_case keys (`relevant_documents`), unlike the camelCase client model.
type ResponseRelevantDocument = { type: 'relevant_documents', relevant_documents: RelevantDocument[] }

/** A single entry in the on-screen conversation. */
export interface Message {
  // Primary key of the persisted ChatHistory row; absent until saved.
  id?: number
  role: 'system' | 'assistant' | 'user'
  content: string
  // Transient UI state: 'loading' while streaming, 'canceled' if the user aborted.
  type?: 'loading' | 'canceled'
  timestamp: number
  // Knowledge-base documents that grounded this assistant reply, if any.
  relevantDocs?: RelevantDocument[]
}

// Element type of the instruction list loaded from Ollama.
type Instruction = Awaited<ReturnType<typeof loadOllamaInstructions>>[number]
Expand Down Expand Up @@ -74,20 +78,31 @@ async function loadChatHistory(sessionId?: number) {
role: el.role,
timestamp: el.timestamp,
type: el.canceled ? 'canceled' : undefined,
relevantDocs: el.relevantDocs
} as const
})
}
return []
}
const processRelevantDocuments = (chunk) => {
if (chunk?.type === 'relevant_documents') {
const lastMessage = messages.value[messages.value.length - 1]
if (lastMessage?.role === 'assistant') {
lastMessage.relevant_documents = chunk?.relevant_documents
}
/**
 * Handle a `relevant_documents` chunk from the chat stream: attach the
 * retrieved documents to the latest assistant message, persist a truncated
 * copy on its ChatHistory row, and notify listeners.
 */
const processRelevantDocuments = async (chunk: ResponseRelevantDocument) => {
  if (chunk.type !== 'relevant_documents') return

  const lastMessage = messages.value[messages.value.length - 1]
  if (lastMessage?.role === 'assistant' && chunk.relevant_documents) {
    lastMessage.relevantDocs = chunk.relevant_documents
    // NOTE(review): assumes the assistant message was already saved, so `id` is
    // set — confirm; Dexie rejects an undefined key.
    await clientDB.chatHistories
      .where('id')
      .equals(lastMessage.id!)
      .modify({
        relevantDocs: chunk.relevant_documents.map(el => {
          // Keep at most 100 chars to avoid persisting large content.
          // Bug fix: append the ellipsis only when content was actually cut.
          // The original condition (`length > 0`) added '...' to every
          // non-empty string, even ones shorter than the 100-char limit.
          const truncated = el.pageContent.slice(0, 100)
          const pageContent = truncated + (el.pageContent.length > 100 ? '...' : '')
          return { ...el, pageContent }
        })
      })

    emits('message', lastMessage)
  }
}
const fetchStream = async (url: string, options: RequestInit) => {
const response = await fetch(url, options)
Expand All @@ -103,38 +118,40 @@ const fetchStream = async (url: string, options: RequestInit) => {
if (!line) continue
console.log('line: ', line)
const chatMessage = JSON.parse(line)
processRelevantDocuments(chatMessage)
const content = chatMessage?.message?.content
if (content) {
const lastItem = messages.value[messages.value.length - 1]
if (messages.value.length > 0 && lastItem.role === 'assistant') {
lastItem.content += content
if (lastItem.id && props.sessionId) {
await clientDB.chatHistories
.where('id')
.equals(lastItem.id)
.modify({ message: lastItem.content })
}
} else {
const timestamp = Date.now()
const id = await saveMessage({
message: content,
model: model.value || '',
role: 'assistant',
timestamp,
canceled: false
})
const itemData = { id, role: 'assistant', content, timestamp } as const
if (messages.value.length >= limitHistorySize) {
messages.value = [...messages.value, itemData].slice(-limitHistorySize)
const chatMessage = JSON.parse(line) as { message: Message } | ResponseRelevantDocument
if ('type' in chatMessage) {
await processRelevantDocuments(chatMessage)
} else {
const content = chatMessage?.message?.content
if (content) {
const lastItem = messages.value[messages.value.length - 1]
if (messages.value.length > 0 && lastItem.role === 'assistant') {
lastItem.content += content
if (lastItem.id && props.sessionId) {
await clientDB.chatHistories
.where('id')
.equals(lastItem.id)
.modify({ message: lastItem.content })
}
} else {
messages.value.push(itemData)
const timestamp = Date.now()
const id = await saveMessage({
message: content,
model: model.value || '',
role: 'assistant',
timestamp,
canceled: false
})
const itemData = { id, role: 'assistant', content, timestamp } as const
if (messages.value.length >= limitHistorySize) {
messages.value = [...messages.value, itemData].slice(-limitHistorySize)
} else {
messages.value.push(itemData)
}
}
emits('message', lastItem)
}
emits('message', lastItem)
}
}
}
Expand Down Expand Up @@ -318,7 +335,7 @@ async function saveMessage(data: Omit<ChatHistory, 'sessionId'>) {
<pre v-if="message.role === 'user'" v-html="message.content" class="whitespace-break-spaces"></pre>
<div v-else>
<div v-html="markdown.render(message.content)" class="markdown-body" />
<Sources :relevant_documents="message?.relevant_documents" />
<Sources :relevant_documents="message?.relevantDocs || []" />
</div>
</template>
</div>
Expand Down
2 changes: 1 addition & 1 deletion components/Sources.vue
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ defineProps<{
<UIcon name="i-heroicons-newspaper" /> Sources
</h3>
<div class="grid grid-cols-2 gap-4">
<div class="bg-white border border-gray-100 p-2 rounded"
<div class="bg-gray-100 dark:bg-gray-800 border border-gray-200 dark:border-gray-700 p-2 rounded"
v-for="(relevant_document, index) in relevant_documents"
:key="index">
<h4 class="font-bold line-clamp-1 mb-2">{{ relevant_document?.metadata?.source }}</h4>
Expand Down
7 changes: 7 additions & 0 deletions composables/clientDB.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,13 @@ export interface ChatHistory {
canceled: boolean
instructionId?: number
knowledgeBaseId?: number
relevantDocs?: Array<{
pageContent: string
metadata: {
blobType: string
source: string
}
}>
}

export class MySubClassedDexie extends Dexie {
Expand Down
20 changes: 15 additions & 5 deletions server/api/models/chat/index.post.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import { Readable } from 'stream'
import { BaseMessage } from "@langchain/core/messages"
import { formatDocumentsAsString } from "langchain/util/document"
import { PromptTemplate } from "@langchain/core/prompts"
import { RunnableSequence } from "@langchain/core/runnables"
Expand All @@ -9,6 +8,17 @@ import prisma from "@/server/utils/prisma"
import { createChatModel, createEmbeddings } from '@/server/utils/models'
import { createRetriever } from '@/server/retriever'

/** Shape of the JSON body POSTed to this chat endpoint. */
interface RequestBody {
  // Id of the knowledge base to retrieve context from; the handler branches on
  // its truthiness, so 0/absent means "plain chat without retrieval".
  knowledgebaseId: number
  // Model name passed to createChatModel.
  model: string
  // Model family/provider passed to createChatModel.
  family: string
  // Conversation so far; serialized into the prompt via serializeMessages.
  messages: {
    role: 'user' | 'assistant'
    content: string
  }[]
  // Truthiness selects the streaming response path — presumably a boolean;
  // TODO confirm against callers before tightening the type from `any`.
  stream: any
}

const SYSTEM_TEMPLATE = `Answer the user's question based on the context below.
Present your answer in a structured Markdown format.
Expand All @@ -29,11 +39,11 @@ If the context doesn't contain any relevant information to the question, don't m
Answer:
`

const serializeMessages = (messages: Array<BaseMessage>): string =>
/**
 * Flatten a chat transcript into plain text for prompt interpolation:
 * one "role: content" line per message, newline-separated.
 */
const serializeMessages = (messages: RequestBody['messages']): string => {
  const lines: string[] = []
  for (const msg of messages) {
    lines.push(`${msg.role}: ${msg.content}`)
  }
  return lines.join("\n")
}

export default defineEventHandler(async (event) => {
const { knowledgebaseId, model, family, messages, stream } = await readBody(event)
const { knowledgebaseId, model, family, messages, stream } = await readBody<RequestBody>(event)

if (knowledgebaseId) {
console.log("Chat with knowledge base with id: ", knowledgebaseId)
Expand All @@ -48,7 +58,7 @@ export default defineEventHandler(async (event) => {
return
}

const embeddings = createEmbeddings(knowledgebase.embedding, event)
const embeddings = createEmbeddings(knowledgebase.embedding!, event)
const retriever: BaseRetriever = await createRetriever(embeddings, `collection_${knowledgebase.id}`)

const chat = createChatModel(model, family, event)
Expand Down Expand Up @@ -114,7 +124,7 @@ export default defineEventHandler(async (event) => {
return sendStream(event, readableStream)
} else {
const llm = createChatModel(model, family, event)
const response = await llm?.stream(messages.map((message: BaseMessage) => {
const response = await llm?.stream(messages.map((message: RequestBody['messages'][number]) => {
return [message.role, message.content]
}))

Expand Down

0 comments on commit cb1e7cf

Please sign in to comment.