From 5f49cc50178d4abc67be420534f8cfeafb60c954 Mon Sep 17 00:00:00 2001
From: Saarika Bhasi <55930906+saarikabhasi@users.noreply.github.com>
Date: Wed, 7 Aug 2024 16:03:53 -0400
Subject: [PATCH] [Search] [Playground] Improve follow up question flow (#189848)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

## Summary

Adds the query used for searching to the assistant response. This gives the user visibility into the query that is used to answer follow up questions.

[Screenshot 2024-08-03 at 3 24 10 PM]

### Checklist

Delete any items that are not applicable to this PR.

- [x] Any text added follows [EUI's writing guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses sentence case text and includes [i18n support](https://github.com/elastic/kibana/blob/main/packages/kbn-i18n/README.md)
- [x] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios
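At a glance: the server appends a `search_query` annotation containing the question that was actually used for retrieval, and the client folds it into the message's `inputTokens.searchQuery` so the UI can show it next to the retrieved documents. Below is a minimal sketch of that mapping with simplified stand-in types; only the `search_query` / `prompt_token_count` / `context_clipped` annotation names and the `searchQuery` field mirror the diff, while `context_token_count`, `AIMessageLike`, and `toAIMessage` are illustrative assumptions, not the plugin's real types.

```ts
// Sketch of the annotation-to-message mapping (stand-in types, not the plugin's own).
type AnnotationTokens =
  | { type: 'context_token_count'; count: number } // assumed name for the context-size annotation
  | { type: 'prompt_token_count'; count: number }
  | { type: 'context_clipped'; count: number }
  | { type: 'search_query'; question: string };

interface AIMessageLike {
  content: string;
  inputTokens: {
    context?: number;
    total?: number;
    contextClipped?: number;
    searchQuery?: string; // new: the query that was actually used for retrieval
  };
}

// Client side: fold the streamed annotations into the assistant message,
// the way transform_to_messages.ts maps the other token annotations.
function toAIMessage(content: string, annotations: AnnotationTokens[]): AIMessageLike {
  const inputTokens: AIMessageLike['inputTokens'] = {};
  for (const annotation of annotations) {
    switch (annotation.type) {
      case 'context_token_count':
        inputTokens.context = annotation.count;
        break;
      case 'prompt_token_count':
        inputTokens.total = annotation.count;
        break;
      case 'context_clipped':
        inputTokens.contextClipped = annotation.count;
        break;
      case 'search_query':
        inputTokens.searchQuery = annotation.question; // surfaced to the user in the UI
        break;
    }
  }
  return { content, inputTokens };
}

// The UI can then render "Searched for: …" above the retrieved documents.
const message = toAIMessage('…answer text…', [
  { type: 'prompt_token_count', count: 10 },
  { type: 'context_token_count', count: 20 },
  { type: 'search_query', question: 'Test question' },
]);
console.log(message.inputTokens.searchQuery); // "Test question"
```

Because the query arrives as a message annotation on the existing response stream, no extra request is needed to display it.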

---------

Co-authored-by: Elastic Machine
---
 .../message_list/assistant_message.test.tsx      |   2 +-
 .../message_list/assistant_message.tsx           | 104 +++++++++++-------
 .../public/hooks/use_ai_assist_chat.ts           |   2 +-
 x-pack/plugins/search_playground/public/types.ts |   4 +-
 .../public/utils/transform_to_messages.ts        |   3 +
 .../server/lib/conversational_chain.ts           |  11 ++
 6 files changed, 84 insertions(+), 42 deletions(-)

diff --git a/x-pack/plugins/search_playground/public/components/message_list/assistant_message.test.tsx b/x-pack/plugins/search_playground/public/components/message_list/assistant_message.test.tsx
index f3a903331cf3b..4608546616a51 100644
--- a/x-pack/plugins/search_playground/public/components/message_list/assistant_message.test.tsx
+++ b/x-pack/plugins/search_playground/public/components/message_list/assistant_message.test.tsx
@@ -38,7 +38,7 @@ describe('AssistantMessage component', () => {
     createdAt: new Date(),
     citations: [],
     retrievalDocs: [{ content: '', metadata: { _id: '1', _index: 'index', _score: 1 } }],
-    inputTokens: { context: 20, total: 10 },
+    inputTokens: { context: 20, total: 10, searchQuery: 'Test question' },
   };

   it('renders message content correctly', () => {
diff --git a/x-pack/plugins/search_playground/public/components/message_list/assistant_message.tsx b/x-pack/plugins/search_playground/public/components/message_list/assistant_message.tsx
index 7e94059bdc272..e0b14c2a31934 100644
--- a/x-pack/plugins/search_playground/public/components/message_list/assistant_message.tsx
+++ b/x-pack/plugins/search_playground/public/components/message_list/assistant_message.tsx
@@ -53,53 +53,79 @@ export const AssistantMessage: React.FC = ({ message }) =
[JSX hunk body garbled in extraction: inside `{!!retrievalDocs?.length && (...)}` the existing markup is wrapped in a fragment and re-indented, and a new element rendering the search query is added above the retrieved-documents summary; the `setIsDocsFlyoutOpen(true)` toggle, the `{isDocsFlyoutOpen && (...)}` flyout with `onClose={() => setIsDocsFlyoutOpen(false)}` and `retrievalDocs={retrievalDocs}`, and the trailing `{retrievalDocs?.length === 0 && (` context are otherwise unchanged.]
[Hunks for use_ai_assist_chat.ts and public/types.ts, and the header of the transform_to_messages.ts hunk, were not recovered.]
diff --git a/x-pack/plugins/search_playground/public/utils/transform_to_messages.ts b/x-pack/plugins/search_playground/public/utils/transform_to_messages.ts
@@ ... @@
           annotation.type === 'context_clipped'
         )?.count,
+        searchQuery: annotations?.find(
+          (annotation): annotation is AnnotationTokens => annotation.type === 'search_query'
+        )?.question,
       },
     } as AIMessage;
   }
diff --git a/x-pack/plugins/search_playground/server/lib/conversational_chain.ts b/x-pack/plugins/search_playground/server/lib/conversational_chain.ts
index c63481e93c98f..922f672bda5c6 100644
--- a/x-pack/plugins/search_playground/server/lib/conversational_chain.ts
+++ b/x-pack/plugins/search_playground/server/lib/conversational_chain.ts
@@ -198,6 +198,13 @@ class ConversationalChainFn {
         context: RunnableSequence.from([(input) => input.question, retrievalChain]),
         question: (input) => input.question,
       },
+      RunnableLambda.from((inputs) => {
+        data.appendMessageAnnotation({
+          type: 'search_query',
+          question: inputs.question,
+        });
+        return inputs;
+      }),
       RunnableLambda.from(clipContext(this.options?.rag?.inputTokensLimit, prompt, data)),
       RunnableLambda.from(registerContextTokenCounts(data)),
       prompt,
@@ -236,6 +243,10 @@ class ConversationalChainFn {
             type: 'prompt_token_count',
             count: getTokenEstimateFromMessages(msg),
           });
+          data.appendMessageAnnotation({
+            type: 'search_query',
+            question,
+          });
         }
       },
       // callback for prompt based models (Bedrock uses ActionsClientLlm)
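Note (not part of the patch): the server-side change works by inserting a pass-through step into the existing LangChain `RunnableSequence`. Below is a minimal standalone sketch of that pattern, assuming `@langchain/core` and a stand-in `data` object in place of the stream object the chain actually writes annotations to.

```ts
import { RunnableLambda, RunnableSequence } from '@langchain/core/runnables';

// Stand-in for the stream data object the real chain calls appendMessageAnnotation on.
const annotations: Array<{ type: string; question: string }> = [];
const data = {
  appendMessageAnnotation(annotation: { type: string; question: string }) {
    annotations.push(annotation);
  },
};

// Pass-through step: record the (possibly rewritten) question as a 'search_query'
// annotation, then hand the inputs to the next step unchanged.
const recordSearchQuery = RunnableLambda.from((inputs: { question: string }) => {
  data.appendMessageAnnotation({ type: 'search_query', question: inputs.question });
  return inputs;
});

const chain = RunnableSequence.from([
  recordSearchQuery,
  // Stand-in for the rest of the chain (clipContext, prompt, model, …).
  RunnableLambda.from((inputs: { question: string }) => `answering: ${inputs.question}`),
]);

chain.invoke({ question: 'How do I add an index?' }).then((answer) => {
  console.log(answer);      // "answering: How do I add an index?"
  console.log(annotations); // [{ type: 'search_query', question: 'How do I add an index?' }]
});
```

Because the step returns its inputs untouched, it can be dropped into the sequence without changing what the downstream prompt and model receive.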