Skip to content

Commit

Permalink
[8.x] [Observability AI Assistant] migrate to inference client #197630 (
Browse files Browse the repository at this point in the history
#199286) (#203399)

# Backport

This will backport the following commits from `main` to `8.x`:
- [[Observability AI Assistant] migrate to inference client #197630
(#199286)](https://github.com/elastic/kibana/pull/199286)

<!--- Backport version: 9.4.3 -->

### Questions ?
Please refer to the [Backport tool
documentation](https://github.com/sqren/backport)

<!--BACKPORT [{"author":{"name":"Arturo
Lidueña","email":"[email protected]"},"sourceCommit":{"committedDate":"2024-12-09T11:46:31Z","message":"[Observability
AI Assistant] migrate to inference client #197630 (#199286)\n\n##
Summary\r\n\r\nCloses
https://github.com/elastic/kibana/issues/183245\r\n\r\nCloses #197630
\r\n[Observability AI Assistant] Partially migrate to inference client
\r\n\r\nreplacing `inferenceClient.chatComplete`
to\r\n`observabilityAIAssistantClient.chat`
-\r\n`observabilityAIAssistantClient.complete` does a bunch of stuff on
top\r\nof `chat`. keepping `observabilityAIAssistantClient.chat` as a
wrapper\r\nfor now because it also adds instrumentation and
logging.","sha":"df0dfa5216c1d1d3b72a30b3b1d903781db90615","branchLabelMapping":{"^v9.0.0$":"main","^v8.18.0$":"8.x","^v(\\d+).(\\d+).\\d+$":"$1.$2"}},"sourcePullRequest":{"labels":["release_note:skip","v9.0.0","Team:Obs
AI
Assistant","ci:project-deploy-observability","backport:version","v8.18.0"],"title":"[Observability
AI Assistant] migrate to inference client
#197630","number":199286,"url":"https://github.com/elastic/kibana/pull/199286","mergeCommit":{"message":"[Observability
AI Assistant] migrate to inference client #197630 (#199286)\n\n##
Summary\r\n\r\nCloses
https://github.com/elastic/kibana/issues/183245\r\n\r\nCloses #197630
\r\n[Observability AI Assistant] Partially migrate to inference client
\r\n\r\nreplacing `inferenceClient.chatComplete`
to\r\n`observabilityAIAssistantClient.chat`
-\r\n`observabilityAIAssistantClient.complete` does a bunch of stuff on
top\r\nof `chat`. keepping `observabilityAIAssistantClient.chat` as a
wrapper\r\nfor now because it also adds instrumentation and
logging.","sha":"df0dfa5216c1d1d3b72a30b3b1d903781db90615"}},"sourceBranch":"main","suggestedTargetBranches":["8.x"],"targetPullRequestStates":[{"branch":"main","label":"v9.0.0","branchLabelMappingKey":"^v9.0.0$","isSourceBranch":true,"state":"MERGED","url":"https://github.com/elastic/kibana/pull/199286","number":199286,"mergeCommit":{"message":"[Observability
AI Assistant] migrate to inference client #197630 (#199286)\n\n##
Summary\r\n\r\nCloses
https://github.com/elastic/kibana/issues/183245\r\n\r\nCloses #197630
\r\n[Observability AI Assistant] Partially migrate to inference client
\r\n\r\nreplacing `inferenceClient.chatComplete`
to\r\n`observabilityAIAssistantClient.chat`
-\r\n`observabilityAIAssistantClient.complete` does a bunch of stuff on
top\r\nof `chat`. keepping `observabilityAIAssistantClient.chat` as a
wrapper\r\nfor now because it also adds instrumentation and
logging.","sha":"df0dfa5216c1d1d3b72a30b3b1d903781db90615"}},{"branch":"8.x","label":"v8.18.0","branchLabelMappingKey":"^v8.18.0$","isSourceBranch":false,"state":"NOT_CREATED"}]}]
BACKPORT-->

Co-authored-by: Arturo Lidueña <[email protected]>
  • Loading branch information
kibanamachine and arturoliduena authored Dec 9, 2024
1 parent 4928233 commit 54d86c7
Show file tree
Hide file tree
Showing 47 changed files with 559 additions and 2,667 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,6 @@ export enum ObservabilityAIAssistantConnectorType {
Gemini = '.gemini',
}

export const SUPPORTED_CONNECTOR_TYPES = [
ObservabilityAIAssistantConnectorType.OpenAI,
ObservabilityAIAssistantConnectorType.Bedrock,
ObservabilityAIAssistantConnectorType.Gemini,
];

export function isSupportedConnectorType(
type: string
): type is ObservabilityAIAssistantConnectorType {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import { TokenCount as TokenCountType, type Message } from './types';

export enum StreamingChatResponseEventType {
ChatCompletionChunk = 'chatCompletionChunk',
ChatCompletionMessage = 'chatCompletionMessage',
ConversationCreate = 'conversationCreate',
ConversationUpdate = 'conversationUpdate',
MessageAdd = 'messageAdd',
Expand All @@ -25,19 +26,26 @@ type StreamingChatResponseEventBase<
type: TEventType;
} & TData;

export type ChatCompletionChunkEvent = StreamingChatResponseEventBase<
StreamingChatResponseEventType.ChatCompletionChunk,
{
id: string;
message: {
content?: string;
function_call?: {
name?: string;
arguments?: string;
type BaseChatCompletionEvent<TType extends StreamingChatResponseEventType> =
StreamingChatResponseEventBase<
TType,
{
id: string;
message: {
content?: string;
function_call?: {
name?: string;
arguments?: string;
};
};
};
}
>;
}
>;

export type ChatCompletionChunkEvent =
BaseChatCompletionEvent<StreamingChatResponseEventType.ChatCompletionChunk>;

export type ChatCompletionMessageEvent =
BaseChatCompletionEvent<StreamingChatResponseEventType.ChatCompletionMessage>;

export type ConversationCreateEvent = StreamingChatResponseEventBase<
StreamingChatResponseEventType.ConversationCreate,
Expand Down Expand Up @@ -100,6 +108,7 @@ export type TokenCountEvent = StreamingChatResponseEventBase<

export type StreamingChatResponseEvent =
| ChatCompletionChunkEvent
| ChatCompletionMessageEvent
| ConversationCreateEvent
| ConversationUpdateEvent
| MessageAddEvent
Expand All @@ -112,7 +121,7 @@ export type StreamingChatResponseEventWithoutError = Exclude<
ChatCompletionErrorEvent
>;

export type ChatEvent = ChatCompletionChunkEvent | TokenCountEvent;
export type ChatEvent = ChatCompletionChunkEvent | TokenCountEvent | ChatCompletionMessageEvent;
export type MessageOrChatEvent = ChatEvent | MessageAddEvent;

export enum ChatCompletionErrorCode {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,13 @@
* 2.0.
*/

import { Message, MessageRole } from '@kbn/observability-ai-assistant-plugin/common';
import {
AssistantMessage,
Message as InferenceMessage,
MessageRole as InferenceMessageRole,
} from '@kbn/inference-common';
import { generateFakeToolCallId } from '@kbn/inference-plugin/common';
import { Message, MessageRole } from '.';

export function convertMessagesForInference(messages: Message[]): InferenceMessage[] {
const inferenceMessages: InferenceMessage[] = [];
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@ export {

export type {
ChatCompletionChunkEvent,
ChatCompletionMessageEvent,
TokenCountEvent,
ConversationCreateEvent,
ConversationUpdateEvent,
MessageAddEvent,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,9 @@
*/

import { cloneDeep } from 'lodash';
import { type Observable, scan } from 'rxjs';
import type { ChatCompletionChunkEvent } from '../conversation_complete';
import { type Observable, scan, filter, defaultIfEmpty } from 'rxjs';
import type { ChatCompletionChunkEvent, ChatEvent } from '../conversation_complete';
import { StreamingChatResponseEventType } from '../conversation_complete';
import { MessageRole } from '../types';

export interface ConcatenatedMessage {
Expand All @@ -24,8 +25,12 @@ export interface ConcatenatedMessage {

export const concatenateChatCompletionChunks =
() =>
(source: Observable<ChatCompletionChunkEvent>): Observable<ConcatenatedMessage> =>
(source: Observable<ChatEvent>): Observable<ConcatenatedMessage> =>
source.pipe(
filter(
(event): event is ChatCompletionChunkEvent =>
event.type === StreamingChatResponseEventType.ChatCompletionChunk
),
scan(
(acc, { message }) => {
acc.message.content += message.content ?? '';
Expand All @@ -45,5 +50,16 @@ export const concatenateChatCompletionChunks =
role: MessageRole.Assistant,
},
} as ConcatenatedMessage
)
),
defaultIfEmpty({
message: {
content: '',
function_call: {
name: '',
arguments: '',
trigger: MessageRole.Assistant,
},
role: MessageRole.Assistant,
},
})
);
Original file line number Diff line number Diff line change
Expand Up @@ -14,13 +14,15 @@ import {
OperatorFunction,
shareReplay,
withLatestFrom,
filter,
} from 'rxjs';
import { withoutTokenCountEvents } from './without_token_count_events';
import {
ChatCompletionChunkEvent,
type ChatCompletionChunkEvent,
ChatEvent,
MessageAddEvent,
StreamingChatResponseEventType,
StreamingChatResponseEvent,
} from '../conversation_complete';
import {
concatenateChatCompletionChunks,
Expand Down Expand Up @@ -51,13 +53,23 @@ function mergeWithEditedMessage(
);
}

/**
 * Builds an RxJS operator that narrows a stream of chat-response events
 * down to only the `ChatCompletionChunk` events, discarding all other
 * event types (message, token-count, conversation events, …).
 */
function filterChunkEvents(): OperatorFunction<
  StreamingChatResponseEvent,
  ChatCompletionChunkEvent
> {
  // Type predicate so downstream operators see the narrowed chunk type.
  const isChunkEvent = (
    event: StreamingChatResponseEvent
  ): event is ChatCompletionChunkEvent =>
    event.type === StreamingChatResponseEventType.ChatCompletionChunk;

  return filter(isChunkEvent);
}

export function emitWithConcatenatedMessage<T extends ChatEvent>(
callback?: ConcatenateMessageCallback
): OperatorFunction<T, T | MessageAddEvent> {
return (source$) => {
const shared = source$.pipe(shareReplay());

const withoutTokenCount$ = shared.pipe(withoutTokenCountEvents());
const withoutTokenCount$ = shared.pipe(filterChunkEvents());

const response$ = concat(
shared,
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
@@ -1,38 +1,26 @@
{
"type": "plugin",
"id": "@kbn/observability-ai-assistant-plugin",
"owner": [
"@elastic/obs-ai-assistant"
],
"owner": ["@elastic/obs-ai-assistant"],
"group": "platform",
"visibility": "shared",
"plugin": {
"id": "observabilityAIAssistant",
"browser": true,
"server": true,
"configPath": [
"xpack",
"observabilityAIAssistant"
],
"configPath": ["xpack", "observabilityAIAssistant"],
"requiredPlugins": [
"actions",
"features",
"licensing",
"security",
"taskManager",
"dataViews"
],
"optionalPlugins": [
"cloud",
"serverless"
],
"requiredBundles": [
"kibanaReact",
"kibanaUtils"
],
"runtimePluginDependencies": [
"ml"
"dataViews",
"inference"
],
"optionalPlugins": ["cloud", "serverless"],
"requiredBundles": ["kibanaReact", "kibanaUtils"],
"runtimePluginDependencies": ["ml"],
"extraPublicDirs": []
}
}
Loading

0 comments on commit 54d86c7

Please sign in to comment.