# [Security Solution] AI Assistant: LLM Connector model chooser bug. New chat does not use connector's model (#199303) (#204014)

## Summary

This PR fixes [this bug](#199303).

The issue happens with some locally set up LLMs (like
[Ollama](https://github.com/ollama/ollama)), which require the correct
`model` to be passed as part of the [chat completions
API](https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion) request.
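
For context, here is a minimal sketch (not from this PR) of the kind of OpenAI-compatible chat completions request the connector ends up issuing against a local Ollama server; the endpoint, port, and `llama3` model name are illustrative assumptions:

```ts
// Minimal sketch of an OpenAI-compatible chat completions request against a local Ollama
// server (assumes the default port 11434 and an already-pulled "llama3" model).
const response = await fetch('http://localhost:11434/v1/chat/completions', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    // Must name a locally pulled model; a hardcoded "gpt-4o" here is what produces the 404 below.
    model: 'llama3',
    messages: [{ role: 'user', content: 'Hello world' }],
  }),
});
console.log((await response.json()).choices[0].message.content);
```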

We had a bug in our code where, on new conversation creation, we would not
pass the full connector configuration; only `connectorId` and
`actionTypeId` were passed. Here is the old implementation:

```ts
const newConversation = await createConversation({
  title: NEW_CHAT,
  ...(currentConversation?.apiConfig != null &&
  currentConversation?.apiConfig?.actionTypeId != null
    ? {
        apiConfig: {
          // Only these two fields were copied; the rest of apiConfig (including the model) was dropped.
          connectorId: currentConversation.apiConfig.connectorId,
          actionTypeId: currentConversation.apiConfig.actionTypeId,
          ...(newSystemPrompt?.id != null ? { defaultSystemPromptId: newSystemPrompt.id } : {}),
        },
      }
    : {}),
});
```

As a result, the new conversation did not have the complete connector
configuration, and the default model (`gpt-4o`) was passed to the LLM
instead of the model configured on the connector.
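
With the fix, the full `apiConfig` of the current conversation (minus its old `defaultSystemPromptId`) is carried over when the new conversation is created, as shown in the first diff below:

```ts
let conversation: Partial<Conversation> = {};
if (currentConversation?.apiConfig) {
  // Keep everything (connectorId, actionTypeId, the configured model, ...) except the old default system prompt.
  const { defaultSystemPromptId: _, ...restApiConfig } = currentConversation?.apiConfig;
  conversation =
    restApiConfig.actionTypeId != null
      ? {
          apiConfig: {
            ...restApiConfig,
            ...(newSystemPrompt?.id != null ? { defaultSystemPromptId: newSystemPrompt.id } : {}),
          },
        }
      : {};
}
const newConversation = await createConversation({
  title: NEW_CHAT,
  ...conversation,
});
```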

Also, I updated the default body that we use on the Test connector page to
make sure that we send a `model` parameter to the LLM for `OpenAI > Other
(OpenAI Compatible Service)` connectors.
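
For illustration, with a connector whose `defaultModel` is, say, `llama3` (an assumed value), the default test body produced for the Other provider now looks like:

```ts
// Output of DEFAULT_BODY_OTHER('llama3') from the constants change below ("llama3" is illustrative).
const body = `{
  "model": "llama3",
  "messages": [{
    "role":"user",
    "content":"Hello world"
  }]
}`;
```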

### Testing notes

Steps to reproduce:
1. Install
[Ollama](https://github.com/ollama/ollama?tab=readme-ov-file#ollama)
locally
2. Set up an OpenAI connector using the Other (OpenAI Compatible Service)
provider
3. Open the AI Assistant and select the created Ollama connector for the chat
4. Create a "New Chat"
5. The Ollama connector should be selected
6. Send a message to the LLM (for example, "hello world")

Expected: there should be no errors saying `ActionsClientChatOpenAI: an
error occurred while running the action - Unexpected API Error: - 404
model "gpt-4o" not found, try pulling it first`
e40pud authored Dec 14, 2024
1 parent c87a794 commit 7e4e859
Showing 5 changed files with 53 additions and 26 deletions.
```diff
@@ -265,18 +265,24 @@ export const useCurrentConversation = ({
     }
     const newSystemPrompt = getDefaultNewSystemPrompt(allSystemPrompts);
 
+    let conversation: Partial<Conversation> = {};
+    if (currentConversation?.apiConfig) {
+      const { defaultSystemPromptId: _, ...restApiConfig } = currentConversation?.apiConfig;
+      conversation =
+        restApiConfig.actionTypeId != null
+          ? {
+              apiConfig: {
+                ...restApiConfig,
+                ...(newSystemPrompt?.id != null
+                  ? { defaultSystemPromptId: newSystemPrompt.id }
+                  : {}),
+              },
+            }
+          : {};
+    }
     const newConversation = await createConversation({
       title: NEW_CHAT,
-      ...(currentConversation?.apiConfig != null &&
-      currentConversation?.apiConfig?.actionTypeId != null
-        ? {
-            apiConfig: {
-              connectorId: currentConversation.apiConfig.connectorId,
-              actionTypeId: currentConversation.apiConfig.actionTypeId,
-              ...(newSystemPrompt?.id != null ? { defaultSystemPromptId: newSystemPrompt.id } : {}),
-            },
-          }
-        : {}),
+      ...conversation,
     });
 
     if (newConversation) {
```
```diff
@@ -11,23 +11,48 @@ import { FormattedMessage } from '@kbn/i18n-react';
 import { EuiLink } from '@elastic/eui';
 import { DEFAULT_OPENAI_MODEL, OpenAiProviderType } from '../../../common/openai/constants';
 import * as i18n from './translations';
+import { Config } from './types';
 
 export const DEFAULT_URL = 'https://api.openai.com/v1/chat/completions' as const;
 export const DEFAULT_URL_AZURE =
   'https://{your-resource-name}.openai.azure.com/openai/deployments/{deployment-id}/chat/completions?api-version={api-version}' as const;
 
-export const DEFAULT_BODY = `{
+const DEFAULT_BODY = `{
   "messages": [{
     "role":"user",
     "content":"Hello world"
   }]
 }`;
-export const DEFAULT_BODY_AZURE = `{
+const DEFAULT_BODY_AZURE = `{
   "messages": [{
     "role":"user",
     "content":"Hello world"
   }]
 }`;
+const DEFAULT_BODY_OTHER = (defaultModel: string) => `{
+  "model": "${defaultModel}",
+  "messages": [{
+    "role":"user",
+    "content":"Hello world"
+  }]
+}`;
+
+export const getDefaultBody = (config?: Config) => {
+  if (!config) {
+    // default to OpenAiProviderType.OpenAi sample data
+    return DEFAULT_BODY;
+  }
+  if (config?.apiProvider === OpenAiProviderType.Other) {
+    // update sample data if Other (OpenAI Compatible Service)
+    return config.defaultModel ? DEFAULT_BODY_OTHER(config.defaultModel) : DEFAULT_BODY;
+  }
+  if (config?.apiProvider === OpenAiProviderType.AzureAi) {
+    // update sample data if AzureAi
+    return DEFAULT_BODY_AZURE;
+  }
+  // default to OpenAiProviderType.OpenAi sample data
+  return DEFAULT_BODY;
+};
+
 export const openAiConfig: ConfigFieldSchema[] = [
   {
```
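
A short usage sketch of the new `getDefaultBody` helper (the config literals are illustrative, not from the PR):

```ts
// "Other" provider with a configured default model -> body includes that model.
getDefaultBody({ apiProvider: OpenAiProviderType.Other, apiUrl: 'http://localhost:11434/v1/chat/completions', defaultModel: 'llama3' });
// Azure provider -> Azure sample body (the model is implied by the deployment in the URL).
getDefaultBody({ apiProvider: OpenAiProviderType.AzureAi, apiUrl: DEFAULT_URL_AZURE });
// No config -> plain OpenAI sample body.
getDefaultBody();
```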
```diff
@@ -9,7 +9,7 @@ import React from 'react';
 import { fireEvent, render } from '@testing-library/react';
 import ParamsFields from './params';
 import { OpenAiProviderType, SUB_ACTION } from '../../../common/openai/constants';
-import { DEFAULT_BODY, DEFAULT_BODY_AZURE, DEFAULT_URL } from './constants';
+import { DEFAULT_URL, getDefaultBody } from './constants';
 
 const messageVariables = [
   {
@@ -73,14 +73,15 @@ describe('Gen AI Params Fields renders', () => {
       );
       expect(editAction).toHaveBeenCalledTimes(2);
       expect(editAction).toHaveBeenCalledWith('subAction', SUB_ACTION.RUN, 0);
+      const body = getDefaultBody(actionConnector.config);
       if (apiProvider === OpenAiProviderType.OpenAi) {
-        expect(editAction).toHaveBeenCalledWith('subActionParams', { body: DEFAULT_BODY }, 0);
+        expect(editAction).toHaveBeenCalledWith('subActionParams', { body }, 0);
       }
       if (apiProvider === OpenAiProviderType.AzureAi) {
-        expect(editAction).toHaveBeenCalledWith('subActionParams', { body: DEFAULT_BODY_AZURE }, 0);
+        expect(editAction).toHaveBeenCalledWith('subActionParams', { body }, 0);
       }
       if (apiProvider === OpenAiProviderType.Other) {
-        expect(editAction).toHaveBeenCalledWith('subActionParams', { body: DEFAULT_BODY }, 0);
+        expect(editAction).toHaveBeenCalledWith('subActionParams', { body }, 0);
       }
     }
   );
```
```diff
@@ -12,8 +12,8 @@ import {
   ActionConnectorMode,
   JsonEditorWithMessageVariables,
 } from '@kbn/triggers-actions-ui-plugin/public';
-import { OpenAiProviderType, SUB_ACTION } from '../../../common/openai/constants';
-import { DEFAULT_BODY, DEFAULT_BODY_AZURE } from './constants';
+import { SUB_ACTION } from '../../../common/openai/constants';
+import { getDefaultBody } from './constants';
 import { OpenAIActionConnector, ActionParams } from './types';
 
 const ParamsFields: React.FunctionComponent<ActionParamsProps<ActionParams>> = ({
@@ -41,16 +41,10 @@ const ParamsFields: React.FunctionComponent<ActionParamsProps<ActionParams>> = (
 
   useEffect(() => {
     if (!subActionParams) {
-      // default to OpenAiProviderType.OpenAi sample data
-      let sampleBody = DEFAULT_BODY;
-
-      if (typedActionConnector?.config?.apiProvider === OpenAiProviderType.AzureAi) {
-        // update sample data if AzureAi
-        sampleBody = DEFAULT_BODY_AZURE;
-      }
+      const sampleBody = getDefaultBody(typedActionConnector?.config);
       editAction('subActionParams', { body: sampleBody }, index);
     }
-  }, [typedActionConnector?.config?.apiProvider, editAction, index, subActionParams]);
+  }, [typedActionConnector?.config, editAction, index, subActionParams]);
 
   const editSubActionParams = useCallback(
     (params: ActionParams['subActionParams']) => {
```
```diff
@@ -18,6 +18,7 @@ export interface ActionParams {
 export interface Config {
   apiProvider: OpenAiProviderType;
   apiUrl: string;
+  defaultModel?: string;
 }
 
 export interface Secrets {
```
