From 9894558034582bf2bb6b822ce8e9ec4271c911c0 Mon Sep 17 00:00:00 2001
From: Ievgen Sorokopud
Date: Sat, 14 Dec 2024 09:54:54 +0100
Subject: [PATCH] [Security Solution] AI Assistant: LLM Connector model chooser
 bug. New chat does not use connector's model (#199303) (#204014)

## Summary

The PR fixes [this bug](https://github.com/elastic/kibana/issues/199303).

The issue happens with some of the locally set up LLMs (like
[Ollama](https://github.com/ollama/ollama)), which require the correct `model` to be
passed as part of the
[chat completions API](https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion).

We had a bug where, on new conversation creation, we did not pass the full connector
configuration; only `connectorId` and `actionTypeId` were passed. Here is the old
implementation:

```
const newConversation = await createConversation({
  title: NEW_CHAT,
  ...(currentConversation?.apiConfig != null &&
  currentConversation?.apiConfig?.actionTypeId != null
    ? {
        apiConfig: {
          connectorId: currentConversation.apiConfig.connectorId,
          actionTypeId: currentConversation.apiConfig.actionTypeId,
          ...(newSystemPrompt?.id != null
            ? { defaultSystemPromptId: newSystemPrompt.id }
            : {}),
        },
      }
    : {}),
});
```

As a result, the new conversation was missing the rest of the connector configuration,
and the default model (`gpt-4o`) was sent to the LLM instead of the connector's model.
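The new implementation (see the `use_current_conversation/index.tsx` diff below) carries
the complete `apiConfig` over to the new conversation and only swaps in the new default
system prompt:

```
let conversation: Partial<Conversation> = {};
if (currentConversation?.apiConfig) {
  const { defaultSystemPromptId: _, ...restApiConfig } = currentConversation?.apiConfig;
  conversation =
    restApiConfig.actionTypeId != null
      ? {
          apiConfig: {
            ...restApiConfig,
            ...(newSystemPrompt?.id != null
              ? { defaultSystemPromptId: newSystemPrompt.id }
              : {}),
          },
        }
      : {};
}

const newConversation = await createConversation({
  title: NEW_CHAT,
  ...conversation,
});
```

Because `...restApiConfig` spreads every remaining field of the existing `apiConfig`,
connector-specific settings such as the configured model are preserved on the new
conversation.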
I also updated the default body we use on the Test connector page, to make sure we send
a `model` parameter to the LLM for `Open AI > Other (OpenAI Compatible Service)`
connectors.
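With that change, the sample body for an `Other (OpenAI Compatible Service)` connector
includes the connector's configured default model. For illustration, assuming a
connector whose default model is `llama3` (a placeholder value, not part of this
change), the generated test body would be:

```
{
  "model": "llama3",
  "messages": [{
    "role":"user",
    "content":"Hello world"
  }]
}
```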
### Testing notes

Steps to reproduce:

1. Install [Ollama](https://github.com/ollama/ollama?tab=readme-ov-file#ollama) locally
2. Set up an OpenAI connector using the Other (OpenAI Compatible Service) provider
3. Open the AI Assistant and select the created Ollama connector for the chat
4. Create a "New Chat"
5. The Ollama connector should be selected
6. Send a message to the LLM (for example "hello world")

Expected: there should be no errors like `ActionsClientChatOpenAI: an error occurred while running the action - Unexpected API Error: - 404 model "gpt-4o" not found, try pulling it first`

(cherry picked from commit 7e4e8592f45ceca822c4f34d18e9f047cfe3cde0)
---
 .../use_current_conversation/index.tsx        | 26 ++++++++++-------
 .../connector_types/openai/constants.tsx      | 29 +++++++++++++++++--
 .../connector_types/openai/params.test.tsx    |  9 +++---
 .../public/connector_types/openai/params.tsx  | 14 +++------
 .../public/connector_types/openai/types.ts    |  1 +
 5 files changed, 53 insertions(+), 26 deletions(-)

diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/use_current_conversation/index.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant/use_current_conversation/index.tsx
index 267c39c402a1c..ab5e5532a19cb 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/use_current_conversation/index.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/use_current_conversation/index.tsx
@@ -265,18 +265,24 @@ export const useCurrentConversation = ({
     }
     const newSystemPrompt = getDefaultNewSystemPrompt(allSystemPrompts);
+    let conversation: Partial<Conversation> = {};
+    if (currentConversation?.apiConfig) {
+      const { defaultSystemPromptId: _, ...restApiConfig } = currentConversation?.apiConfig;
+      conversation =
+        restApiConfig.actionTypeId != null
+          ? {
+              apiConfig: {
+                ...restApiConfig,
+                ...(newSystemPrompt?.id != null
+                  ? { defaultSystemPromptId: newSystemPrompt.id }
+                  : {}),
+              },
+            }
+          : {};
+    }
     const newConversation = await createConversation({
       title: NEW_CHAT,
-      ...(currentConversation?.apiConfig != null &&
-      currentConversation?.apiConfig?.actionTypeId != null
-        ? {
-            apiConfig: {
-              connectorId: currentConversation.apiConfig.connectorId,
-              actionTypeId: currentConversation.apiConfig.actionTypeId,
-              ...(newSystemPrompt?.id != null ? { defaultSystemPromptId: newSystemPrompt.id } : {}),
-            },
-          }
-        : {}),
+      ...conversation,
     });
 
     if (newConversation) {
diff --git a/x-pack/plugins/stack_connectors/public/connector_types/openai/constants.tsx b/x-pack/plugins/stack_connectors/public/connector_types/openai/constants.tsx
index 5f4238e52af78..a24db86804f95 100644
--- a/x-pack/plugins/stack_connectors/public/connector_types/openai/constants.tsx
+++ b/x-pack/plugins/stack_connectors/public/connector_types/openai/constants.tsx
@@ -11,23 +11,48 @@ import { FormattedMessage } from '@kbn/i18n-react';
 import { EuiLink } from '@elastic/eui';
 import { DEFAULT_OPENAI_MODEL, OpenAiProviderType } from '../../../common/openai/constants';
 import * as i18n from './translations';
+import { Config } from './types';
 
 export const DEFAULT_URL = 'https://api.openai.com/v1/chat/completions' as const;
 export const DEFAULT_URL_AZURE =
   'https://{your-resource-name}.openai.azure.com/openai/deployments/{deployment-id}/chat/completions?api-version={api-version}' as const;
 
-export const DEFAULT_BODY = `{
+const DEFAULT_BODY = `{
   "messages": [{
     "role":"user",
     "content":"Hello world"
   }]
 }`;
-export const DEFAULT_BODY_AZURE = `{
+const DEFAULT_BODY_AZURE = `{
   "messages": [{
     "role":"user",
     "content":"Hello world"
   }]
 }`;
+const DEFAULT_BODY_OTHER = (defaultModel: string) => `{
+  "model": "${defaultModel}",
+  "messages": [{
+    "role":"user",
+    "content":"Hello world"
+  }]
+}`;
+
+export const getDefaultBody = (config?: Config) => {
+  if (!config) {
+    // default to OpenAiProviderType.OpenAi sample data
+    return DEFAULT_BODY;
+  }
+  if (config?.apiProvider === OpenAiProviderType.Other) {
+    // update sample data if Other (OpenAI Compatible Service)
+    return config.defaultModel ? DEFAULT_BODY_OTHER(config.defaultModel) : DEFAULT_BODY;
+  }
+  if (config?.apiProvider === OpenAiProviderType.AzureAi) {
+    // update sample data if AzureAi
+    return DEFAULT_BODY_AZURE;
+  }
+  // default to OpenAiProviderType.OpenAi sample data
+  return DEFAULT_BODY;
+};
 
 export const openAiConfig: ConfigFieldSchema[] = [
   {
diff --git a/x-pack/plugins/stack_connectors/public/connector_types/openai/params.test.tsx b/x-pack/plugins/stack_connectors/public/connector_types/openai/params.test.tsx
index 7539cc6bf6373..c03582ba0b229 100644
--- a/x-pack/plugins/stack_connectors/public/connector_types/openai/params.test.tsx
+++ b/x-pack/plugins/stack_connectors/public/connector_types/openai/params.test.tsx
@@ -9,7 +9,7 @@ import React from 'react';
 import { fireEvent, render } from '@testing-library/react';
 import ParamsFields from './params';
 import { OpenAiProviderType, SUB_ACTION } from '../../../common/openai/constants';
-import { DEFAULT_BODY, DEFAULT_BODY_AZURE, DEFAULT_URL } from './constants';
+import { DEFAULT_URL, getDefaultBody } from './constants';
 
 const messageVariables = [
   {
@@ -73,14 +73,15 @@ describe('Gen AI Params Fields renders', () => {
       );
       expect(editAction).toHaveBeenCalledTimes(2);
       expect(editAction).toHaveBeenCalledWith('subAction', SUB_ACTION.RUN, 0);
+      const body = getDefaultBody(actionConnector.config);
       if (apiProvider === OpenAiProviderType.OpenAi) {
-        expect(editAction).toHaveBeenCalledWith('subActionParams', { body: DEFAULT_BODY }, 0);
+        expect(editAction).toHaveBeenCalledWith('subActionParams', { body }, 0);
       }
       if (apiProvider === OpenAiProviderType.AzureAi) {
-        expect(editAction).toHaveBeenCalledWith('subActionParams', { body: DEFAULT_BODY_AZURE }, 0);
+        expect(editAction).toHaveBeenCalledWith('subActionParams', { body }, 0);
       }
       if (apiProvider === OpenAiProviderType.Other) {
-        expect(editAction).toHaveBeenCalledWith('subActionParams', { body: DEFAULT_BODY }, 0);
+        expect(editAction).toHaveBeenCalledWith('subActionParams', { body }, 0);
       }
     }
   );
diff --git a/x-pack/plugins/stack_connectors/public/connector_types/openai/params.tsx b/x-pack/plugins/stack_connectors/public/connector_types/openai/params.tsx
index ad4398482d2c8..000abfa4872be 100644
--- a/x-pack/plugins/stack_connectors/public/connector_types/openai/params.tsx
+++ b/x-pack/plugins/stack_connectors/public/connector_types/openai/params.tsx
@@ -12,8 +12,8 @@ import {
   ActionConnectorMode,
   JsonEditorWithMessageVariables,
 } from '@kbn/triggers-actions-ui-plugin/public';
-import { OpenAiProviderType, SUB_ACTION } from '../../../common/openai/constants';
-import { DEFAULT_BODY, DEFAULT_BODY_AZURE } from './constants';
+import { SUB_ACTION } from '../../../common/openai/constants';
+import { getDefaultBody } from './constants';
 import { OpenAIActionConnector, ActionParams } from './types';
 
 const ParamsFields: React.FunctionComponent<ActionParamsProps<ActionParams>> = ({
@@ -41,16 +41,10 @@ const ParamsFields: React.FunctionComponent<ActionParamsProps<ActionParams>> = ({
 
   useEffect(() => {
     if (!subActionParams) {
-      // default to OpenAiProviderType.OpenAi sample data
-      let sampleBody = DEFAULT_BODY;
-
-      if (typedActionConnector?.config?.apiProvider === OpenAiProviderType.AzureAi) {
-        // update sample data if AzureAi
-        sampleBody = DEFAULT_BODY_AZURE;
-      }
+      const sampleBody = getDefaultBody(typedActionConnector?.config);
       editAction('subActionParams', { body: sampleBody }, index);
     }
-  }, [typedActionConnector?.config?.apiProvider, editAction, index, subActionParams]);
+  }, [typedActionConnector?.config, editAction, index, subActionParams]);
 
   const editSubActionParams = useCallback(
     (params: ActionParams['subActionParams']) => {
diff --git a/x-pack/plugins/stack_connectors/public/connector_types/openai/types.ts b/x-pack/plugins/stack_connectors/public/connector_types/openai/types.ts
index 3ba19c04d13a7..ea37fee0de879 100644
--- a/x-pack/plugins/stack_connectors/public/connector_types/openai/types.ts
+++ b/x-pack/plugins/stack_connectors/public/connector_types/openai/types.ts
@@ -18,6 +18,7 @@ export interface ActionParams {
 export interface Config {
   apiProvider: OpenAiProviderType;
   apiUrl: string;
+  defaultModel?: string;
 }
 
 export interface Secrets {