From 4a57e40093b323f29833905224a95099582a4859 Mon Sep 17 00:00:00 2001
From: Sandra G
Date: Tue, 1 Oct 2024 11:50:42 -0400
Subject: [PATCH] [8.x][Obs AI Assistant] Serverless API integration tests (#192219) (#194582)

Tests for serverless: copies over and modifies all tests from the stateful suite so they work in serverless.

~~Deployment-agnostic tests do not yet support an enterprise license for stateful, so our tests don't yet qualify as deployment agnostic.~~

Because it is difficult to see the differences from the stateful tests, I've added PR comments wherever I changed something that might be of interest.

- changes `createObservabilityAIAssistantApiClient` to use supertest without basic auth, to accept headers for serverless, and to use roles (see the condensed sketch after the follow-up list below)
- removes creating persisted users when tests start and instead [uses roles](https://github.com/elastic/kibana/blob/main/x-pack/test_serverless/README.md#roles-based-testing) within tests. It is not possible to create custom users with the serverless test framework at the moment; see https://github.com/elastic/kibana/issues/192711

Skipped tests:

- knowledge base tests: https://github.com/elastic/kibana/issues/192886
- any test suite that uses the LLM proxy is skipped on MKI: https://github.com/elastic/kibana/issues/192751
- all tests that depend on `config.modelId` are skipped on MKI: https://github.com/elastic/kibana/issues/192757

TODO:

- [x] move over remaining tests
- [x] test in MKI environment before merging
- [x] create issues for skipped tests
- [ ] this will not run on MKI (after merging) unless we ping the appex-qa team to add it to the pipeline, because it uses a separate config. Ask the appex-qa team to add our config.

Followup / related issues to be tracked in a newly created issue:

- [ ] https://github.com/elastic/kibana/issues/192757
- [ ] https://github.com/elastic/kibana/issues/192886
- [ ] https://github.com/elastic/kibana/issues/192751
- [ ] https://github.com/elastic/kibana/issues/192701
- [ ] https://github.com/elastic/kibana/issues/192497
- [ ] https://github.com/elastic/kibana/issues/192711
- [ ] https://github.com/elastic/kibana/issues/192718
- [ ] serverless functional tests
- [ ] inquire with the ml-ui team about the ability to delete system indices (the `.ml` indices), which we do after uninstalling tiny ELSER
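For reviewers who haven't used the role-based serverless FTR setup, below is a condensed sketch of how the new tests authenticate, adapted from this PR's `connectors.spec.ts` (import paths are relative to the new test directory, and unrelated assertions are trimmed). A role-scoped M2M API key plus the internal request header replace the basic auth and persisted users used in the stateful suite.

```ts
// Condensed from the new connectors.spec.ts added in this PR: role-scoped API key
// auth replaces basic auth / persisted users in the serverless tests.
import { FtrProviderContext } from '../../common/ftr_provider_context';
import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services';

export default function ApiTest({ getService }: FtrProviderContext) {
  const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient');
  const svlUserManager = getService('svlUserManager');
  const svlCommonApi = getService('svlCommonApi');

  describe('List connectors', () => {
    let roleAuthc: RoleCredentials;
    let internalReqHeader: InternalRequestHeader;

    before(async () => {
      // M2M API key scoped to the built-in 'editor' role; no custom users are created.
      roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('editor');
      internalReqHeader = svlCommonApi.getInternalRequestHeader();
    });

    after(async () => {
      await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc);
    });

    it('returns a 2xx when called with the role-scoped API key', async () => {
      await observabilityAIAssistantAPIClient
        .slsUser({
          endpoint: 'GET /internal/observability_ai_assistant/connectors',
          roleAuthc,
          internalReqHeader,
        })
        .expect(200);
    });
  });
}
```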
### Checklist

Delete any items that are not applicable to this PR.

- [ ] Any text added follows [EUI's writing guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses sentence case text and includes [i18n support](https://github.com/elastic/kibana/blob/main/packages/kbn-i18n/README.md)
- [ ] [Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html) was added for features that require explanation or tutorials
- [ ] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios
- [ ] [Flaky Test Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was used on any tests changed
- [ ] Any UI touched in this PR is usable by keyboard only (learn more about [keyboard accessibility](https://webaim.org/techniques/keyboard/))
- [ ] Any UI touched in this PR does not create any new axe failures (run axe in browser: [FF](https://addons.mozilla.org/en-US/firefox/addon/axe-devtools/), [Chrome](https://chrome.google.com/webstore/detail/axe-web-accessibility-tes/lhdoppojpmngadmnindnejefpokejbdd?hl=en-US))
- [ ] If a plugin configuration key changed, check if it needs to be allowlisted in the cloud and added to the [docker list](https://github.com/elastic/kibana/blob/main/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker)
- [ ] This renders correctly on smaller devices using a responsive layout. (You can test this [in your browser](https://www.browserstack.com/guide/responsive-testing-on-local-server))
- [ ] This was checked for [cross-browser compatibility](https://www.elastic.co/support/matrix#matrix_browsers)

### Risk Matrix

Delete this section if it is not applicable to this PR.

Before closing this PR, invite QA, stakeholders, and other developers to identify risks that should be tested prior to the change/feature release.

When forming the risk matrix, consider some of the following examples and how they may potentially impact the change:

| Risk | Probability | Severity | Mitigation/Notes |
|---------------------------|-------------|----------|-------------------------|
| Multiple Spaces—unexpected behavior in non-default Kibana Space. | Low | High | Integration tests will verify that all features are still supported in non-default Kibana Space and when user switches between spaces. |
| Multiple nodes—Elasticsearch polling might have race conditions when multiple Kibana nodes are polling for the same tasks. | High | Low | Tasks are idempotent, so executing them multiple times will not result in logical error, but will degrade performance. To test for this case we add plenty of unit tests around this logic and document manual testing procedure. |
| Code should gracefully handle cases when feature X or plugin Y are disabled. | Medium | High | Unit tests will verify that any feature flag or plugin combination still results in our service operational.
| | [See more potential risk examples](https://github.com/elastic/kibana/blob/main/RISK_MATRIX.mdx) | ### For maintainers - [ ] This was checked for breaking API changes and was [labeled appropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process) --- .buildkite/ftr_oblt_serverless_configs.yml | 1 + .../common/conversation_complete.ts | 3 +- .../service/knowledge_base_service/index.ts | 16 +- .../tests/knowledge_base/helpers.ts | 3 +- .../ai_assistant/common/action_connectors.ts | 77 +++ .../common/ftr_provider_context.ts | 18 + .../observability_ai_assistant_api_client.ts | 191 +++++++ .../observability/ai_assistant/config.ts | 34 ++ .../ai_assistant/tests/chat/chat.spec.ts | 215 +++++++ .../tests/complete/complete.spec.ts | 528 ++++++++++++++++++ .../complete/functions/elasticsearch.spec.ts | 126 +++++ .../tests/complete/functions/helpers.ts | 75 +++ .../complete/functions/summarize.spec.ts | 96 ++++ .../tests/connectors/connectors.spec.ts | 117 ++++ .../tests/conversations/conversations.spec.ts | 291 ++++++++++ .../tests/conversations/helpers.ts | 106 ++++ .../observability/ai_assistant/tests/index.ts | 26 + .../tests/knowledge_base/helpers.ts | 20 + .../knowledge_base/knowledge_base.spec.ts | 278 +++++++++ .../knowledge_base_setup.spec.ts | 54 ++ .../knowledge_base_status.spec.ts | 75 +++ .../knowledge_base_user_instructions.spec.ts | 354 ++++++++++++ .../public_complete/public_complete.spec.ts | 319 +++++++++++ 23 files changed, 3013 insertions(+), 10 deletions(-) create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/common/action_connectors.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/common/ftr_provider_context.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/common/observability_ai_assistant_api_client.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/config.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/chat/chat.spec.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/complete.spec.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/functions/elasticsearch.spec.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/functions/helpers.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/functions/summarize.spec.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/connectors/connectors.spec.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/conversations/conversations.spec.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/conversations/helpers.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/index.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/helpers.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base.spec.ts create mode 100644 
x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_setup.spec.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_status.spec.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_user_instructions.spec.ts create mode 100644 x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/public_complete/public_complete.spec.ts diff --git a/.buildkite/ftr_oblt_serverless_configs.yml b/.buildkite/ftr_oblt_serverless_configs.yml index 422c548c28ae7..9c836bfaf376d 100644 --- a/.buildkite/ftr_oblt_serverless_configs.yml +++ b/.buildkite/ftr_oblt_serverless_configs.yml @@ -9,6 +9,7 @@ disabled: - x-pack/test_serverless/api_integration/test_suites/observability/config.feature_flags.ts - x-pack/test_serverless/api_integration/test_suites/observability/common_configs/config.group1.ts - x-pack/test_serverless/api_integration/test_suites/observability/fleet/config.ts + - x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/config.ts - x-pack/test_serverless/functional/test_suites/observability/config.ts - x-pack/test_serverless/functional/test_suites/observability/config.examples.ts - x-pack/test_serverless/functional/test_suites/observability/config.feature_flags.ts diff --git a/x-pack/plugins/observability_solution/observability_ai_assistant/common/conversation_complete.ts b/x-pack/plugins/observability_solution/observability_ai_assistant/common/conversation_complete.ts index c1bd72e285c72..3c4e2cd609f8b 100644 --- a/x-pack/plugins/observability_solution/observability_ai_assistant/common/conversation_complete.ts +++ b/x-pack/plugins/observability_solution/observability_ai_assistant/common/conversation_complete.ts @@ -104,7 +104,8 @@ export type StreamingChatResponseEvent = | ConversationUpdateEvent | MessageAddEvent | ChatCompletionErrorEvent - | TokenCountEvent; + | TokenCountEvent + | BufferFlushEvent; export type StreamingChatResponseEventWithoutError = Exclude< StreamingChatResponseEvent, diff --git a/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/knowledge_base_service/index.ts b/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/knowledge_base_service/index.ts index 679d59a57dbc8..ee977b30f5cc7 100644 --- a/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/knowledge_base_service/index.ts +++ b/x-pack/plugins/observability_solution/observability_ai_assistant/server/service/knowledge_base_service/index.ts @@ -47,8 +47,8 @@ export interface RecalledEntry { function isModelMissingOrUnavailableError(error: Error) { return ( error instanceof errors.ResponseError && - (error.body.error.type === 'resource_not_found_exception' || - error.body.error.type === 'status_exception') + (error.body?.error?.type === 'resource_not_found_exception' || + error.body?.error?.type === 'status_exception') ); } function isCreateModelValidationError(error: Error) { @@ -127,7 +127,7 @@ export class KnowledgeBaseService { }; const installModel = async () => { - this.dependencies.logger.info('Installing ELSER model'); + this.dependencies.logger.info(`Installing ${elserModelId} model`); try { await this.dependencies.esClient.asInternalUser.ml.putTrainedModel( { @@ -146,12 +146,12 @@ export class KnowledgeBaseService { throw error; } } - 
this.dependencies.logger.info('Finished installing ELSER model'); + this.dependencies.logger.info(`Finished installing ${elserModelId} model`); }; const pollForModelInstallCompleted = async () => { await pRetry(async () => { - this.dependencies.logger.info('Polling installation of ELSER model'); + this.dependencies.logger.info(`Polling installation of ${elserModelId} model`); const modelInstalledAndReady = await isModelInstalledAndReady(); if (!modelInstalledAndReady) { throwKnowledgeBaseNotReady({ @@ -169,7 +169,7 @@ export class KnowledgeBaseService { wait_for: 'fully_allocated', }); } catch (error) { - this.dependencies.logger.debug('Error starting model deployment'); + this.dependencies.logger.debug(`Error starting ${elserModelId} model deployment`); this.dependencies.logger.debug(error); if (!isModelMissingOrUnavailableError(error)) { throw error; @@ -191,13 +191,13 @@ export class KnowledgeBaseService { return Promise.resolve(); } - this.dependencies.logger.debug('Model is not allocated yet'); + this.dependencies.logger.debug(`${elserModelId} model is not allocated yet`); this.dependencies.logger.debug(() => JSON.stringify(response)); throw gatewayTimeout(); }, retryOptions); - this.dependencies.logger.info('Model is ready'); + this.dependencies.logger.info(`${elserModelId} model is ready`); this.ensureTaskScheduled(); }; diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/helpers.ts b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/helpers.ts index 1818203f737c0..91286dab811fc 100644 --- a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/helpers.ts +++ b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/helpers.ts @@ -21,8 +21,9 @@ export async function createKnowledgeBaseModel(ml: ReturnType) { diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/common/action_connectors.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/common/action_connectors.ts new file mode 100644 index 0000000000000..297a110ad2211 --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/common/action_connectors.ts @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { ToolingLog } from '@kbn/tooling-log'; +import type { + InternalRequestHeader, + RoleCredentials, + SupertestWithoutAuthProviderType, +} from '../../../../../shared/services'; + +export async function deleteActionConnector({ + supertest, + connectorId, + log, + roleAuthc, + internalReqHeader, +}: { + supertest: SupertestWithoutAuthProviderType; + connectorId: string; + log: ToolingLog; + roleAuthc: RoleCredentials; + internalReqHeader: InternalRequestHeader; +}) { + try { + await supertest + .delete(`/api/actions/connector/${connectorId}`) + .set(roleAuthc.apiKeyHeader) + .set(internalReqHeader) + .expect(204); + } catch (e) { + log.error(`Failed to delete action connector with id ${connectorId} due to: ${e}`); + throw e; + } +} + +export async function createProxyActionConnector({ + log, + supertest, + port, + roleAuthc, + internalReqHeader, +}: { + log: ToolingLog; + supertest: SupertestWithoutAuthProviderType; + port: number; + roleAuthc: RoleCredentials; + internalReqHeader: InternalRequestHeader; +}) { + try { + const res = await supertest + .post('/api/actions/connector') + .set(roleAuthc.apiKeyHeader) + .set(internalReqHeader) + .send({ + name: 'OpenAI Proxy', + connector_type_id: '.gen-ai', + config: { + apiProvider: 'OpenAI', + apiUrl: `http://localhost:${port}`, + }, + secrets: { + apiKey: 'my-api-key', + }, + }) + .expect(200); + + const connectorId = res.body.id as string; + return connectorId; + } catch (e) { + log.error(`Failed to create action connector due to: ${e}`); + throw e; + } +} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/common/ftr_provider_context.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/common/ftr_provider_context.ts new file mode 100644 index 0000000000000..dc8dbbed7536e --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/common/ftr_provider_context.ts @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { GenericFtrProviderContext } from '@kbn/test'; +import { InheritedServices, InheritedFtrProviderContext } from '../../../../services'; +import { ObservabilityAIAssistantApiClient } from './observability_ai_assistant_api_client'; + +export type ObservabilityAIAssistantServices = InheritedServices & { + observabilityAIAssistantAPIClient: ( + context: InheritedFtrProviderContext + ) => Promise; +}; + +export type FtrProviderContext = GenericFtrProviderContext; diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/common/observability_ai_assistant_api_client.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/common/observability_ai_assistant_api_client.ts new file mode 100644 index 0000000000000..bd54243ab93f6 --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/common/observability_ai_assistant_api_client.ts @@ -0,0 +1,191 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import type { + APIReturnType, + ObservabilityAIAssistantAPIClientRequestParamsOf, + ObservabilityAIAssistantAPIEndpoint, +} from '@kbn/observability-ai-assistant-plugin/public'; +import { formatRequest } from '@kbn/server-route-repository'; +import supertest from 'supertest'; +import { Subtract } from 'utility-types'; +import { format } from 'url'; +import { Config } from '@kbn/test'; +import { InheritedFtrProviderContext } from '../../../../services'; +import type { InternalRequestHeader, RoleCredentials } from '../../../../../shared/services'; + +export function getObservabilityAIAssistantApiClient({ + svlSharedConfig, +}: { + svlSharedConfig: Config; +}) { + const kibanaServer = svlSharedConfig.get('servers.kibana'); + const cAuthorities = svlSharedConfig.get('servers.kibana.certificateAuthorities'); + + const url = format({ + ...kibanaServer, + auth: false, // don't use auth in serverless + }); + + return createObservabilityAIAssistantApiClient(supertest.agent(url, { ca: cAuthorities })); +} + +type ObservabilityAIAssistantApiClientKey = 'slsUser'; +export type ObservabilityAIAssistantApiClient = Record< + ObservabilityAIAssistantApiClientKey, + Awaited> +>; +export function createObservabilityAIAssistantApiClient(st: supertest.Agent) { + return ( + options: { + type?: 'form-data'; + endpoint: TEndpoint; + roleAuthc: RoleCredentials; + internalReqHeader: InternalRequestHeader; + } & ObservabilityAIAssistantAPIClientRequestParamsOf & { + params?: { query?: { _inspect?: boolean } }; + } + ): SupertestReturnType => { + const { endpoint, type, roleAuthc, internalReqHeader } = options; + + const params = 'params' in options ? (options.params as Record) : {}; + + const { method, pathname, version } = formatRequest(endpoint, params.path); + const url = format({ pathname, query: params?.query }); + + const headers: Record = { ...internalReqHeader, ...roleAuthc.apiKeyHeader }; + + if (version) { + headers['Elastic-Api-Version'] = version; + } + + let res: supertest.Test; + if (type === 'form-data') { + const fields: Array<[string, any]> = Object.entries(params.body); + const formDataRequest = st[method](url) + .set(headers) + .set('Content-type', 'multipart/form-data'); + for (const field of fields) { + void formDataRequest.field(field[0], field[1]); + } + + res = formDataRequest; + } else if (params.body) { + res = st[method](url).send(params.body).set(headers); + } else { + res = st[method](url).set(headers); + } + + return res as unknown as SupertestReturnType; + }; +} + +export type ObservabilityAIAssistantAPIClient = ReturnType< + typeof createObservabilityAIAssistantApiClient +>; + +type WithoutPromise> = Subtract>; + +// this is a little intense, but without it, method overrides are lost +// e.g., { +// end(one:string) +// end(one:string, two:string) +// } +// would lose the first signature. This keeps up to eight signatures. +type OverloadedParameters = T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; +} + ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 + : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + } + ? 
A1 | A2 | A3 | A4 | A5 | A6 | A7 + : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + } + ? A1 | A2 | A3 | A4 | A5 | A6 + : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + } + ? A1 | A2 | A3 | A4 | A5 + : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + } + ? A1 | A2 | A3 | A4 + : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + } + ? A1 | A2 | A3 + : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + } + ? A1 | A2 + : T extends (...args: infer A) => any + ? A + : any; + +type OverrideReturnType any, TNextReturnType> = ( + ...args: OverloadedParameters +) => WithoutPromise> & TNextReturnType; + +type OverwriteThisMethods, TNextReturnType> = TNextReturnType & { + [key in keyof T]: T[key] extends (...args: infer TArgs) => infer TReturnType + ? TReturnType extends Promise + ? OverrideReturnType + : (...args: TArgs) => TReturnType + : T[key]; +}; + +export type SupertestReturnType = + OverwriteThisMethods< + WithoutPromise, + Promise<{ + text: string; + status: number; + body: APIReturnType; + }> + >; + +export async function getObservabilityAIAssistantApiClientService({ + getService, +}: InheritedFtrProviderContext): Promise { + const svlSharedConfig = getService('config'); + // defaults to elastic_admin user when used without auth + return { + slsUser: await getObservabilityAIAssistantApiClient({ + svlSharedConfig, + }), + }; +} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/config.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/config.ts new file mode 100644 index 0000000000000..36a60f29cb6ce --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/config.ts @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { SUPPORTED_TRAINED_MODELS } from '@kbn/test-suites-xpack/functional/services/ml/api'; +import { createTestConfig } from '../../../config.base'; +import { ObservabilityAIAssistantServices } from './common/ftr_provider_context'; +import { services as inheritedServices } from '../../../services'; +import { getObservabilityAIAssistantApiClientService } from './common/observability_ai_assistant_api_client'; + +export const services: ObservabilityAIAssistantServices = { + ...inheritedServices, + observabilityAIAssistantAPIClient: getObservabilityAIAssistantApiClientService, +}; + +export default createTestConfig({ + serverlessProject: 'oblt', + testFiles: [require.resolve('./tests')], + junit: { + reportName: 'Observability AI Assistant API Integration tests', + }, + suiteTags: { exclude: ['skipSvlOblt'] }, + services, + + // include settings from project controller + // https://github.com/elastic/project-controller/blob/main/internal/project/observability/config/elasticsearch.yml + esServerArgs: ['xpack.ml.dfa.enabled=false'], + kbnServerArgs: [ + `--xpack.observabilityAIAssistant.modelId=${SUPPORTED_TRAINED_MODELS.TINY_ELSER.name}`, + ], +}); diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/chat/chat.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/chat/chat.spec.ts new file mode 100644 index 0000000000000..d30839b60b0f1 --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/chat/chat.spec.ts @@ -0,0 +1,215 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import expect from '@kbn/expect'; +import { MessageRole, type Message } from '@kbn/observability-ai-assistant-plugin/common'; +import { PassThrough } from 'stream'; +import { + LlmProxy, + createLlmProxy, +} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/common/create_llm_proxy'; +import { FtrProviderContext } from '../../common/ftr_provider_context'; +import { createProxyActionConnector, deleteActionConnector } from '../../common/action_connectors'; +import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services'; + +export default function ApiTest({ getService }: FtrProviderContext) { + const supertestWithoutAuth = getService('supertestWithoutAuth'); + const svlUserManager = getService('svlUserManager'); + const svlCommonApi = getService('svlCommonApi'); + const log = getService('log'); + + const CHAT_API_URL = `/internal/observability_ai_assistant/chat`; + + const messages: Message[] = [ + { + '@timestamp': new Date().toISOString(), + message: { + role: MessageRole.System, + content: 'You are a helpful assistant', + }, + }, + { + '@timestamp': new Date().toISOString(), + message: { + role: MessageRole.User, + content: 'Good morning!', + }, + }, + ]; + + describe('/internal/observability_ai_assistant/chat', function () { + // TODO: https://github.com/elastic/kibana/issues/192751 + this.tags(['skipMKI']); + let proxy: LlmProxy; + let connectorId: string; + let roleAuthc: RoleCredentials; + let internalReqHeader: InternalRequestHeader; + + before(async () => { + roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('editor'); + internalReqHeader = svlCommonApi.getInternalRequestHeader(); + proxy = await createLlmProxy(log); + connectorId = await createProxyActionConnector({ + supertest: supertestWithoutAuth, + log, + port: proxy.getPort(), + roleAuthc, + internalReqHeader, + }); + }); + + after(async () => { + proxy.close(); + await deleteActionConnector({ + supertest: supertestWithoutAuth, + connectorId, + log, + roleAuthc, + internalReqHeader, + }); + await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc); + }); + + it("returns a 4xx if the connector doesn't exist", async () => { + await supertestWithoutAuth + .post(CHAT_API_URL) + .set(roleAuthc.apiKeyHeader) + .set(internalReqHeader) + .send({ + name: 'my_api_call', + messages, + connectorId: 'does not exist', + functions: [], + scope: 'all', + }) + .expect(404); + }); + + it('returns a streaming response from the server', async () => { + const NUM_RESPONSES = 5; + + await Promise.race([ + new Promise((resolve, reject) => { + setTimeout(() => { + reject(new Error('Test timed out')); + }, 5000); + }), + new Promise((resolve, reject) => { + async function runTest() { + const interceptor = proxy.intercept('conversation', () => true); + const receivedChunks: Array> = []; + + const passThrough = new PassThrough(); + supertestWithoutAuth + .post(CHAT_API_URL) + .set(roleAuthc.apiKeyHeader) + .set(internalReqHeader) + .on('error', reject) + .send({ + name: 'my_api_call', + messages, + connectorId, + functions: [], + scope: 'all', + }) + .pipe(passThrough); + + const simulator = await interceptor.waitForIntercept(); + + passThrough.on('data', (chunk) => { + receivedChunks.push(JSON.parse(chunk.toString())); + }); + + for (let i = 0; i < NUM_RESPONSES; i++) { + await simulator.next(`Part: i\n`); + } + + await simulator.complete(); + + await new Promise((innerResolve) => passThrough.on('end', () => innerResolve())); + + const chatCompletionChunks = 
receivedChunks.filter( + (chunk) => chunk.type === 'chatCompletionChunk' + ); + expect(chatCompletionChunks).to.have.length( + NUM_RESPONSES, + `received number of chat completion chunks did not match expected. This might be because of a 4xx or 5xx: ${JSON.stringify( + chatCompletionChunks, + null, + 2 + )}` + ); + + const tokenCountChunk = receivedChunks.find((chunk) => chunk.type === 'tokenCount'); + expect(tokenCountChunk).to.eql( + { + type: 'tokenCount', + tokens: { completion: 20, prompt: 33, total: 53 }, + }, + `received token count chunk did not match expected` + ); + } + + runTest().then(resolve, reject); + }), + ]); + }); + + it('returns a useful error if the request fails', async () => { + const interceptor = proxy.intercept('conversation', () => true); + + const passThrough = new PassThrough(); + + supertestWithoutAuth + .post(CHAT_API_URL) + .set(roleAuthc.apiKeyHeader) + .set(internalReqHeader) + .set('kbn-xsrf', 'foo') + .send({ + name: 'my_api_call', + messages, + connectorId, + functions: [], + scope: 'all', + }) + .expect(200) + .pipe(passThrough); + + let data: string = ''; + + passThrough.on('data', (chunk) => { + data += chunk.toString('utf-8'); + }); + + const simulator = await interceptor.waitForIntercept(); + + await simulator.status(400); + + await simulator.rawWrite( + JSON.stringify({ + error: { + code: 'context_length_exceeded', + message: + "This model's maximum context length is 8192 tokens. However, your messages resulted in 11036 tokens. Please reduce the length of the messages.", + param: 'messages', + type: 'invalid_request_error', + }, + }) + ); + + await simulator.rawEnd(); + + await new Promise((resolve) => passThrough.on('end', () => resolve())); + + const response = JSON.parse(data.trim()); + + expect(response.error.message).to.be( + `Token limit reached. Token limit is 8192, but the current conversation has 11036 tokens.` + ); + }); + }); +} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/complete.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/complete.spec.ts new file mode 100644 index 0000000000000..970b99ab35613 --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/complete.spec.ts @@ -0,0 +1,528 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +import { Response } from 'supertest'; +import { MessageRole, type Message } from '@kbn/observability-ai-assistant-plugin/common'; +import { omit, pick } from 'lodash'; +import { PassThrough } from 'stream'; +import expect from '@kbn/expect'; +import { + ChatCompletionChunkEvent, + ConversationCreateEvent, + ConversationUpdateEvent, + MessageAddEvent, + StreamingChatResponseEvent, + StreamingChatResponseEventType, +} from '@kbn/observability-ai-assistant-plugin/common/conversation_complete'; +import { ObservabilityAIAssistantScreenContextRequest } from '@kbn/observability-ai-assistant-plugin/common/types'; +import { + createLlmProxy, + isFunctionTitleRequest, + LlmProxy, + LlmResponseSimulator, +} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/common/create_llm_proxy'; +import { createOpenAiChunk } from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/common/create_openai_chunk'; +import { FtrProviderContext } from '../../common/ftr_provider_context'; +import { + decodeEvents, + getConversationCreatedEvent, + getConversationUpdatedEvent, +} from '../conversations/helpers'; +import { createProxyActionConnector, deleteActionConnector } from '../../common/action_connectors'; +import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services'; + +export default function ApiTest({ getService }: FtrProviderContext) { + const supertestWithoutAuth = getService('supertestWithoutAuth'); + const log = getService('log'); + const svlUserManager = getService('svlUserManager'); + const svlCommonApi = getService('svlCommonApi'); + + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); + + const COMPLETE_API_URL = `/internal/observability_ai_assistant/chat/complete`; + + const messages: Message[] = [ + { + '@timestamp': new Date().toISOString(), + message: { + role: MessageRole.System, + content: 'You are a helpful assistant', + }, + }, + { + '@timestamp': new Date().toISOString(), + message: { + role: MessageRole.User, + content: 'Good morning, bot!', + // make sure it doesn't 400 on `data` being set + data: '{}', + }, + }, + ]; + + describe('/internal/observability_ai_assistant/chat/complete', function () { + // TODO: https://github.com/elastic/kibana/issues/192751 + this.tags(['skipMKI']); + let proxy: LlmProxy; + let connectorId: string; + let roleAuthc: RoleCredentials; + let internalReqHeader: InternalRequestHeader; + + async function getEvents( + params: { screenContexts?: ObservabilityAIAssistantScreenContextRequest[] }, + cb: (conversationSimulator: LlmResponseSimulator) => Promise + ) { + const titleInterceptor = proxy.intercept('title', (body) => isFunctionTitleRequest(body)); + + const conversationInterceptor = proxy.intercept( + 'conversation', + (body) => !isFunctionTitleRequest(body) + ); + const responsePromise = new Promise((resolve, reject) => { + supertestWithoutAuth + .post(COMPLETE_API_URL) + .set(roleAuthc.apiKeyHeader) + .set(internalReqHeader) + .send({ + messages, + connectorId, + persist: true, + screenContexts: params.screenContexts || [], + scope: 'all', + }) + .then((response: Response) => resolve(response)) + .catch((err: Error) => reject(err)); + }); + + const [conversationSimulator, titleSimulator] = await Promise.all([ + conversationInterceptor.waitForIntercept(), + titleInterceptor.waitForIntercept(), + ]); + + await titleSimulator.status(200); + await titleSimulator.next('My generated title'); + await titleSimulator.complete(); + + await 
conversationSimulator.status(200); + await cb(conversationSimulator); + + const response = await responsePromise; + + return ( + String(response.body) + .split('\n') + .map((line) => line.trim()) + .filter(Boolean) + .map((line) => JSON.parse(line) as StreamingChatResponseEvent) + // Filter BufferFlush events that appear if isCloudEnabled is true which is the case in serverless tests + .filter((event) => event.type !== StreamingChatResponseEventType.BufferFlush) + .slice(2) + ); // ignore context request/response, we're testing this elsewhere + } + + before(async () => { + roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('editor'); + internalReqHeader = svlCommonApi.getInternalRequestHeader(); + proxy = await createLlmProxy(log); + connectorId = await createProxyActionConnector({ + supertest: supertestWithoutAuth, + log, + port: proxy.getPort(), + roleAuthc, + internalReqHeader, + }); + }); + + after(async () => { + proxy.close(); + await deleteActionConnector({ + supertest: supertestWithoutAuth, + connectorId, + log, + roleAuthc, + internalReqHeader, + }); + await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc); + }); + + it('returns a streaming response from the server', async () => { + const interceptor = proxy.intercept('conversation', () => true); + + const receivedChunks: any[] = []; + + const passThrough = new PassThrough(); + + supertestWithoutAuth + .post(COMPLETE_API_URL) + .set(roleAuthc.apiKeyHeader) + .set(internalReqHeader) + .send({ + messages, + connectorId, + persist: false, + screenContexts: [], + scope: 'all', + }) + .pipe(passThrough); + + passThrough.on('data', (chunk) => { + receivedChunks.push(chunk.toString()); + }); + + const simulator = await interceptor.waitForIntercept(); + + await simulator.status(200); + const chunk = JSON.stringify(createOpenAiChunk('Hello')); + + await simulator.rawWrite(`data: ${chunk.substring(0, 10)}`); + await simulator.rawWrite(`${chunk.substring(10)}\n\n`); + await simulator.complete(); + + await new Promise((resolve) => passThrough.on('end', () => resolve())); + + const parsedEvents = decodeEvents(receivedChunks.join('')); + + expect(parsedEvents.map((event) => event.type)).to.eql([ + StreamingChatResponseEventType.MessageAdd, + StreamingChatResponseEventType.MessageAdd, + StreamingChatResponseEventType.ChatCompletionChunk, + StreamingChatResponseEventType.MessageAdd, + ]); + + const messageEvents = parsedEvents.filter( + (msg): msg is MessageAddEvent => msg.type === StreamingChatResponseEventType.MessageAdd + ); + + const chunkEvents = parsedEvents.filter( + (msg): msg is ChatCompletionChunkEvent => + msg.type === StreamingChatResponseEventType.ChatCompletionChunk + ); + + expect(omit(messageEvents[0], 'id', 'message.@timestamp')).to.eql({ + type: StreamingChatResponseEventType.MessageAdd, + message: { + message: { + content: '', + role: MessageRole.Assistant, + function_call: { + name: 'context', + trigger: MessageRole.Assistant, + }, + }, + }, + }); + + expect(omit(messageEvents[1], 'id', 'message.@timestamp')).to.eql({ + type: StreamingChatResponseEventType.MessageAdd, + message: { + message: { + role: MessageRole.User, + name: 'context', + content: JSON.stringify({ screen_description: '', learnings: [] }), + }, + }, + }); + + expect(omit(chunkEvents[0], 'id')).to.eql({ + type: StreamingChatResponseEventType.ChatCompletionChunk, + message: { + content: 'Hello', + }, + }); + + expect(omit(messageEvents[2], 'id', 'message.@timestamp')).to.eql({ + type: StreamingChatResponseEventType.MessageAdd, + message: { + 
message: { + content: 'Hello', + role: MessageRole.Assistant, + function_call: { + name: '', + arguments: '', + trigger: MessageRole.Assistant, + }, + }, + }, + }); + }); + describe('when creating a new conversation', () => { + let events: StreamingChatResponseEvent[]; + + before(async () => { + events = await getEvents({}, async (conversationSimulator) => { + await conversationSimulator.next('Hello'); + await conversationSimulator.next(' again'); + await conversationSimulator.complete(); + }); + }); + + it('creates a new conversation', async () => { + expect(omit(events[0], 'id')).to.eql({ + type: StreamingChatResponseEventType.ChatCompletionChunk, + message: { + content: 'Hello', + }, + }); + expect(omit(events[1], 'id')).to.eql({ + type: StreamingChatResponseEventType.ChatCompletionChunk, + message: { + content: ' again', + }, + }); + expect(omit(events[2], 'id', 'message.@timestamp')).to.eql({ + type: StreamingChatResponseEventType.MessageAdd, + message: { + message: { + content: 'Hello again', + function_call: { + arguments: '', + name: '', + trigger: MessageRole.Assistant, + }, + role: MessageRole.Assistant, + }, + }, + }); + + expect( + omit( + events[3], + 'conversation.id', + 'conversation.last_updated', + 'conversation.token_count' + ) + ).to.eql({ + type: StreamingChatResponseEventType.ConversationCreate, + conversation: { + title: 'My generated title', + }, + }); + + const tokenCount = (events[3] as ConversationCreateEvent).conversation.token_count!; + + expect(tokenCount.completion).to.be.greaterThan(0); + expect(tokenCount.prompt).to.be.greaterThan(0); + + expect(tokenCount.total).to.eql(tokenCount.completion + tokenCount.prompt); + }); + + after(async () => { + const createdConversationId = events.filter( + (line): line is ConversationCreateEvent => + line.type === StreamingChatResponseEventType.ConversationCreate + )[0]?.conversation.id; + + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'DELETE /internal/observability_ai_assistant/conversation/{conversationId}', + roleAuthc, + internalReqHeader, + params: { + path: { + conversationId: createdConversationId, + }, + }, + }) + .expect(200); + }); + }); + + describe('after executing a screen context action', () => { + let events: StreamingChatResponseEvent[]; + + before(async () => { + events = await getEvents( + { + screenContexts: [ + { + actions: [ + { + name: 'my_action', + description: 'My action', + parameters: { + type: 'object', + properties: { + foo: { + type: 'string', + }, + }, + }, + }, + ], + }, + ], + }, + async (conversationSimulator) => { + await conversationSimulator.next({ + function_call: { name: 'my_action', arguments: JSON.stringify({ foo: 'bar' }) }, + }); + await conversationSimulator.complete(); + } + ); + }); + + it('closes the stream without persisting the conversation', () => { + expect( + pick( + events[events.length - 1], + 'message.message.content', + 'message.message.function_call', + 'message.message.role' + ) + ).to.eql({ + message: { + message: { + content: '', + function_call: { + name: 'my_action', + arguments: JSON.stringify({ foo: 'bar' }), + trigger: MessageRole.Assistant, + }, + role: MessageRole.Assistant, + }, + }, + }); + }); + + it('does not store the conversation', async () => { + expect( + events.filter((event) => event.type === StreamingChatResponseEventType.ConversationCreate) + .length + ).to.eql(0); + + const conversations = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/conversations', + roleAuthc, 
+ internalReqHeader, + }) + .expect(200); + + expect(conversations.body.conversations.length).to.be(0); + }); + }); + + describe('when updating an existing conversation', () => { + let conversationCreatedEvent: ConversationCreateEvent; + let conversationUpdatedEvent: ConversationUpdateEvent; + + before(async () => { + void proxy + .intercept('conversation_title', (body) => isFunctionTitleRequest(body), [ + { + function_call: { + name: 'title_conversation', + arguments: JSON.stringify({ title: 'LLM-generated title' }), + }, + }, + ]) + .completeAfterIntercept(); + + void proxy + .intercept('conversation', (body) => !isFunctionTitleRequest(body), 'Good morning, sir!') + .completeAfterIntercept(); + + const createResponse = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/chat/complete', + roleAuthc, + internalReqHeader, + params: { + body: { + messages, + connectorId, + persist: true, + screenContexts: [], + scope: 'all', + }, + }, + }) + .expect(200); + + await proxy.waitForAllInterceptorsSettled(); + + conversationCreatedEvent = getConversationCreatedEvent(createResponse.body); + + const conversationId = conversationCreatedEvent.conversation.id; + const fullConversation = await observabilityAIAssistantAPIClient.slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', + internalReqHeader, + roleAuthc, + params: { + path: { + conversationId, + }, + }, + }); + + void proxy + .intercept('conversation', (body) => !isFunctionTitleRequest(body), 'Good night, sir!') + .completeAfterIntercept(); + + const updatedResponse = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/chat/complete', + internalReqHeader, + roleAuthc, + params: { + body: { + messages: [ + ...fullConversation.body.messages, + { + '@timestamp': new Date().toISOString(), + message: { + role: MessageRole.User, + content: 'Good night, bot!', + }, + }, + ], + connectorId, + persist: true, + screenContexts: [], + conversationId, + scope: 'all', + }, + }, + }) + .expect(200); + + await proxy.waitForAllInterceptorsSettled(); + + conversationUpdatedEvent = getConversationUpdatedEvent(updatedResponse.body); + }); + + after(async () => { + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'DELETE /internal/observability_ai_assistant/conversation/{conversationId}', + internalReqHeader, + roleAuthc, + params: { + path: { + conversationId: conversationCreatedEvent.conversation.id, + }, + }, + }) + .expect(200); + }); + + it('has correct token count for a new conversation', async () => { + expect(conversationCreatedEvent.conversation.token_count?.completion).to.be.greaterThan(0); + expect(conversationCreatedEvent.conversation.token_count?.prompt).to.be.greaterThan(0); + expect(conversationCreatedEvent.conversation.token_count?.total).to.be.greaterThan(0); + }); + + it('has correct token count for the updated conversation', async () => { + expect(conversationUpdatedEvent.conversation.token_count!.total).to.be.greaterThan( + conversationCreatedEvent.conversation.token_count!.total + ); + }); + }); + + // todo + it.skip('executes a function', async () => {}); + }); +} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/functions/elasticsearch.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/functions/elasticsearch.spec.ts new file mode 100644 index 
0000000000000..4ba276333cc55 --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/functions/elasticsearch.spec.ts @@ -0,0 +1,126 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { MessageAddEvent, MessageRole } from '@kbn/observability-ai-assistant-plugin/common'; +import expect from '@kbn/expect'; +import { apm, timerange } from '@kbn/apm-synthtrace-client'; +import { ApmSynthtraceEsClient } from '@kbn/apm-synthtrace'; +import { ELASTICSEARCH_FUNCTION_NAME } from '@kbn/observability-ai-assistant-plugin/server/functions/elasticsearch'; +import { + LlmProxy, + createLlmProxy, +} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/common/create_llm_proxy'; +import { FtrProviderContext } from '../../../common/ftr_provider_context'; +import { getMessageAddedEvents, invokeChatCompleteWithFunctionRequest } from './helpers'; +import { + createProxyActionConnector, + deleteActionConnector, +} from '../../../common/action_connectors'; +import type { InternalRequestHeader, RoleCredentials } from '../../../../../../../shared/services'; + +export default function ApiTest({ getService }: FtrProviderContext) { + const supertest = getService('supertest'); + const log = getService('log'); + const synthtrace = getService('synthtrace'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); + const svlUserManager = getService('svlUserManager'); + const svlCommonApi = getService('svlCommonApi'); + + describe('when calling elasticsearch', function () { + // TODO: https://github.com/elastic/kibana/issues/192751 + this.tags(['skipMKI']); + let proxy: LlmProxy; + let connectorId: string; + let events: MessageAddEvent[]; + let roleAuthc: RoleCredentials; + let internalReqHeader: InternalRequestHeader; + let apmSynthtraceEsClient: ApmSynthtraceEsClient; + + before(async () => { + roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('editor'); + internalReqHeader = svlCommonApi.getInternalRequestHeader(); + apmSynthtraceEsClient = await synthtrace.createSynthtraceEsClient(); + proxy = await createLlmProxy(log); + connectorId = await createProxyActionConnector({ + supertest, + log, + port: proxy.getPort(), + roleAuthc, + internalReqHeader, + }); + + // intercept the LLM request and return a fixed response + void proxy + .intercept('conversation', () => true, 'Hello from LLM Proxy') + .completeAfterIntercept(); + + await generateApmData(apmSynthtraceEsClient); + + const responseBody = await invokeChatCompleteWithFunctionRequest({ + connectorId, + observabilityAIAssistantAPIClient, + internalReqHeader, + roleAuthc, + functionCall: { + name: ELASTICSEARCH_FUNCTION_NAME, + trigger: MessageRole.User, + arguments: JSON.stringify({ + method: 'POST', + path: 'traces*/_search', + body: { + size: 0, + aggs: { + services: { + terms: { + field: 'service.name', + }, + }, + }, + }, + }), + }, + }); + + await proxy.waitForAllInterceptorsSettled(); + + events = getMessageAddedEvents(responseBody); + }); + + after(async () => { + proxy.close(); + await deleteActionConnector({ supertest, connectorId, log, roleAuthc, internalReqHeader }); + await apmSynthtraceEsClient.clean(); + }); + + it('returns elasticsearch function response', async () => { + const esFunctionResponse = events[0]; + 
const parsedEsResponse = JSON.parse(esFunctionResponse.message.message.content!).response; + + expect(esFunctionResponse.message.message.name).to.be('elasticsearch'); + expect(parsedEsResponse.hits.total.value).to.be(15); + expect(parsedEsResponse.aggregations.services.buckets).to.eql([ + { key: 'java-backend', doc_count: 15 }, + ]); + expect(events.length).to.be(2); + }); + }); +} + +export async function generateApmData(apmSynthtraceEsClient: ApmSynthtraceEsClient) { + const serviceA = apm + .service({ name: 'java-backend', environment: 'production', agentName: 'java' }) + .instance('a'); + + const events = timerange('now-15m', 'now') + .interval('1m') + .rate(1) + .generator((timestamp) => { + return serviceA.transaction({ transactionName: 'tx' }).timestamp(timestamp).duration(1000); + }); + + return apmSynthtraceEsClient.index(events); +} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/functions/helpers.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/functions/helpers.ts new file mode 100644 index 0000000000000..b9e1ec0865013 --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/functions/helpers.ts @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { + Message, + MessageAddEvent, + MessageRole, + StreamingChatResponseEvent, +} from '@kbn/observability-ai-assistant-plugin/common'; +import { AssistantScope } from '@kbn/observability-ai-assistant-plugin/common/types'; +import { Readable } from 'stream'; +import type { InternalRequestHeader, RoleCredentials } from '../../../../../../../shared/services'; +import { ObservabilityAIAssistantApiClient } from '../../../common/observability_ai_assistant_api_client'; + +function decodeEvents(body: Readable | string) { + return String(body) + .split('\n') + .map((line) => line.trim()) + .filter(Boolean) + .map((line) => JSON.parse(line) as StreamingChatResponseEvent); +} + +export function getMessageAddedEvents(body: Readable | string) { + return decodeEvents(body).filter( + (event): event is MessageAddEvent => event.type === 'messageAdd' + ); +} + +export async function invokeChatCompleteWithFunctionRequest({ + connectorId, + observabilityAIAssistantAPIClient, + functionCall, + roleAuthc, + internalReqHeader, + scope, +}: { + connectorId: string; + observabilityAIAssistantAPIClient: ObservabilityAIAssistantApiClient; + functionCall: Message['message']['function_call']; + scope?: AssistantScope; + roleAuthc: RoleCredentials; + internalReqHeader: InternalRequestHeader; +}) { + const { body } = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/chat/complete', + internalReqHeader, + roleAuthc, + params: { + body: { + messages: [ + { + '@timestamp': new Date().toISOString(), + message: { + role: MessageRole.Assistant, + content: '', + function_call: functionCall, + }, + }, + ], + connectorId, + persist: false, + screenContexts: [], + scope: 'observability', + }, + }, + }) + .expect(200); + + return body; +} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/functions/summarize.spec.ts 
b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/functions/summarize.spec.ts new file mode 100644 index 0000000000000..6be39a36c62a5 --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/functions/summarize.spec.ts @@ -0,0 +1,96 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { MessageRole } from '@kbn/observability-ai-assistant-plugin/common'; +import expect from '@kbn/expect'; +import { + LlmProxy, + createLlmProxy, +} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/common/create_llm_proxy'; +import { FtrProviderContext } from '../../../common/ftr_provider_context'; +import { invokeChatCompleteWithFunctionRequest } from './helpers'; +import { + createProxyActionConnector, + deleteActionConnector, +} from '../../../common/action_connectors'; +import type { InternalRequestHeader, RoleCredentials } from '../../../../../../../shared/services'; + +export default function ApiTest({ getService }: FtrProviderContext) { + const supertest = getService('supertest'); + const log = getService('log'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); + const svlUserManager = getService('svlUserManager'); + const svlCommonApi = getService('svlCommonApi'); + + // Skipped until Elser is available in tests + describe.skip('when calling summarize function', () => { + let roleAuthc: RoleCredentials; + let internalReqHeader: InternalRequestHeader; + let proxy: LlmProxy; + let connectorId: string; + + before(async () => { + roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('editor'); + internalReqHeader = svlCommonApi.getInternalRequestHeader(); + proxy = await createLlmProxy(log); + connectorId = await createProxyActionConnector({ + supertest, + log, + port: proxy.getPort(), + roleAuthc, + internalReqHeader, + }); + + // intercept the LLM request and return a fixed response + void proxy + .intercept('conversation', () => true, 'Hello from LLM Proxy') + .completeAfterIntercept(); + + await invokeChatCompleteWithFunctionRequest({ + connectorId, + observabilityAIAssistantAPIClient, + internalReqHeader, + roleAuthc, + functionCall: { + name: 'summarize', + trigger: MessageRole.User, + arguments: JSON.stringify({ + id: 'my-id', + text: 'Hello world', + is_correction: false, + confidence: 1, + public: false, + }), + }, + }); + + await proxy.waitForAllInterceptorsSettled(); + }); + + after(async () => { + proxy.close(); + await deleteActionConnector({ supertest, connectorId, log, roleAuthc, internalReqHeader }); + }); + + it('persists entry in knowledge base', async () => { + const res = await observabilityAIAssistantAPIClient.slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/kb/entries', + internalReqHeader, + roleAuthc, + params: { + query: { + query: '', + sortBy: 'doc_id', + sortDirection: 'asc', + }, + }, + }); + + expect(res.body.entries).to.have.length(1); + }); + }); +} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/connectors/connectors.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/connectors/connectors.spec.ts new file mode 100644 index 0000000000000..9305c0d1e14e9 --- /dev/null +++ 
b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/connectors/connectors.spec.ts @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import expect from '@kbn/expect'; +import { FtrProviderContext } from '../../common/ftr_provider_context'; +import { createProxyActionConnector, deleteActionConnector } from '../../common/action_connectors'; +import type { + InternalRequestHeader, + RoleCredentials, + SupertestWithoutAuthProviderType, +} from '../../../../../../shared/services'; + +export default function ApiTest({ getService }: FtrProviderContext) { + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); + const supertestWithoutAuth = getService('supertestWithoutAuth'); + const log = getService('log'); + const svlUserManager = getService('svlUserManager'); + const svlCommonApi = getService('svlCommonApi'); + + describe('List connectors', () => { + let roleAuthc: RoleCredentials; + let internalReqHeader: InternalRequestHeader; + before(async () => { + roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('editor'); + internalReqHeader = svlCommonApi.getInternalRequestHeader(); + await deleteAllActionConnectors({ + supertest: supertestWithoutAuth, + roleAuthc, + internalReqHeader, + }); + }); + + after(async () => { + await deleteAllActionConnectors({ + supertest: supertestWithoutAuth, + roleAuthc, + internalReqHeader, + }); + await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc); + }); + + it('Returns a 2xx for enterprise license', async () => { + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/connectors', + roleAuthc, + internalReqHeader, + }) + .expect(200); + }); + + it('returns an empty list of connectors', async () => { + const res = await observabilityAIAssistantAPIClient.slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/connectors', + roleAuthc, + internalReqHeader, + }); + + expect(res.body.length).to.be(0); + }); + + it("returns the gen ai connector if it's been created", async () => { + const connectorId = await createProxyActionConnector({ + supertest: supertestWithoutAuth, + log, + port: 1234, + internalReqHeader, + roleAuthc, + }); + + const res = await observabilityAIAssistantAPIClient.slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/connectors', + internalReqHeader, + roleAuthc, + }); + + expect(res.body.length).to.be(1); + + await deleteActionConnector({ + supertest: supertestWithoutAuth, + connectorId, + log, + internalReqHeader, + roleAuthc, + }); + }); + }); +} + +export async function deleteAllActionConnectors({ + supertest, + roleAuthc, + internalReqHeader, +}: { + supertest: SupertestWithoutAuthProviderType; + roleAuthc: RoleCredentials; + internalReqHeader: InternalRequestHeader; +}): Promise<unknown> { + const res = await supertest + .get(`/api/actions/connectors`) + .set(roleAuthc.apiKeyHeader) + .set(internalReqHeader); + + const body = res.body as Array<{ id: string; connector_type_id: string; name: string }>; + return Promise.all( + body.map(({ id }) => { + return supertest + .delete(`/api/actions/connector/${id}`) + .set(roleAuthc.apiKeyHeader) + .set(internalReqHeader); + }) + ); +} diff --git
a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/conversations/conversations.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/conversations/conversations.spec.ts new file mode 100644 index 0000000000000..b4426744e4082 --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/conversations/conversations.spec.ts @@ -0,0 +1,291 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import expect from '@kbn/expect'; +import { merge, omit } from 'lodash'; +import { + type ConversationCreateRequest, + type ConversationUpdateRequest, + MessageRole, +} from '@kbn/observability-ai-assistant-plugin/common/types'; +import type { FtrProviderContext } from '../../common/ftr_provider_context'; +import type { SupertestReturnType } from '../../common/observability_ai_assistant_api_client'; +import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services'; + +export default function ApiTest({ getService }: FtrProviderContext) { + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); + const svlUserManager = getService('svlUserManager'); + const svlCommonApi = getService('svlCommonApi'); + + const conversationCreate: ConversationCreateRequest = { + '@timestamp': new Date().toISOString(), + conversation: { + title: 'My title', + }, + labels: {}, + numeric_labels: {}, + messages: [ + { + '@timestamp': new Date().toISOString(), + message: { + role: MessageRole.User, + content: 'My message', + }, + }, + ], + public: false, + }; + + const conversationUpdate: ConversationUpdateRequest = merge({}, conversationCreate, { + conversation: { + id: '', + title: 'My updated title', + }, + }); + + describe('Conversations', () => { + let roleAuthc: RoleCredentials; + let internalReqHeader: InternalRequestHeader; + before(async () => { + roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('editor'); + internalReqHeader = svlCommonApi.getInternalRequestHeader(); + }); + after(async () => { + await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc); + }); + describe('without conversations', () => { + it('returns no conversations when listing', async () => { + const response = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/conversations', + internalReqHeader, + roleAuthc, + }) + .expect(200); + + expect(response.body).to.eql({ conversations: [] }); + }); + + it('returns a 404 for updating conversations', async () => { + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'PUT /internal/observability_ai_assistant/conversation/{conversationId}', + internalReqHeader, + roleAuthc, + params: { + path: { + conversationId: 'non-existing-conversation-id', + }, + body: { + conversation: conversationUpdate, + }, + }, + }) + .expect(404); + }); + + it('returns a 404 for retrieving a conversation', async () => { + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', + internalReqHeader, + roleAuthc, + params: { + path: { + conversationId: 'my-conversation-id', + }, + }, + }) + .expect(404); + }); + }); + + describe('when creating a conversation with the write user', 
function () { + let createResponse: Awaited< + SupertestReturnType<'POST /internal/observability_ai_assistant/conversation'> + >; + before(async () => { + createResponse = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/conversation', + roleAuthc, + internalReqHeader, + params: { + body: { + conversation: conversationCreate, + }, + }, + }) + .expect(200); + }); + + after(async () => { + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'DELETE /internal/observability_ai_assistant/conversation/{conversationId}', + internalReqHeader, + roleAuthc, + params: { + path: { + conversationId: createResponse.body.conversation.id, + }, + }, + }) + .expect(200); + + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', + internalReqHeader, + roleAuthc, + params: { + path: { + conversationId: createResponse.body.conversation.id, + }, + }, + }) + .expect(404); + }); + it('returns the conversation', function () { + // delete user from response to avoid comparing it as it will be different in MKI + delete createResponse.body.user; + expect(createResponse.body).to.eql({ + '@timestamp': createResponse.body['@timestamp'], + conversation: { + id: createResponse.body.conversation.id, + last_updated: createResponse.body.conversation.last_updated, + title: conversationCreate.conversation.title, + }, + labels: conversationCreate.labels, + numeric_labels: conversationCreate.numeric_labels, + messages: conversationCreate.messages, + namespace: 'default', + public: conversationCreate.public, + }); + }); + + it('returns a 404 for updating a non-existing conversation', async () => { + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'PUT /internal/observability_ai_assistant/conversation/{conversationId}', + roleAuthc, + internalReqHeader, + params: { + path: { + conversationId: 'non-existing-conversation-id', + }, + body: { + conversation: conversationUpdate, + }, + }, + }) + .expect(404); + }); + + it('returns a 404 for retrieving a non-existing conversation', async () => { + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', + roleAuthc, + internalReqHeader, + params: { + path: { + conversationId: 'non-existing-conversation-id', + }, + }, + }) + .expect(404); + }); + + it('returns the conversation that was created', async () => { + const response = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', + internalReqHeader, + roleAuthc, + params: { + path: { + conversationId: createResponse.body.conversation.id, + }, + }, + }) + .expect(200); + + // delete user from response to avoid comparing it as it will be different in MKI + delete response.body.user; + expect(response.body).to.eql(createResponse.body); + }); + + it('returns the created conversation when listing', async () => { + const response = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/conversations', + roleAuthc, + internalReqHeader, + }) + .expect(200); + // delete user from response to avoid comparing it as it will be different in MKI + delete response.body.conversations[0].user; + expect(response.body.conversations[0]).to.eql(createResponse.body); + }); + + // TODO + it.skip('returns a 404 when reading it with another user', () => {}); + + 
describe('after updating', () => { + let updateResponse: Awaited< + SupertestReturnType<'PUT /internal/observability_ai_assistant/conversation/{conversationId}'> + >; + + before(async () => { + updateResponse = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'PUT /internal/observability_ai_assistant/conversation/{conversationId}', + internalReqHeader, + roleAuthc, + params: { + path: { + conversationId: createResponse.body.conversation.id, + }, + body: { + conversation: merge(omit(conversationUpdate, 'conversation.id'), { + conversation: { id: createResponse.body.conversation.id }, + }), + }, + }, + }) + .expect(200); + }); + + it('returns the updated conversation as response', async () => { + expect(updateResponse.body.conversation.title).to.eql( + conversationUpdate.conversation.title + ); + }); + + it('returns the updated conversation after get', async () => { + const updateAfterCreateResponse = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', + internalReqHeader, + roleAuthc, + params: { + path: { + conversationId: createResponse.body.conversation.id, + }, + }, + }) + .expect(200); + + expect(updateAfterCreateResponse.body.conversation.title).to.eql( + conversationUpdate.conversation.title + ); + }); + }); + }); + }); +} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/conversations/helpers.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/conversations/helpers.ts new file mode 100644 index 0000000000000..7e72b4ed57bf9 --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/conversations/helpers.ts @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { Readable } from 'stream'; +import { ToolingLog } from '@kbn/tooling-log'; +import { + ConversationCreateEvent, + ConversationUpdateEvent, + StreamingChatResponseEvent, + StreamingChatResponseEventType, +} from '@kbn/observability-ai-assistant-plugin/common/conversation_complete'; +import { ObservabilityAIAssistantApiClient } from '../../common/observability_ai_assistant_api_client'; +import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services'; + +export function decodeEvents(body: Readable | string) { + return String(body) + .split('\n') + .map((line) => line.trim()) + .filter(Boolean) + .map((line) => JSON.parse(line) as StreamingChatResponseEvent); +} + +export function getConversationCreatedEvent(body: Readable | string) { + const decodedEvents = decodeEvents(body); + const conversationCreatedEvent = decodedEvents.find( + (event) => event.type === StreamingChatResponseEventType.ConversationCreate + ) as ConversationCreateEvent; + + if (!conversationCreatedEvent) { + throw new Error( + `No conversation created event found: ${JSON.stringify(decodedEvents, null, 2)}` + ); + } + + return conversationCreatedEvent; +} + +export function getConversationUpdatedEvent(body: Readable | string) { + const decodedEvents = decodeEvents(body); + const conversationUpdatedEvent = decodedEvents.find( + (event) => event.type === StreamingChatResponseEventType.ConversationUpdate + ) as ConversationUpdateEvent; + + if (!conversationUpdatedEvent) { + throw new Error( + `No conversation updated event found: ${JSON.stringify(decodedEvents, null, 2)}` + ); + } + + return conversationUpdatedEvent; +} + +export async function deleteAllConversations({ + observabilityAIAssistantAPIClient, + internalReqHeader, + roleAuthc, + log, +}: { + observabilityAIAssistantAPIClient: ObservabilityAIAssistantApiClient; + internalReqHeader: InternalRequestHeader; + roleAuthc: RoleCredentials; + log: ToolingLog; +}) { + const findConversationsResponse = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/conversations', + internalReqHeader, + roleAuthc, + params: { + body: { + query: '', + }, + }, + }) + .expect(200); + const conversations = findConversationsResponse.body.conversations; + + if (!conversations || conversations.length === 0) { + return; + } + + await Promise.all( + conversations.map(async (conversation) => { + try { + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'DELETE /internal/observability_ai_assistant/conversation/{conversationId}', + internalReqHeader, + roleAuthc, + params: { + path: { + conversationId: conversation.conversation.id, + }, + }, + }) + .expect(200); + } catch (error) { + log.error(`Failed to delete conversation with ID: ${conversation.conversation.id}`); + } + }) + ); +} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/index.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/index.ts new file mode 100644 index 0000000000000..26c8a7b2839a9 --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/index.ts @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ +import globby from 'globby'; +import path from 'path'; +import { FtrProviderContext } from '../../../../ftr_provider_context'; + +const cwd = path.join(__dirname); + +export default function observabilityAIAssistantApiIntegrationTests({ + loadTestFile, +}: FtrProviderContext) { + describe('Observability AI Assistant API tests', function () { + const filePattern = '**/*.spec.ts'; + const tests = globby.sync(filePattern, { cwd }); + + tests.forEach((testName) => { + describe(testName, () => { + loadTestFile(require.resolve(`./${testName}`)); + }); + }); + }); +} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/helpers.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/helpers.ts new file mode 100644 index 0000000000000..6affeeb861295 --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/helpers.ts @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { MachineLearningProvider } from '@kbn/test-suites-xpack/api_integration/services/ml'; +import { SUPPORTED_TRAINED_MODELS } from '@kbn/test-suites-xpack/functional/services/ml/api'; + +export const TINY_ELSER = { + ...SUPPORTED_TRAINED_MODELS.TINY_ELSER, + id: SUPPORTED_TRAINED_MODELS.TINY_ELSER.name, +}; + +export async function deleteKnowledgeBaseModel(ml: ReturnType<typeof MachineLearningProvider>) { + await ml.api.stopTrainedModelDeploymentES(TINY_ELSER.id, true); + await ml.api.deleteTrainedModelES(TINY_ELSER.id); + await ml.testResources.cleanMLSavedObjects(); +} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base.spec.ts new file mode 100644 index 0000000000000..b540ee5829e59 --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base.spec.ts @@ -0,0 +1,278 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +import expect from '@kbn/expect'; +import { + clearKnowledgeBase, + createKnowledgeBaseModel, +} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/tests/knowledge_base/helpers'; +import { deleteKnowledgeBaseModel } from './helpers'; +import { FtrProviderContext } from '../../common/ftr_provider_context'; +import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services'; + +export default function ApiTest({ getService }: FtrProviderContext) { + const ml = getService('ml'); + const es = getService('es'); + const svlUserManager = getService('svlUserManager'); + const svlCommonApi = getService('svlCommonApi'); + + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); + + // TODO: https://github.com/elastic/kibana/issues/192886 + describe.skip('Knowledge base', function () { + // TODO: https://github.com/elastic/kibana/issues/192757 + this.tags(['skipMKI']); + let roleAuthc: RoleCredentials; + let internalReqHeader: InternalRequestHeader; + before(async () => { + roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('editor'); + internalReqHeader = svlCommonApi.getInternalRequestHeader(); + await createKnowledgeBaseModel(ml); + }); + + after(async () => { + await deleteKnowledgeBaseModel(ml); + await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc); + }); + + it('returns 200 on knowledge base setup', async () => { + const res = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/kb/setup', + roleAuthc, + internalReqHeader, + }) + .expect(200); + expect(res.body).to.eql({}); + }); + describe('when managing a single entry', () => { + const knowledgeBaseEntry = { + id: 'my-doc-id-1', + text: 'My content', + }; + it('returns 200 on create', async () => { + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/kb/entries/save', + params: { body: knowledgeBaseEntry }, + roleAuthc, + internalReqHeader, + }) + .expect(200); + const res = await observabilityAIAssistantAPIClient.slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/kb/entries', + params: { + query: { + query: '', + sortBy: 'doc_id', + sortDirection: 'asc', + }, + }, + roleAuthc, + internalReqHeader, + }); + const entry = res.body.entries[0]; + expect(entry.id).to.equal(knowledgeBaseEntry.id); + expect(entry.text).to.equal(knowledgeBaseEntry.text); + }); + + it('returns 200 on get entries and entry exists', async () => { + const res = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/kb/entries', + params: { + query: { + query: '', + sortBy: 'doc_id', + sortDirection: 'asc', + }, + }, + roleAuthc, + internalReqHeader, + }) + .expect(200); + const entry = res.body.entries[0]; + expect(entry.id).to.equal(knowledgeBaseEntry.id); + expect(entry.text).to.equal(knowledgeBaseEntry.text); + }); + + it('returns 200 on delete', async () => { + const entryId = 'my-doc-id-1'; + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'DELETE /internal/observability_ai_assistant/kb/entries/{entryId}', + params: { + path: { entryId }, + }, + roleAuthc, + internalReqHeader, + }) + .expect(200); + + const res = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/kb/entries', + params: { + query: { + query: '', + sortBy: 'doc_id', + sortDirection: 'asc', + }, + }, + roleAuthc, + internalReqHeader, + }) + 
.expect(200); + expect(res.body.entries.filter((entry) => entry.id.startsWith('my-doc-id')).length).to.eql( + 0 + ); + }); + + it('returns 500 on delete not found', async () => { + const entryId = 'my-doc-id-1'; + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'DELETE /internal/observability_ai_assistant/kb/entries/{entryId}', + params: { + path: { entryId }, + }, + roleAuthc, + internalReqHeader, + }) + .expect(500); + }); + }); + describe('when managing multiple entries', () => { + before(async () => { + await clearKnowledgeBase(es); + }); + afterEach(async () => { + await clearKnowledgeBase(es); + }); + const knowledgeBaseEntries = [ + { + id: 'my_doc_a', + text: 'My content a', + }, + { + id: 'my_doc_b', + text: 'My content b', + }, + { + id: 'my_doc_c', + text: 'My content c', + }, + ]; + it('returns 200 on create', async () => { + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/kb/entries/import', + params: { body: { entries: knowledgeBaseEntries } }, + roleAuthc, + internalReqHeader, + }) + .expect(200); + + const res = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/kb/entries', + params: { + query: { + query: '', + sortBy: 'doc_id', + sortDirection: 'asc', + }, + }, + roleAuthc, + internalReqHeader, + }) + .expect(200); + expect(res.body.entries.filter((entry) => entry.id.startsWith('my_doc')).length).to.eql(3); + }); + + it('allows sorting', async () => { + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/kb/entries/import', + params: { body: { entries: knowledgeBaseEntries } }, + roleAuthc, + internalReqHeader, + }) + .expect(200); + + const res = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/kb/entries', + params: { + query: { + query: '', + sortBy: 'doc_id', + sortDirection: 'desc', + }, + }, + roleAuthc, + internalReqHeader, + }) + .expect(200); + + const entries = res.body.entries.filter((entry) => entry.id.startsWith('my_doc')); + expect(entries[0].id).to.eql('my_doc_c'); + expect(entries[1].id).to.eql('my_doc_b'); + expect(entries[2].id).to.eql('my_doc_a'); + + // asc + const resAsc = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/kb/entries', + params: { + query: { + query: '', + sortBy: 'doc_id', + sortDirection: 'asc', + }, + }, + roleAuthc, + internalReqHeader, + }) + .expect(200); + + const entriesAsc = resAsc.body.entries.filter((entry) => entry.id.startsWith('my_doc')); + expect(entriesAsc[0].id).to.eql('my_doc_a'); + expect(entriesAsc[1].id).to.eql('my_doc_b'); + expect(entriesAsc[2].id).to.eql('my_doc_c'); + }); + it('allows searching', async () => { + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/kb/entries/import', + params: { body: { entries: knowledgeBaseEntries } }, + roleAuthc, + internalReqHeader, + }) + .expect(200); + + const res = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/kb/entries', + params: { + query: { + query: 'my_doc_a', + sortBy: 'doc_id', + sortDirection: 'asc', + }, + }, + roleAuthc, + internalReqHeader, + }) + .expect(200); + + expect(res.body.entries.length).to.eql(1); + expect(res.body.entries[0].id).to.eql('my_doc_a'); + }); + }); + }); +} diff --git 
a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_setup.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_setup.spec.ts new file mode 100644 index 0000000000000..ad5dd32f5c0b4 --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_setup.spec.ts @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import expect from '@kbn/expect'; +import { createKnowledgeBaseModel } from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/tests/knowledge_base/helpers'; +import { deleteKnowledgeBaseModel } from './helpers'; +import { FtrProviderContext } from '../../common/ftr_provider_context'; +import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services'; + +export default function ApiTest({ getService }: FtrProviderContext) { + const ml = getService('ml'); + const svlUserManager = getService('svlUserManager'); + const svlCommonApi = getService('svlCommonApi'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); + + describe('/internal/observability_ai_assistant/kb/setup', function () { + // TODO: https://github.com/elastic/kibana/issues/192757 + this.tags(['skipMKI']); + let roleAuthc: RoleCredentials; + let internalReqHeader: InternalRequestHeader; + before(async () => { + roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('editor'); + internalReqHeader = svlCommonApi.getInternalRequestHeader(); + }); + after(async () => { + await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc); + }); + it('returns empty object when successful', async () => { + await createKnowledgeBaseModel(ml); + const res = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/kb/setup', + roleAuthc, + internalReqHeader, + }) + .expect(200); + expect(res.body).to.eql({}); + await deleteKnowledgeBaseModel(ml); + }); + it('returns bad request if model cannot be installed', async () => { + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/kb/setup', + roleAuthc, + internalReqHeader, + }) + .expect(400); + }); + }); +} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_status.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_status.spec.ts new file mode 100644 index 0000000000000..60e02152fd4ac --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_status.spec.ts @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import expect from '@kbn/expect'; +import { + createKnowledgeBaseModel, + TINY_ELSER, +} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/tests/knowledge_base/helpers'; +import { deleteKnowledgeBaseModel } from './helpers'; +import { FtrProviderContext } from '../../common/ftr_provider_context'; +import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services'; + +export default function ApiTest({ getService }: FtrProviderContext) { + const ml = getService('ml'); + const svlUserManager = getService('svlUserManager'); + const svlCommonApi = getService('svlCommonApi'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); + + describe('/internal/observability_ai_assistant/kb/status', function () { + // TODO: https://github.com/elastic/kibana/issues/192757 + this.tags(['skipMKI']); + let roleAuthc: RoleCredentials; + let internalReqHeader: InternalRequestHeader; + before(async () => { + roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('editor'); + internalReqHeader = svlCommonApi.getInternalRequestHeader(); + await createKnowledgeBaseModel(ml); + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/kb/setup', + roleAuthc, + internalReqHeader, + }) + .expect(200); + }); + + after(async () => { + await deleteKnowledgeBaseModel(ml); + await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc); + }); + + it('returns correct status after knowledge base is setup', async () => { + const res = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/kb/status', + roleAuthc, + internalReqHeader, + }) + .expect(200); + expect(res.body.deployment_state).to.eql('started'); + expect(res.body.model_name).to.eql(TINY_ELSER.id); + }); + + it('returns correct status after elser is stopped', async () => { + await ml.api.stopTrainedModelDeploymentES(TINY_ELSER.id, true); + + const res = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/kb/status', + roleAuthc, + internalReqHeader, + }) + .expect(200); + + expect(res.body).to.eql({ + ready: false, + model_name: TINY_ELSER.id, + }); + }); + }); +} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_user_instructions.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_user_instructions.spec.ts new file mode 100644 index 0000000000000..86232035d0c58 --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_user_instructions.spec.ts @@ -0,0 +1,354 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import expect from '@kbn/expect'; +import { sortBy } from 'lodash'; +import { Message, MessageRole } from '@kbn/observability-ai-assistant-plugin/common'; +import { CONTEXT_FUNCTION_NAME } from '@kbn/observability-ai-assistant-plugin/server/functions/context'; +import { + clearConversations, + clearKnowledgeBase, + createKnowledgeBaseModel, +} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/tests/knowledge_base/helpers'; +import { getConversationCreatedEvent } from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/tests/conversations/helpers'; +import { + LlmProxy, + createLlmProxy, +} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/common/create_llm_proxy'; +import { deleteKnowledgeBaseModel } from './helpers'; +import { createProxyActionConnector, deleteActionConnector } from '../../common/action_connectors'; +import { FtrProviderContext } from '../../common/ftr_provider_context'; +import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services'; + +export default function ApiTest({ getService }: FtrProviderContext) { + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); + const supertestWithoutAuth = getService('supertestWithoutAuth'); + const es = getService('es'); + const ml = getService('ml'); + const log = getService('log'); + const svlUserManager = getService('svlUserManager'); + const svlCommonApi = getService('svlCommonApi'); + + // TODO: https://github.com/elastic/kibana/issues/192711 cannot create custom users in serverless + // as a workaround, try using built-in users via cookie auth + // TODO: https://github.com/elastic/kibana/issues/192757 + describe.skip('Knowledge base user instructions', function () { + this.tags(['skipMKI']); + let editorRoleAuthc: RoleCredentials; + let johnRoleAuthc: RoleCredentials; + let internalReqHeader: InternalRequestHeader; + before(async () => { + // Create API keys for the 'admin' and 'editor' roles to simulate different users + johnRoleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('admin'); + editorRoleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('editor'); + internalReqHeader = svlCommonApi.getInternalRequestHeader(); + await createKnowledgeBaseModel(ml); + + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/kb/setup', + roleAuthc: editorRoleAuthc, + internalReqHeader, + }) + .expect(200); + }); + + after(async () => { + await deleteKnowledgeBaseModel(ml); + await clearKnowledgeBase(es); + await clearConversations(es); + await svlUserManager.invalidateM2mApiKeyWithRoleScope(johnRoleAuthc); + await svlUserManager.invalidateM2mApiKeyWithRoleScope(editorRoleAuthc); + }); + + describe('when creating private and public user instructions', () => { + before(async () => { + await clearKnowledgeBase(es); + + const promises = [ + { roleAuthc: editorRoleAuthc, username: 'editor', isPublic: true }, + { roleAuthc: editorRoleAuthc, username: 'editor', isPublic: false }, + { roleAuthc: johnRoleAuthc, username: 'john', isPublic: true }, + { roleAuthc: johnRoleAuthc, username: 'john', isPublic: false }, + ].map(async ({ roleAuthc, username, isPublic }) => { + const visibility = isPublic ?
'Public' : 'Private'; + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'PUT /internal/observability_ai_assistant/kb/user_instructions', + params: { + body: { + id: `${visibility.toLowerCase()}-doc-from-${username}`, + text: `${visibility} user instruction from "${username}"`, + public: isPublic, + }, + }, + roleAuthc, + internalReqHeader, + }) + .expect(200); + }); + + await Promise.all(promises); + }); + + it('"editor" can retrieve their own private instructions and the public instructions', async () => { + const res = await observabilityAIAssistantAPIClient.slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/kb/user_instructions', + roleAuthc: editorRoleAuthc, + internalReqHeader, + }); + const instructions = res.body.userInstructions; + + const sortByDocId = (data: any) => sortBy(data, 'doc_id'); + expect(sortByDocId(instructions)).to.eql( + sortByDocId([ + { + doc_id: 'private-doc-from-editor', + public: false, + text: 'Private user instruction from "editor"', + }, + { + doc_id: 'public-doc-from-editor', + public: true, + text: 'Public user instruction from "editor"', + }, + { + doc_id: 'public-doc-from-john', + public: true, + text: 'Public user instruction from "john"', + }, + ]) + ); + }); + + it('"john" can retrieve their own private instructions and the public instructions', async () => { + const res = await observabilityAIAssistantAPIClient.slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/kb/user_instructions', + roleAuthc: johnRoleAuthc, + internalReqHeader, + }); + const instructions = res.body.userInstructions; + + const sortByDocId = (data: any) => sortBy(data, 'doc_id'); + expect(sortByDocId(instructions)).to.eql( + sortByDocId([ + { + doc_id: 'public-doc-from-editor', + public: true, + text: 'Public user instruction from "editor"', + }, + { + doc_id: 'public-doc-from-john', + public: true, + text: 'Public user instruction from "john"', + }, + { + doc_id: 'private-doc-from-john', + public: false, + text: 'Private user instruction from "john"', + }, + ]) + ); + }); + }); + + describe('when updating an existing user instruction', () => { + before(async () => { + await clearKnowledgeBase(es); + + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'PUT /internal/observability_ai_assistant/kb/user_instructions', + params: { + body: { + id: 'doc-to-update', + text: 'Initial text', + public: true, + }, + }, + roleAuthc: editorRoleAuthc, + internalReqHeader, + }) + .expect(200); + + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'PUT /internal/observability_ai_assistant/kb/user_instructions', + params: { + body: { + id: 'doc-to-update', + text: 'Updated text', + public: false, + }, + }, + roleAuthc: editorRoleAuthc, + internalReqHeader, + }) + .expect(200); + }); + + it('updates the user instruction', async () => { + const res = await observabilityAIAssistantAPIClient.slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/kb/user_instructions', + roleAuthc: editorRoleAuthc, + internalReqHeader, + }); + const instructions = res.body.userInstructions; + + expect(instructions).to.eql([ + { + doc_id: 'doc-to-update', + text: 'Updated text', + public: false, + }, + ]); + }); + }); + + describe('when a user instruction exists and a conversation is created', () => { + let proxy: LlmProxy; + let connectorId: string; + + const userInstructionText = + 'Be polite and use language that is easy to understand. 
Never disagree with the user.'; + + async function getConversationForUser(roleAuthc: RoleCredentials) { + // the user instruction is always created by the "editor" user + await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'PUT /internal/observability_ai_assistant/kb/user_instructions', + params: { + body: { + id: 'private-instruction-about-language', + text: userInstructionText, + public: false, + }, + }, + roleAuthc: editorRoleAuthc, + internalReqHeader, + }) + .expect(200); + + const interceptPromises = [ + proxy.interceptConversationTitle('LLM-generated title').completeAfterIntercept(), + proxy + .interceptConversation({ name: 'conversation', response: 'I, the LLM, hear you!' }) + .completeAfterIntercept(), + ]; + + const messages: Message[] = [ + { + '@timestamp': new Date().toISOString(), + message: { + role: MessageRole.System, + content: 'You are a helpful assistant', + }, + }, + { + '@timestamp': new Date().toISOString(), + message: { + role: MessageRole.User, + content: 'Today we will be testing user instructions!', + }, + }, + ]; + + const createResponse = await observabilityAIAssistantAPIClient + .slsUser({ + endpoint: 'POST /internal/observability_ai_assistant/chat/complete', + params: { + body: { + messages, + connectorId, + persist: true, + screenContexts: [], + scope: 'observability', + }, + }, + roleAuthc, + internalReqHeader, + }) + .expect(200); + + await proxy.waitForAllInterceptorsSettled(); + const conversationCreatedEvent = getConversationCreatedEvent(createResponse.body); + const conversationId = conversationCreatedEvent.conversation.id; + + const res = await observabilityAIAssistantAPIClient.slsUser({ + endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId, + }, + }, + roleAuthc, + internalReqHeader, + }); + + // wait for all interceptors to be settled + await Promise.all(interceptPromises); + + const conversation = res.body; + return conversation; + } + + before(async () => { + proxy = await createLlmProxy(log); + connectorId = await createProxyActionConnector({ + supertest: supertestWithoutAuth, + log, + port: proxy.getPort(), + roleAuthc: editorRoleAuthc, + internalReqHeader, + }); + }); + + after(async () => { + proxy.close(); + await deleteActionConnector({ + supertest: supertestWithoutAuth, + connectorId, + log, + roleAuthc: editorRoleAuthc, + internalReqHeader, + }); + }); + + it('adds the instruction to the system prompt', async () => { + const conversation = await getConversationForUser(editorRoleAuthc); + const systemMessage = conversation.messages.find( + (message) => message.message.role === MessageRole.System + )!; + expect(systemMessage.message.content).to.contain(userInstructionText); + }); + + it('does not add the instruction to the context', async () => { + const conversation = await getConversationForUser(editorRoleAuthc); + const contextMessage = conversation.messages.find( + (message) => message.message.name === CONTEXT_FUNCTION_NAME + ); + + // there should be no suggestions with the user instruction + expect(contextMessage?.message.content).to.not.contain(userInstructionText); + expect(contextMessage?.message.data).to.not.contain(userInstructionText); + + // there should be no suggestions at all + expect(JSON.parse(contextMessage?.message.data!).suggestions.length).to.be(0); + }); + + it('does not add the instruction to conversations of other users', async () => { + const conversation = await getConversationForUser(johnRoleAuthc); + const systemMessage = 
conversation.messages.find( + (message) => message.message.role === MessageRole.System + )!; + + expect(systemMessage.message.content).to.not.contain(userInstructionText); + expect(conversation.messages.length).to.be(5); + }); + }); + }); +} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/public_complete/public_complete.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/public_complete/public_complete.spec.ts new file mode 100644 index 0000000000000..4f61634d8d6e6 --- /dev/null +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/public_complete/public_complete.spec.ts @@ -0,0 +1,319 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +import expect from '@kbn/expect'; +import { + FunctionDefinition, + MessageRole, + type Message, +} from '@kbn/observability-ai-assistant-plugin/common'; +import { type StreamingChatResponseEvent } from '@kbn/observability-ai-assistant-plugin/common/conversation_complete'; +import { pick } from 'lodash'; +import type OpenAI from 'openai'; +import { type AdHocInstruction } from '@kbn/observability-ai-assistant-plugin/common/types'; +import { + createLlmProxy, + isFunctionTitleRequest, + LlmProxy, + LlmResponseSimulator, +} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/common/create_llm_proxy'; +import { FtrProviderContext } from '../../common/ftr_provider_context'; +import { createProxyActionConnector, deleteActionConnector } from '../../common/action_connectors'; +import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services'; +import { deleteAllConversations } from '../conversations/helpers'; + +export default function ApiTest({ getService }: FtrProviderContext) { + const supertest = getService('supertestWithoutAuth'); + const svlUserManager = getService('svlUserManager'); + const svlCommonApi = getService('svlCommonApi'); + const log = getService('log'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); + + const messages: Message[] = [ + { + '@timestamp': new Date().toISOString(), + message: { + role: MessageRole.System, + content: 'You are a helpful assistant', + }, + }, + { + '@timestamp': new Date().toISOString(), + message: { + role: MessageRole.User, + content: 'Good morning, bot!', + }, + }, + ]; + describe('/api/observability_ai_assistant/chat/complete', function () { + // TODO: https://github.com/elastic/kibana/issues/192751 + this.tags(['skipMKI']); + + let proxy: LlmProxy; + let connectorId: string; + let roleAuthc: RoleCredentials; + let internalReqHeader: InternalRequestHeader; + + interface RequestOptions { + actions?: Array<Pick<FunctionDefinition, 'name' | 'description' | 'parameters'>>; + instructions?: AdHocInstruction[]; + format?: 'openai' | 'default'; + } + + type ConversationSimulatorCallback = ( + conversationSimulator: LlmResponseSimulator + ) => Promise<void>; + + async function getResponseBody( + { actions, instructions, format = 'default' }: RequestOptions, + conversationSimulatorCallback: ConversationSimulatorCallback + ) { + const titleInterceptor = proxy.intercept('title', (body) => isFunctionTitleRequest(body)); + + const conversationInterceptor = proxy.intercept( + 'conversation', + (body) => !isFunctionTitleRequest(body) + ); + + const responsePromise = 
observabilityAIAssistantAPIClient.slsUser({ + endpoint: 'POST /api/observability_ai_assistant/chat/complete 2023-10-31', + roleAuthc, + internalReqHeader, + params: { + query: { format }, + body: { + messages, + connectorId, + persist: true, + actions, + instructions, + }, + }, + }); + + const [conversationSimulator, titleSimulator] = await Promise.race([ + Promise.all([ + conversationInterceptor.waitForIntercept(), + titleInterceptor.waitForIntercept(), + ]), + // make sure any request failures (like 400s) are properly propagated + responsePromise.then(() => []), + ]); + + await titleSimulator.status(200); + await titleSimulator.next('My generated title'); + await titleSimulator.complete(); + + await conversationSimulator.status(200); + if (conversationSimulatorCallback) { + await conversationSimulatorCallback(conversationSimulator); + } + + const response = await responsePromise; + + return String(response.body); + } + + async function getEvents( + options: RequestOptions, + conversationSimulatorCallback: ConversationSimulatorCallback + ) { + const responseBody = await getResponseBody(options, conversationSimulatorCallback); + + return responseBody + .split('\n') + .map((line) => line.trim()) + .filter(Boolean) + .map((line) => JSON.parse(line) as StreamingChatResponseEvent) + .slice(2); // ignore context request/response, we're testing this elsewhere + } + + async function getOpenAIResponse(conversationSimulatorCallback: ConversationSimulatorCallback) { + const responseBody = await getResponseBody( + { + format: 'openai', + }, + conversationSimulatorCallback + ); + + return responseBody; + } + + before(async () => { + proxy = await createLlmProxy(log); + roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('admin'); + internalReqHeader = svlCommonApi.getInternalRequestHeader(); + connectorId = await createProxyActionConnector({ + supertest, + log, + port: proxy.getPort(), + internalReqHeader, + roleAuthc, + }); + }); + + after(async () => { + await deleteAllConversations({ + observabilityAIAssistantAPIClient, + internalReqHeader, + roleAuthc, + log, + }); + await deleteActionConnector({ supertest, connectorId, log, roleAuthc, internalReqHeader }); + proxy.close(); + await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc); + }); + + describe('after executing an action', () => { + let events: StreamingChatResponseEvent[]; + + before(async () => { + events = await getEvents( + { + actions: [ + { + name: 'my_action', + description: 'My action', + parameters: { + type: 'object', + properties: { + foo: { + type: 'string', + }, + }, + }, + }, + ], + }, + async (conversationSimulator) => { + await conversationSimulator.next({ + function_call: { name: 'my_action', arguments: JSON.stringify({ foo: 'bar' }) }, + }); + await conversationSimulator.complete(); + } + ); + }); + + it('closes the stream without persisting the conversation', () => { + expect( + pick( + events[events.length - 1], + 'message.message.content', + 'message.message.function_call', + 'message.message.role' + ) + ).to.eql({ + message: { + message: { + content: '', + function_call: { + name: 'my_action', + arguments: JSON.stringify({ foo: 'bar' }), + trigger: MessageRole.Assistant, + }, + role: MessageRole.Assistant, + }, + }, + }); + }); + }); + + describe('after adding an instruction', () => { + let body: OpenAI.Chat.ChatCompletionCreateParamsNonStreaming; + + before(async () => { + await getEvents( + { + instructions: [ + { + text: 'This is a random instruction', + instruction_type: 'user_instruction', + }, + 
], + }, + async (conversationSimulator) => { + body = conversationSimulator.body; + + await conversationSimulator.next({ + function_call: { name: 'my_action', arguments: JSON.stringify({ foo: 'bar' }) }, + }); + await conversationSimulator.complete(); + } + ); + }); + + it('includes the instruction in the system message', async () => { + expect(body.messages[0].content).to.contain('This is a random instruction'); + }); + }); + + describe('with openai format', () => { + let responseBody: string; + + before(async () => { + responseBody = await getOpenAIResponse(async (conversationSimulator) => { + await conversationSimulator.next('Hello'); + await conversationSimulator.complete(); + }); + }); + + function extractDataParts(lines: string[]) { + return lines.map((line) => { + // .replace is easier, but we want to verify here whether + // it matches the SSE syntax (`data: ...`) + const [, dataPart] = line.match(/^data: (.*)$/) || ['', '']; + return dataPart.trim(); + }); + } + + function getLines() { + return responseBody.split('\n\n').filter(Boolean); + } + + it('outputs each line in an SSE-compatible format (data: ...)', () => { + const lines = getLines(); + + lines.forEach((line) => { + expect(line.match(/^data: /)); + }); + }); + + it('outputs one chunk, and one [DONE] event', () => { + const dataParts = extractDataParts(getLines()); + + expect(dataParts[0]).not.to.be.empty(); + expect(dataParts[1]).to.be('[DONE]'); + }); + + it('outputs an OpenAI-compatible chunk', () => { + const [dataLine] = extractDataParts(getLines()); + + expect(() => { + JSON.parse(dataLine); + }).not.to.throwException(); + + const parsedChunk = JSON.parse(dataLine); + + expect(parsedChunk).to.eql({ + model: 'unknown', + choices: [ + { + delta: { + content: 'Hello', + }, + finish_reason: null, + index: 0, + }, + ], + object: 'chat.completion.chunk', + // just test that these are a string and a number + id: String(parsedChunk.id), + created: Number(parsedChunk.created), + }); + }); + }); + }); +}
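
For reference, every suite above follows the same serverless, role-based request pattern: create an API key scoped to a predefined role via `svlUserManager`, build the internal request header via `svlCommonApi`, pass both to `observabilityAIAssistantAPIClient.slsUser`, and invalidate the key afterwards. The sketch below is illustrative only and is not part of the diff; it reuses the services, import paths, and the connectors endpoint shown in the suites above, and the suite name is hypothetical.

// Minimal sketch of the role-based request pattern shared by these suites (illustrative, not part of the patch).
import expect from '@kbn/expect';
import { FtrProviderContext } from '../../common/ftr_provider_context';
import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services';

export default function ApiTest({ getService }: FtrProviderContext) {
  const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient');
  const svlUserManager = getService('svlUserManager');
  const svlCommonApi = getService('svlCommonApi');

  describe('example: role-scoped serverless request (illustrative)', () => {
    let roleAuthc: RoleCredentials;
    let internalReqHeader: InternalRequestHeader;

    before(async () => {
      // Role-scoped API keys replace the custom users used by the stateful tests
      roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('editor');
      internalReqHeader = svlCommonApi.getInternalRequestHeader();
    });

    after(async () => {
      // Always invalidate the key so it does not leak between suites
      await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc);
    });

    it('lists connectors with the editor role', async () => {
      const res = await observabilityAIAssistantAPIClient
        .slsUser({
          endpoint: 'GET /internal/observability_ai_assistant/connectors',
          roleAuthc,
          internalReqHeader,
        })
        .expect(200);

      expect(res.body).to.be.an('array');
    });
  });
}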