Skip to content

Commit

Permalink
[8.x] OpenAI connector: send default model for "other" open…
Browse files Browse the repository at this point in the history
…AI provider (#204934) (#205107)

# Backport

This will backport the following commits from `main` to `8.x`:
- [OpenAI connector: send default model for "other" openAI
provider (#204934)](#204934)

<!--- Backport version: 9.4.3 -->

### Questions?
Please refer to the [Backport tool
documentation](https://github.com/sqren/backport)

<!--BACKPORT [{"author":{"name":"Pierre
Gayvallet","email":"[email protected]"},"sourceCommit":{"committedDate":"2024-12-23T16:16:33Z","message":"OpenAI
connector: send default model for \"other\" openAI provider
(#204934)\n\n## Summary\r\n\r\nPart of
https://github.com/elastic/kibana/issues/204116\r\n\r\nWhen model is not
present in the payload, use the default model as\r\nspecified in the
connector configuration.\r\n\r\nWe were already doing that for
OpenAI-OpenAI, but not for\r\n\"Other\"-OpenAI.\r\n\r\n### Some section
because I downloaded ollama just for that issue\r\n\r\n<img
width=\"950\" alt=\"Screenshot 2024-12-19 at 13 53
48\"\r\nsrc=\"https://github.com/user-attachments/assets/4a6e4b35-a0c5-46e5-9372-677e99d070f8\"\r\n/>\r\n\r\n<img
width=\"769\" alt=\"Screenshot 2024-12-19 at 13 54
54\"\r\nsrc=\"https://github.com/user-attachments/assets/a0a5a12a-ea1e-42b7-8fa1-6531bef5ae6c\"\r\n/>","sha":"d4bc9befdef02a80545e65507c15780fcf6933f5","branchLabelMapping":{"^v9.0.0$":"main","^v8.18.0$":"8.x","^v(\\d+).(\\d+).\\d+$":"$1.$2"}},"sourcePullRequest":{"labels":["release_note:skip","v9.0.0","backport:version","Team:AI
Infra","v8.18.0"],"title":"OpenAI connector: send default model for
\"other\" openAI
provider","number":204934,"url":"https://github.com/elastic/kibana/pull/204934","mergeCommit":{"message":"OpenAI
connector: send default model for \"other\" openAI provider
(#204934)\n\n## Summary\r\n\r\nPart of
https://github.com/elastic/kibana/issues/204116\r\n\r\nWhen model is not
present in the payload, use the default model as\r\nspecified in the
connector configuration.\r\n\r\nWe were already doing that for
OpenAI-OpenAI, but not for\r\n\"Other\"-OpenAI.\r\n\r\n### Some section
because I downloaded ollama just for that issue\r\n\r\n<img
width=\"950\" alt=\"Screenshot 2024-12-19 at 13 53
48\"\r\nsrc=\"https://github.com/user-attachments/assets/4a6e4b35-a0c5-46e5-9372-677e99d070f8\"\r\n/>\r\n\r\n<img
width=\"769\" alt=\"Screenshot 2024-12-19 at 13 54
54\"\r\nsrc=\"https://github.com/user-attachments/assets/a0a5a12a-ea1e-42b7-8fa1-6531bef5ae6c\"\r\n/>","sha":"d4bc9befdef02a80545e65507c15780fcf6933f5"}},"sourceBranch":"main","suggestedTargetBranches":["8.x"],"targetPullRequestStates":[{"branch":"main","label":"v9.0.0","branchLabelMappingKey":"^v9.0.0$","isSourceBranch":true,"state":"MERGED","url":"https://github.com/elastic/kibana/pull/204934","number":204934,"mergeCommit":{"message":"OpenAI
connector: send default model for \"other\" openAI provider
(#204934)\n\n## Summary\r\n\r\nPart of
https://github.com/elastic/kibana/issues/204116\r\n\r\nWhen model is not
present in the payload, use the default model as\r\nspecified in the
connector configuration.\r\n\r\nWe were already doing that for
OpenAI-OpenAI, but not for\r\n\"Other\"-OpenAI.\r\n\r\n### Some section
because I downloaded ollama just for that issue\r\n\r\n<img
width=\"950\" alt=\"Screenshot 2024-12-19 at 13 53
48\"\r\nsrc=\"https://github.com/user-attachments/assets/4a6e4b35-a0c5-46e5-9372-677e99d070f8\"\r\n/>\r\n\r\n<img
width=\"769\" alt=\"Screenshot 2024-12-19 at 13 54
54\"\r\nsrc=\"https://github.com/user-attachments/assets/a0a5a12a-ea1e-42b7-8fa1-6531bef5ae6c\"\r\n/>","sha":"d4bc9befdef02a80545e65507c15780fcf6933f5"}},{"branch":"8.x","label":"v8.18.0","branchLabelMappingKey":"^v8.18.0$","isSourceBranch":false,"state":"NOT_CREATED"}]}]
BACKPORT-->

Co-authored-by: Pierre Gayvallet <[email protected]>
  • Loading branch information
kibanamachine and pgayvallet authored Dec 23, 2024
1 parent 3d88e86 commit eda8fc8
Show file tree
Hide file tree
Showing 4 changed files with 58 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -112,5 +112,42 @@ describe('Other (OpenAI Compatible Service) Utils', () => {
const sanitizedBodyString = getRequestWithStreamOption(bodyString, false);
expect(sanitizedBodyString).toEqual(bodyString);
});

it('sets model parameter if specified and not present in the body', () => {
  // Request body deliberately has no "model" field — the configured
  // default should be injected by getRequestWithStreamOption.
  const requestBody = {
    messages: [{ role: 'user', content: 'This is a test' }],
  };

  const result = getRequestWithStreamOption(JSON.stringify(requestBody), true, 'llama-3');

  // The default model is added alongside the stream flag.
  expect(JSON.parse(result)).toEqual({
    messages: [{ content: 'This is a test', role: 'user' }],
    model: 'llama-3',
    stream: true,
  });
});

// Fix: test title had a grammar error ("does not overrides").
it('does not override model parameter if present in the body', () => {
  // Body already specifies a model; the connector's default model
  // must NOT clobber the caller-provided one.
  const body = {
    model: 'mistral',
    messages: [
      {
        role: 'user',
        content: 'This is a test',
      },
    ],
  };

  const sanitizedBodyString = getRequestWithStreamOption(JSON.stringify(body), true, 'llama-3');
  // Expect the original model ('mistral'), not the default ('llama-3').
  expect(JSON.parse(sanitizedBodyString)).toEqual({
    messages: [{ content: 'This is a test', role: 'user' }],
    model: 'mistral',
    stream: true,
  });
});
});
});
Original file line number Diff line number Diff line change
Expand Up @@ -23,13 +23,19 @@ export const sanitizeRequest = (body: string): string => {
* The stream parameter is accepted in the ChatCompletion
* API and the Completion API only
*/
export const getRequestWithStreamOption = (body: string, stream: boolean): string => {
export const getRequestWithStreamOption = (
body: string,
stream: boolean,
defaultModel?: string
): string => {
try {
const jsonBody = JSON.parse(body);
if (jsonBody) {
jsonBody.stream = stream;
}

if (defaultModel && !jsonBody.model) {
jsonBody.model = defaultModel;
}
return JSON.stringify(jsonBody);
} catch (err) {
// swallow the error
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -111,9 +111,19 @@ describe('Utils', () => {
});

it('calls other_openai_utils getRequestWithStreamOption when provider is Other OpenAi', () => {
  // Exercise the provider dispatcher with the "Other" provider and a
  // connector-level default model.
  const defaultModel = 'default-model';
  getRequestWithStreamOption(
    OpenAiProviderType.Other,
    OPENAI_CHAT_URL,
    bodyString,
    true,
    defaultModel
  );

  // Only the "other" OpenAI helper should be invoked, and it must be
  // handed the default model so it can fill in a missing body.model.
  expect(mockOtherOpenAiGetRequestWithStreamOption).toHaveBeenCalledWith(
    bodyString,
    true,
    defaultModel
  );
  expect(mockOpenAiGetRequestWithStreamOption).not.toHaveBeenCalled();
  expect(mockAzureAiGetRequestWithStreamOption).not.toHaveBeenCalled();
});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ export function getRequestWithStreamOption(
case OpenAiProviderType.AzureAi:
return azureAiGetRequestWithStreamOption(url, body, stream);
case OpenAiProviderType.Other:
return otherOpenAiGetRequestWithStreamOption(body, stream);
return otherOpenAiGetRequestWithStreamOption(body, stream, defaultModel);
default:
return body;
}
Expand Down

0 comments on commit eda8fc8

Please sign in to comment.