Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

OpenAI connector: send default model for "other" openAI provider #204934

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -112,5 +112,42 @@ describe('Other (OpenAI Compatible Service) Utils', () => {
const sanitizedBodyString = getRequestWithStreamOption(bodyString, false);
expect(sanitizedBodyString).toEqual(bodyString);
});

it('sets model parameter if specified and not present in the body', () => {
  // Body deliberately omits `model` so the default should be injected.
  const requestBody = {
    messages: [{ role: 'user', content: 'This is a test' }],
  };

  const result = getRequestWithStreamOption(JSON.stringify(requestBody), true, 'llama-3');

  // The returned payload gains both the stream flag and the default model.
  expect(JSON.parse(result)).toEqual({
    messages: [{ content: 'This is a test', role: 'user' }],
    model: 'llama-3',
    stream: true,
  });
});

// Fix: test description grammar — "does not overrides" → "does not override".
it('does not override model parameter if present in the body', () => {
  // Body already carries a `model`; the default must not clobber it.
  const body = {
    model: 'mistral',
    messages: [
      {
        role: 'user',
        content: 'This is a test',
      },
    ],
  };

  const sanitizedBodyString = getRequestWithStreamOption(JSON.stringify(body), true, 'llama-3');
  expect(JSON.parse(sanitizedBodyString)).toEqual({
    messages: [{ content: 'This is a test', role: 'user' }],
    model: 'mistral', // caller-supplied model wins over the 'llama-3' default
    stream: true,
  });
});
});
});
Original file line number Diff line number Diff line change
Expand Up @@ -23,13 +23,19 @@ export const sanitizeRequest = (body: string): string => {
* The stream parameter is accepted in the ChatCompletion
* API and the Completion API only
*/
export const getRequestWithStreamOption = (body: string, stream: boolean): string => {
export const getRequestWithStreamOption = (
body: string,
stream: boolean,
defaultModel?: string
): string => {
try {
const jsonBody = JSON.parse(body);
if (jsonBody) {
jsonBody.stream = stream;
}

if (defaultModel && !jsonBody.model) {
jsonBody.model = defaultModel;
}
return JSON.stringify(jsonBody);
} catch (err) {
// swallow the error
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -111,9 +111,19 @@ describe('Utils', () => {
});

// Fix: this span contained diff-rendering residue — the removed (pre-change)
// call and assertion were duplicated alongside their added replacements.
// Collapsed to the post-change version only.
it('calls other_openai_utils getRequestWithStreamOption when provider is Other OpenAi', () => {
  getRequestWithStreamOption(
    OpenAiProviderType.Other,
    OPENAI_CHAT_URL,
    bodyString,
    true,
    'default-model'
  );

  // The Other-provider helper receives the default model; the OpenAI and
  // Azure helpers must not be touched.
  expect(mockOtherOpenAiGetRequestWithStreamOption).toHaveBeenCalledWith(
    bodyString,
    true,
    'default-model'
  );
  expect(mockOpenAiGetRequestWithStreamOption).not.toHaveBeenCalled();
  expect(mockAzureAiGetRequestWithStreamOption).not.toHaveBeenCalled();
});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ export function getRequestWithStreamOption(
case OpenAiProviderType.AzureAi:
return azureAiGetRequestWithStreamOption(url, body, stream);
case OpenAiProviderType.Other:
return otherOpenAiGetRequestWithStreamOption(body, stream);
return otherOpenAiGetRequestWithStreamOption(body, stream, defaultModel);
default:
return body;
}
Expand Down
Loading