
Commit

chore: lint files
bracesproul committed Jul 25, 2024
1 parent 8e3fb90 commit 6015579
Showing 3 changed files with 35 additions and 23 deletions.
18 changes: 10 additions & 8 deletions libs/langchain-groq/src/chat_models.ts
@@ -207,17 +207,17 @@ function groqResponseToChatMessage(
   }
 }
 
-function _convertDeltaToolCallToToolCallChunk(toolCalls?: ChatCompletionsAPI.ChatCompletionChunk.Choice.Delta.ToolCall[]): ToolCallChunk[] | undefined {
+function _convertDeltaToolCallToToolCallChunk(
+  toolCalls?: ChatCompletionsAPI.ChatCompletionChunk.Choice.Delta.ToolCall[]
+): ToolCallChunk[] | undefined {
   if (!toolCalls?.length) return undefined;
 
-  return toolCalls.map((tc) => {
-    return {
+  return toolCalls.map((tc) => ({
     id: tc.id,
     name: tc.function?.name,
     args: tc.function?.arguments,
-    type: "tool_call_chunk"
-  }
-  })
+    type: "tool_call_chunk",
+  }));
 }
 
 function _convertDeltaToMessageChunk(
@@ -359,7 +359,9 @@ export class ChatGroq extends BaseChatModel<
   async completionWithRetry(
     request: ChatCompletionCreateParams,
     options?: OpenAICoreRequestOptions
-  ): Promise<AsyncIterable<ChatCompletionsAPI.ChatCompletionChunk> | ChatCompletion> {
+  ): Promise<
+    AsyncIterable<ChatCompletionsAPI.ChatCompletionChunk> | ChatCompletion
+  > {
     return this.caller.call(async () =>
       this.client.chat.completions.create(request, options)
     );
@@ -505,7 +507,7 @@ export class ChatGroq extends BaseChatModel<
           completion_tokens: completionTokens,
           prompt_tokens: promptTokens,
           total_tokens: totalTokens,
-        } = data.usage as CompletionsAPI.CompletionUsage
+        } = data.usage as CompletionsAPI.CompletionUsage;
 
         if (completionTokens) {
           tokenUsage.completionTokens =
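
For reference, the tool-call-chunk conversion touched by the first hunk above can be read as the standalone sketch below. The DeltaToolCall and ToolCallChunk interfaces here are simplified stand-ins for the groq-sdk and @langchain/core types, not the actual imports used in chat_models.ts.

// Minimal sketch of the helper reformatted above, with local stand-in types.
interface DeltaToolCall {
  id?: string;
  function?: { name?: string; arguments?: string };
}

interface ToolCallChunk {
  id?: string;
  name?: string;
  args?: string;
  type: "tool_call_chunk";
}

function convertDeltaToolCallToToolCallChunk(
  toolCalls?: DeltaToolCall[]
): ToolCallChunk[] | undefined {
  if (!toolCalls?.length) return undefined;

  // Each streamed delta carries a partial JSON string in `arguments`;
  // downstream code concatenates chunks to rebuild the complete call.
  return toolCalls.map((tc) => ({
    id: tc.id,
    name: tc.function?.name,
    args: tc.function?.arguments,
    type: "tool_call_chunk",
  }));
}

// Example: one streamed delta becomes one chunk with a partial args string.
const chunks = convertDeltaToolCallToToolCallChunk([
  { id: "call_1", function: { name: "get_current_weather", arguments: '{"loc' } },
]);
console.log(chunks);
// [{ id: "call_1", name: "get_current_weather", args: '{"loc', type: "tool_call_chunk" }]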
38 changes: 24 additions & 14 deletions libs/langchain-groq/src/tests/chat_models.int.test.ts
@@ -1,9 +1,14 @@
 import { test } from "@jest/globals";
-import { AIMessage, AIMessageChunk, HumanMessage, ToolMessage } from "@langchain/core/messages";
-import { ChatGroq } from "../chat_models.js";
+import {
+  AIMessage,
+  AIMessageChunk,
+  HumanMessage,
+  ToolMessage,
+} from "@langchain/core/messages";
 import { tool } from "@langchain/core/tools";
 import { z } from "zod";
 import { concat } from "@langchain/core/utils/stream";
+import { ChatGroq } from "../chat_models.js";
 
 test("invoke", async () => {
   const chat = new ChatGroq({
@@ -205,19 +210,24 @@ test("Groq can stream tool calls", async () => {
     temperature: 0,
   });
 
-  const weatherTool = tool((_) => {
-    return "The temperature is 24 degrees with hail.";
-  }, {
-    name: "get_current_weather",
-    schema: z.object({
-      location: z.string().describe("The location to get the current weather for."),
-    }),
-    description: "Get the current weather in a given location.",
-  })
+  const weatherTool = tool(
+    (_) => "The temperature is 24 degrees with hail.",
+    {
+      name: "get_current_weather",
+      schema: z.object({
+        location: z
+          .string()
+          .describe("The location to get the current weather for."),
+      }),
+      description: "Get the current weather in a given location.",
+    }
+  );
 
   const modelWithTools = model.bindTools([weatherTool]);
 
-  const stream = await modelWithTools.stream("What is the weather in San Francisco?");
+  const stream = await modelWithTools.stream(
+    "What is the weather in San Francisco?"
+  );
 
   let finalMessage: AIMessageChunk | undefined;
   for await (const chunk of stream) {
@@ -231,5 +241,5 @@ test("Groq can stream tool calls", async () => {
   if (!finalMessage.tool_calls?.[0]) return;
 
   expect(finalMessage.tool_calls?.[0].name).toBe("get_current_weather");
-  expect(finalMessage.tool_calls?.[0].args).toHaveProperty("location")
-})
+  expect(finalMessage.tool_calls?.[0].args).toHaveProperty("location");
+});
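
The test above exercises the standard tool-calling flow with ChatGroq. A standalone sketch of that flow is shown below; it assumes GROQ_API_KEY is set in the environment and an ESM context for top-level await, the model name is only an example, and the chunk-merging loop is reconstructed here since that part of the test is collapsed in the diff.

import { ChatGroq } from "@langchain/groq";
import { tool } from "@langchain/core/tools";
import { concat } from "@langchain/core/utils/stream";
import { AIMessageChunk } from "@langchain/core/messages";
import { z } from "zod";

// Reads GROQ_API_KEY from the environment.
const model = new ChatGroq({ model: "llama-3.1-70b-versatile", temperature: 0 });

const weatherTool = tool(
  (_) => "The temperature is 24 degrees with hail.",
  {
    name: "get_current_weather",
    schema: z.object({
      location: z.string().describe("The location to get the current weather for."),
    }),
    description: "Get the current weather in a given location.",
  }
);

const modelWithTools = model.bindTools([weatherTool]);

const stream = await modelWithTools.stream("What is the weather in San Francisco?");

// Merge streamed chunks so partial tool-call fragments are concatenated
// into complete tool calls on the final message.
let finalMessage: AIMessageChunk | undefined;
for await (const chunk of stream) {
  finalMessage = finalMessage ? concat(finalMessage, chunk) : chunk;
}

console.log(finalMessage?.tool_calls);
// e.g. [{ name: "get_current_weather", args: { location: "San Francisco" }, ... }]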
@@ -19,7 +19,7 @@ class ChatGroqStandardIntegrationTests extends ChatModelIntegrationTests<
       chatModelHasToolCalling: true,
       chatModelHasStructuredOutput: true,
       constructorArgs: {
-        model: "llama3-groq-70b-8192-tool-use-preview",
+        model: "llama-3.1-70b-versatile",
       },
     });
   }
