From 0e18ab0a5ddc8e6f846c8a388644ff69ce28b07c Mon Sep 17 00:00:00 2001
From: Jacob Lee
Date: Tue, 14 Nov 2023 21:02:03 -0800
Subject: [PATCH] Allow custom system prompt for Ollama functions (#3264)

---
 .../integrations/chat/ollama_functions.mdx    | 10 +++
 .../chat/ollama_functions/custom_prompt.ts    | 64 +++++++++++++++++++
 .../chat_models/ollama_functions.ts           |  2 +-
 3 files changed, 75 insertions(+), 1 deletion(-)
 create mode 100644 examples/src/models/chat/ollama_functions/custom_prompt.ts

diff --git a/docs/core_docs/docs/integrations/chat/ollama_functions.mdx b/docs/core_docs/docs/integrations/chat/ollama_functions.mdx
index d1f2072322d0..eb66ba383283 100644
--- a/docs/core_docs/docs/integrations/chat/ollama_functions.mdx
+++ b/docs/core_docs/docs/integrations/chat/ollama_functions.mdx
@@ -43,3 +43,13 @@ import OllamaFunctionsExtraction from "@examples/models/chat/ollama_functions/ex
 {OllamaFunctionsExtraction}
 
 You can see a LangSmith trace of what this looks like here: https://smith.langchain.com/public/31457ea4-71ca-4e29-a1e0-aa80e6828883/r
+
+## Customization
+
+Behind the scenes, this uses Ollama's JSON mode to constrain output to JSON, then passes the tool schemas into the prompt as JSON Schema.
+
+Because different models have different strengths, it may be helpful to pass in your own system prompt. Here's an example:
+
+import OllamaFunctionsCustomPrompt from "@examples/models/chat/ollama_functions/custom_prompt.ts";
+
+{OllamaFunctionsCustomPrompt}
diff --git a/examples/src/models/chat/ollama_functions/custom_prompt.ts b/examples/src/models/chat/ollama_functions/custom_prompt.ts
new file mode 100644
index 000000000000..0c646b9bdae4
--- /dev/null
+++ b/examples/src/models/chat/ollama_functions/custom_prompt.ts
@@ -0,0 +1,64 @@
+import { OllamaFunctions } from "langchain/experimental/chat_models/ollama_functions";
+import { HumanMessage } from "langchain/schema";
+import { PromptTemplate } from "langchain/prompts";
+
+// Custom system prompt to format tools. You must encourage the model
+// to wrap output in a JSON object with "tool" and "tool_input" properties.
+const toolSystemPrompt =
+  PromptTemplate.fromTemplate(`You have access to the following tools:
+
+{tools}
+
+To use a tool, respond with a JSON object with the following structure:
+{{
+  "tool": <name of the called tool>,
+  "tool_input": <parameters for the tool matching the above JSON schema>
+}}`);
+
+const model = new OllamaFunctions({
+  temperature: 0.1,
+  model: "mistral",
+  toolSystemPrompt,
+}).bind({
+  functions: [
+    {
+      name: "get_current_weather",
+      description: "Get the current weather in a given location",
+      parameters: {
+        type: "object",
+        properties: {
+          location: {
+            type: "string",
+            description: "The city and state, e.g. San Francisco, CA",
+          },
+          unit: { type: "string", enum: ["celsius", "fahrenheit"] },
+        },
+        required: ["location"],
+      },
+    },
+  ],
+  // You can set the `function_call` arg to force the model to use a function
+  function_call: {
+    name: "get_current_weather",
+  },
+});
+
+const response = await model.invoke([
+  new HumanMessage({
+    content: "What's the weather in Boston?",
+  }),
+]);
+
+console.log(response);
+
+/*
+  AIMessage {
+    content: '',
+    additional_kwargs: {
+      function_call: {
+        name: 'get_current_weather',
+        arguments: '{"location":"Boston, MA","unit":"fahrenheit"}'
+      }
+    }
+  }
+*/
diff --git a/langchain/src/experimental/chat_models/ollama_functions.ts b/langchain/src/experimental/chat_models/ollama_functions.ts
index 406eb6deea26..ed96beff9050 100644
--- a/langchain/src/experimental/chat_models/ollama_functions.ts
+++ b/langchain/src/experimental/chat_models/ollama_functions.ts
@@ -93,7 +93,7 @@ export class OllamaFunctions extends BaseChatModel