From bd59d1bae1250ce5561e5375607de23b99c1be32 Mon Sep 17 00:00:00 2001 From: bracesproul Date: Tue, 30 Jul 2024 11:29:18 -0700 Subject: [PATCH 1/7] scripts[minor]: Add CLI for creating integration docs --- .../docs/integrations/chat/openai.ipynb | 300 ++++++++++++++++++ libs/langchain-scripts/package.json | 3 +- libs/langchain-scripts/src/cli/docs/chat.ts | 67 ++++ libs/langchain-scripts/src/cli/docs/index.ts | 43 +++ .../src/cli/docs/templates}/chat.ipynb | 31 +- .../src/cli/utils/get-input.ts | 21 ++ yarn.lock | 10 +- 7 files changed, 457 insertions(+), 18 deletions(-) create mode 100644 docs/core_docs/docs/integrations/chat/openai.ipynb create mode 100644 libs/langchain-scripts/src/cli/docs/chat.ts create mode 100644 libs/langchain-scripts/src/cli/docs/index.ts rename libs/{create-langchain-integration/docs => langchain-scripts/src/cli/docs/templates}/chat.ipynb (82%) create mode 100644 libs/langchain-scripts/src/cli/utils/get-input.ts diff --git a/docs/core_docs/docs/integrations/chat/openai.ipynb b/docs/core_docs/docs/integrations/chat/openai.ipynb new file mode 100644 index 000000000000..4469b3c7744f --- /dev/null +++ b/docs/core_docs/docs/integrations/chat/openai.ipynb @@ -0,0 +1,300 @@ +{ + "cells": [ + { + "cell_type": "raw", + "id": "afaf8039", + "metadata": {}, + "source": [ + "---\n", + "sidebar_label: ChatOpenAI\n", + "---" + ] + }, + { + "cell_type": "markdown", + "id": "e49f1e0d", + "metadata": {}, + "source": [ + "# ChatOpenAI\n", + "\n", + "- TODO: Make sure API reference link is correct.\n", + "\n", + "This will help you getting started with ChatOpenAI [chat models](/docs/concepts/#chat-models). For detailed documentation of all ChatOpenAI features and configurations head to the [API reference](https://api.js.langchain.com/classes/langchain_openai.ChatOpenAI.html).\n", + "\n", + "- TODO: Add any other relevant links, like information about models, prices, context windows, etc. 
See https://js.langchain.com/v0.2/docs/integrations/chat/openai/ for an example.\n", + "\n", + "## Overview\n", + "### Integration details\n", + "\n", + "- TODO: Fill in table features.\n", + "- TODO: Remove PY support link if not relevant, otherwise ensure link is correct.\n", + "- TODO: Make sure API reference links are correct.\n", + "\n", + "| Class | Package | Local | Serializable | [PY support](https:/py.langchain.com/v0.2/docs/integrations/chat/openai) | Package downloads | Package latest |\n", + "| :--- | :--- | :---: | :---: | :---: | :---: | :---: |\n", + "| [ChatOpenAI](https://api.js.langchain.com/classes/langchain_openai.ChatOpenAI.html) | [@langchain/openai](https://api.js.langchain.com/modules/langchain_openai.html) | ✅/❌ | beta/❌ | ✅/❌ | ![NPM - Downloads](https://img.shields.io/npm/dm/@langchain/openai?style=flat-square&label=%20) | ![NPM - Version](https://img.shields.io/npm/v/@langchain/openai?style=flat-square&label=%20) |\n", + "\n", + "### Model features\n", + "| [Tool calling](/docs/how_to/tool_calling) | [Structured output](/docs/how_to/structured_output/) | JSON mode | [Image input](/docs/how_to/multimodal_inputs/) | Audio input | Video input | [Token-level streaming](/docs/how_to/chat_streaming/) | Native async | [Token usage](/docs/how_to/chat_token_usage_tracking/) | [Logprobs](/docs/how_to/logprobs/) |\n", + "| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |\n", + "| ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | \n", + "\n", + "## Setup\n", + "\n", + "- TODO: Update with relevant info.\n", + "\n", + "To access ChatOpenAI models you'll need to create a/an ChatOpenAI account, get an API key, and install the `@langchain/openai` integration package.\n", + "\n", + "### Credentials\n", + "\n", + "- TODO: Update with relevant info.\n", + "\n", + "Head to (TODO: link) to sign up to ChatOpenAI and generate an API key. 
Once you've done this set the OPENAI_API_KEY environment variable:\n", + "\n", + "```{=mdx}\n", + "\n", + "```bash\n", + "export OPENAI_API_KEY=\"your-api-key\"\n", + "```\n", + "\n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "72ee0c4b-9764-423a-9dbf-95129e185210", + "metadata": {}, + "source": [ + "If you want to get automated tracing of your model calls you can also set your [LangSmith](https://docs.smith.langchain.com/) API key by uncommenting below:\n", + "\n", + "```{=mdx}\n", + "\n", + "```bash\n", + "# export LANGCHAIN_TRACING_V2=\"true\"\n", + "# export LANGCHAIN_API_KEY=\"your-api-key\"\n", + "```\n", + "\n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "0730d6a1-c893-4840-9817-5e5251676d5d", + "metadata": {}, + "source": [ + "### Installation\n", + "\n", + "The LangChain ChatOpenAI integration lives in the `@langchain/openai` package:\n", + "\n", + "```{=mdx}\n", + "\n", + "```bash npm2yarn\n", + "npm i @langchain/openai\n", + "```\n", + "\n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "a38cde65-254d-4219-a441-068766c0d4b5", + "metadata": {}, + "source": [ + "## Instantiation\n", + "\n", + "Now we can instantiate our model object and generate chat completions:" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "cb09c344-1836-4e0c-acf8-11d13ac1dbae", + "metadata": {}, + "outputs": [], + "source": [ + "import { ChatOpenAI } from \"@langchain/openai\" \n", + "\n", + "const llm = new ChatOpenAI({\n", + " model: \"gpt-4o\",\n", + " temperature: 0,\n", + " maxTokens: undefined,\n", + " timeout: undefined,\n", + " maxRetries: 2,\n", + " // other params...\n", + "})" + ] + }, + { + "cell_type": "markdown", + "id": "2b4f3e15", + "metadata": {}, + "source": [ + "## Invocation" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "62e0dbc3", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "AIMessage {\n", + " \"id\": \"chatcmpl-9qlrhSDIt1X2EaRf7juBxTo6zit5u\",\n", + " \"content\": \"J'adore la programmation.\",\n", + " \"additional_kwargs\": {},\n", + " \"response_metadata\": {\n", + " \"tokenUsage\": {\n", + " \"completionTokens\": 5,\n", + " \"promptTokens\": 31,\n", + " \"totalTokens\": 36\n", + " },\n", + " \"finish_reason\": \"stop\",\n", + " \"system_fingerprint\": \"fp_4e2b2da518\"\n", + " },\n", + " \"tool_calls\": [],\n", + " \"invalid_tool_calls\": [],\n", + " \"usage_metadata\": {\n", + " \"input_tokens\": 31,\n", + " \"output_tokens\": 5,\n", + " \"total_tokens\": 36\n", + " }\n", + "}\n" + ] + } + ], + "source": [ + "const aiMsg = await llm.invoke([\n", + " [\n", + " \"system\",\n", + " \"You are a helpful assistant that translates English to French. 
Translate the user sentence.\",\n", + " ],\n", + " [\"human\", \"I love programming.\"],\n", + "])\n", + "aiMsg" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d86145b3-bfef-46e8-b227-4dda5c9c2705", + "metadata": {}, + "outputs": [], + "source": [ + "console.log(aiMsg.content)" + ] + }, + { + "cell_type": "markdown", + "id": "18e2bfc0-7e78-4528-a73f-499ac150dca8", + "metadata": {}, + "source": [ + "## Chaining\n", + "\n", + "We can [chain](/docs/how_to/sequence/) our model with a prompt template like so:" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "e197d1d7-a070-4c96-9f8a-a0e86d046e0b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "AIMessage {\n", + " \"id\": \"chatcmpl-9qlr4a1l5wf1jCPjmUtTR6Tfd38SK\",\n", + " \"content\": \"Ich liebe Programmieren.\",\n", + " \"additional_kwargs\": {},\n", + " \"response_metadata\": {\n", + " \"tokenUsage\": {\n", + " \"completionTokens\": 5,\n", + " \"promptTokens\": 26,\n", + " \"totalTokens\": 31\n", + " },\n", + " \"finish_reason\": \"stop\",\n", + " \"system_fingerprint\": \"fp_4e2b2da518\"\n", + " },\n", + " \"tool_calls\": [],\n", + " \"invalid_tool_calls\": [],\n", + " \"usage_metadata\": {\n", + " \"input_tokens\": 26,\n", + " \"output_tokens\": 5,\n", + " \"total_tokens\": 31\n", + " }\n", + "}\n" + ] + } + ], + "source": [ + "import { ChatPromptTemplate } from \"@langchain/core/prompts\"\n", + "\n", + "const prompt = ChatPromptTemplate.fromMessages(\n", + " [\n", + " [\n", + " \"system\",\n", + " \"You are a helpful assistant that translates {input_language} to {output_language}.\",\n", + " ],\n", + " [\"human\", \"{input}\"],\n", + " ]\n", + ")\n", + "\n", + "const chain = prompt.pipe(llm);\n", + "await chain.invoke(\n", + " {\n", + " input_language: \"English\",\n", + " output_language: \"German\",\n", + " input: \"I love programming.\",\n", + " }\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "d1ee55bc-ffc8-4cfa-801c-993953a08cfd", + "metadata": {}, + "source": [ + "## TODO: Any functionality specific to this model provider\n", + "\n", + "E.g. creating/using finetuned models via this provider. Delete if not relevant." 
+ ] + }, + { + "cell_type": "markdown", + "id": "3a5bb5ca-c3ae-4a58-be67-2cd18574b9a3", + "metadata": {}, + "source": [ + "## API reference\n", + "\n", + "For detailed documentation of all ChatOpenAI features and configurations head to the API reference: https://api.js.langchain.com/classes/langchain_openai.ChatOpenAI.html" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "TypeScript", + "language": "typescript", + "name": "tslab" + }, + "language_info": { + "codemirror_mode": { + "mode": "typescript", + "name": "javascript", + "typescript": true + }, + "file_extension": ".ts", + "mimetype": "text/typescript", + "name": "typescript", + "version": "3.7.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/libs/langchain-scripts/package.json b/libs/langchain-scripts/package.json index 388c5222a2d6..c8f2e624e1fc 100644 --- a/libs/langchain-scripts/package.json +++ b/libs/langchain-scripts/package.json @@ -41,8 +41,9 @@ "dependencies": { "@rollup/wasm-node": "^4.19.0", "axios": "^1.6.7", - "commander": "^11.1.0", + "commander": "^12.1.0", "glob": "^10.3.10", + "readline": "^1.3.0", "rimraf": "^5.0.1", "rollup": "^4.5.2", "ts-morph": "^21.0.1", diff --git a/libs/langchain-scripts/src/cli/docs/chat.ts b/libs/langchain-scripts/src/cli/docs/chat.ts new file mode 100644 index 000000000000..c8e16ef54b73 --- /dev/null +++ b/libs/langchain-scripts/src/cli/docs/chat.ts @@ -0,0 +1,67 @@ +import * as path from "node:path"; +import * as fs from "node:fs"; + +const PACKAGE_NAME_PLACEHOLDER = "__package_name__"; +const PACKAGE_NAME_SHORT_SNAKE_CASE_PLACEHOLDER = "__package_name_short_snake_case__"; +const PACKAGE_NAME_SNAKE_CASE_PLACEHOLDER = "__package_name_snake_case__"; +const PACKAGE_NAME_PRETTY_PLACEHOLDER = "__package_name_pretty__"; +const MODULE_NAME_PLACEHOLDER = "__ModuleName__"; +// This should not be prefixed with `Chat` as it's used for API keys. +const MODULE_NAME_ALL_CAPS_PLACEHOLDER = "__MODULE_NAME_ALL_CAPS__"; + +const API_REF_BASE_PACKAGE_URL = `https://api.js.langchain.com/modules/langchain_${PACKAGE_NAME_PLACEHOLDER}.html` +const API_REF_BASE_MODULE_URL = `https://api.js.langchain.com/classes/langchain_${PACKAGE_NAME_PLACEHOLDER}.${MODULE_NAME_PLACEHOLDER}.html` +const TEMPLATE_PATH = path.resolve("./src/cli/docs/templates/chat.ipynb"); +const INTEGRATIONS_DOCS_PATH = path.resolve("../../docs/core_docs/docs/integrations/chat"); + +const fetchAPIRefUrl = async (url: string): Promise => { + try { + const res = await fetch(url); + if (res.status !== 200) { + throw new Error(`API Reference URL ${url} not found.`); + } + return true; + } catch (_) { + return false; + } +} + +export async function fillChatIntegrationDocTemplate(fields: { + packageName: string; + moduleName: string; +}) { + const formattedApiRefPackageUrl = API_REF_BASE_PACKAGE_URL.replace(PACKAGE_NAME_PLACEHOLDER, fields.packageName); + const formattedApiRefModuleUrl = API_REF_BASE_MODULE_URL + .replace(PACKAGE_NAME_PLACEHOLDER, fields.packageName) + .replace(MODULE_NAME_PLACEHOLDER, fields.moduleName); + + const success = await Promise.all([ + fetchAPIRefUrl(formattedApiRefPackageUrl), + fetchAPIRefUrl(formattedApiRefModuleUrl) + ]); + if (success.some((s) => s === false)) { + console.error("Invalid package or module name. 
API reference not found."); + } + + let docTemplate = await fs.promises.readFile(TEMPLATE_PATH, "utf-8"); + const packageNameShortSnakeCase = fields.packageName.replaceAll("-", "_"); + const fullPackageNameSnakeCase = `langchain_${packageNameShortSnakeCase}`; + const packageNamePretty = `@langchain/${fields.packageName}`; + let moduleNameAllCaps = fields.moduleName.toUpperCase(); + if (moduleNameAllCaps.startsWith("CHAT")) { + moduleNameAllCaps = moduleNameAllCaps.replace("CHAT", ""); + } + + // Replace all instances of __package_name__ with fields.packageName + docTemplate = docTemplate.replaceAll(PACKAGE_NAME_PLACEHOLDER, fields.packageName); + docTemplate = docTemplate.replaceAll(PACKAGE_NAME_SNAKE_CASE_PLACEHOLDER, fullPackageNameSnakeCase); + docTemplate = docTemplate.replaceAll(PACKAGE_NAME_SHORT_SNAKE_CASE_PLACEHOLDER, packageNameShortSnakeCase); + docTemplate = docTemplate.replaceAll(PACKAGE_NAME_PRETTY_PLACEHOLDER, packageNamePretty); + + docTemplate = docTemplate.replaceAll(MODULE_NAME_PLACEHOLDER, fields.moduleName); + docTemplate = docTemplate.replaceAll(MODULE_NAME_ALL_CAPS_PLACEHOLDER, moduleNameAllCaps); + + await fs.promises.writeFile(path.join(INTEGRATIONS_DOCS_PATH, packageNameShortSnakeCase), docTemplate); +} + +fillChatIntegrationDocTemplate({ packageName: "openai", moduleName: "ChatOpenAI" }); \ No newline at end of file diff --git a/libs/langchain-scripts/src/cli/docs/index.ts b/libs/langchain-scripts/src/cli/docs/index.ts new file mode 100644 index 000000000000..132192d08372 --- /dev/null +++ b/libs/langchain-scripts/src/cli/docs/index.ts @@ -0,0 +1,43 @@ +// --------------------------------------------- +// CLI for creating integration docs. +// --------------------------------------------- +import { Command } from "commander"; +import { fillChatIntegrationDocTemplate } from "./chat.js"; + +type CLIInput = { + package: string; + module: string; + type: string; +} + +async function main() { + const program = new Command(); + program + .description("Create a new chat model integration docs.") + .option("--package ", "Package name, eg openai. Should be value of @langchain/") + .option("--module ", "Module name, e.g ChatOpenAI") + .option("--type ", "Type of integration, e.g. 'chat'") + + program.parse(); + + const options = program.opts(); + + let { package: packageName, module: moduleName, type } = options; + + if (packageName.startsWith("@langchain/")) { + packageName = packageName.replace("@langchain/", ""); + } + + switch (type) { + case "chat": + await fillChatIntegrationDocTemplate({ packageName, moduleName }); + break; + default: + console.error(`Invalid type: ${type}.\nOnly 'chat' is supported at this time.`); + process.exit(1); + } +} + +if (require.main === module) { + main(); +} \ No newline at end of file diff --git a/libs/create-langchain-integration/docs/chat.ipynb b/libs/langchain-scripts/src/cli/docs/templates/chat.ipynb similarity index 82% rename from libs/create-langchain-integration/docs/chat.ipynb rename to libs/langchain-scripts/src/cli/docs/templates/chat.ipynb index 76af6036af47..d6129a8cffbb 100644 --- a/libs/create-langchain-integration/docs/chat.ipynb +++ b/libs/langchain-scripts/src/cli/docs/templates/chat.ipynb @@ -15,11 +15,11 @@ "id": "e49f1e0d", "metadata": {}, "source": [ - "# Chat__ModuleName__\n", + "# __ModuleName__\n", "\n", "- TODO: Make sure API reference link is correct.\n", "\n", - "This will help you getting started with __ModuleName__ [chat models](/docs/concepts/#chat-models). 
For detailed documentation of all Chat__ModuleName__ features and configurations head to the [API reference](https://api.js.langchain.com/classes/langchain___package_name__.__ModuleName__.html).\n", + "This will help you getting started with __ModuleName__ [chat models](/docs/concepts/#chat-models). For detailed documentation of all __ModuleName__ features and configurations head to the [API reference](https://api.js.langchain.com/classes/__package_name_snake_case__.__ModuleName__.html).\n", "\n", "- TODO: Add any other relevant links, like information about models, prices, context windows, etc. See https://js.langchain.com/v0.2/docs/integrations/chat/openai/ for an example.\n", "\n", @@ -30,9 +30,9 @@ "- TODO: Remove PY support link if not relevant, otherwise ensure link is correct.\n", "- TODO: Make sure API reference links are correct.\n", "\n", - "| Class | Package | Local | Serializable | [PY support](https:/py.langchain.com/v0.2/docs/integrations/chat/__package_name_short_snake__) | Package downloads | Package latest |\n", + "| Class | Package | Local | Serializable | [PY support](https:/py.langchain.com/v0.2/docs/integrations/chat/__package_name_short_snake_case__) | Package downloads | Package latest |\n", "| :--- | :--- | :---: | :---: | :---: | :---: | :---: |\n", - "| [Chat__ModuleName__](https://api.js.langchain.com/classes/langchain___PackageName__.__ModuleName__.html) | [__package_name__](https://api.js.langchain.com/modules/__package_snake_name__.html) | ✅/❌ | beta/❌ | ✅/❌ | ![NPM - Downloads](https://img.shields.io/npm/dm/__package_name__?style=flat-square&label=%20) | ![NPM - Version](https://img.shields.io/npm/v/__package_name__?style=flat-square&label=%20) |\n", + "| [__ModuleName__](https://api.js.langchain.com/classes/__package_name_snake_case__.__ModuleName__.html) | [__package_name_pretty__](https://api.js.langchain.com/modules/__package_name_snake_case__.html) | ✅/❌ | beta/❌ | ✅/❌ | ![NPM - Downloads](https://img.shields.io/npm/dm/__package_name_pretty__?style=flat-square&label=%20) | ![NPM - Version](https://img.shields.io/npm/v/__package_name_pretty__?style=flat-square&label=%20) |\n", "\n", "### Model features\n", "| [Tool calling](/docs/how_to/tool_calling) | [Structured output](/docs/how_to/structured_output/) | JSON mode | [Image input](/docs/how_to/multimodal_inputs/) | Audio input | Video input | [Token-level streaming](/docs/how_to/chat_streaming/) | Native async | [Token usage](/docs/how_to/chat_token_usage_tracking/) | [Logprobs](/docs/how_to/logprobs/) |\n", @@ -43,18 +43,18 @@ "\n", "- TODO: Update with relevant info.\n", "\n", - "To access __ModuleName__ models you'll need to create a/an __ModuleName__ account, get an API key, and install the `__package_name__` integration package.\n", + "To access __ModuleName__ models you'll need to create a/an __ModuleName__ account, get an API key, and install the `__package_name_pretty__` integration package.\n", "\n", "### Credentials\n", "\n", "- TODO: Update with relevant info.\n", "\n", - "Head to (TODO: link) to sign up to __ModuleName__ and generate an API key. Once you've done this set the __MODULE_NAME___API_KEY environment variable:\n", + "Head to (TODO: link) to sign up to __ModuleName__ and generate an API key. 
Once you've done this set the __MODULE_NAME_ALL_CAPS___API_KEY environment variable:\n", "\n", "```{=mdx}\n", "\n", "```bash\n", - "export __MODULE_NAME___API_KEY=\"your-api-key\"\n", + "export __MODULE_NAME_ALL_CAPS___API_KEY=\"your-api-key\"\n", "```\n", "\n", "```" @@ -84,12 +84,12 @@ "source": [ "### Installation\n", "\n", - "The LangChain __ModuleName__ integration lives in the `__package_name__` package:\n", + "The LangChain __ModuleName__ integration lives in the `__package_name_pretty__` package:\n", "\n", "```{=mdx}\n", "\n", "```bash npm2yarn\n", - "npm i __package_name__\n", + "npm i __package_name_pretty__\n", "```\n", "\n", "```" @@ -118,12 +118,12 @@ }, "outputs": [], "source": [ - "import { Chat__ModuleName__ } from \"__module_name__\" \n", + "import { __ModuleName__ } from \"__package_name_pretty__\" \n", "\n", - "const llm = new Chat__ModuleName__({\n", + "const llm = new __ModuleName__({\n", " model: \"model-name\",\n", " temperature: 0,\n", - " max_tokens: undefined,\n", + " maxTokens: undefined,\n", " timeout: undefined,\n", " maxRetries: 2,\n", " // other params...\n", @@ -152,14 +152,13 @@ }, "outputs": [], "source": [ - "const messages = [\n", + "const aiMsg = await llm.invoke([\n", " [\n", " \"system\",\n", " \"You are a helpful assistant that translates English to French. Translate the user sentence.\",\n", " ],\n", " [\"human\", \"I love programming.\"],\n", - "]\n", - "const aiMsg = await llm.invoke(messages)\n", + "])\n", "aiMsg" ] }, @@ -239,7 +238,7 @@ "source": [ "## API reference\n", "\n", - "For detailed documentation of all Chat__ModuleName__ features and configurations head to the API reference: https://api.js.langchain.com/classes/langchain___package_name__.__ModuleName__.html" + "For detailed documentation of all __ModuleName__ features and configurations head to the API reference: https://api.js.langchain.com/classes/__package_name_snake_case__.__ModuleName__.html" ] } ], diff --git a/libs/langchain-scripts/src/cli/utils/get-input.ts b/libs/langchain-scripts/src/cli/utils/get-input.ts new file mode 100644 index 000000000000..a4c9f32dffd6 --- /dev/null +++ b/libs/langchain-scripts/src/cli/utils/get-input.ts @@ -0,0 +1,21 @@ +import * as readline from 'readline'; + +/** + * Prompts the user with a question and returns the user input. + * + * @param {string} question The question to log to the users terminal. + * @returns {Promise} The user input. 
+ */ +export async function getUserInput(question: string): Promise { + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout + }); + + return new Promise((resolve) => { + rl.question(`\x1b[30m\x1b[47m${question}\x1b[0m`, (input) => { + rl.close(); + resolve(input); + }); + }); +} \ No newline at end of file diff --git a/yarn.lock b/yarn.lock index d64172ea9af6..11d19f903088 100644 --- a/yarn.lock +++ b/yarn.lock @@ -12341,7 +12341,7 @@ __metadata: "@typescript-eslint/eslint-plugin": ^6.12.0 "@typescript-eslint/parser": ^6.12.0 axios: ^1.6.7 - commander: ^11.1.0 + commander: ^12.1.0 dotenv: ^16.3.1 dpdm: ^3.12.0 eslint: ^8.33.0 @@ -12354,6 +12354,7 @@ __metadata: jest: ^29.5.0 jest-environment-node: ^29.6.4 prettier: ^2.8.3 + readline: ^1.3.0 release-it: ^15.10.1 rimraf: ^5.0.1 rollup: ^4.5.2 @@ -22547,6 +22548,13 @@ __metadata: languageName: node linkType: hard +"commander@npm:^12.1.0": + version: 12.1.0 + resolution: "commander@npm:12.1.0" + checksum: 68e9818b00fc1ed9cdab9eb16905551c2b768a317ae69a5e3c43924c2b20ac9bb65b27e1cab36aeda7b6496376d4da908996ba2c0b5d79463e0fb1e77935d514 + languageName: node + linkType: hard + "commander@npm:^4.0.0": version: 4.1.1 resolution: "commander@npm:4.1.1" From 24c73f2c7dcca1b8f2fb1d02f515941a81a97b8d Mon Sep 17 00:00:00 2001 From: bracesproul Date: Tue, 30 Jul 2024 11:33:37 -0700 Subject: [PATCH 2/7] implemented script --- libs/langchain-scripts/package.json | 3 +- libs/langchain-scripts/src/cli/docs/chat.ts | 69 +++++++++++++------ libs/langchain-scripts/src/cli/docs/index.ts | 19 ++--- .../src/cli/utils/get-input.ts | 8 +-- 4 files changed, 64 insertions(+), 35 deletions(-) diff --git a/libs/langchain-scripts/package.json b/libs/langchain-scripts/package.json index c8f2e624e1fc..5b56e54f6479 100644 --- a/libs/langchain-scripts/package.json +++ b/libs/langchain-scripts/package.json @@ -34,7 +34,8 @@ "test:single": "NODE_OPTIONS=--experimental-vm-modules yarn run jest --config jest.config.cjs --testTimeout 100000", "test:int": "NODE_OPTIONS=--experimental-vm-modules jest --testPathPattern=\\.int\\.test.ts --testTimeout 100000 --maxWorkers=50%", "format": "prettier --write \"src\"", - "format:check": "prettier --check \"src\"" + "format:check": "prettier --check \"src\"", + "create:integration:doc": "node dist/cli/docs/index.js" }, "author": "LangChain", "license": "MIT", diff --git a/libs/langchain-scripts/src/cli/docs/chat.ts b/libs/langchain-scripts/src/cli/docs/chat.ts index c8e16ef54b73..56f72424635a 100644 --- a/libs/langchain-scripts/src/cli/docs/chat.ts +++ b/libs/langchain-scripts/src/cli/docs/chat.ts @@ -2,17 +2,20 @@ import * as path from "node:path"; import * as fs from "node:fs"; const PACKAGE_NAME_PLACEHOLDER = "__package_name__"; -const PACKAGE_NAME_SHORT_SNAKE_CASE_PLACEHOLDER = "__package_name_short_snake_case__"; +const PACKAGE_NAME_SHORT_SNAKE_CASE_PLACEHOLDER = + "__package_name_short_snake_case__"; const PACKAGE_NAME_SNAKE_CASE_PLACEHOLDER = "__package_name_snake_case__"; const PACKAGE_NAME_PRETTY_PLACEHOLDER = "__package_name_pretty__"; const MODULE_NAME_PLACEHOLDER = "__ModuleName__"; // This should not be prefixed with `Chat` as it's used for API keys. 
const MODULE_NAME_ALL_CAPS_PLACEHOLDER = "__MODULE_NAME_ALL_CAPS__"; -const API_REF_BASE_PACKAGE_URL = `https://api.js.langchain.com/modules/langchain_${PACKAGE_NAME_PLACEHOLDER}.html` -const API_REF_BASE_MODULE_URL = `https://api.js.langchain.com/classes/langchain_${PACKAGE_NAME_PLACEHOLDER}.${MODULE_NAME_PLACEHOLDER}.html` +const API_REF_BASE_PACKAGE_URL = `https://api.js.langchain.com/modules/langchain_${PACKAGE_NAME_PLACEHOLDER}.html`; +const API_REF_BASE_MODULE_URL = `https://api.js.langchain.com/classes/langchain_${PACKAGE_NAME_PLACEHOLDER}.${MODULE_NAME_PLACEHOLDER}.html`; const TEMPLATE_PATH = path.resolve("./src/cli/docs/templates/chat.ipynb"); -const INTEGRATIONS_DOCS_PATH = path.resolve("../../docs/core_docs/docs/integrations/chat"); +const INTEGRATIONS_DOCS_PATH = path.resolve( + "../../docs/core_docs/docs/integrations/chat" +); const fetchAPIRefUrl = async (url: string): Promise => { try { @@ -24,25 +27,29 @@ const fetchAPIRefUrl = async (url: string): Promise => { } catch (_) { return false; } -} +}; export async function fillChatIntegrationDocTemplate(fields: { packageName: string; moduleName: string; }) { - const formattedApiRefPackageUrl = API_REF_BASE_PACKAGE_URL.replace(PACKAGE_NAME_PLACEHOLDER, fields.packageName); - const formattedApiRefModuleUrl = API_REF_BASE_MODULE_URL - .replace(PACKAGE_NAME_PLACEHOLDER, fields.packageName) - .replace(MODULE_NAME_PLACEHOLDER, fields.moduleName); - + const formattedApiRefPackageUrl = API_REF_BASE_PACKAGE_URL.replace( + PACKAGE_NAME_PLACEHOLDER, + fields.packageName + ); + const formattedApiRefModuleUrl = API_REF_BASE_MODULE_URL.replace( + PACKAGE_NAME_PLACEHOLDER, + fields.packageName + ).replace(MODULE_NAME_PLACEHOLDER, fields.moduleName); + const success = await Promise.all([ fetchAPIRefUrl(formattedApiRefPackageUrl), - fetchAPIRefUrl(formattedApiRefModuleUrl) + fetchAPIRefUrl(formattedApiRefModuleUrl), ]); if (success.some((s) => s === false)) { console.error("Invalid package or module name. 
API reference not found."); } - + let docTemplate = await fs.promises.readFile(TEMPLATE_PATH, "utf-8"); const packageNameShortSnakeCase = fields.packageName.replaceAll("-", "_"); const fullPackageNameSnakeCase = `langchain_${packageNameShortSnakeCase}`; @@ -52,16 +59,34 @@ export async function fillChatIntegrationDocTemplate(fields: { moduleNameAllCaps = moduleNameAllCaps.replace("CHAT", ""); } - // Replace all instances of __package_name__ with fields.packageName - docTemplate = docTemplate.replaceAll(PACKAGE_NAME_PLACEHOLDER, fields.packageName); - docTemplate = docTemplate.replaceAll(PACKAGE_NAME_SNAKE_CASE_PLACEHOLDER, fullPackageNameSnakeCase); - docTemplate = docTemplate.replaceAll(PACKAGE_NAME_SHORT_SNAKE_CASE_PLACEHOLDER, packageNameShortSnakeCase); - docTemplate = docTemplate.replaceAll(PACKAGE_NAME_PRETTY_PLACEHOLDER, packageNamePretty); + docTemplate = docTemplate.replaceAll( + PACKAGE_NAME_PLACEHOLDER, + fields.packageName + ); + docTemplate = docTemplate.replaceAll( + PACKAGE_NAME_SNAKE_CASE_PLACEHOLDER, + fullPackageNameSnakeCase + ); + docTemplate = docTemplate.replaceAll( + PACKAGE_NAME_SHORT_SNAKE_CASE_PLACEHOLDER, + packageNameShortSnakeCase + ); + docTemplate = docTemplate.replaceAll( + PACKAGE_NAME_PRETTY_PLACEHOLDER, + packageNamePretty + ); - docTemplate = docTemplate.replaceAll(MODULE_NAME_PLACEHOLDER, fields.moduleName); - docTemplate = docTemplate.replaceAll(MODULE_NAME_ALL_CAPS_PLACEHOLDER, moduleNameAllCaps); + docTemplate = docTemplate.replaceAll( + MODULE_NAME_PLACEHOLDER, + fields.moduleName + ); + docTemplate = docTemplate.replaceAll( + MODULE_NAME_ALL_CAPS_PLACEHOLDER, + moduleNameAllCaps + ); - await fs.promises.writeFile(path.join(INTEGRATIONS_DOCS_PATH, packageNameShortSnakeCase), docTemplate); + await fs.promises.writeFile( + path.join(INTEGRATIONS_DOCS_PATH, `${packageNameShortSnakeCase}.ipynb`), + docTemplate + ); } - -fillChatIntegrationDocTemplate({ packageName: "openai", moduleName: "ChatOpenAI" }); \ No newline at end of file diff --git a/libs/langchain-scripts/src/cli/docs/index.ts b/libs/langchain-scripts/src/cli/docs/index.ts index 132192d08372..df6e55782330 100644 --- a/libs/langchain-scripts/src/cli/docs/index.ts +++ b/libs/langchain-scripts/src/cli/docs/index.ts @@ -8,15 +8,18 @@ type CLIInput = { package: string; module: string; type: string; -} +}; async function main() { const program = new Command(); program .description("Create a new chat model integration docs.") - .option("--package ", "Package name, eg openai. Should be value of @langchain/") + .option( + "--package ", + "Package name, eg openai. Should be value of @langchain/" + ) .option("--module ", "Module name, e.g ChatOpenAI") - .option("--type ", "Type of integration, e.g. 'chat'") + .option("--type ", "Type of integration, e.g. 
'chat'"); program.parse(); @@ -33,11 +36,11 @@ async function main() { await fillChatIntegrationDocTemplate({ packageName, moduleName }); break; default: - console.error(`Invalid type: ${type}.\nOnly 'chat' is supported at this time.`); + console.error( + `Invalid type: ${type}.\nOnly 'chat' is supported at this time.` + ); process.exit(1); - } + } } -if (require.main === module) { - main(); -} \ No newline at end of file +main(); diff --git a/libs/langchain-scripts/src/cli/utils/get-input.ts b/libs/langchain-scripts/src/cli/utils/get-input.ts index a4c9f32dffd6..f8bca08bf593 100644 --- a/libs/langchain-scripts/src/cli/utils/get-input.ts +++ b/libs/langchain-scripts/src/cli/utils/get-input.ts @@ -1,15 +1,15 @@ -import * as readline from 'readline'; +import * as readline from "readline"; /** * Prompts the user with a question and returns the user input. - * + * * @param {string} question The question to log to the users terminal. * @returns {Promise} The user input. */ export async function getUserInput(question: string): Promise { const rl = readline.createInterface({ input: process.stdin, - output: process.stdout + output: process.stdout, }); return new Promise((resolve) => { @@ -18,4 +18,4 @@ export async function getUserInput(question: string): Promise { resolve(input); }); }); -} \ No newline at end of file +} From fa71ccd0f93bfc26a9a0d03c69a5b6828a147413 Mon Sep 17 00:00:00 2001 From: bracesproul Date: Tue, 30 Jul 2024 11:56:03 -0700 Subject: [PATCH 3/7] wrap up cli --- libs/langchain-scripts/src/cli/docs/chat.ts | 146 +++++++++++++++++- .../src/cli/docs/templates/chat.ipynb | 8 +- 2 files changed, 147 insertions(+), 7 deletions(-) diff --git a/libs/langchain-scripts/src/cli/docs/chat.ts b/libs/langchain-scripts/src/cli/docs/chat.ts index 56f72424635a..89d2e67ab4c8 100644 --- a/libs/langchain-scripts/src/cli/docs/chat.ts +++ b/libs/langchain-scripts/src/cli/docs/chat.ts @@ -1,5 +1,6 @@ import * as path from "node:path"; import * as fs from "node:fs"; +import { getUserInput } from "../utils/get-input.js"; const PACKAGE_NAME_PLACEHOLDER = "__package_name__"; const PACKAGE_NAME_SHORT_SNAKE_CASE_PLACEHOLDER = @@ -10,6 +11,19 @@ const MODULE_NAME_PLACEHOLDER = "__ModuleName__"; // This should not be prefixed with `Chat` as it's used for API keys. const MODULE_NAME_ALL_CAPS_PLACEHOLDER = "__MODULE_NAME_ALL_CAPS__"; +const TOOL_CALLING_PLACEHOLDER = "__tool_calling__"; +const JSON_MODE_PLACEHOLDER = "__json_mode__"; +const IMAGE_INPUT_PLACEHOLDER = "__image_input__"; +const AUDIO_INPUT_PLACEHOLDER = "__audio_input__"; +const VIDEO_INPUT_PLACEHOLDER = "__video_input__"; +const TOKEN_LEVEL_STREAMING_PLACEHOLDER = "__token_level_streaming__"; +const TOKEN_USAGE_PLACEHOLDER = "__token_usage__"; +const LOGPROBS_PLACEHOLDER = "__logprobs__"; + +const SERIALIZABLE_PLACEHOLDER = "__serializable__"; +const LOCAL_PLACEHOLDER = "__local__"; +const PY_SUPPORT_PLACEHOLDER = "__py_support__"; + const API_REF_BASE_PACKAGE_URL = `https://api.js.langchain.com/modules/langchain_${PACKAGE_NAME_PLACEHOLDER}.html`; const API_REF_BASE_MODULE_URL = `https://api.js.langchain.com/classes/langchain_${PACKAGE_NAME_PLACEHOLDER}.${MODULE_NAME_PLACEHOLDER}.html`; const TEMPLATE_PATH = path.resolve("./src/cli/docs/templates/chat.ipynb"); @@ -29,10 +43,86 @@ const fetchAPIRefUrl = async (url: string): Promise => { } }; +type ExtraFields = { + /** + * If tool calling is true, structured output will also be true. 
+ */ + toolCalling: boolean; + jsonMode: boolean; + imageInput: boolean; + audioInput: boolean; + videoInput: boolean; + tokenLevelStreaming: boolean; + tokenUsage: boolean; + logprobs: boolean; + local: boolean; + serializable: boolean; + pySupport: boolean; +}; + +async function promptExtraFields(): Promise { + const hasToolCalling = await getUserInput( + "Does the tool support tool calling? (y/n) " + ); + const hasJsonMode = await getUserInput( + "Does the tool support JSON mode? (y/n) " + ); + const hasImageInput = await getUserInput( + "Does the tool support image input? (y/n) " + ); + const hasAudioInput = await getUserInput( + "Does the tool support audio input? (y/n) " + ); + const hasVideoInput = await getUserInput( + "Does the tool support video input? (y/n) " + ); + const hasTokenLevelStreaming = await getUserInput( + "Does the tool support token level streaming? (y/n) " + ); + const hasTokenUsage = await getUserInput( + "Does the tool support token usage? (y/n) " + ); + const hasLogprobs = await getUserInput( + "Does the tool support logprobs? (y/n) " + ); + const hasLocal = await getUserInput( + "Does the tool support local usage? (y/n) " + ); + const hasSerializable = await getUserInput( + "Does the tool support serializable output? (y/n) " + ); + const hasPySupport = await getUserInput( + "Does the tool support Python support? (y/n) " + ); + + return { + toolCalling: hasToolCalling.toLowerCase() === "y", + jsonMode: hasJsonMode.toLowerCase() === "y", + imageInput: hasImageInput.toLowerCase() === "y", + audioInput: hasAudioInput.toLowerCase() === "y", + videoInput: hasVideoInput.toLowerCase() === "y", + tokenLevelStreaming: hasTokenLevelStreaming.toLowerCase() === "y", + tokenUsage: hasTokenUsage.toLowerCase() === "y", + logprobs: hasLogprobs.toLowerCase() === "y", + local: hasLocal.toLowerCase() === "y", + serializable: hasSerializable.toLowerCase() === "y", + pySupport: hasPySupport.toLowerCase() === "y", + }; +} + export async function fillChatIntegrationDocTemplate(fields: { packageName: string; moduleName: string; }) { + // Ask the user if they'd like to fill in extra fields, if so, prompt them. + let extraFields: ExtraFields | undefined; + const shouldPromptExtraFields = await getUserInput( + "Would you like to fill out optional fields? (y/n) " + ); + if (shouldPromptExtraFields.toLowerCase() === "y") { + extraFields = await promptExtraFields(); + } + const formattedApiRefPackageUrl = API_REF_BASE_PACKAGE_URL.replace( PACKAGE_NAME_PLACEHOLDER, fields.packageName @@ -85,8 +175,58 @@ export async function fillChatIntegrationDocTemplate(fields: { moduleNameAllCaps ); - await fs.promises.writeFile( - path.join(INTEGRATIONS_DOCS_PATH, `${packageNameShortSnakeCase}.ipynb`), - docTemplate + if (extraFields) { + docTemplate.replaceAll( + TOOL_CALLING_PLACEHOLDER, + extraFields.toolCalling ? "✅" : "❌" + ); + docTemplate.replaceAll( + JSON_MODE_PLACEHOLDER, + extraFields.jsonMode ? "✅" : "❌" + ); + docTemplate.replaceAll( + IMAGE_INPUT_PLACEHOLDER, + extraFields.imageInput ? "✅" : "❌" + ); + docTemplate.replaceAll( + AUDIO_INPUT_PLACEHOLDER, + extraFields.audioInput ? "✅" : "❌" + ); + docTemplate.replaceAll( + VIDEO_INPUT_PLACEHOLDER, + extraFields.videoInput ? "✅" : "❌" + ); + docTemplate.replaceAll( + TOKEN_LEVEL_STREAMING_PLACEHOLDER, + extraFields.tokenLevelStreaming ? "✅" : "❌" + ); + docTemplate.replaceAll( + TOKEN_USAGE_PLACEHOLDER, + extraFields.tokenUsage ? "✅" : "❌" + ); + docTemplate.replaceAll( + LOGPROBS_PLACEHOLDER, + extraFields.logprobs ? 
"✅" : "❌" + ); + docTemplate.replaceAll(LOCAL_PLACEHOLDER, extraFields.local ? "✅" : "❌"); + docTemplate.replaceAll( + SERIALIZABLE_PLACEHOLDER, + extraFields.serializable ? "✅" : "❌" + ); + docTemplate.replaceAll( + PY_SUPPORT_PLACEHOLDER, + extraFields.pySupport ? "✅" : "❌" + ); + } + + const docPath = path.join( + INTEGRATIONS_DOCS_PATH, + `${packageNameShortSnakeCase}.ipynb` + ); + await fs.promises.writeFile(docPath, docTemplate); + + console.log( + "Successfully created new chat model integration doc at ${docPath}." + + "Please run the cells in the doc to record the outputs, and replace the Python documentation support URL with the proper URL." ); } diff --git a/libs/langchain-scripts/src/cli/docs/templates/chat.ipynb b/libs/langchain-scripts/src/cli/docs/templates/chat.ipynb index d6129a8cffbb..8aaca9a8d14c 100644 --- a/libs/langchain-scripts/src/cli/docs/templates/chat.ipynb +++ b/libs/langchain-scripts/src/cli/docs/templates/chat.ipynb @@ -32,12 +32,12 @@ "\n", "| Class | Package | Local | Serializable | [PY support](https:/py.langchain.com/v0.2/docs/integrations/chat/__package_name_short_snake_case__) | Package downloads | Package latest |\n", "| :--- | :--- | :---: | :---: | :---: | :---: | :---: |\n", - "| [__ModuleName__](https://api.js.langchain.com/classes/__package_name_snake_case__.__ModuleName__.html) | [__package_name_pretty__](https://api.js.langchain.com/modules/__package_name_snake_case__.html) | ✅/❌ | beta/❌ | ✅/❌ | ![NPM - Downloads](https://img.shields.io/npm/dm/__package_name_pretty__?style=flat-square&label=%20) | ![NPM - Version](https://img.shields.io/npm/v/__package_name_pretty__?style=flat-square&label=%20) |\n", + "| [__ModuleName__](https://api.js.langchain.com/classes/__package_name_snake_case__.__ModuleName__.html) | [__package_name_pretty__](https://api.js.langchain.com/modules/__package_name_snake_case__.html) | __local__ | __serializable__ | __py_support__ | ![NPM - Downloads](https://img.shields.io/npm/dm/__package_name_pretty__?style=flat-square&label=%20) | ![NPM - Version](https://img.shields.io/npm/v/__package_name_pretty__?style=flat-square&label=%20) |\n", "\n", "### Model features\n", - "| [Tool calling](/docs/how_to/tool_calling) | [Structured output](/docs/how_to/structured_output/) | JSON mode | [Image input](/docs/how_to/multimodal_inputs/) | Audio input | Video input | [Token-level streaming](/docs/how_to/chat_streaming/) | Native async | [Token usage](/docs/how_to/chat_token_usage_tracking/) | [Logprobs](/docs/how_to/logprobs/) |\n", - "| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |\n", - "| ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | \n", + "| [Tool calling](/docs/how_to/tool_calling) | [Structured output](/docs/how_to/structured_output/) | JSON mode | [Image input](/docs/how_to/multimodal_inputs/) | Audio input | Video input | [Token-level streaming](/docs/how_to/chat_streaming/) | [Token usage](/docs/how_to/chat_token_usage_tracking/) | [Logprobs](/docs/how_to/logprobs/) |\n", + "| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |\n", + "| __tool_calling__ | __tool_calling__ | __json_mode__ | __image_input__ | __audio_input__ | __video_input__ | __token_level_streaming__ | __token_usage__ | __logprobs__ | \n", "\n", "## Setup\n", "\n", From 5320181b2168e563f2df412e7c374d9e47178736 Mon Sep 17 00:00:00 2001 From: bracesproul Date: Tue, 30 Jul 2024 11:58:45 -0700 Subject: [PATCH 4/7] lint/format --- libs/langchain-scripts/src/cli/docs/chat.ts | 2 +- 
libs/langchain-scripts/src/cli/docs/index.ts | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/libs/langchain-scripts/src/cli/docs/chat.ts b/libs/langchain-scripts/src/cli/docs/chat.ts index 89d2e67ab4c8..1c4797889b02 100644 --- a/libs/langchain-scripts/src/cli/docs/chat.ts +++ b/libs/langchain-scripts/src/cli/docs/chat.ts @@ -226,7 +226,7 @@ export async function fillChatIntegrationDocTemplate(fields: { await fs.promises.writeFile(docPath, docTemplate); console.log( - "Successfully created new chat model integration doc at ${docPath}." + + `Successfully created new chat model integration doc at ${docPath}.` + "Please run the cells in the doc to record the outputs, and replace the Python documentation support URL with the proper URL." ); } diff --git a/libs/langchain-scripts/src/cli/docs/index.ts b/libs/langchain-scripts/src/cli/docs/index.ts index df6e55782330..e8d802ace3f7 100644 --- a/libs/langchain-scripts/src/cli/docs/index.ts +++ b/libs/langchain-scripts/src/cli/docs/index.ts @@ -25,7 +25,8 @@ async function main() { const options = program.opts(); - let { package: packageName, module: moduleName, type } = options; + const { module: moduleName, type } = options; + let { package: packageName } = options; if (packageName.startsWith("@langchain/")) { packageName = packageName.replace("@langchain/", ""); @@ -43,4 +44,6 @@ async function main() { } } -main(); +main().catch((err) => { + throw err; +}); From 84ff973e10f7e992e489998e6075fa79734a10b4 Mon Sep 17 00:00:00 2001 From: bracesproul Date: Tue, 30 Jul 2024 12:05:05 -0700 Subject: [PATCH 5/7] fix --- .../docs/integrations/chat/openai.ipynb | 300 ------------------ libs/langchain-scripts/src/cli/docs/chat.ts | 94 ++---- 2 files changed, 26 insertions(+), 368 deletions(-) delete mode 100644 docs/core_docs/docs/integrations/chat/openai.ipynb diff --git a/docs/core_docs/docs/integrations/chat/openai.ipynb b/docs/core_docs/docs/integrations/chat/openai.ipynb deleted file mode 100644 index 4469b3c7744f..000000000000 --- a/docs/core_docs/docs/integrations/chat/openai.ipynb +++ /dev/null @@ -1,300 +0,0 @@ -{ - "cells": [ - { - "cell_type": "raw", - "id": "afaf8039", - "metadata": {}, - "source": [ - "---\n", - "sidebar_label: ChatOpenAI\n", - "---" - ] - }, - { - "cell_type": "markdown", - "id": "e49f1e0d", - "metadata": {}, - "source": [ - "# ChatOpenAI\n", - "\n", - "- TODO: Make sure API reference link is correct.\n", - "\n", - "This will help you getting started with ChatOpenAI [chat models](/docs/concepts/#chat-models). For detailed documentation of all ChatOpenAI features and configurations head to the [API reference](https://api.js.langchain.com/classes/langchain_openai.ChatOpenAI.html).\n", - "\n", - "- TODO: Add any other relevant links, like information about models, prices, context windows, etc. 
See https://js.langchain.com/v0.2/docs/integrations/chat/openai/ for an example.\n", - "\n", - "## Overview\n", - "### Integration details\n", - "\n", - "- TODO: Fill in table features.\n", - "- TODO: Remove PY support link if not relevant, otherwise ensure link is correct.\n", - "- TODO: Make sure API reference links are correct.\n", - "\n", - "| Class | Package | Local | Serializable | [PY support](https:/py.langchain.com/v0.2/docs/integrations/chat/openai) | Package downloads | Package latest |\n", - "| :--- | :--- | :---: | :---: | :---: | :---: | :---: |\n", - "| [ChatOpenAI](https://api.js.langchain.com/classes/langchain_openai.ChatOpenAI.html) | [@langchain/openai](https://api.js.langchain.com/modules/langchain_openai.html) | ✅/❌ | beta/❌ | ✅/❌ | ![NPM - Downloads](https://img.shields.io/npm/dm/@langchain/openai?style=flat-square&label=%20) | ![NPM - Version](https://img.shields.io/npm/v/@langchain/openai?style=flat-square&label=%20) |\n", - "\n", - "### Model features\n", - "| [Tool calling](/docs/how_to/tool_calling) | [Structured output](/docs/how_to/structured_output/) | JSON mode | [Image input](/docs/how_to/multimodal_inputs/) | Audio input | Video input | [Token-level streaming](/docs/how_to/chat_streaming/) | Native async | [Token usage](/docs/how_to/chat_token_usage_tracking/) | [Logprobs](/docs/how_to/logprobs/) |\n", - "| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |\n", - "| ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | ✅/❌ | \n", - "\n", - "## Setup\n", - "\n", - "- TODO: Update with relevant info.\n", - "\n", - "To access ChatOpenAI models you'll need to create a/an ChatOpenAI account, get an API key, and install the `@langchain/openai` integration package.\n", - "\n", - "### Credentials\n", - "\n", - "- TODO: Update with relevant info.\n", - "\n", - "Head to (TODO: link) to sign up to ChatOpenAI and generate an API key. 
Once you've done this set the OPENAI_API_KEY environment variable:\n", - "\n", - "```{=mdx}\n", - "\n", - "```bash\n", - "export OPENAI_API_KEY=\"your-api-key\"\n", - "```\n", - "\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "72ee0c4b-9764-423a-9dbf-95129e185210", - "metadata": {}, - "source": [ - "If you want to get automated tracing of your model calls you can also set your [LangSmith](https://docs.smith.langchain.com/) API key by uncommenting below:\n", - "\n", - "```{=mdx}\n", - "\n", - "```bash\n", - "# export LANGCHAIN_TRACING_V2=\"true\"\n", - "# export LANGCHAIN_API_KEY=\"your-api-key\"\n", - "```\n", - "\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "0730d6a1-c893-4840-9817-5e5251676d5d", - "metadata": {}, - "source": [ - "### Installation\n", - "\n", - "The LangChain ChatOpenAI integration lives in the `@langchain/openai` package:\n", - "\n", - "```{=mdx}\n", - "\n", - "```bash npm2yarn\n", - "npm i @langchain/openai\n", - "```\n", - "\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "a38cde65-254d-4219-a441-068766c0d4b5", - "metadata": {}, - "source": [ - "## Instantiation\n", - "\n", - "Now we can instantiate our model object and generate chat completions:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "cb09c344-1836-4e0c-acf8-11d13ac1dbae", - "metadata": {}, - "outputs": [], - "source": [ - "import { ChatOpenAI } from \"@langchain/openai\" \n", - "\n", - "const llm = new ChatOpenAI({\n", - " model: \"gpt-4o\",\n", - " temperature: 0,\n", - " maxTokens: undefined,\n", - " timeout: undefined,\n", - " maxRetries: 2,\n", - " // other params...\n", - "})" - ] - }, - { - "cell_type": "markdown", - "id": "2b4f3e15", - "metadata": {}, - "source": [ - "## Invocation" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "62e0dbc3", - "metadata": { - "tags": [] - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "AIMessage {\n", - " \"id\": \"chatcmpl-9qlrhSDIt1X2EaRf7juBxTo6zit5u\",\n", - " \"content\": \"J'adore la programmation.\",\n", - " \"additional_kwargs\": {},\n", - " \"response_metadata\": {\n", - " \"tokenUsage\": {\n", - " \"completionTokens\": 5,\n", - " \"promptTokens\": 31,\n", - " \"totalTokens\": 36\n", - " },\n", - " \"finish_reason\": \"stop\",\n", - " \"system_fingerprint\": \"fp_4e2b2da518\"\n", - " },\n", - " \"tool_calls\": [],\n", - " \"invalid_tool_calls\": [],\n", - " \"usage_metadata\": {\n", - " \"input_tokens\": 31,\n", - " \"output_tokens\": 5,\n", - " \"total_tokens\": 36\n", - " }\n", - "}\n" - ] - } - ], - "source": [ - "const aiMsg = await llm.invoke([\n", - " [\n", - " \"system\",\n", - " \"You are a helpful assistant that translates English to French. 
Translate the user sentence.\",\n", - " ],\n", - " [\"human\", \"I love programming.\"],\n", - "])\n", - "aiMsg" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d86145b3-bfef-46e8-b227-4dda5c9c2705", - "metadata": {}, - "outputs": [], - "source": [ - "console.log(aiMsg.content)" - ] - }, - { - "cell_type": "markdown", - "id": "18e2bfc0-7e78-4528-a73f-499ac150dca8", - "metadata": {}, - "source": [ - "## Chaining\n", - "\n", - "We can [chain](/docs/how_to/sequence/) our model with a prompt template like so:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "e197d1d7-a070-4c96-9f8a-a0e86d046e0b", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "AIMessage {\n", - " \"id\": \"chatcmpl-9qlr4a1l5wf1jCPjmUtTR6Tfd38SK\",\n", - " \"content\": \"Ich liebe Programmieren.\",\n", - " \"additional_kwargs\": {},\n", - " \"response_metadata\": {\n", - " \"tokenUsage\": {\n", - " \"completionTokens\": 5,\n", - " \"promptTokens\": 26,\n", - " \"totalTokens\": 31\n", - " },\n", - " \"finish_reason\": \"stop\",\n", - " \"system_fingerprint\": \"fp_4e2b2da518\"\n", - " },\n", - " \"tool_calls\": [],\n", - " \"invalid_tool_calls\": [],\n", - " \"usage_metadata\": {\n", - " \"input_tokens\": 26,\n", - " \"output_tokens\": 5,\n", - " \"total_tokens\": 31\n", - " }\n", - "}\n" - ] - } - ], - "source": [ - "import { ChatPromptTemplate } from \"@langchain/core/prompts\"\n", - "\n", - "const prompt = ChatPromptTemplate.fromMessages(\n", - " [\n", - " [\n", - " \"system\",\n", - " \"You are a helpful assistant that translates {input_language} to {output_language}.\",\n", - " ],\n", - " [\"human\", \"{input}\"],\n", - " ]\n", - ")\n", - "\n", - "const chain = prompt.pipe(llm);\n", - "await chain.invoke(\n", - " {\n", - " input_language: \"English\",\n", - " output_language: \"German\",\n", - " input: \"I love programming.\",\n", - " }\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "d1ee55bc-ffc8-4cfa-801c-993953a08cfd", - "metadata": {}, - "source": [ - "## TODO: Any functionality specific to this model provider\n", - "\n", - "E.g. creating/using finetuned models via this provider. Delete if not relevant." 
- ] - }, - { - "cell_type": "markdown", - "id": "3a5bb5ca-c3ae-4a58-be67-2cd18574b9a3", - "metadata": {}, - "source": [ - "## API reference\n", - "\n", - "For detailed documentation of all ChatOpenAI features and configurations head to the API reference: https://api.js.langchain.com/classes/langchain_openai.ChatOpenAI.html" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "TypeScript", - "language": "typescript", - "name": "tslab" - }, - "language_info": { - "codemirror_mode": { - "mode": "typescript", - "name": "javascript", - "typescript": true - }, - "file_extension": ".ts", - "mimetype": "text/typescript", - "name": "typescript", - "version": "3.7.2" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/libs/langchain-scripts/src/cli/docs/chat.ts b/libs/langchain-scripts/src/cli/docs/chat.ts index 1c4797889b02..1086911f8097 100644 --- a/libs/langchain-scripts/src/cli/docs/chat.ts +++ b/libs/langchain-scripts/src/cli/docs/chat.ts @@ -149,75 +149,33 @@ export async function fillChatIntegrationDocTemplate(fields: { moduleNameAllCaps = moduleNameAllCaps.replace("CHAT", ""); } - docTemplate = docTemplate.replaceAll( - PACKAGE_NAME_PLACEHOLDER, - fields.packageName - ); - docTemplate = docTemplate.replaceAll( - PACKAGE_NAME_SNAKE_CASE_PLACEHOLDER, - fullPackageNameSnakeCase - ); - docTemplate = docTemplate.replaceAll( - PACKAGE_NAME_SHORT_SNAKE_CASE_PLACEHOLDER, - packageNameShortSnakeCase - ); - docTemplate = docTemplate.replaceAll( - PACKAGE_NAME_PRETTY_PLACEHOLDER, - packageNamePretty - ); - - docTemplate = docTemplate.replaceAll( - MODULE_NAME_PLACEHOLDER, - fields.moduleName - ); - docTemplate = docTemplate.replaceAll( - MODULE_NAME_ALL_CAPS_PLACEHOLDER, - moduleNameAllCaps - ); - - if (extraFields) { - docTemplate.replaceAll( + docTemplate = docTemplate + .replaceAll(PACKAGE_NAME_PLACEHOLDER, fields.packageName) + .replaceAll(PACKAGE_NAME_SNAKE_CASE_PLACEHOLDER, fullPackageNameSnakeCase) + .replaceAll( + PACKAGE_NAME_SHORT_SNAKE_CASE_PLACEHOLDER, + packageNameShortSnakeCase + ) + .replaceAll(PACKAGE_NAME_PRETTY_PLACEHOLDER, packageNamePretty) + .replaceAll(MODULE_NAME_PLACEHOLDER, fields.moduleName) + .replaceAll(MODULE_NAME_ALL_CAPS_PLACEHOLDER, moduleNameAllCaps) + .replaceAll( TOOL_CALLING_PLACEHOLDER, - extraFields.toolCalling ? "✅" : "❌" - ); - docTemplate.replaceAll( - JSON_MODE_PLACEHOLDER, - extraFields.jsonMode ? "✅" : "❌" - ); - docTemplate.replaceAll( - IMAGE_INPUT_PLACEHOLDER, - extraFields.imageInput ? "✅" : "❌" - ); - docTemplate.replaceAll( - AUDIO_INPUT_PLACEHOLDER, - extraFields.audioInput ? "✅" : "❌" - ); - docTemplate.replaceAll( - VIDEO_INPUT_PLACEHOLDER, - extraFields.videoInput ? "✅" : "❌" - ); - docTemplate.replaceAll( + extraFields?.toolCalling ? "✅" : "❌" + ) + .replace(JSON_MODE_PLACEHOLDER, extraFields?.jsonMode ? "✅" : "❌") + .replace(IMAGE_INPUT_PLACEHOLDER, extraFields?.imageInput ? "✅" : "❌") + .replace(AUDIO_INPUT_PLACEHOLDER, extraFields?.audioInput ? "✅" : "❌") + .replace(VIDEO_INPUT_PLACEHOLDER, extraFields?.videoInput ? "✅" : "❌") + .replace( TOKEN_LEVEL_STREAMING_PLACEHOLDER, - extraFields.tokenLevelStreaming ? "✅" : "❌" - ); - docTemplate.replaceAll( - TOKEN_USAGE_PLACEHOLDER, - extraFields.tokenUsage ? "✅" : "❌" - ); - docTemplate.replaceAll( - LOGPROBS_PLACEHOLDER, - extraFields.logprobs ? "✅" : "❌" - ); - docTemplate.replaceAll(LOCAL_PLACEHOLDER, extraFields.local ? "✅" : "❌"); - docTemplate.replaceAll( - SERIALIZABLE_PLACEHOLDER, - extraFields.serializable ? 
"✅" : "❌" - ); - docTemplate.replaceAll( - PY_SUPPORT_PLACEHOLDER, - extraFields.pySupport ? "✅" : "❌" - ); - } + extraFields?.tokenLevelStreaming ? "✅" : "❌" + ) + .replace(TOKEN_USAGE_PLACEHOLDER, extraFields?.tokenUsage ? "✅" : "❌") + .replace(LOGPROBS_PLACEHOLDER, extraFields?.logprobs ? "✅" : "❌") + .replace(LOCAL_PLACEHOLDER, extraFields?.local ? "✅" : "❌") + .replace(SERIALIZABLE_PLACEHOLDER, extraFields?.serializable ? "✅" : "❌") + .replace(PY_SUPPORT_PLACEHOLDER, extraFields?.pySupport ? "✅" : "❌"); const docPath = path.join( INTEGRATIONS_DOCS_PATH, @@ -226,7 +184,7 @@ export async function fillChatIntegrationDocTemplate(fields: { await fs.promises.writeFile(docPath, docTemplate); console.log( - `Successfully created new chat model integration doc at ${docPath}.` + + `Successfully created new chat model integration doc at ${docPath}.\n` + "Please run the cells in the doc to record the outputs, and replace the Python documentation support URL with the proper URL." ); } From 08ba362e058470dd0bc1732e878dc72629f65f5d Mon Sep 17 00:00:00 2001 From: bracesproul Date: Tue, 30 Jul 2024 12:14:22 -0700 Subject: [PATCH 6/7] cr --- libs/langchain-scripts/package.json | 2 +- libs/langchain-scripts/src/cli/docs/chat.ts | 8 ++++---- libs/langchain-scripts/src/cli/docs/index.ts | 2 +- yarn.lock | 9 +-------- 4 files changed, 7 insertions(+), 14 deletions(-) diff --git a/libs/langchain-scripts/package.json b/libs/langchain-scripts/package.json index 5b56e54f6479..82048e1f88a6 100644 --- a/libs/langchain-scripts/package.json +++ b/libs/langchain-scripts/package.json @@ -42,7 +42,7 @@ "dependencies": { "@rollup/wasm-node": "^4.19.0", "axios": "^1.6.7", - "commander": "^12.1.0", + "commander": "^11.1.0", "glob": "^10.3.10", "readline": "^1.3.0", "rimraf": "^5.0.1", diff --git a/libs/langchain-scripts/src/cli/docs/chat.ts b/libs/langchain-scripts/src/cli/docs/chat.ts index 1086911f8097..bae144aaa6d4 100644 --- a/libs/langchain-scripts/src/cli/docs/chat.ts +++ b/libs/langchain-scripts/src/cli/docs/chat.ts @@ -137,10 +137,10 @@ export async function fillChatIntegrationDocTemplate(fields: { fetchAPIRefUrl(formattedApiRefModuleUrl), ]); if (success.some((s) => s === false)) { + // Don't error out because this might be used before the package is released. console.error("Invalid package or module name. API reference not found."); } - let docTemplate = await fs.promises.readFile(TEMPLATE_PATH, "utf-8"); const packageNameShortSnakeCase = fields.packageName.replaceAll("-", "_"); const fullPackageNameSnakeCase = `langchain_${packageNameShortSnakeCase}`; const packageNamePretty = `@langchain/${fields.packageName}`; @@ -149,7 +149,7 @@ export async function fillChatIntegrationDocTemplate(fields: { moduleNameAllCaps = moduleNameAllCaps.replace("CHAT", ""); } - docTemplate = docTemplate + const docTemplate = (await fs.promises.readFile(TEMPLATE_PATH, "utf-8")) .replaceAll(PACKAGE_NAME_PLACEHOLDER, fields.packageName) .replaceAll(PACKAGE_NAME_SNAKE_CASE_PLACEHOLDER, fullPackageNameSnakeCase) .replaceAll( @@ -184,7 +184,7 @@ export async function fillChatIntegrationDocTemplate(fields: { await fs.promises.writeFile(docPath, docTemplate); console.log( - `Successfully created new chat model integration doc at ${docPath}.\n` + - "Please run the cells in the doc to record the outputs, and replace the Python documentation support URL with the proper URL." 
+ `Successfully created new chat model integration doc at ${docPath}.\n +Please run the cells in the doc to record the outputs, and replace the Python documentation support URL with the proper URL.` ); } diff --git a/libs/langchain-scripts/src/cli/docs/index.ts b/libs/langchain-scripts/src/cli/docs/index.ts index e8d802ace3f7..87543142e703 100644 --- a/libs/langchain-scripts/src/cli/docs/index.ts +++ b/libs/langchain-scripts/src/cli/docs/index.ts @@ -13,7 +13,7 @@ type CLIInput = { async function main() { const program = new Command(); program - .description("Create a new chat model integration docs.") + .description("Create a new integration doc.") .option( "--package ", "Package name, eg openai. Should be value of @langchain/" diff --git a/yarn.lock b/yarn.lock index 11d19f903088..03dec4a5a6c7 100644 --- a/yarn.lock +++ b/yarn.lock @@ -12341,7 +12341,7 @@ __metadata: "@typescript-eslint/eslint-plugin": ^6.12.0 "@typescript-eslint/parser": ^6.12.0 axios: ^1.6.7 - commander: ^12.1.0 + commander: ^11.1.0 dotenv: ^16.3.1 dpdm: ^3.12.0 eslint: ^8.33.0 @@ -22548,13 +22548,6 @@ __metadata: languageName: node linkType: hard -"commander@npm:^12.1.0": - version: 12.1.0 - resolution: "commander@npm:12.1.0" - checksum: 68e9818b00fc1ed9cdab9eb16905551c2b768a317ae69a5e3c43924c2b20ac9bb65b27e1cab36aeda7b6496376d4da908996ba2c0b5d79463e0fb1e77935d514 - languageName: node - linkType: hard - "commander@npm:^4.0.0": version: 4.1.1 resolution: "commander@npm:4.1.1" From 994c13ce1069c8228d1854a1e86859248014b9cd Mon Sep 17 00:00:00 2001 From: bracesproul Date: Tue, 30 Jul 2024 13:08:01 -0700 Subject: [PATCH 7/7] cr --- libs/langchain-scripts/src/cli/docs/chat.ts | 67 ++++++++++++++----- .../src/cli/utils/get-input.ts | 30 ++++++++- 2 files changed, 80 insertions(+), 17 deletions(-) diff --git a/libs/langchain-scripts/src/cli/docs/chat.ts b/libs/langchain-scripts/src/cli/docs/chat.ts index bae144aaa6d4..c0760bb32c54 100644 --- a/libs/langchain-scripts/src/cli/docs/chat.ts +++ b/libs/langchain-scripts/src/cli/docs/chat.ts @@ -1,6 +1,11 @@ import * as path from "node:path"; import * as fs from "node:fs"; -import { getUserInput } from "../utils/get-input.js"; +import { + boldText, + getUserInput, + greenText, + redBackground, +} from "../utils/get-input.js"; const PACKAGE_NAME_PLACEHOLDER = "__package_name__"; const PACKAGE_NAME_SHORT_SNAKE_CASE_PLACEHOLDER = @@ -62,37 +67,59 @@ type ExtraFields = { async function promptExtraFields(): Promise { const hasToolCalling = await getUserInput( - "Does the tool support tool calling? (y/n) " + "Does the tool support tool calling? (y/n) ", + undefined, + true ); const hasJsonMode = await getUserInput( - "Does the tool support JSON mode? (y/n) " + "Does the tool support JSON mode? (y/n) ", + undefined, + true ); const hasImageInput = await getUserInput( - "Does the tool support image input? (y/n) " + "Does the tool support image input? (y/n) ", + undefined, + true ); const hasAudioInput = await getUserInput( - "Does the tool support audio input? (y/n) " + "Does the tool support audio input? (y/n) ", + undefined, + true ); const hasVideoInput = await getUserInput( - "Does the tool support video input? (y/n) " + "Does the tool support video input? (y/n) ", + undefined, + true ); const hasTokenLevelStreaming = await getUserInput( - "Does the tool support token level streaming? (y/n) " + "Does the tool support token level streaming? (y/n) ", + undefined, + true ); const hasTokenUsage = await getUserInput( - "Does the tool support token usage? 
(y/n) " + "Does the tool support token usage? (y/n) ", + undefined, + true ); const hasLogprobs = await getUserInput( - "Does the tool support logprobs? (y/n) " + "Does the tool support logprobs? (y/n) ", + undefined, + true ); const hasLocal = await getUserInput( - "Does the tool support local usage? (y/n) " + "Does the tool support local usage? (y/n) ", + undefined, + true ); const hasSerializable = await getUserInput( - "Does the tool support serializable output? (y/n) " + "Does the tool support serializable output? (y/n) ", + undefined, + true ); const hasPySupport = await getUserInput( - "Does the tool support Python support? (y/n) " + "Does the tool support Python support? (y/n) ", + undefined, + true ); return { @@ -117,7 +144,8 @@ export async function fillChatIntegrationDocTemplate(fields: { // Ask the user if they'd like to fill in extra fields, if so, prompt them. let extraFields: ExtraFields | undefined; const shouldPromptExtraFields = await getUserInput( - "Would you like to fill out optional fields? (y/n) " + "Would you like to fill out optional fields? (y/n) ", + "white_background" ); if (shouldPromptExtraFields.toLowerCase() === "y") { extraFields = await promptExtraFields(); @@ -182,9 +210,18 @@ export async function fillChatIntegrationDocTemplate(fields: { `${packageNameShortSnakeCase}.ipynb` ); await fs.promises.writeFile(docPath, docTemplate); + const prettyDocPath = docPath.split("docs/core_docs/")[1]; + + const updatePythonDocUrlText = ` ${redBackground( + "- Update the Python documentation URL with the proper URL." + )}`; + const successText = `\nSuccessfully created new chat model integration doc at ${prettyDocPath}.`; console.log( - `Successfully created new chat model integration doc at ${docPath}.\n -Please run the cells in the doc to record the outputs, and replace the Python documentation support URL with the proper URL.` + `${greenText(successText)}\n +${boldText("Next steps:")} +${extraFields?.pySupport ? updatePythonDocUrlText : ""} + - Run all code cells in the generated doc to record the outputs. + - Add extra sections on integration specific features.\n` ); } diff --git a/libs/langchain-scripts/src/cli/utils/get-input.ts b/libs/langchain-scripts/src/cli/utils/get-input.ts index f8bca08bf593..0f753dd807ea 100644 --- a/libs/langchain-scripts/src/cli/utils/get-input.ts +++ b/libs/langchain-scripts/src/cli/utils/get-input.ts @@ -1,19 +1,45 @@ import * as readline from "readline"; +type Color = "green" | "red_background" | "white_background"; + +export const greenText = (text: string) => `\x1b[1m\x1b[92m${text}\x1b[0m`; +export const boldText = (text: string) => `\x1b[1m${text}\x1b[0m`; +export const redBackground = (text: string) => `\x1b[41m\x1b[37m${text}\x1b[0m`; +export const whiteBackground = (text: string) => + `\x1b[30m\x1b[47m${text}\x1b[0m`; + /** * Prompts the user with a question and returns the user input. * * @param {string} question The question to log to the users terminal. + * @param {Color | undefined} color The color to use for the question. + * @param {boolean | undefined} bold Whether to make the question bold. * @returns {Promise} The user input. 
  */
-export async function getUserInput(question: string): Promise<string> {
+export async function getUserInput(
+  question: string,
+  color?: Color,
+  bold?: boolean
+): Promise<string> {
   const rl = readline.createInterface({
     input: process.stdin,
     output: process.stdout,
   });
 
+  let questionWithStyling = question;
+  if (bold) {
+    questionWithStyling = boldText(questionWithStyling);
+  }
+  if (color === "green") {
+    questionWithStyling = greenText(questionWithStyling);
+  } else if (color === "red_background") {
+    questionWithStyling = redBackground(questionWithStyling);
+  } else if (color === "white_background") {
+    questionWithStyling = whiteBackground(questionWithStyling);
+  }
+
   return new Promise((resolve) => {
-    rl.question(`\x1b[30m\x1b[47m${question}\x1b[0m`, (input) => {
+    rl.question(questionWithStyling, (input) => {
       rl.close();
       resolve(input);
     });