From 0e8f3ea572d581718187fe7223d46e5c53f80c98 Mon Sep 17 00:00:00 2001
From: Henry
Date: Tue, 19 Dec 2023 17:48:45 +0000
Subject: [PATCH] add if else node and logic

---
 .../IfElseFunction/IfElseFunction.ts          |  143 ++
 .../IfElseFunction/ifelsefunction.svg         |    1 +
 .../server/marketplaces/chatflows/IfElse.json | 1156 +++++++++++++++++
 .../marketplaces/chatflows/SQL Prompt.json    |  551 +++++++-
 packages/server/src/index.ts                  |  119 +-
 packages/server/src/utils/index.ts            |  102 +-
 .../src/ui-component/json/SelectVariable.js   |    6 +-
 packages/ui/src/utils/genericHelper.js        |    9 -
 .../ui/src/views/canvas/NodeInputHandler.js   |    4 +-
 .../ui/src/views/canvas/NodeOutputHandler.js  |  132 +-
 packages/ui/src/views/canvas/index.js         |    5 +-
 11 files changed, 2085 insertions(+), 143 deletions(-)
 create mode 100644 packages/components/nodes/utilities/IfElseFunction/IfElseFunction.ts
 create mode 100644 packages/components/nodes/utilities/IfElseFunction/ifelsefunction.svg
 create mode 100644 packages/server/marketplaces/chatflows/IfElse.json

diff --git a/packages/components/nodes/utilities/IfElseFunction/IfElseFunction.ts b/packages/components/nodes/utilities/IfElseFunction/IfElseFunction.ts
new file mode 100644
index 00000000000..862521ebf3d
--- /dev/null
+++ b/packages/components/nodes/utilities/IfElseFunction/IfElseFunction.ts
@@ -0,0 +1,143 @@
+import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
+import { NodeVM } from 'vm2'
+import { availableDependencies } from '../../../src/utils'
+
+class IfElseFunction_Utilities implements INode {
+    label: string
+    name: string
+    version: number
+    description: string
+    type: string
+    icon: string
+    category: string
+    baseClasses: string[]
+    inputs: INodeParams[]
+    outputs: INodeOutputsValue[]
+
+    constructor() {
+        this.label = 'IfElse Function'
+        this.name = 'ifElseFunction'
+        this.version = 1.0
+        this.type = 'IfElseFunction'
+        this.icon = 'ifelsefunction.svg'
+        this.category = 'Utilities'
+        this.description = `Split flows based on If Else javascript functions`
+        this.baseClasses = [this.type, 'Utilities']
+        this.inputs = [
+            {
+                label: 'Input Variables',
+                name: 'functionInputVariables',
+                description: 'Input variables can be used in the function with prefix $. For example: $var',
+                type: 'json',
+                optional: true,
+                acceptVariable: true,
+                list: true
+            },
+            {
+                label: 'IfElse Name',
+                name: 'functionName',
+                type: 'string',
+                optional: true,
+                placeholder: 'If Condition Match'
+            },
+            {
+                label: 'If Function',
+                name: 'ifFunction',
+                description: 'Function must return a value',
+                type: 'code',
+                rows: 2,
+                default: `if ("hello" == "hello") {
+    return true;
+}`
+            },
+            {
+                label: 'Else Function',
+                name: 'elseFunction',
+                description: 'Function must return a value',
+                type: 'code',
+                rows: 2,
+                default: `return false;`
+            }
+        ]
+        this.outputs = [
+            {
+                label: 'True',
+                name: 'returnTrue',
+                baseClasses: ['string', 'number', 'boolean', 'json', 'array']
+            },
+            {
+                label: 'False',
+                name: 'returnFalse',
+                baseClasses: ['string', 'number', 'boolean', 'json', 'array']
+            }
+        ]
+    }
+
+    async init(nodeData: INodeData, input: string): Promise<any> {
+        const ifFunction = nodeData.inputs?.ifFunction as string
+        const elseFunction = nodeData.inputs?.elseFunction as string
+        const functionInputVariablesRaw = nodeData.inputs?.functionInputVariables
+
+        let inputVars: ICommonObject = {}
+        if (functionInputVariablesRaw) {
+            try {
+                inputVars =
+                    typeof functionInputVariablesRaw === 'object' ? functionInputVariablesRaw : JSON.parse(functionInputVariablesRaw)
+            } catch (exception) {
+                throw new Error("Invalid JSON in the IfElse's Input Variables: " + exception)
+            }
+        }
+
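+        // Expose the user input as $input and each parsed Input Variable as $<name> inside the vm2 sandbox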
+        let sandbox: any = { $input: input }
+
+        if (Object.keys(inputVars).length) {
+            for (const item in inputVars) {
+                sandbox[`$${item}`] = inputVars[item]
+            }
+        }
+
+        const defaultAllowBuiltInDep = [
+            'assert',
+            'buffer',
+            'crypto',
+            'events',
+            'http',
+            'https',
+            'net',
+            'path',
+            'querystring',
+            'timers',
+            'tls',
+            'url',
+            'zlib'
+        ]
+
+        const builtinDeps = process.env.TOOL_FUNCTION_BUILTIN_DEP
+            ? defaultAllowBuiltInDep.concat(process.env.TOOL_FUNCTION_BUILTIN_DEP.split(','))
+            : defaultAllowBuiltInDep
+        const externalDeps = process.env.TOOL_FUNCTION_EXTERNAL_DEP ? process.env.TOOL_FUNCTION_EXTERNAL_DEP.split(',') : []
+        const deps = availableDependencies.concat(externalDeps)
+
+        const nodeVMOptions = {
+            console: 'inherit',
+            sandbox,
+            require: {
+                external: { modules: deps },
+                builtin: builtinDeps
+            }
+        } as any
+
+        const vm = new NodeVM(nodeVMOptions)
+        try {
+            // Evaluate the If branch first; a truthy return value routes the flow to the True output
+            const responseTrue = await vm.run(`module.exports = async function() {${ifFunction}}()`, __dirname)
+            if (responseTrue) return { output: responseTrue, type: true }
+
+            const responseFalse = await vm.run(`module.exports = async function() {${elseFunction}}()`, __dirname)
+            return { output: responseFalse, type: false }
+        } catch (e) {
+            throw new Error(e)
+        }
+    }
+}
+
+module.exports = { nodeClass: IfElseFunction_Utilities }
diff --git a/packages/components/nodes/utilities/IfElseFunction/ifelsefunction.svg b/packages/components/nodes/utilities/IfElseFunction/ifelsefunction.svg
new file mode 100644
index 00000000000..f5dd5979f69
--- /dev/null
+++ b/packages/components/nodes/utilities/IfElseFunction/ifelsefunction.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/packages/server/marketplaces/chatflows/IfElse.json b/packages/server/marketplaces/chatflows/IfElse.json
new file mode 100644
index 00000000000..690d3ce5565
--- /dev/null
+++ b/packages/server/marketplaces/chatflows/IfElse.json
@@ -0,0 +1,1156 @@
+{
+    "description": "Split flows based on if else condition",
+    "badge": "new",
+    "nodes": [
+        {
+            "width": 300,
+            "height": 511,
+            "id": "promptTemplate_0",
+            "position": {
+                "x": 792.9464838535649,
+                "y": 527.1718536712464
+            },
+            "type": "customNode",
+            "data": {
+                "id": "promptTemplate_0",
+                "label": "Prompt Template",
+                "version": 1,
+                "name": "promptTemplate",
+                "type": "PromptTemplate",
+                "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
+                "category": "Prompts",
+                "description": "Schema to represent a basic prompt for an LLM",
+                "inputParams": [
+                    {
+                        "label": "Template",
+                        "name": "template",
+                        "type": "string",
+                        "rows": 4,
+                        "placeholder": "What is a good name for a company that makes {product}?",
+                        "id": "promptTemplate_0-input-template-string"
+                    },
+                    {
+                        "label": "Format Prompt Values",
+                        "name": "promptValues",
+                        "type": "json",
+                        "optional": true,
+                        "acceptVariable": true,
+                        "list": true,
+                        "id": "promptTemplate_0-input-promptValues-json"
+                    }
+                ],
+                "inputAnchors": [],
+                "inputs": {
+                    "template": "You are an AI who performs one task based on the following objective: {objective}.\nRespond with how you would complete this task:",
+                    "promptValues": "{\"objective\":\"{{question}}\"}"
+                },
+                "outputAnchors": [
+                    {
+                        "id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
+                        "name": "promptTemplate",
+                        "label": "PromptTemplate",
+                        
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 792.9464838535649, + "y": 527.1718536712464 + }, + "dragging": false + }, + { + "width": 300, + "height": 511, + "id": "promptTemplate_1", + "position": { + "x": 1995.1328578238122, + "y": -14.648035759690174 + }, + "type": "customNode", + "data": { + "id": "promptTemplate_1", + "label": "Prompt Template", + "version": 1, + "name": "promptTemplate", + "type": "PromptTemplate", + "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], + "category": "Prompts", + "description": "Schema to represent a basic prompt for an LLM", + "inputParams": [ + { + "label": "Template", + "name": "template", + "type": "string", + "rows": 4, + "placeholder": "What is a good name for a company that makes {product}?", + "id": "promptTemplate_1-input-template-string" + }, + { + "label": "Format Prompt Values", + "name": "promptValues", + "type": "json", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "promptTemplate_1-input-promptValues-json" + } + ], + "inputAnchors": [], + "inputs": { + "template": "You are a task creation AI that uses the result of an execution agent to create new tasks with the following objective: {objective}.\nThe last completed task has the result: {result}.\nBased on the result, create new tasks to be completed by the AI system that do not overlap with result.\nReturn the tasks as an array.", + "promptValues": "{\"objective\":\"{{question}}\",\"result\":\"{{ifElseFunction_0.data.instance}}\"}" + }, + "outputAnchors": [ + { + "id": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "name": "promptTemplate", + "label": "PromptTemplate", + "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 1995.1328578238122, + "y": -14.648035759690174 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 574, + "id": "openAI_1", + "position": { + "x": 791.6102007244282, + "y": -83.71386876566092 + }, + "type": "customNode", + "data": { + "id": "openAI_1", + "label": "OpenAI", + "version": 3, + "name": "openAI", + "type": "OpenAI", + "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"], + "category": "LLMs", + "description": "Wrapper around OpenAI large language models", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["openAIApi"], + "id": "openAI_1-input-credential-credential" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-3.5-turbo-instruct", + "name": "gpt-3.5-turbo-instruct" + }, + { + "label": "babbage-002", + "name": "babbage-002" + }, + { + "label": "davinci-002", + "name": "davinci-002" + } + ], + "default": "gpt-3.5-turbo-instruct", + "optional": true, + "id": "openAI_1-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.7, + "optional": true, + "id": "openAI_1-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": 
"openAI_1-input-topP-number" + }, + { + "label": "Best Of", + "name": "bestOf", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-bestOf-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-presencePenalty-number" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-basepath-string" + } + ], + "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": "openAI_1-input-cache-BaseCache" + } + ], + "inputs": { + "modelName": "gpt-3.5-turbo-instruct", + "temperature": 0.7, + "maxTokens": "", + "topP": "", + "bestOf": "", + "frequencyPenalty": "", + "presencePenalty": "", + "batchSize": "", + "timeout": "", + "basepath": "" + }, + "outputAnchors": [ + { + "id": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", + "name": "openAI", + "label": "OpenAI", + "type": "OpenAI | BaseLLM | BaseLanguageModel" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 791.6102007244282, + "y": -83.71386876566092 + }, + "dragging": false + }, + { + "width": 300, + "height": 574, + "id": "openAI_2", + "position": { + "x": 2340.5995455075863, + "y": -310.7609446553905 + }, + "type": "customNode", + "data": { + "id": "openAI_2", + "label": "OpenAI", + "version": 3, + "name": "openAI", + "type": "OpenAI", + "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"], + "category": "LLMs", + "description": "Wrapper around OpenAI large language models", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["openAIApi"], + "id": "openAI_2-input-credential-credential" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-3.5-turbo-instruct", + "name": "gpt-3.5-turbo-instruct" + }, + { + "label": "babbage-002", + "name": "babbage-002" + }, + { + "label": "davinci-002", + "name": "davinci-002" + } + ], + "default": "gpt-3.5-turbo-instruct", + "optional": true, + "id": "openAI_2-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.7, + "optional": true, + "id": "openAI_2-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_2-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_2-input-topP-number" + }, + { + "label": "Best Of", + "name": "bestOf", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_2-input-bestOf-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + 
"optional": true, + "additionalParams": true, + "id": "openAI_2-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_2-input-presencePenalty-number" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_2-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_2-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAI_2-input-basepath-string" + } + ], + "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": "openAI_2-input-cache-BaseCache" + } + ], + "inputs": { + "modelName": "gpt-3.5-turbo-instruct", + "temperature": 0.7, + "maxTokens": "", + "topP": "", + "bestOf": "", + "frequencyPenalty": "", + "presencePenalty": "", + "batchSize": "", + "timeout": "", + "basepath": "" + }, + "outputAnchors": [ + { + "id": "openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", + "name": "openAI", + "label": "OpenAI", + "type": "OpenAI | BaseLLM | BaseLanguageModel" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 2340.5995455075863, + "y": -310.7609446553905 + }, + "dragging": false + }, + { + "width": 300, + "height": 456, + "id": "llmChain_0", + "position": { + "x": 1183.0899727188096, + "y": 385.0159960992951 + }, + "type": "customNode", + "data": { + "id": "llmChain_0", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_0-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_0-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_0-input-outputParser-BaseLLMOutputParser" + } + ], + "inputs": { + "model": "{{openAI_1.data.instance}}", + "prompt": "{{promptTemplate_0.data.instance}}", + "outputParser": "", + "chainName": "FirstChain" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_0-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "outputPrediction" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1183.0899727188096, + "y": 385.0159960992951 + }, + "dragging": false + }, + { + "width": 300, + "height": 456, + "id": "llmChain_1", + "position": { + "x": 2773.675809586143, + "y": 
114.39482869328754 + }, + "type": "customNode", + "data": { + "id": "llmChain_1", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_1-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_1-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_1-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_1-input-outputParser-BaseLLMOutputParser" + } + ], + "inputs": { + "model": "{{openAI_2.data.instance}}", + "prompt": "{{promptTemplate_1.data.instance}}", + "outputParser": "", + "chainName": "LastChain" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_1-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 2773.675809586143, + "y": 114.39482869328754 + }, + "dragging": false + }, + { + "width": 300, + "height": 511, + "id": "promptTemplate_2", + "position": { + "x": 1992.5456174373144, + "y": 675.5277193898106 + }, + "type": "customNode", + "data": { + "id": "promptTemplate_2", + "label": "Prompt Template", + "version": 1, + "name": "promptTemplate", + "type": "PromptTemplate", + "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], + "category": "Prompts", + "description": "Schema to represent a basic prompt for an LLM", + "inputParams": [ + { + "label": "Template", + "name": "template", + "type": "string", + "rows": 4, + "placeholder": "What is a good name for a company that makes {product}?", + "id": "promptTemplate_2-input-template-string" + }, + { + "label": "Format Prompt Values", + "name": "promptValues", + "type": "json", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "promptTemplate_2-input-promptValues-json" + } + ], + "inputAnchors": [], + "inputs": { + "template": "Politely say \"I'm not able to answer the query\"", + "promptValues": "{\"objective\":\"{{question}}\",\"result\":\"\"}" + }, + "outputAnchors": [ + { + "id": "promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "name": "promptTemplate", + "label": "PromptTemplate", + "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 1992.5456174373144, + "y": 675.5277193898106 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 507, + "id": "llmChain_2", + "position": { + "x": 2830.477603228176, + "y": 907.9116984679802 + }, + "type": "customNode", + "data": { + "id": "llmChain_2", + "label": "LLM Chain", + "version": 3, 
+ "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_2-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_2-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_2-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_2-input-outputParser-BaseLLMOutputParser" + }, + { + "label": "Input Moderation", + "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", + "name": "inputModeration", + "type": "Moderation", + "optional": true, + "list": true, + "id": "llmChain_2-input-inputModeration-Moderation" + } + ], + "inputs": { + "model": "{{chatOpenAI_0.data.instance}}", + "prompt": "{{promptTemplate_2.data.instance}}", + "outputParser": "", + "inputModeration": "", + "chainName": "FallbackChain" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_2-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_2-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 2830.477603228176, + "y": 907.9116984679802 + }, + "dragging": false + }, + { + "width": 300, + "height": 755, + "id": "ifElseFunction_0", + "position": { + "x": 1590.6560099561739, + "y": 265.36655719326177 + }, + "type": "customNode", + "data": { + "id": "ifElseFunction_0", + "label": "IfElse Function", + "version": 1, + "name": "ifElseFunction", + "type": "IfElseFunction", + "baseClasses": ["IfElseFunction", "Utilities"], + "category": "Utilities", + "description": "Split flows based on If Else javascript functions", + "inputParams": [ + { + "label": "Input Variables", + "name": "functionInputVariables", + "description": "Input variables can be used in the function with prefix $. 
For example: $var", + "type": "json", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "ifElseFunction_0-input-functionInputVariables-json" + }, + { + "label": "IfElse Name", + "name": "functionName", + "type": "string", + "optional": true, + "placeholder": "If Condition Match", + "id": "ifElseFunction_0-input-functionName-string" + }, + { + "label": "If Function", + "name": "ifFunction", + "description": "Function must return a value", + "type": "code", + "rows": 2, + "default": "if (\"hello\" == \"hello\") {\n return true;\n}", + "id": "ifElseFunction_0-input-ifFunction-code" + }, + { + "label": "Else Function", + "name": "elseFunction", + "description": "Function must return a value", + "type": "code", + "rows": 2, + "default": "return false;", + "id": "ifElseFunction_0-input-elseFunction-code" + } + ], + "inputAnchors": [], + "inputs": { + "functionInputVariables": "{\"task\":\"{{llmChain_0.data.instance}}\"}", + "functionName": "If Condition Match", + "ifFunction": "if (\"hello\" == \"21\") {\n return $task;\n}", + "elseFunction": "return false;" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "ifElseFunction_0-output-returnTrue-string|number|boolean|json|array", + "name": "returnTrue", + "label": "True", + "type": "string | number | boolean | json | array" + }, + { + "id": "ifElseFunction_0-output-returnFalse-string|number|boolean|json|array", + "name": "returnFalse", + "label": "False", + "type": "string | number | boolean | json | array" + } + ], + "default": "returnTrue" + } + ], + "outputs": { + "output": "returnTrue" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1590.6560099561739, + "y": 265.36655719326177 + }, + "dragging": false + }, + { + "width": 300, + "height": 574, + "id": "chatOpenAI_0", + "position": { + "x": 2373.5711587130127, + "y": 487.8533802540226 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "version": 2, + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["openAIApi"], + "id": "chatOpenAI_0-input-credential-credential" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, + { + "label": "gpt-4-0613", + "name": "gpt-4-0613" + }, + { + "label": "gpt-4-32k", + "name": "gpt-4-32k" + }, + { + "label": "gpt-4-32k-0613", + "name": "gpt-4-32k-0613" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, + { + "label": "gpt-3.5-turbo-0613", + "name": "gpt-3.5-turbo-0613" + }, + { + "label": "gpt-3.5-turbo-16k", + "name": "gpt-3.5-turbo-16k" + }, + { + "label": "gpt-3.5-turbo-16k-0613", + "name": "gpt-3.5-turbo-16k-0613" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "step": 0.1, + "default": 0.9, + "optional": true, + "id": 
"chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" + } + ], + "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": "chatOpenAI_0-input-cache-BaseCache" + } + ], + "inputs": { + "cache": "", + "modelName": "gpt-3.5-turbo", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "basepath": "", + "baseOptions": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 2373.5711587130127, + "y": 487.8533802540226 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "openAI_1", + "sourceHandle": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "openAI_1-openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-llmChain_0-llmChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "promptTemplate_0", + "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } + }, + { + "source": "promptTemplate_1", + "sourceHandle": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "promptTemplate_1-promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } + }, + { + "source": "openAI_2", + "sourceHandle": 
"openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "openAI_2-openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-llmChain_1-llmChain_1-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "promptTemplate_2", + "sourceHandle": "promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "target": "llmChain_2", + "targetHandle": "llmChain_2-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "promptTemplate_2-promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_2-llmChain_2-input-prompt-BasePromptTemplate" + }, + { + "source": "llmChain_0", + "sourceHandle": "llmChain_0-output-outputPrediction-string|json", + "target": "ifElseFunction_0", + "targetHandle": "ifElseFunction_0-input-functionInputVariables-json", + "type": "buttonedge", + "id": "llmChain_0-llmChain_0-output-outputPrediction-string|json-ifElseFunction_0-ifElseFunction_0-input-functionInputVariables-json" + }, + { + "source": "ifElseFunction_0", + "sourceHandle": "ifElseFunction_0-output-returnFalse-string|number|boolean|json|array", + "target": "promptTemplate_2", + "targetHandle": "promptTemplate_2-input-promptValues-json", + "type": "buttonedge", + "id": "ifElseFunction_0-ifElseFunction_0-output-returnFalse-string|number|boolean|json|array-promptTemplate_2-promptTemplate_2-input-promptValues-json" + }, + { + "source": "ifElseFunction_0", + "sourceHandle": "ifElseFunction_0-output-returnTrue-string|number|boolean|json|array", + "target": "promptTemplate_1", + "targetHandle": "promptTemplate_1-input-promptValues-json", + "type": "buttonedge", + "id": "ifElseFunction_0-ifElseFunction_0-output-returnTrue-string|number|boolean|json|array-promptTemplate_1-promptTemplate_1-input-promptValues-json" + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "llmChain_2", + "targetHandle": "llmChain_2-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_2-llmChain_2-input-model-BaseLanguageModel" + } + ] +} diff --git a/packages/server/marketplaces/chatflows/SQL Prompt.json b/packages/server/marketplaces/chatflows/SQL Prompt.json index 9244e8decae..ad08fed8500 100644 --- a/packages/server/marketplaces/chatflows/SQL Prompt.json +++ b/packages/server/marketplaces/chatflows/SQL Prompt.json @@ -7,8 +7,8 @@ "height": 511, "id": "promptTemplate_0", "position": { - "x": 638.5481508577102, - "y": 84.0454315632386 + "x": 384.84394025989127, + "y": 61.21205260943492 }, "type": "customNode", "data": { @@ -57,8 +57,8 @@ }, "selected": false, "positionAbsolute": { - "x": 638.5481508577102, - "y": 84.0454315632386 + "x": 384.84394025989127, + "y": 61.21205260943492 }, "dragging": false }, @@ -67,8 +67,8 @@ "height": 507, "id": "llmChain_0", "position": { - "x": 1095.1973126620626, - "y": -83.98379829183628 + "x": 770.4559230968546, + "y": -127.11351409346554 }, "type": "customNode", "data": { @@ -156,8 +156,8 @@ }, "selected": false, "positionAbsolute": { - "x": 1095.1973126620626, - "y": -83.98379829183628 + "x": 770.4559230968546, + "y": -127.11351409346554 }, "dragging": false }, @@ -166,8 +166,8 @@ "height": 574, "id": "chatOpenAI_0", "position": { - "x": 636.5762708317321, 
- "y": -543.3151550847003 + "x": 372.72389181000057, + "y": -561.0744498265477 }, "type": "customNode", "data": { @@ -346,8 +346,8 @@ }, "selected": false, "positionAbsolute": { - "x": 636.5762708317321, - "y": -543.3151550847003 + "x": 372.72389181000057, + "y": -561.0744498265477 }, "dragging": false }, @@ -645,8 +645,8 @@ "height": 669, "id": "customFunction_2", "position": { - "x": -152.63957160907668, - "y": -212.74538890862547 + "x": -395.18079694059173, + "y": -222.8935573325382 }, "type": "customNode", "data": { @@ -687,7 +687,7 @@ "inputs": { "functionInputVariables": "", "functionName": "Get SQL Schema Prompt", - "javascriptFunction": "const HOST = 'singlestore-host';\nconst USER = 'admin';\nconst PASSWORD = 'mypassword';\nconst DATABASE = 'mydb';\nconst TABLE = 'samples';\nconst mysql = require('mysql2/promise');\n\nlet sqlSchemaPrompt;\n\n/**\n * Ideal prompt contains schema info and examples\n * Follows best practices as specified form https://arxiv.org/abs/2204.00498\n * =========================================\n * CREATE TABLE samples (firstName varchar NOT NULL, lastName varchar)\n * SELECT * FROM samples LIMIT 3\n * firstName lastName\n * Stephen Tyler\n * Jack McGinnis\n * Steven Repici\n * =========================================\n*/\nfunction getSQLPrompt() {\n return new Promise(async (resolve, reject) => {\n \n const singleStoreConnection = mysql.createPool({\n host: HOST,\n user: USER,\n password: PASSWORD,\n database: DATABASE,\n });\n\n // Get schema info\n const [schemaInfo] = await singleStoreConnection.execute(\n `SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = \"${TABLE}\"`\n );\n\n const createColumns = [];\n const columnNames = [];\n\n for (const schemaData of schemaInfo) {\n columnNames.push(`${schemaData['COLUMN_NAME']}`);\n createColumns.push(`${schemaData['COLUMN_NAME']} ${schemaData['COLUMN_TYPE']} ${schemaData['IS_NULLABLE'] === 'NO' ? 
'NOT NULL' : ''}`);\n }\n\n const sqlCreateTableQuery = `CREATE TABLE samples (${createColumns.join(', ')})`;\n const sqlSelectTableQuery = `SELECT * FROM samples LIMIT 3`;\n\n // Get first 3 rows\n const [rows] = await singleStoreConnection.execute(\n sqlSelectTableQuery,\n );\n \n const allValues = [];\n for (const row of rows) {\n const rowValues = [];\n for (const colName in row) {\n rowValues.push(row[colName]);\n }\n allValues.push(rowValues.join(' '));\n }\n\n sqlSchemaPrompt = sqlCreateTableQuery + '\\n' + sqlSelectTableQuery + '\\n' + columnNames.join(' ') + '\\n' + allValues.join('\\n');\n \n resolve();\n });\n}\n\nasync function main() {\n await getSQLPrompt();\n}\n\nawait main();\n\nreturn sqlSchemaPrompt;" + "javascriptFunction": "const HOST = 'singlestore-host.com';\nconst USER = 'admin';\nconst PASSWORD = 'mypassword';\nconst DATABASE = 'mydb';\nconst TABLE = 'samples';\nconst mysql = require('mysql2/promise');\n\nlet sqlSchemaPrompt;\n\n/**\n * Ideal prompt contains schema info and examples\n * Follows best practices as specified form https://arxiv.org/abs/2204.00498\n * =========================================\n * CREATE TABLE samples (firstName varchar NOT NULL, lastName varchar)\n * SELECT * FROM samples LIMIT 3\n * firstName lastName\n * Stephen Tyler\n * Jack McGinnis\n * Steven Repici\n * =========================================\n*/\nfunction getSQLPrompt() {\n return new Promise(async (resolve, reject) => {\n try {\n const singleStoreConnection = mysql.createPool({\n host: HOST,\n user: USER,\n password: PASSWORD,\n database: DATABASE,\n });\n \n // Get schema info\n const [schemaInfo] = await singleStoreConnection.execute(\n `SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = \"${TABLE}\"`\n );\n \n const createColumns = [];\n const columnNames = [];\n \n for (const schemaData of schemaInfo) {\n columnNames.push(`${schemaData['COLUMN_NAME']}`);\n createColumns.push(`${schemaData['COLUMN_NAME']} ${schemaData['COLUMN_TYPE']} ${schemaData['IS_NULLABLE'] === 'NO' ? 
'NOT NULL' : ''}`);\n }\n \n const sqlCreateTableQuery = `CREATE TABLE samples (${createColumns.join(', ')})`;\n const sqlSelectTableQuery = `SELECT * FROM samples LIMIT 3`;\n \n // Get first 3 rows\n const [rows] = await singleStoreConnection.execute(\n sqlSelectTableQuery,\n );\n \n const allValues = [];\n for (const row of rows) {\n const rowValues = [];\n for (const colName in row) {\n rowValues.push(row[colName]);\n }\n allValues.push(rowValues.join(' '));\n }\n \n sqlSchemaPrompt = sqlCreateTableQuery + '\\n' + sqlSelectTableQuery + '\\n' + columnNames.join(' ') + '\\n' + allValues.join('\\n');\n \n resolve();\n } catch (e) {\n console.error(e);\n return reject(e);\n }\n });\n}\n\nasync function main() {\n await getSQLPrompt();\n}\n\nawait main();\n\nreturn sqlSchemaPrompt;" }, "outputAnchors": [ { @@ -712,8 +712,8 @@ }, "selected": false, "positionAbsolute": { - "x": -152.63957160907668, - "y": -212.74538890862547 + "x": -395.18079694059173, + "y": -222.8935573325382 }, "dragging": false }, @@ -764,7 +764,7 @@ "inputs": { "functionInputVariables": "{\"sqlQuery\":\"{{setVariable_1.data.instance}}\"}", "functionName": "Run SQL Query", - "javascriptFunction": "const HOST = 'singlestore-host';\nconst USER = 'admin';\nconst PASSWORD = 'mypassword';\nconst DATABASE = 'mydb';\nconst TABLE = 'samples';\nconst mysql = require('mysql2/promise');\n\nlet result;\n\nfunction getSQLResult() {\n return new Promise(async (resolve, reject) => {\n \n const singleStoreConnection = mysql.createPool({\n host: HOST,\n user: USER,\n password: PASSWORD,\n database: DATABASE,\n });\n \n const [rows] = await singleStoreConnection.execute(\n $sqlQuery\n );\n\n result = JSON.stringify(rows)\n \n resolve();\n });\n}\n\nasync function main() {\n await getSQLResult();\n}\n\nawait main();\n\nreturn result;" + "javascriptFunction": "const HOST = 'singlestore-host.com';\nconst USER = 'admin';\nconst PASSWORD = 'mypassword';\nconst DATABASE = 'mydb';\nconst TABLE = 'samples';\nconst mysql = require('mysql2/promise');\n\nlet result;\n\nfunction getSQLResult() {\n return new Promise(async (resolve, reject) => {\n try {\n const singleStoreConnection = mysql.createPool({\n host: HOST,\n user: USER,\n password: PASSWORD,\n database: DATABASE,\n });\n \n const [rows] = await singleStoreConnection.execute(\n $sqlQuery\n );\n \n result = JSON.stringify(rows)\n \n resolve();\n } catch (e) {\n console.error(e);\n return reject(e);\n }\n });\n}\n\nasync function main() {\n await getSQLResult();\n}\n\nawait main();\n\nreturn result;" }, "outputAnchors": [ { @@ -859,8 +859,8 @@ "height": 355, "id": "setVariable_0", "position": { - "x": 247.02296459986826, - "y": -60.27462140472403 + "x": 18.689175061831122, + "y": -62.81166351070223 }, "type": "customNode", "data": { @@ -918,8 +918,8 @@ }, "selected": false, "positionAbsolute": { - "x": 247.02296459986826, - "y": -60.27462140472403 + "x": 18.689175061831122, + "y": -62.81166351070223 }, "dragging": false }, @@ -1046,8 +1046,8 @@ "height": 355, "id": "setVariable_1", "position": { - "x": 1482.8091395089693, - "y": -33.943355212355016 + "x": 1516.338224315744, + "y": -133.6986023683283 }, "type": "customNode", "data": { @@ -1079,7 +1079,7 @@ } ], "inputs": { - "input": ["{{llmChain_0.data.instance}}"], + "input": ["{{ifElseFunction_0.data.instance}}"], "variableName": "sqlQuery" }, "outputAnchors": [ @@ -1105,8 +1105,454 @@ }, "selected": false, "positionAbsolute": { - "x": 1482.8091395089693, - "y": -33.943355212355016 + "x": 1516.338224315744, + "y": -133.6986023683283 + }, + 
"dragging": false + }, + { + "width": 300, + "height": 755, + "id": "ifElseFunction_0", + "position": { + "x": 1147.8020838770517, + "y": -237.39478763322148 + }, + "type": "customNode", + "data": { + "id": "ifElseFunction_0", + "label": "IfElse Function", + "version": 1, + "name": "ifElseFunction", + "type": "IfElseFunction", + "baseClasses": ["IfElseFunction", "Utilities"], + "category": "Utilities", + "description": "Split flows based on If Else javascript functions", + "inputParams": [ + { + "label": "Input Variables", + "name": "functionInputVariables", + "description": "Input variables can be used in the function with prefix $. For example: $var", + "type": "json", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "ifElseFunction_0-input-functionInputVariables-json" + }, + { + "label": "IfElse Name", + "name": "functionName", + "type": "string", + "optional": true, + "placeholder": "If Condition Match", + "id": "ifElseFunction_0-input-functionName-string" + }, + { + "label": "If Function", + "name": "ifFunction", + "description": "Function must return a value", + "type": "code", + "rows": 2, + "default": "if (\"hello\" == \"hello\") {\n return true;\n}", + "id": "ifElseFunction_0-input-ifFunction-code" + }, + { + "label": "Else Function", + "name": "elseFunction", + "description": "Function must return a value", + "type": "code", + "rows": 2, + "default": "return false;", + "id": "ifElseFunction_0-input-elseFunction-code" + } + ], + "inputAnchors": [], + "inputs": { + "functionInputVariables": "{\"sqlQuery\":\"{{llmChain_0.data.instance}}\"}", + "functionName": "IF SQL Query contains SELECT and WHERE", + "ifFunction": "const sqlQuery = $sqlQuery.trim();\n\nif (sqlQuery.includes(\"SELECT\") && sqlQuery.includes(\"WHERE\")) {\n return sqlQuery;\n}", + "elseFunction": "return $sqlQuery;" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "ifElseFunction_0-output-returnTrue-string|number|boolean|json|array", + "name": "returnTrue", + "label": "True", + "type": "string | number | boolean | json | array" + }, + { + "id": "ifElseFunction_0-output-returnFalse-string|number|boolean|json|array", + "name": "returnFalse", + "label": "False", + "type": "string | number | boolean | json | array" + } + ], + "default": "returnTrue" + } + ], + "outputs": { + "output": "returnTrue" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1147.8020838770517, + "y": -237.39478763322148 + }, + "dragging": false + }, + { + "width": 300, + "height": 511, + "id": "promptTemplate_2", + "position": { + "x": 1530.0647779039386, + "y": 944.9904482583751 + }, + "type": "customNode", + "data": { + "id": "promptTemplate_2", + "label": "Prompt Template", + "version": 1, + "name": "promptTemplate", + "type": "PromptTemplate", + "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate", "Runnable"], + "category": "Prompts", + "description": "Schema to represent a basic prompt for an LLM", + "inputParams": [ + { + "label": "Template", + "name": "template", + "type": "string", + "rows": 4, + "placeholder": "What is a good name for a company that makes {product}?", + "id": "promptTemplate_2-input-template-string" + }, + { + "label": "Format Prompt Values", + "name": "promptValues", + "type": "json", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "promptTemplate_2-input-promptValues-json" + } + ], + "inputAnchors": [], + "inputs": { + "template": "Politely say 
\"I'm not able to answer query\"", + "promptValues": "{\"schema\":\"{{setVariable_0.data.instance}}\",\"question\":\"{{question}}\"}" + }, + "outputAnchors": [ + { + "id": "promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", + "name": "promptTemplate", + "label": "PromptTemplate", + "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1530.0647779039386, + "y": 944.9904482583751 + }, + "dragging": false + }, + { + "width": 300, + "height": 574, + "id": "chatOpenAI_2", + "position": { + "x": 1537.0307928738125, + "y": 330.7727229610632 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_2", + "label": "ChatOpenAI", + "version": 2, + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["openAIApi"], + "id": "chatOpenAI_2-input-credential-credential" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, + { + "label": "gpt-4-0613", + "name": "gpt-4-0613" + }, + { + "label": "gpt-4-32k", + "name": "gpt-4-32k" + }, + { + "label": "gpt-4-32k-0613", + "name": "gpt-4-32k-0613" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, + { + "label": "gpt-3.5-turbo-0613", + "name": "gpt-3.5-turbo-0613" + }, + { + "label": "gpt-3.5-turbo-16k", + "name": "gpt-3.5-turbo-16k" + }, + { + "label": "gpt-3.5-turbo-16k-0613", + "name": "gpt-3.5-turbo-16k-0613" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_2-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "step": 0.1, + "default": 0.9, + "optional": true, + "id": "chatOpenAI_2-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": 
"baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-baseOptions-json" + } + ], + "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": "chatOpenAI_2-input-cache-BaseCache" + } + ], + "inputs": { + "cache": "", + "modelName": "gpt-3.5-turbo-16k", + "temperature": "0.7", + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "basepath": "", + "baseOptions": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1537.0307928738125, + "y": 330.7727229610632 + }, + "dragging": false + }, + { + "width": 300, + "height": 507, + "id": "llmChain_2", + "position": { + "x": 2077.2866807477812, + "y": 958.6594167386253 + }, + "type": "customNode", + "data": { + "id": "llmChain_2", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_2-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_2-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_2-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_2-input-outputParser-BaseLLMOutputParser" + }, + { + "label": "Input Moderation", + "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", + "name": "inputModeration", + "type": "Moderation", + "optional": true, + "list": true, + "id": "llmChain_2-input-inputModeration-Moderation" + } + ], + "inputs": { + "model": "{{chatOpenAI_2.data.instance}}", + "prompt": "{{promptTemplate_2.data.instance}}", + "outputParser": "", + "inputModeration": "", + "chainName": "Fallback Chain" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_2-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_2-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 2077.2866807477812, + "y": 958.6594167386253 }, "dragging": false } @@ -1211,17 +1657,6 @@ "label": "" } }, - { - "source": "llmChain_0", - "sourceHandle": "llmChain_0-output-outputPrediction-string|json", - "target": "setVariable_1", - "targetHandle": "setVariable_1-input-input-string | number | boolean | json | array", - "type": "buttonedge", - "id": 
"llmChain_0-llmChain_0-output-outputPrediction-string|json-setVariable_1-setVariable_1-input-input-string | number | boolean | json | array", - "data": { - "label": "" - } - }, { "source": "setVariable_1", "sourceHandle": "setVariable_1-output-output-string|number|boolean|json|array", @@ -1232,6 +1667,46 @@ "data": { "label": "" } + }, + { + "source": "llmChain_0", + "sourceHandle": "llmChain_0-output-outputPrediction-string|json", + "target": "ifElseFunction_0", + "targetHandle": "ifElseFunction_0-input-functionInputVariables-json", + "type": "buttonedge", + "id": "llmChain_0-llmChain_0-output-outputPrediction-string|json-ifElseFunction_0-ifElseFunction_0-input-functionInputVariables-json" + }, + { + "source": "ifElseFunction_0", + "sourceHandle": "ifElseFunction_0-output-returnTrue-string|number|boolean|json|array", + "target": "setVariable_1", + "targetHandle": "setVariable_1-input-input-string | number | boolean | json | array", + "type": "buttonedge", + "id": "ifElseFunction_0-ifElseFunction_0-output-returnTrue-string|number|boolean|json|array-setVariable_1-setVariable_1-input-input-string | number | boolean | json | array" + }, + { + "source": "ifElseFunction_0", + "sourceHandle": "ifElseFunction_0-output-returnFalse-string|number|boolean|json|array", + "target": "promptTemplate_2", + "targetHandle": "promptTemplate_2-input-promptValues-json", + "type": "buttonedge", + "id": "ifElseFunction_0-ifElseFunction_0-output-returnFalse-string|number|boolean|json|array-promptTemplate_2-promptTemplate_2-input-promptValues-json" + }, + { + "source": "chatOpenAI_2", + "sourceHandle": "chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "llmChain_2", + "targetHandle": "llmChain_2-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_2-chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_2-llmChain_2-input-model-BaseLanguageModel" + }, + { + "source": "promptTemplate_2", + "sourceHandle": "promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", + "target": "llmChain_2", + "targetHandle": "llmChain_2-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "promptTemplate_2-promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable-llmChain_2-llmChain_2-input-prompt-BasePromptTemplate" } ] } diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index 9c31a33374d..8766e19365f 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -20,13 +20,14 @@ import { ICredentialReturnResponse, chatType, IChatMessage, - IReactFlowEdge + IReactFlowEdge, + IDepthQueue } from './Interface' import { getNodeModulesPackagePath, getStartingNodes, buildLangchain, - getEndingNode, + getEndingNodes, constructGraphs, resolveVariables, isStartNodeDependOnInput, @@ -432,19 +433,24 @@ export class App { const edges = parsedFlowData.edges const { graph, nodeDependencies } = constructGraphs(nodes, edges) - const endingNodeId = getEndingNode(nodeDependencies, graph) - if (!endingNodeId) return res.status(500).send(`Ending node ${endingNodeId} not found`) + const endingNodeIds = getEndingNodes(nodeDependencies, graph) + if (!endingNodeIds.length) return res.status(500).send(`Ending nodes not found`) - const endingNodeData = nodes.find((nd) => nd.id === endingNodeId)?.data - if (!endingNodeData) return res.status(500).send(`Ending node ${endingNodeId} data not found`) + const 
endingNodes = nodes.filter((nd) => endingNodeIds.includes(nd.id)) - if (endingNodeData && endingNodeData.category !== 'Chains' && endingNodeData.category !== 'Agents') { - return res.status(500).send(`Ending node must be either a Chain or Agent`) - } + let isStreaming = false + for (const endingNode of endingNodes) { + const endingNodeData = endingNode.data + if (!endingNodeData) return res.status(500).send(`Ending node ${endingNode.id} data not found`) - const obj = { - isStreaming: isFlowValidForStream(nodes, endingNodeData) + if (endingNodeData && endingNodeData.category !== 'Chains' && endingNodeData.category !== 'Agents') { + return res.status(500).send(`Ending node must be either a Chain or Agent`) + } + + isStreaming = isFlowValidForStream(nodes, endingNodeData) } + + const obj = { isStreaming } return res.json(obj) }) @@ -1460,48 +1466,65 @@ export class App { /*** Get Ending Node with Directed Graph ***/ const { graph, nodeDependencies } = constructGraphs(nodes, edges) const directedGraph = graph - const endingNodeId = getEndingNode(nodeDependencies, directedGraph) - if (!endingNodeId) return res.status(500).send(`Ending node ${endingNodeId} not found`) + const endingNodeIds = getEndingNodes(nodeDependencies, directedGraph) + if (!endingNodeIds.length) return res.status(500).send(`Ending nodes not found`) - const endingNodeData = nodes.find((nd) => nd.id === endingNodeId)?.data - if (!endingNodeData) return res.status(500).send(`Ending node ${endingNodeId} data not found`) + const endingNodes = nodes.filter((nd) => endingNodeIds.includes(nd.id)) + for (const endingNode of endingNodes) { + const endingNodeData = endingNode.data + if (!endingNodeData) return res.status(500).send(`Ending node ${endingNode.id} data not found`) - if (endingNodeData && endingNodeData.category !== 'Chains' && endingNodeData.category !== 'Agents' && !isUpsert) { - return res.status(500).send(`Ending node must be either a Chain or Agent`) - } + if (endingNodeData && endingNodeData.category !== 'Chains' && endingNodeData.category !== 'Agents') { + return res.status(500).send(`Ending node must be either a Chain or Agent`) + } - if ( - endingNodeData.outputs && - Object.keys(endingNodeData.outputs).length && - !Object.values(endingNodeData.outputs).includes(endingNodeData.name) && - !isUpsert - ) { - return res - .status(500) - .send( - `Output of ${endingNodeData.label} (${endingNodeData.id}) must be ${endingNodeData.label}, can't be an Output Prediction` - ) - } + if ( + endingNodeData.outputs && + Object.keys(endingNodeData.outputs).length && + !Object.values(endingNodeData.outputs).includes(endingNodeData.name) && + !isUpsert + ) { + return res + .status(500) + .send( + `Output of ${endingNodeData.label} (${endingNodeData.id}) must be ${endingNodeData.label}, can't be an Output Prediction` + ) + } - isStreamValid = isFlowValidForStream(nodes, endingNodeData) + isStreamValid = isFlowValidForStream(nodes, endingNodeData) + } let chatHistory: IMessage[] | string = incomingInput.history - if ( - endingNodeData.inputs?.memory && - !incomingInput.history && - (incomingInput.chatId || incomingInput.overrideConfig?.sessionId) - ) { - const memoryNodeId = endingNodeData.inputs?.memory.split('.')[0].replace('{{', '') - const memoryNode = nodes.find((node) => node.data.id === memoryNodeId) - if (memoryNode) { - chatHistory = await replaceChatHistory(memoryNode, incomingInput, this.AppDataSource, databaseEntities, logger) + + // When {{chat_history}} is used in Prompt Template, fetch the chat conversations from 
memory
+            for (const endingNode of endingNodes) {
+                const endingNodeData = endingNode.data
+                if (!endingNodeData.inputs?.memory) continue
+
+                if (!incomingInput.history && (incomingInput.chatId || incomingInput.overrideConfig?.sessionId)) {
+                    const memoryNodeId = endingNodeData.inputs?.memory.split('.')[0].replace('{{', '')
+                    const memoryNode = nodes.find((node) => node.data.id === memoryNodeId)
+                    if (memoryNode) {
+                        chatHistory = await replaceChatHistory(memoryNode, incomingInput, this.AppDataSource, databaseEntities, logger)
+                    }
                 }
             }
 
-            /*** Get Starting Nodes with Non-Directed Graph ***/
-            const constructedObj = constructGraphs(nodes, edges, true)
+            /*** Get Starting Nodes with Reversed Graph ***/
+            const constructedObj = constructGraphs(nodes, edges, { isReversed: true })
             const nonDirectedGraph = constructedObj.graph
-            const { startingNodeIds, depthQueue } = getStartingNodes(nonDirectedGraph, endingNodeId)
+            let startingNodeIds: string[] = []
+            let depthQueue: IDepthQueue = {}
+            // Merge the starting nodes and depth queues of every ending node, then dedupe
+            for (const endingNodeId of endingNodeIds) {
+                // named `result` to avoid shadowing the Express `res` response object
+                const result = getStartingNodes(nonDirectedGraph, endingNodeId)
+                startingNodeIds.push(...result.startingNodeIds)
+                depthQueue = Object.assign(depthQueue, result.depthQueue)
+            }
+            startingNodeIds = [...new Set(startingNodeIds)]
+
             const startingNodes = nodes.filter((nd) => startingNodeIds.includes(nd.id))
 
             logger.debug(`[server]: Start building chatflow ${chatflowid}`)
@@ -1509,6 +1532,7 @@
             const reactFlowNodes = await buildLangchain(
                 startingNodeIds,
                 nodes,
+                edges,
                 graph,
                 depthQueue,
                 this.nodesPool.componentNodes,
@@ -1522,13 +1546,18 @@
                 isUpsert,
                 incomingInput.stopNodeId
             )
+
+            // If the request is an upsert, stop here
             if (isUpsert) {
                 this.chatflowPool.add(chatflowid, undefined, startingNodes, incomingInput?.overrideConfig)
                 return res.status(201).send('Successfully Upserted')
             }
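+
+            // With ifElse branching a flow can have several ending nodes, but only one branch survives
+            // pruning; buildLangchain moves the surviving ending node to the last position of
+            // reactFlowNodes, so the fallback to the last element below is safe.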
-            const nodeToExecute = reactFlowNodes.find((node: IReactFlowNode) => node.id === endingNodeId)
-            if (!nodeToExecute) return res.status(404).send(`Node ${endingNodeId} not found`)
+            const nodeToExecute =
+                endingNodeIds.length === 1
+                    ? reactFlowNodes.find((node: IReactFlowNode) => endingNodeIds[0] === node.id)
+                    : reactFlowNodes[reactFlowNodes.length - 1]
+            if (!nodeToExecute) return res.status(404).send(`Node not found`)
 
             if (incomingInput.overrideConfig) {
                 nodeToExecute.data = replaceInputsWithConfig(nodeToExecute.data, incomingInput.overrideConfig)
diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts
index 8fa9e8d03a6..49cc430f276 100644
--- a/packages/server/src/utils/index.ts
+++ b/packages/server/src/utils/index.ts
@@ -95,9 +95,13 @@ export const getNodeModulesPackagePath = (packageName: string): string => {
  * Construct graph and node dependencies score
  * @param {IReactFlowNode[]} reactFlowNodes
  * @param {IReactFlowEdge[]} reactFlowEdges
- * @param {boolean} isNondirected
+ * @param {{ isNonDirected?: boolean, isReversed?: boolean }} options
  */
-export const constructGraphs = (reactFlowNodes: IReactFlowNode[], reactFlowEdges: IReactFlowEdge[], isNondirected = false) => {
+export const constructGraphs = (
+    reactFlowNodes: IReactFlowNode[],
+    reactFlowEdges: IReactFlowEdge[],
+    options?: { isNonDirected?: boolean; isReversed?: boolean }
+) => {
     const nodeDependencies = {} as INodeDependencies
     const graph = {} as INodeDirectedGraph
 
@@ -107,6 +111,23 @@ export const constructGraphs = (reactFlowNodes: IReactFlowNode[], reactFlowEdges
         graph[nodeId] = []
     }
 
+    // Reversed graph: point every edge target back at its source, so the graph can be
+    // walked from an ending node towards its starting nodes
+    if (options && options.isReversed) {
+        for (let i = 0; i < reactFlowEdges.length; i += 1) {
+            const source = reactFlowEdges[i].source
+            const target = reactFlowEdges[i].target
+
+            if (Object.prototype.hasOwnProperty.call(graph, target)) {
+                graph[target].push(source)
+            } else {
+                graph[target] = [source]
+            }
+
+            nodeDependencies[target] += 1
+        }
+
+        return { graph, nodeDependencies }
+    }
+
     for (let i = 0; i < reactFlowEdges.length; i += 1) {
         const source = reactFlowEdges[i].source
         const target = reactFlowEdges[i].target
@@ -117,7 +138,7 @@ export const constructGraphs = (reactFlowNodes: IReactFlowNode[], reactFlowEdges
             graph[source] = [target]
         }
 
-        if (isNondirected) {
+        if (options && options.isNonDirected) {
             if (Object.prototype.hasOwnProperty.call(graph, target)) {
                 graph[target].push(source)
             } else {
@@ -179,21 +200,49 @@ export const getStartingNodes = (graph: INodeDirectedGraph, endNodeId: string) =
     return { startingNodeIds, depthQueue: depthQueueReversed }
 }
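+// For illustration (annotation, not part of the original patch), using one edge from the
+// marketplace IfElse flow, llmChain_0 -> ifElseFunction_0:
+//   constructGraphs(nodes, edges)                          => { llmChain_0: ['ifElseFunction_0'], ifElseFunction_0: [] }
+//   constructGraphs(nodes, edges, { isNonDirected: true }) => also adds the back-link ifElseFunction_0 -> llmChain_0
+//   constructGraphs(nodes, edges, { isReversed: true })    => { llmChain_0: [], ifElseFunction_0: ['llmChain_0'] }
+// getStartingNodes walks the reversed graph from each ending node back to its sources.
+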
+/**
+ * Get all connected nodes from a start node
+ * @param {INodeDirectedGraph} graph
+ * @param {string} startNodeId
+ */
+export const getAllConnectedNodes = (graph: INodeDirectedGraph, startNodeId: string) => {
+    const visited = new Set<string>()
+    const queue: string[] = [startNodeId]
+
+    // Breadth-first walk collecting every node reachable from startNodeId
+    while (queue.length > 0) {
+        const currentNode = queue.shift()!
+
+        if (visited.has(currentNode)) {
+            continue
+        }
+
+        visited.add(currentNode)
+
+        for (const neighbor of graph[currentNode]) {
+            if (!visited.has(neighbor)) {
+                queue.push(neighbor)
+            }
+        }
+    }
+
+    return [...visited]
+}
+
 /**
- * Get ending node and check if flow is valid
+ * Get ending nodes and check if flow is valid
  * @param {INodeDependencies} nodeDependencies
  * @param {INodeDirectedGraph} graph
  */
-export const getEndingNode = (nodeDependencies: INodeDependencies, graph: INodeDirectedGraph) => {
-    let endingNodeId = ''
+export const getEndingNodes = (nodeDependencies: INodeDependencies, graph: INodeDirectedGraph) => {
+    const endingNodeIds: string[] = []
     Object.keys(graph).forEach((nodeId) => {
         if (Object.keys(nodeDependencies).length === 1) {
-            endingNodeId = nodeId
+            endingNodeIds.push(nodeId)
         } else if (!graph[nodeId].length && nodeDependencies[nodeId] > 0) {
-            endingNodeId = nodeId
+            endingNodeIds.push(nodeId)
         }
     })
-    return endingNodeId
+    return endingNodeIds
 }
 
 /**
@@ -213,6 +262,7 @@
 export const buildLangchain = async (
     startingNodeIds: string[],
     reactFlowNodes: IReactFlowNode[],
+    reactFlowEdges: IReactFlowEdge[],
     graph: INodeDirectedGraph,
     depthQueue: IDepthQueue,
     componentNodes: IComponentNodes,
@@ -232,6 +282,7 @@
     const nodeQueue = [] as INodeQueue[]
     const exploredNode = {} as IExploredNode
     const dynamicVariables = {} as Record<string, unknown>
+    let ignoreNodeIds: string[] = []
 
     // In the case of infinite loop, only max 3 loops will be executed
     const maxLoop = 3
@@ -296,6 +347,29 @@
                     outputResult = outputResult?.output
                 }
 
+                // An ifElse node resolves to { output, type }: type === true means the If branch
+                // matched, type === false means the Else branch ran. Prune the branch that was
+                // NOT taken by looking up the opposite output handle.
+                if (reactFlowNode.data.name === 'ifElseFunction' && typeof outputResult === 'object') {
+                    let sourceHandle = ''
+                    if (outputResult.type === true) {
+                        // If matched: discard everything hanging off the False output
+                        sourceHandle = `${nodeId}-output-returnFalse-string|number|boolean|json|array`
+                    } else if (outputResult.type === false) {
+                        // Else ran: discard everything hanging off the True output
+                        sourceHandle = `${nodeId}-output-returnTrue-string|number|boolean|json|array`
+                    }
+
+                    const ifElseEdge = reactFlowEdges.find((edg) => edg.source === nodeId && edg.sourceHandle === sourceHandle)
+                    if (ifElseEdge) {
+                        // Rebuild the graph without the pruned edge, then ignore every node still
+                        // connected to its target, i.e. the whole dead branch
+                        const { graph } = constructGraphs(
+                            reactFlowNodes,
+                            reactFlowEdges.filter((edg) => !(edg.source === nodeId && edg.sourceHandle === sourceHandle)),
+                            { isNonDirected: true }
+                        )
+                        ignoreNodeIds.push(ifElseEdge.target, ...getAllConnectedNodes(graph, ifElseEdge.target))
+                        ignoreNodeIds = [...new Set(ignoreNodeIds)]
+                    }
+
+                    outputResult = outputResult?.output
+                }
+
                 flowNodes[nodeIndex].data.instance = outputResult
 
                 logger.debug(`[server]: Finished initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`)
@@ -305,7 +379,7 @@
             throw new Error(e)
         }
 
-        const neighbourNodeIds = graph[nodeId]
+        let neighbourNodeIds = graph[nodeId]
         const nextDepth = depth + 1
 
         // Find other nodes that are on the same depth level
@@ -316,9 +390,11 @@
             neighbourNodeIds.push(id)
         }
 
+        // Skip any node that sits on a pruned ifElse branch
+        neighbourNodeIds = neighbourNodeIds.filter((neigh) => !ignoreNodeIds.includes(neigh))
+
         for (let i = 0; i < neighbourNodeIds.length; i += 1) {
             const neighNodeId = neighbourNodeIds[i]
 
             // If nodeId has been seen, cycle detected
             if (Object.prototype.hasOwnProperty.call(exploredNode, neighNodeId)) {
                 const { remainingLoop, lastSeenDepth } = exploredNode[neighNodeId]
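A minimal walk-through of the pruning above, assuming the marketplace IfElse flow and a truthy If result (node ids taken from IfElse.json):

```ts
// Edges: ifElseFunction_0 --returnTrue-->  setVariable_1
//        ifElseFunction_0 --returnFalse--> promptTemplate_2 --> llmChain_2 <-- chatOpenAI_2
// The If function matched, so outputResult = { output: <value>, type: true } and the
// handle of the branch NOT taken is looked up:
const sourceHandle = 'ifElseFunction_0-output-returnFalse-string|number|boolean|json|array'
// Dropping that edge disconnects the False branch; getAllConnectedNodes then returns
// ['promptTemplate_2', 'llmChain_2', 'chatOpenAI_2'], all of which land in ignoreNodeIds
// and are skipped for the rest of the build.
```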
                    nodeQueue.push({ nodeId: neighNodeId, depth: nextDepth })
                }
            }
+
+        // A node with no remaining neighbours is an ending node: move it to the end of
+        // flowNodes so the caller can fall back to the last element as the node to execute
+        if (!neighbourNodeIds.length) {
+            const index = flowNodes.findIndex((nd) => nd.data.id === nodeId)
+            flowNodes.push(flowNodes.splice(index, 1)[0])
+        }
     }
     return flowNodes
 }
diff --git a/packages/ui/src/ui-component/json/SelectVariable.js b/packages/ui/src/ui-component/json/SelectVariable.js
index 4ff7b5da13e..50c9f83998f 100644
--- a/packages/ui/src/ui-component/json/SelectVariable.js
+++ b/packages/ui/src/ui-component/json/SelectVariable.js
@@ -147,7 +147,11 @@ const SelectVariable = ({ availableNodesForVariable, disabled = false, onSelectA
                                         node.data.inputs.variableName ?? node.data.id
                                     }
-                                    secondary={`${selectedOutputAnchor?.label ?? 'output'} from ${node.data.label}`}
+                                    secondary={
+                                        node.data.name === 'ifElseFunction'
+                                            ? `${node.data.description}`
+                                            : `${selectedOutputAnchor?.label ?? 'output'} from ${node.data.label}`
+                                    }
                                 />
diff --git a/packages/ui/src/utils/genericHelper.js b/packages/ui/src/utils/genericHelper.js
index b34d6c720f0..57ba88926f9 100644
--- a/packages/ui/src/utils/genericHelper.js
+++ b/packages/ui/src/utils/genericHelper.js
@@ -182,15 +182,6 @@ export const initNode = (nodeData, newNodeId) => {
     return nodeData
 }
 
-export const getEdgeLabelName = (source) => {
-    const sourceSplit = source.split('-')
-    if (sourceSplit.length && sourceSplit[0].includes('ifElse')) {
-        const outputAnchorsIndex = sourceSplit[sourceSplit.length - 1]
-        return outputAnchorsIndex === '0' ? 'true' : 'false'
-    }
-    return ''
-}
-
 export const isValidConnection = (connection, reactFlowInstance) => {
     const sourceHandle = connection.sourceHandle
     const targetHandle = connection.targetHandle
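For context (a sketch, not part of the patch): the removed helper appears to predate the current handle-id scheme and would mislabel every branch, which is why the True/False captions move onto the node itself in NodeOutputHandler.js below:

```ts
const handle = 'ifElseFunction_0-output-returnTrue-string|number|boolean|json|array'
handle.split('-').pop() // 'string|number|boolean|json|array', never the '0' the helper tested for,
// so getEdgeLabelName would have returned 'false' for both the True and the False output.
```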
diff --git a/packages/ui/src/views/canvas/NodeInputHandler.js b/packages/ui/src/views/canvas/NodeInputHandler.js
index 92a43cf8097..33e99736271 100644
--- a/packages/ui/src/views/canvas/NodeInputHandler.js
+++ b/packages/ui/src/views/canvas/NodeInputHandler.js
@@ -330,11 +330,11 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false, isA
                 {inputParam.type === 'code' && (
                     <>
-
+
                         />
                     </>
                 )}
diff --git a/packages/ui/src/views/canvas/NodeOutputHandler.js b/packages/ui/src/views/canvas/NodeOutputHandler.js
--- a/packages/ui/src/views/canvas/NodeOutputHandler.js
+++ b/packages/ui/src/views/canvas/NodeOutputHandler.js
-                {outputAnchor.type === 'options' && outputAnchor.options && outputAnchor.options.length > 0 && (
-                    <>
-                        <CustomWidthTooltip
-                            placement='left'
-                            title={outputAnchor.options.find((opt) => opt.name === data.outputs?.[outputAnchor.name])?.type ?? outputAnchor.type}
-                        >
-                            <Handle
-                                type='source'
-                                position={Position.Right}
-                                key={outputAnchor.id}
-                                id={outputAnchor.options.find((opt) => opt.name === data.outputs?.[outputAnchor.name])?.id ?? ''}
-                                isValidConnection={(connection) => isValidConnection(connection, reactFlowInstance)}
-                                style={{
-                                    height: 10,
-                                    width: 10,
-                                    backgroundColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary,
-                                    top: position
-                                }}
-                            />
-                        </CustomWidthTooltip>
-                        <Box sx={{ p: 2, textAlign: 'end' }}>
-                            <Dropdown
-                                disabled={disabled}
-                                disableClearable={true}
-                                name={outputAnchor.name}
-                                options={outputAnchor.options}
-                                onSelect={(newValue) => {
-                                    setDropdownValue(newValue)
-                                    data.outputs[outputAnchor.name] = newValue
-                                }}
-                                value={data.outputs[outputAnchor.name] ?? outputAnchor.default ?? 'choose an option'}
-                            />
-                        </Box>
-                    </>
-                )}
+                {data.name === 'ifElseFunction' && outputAnchor.type === 'options' && outputAnchor.options && (
+                    <>
+                        <CustomWidthTooltip
+                            placement='left'
+                            title={
+                                outputAnchor.options.find((opt) => opt.name === data.outputs?.[outputAnchor.name])?.type ??
+                                outputAnchor.type
+                            }
+                        >
+                            <Handle
+                                type='source'
+                                position={Position.Right}
+                                key={outputAnchor.options.find((opt) => opt.name === 'returnTrue')?.id ?? ''}
+                                id={outputAnchor.options.find((opt) => opt.name === 'returnTrue')?.id ?? ''}
+                                isValidConnection={(connection) => isValidConnection(connection, reactFlowInstance)}
+                                style={{
+                                    height: 10,
+                                    width: 10,
+                                    backgroundColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary,
+                                    top: position - 25
+                                }}
+                            />
+                        </CustomWidthTooltip>
+                        <Typography sx={{ p: 1, textAlign: 'end' }}>
+                            True
+                        </Typography>
+                        <CustomWidthTooltip
+                            placement='left'
+                            title={
+                                outputAnchor.options.find((opt) => opt.name === data.outputs?.[outputAnchor.name])?.type ??
+                                outputAnchor.type
+                            }
+                        >
+                            <Handle
+                                type='source'
+                                position={Position.Right}
+                                key={outputAnchor.options.find((opt) => opt.name === 'returnFalse')?.id ?? ''}
+                                id={outputAnchor.options.find((opt) => opt.name === 'returnFalse')?.id ?? ''}
+                                isValidConnection={(connection) => isValidConnection(connection, reactFlowInstance)}
+                                style={{
+                                    height: 10,
+                                    width: 10,
+                                    backgroundColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary,
+                                    top: position + 25
+                                }}
+                            />
+                        </CustomWidthTooltip>
+                        <Typography sx={{ p: 1, textAlign: 'end' }}>
+                            False
+                        </Typography>
+                    </>
+                )}
+                {data.name !== 'ifElseFunction' &&
+                    outputAnchor.type === 'options' &&
+                    outputAnchor.options &&
+                    outputAnchor.options.length > 0 && (
+                        <>
+                            <CustomWidthTooltip
+                                placement='left'
+                                title={
+                                    outputAnchor.options.find((opt) => opt.name === data.outputs?.[outputAnchor.name])?.type ??
+                                    outputAnchor.type
+                                }
+                            >
+                                <Handle
+                                    type='source'
+                                    position={Position.Right}
+                                    key={outputAnchor.id}
+                                    id={outputAnchor.options.find((opt) => opt.name === data.outputs?.[outputAnchor.name])?.id ?? ''}
+                                    isValidConnection={(connection) => isValidConnection(connection, reactFlowInstance)}
+                                    style={{
+                                        height: 10,
+                                        width: 10,
+                                        backgroundColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary,
+                                        top: position
+                                    }}
+                                />
+                            </CustomWidthTooltip>
+                            <Box sx={{ p: 2, textAlign: 'end' }}>
+                                <Dropdown
+                                    disabled={disabled}
+                                    disableClearable={true}
+                                    name={outputAnchor.name}
+                                    options={outputAnchor.options}
+                                    onSelect={(newValue) => {
+                                        setDropdownValue(newValue)
+                                        data.outputs[outputAnchor.name] = newValue
+                                    }}
+                                    value={data.outputs[outputAnchor.name] ?? outputAnchor.default ?? 'choose an option'}
+                                />
+                            </Box>
+                        </>
+                    )}
                 )
             }
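Tying the UI back to the server (a sketch; ids from the marketplace flow): the two Handle ids rendered above are exactly the sourceHandle strings that buildLangchain matches when pruning a branch:

```ts
const trueHandle = 'ifElseFunction_0-output-returnTrue-string|number|boolean|json|array' // top: position - 25
const falseHandle = 'ifElseFunction_0-output-returnFalse-string|number|boolean|json|array' // top: position + 25
// Edges drawn from these anchors need no label payload anymore; the True/False captions
// are rendered beside the handles, replacing the old data.label on each edge.
```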
diff --git a/packages/ui/src/views/canvas/index.js b/packages/ui/src/views/canvas/index.js
index 08698398798..9aa53cc623c 100644
--- a/packages/ui/src/views/canvas/index.js
+++ b/packages/ui/src/views/canvas/index.js
@@ -40,7 +40,7 @@ import useConfirm from 'hooks/useConfirm'
 import { IconX } from '@tabler/icons'
 
 // utils
-import { getUniqueNodeId, initNode, getEdgeLabelName, rearrangeToolsOrdering, getUpsertDetails } from 'utils/genericHelper'
+import { getUniqueNodeId, initNode, rearrangeToolsOrdering, getUpsertDetails } from 'utils/genericHelper'
 import useNotifier from 'utils/useNotifier'
 
 // const
@@ -100,8 +100,7 @@ const Canvas = () => {
         const newEdge = {
             ...params,
             type: 'buttonedge',
-            id: `${params.source}-${params.sourceHandle}-${params.target}-${params.targetHandle}`,
-            data: { label: getEdgeLabelName(params.sourceHandle) }
+            id: `${params.source}-${params.sourceHandle}-${params.target}-${params.targetHandle}`
         }
 
         const targetNodeId = params.targetHandle.split('-')[0]