Skip to content

Commit

Permalink
Fix Bedrock model selection when customModel is an empty string (`??` treats `''` as defined, so the model fell back incorrectly; use a truthiness check instead)
Browse files Browse the repository at this point in the history
  • Loading branch information
HenryHengZJ committed Dec 19, 2023
1 parent a1fb5b8 commit 7952ef8
Show file tree
Hide file tree
Showing 3 changed files with 3 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ class AWSChatBedrock_ChatModels implements INode {

const obj: BaseBedrockInput & BaseChatModelParams = {
region: iRegion,
model: customModel ?? iModel,
model: customModel ? customModel : iModel,
maxTokens: parseInt(iMax_tokens_to_sample, 10),
temperature: parseFloat(iTemperature),
streaming: streaming ?? true
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ class AWSBedrockEmbedding_Embeddings implements INode {
const customModel = nodeData.inputs?.customModel as string

const obj: BedrockEmbeddingsParams = {
model: customModel ?? iModel,
model: customModel ? customModel : iModel,
region: iRegion
}

Expand Down
2 changes: 1 addition & 1 deletion packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ class AWSBedrock_LLMs implements INode {
const iMax_tokens_to_sample = nodeData.inputs?.max_tokens_to_sample as string
const cache = nodeData.inputs?.cache as BaseCache
const obj: Partial<BaseBedrockInput> & BaseLLMParams = {
model: customModel ?? iModel,
model: customModel ? customModel : iModel,
region: iRegion,
temperature: parseFloat(iTemperature),
maxTokens: parseInt(iMax_tokens_to_sample, 10)
Expand Down

0 comments on commit 7952ef8

Please sign in to comment.