From 38206a14b324c01bc0ca69fa3e39929ee8fb3395 Mon Sep 17 00:00:00 2001 From: Marcello Fitton <106866560+angelplusultra@users.noreply.github.com> Date: Tue, 21 Apr 2026 14:39:42 -0700 Subject: [PATCH] fix: omit temperature param for Bedrock Claude Opus 4.7 (#5472) * add conditionally pass temperature based on aws bedrock model id * move to config --------- Co-authored-by: Timothy Carambat --- server/utils/AiProviders/bedrock/index.js | 28 +++++++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/server/utils/AiProviders/bedrock/index.js b/server/utils/AiProviders/bedrock/index.js index 2ac112bf..4338e7ca 100644 --- a/server/utils/AiProviders/bedrock/index.js +++ b/server/utils/AiProviders/bedrock/index.js @@ -35,6 +35,15 @@ class AWSBedrockLLM { // Add other models here if identified ]; + /** + * List of Bedrock models observed to not support the `temperature` inference parameter. + * @type {string[]} + */ + noTemperatureModels = [ + "anthropic.claude-opus-4-7", + // Add other models here if identified + ]; + /** * Initializes the AWS Bedrock LLM connector. * @param {object | null} [embedder=null] - An optional embedder instance. Defaults to NativeEmbedder. @@ -103,6 +112,21 @@ class AWSBedrockLLM { return createBedrockCredentials(this.authMethod); } + /** + * Gets the temperature configuration for the AWS Bedrock LLM. + * @param {number} temperature - The temperature to use. + * @returns {{temperature: number}|{}} The temperature configuration object with the temperature value as a float, or an empty object when temperature must be omitted for the model. + */ + temperatureConfig(temperature = this.defaultTemp) { + if (typeof temperature !== "number") return {}; + + // Some model names are prefixed with `us.` and may not be exact matches - so we check with includes to see if the model + // substring matches any of the models in the noTemperatureModels array. 
+ if (this.noTemperatureModels.some((model) => this.model.includes(model))) + return {}; + return { temperature: parseFloat(temperature) }; + } + /** * Gets the configured AWS authentication method ('iam' or 'sessionToken'). * Defaults to 'iam' if the environment variable is invalid. @@ -408,7 +432,7 @@ class AWSBedrockLLM { messages: history, inferenceConfig: { maxTokens: maxTokensToSend, - temperature: temperature ?? this.defaultTemp, + ...this.temperatureConfig(temperature), }, system: systemBlock, }) @@ -483,7 +507,7 @@ class AWSBedrockLLM { messages: history, inferenceConfig: { maxTokens: maxTokensToSend, - temperature: temperature ?? this.defaultTemp, + ...this.temperatureConfig(temperature), }, system: systemBlock, })