patch AWS credential issue in docker context (#4842)

patch AWS credential issue in docker context
This commit is contained in:
Timothy Carambat 2026-01-08 17:06:49 -08:00 committed by GitHub
parent 54b4b34b75
commit 133b62f9f6
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 58 additions and 36 deletions

View File

@ -6,7 +6,7 @@ concurrency:
on: on:
push: push:
branches: ['upgrade-yt-scraper'] # put your current branch to create a build. Core team only. branches: ['4841-aws-bedrock-api-key'] # put your current branch to create a build. Core team only.
paths-ignore: paths-ignore:
- '**.md' - '**.md'
- 'cloud-deployments/*' - 'cloud-deployments/*'

View File

@ -414,15 +414,11 @@ class AWSBedrockLLM {
`Bedrock Converse API Error (getChatCompletion): ${e.message}`, `Bedrock Converse API Error (getChatCompletion): ${e.message}`,
e e
); );
if ( AWSBedrockLLM.errorToHumanReadable(e, {
e.name === "ValidationException" && model: this.model,
e.message.includes("maximum tokens") maxTokens: maxTokensToSend,
) { method: "getChatCompletion",
throw new Error( });
`AWSBedrock::getChatCompletion failed. Model ${this.model} rejected maxTokens value of ${maxTokensToSend}. Check model documentation for its maximum output token limit and set AWS_BEDROCK_LLM_MAX_OUTPUT_TOKENS if needed. Original error: ${e.message}`
);
}
throw new Error(`AWSBedrock::getChatCompletion failed. ${e.message}`);
}) })
); );
@ -502,18 +498,11 @@ class AWSBedrockLLM {
`Bedrock Converse API Error (streamGetChatCompletion setup): ${e.message}`, `Bedrock Converse API Error (streamGetChatCompletion setup): ${e.message}`,
e e
); );
if ( AWSBedrockLLM.errorToHumanReadable(e, {
e.name === "ValidationException" && model: this.model,
e.message.includes("maximum tokens") maxTokens: maxTokensToSend,
) { method: "streamGetChatCompletion",
throw new Error( });
`AWSBedrock::streamGetChatCompletion failed during setup. Model ${this.model} rejected maxTokens value of ${maxTokensToSend}. Check model documentation for its maximum output token limit and set AWS_BEDROCK_LLM_MAX_OUTPUT_TOKENS if needed. Original error: ${e.message}`
);
}
throw new Error(
`AWSBedrock::streamGetChatCompletion failed during setup. ${e.message}`
);
} }
} }
@ -733,6 +722,34 @@ class AWSBedrockLLM {
const messageArray = this.constructPrompt(promptArgs); const messageArray = this.constructPrompt(promptArgs);
return await messageArrayCompressor(this, messageArray, rawHistory); return await messageArrayCompressor(this, messageArray, rawHistory);
} }
static errorToHumanReadable(
error,
options = { method: "chat", model: "unknown", maxTokens: "unknown" }
) {
if (
error.name === "ValidationException" &&
error.message.includes("maximum tokens")
) {
throw new Error(
`AWSBedrock::${options.method} failed during setup. Model ${options.model} rejected maxTokens value of ${options.maxTokens}. Check model documentation for its maximum output token limit and set AWS_BEDROCK_LLM_MAX_OUTPUT_TOKENS if needed. Original error: ${error.message}`
);
}
if (
error.name === "CredentialsProviderError" &&
error.message.includes("Could not load credentials from any providers")
) {
throw new Error(
`AWSBedrock::${options.method} authentication failed. AWS Bedrock requires a discoverable IAM credentials to be available in the environment (AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY) or by resolving credentials from ~/.aws/credentials or ~/.aws/config files. Original error: ${error.message}`
);
}
// Generic error
throw new Error(
`AWSBedrock::${options.method} failed during setup. ${error.message}`
);
}
} }
module.exports = { module.exports = {

View File

@ -70,11 +70,15 @@ function createBedrockRuntimeClient(authMethod, credentials) {
const clientOpts = { const clientOpts = {
region: process.env.AWS_BEDROCK_LLM_REGION, region: process.env.AWS_BEDROCK_LLM_REGION,
}; };
if (authMethod === "apiKey") {
switch (authMethod) {
case "apiKey":
clientOpts.token = credentials; clientOpts.token = credentials;
clientOpts.authSchemePreference = ["httpBearerAuth"]; clientOpts.authSchemePreference = ["httpBearerAuth"];
} else { break;
default:
clientOpts.credentials = credentials; clientOpts.credentials = credentials;
break;
} }
return new BedrockRuntimeClient(clientOpts); return new BedrockRuntimeClient(clientOpts);
} }

View File

@ -3,6 +3,7 @@ const {
getBedrockAuthMethod, getBedrockAuthMethod,
createBedrockChatClient, createBedrockChatClient,
} = require("../../../AiProviders/bedrock/utils.js"); } = require("../../../AiProviders/bedrock/utils.js");
const { AWSBedrockLLM } = require("../../../AiProviders/bedrock/index.js");
const Provider = require("./ai-provider.js"); const Provider = require("./ai-provider.js");
const InheritMultiple = require("./helpers/classes.js"); const InheritMultiple = require("./helpers/classes.js");
const UnTooled = require("./helpers/untooled.js"); const UnTooled = require("./helpers/untooled.js");
@ -33,6 +34,10 @@ class AWSBedrockProvider extends InheritMultiple([Provider, UnTooled]) {
this.verbose = true; this.verbose = true;
} }
// Streaming is not supported for Bedrock agent invocations; callers should
// expect whole (non-streamed) responses from this provider.
get supportsAgentStreaming() {
return false;
}
/** /**
* Gets the credentials for the AWS Bedrock LLM based on the authentication method provided. * Gets the credentials for the AWS Bedrock LLM based on the authentication method provided.
* @returns {object} The credentials object. * @returns {object} The credentials object.
@ -136,7 +141,10 @@ class AWSBedrockProvider extends InheritMultiple([Provider, UnTooled]) {
cost: 0, cost: 0,
}; };
} catch (error) { } catch (error) {
throw error; AWSBedrockLLM.errorToHumanReadable(error, {
method: "complete",
model: this.model,
});
} }
} }

View File

@ -139,14 +139,7 @@ class AgentHandler {
); );
break; break;
case "bedrock": case "bedrock":
if ( // No validations since there are many possible authentication methods
!process.env.AWS_BEDROCK_LLM_ACCESS_KEY_ID ||
!process.env.AWS_BEDROCK_LLM_ACCESS_KEY ||
!process.env.AWS_BEDROCK_LLM_REGION
)
throw new Error(
"AWS Bedrock Access Keys and region must be provided to use agents."
);
break; break;
case "fireworksai": case "fireworksai":
if (!process.env.FIREWORKS_AI_LLM_API_KEY) if (!process.env.FIREWORKS_AI_LLM_API_KEY)