From ea9187472f9a2e92524bafc101d3529bb47df2a0 Mon Sep 17 00:00:00 2001 From: PQ32 Developer Date: Sun, 10 May 2026 15:10:11 -0700 Subject: [PATCH] Patch 2: Increase context limit fallback via AGENT_CONTEXT_WINDOW_FALLBACK env var Default fallback increased from 8,000 to 2,000,000 tokens. When the provider doesn't declare promptWindowLimit (e.g. Grok via OpenRouter), the 8,000-token fallback was far too small, causing severe prompt truncation. Configure via AGENT_CONTEXT_WINDOW_FALLBACK in .env to override. --- server/utils/agents/aibitat/providers/ai-provider.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/utils/agents/aibitat/providers/ai-provider.js b/server/utils/agents/aibitat/providers/ai-provider.js index 2a51bb16..731f7fcb 100644 --- a/server/utils/agents/aibitat/providers/ai-provider.js +++ b/server/utils/agents/aibitat/providers/ai-provider.js @@ -420,7 +420,7 @@ class Provider { */ static contextLimit(provider = "openai", modelName) { const llm = getLLMProviderClass({ provider }); - if (!llm || !llm.hasOwnProperty("promptWindowLimit")) return 8_000; + if (!llm || !llm.hasOwnProperty("promptWindowLimit")) return Number(process.env.AGENT_CONTEXT_WINDOW_FALLBACK) || 2_000_000; return llm.promptWindowLimit(modelName); }