Fix streaming issue for LLM instruction blocks (#5382)

This commit is contained in:
Timothy Carambat 2026-04-07 12:03:07 -07:00 committed by GitHub
parent b7dfa4c278
commit b2404801d1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -23,13 +23,17 @@ async function executeLLMInstruction(config, context) {
   if (typeof input === "object") input = JSON.stringify(input);
   if (typeof input !== "string") input = String(input);
+  let completion;
   const provider = aibitat.getProviderForConfig(aibitat.defaultProvider);
-  const completion = await provider.complete([
-    {
-      role: "user",
-      content: input,
-    },
-  ]);
+  if (provider.supportsAgentStreaming) {
+    completion = await provider.stream(
+      [{ role: "user", content: input }],
+      [],
+      null
+    );
+  } else {
+    completion = await provider.complete([{ role: "user", content: input }]);
+  }
   introspect(`Successfully received LLM response`);
   if (resultVariable) config.resultVariable = resultVariable;