Remove LLM Instruction inputVariable requirement (#3954)

This commit is contained in:
Timothy Carambat 2025-06-05 07:43:25 -07:00 committed by GitHub
parent 2938aafae7
commit 271a682824
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 4 additions and 31 deletions

View File

@ -115,7 +115,6 @@ const BLOCK_INFO = {
description: "Process data using LLM instructions",
defaultConfig: {
instruction: "",
inputVariable: "",
resultVariable: "",
},
getSummary: (config) => config.instruction || "No instruction",

View File

@ -7,17 +7,6 @@ export default function LLMInstructionNode({
}) {
return (
<div className="space-y-4">
<div>
<label className="block text-sm font-medium text-theme-text-primary mb-2">
Input Variable
</label>
{renderVariableSelect(
config.inputVariable,
(value) => onConfigChange({ ...config, inputVariable: value }),
"Select input variable"
)}
</div>
<div>
<label className="block text-sm font-medium text-theme-text-primary mb-2">
Instruction

View File

@ -1,24 +1,17 @@
/**
* Execute an LLM instruction flow step
* @param {Object} config Flow step configuration
* @param {{introspect: Function, variables: Object, logger: Function}} context Execution context with introspect function
* @param {{introspect: Function, logger: Function}} context Execution context with introspect function
* @returns {Promise<string>} Processed result
*/
async function executeLLMInstruction(config, context) {
const { instruction, inputVariable, resultVariable } = config;
const { introspect, variables, logger, aibitat } = context;
const { instruction, resultVariable } = config;
const { introspect, logger, aibitat } = context;
logger(
`\x1b[43m[AgentFlowToolExecutor]\x1b[0m - executing LLM Instruction block`
);
introspect(`Processing data with LLM instruction...`);
if (!variables[inputVariable]) {
logger(`Input variable ${inputVariable} (${inputVariable}) not found`);
throw new Error(
`Input variable ${inputVariable} (${inputVariable}) not found`
);
}
try {
logger(
`Sending request to LLM (${aibitat.defaultProvider.provider}::${aibitat.defaultProvider.model})`
@ -26,16 +19,12 @@ async function executeLLMInstruction(config, context) {
introspect(`Sending request to LLM...`);
// Ensure the input is a string since we are sending it to the LLM directly as a message
let input = variables[inputVariable];
let input = instruction;
if (typeof input === "object") input = JSON.stringify(input);
if (typeof input !== "string") input = String(input);
const provider = aibitat.getProviderForConfig(aibitat.defaultProvider);
const completion = await provider.complete([
{
role: "system",
content: `Follow these instructions carefully: ${instruction}`,
},
{
role: "user",
content: input,

View File

@ -104,10 +104,6 @@ const FLOW_TYPES = {
type: "string",
description: "The instruction for the LLM to follow",
},
inputVariable: {
type: "string",
description: "Variable containing the input data to process",
},
resultVariable: {
type: "string",
description: "Variable to store the processed result",