import React, { useEffect, useRef, useState } from "react";
import AnythingLLMIcon from "@/media/logo/anything-llm-icon.png";
import AgentLLMItem from "./AgentLLMItem";
import { AVAILABLE_LLM_PROVIDERS } from "@/pages/GeneralSettings/LLMPreference";
import { CaretUpDown, Gauge, MagnifyingGlass, X } from "@phosphor-icons/react";
import AgentModelSelection from "../AgentModelSelection";
import { useTranslation } from "react-i18next";
// LLM providers whose APIs support agent (tool-calling) workflows.
// Values must match the `value` field of entries in AVAILABLE_LLM_PROVIDERS
// (pages/GeneralSettings/LLMPreference) — anything not listed here is
// filtered out of the agent provider picker.
// Frozen: this is a read-only lookup table consumed via `.includes()`;
// freezing makes accidental runtime mutation throw in strict mode.
const ENABLED_PROVIDERS = Object.freeze([
  "openai",
  "anthropic",
  "lmstudio",
  "ollama",
  "localai",
  "groq",
  "azure",
  "koboldcpp",
  "togetherai",
  "openrouter",
  "novita",
  "mistral",
  "perplexity",
  "textgenwebui",
  "generic-openai",
  "bedrock",
  "fireworksai",
  "deepseek",
  "ppio",
  "litellm",
  "apipie",
  "xai",
  "nvidia-nim",
  "gemini",
  "moonshotai",
  "cometapi",
  "foundry",
  "zai",
  "giteeai",
  // TODO: More agent support.
  // "cohere", // Has tool calling and will need to build explicit support
  // "huggingface" // Can be done but already has issues with no-chat templated. Needs to be tested.
]);
// Local / self-hosted providers where agent tool-calling is typically less
// reliable (smaller local models); selecting one of these surfaces the
// "agent.performance-warning" banner in the UI.
// Must remain an Array (the component calls `.includes()` on it).
// Frozen: read-only lookup table, never mutated at runtime.
const WARN_PERFORMANCE = Object.freeze([
  "lmstudio",
  "koboldcpp",
  "ollama",
  "localai",
  "textgenwebui",
]);
// Synthetic provider entry representing "no explicit agent provider":
// agents fall back to the workspace LLM or the system-wide LLM.
// Shaped like an AVAILABLE_LLM_PROVIDERS entry so it can be listed
// alongside real providers in the picker.
const LLM_DEFAULT = {
  name: "System Default",
  value: "none",
  logo: AnythingLLMIcon,
  // The default entry has no provider-specific settings UI. The original
  // expression here was truncated (`() => ,` — stripped JSX); rendering
  // nothing via `null` preserves the intent and restores valid syntax.
  options: () => null,
  description:
    "Agents will use the workspace or system LLM unless otherwise specified.",
  requiredConfig: [],
};
// Providers selectable for agent use: the system-default entry first,
// followed by every globally-available provider that supports agents.
const AGENT_CAPABLE_VALUES = new Set(ENABLED_PROVIDERS);
const LLMS = [
  LLM_DEFAULT,
  ...AVAILABLE_LLM_PROVIDERS.filter(({ value }) =>
    AGENT_CAPABLE_VALUES.has(value)
  ),
];
// Workspace-settings panel for choosing which LLM provider powers agent
// (tool-calling) sessions in this workspace.
//
// NOTE(review): the JSX inside the `return` below is garbled in this copy
// of the file — element tags and attributes appear to have been stripped,
// leaving only the `{...}` expression containers. The comments here document
// only the surviving logic; recover the markup from version control before
// editing the render output.
//
// Props:
//   settings     - global system settings (passed through to child pickers).
//   workspace    - the workspace being edited; `workspace.agentProvider`
//                  seeds the initial selection.
//   setHasChanges - parent callback; called with `true` once the user picks
//                  a provider, enabling the parent form's save action.
export default function AgentLLMSelection({
settings,
workspace,
setHasChanges,
}) {
// Providers currently matching the search box text.
const [filteredLLMs, setFilteredLLMs] = useState([]);
// Selected provider `value`; "none" means LLM_DEFAULT (system default).
const [selectedLLM, setSelectedLLM] = useState(
workspace?.agentProvider ?? "none"
);
// Raw text typed into the provider search box.
const [searchQuery, setSearchQuery] = useState("");
// Whether the provider search dropdown is open.
const [searchMenuOpen, setSearchMenuOpen] = useState(false);
const searchInputRef = useRef(null);
const { t } = useTranslation();
// Commit a provider choice: reset the search, close the menu, and mark the
// parent form dirty.
function updateLLMChoice(selection) {
setSearchQuery("");
setSelectedLLM(selection);
setSearchMenuOpen(false);
setHasChanges(true);
}
// The "X" button is dual-purpose: with text in the box it clears the query
// (both state and the uncontrolled input's DOM value); with an empty query
// it toggles the search menu.
function handleXButton() {
if (searchQuery.length > 0) {
setSearchQuery("");
if (searchInputRef.current) searchInputRef.current.value = "";
} else {
setSearchMenuOpen(!searchMenuOpen);
}
}
// Recompute the visible provider list on each query change
// (case-insensitive substring match on the display name).
// NOTE(review): `selectedLLM` in the dependency array looks unnecessary —
// the filter never reads it — but re-running is harmless; confirm before
// removing it.
useEffect(() => {
const filtered = LLMS.filter((llm) =>
llm.name.toLowerCase().includes(searchQuery.toLowerCase())
);
setFilteredLLMs(filtered);
}, [searchQuery, selectedLLM]);
// Full provider record for the current selection (logo, name, etc.).
const selectedLLMObject = LLMS.find((llm) => llm.value === selectedLLM);
return (
{WARN_PERFORMANCE.includes(selectedLLM) && (
{t("agent.performance-warning")}
)}
{t("agent.provider.description")}
{searchMenuOpen && (
setSearchMenuOpen(false)}
/>
)}
{searchMenuOpen ? (
) : (
)}
{selectedLLM !== "none" && (
)}
);
}