Enable UI/UX for model swapping in chat window (#3969)

* Enable UI/UX for model swapping in chat window

* forgot component

* patch useGetProviders hook to set loading on change of provider

* dev build

* normalize translations

* patch how model default is provided

---------

Co-authored-by: shatfield4 <seanhatfield5@gmail.com>
This commit is contained in:
Timothy Carambat 2025-06-09 09:59:17 -07:00 committed by GitHub
parent 4eb951d40e
commit c0d66e6c19
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
32 changed files with 907 additions and 2 deletions

View File

@ -6,7 +6,7 @@ concurrency:
on:
push:
branches: ['model-map-staleness'] # put your current branch to create a build. Core team only.
branches: ['2095-model-swap-in-chat'] # put your current branch to create a build. Core team only.
paths-ignore:
- '**.md'
- 'cloud-deployments/*'

View File

@ -9,8 +9,11 @@ import { createPortal } from "react-dom";
*/
/**
*
* @param {ModalWrapperProps} props - ModalWrapperProps to pass
* @returns {import("react").ReactNode}
*
* @todo Add a closeModal prop to the ModalWrapper component so we can escape dismiss anywhere this is used
*/
export default function ModalWrapper({ children, isOpen, noPortal = false }) {
if (!isOpen) return null;

View File

@ -0,0 +1,120 @@
import useGetProviderModels, {
DISABLED_PROVIDERS,
} from "@/hooks/useGetProvidersModels";
import { useTranslation } from "react-i18next";
export default function ChatModelSelection({
provider,
setHasChanges,
selectedLLMModel,
setSelectedLLMModel,
}) {
const { defaultModels, customModels, loading } =
useGetProviderModels(provider);
const { t } = useTranslation();
if (DISABLED_PROVIDERS.includes(provider)) return null;
if (loading) {
return (
<div>
<div className="flex flex-col">
<label htmlFor="name" className="block input-label">
{t("chat_window.workspace_llm_manager.available_models", {
provider,
})}
</label>
<p className="text-white text-opacity-60 text-xs font-medium py-1.5">
{t(
"chat_window.workspace_llm_manager.available_models_description"
)}
</p>
</div>
<select
required={true}
disabled={true}
className="border-theme-modal-border border border-solid bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
>
<option disabled={true} selected={true}>
-- waiting for models --
</option>
</select>
</div>
);
}
return (
<div>
<div className="flex flex-col">
<label htmlFor="name" className="block input-label">
{t("chat_window.workspace_llm_manager.available_models", {
provider,
})}
</label>
<p className="text-white text-opacity-60 text-xs font-medium py-1.5">
{t("chat_window.workspace_llm_manager.available_models_description")}
</p>
</div>
<select
id="workspace-llm-model-select"
required={true}
value={selectedLLMModel}
onChange={(e) => {
setHasChanges(true);
setSelectedLLMModel(e.target.value);
}}
className="border-theme-modal-border border border-solid bg-theme-settings-input-bg text-white text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
>
{defaultModels.length > 0 && (
<optgroup label="General models">
{defaultModels.map((model) => {
return (
<option
key={model}
value={model}
selected={selectedLLMModel === model}
>
{model}
</option>
);
})}
</optgroup>
)}
{Array.isArray(customModels) && customModels.length > 0 && (
<optgroup label="Discovered models">
{customModels.map((model) => {
return (
<option
key={model.id}
value={model.id}
selected={selectedLLMModel === model.id}
>
{model.id}
</option>
);
})}
</optgroup>
)}
{/* For providers like TogetherAi where we partition model by creator entity. */}
{!Array.isArray(customModels) &&
Object.keys(customModels).length > 0 && (
<>
{Object.entries(customModels).map(([organization, models]) => (
<optgroup key={organization} label={organization}>
{models.map((model) => (
<option
key={model.id}
value={model.id}
selected={selectedLLMModel === model.id}
>
{model.name}
</option>
))}
</optgroup>
))}
</>
)}
</select>
</div>
);
}

View File

@ -0,0 +1,42 @@
import { useTranslation } from "react-i18next";
export default function LLMSelectorSidePanel({
availableProviders,
selectedLLMProvider,
onSearchChange,
onProviderClick,
}) {
const { t } = useTranslation();
return (
<div className="w-[40%] h-full flex flex-col gap-y-1 border-r-2 border-theme-modal-border py-2 px-[5px]">
<input
id="llm-search-input"
type="search"
placeholder={t("chat_window.workspace_llm_manager.search")}
onChange={onSearchChange}
className="search-input bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder outline-none text-sm rounded-lg px-2 py-2 w-full h-[32px] border-theme-modal-border border border-solid"
/>
<div className="flex flex-col gap-y-2 overflow-y-scroll ">
{availableProviders.map((llm) => (
<button
key={llm.value}
type="button"
data-llm-value={llm.value}
className={`border-none hover:cursor-pointer hover:bg-theme-checklist-item-bg-hover flex gap-x-2 items-center p-2 rounded-md ${selectedLLMProvider === llm.value ? "bg-theme-checklist-item-bg" : ""}`}
onClick={() => onProviderClick(llm.value)}
>
<img
src={llm.logo}
alt={`${llm.name} logo`}
className="w-6 h-6 rounded-md"
/>
<div className="flex flex-col">
<div className="text-xs text-theme-text-primary">{llm.name}</div>
</div>
</button>
))}
</div>
</div>
);
}

View File

@ -0,0 +1,109 @@
import { createPortal } from "react-dom";
import ModalWrapper from "@/components/ModalWrapper";
import { X } from "@phosphor-icons/react";
import System from "@/models/system";
import showToast from "@/utils/toast";
import { useTranslation } from "react-i18next";
export default function SetupProvider({
isOpen,
closeModal,
postSubmit,
settings,
llmProvider,
}) {
if (!isOpen) return null;
async function handleUpdate(e) {
e.preventDefault();
e.stopPropagation();
const data = {};
const form = new FormData(e.target);
for (var [key, value] of form.entries()) data[key] = value;
const { error } = await System.updateSystem(data);
if (error) {
showToast(
`Failed to save ${llmProvider.name} settings: ${error}`,
"error"
);
return;
}
closeModal();
postSubmit();
return false;
}
return createPortal(
<ModalWrapper isOpen={isOpen}>
<div className="fixed inset-0 z-50 overflow-auto bg-black bg-opacity-50 flex items-center justify-center">
<div className="relative w-full max-w-2xl bg-theme-bg-secondary rounded-lg shadow border-2 border-theme-modal-border">
<div className="relative p-6 border-b rounded-t border-theme-modal-border">
<div className="w-full flex gap-x-2 items-center">
<h3 className="text-xl font-semibold text-white overflow-hidden overflow-ellipsis whitespace-nowrap">
{llmProvider.name} Settings
</h3>
</div>
<button
onClick={closeModal}
type="button"
className="absolute top-4 right-4 transition-all duration-300 bg-transparent rounded-lg text-sm p-1 inline-flex items-center hover:bg-theme-modal-border hover:border-theme-modal-border hover:border-opacity-50 border-transparent border"
>
<X size={24} weight="bold" className="text-white" />
</button>
</div>
<form id="provider-form" onSubmit={handleUpdate}>
<div className="px-7 py-6">
<div className="space-y-6 max-h-[60vh] overflow-y-auto p-1">
<p className="text-sm text-white/60">
To use {llmProvider.name} as this workspace's LLM you need to
set it up first.
</p>
<div>
{llmProvider.options(settings, { credentialsOnly: true })}
</div>
</div>
</div>
<div className="flex justify-between items-center mt-6 pt-6 border-t border-theme-modal-border px-7 pb-6">
<button
type="button"
onClick={closeModal}
className="transition-all duration-300 text-white hover:bg-zinc-700 px-4 py-2 rounded-lg text-sm"
>
Cancel
</button>
<button
type="submit"
form="provider-form"
className="transition-all duration-300 bg-white text-black hover:opacity-60 px-4 py-2 rounded-lg text-sm"
>
Save settings
</button>
</div>
</form>
</div>
</div>
</ModalWrapper>,
document.body
);
}
export function NoSetupWarning({ showing, onSetupClick }) {
const { t } = useTranslation();
if (!showing) return null;
return (
<button
type="button"
onClick={onSetupClick}
className="border border-blue-500 rounded-lg p-2 flex flex-col items-center gap-y-2 bg-blue-600/10 text-blue-600 hover:bg-blue-600/20 transition-all duration-300"
>
<p className="text-sm text-center">
<b>{t("chat_window.workspace_llm_manager.missing_credentials")}</b>
</p>
<p className="text-xs text-center">
{t("chat_window.workspace_llm_manager.missing_credentials_description")}
</p>
</button>
);
}

View File

@ -0,0 +1,131 @@
import { Tooltip } from "react-tooltip";
import { Brain, CheckCircle } from "@phosphor-icons/react";
import LLMSelectorModal from "./index";
import { useTheme } from "@/hooks/useTheme";
import { useRef, useEffect, useState } from "react";
import useUser from "@/hooks/useUser";
import { useModal } from "@/hooks/useModal";
import SetupProvider from "./SetupProvider";
export const TOGGLE_LLM_SELECTOR_EVENT = "toggle_llm_selector";
export const SAVE_LLM_SELECTOR_EVENT = "save_llm_selector";
export const PROVIDER_SETUP_EVENT = "provider_setup_requested";
export default function LLMSelectorAction() {
const tooltipRef = useRef(null);
const { theme } = useTheme();
const { user } = useUser();
const [saved, setSaved] = useState(false);
const {
isOpen: isSetupProviderOpen,
openModal: openSetupProviderModal,
closeModal: closeSetupProviderModal,
} = useModal();
const [config, setConfig] = useState({
settings: {},
provider: null,
});
function toggleLLMSelectorTooltip() {
if (!tooltipRef.current) return;
tooltipRef.current.isOpen
? tooltipRef.current.close()
: tooltipRef.current.open();
}
function handleSaveLLMSelector() {
if (!tooltipRef.current) return;
tooltipRef.current.close();
setSaved(true);
}
useEffect(() => {
window.addEventListener(
TOGGLE_LLM_SELECTOR_EVENT,
toggleLLMSelectorTooltip
);
window.addEventListener(SAVE_LLM_SELECTOR_EVENT, handleSaveLLMSelector);
return () => {
window.removeEventListener(
TOGGLE_LLM_SELECTOR_EVENT,
toggleLLMSelectorTooltip
);
window.removeEventListener(
SAVE_LLM_SELECTOR_EVENT,
handleSaveLLMSelector
);
};
}, []);
useEffect(() => {
if (!saved) return;
setTimeout(() => {
setSaved(false);
}, 1500);
}, [saved]);
useEffect(() => {
function handleProviderSetupEvent(e) {
const { provider, settings } = e.detail;
setConfig({
settings,
provider,
});
setTimeout(() => {
openSetupProviderModal();
}, 300);
}
window.addEventListener(PROVIDER_SETUP_EVENT, handleProviderSetupEvent);
return () =>
window.removeEventListener(
PROVIDER_SETUP_EVENT,
handleProviderSetupEvent
);
}, []);
// This feature is disabled for multi-user instances where the user is not an admin
// This is because of the limitations of model selection currently and other nuances in controls.
if (!!user && user.role !== "admin") return null;
return (
<>
<div
id="llm-selector-btn"
data-tooltip-id="tooltip-llm-selector-btn"
aria-label="LLM Selector"
className={`border-none relative flex justify-center items-center opacity-60 hover:opacity-100 light:opacity-100 light:hover:opacity-60 cursor-pointer`}
>
{saved ? (
<CheckCircle className="w-[22px] h-[22px] pointer-events-none text-green-400" />
) : (
<Brain className="w-[22px] h-[22px] pointer-events-none text-[var(--theme-sidebar-footer-icon-fill)]" />
)}
</div>
<Tooltip
ref={tooltipRef}
id="tooltip-llm-selector-btn"
place="top"
opacity={1}
clickable={true}
delayShow={300} // dont trigger tooltip instantly to not spam the UI
delayHide={800} // Prevent the travel time from icon to window hiding tooltip
arrowColor={
theme === "light"
? "var(--theme-modal-border)"
: "var(--theme-bg-primary)"
}
className="z-99 !w-[500px] !bg-theme-bg-primary !px-[5px] !rounded-lg !pointer-events-auto light:border-2 light:border-theme-modal-border"
>
<LLMSelectorModal tooltipRef={tooltipRef} />
</Tooltip>
<SetupProvider
isOpen={isSetupProviderOpen}
closeModal={closeSetupProviderModal}
postSubmit={() => closeSetupProviderModal()}
settings={config.settings}
llmProvider={config.provider}
/>
</>
);
}

View File

@ -0,0 +1,151 @@
import { useState, useEffect } from "react";
import { useParams } from "react-router-dom";
import PreLoader from "@/components/Preloader";
import ChatModelSelection from "./ChatModelSelection";
import { useTranslation } from "react-i18next";
import { PROVIDER_SETUP_EVENT, SAVE_LLM_SELECTOR_EVENT } from "./action";
import {
WORKSPACE_LLM_PROVIDERS,
autoScrollToSelectedLLMProvider,
hasMissingCredentials,
validatedModelSelection,
} from "./utils";
import LLMSelectorSidePanel from "./LLMSelector";
import { NoSetupWarning } from "./SetupProvider";
import showToast from "@/utils/toast";
import Workspace from "@/models/workspace";
import System from "@/models/system";
export default function LLMSelectorModal() {
const { slug } = useParams();
const { t } = useTranslation();
const [loading, setLoading] = useState(false);
const [settings, setSettings] = useState(null);
const [selectedLLMProvider, setSelectedLLMProvider] = useState(null);
const [selectedLLMModel, setSelectedLLMModel] = useState("");
const [availableProviders, setAvailableProviders] = useState(
WORKSPACE_LLM_PROVIDERS
);
const [hasChanges, setHasChanges] = useState(false);
const [saving, setSaving] = useState(false);
const [missingCredentials, setMissingCredentials] = useState(false);
useEffect(() => {
if (!slug) return;
setLoading(true);
Promise.all([Workspace.bySlug(slug), System.keys()])
.then(([workspace, systemSettings]) => {
const selectedLLMProvider =
workspace.chatProvider ?? systemSettings.LLMProvider;
const selectedLLMModel = workspace.chatModel ?? systemSettings.LLMModel;
setSettings(systemSettings);
setSelectedLLMProvider(selectedLLMProvider);
autoScrollToSelectedLLMProvider(selectedLLMProvider);
setSelectedLLMModel(selectedLLMModel);
})
.finally(() => setLoading(false));
}, [slug]);
function handleSearch(e) {
const searchTerm = e.target.value.toLowerCase();
const filteredProviders = WORKSPACE_LLM_PROVIDERS.filter((provider) =>
provider.name.toLowerCase().includes(searchTerm)
);
setAvailableProviders(filteredProviders);
}
function handleProviderSelection(provider) {
setSelectedLLMProvider(provider);
setAvailableProviders(WORKSPACE_LLM_PROVIDERS);
autoScrollToSelectedLLMProvider(provider, 50);
document.getElementById("llm-search-input").value = "";
setHasChanges(true);
setMissingCredentials(hasMissingCredentials(settings, provider));
}
async function handleSave() {
setSaving(true);
try {
setHasChanges(false);
const validatedModel = validatedModelSelection(selectedLLMModel);
if (!validatedModel) throw new Error("Invalid model selection");
const { message } = await Workspace.update(slug, {
chatProvider: selectedLLMProvider,
chatModel: validatedModel,
});
if (!!message) throw new Error(message);
window.dispatchEvent(new Event(SAVE_LLM_SELECTOR_EVENT));
} catch (error) {
console.error(error);
showToast(error.message, "error", { clear: true });
} finally {
setSaving(false);
}
}
if (loading) {
return (
<div
id="llm-selector-modal"
className="w-full h-[500px] p-0 overflow-y-scroll flex flex-col items-center justify-center"
>
<PreLoader size={12} />
<p className="text-theme-text-secondary text-sm mt-2">
{t("chat_window.workspace_llm_manager.loading_workspace_settings")}
</p>
</div>
);
}
return (
<div
id="llm-selector-modal"
className="w-full h-[500px] p-0 overflow-y-scroll flex"
>
<LLMSelectorSidePanel
availableProviders={availableProviders}
selectedLLMProvider={selectedLLMProvider}
onSearchChange={handleSearch}
onProviderClick={handleProviderSelection}
/>
<div className="w-[60%] h-full px-2 flex flex-col gap-y-2">
<NoSetupWarning
showing={missingCredentials}
onSetupClick={() => {
window.dispatchEvent(
new CustomEvent(PROVIDER_SETUP_EVENT, {
detail: {
provider: WORKSPACE_LLM_PROVIDERS.find(
(p) => p.value === selectedLLMProvider
),
settings,
},
})
);
}}
/>
<ChatModelSelection
provider={selectedLLMProvider}
setHasChanges={setHasChanges}
selectedLLMModel={selectedLLMModel}
setSelectedLLMModel={setSelectedLLMModel}
/>
{hasChanges && (
<button
type="button"
disabled={saving}
onClick={handleSave}
className={`border-none text-xs px-4 py-1 font-semibold light:text-[#ffffff] rounded-lg bg-primary-button hover:bg-secondary hover:text-white h-[34px] whitespace-nowrap w-full`}
>
{saving
? t("chat_window.workspace_llm_manager.saving")
: t("chat_window.workspace_llm_manager.save")}
</button>
)}
</div>
</div>
);
}

View File

@ -0,0 +1,61 @@
import { AVAILABLE_LLM_PROVIDERS } from "@/pages/GeneralSettings/LLMPreference";
import { DISABLED_PROVIDERS } from "@/hooks/useGetProvidersModels";
/**
 * Smooth-scrolls the provider list so the currently selected provider's
 * button is visible. Deferred with a timeout so the list has rendered
 * before we query the DOM.
 *
 * @param {string} selectedLLMProvider - provider value to scroll to
 * @param {number} [timeout=500] - delay in ms before scrolling
 */
export function autoScrollToSelectedLLMProvider(
  selectedLLMProvider,
  timeout = 500
) {
  setTimeout(() => {
    const target = document.querySelector(
      `[data-llm-value="${selectedLLMProvider}"]`
    );
    target?.scrollIntoView({ behavior: "smooth", block: "nearest" });
  }, timeout);
}
/**
 * Validates the model selection by checking if the model is in the select option in the available models
 * dropdown. If the model is not in the dropdown, it will return the first model in the dropdown.
 *
 * This exists when the user swaps providers, but did not select a model in the new provider's dropdown
 * and assumed the first model in the picker was OK. This prevents invalid provider<>model selection issues
 * @param {string} model - The model to validate
 * @returns {string} - The validated model
 */
export function validatedModelSelection(model) {
  try {
    const dropdown = document.getElementById("workspace-llm-model-select");
    // No dropdown rendered at all — trust the caller's value as-is.
    if (!dropdown) return model;

    // Model not present for this provider: fall back to the first option
    // to prevent invalid provider<>model selection issues.
    const match = dropdown.querySelector(`option[value="${model}"]`);
    if (!match) return dropdown.querySelector(`option`).value;

    // Model is present in the dropdown — keep it.
    return model;
  } catch (error) {
    // Empty dropdown (no options) or any other DOM failure: return null to abort the save.
    return null;
  }
}
/**
 * Checks whether the given provider is missing any of its required
 * credential/config values in the current system settings.
 *
 * @param {object|null} settings - system settings keys (may be null while loading)
 * @param {string} provider - the provider value to check
 * @returns {boolean} true when at least one required config value is unset
 */
export function hasMissingCredentials(settings, provider) {
  const providerEntry = AVAILABLE_LLM_PROVIDERS.find(
    (p) => p.value === provider
  );
  if (!providerEntry) return false;
  // `requiredConfig` may be absent for providers with no credential needs.
  const requiredKeys = providerEntry.requiredConfig ?? [];
  // A missing key and a key set to a falsy value both count as "missing";
  // `settings?.` guards the null state before system keys have loaded.
  return requiredKeys.some((key) => !settings?.[key]);
}
// Providers selectable from the workspace LLM manager — all system providers
// minus those that do not support per-workspace model selection.
export const WORKSPACE_LLM_PROVIDERS = AVAILABLE_LLM_PROVIDERS.filter(
  (provider) => !DISABLED_PROVIDERS.includes(provider.value)
);

View File

@ -11,6 +11,7 @@ import AvailableAgentsButton, {
useAvailableAgents,
} from "./AgentMenu";
import TextSizeButton from "./TextSizeMenu";
import LLMSelectorAction from "./LLMSelector/action";
import SpeechToText from "./SpeechToText";
import { Tooltip } from "react-tooltip";
import AttachmentManager from "./Attachments";
@ -323,6 +324,7 @@ export default function PromptInput({
setShowAgents={setShowAgents}
/>
<TextSizeButton />
<LLMSelectorAction />
</div>
<div className="flex gap-x-2">
<SpeechToText sendCommand={sendCommand} />

View File

@ -61,6 +61,7 @@ export default function useGetProviderModels(provider = null) {
useEffect(() => {
async function fetchProviderModels() {
if (!provider) return;
setLoading(true);
const { models = [] } = await System.customModels(provider);
if (
PROVIDER_DEFAULT_MODELS.hasOwnProperty(provider) &&

View File

@ -710,6 +710,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: null,
@ -913,6 +923,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -748,6 +748,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: "Rediger konto",
@ -952,6 +962,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -746,6 +746,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: "Account bearbeiten",
@ -957,6 +967,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -978,6 +978,16 @@ const TRANSLATIONS = {
small: "Small",
normal: "Normal",
large: "Large",
workspace_llm_manager: {
search: "Search LLM providers",
loading_workspace_settings: "Loading workspace settings...",
available_models: "Available Models for {{provider}}",
available_models_description: "Select a model to use for this workspace.",
save: "Use this model",
saving: "Setting model as workspace default...",
missing_credentials: "This provider is missing credentials!",
missing_credentials_description: "Click to set up credentials",
},
},
profile_settings: {
@ -1014,6 +1024,7 @@ const TRANSLATIONS = {
llmPreferences: "LLM Preferences",
chatSettings: "Chat Settings",
help: "Show keyboard shortcuts help",
showLLMSelector: "Show workspace LLM Selector",
},
},
};

View File

@ -709,6 +709,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: null,
@ -917,6 +927,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -702,6 +702,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: null,
@ -905,6 +915,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -710,6 +710,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: null,
@ -913,6 +923,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -695,6 +695,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: null,
@ -898,6 +908,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -708,6 +708,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: null,
@ -911,6 +921,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -740,6 +740,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: "アカウントを編集",
@ -947,6 +957,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -695,6 +695,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: null,
@ -898,6 +908,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -935,6 +935,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: "Rediģēt kontu",
@ -969,6 +979,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -705,6 +705,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: null,
@ -908,6 +918,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -916,6 +916,16 @@ const TRANSLATIONS = {
small: "Pequeno",
normal: "Normal",
large: "Grande",
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: "Editar conta",
@ -950,6 +960,7 @@ const TRANSLATIONS = {
llmPreferences: "Preferências do LLM",
chatSettings: "Ajustes do chat",
help: "Exibe ajuda e atalhos",
showLLMSelector: null,
},
},
};

View File

@ -749,6 +749,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: "Редактировать учётную запись",
@ -953,6 +963,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -705,6 +705,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: null,
@ -908,6 +918,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -704,6 +704,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: null,
@ -907,6 +917,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -875,6 +875,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: "编辑帐户",
@ -909,6 +919,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -707,6 +707,16 @@ const TRANSLATIONS = {
small: null,
normal: null,
large: null,
workspace_llm_manager: {
search: null,
loading_workspace_settings: null,
available_models: null,
available_models_description: null,
save: null,
saving: null,
missing_credentials: null,
missing_credentials_description: null,
},
},
profile_settings: {
edit_account: null,
@ -910,6 +920,7 @@ const TRANSLATIONS = {
llmPreferences: null,
chatSettings: null,
help: null,
showLLMSelector: null,
},
},
};

View File

@ -1,6 +1,7 @@
import paths from "./paths";
import { useEffect, useState } from "react";
import { useEffect } from "react";
import { userFromStorage } from "./request";
import { TOGGLE_LLM_SELECTOR_EVENT } from "@/components/WorkspaceChat/ChatContainer/PromptInput/LLMSelector/action";
export const KEYBOARD_SHORTCUTS_HELP_EVENT = "keyboard-shortcuts-help";
export const isMac = navigator.platform.toUpperCase().indexOf("MAC") >= 0;
@ -61,6 +62,12 @@ export const SHORTCUTS = {
);
},
},
"⌘ + Shift + L": {
translationKey: "showLLMSelector",
action: () => {
window.dispatchEvent(new Event(TOGGLE_LLM_SELECTOR_EVENT));
},
},
};
const LISTENERS = {};

View File

@ -8,6 +8,7 @@ const prisma = require("../utils/prisma");
const { v4 } = require("uuid");
const { MetaGenerator } = require("../utils/boot/MetaGenerator");
const { PGVector } = require("../utils/vectorDbProviders/pgvector");
const { getBaseLLMProviderModel } = require("../utils/helpers");
function isNullOrNaN(value) {
if (value === null) return true;
@ -227,6 +228,7 @@ const SystemSettings = {
// LLM Provider Selection Settings & Configs
// --------------------------------------------------------
LLMProvider: llmProvider,
LLMModel: getBaseLLMProviderModel({ provider: llmProvider }) || null,
...this.llmPreferenceKeys(),
// --------------------------------------------------------

View File

@ -358,6 +358,72 @@ function getLLMProviderClass({ provider = null } = {}) {
}
}
/**
 * Maps a provider value to the env var that stores its default model pref.
 * Providers absent from this map (e.g. huggingface, textgenwebui) have their
 * model fixed server-side and expose no model preference env var.
 */
const PROVIDER_MODEL_ENV_KEYS = {
  openai: "OPEN_MODEL_PREF",
  azure: "OPEN_MODEL_PREF", // Azure reuses the OpenAI model/deployment pref
  anthropic: "ANTHROPIC_MODEL_PREF",
  gemini: "GEMINI_LLM_MODEL_PREF",
  lmstudio: "LMSTUDIO_MODEL_PREF",
  localai: "LOCAL_AI_MODEL_PREF",
  ollama: "OLLAMA_MODEL_PREF",
  togetherai: "TOGETHER_AI_MODEL_PREF",
  fireworksai: "FIREWORKS_AI_LLM_MODEL_PREF",
  perplexity: "PERPLEXITY_MODEL_PREF",
  openrouter: "OPENROUTER_MODEL_PREF",
  mistral: "MISTRAL_MODEL_PREF",
  groq: "GROQ_MODEL_PREF",
  koboldcpp: "KOBOLD_CPP_MODEL_PREF",
  cohere: "COHERE_MODEL_PREF",
  litellm: "LITE_LLM_MODEL_PREF",
  // Fixed: previously returned API-key env vars instead of model prefs
  // for generic-openai, apipie, ppio, and textgenwebui.
  "generic-openai": "GENERIC_OPEN_AI_MODEL_PREF",
  bedrock: "AWS_BEDROCK_LLM_MODEL_PREFERENCE",
  deepseek: "DEEPSEEK_MODEL_PREF",
  apipie: "APIPIE_LLM_MODEL_PREF",
  novita: "NOVITA_LLM_MODEL_PREF",
  xai: "XAI_LLM_MODEL_PREF",
  "nvidia-nim": "NVIDIA_NIM_LLM_MODEL_PREF",
  ppio: "PPIO_MODEL_PREF",
  dpais: "DPAIS_LLM_MODEL_PREF",
};

/**
 * Returns the defined model (if available) for the given provider.
 * @param {{provider: string | null} | null} params - Initialize params for LLMs provider
 * @returns {string | null} the configured model preference, or null when the
 * provider is unknown, has no model pref, or the env var is unset
 */
function getBaseLLMProviderModel({ provider = null } = {}) {
  if (!provider) return null;
  const envKey = PROVIDER_MODEL_ENV_KEYS[provider];
  if (!envKey) return null;
  // Normalize unset env vars to null so callers get a consistent type.
  return process.env[envKey] ?? null;
}
// Some models have lower restrictions on chars that can be encoded in a single pass
// and by default we assume it can handle 1,000 chars, but some models use work with smaller
// chars so here we can override that value when embedding information.
@ -383,6 +449,7 @@ module.exports = {
maximumChunkLength,
getVectorDbClass,
getLLMProviderClass,
getBaseLLMProviderModel,
getLLMProvider,
toChunks,
};