+ {wasFallback && (
+
+ {t("scheduledJobs.builder.fallbackWarning")}
+
+ )}
+
+
+ {t("scheduledJobs.builder.run")}
+ update({ frequency: e.target.value })}
+ className={inputClass}
+ >
+
+ {t("scheduledJobs.builder.frequency.minute")}
+
+
+ {t("scheduledJobs.builder.frequency.hour")}
+
+
+ {t("scheduledJobs.builder.frequency.day")}
+
+
+ {t("scheduledJobs.builder.frequency.week")}
+
+
+ {t("scheduledJobs.builder.frequency.month")}
+
+
+
+
+ {state.frequency === "minute" && (
+
+ {t("scheduledJobs.builder.every")}
+
+ update({ minuteInterval: parseInt(e.target.value, 10) })
+ }
+ className={inputClass}
+ >
+ {MINUTE_INTERVALS.map((n) => (
+
+ {n === 1
+ ? t("scheduledJobs.builder.minuteOne")
+ : t("scheduledJobs.builder.minuteOther", { count: n })}
+
+ ))}
+
+
+ )}
+
+ {state.frequency === "hour" && (
+
+
+ {t("scheduledJobs.builder.atMinute")}
+
+
+ update({ hourMinuteOffset: parseInt(e.target.value, 10) })
+ }
+ className={inputClass}
+ >
+ {MINUTES.map((n) => (
+
+ {pad2(n)}
+
+ ))}
+
+
+ {t("scheduledJobs.builder.pastEveryHour")}
+
+
+ )}
+
+ {(state.frequency === "day" ||
+ state.frequency === "week" ||
+ state.frequency === "month") && (
+
+ {t("scheduledJobs.builder.at")}
+ {
+ const [h, m] = e.target.value.split(":");
+ update({
+ hour: parseInt(h, 10) || 0,
+ minute: parseInt(m, 10) || 0,
+ });
+ }}
+ className={`${inputClass} [color-scheme:dark] light:[color-scheme:light]`}
+ />
+
+ )}
+
+ {state.frequency === "week" && (
+
+
{t("scheduledJobs.builder.on")}
+
+ {WEEKDAYS.map((day) => {
+ const selected = state.weekdays.includes(day.value);
+ return (
+ {
+ const next = selected
+ ? state.weekdays.filter((d) => d !== day.value)
+ : [...state.weekdays, day.value];
+ update({ weekdays: next.length ? next : [day.value] });
+ }}
+ className={`border-none px-3 py-1 text-xs rounded-full transition-colors ${
+ selected
+ ? "bg-zinc-50 text-zinc-950 light:bg-zinc-950 light:text-white"
+ : "bg-white/5 text-theme-text-secondary hover:bg-white/10 hover:text-theme-text-primary light:bg-slate-200 light:hover:bg-slate-300"
+ }`}
+ >
+ {day.label}
+
+ );
+ })}
+
+
+ )}
+
+ {state.frequency === "month" && (
+
+ {t("scheduledJobs.builder.onDay")}
+
+ update({ dayOfMonth: parseInt(e.target.value, 10) })
+ }
+ className={inputClass}
+ >
+ {DAYS_OF_MONTH.map((n) => (
+
+ {n}
+
+ ))}
+
+
+ {t("scheduledJobs.builder.ofEveryMonth")}
+
+
+ )}
+
+ );
+}
diff --git a/frontend/src/pages/GeneralSettings/ScheduledJobs/JobFormModal/FormActions.jsx b/frontend/src/pages/GeneralSettings/ScheduledJobs/JobFormModal/FormActions.jsx
new file mode 100644
index 00000000..57c1b829
--- /dev/null
+++ b/frontend/src/pages/GeneralSettings/ScheduledJobs/JobFormModal/FormActions.jsx
@@ -0,0 +1,28 @@
+import { useTranslation } from "react-i18next";
+
+export default function FormActions({ isEditing, saving, onClose }) {
+ const { t } = useTranslation();
+
+ return (
+
+
+
+ {t("scheduledJobs.modal.scheduleLabel")}{" "}
+ *
+
+ {error && (
+
+ {t("scheduledJobs.modal.required", "Required")}
+
+ )}
+
+
+
+ {[
+ {
+ value: "builder",
+ label: t("scheduledJobs.modal.modeBuilder"),
+ },
+ { value: "custom", label: t("scheduledJobs.modal.modeCustom") },
+ ].map((tab) => (
+ onModeChange(tab.value)}
+ className={`border-none px-3 py-1 text-xs rounded-md transition-colors ${
+ scheduleMode === tab.value
+ ? "bg-zinc-50 text-zinc-950 light:bg-zinc-950 light:text-white"
+ : "text-theme-text-secondary hover:text-theme-text-primary"
+ }`}
+ >
+ {tab.label}
+
+ ))}
+
+
+ {scheduleMode === "builder" && (
+
+
+
+ )}
+
+ {scheduleMode === "custom" && (
+
+ )}
+
+
+ {t("scheduledJobs.modal.currentSchedule")}{" "}
+ {schedule}
+ {schedule && (
+
+ — {humanizeCron(schedule, i18n.language)}
+
+ )}
+
+
+ );
+}
diff --git a/frontend/src/pages/GeneralSettings/ScheduledJobs/JobFormModal/ToolsSelector.jsx b/frontend/src/pages/GeneralSettings/ScheduledJobs/JobFormModal/ToolsSelector.jsx
new file mode 100644
index 00000000..6aa64ba7
--- /dev/null
+++ b/frontend/src/pages/GeneralSettings/ScheduledJobs/JobFormModal/ToolsSelector.jsx
@@ -0,0 +1,352 @@
+import { useEffect, useMemo, useRef, useState } from "react";
+import {
+ CaretDown,
+ CaretRight,
+ Check,
+ MagnifyingGlass,
+ Minus,
+ Warning,
+ X,
+} from "@phosphor-icons/react";
+import { useTranslation } from "react-i18next";
+import paths from "@/utils/paths";
+
/**
 * Flatten categorized tool data into an id → tool map so labels can be
 * resolved in O(1) instead of rescanning every category per lookup.
 * @param {Array} categories - Categorized tools from backend
 * @returns {Map}
 */
function buildToolLookup(categories) {
  const lookup = new Map();
  categories.forEach((category) => {
    // Categories may arrive without an items array; treat that as empty.
    for (const item of category.items || []) lookup.set(item.id, item);
  });
  return lookup;
}
+
+function Checkbox({ state, disabled = false }) {
+ const filled = state === "checked" || state === "indeterminate";
+ return (
+
+ {state === "checked" && (
+
+ )}
+ {state === "indeterminate" && (
+
+ )}
+
+ );
+}
+
+export default function ToolsSelector({
+ availableTools,
+ selectedTools,
+ onChange,
+}) {
+ const { t } = useTranslation();
+ const [search, setSearch] = useState("");
+ const [open, setOpen] = useState(false);
+ const [placement, setPlacement] = useState("bottom");
+ const [expandedCategories, setExpandedCategories] = useState({});
+ const containerRef = useRef(null);
+
+ useEffect(() => {
+ if (!open) return;
+ const onClick = (e) => {
+ if (!containerRef.current?.contains(e.target)) setOpen(false);
+ };
+ const onKey = (e) => {
+ if (e.key === "Escape") setOpen(false);
+ };
+ document.addEventListener("mousedown", onClick);
+ document.addEventListener("keydown", onKey);
+ return () => {
+ document.removeEventListener("mousedown", onClick);
+ document.removeEventListener("keydown", onKey);
+ };
+ }, [open]);
+
+ useEffect(() => {
+ if (!open) return;
+ const POPOVER_MAX_HEIGHT = 320;
+ const updatePlacement = () => {
+ const rect = containerRef.current?.getBoundingClientRect();
+ if (!rect) return;
+ const spaceBelow = window.innerHeight - rect.bottom;
+ const spaceAbove = rect.top;
+ setPlacement(
+ spaceBelow < POPOVER_MAX_HEIGHT && spaceAbove > spaceBelow
+ ? "top"
+ : "bottom"
+ );
+ };
+ updatePlacement();
+ window.addEventListener("resize", updatePlacement);
+ window.addEventListener("scroll", updatePlacement, true);
+ return () => {
+ window.removeEventListener("resize", updatePlacement);
+ window.removeEventListener("scroll", updatePlacement, true);
+ };
+ }, [open]);
+
+ const toolLookup = useMemo(
+ () => buildToolLookup(availableTools),
+ [availableTools]
+ );
+
+ const filteredCategories = useMemo(() => {
+ const q = search.trim().toLowerCase();
+ if (!q) return availableTools;
+ return availableTools
+ .map((cat) => {
+ if (cat.name.toLowerCase().includes(q)) return cat;
+ const items = (cat.items || []).filter(
+ (item) =>
+ item.name.toLowerCase().includes(q) ||
+ item.description?.toLowerCase().includes(q)
+ );
+ return items.length ? { ...cat, items } : null;
+ })
+ .filter(Boolean);
+ }, [availableTools, search]);
+
+ const isCategoryExpanded = (catId) =>
+ search.trim() || expandedCategories[catId];
+
+ const toggleCategory = (catId) => {
+ setExpandedCategories((prev) => ({ ...prev, [catId]: !prev[catId] }));
+ };
+
+ const categoryState = (cat) => {
+ const ids = (cat.items || []).map((i) => i.id);
+ const count = ids.filter((id) => selectedTools.includes(id)).length;
+ if (count === 0) return "unchecked";
+ if (count === ids.length) return "checked";
+ return "indeterminate";
+ };
+
+ const toggleCategorySelection = (cat, e) => {
+ e.stopPropagation();
+ const ids = (cat.items || []).map((i) => i.id);
+ if (categoryState(cat) === "checked") {
+ onChange(selectedTools.filter((id) => !ids.includes(id)));
+ } else {
+ onChange([...new Set([...selectedTools, ...ids])]);
+ }
+ };
+
+ const toggleTool = (id) => {
+ onChange(
+ selectedTools.includes(id)
+ ? selectedTools.filter((x) => x !== id)
+ : [...selectedTools, id]
+ );
+ };
+
+ const removeTool = (id) => onChange(selectedTools.filter((x) => x !== id));
+ const labelFor = (id) => toolLookup.get(id)?.name || id;
+
+ return (
+
+
+ {t("scheduledJobs.modal.toolsLabel", "Tools (Optional)")}
+
+
+ {t(
+ "scheduledJobs.modal.toolsDescription",
+ "Select which agent tools this job can use. If none are selected, the job runs without any tools."
+ )}
+
+
+
+
+ setSearch(e.target.value)}
+ onFocus={() => setOpen(true)}
+ onClick={() => setOpen(true)}
+ placeholder={t("scheduledJobs.modal.toolsSearch", "Search tools")}
+ className="border border-transparent light:border-slate-300 bg-zinc-800 light:bg-white text-zinc-300 light:text-slate-700 placeholder:text-zinc-400 light:placeholder:text-slate-500 text-sm rounded-lg focus:outline-sky-500 outline-none block w-full px-3.5 py-2.5 pr-9"
+ />
+
+
+
+ {open && (
+
+ {filteredCategories.length === 0 ? (
+
+ {t("scheduledJobs.modal.toolsNoResults", "No tools match")}
+
+ ) : (
+ filteredCategories.map((cat) => {
+ const expanded = isCategoryExpanded(cat.category);
+ const state = categoryState(cat);
+ const enabledCount = (cat.items || []).filter((i) =>
+ selectedTools.includes(i.id)
+ ).length;
+
+ return (
+
+ );
+ })
+ )}
+
+ )}
+
+
+ {selectedTools.length > 0 && (
+
+ {selectedTools.map((id) => (
+
+ {labelFor(id)}
+ removeTool(id)}
+ aria-label={`Remove ${labelFor(id)}`}
+ className="border-none text-zinc-400 light:text-slate-500 hover:text-zinc-50 light:hover:text-slate-900 transition-colors"
+ >
+
+
+
+ ))}
+
+ )}
+
+ );
+}
diff --git a/frontend/src/pages/GeneralSettings/ScheduledJobs/JobFormModal/index.jsx b/frontend/src/pages/GeneralSettings/ScheduledJobs/JobFormModal/index.jsx
new file mode 100644
index 00000000..be0051ff
--- /dev/null
+++ b/frontend/src/pages/GeneralSettings/ScheduledJobs/JobFormModal/index.jsx
@@ -0,0 +1,163 @@
+import { useState, useEffect } from "react";
+import { X, WarningCircle } from "@phosphor-icons/react";
+import ScheduledJobs from "@/models/scheduledJobs";
+import showToast from "@/utils/toast";
+import { safeJsonParse } from "@/utils/request";
+import { useTranslation } from "react-i18next";
+import JobDescription from "./JobDescription";
+import JobSchedule from "./JobSchedule";
+import ToolsSelector from "./ToolsSelector";
+import FormActions from "./FormActions";
+
/**
 * Derive the modal's initial form state from an existing job (edit mode)
 * or from defaults (create mode). Default cron "0 9 * * *" = daily at 9:00.
 * NOTE(review): scheduleMode always starts as "builder", even when editing
 * a job whose saved cron may not be expressible by the builder — confirm
 * that JobSchedule handles that fallback.
 * @param {Object|null} job - Existing job record, or null when creating.
 * @returns {Object} Initial form state for JobFormModal.
 */
function setDefaultFormState(job) {
  // tools is persisted as a JSON string; parse defensively, defaulting to [].
  const selectedTools = job?.tools ? safeJsonParse(job.tools, []) : [];
  return {
    name: job?.name || "",
    prompt: job?.prompt || "",
    schedule: job?.schedule || "0 9 * * *",
    scheduleMode: "builder",
    selectedTools,
  };
}
+
+export default function JobFormModal({ job = null, onClose, onSaved }) {
+ const { t } = useTranslation();
+ const isEditing = !!job;
+ const [form, setForm] = useState(setDefaultFormState(job));
+ const [availableTools, setAvailableTools] = useState([]);
+ const [saving, setSaving] = useState(false);
+ const [errors, setErrors] = useState({
+ name: false,
+ prompt: false,
+ schedule: false,
+ });
+ const hasErrors = () => Object.values(errors).some(Boolean);
+
+ const clearError = (field) => {
+ setErrors((prev) => (prev[field] ? { ...prev, [field]: false } : prev));
+ };
+
+ useEffect(() => {
+ ScheduledJobs.availableTools().then(({ tools }) => {
+ setAvailableTools(tools || []);
+ });
+ }, []);
+
+ const handleChange = (e) => {
+ const { name, value } = e.target;
+ setForm((prev) => ({ ...prev, [name]: value }));
+ clearError(name);
+ };
+
+ const handleScheduleChange = (cron) => {
+ setForm((prev) => ({ ...prev, schedule: cron }));
+ clearError("schedule");
+ };
+
+ const handleModeChange = (mode) => {
+ setForm((prev) => ({ ...prev, scheduleMode: mode }));
+ };
+
+ const setSelectedTools = (selectedTools) => {
+ setForm((prev) => ({ ...prev, selectedTools }));
+ };
+
+ const handleSubmit = async (e) => {
+ e.preventDefault();
+ const nextErrors = {
+ name: !form.name.trim(),
+ prompt: !form.prompt.trim(),
+ schedule: !form.schedule.trim(),
+ };
+ if (nextErrors.name || nextErrors.prompt || nextErrors.schedule) {
+ setErrors(nextErrors);
+ return;
+ }
+
+ setSaving(true);
+ const data = {
+ name: form.name.trim(),
+ prompt: form.prompt.trim(),
+ schedule: form.schedule.trim(),
+ tools: form.selectedTools,
+ };
+
+ const result = isEditing
+ ? await ScheduledJobs.update(job.id, data)
+ : await ScheduledJobs.create(data);
+
+ setSaving(false);
+
+ if (result.error) {
+ showToast(result.error, "error");
+ return;
+ }
+
+ showToast(
+ isEditing
+ ? t("scheduledJobs.modal.jobUpdated")
+ : t("scheduledJobs.modal.jobCreated"),
+ "success"
+ );
+ onSaved();
+ };
+
+ return (
+
+
+
+
+
+ {isEditing
+ ? t("scheduledJobs.modal.titleEdit")
+ : t("scheduledJobs.modal.titleNew")}
+
+
+
+
+
+ {hasErrors() && (
+
+
+
+ {t(
+ "scheduledJobs.modal.requiredFieldsBanner",
+ "Please fill out all required fields in order to create job."
+ )}
+
+
+ )}
+
+
+
+
+
+ );
+}
diff --git a/frontend/src/pages/GeneralSettings/ScheduledJobs/RunDetailPage.jsx b/frontend/src/pages/GeneralSettings/ScheduledJobs/RunDetailPage.jsx
new file mode 100644
index 00000000..6ae29835
--- /dev/null
+++ b/frontend/src/pages/GeneralSettings/ScheduledJobs/RunDetailPage.jsx
@@ -0,0 +1,434 @@
+import { useEffect, useState } from "react";
+import { useParams, useNavigate } from "react-router-dom";
+import { useTranslation } from "react-i18next";
+import Sidebar from "@/components/SettingsSidebar";
+import { isMobile } from "react-device-detect";
+import {
+ ArrowLeft,
+ ChatText,
+ Brain,
+ Wrench,
+ File,
+ Stop,
+} from "@phosphor-icons/react";
+import ScheduledJobs from "@/models/scheduledJobs";
+import usePolling from "@/hooks/usePolling";
+import showToast from "@/utils/toast";
+import paths from "@/utils/paths";
+import renderMarkdown from "@/utils/chat/markdown";
+import CollapsibleSection from "./components/CollapsibleSection";
+import ToolCallCard from "./components/ToolCallCard";
+import GeneratedFileCard from "./components/GeneratedFileCard";
+import moment from "moment";
+import { formatDuration, numberWithCommas } from "@/utils/numbers";
+import DOMPurify from "@/utils/chat/purify";
+import {
+ THOUGHT_REGEX_COMPLETE,
+ THOUGHT_REGEX_OPEN,
+ THOUGHT_REGEX_CLOSE,
+} from "@/components/WorkspaceChat/ChatContainer/ChatHistory/ThoughtContainer";
+
+export default function RunDetailPage() {
+ const { t } = useTranslation();
+ const { id, runId } = useParams();
+ const navigate = useNavigate();
+ const [loading, setLoading] = useState(true);
+ const [run, setRun] = useState(null);
+ const [job, setJob] = useState(null);
+ const [continuing, setContinuing] = useState(false);
+ const [killing, setKilling] = useState(false);
+
+ useEffect(() => {
+ fetchRun();
+ }, [runId]);
+
+ const fetchRun = async () => {
+ const data = await ScheduledJobs.getRun(runId);
+ setRun(data.run);
+ setJob(data.job);
+ setLoading(false);
+
+ if (data.run && !data.run.readAt) {
+ ScheduledJobs.markRunRead(runId);
+ }
+ };
+
+ const isNonTerminal = run?.status === "running" || run?.status === "queued";
+ // Poll every 3s while a run is in progress so the trace/status updates live.
+ // Stops automatically once the run reaches a terminal state.
+ usePolling(fetchRun, 3000, isNonTerminal);
+
+ const handleContinueInThread = async () => {
+ setContinuing(true);
+ const { workspaceSlug, threadSlug, error } =
+ await ScheduledJobs.continueInThread(runId);
+
+ if (error || !workspaceSlug || !threadSlug) {
+ showToast(error || t("scheduledJobs.runDetail.threadFailed"), "error");
+ setContinuing(false);
+ return;
+ }
+
+ navigate(paths.workspace.thread(workspaceSlug, threadSlug));
+ };
+
+ const handleKillRun = async () => {
+ setKilling(true);
+ const { success, error } = await ScheduledJobs.killRun(runId);
+ setKilling(false);
+
+ if (!success) {
+ showToast(error || t("scheduledJobs.toast.killFailed"), "error");
+ return;
+ }
+
+ showToast(t("scheduledJobs.toast.killed"), "success");
+ fetchRun();
+ };
+
+ if (loading) {
+ return (
+
+
+ {t("scheduledJobs.runDetail.loading")}
+
+
+ );
+ }
+
+ if (!run) {
+ return (
+
+
+ {t("scheduledJobs.runDetail.notFound")}
+
+
+ );
+ }
+
+ const result = run.result || {};
+ return (
+
+ navigate(paths.settings.scheduledJobRuns(id))}
+ onContinueInThread={handleContinueInThread}
+ onKillRun={handleKillRun}
+ />
+
+
+
+ );
+}
+
+function RunDetailLayout({ children }) {
+ return (
+
+ );
+}
+
+function RunHeader({
+ t,
+ job,
+ run,
+ result,
+ continuing,
+ killing,
+ onBack,
+ onContinueInThread,
+ onKillRun,
+}) {
+ function getStatusInfo() {
+ return {
+ completed: {
+ text: t("scheduledJobs.status.completed"),
+ style: "text-green-400 light:text-green-600",
+ },
+ failed: {
+ text: t("scheduledJobs.status.failed"),
+ style: "text-red-400 light:text-red-600",
+ },
+ timed_out: {
+ text: t("scheduledJobs.status.timed_out"),
+ style: "text-orange-400 light:text-orange-600",
+ },
+ running: {
+ text: t("scheduledJobs.status.running"),
+ style: "text-yellow-400 light:text-yellow-600",
+ },
+ queued: {
+ text: t("scheduledJobs.status.queued"),
+ style: "text-blue-400 light:text-blue-600",
+ },
+ default: {
+ text: "—",
+ style: "text-zinc-400 light:text-slate-600",
+ },
+ };
+ }
+ const statusInfo = getStatusInfo();
+ const { text, style } = statusInfo[run.status] || statusInfo.default;
+ const isKillable = ["running", "queued"].includes(run.status);
+
+ return (
+
+
+
+
+ {t("scheduledJobs.runDetail.back")}
+
+
+ {t("scheduledJobs.runDetail.runHeading", {
+ name: job?.name || t("scheduledJobs.runDetail.unknownJob"),
+ id: run.id,
+ })}
+
+
+ {text}
+
+ {moment(run.startedAt).format("LTS")}
+
+ {result.duration && (
+
+ {t("scheduledJobs.runDetail.duration", {
+ value: formatDuration(result.duration / 1000),
+ })}
+
+ )}
+
+
+
+ {isKillable && (
+
+
+ {killing
+ ? t("scheduledJobs.runDetail.killing")
+ : t("scheduledJobs.runDetail.stopJob")}
+
+ )}
+ {run.status === "completed" && (
+
+ {continuing
+ ? t("scheduledJobs.runDetail.creating")
+ : t("scheduledJobs.runDetail.continueInThread")}
+
+ )}
+
+
+ );
+}
+
+function PromptSection({ t, prompt }) {
+ return (
+
+
+ {t("scheduledJobs.runDetail.sections.prompt")}
+
+
+ {prompt || "—"}
+
+
+ );
+}
+
+function ErrorSection({ t, error }) {
+ if (!error) return null;
+ return (
+
+
+ {t("scheduledJobs.runDetail.sections.error")}
+
+
{error}
+
+ );
+}
+
+function AgentThoughtsSection({ t, result }) {
+ if (!result.thoughts || result.thoughts.length === 0) return null;
+
+ return (
+
+
+ {result.thoughts.map((thought, i) => (
+
+
+ {i + 1}.
+
+ {thought}
+
+ ))}
+
+
+ );
+}
+
+function ToolCallsSection({ t, result }) {
+ if (!result.toolCalls || result.toolCalls.length === 0) return null;
+
+ return (
+
+
+ {result.toolCalls.map((toolCall, i) => (
+
+ ))}
+
+
+ );
+}
+
+function GeneratedFilesSection({ t, result }) {
+ // outputs contains {type, payload} objects from agent plugins
+ const outputs = result.outputs || [];
+ if (outputs.length === 0) return null;
+
+ return (
+
+
+ {outputs.map((output, i) => (
+
+ ))}
+
+
+ );
+}
+
+function FinalResponseSection({ t, result }) {
+ if (!result.text) return null;
+ let reasoning = null;
+ let msgToRender = result.text;
+
+ if (result.text.match(THOUGHT_REGEX_COMPLETE)) {
+ reasoning = result.text.match(THOUGHT_REGEX_COMPLETE)?.[0];
+ if (reasoning)
+ reasoning = reasoning
+ .replace(THOUGHT_REGEX_OPEN, "")
+ .replace(THOUGHT_REGEX_CLOSE, "")
+ .trim();
+ msgToRender = result.text.replace(THOUGHT_REGEX_COMPLETE, "");
+ }
+
+ return (
+
+ {reasoning && (
+
+
+
+ )}
+
+
+ );
+}
+
+function MetricsSection({ t, metrics }) {
+ if (!metrics || Object.keys(metrics).length === 0) return null;
+
+ // Todo: there is a bug where if you create a job that has no tools, we wont get any metrics
+ // To avoid confusion, we should not show the metrics section if there are no metrics.
+ if (!metrics.prompt_tokens || !metrics.completion_tokens) return null;
+
+ function renderModel(metrics) {
+ if (!metrics.model) return null;
+ return (
+
+ ({metrics.model})
+
+ );
+ }
+
+ return (
+
+
+ {t("scheduledJobs.runDetail.sections.metrics")} {renderModel(metrics)}
+
+
+ {metrics.prompt_tokens != null && (
+
+ {t("scheduledJobs.runDetail.metrics.promptTokens")}{" "}
+
+ {numberWithCommas(metrics.prompt_tokens)}
+
+
+ )}
+ {metrics.completion_tokens != null && (
+
+ {t("scheduledJobs.runDetail.metrics.completionTokens")}{" "}
+
+ {numberWithCommas(metrics.completion_tokens)}
+
+
+ )}
+
+
+ );
+}
diff --git a/frontend/src/pages/GeneralSettings/ScheduledJobs/RunHistoryPage.jsx b/frontend/src/pages/GeneralSettings/ScheduledJobs/RunHistoryPage.jsx
new file mode 100644
index 00000000..316d08d4
--- /dev/null
+++ b/frontend/src/pages/GeneralSettings/ScheduledJobs/RunHistoryPage.jsx
@@ -0,0 +1,165 @@
+import { useEffect, useState } from "react";
+import { useParams, useNavigate } from "react-router-dom";
+import { useTranslation } from "react-i18next";
+import Sidebar from "@/components/SettingsSidebar";
+import { isMobile } from "react-device-detect";
+import { ArrowLeft } from "@phosphor-icons/react";
+import ScheduledJobs from "@/models/scheduledJobs";
+import usePolling from "@/hooks/usePolling";
+import showToast from "@/utils/toast";
+import paths from "@/utils/paths";
+import RunRow from "./components/RunRow";
+import { humanizeCron } from "./utils/cron";
+
+export default function RunHistoryPage() {
+ const { t } = useTranslation();
+ const { id } = useParams();
+ const [job, setJob] = useState(null);
+ const [runs, setRuns] = useState([]);
+ const [loading, setLoading] = useState(true);
+ const [triggering, setTriggering] = useState(false);
+ const hasInFlightRun = runs.some(
+ (r) => r.status === "queued" || r.status === "running"
+ );
+
+ const fetchRuns = async () => {
+ const { runs: foundRuns } = await ScheduledJobs.runs(id);
+ setRuns(foundRuns || []);
+ setLoading(false);
+ };
+
+ useEffect(() => {
+ ScheduledJobs.get(id).then(({ job }) => setJob(job));
+ fetchRuns();
+ }, [id]);
+
+ // Poll every 5s while visible so new runs appear and running statuses update.
+ usePolling(fetchRuns, 5000);
+
+ const handleRunNow = async () => {
+ setTriggering(true);
+ const { success, skipped, error } = await ScheduledJobs.trigger(id);
+ setTriggering(false);
+ if (!success) {
+ showToast(error || t("scheduledJobs.toast.triggerFailed"), "error");
+ return;
+ }
+
+ if (skipped) {
+ showToast(
+ t(
+ "scheduledJobs.toast.triggerSkipped",
+ "A run is already in progress for this job"
+ ),
+ "info"
+ );
+ } else {
+ showToast(t("scheduledJobs.toast.triggered"), "success");
+ }
+ fetchRuns();
+ };
+
+ if (loading) {
+ return (
+
+
+
+ {t("scheduledJobs.loading")}
+
+
+
+ );
+ }
+
+ return (
+
+
+
+
+ {t("scheduledJobs.runHistory.table.status")}
+
+
+ {t("scheduledJobs.runHistory.table.started")}
+
+
+ {t("scheduledJobs.runHistory.table.duration")}
+
+
+ {t("scheduledJobs.runHistory.table.error")}
+
+
+
+
+ {runs.length === 0 ? (
+
+
+
+ {t("scheduledJobs.runHistory.emptyTitle")}
+
+
+ {t("scheduledJobs.runHistory.emptySubtitle")}
+
+
+
+ {t("scheduledJobs.runHistory.runNow")}
+
+
+ ) : (
+
+ {runs.map((run) => (
+
+ ))}
+
+ )}
+
+
+ );
+}
+
+function RunHistoryLayout({ job, children }) {
+ const { t, i18n } = useTranslation();
+ const navigate = useNavigate();
+
+ return (
+
+
+
+
+
+
navigate(paths.settings.scheduledJobs())}
+ className="border-none flex items-center gap-2 text-zinc-400 light:text-slate-600 hover:text-zinc-50 light:hover:text-slate-950 text-sm transition-colors w-fit"
+ >
+
+ {t("scheduledJobs.runHistory.back")}
+
+
+ {t("scheduledJobs.runHistory.title", {
+ name: job?.name || "...",
+ })}
+
+
+ {t("scheduledJobs.runHistory.schedule")}{" "}
+ {humanizeCron(job?.schedule, i18n.language) || "—"}
+
+
+ {children}
+
+
+
+ );
+}
diff --git a/frontend/src/pages/GeneralSettings/ScheduledJobs/components/CollapsibleSection.jsx b/frontend/src/pages/GeneralSettings/ScheduledJobs/components/CollapsibleSection.jsx
new file mode 100644
index 00000000..efe305d5
--- /dev/null
+++ b/frontend/src/pages/GeneralSettings/ScheduledJobs/components/CollapsibleSection.jsx
@@ -0,0 +1,71 @@
+import { useState } from "react";
+import {
+ CaretDown,
+ CaretRight,
+ CheckCircle,
+ Copy,
+} from "@phosphor-icons/react";
+import { copyMarkdownAsRichText } from "@/utils/clipboard";
+
+// Generic expand/collapse panel used by the run-detail page to wrap each
+// trace section (thinking, tool calls, response, files).
+export default function CollapsibleSection({
+ title,
+ icon: Icon,
+ children,
+ defaultOpen = false,
+ copyableContent = null,
+}) {
+ const [open, setOpen] = useState(defaultOpen);
+ return (
+
+
+ setOpen(!open)}
+ className="border-none flex-1 h-12 flex items-center gap-2 px-[18px] hover:bg-white/5 light:hover:bg-slate-100 transition-colors text-left"
+ >
+ {open ? (
+
+ ) : (
+
+ )}
+
+
+ {title}
+
+
+ {copyableContent && }
+
+ {open && (
+
+ {children}
+
+ )}
+
+ );
+}
+
+function CopyButton({ content }) {
+ const [copied, setCopied] = useState(false);
+ const copyToClipboard = async () => {
+ await copyMarkdownAsRichText(content);
+ setCopied(true);
+ setTimeout(() => setCopied(false), 2000);
+ };
+
+ return (
+
+ {copied ? (
+
+ ) : (
+
+ )}
+
+ );
+}
diff --git a/frontend/src/pages/GeneralSettings/ScheduledJobs/components/GeneratedFileCard.jsx b/frontend/src/pages/GeneralSettings/ScheduledJobs/components/GeneratedFileCard.jsx
new file mode 100644
index 00000000..26765f05
--- /dev/null
+++ b/frontend/src/pages/GeneralSettings/ScheduledJobs/components/GeneratedFileCard.jsx
@@ -0,0 +1,134 @@
+import { useState } from "react";
+import { useTranslation } from "react-i18next";
+import { CircleNotch, DownloadSimple } from "@phosphor-icons/react";
+import StorageFiles from "@/models/files";
+import showToast from "@/utils/toast";
+import { humanFileSize } from "@/utils/numbers";
+
// File extension → display badge/colors. Lifted out of RunDetailPage with
// the file card itself; nothing else uses this map so it lives here.
// Shared entry objects are safe: getFileDisplayInfo spreads before returning.
const PPT_INFO = {
  badge: "PPT",
  bg: "bg-orange-100",
  text: "text-orange-800",
  label: (t) => t("scheduledJobs.file.types.powerpoint"),
};
const DOC_INFO = {
  badge: "DOC",
  bg: "bg-blue-100",
  text: "text-blue-800",
  label: (t) => t("scheduledJobs.file.types.word"),
};
const SHEET_INFO = {
  badge: "XLS",
  bg: "bg-green-100",
  text: "text-green-800",
  label: (t) => t("scheduledJobs.file.types.spreadsheet"),
};
const FILE_DISPLAY_MAP = {
  pptx: PPT_INFO,
  ppt: PPT_INFO,
  pdf: {
    badge: "PDF",
    bg: "bg-red-100",
    text: "text-red-800",
    label: (t) => t("scheduledJobs.file.types.pdf"),
  },
  doc: DOC_INFO,
  docx: DOC_INFO,
  xls: SHEET_INFO,
  xlsx: SHEET_INFO,
  csv: {
    badge: "CSV",
    bg: "bg-green-100",
    text: "text-green-800",
    label: (t) => t("scheduledJobs.file.types.spreadsheet"),
  },
  default: {
    badge: "FILE",
    bg: "bg-zinc-200",
    text: "text-zinc-800",
    label: (t) => t("scheduledJobs.file.types.generic"),
  },
};

/**
 * Resolve display info (badge, colors, localized type label) for a filename.
 * Falls back to the generic entry for unknown or missing extensions.
 * Uses Object.hasOwn so inherited prototype keys (e.g. a filename ending in
 * ".constructor") can never resolve to a non-entry and crash on .label(t).
 * @param {string|null|undefined} filename - File name to classify.
 * @param {Function} t - i18n translate function.
 * @returns {Object} Display info with a resolved `type` label string.
 */
function getFileDisplayInfo(filename, t) {
  const extension = filename?.split(".")?.pop()?.toLowerCase() ?? "txt";
  const fileDisplayInfo = Object.hasOwn(FILE_DISPLAY_MAP, extension)
    ? FILE_DISPLAY_MAP[extension]
    : FILE_DISPLAY_MAP.default;
  return {
    ...fileDisplayInfo,
    type: fileDisplayInfo.label(t),
  };
}
+
+// Card representing a file generated by a job run. Streams the file from
+// the storage API on download via dynamic import of file-saver.
+export default function GeneratedFileCard({ file }) {
+ const { t } = useTranslation();
+ const [downloading, setDownloading] = useState(false);
+ const { badge, bg, text, type } = getFileDisplayInfo(file.filename, t);
+
+ const handleDownload = async () => {
+ if (downloading || !file.storageFilename) return;
+ setDownloading(true);
+ try {
+ const blob = await StorageFiles.download(file.storageFilename);
+ if (!blob) throw new Error("Failed to download file");
+ const { saveAs } = await import("file-saver");
+ saveAs(blob, file.filename || file.storageFilename);
+ } catch {
+ showToast(t("scheduledJobs.file.downloadFailed"), "error");
+ } finally {
+ setDownloading(false);
+ }
+ };
+
+ return (
+
+
+
+ {badge}
+
+
+
+ {file.filename || t("scheduledJobs.file.unknown")}
+
+
+ {humanFileSize(file.fileSize, true, 1)}
+ {file.fileSize && type ? " · " : ""}
+ {type}
+
+
+
+
+ {downloading ? (
+
+ ) : (
+
+ )}
+
+
+ );
+}
diff --git a/frontend/src/pages/GeneralSettings/ScheduledJobs/components/JobRow.jsx b/frontend/src/pages/GeneralSettings/ScheduledJobs/components/JobRow.jsx
new file mode 100644
index 00000000..467f3bea
--- /dev/null
+++ b/frontend/src/pages/GeneralSettings/ScheduledJobs/components/JobRow.jsx
@@ -0,0 +1,107 @@
+import { useNavigate } from "react-router-dom";
+import { Play, PencilSimple, X } from "@phosphor-icons/react";
+import paths from "@/utils/paths";
+import { humanizeCron } from "../utils/cron";
+import { useTranslation } from "react-i18next";
+
+// One row of the scheduled-jobs list. Clicking the name navigates to the
+// run history; CRUD callbacks come from the parent.
+export default function JobRow({ job, onTrigger, onToggle, onEdit, onDelete }) {
+ const navigate = useNavigate();
+ const { t, i18n } = useTranslation();
+ // A job has at most one in-flight run; disable "Run now" while it's queued
+ // or running so users get visible feedback that their click registered and
+ // so the backend dedup never has to drop a manual trigger silently.
+ const inFlight =
+ job.latestRun?.status === "running" || job.latestRun?.status === "queued";
+
+ const statusText = job.latestRun
+ ? t(`scheduledJobs.status.${job.latestRun.status}`, job.latestRun.status)
+ : t("scheduledJobs.row.neverRun");
+
+ const stop = (handler) => (e) => {
+ e.stopPropagation();
+ handler();
+ };
+
+ return (
+ navigate(paths.settings.scheduledJobRuns(job.id))}
+ onKeyDown={(e) => {
+ if (e.key === "Enter" || e.key === " ") {
+ e.preventDefault();
+ navigate(paths.settings.scheduledJobRuns(job.id));
+ }
+ }}
+ className="flex items-center justify-between px-4 h-14 hover:bg-white/5 light:hover:bg-slate-200 transition-colors cursor-pointer"
+ title={t("scheduledJobs.row.viewRuns")}
+ >
+
+ {job.name}
+
+
+ {humanizeCron(job.schedule, i18n.language)}
+
+
+ {statusText}
+
+
+ {job.lastRunAt ? new Date(job.lastRunAt).toLocaleString() : "—"}
+
+
+ {job.enabled && job.nextRunAt
+ ? new Date(job.nextRunAt).toLocaleString()
+ : "—"}
+
+
+
onDelete(job.id))}
+ className="border-none p-2 rounded-full text-zinc-400 light:text-slate-950 hover:text-red-400 light:hover:text-red-600 hover:bg-white/10 light:hover:bg-slate-300/50 transition-colors"
+ title={t("scheduledJobs.row.delete")}
+ >
+
+
+
onEdit(job))}
+ className="border-none p-2 rounded-full text-zinc-400 light:text-slate-950 hover:text-white light:hover:text-slate-700 hover:bg-white/10 light:hover:bg-slate-300/50 transition-colors"
+ title={t("scheduledJobs.row.edit")}
+ >
+
+
+
onTrigger(job.id))}
+ disabled={inFlight}
+ className="border-none p-2 rounded-full text-zinc-400 light:text-slate-950 hover:text-white light:hover:text-slate-700 hover:bg-white/10 light:hover:bg-slate-300/50 transition-colors disabled:opacity-40 disabled:cursor-not-allowed disabled:hover:bg-transparent"
+ title={t("scheduledJobs.row.runNow")}
+ >
+
+
+
onToggle(job.id))}
+ title={
+ job.enabled
+ ? t("scheduledJobs.row.disable")
+ : t("scheduledJobs.row.enable")
+ }
+ className={`border-none relative h-[15px] w-7 rounded-full p-0.5 transition-colors ${
+ job.enabled ? "bg-green-400" : "bg-zinc-600 light:bg-slate-300"
+ }`}
+ >
+
+
+
+
+ );
+}
diff --git a/frontend/src/pages/GeneralSettings/ScheduledJobs/components/RunRow.jsx b/frontend/src/pages/GeneralSettings/ScheduledJobs/components/RunRow.jsx
new file mode 100644
index 00000000..d8f978ac
--- /dev/null
+++ b/frontend/src/pages/GeneralSettings/ScheduledJobs/components/RunRow.jsx
@@ -0,0 +1,101 @@
+import { useState } from "react";
+import { useNavigate } from "react-router-dom";
+import { useTranslation } from "react-i18next";
+import { Circle, Stop } from "@phosphor-icons/react";
+import moment from "moment";
+import paths from "@/utils/paths";
+import StatusBadge from "./StatusBadge";
+import ScheduledJobs from "@/models/scheduledJobs";
+import showToast from "@/utils/toast";
+import { formatDuration } from "@/utils/numbers";
+
/**
 * Render the elapsed wall-clock time of a run as ms / s / m.
 * Falls back to an em dash when the run has not both started and finished.
 * @param {Object} run - The run object (reads `startedAt` / `completedAt`).
 * @returns {string} The formatted duration, or "—" when unavailable.
 */
function formatRunDuration(run) {
  const { startedAt, completedAt } = run;
  if (!startedAt || !completedAt) return "—";
  const elapsedMs = moment(completedAt).diff(moment(startedAt));
  return formatDuration(moment.duration(elapsedMs).asSeconds());
}
+
/**
 * One row of the run history table. The whole row is clickable and
 * navigates to the run detail page.
 * @param {Object} run - The run object.
 * @param {string} jobId - The ID of the job.
 * @param {function} onKilled - Callback when a run is killed (to refresh the list).
 * @returns {React.ReactNode} The rendered row.
 */
export default function RunRow({ run, jobId, onKilled }) {
  const { t } = useTranslation();
  const navigate = useNavigate();
  const [killing, setKilling] = useState(false);
  // Terminal-state runs the user has not opened yet get an "unread" marker.
  const unreadAndTerminal =
    !run.readAt && !["running", "queued"].includes(run.status);
  // Only in-flight (running/queued) runs can be killed.
  const isKillable = ["running", "queued"].includes(run.status);

  const handleKill = async (e) => {
    // Stop the click from bubbling to the row, which would navigate away.
    e.stopPropagation();
    setKilling(true);
    const { success, error } = await ScheduledJobs.killRun(run.id);
    setKilling(false);

    if (!success) {
      showToast(error || t("scheduledJobs.toast.killFailed"), "error");
      return;
    }

    showToast(t("scheduledJobs.toast.killed"), "success");
    // Let the parent refresh its run list, if it asked to be notified.
    onKilled?.();
  };

  return (

      navigate(paths.settings.scheduledJobRunDetail(jobId, run.id))
    }
      className="border-none flex items-center px-4 h-14 hover:bg-white/5 light:hover:bg-slate-200 transition-colors text-left w-full"
    >

      {unreadAndTerminal && (

      )}
      {isKillable && (



      )}



      {new Date(run.startedAt).toLocaleString()}


      {formatRunDuration(run)}


      {run.error || "—"}


  );
}
diff --git a/frontend/src/pages/GeneralSettings/ScheduledJobs/components/StatusBadge.jsx b/frontend/src/pages/GeneralSettings/ScheduledJobs/components/StatusBadge.jsx
new file mode 100644
index 00000000..f5d2ae61
--- /dev/null
+++ b/frontend/src/pages/GeneralSettings/ScheduledJobs/components/StatusBadge.jsx
@@ -0,0 +1,51 @@
+import { useTranslation } from "react-i18next";
+
/**
 * Build the status → { text, style } lookup used by StatusBadge.
 * Styling per the run history figma:
 * - non-terminal (queued, running) → italic zinc-400
 * - completed → white medium
 * - failed / timed_out → red-400
 * Unknown statuses resolve through the `default` entry ("—").
 * @param {function} t - i18next translation function.
 * @returns {Object} Map of status key → { text, style }.
 */
function getStatusesMap(t) {
  const errorStyle = "text-red-400 light:text-red-600";
  const inFlightStyle = "italic text-zinc-400 light:text-slate-600";
  return {
    completed: {
      text: t("scheduledJobs.status.completed"),
      style: "font-medium text-white light:text-slate-950",
    },
    failed: {
      text: t("scheduledJobs.status.failed"),
      style: errorStyle,
    },
    timed_out: {
      text: t("scheduledJobs.status.timed_out"),
      style: errorStyle,
    },
    running: {
      text: t("scheduledJobs.status.running"),
      style: inFlightStyle,
    },
    queued: {
      text: t("scheduledJobs.status.queued"),
      style: inFlightStyle,
    },
    default: {
      text: "—",
      style: "text-zinc-400 light:text-slate-600",
    },
  };
}
+
/**
 * Status text shown in the run history table. Plain text — no pill — to
 * match the run history figma. Unknown/missing statuses fall back to the
 * map's `default` entry ("—").
 * @param {string} status - The status of the run.
 * @returns {React.ReactNode} The rendered status text element.
 */
export default function StatusBadge({ status }) {
  const { t } = useTranslation();
  const statusesMap = getStatusesMap(t);
  const { text, style } = statusesMap[status] || statusesMap.default;
  return {text} ;
}
diff --git a/frontend/src/pages/GeneralSettings/ScheduledJobs/components/ToolCallCard.jsx b/frontend/src/pages/GeneralSettings/ScheduledJobs/components/ToolCallCard.jsx
new file mode 100644
index 00000000..408bfcc3
--- /dev/null
+++ b/frontend/src/pages/GeneralSettings/ScheduledJobs/components/ToolCallCard.jsx
@@ -0,0 +1,144 @@
+import { useState } from "react";
+import { useTranslation } from "react-i18next";
+import { Wrench } from "@phosphor-icons/react";
+import hljs from "highlight.js";
+import { safeJsonParse } from "@/utils/request";
+import { useTheme } from "@/hooks/useTheme";
+import DOMPurify from "@/utils/chat/purify";
+import truncate from "truncate";
+import moment from "moment";
+
+const MAX_RESULT_LENGTH = 5000;
+
/**
 * Pick the highlight.js theme name matching the current UI theme.
 * @param {boolean} isLight - Whether the theme is light.
 * @returns {string} The highlight.js theme name.
 */
function getHljsTheme(isLight) {
  if (isLight) return "github";
  return "github-dark";
}
+
/**
 * Try to render `value` as syntax-highlighted JSON. Returns a `dangerouslySetInnerHTML`
 * payload, or null if the value isn't an object (caller should fall back to plain text).
 * NOTE(review): truncation happens before highlighting, so values longer than
 * MAX_RESULT_LENGTH are cut mid-JSON; highlight.js still best-effort colors the
 * fragment — confirm this is acceptable for display.
 * @param {string|Object} value - The value to format and highlight.
 * @returns {{__html: string}|null} Sanitized highlighted HTML payload, or null.
 */
function formatAndHighlight(value) {
  // Strings may contain serialized JSON; parse them (falling back to the raw
  // string when parsing fails) so objects get pretty-printed.
  const parsed =
    typeof value === "string" ? safeJsonParse(value, value) : value;
  if (typeof parsed !== "object" || parsed === null) return null;

  const formatted = JSON.stringify(parsed, null, 2);
  const truncatedFormatted = truncate(formatted, MAX_RESULT_LENGTH);
  const highlighted = hljs.highlight(truncatedFormatted, {
    language: "json",
  }).value;
  // Sanitize before the caller hands this to dangerouslySetInnerHTML.
  return { __html: DOMPurify.sanitize(highlighted) };
}
+
/**
 * Single tool call inside the run trace. Shows the tool name, arguments, and
 * (on demand) the tool's result. JSON arguments and results are pretty-printed
 * and syntax-highlighted; non-JSON values fall back to plain text.
 * @param {Object} toolCall - The tool call object.
 * @returns {React.ReactNode} The rendered tool call card.
 */
export default function ToolCallCard({ toolCall }) {
  // Result visibility is owned here so it survives re-renders of the children.
  const [showResult, setShowResult] = useState(false);
  return (




      {toolCall.toolName}







  );
}
+
/**
 * Timestamp label for a tool call, formatted as a localized time ("LTS").
 * Renders nothing when the call carries no timestamp.
 * @param {Object} toolCall - The tool call object (reads `timestamp`).
 * @returns {React.ReactNode|null}
 */
function ToolCallTimestamp({ toolCall }) {
  if (!toolCall.timestamp) return null;
  return (

    {moment(toolCall.timestamp).format("LTS")}

  );
}
+
/**
 * Arguments section of a tool call. JSON-like arguments are shown
 * syntax-highlighted (via formatAndHighlight); anything else falls back to
 * plain pre-formatted text. Renders nothing when there are no arguments.
 * @param {Object} toolCall - The tool call object (reads `arguments`).
 * @returns {React.ReactNode|null}
 */
function ToolCallArguments({ toolCall }) {
  const { t } = useTranslation();
  const { isLight } = useTheme();
  if (!toolCall.arguments) return null;

  const highlightedArgs = formatAndHighlight(toolCall.arguments);
  return (


    {t("scheduledJobs.toolCall.arguments")}

    {highlightedArgs ? (

    ) : (

      {typeof toolCall.arguments === "string"
        ? toolCall.arguments
        : JSON.stringify(toolCall.arguments, null, 2)}

    )}

  );
}
+
/**
 * Collapsible result section of a tool call. Hidden until the user clicks
 * "show result"; the result is truncated to MAX_RESULT_LENGTH and shown
 * syntax-highlighted when it parses as JSON, otherwise as plain text.
 * @param {Object} toolCall - The tool call object (reads `result`).
 * @param {boolean} showResult - Whether the result is expanded (presumably owned by ToolCallCard).
 * @param {function} setShowResult - Toggles `showResult`.
 * @returns {React.ReactNode|null}
 */
function ToolCallResult({ toolCall, showResult, setShowResult }) {
  const { t } = useTranslation();
  const { isLight } = useTheme();
  if (!toolCall.result) return null;

  const resultText =
    typeof toolCall.result === "string"
      ? toolCall.result
      : JSON.stringify(toolCall.result, null, 2);
  const truncatedResult = truncate(resultText, MAX_RESULT_LENGTH);
  const highlightedResult = formatAndHighlight(resultText);

  if (!resultText) return null;
  return (


    setShowResult(!showResult)}
      className="border-none text-xs text-blue-400 hover:text-blue-300 transition-colors"
    >
      {showResult
        ? t("scheduledJobs.toolCall.hideResult")
        : t("scheduledJobs.toolCall.showResult")}

    {showResult &&
      (highlightedResult ? (

      ) : (

        {truncatedResult}

      ))}

  );
}
diff --git a/frontend/src/pages/GeneralSettings/ScheduledJobs/index.jsx b/frontend/src/pages/GeneralSettings/ScheduledJobs/index.jsx
new file mode 100644
index 00000000..92ae4c20
--- /dev/null
+++ b/frontend/src/pages/GeneralSettings/ScheduledJobs/index.jsx
@@ -0,0 +1,242 @@
+import { useEffect, useState } from "react";
+import { useTranslation } from "react-i18next";
+import Sidebar from "@/components/SettingsSidebar";
+import { isMobile } from "react-device-detect";
+import ScheduledJobs from "@/models/scheduledJobs";
+import { subscribeToPushNotifications } from "@/hooks/useWebPushNotifications";
+import useWebPushNotifications from "@/hooks/useWebPushNotifications";
+import usePolling from "@/hooks/usePolling";
+import JobFormModal from "./JobFormModal";
+import ModalWrapper from "@/components/ModalWrapper";
+import { useModal } from "@/hooks/useModal";
+import showToast from "@/utils/toast";
+import JobRow from "./components/JobRow";
+import { Bell } from "@phosphor-icons/react";
+import { Tooltip } from "react-tooltip";
+
/**
 * Settings page listing all scheduled jobs. Handles create/edit (via modal),
 * delete, enable/disable toggling, and manual triggering; keeps the list
 * fresh with a visibility-aware 5s poll.
 * @returns {React.ReactNode}
 */
export default function ScheduledJobsPage() {
  const { t } = useTranslation();
  useWebPushNotifications(false);
  const { isOpen, openModal, closeModal } = useModal();
  const [loading, setLoading] = useState(true);
  const [jobs, setJobs] = useState([]);
  // Job currently being edited; null means the modal is in "create" mode.
  const [editingJob, setEditingJob] = useState(null);

  const fetchJobs = async () => {
    const { jobs: foundJobs } = await ScheduledJobs.list();
    setJobs(foundJobs || []);
    setLoading(false);
  };

  useEffect(() => {
    fetchJobs();
  }, []);

  // Poll every 5s while tab is visible so status badges and run timestamps stay in sync.
  usePolling(fetchJobs, 5000);

  // Confirm, delete, then refresh the table.
  const handleDelete = async (id) => {
    if (!window.confirm(t("scheduledJobs.confirmDelete"))) return;
    await ScheduledJobs.delete(id);
    showToast(t("scheduledJobs.toast.deleted"), "success", { clear: true });
    fetchJobs();
  };

  const handleToggle = async (id) => {
    const result = await ScheduledJobs.toggle(id);
    if (result?.error) showToast(result.error, "error", { clear: true });
    fetchJobs();
  };

  // Manually run a job now. The backend may report `skipped` when a run
  // for this job is already in progress.
  const handleTrigger = async (id) => {
    const { success, skipped, error } = await ScheduledJobs.trigger(id);
    if (!success) {
      showToast(error || t("scheduledJobs.toast.triggerFailed"), "error", {
        clear: true,
      });
    } else if (skipped) {
      showToast(
        t(
          "scheduledJobs.toast.triggerSkipped",
          "A run is already in progress for this job"
        ),
        "info",
        { clear: true }
      );
    } else {
      showToast(t("scheduledJobs.toast.triggered"), "success", { clear: true });
    }
    fetchJobs();
  };

  const handleEdit = (job) => {
    setEditingJob(job);
    openModal();
  };

  // Open the modal in "create" mode (no job being edited).
  const handleCreate = () => {
    setEditingJob(null);
    openModal();
  };

  if (loading) {
    return (


        {t("scheduledJobs.loading")}


    );
  }

  return (


        {t("scheduledJobs.table.name")}
        {t("scheduledJobs.table.schedule")}
        {t("scheduledJobs.table.status")}
        {t("scheduledJobs.table.lastRun")}
        {t("scheduledJobs.table.nextRun")}

        {t("scheduledJobs.table.actions")}



      {jobs.length === 0 ? (


          {t("scheduledJobs.emptyTitle")}

          {t("scheduledJobs.emptySubtitle")}


          {t("scheduledJobs.newJob")}

      ) : (

        {jobs.map((job) => (

        ))}

      )}


      {
        closeModal();
        fetchJobs();
      }}
    />


  );
}
+
/**
 * Shared page chrome for the scheduled-jobs screens: sidebar, title,
 * description, optional "new job" button, and the page body as children.
 * @param {boolean} showNewJobButton - Whether to render the create button.
 * @param {function} handleCreate - Click handler for the create button.
 * @param {React.ReactNode} children - Page body content.
 */
function BaseLayout({
  showNewJobButton = false,
  handleCreate = () => {},
  children,
}) {
  const { t } = useTranslation();

  return (





      {t("scheduledJobs.title")}

      {t("scheduledJobs.description")}



      {showNewJobButton && (

        {t("scheduledJobs.newJob")}

      )}


      {children}



  );
}
+
/**
 * Bell button prompting the user to enable browser push notifications.
 * Hidden when the browser lacks service worker / PushManager support or
 * when permission is already granted.
 * NOTE(review): the render guard never checks `typeof Notification` — if a
 * browser exposed PushManager without the Notification API, handleClick's
 * `Notification.permission` read would throw. Confirm whether that
 * combination is reachable in supported browsers.
 */
function NotificationBellButton() {
  const { t } = useTranslation();
  // Seed from the current browser permission; "denied" when the API is absent.
  const [permissionState, setPermissionState] = useState(
    typeof Notification !== "undefined" ? Notification.permission : "denied"
  );

  if (
    !("serviceWorker" in navigator) ||
    !("PushManager" in window) ||
    permissionState === "granted"
  ) {
    return null;
  }

  const handleClick = async () => {
    await subscribeToPushNotifications();
    // Re-read permission so the bell hides once the user grants it.
    setPermissionState(Notification.permission);
  };

  return (
    <>




    >
  );
}
diff --git a/frontend/src/pages/GeneralSettings/ScheduledJobs/utils/cron.js b/frontend/src/pages/GeneralSettings/ScheduledJobs/utils/cron.js
new file mode 100644
index 00000000..8657ccca
--- /dev/null
+++ b/frontend/src/pages/GeneralSettings/ScheduledJobs/utils/cron.js
@@ -0,0 +1,277 @@
+import cronstrue from "cronstrue/i18n";
+import moment from "moment";
+
/**
 * Convert a local hour and minute to UTC using moment.js.
 * NOTE(review): the conversion uses *today's* UTC offset. For recurring
 * schedules this means the stored UTC time drifts by the DST delta after a
 * daylight-saving transition — confirm this trade-off is intended.
 * @param {number} localHour - Hour in local time (0-23).
 * @param {number} localMinute - Minute (0-59).
 * @returns {{ hour: number, minute: number }} Hour and minute in UTC.
 */
export function localTimeToUTC(localHour, localMinute = 0) {
  const local = moment().hour(localHour).minute(localMinute).second(0);
  const utc = local.clone().utc();
  return { hour: utc.hour(), minute: utc.minute() };
}
+
/**
 * Convert a UTC hour and minute to local time using moment.js.
 * NOTE(review): mirrors localTimeToUTC — the conversion applies *today's*
 * UTC offset, so displayed times shift by the DST delta across a
 * daylight-saving transition. Confirm this is acceptable for display.
 * @param {number} utcHour - Hour in UTC (0-23).
 * @param {number} utcMinute - Minute (0-59).
 * @returns {{ hour: number, minute: number }} Hour and minute in local time.
 */
export function utcTimeToLocal(utcHour, utcMinute = 0) {
  const utc = moment.utc().hour(utcHour).minute(utcMinute).second(0);
  const local = utc.clone().local();
  return { hour: local.hour(), minute: local.minute() };
}
+
/**
 * Get the user's timezone abbreviation for display.
 * @returns {string} Timezone abbreviation (e.g., "PST", "EST", "UTC"), or
 * the literal string "local time" when it cannot be determined.
 */
export function getTimezoneAbbreviation() {
  try {
    const parts = new Intl.DateTimeFormat(undefined, {
      timeZoneName: "short",
    }).formatToParts(new Date());
    for (const part of parts) {
      if (part.type === "timeZoneName" && part.value) return part.value;
    }
    return "local time";
  } catch {
    return "local time";
  }
}
+
/**
 * Humanize a cron expression for display in the user's local timezone.
 * The cron is stored in UTC, so it is converted to local time first.
 * @param {string} cron - The cron expression (in UTC).
 * @param {string} locale - The locale.
 * @returns {string} The humanized cron expression with timezone indicator.
 */
export function humanizeCron(cron, locale) {
  if (!cron) return "";
  try {
    const text = cronstrue.toString(convertCronToLocalTime(cron), {
      throwExceptionOnParseError: false,
      locale: toCronstrueLocale(locale),
    });
    return `${text} ${getTimezoneAbbreviation()}`;
  } catch {
    // Unparseable expression — show the raw cron rather than nothing.
    return cron;
  }
}
+
/**
 * Convert a UTC cron expression to local time for display purposes.
 * Only converts the hour field for patterns that have a specific hour.
 * NOTE(review): only minute/hour are converted — when the local time crosses
 * midnight relative to UTC, the day-of-month / day-of-week fields are NOT
 * shifted, so weekly/monthly schedules near midnight can display the wrong
 * day. Confirm whether this edge case matters for the UI.
 * @param {string} cron - The cron expression in UTC.
 * @returns {string} The cron expression adjusted to local time.
 */
function convertCronToLocalTime(cron) {
  if (!cron || typeof cron !== "string") return cron;
  const parts = cron.trim().split(/\s+/);
  if (parts.length !== 5) return cron;

  const [minute, hour, dom, mon, dow] = parts;

  // Only convert if hour is a specific number (not * or */n)
  if (/^\d+$/.test(hour) && /^\d+$/.test(minute)) {
    const local = utcTimeToLocal(parseInt(hour, 10), parseInt(minute, 10));
    return `${local.minute} ${local.hour} ${dom} ${mon} ${dow}`;
  }

  return cron;
}
+
/**
 * Convert a locale code to a cronstrue locale code.
 * i18next uses BCP-47-ish codes like "zh-tw"; cronstrue's i18n bundle
 * uses "zh_TW". Convert "xx-yy" → "xx_YY" so region-tagged locales resolve
 * instead of silently falling back to English.
 * @param {string} locale - The locale code.
 * @returns {string|undefined} The cronstrue locale code (undefined when no locale given).
 */
function toCronstrueLocale(locale) {
  if (!locale) return undefined;
  const segments = locale.split("-");
  const language = segments[0];
  const region = segments[1];
  if (!region) return language;
  return `${language}_${region.toUpperCase()}`;
}
+
/**
 * Default state for the visual builder. Used when an incoming cron expression
 * cannot be reverse-parsed into one of the builder's supported shapes.
 * Hour/minute values are in the user's local time (see parseCronToBuilderState).
 * @type {Object}
 */
export const DEFAULT_BUILDER_STATE = {
  frequency: "day", // one of: minute | hour | day | week | month
  minuteInterval: 1, // "every N minutes" step for the minute frequency
  hourMinuteOffset: 0, // minute offset for hourly schedules
  hour: 9, // local hour for day/week/month schedules
  minute: 0, // local minute for day/week/month schedules
  weekdays: [1], // cron day-of-week numbers (cron convention: 0 = Sunday)
  dayOfMonth: 1, // day of month for monthly schedules
};
+
/**
 * Parse a 5-field cron expression into the visual builder's state shape.
 * Only recognizes the patterns the builder itself can produce; anything else
 * (ranges, step values in non-minute fields, multiple months, etc.) is
 * reported with `wasFallback: true` so the UI can warn the user that the
 * stored expression cannot be edited visually.
 *
 * IMPORTANT: The cron expression is stored in UTC on the backend. This function
 * converts the hour to local time so the builder shows the user's local time.
 *
 * NOTE(review): the "hourly at minute X" branch keeps the minute as-is (no
 * UTC→local conversion). That is correct for whole-hour timezone offsets but
 * wrong in half-hour-offset zones (e.g. UTC+5:30) — it mirrors
 * buildCronFromBuilderState, so round-trips stay consistent. Confirm intent.
 *
 * Returns: { state, wasFallback }
 * @param {string} cron - The cron expression (in UTC).
 * @returns {{ state: Object, wasFallback: boolean }} Builder state (hours in local time).
 */
export function parseCronToBuilderState(cron) {
  const fallback = { state: { ...DEFAULT_BUILDER_STATE }, wasFallback: true };
  if (!cron || typeof cron !== "string") return fallback;
  const parts = cron.trim().split(/\s+/);
  if (parts.length !== 5) return fallback;
  const [m, h, dom, mon, dow] = parts;
  // The builder never produces month-restricted schedules.
  if (mon !== "*") return fallback;

  // Every minute
  if (m === "*" && h === "*" && dom === "*" && dow === "*") {
    return {
      state: {
        ...DEFAULT_BUILDER_STATE,
        frequency: "minute",
        minuteInterval: 1,
      },
      wasFallback: false,
    };
  }

  // Every N minutes
  const stepMatch = m.match(/^\*\/(\d+)$/);
  if (stepMatch && h === "*" && dom === "*" && dow === "*") {
    return {
      state: {
        ...DEFAULT_BUILDER_STATE,
        frequency: "minute",
        minuteInterval: parseInt(stepMatch[1], 10),
      },
      wasFallback: false,
    };
  }

  // Hourly at minute X
  if (/^\d+$/.test(m) && h === "*" && dom === "*" && dow === "*") {
    return {
      state: {
        ...DEFAULT_BUILDER_STATE,
        frequency: "hour",
        hourMinuteOffset: parseInt(m, 10),
      },
      wasFallback: false,
    };
  }

  // Daily at H:M - convert UTC to local
  if (/^\d+$/.test(m) && /^\d+$/.test(h) && dom === "*" && dow === "*") {
    const local = utcTimeToLocal(parseInt(h, 10), parseInt(m, 10));
    return {
      state: {
        ...DEFAULT_BUILDER_STATE,
        frequency: "day",
        hour: local.hour,
        minute: local.minute,
      },
      wasFallback: false,
    };
  }

  // Weekly on one or more weekdays at H:M - convert UTC to local
  if (
    /^\d+$/.test(m) &&
    /^\d+$/.test(h) &&
    dom === "*" &&
    /^\d+(,\d+)*$/.test(dow)
  ) {
    // Normalize 7 → 0 (both mean Sunday in cron) and dedupe.
    const days = [
      ...new Set(
        dow
          .split(",")
          .map((d) => parseInt(d, 10) % 7)
          .filter((d) => d >= 0 && d <= 6)
      ),
    ];
    const local = utcTimeToLocal(parseInt(h, 10), parseInt(m, 10));
    return {
      state: {
        ...DEFAULT_BUILDER_STATE,
        frequency: "week",
        hour: local.hour,
        minute: local.minute,
        weekdays: days.length ? days : [1],
      },
      wasFallback: false,
    };
  }

  // Monthly on day D at H:M - convert UTC to local
  if (/^\d+$/.test(m) && /^\d+$/.test(h) && /^\d+$/.test(dom) && dow === "*") {
    const local = utcTimeToLocal(parseInt(h, 10), parseInt(m, 10));
    return {
      state: {
        ...DEFAULT_BUILDER_STATE,
        frequency: "month",
        hour: local.hour,
        minute: local.minute,
        dayOfMonth: parseInt(dom, 10),
      },
      wasFallback: false,
    };
  }

  return fallback;
}
+
/**
 * Build a 5-field cron expression from the visual builder's state.
 *
 * IMPORTANT: The builder state has hours in local time. This function
 * converts the time to UTC before building the cron expression, since
 * the backend stores and executes cron expressions in UTC.
 *
 * NOTE(review): the "minute" and "hour" frequencies are emitted without UTC
 * conversion — correct for whole-hour offsets, but the hourly minute offset
 * will be wrong in half-hour-offset timezones (e.g. UTC+5:30). This mirrors
 * parseCronToBuilderState; confirm intent.
 *
 * @param {Object} state - The builder state (with time in local timezone).
 * @returns {string} The cron expression (in UTC).
 */
export function buildCronFromBuilderState(state) {
  switch (state.frequency) {
    case "minute": {
      const n = state.minuteInterval || 1;
      return n === 1 ? "* * * * *" : `*/${n} * * * *`;
    }
    case "hour":
      return `${state.hourMinuteOffset} * * * *`;
    case "day": {
      const utc = localTimeToUTC(state.hour, state.minute);
      return `${utc.minute} ${utc.hour} * * *`;
    }
    case "week": {
      const utc = localTimeToUTC(state.hour, state.minute);
      // Sorted, comma-joined weekday list; default to Monday when empty.
      const days = (state.weekdays?.length ? state.weekdays : [1])
        .slice()
        .sort((a, b) => a - b)
        .join(",");
      return `${utc.minute} ${utc.hour} * * ${days}`;
    }
    case "month": {
      const utc = localTimeToUTC(state.hour, state.minute);
      return `${utc.minute} ${utc.hour} ${state.dayOfMonth} * *`;
    }
    default: {
      // Unknown frequency — fall back to "daily at 9:00 local".
      const utc = localTimeToUTC(9, 0);
      return `${utc.minute} ${utc.hour} * * *`;
    }
  }
}
diff --git a/frontend/src/utils/clipboard.js b/frontend/src/utils/clipboard.js
new file mode 100644
index 00000000..48f3ab90
--- /dev/null
+++ b/frontend/src/utils/clipboard.js
@@ -0,0 +1,25 @@
+import renderMarkdown from "./chat/markdown";
+
/**
 * Copies the given markdown string as rich text to the clipboard.
 * Writes both an HTML flavor (rendered markdown) and a plain-text flavor
 * so paste targets can pick the richest format they support.
 * Errors (e.g. clipboard permission denied, insecure context) are logged
 * and swallowed — the returned promise always resolves.
 * @param {string} markdownString - The markdown string to copy.
 * @returns {Promise<void>}
 */
export async function copyMarkdownAsRichText(markdownString) {
  try {
    const htmlContent = renderMarkdown(markdownString);
    const blobHTML = new Blob([htmlContent], { type: "text/html" });
    const blobText = new Blob([markdownString], { type: "text/plain" });

    const data = [
      new ClipboardItem({
        "text/html": blobHTML,
        "text/plain": blobText,
      }),
    ];

    await navigator.clipboard.write(data);
  } catch (error) {
    console.error("Failed to copy markdown as rich text: ", error);
  }
}
diff --git a/frontend/src/utils/numbers.js b/frontend/src/utils/numbers.js
index 0b4da3cb..b3c13222 100644
--- a/frontend/src/utils/numbers.js
+++ b/frontend/src/utils/numbers.js
@@ -58,3 +58,32 @@ export function milliToHms(milli = 0) {
var sDisplay = s >= 0.01 ? s.toFixed(2) + "s" : "";
return hDisplay + mDisplay + sDisplay;
}
+
/**
 * Format a duration in SECONDS to a human readable string.
 * (The previous doc said "milliseconds", but the thresholds — and the
 * caller, which passes `moment.duration(...).asSeconds()` — treat the
 * input as seconds.)
 * - Less than 1 second - show milliseconds (50ms)
 * - Less than 60 seconds - show seconds (5.0s)
 * - Less than 1 hour - show min:sec (1m 30s)
 * - 1 hour or more - show h:min:sec (1h 30m 5s)
 * Negative or non-finite input returns an empty string.
 * @param {number} duration - duration in seconds
 * @returns {string}
 */
export function formatDuration(duration) {
  try {
    // Guard NaN/Infinity, which would otherwise render "NaNh NaNm NaNs".
    if (!Number.isFinite(duration) || duration < 0) return "";
    if (duration < 1) return `${(duration * 1000).toFixed(0)}ms`;
    if (duration < 60) return `${duration.toFixed(1)}s`;
    if (duration < 3600) {
      const minutes = Math.floor(duration / 60);
      const seconds = Math.floor(duration % 60);
      return `${minutes}m ${seconds}s`;
    }

    const hours = Math.floor(duration / 3600);
    const minutes = Math.floor((duration % 3600) / 60);
    const seconds = Math.floor(duration % 60);
    return `${hours}h ${minutes}m ${seconds}s`;
  } catch {
    return "";
  }
}
diff --git a/frontend/src/utils/paths.js b/frontend/src/utils/paths.js
index 3111efcf..acde3950 100644
--- a/frontend/src/utils/paths.js
+++ b/frontend/src/utils/paths.js
@@ -173,6 +173,15 @@ export default {
telegram: () => {
return `/settings/external-connections/telegram`;
},
+ scheduledJobs: () => {
+ return `/settings/scheduled-jobs`;
+ },
+ scheduledJobRuns: (jobId) => {
+ return `/settings/scheduled-jobs/${jobId}/runs`;
+ },
+ scheduledJobRunDetail: (jobId, runId) => {
+ return `/settings/scheduled-jobs/${jobId}/runs/${runId}`;
+ },
},
agents: {
builder: () => {
diff --git a/frontend/yarn.lock b/frontend/yarn.lock
index b932057b..d6738b43 100644
--- a/frontend/yarn.lock
+++ b/frontend/yarn.lock
@@ -1711,6 +1711,11 @@ cosmiconfig@^7.0.0:
path-type "^4.0.0"
yaml "^1.10.0"
+cronstrue@^2.50.0:
+ version "2.61.0"
+ resolved "https://registry.yarnpkg.com/cronstrue/-/cronstrue-2.61.0.tgz#97c79c77045c052afb44cb9f5f8eaf54398094f2"
+ integrity sha512-ootN5bvXbIQI9rW94+QsXN5eROtXWwew6NkdGxIRpS/UFWRggL0G5Al7a9GTBFEsuvVhJ2K3CntIIVt7L2ILhA==
+
cross-env@^7.0.3:
version "7.0.3"
resolved "https://registry.yarnpkg.com/cross-env/-/cross-env-7.0.3.tgz#865264b29677dc015ba8418918965dd232fc54cf"
diff --git a/server/.env.example b/server/.env.example
index 0d151684..70fba3cd 100644
--- a/server/.env.example
+++ b/server/.env.example
@@ -458,6 +458,14 @@ TTS_PROVIDER="native"
# AGENT_SKILL_RERANKER_ENABLED="true"
# AGENT_SKILL_RERANKER_TOP_N=15 # (optional) Number of top tools to keep after reranking (default: 15)
+# (optional) Maximum number of scheduled jobs that can run concurrently.
+# Default is 1. Increase if using a cloud LLM provider with high rate limits.
+# SCHEDULED_JOB_MAX_CONCURRENT=1
+
+# (optional) Maximum time in milliseconds a scheduled job can run before being terminated.
+# Default is 5 minutes (300000ms).
+# SCHEDULED_JOB_TIMEOUT_MS=300000
+
# (optional) Comma-separated list of skills that are auto-approved.
# This will allow the skill to be invoked without user interaction.
-# AGENT_AUTO_APPROVED_SKILLS=create-pdf-file,create-word-file
\ No newline at end of file
+# AGENT_AUTO_APPROVED_SKILLS=create-pdf-file,create-word-file
diff --git a/server/endpoints/agentFileServer.js b/server/endpoints/agentFileServer.js
index daaf5002..98e35719 100644
--- a/server/endpoints/agentFileServer.js
+++ b/server/endpoints/agentFileServer.js
@@ -10,7 +10,9 @@ const {
} = require("../utils/middleware/multiUserProtected");
const { WorkspaceChats } = require("../models/workspaceChats");
const { Workspace } = require("../models/workspace");
+const { ScheduledJobRun } = require("../models/scheduledJobRun");
const createFilesLib = require("../utils/agents/aibitat/plugins/create-files/lib");
+const { Telemetry } = require("../models/telemetry");
/**
* Endpoints for serving agent-generated files (PPTX, etc.) with authentication
@@ -42,14 +44,13 @@ function agentFileServerEndpoints(app) {
.json({ error: "Invalid filename format" });
}
- // Find a chat record that references this file and that the user can access
- const validChat = await findValidChatForFile(
- filename,
+ // Find a chat or scheduled job run that references this file
+ const fileSource = await findFileSource(filename, {
user,
- multiUserMode(response)
- );
+ isMultiUser: multiUserMode(response),
+ });
- if (!validChat) {
+ if (!fileSource) {
return response.status(404).json({
error: "File not found or access denied",
});
@@ -66,7 +67,7 @@ function agentFileServerEndpoints(app) {
// Get mime type and set headers for download
const mimeType = createFilesLib.getMimeType(`.${parsed.extension}`);
const safeFilename = createFilesLib.sanitizeFilenameForHeader(
- validChat.displayFilename || filename
+ fileSource.displayFilename || filename
);
response.setHeader("Content-Type", mimeType);
response.setHeader(
@@ -74,7 +75,11 @@ function agentFileServerEndpoints(app) {
`attachment; filename="${safeFilename}"`
);
response.setHeader("Content-Length", fileData.buffer.length);
- return response.send(fileData.buffer);
+ response.send(fileData.buffer);
+ Telemetry.sendTelemetry("agent_generated_file_downloaded", {
+ type: mimeType,
+ }).catch(() => {});
+ return;
} catch (error) {
console.error("[agentFileServer] Download error:", error.message);
return response.status(500).json({ error: "Failed to download file" });
@@ -84,59 +89,90 @@ function agentFileServerEndpoints(app) {
}
/**
- * Finds a valid chat record that references the given storage filename
- * and that the user has access to.
- * @param {string} storageFilename - The storage filename to search for
- * @param {object|null} user - The user object (null in single-user mode)
- * @param {boolean} isMultiUser - Whether multi-user mode is enabled
- * @returns {Promise<{workspaceId: number, displayFilename: string}|null>}
+ * Locates the source record (a workspace chat or a scheduled job run) that
+ * references the given storage filename, and confirms the requester has access.
+ *
+ * Search order:
+ * 1. Workspace chats the user can access (per multi-user permissions).
+ * 2. Scheduled job runs — single-user only, so no per-user access check.
+ *
+ * @param {string} storageFilename
+ * @param {{ user: object|null, isMultiUser: boolean }} ctx
+ * @returns {Promise<{workspaceId: number|null, displayFilename: string}|null>}
*/
-async function findValidChatForFile(storageFilename, user, isMultiUser) {
+async function findFileSource(storageFilename, { user, isMultiUser }) {
try {
- // Get all workspaces the user has access to.
- // In single-user mode, all workspaces are accessible.
- // In multi-user mode, only workspaces assigned to the user are accessible.
- let workspaceIds;
- if (isMultiUser && user) {
- const workspaces = await Workspace.whereWithUser(user);
- workspaceIds = workspaces.map((w) => w.id);
- } else {
- const workspaces = await Workspace.where();
- workspaceIds = workspaces.map((w) => w.id);
- }
-
- if (workspaceIds.length === 0) return null;
-
- // Use database-level filtering to only fetch chats that contain the filename
- // This avoids loading all chats into memory
- const chats = await WorkspaceChats.where({
- workspaceId: { in: workspaceIds },
- include: true,
- response: { contains: storageFilename },
+ const fromChat = await findInWorkspaceChats(storageFilename, {
+ user,
+ isMultiUser,
});
+ if (fromChat) return fromChat;
- for (const chat of chats) {
- try {
- const response = safeJsonParse(chat.response, { outputs: [] });
- const output = response.outputs.find(
- (o) => o?.payload?.storageFilename === storageFilename
- );
- if (!output) continue;
- return {
- workspaceId: chat.workspaceId,
- displayFilename:
- output.payload.filename || output.payload.displayFilename,
- };
- } catch {
- continue;
- }
- }
+ if (isMultiUser) return null;
- return null;
+ return await findInScheduledJobRuns(storageFilename);
} catch (error) {
- console.error("[findValidChatForFile] Error:", error.message);
+ console.error("[findFileSource] Error:", error.message);
return null;
}
}
/**
 * Search workspace chats the user has access to for a chat whose response
 * references `storageFilename`. In single-user mode all workspaces are
 * accessible; in multi-user mode only workspaces assigned to the user are.
 * @param {string} storageFilename - The storage filename to look for.
 * @param {{ user: object|null, isMultiUser: boolean }} ctx - Access context.
 * @returns {Promise<{workspaceId: number, displayFilename: string}|null>}
 */
async function findInWorkspaceChats(storageFilename, { user, isMultiUser }) {
  const workspaces =
    isMultiUser && user
      ? await Workspace.whereWithUser(user)
      : await Workspace.where();

  const workspaceIds = workspaces.map((w) => w.id);
  if (workspaceIds.length === 0) return null;

  // DB-level filter so we don't load every chat into memory.
  const chats = await WorkspaceChats.where({
    workspaceId: { in: workspaceIds },
    include: true,
    response: { contains: storageFilename },
  });

  // The substring filter above can over-match; confirm an output payload
  // actually carries this exact storageFilename.
  for (const chat of chats) {
    const { outputs = [] } = safeJsonParse(chat.response, { outputs: [] });
    const output = outputs.find(
      (o) => o?.payload?.storageFilename === storageFilename
    );
    if (!output) continue;
    return {
      workspaceId: chat.workspaceId,
      displayFilename:
        output.payload.filename || output.payload.displayFilename,
    };
  }

  return null;
}
+
/**
 * Search completed scheduled job runs for a run whose result references
 * `storageFilename`. Scheduled jobs are single-user only, so this performs
 * no per-user access check (the caller already gates on single-user mode).
 * @param {string} storageFilename - The storage filename to look for.
 * @returns {Promise<{workspaceId: null, displayFilename: string}|null>}
 */
async function findInScheduledJobRuns(storageFilename) {
  const runs = await ScheduledJobRun.where({
    status: "completed",
    result: { contains: storageFilename },
  });

  // Confirm an output payload actually carries this exact storageFilename.
  for (const run of runs) {
    const { outputs = [] } = safeJsonParse(run.result, { outputs: [] });
    const output = outputs.find(
      (o) => o?.payload?.storageFilename === storageFilename
    );
    if (!output) continue;
    return {
      workspaceId: null,
      displayFilename: output.payload.filename || storageFilename,
    };
  }

  return null;
}
+
module.exports = { agentFileServerEndpoints };
diff --git a/server/endpoints/scheduledJobs.js b/server/endpoints/scheduledJobs.js
new file mode 100644
index 00000000..bbe93a83
--- /dev/null
+++ b/server/endpoints/scheduledJobs.js
@@ -0,0 +1,376 @@
+const { ScheduledJob } = require("../models/scheduledJob");
+const { ScheduledJobRun } = require("../models/scheduledJobRun");
+const { validatedRequest } = require("../utils/middleware/validatedRequest");
+const { isSingleUserMode } = require("../utils/middleware/multiUserProtected");
+const { reqBody, safeJsonParse } = require("../utils/http");
+const { BackgroundService } = require("../utils/BackgroundWorkers");
+const { Telemetry } = require("../models/telemetry");
+
+// BackgroundService is a singleton, so `new BackgroundService()` anywhere in
+// the codebase returns the same instance that `server/index.js` booted. We
+// grab that reference once and reuse it across handlers.
+const backgroundService = new BackgroundService();
+
+function scheduledJobEndpoints(app) {
+ if (!app) return;
+
+ // List available tools for job configuration
+  app.get(
+    "/scheduled-jobs/available-tools",
+    [validatedRequest, isSingleUserMode],
+    async (_request, response) => {
+      try {
+        const tools = await ScheduledJob.availableTools();
+        return response.status(200).json({ tools });
+      } catch (e) {
+        console.error(e.message, e);
+        return response.status(500).json({ tools: [] }); // sendStatus() would send before .json() runs
+      }
+    }
+  );
+
+ // Get a single run detail
+ app.get(
+ "/scheduled-jobs/runs/:runId",
+ [validatedRequest, isSingleUserMode],
+ async (request, response) => {
+ try {
+ const run = await ScheduledJobRun.get({
+ id: Number(request.params.runId),
+ });
+ if (!run) {
+ return response
+ .status(404)
+ .json({ run: null, error: "Run not found" });
+ }
+
+ const job = await ScheduledJob.get({ id: run.jobId });
+ return response.status(200).json({
+ run: {
+ ...run,
+ result: safeJsonParse(run.result, null),
+ },
+ job,
+ });
+ } catch (e) {
+ console.error(e.message, e);
+ response.sendStatus(500);
+ }
+ }
+ );
+
+ // Mark a run as read or continue in thread, or kill a running or queued job run
+  app.post(
+    "/scheduled-jobs/runs/:runId/:action",
+    [validatedRequest, isSingleUserMode],
+    async (request, response) => {
+      try {
+        const { action } = request.params;
+
+        if (!["read", "continue", "kill"].includes(action))
+          return response.status(400).json({ error: "Invalid action" });
+
+        if (action === "read") {
+          await ScheduledJobRun.markRead(Number(request.params.runId));
+          return response.status(200).json({ success: true });
+        }
+
+        if (action === "continue") {
+          const { workspace, thread, error } =
+            await ScheduledJobRun.continueInThread(
+              Number(request.params.runId)
+            );
+          if (error) return response.status(500).json({ error });
+
+          return response.status(200).json({
+            workspaceSlug: workspace.slug,
+            threadSlug: thread.slug,
+          });
+        }
+
+        if (action === "kill") {
+          const run = await ScheduledJobRun.get({
+            id: Number(request.params.runId),
+          });
+          if (!run)
+            return response.status(404).json({ error: "Run not found" });
+          if (!["queued", "running"].includes(run.status)) {
+            return response.status(400).json({
+              error: "Only running or queued jobs can be killed",
+            });
+          }
+
+          const killed = backgroundService.killRun(run.jobId, run.id);
+          if (!killed) await ScheduledJobRun.kill(run.id);
+          return response.status(200).json({ success: true });
+        }
+      } catch (e) {
+        console.error(e.message, e); response.sendStatus(500);
+      }
+    }
+  );
+
+ // List all scheduled jobs
+ app.get(
+ "/scheduled-jobs",
+ [validatedRequest, isSingleUserMode],
+ async (_request, response) => {
+ try {
+ const jobs = await ScheduledJob.where({}, null, null, {
+ runs: {
+ take: 1,
+ orderBy: { startedAt: "desc" },
+ },
+ });
+
+ const jobsWithStatus = jobs.map(({ runs, ...job }) => ({
+ ...job,
+ latestRun: runs[0] || null,
+ }));
+
+ return response.status(200).json({ jobs: jobsWithStatus });
+ } catch (e) {
+ console.error(e.message, e);
+ response.sendStatus(500);
+ }
+ }
+ );
+
+ // Create a new scheduled job
+  app.post(
+    "/scheduled-jobs/new",
+    [validatedRequest, isSingleUserMode],
+    async (request, response) => {
+      try {
+        const { name, prompt, tools, schedule } = reqBody(request);
+        let errorMessage = null;
+
+        if (!name?.trim()) {
+          errorMessage = "Name is required";
+        } else if (!prompt?.trim()) {
+          errorMessage = "Prompt is required";
+        } else if (!schedule?.trim()) {
+          errorMessage = "Schedule is required";
+        } else if (!ScheduledJob.isValidCron(schedule)) {
+          errorMessage = "Invalid cron expression";
+        } else if (tools != null && !Array.isArray(tools)) { // reject any non-array, not only length-bearing values
+          errorMessage = "Tools must be an array";
+        }
+        if (errorMessage)
+          return response.status(400).json({
+            job: null,
+            error: errorMessage,
+          });
+
+        // New jobs default to enabled, so creating one always counts as an
+        // activation. Reject if it would push us past the configured cap.
+        const activation = await ScheduledJob.canActivate();
+        if (!activation.allowed) {
+          return response.status(400).json({
+            job: null,
+            error: `Cannot create: maximum of ${activation.limit} active scheduled jobs reached. Disable another job first.`,
+          });
+        }
+
+        const { job, error } = await ScheduledJob.create({
+          name: name.trim(),
+          prompt: prompt.trim(),
+          tools: tools || null,
+          schedule: schedule.trim(),
+        });
+
+        if (error) {
+          return response.status(400).json({ job: null, error });
+        }
+
+        backgroundService.addScheduledJob(job);
+        Telemetry.sendTelemetry("scheduled_job_created").catch(() => {});
+        return response.status(201).json({ job, error: null });
+      } catch (e) {
+        console.error(e.message, e);
+        response.sendStatus(500);
+      }
+    }
+  );
+
+ // Get a single scheduled job
+ app.get(
+ "/scheduled-jobs/:id",
+ [validatedRequest, isSingleUserMode],
+ async (request, response) => {
+ try {
+ const job = await ScheduledJob.get({
+ id: Number(request.params.id),
+ });
+ if (!job) {
+ return response
+ .status(404)
+ .json({ job: null, error: "Job not found" });
+ }
+ return response.status(200).json({ job });
+ } catch (e) {
+ console.error(e.message, e);
+ response.sendStatus(500);
+ }
+ }
+ );
+
+ // Update a scheduled job
+ app.put(
+ "/scheduled-jobs/:id",
+ [validatedRequest, isSingleUserMode],
+ async (request, response) => {
+ try {
+ const { name, prompt, tools, schedule, enabled } = reqBody(request);
+ const updates = {};
+
+ if (name !== undefined) updates.name = String(name).trim();
+ if (prompt !== undefined) updates.prompt = String(prompt).trim();
+ if (tools !== undefined) updates.tools = tools;
+ if (enabled !== undefined) updates.enabled = Boolean(enabled);
+ if (schedule !== undefined) {
+ if (!ScheduledJob.isValidCron(schedule)) {
+ return response
+ .status(400)
+ .json({ job: null, error: "Invalid cron expression" });
+ }
+ updates.schedule = String(schedule).trim();
+ }
+
+ // If this update would activate the job, enforce the active-jobs cap.
+ // We pass excludeId so a re-save of an already-enabled job is not
+ // double-counted against the limit.
+ if (updates.enabled === true) {
+ const activation = await ScheduledJob.canActivate({
+ excludeId: Number(request.params.id),
+ });
+ if (!activation.allowed) {
+ return response.status(400).json({
+ job: null,
+ error: `Cannot enable: maximum of ${activation.limit} active scheduled jobs reached. Disable another job first.`,
+ });
+ }
+ }
+
+ const { job, error } = await ScheduledJob.update(
+ Number(request.params.id),
+ updates
+ );
+
+ if (error) {
+ return response.status(400).json({ job: null, error });
+ }
+
+ await backgroundService.syncScheduledJob(job.id);
+
+ return response.status(200).json({ job, error: null });
+ } catch (e) {
+ console.error(e.message, e);
+ response.sendStatus(500);
+ }
+ }
+ );
+
+ // Delete a scheduled job
+ app.delete(
+ "/scheduled-jobs/:id",
+ [validatedRequest, isSingleUserMode],
+ async (request, response) => {
+ try {
+ backgroundService.removeScheduledJob(Number(request.params.id));
+
+ const success = await ScheduledJob.delete(Number(request.params.id));
+ return response.status(200).json({ success });
+ } catch (e) {
+ console.error(e.message, e);
+ response.sendStatus(500);
+ }
+ }
+ );
+
+ // Toggle enable/disable
+ app.post(
+ "/scheduled-jobs/:id/toggle",
+ [validatedRequest, isSingleUserMode],
+ async (request, response) => {
+ try {
+ const job = await ScheduledJob.get({
+ id: Number(request.params.id),
+ });
+ if (!job) {
+ return response.status(404).json({ error: "Job not found" });
+ }
+
+ // Toggling a disabled job to enabled is an activation — enforce the cap.
+ // Disabling never needs a check.
+ if (!job.enabled) {
+ const activation = await ScheduledJob.canActivate({
+ excludeId: job.id,
+ });
+ if (!activation.allowed) {
+ return response.status(400).json({
+ job: null,
+ error: `Cannot enable: maximum of ${activation.limit} active scheduled jobs reached. Disable another job first.`,
+ });
+ }
+ }
+
+ const { job: updated } = await ScheduledJob.update(job.id, {
+ enabled: !job.enabled,
+ });
+
+ await backgroundService.syncScheduledJob(job.id);
+
+ return response.status(200).json({ job: updated });
+ } catch (e) {
+ console.error(e.message, e);
+ response.sendStatus(500);
+ }
+ }
+ );
+
+ // Manual trigger — runs the job immediately
+ app.post(
+ "/scheduled-jobs/:id/trigger",
+ [validatedRequest, isSingleUserMode],
+ async (request, response) => {
+ try {
+ const job = await ScheduledJob.get({
+ id: Number(request.params.id),
+ });
+ if (!job) {
+ return response.status(404).json({ error: "Job not found" });
+ }
+
+ const run = await backgroundService.enqueueScheduledJob(job.id);
+ return response
+ .status(200)
+ .json({ success: true, skipped: !run, error: null });
+ } catch (e) {
+ console.error(e.message, e);
+ response.sendStatus(500);
+ }
+ }
+ );
+
+ // List runs for a job
+ app.get(
+ "/scheduled-jobs/:id/runs",
+ [validatedRequest, isSingleUserMode],
+ async (request, response) => {
+ try {
+ const runs = await ScheduledJobRun.where(
+ { jobId: Number(request.params.id) },
+ 50,
+ { startedAt: "desc" }
+ );
+ return response.status(200).json({ runs });
+ } catch (e) {
+ console.error(e.message, e);
+ response.sendStatus(500);
+ }
+ }
+ );
+}
+
+module.exports = { scheduledJobEndpoints };
diff --git a/server/index.js b/server/index.js
index 9cfdce0d..726a0e5f 100644
--- a/server/index.js
+++ b/server/index.js
@@ -35,6 +35,7 @@ const { mcpServersEndpoints } = require("./endpoints/mcpServers");
const { mobileEndpoints } = require("./endpoints/mobile");
const { webPushEndpoints } = require("./endpoints/webPush");
const { telegramEndpoints } = require("./endpoints/telegram");
+const { scheduledJobEndpoints } = require("./endpoints/scheduledJobs");
const {
outlookAgentEndpoints,
} = require("./endpoints/utils/outlookAgentUtils");
@@ -95,6 +96,7 @@ mcpServersEndpoints(apiRouter);
mobileEndpoints(apiRouter);
webPushEndpoints(apiRouter);
telegramEndpoints(apiRouter);
+scheduledJobEndpoints(apiRouter);
outlookAgentEndpoints(apiRouter);
googleAgentSkillEndpoints(apiRouter);
// Externally facing embedder endpoints
diff --git a/server/jobs/cleanup-generated-files.js b/server/jobs/cleanup-generated-files.js
index c8c6ee46..64aa7d4d 100644
--- a/server/jobs/cleanup-generated-files.js
+++ b/server/jobs/cleanup-generated-files.js
@@ -1,5 +1,6 @@
const { log, conclude } = require("./helpers/index.js");
const { WorkspaceChats } = require("../models/workspaceChats.js");
+const { ScheduledJobRun } = require("../models/scheduledJobRun.js");
const createFilesLib = require("../utils/agents/aibitat/plugins/create-files/lib.js");
const { safeJsonParse } = require("../utils/http/index.js");
@@ -66,8 +67,16 @@ const { safeJsonParse } = require("../utils/http/index.js");
* @returns {Promise>}
*/
async function getActiveStorageFilenames(batchSize = 50) {
- const storageFilenames = new Set();
+ const [workspaceChats, scheduledJobRuns] = await Promise.all([
+ workspaceChatGeneratedFilenames(batchSize),
+ scheduledJobRunGeneratedFilenames(batchSize),
+ ]);
+ return new Set([...workspaceChats, ...scheduledJobRuns]);
+}
+
+async function workspaceChatGeneratedFilenames(batchSize = 50) {
+ const storageFilenames = new Set();
try {
let offset = 0;
let hasMore = true;
@@ -89,8 +98,9 @@ async function getActiveStorageFilenames(batchSize = 50) {
try {
const response = safeJsonParse(chat.response, { outputs: [] });
for (const output of response.outputs) {
- if (output?.payload?.storageFilename)
- storageFilenames.add(output.payload.storageFilename);
+ if (!output || !output.payload || !output.payload.storageFilename)
+ continue;
+ storageFilenames.add(output.payload.storageFilename);
}
} catch {
continue;
@@ -101,7 +111,49 @@ async function getActiveStorageFilenames(batchSize = 50) {
hasMore = chats.length === batchSize;
}
} catch (error) {
- console.error("[getActiveStorageFilenames] Error:", error.message);
+ console.error("[workspaceChatGeneratedFilenames] Error:", error.message);
+ }
+
+ return storageFilenames;
+}
+
+async function scheduledJobRunGeneratedFilenames(batchSize = 50) {
+ const storageFilenames = new Set();
+ try {
+ let offset = 0;
+ let hasMore = true;
+
+ while (hasMore) {
+ const runs = await ScheduledJobRun.where(
+ { status: "completed" },
+ batchSize,
+ { id: "asc" },
+ {},
+ offset
+ );
+
+ if (runs.length === 0) {
+ hasMore = false;
+ break;
+ }
+
+ for (const run of runs) {
+ try {
+ const response = safeJsonParse(run.result, { outputs: [] });
+ for (const output of response.outputs) {
+ if (!output?.payload?.storageFilename) continue;
+ storageFilenames.add(output.payload.storageFilename);
+ }
+ } catch {
+ continue;
+ }
+ }
+
+ offset += runs.length;
+ hasMore = runs.length === batchSize;
+ }
+ } catch (error) {
+ console.error("[scheduledJobRunGeneratedFilenames] Error:", error.message);
}
return storageFilenames;
diff --git a/server/jobs/helpers/index.js b/server/jobs/helpers/index.js
index e4449d06..4de9ef9a 100644
--- a/server/jobs/helpers/index.js
+++ b/server/jobs/helpers/index.js
@@ -27,8 +27,28 @@ function updateSourceDocument(docPath = null, jsonContent = {}) {
});
}
+/**
+ * Strips thought/thinking tags from text (e.g., ... )
+ * Useful for cleaning LLM responses before sending notifications.
+ * @param {string} text - The text to strip thoughts from.
+ * @returns {string} - The text with thought tags and their content removed.
+ */
+const THOUGHT_KEYWORDS = ["thought", "thinking", "think", "thought_chain"];
+const THOUGHT_REGEX_COMPLETE = new RegExp(
+ THOUGHT_KEYWORDS.map(
+ (keyword) =>
+ `<${keyword}\\s*(?:[^>]*?)?\\s*>[\\s\\S]*?<\\/${keyword}\\s*(?:[^>]*?)?>`
+ ).join("|"),
+ "gi"
+);
+
+function stripThinkingFromText(text = "") {
+ return text.replace(THOUGHT_REGEX_COMPLETE, "").trim();
+}
+
module.exports = {
log,
conclude,
updateSourceDocument,
+ stripThinkingFromText,
};
diff --git a/server/jobs/helpers/scheduled-job-helper.js b/server/jobs/helpers/scheduled-job-helper.js
new file mode 100644
index 00000000..41d925c3
--- /dev/null
+++ b/server/jobs/helpers/scheduled-job-helper.js
@@ -0,0 +1,111 @@
+const { safeJsonParse } = require("../../utils/http");
+const { stripThinkingFromText } = require("./index.js");
+
+/**
+ * Maximum time in milliseconds a scheduled job can run before being terminated.
+ * @type {number}
+ */
+const SCHEDULED_JOB_TIMEOUT_MS =
+ Number(process.env.SCHEDULED_JOB_TIMEOUT_MS) || 5 * 60 * 1000;
+
+/**
+ * Create a callback function for the agent action.
+ * This is for intercepting messages from the agent and storing the results or thoughts, executions, traces, etc.
+ * @returns {object}
+ */
+function agentActionCb() {
+ const thoughts = [];
+ const toolCalls = [];
+
+ // Use a container object so the reference is preserved when values are updated
+ const state = {
+ textResponse: "",
+ metrics: {},
+ };
+
+ const handler = {
+ send(jsonStr) {
+ const data = safeJsonParse(jsonStr, null);
+ if (!data) return;
+
+ if (data.type === "statusResponse" && data.content) {
+ thoughts.push(data.content);
+ return;
+ }
+
+ if (data.type === "reportStreamEvent" && data.content) {
+ const inner = data.content;
+ if (inner.type === "textResponseChunk" && inner.content)
+ state.textResponse += inner.content;
+ if (inner.type === "fullTextResponse" && inner.content)
+ state.textResponse = inner.content;
+ if (inner.type === "usageMetrics" && inner.metrics)
+ state.metrics = inner.metrics;
+ return;
+ }
+
+ // Final message from agent (onMessage event)
+ if (data.content && data.from && data.from !== "USER") {
+ if (!state.textResponse) state.textResponse = data.content;
+ }
+ },
+ close() {},
+ };
+
+ return {
+ /** Handler to intercept messages from the agent */
+ handler,
+ /** Thoughts from the agent @type {string[]} */
+ thoughts,
+ /** Tool calls from the agent @type {object[]} */
+ toolCalls,
+ /** State container for textResponse and metrics - access via state.textResponse and state.metrics */
+ state,
+ };
+}
+
+function truncateNotificationBody(bodyText = "") {
+  if (!bodyText) return "Job completed";
+  if (bodyText.length <= 100) return bodyText;
+  return `${bodyText.slice(0, 100)}...`; // length > 100 is guaranteed here; no ternary needed
+}
+
+/**
+ * Send a web push notification to the primary user.
+ * @param {object} job - The scheduled job object.
+ * @param {string} runId - The ID of the scheduled job run.
+ * @param {string} textResponse - The text response from the agent.
+ * @param {function} logFn - The function to log the error.
+ * @returns {Promise}
+ */
+async function sendWebPushNotification(job, runId, textResponse, logFn) {
+ try {
+ const {
+ pushNotificationService,
+ } = require("../../utils/PushNotifications/index.js");
+ await pushNotificationService.loadSubscriptions();
+
+ // Strip thinking tags from the text response and then truncate to 100 characters
+ // if the response is longer than 100 characters.
+ let notificationBody = stripThinkingFromText(textResponse);
+ notificationBody = truncateNotificationBody(notificationBody);
+ await pushNotificationService.sendNotification({
+ to: "primary",
+ payload: {
+ title: `${job.name} completed`,
+ body: notificationBody,
+ data: {
+ onClickUrl: `/settings/scheduled-jobs/${job.id}/runs/${runId}`,
+ },
+ },
+ });
+ } catch (pushError) {
+ logFn(`Failed to send push notification: ${pushError.message}`);
+ }
+}
+
+module.exports = {
+ sendWebPushNotification,
+ SCHEDULED_JOB_TIMEOUT_MS,
+ agentActionCb,
+};
diff --git a/server/jobs/run-scheduled-job.js b/server/jobs/run-scheduled-job.js
new file mode 100644
index 00000000..0ddcdab5
--- /dev/null
+++ b/server/jobs/run-scheduled-job.js
@@ -0,0 +1,157 @@
+const { log, conclude } = require("./helpers/index.js");
+const { v4: uuidv4 } = require("uuid");
+const { safeJsonParse } = require("../utils/http");
+const {
+ agentActionCb,
+ SCHEDULED_JOB_TIMEOUT_MS,
+ sendWebPushNotification,
+} = require("./helpers/scheduled-job-helper.js");
+const { ScheduledJob } = require("../models/scheduledJob.js");
+const { ScheduledJobRun } = require("../models/scheduledJobRun.js");
+
+/** Status of the scheduled job run @type {'success' | 'failed' | 'timed_out' | 'not_found' | 'killed' | undefined} */
+let status;
+let runId = null;
+
+process.on("SIGTERM", async () => {
+ status = "killed";
+ log("Received SIGTERM, marking job as killed by user");
+ if (runId) await ScheduledJobRun.kill(runId);
+ conclude();
+});
+
+process.on("message", async (payload) => {
+ const { jobId, runId: payloadRunId } = payload;
+ runId = payloadRunId;
+ let timeoutId = null;
+ let errorMessage = null;
+
+ // The run row was created by the parent process (BackgroundService) in
+ // status `queued` (it may have been waiting in p-queue). The worker
+ // transitions it to `running` here so `startedAt` reflects actual execution
+ // start, then runs to a terminal state. If the job has been deleted between
+ // enqueue and now, fail the row.
+ try {
+ if (!jobId || !runId) return;
+
+ const job = await ScheduledJob.get({ id: Number(jobId) });
+ if (!job) {
+ log(`Scheduled job ${jobId} not found`);
+ status = "not_found";
+ return;
+ }
+
+ // Transition queued -> running. If this returns false, the row was
+ // already moved to a terminal state (e.g. parent failed it because it
+ // thought the worker had died). Bail out without touching it further.
+ const transitioned = await ScheduledJobRun.markRunning(runId);
+ if (!transitioned) {
+ log(
+ `Scheduled job "${job.name}" (id=${job.id}) is no longer queued, skipping`
+ );
+ return;
+ }
+
+ log(
+ `Starting scheduled job: "${job.name}" (id=${job.id}) with timeout ${SCHEDULED_JOB_TIMEOUT_MS}ms`
+ );
+ await ScheduledJob.updateRunTimestamps(job.id);
+ const { handler, thoughts, toolCalls, state } = agentActionCb();
+
+ const { EphemeralAgentHandler } = require("../utils/agents/ephemeral.js");
+ const agentHandler = await new EphemeralAgentHandler({
+ uuid: uuidv4(),
+ prompt: job.prompt,
+ }).init();
+
+ // Tool overrides control which tools the agent can use:
+ // - Array with items: only those specific tools are loaded
+ // - Empty array: no tools are loaded
+ const toolOverrides = safeJsonParse(job.tools, []);
+ await agentHandler.createAIbitat({
+ handler,
+ toolOverrides,
+ });
+
+ // Auto-approve all tool invocations when running a scheduled job
+ agentHandler.aibitat.requestToolApproval = async () => {
+ log("Tool approval requested for scheduled job, auto-approving");
+ return {
+ approved: true,
+ message: "Auto-approved by scheduled job runner.",
+ };
+ };
+
+ // Capture tool results for the execution trace
+ agentHandler.aibitat.onToolCallResult(
+ ({ toolName, arguments: args, result }) => {
+ toolCalls.push({
+ toolName,
+ arguments: args,
+ result,
+ timestamp: Date.now(),
+ });
+ }
+ );
+
+ const startTime = Date.now();
+ await Promise.race([
+ agentHandler.startAgentCluster(),
+ new Promise((_, reject) => {
+ timeoutId = setTimeout(
+ () => reject(new Error("SCHEDULED_JOB_TIMEOUT")),
+ SCHEDULED_JOB_TIMEOUT_MS
+ );
+ }),
+ ]).finally(() => {
+ if (!timeoutId) return;
+ clearTimeout(timeoutId);
+ timeoutId = null;
+ });
+ const duration = Date.now() - startTime;
+
+ // Get outputs from aibitat which include proper type info (e.g., PptxFileDownload, ExcelFileDownload)
+ // for correct re-rendering when porting to workspace chat
+ const outputs = agentHandler.getPendingOutputs();
+
+ status = "success";
+ await ScheduledJobRun.complete(runId, {
+ result: {
+ text: state.textResponse,
+ thoughts,
+ toolCalls,
+ outputs,
+ metrics: state.metrics,
+ duration,
+ },
+ });
+  log(`Scheduled job "${job.name}" completed in ${duration}ms`);
+ await sendWebPushNotification(job, runId, state.textResponse, log);
+ } catch (error) {
+ if (error.message === "SCHEDULED_JOB_TIMEOUT") {
+ status = "timed_out";
+ log("Scheduled job timed out");
+ } else {
+ status = "failed";
+ log(`Scheduled job error: ${error.message}`);
+ errorMessage = error.message;
+ }
+ } finally {
+ switch (status) {
+ case "not_found":
+ await ScheduledJobRun.failIfNotTerminal(runId, "Job no longer exists");
+ break;
+ case "timed_out":
+ await ScheduledJobRun.timeout(runId);
+ break;
+ case "failed":
+ await ScheduledJobRun.fail(runId, { error: errorMessage });
+ break;
+ default: // Do nothing by default (success, killed, other)
+ break;
+ }
+
+ if (timeoutId) clearTimeout(timeoutId);
+ conclude();
+ }
+});
diff --git a/server/models/scheduledJob.js b/server/models/scheduledJob.js
new file mode 100644
index 00000000..fa663918
--- /dev/null
+++ b/server/models/scheduledJob.js
@@ -0,0 +1,499 @@
+const prisma = require("../utils/prisma");
+const later = require("@breejs/later");
+const cronValidate = require("cron-validate").default;
+
+// Use UTC time for cron interpretation. This ensures consistent behavior
+// regardless of server timezone (e.g., when running in containers).
+// The frontend is responsible for converting user's local time to UTC
+// when creating/editing schedules, and converting UTC back to local time
+// when displaying.
+later.date.UTC();
+
+const ScheduledJob = {
+ writable: ["name", "prompt", "tools", "schedule", "enabled"],
+
+ /**
+ * Maximum number of scheduled jobs that can be enabled at once.
+ * null = no limit. Set to a positive integer to cap concurrent active jobs;
+ * attempting to enable a job past the cap will be rejected at the API layer.
+ * @todo: add a configuration option for this
+ * @type {number|null}
+ */
+ MAX_ACTIVE: null,
+
+ /**
+ * Compute the next run time from a cron expression.
+ * Uses @breejs/later which is already available via Bree.
+ * @param {string} cronExpression
+ * @returns {Date|null}
+ */
+ computeNextRunAt: function (cronExpression) {
+ try {
+ const sched = later.parse.cron(cronExpression);
+ const next = later.schedule(sched).next(1);
+ return next || null;
+ } catch (error) {
+ console.error(
+ "Failed to compute next run time from cron:",
+ error.message
+ );
+ return null;
+ }
+ },
+
+ /**
+ * Validate a cron expression.
+ * Uses cron-validate which is already available via Bree.
+ * @param {string} cronExpression
+ * @returns {boolean}
+ */
+ isValidCron: function (cronExpression) {
+ try {
+ return cronValidate(cronExpression).isValid();
+ } catch {
+ return false;
+ }
+ },
+
+ create: async function ({ name, prompt, tools = null, schedule } = {}) {
+ try {
+ const nextRunAt = this.computeNextRunAt(schedule);
+ const job = await prisma.scheduled_jobs.create({
+ data: {
+ name: String(name),
+ prompt: String(prompt),
+ tools: tools ? JSON.stringify(tools) : null,
+ schedule: String(schedule),
+ nextRunAt,
+ },
+ });
+ return { job, error: null };
+ } catch (error) {
+ console.error("Failed to create scheduled job:", error.message);
+ return { job: null, error: error.message };
+ }
+ },
+
+ update: async function (id, data = {}) {
+ try {
+ const updates = {};
+ for (const key of this.writable) {
+ if (data.hasOwnProperty(key)) {
+ if (key === "tools") {
+ updates[key] = data[key] ? JSON.stringify(data[key]) : null;
+ } else {
+ updates[key] = data[key];
+ }
+ }
+ }
+
+ // Recompute nextRunAt if schedule changed
+ if (updates.schedule) {
+ updates.nextRunAt = this.computeNextRunAt(updates.schedule);
+ }
+
+ updates.updatedAt = new Date();
+
+ const job = await prisma.scheduled_jobs.update({
+ where: { id: Number(id) },
+ data: updates,
+ });
+ return { job, error: null };
+ } catch (error) {
+ console.error("Failed to update scheduled job:", error.message);
+ return { job: null, error: error.message };
+ }
+ },
+
+ get: async function (clause = {}) {
+ try {
+ const job = await prisma.scheduled_jobs.findFirst({ where: clause });
+ return job || null;
+ } catch (error) {
+ console.error("Failed to get scheduled job:", error.message);
+ return null;
+ }
+ },
+
+ where: async function (
+ clause = {},
+ limit = null,
+ orderBy = null,
+ include = {}
+ ) {
+ try {
+ const results = await prisma.scheduled_jobs.findMany({
+ where: clause,
+ ...(limit !== null ? { take: limit } : {}),
+ ...(orderBy !== null
+ ? { orderBy }
+ : { orderBy: { createdAt: "desc" } }),
+ ...(Object.keys(include).length > 0 ? { include } : {}),
+ });
+ return results;
+ } catch (error) {
+ console.error("Failed to query scheduled jobs:", error.message);
+ return [];
+ }
+ },
+
+ delete: async function (id) {
+ try {
+ await prisma.scheduled_jobs.delete({ where: { id: Number(id) } });
+ return true;
+ } catch (error) {
+ console.error("Failed to delete scheduled job:", error.message);
+ return false;
+ }
+ },
+
+ allEnabled: async function () {
+ try {
+ return await prisma.scheduled_jobs.findMany({
+ where: { enabled: true },
+ });
+ } catch (error) {
+ console.error("Failed to get enabled scheduled jobs:", error.message);
+ return [];
+ }
+ },
+
+ /**
+ * Count enabled scheduled jobs, optionally excluding a single job by id.
+ * `excludeId` is used by canActivate so that re-saving an already-enabled job
+ * is not double-counted against the limit.
+ * @param {number|null} excludeId
+ * @returns {Promise}
+ */
+ countActive: async function (excludeId = null) {
+ try {
+ return await prisma.scheduled_jobs.count({
+ where: {
+ enabled: true,
+ ...(excludeId != null ? { NOT: { id: Number(excludeId) } } : {}),
+ },
+ });
+ } catch (error) {
+ console.error("Failed to count active scheduled jobs:", error.message);
+ return 0;
+ }
+ },
+
+ /**
+ * Check whether a job can be activated without exceeding MAX_ACTIVE.
+ * Pass `excludeId` when re-saving an existing job to avoid counting it twice.
+ * @param {{ excludeId?: number|null }} [opts]
+ * @returns {Promise<{ allowed: boolean, limit: number|null, current: number }>}
+ */
+ canActivate: async function ({ excludeId = null } = {}) {
+ const limit = this.MAX_ACTIVE;
+ if (limit == null) {
+ return { allowed: true, limit: null, current: 0 };
+ }
+ const current = await this.countActive(excludeId);
+ return { allowed: current < limit, limit, current };
+ },
+
+ /**
+ * Recompute nextRunAt from the current time.
+ * Used on cold startup to correct stale nextRunAt values.
+ * @param {number} id
+ */
+ recomputeNextRunAt: async function (id) {
+ try {
+ const job = await this.get({ id: Number(id) });
+ if (!job) return;
+
+ const nextRunAt = this.computeNextRunAt(job.schedule);
+ if (!nextRunAt) return;
+
+ await prisma.scheduled_jobs.update({
+ where: { id: Number(id) },
+ data: { nextRunAt, updatedAt: new Date() },
+ });
+ } catch (error) {
+ console.error("Failed to recompute nextRunAt:", error.message);
+ }
+ },
+
+ /**
+ * Update lastRunAt and nextRunAt after a job run.
+ * @param {number} id
+ */
+ updateRunTimestamps: async function (id) {
+ try {
+ const job = await this.get({ id: Number(id) });
+ if (!job) return;
+
+ const nextRunAt = this.computeNextRunAt(job.schedule);
+ await prisma.scheduled_jobs.update({
+ where: { id: Number(id) },
+ data: {
+ lastRunAt: new Date(),
+ nextRunAt,
+ updatedAt: new Date(),
+ },
+ });
+ } catch (error) {
+ console.error("Failed to update run timestamps:", error.message);
+ }
+ },
+
+ /**
+ * Get ALL available tools for scheduled jobs to choose from.
+ * Unlike the global agent settings, each scheduled job can have its own tool configuration.
+ * This returns all possible tools so users can enable different tools for different scheduled tasks.
+ *
+ * @returns {Promise<{
+ * category: string,
+ * name: string,
+ * items: Array<{ id: string, name: string, description?: string, requiresSetup?: boolean }>
+ * }[]>}
+ */
+ availableTools: async function () {
+ const AgentPlugins = require("../utils/agents/aibitat/plugins");
+ const ImportedPlugin = require("../utils/agents/imported");
+ const { AgentFlows } = require("../utils/agentFlows");
+ const MCPCompatibilityLayer = require("../utils/MCP");
+ const {
+ listSQLConnections,
+ } = require("../utils/agents/aibitat/plugins/sql-agent/SQLConnectors");
+ const {
+ GmailBridge,
+ } = require("../utils/agents/aibitat/plugins/gmail/lib");
+ const {
+ GoogleCalendarBridge,
+ } = require("../utils/agents/aibitat/plugins/google-calendar/lib");
+ const {
+ OutlookBridge,
+ } = require("../utils/agents/aibitat/plugins/outlook/lib");
+
+ const categories = [];
+
+ // Check which skills need setup
+ const sqlConnections = await listSQLConnections();
+ const sqlNeedsSetup = sqlConnections.length === 0;
+
+ const gmailConfig = await GmailBridge.getConfig();
+ const gmailNeedsSetup = !gmailConfig.deploymentId || !gmailConfig.apiKey;
+
+ const gcalConfig = await GoogleCalendarBridge.getConfig();
+ const gcalNeedsSetup = !gcalConfig.deploymentId || !gcalConfig.apiKey;
+
+ const outlookConfig = await OutlookBridge.getConfig();
+ const outlookNeedsSetup =
+ !outlookConfig.clientId ||
+ !outlookConfig.clientSecret ||
+ !outlookConfig.accessToken;
+
+ // Default skills (always available)
+ const DEFAULT_SKILLS = [
+ {
+ id: "rag-memory",
+ name: "RAG Memory",
+ description: "Recall and cite information from embedded documents",
+ },
+ {
+ id: "document-summarizer",
+ name: "Document Summarizer",
+ description: "Summarize documents in the workspace",
+ },
+ {
+ id: "web-scraping",
+ name: "Web Scraping",
+ description: "Scrape content from web pages",
+ },
+ ];
+
+ // Configurable skills without sub-skills
+ const SIMPLE_CONFIGURABLE_SKILLS = [
+ {
+ id: "create-chart",
+ name: "Create Charts",
+ description: "Generate data visualization charts",
+ },
+ {
+ id: "web-browsing",
+ name: "Web Browsing",
+ description: "Search and browse the web",
+ },
+ {
+ id: "sql-agent",
+ name: "SQL Agent",
+ description: "Query connected SQL databases",
+ requiresSetup: sqlNeedsSetup,
+ },
+ ];
+
+ // Build agent skills category
+ const agentSkillItems = [...DEFAULT_SKILLS, ...SIMPLE_CONFIGURABLE_SKILLS];
+
+ if (agentSkillItems.length > 0) {
+ categories.push({
+ category: "agent-skills",
+ name: "Agent Skills",
+ items: agentSkillItems,
+ });
+ }
+
+ // Helper to prettify a sub-skill name (e.g., "gmail-get-inbox" -> "Get Inbox")
+ const prettifySubSkillName = (name, prefix) => {
+ let cleaned = name;
+ const prefixes = [prefix, "gcal", "filesystem", "create"];
+ for (const p of prefixes) {
+ if (cleaned.startsWith(`${p}-`)) {
+ cleaned = cleaned.slice(p.length + 1);
+ break;
+ }
+ }
+ return cleaned
+ .split("-")
+ .map((w) => w.charAt(0).toUpperCase() + w.slice(1))
+ .join(" ");
+ };
+
+ // Helper function to build sub-skill items from AgentPlugins
+ const buildSubSkillItems = (pluginKey, namePrefix) => {
+ const plugin = AgentPlugins[pluginKey];
+ if (!plugin || !Array.isArray(plugin.plugin)) return [];
+
+ return plugin.plugin.map((subPlugin) => ({
+ id: `${plugin.name}#${subPlugin.name}`,
+ name: prettifySubSkillName(subPlugin.name, namePrefix),
+ description: subPlugin.description || null,
+ }));
+ };
+
+ // Filesystem Agent (has sub-skills)
+ const filesystemItems = buildSubSkillItems("filesystemAgent", "filesystem");
+ if (filesystemItems.length > 0) {
+ categories.push({
+ category: "filesystem-agent",
+ name: "File System",
+ items: filesystemItems,
+ });
+ }
+
+ // Create Files Agent (has sub-skills)
+ const createFilesItems = buildSubSkillItems("createFilesAgent", "create");
+ if (createFilesItems.length > 0) {
+ categories.push({
+ category: "create-files-agent",
+ name: "Create Files",
+ items: createFilesItems,
+ });
+ }
+
+ // Gmail Agent (has sub-skills)
+ const gmailItems = buildSubSkillItems("gmailAgent", "gmail");
+ if (gmailItems.length > 0) {
+ categories.push({
+ category: "gmail-agent",
+ name: "Gmail",
+ items: gmailItems.map((item) => ({
+ ...item,
+ requiresSetup: gmailNeedsSetup,
+ })),
+ requiresSetup: gmailNeedsSetup,
+ });
+ }
+
+ // Google Calendar Agent (has sub-skills)
+ const googleCalendarItems = buildSubSkillItems(
+ "googleCalendarAgent",
+ "gcal"
+ );
+ if (googleCalendarItems.length > 0) {
+ categories.push({
+ category: "google-calendar-agent",
+ name: "Google Calendar",
+ items: googleCalendarItems.map((item) => ({
+ ...item,
+ requiresSetup: gcalNeedsSetup,
+ })),
+ requiresSetup: gcalNeedsSetup,
+ });
+ }
+
+ // Outlook Agent (has sub-skills)
+ const outlookItems = buildSubSkillItems("outlookAgent", "outlook");
+ if (outlookItems.length > 0) {
+ categories.push({
+ category: "outlook-agent",
+ name: "Outlook",
+ items: outlookItems.map((item) => ({
+ ...item,
+ requiresSetup: outlookNeedsSetup,
+ })),
+ requiresSetup: outlookNeedsSetup,
+ });
+ }
+
+ // Custom/imported skills category
+ const importedPlugins = ImportedPlugin.listImportedPlugins();
+ if (importedPlugins.length > 0) {
+ const customSkillItems = importedPlugins.map((plugin) => ({
+ id: `@@${plugin.hubId}`,
+ name: plugin.name || plugin.hubId,
+ description: plugin.description || null,
+ }));
+
+ categories.push({
+ category: "custom-skills",
+ name: "Custom Skills",
+ items: customSkillItems,
+ });
+ }
+
+ // Agent flows category
+ const allFlows = AgentFlows.listFlows();
+ if (allFlows.length > 0) {
+ const flowItems = allFlows.map((flow) => ({
+ id: `@@flow_${flow.uuid}`,
+ name: flow.name,
+ description: flow.description || null,
+ }));
+
+ categories.push({
+ category: "agent-flows",
+ name: "Agent Flows",
+ items: flowItems,
+ });
+ }
+
+ // MCP servers category - get all servers
+ // MCP servers are selected as a whole (@@mcp_serverName), not individual tools.
+ // The agent loader expands the server into its individual tools at runtime.
+ try {
+ const mcpLayer = new MCPCompatibilityLayer();
+ const servers = await mcpLayer.servers();
+
+ const mcpItems = [];
+ for (const server of servers) {
+ const toolCount = server.tools?.length || 0;
+ mcpItems.push({
+ id: `@@mcp_${server.name}`,
+ name: server.name,
+ description:
+ toolCount > 0
+ ? `${toolCount} tools available`
+ : "No tools available",
+ });
+ }
+
+ if (mcpItems.length > 0) {
+ categories.push({
+ category: "mcp-servers",
+ name: "MCP Servers",
+ items: mcpItems,
+ });
+ }
+ } catch (error) {
+ console.error("Failed to load MCP servers for available tools:", error);
+ }
+
+ return categories;
+ },
+};
+
+module.exports = { ScheduledJob };
diff --git a/server/models/scheduledJobRun.js b/server/models/scheduledJobRun.js
new file mode 100644
index 00000000..4be96d6a
--- /dev/null
+++ b/server/models/scheduledJobRun.js
@@ -0,0 +1,362 @@
+const prisma = require("../utils/prisma");
+
+const ScheduledJobRun = {
+ statuses: {
+ queued: "queued",
+ running: "running",
+ completed: "completed",
+ failed: "failed",
+ timed_out: "timed_out",
+ },
+
+ // Non-terminal statuses — a row in any of these states is considered
+ // "in flight" for dedup purposes. The parent claims a row as `queued`
+ // when enqueuing; the worker transitions it to `running` once it
+ // actually begins executing (it may sit in p-queue first).
+ nonTerminalStatuses: ["queued", "running"],
+
+ /**
+ * Claim a new run for a job. At most one in-flight run per job is allowed —
+ * if a `queued` or `running` row already exists, this returns null and the
+ * caller should drop the request. The check + insert run inside an
+ * interactive transaction so two concurrent callers cannot both pass;
+ * SQLite serializes writes.
+ *
+ * The row is created in `queued` status; the worker transitions it to
+ * `running` via markRunning() once it actually begins executing.
+ *
+ * @param {number} jobId
+ * @returns {Promise} The created run row, or null if a run is
+ * already in progress for this job (or on failure).
+ */
+ start: async function (jobId) {
+ try {
+ return await prisma.$transaction(async (tx) => {
+ const existing = await tx.scheduled_job_runs.findFirst({
+ where: {
+ jobId: Number(jobId),
+ status: { in: this.nonTerminalStatuses },
+ },
+ select: { id: true },
+ });
+ if (existing) return null;
+
+ return tx.scheduled_job_runs.create({
+ data: {
+ jobId: Number(jobId),
+ status: this.statuses.queued,
+ },
+ });
+ });
+ } catch (error) {
+ console.error("Failed to enqueue scheduled job run:", error.message);
+ return null;
+ }
+ },
+
+ /**
+ * Transition a queued run into the running state. Called by the worker as
+ * its first DB write, so `startedAt` reflects actual execution start rather
+ * than queue-claim time. Filtered updateMany makes it a no-op if the row
+ * has already been transitioned to a terminal state (e.g. parent failed it
+ * because the worker failed to spawn, then a stale child somehow boots).
+ *
+ * @param {number} id - scheduled_job_runs.id
+ * @returns {Promise} true if the row transitioned, false otherwise
+ */
+ markRunning: async function (id) {
+ try {
+ const result = await prisma.scheduled_job_runs.updateMany({
+ where: { id: Number(id), status: this.statuses.queued },
+ data: {
+ status: this.statuses.running,
+ startedAt: new Date(),
+ },
+ });
+ return result.count > 0;
+ } catch (error) {
+ console.error(
+ "Failed to transition scheduled job run to running:",
+ error.message
+ );
+ return false;
+ }
+ },
+
+ /**
+ * Mark a run as failed only if it has not already reached a terminal state.
+ * Used by the parent process when a worker exits unexpectedly — atomic
+ * filtered update prevents clobbering a row the worker already transitioned
+ * to `completed` (the rare race where the worker succeeded but exited
+ * non-zero during cleanup).
+ * @param {number} id - scheduled_job_runs.id
+ * @param {string} errorMsg
+ */
+ failIfNotTerminal: async function (id, errorMsg) {
+ try {
+ const result = await prisma.scheduled_job_runs.updateMany({
+ where: {
+ id: Number(id),
+ status: { in: this.nonTerminalStatuses },
+ },
+ data: {
+ status: this.statuses.failed,
+ error: String(errorMsg || "Worker exited unexpectedly"),
+ completedAt: new Date(),
+ },
+ });
+ return result.count > 0;
+ } catch (error) {
+ console.error(
+ "Failed to conditionally fail scheduled job run:",
+ error.message
+ );
+ return false;
+ }
+ },
+
+ complete: async function (id, { result } = {}) {
+ try {
+ const run = await prisma.scheduled_job_runs.update({
+ where: { id: Number(id) },
+ data: {
+ status: this.statuses.completed,
+ result: typeof result === "string" ? result : JSON.stringify(result),
+ completedAt: new Date(),
+ },
+ });
+ return run;
+ } catch (error) {
+ console.error("Failed to complete scheduled job run:", error.message);
+ return null;
+ }
+ },
+
+ fail: async function (id, { error: errorMsg } = {}) {
+ try {
+ // Use updateMany with a filter to avoid overwriting a run that was
+ // already moved to a terminal state (e.g., killed by user).
+ const result = await prisma.scheduled_job_runs.updateMany({
+ where: {
+ id: Number(id),
+ status: { in: this.nonTerminalStatuses },
+ },
+ data: {
+ status: this.statuses.failed,
+ error: String(errorMsg || "Unknown error"),
+ completedAt: new Date(),
+ },
+ });
+ if (result.count === 0) return null;
+ return await this.get({ id: Number(id) });
+ } catch (error) {
+ console.error(
+ "Failed to mark scheduled job run as failed:",
+ error.message
+ );
+ return null;
+ }
+ },
+
+ timeout: async function (id) {
+ try {
+ // Use updateMany with a filter to avoid overwriting a run that was
+ // already moved to a terminal state (e.g., killed by user).
+ const result = await prisma.scheduled_job_runs.updateMany({
+ where: {
+ id: Number(id),
+ status: { in: this.nonTerminalStatuses },
+ },
+ data: {
+ status: this.statuses.timed_out,
+ error: "Job execution timed out",
+ completedAt: new Date(),
+ },
+ });
+ if (result.count === 0) return null;
+ return await this.get({ id: Number(id) });
+ } catch (error) {
+ console.error(
+ "Failed to mark scheduled job run as timed out:",
+ error.message
+ );
+ return null;
+ }
+ },
+
+ /**
+ * Kill a running or queued job run. This marks the run as failed with a
+ * user-initiated kill message. The actual worker process termination is
+ * handled by BackgroundService.killRun().
+   * - Killing a run will also mark it as read (user killed it, so don't bother with unread status)
+ * @param {number} id - scheduled_job_runs.id
+ * @returns {Promise} The updated run row, or null if not killable
+ */
+ kill: async function (id) {
+ try {
+ const result = await prisma.scheduled_job_runs.updateMany({
+ where: {
+ id: Number(id),
+ status: { in: this.nonTerminalStatuses },
+ },
+ data: {
+ status: this.statuses.failed,
+ error: "Job killed by user",
+ completedAt: new Date(),
+ readAt: new Date(),
+ },
+ });
+ if (result.count === 0) return null;
+ return await this.get({ id: Number(id) });
+ } catch (error) {
+ console.error("Failed to kill scheduled job run:", error.message);
+ return null;
+ }
+ },
+
+ get: async function (clause = {}, include = {}) {
+ try {
+ const run = await prisma.scheduled_job_runs.findFirst({
+ where: clause,
+ ...(Object.keys(include).length > 0 ? { include } : {}),
+ });
+ return run || null;
+ } catch (error) {
+ console.error("Failed to get scheduled job run:", error.message);
+ return null;
+ }
+ },
+
+ where: async function (
+ clause = {},
+ limit = null,
+ orderBy = null,
+ include = {},
+ offset = 0
+ ) {
+ try {
+ const results = await prisma.scheduled_job_runs.findMany({
+ where: clause,
+ ...(limit !== null ? { take: limit } : {}),
+ ...(orderBy !== null
+ ? { orderBy }
+ : { orderBy: { startedAt: "desc" } }),
+ ...(Object.keys(include).length > 0 ? { include } : {}),
+ ...(offset !== null ? { skip: offset } : {}),
+ });
+ return results;
+ } catch (error) {
+ console.error("Failed to query scheduled job runs:", error.message);
+ return [];
+ }
+ },
+
+ markRead: async function (id) {
+ try {
+ await prisma.scheduled_job_runs.update({
+ where: { id: Number(id) },
+ data: { readAt: new Date() },
+ });
+ return true;
+ } catch (error) {
+ console.error("Failed to mark run as read:", error.message);
+ return false;
+ }
+ },
+
+ delete: async function (clause = {}) {
+ try {
+ await prisma.scheduled_job_runs.deleteMany({ where: clause });
+ return true;
+ } catch (error) {
+ console.error("Failed to delete scheduled job runs:", error.message);
+ return false;
+ }
+ },
+
+ /**
+ * Mark all orphaned in-flight runs (queued or running) as failed — used on
+ * cold startup to recover rows whose owning worker died with the server.
+ */
+ failOrphanedRuns: async function () {
+ try {
+ const result = await prisma.scheduled_job_runs.updateMany({
+ where: { status: { in: this.nonTerminalStatuses } },
+ data: {
+ status: this.statuses.failed,
+ error: "Server restarted during execution",
+ completedAt: new Date(),
+ },
+ });
+ return result.count;
+ } catch (error) {
+ console.error("Failed to fail orphaned runs:", error.message);
+ return 0;
+ }
+ },
+
+ /**
+ * Continue a run in a workspace thread.
+ * This will create a new workspace and thread specific for the run if they do not exist, and add the run's response to the thread.
+ * @param {number} runId - The ID of the run to continue.
+ * @returns {Promise<{workspace: import("@prisma/client").workspaces | null, thread: import("@prisma/client").workspace_threads | null, error: string | null}>} A promise that resolves to an object containing the workspace, thread, and an error message if applicable.
+ */
+ continueInThread: async function (runId) {
+ try {
+ const { Workspace } = require("./workspace");
+ const { WorkspaceThread } = require("./workspaceThread");
+ const { WorkspaceChats } = require("./workspaceChats");
+ const { safeJsonParse } = require("../utils/http");
+
+ const run = await this.get({ id: Number(runId) }, { job: true });
+ if (!run) throw new Error("Run not found");
+
+ const result = safeJsonParse(run.result, {});
+ const responseText = result?.text || "No response was generated.";
+
+ // Get or create the "Scheduled Jobs" workspace
+ const { workspace, error: workspaceError } = await Workspace.upsert(
+ { slug: "scheduled-jobs" },
+ {
+ name: "Scheduled Jobs",
+ slug: "scheduled-jobs",
+ chatMode: "automatic",
+ }
+ );
+ if (workspaceError)
+ throw new Error(workspaceError || "Failed to create workspace");
+
+ const { thread, message: threadError } =
+ await WorkspaceThread.new(workspace);
+ if (threadError)
+ throw new Error(threadError || "Failed to create thread");
+
+ await WorkspaceChats.new({
+ workspaceId: workspace.id,
+ prompt: run.job.prompt,
+ response: {
+ text: responseText,
+ sources: result.sources || [],
+ outputs: result.outputs || [],
+ type: "chat",
+ },
+ threadId: thread.id,
+ include: true,
+ });
+
+ return {
+ workspace,
+ thread,
+ error: null,
+ };
+ } catch (error) {
+ return {
+ workspace: null,
+ thread: null,
+ error: error.message ?? "Unknown error",
+ };
+ }
+ },
+};
+
+module.exports = { ScheduledJobRun };
diff --git a/server/models/telemetry.js b/server/models/telemetry.js
index 0257d030..266e4d3d 100644
--- a/server/models/telemetry.js
+++ b/server/models/telemetry.js
@@ -28,6 +28,7 @@ const Telemetry = {
link_uploaded: 30,
raw_document_uploaded: 30,
document_parsed: 30,
+ agent_generated_file_downloaded: 30,
},
id: async function () {
diff --git a/server/models/workspace.js b/server/models/workspace.js
index 8cb7fc6b..0e92f4ac 100644
--- a/server/models/workspace.js
+++ b/server/models/workspace.js
@@ -566,6 +566,29 @@ const Workspace = {
}
},
+ /**
+ * Upsert a workspace.
+ * If the workspace does not exist, it will be created.
+ * If the workspace exists, it will be updated (if data is provided).
+ * @param {Object} clause - The clause to upsert the workspace by.
+ * @param {Object} createData - The data to create the workspace with.
+ * @param {Object} updateData - The data to update the workspace with if it already exists.
+ * @returns {Promise<{workspace: import("@prisma/client").workspaces | null, error: string | null}>} A promise that resolves to an object containing the upserted workspace and an error message if applicable.
+ */
+ upsert: async function (clause = {}, createData = {}, updateData = {}) {
+ try {
+ const workspace = await prisma.workspaces.upsert({
+ where: clause,
+ update: updateData,
+ create: createData,
+ });
+ return { workspace, error: null };
+ } catch (error) {
+ console.error(error.message);
+ return { workspace: null, error: error.message };
+ }
+ },
+
/**
* Get the prompt history for a workspace.
* @param {Object} options - The options to get prompt history for.
diff --git a/server/package.json b/server/package.json
index 13362d75..17a128f4 100644
--- a/server/package.json
+++ b/server/package.json
@@ -22,6 +22,7 @@
"dependencies": {
"@anthropic-ai/sdk": "^0.39.0",
"@aws-sdk/client-bedrock-runtime": "^3.775.0",
+ "@breejs/later": "4.2.0",
"@datastax/astra-db-ts": "^0.1.3",
"@ladjs/graceful": "^3.2.2",
"@lancedb/lancedb": "0.15.0",
@@ -54,6 +55,7 @@
"chromadb": "^2.0.1",
"cohere-ai": "^7.19.0",
"cors": "^2.8.5",
+ "cron-validate": "1.4.5",
"diff": "7.0.0",
"docx": "9.6.1",
"dompurify": "3.3.3",
@@ -82,6 +84,7 @@
"node-telegram-bot-api": "^0.67.0",
"ollama": "^0.6.3",
"openai": "4.95.1",
+ "p-queue": "6.6.2",
"pdf-lib": "1.17.1",
"pg": "^8.11.5",
"pinecone-client": "^1.1.0",
diff --git a/server/prisma/migrations/20260423191158_init/migration.sql b/server/prisma/migrations/20260423191158_init/migration.sql
new file mode 100644
index 00000000..db4b139c
--- /dev/null
+++ b/server/prisma/migrations/20260423191158_init/migration.sql
@@ -0,0 +1,29 @@
+-- CreateTable
+CREATE TABLE "scheduled_jobs" (
+ "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+ "name" TEXT NOT NULL,
+ "prompt" TEXT NOT NULL,
+ "tools" TEXT,
+ "schedule" TEXT NOT NULL,
+ "enabled" BOOLEAN NOT NULL DEFAULT true,
+ "lastRunAt" DATETIME,
+ "nextRunAt" DATETIME,
+ "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ "updatedAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
+);
+
+-- CreateTable
+CREATE TABLE "scheduled_job_runs" (
+ "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+ "jobId" INTEGER NOT NULL,
+ "status" TEXT NOT NULL DEFAULT 'queued',
+ "result" TEXT,
+ "error" TEXT,
+ "startedAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ "completedAt" DATETIME,
+ "readAt" DATETIME,
+ CONSTRAINT "scheduled_job_runs_jobId_fkey" FOREIGN KEY ("jobId") REFERENCES "scheduled_jobs" ("id") ON DELETE CASCADE ON UPDATE CASCADE
+);
+
+-- CreateIndex
+CREATE INDEX "scheduled_job_runs_jobId_idx" ON "scheduled_job_runs"("jobId");
diff --git a/server/prisma/schema.prisma b/server/prisma/schema.prisma
index ef8765af..9ea3ecd5 100644
--- a/server/prisma/schema.prisma
+++ b/server/prisma/schema.prisma
@@ -395,3 +395,31 @@ model external_communication_connectors {
createdAt DateTime @default(now())
lastUpdatedAt DateTime @default(now())
}
+
+model scheduled_jobs {
+ id Int @id @default(autoincrement())
+ name String
+ prompt String
+ tools String? // JSON array of tool identifiers (null = use all enabled agent skills)
+ schedule String // Cron expression
+ enabled Boolean @default(true)
+ lastRunAt DateTime?
+ nextRunAt DateTime?
+ createdAt DateTime @default(now())
+ updatedAt DateTime @default(now())
+ runs scheduled_job_runs[]
+}
+
+model scheduled_job_runs {
+ id Int @id @default(autoincrement())
+ jobId Int
+ status String @default("queued") // queued | running | completed | failed | timed_out — model always sets explicitly
+ result String? // JSON execution trace
+ error String?
+ startedAt DateTime @default(now())
+ completedAt DateTime?
+ readAt DateTime? // null = unread
+ job scheduled_jobs @relation(fields: [jobId], references: [id], onDelete: Cascade)
+
+ @@index([jobId])
+}
diff --git a/server/utils/BackgroundWorkers/index.js b/server/utils/BackgroundWorkers/index.js
index d0f0099d..09a25eaf 100644
--- a/server/utils/BackgroundWorkers/index.js
+++ b/server/utils/BackgroundWorkers/index.js
@@ -1,13 +1,28 @@
const path = require("path");
const Graceful = require("@ladjs/graceful");
const Bree = require("@mintplex-labs/bree");
+const later = require("@breejs/later");
+const PQueue = require("p-queue").default;
const setLogger = require("../logger");
+// Use UTC time for cron interpretation. This ensures consistent behavior
+// regardless of server timezone (e.g., when running in containers).
+later.date.UTC();
+
class BackgroundService {
name = "BackgroundWorkerService";
static _instance = null;
documentSyncEnabled = false;
#root = path.resolve(__dirname, "../../jobs");
+ #scheduledJobTimers = new Map();
+ #scheduledJobQueue = new PQueue({
+ concurrency: Number(process.env.SCHEDULED_JOB_MAX_CONCURRENT) || 1,
+ });
+ // Tracks in-flight worker processes per scheduled jobId so we can kill any
+ // active runs when the job is deleted. Without this, a running worker
+ // outlives the cascade-delete of its scheduled_job_runs row and throws when
+ // it tries to write the result back (prisma.update on a missing row).
+ #scheduledJobWorkers = new Map();
#alwaysRunJobs = [
{
@@ -76,7 +91,18 @@ class BackgroundService {
async boot() {
const { DocumentSyncQueue } = require("../../models/documentSyncQueue");
+ const { ScheduledJobRun } = require("../../models/scheduledJobRun");
+
this.documentSyncEnabled = await DocumentSyncQueue.enabled();
+
+ // Mark any orphaned scheduled job runs as failed (server crashed mid-execution)
+ const orphanedCount = await ScheduledJobRun.failOrphanedRuns();
+ if (orphanedCount > 0) {
+ this.#log(
+ `Marked ${orphanedCount} orphaned scheduled job run(s) as failed`
+ );
+ }
+
const jobsToRun = this.jobs();
this.#log("Starting...");
@@ -90,15 +116,30 @@ class BackgroundService {
});
this.graceful = new Graceful({ brees: [this.bree], logger: this.logger });
this.graceful.listen();
+
this.bree.start();
this.#log(
`Service started with ${jobsToRun.length} jobs`,
jobsToRun.map((j) => j.name)
);
+
+ await this.#bootScheduledJobs();
+ }
+
+ /**
+ * Cleanup scheduled jobs (in-process cron timers + p-queue)
+ */
+ #cleanupScheduledJobs() {
+ for (const [id, timer] of this.#scheduledJobTimers) {
+ timer.clear();
+ this.#scheduledJobTimers.delete(id);
+ }
+ this.#scheduledJobQueue.clear();
}
async stop() {
this.#log("Stopping...");
+ this.#cleanupScheduledJobs();
if (!!this.graceful && !!this.bree) this.graceful.stopBree(this.bree, 0);
this.bree = null;
this.graceful = null;
@@ -163,6 +204,192 @@ class BackgroundService {
/* Job may already be removed */
}
}
+
+ // ---------------------------------------------------------------
+ // Scheduled Jobs — in-process cron timers + p-queue
+ //
+ // Bree tightly couples scheduling with worker spawning — when a
+ // Bree cron fires, it directly calls run() which immediately
+ // spawns a child process with no way to intercept it. We manage
+ // our own cron timers (via later.setInterval) to decouple
+ // scheduling from execution so we can route jobs through p-queue
+ // for global concurrency control before spawning workers.
+ //
+ // Per-job dedup lives in the database, not in process memory: any
+ // non-terminal row (`queued` or `running`) in scheduled_job_runs means
+ // the job has a run in flight. ScheduledJobRun.start() does the check +
+ // insert atomically and creates the row in `queued` status. The worker
+ // transitions it to `running` once it actually begins executing, so
+ // `startedAt` reflects execution start rather than queue-claim time.
+ // Cron-fired and manually-triggered enqueues use the same rule —
+ // at most one in-flight run per job, regardless of source.
+ // ---------------------------------------------------------------
+
+ /**
+ * Register cron timers for all enabled scheduled jobs on startup.
+ */
+ async #bootScheduledJobs() {
+ const { ScheduledJob } = require("../../models/scheduledJob");
+ const enabledJobs = await ScheduledJob.allEnabled();
+
+ for (const job of enabledJobs) {
+ await ScheduledJob.recomputeNextRunAt(job.id);
+ this.addScheduledJob(job);
+ }
+
+ if (enabledJobs.length > 0) {
+ this.#log(
+ `Registered ${enabledJobs.length} scheduled job(s) (max concurrent: ${this.#scheduledJobQueue.concurrency})`,
+ enabledJobs.map((j) => `${j.name} (${j.schedule})`)
+ );
+ }
+ }
+
+ /**
+ * Register an in-process cron timer for a scheduled job.
+ * When the cron fires, the jobId is enqueued for execution.
+ * @param {object} job - scheduled_jobs DB record
+ */
+ addScheduledJob(job) {
+ this.removeScheduledJob(job.id);
+ const sched = later.parse.cron(job.schedule);
+ const timer = later.setInterval(() => {
+ this.enqueueScheduledJob(job.id);
+ }, sched);
+ this.#scheduledJobTimers.set(job.id, timer);
+ }
+
+ /**
+ * Remove an in-process cron timer for a scheduled job and kill any in-flight
+ * worker processes for it. Killing in-flight workers prevents them from
+ * writing results back to a scheduled_job_runs row that the FK cascade (from
+ * a subsequent ScheduledJob.delete) is about to remove.
+ * @param {number} jobId - scheduled_jobs.id
+ */
+ removeScheduledJob(jobId) {
+ const timer = this.#scheduledJobTimers.get(jobId);
+ if (timer) timer.clear();
+ this.#scheduledJobTimers.delete(jobId);
+
+ const workers = this.#scheduledJobWorkers.get(jobId);
+ if (workers) {
+ for (const worker of workers) {
+ try {
+ worker.kill("SIGTERM");
+ } catch {
+ /* worker may have already exited */
+ }
+ }
+ this.#scheduledJobWorkers.delete(jobId);
+ }
+ }
+
+ /**
+ * Re-sync a scheduled job's cron timer after an update.
+ * Removes the old timer and re-adds if still enabled.
+ * @param {number} jobId - scheduled_jobs.id
+ */
+ async syncScheduledJob(jobId) {
+ const { ScheduledJob } = require("../../models/scheduledJob");
+ this.removeScheduledJob(jobId);
+ const job = await ScheduledJob.get({ id: Number(jobId) });
+ if (job && job.enabled) {
+ this.addScheduledJob(job);
+ }
+ }
+
+ /**
+ * Kill a specific run's worker process. This terminates the worker but does
+ * not update the database — the caller should use ScheduledJobRun.kill()
+ * before or after calling this to mark the run as failed.
+ *
+ * @param {number} jobId - scheduled_jobs.id (parent job)
+ * @param {number} runId - scheduled_job_runs.id (not directly used, but for
+ * future multi-run support; currently we kill all workers for the jobId)
+ * @returns {boolean} true if a worker was found and killed, false otherwise
+ */
+ killRun(jobId, _runId) {
+ const workers = this.#scheduledJobWorkers.get(Number(jobId));
+ if (!workers || workers.size === 0) return false;
+
+ let killed = false;
+ for (const worker of workers) {
+ try {
+ worker.kill("SIGTERM");
+ killed = true;
+ } catch {
+ /* worker may have already exited */
+ }
+ }
+ return killed;
+ }
+
+ /**
+ * Enqueue a scheduled job for execution. Called by both the cron timer
+ * (in addScheduledJob) and the manual trigger endpoint. ScheduledJobRun.start()
+ * atomically rejects the call if the job already has a run in flight.
+ *
+ * @param {number} jobId - scheduled_jobs.id
+ * @returns {Promise} the created run row, or null if skipped
+ * because a run is already in flight for this job.
+ */
+ async enqueueScheduledJob(jobId) {
+ const { ScheduledJobRun } = require("../../models/scheduledJobRun");
+
+ const run = await ScheduledJobRun.start(jobId);
+    // if start returns null, skip enqueuing, scheduled job already has a run in flight
+ if (!run) return null;
+
+ this.#scheduledJobQueue.add(() =>
+ this.#runScheduledJobWorker(jobId, run.id).catch(async (err) => {
+ this.#log(`Scheduled job ${jobId} failed: ${err.message}`);
+ await ScheduledJobRun.failIfNotTerminal(run.id, err.message);
+ })
+ );
+ return run;
+ }
+
+ /**
+ * Spawn the run-scheduled-job worker for a given run and resolve when it
+ * exits so p-queue can advance. The worker reads its payload from an IPC
+ * `process.on("message", ...)` handler.
+ *
+ * @param {number} jobId
+ * @param {number} runId
+ * @returns {Promise}
+ */
+ async #runScheduledJobWorker(jobId, runId) {
+ const scriptPath = path.resolve(this.jobsRoot, "run-scheduled-job.js");
+ const { worker, jobId: workerId } = await this.spawnWorker(scriptPath);
+
+ if (!this.#scheduledJobWorkers.has(jobId)) {
+ this.#scheduledJobWorkers.set(jobId, new Set());
+ }
+ this.#scheduledJobWorkers.get(jobId).add(worker);
+
+ try {
+ worker.send({ jobId, runId });
+ await new Promise((resolve, reject) => {
+ worker.on("exit", (code, signal) => {
+ // SIGTERM is sent by removeScheduledJob when the job is deleted
+ // mid-run; treat that as a normal exit rather than a worker failure.
+ if (code === 0 || code == null || signal === "SIGTERM") {
+ resolve();
+ } else {
+ reject(new Error(`Worker exited with code ${code}`));
+ }
+ });
+ worker.on("error", reject);
+ });
+ } finally {
+ const workers = this.#scheduledJobWorkers.get(jobId);
+ if (workers) {
+ workers.delete(worker);
+ if (workers.size === 0) this.#scheduledJobWorkers.delete(jobId);
+ }
+ await this.removeJob(workerId).catch(() => {});
+ }
+ }
}
module.exports.BackgroundService = BackgroundService;
diff --git a/server/utils/PushNotifications/index.js b/server/utils/PushNotifications/index.js
index fad5c64a..0bf6b122 100644
--- a/server/utils/PushNotifications/index.js
+++ b/server/utils/PushNotifications/index.js
@@ -170,7 +170,7 @@ class PushNotifications {
* @param {Object} options - The options for the notification.
* @param {"primary"|number} [options.to] - The subscription to send the notification to. "all" sends to all subscriptions, "primary" sends to the primary user (single user mode only), a number sends subscription to specific user
* @param {PushNotificationPayload} [options.payload] - The payload to send to the clients.
- * @returns {void}
+ * @returns {Promise}
*/
sendNotification({ to = "primary", payload = {} } = {}) {
if (this.#subscriptions.size === 0)
@@ -180,10 +180,16 @@ class PushNotifications {
`.sendNotification() - Subscription for user ${to} not found`
);
this.#log(`.sendNotification() - Sending notification to user ${to}`);
- this.pushService.sendNotification(
- this.#subscriptions.get(to),
- JSON.stringify(payload)
- );
+ return this.pushService
+ .sendNotification(this.#subscriptions.get(to), JSON.stringify(payload))
+ .then((res) => {
+ this.#log(
+ `.sendNotification() - Delivered (status: ${res.statusCode})`
+ );
+ })
+ .catch((err) => {
+ this.#log(`.sendNotification() - Failed: ${err.message}`);
+ });
}
/**
diff --git a/server/utils/agents/aibitat/index.js b/server/utils/agents/aibitat/index.js
index edfef8b9..f32f38ed 100644
--- a/server/utils/agents/aibitat/index.js
+++ b/server/utils/agents/aibitat/index.js
@@ -417,6 +417,18 @@ class AIbitat {
return this;
}
+ /**
+ * Triggered when a tool call completes and returns a result.
+ * Used by scheduled jobs to capture tool results for the execution trace.
+ *
+ * @param listener
+ * @returns
+ */
+ onToolCallResult(listener = () => null) {
+ this.emitter.on("toolCallResult", listener);
+ return this;
+ }
+
/**
* Register an error in the chat history.
* This will trigger the `onError` event.
@@ -926,6 +938,11 @@ https://docs.anythingllm.com/agent/intelligent-tool-selection
const result = await fn.handler(args);
Telemetry.sendTelemetry("agent_tool_call", { tool: name }, null, true);
+ this.emitter.emit("toolCallResult", {
+ toolName: name,
+ arguments: args,
+ result,
+ });
/**
* If the tool call has direct output enabled, return the result directly to the chat
@@ -1079,6 +1096,11 @@ https://docs.anythingllm.com/agent/intelligent-tool-selection
const result = await fn.handler(args);
Telemetry.sendTelemetry("agent_tool_call", { tool: name }, null, true);
+ this.emitter.emit("toolCallResult", {
+ toolName: name,
+ arguments: args,
+ result,
+ });
if (this.skipHandleExecution) {
this.skipHandleExecution = false;
diff --git a/server/utils/agents/ephemeral.js b/server/utils/agents/ephemeral.js
index dd0626bf..59b0fecb 100644
--- a/server/utils/agents/ephemeral.js
+++ b/server/utils/agents/ephemeral.js
@@ -55,7 +55,7 @@ class EphemeralAgentHandler extends AgentHandler {
/**
* @param {{
* uuid: string,
- * workspace: import("@prisma/client").workspaces,
+ * workspace: import("@prisma/client").workspaces|null,
* prompt: string,
* userId: import("@prisma/client").users["id"]|null,
* threadId: import("@prisma/client").workspace_threads["id"]|null,
@@ -65,7 +65,7 @@ class EphemeralAgentHandler extends AgentHandler {
*/
constructor({
uuid,
- workspace,
+ workspace = null,
prompt,
userId = null,
threadId = null,
@@ -95,6 +95,8 @@ class EphemeralAgentHandler extends AgentHandler {
}
async #chatHistory(limit = 10) {
+ if (!this.#workspace) return [];
+
try {
const rawHistory = (
await WorkspaceChats.where(
@@ -144,7 +146,7 @@ class EphemeralAgentHandler extends AgentHandler {
*/
#getFallbackProvider() {
// First, fallback to the workspace chat provider and model if they exist
- if (this.#workspace.chatProvider && this.#workspace.chatModel) {
+ if (this.#workspace?.chatProvider && this.#workspace?.chatModel) {
return {
provider: this.#workspace.chatProvider,
model: this.#workspace.chatModel,
@@ -183,7 +185,7 @@ class EphemeralAgentHandler extends AgentHandler {
}
// The provider was explicitly set, so check if the workspace has an agent model set.
- if (this.#workspace.agentModel) return this.#workspace.agentModel;
+ if (this.#workspace?.agentModel) return this.#workspace.agentModel;
// Otherwise, we have no model to use - so guess a default model to use via the provider
// and it's system ENV params and if that fails - we return either a base model or null.
@@ -191,7 +193,7 @@ class EphemeralAgentHandler extends AgentHandler {
}
#providerSetupAndCheck() {
- this.provider = this.#workspace.agentProvider ?? null;
+ this.provider = this.#workspace?.agentProvider ?? null;
this.model = this.#fetchModel();
if (!this.provider)
@@ -367,6 +369,8 @@ class EphemeralAgentHandler extends AgentHandler {
* @returns {Promise} Formatted context string to append to user message
*/
async #fetchParsedFileContext() {
+ if (!this.#workspace) return "";
+
const user = this.#userId ? { id: this.#userId } : null;
const thread = this.#threadId ? { id: this.#threadId } : null;
const documentManager = new DocumentManager({
@@ -437,6 +441,7 @@ class EphemeralAgentHandler extends AgentHandler {
args = {
handler: null,
telegramChatId: null,
+ toolOverrides: null,
}
) {
this.aibitat = new AIbitat({
@@ -446,7 +451,7 @@ class EphemeralAgentHandler extends AgentHandler {
handlerProps: {
invocation: {
workspace: this.#workspace,
- workspace_id: this.#workspace.id,
+ workspace_id: this.#workspace?.id ?? null,
},
log: this.log,
},
@@ -471,6 +476,13 @@ class EphemeralAgentHandler extends AgentHandler {
// Load required agents (Default + custom)
await this.#loadAgents();
+ // Override tools if specified (e.g., for scheduled jobs with per-job tool selection)
+ if (args.toolOverrides) {
+ this.#funcsToLoad = args.toolOverrides;
+ const agentDef = this.aibitat.agents.get("@agent");
+ if (agentDef) agentDef.functions = args.toolOverrides;
+ }
+
// Attach all required plugins for functions to operate.
await this.#attachPlugins(args);
}
@@ -484,6 +496,15 @@ class EphemeralAgentHandler extends AgentHandler {
});
}
+ /**
+ * Gets pending outputs registered by plugins (e.g., file downloads).
+ * These outputs include the proper type for re-rendering in chat history.
+ * @returns {Array<{type: string, payload: object}>}
+ */
+ getPendingOutputs() {
+ return this.aibitat?._pendingOutputs ?? [];
+ }
+
/**
* Determine if the message should invoke the agent handler.
* This is true when the user explicitly invokes an agent (via @agent prefix)
diff --git a/server/yarn.lock b/server/yarn.lock
index 55d32742..a84adabc 100644
--- a/server/yarn.lock
+++ b/server/yarn.lock
@@ -1595,7 +1595,7 @@
dependencies:
regenerator-runtime "^0.14.0"
-"@breejs/later@^4.2.0":
+"@breejs/later@4.2.0", "@breejs/later@^4.2.0":
version "4.2.0"
resolved "https://registry.npmjs.org/@breejs/later/-/later-4.2.0.tgz"
integrity sha512-EVMD0SgJtOuFeg0lAVbCwa+qeTKILb87jqvLyUtQswGD9+ce2nB52Y5zbTF1Hc0MDFfbydcMcxb47jSdhikVHA==
@@ -4989,7 +4989,7 @@ crc32-stream@^4.0.2:
crc-32 "^1.2.0"
readable-stream "^3.4.0"
-cron-validate@^1.4.5:
+cron-validate@1.4.5, cron-validate@^1.4.5:
version "1.4.5"
resolved "https://registry.npmjs.org/cron-validate/-/cron-validate-1.4.5.tgz"
integrity sha512-nKlOJEnYKudMn/aNyNH8xxWczlfpaazfWV32Pcx/2St51r2bxWbGhZD7uwzMcRhunA/ZNL+Htm/i0792Z59UMQ==
@@ -8467,7 +8467,7 @@ p-locate@^5.0.0:
dependencies:
p-limit "^3.0.2"
-p-queue@^6.6.2:
+p-queue@6.6.2, p-queue@^6.6.2:
version "6.6.2"
resolved "https://registry.npmjs.org/p-queue/-/p-queue-6.6.2.tgz"
integrity sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==