fix: prevent Citations UI glitching during streaming chats (#4897)

* fix: prevent Citations UI glitching during streaming chats

* replaced random keys with stable keys

* simplify citation glitch fix

* Remove unneeded memo()

* Simplify key logic

* Replace Boolean(source) with !!source

* change cohere to behave with citations like other models

---------

Co-authored-by: shatfield4 <seanhatfield5@gmail.com>
Co-authored-by: Timothy Carambat <rambat1010@gmail.com>
Co-authored-by: Marcello Fitton <macfittondev@gmail.com>
This commit is contained in:
Neha Prasad 2026-01-30 00:14:34 +05:30 committed by GitHub
parent b96f38486d
commit 3fc2432684
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 11 additions and 12 deletions

View File

@@ -1,5 +1,4 @@
import { memo, useState } from "react";
import { v4 } from "uuid";
import { Fragment, useState } from "react";
import { decode as HTMLDecode } from "he";
import truncate from "truncate";
import ModalWrapper from "@/components/ModalWrapper";
@@ -70,9 +69,9 @@ export default function Citations({ sources = [] }) {
</button>
{open && (
<div className="flex flex-wrap flex-col items-start overflow-x-scroll no-scroll mt-1 ml-14 gap-y-2">
{combineLikeSources(sources).map((source) => (
{combineLikeSources(sources).map((source, idx) => (
<Citation
key={v4()}
key={source.title || idx.toString()}
source={source}
onClick={() => setSelectedSource(source)}
textSizeClass={textSizeClass}
@@ -90,7 +89,7 @@ export default function Citations({ sources = [] }) {
);
}
const Citation = memo(({ source, onClick, textSizeClass }) => {
const Citation = ({ source, onClick, textSizeClass }) => {
const { title, references = 1 } = source;
if (!title) return null;
const chunkSourceInfo = parseChunkSource(source);
@@ -120,7 +119,7 @@ const Citation = memo(({ source, onClick, textSizeClass }) => {
</div>
</button>
);
});
};
function omitChunkHeader(text) {
if (!text.includes("<document_metadata>")) return text;
@@ -132,7 +131,7 @@ function CitationDetailModal({ source, onClose }) {
const { isUrl, text: webpageUrl, href: linkTo } = parseChunkSource(source);
return (
<ModalWrapper isOpen={source}>
<ModalWrapper isOpen={!!source}>
<div className="w-full max-w-2xl bg-theme-bg-secondary rounded-lg shadow border-2 border-theme-modal-border overflow-hidden">
<div className="relative p-6 border-b rounded-t border-theme-modal-border">
<div className="w-full flex gap-x-2 items-center">
@@ -175,8 +174,8 @@ function CitationDetailModal({ source, onClose }) {
>
<div className="py-7 px-9 space-y-2 flex-col">
{chunks.map(({ text, score }, idx) => (
<>
<div key={idx} className="pt-6 text-white">
<Fragment key={idx}>
<div className="pt-6 text-white">
<div className="flex flex-col w-full justify-start pb-6 gap-y-1">
<p className="text-white whitespace-pre-line">
{HTMLDecode(omitChunkHeader(text))}
@@ -199,7 +198,7 @@ function CitationDetailModal({ source, onClose }) {
{idx !== chunks.length - 1 && (
<hr className="border-theme-modal-border" />
)}
</>
</Fragment>
))}
<div className="mb-6"></div>
</div>

View File

@@ -112,7 +112,7 @@ export default function handleChat(
updatedHistory = {
...existingHistory,
content: existingHistory.content + textResponse,
sources,
...(sources && sources.length > 0 ? { sources } : {}),
error,
closed: close,
animate: !close,

View File

@@ -193,7 +193,7 @@ class CohereLLM {
writeResponseChunk(response, {
uuid,
sources,
sources: [],
type: "textResponseChunk",
textResponse: text,
close: false,