From eeb29948e3504dda3771cbc3214de49371c93cdb Mon Sep 17 00:00:00 2001
From: Chris Z <535257617@qq.com>
Date: Sat, 25 Apr 2026 03:55:51 +0800
Subject: [PATCH] fix(embedder): surface Mistral embedding failures (#5513)

* fix(embedder): surface Mistral embedding failures

* fix pr

---------

Co-authored-by: Timothy Carambat
---
 .../utils/EmbeddingEngines/mistral/index.js | 21 ++++++++-----------
 1 file changed, 9 insertions(+), 12 deletions(-)

diff --git a/server/utils/EmbeddingEngines/mistral/index.js b/server/utils/EmbeddingEngines/mistral/index.js
index 23f4ddc8..fffd4c24 100644
--- a/server/utils/EmbeddingEngines/mistral/index.js
+++ b/server/utils/EmbeddingEngines/mistral/index.js
@@ -12,16 +12,10 @@ class MistralEmbedder {
   }
 
   async embedTextInput(textInput) {
-    try {
-      const response = await this.openai.embeddings.create({
-        model: this.model,
-        input: textInput,
-      });
-      return response?.data[0]?.embedding || [];
-    } catch (error) {
-      console.error("Failed to get embedding from Mistral.", error.message);
-      return [];
-    }
+    const result = await this.embedChunks(
+      Array.isArray(textInput) ? textInput : [textInput]
+    );
+    return result?.[0] || [];
   }
 
   async embedChunks(textChunks = []) {
@@ -30,10 +24,13 @@ class MistralEmbedder {
         model: this.model,
         input: textChunks,
       });
-      return response?.data?.map((emb) => emb.embedding) || [];
+      const embeddings = response?.data?.map((emb) => emb.embedding) || [];
+      if (embeddings.length === 0)
+        throw new Error("Mistral returned empty embeddings for batch");
+      return embeddings;
     } catch (error) {
       console.error("Failed to get embeddings from Mistral.", error.message);
-      return new Array(textChunks.length).fill([]);
+      throw new Error(`Mistral Failed to embed: ${error.message}`);
     }
   }
 }