* Add automatic chat mode with native tool calling support
Introduces a new automatic chat mode (now the default) that automatically invokes tools when the provider supports native tool calling. Conditionally shows/hides the @agent command based on whether native tooling is available.
- Add supportsNativeToolCalling() to AI providers (OpenAI, Anthropic, Azure always support; others opt-in via ENV)
- Update all locale translations with new mode descriptions
- Enhance translator to preserve Trans component tags
- Remove deprecated ability tags UI
* rebase translations
* WIP on image attachments. Supports initial image attachment + subsequent attachments
* persist images
* Image attachments and updates for providers
* desktop pre-change
* always show command on failure
* add back gemini streaming detection
* move provider native tooling flag to Provider func
* whoops - forgot to delete
* strip "@agent" from prompts to prevent weird replies
* translations for automatic-mode (#5145)
* translations for automatic-mode
* rebase
* translations
* lint
* fix dead translations
* change default for now to chat mode just for rollout
* remove pfp for workspace
* passthrough workspace for showAgentCommand detection and rendering
* Agent API automatic mode support
* ephemeral attachments passthrough
* support reading of pinned documents in agent context
212 lines
5.5 KiB
JavaScript
const OpenAI = require("openai");
|
|
const Provider = require("./ai-provider.js");
|
|
const InheritMultiple = require("./helpers/classes.js");
|
|
const UnTooled = require("./helpers/untooled.js");
|
|
const { tooledStream, tooledComplete } = require("./helpers/tooled.js");
|
|
const { RetryError } = require("../error.js");
|
|
const { toValidNumber } = require("../../../http/index.js");
|
|
|
|
/**
 * Agent AI provider for DeepSeek's OpenAI-compatible chat API.
 *
 * Supports both native tool calling (via the shared tooled helpers) and the
 * UnTooled fallback path. DeepSeek has no vision support, so image
 * attachments are stripped from every message before dispatch.
 */
class DeepSeekProvider extends InheritMultiple([Provider, UnTooled]) {
  model;

  /**
   * @param {Object} [config]
   * @param {string} [config.model="deepseek-chat"] - DeepSeek model identifier.
   */
  constructor(config = {}) {
    super();
    const { model = "deepseek-chat" } = config;
    const client = new OpenAI({
      baseURL: "https://api.deepseek.com/v1",
      apiKey: process.env.DEEPSEEK_API_KEY ?? null,
      maxRetries: 3,
    });

    this._client = client;
    this.model = model;
    this.verbose = true;
    // Fall back to 1024 tokens when DEEPSEEK_MAX_TOKENS is unset; toValidNumber
    // also falls back to 1024 when the env value is not a valid number.
    this.maxTokens = process.env.DEEPSEEK_MAX_TOKENS
      ? toValidNumber(process.env.DEEPSEEK_MAX_TOKENS, 1024)
      : 1024;
  }

  /** @returns {OpenAI} The underlying OpenAI-compatible API client. */
  get client() {
    return this._client;
  }

  /** @returns {boolean} DeepSeek supports streamed agent responses. */
  get supportsAgentStreaming() {
    return true;
  }

  /**
   * All current DeepSeek models (deepseek-chat and deepseek-reasoner)
   * support native OpenAI-compatible tool calling.
   * @returns {boolean}
   */
  supportsNativeToolCalling() {
    return true;
  }

  /**
   * DeepSeek models do not support vision/image inputs.
   * Strip attachments from messages to prevent API errors.
   * @param {Object} message - Message with potential attachments
   * @returns {Object} Message without attachments
   */
  formatMessageWithAttachments(message) {
    const { attachments: _, ...rest } = message;
    return rest;
  }

  /** @returns {boolean} True when the configured model emits reasoning content. */
  get #isThinkingModel() {
    return this.model === "deepseek-reasoner";
  }

  /**
   * Options forwarded to the shared tooled helpers. Reasoning models need
   * injectReasoningContent so their thinking tokens are surfaced.
   * @returns {Object}
   */
  get #tooledOptions() {
    return {
      provider: this,
      ...(this.#isThinkingModel ? { injectReasoningContent: true } : {}),
    };
  }

  /**
   * Translate an OpenAI SDK error into this provider's error contract:
   * auth failures propagate untouched, transient API failures become
   * RetryError, anything else is rethrown as-is.
   * @param {Error} error
   * @returns {never}
   * @throws {Error|RetryError}
   */
  #rethrowTranslated(error) {
    if (error instanceof OpenAI.AuthenticationError) throw error;
    if (
      error instanceof OpenAI.RateLimitError ||
      error instanceof OpenAI.InternalServerError ||
      error instanceof OpenAI.APIError
    ) {
      throw new RetryError(error.message);
    }
    throw error;
  }

  /**
   * Non-streamed completion used by the UnTooled fallback path.
   * Best-effort by design: failures are logged and resolve to null so the
   * UnTooled flow can degrade gracefully instead of crashing the agent.
   * @param {{messages?: Array}} param0
   * @returns {Promise<string|null>} Assistant message content, or null on failure.
   */
  async #handleFunctionCallChat({ messages = [] }) {
    return await this.client.chat.completions
      .create({
        model: this.model,
        messages,
        max_tokens: this.maxTokens,
      })
      .then((result) => {
        if (!Object.hasOwn(result, "choices"))
          throw new Error("DeepSeek chat: No results!");
        if (result.choices.length === 0)
          throw new Error("DeepSeek chat: No results length!");
        return result.choices[0].message.content;
      })
      .catch((error) => {
        // Keep the best-effort null return, but never swallow silently.
        console.error("DeepSeek chat failed:", error.message);
        return null;
      });
  }

  /**
   * Streamed completion used by the UnTooled fallback path.
   * @param {{messages?: Array}} param0
   * @returns {Promise<AsyncIterable>} The raw completion stream.
   */
  async #handleFunctionCallStream({ messages = [] }) {
    return await this.client.chat.completions.create({
      model: this.model,
      stream: true,
      messages,
    });
  }

  /**
   * Strip attachments from all messages since DeepSeek doesn't support vision.
   * @param {Array} messages - Array of messages
   * @returns {Array} Messages with attachments removed
   */
  #stripAttachments(messages) {
    let hasAttachments = false;
    const stripped = messages.map((msg) => {
      if (msg.attachments && msg.attachments.length > 0) {
        hasAttachments = true;
        const { attachments: _, ...rest } = msg;
        return rest;
      }
      return msg;
    });
    if (hasAttachments) {
      this.providerLog(
        "DeepSeek does not support vision - stripped image attachments from messages."
      );
    }
    return stripped;
  }

  /**
   * Stream a chat completion, using native tool calling when functions are
   * provided, otherwise the UnTooled fallback.
   * @param {Array} messages - Chat history.
   * @param {Array} [functions] - Tool definitions; empty disables native tooling.
   * @param {Function|null} [eventHandler] - Stream event callback.
   * @returns {Promise<*>} Stream result from the tooled or UnTooled path.
   * @throws {RetryError} On transient API failures (rate limit, 5xx).
   */
  async stream(messages, functions = [], eventHandler = null) {
    const useNative = functions.length > 0 && this.supportsNativeToolCalling();
    const cleanedMessages = this.#stripAttachments(messages);

    if (!useNative) {
      return await UnTooled.prototype.stream.call(
        this,
        cleanedMessages,
        functions,
        this.#handleFunctionCallStream.bind(this),
        eventHandler
      );
    }

    this.providerLog(
      "Provider.stream (tooled) - will process this chat completion."
    );

    try {
      return await tooledStream(
        this.client,
        this.model,
        cleanedMessages,
        functions,
        eventHandler,
        this.#tooledOptions
      );
    } catch (error) {
      console.error(error.message, error);
      this.#rethrowTranslated(error);
    }
  }

  /**
   * Run a (non-streamed) chat completion, using native tool calling when
   * functions are provided, otherwise the UnTooled fallback.
   * @param {Array} messages - Chat history.
   * @param {Array} [functions] - Tool definitions; empty disables native tooling.
   * @returns {Promise<*>} Completion result from the tooled or UnTooled path.
   * @throws {RetryError} On transient API failures (rate limit, 5xx).
   */
  async complete(messages, functions = []) {
    const useNative = functions.length > 0 && this.supportsNativeToolCalling();
    const cleanedMessages = this.#stripAttachments(messages);

    if (!useNative) {
      return await UnTooled.prototype.complete.call(
        this,
        cleanedMessages,
        functions,
        this.#handleFunctionCallChat.bind(this)
      );
    }

    try {
      const result = await tooledComplete(
        this.client,
        this.model,
        cleanedMessages,
        functions,
        this.getCost.bind(this),
        this.#tooledOptions
      );

      // NOTE(review): recursion has no depth cap — presumably tooledComplete
      // only requests a retry a bounded number of times; confirm upstream.
      if (result.retryWithError) {
        return this.complete([...messages, result.retryWithError], functions);
      }

      return result;
    } catch (error) {
      this.#rethrowTranslated(error);
    }
  }

  /**
   * Get the cost of the completion.
   *
   * @param _usage The completion to get the cost for.
   * @returns The cost of the completion.
   */
  getCost(_usage) {
    return 0;
  }
}
|
|
|
|
// Export the provider class for registration by the agent provider factory.
module.exports = DeepSeekProvider;
|