merlyn/server/utils/AiProviders/modelMap/legacy.js
方程 90e474abcb
Support Gitee AI(LLM Provider) (#3361)
* Support Gitee AI(LLM Provider)

* refactor(server): rework the GiteeAI model context-window limit handling; hard-code the window limits for now, with a plan to source them from an external API with caching

* updates for Gitee AI

* use legacy lookup since gitee does not enable getting token context windows

* add more missing records

* reorder imports

---------

Co-authored-by: 方程 <fangcheng@oschina.cn>
Co-authored-by: timothycarambat <rambat1010@gmail.com>
2025-11-25 14:19:32 -08:00

147 lines
4.5 KiB
JavaScript

/**
 * Static fallback table of model context-window sizes (max tokens),
 * keyed first by provider id and then by model id.
 *
 * Used for providers/models where the context window cannot be fetched
 * from the provider's API at runtime (e.g. Gitee AI), so the value has
 * to be looked up from this hard-coded map instead.
 */
const LEGACY_MODEL_MAP = {
  // Anthropic Claude family.
  anthropic: {
    "claude-instant-1.2": 100000,
    "claude-2.0": 100000,
    "claude-2.1": 200000,
    "claude-3-haiku-20240307": 200000,
    "claude-3-sonnet-20240229": 200000,
    "claude-3-opus-20240229": 200000,
    "claude-3-opus-latest": 200000,
    "claude-3-5-haiku-latest": 200000,
    "claude-3-5-haiku-20241022": 200000,
    "claude-3-5-sonnet-latest": 200000,
    "claude-3-5-sonnet-20241022": 200000,
    "claude-3-5-sonnet-20240620": 200000,
    "claude-3-7-sonnet-20250219": 200000,
    "claude-3-7-sonnet-latest": 200000,
  },
  // Cohere Command / Aya families.
  cohere: {
    "command-r": 128000,
    "command-r-plus": 128000,
    "command": 4096,
    "command-light": 4096,
    "command-nightly": 8192,
    "command-light-nightly": 8192,
    "command-r-plus-08-2024": 132096,
    "command-a-03-2025": 288000,
    "c4ai-aya-vision-32b": 16384,
    "command-a-reasoning-08-2025": 288768,
    "command-r-08-2024": 132096,
    "c4ai-aya-vision-8b": 16384,
    "command-r7b-12-2024": 132000,
    "command-r7b-arabic-02-2025": 128000,
    "command-a-vision-07-2025": 128000,
    "c4ai-aya-expanse-8b": 8192,
    "c4ai-aya-expanse-32b": 128000,
    "command-a-translate-08-2025": 8992,
  },
  // Google Gemini / Gemma families (includes experimental and preview ids).
  gemini: {
    "gemini-1.5-pro-001": 2000000,
    "gemini-1.5-pro-002": 2000000,
    "gemini-1.5-pro": 2000000,
    "gemini-1.5-flash-001": 1000000,
    "gemini-1.5-flash": 1000000,
    "gemini-1.5-flash-002": 1000000,
    "gemini-1.5-flash-8b": 1000000,
    "gemini-1.5-flash-8b-001": 1000000,
    "gemini-2.0-flash": 1048576,
    "gemini-2.0-flash-001": 1048576,
    "gemini-2.0-flash-lite-001": 1048576,
    "gemini-2.0-flash-lite": 1048576,
    "gemini-1.5-pro-latest": 2000000,
    "gemini-1.5-flash-latest": 1000000,
    "gemini-1.5-flash-8b-latest": 1000000,
    "gemini-1.5-flash-8b-exp-0827": 1000000,
    "gemini-1.5-flash-8b-exp-0924": 1000000,
    "gemini-2.5-pro-exp-03-25": 1048576,
    "gemini-2.5-pro-preview-03-25": 1048576,
    "gemini-2.0-flash-exp": 1048576,
    "gemini-2.0-flash-exp-image-generation": 1048576,
    "gemini-2.0-flash-lite-preview-02-05": 1048576,
    "gemini-2.0-flash-lite-preview": 1048576,
    "gemini-2.0-pro-exp": 1048576,
    "gemini-2.0-pro-exp-02-05": 1048576,
    "gemini-exp-1206": 1048576,
    "gemini-2.0-flash-thinking-exp-01-21": 1048576,
    "gemini-2.0-flash-thinking-exp": 1048576,
    "gemini-2.0-flash-thinking-exp-1219": 1048576,
    "learnlm-1.5-pro-experimental": 32767,
    "gemma-3-1b-it": 32768,
    "gemma-3-4b-it": 32768,
    "gemma-3-12b-it": 32768,
    "gemma-3-27b-it": 131072,
  },
  // Groq-hosted open models.
  groq: {
    "gemma2-9b-it": 8192,
    "gemma-7b-it": 8192,
    "llama3-70b-8192": 8192,
    "llama3-8b-8192": 8192,
    "llama-3.1-70b-versatile": 8000,
    "llama-3.1-8b-instant": 8000,
    "mixtral-8x7b-32768": 32768,
  },
  // OpenAI GPT / o-series families.
  openai: {
    "gpt-3.5-turbo": 16385,
    "gpt-3.5-turbo-1106": 16385,
    "gpt-4o": 128000,
    "gpt-4o-2024-08-06": 128000,
    "gpt-4o-2024-05-13": 128000,
    "gpt-4o-mini": 128000,
    "gpt-4o-mini-2024-07-18": 128000,
    "gpt-4-turbo": 128000,
    "gpt-4-1106-preview": 128000,
    "gpt-4-turbo-preview": 128000,
    "gpt-4": 8192,
    "gpt-4-32k": 32000,
    "gpt-4.1": 1047576,
    "gpt-4.1-2025-04-14": 1047576,
    "gpt-4.1-mini": 1047576,
    "gpt-4.1-mini-2025-04-14": 1047576,
    "gpt-4.1-nano": 1047576,
    "gpt-4.1-nano-2025-04-14": 1047576,
    "gpt-4.5-preview": 128000,
    "gpt-4.5-preview-2025-02-27": 128000,
    "o1-preview": 128000,
    "o1-preview-2024-09-12": 128000,
    "o1-mini": 128000,
    "o1-mini-2024-09-12": 128000,
    "o1": 200000,
    "o1-2024-12-17": 200000,
    "o1-pro": 200000,
    "o1-pro-2025-03-19": 200000,
    "o3-mini": 200000,
    "o3-mini-2025-01-31": 200000,
  },
  // DeepSeek first-party API models.
  deepseek: {
    "deepseek-chat": 128000,
    "deepseek-coder": 128000,
    "deepseek-reasoner": 128000,
  },
  // xAI Grok.
  xai: {
    "grok-beta": 131072,
  },
  // Gitee AI — the provider exposes no API for context windows, so these
  // limits are maintained by hand (see commit history for this file).
  giteeai: {
    "Qwen2.5-72B-Instruct": 16384,
    "Qwen2.5-14B-Instruct": 24576,
    "Qwen2-7B-Instruct": 24576,
    "Qwen2.5-32B-Instruct": 32768,
    "Qwen2-72B-Instruct": 32768,
    "Qwen2-VL-72B": 32768,
    "QwQ-32B-Preview": 32768,
    "Yi-34B-Chat": 4096,
    "glm-4-9b-chat": 32768,
    "deepseek-coder-33B-instruct": 8192,
    "codegeex4-all-9b": 32768,
    "InternVL2-8B": 32768,
    "InternVL2.5-26B": 32768,
    "InternVL2.5-78B": 32768,
    "DeepSeek-R1-Distill-Qwen-32B": 32768,
    "DeepSeek-R1-Distill-Qwen-1.5B": 32768,
    "DeepSeek-R1-Distill-Qwen-14B": 32768,
    "DeepSeek-R1-Distill-Qwen-7B": 32768,
    "DeepSeek-V3": 32768,
    "DeepSeek-R1": 32768,
  },
};
// CommonJS export: the whole lookup table is the module's value.
module.exports = LEGACY_MODEL_MAP;