Persist Ollama context preferences in LC tools (#4908)
* Persist Ollama context preferences in LC tools
* Remove comment
This commit is contained in:
parent
cd5530de39
commit
64bff91998
@ -28,6 +28,7 @@ const {
|
|||||||
const {
|
const {
|
||||||
createBedrockChatClient,
|
createBedrockChatClient,
|
||||||
} = require("../../../AiProviders/bedrock/utils");
|
} = require("../../../AiProviders/bedrock/utils");
|
||||||
|
const { OllamaAILLM } = require("../../../AiProviders/ollama");
|
||||||
|
|
||||||
const DEFAULT_WORKSPACE_PROMPT =
|
const DEFAULT_WORKSPACE_PROMPT =
|
||||||
"You are a helpful ai assistant who can assist the user and use tools available to help answer the users prompts and questions.";
|
"You are a helpful ai assistant who can assist the user and use tools available to help answer the users prompts and questions.";
|
||||||
@ -255,10 +256,7 @@ class Provider {
|
|||||||
// ...config,
|
// ...config,
|
||||||
// });
|
// });
|
||||||
case "ollama":
|
case "ollama":
|
||||||
return new ChatOllama({
|
return OllamaLangchainChatModel.create(config);
|
||||||
baseUrl: process.env.OLLAMA_BASE_PATH,
|
|
||||||
...config,
|
|
||||||
});
|
|
||||||
case "lmstudio":
|
case "lmstudio":
|
||||||
return new ChatOpenAI({
|
return new ChatOpenAI({
|
||||||
configuration: {
|
configuration: {
|
||||||
@ -450,4 +448,34 @@ class Provider {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Langchain Wrappers

/**
 * Ollama Langchain Chat Model that supports passing in context window options
 * so that context window preferences are respected between Ollama chat/agent and in
 * Langchain tooling.
 */
class OllamaLangchainChatModel {
  /**
   * Build a ChatOllama instance pointed at the configured Ollama server.
   * Context-window options are spread first so that any explicit key in
   * `config` (e.g. a caller-supplied `num_ctx`) still wins via the later spread.
   * @param {object} [config] - ChatOllama options; `model` (when present) is
   *   used to look up the prompt window limit outside "base" performance mode.
   * @returns {ChatOllama} configured Langchain chat model instance.
   */
  static create(config = {}) {
    return new ChatOllama({
      baseUrl: process.env.OLLAMA_BASE_PATH,
      ...this.queryOptions(config),
      ...config,
    });
  }

  /**
   * Current Ollama performance mode; falls back to "base" when the
   * OLLAMA_PERFORMANCE_MODE env var is unset or empty (|| is intentional —
   * an empty string should also fall back).
   * @returns {string} performance mode identifier.
   */
  static performanceMode() {
    return process.env.OLLAMA_PERFORMANCE_MODE || "base";
  }

  /**
   * Extra query options derived from the performance mode. In "base" mode no
   * options are added (Ollama server defaults apply); otherwise `num_ctx` is
   * set to the model's prompt window limit so Langchain/agent calls respect
   * the same context window as regular Ollama chat.
   * @param {object} [config] - options object whose `model` key, when set,
   *   takes precedence over the OLLAMA_MODEL_PREF env var.
   * @returns {object} either `{}` or `{ num_ctx }`.
   */
  static queryOptions(config = {}) {
    const model = config?.model || process.env.OLLAMA_MODEL_PREF;
    return {
      ...(this.performanceMode() === "base"
        ? {}
        : { num_ctx: OllamaAILLM.promptWindowLimit(model) }),
    };
  }
}
|
||||||
|
|
||||||
// CommonJS export: only the Provider class is public; the Langchain wrapper
// classes defined above remain module-private.
module.exports = Provider;
||||||
|
|||||||
Loading…
Reference in New Issue
Block a user