This commit is contained in:
Christian Hapke 2025-07-22 18:28:11 +02:00 committed by GitHub
commit 30c6f8aa85
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 31 additions and 2 deletions

View file

@@ -21,6 +21,8 @@ MODEL_NAME = ""
[MODELS.OLLAMA] [MODELS.OLLAMA]
API_URL = "" # Ollama API URL - http://host.docker.internal:11434 API_URL = "" # Ollama API URL - http://host.docker.internal:11434
API_KEY = ""
MODEL_NAME = ""
[MODELS.DEEPSEEK] [MODELS.DEEPSEEK]
API_KEY = "" API_KEY = ""

View file

@@ -31,6 +31,8 @@ interface Config {
}; };
OLLAMA: { OLLAMA: {
API_URL: string; API_URL: string;
API_KEY: string;
MODEL_NAME: string;
}; };
DEEPSEEK: { DEEPSEEK: {
API_KEY: string; API_KEY: string;
@@ -85,6 +87,8 @@ export const getSearxngApiEndpoint = () =>
process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG; process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;
export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL; export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
/** Returns the Ollama API key as configured under MODELS.OLLAMA.API_KEY. */
export const getOllamaApiKey = () => {
  const config = loadConfig();
  return config.MODELS.OLLAMA.API_KEY;
};
/** Returns the Ollama model-name filter as configured under MODELS.OLLAMA.MODEL_NAME. */
export const getOllamaModelName = () => {
  const config = loadConfig();
  return config.MODELS.OLLAMA.MODEL_NAME;
};
export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY; export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;

View file

@@ -112,7 +112,8 @@ export const getAvailableChatModelProviders = async () => {
const customOpenAiApiUrl = getCustomOpenaiApiUrl(); const customOpenAiApiUrl = getCustomOpenaiApiUrl();
const customOpenAiModelName = getCustomOpenaiModelName(); const customOpenAiModelName = getCustomOpenaiModelName();
models['custom_openai'] = { if (customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName) {
models['custom_openai'] = {
...(customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName ...(customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName
? { ? {
[customOpenAiModelName]: { [customOpenAiModelName]: {
@@ -128,6 +129,7 @@ export const getAvailableChatModelProviders = async () => {
}, },
} }
: {}), : {}),
};
}; };
return models; return models;

View file

@@ -1,5 +1,5 @@
import axios from 'axios'; import axios from 'axios';
import { getKeepAlive, getOllamaApiEndpoint } from '../config'; import { getKeepAlive, getOllamaApiEndpoint, getOllamaApiKey, getOllamaModelName } from '../config';
import { ChatModel, EmbeddingModel } from '.'; import { ChatModel, EmbeddingModel } from '.';
export const PROVIDER_INFO = { export const PROVIDER_INFO = {
@@ -8,6 +8,20 @@ export const PROVIDER_INFO = {
}; };
import { ChatOllama } from '@langchain/community/chat_models/ollama'; import { ChatOllama } from '@langchain/community/chat_models/ollama';
import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama'; import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
import { get } from 'http';
/**
 * Builds the extra HTTP headers used to authenticate against the Ollama API.
 *
 * Resolution order (unchanged from before): the OLLAMA_API_KEY environment
 * variable wins; otherwise the config-file key (MODELS.OLLAMA.API_KEY) is
 * used. If neither is set, no Authorization header is emitted.
 *
 * @returns a header map containing at most an `Authorization: Bearer …` entry.
 */
const getOllamaHttpHeaders = (): Record<string, string> => {
  const headers: Record<string, string> = {};
  // Read the key once: the original called getOllamaApiKey() twice (two
  // config loads) and assigned the header twice when both sources were set.
  const apiKey = process.env.OLLAMA_API_KEY || getOllamaApiKey();
  if (apiKey) {
    headers['Authorization'] = `Bearer ${apiKey}`;
  }
  return headers;
};
export const loadOllamaChatModels = async () => { export const loadOllamaChatModels = async () => {
const ollamaApiEndpoint = getOllamaApiEndpoint(); const ollamaApiEndpoint = getOllamaApiEndpoint();
@@ -18,6 +32,7 @@ export const loadOllamaChatModels = async () => {
const res = await axios.get(`${ollamaApiEndpoint}/api/tags`, { const res = await axios.get(`${ollamaApiEndpoint}/api/tags`, {
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
...getOllamaHttpHeaders(),
}, },
}); });
@@ -26,6 +41,9 @@ export const loadOllamaChatModels = async () => {
const chatModels: Record<string, ChatModel> = {}; const chatModels: Record<string, ChatModel> = {};
models.forEach((model: any) => { models.forEach((model: any) => {
if (getOllamaModelName() && !model.model.startsWith(getOllamaModelName())) {
return; // Skip models that do not match the configured model name
}
chatModels[model.model] = { chatModels[model.model] = {
displayName: model.name, displayName: model.name,
model: new ChatOllama({ model: new ChatOllama({
@@ -33,6 +51,7 @@ export const loadOllamaChatModels = async () => {
model: model.model, model: model.model,
temperature: 0.7, temperature: 0.7,
keepAlive: getKeepAlive(), keepAlive: getKeepAlive(),
headers: getOllamaHttpHeaders(),
}), }),
}; };
}); });
@@ -53,6 +72,7 @@ export const loadOllamaEmbeddingModels = async () => {
const res = await axios.get(`${ollamaApiEndpoint}/api/tags`, { const res = await axios.get(`${ollamaApiEndpoint}/api/tags`, {
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
...getOllamaHttpHeaders(),
}, },
}); });
@@ -66,6 +86,7 @@ export const loadOllamaEmbeddingModels = async () => {
model: new OllamaEmbeddings({ model: new OllamaEmbeddings({
baseUrl: ollamaApiEndpoint, baseUrl: ollamaApiEndpoint,
model: model.model, model: model.model,
headers: getOllamaHttpHeaders(),
}), }),
}; };
}); });