// src/lib/providers/index.ts — registry of chat/embedding model providers for Perplexica.
import { Embeddings } from '@langchain/core/embeddings';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { ChatOpenAI } from '@langchain/openai';
import {
  getCustomOpenaiApiKey,
  getCustomOpenaiApiUrl,
  getCustomOpenaiModelName,
} from '../config';
import { loadAnthropicChatModels } from './anthropic';
import { loadDeepseekChatModels } from './deepseek';
import { loadGeminiChatModels, loadGeminiEmbeddingModels } from './gemini';
import { loadGroqChatModels } from './groq';
import {
  loadLMStudioChatModels,
  loadLMStudioEmbeddingsModels,
} from './lmstudio';
import { loadOllamaChatModels, loadOllamaEmbeddingModels } from './ollama';
import { loadOpenAIChatModels, loadOpenAIEmbeddingModels } from './openai';
import { loadTransformersEmbeddingsModels } from './transformers';
/** A loadable chat model paired with the human-readable name shown in the UI. */
export interface ChatModel {
  displayName: string;
  model: BaseChatModel;
}
/** A loadable embedding model paired with the human-readable name shown in the UI. */
export interface EmbeddingModel {
  displayName: string;
  model: Embeddings;
}
export const chatModelProviders: Record<
string,
() => Promise<Record<string, ChatModel>>
> = {
openai: loadOpenAIChatModels,
ollama: loadOllamaChatModels,
2025-03-19 16:23:27 +05:30
groq: loadGroqChatModels,
2024-07-15 21:20:16 +05:30
anthropic: loadAnthropicChatModels,
2024-11-28 20:47:18 +05:30
gemini: loadGeminiChatModels,
2025-04-06 13:37:43 +05:30
deepseek: loadDeepseekChatModels,
lmstudio: loadLMStudioChatModels,
};
export const embeddingModelProviders: Record<
string,
() => Promise<Record<string, EmbeddingModel>>
> = {
openai: loadOpenAIEmbeddingModels,
ollama: loadOllamaEmbeddingModels,
gemini: loadGeminiEmbeddingModels,
2025-03-20 11:48:26 +05:30
transformers: loadTransformersEmbeddingsModels,
lmstudio: loadLMStudioEmbeddingsModels,
};
export const getAvailableChatModelProviders = async () => {
2025-03-19 16:23:27 +05:30
const models: Record<string, Record<string, ChatModel>> = {};
for (const provider in chatModelProviders) {
2024-07-08 15:39:27 +05:30
const providerModels = await chatModelProviders[provider]();
if (Object.keys(providerModels).length > 0) {
2024-07-15 21:20:16 +05:30
models[provider] = providerModels;
2024-07-08 15:39:27 +05:30
}
}
2025-02-15 11:29:08 +05:30
const customOpenAiApiKey = getCustomOpenaiApiKey();
const customOpenAiApiUrl = getCustomOpenaiApiUrl();
const customOpenAiModelName = getCustomOpenaiModelName();
models['custom_openai'] = {
...(customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName
? {
[customOpenAiModelName]: {
displayName: customOpenAiModelName,
model: new ChatOpenAI({
openAIApiKey: customOpenAiApiKey,
modelName: customOpenAiModelName,
temperature: 0.7,
configuration: {
baseURL: customOpenAiApiUrl,
},
2025-03-19 16:23:27 +05:30
}) as unknown as BaseChatModel,
2025-02-15 11:29:08 +05:30
},
}
: {}),
};
2024-07-08 15:24:45 +05:30
return models;
};
export const getAvailableEmbeddingModelProviders = async () => {
2025-03-19 16:23:27 +05:30
const models: Record<string, Record<string, EmbeddingModel>> = {};
for (const provider in embeddingModelProviders) {
2024-07-08 15:39:27 +05:30
const providerModels = await embeddingModelProviders[provider]();
if (Object.keys(providerModels).length > 0) {
2024-07-15 21:20:16 +05:30
models[provider] = providerModels;
2024-07-08 15:39:27 +05:30
}
}
return models;
};