feat(providers): add OpenRouter support

This PR is adapted from namanbnsl's original
at https://github.com/namanbnsl/Perplexica/tree/add-openrouter-support

Added minor modifications to bring the OpenRouter support in line
with the changes in this forked repo.
This commit is contained in:
kmac 2025-05-31 21:34:23 -04:00
parent 6422c5b7e0
commit 94315c169a
7 changed files with 108 additions and 0 deletions

View file

@ -79,6 +79,7 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
- `OPENAI`: Your OpenAI API key. **You only need to fill this if you wish to use OpenAI's models**. - `OPENAI`: Your OpenAI API key. **You only need to fill this if you wish to use OpenAI's models**.
- `OLLAMA`: Your Ollama API URL. You should enter it as `http://host.docker.internal:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://host.docker.internal:11434`. For other ports, adjust accordingly. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**. - `OLLAMA`: Your Ollama API URL. You should enter it as `http://host.docker.internal:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://host.docker.internal:11434`. For other ports, adjust accordingly. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**.
- `GROQ`: Your Groq API key. **You only need to fill this if you wish to use Groq's hosted models**. - `GROQ`: Your Groq API key. **You only need to fill this if you wish to use Groq's hosted models**.
- `OPENROUTER`: Your OpenRouter API key. **You only need to fill this if you wish to use models via OpenRouter**.
- `ANTHROPIC`: Your Anthropic API key. **You only need to fill this if you wish to use Anthropic models**. - `ANTHROPIC`: Your Anthropic API key. **You only need to fill this if you wish to use Anthropic models**.
**Note**: You can change these after starting Perplexica from the settings dialog. **Note**: You can change these after starting Perplexica from the settings dialog.

View file

@ -9,6 +9,9 @@ API_KEY = ""
[MODELS.GROQ] [MODELS.GROQ]
API_KEY = "" API_KEY = ""
[MODELS.OPENROUTER]
API_KEY = ""
[MODELS.ANTHROPIC] [MODELS.ANTHROPIC]
API_KEY = "" API_KEY = ""

View file

@ -8,6 +8,7 @@ import {
getGroqApiKey, getGroqApiKey,
getOllamaApiEndpoint, getOllamaApiEndpoint,
getOpenaiApiKey, getOpenaiApiKey,
getOpenrouterApiKey,
getDeepseekApiKey, getDeepseekApiKey,
getLMStudioApiEndpoint, getLMStudioApiEndpoint,
updateConfig, updateConfig,
@ -62,6 +63,7 @@ export const GET = async (req: Request) => {
config['anthropicApiKey'] = protectApiKey(getAnthropicApiKey()); config['anthropicApiKey'] = protectApiKey(getAnthropicApiKey());
config['geminiApiKey'] = protectApiKey(getGeminiApiKey()); config['geminiApiKey'] = protectApiKey(getGeminiApiKey());
config['deepseekApiKey'] = protectApiKey(getDeepseekApiKey()); config['deepseekApiKey'] = protectApiKey(getDeepseekApiKey());
config['openrouterApiKey'] = protectApiKey(getOpenrouterApiKey());
config['customOpenaiApiKey'] = protectApiKey(getCustomOpenaiApiKey()); config['customOpenaiApiKey'] = protectApiKey(getCustomOpenaiApiKey());
// Non-sensitive values remain unchanged // Non-sensitive values remain unchanged
@ -130,6 +132,12 @@ export const POST = async (req: Request) => {
LM_STUDIO: { LM_STUDIO: {
API_URL: config.lmStudioApiUrl, API_URL: config.lmStudioApiUrl,
}, },
OPENROUTER: {
API_KEY: getUpdatedProtectedValue(
config.openrouterApiKey,
getOpenrouterApiKey(),
),
},
CUSTOM_OPENAI: { CUSTOM_OPENAI: {
API_URL: config.customOpenaiApiUrl, API_URL: config.customOpenaiApiUrl,
API_KEY: getUpdatedProtectedValue( API_KEY: getUpdatedProtectedValue(

View file

@ -28,6 +28,7 @@ interface SettingsType {
}; };
openaiApiKey: string; openaiApiKey: string;
groqApiKey: string; groqApiKey: string;
openrouterApiKey: string;
anthropicApiKey: string; anthropicApiKey: string;
geminiApiKey: string; geminiApiKey: string;
ollamaApiUrl: string; ollamaApiUrl: string;
@ -1352,6 +1353,25 @@ export default function SettingsPage() {
/> />
</div> </div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
OpenRouter API Key
</p>
<InputComponent
type="password"
placeholder="OpenRouter API Key"
value={config.openrouterApiKey}
isSaving={savingStates['openrouterApiKey']}
onChange={(e) => {
setConfig((prev) => ({
...prev!,
openrouterApiKey: e.target.value,
}));
}}
onSave={(value) => saveConfig('openrouterApiKey', value)}
/>
</div>
<div className="flex flex-col space-y-1"> <div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm"> <p className="text-black/70 dark:text-white/70 text-sm">
Anthropic API Key Anthropic API Key

View file

@ -39,6 +39,9 @@ interface Config {
LM_STUDIO: { LM_STUDIO: {
API_URL: string; API_URL: string;
}; };
OPENROUTER: {
API_KEY: string;
};
CUSTOM_OPENAI: { CUSTOM_OPENAI: {
API_URL: string; API_URL: string;
API_KEY: string; API_KEY: string;
@ -77,6 +80,8 @@ export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;
export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY; export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;
export const getOpenrouterApiKey = () => loadConfig().MODELS.OPENROUTER.API_KEY;
export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY; export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY;
export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY; export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY;

View file

@ -40,6 +40,10 @@ import {
loadLMStudioEmbeddingsModels, loadLMStudioEmbeddingsModels,
PROVIDER_INFO as LMStudioInfo, PROVIDER_INFO as LMStudioInfo,
} from './lmstudio'; } from './lmstudio';
import {
loadOpenrouterChatModels,
PROVIDER_INFO as OpenRouterInfo,
} from './openrouter';
export const PROVIDER_METADATA = { export const PROVIDER_METADATA = {
openai: OpenAIInfo, openai: OpenAIInfo,
@ -50,6 +54,7 @@ export const PROVIDER_METADATA = {
transformers: TransformersInfo, transformers: TransformersInfo,
deepseek: DeepseekInfo, deepseek: DeepseekInfo,
lmstudio: LMStudioInfo, lmstudio: LMStudioInfo,
openrouter: OpenRouterInfo,
custom_openai: { custom_openai: {
key: 'custom_openai', key: 'custom_openai',
displayName: 'Custom OpenAI', displayName: 'Custom OpenAI',
@ -77,6 +82,7 @@ export const chatModelProviders: Record<
gemini: loadGeminiChatModels, gemini: loadGeminiChatModels,
deepseek: loadDeepseekChatModels, deepseek: loadDeepseekChatModels,
lmstudio: loadLMStudioChatModels, lmstudio: loadLMStudioChatModels,
openrouter: loadOpenrouterChatModels,
}; };
export const embeddingModelProviders: Record< export const embeddingModelProviders: Record<

View file

@ -0,0 +1,65 @@
// Provider metadata consumed by the provider registry: `key` is the
// internal provider id, `displayName` is the label shown in the UI.
export const PROVIDER_INFO = {
key: 'openrouter',
displayName: 'OpenRouter',
};
import { ChatOpenAI } from '@langchain/openai';
import { getOpenrouterApiKey } from '../config';
import { ChatModel } from '.';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
// Module-level cache of the OpenRouter model catalog. Each entry is
// { displayName, key } where `key` is the OpenRouter model id.
let openrouterChatModels: Record<string, string>[] = [];

/**
 * Fetches the public OpenRouter model catalog and caches it in
 * `openrouterChatModels`.
 *
 * Failures (network error, non-2xx status, unexpected payload shape) are
 * logged and leave the cache unchanged, so a previously fetched list is
 * not clobbered by a transient error.
 */
async function fetchModelList(): Promise<void> {
  try {
    const response = await fetch('https://openrouter.ai/api/v1/models', {
      method: 'GET',
      headers: {
        'Content-Type': 'application/json',
      },
    });

    if (!response.ok) {
      throw new Error(`API request failed with status: ${response.status}`);
    }

    const data = await response.json();

    // Guard against an unexpected payload shape so we fail with a clear
    // message instead of an opaque TypeError from `.map` on undefined.
    if (!Array.isArray(data?.data)) {
      throw new Error('Unexpected response shape: missing "data" array');
    }

    openrouterChatModels = data.data.map((model: any) => ({
      displayName: model.name,
      key: model.id,
    }));
  } catch (error) {
    console.error('Error fetching models:', error);
  }
}
export const loadOpenrouterChatModels = async () => {
await fetchModelList();
const openrouterApikey = getOpenrouterApiKey();
if (!openrouterApikey) return {};
try {
const chatModels: Record<string, ChatModel> = {};
openrouterChatModels.forEach((model) => {
chatModels[model.key] = {
displayName: model.displayName,
model: new ChatOpenAI({
openAIApiKey: openrouterApikey,
modelName: model.key,
temperature: 0.7,
configuration: {
baseURL: 'https://openrouter.ai/api/v1',
},
}) as unknown as BaseChatModel,
};
});
return chatModels;
} catch (err) {
console.error(`Error loading Openrouter models: ${err}`);
return {};
}
};