Merge pull request #1 from kmac/add-openrouter-support

feat(providers): added openrouter support
Willie Zutz, 2025-07-16 23:50:38 -06:00 (merge commit created by GitHub)
commit 0d7f83d4fb
7 changed files with 108 additions and 0 deletions


@@ -78,6 +78,7 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
 - `OPENAI`: Your OpenAI API key. **You only need to fill this if you wish to use OpenAI's models**.
 - `OLLAMA`: Your Ollama API URL. You should enter it as `http://host.docker.internal:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://host.docker.internal:11434`. For other ports, adjust accordingly. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**.
 - `GROQ`: Your Groq API key. **You only need to fill this if you wish to use Groq's hosted models**.
+- `OPENROUTER`: Your OpenRouter API key. **You only need to fill this if you wish to use models via OpenRouter**.
 - `ANTHROPIC`: Your Anthropic API key. **You only need to fill this if you wish to use Anthropic models**.
 - `Gemini`: Your Gemini API key. **You only need to fill this if you wish to use Google's models**.
 - `DEEPSEEK`: Your Deepseek API key. **Only needed if you want Deepseek models.**
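A quick way to sanity-check an OpenRouter key before pasting it into the config is a one-off call to OpenRouter's OpenAI-compatible chat endpoint. This is an illustrative standalone script, not part of the commit; the model slug is only an example:

// check-openrouter-key.ts — illustrative only, not part of this commit.
const key = process.env.OPENROUTER_API_KEY;

const res = await fetch('https://openrouter.ai/api/v1/chat/completions', {
  method: 'POST',
  headers: {
    Authorization: `Bearer ${key}`,
    'Content-Type': 'application/json',
  },
  body: JSON.stringify({
    model: 'openai/gpt-4o-mini', // example slug; any model available to the key works
    messages: [{ role: 'user', content: 'ping' }],
    max_tokens: 1,
  }),
});

// A 401 indicates a bad key; a 200 means the key is usable.
console.log(res.ok ? 'key OK' : `request failed: ${res.status}`);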


@@ -10,6 +10,9 @@ API_KEY = ""
 
 [MODELS.GROQ]
 API_KEY = ""
 
+[MODELS.OPENROUTER]
+API_KEY = ""
+
 [MODELS.ANTHROPIC]
 API_KEY = ""


@@ -8,6 +8,7 @@ import {
   getGroqApiKey,
   getOllamaApiEndpoint,
   getOpenaiApiKey,
+  getOpenrouterApiKey,
   getDeepseekApiKey,
   getAimlApiKey,
   getLMStudioApiEndpoint,
@@ -64,6 +65,7 @@ export const GET = async (req: Request) => {
    config['anthropicApiKey'] = protectApiKey(getAnthropicApiKey());
    config['geminiApiKey'] = protectApiKey(getGeminiApiKey());
    config['deepseekApiKey'] = protectApiKey(getDeepseekApiKey());
+   config['openrouterApiKey'] = protectApiKey(getOpenrouterApiKey());
    config['customOpenaiApiKey'] = protectApiKey(getCustomOpenaiApiKey());
    config['aimlApiKey'] = protectApiKey(getAimlApiKey());
@@ -143,6 +145,12 @@ export const POST = async (req: Request) => {
       LM_STUDIO: {
         API_URL: config.lmStudioApiUrl,
       },
+      OPENROUTER: {
+        API_KEY: getUpdatedProtectedValue(
+          config.openrouterApiKey,
+          getOpenrouterApiKey(),
+        ),
+      },
       CUSTOM_OPENAI: {
         API_URL: config.customOpenaiApiUrl,
         API_KEY: getUpdatedProtectedValue(
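protectApiKey and getUpdatedProtectedValue are referenced here but defined elsewhere in the codebase. A minimal sketch of the round-trip they imply — mask keys on the way out, and only overwrite a stored key when the user actually submitted a new value (the names are from the diff; the bodies and placeholder value are assumptions):

// Plausible sketches of the helpers used above; the real implementations
// live elsewhere in the repository and may differ.
const MASK = '********'; // assumed placeholder value

// GET side: never return the raw key to the browser.
function protectApiKey(key: string): string {
  return key ? MASK : '';
}

// POST side: if the submitted value is still the placeholder, keep the
// stored key; otherwise persist what the user entered.
function getUpdatedProtectedValue(submitted: string, stored: string): string {
  return submitted === MASK ? stored : submitted;
}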


@@ -33,6 +33,7 @@ interface SettingsType {
   };
   openaiApiKey: string;
   groqApiKey: string;
+  openrouterApiKey: string;
   anthropicApiKey: string;
   geminiApiKey: string;
   ollamaApiUrl: string;
@@ -1673,6 +1674,25 @@ export default function SettingsPage() {
             />
           </div>
+          <div className="flex flex-col space-y-1">
+            <p className="text-black/70 dark:text-white/70 text-sm">
+              OpenRouter API Key
+            </p>
+            <InputComponent
+              type="password"
+              placeholder="OpenRouter API Key"
+              value={config.openrouterApiKey}
+              isSaving={savingStates['openrouterApiKey']}
+              onChange={(e) => {
+                setConfig((prev) => ({
+                  ...prev!,
+                  openrouterApiKey: e.target.value,
+                }));
+              }}
+              onSave={(value) => saveConfig('openrouterApiKey', value)}
+            />
+          </div>
           <div className="flex flex-col space-y-1">
             <p className="text-black/70 dark:text-white/70 text-sm">
               Anthropic API Key
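The input component's contract can be inferred from this usage; the interface below is reconstructed from the props passed above, not taken from the component's actual definition:

// Prop shape inferred from usage (an assumption, not the component's
// real declaration).
import type { ChangeEvent } from 'react';

interface SettingsInputProps {
  type: string; // 'password' here, so the key is masked while typing
  placeholder: string;
  value: string;
  isSaving: boolean; // drives a per-field saving indicator
  onChange: (e: ChangeEvent<HTMLInputElement>) => void;
  onSave: (value: string) => void; // persists this one field
}

Each field saves independently through saveConfig('openrouterApiKey', value), so saving the OpenRouter key never touches the other providers' settings.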


@@ -43,6 +43,9 @@ interface Config {
   LM_STUDIO: {
     API_URL: string;
   };
+  OPENROUTER: {
+    API_KEY: string;
+  };
   CUSTOM_OPENAI: {
     API_URL: string;
     API_KEY: string;
@@ -115,6 +118,8 @@ export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;
 export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;
+export const getOpenrouterApiKey = () => loadConfig().MODELS.OPENROUTER.API_KEY;
 export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY;
 export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY;
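loadConfig itself is outside this diff. Assuming the TOML layout shown in sample.config.toml above, the getter resolves roughly as follows; the parser choice and file path here are assumptions, not taken from the repository:

// Rough sketch of the config-loading side — not part of this commit.
import fs from 'fs';
import path from 'path';
import toml from '@iarna/toml'; // assumed TOML parser

interface Config {
  MODELS: {
    OPENROUTER: { API_KEY: string };
    // ...other provider sections elided
  };
}

const loadConfig = (): Config =>
  toml.parse(
    fs.readFileSync(path.join(process.cwd(), 'config.toml'), 'utf-8'),
  ) as unknown as Config;

export const getOpenrouterApiKey = () => loadConfig().MODELS.OPENROUTER.API_KEY;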


@@ -46,6 +46,10 @@ import {
   loadLMStudioEmbeddingsModels,
   PROVIDER_INFO as LMStudioInfo,
 } from './lmstudio';
+import {
+  loadOpenrouterChatModels,
+  PROVIDER_INFO as OpenRouterInfo,
+} from './openrouter';
 
 export const PROVIDER_METADATA = {
   openai: OpenAIInfo,
@@ -57,6 +61,7 @@ export const PROVIDER_METADATA = {
   deepseek: DeepseekInfo,
   aimlapi: AimlApiInfo,
   lmstudio: LMStudioInfo,
+  openrouter: OpenRouterInfo,
   custom_openai: {
     key: 'custom_openai',
     displayName: 'Custom OpenAI',
@@ -85,6 +90,7 @@ export const chatModelProviders: Record<
   deepseek: loadDeepseekChatModels,
   aimlapi: loadAimlApiChatModels,
   lmstudio: loadLMStudioChatModels,
+  openrouter: loadOpenrouterChatModels,
 };
 
 export const embeddingModelProviders: Record<
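Note that openrouter is registered in chatModelProviders only; embeddingModelProviders is left untouched, since this provider wires up chat models exclusively. A consumer can then enumerate the new models like any other provider — illustrative only, with an assumed import path:

// Illustrative consumer code, not part of this commit.
import { chatModelProviders } from '@/lib/providers'; // assumed path

const openrouterModels = await chatModelProviders['openrouter']();
// e.g. { 'openai/gpt-4o': { displayName: 'OpenAI: GPT-4o', model: ... }, ... }
console.log(Object.keys(openrouterModels));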

New file (all lines added) — the './openrouter' module imported by the provider index above:

@@ -0,0 +1,65 @@
import { ChatOpenAI } from '@langchain/openai';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { getOpenrouterApiKey } from '../config';
import { ChatModel } from '.';

export const PROVIDER_INFO = {
  key: 'openrouter',
  displayName: 'OpenRouter',
};

// Model catalog fetched from OpenRouter's public listing endpoint.
let openrouterChatModels: Record<string, string>[] = [];

async function fetchModelList(): Promise<void> {
  try {
    const response = await fetch('https://openrouter.ai/api/v1/models', {
      method: 'GET',
      headers: {
        'Content-Type': 'application/json',
      },
    });

    if (!response.ok) {
      throw new Error(`API request failed with status: ${response.status}`);
    }

    const data = await response.json();

    // Map each catalog entry to the { displayName, key } shape used below.
    openrouterChatModels = data.data.map((model: any) => ({
      displayName: model.name,
      key: model.id,
    }));
  } catch (error) {
    console.error('Error fetching models:', error);
  }
}

export const loadOpenrouterChatModels = async () => {
  const openrouterApiKey = getOpenrouterApiKey();

  // Without a configured key there is nothing to load, so skip the
  // catalog fetch entirely.
  if (!openrouterApiKey) return {};

  await fetchModelList();

  try {
    const chatModels: Record<string, ChatModel> = {};

    openrouterChatModels.forEach((model) => {
      chatModels[model.key] = {
        displayName: model.displayName,
        // OpenRouter exposes an OpenAI-compatible API, so ChatOpenAI is
        // reused with its base URL pointed at openrouter.ai.
        model: new ChatOpenAI({
          openAIApiKey: openrouterApiKey,
          modelName: model.key,
          temperature: 0.7,
          configuration: {
            baseURL: 'https://openrouter.ai/api/v1',
          },
        }) as unknown as BaseChatModel,
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading OpenRouter models: ${err}`);
    return {};
  }
};
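Because every entry wraps ChatOpenAI with its base URL pointed at openrouter.ai, the returned models behave like any other LangChain chat model. A minimal usage sketch — the catalog key is an example, not something this commit pins:

// Illustrative usage of the loader above — not part of this commit.
const models = await loadOpenrouterChatModels();

const entry = models['openai/gpt-4o-mini']; // example catalog key
if (entry) {
  const reply = await entry.model.invoke('Say hello in one word.');
  console.log(reply.content);
}

One consequence of the module-level openrouterChatModels cache: if the catalog fetch fails, the loader silently falls back to whatever list was fetched last (initially empty) rather than throwing.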