Merge branch 'master' of github.com:boarder2/Perplexica
This commit is contained in:
commit
00e483f975
7 changed files with 108 additions and 0 deletions
|
|
@ -8,6 +8,7 @@ import {
|
|||
getGroqApiKey,
|
||||
getOllamaApiEndpoint,
|
||||
getOpenaiApiKey,
|
||||
getOpenrouterApiKey,
|
||||
getDeepseekApiKey,
|
||||
getAimlApiKey,
|
||||
getLMStudioApiEndpoint,
|
||||
|
|
@ -64,6 +65,7 @@ export const GET = async (req: Request) => {
|
|||
config['anthropicApiKey'] = protectApiKey(getAnthropicApiKey());
|
||||
config['geminiApiKey'] = protectApiKey(getGeminiApiKey());
|
||||
config['deepseekApiKey'] = protectApiKey(getDeepseekApiKey());
|
||||
config['openrouterApiKey'] = protectApiKey(getOpenrouterApiKey());
|
||||
config['customOpenaiApiKey'] = protectApiKey(getCustomOpenaiApiKey());
|
||||
config['aimlApiKey'] = protectApiKey(getAimlApiKey());
|
||||
|
||||
|
|
@ -140,6 +142,12 @@ export const POST = async (req: Request) => {
|
|||
LM_STUDIO: {
|
||||
API_URL: config.lmStudioApiUrl,
|
||||
},
|
||||
OPENROUTER: {
|
||||
API_KEY: getUpdatedProtectedValue(
|
||||
config.openrouterApiKey,
|
||||
getOpenrouterApiKey(),
|
||||
),
|
||||
},
|
||||
CUSTOM_OPENAI: {
|
||||
API_URL: config.customOpenaiApiUrl,
|
||||
API_KEY: getUpdatedProtectedValue(
|
||||
|
|
|
|||
|
|
@ -33,6 +33,7 @@ interface SettingsType {
|
|||
};
|
||||
openaiApiKey: string;
|
||||
groqApiKey: string;
|
||||
openrouterApiKey: string;
|
||||
anthropicApiKey: string;
|
||||
geminiApiKey: string;
|
||||
ollamaApiUrl: string;
|
||||
|
|
@ -1673,6 +1674,25 @@ export default function SettingsPage() {
|
|||
/>
|
||||
</div>
|
||||
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">
|
||||
OpenRouter API Key
|
||||
</p>
|
||||
<InputComponent
|
||||
type="password"
|
||||
placeholder="OpenRouter API Key"
|
||||
value={config.openrouterApiKey}
|
||||
isSaving={savingStates['openrouterApiKey']}
|
||||
onChange={(e) => {
|
||||
setConfig((prev) => ({
|
||||
...prev!,
|
||||
openrouterApiKey: e.target.value,
|
||||
}));
|
||||
}}
|
||||
onSave={(value) => saveConfig('openrouterApiKey', value)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">
|
||||
Anthropic API Key
|
||||
|
|
|
|||
|
|
@ -43,6 +43,9 @@ interface Config {
|
|||
LM_STUDIO: {
|
||||
API_URL: string;
|
||||
};
|
||||
OPENROUTER: {
|
||||
API_KEY: string;
|
||||
};
|
||||
CUSTOM_OPENAI: {
|
||||
API_URL: string;
|
||||
API_KEY: string;
|
||||
|
|
@ -115,6 +118,8 @@ export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;
|
|||
|
||||
// One-line accessors for provider API keys; each re-reads the config via
// loadConfig() so callers always see the current persisted value.
export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;

export const getOpenrouterApiKey = () => loadConfig().MODELS.OPENROUTER.API_KEY;

export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY;

export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY;
|
||||
|
|
|
|||
|
|
@ -46,6 +46,10 @@ import {
|
|||
loadLMStudioEmbeddingsModels,
|
||||
PROVIDER_INFO as LMStudioInfo,
|
||||
} from './lmstudio';
|
||||
import {
|
||||
loadOpenrouterChatModels,
|
||||
PROVIDER_INFO as OpenRouterInfo,
|
||||
} from './openrouter';
|
||||
|
||||
export const PROVIDER_METADATA = {
|
||||
openai: OpenAIInfo,
|
||||
|
|
@ -57,6 +61,7 @@ export const PROVIDER_METADATA = {
|
|||
deepseek: DeepseekInfo,
|
||||
aimlapi: AimlApiInfo,
|
||||
lmstudio: LMStudioInfo,
|
||||
openrouter: OpenRouterInfo,
|
||||
custom_openai: {
|
||||
key: 'custom_openai',
|
||||
displayName: 'Custom OpenAI',
|
||||
|
|
@ -85,6 +90,7 @@ export const chatModelProviders: Record<
|
|||
deepseek: loadDeepseekChatModels,
|
||||
aimlapi: loadAimlApiChatModels,
|
||||
lmstudio: loadLMStudioChatModels,
|
||||
openrouter: loadOpenrouterChatModels,
|
||||
};
|
||||
|
||||
export const embeddingModelProviders: Record<
|
||||
|
|
|
|||
65
src/lib/providers/openrouter.ts
Normal file
65
src/lib/providers/openrouter.ts
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
// Provider metadata: machine-readable key and human-readable display name.
// Consumed by the providers registry (PROVIDER_METADATA) in the providers index.
export const PROVIDER_INFO = {
  key: 'openrouter',
  displayName: 'OpenRouter',
};
|
||||
import { ChatOpenAI } from '@langchain/openai';
|
||||
import { getOpenrouterApiKey } from '../config';
|
||||
import { ChatModel } from '.';
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
|
||||
let openrouterChatModels: Record<string, string>[] = [];
|
||||
|
||||
async function fetchModelList(): Promise<void> {
|
||||
try {
|
||||
const response = await fetch('https://openrouter.ai/api/v1/models', {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`API request failed with status: ${response.status}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
openrouterChatModels = data.data.map((model: any) => ({
|
||||
displayName: model.name,
|
||||
key: model.id,
|
||||
}));
|
||||
} catch (error) {
|
||||
console.error('Error fetching models:', error);
|
||||
}
|
||||
}
|
||||
|
||||
export const loadOpenrouterChatModels = async () => {
|
||||
await fetchModelList();
|
||||
|
||||
const openrouterApikey = getOpenrouterApiKey();
|
||||
|
||||
if (!openrouterApikey) return {};
|
||||
|
||||
try {
|
||||
const chatModels: Record<string, ChatModel> = {};
|
||||
|
||||
openrouterChatModels.forEach((model) => {
|
||||
chatModels[model.key] = {
|
||||
displayName: model.displayName,
|
||||
model: new ChatOpenAI({
|
||||
openAIApiKey: openrouterApikey,
|
||||
modelName: model.key,
|
||||
temperature: 0.7,
|
||||
configuration: {
|
||||
baseURL: 'https://openrouter.ai/api/v1',
|
||||
},
|
||||
}) as unknown as BaseChatModel,
|
||||
};
|
||||
});
|
||||
|
||||
return chatModels;
|
||||
} catch (err) {
|
||||
console.error(`Error loading Openrouter models: ${err}`);
|
||||
return {};
|
||||
}
|
||||
};
|
||||
Loading…
Add table
Add a link
Reference in a new issue