Merge remote-tracking branch 'upstream/master'

Willie Zutz 2025-07-16 23:37:23 -06:00
commit 70cdad1cd4
66 changed files with 8154 additions and 381 deletions


@@ -9,6 +9,7 @@ import {
getOllamaApiEndpoint,
getOpenaiApiKey,
getDeepseekApiKey,
getAimlApiKey,
getLMStudioApiEndpoint,
getHiddenModels,
updateConfig,
@@ -64,6 +65,7 @@ export const GET = async (req: Request) => {
config['geminiApiKey'] = protectApiKey(getGeminiApiKey());
config['deepseekApiKey'] = protectApiKey(getDeepseekApiKey());
config['customOpenaiApiKey'] = protectApiKey(getCustomOpenaiApiKey());
config['aimlApiKey'] = protectApiKey(getAimlApiKey());
// Non-sensitive values remain unchanged
config['ollamaApiUrl'] = getOllamaApiEndpoint();
@@ -132,6 +134,12 @@ export const POST = async (req: Request) => {
getDeepseekApiKey(),
),
},
AIMLAPI: {
API_KEY: getUpdatedProtectedValue(
config.aimlApiKey,
getAimlApiKey(),
),
},
LM_STUDIO: {
API_URL: config.lmStudioApiUrl,
},


@@ -16,28 +16,43 @@ const topics = ['AI', 'tech']; /* TODO: Add UI to customize this */
export const GET = async (req: Request) => {
try {
const data = (
await Promise.all([
...new Array(articleWebsites.length * topics.length)
.fill(0)
.map(async (_, i) => {
return (
await searchSearxng(
`site:${articleWebsites[i % articleWebsites.length]} ${
topics[i % topics.length]
}`,
{
engines: ['bing news'],
pageno: 1,
},
)
).results;
}),
])
)
.map((result) => result)
.flat()
.sort(() => Math.random() - 0.5);
const params = new URL(req.url).searchParams;
const mode: 'normal' | 'preview' =
(params.get('mode') as 'normal' | 'preview') || 'normal';
let data = [];
if (mode === 'normal') {
data = (
await Promise.all([
...new Array(articleWebsites.length * topics.length)
.fill(0)
.map(async (_, i) => {
return (
await searchSearxng(
`site:${articleWebsites[i % articleWebsites.length]} ${
topics[i % topics.length]
}`,
{
engines: ['bing news'],
pageno: 1,
},
)
).results;
}),
])
)
.map((result) => result)
.flat()
.sort(() => Math.random() - 0.5);
} else {
data = (
await searchSearxng(
`site:${articleWebsites[Math.floor(Math.random() * articleWebsites.length)]} ${topics[Math.floor(Math.random() * topics.length)]}`,
{ engines: ['bing news'], pageno: 1 },
)
).results;
}
return Response.json(
{

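The GET handler above now branches on a 'mode' query parameter: 'normal' keeps the exhaustive site/topic search, while 'preview' runs a single randomized query for a lightweight response. A minimal client-side sketch, assuming only what the NewsArticleWidget later in this commit relies on (the 'mode=preview' query string and a 'blogs' array in the response); the field types are illustrative:

// Sketch: fetch one random preview article from the new endpoint mode.
async function fetchPreviewArticle() {
  const res = await fetch('/api/discover?mode=preview');
  const data: {
    blogs: { title: string; content: string; url: string; thumbnail: string }[];
  } = await res.json();
  // Pick a random article, mirroring what the widget does client-side.
  return data.blogs[Math.floor(Math.random() * data.blogs.length)];
}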

@@ -0,0 +1,164 @@
export const POST = async (req: Request) => {
try {
const body: { lat: number; lng: number } = await req.json();
if (!body.lat || !body.lng) {
return Response.json(
{
message: 'Invalid request.',
},
{ status: 400 },
);
}
const res = await fetch(
`https://api.open-meteo.com/v1/forecast?latitude=${body.lat}&longitude=${body.lng}&current=weather_code,temperature_2m,is_day,relative_humidity_2m,wind_speed_10m&timezone=auto`,
);
const data = await res.json();
if (data.error) {
console.error(`Error fetching weather data: ${data.reason}`);
return Response.json(
{
message: 'An error has occurred.',
},
{ status: 500 },
);
}
const weather: {
temperature: number;
condition: string;
humidity: number;
windSpeed: number;
icon: string;
} = {
temperature: data.current.temperature_2m,
condition: '',
humidity: data.current.relative_humidity_2m,
windSpeed: data.current.wind_speed_10m,
icon: '',
};
const code = data.current.weather_code;
const isDay = data.current.is_day === 1;
const dayOrNight = isDay ? 'day' : 'night';
switch (code) {
case 0:
weather.icon = `clear-${dayOrNight}`;
weather.condition = 'Clear';
break;
case 1:
weather.condition = 'Mainly Clear';
case 2:
weather.condition = 'Partly Cloudy';
case 3:
weather.icon = `cloudy-1-${dayOrNight}`;
weather.condition = 'Cloudy';
break;
case 45:
weather.condition = 'Fog';
case 48:
weather.icon = `fog-${dayOrNight}`;
weather.condition = 'Fog';
break;
case 51:
weather.condition = 'Light Drizzle';
case 53:
weather.condition = 'Moderate Drizzle';
case 55:
weather.icon = `rainy-1-${dayOrNight}`;
weather.condition = 'Dense Drizzle';
break;
case 56:
weather.condition = 'Light Freezing Drizzle';
case 57:
weather.icon = `frost-${dayOrNight}`;
weather.condition = 'Dense Freezing Drizzle';
break;
case 61:
weather.condition = 'Slight Rain';
case 63:
weather.condition = 'Moderate Rain';
case 65:
weather.condition = 'Heavy Rain';
weather.icon = `rainy-2-${dayOrNight}`;
break;
case 66:
weather.condition = 'Light Freezing Rain';
case 67:
weather.condition = 'Heavy Freezing Rain';
weather.icon = 'rain-and-sleet-mix';
break;
case 71:
weather.condition = 'Slight Snow Fall';
case 73:
weather.condition = 'Moderate Snow Fall';
case 75:
weather.condition = 'Heavy Snow Fall';
weather.icon = `snowy-2-${dayOrNight}`;
break;
case 77:
weather.condition = 'Snow';
weather.icon = `snowy-1-${dayOrNight}`;
break;
case 80:
weather.condition = 'Slight Rain Showers';
case 81:
weather.condition = 'Moderate Rain Showers';
case 82:
weather.condition = 'Heavy Rain Showers';
weather.icon = `rainy-3-${dayOrNight}`;
break;
case 85:
weather.condition = 'Slight Snow Showers';
case 86:
weather.condition = 'Moderate Snow Showers';
case 87:
weather.condition = 'Heavy Snow Showers';
weather.icon = `snowy-3-${dayOrNight}`;
break;
case 95:
weather.condition = 'Thunderstorm';
weather.icon = `scattered-thunderstorms-${dayOrNight}`;
break;
case 96:
weather.condition = 'Thunderstorm with Slight Hail';
case 99:
weather.condition = 'Thunderstorm with Heavy Hail';
weather.icon = 'severe-thunderstorm';
break;
default:
weather.icon = `clear-${dayOrNight}`;
weather.condition = 'Clear';
break;
}
return Response.json(weather);
} catch (err) {
console.error('An error occurred while getting home widgets', err);
return Response.json(
{
message: 'An error has occurred.',
},
{
status: 500,
},
);
}
};
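For reference, the slice of the Open-Meteo response this handler actually reads, reconstructed from the field accesses above; this is a sketch, not the full API schema:

// Fields consumed by the handler; the real Open-Meteo payload carries more
// (units, timezone metadata, etc.).
interface OpenMeteoCurrentResponse {
  current: {
    temperature_2m: number;       // -> weather.temperature
    relative_humidity_2m: number; // -> weather.humidity
    wind_speed_10m: number;       // -> weather.windSpeed
    weather_code: number;         // WMO code driving the condition/icon switch
    is_day: 0 | 1;                // selects the day or night icon variant
  };
  error?: boolean; // present on failed requests
  reason?: string; // error description logged by the handler
}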


@@ -25,3 +25,11 @@
}
}
}
@media screen and (-webkit-min-device-pixel-ratio: 0) {
select,
textarea,
input {
font-size: 16px !important;
}
}

src/app/manifest.ts (new file)

@@ -0,0 +1,54 @@
import type { MetadataRoute } from 'next';
export default function manifest(): MetadataRoute.Manifest {
return {
name: 'Perplexica - Chat with the internet',
short_name: 'Perplexica',
description:
'Perplexica is an AI powered chatbot that is connected to the internet.',
start_url: '/',
display: 'standalone',
background_color: '#0a0a0a',
theme_color: '#0a0a0a',
screenshots: [
{
src: '/screenshots/p1.png',
form_factor: 'wide',
sizes: '2560x1600',
},
{
src: '/screenshots/p2.png',
form_factor: 'wide',
sizes: '2560x1600',
},
{
src: '/screenshots/p1_small.png',
form_factor: 'narrow',
sizes: '828x1792',
},
{
src: '/screenshots/p2_small.png',
form_factor: 'narrow',
sizes: '828x1792',
},
],
icons: [
{
src: '/icon-50.png',
sizes: '50x50',
type: 'image/png' as const,
},
{
src: '/icon-100.png',
sizes: '100x100',
type: 'image/png',
},
{
src: '/icon.png',
sizes: '440x440',
type: 'image/png',
purpose: 'any',
},
],
};
}


@@ -38,6 +38,7 @@ interface SettingsType {
ollamaApiUrl: string;
lmStudioApiUrl: string;
deepseekApiKey: string;
aimlApiKey: string;
customOpenaiApiKey: string;
customOpenaiApiUrl: string;
customOpenaiModelName: string;
@@ -228,7 +229,7 @@ export default function SettingsPage() {
const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState<
string | null
>(null);
const [isLoading, setIsLoading] = useState(false);
const [isLoading, setIsLoading] = useState(true);
const [automaticSuggestions, setAutomaticSuggestions] = useState(true);
const [savingStates, setSavingStates] = useState<Record<string, boolean>>({});
const [contextWindowSize, setContextWindowSize] = useState(2048);
@@ -267,7 +268,6 @@ export default function SettingsPage() {
useEffect(() => {
const fetchConfig = async () => {
setIsLoading(true);
const res = await fetch(`/api/config`, {
headers: {
'Content-Type': 'application/json',
@@ -1730,6 +1730,44 @@ export default function SettingsPage() {
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
AI/ML API Key
</p>
<InputComponent
type="text"
placeholder="AI/ML API Key"
value={config.aimlApiKey}
isSaving={savingStates['aimlApiKey']}
onChange={(e) => {
setConfig((prev) => ({
...prev!,
aimlApiKey: e.target.value,
}));
}}
onSave={(value) => saveConfig('aimlApiKey', value)}
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
LM Studio API URL


@@ -93,14 +93,29 @@ const checkConfig = async (
) {
if (!chatModel || !chatModelProvider) {
const chatModelProviders = providers.chatModelProviders;
const chatModelProvidersKeys = Object.keys(chatModelProviders);
chatModelProvider =
chatModelProvider || Object.keys(chatModelProviders)[0];
if (!chatModelProviders || chatModelProvidersKeys.length === 0) {
return toast.error('No chat models available');
} else {
chatModelProvider =
chatModelProvidersKeys.find(
(provider) =>
Object.keys(chatModelProviders[provider]).length > 0,
) || chatModelProvidersKeys[0];
}
if (
chatModelProvider === 'custom_openai' &&
Object.keys(chatModelProviders[chatModelProvider]).length === 0
) {
toast.error(
"Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
);
return setHasError(true);
}
chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
if (!chatModelProviders || Object.keys(chatModelProviders).length === 0)
return toast.error('No chat models available');
}
if (!embeddingModel || !embeddingModelProvider) {
@@ -128,7 +143,8 @@ const checkConfig = async (
if (
Object.keys(chatModelProviders).length > 0 &&
!chatModelProviders[chatModelProvider]
(!chatModelProviders[chatModelProvider] ||
Object.keys(chatModelProviders[chatModelProvider]).length === 0)
) {
const chatModelProvidersKeys = Object.keys(chatModelProviders);
chatModelProvider =
@@ -143,6 +159,16 @@ const checkConfig = async (
chatModelProvider &&
!chatModelProviders[chatModelProvider][chatModel]
) {
if (
chatModelProvider === 'custom_openai' &&
Object.keys(chatModelProviders[chatModelProvider]).length === 0
) {
toast.error(
"Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
);
return setHasError(true);
}
chatModel = Object.keys(
chatModelProviders[
Object.keys(chatModelProviders[chatModelProvider]).length > 0
@@ -150,6 +176,7 @@ const checkConfig = async (
: Object.keys(chatModelProviders)[0]
],
)[0];
localStorage.setItem('chatModel', chatModel);
}

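The fallback logic above no longer grabs Object.keys(chatModelProviders)[0] blindly; it prefers the first provider that actually exposes at least one model. A small sketch of the selection expression with an illustrative provider map (the model id is made up):

// Sketch of the provider fallback introduced above.
const chatModelProviders: Record<string, Record<string, unknown>> = {
  custom_openai: {},             // configured but empty
  openai: { 'gpt-4o-mini': {} }, // illustrative model id
};
const keys = Object.keys(chatModelProviders);
const chatModelProvider =
  keys.find((p) => Object.keys(chatModelProviders[p]).length > 0) || keys[0];
// chatModelProvider === 'openai'; only if every provider is empty does it fall
// back to the first key, where the empty custom_openai case then surfaces the
// "configure your chat model providers" toast.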

@@ -3,6 +3,8 @@ import { useState } from 'react';
import { File } from './ChatWindow';
import Link from 'next/link';
import MessageInput from './MessageInput';
import WeatherWidget from './WeatherWidget';
import NewsArticleWidget from './NewsArticleWidget';
const EmptyChat = ({
sendMessage,
@@ -29,8 +31,6 @@ const EmptyChat = ({
files: File[];
setFiles: (files: File[]) => void;
}) => {
const [isSettingsOpen, setIsSettingsOpen] = useState(false);
return (
<div className="relative">
<div className="absolute w-full flex flex-row items-center justify-end mr-5 mt-5">
@@ -38,25 +38,35 @@ const EmptyChat = ({
<Settings className="cursor-pointer lg:hidden" />
</Link>
</div>
<div className="flex flex-col items-center justify-center min-h-screen max-w-screen-sm mx-auto p-2 space-y-8">
<h2 className="text-black/70 dark:text-white/70 text-3xl font-medium -mt-8">
Research begins here.
</h2>
<MessageInput
firstMessage={true}
loading={false}
sendMessage={sendMessage}
focusMode={focusMode}
setFocusMode={setFocusMode}
optimizationMode={optimizationMode}
setOptimizationMode={setOptimizationMode}
fileIds={fileIds}
setFileIds={setFileIds}
files={files}
systemPromptIds={systemPromptIds}
setSystemPromptIds={setSystemPromptIds}
setFiles={setFiles}
/>
<div className="flex flex-col items-center justify-center min-h-screen max-w-screen-sm mx-auto p-2 space-y-4">
<div className="flex flex-col items-center justify-center w-full space-y-8">
<h2 className="text-black/70 dark:text-white/70 text-3xl font-medium -mt-8">
Research begins here.
</h2>
<MessageInput
firstMessage={true}
loading={false}
sendMessage={sendMessage}
focusMode={focusMode}
setFocusMode={setFocusMode}
optimizationMode={optimizationMode}
setOptimizationMode={setOptimizationMode}
fileIds={fileIds}
setFileIds={setFileIds}
files={files}
systemPromptIds={systemPromptIds}
setSystemPromptIds={setSystemPromptIds}
setFiles={setFiles}
/>
</div>
<div className="flex flex-col w-full gap-4 mt-2 sm:flex-row sm:justify-center">
<div className="flex-1 w-full">
<WeatherWidget />
</div>
<div className="flex-1 w-full">
<NewsArticleWidget />
</div>
</div>
</div>
</div>
);


@@ -1,8 +1,122 @@
import { Clock, Edit, Share, Trash } from 'lucide-react';
import { Clock, Edit, Share, Trash, FileText, FileDown } from 'lucide-react';
import { Message } from './ChatWindow';
import { useEffect, useState } from 'react';
import { useEffect, useState, Fragment } from 'react';
import { formatTimeDifference } from '@/lib/utils';
import DeleteChat from './DeleteChat';
import {
Popover,
PopoverButton,
PopoverPanel,
Transition,
} from '@headlessui/react';
import jsPDF from 'jspdf';
const downloadFile = (filename: string, content: string, type: string) => {
const blob = new Blob([content], { type });
const url = URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = filename;
document.body.appendChild(a);
a.click();
setTimeout(() => {
document.body.removeChild(a);
URL.revokeObjectURL(url);
}, 0);
};
const exportAsMarkdown = (messages: Message[], title: string) => {
const date = new Date(messages[0]?.createdAt || Date.now()).toLocaleString();
let md = `# 💬 Chat Export: ${title}\n\n`;
md += `*Exported on: ${date}*\n\n---\n`;
messages.forEach((msg, idx) => {
md += `\n---\n`;
md += `**${msg.role === 'user' ? '🧑 User' : '🤖 Assistant'}**
`;
md += `*${new Date(msg.createdAt).toLocaleString()}*\n\n`;
md += `> ${msg.content.replace(/\n/g, '\n> ')}\n`;
if (msg.sources && msg.sources.length > 0) {
md += `\n**Citations:**\n`;
msg.sources.forEach((src: any, i: number) => {
const url = src.metadata?.url || '';
md += `- [${i + 1}] [${url}](${url})\n`;
});
}
});
md += '\n---\n';
downloadFile(`${title || 'chat'}.md`, md, 'text/markdown');
};
const exportAsPDF = (messages: Message[], title: string) => {
const doc = new jsPDF();
const date = new Date(messages[0]?.createdAt || Date.now()).toLocaleString();
let y = 15;
const pageHeight = doc.internal.pageSize.height;
doc.setFontSize(18);
doc.text(`Chat Export: ${title}`, 10, y);
y += 8;
doc.setFontSize(11);
doc.setTextColor(100);
doc.text(`Exported on: ${date}`, 10, y);
y += 8;
doc.setDrawColor(200);
doc.line(10, y, 200, y);
y += 6;
doc.setTextColor(30);
messages.forEach((msg, idx) => {
if (y > pageHeight - 30) {
doc.addPage();
y = 15;
}
doc.setFont('helvetica', 'bold');
doc.text(`${msg.role === 'user' ? 'User' : 'Assistant'}`, 10, y);
doc.setFont('helvetica', 'normal');
doc.setFontSize(10);
doc.setTextColor(120);
doc.text(`${new Date(msg.createdAt).toLocaleString()}`, 40, y);
y += 6;
doc.setTextColor(30);
doc.setFontSize(12);
const lines = doc.splitTextToSize(msg.content, 180);
for (let i = 0; i < lines.length; i++) {
if (y > pageHeight - 20) {
doc.addPage();
y = 15;
}
doc.text(lines[i], 12, y);
y += 6;
}
if (msg.sources && msg.sources.length > 0) {
doc.setFontSize(11);
doc.setTextColor(80);
if (y > pageHeight - 20) {
doc.addPage();
y = 15;
}
doc.text('Citations:', 12, y);
y += 5;
msg.sources.forEach((src: any, i: number) => {
const url = src.metadata?.url || '';
if (y > pageHeight - 15) {
doc.addPage();
y = 15;
}
doc.text(`- [${i + 1}] ${url}`, 15, y);
y += 5;
});
doc.setTextColor(30);
}
y += 6;
doc.setDrawColor(230);
if (y > pageHeight - 10) {
doc.addPage();
y = 15;
}
doc.line(10, y, 200, y);
y += 4;
});
doc.save(`${title || 'chat'}.pdf`);
};
const Navbar = ({
chatId,
@@ -59,10 +173,39 @@ const Navbar = ({
<p className="hidden lg:flex">{title}</p>
<div className="flex flex-row items-center space-x-4">
<Share
size={17}
className="active:scale-95 transition duration-100 cursor-pointer"
/>
<Popover className="relative">
<PopoverButton className="active:scale-95 transition duration-100 cursor-pointer p-2 rounded-full hover:bg-light-secondary dark:hover:bg-dark-secondary">
<Share size={17} />
</PopoverButton>
<Transition
as={Fragment}
enter="transition ease-out duration-100"
enterFrom="opacity-0 translate-y-1"
enterTo="opacity-100 translate-y-0"
leave="transition ease-in duration-75"
leaveFrom="opacity-100 translate-y-0"
leaveTo="opacity-0 translate-y-1"
>
<PopoverPanel className="absolute right-0 mt-2 w-64 rounded-xl shadow-xl bg-light-primary dark:bg-dark-primary border border-light-200 dark:border-dark-200 z-50">
<div className="flex flex-col py-3 px-3 gap-2">
<button
className="flex items-center gap-2 px-4 py-2 text-left hover:bg-light-secondary dark:hover:bg-dark-secondary transition-colors text-black dark:text-white rounded-lg font-medium"
onClick={() => exportAsMarkdown(messages, title || '')}
>
<FileText size={17} className="text-[#24A0ED]" />
Export as Markdown
</button>
<button
className="flex items-center gap-2 px-4 py-2 text-left hover:bg-light-secondary dark:hover:bg-dark-secondary transition-colors text-black dark:text-white rounded-lg font-medium"
onClick={() => exportAsPDF(messages, title || '')}
>
<FileDown size={17} className="text-[#24A0ED]" />
Export as PDF
</button>
</div>
</PopoverPanel>
</Transition>
</Popover>
<DeleteChat redirect chatId={chatId} chats={[]} setChats={() => {}} />
</div>
</div>

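The exportAsMarkdown and exportAsPDF helpers added above only touch a handful of message fields; a sketch of the shape they assume (the real Message type is defined in ChatWindow and may carry more):

// Fields read by the export helpers; 'sources' is typed loosely here because
// the helpers themselves treat each source as 'any'.
interface ExportedMessage {
  role: string;             // 'user' renders as User, anything else as Assistant
  content: string;          // quoted in Markdown, wrapped via splitTextToSize in the PDF
  createdAt: string | Date; // passed to new Date(...) for the timestamp line
  sources?: { metadata?: { url?: string } }[]; // rendered as a citations list
}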

@@ -0,0 +1,71 @@
import { useEffect, useState } from 'react';
interface Article {
title: string;
content: string;
url: string;
thumbnail: string;
}
const NewsArticleWidget = () => {
const [article, setArticle] = useState<Article | null>(null);
const [loading, setLoading] = useState(true);
const [error, setError] = useState(false);
useEffect(() => {
fetch('/api/discover?mode=preview')
.then((res) => res.json())
.then((data) => {
const articles = (data.blogs || []).filter((a: Article) => a.thumbnail);
setArticle(articles[Math.floor(Math.random() * articles.length)]);
setLoading(false);
})
.catch(() => {
setError(true);
setLoading(false);
});
}, []);
return (
<div className="bg-light-secondary dark:bg-dark-secondary rounded-xl border border-light-200 dark:border-dark-200 shadow-sm flex flex-row items-center w-full h-24 min-h-[96px] max-h-[96px] px-3 py-2 gap-3 overflow-hidden">
{loading ? (
<>
<div className="animate-pulse flex flex-row items-center w-full h-full">
<div className="rounded-lg w-16 min-w-16 max-w-16 h-16 min-h-16 max-h-16 bg-light-200 dark:bg-dark-200 mr-3" />
<div className="flex flex-col justify-center flex-1 h-full w-0 gap-2">
<div className="h-4 w-3/4 rounded bg-light-200 dark:bg-dark-200" />
<div className="h-3 w-1/2 rounded bg-light-200 dark:bg-dark-200" />
</div>
</div>
</>
) : error ? (
<div className="w-full text-xs text-red-400">Could not load news.</div>
) : article ? (
<a
href={`/?q=Summary: ${article.url}`}
className="flex flex-row items-center w-full h-full group"
>
<img
className="object-cover rounded-lg w-16 min-w-16 max-w-16 h-16 min-h-16 max-h-16 border border-light-200 dark:border-dark-200 bg-light-200 dark:bg-dark-200 group-hover:opacity-90 transition"
src={
new URL(article.thumbnail).origin +
new URL(article.thumbnail).pathname +
`?id=${new URL(article.thumbnail).searchParams.get('id')}`
}
alt={article.title}
/>
<div className="flex flex-col justify-center flex-1 h-full pl-3 w-0">
<div className="font-bold text-xs text-black dark:text-white leading-tight truncate overflow-hidden whitespace-nowrap">
{article.title}
</div>
<p className="text-black/70 dark:text-white/70 text-xs leading-snug truncate overflow-hidden whitespace-nowrap">
{article.content}
</p>
</div>
</a>
) : null}
</div>
);
};
export default NewsArticleWidget;


@@ -0,0 +1,155 @@
import { Cloud, Sun, CloudRain, CloudSnow, Wind } from 'lucide-react';
import { useEffect, useState } from 'react';
const WeatherWidget = () => {
const [data, setData] = useState({
temperature: 0,
condition: '',
location: '',
humidity: 0,
windSpeed: 0,
icon: '',
});
const [loading, setLoading] = useState(true);
useEffect(() => {
const getApproxLocation = async () => {
const res = await fetch('https://ipwhois.app/json/');
const data = await res.json();
return {
latitude: data.latitude,
longitude: data.longitude,
city: data.city,
};
};
const getLocation = async (
callback: (location: {
latitude: number;
longitude: number;
city: string;
}) => void,
) => {
if (navigator.geolocation) {
const result = await navigator.permissions.query({
name: 'geolocation',
});
if (result.state === 'granted') {
navigator.geolocation.getCurrentPosition(async (position) => {
const res = await fetch(
`https://api-bdc.io/data/reverse-geocode-client?latitude=${position.coords.latitude}&longitude=${position.coords.longitude}&localityLanguage=en`,
{
method: 'GET',
headers: {
'Content-Type': 'application/json',
},
},
);
const data = await res.json();
callback({
latitude: position.coords.latitude,
longitude: position.coords.longitude,
city: data.locality,
});
});
} else if (result.state === 'prompt') {
callback(await getApproxLocation());
navigator.geolocation.getCurrentPosition((position) => {});
} else if (result.state === 'denied') {
callback(await getApproxLocation());
}
} else {
callback(await getApproxLocation());
}
};
getLocation(async (location) => {
const res = await fetch(`/api/weather`, {
method: 'POST',
body: JSON.stringify({
lat: location.latitude,
lng: location.longitude,
}),
});
const data = await res.json();
if (res.status !== 200) {
console.error('Error fetching weather data');
setLoading(false);
return;
}
setData({
temperature: data.temperature,
condition: data.condition,
location: location.city,
humidity: data.humidity,
windSpeed: data.windSpeed,
icon: data.icon,
});
setLoading(false);
});
}, []);
return (
<div className="bg-light-secondary dark:bg-dark-secondary rounded-xl border border-light-200 dark:border-dark-200 shadow-sm flex flex-row items-center w-full h-24 min-h-[96px] max-h-[96px] px-3 py-2 gap-3">
{loading ? (
<>
<div className="flex flex-col items-center justify-center w-16 min-w-16 max-w-16 h-full animate-pulse">
<div className="h-10 w-10 rounded-full bg-light-200 dark:bg-dark-200 mb-2" />
<div className="h-4 w-10 rounded bg-light-200 dark:bg-dark-200" />
</div>
<div className="flex flex-col justify-between flex-1 h-full py-1 animate-pulse">
<div className="flex flex-row items-center justify-between">
<div className="h-3 w-20 rounded bg-light-200 dark:bg-dark-200" />
<div className="h-3 w-12 rounded bg-light-200 dark:bg-dark-200" />
</div>
<div className="h-3 w-16 rounded bg-light-200 dark:bg-dark-200 mt-1" />
<div className="flex flex-row justify-between w-full mt-auto pt-1 border-t border-light-200 dark:border-dark-200">
<div className="h-3 w-16 rounded bg-light-200 dark:bg-dark-200" />
<div className="h-3 w-8 rounded bg-light-200 dark:bg-dark-200" />
</div>
</div>
</>
) : (
<>
<div className="flex flex-col items-center justify-center w-16 min-w-16 max-w-16 h-full">
<img
src={`/weather-ico/${data.icon}.svg`}
alt={data.condition}
className="h-10 w-auto"
/>
<span className="text-base font-semibold text-black dark:text-white">
{data.temperature}°C
</span>
</div>
<div className="flex flex-col justify-between flex-1 h-full py-1">
<div className="flex flex-row items-center justify-between">
<span className="text-xs font-medium text-black dark:text-white">
{data.location}
</span>
<span className="flex items-center text-xs text-black/60 dark:text-white/60">
<Wind className="w-3 h-3 mr-1" />
{data.windSpeed} km/h
</span>
</div>
<span className="text-xs text-black/60 dark:text-white/60 mt-1">
{data.condition}
</span>
<div className="flex flex-row justify-between w-full mt-auto pt-1 border-t border-light-200 dark:border-dark-200 text-xs text-black/60 dark:text-white/60">
<span>Humidity: {data.humidity}%</span>
<span>Now</span>
</div>
</div>
</>
)}
</div>
);
};
export default WeatherWidget;


@@ -37,6 +37,9 @@ interface Config {
DEEPSEEK: {
API_KEY: string;
};
AIMLAPI: {
API_KEY: string;
};
LM_STUDIO: {
API_URL: string;
};
@@ -123,6 +126,8 @@ export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
export const getAimlApiKey = () => loadConfig().MODELS.AIMLAPI.API_KEY;
export const getCustomOpenaiApiKey = () =>
loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;


@@ -3,7 +3,8 @@ import Database from 'better-sqlite3';
import * as schema from './schema';
import path from 'path';
const sqlite = new Database(path.join(process.cwd(), 'data/db.sqlite'));
const DATA_DIR = process.env.DATA_DIR || process.cwd();
const sqlite = new Database(path.join(DATA_DIR, './data/db.sqlite'));
const db = drizzle(sqlite, {
schema: schema,
});
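A quick sketch of where the database now lands depending on the new DATA_DIR override (the /srv path is only an example):

import path from 'path';

// DATA_DIR unset -> previous behaviour: <cwd>/data/db.sqlite
const defaultPath = path.join(process.cwd(), './data/db.sqlite');
// DATA_DIR=/srv/perplexica -> /srv/perplexica/data/db.sqlite, outside the checkout
const overriddenPath = path.join('/srv/perplexica', './data/db.sqlite');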

src/lib/db/migrate.ts (new file)

@@ -0,0 +1,5 @@
import db from './';
import { migrate } from 'drizzle-orm/better-sqlite3/migrator';
import path from 'path';
migrate(db, { migrationsFolder: path.join(process.cwd(), 'drizzle') });


@@ -0,0 +1,94 @@
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { getAimlApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';
import axios from 'axios';
export const PROVIDER_INFO = {
key: 'aimlapi',
displayName: 'AI/ML API',
};
interface AimlApiModel {
id: string;
name?: string;
type?: string;
}
const API_URL = 'https://api.aimlapi.com';
export const loadAimlApiChatModels = async () => {
const apiKey = getAimlApiKey();
if (!apiKey) return {};
try {
const response = await axios.get(`${API_URL}/models`, {
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${apiKey}`,
},
});
const chatModels: Record<string, ChatModel> = {};
response.data.data.forEach((model: AimlApiModel) => {
if (model.type === 'chat-completion') {
chatModels[model.id] = {
displayName: model.name || model.id,
model: new ChatOpenAI({
openAIApiKey: apiKey,
modelName: model.id,
temperature: 0.7,
configuration: {
baseURL: API_URL,
},
}) as unknown as BaseChatModel,
};
}
});
return chatModels;
} catch (err) {
console.error(`Error loading AI/ML API models: ${err}`);
return {};
}
};
export const loadAimlApiEmbeddingModels = async () => {
const apiKey = getAimlApiKey();
if (!apiKey) return {};
try {
const response = await axios.get(`${API_URL}/models`, {
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${apiKey}`,
},
});
const embeddingModels: Record<string, EmbeddingModel> = {};
response.data.data.forEach((model: AimlApiModel) => {
if (model.type === 'embedding') {
embeddingModels[model.id] = {
displayName: model.name || model.id,
model: new OpenAIEmbeddings({
openAIApiKey: apiKey,
modelName: model.id,
configuration: {
baseURL: API_URL,
},
}) as unknown as Embeddings,
};
}
});
return embeddingModels;
} catch (err) {
console.error(`Error loading AI/ML API embeddings models: ${err}`);
return {};
}
};
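A minimal consumption sketch; the provider registry updated below is the real integration point, and the prompt string plus the assumption that an API key is configured are illustrative:

// Smoke-test the AI/ML API provider: requires MODELS.AIMLAPI.API_KEY,
// otherwise loadAimlApiChatModels() returns an empty map.
async function smokeTestAimlApi() {
  const models = await loadAimlApiChatModels();
  const first = Object.values(models)[0];
  if (!first) {
    console.log('No AI/ML API chat models available');
    return;
  }
  const reply = await first.model.invoke('Say hello'); // BaseChatModel.invoke (LangChain Runnable)
  console.log(first.displayName, String(reply.content));
}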


@@ -13,9 +13,17 @@ import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';
const geminiChatModels: Record<string, string>[] = [
{
displayName: 'Gemini 2.5 Flash Preview 05-20',
key: 'gemini-2.5-flash-preview-05-20',
},
{
displayName: 'Gemini 2.5 Pro Preview',
key: 'gemini-2.5-pro-preview-05-06',
},
{
displayName: 'Gemini 2.5 Pro Experimental',
key: 'gemini-2.5-pro-exp-03-25',
key: 'gemini-2.5-pro-preview-05-06',
},
{
displayName: 'Gemini 2.0 Flash',


@@ -36,6 +36,11 @@ import {
loadDeepseekChatModels,
PROVIDER_INFO as DeepseekInfo,
} from './deepseek';
import {
loadAimlApiChatModels,
loadAimlApiEmbeddingModels,
PROVIDER_INFO as AimlApiInfo,
} from './aimlapi';
import {
loadLMStudioChatModels,
loadLMStudioEmbeddingsModels,
@@ -50,6 +55,7 @@ export const PROVIDER_METADATA = {
gemini: GeminiInfo,
transformers: TransformersInfo,
deepseek: DeepseekInfo,
aimlapi: AimlApiInfo,
lmstudio: LMStudioInfo,
custom_openai: {
key: 'custom_openai',
@@ -77,6 +83,7 @@ export const chatModelProviders: Record<
anthropic: loadAnthropicChatModels,
gemini: loadGeminiChatModels,
deepseek: loadDeepseekChatModels,
aimlapi: loadAimlApiChatModels,
lmstudio: loadLMStudioChatModels,
};
@@ -88,6 +95,7 @@ export const embeddingModelProviders: Record<
ollama: loadOllamaEmbeddingModels,
gemini: loadGeminiEmbeddingModels,
transformers: loadTransformersEmbeddingsModels,
aimlapi: loadAimlApiEmbeddingModels,
lmstudio: loadLMStudioEmbeddingsModels,
};