diff --git a/.assets/manifest.json b/.assets/manifest.json new file mode 100644 index 0000000..e69de29 diff --git a/.gitignore b/.gitignore index c95173d..9fb5e4c 100644 --- a/.gitignore +++ b/.gitignore @@ -37,3 +37,5 @@ Thumbs.db # Db db.sqlite /searxng + +certificates \ No newline at end of file diff --git a/README.md b/README.md index 18c9f84..5eb0713 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@
-[![Discord](https://dcbadge.vercel.app/api/server/26aArMy8tT?style=flat&compact=true)](https://discord.gg/26aArMy8tT) +[![Discord](https://dcbadge.limes.pink/api/server/26aArMy8tT?style=flat)](https://discord.gg/26aArMy8tT) ![preview](.assets/perplexica-screenshot.png?) @@ -90,6 +90,9 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. - `OLLAMA`: Your Ollama API URL. You should enter it as `http://host.docker.internal:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://host.docker.internal:11434`. For other ports, adjust accordingly. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**. - `GROQ`: Your Groq API key. **You only need to fill this if you wish to use Groq's hosted models**. - `ANTHROPIC`: Your Anthropic API key. **You only need to fill this if you wish to use Anthropic models**. + - `Gemini`: Your Gemini API key. **You only need to fill this if you wish to use Google's models**. + - `DEEPSEEK`: Your Deepseek API key. **Only needed if you want Deepseek models.** + - `AIMLAPI`: Your AI/ML API key. **Only needed if you want to use AI/ML API models and embeddings.** **Note**: You can change these after starting Perplexica from the settings dialog. @@ -111,7 +114,7 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. 2. Clone the repository and rename the `sample.config.toml` file to `config.toml` in the root directory. Ensure you complete all required fields in this file. 3. After populating the configuration run `npm i`. 4. Install the dependencies and then execute `npm run build`. -5. Finally, start the app by running `npm rum start` +5. Finally, start the app by running `npm run start` **Note**: Using Docker is recommended as it simplifies the setup process, especially for managing environment variables and dependencies. @@ -132,7 +135,7 @@ If you're encountering an Ollama connection error, it is likely due to the backe 3. 
**Linux Users - Expose Ollama to Network:** - - Inside `/etc/systemd/system/ollama.service`, you need to add `Environment="OLLAMA_HOST=0.0.0.0"`. Then restart Ollama by `systemctl restart ollama`. For more information see [Ollama docs](https://github.com/ollama/ollama/blob/main/docs/faq.md#setting-environment-variables-on-linux) + - Inside `/etc/systemd/system/ollama.service`, you need to add `Environment="OLLAMA_HOST=0.0.0.0:11434"`. (Change the port number if you are using a different one.) Then reload the systemd manager configuration with `systemctl daemon-reload`, and restart Ollama by `systemctl restart ollama`. For more information see [Ollama docs](https://github.com/ollama/ollama/blob/main/docs/faq.md#setting-environment-variables-on-linux) - Ensure that the port (default is 11434) is not blocked by your firewall. diff --git a/docs/installation/UPDATING.md b/docs/installation/UPDATING.md index 972142f..66edf5c 100644 --- a/docs/installation/UPDATING.md +++ b/docs/installation/UPDATING.md @@ -41,6 +41,6 @@ To update Perplexica to the latest version, follow these steps: 3. Check for changes in the configuration files. If the `sample.config.toml` file contains new fields, delete your existing `config.toml` file, rename `sample.config.toml` to `config.toml`, and update the configuration accordingly. 4. After populating the configuration run `npm i`. 5. Install the dependencies and then execute `npm run build`. -6. Finally, start the app by running `npm rum start` +6. 
Finally, start the app by running `npm run start` --- diff --git a/package.json b/package.json index f62543b..5715c2a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "perplexica-frontend", - "version": "1.11.0-rc1", + "version": "1.11.0-rc2", "license": "MIT", "author": "ItzCrazyKns", "scripts": { @@ -15,11 +15,13 @@ "@headlessui/react": "^2.2.0", "@iarna/toml": "^2.2.5", "@icons-pack/react-simple-icons": "^12.3.0", - "@langchain/anthropic": "^0.3.15", - "@langchain/community": "^0.3.36", - "@langchain/core": "^0.3.42", - "@langchain/google-genai": "^0.1.12", - "@langchain/openai": "^0.0.25", + "@langchain/anthropic": "^0.3.24", + "@langchain/community": "^0.3.49", + "@langchain/core": "^0.3.66", + "@langchain/google-genai": "^0.2.15", + "@langchain/groq": "^0.2.3", + "@langchain/ollama": "^0.2.3", + "@langchain/openai": "^0.6.2", "@langchain/textsplitters": "^0.1.0", "@tailwindcss/typography": "^0.5.12", "@xenova/transformers": "^2.17.2", @@ -31,7 +33,7 @@ "drizzle-orm": "^0.40.1", "html-to-text": "^9.0.5", "jspdf": "^3.0.1", - "langchain": "^0.1.30", + "langchain": "^0.3.30", "lucide-react": "^0.363.0", "mammoth": "^1.9.1", "markdown-to-jsx": "^7.7.2", diff --git a/public/icon-100.png b/public/icon-100.png new file mode 100644 index 0000000..98fa242 Binary files /dev/null and b/public/icon-100.png differ diff --git a/public/icon-50.png b/public/icon-50.png new file mode 100644 index 0000000..9bb7a0e Binary files /dev/null and b/public/icon-50.png differ diff --git a/public/icon.png b/public/icon.png new file mode 100644 index 0000000..f6fe3c7 Binary files /dev/null and b/public/icon.png differ diff --git a/public/screenshots/p1.png b/public/screenshots/p1.png new file mode 100644 index 0000000..02f01e5 Binary files /dev/null and b/public/screenshots/p1.png differ diff --git a/public/screenshots/p1_small.png b/public/screenshots/p1_small.png new file mode 100644 index 0000000..13d9a42 Binary files /dev/null and 
b/public/screenshots/p1_small.png differ diff --git a/public/screenshots/p2.png b/public/screenshots/p2.png new file mode 100644 index 0000000..1171675 Binary files /dev/null and b/public/screenshots/p2.png differ diff --git a/public/screenshots/p2_small.png b/public/screenshots/p2_small.png new file mode 100644 index 0000000..bd8d673 Binary files /dev/null and b/public/screenshots/p2_small.png differ diff --git a/sample.config.toml b/sample.config.toml index 1db2125..ba3e98e 100644 --- a/sample.config.toml +++ b/sample.config.toml @@ -25,6 +25,9 @@ API_URL = "" # Ollama API URL - http://host.docker.internal:11434 [MODELS.DEEPSEEK] API_KEY = "" +[MODELS.AIMLAPI] +API_KEY = "" # Required to use AI/ML API chat and embedding models + [MODELS.LM_STUDIO] API_URL = "" # LM Studio API URL - http://host.docker.internal:1234 diff --git a/src/app/api/chat/route.ts b/src/app/api/chat/route.ts index e566edb..2d53b75 100644 --- a/src/app/api/chat/route.ts +++ b/src/app/api/chat/route.ts @@ -223,7 +223,7 @@ export const POST = async (req: Request) => { if (body.chatModel?.provider === 'custom_openai') { llm = new ChatOpenAI({ - openAIApiKey: getCustomOpenaiApiKey(), + apiKey: getCustomOpenaiApiKey(), modelName: getCustomOpenaiModelName(), temperature: 0.7, configuration: { diff --git a/src/app/api/config/route.ts b/src/app/api/config/route.ts index c1e5bbd..0c11b23 100644 --- a/src/app/api/config/route.ts +++ b/src/app/api/config/route.ts @@ -8,6 +8,7 @@ import { getOllamaApiEndpoint, getOpenaiApiKey, getDeepseekApiKey, + getAimlApiKey, getLMStudioApiEndpoint, updateConfig, } from '@/lib/config'; @@ -57,6 +58,7 @@ export const GET = async (req: Request) => { config['groqApiKey'] = getGroqApiKey(); config['geminiApiKey'] = getGeminiApiKey(); config['deepseekApiKey'] = getDeepseekApiKey(); + config['aimlApiKey'] = getAimlApiKey(); config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl(); config['customOpenaiApiKey'] = getCustomOpenaiApiKey(); config['customOpenaiModelName'] = 
getCustomOpenaiModelName(); @@ -95,6 +97,9 @@ export const POST = async (req: Request) => { DEEPSEEK: { API_KEY: config.deepseekApiKey, }, + AIMLAPI: { + API_KEY: config.aimlApiKey, + }, LM_STUDIO: { API_URL: config.lmStudioApiUrl, }, diff --git a/src/app/api/discover/route.ts b/src/app/api/discover/route.ts index 1a1b1a5..b170b0e 100644 --- a/src/app/api/discover/route.ts +++ b/src/app/api/discover/route.ts @@ -1,55 +1,79 @@ import { searchSearxng } from '@/lib/searxng'; -const articleWebsites = [ - 'yahoo.com', - 'www.exchangewire.com', - 'businessinsider.com', - /* 'wired.com', - 'mashable.com', - 'theverge.com', - 'gizmodo.com', - 'cnet.com', - 'venturebeat.com', */ -]; +const websitesForTopic = { + tech: { + query: ['technology news', 'latest tech', 'AI', 'science and innovation'], + links: ['techcrunch.com', 'wired.com', 'theverge.com'], + }, + finance: { + query: ['finance news', 'economy', 'stock market', 'investing'], + links: ['bloomberg.com', 'cnbc.com', 'marketwatch.com'], + }, + art: { + query: ['art news', 'culture', 'modern art', 'cultural events'], + links: ['artnews.com', 'hyperallergic.com', 'theartnewspaper.com'], + }, + sports: { + query: ['sports news', 'latest sports', 'cricket football tennis'], + links: ['espn.com', 'bbc.com/sport', 'skysports.com'], + }, + entertainment: { + query: ['entertainment news', 'movies', 'TV shows', 'celebrities'], + links: ['hollywoodreporter.com', 'variety.com', 'deadline.com'], + }, +}; -const topics = ['AI', 'tech']; /* TODO: Add UI to customize this */ +type Topic = keyof typeof websitesForTopic; export const GET = async (req: Request) => { try { const params = new URL(req.url).searchParams; + const mode: 'normal' | 'preview' = (params.get('mode') as 'normal' | 'preview') || 'normal'; + const topic: Topic = (params.get('topic') as Topic) || 'tech'; + + const selectedTopic = websitesForTopic[topic]; let data = []; if (mode === 'normal') { + const seenUrls = new Set(); + data = ( - await Promise.all([ - ...new 
Array(articleWebsites.length * topics.length) - .fill(0) - .map(async (_, i) => { + await Promise.all( + selectedTopic.links.flatMap((link) => + selectedTopic.query.map(async (query) => { return ( - await searchSearxng( - `site:${articleWebsites[i % articleWebsites.length]} ${topics[i % topics.length] - }`, - { - categories: ['news'], - time_range: ['month'], - pageno: 1, - }, - ) + await searchSearxng(`site:${link} ${query}`, { + categories: ['news'], + time_range: ['month'], + language: 'en', + pageno: 1, + }) ).results; }), - ]) + ), + ) ) - .map((result) => result) .flat() + .filter((item) => { + const url = item.url?.toLowerCase().trim(); + if (seenUrls.has(url)) return false; + seenUrls.add(url); + return true; + }) .sort(() => Math.random() - 0.5); } else { data = ( await searchSearxng( - `site:${articleWebsites[Math.floor(Math.random() * articleWebsites.length)]} ${topics[Math.floor(Math.random() * topics.length)]}`, - { categories: ['news'], time_range: ['month'], pageno: 1 }, + `site:${selectedTopic.links[Math.floor(Math.random() * selectedTopic.links.length)]} ${selectedTopic.query[Math.floor(Math.random() * selectedTopic.query.length)]}`, + { + categories: ['news'], + time_range: ['month'], + language: 'en', + pageno: 1, + }, ) ).results; } diff --git a/src/app/api/images/route.ts b/src/app/api/images/route.ts index db39d9f..e02854d 100644 --- a/src/app/api/images/route.ts +++ b/src/app/api/images/route.ts @@ -49,7 +49,7 @@ export const POST = async (req: Request) => { if (body.chatModel?.provider === 'custom_openai') { llm = new ChatOpenAI({ - openAIApiKey: getCustomOpenaiApiKey(), + apiKey: getCustomOpenaiApiKey(), modelName: getCustomOpenaiModelName(), temperature: 0.7, configuration: { diff --git a/src/app/api/search/route.ts b/src/app/api/search/route.ts index 970ec42..5f752ec 100644 --- a/src/app/api/search/route.ts +++ b/src/app/api/search/route.ts @@ -81,8 +81,7 @@ export const POST = async (req: Request) => { if (body.chatModel?.provider === 
'custom_openai') { llm = new ChatOpenAI({ modelName: body.chatModel?.name || getCustomOpenaiModelName(), - openAIApiKey: - body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(), + apiKey: body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(), temperature: 0.7, configuration: { baseURL: diff --git a/src/app/api/suggestions/route.ts b/src/app/api/suggestions/route.ts index e92e5ec..99179d2 100644 --- a/src/app/api/suggestions/route.ts +++ b/src/app/api/suggestions/route.ts @@ -48,7 +48,7 @@ export const POST = async (req: Request) => { if (body.chatModel?.provider === 'custom_openai') { llm = new ChatOpenAI({ - openAIApiKey: getCustomOpenaiApiKey(), + apiKey: getCustomOpenaiApiKey(), modelName: getCustomOpenaiModelName(), temperature: 0.7, configuration: { diff --git a/src/app/api/videos/route.ts b/src/app/api/videos/route.ts index 34ae7fd..7e8288b 100644 --- a/src/app/api/videos/route.ts +++ b/src/app/api/videos/route.ts @@ -49,7 +49,7 @@ export const POST = async (req: Request) => { if (body.chatModel?.provider === 'custom_openai') { llm = new ChatOpenAI({ - openAIApiKey: getCustomOpenaiApiKey(), + apiKey: getCustomOpenaiApiKey(), modelName: getCustomOpenaiModelName(), temperature: 0.7, configuration: { diff --git a/src/app/api/weather/route.ts b/src/app/api/weather/route.ts index 7594aa9..afaf8a6 100644 --- a/src/app/api/weather/route.ts +++ b/src/app/api/weather/route.ts @@ -1,6 +1,10 @@ export const POST = async (req: Request) => { try { - const body: { lat: number; lng: number } = await req.json(); + const body: { + lat: number; + lng: number; + measureUnit: 'Imperial' | 'Metric'; + } = await req.json(); if (!body.lat || !body.lng) { return Response.json( @@ -12,7 +16,9 @@ export const POST = async (req: Request) => { } const res = await fetch( - `https://api.open-meteo.com/v1/forecast?latitude=${body.lat}&longitude=${body.lng}&current=weather_code,temperature_2m,is_day,relative_humidity_2m,wind_speed_10m&timezone=auto`, + 
`https://api.open-meteo.com/v1/forecast?latitude=${body.lat}&longitude=${body.lng}&current=weather_code,temperature_2m,is_day,relative_humidity_2m,wind_speed_10m&timezone=auto${ + body.measureUnit === 'Metric' ? '' : '&temperature_unit=fahrenheit' + }${body.measureUnit === 'Metric' ? '' : '&wind_speed_unit=mph'}`, ); const data = await res.json(); @@ -33,12 +39,16 @@ export const POST = async (req: Request) => { humidity: number; windSpeed: number; icon: string; + temperatureUnit: 'C' | 'F'; + windSpeedUnit: 'm/s' | 'mph'; } = { temperature: data.current.temperature_2m, condition: '', humidity: data.current.relative_humidity_2m, windSpeed: data.current.wind_speed_10m, icon: '', + temperatureUnit: body.measureUnit === 'Metric' ? 'C' : 'F', + windSpeedUnit: body.measureUnit === 'Metric' ? 'm/s' : 'mph', }; const code = data.current.weather_code; diff --git a/src/app/discover/page.tsx b/src/app/discover/page.tsx index eb7de7f..8e20e50 100644 --- a/src/app/discover/page.tsx +++ b/src/app/discover/page.tsx @@ -4,6 +4,7 @@ import { Search } from 'lucide-react'; import { useEffect, useState } from 'react'; import Link from 'next/link'; import { toast } from 'sonner'; +import { cn } from '@/lib/utils'; interface Discover { title: string; @@ -12,60 +13,66 @@ interface Discover { thumbnail: string; } +const topics: { key: string; display: string }[] = [ + { + display: 'Tech & Science', + key: 'tech', + }, + { + display: 'Finance', + key: 'finance', + }, + { + display: 'Art & Culture', + key: 'art', + }, + { + display: 'Sports', + key: 'sports', + }, + { + display: 'Entertainment', + key: 'entertainment', + }, +]; + const Page = () => { const [discover, setDiscover] = useState<Discover[] | null>(null); const [loading, setLoading] = useState(true); + const [activeTopic, setActiveTopic] = useState(topics[0].key); + + const fetchArticles = async (topic: string) => { + setLoading(true); + try { + const res = await fetch(`/api/discover?topic=${topic}`, { + method: 'GET', + headers: { + 'Content-Type': 
'application/json', + }, + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.message); + } + + data.blogs = data.blogs.filter((blog: Discover) => blog.thumbnail); + + setDiscover(data.blogs); + } catch (err: any) { + console.error('Error fetching data:', err.message); + toast.error('Error fetching data'); + } finally { + setLoading(false); + } + }; useEffect(() => { - const fetchData = async () => { - try { - const res = await fetch(`/api/discover`, { - method: 'GET', - headers: { - 'Content-Type': 'application/json', - }, - }); + fetchArticles(activeTopic); + }, [activeTopic]); - const data = await res.json(); - - if (!res.ok) { - throw new Error(data.message); - } - - data.blogs = data.blogs.filter((blog: Discover) => blog.thumbnail); - - setDiscover(data.blogs); - } catch (err: any) { - console.error('Error fetching data:', err.message); - toast.error('Error fetching data'); - } finally { - setLoading(false); - } - }; - - fetchData(); - }, []); - - return loading ? ( -
- -
- ) : ( + return ( <>
@@ -76,35 +83,73 @@ const Page = () => {
-
- {discover && - discover?.map((item, i) => ( - - {item.title} -
-
- {item.title.slice(0, 100)}... -
-

- {item.content.slice(0, 100)}... -

-
- - ))} +
+ {topics.map((t, i) => ( +
setActiveTopic(t.key)} + > + {t.display} +
+ ))}
+ + {loading ? ( +
+ +
+ ) : ( +
+ {discover && + discover?.map((item, i) => ( + + {item.title} +
+
+ {item.title.slice(0, 100)}... +
+

+ {item.content.slice(0, 100)}... +

+
+ + ))} +
+ )}
); diff --git a/src/app/globals.css b/src/app/globals.css index f75daca..6bdc1a8 100644 --- a/src/app/globals.css +++ b/src/app/globals.css @@ -11,3 +11,11 @@ display: none; } } + +@media screen and (-webkit-min-device-pixel-ratio: 0) { + select, + textarea, + input { + font-size: 16px !important; + } +} diff --git a/src/app/manifest.ts b/src/app/manifest.ts new file mode 100644 index 0000000..792e752 --- /dev/null +++ b/src/app/manifest.ts @@ -0,0 +1,54 @@ +import type { MetadataRoute } from 'next'; + +export default function manifest(): MetadataRoute.Manifest { + return { + name: 'Perplexica - Chat with the internet', + short_name: 'Perplexica', + description: + 'Perplexica is an AI powered chatbot that is connected to the internet.', + start_url: '/', + display: 'standalone', + background_color: '#0a0a0a', + theme_color: '#0a0a0a', + screenshots: [ + { + src: '/screenshots/p1.png', + form_factor: 'wide', + sizes: '2560x1600', + }, + { + src: '/screenshots/p2.png', + form_factor: 'wide', + sizes: '2560x1600', + }, + { + src: '/screenshots/p1_small.png', + form_factor: 'narrow', + sizes: '828x1792', + }, + { + src: '/screenshots/p2_small.png', + form_factor: 'narrow', + sizes: '828x1792', + }, + ], + icons: [ + { + src: '/icon-50.png', + sizes: '50x50', + type: 'image/png' as const, + }, + { + src: '/icon-100.png', + sizes: '100x100', + type: 'image/png', + }, + { + src: '/icon.png', + sizes: '440x440', + type: 'image/png', + purpose: 'any', + }, + ], + }; +} diff --git a/src/app/settings/page.tsx b/src/app/settings/page.tsx index 6f20f01..1b13c9c 100644 --- a/src/app/settings/page.tsx +++ b/src/app/settings/page.tsx @@ -23,6 +23,7 @@ interface SettingsType { ollamaApiUrl: string; lmStudioApiUrl: string; deepseekApiKey: string; + aimlApiKey: string; customOpenaiApiKey: string; customOpenaiApiUrl: string; customOpenaiModelName: string; @@ -147,6 +148,9 @@ const Page = () => { const [automaticImageSearch, setAutomaticImageSearch] = useState(false); const 
[automaticVideoSearch, setAutomaticVideoSearch] = useState(false); const [systemInstructions, setSystemInstructions] = useState(''); + const [measureUnit, setMeasureUnit] = useState<'Imperial' | 'Metric'>( + 'Metric', + ); const [savingStates, setSavingStates] = useState>({}); useEffect(() => { @@ -209,6 +213,10 @@ const Page = () => { setSystemInstructions(localStorage.getItem('systemInstructions')!); + setMeasureUnit( + localStorage.getItem('measureUnit')! as 'Imperial' | 'Metric', + ); + setIsLoading(false); }; @@ -367,6 +375,8 @@ const Page = () => { localStorage.setItem('embeddingModel', value); } else if (key === 'systemInstructions') { localStorage.setItem('systemInstructions', value); + } else if (key === 'measureUnit') { + localStorage.setItem('measureUnit', value.toString()); } } catch (err) { console.error('Failed to save:', err); @@ -415,13 +425,35 @@ const Page = () => { ) : ( config && (
- +

Theme

+
+

+ Measurement Units +

+