From 0af66f8b72320b67b1a573d6a02f8ef2d2eee141 Mon Sep 17 00:00:00 2001 From: WanQuanXie Date: Wed, 8 May 2024 09:57:11 +0800 Subject: [PATCH 001/399] fix(Chat): list map element must specify a unique key --- ui/components/Chat.tsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ui/components/Chat.tsx b/ui/components/Chat.tsx index 294520b..ddd2957 100644 --- a/ui/components/Chat.tsx +++ b/ui/components/Chat.tsx @@ -1,6 +1,6 @@ 'use client'; -import { useEffect, useRef, useState } from 'react'; +import { Fragment, useEffect, useRef, useState } from 'react'; import MessageInput from './MessageInput'; import { Message } from './ChatWindow'; import MessageBox from './MessageBox'; @@ -53,7 +53,7 @@ const Chat = ({ const isLast = i === messages.length - 1; return ( - <> + )} - + ); })} {loading && !messageAppeared && } From 4f5f6be85f157b9fc0992fa1583f4cc1060e9522 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Wed, 8 May 2024 20:05:29 +0530 Subject: [PATCH 002/399] feat(working): fix grammatical mistake --- docs/architecture/WORKING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/architecture/WORKING.md b/docs/architecture/WORKING.md index 8718b22..e39de7a 100644 --- a/docs/architecture/WORKING.md +++ b/docs/architecture/WORKING.md @@ -5,7 +5,7 @@ Curious about how Perplexica works? Don't worry, we'll cover it here. Before we We'll understand how Perplexica works by taking an example of a scenario where a user asks: "How does an A.C. work?". We'll break down the process into steps to make it easier to understand. The steps are as follows: 1. The message is sent via WS to the backend server where it invokes the chain. The chain will depend on your focus mode. For this example, let's assume we use the "webSearch" focus mode. -2. The chain is now invoked; first, the message is passed to another chain where it first predicts (using the chat history and the question) whether there is a need for sources or searching the web. If there is, it will generate a query (in accordance with the chat history) for searching the web that we'll take up later. If not, the chain will end there, and then the answer generator chain, also known as the response generator, will be started. +2. The chain is now invoked; first, the message is passed to another chain where it first predicts (using the chat history and the question) whether there is a need for sources and searching the web. If there is, it will generate a query (in accordance with the chat history) for searching the web that we'll take up later. If not, the chain will end there, and then the answer generator chain, also known as the response generator, will be started. 3. The query returned by the first chain is passed to SearXNG to search the web for information. 4. After the information is retrieved, it is based on keyword-based search. We then convert the information into embeddings and the query as well, then we perform a similarity search to find the most relevant sources to answer the query. 5. After all this is done, the sources are passed to the response generator. This chain takes all the chat history, the query, and the sources. It generates a response that is streamed to the UI. 
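The similarity search described in step 4 above is the heart of the rerank step: the query and the retrieved results are both converted into embeddings, and the results are ordered by how close they are to the query. As a rough illustration only (a simplified sketch rather than Perplexica's actual code; the `embedText` callback and the `Source` shape are assumptions standing in for the configured embedding model and the SearXNG results), the idea looks like this in TypeScript:

```
// Sketch of the rerank step: embed the query and each source, then sort
// the sources by cosine similarity to the query embedding (highest first).
type Source = { title: string; content: string };

const cosineSimilarity = (a: number[], b: number[]): number => {
  let dot = 0;
  let normA = 0;
  let normB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  return dot / (Math.sqrt(normA) * Math.sqrt(normB));
};

const rankSources = async (
  query: string,
  sources: Source[],
  // stand-in for whatever embedding model is configured (OpenAI, Ollama, local)
  embedText: (text: string) => Promise<number[]>,
): Promise<Source[]> => {
  const queryEmbedding = await embedText(query);
  const sourceEmbeddings = await Promise.all(
    sources.map((source) => embedText(source.content)),
  );

  return sources
    .map((source, i) => ({
      source,
      score: cosineSimilarity(queryEmbedding, sourceEmbeddings[i]),
    }))
    .sort((a, b) => b.score - a.score)
    .map((scored) => scored.source);
};
```

The top-ranked sources from a step like this are what get handed to the response generator in step 5; the project's `config.toml` also allows a dot-product similarity measure instead of cosine, but the ranking idea is the same.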
From ac4cba32c801c2ef79ed6096f285d5702e4bcb1c Mon Sep 17 00:00:00 2001
From: Chuck <403chuck@gmail.com>
Date: Thu, 9 May 2024 15:53:57 +0800
Subject: [PATCH 003/399] fix(SettingsDialog): baseURL storage key

---
 ui/components/SettingsDialog.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ui/components/SettingsDialog.tsx b/ui/components/SettingsDialog.tsx
index be94db2..57f79f6 100644
--- a/ui/components/SettingsDialog.tsx
+++ b/ui/components/SettingsDialog.tsx
@@ -89,7 +89,7 @@ const SettingsDialog = ({
       setSelectedEmbeddingModelProvider(embeddingModelProvider);
       setSelectedEmbeddingModel(embeddingModel);
       setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey') || '');
-      setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseUrl') || '');
+      setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseURL') || '');
       setIsLoading(false);
     };
 
From 5e940914a3465fa920307a093febe4227b2a71a5 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com>
Date: Thu, 9 May 2024 20:39:38 +0530
Subject: [PATCH 004/399] feat(output-parsers): add list line output parser

---
 src/lib/outputParsers/listLineOutputParser.ts | 43 +++++++++++++++++++
 1 file changed, 43 insertions(+)
 create mode 100644 src/lib/outputParsers/listLineOutputParser.ts

diff --git a/src/lib/outputParsers/listLineOutputParser.ts b/src/lib/outputParsers/listLineOutputParser.ts
new file mode 100644
index 0000000..4fde080
--- /dev/null
+++ b/src/lib/outputParsers/listLineOutputParser.ts
@@ -0,0 +1,43 @@
+import { BaseOutputParser } from '@langchain/core/output_parsers';
+
+interface LineListOutputParserArgs {
+  key?: string;
+}
+
+class LineListOutputParser extends BaseOutputParser<string[]> {
+  private key = 'questions';
+
+  constructor(args?: LineListOutputParserArgs) {
+    super();
+    this.key = args.key || this.key;
+  }
+
+  static lc_name() {
+    return 'LineListOutputParser';
+  }
+
+  lc_namespace = ['langchain', 'output_parsers', 'line_list_output_parser'];
+
+  async parse(text: string): Promise<string[]> {
+    const regex = /^(\s*(-|\*|\d+\.\s|\d+\)\s|\u2022)\s*)+/;
+    const startKeyIndex = text.indexOf(`<${this.key}>`);
+    const endKeyIndex = text.indexOf(`</${this.key}>`);
+    const questionsStartIndex =
+      startKeyIndex === -1 ? 0 : startKeyIndex + `<${this.key}>`.length;
+    const questionsEndIndex = endKeyIndex === -1 ? text.length : endKeyIndex;
+    const lines = text
+      .slice(questionsStartIndex, questionsEndIndex)
+      .trim()
+      .split('\n')
+      .filter((line) => line.trim() !== '')
+      .map((line) => line.replace(regex, ''));
+
+    return lines;
+  }
+
+  getFormatInstructions(): string {
+    throw new Error('Not implemented.');
+  }
+}
+
+export default LineListOutputParser;

From 0f6986fc9b1f6bb131f686d439afdd1d4fc7d37d Mon Sep 17 00:00:00 2001
From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com>
Date: Thu, 9 May 2024 20:41:43 +0530
Subject: [PATCH 005/399] feat(agents): add suggestion generator agent

---
 src/agents/suggestionGeneratorAgent.ts | 55 ++++++++++++++++++++++++++
 1 file changed, 55 insertions(+)
 create mode 100644 src/agents/suggestionGeneratorAgent.ts

diff --git a/src/agents/suggestionGeneratorAgent.ts b/src/agents/suggestionGeneratorAgent.ts
new file mode 100644
index 0000000..59bd9ea
--- /dev/null
+++ b/src/agents/suggestionGeneratorAgent.ts
@@ -0,0 +1,55 @@
+import { RunnableSequence, RunnableMap } from '@langchain/core/runnables';
+import ListLineOutputParser from '../lib/outputParsers/listLineOutputParser';
+import { PromptTemplate } from '@langchain/core/prompts';
+import formatChatHistoryAsString from '../utils/formatHistory';
+import { BaseMessage } from '@langchain/core/messages';
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import { ChatOpenAI } from '@langchain/openai';
+
+const suggestionGeneratorPrompt = `
+You are an AI suggestion generator for an AI powered search engine. You will be given a conversation below. You need to generate 4-5 suggestions based on the conversation. The suggestion should be relevant to the conversation that can be used by the user to ask the chat model for more information.
+You need to make sure the suggestions are relevant to the conversation and are helpful to the user. Keep a note that the user might use these suggestions to ask a chat model for more information.
+Make sure the suggestions are medium in length and are informative and relevant to the conversation.
+
+Provide these suggestions separated by newlines between the XML tags <suggestions> and </suggestions>.
For example: + + +Suggestion 1 +Suggestion 2 +Suggestion 3 + + +Conversation: +{chat_history} +`; + +type SuggestionGeneratorInput = { + chat_history: BaseMessage[]; +}; + +const outputParser = new ListLineOutputParser({ + key: 'suggestions', +}); + +const createSuggestionGeneratorChain = (llm: BaseChatModel) => { + return RunnableSequence.from([ + RunnableMap.from({ + chat_history: (input: SuggestionGeneratorInput) => + formatChatHistoryAsString(input.chat_history), + }), + PromptTemplate.fromTemplate(suggestionGeneratorPrompt), + llm, + outputParser, + ]); +}; + +const generateSuggestions = ( + input: SuggestionGeneratorInput, + llm: BaseChatModel, +) => { + (llm as ChatOpenAI).temperature = 0; + const suggestionGeneratorChain = createSuggestionGeneratorChain(llm); + return suggestionGeneratorChain.invoke(input); +}; + +export default generateSuggestions; From 09463999c2d93d9be6799a10a95e8e3a5c737cd4 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Thu, 9 May 2024 20:42:03 +0530 Subject: [PATCH 006/399] feat(routes): add suggestions route --- src/routes/index.ts | 2 ++ src/routes/suggestions.ts | 46 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+) create mode 100644 src/routes/suggestions.ts diff --git a/src/routes/index.ts b/src/routes/index.ts index 04390cd..257e677 100644 --- a/src/routes/index.ts +++ b/src/routes/index.ts @@ -3,6 +3,7 @@ import imagesRouter from './images'; import videosRouter from './videos'; import configRouter from './config'; import modelsRouter from './models'; +import suggestionsRouter from './suggestions'; const router = express.Router(); @@ -10,5 +11,6 @@ router.use('/images', imagesRouter); router.use('/videos', videosRouter); router.use('/config', configRouter); router.use('/models', modelsRouter); +router.use('/suggestions', suggestionsRouter); export default router; diff --git a/src/routes/suggestions.ts b/src/routes/suggestions.ts new file mode 100644 index 0000000..10e5715 --- /dev/null +++ b/src/routes/suggestions.ts @@ -0,0 +1,46 @@ +import express from 'express'; +import generateSuggestions from '../agents/suggestionGeneratorAgent'; +import { BaseChatModel } from '@langchain/core/language_models/chat_models'; +import { getAvailableChatModelProviders } from '../lib/providers'; +import { HumanMessage, AIMessage } from '@langchain/core/messages'; +import logger from '../utils/logger'; + +const router = express.Router(); + +router.post('/', async (req, res) => { + try { + let { chat_history, chat_model, chat_model_provider } = req.body; + + chat_history = chat_history.map((msg: any) => { + if (msg.role === 'user') { + return new HumanMessage(msg.content); + } else if (msg.role === 'assistant') { + return new AIMessage(msg.content); + } + }); + + const chatModels = await getAvailableChatModelProviders(); + const provider = chat_model_provider || Object.keys(chatModels)[0]; + const chatModel = chat_model || Object.keys(chatModels[provider])[0]; + + let llm: BaseChatModel | undefined; + + if (chatModels[provider] && chatModels[provider][chatModel]) { + llm = chatModels[provider][chatModel] as BaseChatModel | undefined; + } + + if (!llm) { + res.status(500).json({ message: 'Invalid LLM model selected' }); + return; + } + + const suggestions = await generateSuggestions({ chat_history }, llm); + + res.status(200).json({ suggestions: suggestions }); + } catch (err) { + res.status(500).json({ message: 'An error has occurred.' 
}); + logger.error(`Error in generating suggestions: ${err.message}`); + } +}); + +export default router; From 9a7af945b04d9986e68f81eeeee15accea97ed73 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Thu, 9 May 2024 20:43:04 +0530 Subject: [PATCH 007/399] lint --- docker-compose.yaml | 2 +- package.json | 2 +- src/lib/providers.ts | 7 +++---- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index 5eef31e..f9b3757 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -36,4 +36,4 @@ services: - perplexica-network networks: - perplexica-network: \ No newline at end of file + perplexica-network: diff --git a/package.json b/package.json index a4af068..c3aa58d 100644 --- a/package.json +++ b/package.json @@ -6,7 +6,7 @@ "scripts": { "start": "node dist/app.js", "build": "tsc", - "dev": "nodemon src/app.ts" , + "dev": "nodemon src/app.ts", "format": "prettier . --check", "format:write": "prettier . --write" }, diff --git a/src/lib/providers.ts b/src/lib/providers.ts index d2e40f0..c817f87 100644 --- a/src/lib/providers.ts +++ b/src/lib/providers.ts @@ -157,7 +157,6 @@ export const getAvailableEmbeddingModelProviders = async () => { }); return acc; }, {}); - } catch (err) { logger.error(`Error loading Ollama embeddings: ${err}`); } @@ -172,11 +171,11 @@ export const getAvailableEmbeddingModelProviders = async () => { modelName: 'Xenova/gte-small', }), 'Bert Multilingual': new HuggingFaceTransformersEmbeddings({ - modelName: 'Xenova/bert-base-multilingual-uncased' + modelName: 'Xenova/bert-base-multilingual-uncased', }), }; - } catch(err) { - logger.error(`Error loading local embeddings: ${err}`); + } catch (err) { + logger.error(`Error loading local embeddings: ${err}`); } return models; From 7eace1e6bd3b7279df09e1a9cd8c84e2a5a6ebd1 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Fri, 10 May 2024 20:55:08 +0530 Subject: [PATCH 008/399] feat(searxng-container): bind mount & add limiter --- docker-compose.yaml | 6 +-- searxng.dockerfile | 3 -- searxng/limiter.toml | 3 ++ searxng-settings.yml => searxng/settings.yml | 0 searxng/uwsgi.ini | 50 ++++++++++++++++++++ 5 files changed, 56 insertions(+), 6 deletions(-) delete mode 100644 searxng.dockerfile create mode 100644 searxng/limiter.toml rename searxng-settings.yml => searxng/settings.yml (100%) create mode 100644 searxng/uwsgi.ini diff --git a/docker-compose.yaml b/docker-compose.yaml index f9b3757..6304f34 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,10 +1,10 @@ services: searxng: - build: - context: . 
- dockerfile: searxng.dockerfile + image: docker.io/searxng/searxng:latest ports: - 4000:8080 + volumes: + - ./searxng:/etc/searxng:rw networks: - perplexica-network diff --git a/searxng.dockerfile b/searxng.dockerfile deleted file mode 100644 index 8bcd2b2..0000000 --- a/searxng.dockerfile +++ /dev/null @@ -1,3 +0,0 @@ -FROM searxng/searxng - -COPY searxng-settings.yml /etc/searxng/settings.yml \ No newline at end of file diff --git a/searxng/limiter.toml b/searxng/limiter.toml new file mode 100644 index 0000000..ae69bd3 --- /dev/null +++ b/searxng/limiter.toml @@ -0,0 +1,3 @@ +[botdetection.ip_limit] +# activate link_token method in the ip_limit method +link_token = true \ No newline at end of file diff --git a/searxng-settings.yml b/searxng/settings.yml similarity index 100% rename from searxng-settings.yml rename to searxng/settings.yml diff --git a/searxng/uwsgi.ini b/searxng/uwsgi.ini new file mode 100644 index 0000000..dd1247a --- /dev/null +++ b/searxng/uwsgi.ini @@ -0,0 +1,50 @@ +[uwsgi] +# Who will run the code +uid = searxng +gid = searxng + +# Number of workers (usually CPU count) +# default value: %k (= number of CPU core, see Dockerfile) +workers = %k + +# Number of threads per worker +# default value: 4 (see Dockerfile) +threads = 4 + +# The right granted on the created socket +chmod-socket = 666 + +# Plugin to use and interpreter config +single-interpreter = true +master = true +plugin = python3 +lazy-apps = true +enable-threads = 4 + +# Module to import +module = searx.webapp + +# Virtualenv and python path +pythonpath = /usr/local/searxng/ +chdir = /usr/local/searxng/searx/ + +# automatically set processes name to something meaningful +auto-procname = true + +# Disable request logging for privacy +disable-logging = true +log-5xx = true + +# Set the max size of a request (request-body excluded) +buffer-size = 8192 + +# No keep alive +# See https://github.com/searx/searx-docker/issues/24 +add-header = Connection: close + +# uwsgi serves the static files +static-map = /static=/usr/local/searxng/searx/static +# expires set to one day +static-expires = /* 86400 +static-gzip-all = True +offload-threads = 4 From a60145137c677a021df611e236fb8f3f478976ae Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sat, 11 May 2024 10:23:05 +0530 Subject: [PATCH 009/399] feat(docs): add networking --- README.md | 5 ++ docs/installation/NETWORKING.md | 96 +++++++++++++++++++++++++++++++++ 2 files changed, 101 insertions(+) create mode 100644 docs/installation/NETWORKING.md diff --git a/README.md b/README.md index e45e80a..4735c7f 100644 --- a/README.md +++ b/README.md @@ -9,6 +9,7 @@ - [Features](#features) - [Installation](#installation) - [Getting Started with Docker (Recommended)](#getting-started-with-docker-recommended) + - [Exposing to network](#exposing-to-network) - [Non-Docker Installation](#non-docker-installation) - [Ollama connection errors](#ollama-connection-errors) - [One-Click Deployment](#one-click-deployment) @@ -82,6 +83,10 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. **Note**: After the containers are built, you can start Perplexica directly from Docker without having to open a terminal. +#### Exposing to network + +If you wish to expose Perplexica to your network and use it accross all devices on that network you can read [NETWORKING.md](https://github.com/ItzCrazyKns/Perplexica/blob/master/docs/installation/NETWORKING.md). + ### Non-Docker Installation 1. 
Clone the repository and rename the `sample.config.toml` file to `config.toml` in the root directory. Ensure you complete all required fields in this file. diff --git a/docs/installation/NETWORKING.md b/docs/installation/NETWORKING.md new file mode 100644 index 0000000..25f994a --- /dev/null +++ b/docs/installation/NETWORKING.md @@ -0,0 +1,96 @@ +# Expose Perplexica to a network + +This guide will show you how to make Perplexica available over a network. Follow these steps to allow computers on the same network to interact with Perplexica. Choose the instructions that match the operating system you are using. + +## Windows + +1. Open PowerShell as Administrator + +2. Navigate to the directory containing the `docker-compose.yaml` file + +3. Stop and remove the existing Perplexica containers and images: +``` +docker compose down --rmi all +``` + +4. Open the `docker-compose.yaml` file in a text editor like Notepad++ + +5. Replace `127.0.0.1` with the IP address of the server Perplexica is running on in these two lines: +``` +args: + - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api + - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001 +``` + +6. Save and close the `docker-compose.yaml` file + +7. Rebuild and restart the Perplexica container: +``` +docker compose up -d --build +``` + +## macOS + +1. Open the Terminal application + +2. Navigate to the directory with the `docker-compose.yaml` file: +``` +cd /path/to/docker-compose.yaml +``` + +3. Stop and remove existing containers and images: +``` +docker compose down --rmi all +``` + +4. Open `docker-compose.yaml` in a text editor like Sublime Text: +``` +nano docker-compose.yaml +``` + +5. Replace `127.0.0.1` with the server IP in these lines: +``` +args: + - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api + - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001 +``` + +6. Save and exit the editor + +7. Rebuild and restart Perplexica: +``` +docker compose up -d --build +``` + +## Linux + +1. Open the terminal + +2. Navigate to the `docker-compose.yaml` directory: +``` +cd /path/to/docker-compose.yaml +``` + +3. Stop and remove containers and images: +``` +docker compose down --rmi all +``` + +4. Edit `docker-compose.yaml`: +``` +nano docker-compose.yaml +``` + +5. Replace `127.0.0.1` with the server IP: +``` +args: + - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api + - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001 +``` + +6. Save and exit the editor + +7. Rebuild and restart Perplexica: +``` +docker compose up -d --build +``` \ No newline at end of file From 7a28be9e1a86fef4508a6f46840f12ac1e0c07fb Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sat, 11 May 2024 12:09:08 +0530 Subject: [PATCH 010/399] feat(readme): add installation docs --- README.md | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 4735c7f..bb8f1dd 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,6 @@ - [Features](#features) - [Installation](#installation) - [Getting Started with Docker (Recommended)](#getting-started-with-docker-recommended) - - [Exposing to network](#exposing-to-network) - [Non-Docker Installation](#non-docker-installation) - [Ollama connection errors](#ollama-connection-errors) - [One-Click Deployment](#one-click-deployment) @@ -83,10 +82,6 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. **Note**: After the containers are built, you can start Perplexica directly from Docker without having to open a terminal. 
-#### Exposing to network - -If you wish to expose Perplexica to your network and use it accross all devices on that network you can read [NETWORKING.md](https://github.com/ItzCrazyKns/Perplexica/blob/master/docs/installation/NETWORKING.md). - ### Non-Docker Installation 1. Clone the repository and rename the `sample.config.toml` file to `config.toml` in the root directory. Ensure you complete all required fields in this file. @@ -97,6 +92,8 @@ If you wish to expose Perplexica to your network and use it accross all devices **Note**: Using Docker is recommended as it simplifies the setup process, especially for managing environment variables and dependencies. +See the [installation documentation](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/installation) for more information. + ### Ollama connection errors If you're facing an Ollama connection error, it is often related to the backend not being able to connect to Ollama's API. How can you fix it? You can fix it by updating your Ollama API URL in the settings menu to the following: From 3ef39c69a76eb67278aba6af2132e69da461cbe7 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sat, 11 May 2024 12:09:39 +0530 Subject: [PATCH 011/399] feat(chat-window): add ability to use `q` query param --- ui/components/ChatWindow.tsx | 31 +++++++++++++++++++++---- ui/components/EmptyChatMessageInput.tsx | 2 +- 2 files changed, 27 insertions(+), 6 deletions(-) diff --git a/ui/components/ChatWindow.tsx b/ui/components/ChatWindow.tsx index 6f58757..1cc6ae0 100644 --- a/ui/components/ChatWindow.tsx +++ b/ui/components/ChatWindow.tsx @@ -6,6 +6,7 @@ import Navbar from './Navbar'; import Chat from './Chat'; import EmptyChat from './EmptyChat'; import { toast } from 'sonner'; +import { useSearchParams } from 'next/navigation'; export type Message = { id: string; @@ -15,7 +16,7 @@ export type Message = { sources?: Document[]; }; -const useSocket = (url: string) => { +const useSocket = (url: string, setIsReady: (ready: boolean) => void) => { const [ws, setWs] = useState(null); useEffect(() => { @@ -101,9 +102,17 @@ const useSocket = (url: string) => { ws.onopen = () => { console.log('[DEBUG] open'); - setWs(ws); }; + const stateCheckInterval = setInterval(() => { + if (ws.readyState === 1) { + setIsReady(true); + clearInterval(stateCheckInterval); + } + }, 100); + + setWs(ws); + ws.onmessage = (e) => { const parsedData = JSON.parse(e.data); if (parsedData.type === 'error') { @@ -122,13 +131,18 @@ const useSocket = (url: string) => { ws?.close(); console.log('[DEBUG] closed'); }; - }, [ws, url]); + }, [ws, url, setIsReady]); return ws; }; const ChatWindow = () => { - const ws = useSocket(process.env.NEXT_PUBLIC_WS_URL!); + const searchParams = useSearchParams(); + const initialMessage = searchParams.get('q'); + + const [isReady, setIsReady] = useState(false); + const ws = useSocket(process.env.NEXT_PUBLIC_WS_URL!, setIsReady); + const [chatHistory, setChatHistory] = useState<[string, string][]>([]); const [messages, setMessages] = useState([]); const [loading, setLoading] = useState(false); @@ -250,7 +264,14 @@ const ChatWindow = () => { sendMessage(message.content); }; - return ws ? ( + useEffect(() => { + if (isReady && initialMessage) { + sendMessage(initialMessage); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [isReady, initialMessage]); + + return isReady ? (
{messages.length > 0 ? ( <> diff --git a/ui/components/EmptyChatMessageInput.tsx b/ui/components/EmptyChatMessageInput.tsx index 2e3ab53..4932803 100644 --- a/ui/components/EmptyChatMessageInput.tsx +++ b/ui/components/EmptyChatMessageInput.tsx @@ -1,7 +1,7 @@ import { ArrowRight } from 'lucide-react'; import { useState } from 'react'; import TextareaAutosize from 'react-textarea-autosize'; -import { Attach, CopilotToggle, Focus } from './MessageInputActions'; +import { CopilotToggle, Focus } from './MessageInputActions'; const EmptyChatMessageInput = ({ sendMessage, From 954b4bf89ac09525d2f29d06db906aeedc7c408c Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sat, 11 May 2024 12:14:49 +0530 Subject: [PATCH 012/399] feat(readme): add search engine guide --- README.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/README.md b/README.md index bb8f1dd..be16cc1 100644 --- a/README.md +++ b/README.md @@ -11,6 +11,7 @@ - [Getting Started with Docker (Recommended)](#getting-started-with-docker-recommended) - [Non-Docker Installation](#non-docker-installation) - [Ollama connection errors](#ollama-connection-errors) +- [Using as a Search Engine](#using-as-a-search-engine) - [One-Click Deployment](#one-click-deployment) - [Upcoming Features](#upcoming-features) - [Support Us](#support-us) @@ -104,6 +105,15 @@ On Linux: `http://private_ip_of_computer_hosting_ollama:11434` You need to edit the ports accordingly. +## Using as a Search Engine + +If you wish to use Perplexica as an alternative to traditional search engines like Google or Bing, or if you want to add a shortcut for quick access from your browser's search bar, follow these steps: + +1. Open your browser's settings. +2. Navigate to the 'Search Engines' section. +3. Add a new site search with the following URL: `http://localhost:3000/?q=%s`. Replace `localhost` with your IP address or domain name, and `3000` with the port number if Perplexica is not hosted locally. +4. Click the add button. Now, you can use Perplexica directly from your browser's search bar. + ## One-Click Deployment [![Deploy to RepoCloud](https://d16t0pc4846x52.cloudfront.net/deploylobe.svg)](https://repocloud.io/details/?app_id=267) From c852bee8ed99092d7956b913bdd611ddb8eca300 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sat, 11 May 2024 21:19:38 +0530 Subject: [PATCH 013/399] feat(app): add suspense boundary --- ui/app/page.tsx | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ui/app/page.tsx b/ui/app/page.tsx index 982763a..e18aca9 100644 --- a/ui/app/page.tsx +++ b/ui/app/page.tsx @@ -1,5 +1,6 @@ import ChatWindow from '@/components/ChatWindow'; import { Metadata } from 'next'; +import { Suspense } from 'react'; export const metadata: Metadata = { title: 'Chat - Perplexica', @@ -9,7 +10,9 @@ export const metadata: Metadata = { const Home = () => { return (
-      <ChatWindow />
+      <Suspense>
+        <ChatWindow />
+      </Suspense>
     </div>
); }; From 828eeb0c776d947c4ee74fdbbf24386a33f5cdc3 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sun, 12 May 2024 12:14:52 +0530 Subject: [PATCH 014/399] feat(app-dockerfile): add `PORT` arg --- app.dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/app.dockerfile b/app.dockerfile index 105cf86..ab8bfaa 100644 --- a/app.dockerfile +++ b/app.dockerfile @@ -2,8 +2,11 @@ FROM node:alpine ARG NEXT_PUBLIC_WS_URL ARG NEXT_PUBLIC_API_URL +ARG PORT + ENV NEXT_PUBLIC_WS_URL=${NEXT_PUBLIC_WS_URL} ENV NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL} +ENV PORT=${PORT} WORKDIR /home/perplexica From 9816eb1d3662c6911ba8849a721cc006437767e0 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sun, 12 May 2024 12:15:25 +0530 Subject: [PATCH 015/399] feat(server): add bind address --- sample.config.toml | 5 +++-- src/app.ts | 5 +++-- src/config.ts | 3 +++ 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/sample.config.toml b/sample.config.toml index 7bc8880..5342fe4 100644 --- a/sample.config.toml +++ b/sample.config.toml @@ -1,5 +1,6 @@ [GENERAL] -PORT = 3001 # Port to run the server on +PORT = 31338 # Port to run the server on +BIND_ADDRESS = "0.0.0.0" SIMILARITY_MEASURE = "cosine" # "cosine" or "dot" [API_KEYS] @@ -8,4 +9,4 @@ GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef [API_ENDPOINTS] SEARXNG = "http://localhost:32768" # SearxNG API URL -OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434 +OLLAMA = "" # Ollama API URL - http://127.0.0.1:11434 diff --git a/src/app.ts b/src/app.ts index b8c2371..33184bf 100644 --- a/src/app.ts +++ b/src/app.ts @@ -3,10 +3,11 @@ import express from 'express'; import cors from 'cors'; import http from 'http'; import routes from './routes'; -import { getPort } from './config'; +import { getBindAddress, getPort } from './config'; import logger from './utils/logger'; const port = getPort(); +const bindAddress = getBindAddress(); const app = express(); const server = http.createServer(app); @@ -23,7 +24,7 @@ app.get('/api', (_, res) => { res.status(200).json({ status: 'ok' }); }); -server.listen(port, () => { +server.listen(port, bindAddress, () => { logger.info(`Server is running on port ${port}`); }); diff --git a/src/config.ts b/src/config.ts index 7c0c7f1..f600f58 100644 --- a/src/config.ts +++ b/src/config.ts @@ -7,6 +7,7 @@ const configFileName = 'config.toml'; interface Config { GENERAL: { PORT: number; + BIND_ADDRESS: string; SIMILARITY_MEASURE: string; }; API_KEYS: { @@ -30,6 +31,8 @@ const loadConfig = () => export const getPort = () => loadConfig().GENERAL.PORT; +export const getBindAddress = () => loadConfig().GENERAL.BIND_ADDRESS; + export const getSimilarityMeasure = () => loadConfig().GENERAL.SIMILARITY_MEASURE; From 1b18715f8f2ccc6ec54fc80aa7f22e51b77f068d Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sun, 12 May 2024 12:15:53 +0530 Subject: [PATCH 016/399] feat(docs): update PORT --- README.md | 17 +++-------------- docs/installation/NETWORKING.md | 33 +++++++++++++++++++++++---------- ui/.env.example | 5 +++-- 3 files changed, 29 insertions(+), 26 deletions(-) diff --git a/README.md b/README.md index be16cc1..81043c8 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,6 @@ - [Installation](#installation) - [Getting Started with Docker (Recommended)](#getting-started-with-docker-recommended) - [Non-Docker Installation](#non-docker-installation) - - [Ollama connection errors](#ollama-connection-errors) - [Using as a Search Engine](#using-as-a-search-engine) - [One-Click Deployment](#one-click-deployment) - 
[Upcoming Features](#upcoming-features) @@ -66,7 +65,7 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. 4. Rename the `sample.config.toml` file to `config.toml`. For Docker setups, you need only fill in the following fields: - `OPENAI`: Your OpenAI API key. **You only need to fill this if you wish to use OpenAI's models**. - - `OLLAMA`: Your Ollama API URL. You should enter it as `http://host.docker.internal:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://host.docker.internal:11434`. For other ports, adjust accordingly. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**. + - `OLLAMA`: Your Ollama API URL. If Ollama is hosted on the same computer as Perplexica, you should enter it as `http://127.0.0.1:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://127.0.0.1:11434`. For other ports, adjust accordingly. If Ollama is running on some other server use the server's IP with port or domain in place of it. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**. - `GROQ`: Your Groq API key. **You only need to fill this if you wish to use Groq's hosted models** **Note**: You can change these after starting Perplexica from the settings dialog. @@ -79,7 +78,7 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. docker compose up -d ``` -6. Wait a few minutes for the setup to complete. You can access Perplexica at http://localhost:3000 in your web browser. +6. Wait a few minutes for the setup to complete. You can access Perplexica at http://localhost:31337 in your web browser. **Note**: After the containers are built, you can start Perplexica directly from Docker without having to open a terminal. @@ -95,23 +94,13 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. See the [installation documentation](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/installation) for more information. -### Ollama connection errors - -If you're facing an Ollama connection error, it is often related to the backend not being able to connect to Ollama's API. How can you fix it? You can fix it by updating your Ollama API URL in the settings menu to the following: - -On Windows: `http://host.docker.internal:11434`
-On Mac: `http://host.docker.internal:11434`
-On Linux: `http://private_ip_of_computer_hosting_ollama:11434` - -You need to edit the ports accordingly. - ## Using as a Search Engine If you wish to use Perplexica as an alternative to traditional search engines like Google or Bing, or if you want to add a shortcut for quick access from your browser's search bar, follow these steps: 1. Open your browser's settings. 2. Navigate to the 'Search Engines' section. -3. Add a new site search with the following URL: `http://localhost:3000/?q=%s`. Replace `localhost` with your IP address or domain name, and `3000` with the port number if Perplexica is not hosted locally. +3. Add a new site search with the following URL: `http://localhost:31337/?q=%s`. Replace `localhost` with your IP address or domain name, and `31337` with the port number if Perplexica is not hosted locally. 4. Click the add button. Now, you can use Perplexica directly from your browser's search bar. ## One-Click Deployment diff --git a/docs/installation/NETWORKING.md b/docs/installation/NETWORKING.md index 25f994a..29e603f 100644 --- a/docs/installation/NETWORKING.md +++ b/docs/installation/NETWORKING.md @@ -9,6 +9,7 @@ This guide will show you how to make Perplexica available over a network. Follow 2. Navigate to the directory containing the `docker-compose.yaml` file 3. Stop and remove the existing Perplexica containers and images: + ``` docker compose down --rmi all ``` @@ -16,81 +17,93 @@ docker compose down --rmi all 4. Open the `docker-compose.yaml` file in a text editor like Notepad++ 5. Replace `127.0.0.1` with the IP address of the server Perplexica is running on in these two lines: + ``` args: - - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api - - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001 + - NEXT_PUBLIC_API_URL=http://127.0.0.1:31338/api + - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:31338 ``` 6. Save and close the `docker-compose.yaml` file 7. Rebuild and restart the Perplexica container: + ``` docker compose up -d --build ``` -## macOS +## macOS 1. Open the Terminal application 2. Navigate to the directory with the `docker-compose.yaml` file: + ``` cd /path/to/docker-compose.yaml ``` 3. Stop and remove existing containers and images: + ``` docker compose down --rmi all ``` 4. Open `docker-compose.yaml` in a text editor like Sublime Text: + ``` nano docker-compose.yaml ``` 5. Replace `127.0.0.1` with the server IP in these lines: + ``` args: - - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api - - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001 + - NEXT_PUBLIC_API_URL=http://127.0.0.1:31338/api + - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:31338 ``` 6. Save and exit the editor 7. Rebuild and restart Perplexica: + ``` -docker compose up -d --build +docker compose up -d --build ``` ## Linux -1. Open the terminal +1. Open the terminal 2. Navigate to the `docker-compose.yaml` directory: + ``` cd /path/to/docker-compose.yaml ``` 3. Stop and remove containers and images: + ``` docker compose down --rmi all ``` 4. Edit `docker-compose.yaml`: + ``` nano docker-compose.yaml ``` 5. Replace `127.0.0.1` with the server IP: + ``` args: - - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api - - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001 + - NEXT_PUBLIC_API_URL=http://127.0.0.1:31338/api + - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:31338 ``` 6. Save and exit the editor 7. 
Rebuild and restart Perplexica: + ``` docker compose up -d --build -``` \ No newline at end of file +``` diff --git a/ui/.env.example b/ui/.env.example index 57a3ed9..4d12554 100644 --- a/ui/.env.example +++ b/ui/.env.example @@ -1,2 +1,3 @@ -NEXT_PUBLIC_WS_URL=ws://localhost:3001 -NEXT_PUBLIC_API_URL=http://localhost:3001/api \ No newline at end of file +NEXT_PUBLIC_WS_URL=ws://localhost:31338 +NEXT_PUBLIC_API_URL=http://localhost:31338/api +PORT=31337 \ No newline at end of file From b622df5a9fbacd468a8deeaa492fabd01a26ec6a Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sun, 12 May 2024 12:16:08 +0530 Subject: [PATCH 017/399] feat(docker-compose): update ports, change network type --- docker-compose.yaml | 25 ++++++++----------------- 1 file changed, 8 insertions(+), 17 deletions(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index 6304f34..e11bf4e 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,39 +1,30 @@ services: searxng: image: docker.io/searxng/searxng:latest - ports: - - 4000:8080 volumes: - ./searxng:/etc/searxng:rw - networks: - - perplexica-network + ports: + - 31336:8080 perplexica-backend: build: context: . dockerfile: backend.dockerfile args: - - SEARXNG_API_URL=http://searxng:8080 + - SEARXNG_API_URL=http://127.0.0.1:31336 depends_on: - searxng - ports: - - 3001:3001 - networks: - - perplexica-network + network_mode: host perplexica-frontend: build: context: . dockerfile: app.dockerfile args: - - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api - - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001 + - NEXT_PUBLIC_API_URL=http://127.0.0.1:31338/api + - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:31338 + - PORT=31337 depends_on: - perplexica-backend ports: - - 3000:3000 - networks: - - perplexica-network - -networks: - perplexica-network: + - 31337:31337 From 9d30224faad7b288cc112cde2ec6c694e6d4e8c6 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sun, 12 May 2024 12:24:36 +0530 Subject: [PATCH 018/399] feat(readme): update readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 81043c8..9b81615 100644 --- a/README.md +++ b/README.md @@ -92,7 +92,7 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. **Note**: Using Docker is recommended as it simplifies the setup process, especially for managing environment variables and dependencies. -See the [installation documentation](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/installation) for more information. +See the [installation documentation](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/installation) for more information like exposing it your network, etc. 
## Using as a Search Engine From 22aee27cda8d635228901cb35b54a8aed290d8a2 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sun, 12 May 2024 12:48:01 +0530 Subject: [PATCH 019/399] feat(env): remove port --- ui/.env.example | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/ui/.env.example b/ui/.env.example index 4d12554..9869283 100644 --- a/ui/.env.example +++ b/ui/.env.example @@ -1,3 +1,2 @@ NEXT_PUBLIC_WS_URL=ws://localhost:31338 -NEXT_PUBLIC_API_URL=http://localhost:31338/api -PORT=31337 \ No newline at end of file +NEXT_PUBLIC_API_URL=http://localhost:31338/api \ No newline at end of file From 100872f2d9da03a5a85eb710b212de93f7779461 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sun, 12 May 2024 14:04:05 +0530 Subject: [PATCH 020/399] feat(docker-compose): revert network changes --- README.md | 13 ++++++++++++- docker-compose.yaml | 14 ++++++++++++-- sample.config.toml | 2 +- 3 files changed, 25 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 9b81615..9235e4e 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,7 @@ - [Installation](#installation) - [Getting Started with Docker (Recommended)](#getting-started-with-docker-recommended) - [Non-Docker Installation](#non-docker-installation) + - [Ollama connection errors](#ollama-connection-errors) - [Using as a Search Engine](#using-as-a-search-engine) - [One-Click Deployment](#one-click-deployment) - [Upcoming Features](#upcoming-features) @@ -65,7 +66,7 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. 4. Rename the `sample.config.toml` file to `config.toml`. For Docker setups, you need only fill in the following fields: - `OPENAI`: Your OpenAI API key. **You only need to fill this if you wish to use OpenAI's models**. - - `OLLAMA`: Your Ollama API URL. If Ollama is hosted on the same computer as Perplexica, you should enter it as `http://127.0.0.1:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://127.0.0.1:11434`. For other ports, adjust accordingly. If Ollama is running on some other server use the server's IP with port or domain in place of it. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**. + - `OLLAMA`: Your Ollama API URL. You should enter it as `http://host.docker.internal:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://host.docker.internal:11434`. For other ports, adjust accordingly. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**. - `GROQ`: Your Groq API key. **You only need to fill this if you wish to use Groq's hosted models** **Note**: You can change these after starting Perplexica from the settings dialog. @@ -94,6 +95,16 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. See the [installation documentation](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/installation) for more information like exposing it your network, etc. +### Ollama connection errors + +If you're facing an Ollama connection error, it is often related to the backend not being able to connect to Ollama's API. How can you fix it? You can fix it by updating your Ollama API URL in the settings menu to the following: + +On Windows: `http://host.docker.internal:11434`
+On Mac: `http://host.docker.internal:11434`
+On Linux: `http://private_ip_of_computer_hosting_ollama:11434` + +You need to edit the ports accordingly. + ## Using as a Search Engine If you wish to use Perplexica as an alternative to traditional search engines like Google or Bing, or if you want to add a shortcut for quick access from your browser's search bar, follow these steps: diff --git a/docker-compose.yaml b/docker-compose.yaml index e11bf4e..dc55c29 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -5,16 +5,21 @@ services: - ./searxng:/etc/searxng:rw ports: - 31336:8080 + networks: + - perplexica-network perplexica-backend: build: context: . dockerfile: backend.dockerfile args: - - SEARXNG_API_URL=http://127.0.0.1:31336 + - SEARXNG_API_URL=http://searxng:8080 depends_on: - searxng - network_mode: host + ports: + - 31338:31338 + networks: + - perplexica-network perplexica-frontend: build: @@ -28,3 +33,8 @@ services: - perplexica-backend ports: - 31337:31337 + networks: + - perplexica-network + +networks: + perplexica-network: \ No newline at end of file diff --git a/sample.config.toml b/sample.config.toml index 5342fe4..3c7f31b 100644 --- a/sample.config.toml +++ b/sample.config.toml @@ -9,4 +9,4 @@ GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef [API_ENDPOINTS] SEARXNG = "http://localhost:32768" # SearxNG API URL -OLLAMA = "" # Ollama API URL - http://127.0.0.1:11434 +OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434 From 0993c5a760bfe894ce210a714cb2ee83b8a89880 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Mon, 13 May 2024 19:58:17 +0530 Subject: [PATCH 021/399] feat(app): revert port & network changes --- app.dockerfile | 3 --- docker-compose.yaml | 13 ++++++------- sample.config.toml | 5 ++--- src/app.ts | 5 ++--- src/config.ts | 3 --- ui/.env.example | 4 ++-- 6 files changed, 12 insertions(+), 21 deletions(-) diff --git a/app.dockerfile b/app.dockerfile index ab8bfaa..105cf86 100644 --- a/app.dockerfile +++ b/app.dockerfile @@ -2,11 +2,8 @@ FROM node:alpine ARG NEXT_PUBLIC_WS_URL ARG NEXT_PUBLIC_API_URL -ARG PORT - ENV NEXT_PUBLIC_WS_URL=${NEXT_PUBLIC_WS_URL} ENV NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL} -ENV PORT=${PORT} WORKDIR /home/perplexica diff --git a/docker-compose.yaml b/docker-compose.yaml index dc55c29..ac83575 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -4,7 +4,7 @@ services: volumes: - ./searxng:/etc/searxng:rw ports: - - 31336:8080 + - 4000:8080 networks: - perplexica-network @@ -17,7 +17,7 @@ services: depends_on: - searxng ports: - - 31338:31338 + - 3001:3001 networks: - perplexica-network @@ -26,15 +26,14 @@ services: context: . 
dockerfile: app.dockerfile args: - - NEXT_PUBLIC_API_URL=http://127.0.0.1:31338/api - - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:31338 - - PORT=31337 + - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api + - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001 depends_on: - perplexica-backend ports: - - 31337:31337 + - 3000:3000 networks: - perplexica-network networks: - perplexica-network: \ No newline at end of file + perplexica-network: diff --git a/sample.config.toml b/sample.config.toml index 3c7f31b..8d35666 100644 --- a/sample.config.toml +++ b/sample.config.toml @@ -1,6 +1,5 @@ [GENERAL] -PORT = 31338 # Port to run the server on -BIND_ADDRESS = "0.0.0.0" +PORT = 3001 # Port to run the server on SIMILARITY_MEASURE = "cosine" # "cosine" or "dot" [API_KEYS] @@ -9,4 +8,4 @@ GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef [API_ENDPOINTS] SEARXNG = "http://localhost:32768" # SearxNG API URL -OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434 +OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434 \ No newline at end of file diff --git a/src/app.ts b/src/app.ts index 33184bf..b8c2371 100644 --- a/src/app.ts +++ b/src/app.ts @@ -3,11 +3,10 @@ import express from 'express'; import cors from 'cors'; import http from 'http'; import routes from './routes'; -import { getBindAddress, getPort } from './config'; +import { getPort } from './config'; import logger from './utils/logger'; const port = getPort(); -const bindAddress = getBindAddress(); const app = express(); const server = http.createServer(app); @@ -24,7 +23,7 @@ app.get('/api', (_, res) => { res.status(200).json({ status: 'ok' }); }); -server.listen(port, bindAddress, () => { +server.listen(port, () => { logger.info(`Server is running on port ${port}`); }); diff --git a/src/config.ts b/src/config.ts index f600f58..7c0c7f1 100644 --- a/src/config.ts +++ b/src/config.ts @@ -7,7 +7,6 @@ const configFileName = 'config.toml'; interface Config { GENERAL: { PORT: number; - BIND_ADDRESS: string; SIMILARITY_MEASURE: string; }; API_KEYS: { @@ -31,8 +30,6 @@ const loadConfig = () => export const getPort = () => loadConfig().GENERAL.PORT; -export const getBindAddress = () => loadConfig().GENERAL.BIND_ADDRESS; - export const getSimilarityMeasure = () => loadConfig().GENERAL.SIMILARITY_MEASURE; diff --git a/ui/.env.example b/ui/.env.example index 9869283..57a3ed9 100644 --- a/ui/.env.example +++ b/ui/.env.example @@ -1,2 +1,2 @@ -NEXT_PUBLIC_WS_URL=ws://localhost:31338 -NEXT_PUBLIC_API_URL=http://localhost:31338/api \ No newline at end of file +NEXT_PUBLIC_WS_URL=ws://localhost:3001 +NEXT_PUBLIC_API_URL=http://localhost:3001/api \ No newline at end of file From 0e2f4514b421058372b26a10dda6117694b016b5 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Mon, 13 May 2024 20:10:44 +0530 Subject: [PATCH 022/399] feat(readme): update readme --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 9235e4e..0cf197b 100644 --- a/README.md +++ b/README.md @@ -79,7 +79,7 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. docker compose up -d ``` -6. Wait a few minutes for the setup to complete. You can access Perplexica at http://localhost:31337 in your web browser. +6. Wait a few minutes for the setup to complete. You can access Perplexica at http://localhost:3000 in your web browser. **Note**: After the containers are built, you can start Perplexica directly from Docker without having to open a terminal. 
@@ -111,7 +111,7 @@ If you wish to use Perplexica as an alternative to traditional search engines li 1. Open your browser's settings. 2. Navigate to the 'Search Engines' section. -3. Add a new site search with the following URL: `http://localhost:31337/?q=%s`. Replace `localhost` with your IP address or domain name, and `31337` with the port number if Perplexica is not hosted locally. +3. Add a new site search with the following URL: `http://localhost:3000/?q=%s`. Replace `localhost` with your IP address or domain name, and `3000` with the port number if Perplexica is not hosted locally. 4. Click the add button. Now, you can use Perplexica directly from your browser's search bar. ## One-Click Deployment From 180e204c2d6da5395011d8548ee0649107fa37a8 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Tue, 14 May 2024 19:33:54 +0530 Subject: [PATCH 023/399] feat(providers): add GPT-4 omni --- src/lib/providers.ts | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/lib/providers.ts b/src/lib/providers.ts index c817f87..3223193 100644 --- a/src/lib/providers.ts +++ b/src/lib/providers.ts @@ -34,6 +34,11 @@ export const getAvailableChatModelProviders = async () => { modelName: 'gpt-4-turbo', temperature: 0.7, }), + 'GPT-4 omni': new ChatOpenAI({ + openAIApiKey, + modelName: 'gpt-4o', + temperature: 0.7, + }), }; } catch (err) { logger.error(`Error loading OpenAI models: ${err}`); From 68b595023e8da06a9c87bf26c4d376ee7e9ecc5e Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sat, 18 May 2024 13:10:09 +0530 Subject: [PATCH 024/399] feat(suggestion-generator): update prompt --- src/agents/suggestionGeneratorAgent.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/agents/suggestionGeneratorAgent.ts b/src/agents/suggestionGeneratorAgent.ts index 59bd9ea..0efdfa9 100644 --- a/src/agents/suggestionGeneratorAgent.ts +++ b/src/agents/suggestionGeneratorAgent.ts @@ -14,9 +14,9 @@ Make sure the suggestions are medium in length and are informative and relevant Provide these suggestions separated by newlines between the XML tags and . For example: -Suggestion 1 -Suggestion 2 -Suggestion 3 +Tell me more about SpaceX and their recent projects +What is the latest news on SpaceX? +Who is the CEO of SpaceX? Conversation: From 3bfaf9be2804387ce91d2f8bd4da5a88c0f78ce8 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sat, 18 May 2024 13:10:39 +0530 Subject: [PATCH 025/399] feat(app): add suggestion generation --- src/lib/outputParsers/listLineOutputParser.ts | 2 +- ui/components/Chat.tsx | 1 + ui/components/ChatWindow.tsx | 31 +++++++++++++++++-- ui/lib/actions.ts | 22 +++++++++++++ 4 files changed, 53 insertions(+), 3 deletions(-) create mode 100644 ui/lib/actions.ts diff --git a/src/lib/outputParsers/listLineOutputParser.ts b/src/lib/outputParsers/listLineOutputParser.ts index 4fde080..57a9bbc 100644 --- a/src/lib/outputParsers/listLineOutputParser.ts +++ b/src/lib/outputParsers/listLineOutputParser.ts @@ -9,7 +9,7 @@ class LineListOutputParser extends BaseOutputParser { constructor(args?: LineListOutputParserArgs) { super(); - this.key = args.key || this.key; + this.key = args.key ?? this.key; } static lc_name() { diff --git a/ui/components/Chat.tsx b/ui/components/Chat.tsx index ddd2957..7b0c1b3 100644 --- a/ui/components/Chat.tsx +++ b/ui/components/Chat.tsx @@ -63,6 +63,7 @@ const Chat = ({ dividerRef={isLast ? dividerRef : undefined} isLast={isLast} rewrite={rewrite} + sendMessage={sendMessage} /> {!isLast && msg.role === 'assistant' && (
diff --git a/ui/components/ChatWindow.tsx b/ui/components/ChatWindow.tsx index 1cc6ae0..5f266b5 100644 --- a/ui/components/ChatWindow.tsx +++ b/ui/components/ChatWindow.tsx @@ -1,18 +1,20 @@ 'use client'; -import { useEffect, useState } from 'react'; +import { useEffect, useRef, useState } from 'react'; import { Document } from '@langchain/core/documents'; import Navbar from './Navbar'; import Chat from './Chat'; import EmptyChat from './EmptyChat'; import { toast } from 'sonner'; import { useSearchParams } from 'next/navigation'; +import { getSuggestions } from '@/lib/actions'; export type Message = { id: string; createdAt: Date; content: string; role: 'user' | 'assistant'; + suggestions?: string[]; sources?: Document[]; }; @@ -145,10 +147,15 @@ const ChatWindow = () => { const [chatHistory, setChatHistory] = useState<[string, string][]>([]); const [messages, setMessages] = useState([]); + const messagesRef = useRef([]); const [loading, setLoading] = useState(false); const [messageAppeared, setMessageAppeared] = useState(false); const [focusMode, setFocusMode] = useState('webSearch'); + useEffect(() => { + messagesRef.current = messages; + }, [messages]); + const sendMessage = async (message: string) => { if (loading) return; setLoading(true); @@ -177,7 +184,7 @@ const ChatWindow = () => { }, ]); - const messageHandler = (e: MessageEvent) => { + const messageHandler = async (e: MessageEvent) => { const data = JSON.parse(e.data); if (data.type === 'error') { @@ -239,8 +246,28 @@ const ChatWindow = () => { ['human', message], ['assistant', recievedMessage], ]); + ws?.removeEventListener('message', messageHandler); setLoading(false); + + const lastMsg = messagesRef.current[messagesRef.current.length - 1]; + + if ( + lastMsg.role === 'assistant' && + lastMsg.sources && + lastMsg.sources.length > 0 && + !lastMsg.suggestions + ) { + const suggestions = await getSuggestions(messagesRef.current); + setMessages((prev) => + prev.map((msg) => { + if (msg.id === lastMsg.id) { + return { ...msg, suggestions: suggestions }; + } + return msg; + }), + ); + } } }; diff --git a/ui/lib/actions.ts b/ui/lib/actions.ts new file mode 100644 index 0000000..d7eb71f --- /dev/null +++ b/ui/lib/actions.ts @@ -0,0 +1,22 @@ +import { Message } from '@/components/ChatWindow'; + +export const getSuggestions = async (chatHisory: Message[]) => { + const chatModel = localStorage.getItem('chatModel'); + const chatModelProvider = localStorage.getItem('chatModelProvider'); + + const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/suggestions`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + chat_history: chatHisory, + chat_model: chatModel, + chat_model_provider: chatModelProvider, + }), + }); + + const data = (await res.json()) as { suggestions: string[] }; + + return data.suggestions; +}; From fcff93a594392868c7f0111f5994da9a3a200aef Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sat, 18 May 2024 13:10:54 +0530 Subject: [PATCH 026/399] feat(message-actions): update rewrite button --- ui/components/MessageActions/Rewrite.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ui/components/MessageActions/Rewrite.tsx b/ui/components/MessageActions/Rewrite.tsx index 3ae282d..3282e7d 100644 --- a/ui/components/MessageActions/Rewrite.tsx +++ b/ui/components/MessageActions/Rewrite.tsx @@ -10,9 +10,10 @@ const Rewrite = ({ return ( ); }; From c61facef13ed0c1cce3265d96c0e81d33bad6efb Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sat, 18 May 2024 
13:11:15 +0530 Subject: [PATCH 027/399] feat(message-box): display suggestions --- ui/components/MessageBox.tsx | 52 +++++++++++++++++++++++++++++++++--- 1 file changed, 49 insertions(+), 3 deletions(-) diff --git a/ui/components/MessageBox.tsx b/ui/components/MessageBox.tsx index 9712a23..8084d5f 100644 --- a/ui/components/MessageBox.tsx +++ b/ui/components/MessageBox.tsx @@ -4,7 +4,15 @@ import React, { MutableRefObject, useEffect, useState } from 'react'; import { Message } from './ChatWindow'; import { cn } from '@/lib/utils'; -import { BookCopy, Disc3, Share, Volume2, StopCircle } from 'lucide-react'; +import { + BookCopy, + Disc3, + Share, + Volume2, + StopCircle, + Layers3, + Plus, +} from 'lucide-react'; import Markdown from 'markdown-to-jsx'; import Copy from './MessageActions/Copy'; import Rewrite from './MessageActions/Rewrite'; @@ -21,6 +29,7 @@ const MessageBox = ({ dividerRef, isLast, rewrite, + sendMessage, }: { message: Message; messageIndex: number; @@ -29,6 +38,7 @@ const MessageBox = ({ dividerRef?: MutableRefObject; isLast: boolean; rewrite: (messageId: string) => void; + sendMessage: (message: string) => void; }) => { const [parsedMessage, setParsedMessage] = useState(message.content); const [speechMessage, setSpeechMessage] = useState(message.content); @@ -98,9 +108,9 @@ const MessageBox = ({ {loading && isLast ? null : (
- + */}
@@ -124,6 +134,42 @@ const MessageBox = ({
)} + {isLast && + message.suggestions && + message.suggestions.length > 0 && + message.role === 'assistant' && + !loading && ( + <> +
+
+
+ +

Related

+
+
+ {message.suggestions.map((suggestion, i) => ( +
+
+
{ + sendMessage(suggestion); + }} + className="cursor-pointer flex flex-row justify-between font-medium space-x-2 items-center" + > +

+ {suggestion} +

+ +
+
+ ))} +
+
+ + )}
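Taken together, patches 025-027 wire follow-up suggestions into the chat UI: once the answer stream for the last assistant message ends and that message cites sources, the frontend posts the chat history to the backend's /suggestions route, stores the returned strings on the message, and renders them under a "Related" heading where clicking one simply calls sendMessage(suggestion). The sketch below condenses that flow into framework-free TypeScript. It is only an illustration: the Message fields, the request body keys, and the guard conditions mirror the diffs above, while the helper names fetchSuggestions and appendSuggestions and the inline SourceDoc type are assumptions made for this sketch, not identifiers from the actual commits.

// Hypothetical sketch of the suggestion flow from patches 025-027; helper names are illustrative.
type SourceDoc = { pageContent: string; metadata: Record<string, unknown> }; // stand-in for the @langchain/core Document type

type Message = {
  id: string;
  createdAt: Date;
  content: string;
  role: 'user' | 'assistant';
  suggestions?: string[];
  sources?: SourceDoc[];
};

// POST the chat history (plus the locally stored model settings) to the backend.
const fetchSuggestions = async (chatHistory: Message[]): Promise<string[]> => {
  const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/suggestions`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      chat_history: chatHistory,
      chat_model: localStorage.getItem('chatModel'),
      chat_model_provider: localStorage.getItem('chatModelProvider'),
    }),
  });
  const data = (await res.json()) as { suggestions: string[] };
  return data.suggestions;
};

// Run after the stream ends: only the last assistant message that cited sources
// and has no suggestions yet triggers a request, matching the guard in ChatWindow.
const appendSuggestions = async (messages: Message[]): Promise<Message[]> => {
  const lastMsg = messages[messages.length - 1];

  if (
    !lastMsg ||
    lastMsg.role !== 'assistant' ||
    !lastMsg.sources?.length ||
    lastMsg.suggestions
  ) {
    return messages;
  }

  const suggestions = await fetchSuggestions(messages);
  // Attach the suggestions to the message they belong to; the UI then lists them
  // under "Related" and feeds a clicked suggestion back into sendMessage().
  return messages.map((msg) =>
    msg.id === lastMsg.id ? { ...msg, suggestions } : msg,
  );
};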
From 64ea4b4289d41e56c52744cfcb34d99661251635 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sat, 18 May 2024 13:11:24 +0530 Subject: [PATCH 028/399] feat(package): bump version --- package.json | 2 +- ui/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index c3aa58d..0308e93 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "perplexica-backend", - "version": "1.4.0", + "version": "1.5.0", "license": "MIT", "author": "ItzCrazyKns", "scripts": { diff --git a/ui/package.json b/ui/package.json index a06581e..ff61082 100644 --- a/ui/package.json +++ b/ui/package.json @@ -1,6 +1,6 @@ { "name": "perplexica-frontend", - "version": "1.4.0", + "version": "1.5.0", "license": "MIT", "author": "ItzCrazyKns", "scripts": { From 7853c18b6feb574700ab6827645e2e73270b9d73 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Sun, 19 May 2024 11:35:28 +0530 Subject: [PATCH 029/399] feat(docs): update port --- docs/installation/NETWORKING.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/installation/NETWORKING.md b/docs/installation/NETWORKING.md index 29e603f..baad296 100644 --- a/docs/installation/NETWORKING.md +++ b/docs/installation/NETWORKING.md @@ -20,8 +20,8 @@ docker compose down --rmi all ``` args: - - NEXT_PUBLIC_API_URL=http://127.0.0.1:31338/api - - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:31338 + - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api + - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001 ``` 6. Save and close the `docker-compose.yaml` file @@ -58,8 +58,8 @@ nano docker-compose.yaml ``` args: - - NEXT_PUBLIC_API_URL=http://127.0.0.1:31338/api - - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:31338 + - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api + - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001 ``` 6. Save and exit the editor @@ -96,8 +96,8 @@ nano docker-compose.yaml ``` args: - - NEXT_PUBLIC_API_URL=http://127.0.0.1:31338/api - - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:31338 + - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api + - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001 ``` 6. Save and exit the editor From d04ba91c85e917f41bed48b92c1df800af4c1fc7 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Wed, 22 May 2024 10:45:16 +0530 Subject: [PATCH 030/399] feat(routes): use coalescing operator --- src/routes/images.ts | 4 ++-- src/routes/suggestions.ts | 4 ++-- src/routes/videos.ts | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/routes/images.ts b/src/routes/images.ts index d8ad8e1..6bd43d3 100644 --- a/src/routes/images.ts +++ b/src/routes/images.ts @@ -20,8 +20,8 @@ router.post('/', async (req, res) => { }); const chatModels = await getAvailableChatModelProviders(); - const provider = chat_model_provider || Object.keys(chatModels)[0]; - const chatModel = chat_model || Object.keys(chatModels[provider])[0]; + const provider = chat_model_provider ?? Object.keys(chatModels)[0]; + const chatModel = chat_model ?? Object.keys(chatModels[provider])[0]; let llm: BaseChatModel | undefined; diff --git a/src/routes/suggestions.ts b/src/routes/suggestions.ts index 10e5715..b15ff5f 100644 --- a/src/routes/suggestions.ts +++ b/src/routes/suggestions.ts @@ -20,8 +20,8 @@ router.post('/', async (req, res) => { }); const chatModels = await getAvailableChatModelProviders(); - const provider = chat_model_provider || Object.keys(chatModels)[0]; - const chatModel = chat_model || Object.keys(chatModels[provider])[0]; + const provider = chat_model_provider ?? 
Object.keys(chatModels)[0]; + const chatModel = chat_model ?? Object.keys(chatModels[provider])[0]; let llm: BaseChatModel | undefined; diff --git a/src/routes/videos.ts b/src/routes/videos.ts index e117a5a..0ffdb2c 100644 --- a/src/routes/videos.ts +++ b/src/routes/videos.ts @@ -20,8 +20,8 @@ router.post('/', async (req, res) => { }); const chatModels = await getAvailableChatModelProviders(); - const provider = chat_model_provider || Object.keys(chatModels)[0]; - const chatModel = chat_model || Object.keys(chatModels[provider])[0]; + const provider = chat_model_provider ?? Object.keys(chatModels)[0]; + const chatModel = chat_model ?? Object.keys(chatModels[provider])[0]; let llm: BaseChatModel | undefined; From 79cfd0a7229d00b6cc8ae269301d3fde059b74df Mon Sep 17 00:00:00 2001 From: WanQuanXie Date: Fri, 24 May 2024 17:32:14 +0800 Subject: [PATCH 031/399] chore(ui): add next-themes --- ui/package.json | 1 + ui/yarn.lock | 26 +++++++++++++++++++++++--- 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/ui/package.json b/ui/package.json index ff61082..90a71f4 100644 --- a/ui/package.json +++ b/ui/package.json @@ -20,6 +20,7 @@ "lucide-react": "^0.363.0", "markdown-to-jsx": "^7.4.5", "next": "14.1.4", + "next-themes": "^0.3.0", "react": "^18", "react-dom": "^18", "react-text-to-speech": "^0.14.5", diff --git a/ui/yarn.lock b/ui/yarn.lock index ec8b3d7..d348a8c 100644 --- a/ui/yarn.lock +++ b/ui/yarn.lock @@ -2244,6 +2244,11 @@ natural-compare@^1.4.0: resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== +next-themes@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/next-themes/-/next-themes-0.3.0.tgz#b4d2a866137a67d42564b07f3a3e720e2ff3871a" + integrity sha512-/QHIrsYpd6Kfk7xakK4svpDI5mmXP0gfvCoJdGpZQ2TOrQZmsW0QxjaiLn8wbIKjtm4BTSqLoix4lxYYOnLJ/w== + next@14.1.4: version "14.1.4" resolved "https://registry.yarnpkg.com/next/-/next-14.1.4.tgz#203310f7310578563fd5c961f0db4729ce7a502d" @@ -2854,8 +2859,16 @@ streamsearch@^1.1.0: resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== -"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.1.0: - name string-width-cjs +"string-width-cjs@npm:string-width@^4.2.0": + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^4.1.0: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -2919,7 +2932,14 @@ string.prototype.trimstart@^1.0.8: define-properties "^1.2.1" es-object-atoms "^1.0.0" -"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: +"strip-ansi-cjs@npm:strip-ansi@^6.0.1": + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity 
sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== From f9664d48e7f9d8f5f473938d5074b56bba3ff034 Mon Sep 17 00:00:00 2001 From: WanQuanXie Date: Fri, 24 May 2024 18:20:15 +0800 Subject: [PATCH 032/399] feat: setup theme context config --- ui/app/layout.tsx | 25 +++++++------ ui/components/theme/Provider.tsx | 14 +++++++ ui/components/theme/Switcher.tsx | 63 ++++++++++++++++++++++++++++++++ ui/tailwind.config.ts | 1 + 4 files changed, 92 insertions(+), 11 deletions(-) create mode 100644 ui/components/theme/Provider.tsx create mode 100644 ui/components/theme/Switcher.tsx diff --git a/ui/app/layout.tsx b/ui/app/layout.tsx index b3f5005..87144cf 100644 --- a/ui/app/layout.tsx +++ b/ui/app/layout.tsx @@ -4,6 +4,7 @@ import './globals.css'; import { cn } from '@/lib/utils'; import Sidebar from '@/components/Sidebar'; import { Toaster } from 'sonner'; +import { ThemeProviderComponent } from '@/components/theme/Provider'; const montserrat = Montserrat({ weight: ['300', '400', '500', '700'], @@ -24,18 +25,20 @@ export default function RootLayout({ children: React.ReactNode; }>) { return ( - + - {children} - + + {children} + + ); diff --git a/ui/components/theme/Provider.tsx b/ui/components/theme/Provider.tsx new file mode 100644 index 0000000..2e110f6 --- /dev/null +++ b/ui/components/theme/Provider.tsx @@ -0,0 +1,14 @@ +'use client'; +import { ThemeProvider } from 'next-themes'; + +export function ThemeProviderComponent({ + children, +}: { + children: React.ReactNode; +}) { + return ( + + {children} + + ); +} diff --git a/ui/components/theme/Switcher.tsx b/ui/components/theme/Switcher.tsx new file mode 100644 index 0000000..d1f44a3 --- /dev/null +++ b/ui/components/theme/Switcher.tsx @@ -0,0 +1,63 @@ +'use client'; +import { useTheme } from 'next-themes'; +import { SunIcon, MoonIcon, MonitorIcon } from 'lucide-react'; +import { useCallback, useEffect, useState } from 'react'; + +type Theme = 'dark' | 'light' | 'system'; + +export function ThemeSwitcher() { + const [mounted, setMounted] = useState(false); + + const { theme, setTheme } = useTheme(); + + const isTheme = useCallback((t: Theme) => t === theme, [theme]); + + const handleThemeSwitch = (theme: Theme) => { + setTheme(theme); + }; + + useEffect(() => { + setMounted(true); + }, []); + + useEffect(() => { + if (isTheme('system')) { + const preferDarkScheme = window.matchMedia( + '(prefers-color-scheme: dark)', + ); + + const detectThemeChange = (event: MediaQueryListEvent) => { + const theme: Theme = event.matches ? 'dark' : 'light'; + setTheme(theme); + }; + + preferDarkScheme.addEventListener('change', detectThemeChange); + + return () => { + preferDarkScheme.removeEventListener('change', detectThemeChange); + }; + } + }, [isTheme, setTheme, theme]); + + // Avoid Hydration Mismatch + if (!mounted) { + return null; + } + + return isTheme('dark') ? ( + handleThemeSwitch('light')} + /> + ) : isTheme('light') ? 
( + handleThemeSwitch('dark')} + /> + ) : ( + handleThemeSwitch('system')} + /> + ); +} diff --git a/ui/tailwind.config.ts b/ui/tailwind.config.ts index 05f107d..a757263 100644 --- a/ui/tailwind.config.ts +++ b/ui/tailwind.config.ts @@ -6,6 +6,7 @@ const config: Config = { './components/**/*.{js,ts,jsx,tsx,mdx}', './app/**/*.{js,ts,jsx,tsx,mdx}', ], + darkMode: 'class', theme: { extend: {}, }, From 996cc1b674e169b54c91b1e269a71d6e51f43943 Mon Sep 17 00:00:00 2001 From: WanQuanXie Date: Fri, 24 May 2024 20:29:49 +0800 Subject: [PATCH 033/399] feat: adaptive light mode --- ui/app/layout.tsx | 2 +- ui/components/Chat.tsx | 2 +- ui/components/EmptyChat.tsx | 2 +- ui/components/EmptyChatMessageInput.tsx | 6 +-- ui/components/Layout.tsx | 2 +- ui/components/MessageActions/Copy.tsx | 2 +- ui/components/MessageActions/Rewrite.tsx | 2 +- ui/components/MessageBox.tsx | 30 +++++++----- ui/components/MessageBoxLoading.tsx | 8 +-- ui/components/MessageInput.tsx | 8 +-- ui/components/MessageInputActions.tsx | 24 +++++---- ui/components/MessageSources.tsx | 24 ++++----- ui/components/Navbar.tsx | 2 +- ui/components/SearchImages.tsx | 8 +-- ui/components/SearchVideos.tsx | 12 ++--- ui/components/SettingsDialog.tsx | 62 ++++++++++++++---------- ui/components/Sidebar.tsx | 41 +++++++++++----- ui/tailwind.config.ts | 14 +++++- 18 files changed, 149 insertions(+), 102 deletions(-) diff --git a/ui/app/layout.tsx b/ui/app/layout.tsx index 87144cf..124a3b5 100644 --- a/ui/app/layout.tsx +++ b/ui/app/layout.tsx @@ -34,7 +34,7 @@ export default function RootLayout({ unstyled: true, classNames: { toast: - 'dark:dark:bg-[#111111] text-white rounded-lg p-4 flex flex-row items-center space-x-2', + 'bg-primaryLight dark:bg-primaryDark text-white rounded-lg p-4 flex flex-row items-center space-x-2', }, }} /> diff --git a/ui/components/Chat.tsx b/ui/components/Chat.tsx index 7b0c1b3..60096b3 100644 --- a/ui/components/Chat.tsx +++ b/ui/components/Chat.tsx @@ -66,7 +66,7 @@ const Chat = ({ sendMessage={sendMessage} /> {!isLast && msg.role === 'assistant' && ( -
+
)} ); diff --git a/ui/components/EmptyChat.tsx b/ui/components/EmptyChat.tsx index 30bb883..6b9fddb 100644 --- a/ui/components/EmptyChat.tsx +++ b/ui/components/EmptyChat.tsx @@ -11,7 +11,7 @@ const EmptyChat = ({ }) => { return (
-

+

Research begins here.

-
+
setMessage(e.target.value)} minRows={2} - className="bg-transparent placeholder:text-white/50 text-sm text-white resize-none focus:outline-none w-full max-h-24 lg:max-h-36 xl:max-h-48" + className="bg-transparent placeholder:text-black/50 dark:placeholder:text-white/50 text-sm text-black dark:text-white resize-none focus:outline-none w-full max-h-24 lg:max-h-36 xl:max-h-48" placeholder="Ask anything..." />
@@ -51,7 +51,7 @@ const EmptyChatMessageInput = ({ /> diff --git a/ui/components/Layout.tsx b/ui/components/Layout.tsx index e517e00..cb128db 100644 --- a/ui/components/Layout.tsx +++ b/ui/components/Layout.tsx @@ -1,6 +1,6 @@ const Layout = ({ children }: { children: React.ReactNode }) => { return ( -
+
{children}
); diff --git a/ui/components/MessageActions/Copy.tsx b/ui/components/MessageActions/Copy.tsx index b19d8d4..5418811 100644 --- a/ui/components/MessageActions/Copy.tsx +++ b/ui/components/MessageActions/Copy.tsx @@ -19,7 +19,7 @@ const Copy = ({ setCopied(true); setTimeout(() => setCopied(false), 1000); }} - className="p-2 text-white/70 rounded-xl hover:bg-[#1c1c1c] transition duration-200 hover:text-white" + className="p-2 text-black/70 dark:text-white/70 rounded-xl hover:bg-secondLight dark:hover:bg-secondDark transition duration-200 hover:text-black dark:hover:text-white" > {copied ? : } diff --git a/ui/components/MessageActions/Rewrite.tsx b/ui/components/MessageActions/Rewrite.tsx index 3282e7d..b5f865b 100644 --- a/ui/components/MessageActions/Rewrite.tsx +++ b/ui/components/MessageActions/Rewrite.tsx @@ -10,7 +10,7 @@ const Rewrite = ({ return ( */} @@ -123,7 +127,7 @@ const MessageBox = ({ start(); } }} - className="p-2 text-white/70 rounded-xl hover:bg-[#1c1c1c] transition duration-200 hover:text-white" + className="p-2 text-black/70 dark:text-white/70 rounded-xl hover(bg-secondLight dark:bg-secondDark) transition duration-200 hover:text-black dark:hover:text-white" > {speechStatus === 'started' ? ( @@ -140,8 +144,8 @@ const MessageBox = ({ message.role === 'assistant' && !loading && ( <> -
-
+
+

Related

@@ -152,7 +156,7 @@ const MessageBox = ({ className="flex flex-col space-y-3 text-sm" key={i} > -
+
{ sendMessage(suggestion); diff --git a/ui/components/MessageBoxLoading.tsx b/ui/components/MessageBoxLoading.tsx index e070a27..3a80fe8 100644 --- a/ui/components/MessageBoxLoading.tsx +++ b/ui/components/MessageBoxLoading.tsx @@ -1,9 +1,9 @@ const MessageBoxLoading = () => { return ( -
-
-
-
+
+
+
+
); }; diff --git a/ui/components/MessageInput.tsx b/ui/components/MessageInput.tsx index baf6095..4fbea7c 100644 --- a/ui/components/MessageInput.tsx +++ b/ui/components/MessageInput.tsx @@ -40,7 +40,7 @@ const MessageInput = ({ } }} className={cn( - 'bg-[#111111] p-4 flex items-center overflow-hidden border border-[#1C1C1C]', + 'bg-primaryLight dark:bg-primaryDark p-4 flex items-center overflow-hidden border border-light dark:border-dark', mode === 'multi' ? 'flex-col rounded-lg' : 'flex-row rounded-full', )} > @@ -51,7 +51,7 @@ const MessageInput = ({ onHeightChange={(height, props) => { setTextareaRows(Math.ceil(height / props.rowHeight)); }} - className="transition bg-transparent placeholder:text-white/50 placeholder:text-sm text-sm text-white resize-none focus:outline-none w-full px-2 max-h-24 lg:max-h-36 xl:max-h-48 flex-grow flex-shrink" + className="transition bg-transparent dark:placeholder:text-white/50 placeholder:text-sm text-sm dark:text-white resize-none focus:outline-none w-full px-2 max-h-24 lg:max-h-36 xl:max-h-48 flex-grow flex-shrink" placeholder="Ask a follow-up" /> {mode === 'single' && ( @@ -62,7 +62,7 @@ const MessageInput = ({ /> @@ -78,7 +78,7 @@ const MessageInput = ({ /> diff --git a/ui/components/MessageInputActions.tsx b/ui/components/MessageInputActions.tsx index 9c00c4d..22fc708 100644 --- a/ui/components/MessageInputActions.tsx +++ b/ui/components/MessageInputActions.tsx @@ -16,7 +16,7 @@ export const Attach = () => { return ( @@ -85,7 +85,7 @@ export const Focus = ({ {focusMode !== 'webSearch' ? (
@@ -109,7 +109,7 @@ export const Focus = ({ leaveTo="opacity-0 translate-y-1" > -
+
{focusModes.map((mode, i) => ( setFocusMode(mode.key)} @@ -117,20 +117,24 @@ export const Focus = ({ className={cn( 'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-2 duration-200 cursor-pointer transition', focusMode === mode.key - ? 'bg-[#111111]' - : 'hover:bg-[#111111]', + ? 'bg-primaryLight dark:bg-primaryDark' + : 'hover:bg-primaryLight dark:bg-primaryDark', )} >
{mode.icon}

{mode.title}

-

{mode.description}

+

+ {mode.description} +

))}
@@ -152,14 +156,14 @@ export const CopilotToggle = ({ Copilot @@ -170,7 +174,7 @@ export const CopilotToggle = ({ 'text-xs font-medium transition-colors duration-150 ease-in-out', copilotEnabled ? 'text-[#24A0ED]' - : 'text-white/50 group-hover:text-white', + : 'text-black/50 dark:text-white/50 group-hover:text-black dark:group-hover:text-white', )} > Copilot diff --git a/ui/components/MessageSources.tsx b/ui/components/MessageSources.tsx index 5816f8d..476c73c 100644 --- a/ui/components/MessageSources.tsx +++ b/ui/components/MessageSources.tsx @@ -20,12 +20,12 @@ const MessageSources = ({ sources }: { sources: Document[] }) => {
{sources.slice(0, 3).map((source, i) => ( -

+

{source.metadata.title}

@@ -37,11 +37,11 @@ const MessageSources = ({ sources }: { sources: Document[] }) => { alt="favicon" className="rounded-lg h-4 w-4" /> -

+

{source.metadata.url.replace(/.+\/\/|www.|\..+/g, '')}

-
+
{i + 1}
@@ -51,7 +51,7 @@ const MessageSources = ({ sources }: { sources: Document[] }) => { {sources.length > 3 && ( @@ -83,19 +83,19 @@ const MessageSources = ({ sources }: { sources: Document[] }) => { leaveFrom="opacity-100 scale-200" leaveTo="opacity-0 scale-95" > - - + + Sources
{sources.map((source, i) => ( -

+

{source.metadata.title}

@@ -107,14 +107,14 @@ const MessageSources = ({ sources }: { sources: Document[] }) => { alt="favicon" className="rounded-lg h-4 w-4" /> -

+

{source.metadata.url.replace( /.+\/\/|www.|\..+/g, '', )}

-
+
{i + 1}
diff --git a/ui/components/Navbar.tsx b/ui/components/Navbar.tsx index 75c34a6..c07d6fd 100644 --- a/ui/components/Navbar.tsx +++ b/ui/components/Navbar.tsx @@ -38,7 +38,7 @@ const Navbar = ({ messages }: { messages: Message[] }) => { }, []); return ( -
+
@@ -76,7 +76,7 @@ const SearchImages = ({ {[...Array(4)].map((_, i) => (
))}
@@ -120,7 +120,7 @@ const SearchImages = ({ {images.length > 4 && ( diff --git a/ui/components/SearchVideos.tsx b/ui/components/SearchVideos.tsx index b5ff6c5..05c3180 100644 --- a/ui/components/SearchVideos.tsx +++ b/ui/components/SearchVideos.tsx @@ -77,7 +77,7 @@ const Searchvideos = ({ ); setLoading(false); }} - className="border border-dashed border-[#1C1C1C] hover:bg-[#1c1c1c] active:scale-95 duration-200 transition px-4 py-2 flex flex-row items-center justify-between rounded-lg text-white text-sm w-full" + className="border border-dashed border-light dark:border-dark hover(bg-secondLight dark:bg-secondDark) active:scale-95 duration-200 transition px-4 py-2 flex flex-row items-center justify-between rounded-lg dark:text-white text-sm w-full" >
@@ -91,7 +91,7 @@ const Searchvideos = ({ {[...Array(4)].map((_, i) => (
))}
@@ -118,7 +118,7 @@ const Searchvideos = ({ alt={video.title} className="relative h-full w-full aspect-video object-cover rounded-lg" /> -
+

Video

@@ -142,7 +142,7 @@ const Searchvideos = ({ alt={video.title} className="relative h-full w-full aspect-video object-cover rounded-lg" /> -
+

Video

@@ -151,7 +151,7 @@ const Searchvideos = ({ {videos.length > 4 && ( diff --git a/ui/components/SettingsDialog.tsx b/ui/components/SettingsDialog.tsx index 57f79f6..1942179 100644 --- a/ui/components/SettingsDialog.tsx +++ b/ui/components/SettingsDialog.tsx @@ -145,7 +145,7 @@ const SettingsDialog = ({ leaveFrom="opacity-100" leaveTo="opacity-0" > -
+
@@ -158,15 +158,15 @@ const SettingsDialog = ({ leaveFrom="opacity-100 scale-200" leaveTo="opacity-0 scale-95" > - - + + Settings {config && !isLoading && (
{config.chatModelProviders && (
-

+

Chat model Provider

setSelectedChatModel(e.target.value) } - className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm" + className="bg-primaryLight dark:bg-primaryDark px-3 py-2 flex items-center overflow-hidden border border-light dark:border-dark dark:text-white rounded-lg text-sm" > {config.chatModelProviders[ selectedChatModelProvider @@ -231,7 +233,9 @@ const SettingsDialog = ({ selectedChatModelProvider === 'custom_openai' && ( <>
-

Model name

+

+ Model name +

setSelectedChatModel(e.target.value) } - className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm" + className="bg-primaryLight dark:bg-primaryDark px-3 py-2 flex items-center overflow-hidden border border-light dark:border-dark dark:text-white rounded-lg text-sm" />
-

+

Custom OpenAI API Key

setCustomOpenAIApiKey(e.target.value) } - className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm" + className="bg-primaryLight dark:bg-primaryDark px-3 py-2 flex items-center overflow-hidden border border-light dark:border-dark dark:text-white rounded-lg text-sm" />
-

+

Custom OpenAI Base URL

setCustomOpenAIBaseURL(e.target.value) } - className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm" + className="bg-primaryLight dark:bg-primaryDark px-3 py-2 flex items-center overflow-hidden border border-light dark:border-dark dark:text-white rounded-lg text-sm" />
@@ -275,7 +279,7 @@ const SettingsDialog = ({ {/* Embedding models */} {config.embeddingModelProviders && (
-

+

Embedding model Provider

setSelectedEmbeddingModel(e.target.value) } - className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm" + className="bg-primaryLight dark:bg-primaryDark px-3 py-2 flex items-center overflow-hidden border border-light dark:border-dark dark:text-white rounded-lg text-sm" > {config.embeddingModelProviders[ selectedEmbeddingModelProvider @@ -336,7 +342,9 @@ const SettingsDialog = ({
)}
-

OpenAI API Key

+

+ OpenAI API Key +

-

Ollama API URL

+

+ Ollama API URL +

-

GROQ API Key

+

+ GROQ API Key +

)} {isLoading && ( -
+
)}
-

+

We'll refresh the page after updating the settings.

+ ); +} const Sidebar = ({ children }: { children: React.ReactNode }) => { const segments = useSelectedLayoutSegments(); @@ -38,31 +45,39 @@ const Sidebar = ({ children }: { children: React.ReactNode }) => { return (
-
+
- + -
+ {navLinks.map((link, i) => ( {link.active && ( -
+
)} ))} -
- setIsSettingsOpen(!isSettingsOpen)} - className="text-white cursor-pointer" - /> + + + + + + setIsSettingsOpen(!isSettingsOpen)} + className="cursor-pointer" + /> + + {
-
+
{navLinks.map((link, i) => ( Date: Fri, 24 May 2024 21:58:14 +0800 Subject: [PATCH 034/399] refactor(SettingDialog): extract reduplicate code to common component DO NOT REPEAT YOURSELF! --- ui/components/SettingsDialog.tsx | 213 ++++++++++++++++++------------- 1 file changed, 127 insertions(+), 86 deletions(-) diff --git a/ui/components/SettingsDialog.tsx b/ui/components/SettingsDialog.tsx index 1942179..4d80b9c 100644 --- a/ui/components/SettingsDialog.tsx +++ b/ui/components/SettingsDialog.tsx @@ -1,6 +1,51 @@ +import { cn } from '@/lib/utils'; import { Dialog, Transition } from '@headlessui/react'; import { CloudUpload, RefreshCcw, RefreshCw } from 'lucide-react'; -import React, { Fragment, useEffect, useState } from 'react'; +import React, { + Fragment, + useEffect, + useMemo, + useState, + type SelectHTMLAttributes, +} from 'react'; + +interface InputProps extends React.InputHTMLAttributes {} + +function Input({ className, ...restProps }: InputProps) { + return ( + + ); +} + +interface SelectProps extends SelectHTMLAttributes { + options: { value: string; label: string; disabled?: boolean }[]; +} + +function Select({ className, options, ...restProps }: SelectProps) { + return ( + + ); +} interface SettingsType { chatModelProviders: { @@ -169,7 +214,7 @@ const SettingsDialog = ({

Chat model Provider

- + />
)} {selectedChatModelProvider && @@ -196,37 +239,40 @@ const SettingsDialog = ({

Chat Model

- + ]; + + return chatModelProvider + ? chatModelProvider.length > 0 + ? chatModelProvider.map((model) => ({ + value: model, + label: model, + })) + : [ + { + value: '', + label: 'No models available', + disabled: true, + }, + ] + : [ + { + value: '', + label: + 'Invalid provider, please check backend logs', + disabled: true, + }, + ]; + })()} + />
)} {selectedChatModelProvider && @@ -236,42 +282,39 @@ const SettingsDialog = ({

Model name

- setSelectedChatModel(e.target.value) } - className="bg-primaryLight dark:bg-primaryDark px-3 py-2 flex items-center overflow-hidden border border-light dark:border-dark dark:text-white rounded-lg text-sm" />

Custom OpenAI API Key

- setCustomOpenAIApiKey(e.target.value) } - className="bg-primaryLight dark:bg-primaryDark px-3 py-2 flex items-center overflow-hidden border border-light dark:border-dark dark:text-white rounded-lg text-sm" />

Custom OpenAI Base URL

- setCustomOpenAIBaseURL(e.target.value) } - className="bg-primaryLight dark:bg-primaryDark px-3 py-2 flex items-center overflow-hidden border border-light dark:border-dark dark:text-white rounded-lg text-sm" />
@@ -282,7 +325,7 @@ const SettingsDialog = ({

Embedding model Provider

- + options={Object.keys( + config.embeddingModelProviders, + ).map((provider) => ({ + label: + provider.charAt(0).toUpperCase() + + provider.slice(1), + value: provider, + }))} + />
)} {selectedEmbeddingModelProvider && ( @@ -308,44 +349,47 @@ const SettingsDialog = ({

Embedding Model

- + ]; + + return embeddingModelProvider + ? embeddingModelProvider.length > 0 + ? embeddingModelProvider.map((model) => ({ + label: model, + value: model, + })) + : [ + { + label: 'No embedding models available', + value: '', + disabled: true, + }, + ] + : [ + { + label: + 'Invalid provider, please check backend logs', + value: '', + disabled: true, + }, + ]; + })()} + />
)}

OpenAI API Key

-

Ollama API URL

-

GROQ API Key

-
From 89c30530bc44013d6c7d088f8a7011c11c4c450c Mon Sep 17 00:00:00 2001 From: WanQuanXie Date: Fri, 24 May 2024 22:08:47 +0800 Subject: [PATCH 035/399] update(Navbar): update Navbar light mode background --- ui/components/Navbar.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ui/components/Navbar.tsx b/ui/components/Navbar.tsx index c07d6fd..f95a455 100644 --- a/ui/components/Navbar.tsx +++ b/ui/components/Navbar.tsx @@ -38,7 +38,7 @@ const Navbar = ({ messages }: { messages: Message[] }) => { }, []); return ( -
+
Date: Fri, 24 May 2024 22:41:06 +0800 Subject: [PATCH 036/399] update(SearchVideos): video cover label style adapt light mode --- ui/components/SearchVideos.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ui/components/SearchVideos.tsx b/ui/components/SearchVideos.tsx index 05c3180..eaad078 100644 --- a/ui/components/SearchVideos.tsx +++ b/ui/components/SearchVideos.tsx @@ -118,7 +118,7 @@ const Searchvideos = ({ alt={video.title} className="relative h-full w-full aspect-video object-cover rounded-lg" /> -
+

Video

From 382fa295e57593fb80b9371af47dbd1e9abfecee Mon Sep 17 00:00:00 2001 From: Devin Stokes Date: Fri, 24 May 2024 08:19:15 -0700 Subject: [PATCH 037/399] fix: add extra_hosts to docker-compose.yaml to allow connection to ollama --- docker-compose.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker-compose.yaml b/docker-compose.yaml index ac83575..d8e1047 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -18,6 +18,8 @@ services: - searxng ports: - 3001:3001 + extra_hosts: + - "host.docker.internal:host-gateway" networks: - perplexica-network @@ -32,6 +34,8 @@ services: - perplexica-backend ports: - 3000:3000 + extra_hosts: + - "host.docker.internal:host-gateway" networks: - perplexica-network From c97a4347230f505a68339d0c08d13baf2cf67696 Mon Sep 17 00:00:00 2001 From: WanQuanXie Date: Sat, 25 May 2024 06:57:24 +0800 Subject: [PATCH 038/399] fix(ui): hover style class uses --- ui/components/MessageActions/Rewrite.tsx | 2 +- ui/components/MessageBox.tsx | 4 ++-- ui/components/MessageInputActions.tsx | 2 +- ui/components/MessageSources.tsx | 6 +++--- ui/components/SearchImages.tsx | 4 ++-- ui/components/SearchVideos.tsx | 4 ++-- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/ui/components/MessageActions/Rewrite.tsx b/ui/components/MessageActions/Rewrite.tsx index b5f865b..76a422f 100644 --- a/ui/components/MessageActions/Rewrite.tsx +++ b/ui/components/MessageActions/Rewrite.tsx @@ -10,7 +10,7 @@ const Rewrite = ({ return ( */} @@ -127,7 +127,7 @@ const MessageBox = ({ start(); } }} - className="p-2 text-black/70 dark:text-white/70 rounded-xl hover(bg-secondLight dark:bg-secondDark) transition duration-200 hover:text-black dark:hover:text-white" + className="p-2 text-black/70 dark:text-white/70 rounded-xl hover:bg-secondLight dark:hover:bg-secondDark transition duration-200 hover:text-black dark:hover:text-white" > {speechStatus === 'started' ? ( diff --git a/ui/components/MessageInputActions.tsx b/ui/components/MessageInputActions.tsx index 22fc708..8b6d784 100644 --- a/ui/components/MessageInputActions.tsx +++ b/ui/components/MessageInputActions.tsx @@ -85,7 +85,7 @@ export const Focus = ({ {focusMode !== 'webSearch' ? (
diff --git a/ui/components/MessageSources.tsx b/ui/components/MessageSources.tsx index 476c73c..292b8c6 100644 --- a/ui/components/MessageSources.tsx +++ b/ui/components/MessageSources.tsx @@ -20,7 +20,7 @@ const MessageSources = ({ sources }: { sources: Document[] }) => {
{sources.slice(0, 3).map((source, i) => ( { {sources.length > 3 && ( diff --git a/ui/components/MessageActions/Rewrite.tsx b/ui/components/MessageActions/Rewrite.tsx index 76a422f..80fadb3 100644 --- a/ui/components/MessageActions/Rewrite.tsx +++ b/ui/components/MessageActions/Rewrite.tsx @@ -10,7 +10,7 @@ const Rewrite = ({ return ( */} @@ -127,7 +127,7 @@ const MessageBox = ({ start(); } }} - className="p-2 text-black/70 dark:text-white/70 rounded-xl hover:bg-secondLight dark:hover:bg-secondDark transition duration-200 hover:text-black dark:hover:text-white" + className="p-2 text-black/70 dark:text-white/70 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white" > {speechStatus === 'started' ? ( @@ -144,7 +144,7 @@ const MessageBox = ({ message.role === 'assistant' && !loading && ( <> -
+
@@ -156,7 +156,7 @@ const MessageBox = ({ className="flex flex-col space-y-3 text-sm" key={i} > -
+
{ sendMessage(suggestion); diff --git a/ui/components/MessageBoxLoading.tsx b/ui/components/MessageBoxLoading.tsx index 3a80fe8..caa6f18 100644 --- a/ui/components/MessageBoxLoading.tsx +++ b/ui/components/MessageBoxLoading.tsx @@ -1,9 +1,9 @@ const MessageBoxLoading = () => { return ( -
-
-
-
+
+
+
+
); }; diff --git a/ui/components/MessageInput.tsx b/ui/components/MessageInput.tsx index 4fbea7c..d215787 100644 --- a/ui/components/MessageInput.tsx +++ b/ui/components/MessageInput.tsx @@ -40,7 +40,7 @@ const MessageInput = ({ } }} className={cn( - 'bg-primaryLight dark:bg-primaryDark p-4 flex items-center overflow-hidden border border-light dark:border-dark', + 'bg-light-primary dark:bg-dark-primary p-4 flex items-center overflow-hidden border border-light-300 dark:border-dark-200', mode === 'multi' ? 'flex-col rounded-lg' : 'flex-row rounded-full', )} > diff --git a/ui/components/MessageInputActions.tsx b/ui/components/MessageInputActions.tsx index 8b6d784..80b2797 100644 --- a/ui/components/MessageInputActions.tsx +++ b/ui/components/MessageInputActions.tsx @@ -16,7 +16,7 @@ export const Attach = () => { return ( @@ -85,7 +85,7 @@ export const Focus = ({ {focusMode !== 'webSearch' ? (
@@ -109,7 +109,7 @@ export const Focus = ({ leaveTo="opacity-0 translate-y-1" > -
+
{focusModes.map((mode, i) => ( setFocusMode(mode.key)} @@ -117,8 +117,8 @@ export const Focus = ({ className={cn( 'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-2 duration-200 cursor-pointer transition', focusMode === mode.key - ? 'bg-primaryLight dark:bg-primaryDark' - : 'hover:bg-primaryLight dark:bg-primaryDark', + ? 'bg-light-primary dark:bg-dark-primary' + : 'hover:bg-light-primary dark:bg-dark-primary', )} >
Copilot {
{sources.slice(0, 3).map((source, i) => ( { {sources.length > 3 && (