Embedding Model Provider
From f8fd2a6fb0e6db238559b27fade42bb1dc3e8fa9 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Sat, 4 May 2024 15:04:43 +0530
Subject: [PATCH 017/434] feat(package): bump version
---
package.json | 2 +-
ui/package.json | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/package.json b/package.json
index fe97c49..a82ee31 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "perplexica-backend",
- "version": "1.3.0",
+ "version": "1.3.1",
"license": "MIT",
"author": "ItzCrazyKns",
"scripts": {
diff --git a/ui/package.json b/ui/package.json
index 0aec67d..31dd895 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -1,6 +1,6 @@
{
"name": "perplexica-frontend",
- "version": "1.3.0",
+ "version": "1.3.1",
"license": "MIT",
"author": "ItzCrazyKns",
"scripts": {
From ba7b92ffded3cf0e764b1101200376b9e7451e19 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Sun, 5 May 2024 10:53:27 +0530
Subject: [PATCH 018/434] feat(providers): add `Content-Type` header
---
src/lib/providers.ts | 12 ++++++++++--
1 file changed, 10 insertions(+), 2 deletions(-)
diff --git a/src/lib/providers.ts b/src/lib/providers.ts
index d6904c0..751d23f 100644
--- a/src/lib/providers.ts
+++ b/src/lib/providers.ts
@@ -90,7 +90,11 @@ export const getAvailableChatModelProviders = async () => {
if (ollamaEndpoint) {
try {
- const response = await fetch(`${ollamaEndpoint}/api/tags`);
+ const response = await fetch(`${ollamaEndpoint}/api/tags`, {
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ });
const { models: ollamaModels } = (await response.json()) as any;
@@ -137,7 +141,11 @@ export const getAvailableEmbeddingModelProviders = async () => {
if (ollamaEndpoint) {
try {
- const response = await fetch(`${ollamaEndpoint}/api/tags`);
+ const response = await fetch(`${ollamaEndpoint}/api/tags`, {
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ });
const { models: ollamaModels } = (await response.json()) as any;
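For reference, the request this patch produces reduces to the sketch below — a minimal helper around Ollama's /api/tags endpoint. The `listOllamaModels` name and the typed response shape are illustrative assumptions, not part of the patch; Node 18+ (or a browser runtime) is assumed for global `fetch`.

    // Minimal sketch: list model names from an Ollama endpoint.
    // The explicit Content-Type header is what this patch adds.
    const listOllamaModels = async (ollamaEndpoint: string): Promise<string[]> => {
      const response = await fetch(`${ollamaEndpoint}/api/tags`, {
        headers: {
          'Content-Type': 'application/json',
        },
      });
      // Response shape assumed: { models: [{ name: string, ... }] }
      const { models } = (await response.json()) as { models: { name: string }[] };
      return models.map((model) => model.name);
    };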
From 6e61c88c9e655e238cabf5a4ac50481c2bdeb48d Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Sun, 5 May 2024 16:28:46 +0530
Subject: [PATCH 019/434] feat(error-object): add `key`
---
src/websocket/connectionManager.ts | 3 ++-
src/websocket/messageHandler.ts | 30 ++++++++++++++++++++++++++----
2 files changed, 28 insertions(+), 5 deletions(-)
diff --git a/src/websocket/connectionManager.ts b/src/websocket/connectionManager.ts
index 88efb6b..daa4cbc 100644
--- a/src/websocket/connectionManager.ts
+++ b/src/websocket/connectionManager.ts
@@ -70,7 +70,8 @@ export const handleConnection = async (
ws.send(
JSON.stringify({
type: 'error',
- data: 'Invalid LLM or embeddings model selected',
+ data: 'Invalid LLM or embeddings model selected, please refresh the page and try again.',
+ key: 'INVALID_MODEL_SELECTED',
}),
);
ws.close();
diff --git a/src/websocket/messageHandler.ts b/src/websocket/messageHandler.ts
index 537651f..98f67c2 100644
--- a/src/websocket/messageHandler.ts
+++ b/src/websocket/messageHandler.ts
@@ -57,7 +57,13 @@ const handleEmitterEvents = (
});
emitter.on('error', (data) => {
const parsedData = JSON.parse(data);
- ws.send(JSON.stringify({ type: 'error', data: parsedData.data }));
+ ws.send(
+ JSON.stringify({
+ type: 'error',
+ data: parsedData.data,
+ key: 'CHAIN_ERROR',
+ }),
+ );
});
};
@@ -73,7 +79,11 @@ export const handleMessage = async (
if (!parsedMessage.content)
return ws.send(
- JSON.stringify({ type: 'error', data: 'Invalid message format' }),
+ JSON.stringify({
+ type: 'error',
+ data: 'Invalid message format',
+ key: 'INVALID_FORMAT',
+ }),
);
const history: BaseMessage[] = parsedMessage.history.map((msg) => {
@@ -99,11 +109,23 @@ export const handleMessage = async (
);
handleEmitterEvents(emitter, ws, id);
} else {
- ws.send(JSON.stringify({ type: 'error', data: 'Invalid focus mode' }));
+ ws.send(
+ JSON.stringify({
+ type: 'error',
+ data: 'Invalid focus mode',
+ key: 'INVALID_FOCUS_MODE',
+ }),
+ );
}
}
} catch (err) {
- ws.send(JSON.stringify({ type: 'error', data: 'Invalid message format' }));
+ ws.send(
+ JSON.stringify({
+ type: 'error',
+ data: 'Invalid message format',
+ key: 'INVALID_FORMAT',
+ }),
+ );
logger.error(`Failed to handle message: ${err}`);
}
};
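All error payloads in this patch share one shape: `type: 'error'`, a human-readable `data` string, and a machine-readable `key`. A sketch of that contract as a TypeScript type, with a hypothetical `sendError` helper that would deduplicate the repeated `ws.send` calls (neither name appears in the patch):

    import { WebSocket } from 'ws';

    // The error keys introduced by this patch.
    type WSError = {
      type: 'error';
      data: string; // shown to the user
      key:
        | 'INVALID_MODEL_SELECTED'
        | 'CHAIN_ERROR'
        | 'INVALID_FORMAT'
        | 'INVALID_FOCUS_MODE';
    };

    // Hypothetical helper centralizing the ws.send calls above.
    const sendError = (ws: WebSocket, data: string, key: WSError['key']) => {
      const payload: WSError = { type: 'error', data, key };
      ws.send(JSON.stringify(payload));
    };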
From 94ea6c372a5c42afb2f6f9cc6942d57681a5e9fb Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Sun, 5 May 2024 16:29:40 +0530
Subject: [PATCH 020/434] feat(chat-window): clear storage after error
---
ui/components/ChatWindow.tsx | 3 +++
1 file changed, 3 insertions(+)
diff --git a/ui/components/ChatWindow.tsx b/ui/components/ChatWindow.tsx
index 79f93a8..ca1aaa3 100644
--- a/ui/components/ChatWindow.tsx
+++ b/ui/components/ChatWindow.tsx
@@ -108,6 +108,9 @@ const useSocket = (url: string) => {
const parsedData = JSON.parse(e.data);
if (parsedData.type === 'error') {
toast.error(parsedData.data);
+ if (parsedData.key === 'INVALID_MODEL_SELECTED') {
+ localStorage.clear();
+ }
}
};
};
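Put together, the client-side handling after this patch reads as below — a condensed sketch of the `useSocket` hook's message handler, where `ws` and `toast` come from the surrounding component. Note that `localStorage.clear()` drops every stored key, not just the model selection; clearing individual keys with `localStorage.removeItem` would be a narrower alternative.

    ws.onmessage = (e) => {
      const parsedData = JSON.parse(e.data);
      if (parsedData.type === 'error') {
        // Always surface the human-readable message.
        toast.error(parsedData.data);
        // The machine-readable key drives recovery: stored model names
        // no longer match the backend, so reset and reload defaults.
        if (parsedData.key === 'INVALID_MODEL_SELECTED') {
          localStorage.clear();
        }
      }
    };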
From 9b088cd1614cfb96eab27506a8d74d816d4551ca Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Sun, 5 May 2024 16:35:06 +0530
Subject: [PATCH 021/434] feat(package): bump version
---
package.json | 2 +-
ui/package.json | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/package.json b/package.json
index a82ee31..6838da8 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "perplexica-backend",
- "version": "1.3.1",
+ "version": "1.3.2",
"license": "MIT",
"author": "ItzCrazyKns",
"scripts": {
diff --git a/ui/package.json b/ui/package.json
index 31dd895..2a0ba02 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -1,6 +1,6 @@
{
"name": "perplexica-frontend",
- "version": "1.3.1",
+ "version": "1.3.2",
"license": "MIT",
"author": "ItzCrazyKns",
"scripts": {
From f28257b480b4706e0d9199418123122dc723f31e Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Mon, 6 May 2024 12:34:59 +0530
Subject: [PATCH 022/434] feat(settings): fetch localStorage at state change
---
ui/components/SettingsDialog.tsx | 19 ++++++++-----------
1 file changed, 8 insertions(+), 11 deletions(-)
diff --git a/ui/components/SettingsDialog.tsx b/ui/components/SettingsDialog.tsx
index f77a95c..d704488 100644
--- a/ui/components/SettingsDialog.tsx
+++ b/ui/components/SettingsDialog.tsx
@@ -53,6 +53,14 @@ const SettingsDialog = ({
});
const data = await res.json();
setConfig(data);
+ setSelectedChatModelProvider(localStorage.getItem('chatModelProvider'));
+ setSelectedChatModel(localStorage.getItem('chatModel'));
+ setSelectedEmbeddingModelProvider(
+ localStorage.getItem('embeddingModelProvider'),
+ );
+ setSelectedEmbeddingModel(localStorage.getItem('embeddingModel'));
+ setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey'));
+ setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseUrl'));
setIsLoading(false);
};
@@ -61,17 +69,6 @@ const SettingsDialog = ({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [isOpen]);
- useEffect(() => {
- setSelectedChatModelProvider(localStorage.getItem('chatModelProvider'));
- setSelectedChatModel(localStorage.getItem('chatModel'));
- setSelectedEmbeddingModelProvider(
- localStorage.getItem('embeddingModelProvider'),
- );
- setSelectedEmbeddingModel(localStorage.getItem('embeddingModel'));
- setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey'));
- setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseUrl'));
- }, []);
-
const handleSubmit = async () => {
setIsUpdating(true);
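The resulting effect structure reduces to one fetch-and-hydrate path that runs whenever `isOpen` changes, rather than a separate mount-time effect that could go stale. A simplified sketch — the config URL and the `if (isOpen)` guard are assumptions, and only two of the hydrated fields are shown:

    useEffect(() => {
      if (isOpen) {
        const fetchConfig = async () => {
          setIsLoading(true);
          const res = await fetch('/api/config'); // stand-in URL
          setConfig(await res.json());
          // Re-read localStorage on every open, so the dialog reflects
          // selections made after the component first mounted.
          setSelectedChatModelProvider(localStorage.getItem('chatModelProvider'));
          setSelectedChatModel(localStorage.getItem('chatModel'));
          setIsLoading(false);
        };
        fetchConfig();
      }
      // eslint-disable-next-line react-hooks/exhaustive-deps
    }, [isOpen]);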
From 38b19956771e77a14cabfcd4f82ab67418bb8225 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Mon, 6 May 2024 12:36:13 +0530
Subject: [PATCH 023/434] feat(package): bump version
---
package.json | 2 +-
ui/package.json | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/package.json b/package.json
index 6838da8..36e35d9 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "perplexica-backend",
- "version": "1.3.2",
+ "version": "1.3.3",
"license": "MIT",
"author": "ItzCrazyKns",
"scripts": {
diff --git a/ui/package.json b/ui/package.json
index 2a0ba02..99b2334 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -1,6 +1,6 @@
{
"name": "perplexica-frontend",
- "version": "1.3.2",
+ "version": "1.3.3",
"license": "MIT",
"author": "ItzCrazyKns",
"scripts": {
From b4d787d33359f09d7ffb1a193157204fe028bed5 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Mon, 6 May 2024 12:58:40 +0530
Subject: [PATCH 024/434] feat(readme): add troubleshooting
---
README.md | 15 +++++++++++++++
1 file changed, 15 insertions(+)
diff --git a/README.md b/README.md
index 7c0b09a..fb01b4b 100644
--- a/README.md
+++ b/README.md
@@ -10,6 +10,8 @@
- [Installation](#installation)
- [Getting Started with Docker (Recommended)](#getting-started-with-docker-recommended)
- [Non-Docker Installation](#non-docker-installation)
+ - [Troubleshooting](#troubleshooting)
+ - [Ollama connection errors](#ollama-connection-errors)
- [One-Click Deployment](#one-click-deployment)
- [Upcoming Features](#upcoming-features)
- [Support Us](#support-us)
@@ -90,6 +92,19 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
**Note**: Using Docker is recommended as it simplifies the setup process, especially for managing environment variables and dependencies.
+### Troubleshooting
+Encountered a bug or an issue? Feel free to join our [Discord community](https://discord.gg/2bdhg2R3ze), where we'll do our best to help you.
+
+#### Ollama connection errors
+
+If you're facing an Ollama connection error, the backend most likely cannot reach Ollama's API. You can fix this by updating your Ollama API URL in the settings menu to the following:
+
+On Windows: `http://host.docker.internal:11434`
+On Mac: `http://host.docker.internal:11434`
+On Linux: `http://private_ip_of_computer_hosting_ollama:11434`
+
+Adjust the port if your Ollama instance listens on something other than the default `11434`.
+
## One-Click Deployment
[](https://repocloud.io/details/?app_id=267)
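Since the backend discovers models by fetching `/api/tags` (see patch 018 above), a candidate URL can be verified with the same request the server makes — a quick sketch, assuming Node 18+ for global `fetch`:

    // Connectivity check; the filename and run command are illustrative,
    // e.g. `npx ts-node check-ollama.ts`. Substitute the URL you entered
    // in the settings menu.
    const endpoint = 'http://host.docker.internal:11434';

    fetch(`${endpoint}/api/tags`)
      .then((res) => res.json())
      .then((data: any) => console.log('Ollama reachable, models:', data.models))
      .catch((err) => console.error('Ollama unreachable:', err));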
From ed47191d9baec836dc6a526032d3055084bc6d40 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Mon, 6 May 2024 13:00:07 +0530
Subject: [PATCH 025/434] feat(readme): update readme
---
README.md | 4 ----
1 file changed, 4 deletions(-)
diff --git a/README.md b/README.md
index fb01b4b..750006c 100644
--- a/README.md
+++ b/README.md
@@ -10,7 +10,6 @@
- [Installation](#installation)
- [Getting Started with Docker (Recommended)](#getting-started-with-docker-recommended)
- [Non-Docker Installation](#non-docker-installation)
- - [Troubleshooting](#troubleshooting)
- [Ollama connection errors](#ollama-connection-errors)
- [One-Click Deployment](#one-click-deployment)
- [Upcoming Features](#upcoming-features)
@@ -92,9 +91,6 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
**Note**: Using Docker is recommended as it simplifies the setup process, especially for managing environment variables and dependencies.
-### Troubleshooting
-Encountered a bug or an issue? Feel free to join our [Discord community](https://discord.gg/2bdhg2R3ze), where we'll do our best to help you.
-
#### Ollama connection errors
If you're facing an Ollama connection error, the backend most likely cannot reach Ollama's API. You can fix this by updating your Ollama API URL in the settings menu to the following:
From e8fe74ae7c82c4a3f9399a3dcd03eaa7793c329b Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Mon, 6 May 2024 19:59:13 +0530
Subject: [PATCH 026/434] feat(ws-managers): implement better error handling
---
src/websocket/connectionManager.ts | 125 ++++++++++++++++-------------
ui/components/ChatWindow.tsx | 4 +-
2 files changed, 71 insertions(+), 58 deletions(-)
diff --git a/src/websocket/connectionManager.ts b/src/websocket/connectionManager.ts
index daa4cbc..5cb075b 100644
--- a/src/websocket/connectionManager.ts
+++ b/src/websocket/connectionManager.ts
@@ -14,74 +14,87 @@ export const handleConnection = async (
ws: WebSocket,
request: IncomingMessage,
) => {
- const searchParams = new URL(request.url, `http://${request.headers.host}`)
- .searchParams;
+ try {
+ const searchParams = new URL(request.url, `http://${request.headers.host}`)
+ .searchParams;
- const [chatModelProviders, embeddingModelProviders] = await Promise.all([
- getAvailableChatModelProviders(),
- getAvailableEmbeddingModelProviders(),
- ]);
+ const [chatModelProviders, embeddingModelProviders] = await Promise.all([
+ getAvailableChatModelProviders(),
+ getAvailableEmbeddingModelProviders(),
+ ]);
- const chatModelProvider =
- searchParams.get('chatModelProvider') || Object.keys(chatModelProviders)[0];
- const chatModel =
- searchParams.get('chatModel') ||
- Object.keys(chatModelProviders[chatModelProvider])[0];
+ const chatModelProvider =
+ searchParams.get('chatModelProvider') ||
+ Object.keys(chatModelProviders)[0];
+ const chatModel =
+ searchParams.get('chatModel') ||
+ Object.keys(chatModelProviders[chatModelProvider])[0];
- const embeddingModelProvider =
- searchParams.get('embeddingModelProvider') ||
- Object.keys(embeddingModelProviders)[0];
- const embeddingModel =
- searchParams.get('embeddingModel') ||
- Object.keys(embeddingModelProviders[embeddingModelProvider])[0];
+ const embeddingModelProvider =
+ searchParams.get('embeddingModelProvider') ||
+ Object.keys(embeddingModelProviders)[0];
+ const embeddingModel =
+ searchParams.get('embeddingModel') ||
+ Object.keys(embeddingModelProviders[embeddingModelProvider])[0];
- let llm: BaseChatModel | undefined;
- let embeddings: Embeddings | undefined;
+ let llm: BaseChatModel | undefined;
+ let embeddings: Embeddings | undefined;
- if (
- chatModelProviders[chatModelProvider] &&
- chatModelProviders[chatModelProvider][chatModel] &&
- chatModelProvider != 'custom_openai'
- ) {
- llm = chatModelProviders[chatModelProvider][chatModel] as
- | BaseChatModel
- | undefined;
- } else if (chatModelProvider == 'custom_openai') {
- llm = new ChatOpenAI({
- modelName: chatModel,
- openAIApiKey: searchParams.get('openAIApiKey'),
- temperature: 0.7,
- configuration: {
- baseURL: searchParams.get('openAIBaseURL'),
- },
- });
- }
+ if (
+ chatModelProviders[chatModelProvider] &&
+ chatModelProviders[chatModelProvider][chatModel] &&
+ chatModelProvider != 'custom_openai'
+ ) {
+ llm = chatModelProviders[chatModelProvider][chatModel] as
+ | BaseChatModel
+ | undefined;
+ } else if (chatModelProvider == 'custom_openai') {
+ llm = new ChatOpenAI({
+ modelName: chatModel,
+ openAIApiKey: searchParams.get('openAIApiKey'),
+ temperature: 0.7,
+ configuration: {
+ baseURL: searchParams.get('openAIBaseURL'),
+ },
+ });
+ }
- if (
- embeddingModelProviders[embeddingModelProvider] &&
- embeddingModelProviders[embeddingModelProvider][embeddingModel]
- ) {
- embeddings = embeddingModelProviders[embeddingModelProvider][
- embeddingModel
- ] as Embeddings | undefined;
- }
+ if (
+ embeddingModelProviders[embeddingModelProvider] &&
+ embeddingModelProviders[embeddingModelProvider][embeddingModel]
+ ) {
+ embeddings = embeddingModelProviders[embeddingModelProvider][
+ embeddingModel
+ ] as Embeddings | undefined;
+ }
- if (!llm || !embeddings) {
+ if (!llm || !embeddings) {
+ ws.send(
+ JSON.stringify({
+ type: 'error',
+ data: 'Invalid LLM or embeddings model selected, please refresh the page and try again.',
+ key: 'INVALID_MODEL_SELECTED',
+ }),
+ );
+ ws.close();
+ }
+
+ ws.on(
+ 'message',
+ async (message) =>
+ await handleMessage(message.toString(), ws, llm, embeddings),
+ );
+
+ ws.on('close', () => logger.debug('Connection closed'));
+ } catch (err) {
ws.send(
JSON.stringify({
type: 'error',
- data: 'Invalid LLM or embeddings model selected, please refresh the page and try again.',
- key: 'INVALID_MODEL_SELECTED',
+ data: 'Internal server error.',
+ key: 'INTERNAL_SERVER_ERROR',
}),
);
ws.close();
+ logger.error(err);
}
-
- ws.on(
- 'message',
- async (message) =>
- await handleMessage(message.toString(), ws, llm, embeddings),
- );
-
- ws.on('close', () => logger.debug('Connection closed'));
};
diff --git a/ui/components/ChatWindow.tsx b/ui/components/ChatWindow.tsx
index ca1aaa3..6f58757 100644
--- a/ui/components/ChatWindow.tsx
+++ b/ui/components/ChatWindow.tsx
@@ -50,13 +50,13 @@ const useSocket = (url: string) => {
!chatModelProviders ||
Object.keys(chatModelProviders).length === 0
)
- return console.error('No chat models available');
+ return toast.error('No chat models available');
if (
!embeddingModelProviders ||
Object.keys(embeddingModelProviders).length === 0
)
- return console.error('No embedding models available');
+ return toast.error('No embedding models available');
chatModelProvider = Object.keys(chatModelProviders)[0];
chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
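The structural point of this refactor: everything in `handleConnection` — URL parsing, provider lookup, model construction, handler registration — now sits inside a single try/catch, so an unexpected throw becomes one structured `INTERNAL_SERVER_ERROR` message instead of an unhandled rejection that silently drops the socket. Reduced to its skeleton (a sketch of the patch above, details elided):

    export const handleConnection = async (
      ws: WebSocket,
      request: IncomingMessage,
    ) => {
      try {
        // ...parse search params, resolve providers, build llm/embeddings,
        // register 'message' and 'close' handlers...
      } catch (err) {
        // Any failure above degrades to one structured error for the client.
        ws.send(
          JSON.stringify({
            type: 'error',
            data: 'Internal server error.',
            key: 'INTERNAL_SERVER_ERROR',
          }),
        );
        ws.close();
        logger.error(err);
      }
    };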
From 4cb0aeeee3e9a9085e95eebcf72f28d3bc2609be Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Mon, 6 May 2024 20:00:56 +0530
Subject: [PATCH 027/434] feat(settings): conditionally pick selected models
---
ui/components/SettingsDialog.tsx | 59 ++++++++++++++++++++++++--------
1 file changed, 44 insertions(+), 15 deletions(-)
diff --git a/ui/components/SettingsDialog.tsx b/ui/components/SettingsDialog.tsx
index d704488..634a163 100644
--- a/ui/components/SettingsDialog.tsx
+++ b/ui/components/SettingsDialog.tsx
@@ -33,12 +33,8 @@ const SettingsDialog = ({
const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState<
string | null
>(null);
- const [customOpenAIApiKey, setCustomOpenAIApiKey] = useState(
- null,
- );
- const [customOpenAIBaseURL, setCustomOpenAIBaseURL] = useState(
- null,
- );
+ const [customOpenAIApiKey, setCustomOpenAIApiKey] = useState('');
+ const [customOpenAIBaseURL, setCustomOpenAIBaseURL] = useState('');
const [isLoading, setIsLoading] = useState(false);
const [isUpdating, setIsUpdating] = useState(false);
@@ -51,16 +47,49 @@ const SettingsDialog = ({
'Content-Type': 'application/json',
},
});
- const data = await res.json();
+
+ const data = (await res.json()) as SettingsType;
setConfig(data);
- setSelectedChatModelProvider(localStorage.getItem('chatModelProvider'));
- setSelectedChatModel(localStorage.getItem('chatModel'));
- setSelectedEmbeddingModelProvider(
- localStorage.getItem('embeddingModelProvider'),
+
+ const chatModelProvidersKeys = Object.keys(
+ data.chatModelProviders || {},
);
- setSelectedEmbeddingModel(localStorage.getItem('embeddingModel'));
- setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey'));
- setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseUrl'));
+ const embeddingModelProvidersKeys = Object.keys(
+ data.embeddingModelProviders || {},
+ );
+
+ const defaultChatModelProvider =
+ chatModelProvidersKeys.length > 0 ? chatModelProvidersKeys[0] : '';
+ const defaultEmbeddingModelProvider =
+ embeddingModelProvidersKeys.length > 0
+ ? embeddingModelProvidersKeys[0]
+ : '';
+
+ const chatModelProvider =
+ localStorage.getItem('chatModelProvider') ||
+ defaultChatModelProvider ||
+ '';
+ const chatModel =
+ localStorage.getItem('chatModel') ||
+ (data.chatModelProviders &&
+ data.chatModelProviders[chatModelProvider]?.[0]) ||
+ '';
+ const embeddingModelProvider =
+ localStorage.getItem('embeddingModelProvider') ||
+ defaultEmbeddingModelProvider ||
+ '';
+ const embeddingModel =
+ localStorage.getItem('embeddingModel') ||
+ (data.embeddingModelProviders &&
+ data.embeddingModelProviders[embeddingModelProvider]?.[0]) ||
+ '';
+
+ setSelectedChatModelProvider(chatModelProvider);
+ setSelectedChatModel(chatModel);
+ setSelectedEmbeddingModelProvider(embeddingModelProvider);
+ setSelectedEmbeddingModel(embeddingModel);
+ setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey') || '');
+ setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseUrl') || '');
setIsLoading(false);
};
@@ -223,7 +252,7 @@ const SettingsDialog = ({