diff --git a/src/components/EmptyChatMessageInput.tsx b/src/components/EmptyChatMessageInput.tsx
index a03d0ba..9d60cce 100644
--- a/src/components/EmptyChatMessageInput.tsx
+++ b/src/components/EmptyChatMessageInput.tsx
@@ -1,35 +1,16 @@
import { ArrowRight } from 'lucide-react';
import { useEffect, useRef, useState } from 'react';
import TextareaAutosize from 'react-textarea-autosize';
-import CopilotToggle from './MessageInputActions/Copilot';
import Focus from './MessageInputActions/Focus';
import Optimization from './MessageInputActions/Optimization';
import Attach from './MessageInputActions/Attach';
-import { File } from './ChatWindow';
import { useTranslations } from 'next-intl';
+import { useChat } from '@/lib/hooks/useChat';
-const EmptyChatMessageInput = ({
- sendMessage,
- focusMode,
- setFocusMode,
- optimizationMode,
- setOptimizationMode,
- fileIds,
- setFileIds,
- files,
- setFiles,
-}: {
- sendMessage: (message: string) => void;
- focusMode: string;
- setFocusMode: (mode: string) => void;
- optimizationMode: string;
- setOptimizationMode: (mode: string) => void;
- fileIds: string[];
- setFileIds: (fileIds: string[]) => void;
- files: File[];
- setFiles: (files: File[]) => void;
-}) => {
- const [copilotEnabled, setCopilotEnabled] = useState(false);
+const EmptyChatMessageInput = () => {
+ const { sendMessage } = useChat();
+
+ /* const [copilotEnabled, setCopilotEnabled] = useState(false); */
const [message, setMessage] = useState('');
const t = useTranslations('components');
@@ -86,20 +67,11 @@ const EmptyChatMessageInput = ({
           />
-          <CopilotToggle
-            copilotEnabled={copilotEnabled}
-            setCopilotEnabled={setCopilotEnabled}
-          />
   );
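
For orientation, this first hunk moves the component off prop drilling: it no longer receives chat state via props but reads it from the `useChat` context hook. A minimal, abridged sketch of the resulting pattern (the JSX is illustrative, not the full component from the diff):

```tsx
'use client';

import { useState } from 'react';
import { useChat } from '@/lib/hooks/useChat';

// Sketch: the refactored component takes no props; sendMessage (and any other
// chat state it needs) now comes from the ChatProvider context.
const EmptyChatInputSketch = () => {
  const { sendMessage } = useChat();
  const [message, setMessage] = useState('');

  return (
    <form
      onSubmit={(e) => {
        e.preventDefault();
        sendMessage(message); // no prop drilling from ChatWindow anymore
        setMessage('');
      }}
    >
      <textarea value={message} onChange={(e) => setMessage(e.target.value)} />
    </form>
  );
};

export default EmptyChatInputSketch;
```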
diff --git a/src/i18n/de.json b/src/i18n/de.json
index 26627c1..f509720 100644
--- a/src/i18n/de.json
+++ b/src/i18n/de.json
@@ -132,6 +132,7 @@
"api": {
"openaiApiKey": "OpenAI API-Schlüssel",
"ollamaApiUrl": "Ollama API-URL",
+ "ollamaApiKey": "Ollama API-Schlüssel (Kann leer gelassen werden)",
"groqApiKey": "GROQ API-Schlüssel",
"anthropicApiKey": "Anthropic API-Schlüssel",
"geminiApiKey": "Gemini API-Schlüssel",
@@ -248,4 +249,4 @@
}
}
}
-}
+}
\ No newline at end of file
diff --git a/src/i18n/en-GB.json b/src/i18n/en-GB.json
index 3b31c30..2ffadd1 100644
--- a/src/i18n/en-GB.json
+++ b/src/i18n/en-GB.json
@@ -132,6 +132,7 @@
"api": {
"openaiApiKey": "OpenAI API Key",
"ollamaApiUrl": "Ollama API URL",
+ "ollamaApiKey": "Ollama API Key (Can be left blank)",
"groqApiKey": "GROQ API Key",
"anthropicApiKey": "Anthropic API Key",
"geminiApiKey": "Gemini API Key",
@@ -248,4 +249,4 @@
}
}
}
-}
+}
\ No newline at end of file
diff --git a/src/i18n/en-US.json b/src/i18n/en-US.json
index 3b31c30..2ffadd1 100644
--- a/src/i18n/en-US.json
+++ b/src/i18n/en-US.json
@@ -132,6 +132,7 @@
"api": {
"openaiApiKey": "OpenAI API Key",
"ollamaApiUrl": "Ollama API URL",
+ "ollamaApiKey": "Ollama API Key (Can be left blank)",
"groqApiKey": "GROQ API Key",
"anthropicApiKey": "Anthropic API Key",
"geminiApiKey": "Gemini API Key",
@@ -248,4 +249,4 @@
}
}
}
-}
+}
\ No newline at end of file
diff --git a/src/i18n/fr-CA.json b/src/i18n/fr-CA.json
index ba415f1..63bd97d 100644
--- a/src/i18n/fr-CA.json
+++ b/src/i18n/fr-CA.json
@@ -132,6 +132,7 @@
"api": {
"openaiApiKey": "Clé API OpenAI",
"ollamaApiUrl": "URL de l'API Ollama",
+ "ollamaApiKey": "Clé API Ollama (peut être laissée vide)",
"groqApiKey": "Clé API GROQ",
"anthropicApiKey": "Clé API Anthropic",
"geminiApiKey": "Clé API Gemini",
@@ -248,4 +249,4 @@
}
}
}
-}
+}
\ No newline at end of file
diff --git a/src/i18n/fr-FR.json b/src/i18n/fr-FR.json
index d3d179b..de42994 100644
--- a/src/i18n/fr-FR.json
+++ b/src/i18n/fr-FR.json
@@ -132,6 +132,7 @@
"api": {
"openaiApiKey": "Clé API OpenAI",
"ollamaApiUrl": "URL de l'API Ollama",
+ "ollamaApiKey": "Clé API Ollama (peut être laissée vide)",
"groqApiKey": "Clé API GROQ",
"anthropicApiKey": "Clé API Anthropic",
"geminiApiKey": "Clé API Gemini",
@@ -248,4 +249,4 @@
}
}
}
-}
+}
\ No newline at end of file
diff --git a/src/i18n/ja.json b/src/i18n/ja.json
index b62f3e9..3916732 100644
--- a/src/i18n/ja.json
+++ b/src/i18n/ja.json
@@ -132,6 +132,7 @@
"api": {
"openaiApiKey": "OpenAI API キー",
"ollamaApiUrl": "Ollama API URL",
+ "ollamaApiKey": "Ollama API キー(空白のままにすることもできます)",
"groqApiKey": "GROQ API キー",
"anthropicApiKey": "Anthropic API キー",
"geminiApiKey": "Gemini API キー",
@@ -248,4 +249,4 @@
}
}
}
-}
+}
\ No newline at end of file
diff --git a/src/i18n/ko.json b/src/i18n/ko.json
index c6d9128..9464f74 100644
--- a/src/i18n/ko.json
+++ b/src/i18n/ko.json
@@ -132,6 +132,7 @@
"api": {
"openaiApiKey": "OpenAI API 키",
"ollamaApiUrl": "Ollama API URL",
+ "ollamaApiKey": "Ollama API 키 (비워둘 수 있음)",
"groqApiKey": "GROQ API 키",
"anthropicApiKey": "Anthropic API 키",
"geminiApiKey": "Gemini API 키",
@@ -248,4 +249,4 @@
}
}
}
-}
+}
\ No newline at end of file
diff --git a/src/i18n/zh-CN.json b/src/i18n/zh-CN.json
index b2e0f4e..5b31962 100644
--- a/src/i18n/zh-CN.json
+++ b/src/i18n/zh-CN.json
@@ -132,6 +132,7 @@
"api": {
"openaiApiKey": "OpenAI API 密钥",
"ollamaApiUrl": "Ollama API 地址",
+ "ollamaApiKey": "Ollama API 密钥(可留空)",
"groqApiKey": "GROQ API 密钥",
"anthropicApiKey": "Anthropic API 密钥",
"geminiApiKey": "Gemini API 密钥",
@@ -248,4 +249,4 @@
}
}
}
-}
+}
\ No newline at end of file
diff --git a/src/i18n/zh-HK.json b/src/i18n/zh-HK.json
index 7b1ca84..4c1ce33 100644
--- a/src/i18n/zh-HK.json
+++ b/src/i18n/zh-HK.json
@@ -132,6 +132,7 @@
"api": {
"openaiApiKey": "OpenAI API 金鑰",
"ollamaApiUrl": "Ollama API 位址",
+ "ollamaApiKey": "Ollama API 金鑰(可留空)",
"groqApiKey": "GROQ API 金鑰",
"anthropicApiKey": "Anthropic API 金鑰",
"geminiApiKey": "Gemini API 金鑰",
@@ -248,4 +249,4 @@
}
}
}
-}
+}
\ No newline at end of file
diff --git a/src/i18n/zh-TW.json b/src/i18n/zh-TW.json
index 798111b..390603c 100644
--- a/src/i18n/zh-TW.json
+++ b/src/i18n/zh-TW.json
@@ -132,6 +132,7 @@
"api": {
"openaiApiKey": "OpenAI API 金鑰",
"ollamaApiUrl": "Ollama API 位址",
+ "ollamaApiKey": "Ollama API 金鑰(可留空)",
"groqApiKey": "GROQ API 金鑰",
"anthropicApiKey": "Anthropic API 金鑰",
"geminiApiKey": "Gemini API 金鑰",
@@ -248,4 +249,4 @@
}
}
}
-}
+}
\ No newline at end of file
diff --git a/src/lib/config.ts b/src/lib/config.ts
index d885e13..79d69dc 100644
--- a/src/lib/config.ts
+++ b/src/lib/config.ts
@@ -31,6 +31,7 @@ interface Config {
};
OLLAMA: {
API_URL: string;
+ API_KEY: string;
};
DEEPSEEK: {
API_KEY: string;
@@ -86,6 +87,8 @@ export const getSearxngApiEndpoint = () =>
export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
+export const getOllamaApiKey = () => loadConfig().MODELS.OLLAMA.API_KEY;
+
export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
export const getAimlApiKey = () => loadConfig().MODELS.AIMLAPI.API_KEY;
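
The new getter reads `MODELS.OLLAMA.API_KEY` from the same config file that already holds the endpoint. A hedged sketch of the corresponding config entry, assuming the TOML layout implied by the `Config` interface (values illustrative):

```toml
# Illustrative excerpt; section and key names follow the Config interface
# in src/lib/config.ts. Leaving API_KEY empty keeps requests
# unauthenticated, matching the previous behavior.
[MODELS.OLLAMA]
API_URL = "http://localhost:11434"
API_KEY = ""
```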
diff --git a/src/lib/hooks/useChat.tsx b/src/lib/hooks/useChat.tsx
new file mode 100644
index 0000000..8402b88
--- /dev/null
+++ b/src/lib/hooks/useChat.tsx
@@ -0,0 +1,646 @@
+'use client';
+
+import { Message } from '@/components/ChatWindow';
+import { createContext, useContext, useEffect, useRef, useState } from 'react';
+import crypto from 'crypto';
+import { useSearchParams } from 'next/navigation';
+import { toast } from 'sonner';
+import { Document } from '@langchain/core/documents';
+import { useLocale, useTranslations } from 'next-intl';
+import { getSuggestions } from '../actions';
+
+type ChatContext = {
+ messages: Message[];
+ chatHistory: [string, string][];
+ files: File[];
+ fileIds: string[];
+ focusMode: string;
+ chatId: string | undefined;
+ optimizationMode: string;
+ isMessagesLoaded: boolean;
+ loading: boolean;
+ notFound: boolean;
+ messageAppeared: boolean;
+ isReady: boolean;
+ hasError: boolean;
+ setOptimizationMode: (mode: string) => void;
+ setFocusMode: (mode: string) => void;
+ setFiles: (files: File[]) => void;
+ setFileIds: (fileIds: string[]) => void;
+ sendMessage: (
+ message: string,
+ messageId?: string,
+ rewrite?: boolean,
+  ) => Promise<void>;
+ rewrite: (messageId: string) => void;
+};
+
+export interface File {
+ fileName: string;
+ fileExtension: string;
+ fileId: string;
+}
+
+interface ChatModelProvider {
+ name: string;
+ provider: string;
+}
+
+interface EmbeddingModelProvider {
+ name: string;
+ provider: string;
+}
+
+const checkConfig = async (
+ setChatModelProvider: (provider: ChatModelProvider) => void,
+ setEmbeddingModelProvider: (provider: EmbeddingModelProvider) => void,
+ setIsConfigReady: (ready: boolean) => void,
+ setHasError: (hasError: boolean) => void,
+ t: (key: string) => string,
+) => {
+ try {
+ let chatModel = localStorage.getItem('chatModel');
+ let chatModelProvider = localStorage.getItem('chatModelProvider');
+ let embeddingModel = localStorage.getItem('embeddingModel');
+ let embeddingModelProvider = localStorage.getItem('embeddingModelProvider');
+
+ const autoImageSearch = localStorage.getItem('autoImageSearch');
+ const autoVideoSearch = localStorage.getItem('autoVideoSearch');
+
+ if (!autoImageSearch) {
+ localStorage.setItem('autoImageSearch', 'true');
+ }
+
+ if (!autoVideoSearch) {
+ localStorage.setItem('autoVideoSearch', 'false');
+ }
+
+ const providers = await fetch(`/api/models`, {
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ }).then(async (res) => {
+ if (!res.ok)
+ throw new Error(
+ `Failed to fetch models: ${res.status} ${res.statusText}`,
+ );
+ return res.json();
+ });
+
+ if (
+ !chatModel ||
+ !chatModelProvider ||
+ !embeddingModel ||
+ !embeddingModelProvider
+ ) {
+ if (!chatModel || !chatModelProvider) {
+ const chatModelProviders = providers.chatModelProviders;
+ const chatModelProvidersKeys = Object.keys(chatModelProviders);
+
+ if (!chatModelProviders || chatModelProvidersKeys.length === 0) {
+ return toast.error(t('common.errors.noChatModelsAvailable'));
+ } else {
+ chatModelProvider =
+ chatModelProvidersKeys.find(
+ (provider) =>
+ Object.keys(chatModelProviders[provider]).length > 0,
+ ) || chatModelProvidersKeys[0];
+ }
+
+ if (
+ chatModelProvider === 'custom_openai' &&
+ Object.keys(chatModelProviders[chatModelProvider]).length === 0
+ ) {
+ toast.error(t('common.errors.chatProviderNotConfigured'));
+ return setHasError(true);
+ }
+
+ chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
+ }
+
+ if (!embeddingModel || !embeddingModelProvider) {
+ const embeddingModelProviders = providers.embeddingModelProviders;
+
+ if (
+ !embeddingModelProviders ||
+ Object.keys(embeddingModelProviders).length === 0
+ )
+ return toast.error(t('common.errors.noEmbeddingModelsAvailable'));
+
+ embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
+ embeddingModel = Object.keys(
+ embeddingModelProviders[embeddingModelProvider],
+ )[0];
+ }
+
+ localStorage.setItem('chatModel', chatModel!);
+ localStorage.setItem('chatModelProvider', chatModelProvider);
+ localStorage.setItem('embeddingModel', embeddingModel!);
+ localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
+ } else {
+ const chatModelProviders = providers.chatModelProviders;
+ const embeddingModelProviders = providers.embeddingModelProviders;
+
+ if (
+ Object.keys(chatModelProviders).length > 0 &&
+ (!chatModelProviders[chatModelProvider] ||
+ Object.keys(chatModelProviders[chatModelProvider]).length === 0)
+ ) {
+ const chatModelProvidersKeys = Object.keys(chatModelProviders);
+ chatModelProvider =
+ chatModelProvidersKeys.find(
+ (key) => Object.keys(chatModelProviders[key]).length > 0,
+ ) || chatModelProvidersKeys[0];
+
+ localStorage.setItem('chatModelProvider', chatModelProvider);
+ }
+
+ if (
+ chatModelProvider &&
+ !chatModelProviders[chatModelProvider][chatModel]
+ ) {
+ if (
+ chatModelProvider === 'custom_openai' &&
+ Object.keys(chatModelProviders[chatModelProvider]).length === 0
+ ) {
+ toast.error(t('common.errors.chatProviderNotConfigured'));
+ return setHasError(true);
+ }
+
+ chatModel = Object.keys(
+ chatModelProviders[
+ Object.keys(chatModelProviders[chatModelProvider]).length > 0
+ ? chatModelProvider
+ : Object.keys(chatModelProviders)[0]
+ ],
+ )[0];
+
+ localStorage.setItem('chatModel', chatModel);
+ }
+
+ if (
+ Object.keys(embeddingModelProviders).length > 0 &&
+ !embeddingModelProviders[embeddingModelProvider]
+ ) {
+ embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
+ localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
+ }
+
+ if (
+ embeddingModelProvider &&
+ !embeddingModelProviders[embeddingModelProvider][embeddingModel]
+ ) {
+ embeddingModel = Object.keys(
+ embeddingModelProviders[embeddingModelProvider],
+ )[0];
+ localStorage.setItem('embeddingModel', embeddingModel);
+ }
+ }
+
+ setChatModelProvider({
+ name: chatModel!,
+ provider: chatModelProvider,
+ });
+
+ setEmbeddingModelProvider({
+ name: embeddingModel!,
+ provider: embeddingModelProvider,
+ });
+
+ setIsConfigReady(true);
+ } catch (err) {
+ console.error('An error occurred while checking the configuration:', err);
+ setIsConfigReady(false);
+ setHasError(true);
+ }
+};
+
+const loadMessages = async (
+ chatId: string,
+ setMessages: (messages: Message[]) => void,
+ setIsMessagesLoaded: (loaded: boolean) => void,
+ setChatHistory: (history: [string, string][]) => void,
+ setFocusMode: (mode: string) => void,
+ setNotFound: (notFound: boolean) => void,
+ setFiles: (files: File[]) => void,
+ setFileIds: (fileIds: string[]) => void,
+) => {
+ const res = await fetch(`/api/chats/${chatId}`, {
+ method: 'GET',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ });
+
+ if (res.status === 404) {
+ setNotFound(true);
+ setIsMessagesLoaded(true);
+ return;
+ }
+
+ const data = await res.json();
+
+ const messages = data.messages.map((msg: any) => {
+ return {
+ ...msg,
+ ...JSON.parse(msg.metadata),
+ };
+ }) as Message[];
+
+ setMessages(messages);
+
+ const history = messages.map((msg) => {
+ return [msg.role, msg.content];
+ }) as [string, string][];
+
+ console.debug(new Date(), 'app:messages_loaded');
+
+ document.title = messages[0].content;
+
+ const files = data.chat.files.map((file: any) => {
+ return {
+ fileName: file.name,
+ fileExtension: file.name.split('.').pop(),
+ fileId: file.fileId,
+ };
+ });
+
+ setFiles(files);
+ setFileIds(files.map((file: File) => file.fileId));
+
+ setChatHistory(history);
+ setFocusMode(data.chat.focusMode);
+ setIsMessagesLoaded(true);
+};
+
+export const chatContext = createContext<ChatContext>({
+ chatHistory: [],
+ chatId: '',
+ fileIds: [],
+ files: [],
+ focusMode: '',
+ hasError: false,
+ isMessagesLoaded: false,
+ isReady: false,
+ loading: false,
+ messageAppeared: false,
+ messages: [],
+ notFound: false,
+ optimizationMode: '',
+ rewrite: () => {},
+ sendMessage: async () => {},
+ setFileIds: () => {},
+ setFiles: () => {},
+ setFocusMode: () => {},
+ setOptimizationMode: () => {},
+});
+
+export const ChatProvider = ({
+ children,
+ id,
+}: {
+ children: React.ReactNode;
+ id?: string;
+}) => {
+ const searchParams = useSearchParams();
+ const initialMessage = searchParams.get('q');
+
+ const [chatId, setChatId] = useState(id);
+ const [newChatCreated, setNewChatCreated] = useState(false);
+
+ const [loading, setLoading] = useState(false);
+ const [messageAppeared, setMessageAppeared] = useState(false);
+
+ const [chatHistory, setChatHistory] = useState<[string, string][]>([]);
+  const [messages, setMessages] = useState<Message[]>([]);
+
+  const [files, setFiles] = useState<File[]>([]);
+  const [fileIds, setFileIds] = useState<string[]>([]);
+
+ const [focusMode, setFocusMode] = useState('webSearch');
+ const [optimizationMode, setOptimizationMode] = useState('speed');
+
+ const [isMessagesLoaded, setIsMessagesLoaded] = useState(false);
+
+ const [notFound, setNotFound] = useState(false);
+
+  const [chatModelProvider, setChatModelProvider] =
+    useState<ChatModelProvider>({
+      name: '',
+      provider: '',
+    });
+
+ const [embeddingModelProvider, setEmbeddingModelProvider] =
+    useState<EmbeddingModelProvider>({
+ name: '',
+ provider: '',
+ });
+
+ const [isConfigReady, setIsConfigReady] = useState(false);
+ const [hasError, setHasError] = useState(false);
+ const [isReady, setIsReady] = useState(false);
+
+  const messagesRef = useRef<Message[]>([]);
+
+ const t = useTranslations();
+ const locale = useLocale();
+
+ useEffect(() => {
+ checkConfig(
+ setChatModelProvider,
+ setEmbeddingModelProvider,
+ setIsConfigReady,
+ setHasError,
+ t,
+ );
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, []);
+
+ useEffect(() => {
+ if (
+ chatId &&
+ !newChatCreated &&
+ !isMessagesLoaded &&
+ messages.length === 0
+ ) {
+ loadMessages(
+ chatId,
+ setMessages,
+ setIsMessagesLoaded,
+ setChatHistory,
+ setFocusMode,
+ setNotFound,
+ setFiles,
+ setFileIds,
+ );
+ } else if (!chatId) {
+ setNewChatCreated(true);
+ setIsMessagesLoaded(true);
+ setChatId(crypto.randomBytes(20).toString('hex'));
+ }
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, []);
+
+ useEffect(() => {
+ messagesRef.current = messages;
+ }, [messages]);
+
+ useEffect(() => {
+ if (isMessagesLoaded && isConfigReady) {
+ setIsReady(true);
+ console.debug(new Date(), 'app:ready');
+ } else {
+ setIsReady(false);
+ }
+ }, [isMessagesLoaded, isConfigReady]);
+
+ const rewrite = (messageId: string) => {
+ const index = messages.findIndex((msg) => msg.messageId === messageId);
+
+ if (index === -1) return;
+
+ const message = messages[index - 1];
+
+ setMessages((prev) => {
+ return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
+ });
+ setChatHistory((prev) => {
+ return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
+ });
+
+ sendMessage(message.content, message.messageId, true);
+ };
+
+ useEffect(() => {
+ if (isReady && initialMessage && isConfigReady) {
+ if (!isConfigReady) {
+ toast.error(t('common.errors.cannotSendBeforeConfigReady'));
+ return;
+ }
+ sendMessage(initialMessage);
+ }
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [isConfigReady, isReady, initialMessage]);
+
+ const sendMessage: ChatContext['sendMessage'] = async (
+ message,
+ messageId,
+ rewrite = false,
+ ) => {
+ if (loading) return;
+ setLoading(true);
+ setMessageAppeared(false);
+
+ let sources: Document[] | undefined = undefined;
+    let receivedMessage = '';
+ let added = false;
+
+ messageId = messageId ?? crypto.randomBytes(7).toString('hex');
+
+ setMessages((prevMessages) => [
+ ...prevMessages,
+ {
+ content: message,
+ messageId: messageId,
+ chatId: chatId!,
+ role: 'user',
+ createdAt: new Date(),
+ },
+ ]);
+
+ const messageHandler = async (data: any) => {
+ if (data.type === 'error') {
+ toast.error(data.data);
+ setLoading(false);
+ return;
+ }
+
+ if (data.type === 'sources') {
+ sources = data.data;
+ if (!added) {
+ setMessages((prevMessages) => [
+ ...prevMessages,
+ {
+ content: '',
+ messageId: data.messageId,
+ chatId: chatId!,
+ role: 'assistant',
+ sources: sources,
+ createdAt: new Date(),
+ },
+ ]);
+ added = true;
+ }
+ setMessageAppeared(true);
+ }
+
+ if (data.type === 'message') {
+ if (!added) {
+ setMessages((prevMessages) => [
+ ...prevMessages,
+ {
+ content: data.data,
+ messageId: data.messageId,
+ chatId: chatId!,
+ role: 'assistant',
+ sources: sources,
+ createdAt: new Date(),
+ },
+ ]);
+ added = true;
+ }
+
+ setMessages((prev) =>
+ prev.map((message) => {
+ if (message.messageId === data.messageId) {
+ return { ...message, content: message.content + data.data };
+ }
+
+ return message;
+ }),
+ );
+
+        receivedMessage += data.data;
+ setMessageAppeared(true);
+ }
+
+ if (data.type === 'messageEnd') {
+ setChatHistory((prevHistory) => [
+ ...prevHistory,
+ ['human', message],
+          ['assistant', receivedMessage],
+ ]);
+
+ setLoading(false);
+
+ const lastMsg = messagesRef.current[messagesRef.current.length - 1];
+
+ const autoImageSearch = localStorage.getItem('autoImageSearch');
+ const autoVideoSearch = localStorage.getItem('autoVideoSearch');
+
+ if (autoImageSearch === 'true') {
+ document
+ .getElementById(`search-images-${lastMsg.messageId}`)
+ ?.click();
+ }
+
+ if (autoVideoSearch === 'true') {
+ document
+ .getElementById(`search-videos-${lastMsg.messageId}`)
+ ?.click();
+ }
+
+ if (
+ lastMsg.role === 'assistant' &&
+ lastMsg.sources &&
+ lastMsg.sources.length > 0 &&
+ !lastMsg.suggestions
+ ) {
+ const suggestions = await getSuggestions(messagesRef.current, locale);
+ setMessages((prev) =>
+ prev.map((msg) => {
+ if (msg.messageId === lastMsg.messageId) {
+ return { ...msg, suggestions: suggestions };
+ }
+ return msg;
+ }),
+ );
+ }
+ }
+ };
+
+ const messageIndex = messages.findIndex((m) => m.messageId === messageId);
+
+ const res = await fetch('/api/chat', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify({
+ content: message,
+ message: {
+ messageId: messageId,
+ chatId: chatId!,
+ content: message,
+ },
+ chatId: chatId!,
+ files: fileIds,
+ focusMode: focusMode,
+ optimizationMode: optimizationMode,
+ history: rewrite
+ ? chatHistory.slice(0, messageIndex === -1 ? undefined : messageIndex)
+ : chatHistory,
+ chatModel: {
+ name: chatModelProvider.name,
+ provider: chatModelProvider.provider,
+ },
+ embeddingModel: {
+ name: embeddingModelProvider.name,
+ provider: embeddingModelProvider.provider,
+ },
+ systemInstructions: localStorage.getItem('systemInstructions'),
+ locale,
+ }),
+ });
+
+ if (!res.body) throw new Error('No response body');
+
+ const reader = res.body?.getReader();
+ const decoder = new TextDecoder('utf-8');
+
+ let partialChunk = '';
+
+ while (true) {
+ const { value, done } = await reader.read();
+ if (done) break;
+
+ partialChunk += decoder.decode(value, { stream: true });
+
+ try {
+ const messages = partialChunk.split('\n');
+ for (const msg of messages) {
+ if (!msg.trim()) continue;
+ const json = JSON.parse(msg);
+ messageHandler(json);
+ }
+ partialChunk = '';
+ } catch (error) {
+ console.warn('Incomplete JSON, waiting for next chunk...');
+ }
+ }
+ };
+
+  return (
+    <chatContext.Provider
+      value={{
+        messages, chatHistory, files, fileIds, focusMode, chatId,
+        optimizationMode, isMessagesLoaded, loading, notFound,
+        messageAppeared, isReady, hasError, setOptimizationMode,
+        setFocusMode, setFiles, setFileIds, sendMessage, rewrite,
+      }}
+    >
+      {children}
+    </chatContext.Provider>
+  );
+};
+
+export const useChat = () => {
+ const ctx = useContext(chatContext);
+ return ctx;
+};
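
Downstream components consume the new hook like any React context. A hypothetical wiring sketch; only `ChatProvider` and `useChat` come from the diff, the other component names are illustrative:

```tsx
'use client';

import { ChatProvider, useChat } from '@/lib/hooks/useChat';

// Illustrative consumer: reads shared chat state without prop drilling.
const ChatStatus = () => {
  const { isReady, loading, messages } = useChat();
  if (!isReady) return <p>Loading configuration…</p>;
  return <p>{loading ? 'Streaming…' : `${messages.length} messages`}</p>;
};

// Illustrative page: everything under ChatProvider shares one chat session.
const ChatPage = ({ chatId }: { chatId?: string }) => (
  <ChatProvider id={chatId}>
    <ChatStatus />
  </ChatProvider>
);

export default ChatPage;
```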
diff --git a/src/lib/providers/ollama.ts b/src/lib/providers/ollama.ts
index d5c7899..cb0b848 100644
--- a/src/lib/providers/ollama.ts
+++ b/src/lib/providers/ollama.ts
@@ -1,5 +1,5 @@
import axios from 'axios';
-import { getKeepAlive, getOllamaApiEndpoint } from '../config';
+import { getKeepAlive, getOllamaApiEndpoint, getOllamaApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';
export const PROVIDER_INFO = {
@@ -11,6 +11,7 @@ import { OllamaEmbeddings } from '@langchain/ollama';
export const loadOllamaChatModels = async () => {
const ollamaApiEndpoint = getOllamaApiEndpoint();
+ const ollamaApiKey = getOllamaApiKey();
if (!ollamaApiEndpoint) return {};
@@ -33,6 +34,9 @@ export const loadOllamaChatModels = async () => {
model: model.model,
temperature: 0.7,
keepAlive: getKeepAlive(),
+ ...(ollamaApiKey
+ ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
+ : {}),
}),
};
});
@@ -46,6 +50,7 @@ export const loadOllamaChatModels = async () => {
export const loadOllamaEmbeddingModels = async () => {
const ollamaApiEndpoint = getOllamaApiEndpoint();
+ const ollamaApiKey = getOllamaApiKey();
if (!ollamaApiEndpoint) return {};
@@ -66,6 +71,9 @@ export const loadOllamaEmbeddingModels = async () => {
model: new OllamaEmbeddings({
baseUrl: ollamaApiEndpoint,
model: model.model,
+ ...(ollamaApiKey
+ ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
+ : {}),
}),
};
});
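
The `...(ollamaApiKey ? { headers: ... } : {})` spread attaches the `Authorization` header only when a key is configured, so plain local Ollama installs keep sending unauthenticated requests while setups behind a Bearer-token proxy now work. A standalone sketch of the pattern (`buildHeaders` is an illustrative helper, not part of the diff):

```ts
// Only include Authorization when an API key is actually set; otherwise the
// spread contributes nothing and the header map stays unchanged.
const buildHeaders = (apiKey?: string): Record<string, string> => ({
  'Content-Type': 'application/json',
  ...(apiKey ? { Authorization: `Bearer ${apiKey}` } : {}),
});

console.log(buildHeaders());         // { 'Content-Type': 'application/json' }
console.log(buildHeaders('secret')); // ...plus Authorization: 'Bearer secret'
```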