diff --git a/src/app/api/chat/route.ts b/src/app/api/chat/route.ts
index 2d53b75..ba88da6 100644
--- a/src/app/api/chat/route.ts
+++ b/src/app/api/chat/route.ts
@@ -1,11 +1,7 @@
-import prompts from '@/lib/prompts';
-import MetaSearchAgent from '@/lib/search/metaSearchAgent';
import crypto from 'crypto';
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
import { EventEmitter } from 'stream';
import {
- chatModelProviders,
- embeddingModelProviders,
getAvailableChatModelProviders,
getAvailableEmbeddingModelProviders,
} from '@/lib/providers';
@@ -138,6 +134,8 @@ const handleHistorySave = async (
where: eq(chats.id, message.chatId),
});
+ const fileData = files.map(getFileDetails);
+
if (!chat) {
await db
.insert(chats)
@@ -146,9 +144,15 @@ const handleHistorySave = async (
title: message.content,
createdAt: new Date().toString(),
focusMode: focusMode,
- files: files.map(getFileDetails),
+ files: fileData,
})
.execute();
+  } else if (JSON.stringify(chat.files ?? []) !== JSON.stringify(fileData)) {
+    await db
+      .update(chats)
+      .set({ files: fileData })
+      .where(eq(chats.id, message.chatId))
+      .execute();
}
const messageExists = await db.query.messages.findFirst({
diff --git a/src/app/api/config/route.ts b/src/app/api/config/route.ts
index 0c11b23..f117cce 100644
--- a/src/app/api/config/route.ts
+++ b/src/app/api/config/route.ts
@@ -11,6 +11,7 @@ import {
getAimlApiKey,
getLMStudioApiEndpoint,
updateConfig,
+ getOllamaApiKey,
} from '@/lib/config';
import {
getAvailableChatModelProviders,
@@ -53,6 +54,7 @@ export const GET = async (req: Request) => {
config['openaiApiKey'] = getOpenaiApiKey();
config['ollamaApiUrl'] = getOllamaApiEndpoint();
+ config['ollamaApiKey'] = getOllamaApiKey();
config['lmStudioApiUrl'] = getLMStudioApiEndpoint();
config['anthropicApiKey'] = getAnthropicApiKey();
config['groqApiKey'] = getGroqApiKey();
@@ -93,6 +95,7 @@ export const POST = async (req: Request) => {
},
OLLAMA: {
API_URL: config.ollamaApiUrl,
+ API_KEY: config.ollamaApiKey,
},
DEEPSEEK: {
API_KEY: config.deepseekApiKey,
diff --git a/src/app/c/[chatId]/page.tsx b/src/app/c/[chatId]/page.tsx
index aac125a..672107a 100644
--- a/src/app/c/[chatId]/page.tsx
+++ b/src/app/c/[chatId]/page.tsx
@@ -1,9 +1,17 @@
-import ChatWindow from '@/components/ChatWindow';
-import React from 'react';
+'use client';
-const Page = ({ params }: { params: Promise<{ chatId: string }> }) => {
- const { chatId } = React.use(params);
- return ;
+import ChatWindow from '@/components/ChatWindow';
+import { useParams } from 'next/navigation';
+import React from 'react';
+import { ChatProvider } from '@/lib/hooks/useChat';
+
+const Page = () => {
+ const { chatId }: { chatId: string } = useParams();
+ return (
+
+
+
+ );
};
export default Page;
diff --git a/src/app/page.tsx b/src/app/page.tsx
index e18aca9..25981b5 100644
--- a/src/app/page.tsx
+++ b/src/app/page.tsx
@@ -1,4 +1,5 @@
import ChatWindow from '@/components/ChatWindow';
+import { ChatProvider } from '@/lib/hooks/useChat';
import { Metadata } from 'next';
import { Suspense } from 'react';
@@ -11,7 +12,9 @@ const Home = () => {
return (
-
+
+
+
);
diff --git a/src/app/settings/page.tsx b/src/app/settings/page.tsx
index 1b13c9c..6fb8255 100644
--- a/src/app/settings/page.tsx
+++ b/src/app/settings/page.tsx
@@ -21,6 +21,7 @@ interface SettingsType {
anthropicApiKey: string;
geminiApiKey: string;
ollamaApiUrl: string;
+ ollamaApiKey: string;
lmStudioApiUrl: string;
deepseekApiKey: string;
aimlApiKey: string;
@@ -818,6 +819,25 @@ const Page = () => {
/>
+
+
+ Ollama API Key (Can be left blank)
+
+
{
+ setConfig((prev) => ({
+ ...prev!,
+ ollamaApiKey: e.target.value,
+ }));
+ }}
+ onSave={(value) => saveConfig('ollamaApiKey', value)}
+ />
+
+
GROQ API Key
diff --git a/src/components/Chat.tsx b/src/components/Chat.tsx
index 0cf125b..a5d8cf9 100644
--- a/src/components/Chat.tsx
+++ b/src/components/Chat.tsx
@@ -5,28 +5,11 @@ import MessageInput from './MessageInput';
import { File, Message } from './ChatWindow';
import MessageBox from './MessageBox';
import MessageBoxLoading from './MessageBoxLoading';
+import { useChat } from '@/lib/hooks/useChat';
+
+const Chat = () => {
+ const { messages, loading, messageAppeared } = useChat();
-const Chat = ({
- loading,
- messages,
- sendMessage,
- messageAppeared,
- rewrite,
- fileIds,
- setFileIds,
- files,
- setFiles,
-}: {
- messages: Message[];
- sendMessage: (message: string) => void;
- loading: boolean;
- messageAppeared: boolean;
- rewrite: (messageId: string) => void;
- fileIds: string[];
- setFileIds: (fileIds: string[]) => void;
- files: File[];
- setFiles: (files: File[]) => void;
-}) => {
const [dividerWidth, setDividerWidth] = useState(0);
const dividerRef = useRef(null);
const messageEnd = useRef(null);
@@ -72,12 +55,8 @@ const Chat = ({
key={i}
message={msg}
messageIndex={i}
- history={messages}
- loading={loading}
dividerRef={isLast ? dividerRef : undefined}
isLast={isLast}
- rewrite={rewrite}
- sendMessage={sendMessage}
/>
{!isLast && msg.role === 'assistant' && (
@@ -92,14 +71,7 @@ const Chat = ({
className="bottom-24 lg:bottom-10 fixed z-40"
style={{ width: dividerWidth }}
>
-
+
)}
diff --git a/src/components/ChatWindow.tsx b/src/components/ChatWindow.tsx
index 6ff7d22..0d40c83 100644
--- a/src/components/ChatWindow.tsx
+++ b/src/components/ChatWindow.tsx
@@ -1,17 +1,13 @@
'use client';
-import { useEffect, useRef, useState } from 'react';
import { Document } from '@langchain/core/documents';
import Navbar from './Navbar';
import Chat from './Chat';
import EmptyChat from './EmptyChat';
-import crypto from 'crypto';
-import { toast } from 'sonner';
-import { useSearchParams } from 'next/navigation';
-import { getSuggestions } from '@/lib/actions';
import { Settings } from 'lucide-react';
import Link from 'next/link';
import NextError from 'next/error';
+import { useChat } from '@/lib/hooks/useChat';
export type Message = {
messageId: string;
@@ -29,547 +25,8 @@ export interface File {
fileId: string;
}
-interface ChatModelProvider {
- name: string;
- provider: string;
-}
-
-interface EmbeddingModelProvider {
- name: string;
- provider: string;
-}
-
-const checkConfig = async (
- setChatModelProvider: (provider: ChatModelProvider) => void,
- setEmbeddingModelProvider: (provider: EmbeddingModelProvider) => void,
- setIsConfigReady: (ready: boolean) => void,
- setHasError: (hasError: boolean) => void,
-) => {
- try {
- let chatModel = localStorage.getItem('chatModel');
- let chatModelProvider = localStorage.getItem('chatModelProvider');
- let embeddingModel = localStorage.getItem('embeddingModel');
- let embeddingModelProvider = localStorage.getItem('embeddingModelProvider');
-
- const autoImageSearch = localStorage.getItem('autoImageSearch');
- const autoVideoSearch = localStorage.getItem('autoVideoSearch');
-
- if (!autoImageSearch) {
- localStorage.setItem('autoImageSearch', 'true');
- }
-
- if (!autoVideoSearch) {
- localStorage.setItem('autoVideoSearch', 'false');
- }
-
- const providers = await fetch(`/api/models`, {
- headers: {
- 'Content-Type': 'application/json',
- },
- }).then(async (res) => {
- if (!res.ok)
- throw new Error(
- `Failed to fetch models: ${res.status} ${res.statusText}`,
- );
- return res.json();
- });
-
- if (
- !chatModel ||
- !chatModelProvider ||
- !embeddingModel ||
- !embeddingModelProvider
- ) {
- if (!chatModel || !chatModelProvider) {
- const chatModelProviders = providers.chatModelProviders;
- const chatModelProvidersKeys = Object.keys(chatModelProviders);
-
- if (!chatModelProviders || chatModelProvidersKeys.length === 0) {
- return toast.error('No chat models available');
- } else {
- chatModelProvider =
- chatModelProvidersKeys.find(
- (provider) =>
- Object.keys(chatModelProviders[provider]).length > 0,
- ) || chatModelProvidersKeys[0];
- }
-
- if (
- chatModelProvider === 'custom_openai' &&
- Object.keys(chatModelProviders[chatModelProvider]).length === 0
- ) {
- toast.error(
- "Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
- );
- return setHasError(true);
- }
-
- chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
- }
-
- if (!embeddingModel || !embeddingModelProvider) {
- const embeddingModelProviders = providers.embeddingModelProviders;
-
- if (
- !embeddingModelProviders ||
- Object.keys(embeddingModelProviders).length === 0
- )
- return toast.error('No embedding models available');
-
- embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
- embeddingModel = Object.keys(
- embeddingModelProviders[embeddingModelProvider],
- )[0];
- }
-
- localStorage.setItem('chatModel', chatModel!);
- localStorage.setItem('chatModelProvider', chatModelProvider);
- localStorage.setItem('embeddingModel', embeddingModel!);
- localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
- } else {
- const chatModelProviders = providers.chatModelProviders;
- const embeddingModelProviders = providers.embeddingModelProviders;
-
- if (
- Object.keys(chatModelProviders).length > 0 &&
- (!chatModelProviders[chatModelProvider] ||
- Object.keys(chatModelProviders[chatModelProvider]).length === 0)
- ) {
- const chatModelProvidersKeys = Object.keys(chatModelProviders);
- chatModelProvider =
- chatModelProvidersKeys.find(
- (key) => Object.keys(chatModelProviders[key]).length > 0,
- ) || chatModelProvidersKeys[0];
-
- localStorage.setItem('chatModelProvider', chatModelProvider);
- }
-
- if (
- chatModelProvider &&
- !chatModelProviders[chatModelProvider][chatModel]
- ) {
- if (
- chatModelProvider === 'custom_openai' &&
- Object.keys(chatModelProviders[chatModelProvider]).length === 0
- ) {
- toast.error(
- "Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
- );
- return setHasError(true);
- }
-
- chatModel = Object.keys(
- chatModelProviders[
- Object.keys(chatModelProviders[chatModelProvider]).length > 0
- ? chatModelProvider
- : Object.keys(chatModelProviders)[0]
- ],
- )[0];
-
- localStorage.setItem('chatModel', chatModel);
- }
-
- if (
- Object.keys(embeddingModelProviders).length > 0 &&
- !embeddingModelProviders[embeddingModelProvider]
- ) {
- embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
- localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
- }
-
- if (
- embeddingModelProvider &&
- !embeddingModelProviders[embeddingModelProvider][embeddingModel]
- ) {
- embeddingModel = Object.keys(
- embeddingModelProviders[embeddingModelProvider],
- )[0];
- localStorage.setItem('embeddingModel', embeddingModel);
- }
- }
-
- setChatModelProvider({
- name: chatModel!,
- provider: chatModelProvider,
- });
-
- setEmbeddingModelProvider({
- name: embeddingModel!,
- provider: embeddingModelProvider,
- });
-
- setIsConfigReady(true);
- } catch (err) {
- console.error('An error occurred while checking the configuration:', err);
- setIsConfigReady(false);
- setHasError(true);
- }
-};
-
-const loadMessages = async (
- chatId: string,
- setMessages: (messages: Message[]) => void,
- setIsMessagesLoaded: (loaded: boolean) => void,
- setChatHistory: (history: [string, string][]) => void,
- setFocusMode: (mode: string) => void,
- setNotFound: (notFound: boolean) => void,
- setFiles: (files: File[]) => void,
- setFileIds: (fileIds: string[]) => void,
-) => {
- const res = await fetch(`/api/chats/${chatId}`, {
- method: 'GET',
- headers: {
- 'Content-Type': 'application/json',
- },
- });
-
- if (res.status === 404) {
- setNotFound(true);
- setIsMessagesLoaded(true);
- return;
- }
-
- const data = await res.json();
-
- const messages = data.messages.map((msg: any) => {
- return {
- ...msg,
- ...JSON.parse(msg.metadata),
- };
- }) as Message[];
-
- setMessages(messages);
-
- const history = messages.map((msg) => {
- return [msg.role, msg.content];
- }) as [string, string][];
-
- console.debug(new Date(), 'app:messages_loaded');
-
- document.title = messages[0].content;
-
- const files = data.chat.files.map((file: any) => {
- return {
- fileName: file.name,
- fileExtension: file.name.split('.').pop(),
- fileId: file.fileId,
- };
- });
-
- setFiles(files);
- setFileIds(files.map((file: File) => file.fileId));
-
- setChatHistory(history);
- setFocusMode(data.chat.focusMode);
- setIsMessagesLoaded(true);
-};
-
-const ChatWindow = ({ id }: { id?: string }) => {
- const searchParams = useSearchParams();
- const initialMessage = searchParams.get('q');
-
- const [chatId, setChatId] = useState(id);
- const [newChatCreated, setNewChatCreated] = useState(false);
-
- const [chatModelProvider, setChatModelProvider] = useState(
- {
- name: '',
- provider: '',
- },
- );
-
- const [embeddingModelProvider, setEmbeddingModelProvider] =
- useState({
- name: '',
- provider: '',
- });
-
- const [isConfigReady, setIsConfigReady] = useState(false);
- const [hasError, setHasError] = useState(false);
- const [isReady, setIsReady] = useState(false);
-
- useEffect(() => {
- checkConfig(
- setChatModelProvider,
- setEmbeddingModelProvider,
- setIsConfigReady,
- setHasError,
- );
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, []);
-
- const [loading, setLoading] = useState(false);
- const [messageAppeared, setMessageAppeared] = useState(false);
-
- const [chatHistory, setChatHistory] = useState<[string, string][]>([]);
- const [messages, setMessages] = useState([]);
-
- const [files, setFiles] = useState([]);
- const [fileIds, setFileIds] = useState([]);
-
- const [focusMode, setFocusMode] = useState('webSearch');
- const [optimizationMode, setOptimizationMode] = useState('speed');
-
- const [isMessagesLoaded, setIsMessagesLoaded] = useState(false);
-
- const [notFound, setNotFound] = useState(false);
-
- useEffect(() => {
- if (
- chatId &&
- !newChatCreated &&
- !isMessagesLoaded &&
- messages.length === 0
- ) {
- loadMessages(
- chatId,
- setMessages,
- setIsMessagesLoaded,
- setChatHistory,
- setFocusMode,
- setNotFound,
- setFiles,
- setFileIds,
- );
- } else if (!chatId) {
- setNewChatCreated(true);
- setIsMessagesLoaded(true);
- setChatId(crypto.randomBytes(20).toString('hex'));
- }
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, []);
-
- const messagesRef = useRef([]);
-
- useEffect(() => {
- messagesRef.current = messages;
- }, [messages]);
-
- useEffect(() => {
- if (isMessagesLoaded && isConfigReady) {
- setIsReady(true);
- console.debug(new Date(), 'app:ready');
- } else {
- setIsReady(false);
- }
- }, [isMessagesLoaded, isConfigReady]);
-
- const sendMessage = async (
- message: string,
- messageId?: string,
- rewrite = false,
- ) => {
- if (loading) return;
- if (!isConfigReady) {
- toast.error('Cannot send message before the configuration is ready');
- return;
- }
-
- setLoading(true);
- setMessageAppeared(false);
-
- let sources: Document[] | undefined = undefined;
- let recievedMessage = '';
- let added = false;
-
- messageId = messageId ?? crypto.randomBytes(7).toString('hex');
-
- setMessages((prevMessages) => [
- ...prevMessages,
- {
- content: message,
- messageId: messageId,
- chatId: chatId!,
- role: 'user',
- createdAt: new Date(),
- },
- ]);
-
- const messageHandler = async (data: any) => {
- if (data.type === 'error') {
- toast.error(data.data);
- setLoading(false);
- return;
- }
-
- if (data.type === 'sources') {
- sources = data.data;
- if (!added) {
- setMessages((prevMessages) => [
- ...prevMessages,
- {
- content: '',
- messageId: data.messageId,
- chatId: chatId!,
- role: 'assistant',
- sources: sources,
- createdAt: new Date(),
- },
- ]);
- added = true;
- }
- setMessageAppeared(true);
- }
-
- if (data.type === 'message') {
- if (!added) {
- setMessages((prevMessages) => [
- ...prevMessages,
- {
- content: data.data,
- messageId: data.messageId,
- chatId: chatId!,
- role: 'assistant',
- sources: sources,
- createdAt: new Date(),
- },
- ]);
- added = true;
- }
-
- setMessages((prev) =>
- prev.map((message) => {
- if (message.messageId === data.messageId) {
- return { ...message, content: message.content + data.data };
- }
-
- return message;
- }),
- );
-
- recievedMessage += data.data;
- setMessageAppeared(true);
- }
-
- if (data.type === 'messageEnd') {
- setChatHistory((prevHistory) => [
- ...prevHistory,
- ['human', message],
- ['assistant', recievedMessage],
- ]);
-
- setLoading(false);
-
- const lastMsg = messagesRef.current[messagesRef.current.length - 1];
-
- const autoImageSearch = localStorage.getItem('autoImageSearch');
- const autoVideoSearch = localStorage.getItem('autoVideoSearch');
-
- if (autoImageSearch === 'true') {
- document
- .getElementById(`search-images-${lastMsg.messageId}`)
- ?.click();
- }
-
- if (autoVideoSearch === 'true') {
- document
- .getElementById(`search-videos-${lastMsg.messageId}`)
- ?.click();
- }
-
- if (
- lastMsg.role === 'assistant' &&
- lastMsg.sources &&
- lastMsg.sources.length > 0 &&
- !lastMsg.suggestions
- ) {
- const suggestions = await getSuggestions(messagesRef.current);
- setMessages((prev) =>
- prev.map((msg) => {
- if (msg.messageId === lastMsg.messageId) {
- return { ...msg, suggestions: suggestions };
- }
- return msg;
- }),
- );
- }
- }
- };
-
- const messageIndex = messages.findIndex((m) => m.messageId === messageId);
-
- const res = await fetch('/api/chat', {
- method: 'POST',
- headers: {
- 'Content-Type': 'application/json',
- },
- body: JSON.stringify({
- content: message,
- message: {
- messageId: messageId,
- chatId: chatId!,
- content: message,
- },
- chatId: chatId!,
- files: fileIds,
- focusMode: focusMode,
- optimizationMode: optimizationMode,
- history: rewrite
- ? chatHistory.slice(0, messageIndex === -1 ? undefined : messageIndex)
- : chatHistory,
- chatModel: {
- name: chatModelProvider.name,
- provider: chatModelProvider.provider,
- },
- embeddingModel: {
- name: embeddingModelProvider.name,
- provider: embeddingModelProvider.provider,
- },
- systemInstructions: localStorage.getItem('systemInstructions'),
- }),
- });
-
- if (!res.body) throw new Error('No response body');
-
- const reader = res.body?.getReader();
- const decoder = new TextDecoder('utf-8');
-
- let partialChunk = '';
-
- while (true) {
- const { value, done } = await reader.read();
- if (done) break;
-
- partialChunk += decoder.decode(value, { stream: true });
-
- try {
- const messages = partialChunk.split('\n');
- for (const msg of messages) {
- if (!msg.trim()) continue;
- const json = JSON.parse(msg);
- messageHandler(json);
- }
- partialChunk = '';
- } catch (error) {
- console.warn('Incomplete JSON, waiting for next chunk...');
- }
- }
- };
-
- const rewrite = (messageId: string) => {
- const index = messages.findIndex((msg) => msg.messageId === messageId);
-
- if (index === -1) return;
-
- const message = messages[index - 1];
-
- setMessages((prev) => {
- return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
- });
- setChatHistory((prev) => {
- return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
- });
-
- sendMessage(message.content, message.messageId, true);
- };
-
- useEffect(() => {
- if (isReady && initialMessage && isConfigReady) {
- sendMessage(initialMessage);
- }
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, [isConfigReady, isReady, initialMessage]);
-
+const ChatWindow = () => {
+ const { hasError, isReady, notFound, messages } = useChat();
if (hasError) {
return (
@@ -594,31 +51,11 @@ const ChatWindow = ({ id }: { id?: string }) => {
{messages.length > 0 ? (
<>
-
-
+
+
>
) : (
-
+
)}
)
diff --git a/src/components/EmptyChat.tsx b/src/components/EmptyChat.tsx
index 0eb76ac..e40a338 100644
--- a/src/components/EmptyChat.tsx
+++ b/src/components/EmptyChat.tsx
@@ -5,27 +5,7 @@ import Link from 'next/link';
import WeatherWidget from './WeatherWidget';
import NewsArticleWidget from './NewsArticleWidget';
-const EmptyChat = ({
- sendMessage,
- focusMode,
- setFocusMode,
- optimizationMode,
- setOptimizationMode,
- fileIds,
- setFileIds,
- files,
- setFiles,
-}: {
- sendMessage: (message: string) => void;
- focusMode: string;
- setFocusMode: (mode: string) => void;
- optimizationMode: string;
- setOptimizationMode: (mode: string) => void;
- fileIds: string[];
- setFileIds: (fileIds: string[]) => void;
- files: File[];
- setFiles: (files: File[]) => void;
-}) => {
+const EmptyChat = () => {
return (
@@ -38,17 +18,7 @@ const EmptyChat = ({
Research begins here.
-
+
diff --git a/src/components/EmptyChatMessageInput.tsx b/src/components/EmptyChatMessageInput.tsx
index 43d1e28..3c5ff6b 100644
--- a/src/components/EmptyChatMessageInput.tsx
+++ b/src/components/EmptyChatMessageInput.tsx
@@ -1,34 +1,15 @@
import { ArrowRight } from 'lucide-react';
import { useEffect, useRef, useState } from 'react';
import TextareaAutosize from 'react-textarea-autosize';
-import CopilotToggle from './MessageInputActions/Copilot';
import Focus from './MessageInputActions/Focus';
import Optimization from './MessageInputActions/Optimization';
import Attach from './MessageInputActions/Attach';
-import { File } from './ChatWindow';
+import { useChat } from '@/lib/hooks/useChat';
-const EmptyChatMessageInput = ({
- sendMessage,
- focusMode,
- setFocusMode,
- optimizationMode,
- setOptimizationMode,
- fileIds,
- setFileIds,
- files,
- setFiles,
-}: {
- sendMessage: (message: string) => void;
- focusMode: string;
- setFocusMode: (mode: string) => void;
- optimizationMode: string;
- setOptimizationMode: (mode: string) => void;
- fileIds: string[];
- setFileIds: (fileIds: string[]) => void;
- files: File[];
- setFiles: (files: File[]) => void;
-}) => {
- const [copilotEnabled, setCopilotEnabled] = useState(false);
+const EmptyChatMessageInput = () => {
+ const { sendMessage } = useChat();
+
+ /* const [copilotEnabled, setCopilotEnabled] = useState(false); */
const [message, setMessage] = useState('');
const inputRef = useRef
(null);
@@ -84,20 +65,11 @@ const EmptyChatMessageInput = ({
/>
-
+
);
diff --git a/src/lib/config.ts b/src/lib/config.ts
index d885e13..79d69dc 100644
--- a/src/lib/config.ts
+++ b/src/lib/config.ts
@@ -31,6 +31,7 @@ interface Config {
};
OLLAMA: {
API_URL: string;
+ API_KEY: string;
};
DEEPSEEK: {
API_KEY: string;
@@ -86,6 +87,8 @@ export const getSearxngApiEndpoint = () =>
export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
+export const getOllamaApiKey = () => loadConfig().MODELS.OLLAMA.API_KEY;
+
export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
export const getAimlApiKey = () => loadConfig().MODELS.AIMLAPI.API_KEY;
diff --git a/src/lib/hooks/useChat.tsx b/src/lib/hooks/useChat.tsx
new file mode 100644
index 0000000..573ac6b
--- /dev/null
+++ b/src/lib/hooks/useChat.tsx
@@ -0,0 +1,643 @@
+'use client';
+
+import { Message } from '@/components/ChatWindow';
+import { createContext, useContext, useEffect, useRef, useState } from 'react';
+import crypto from 'crypto';
+import { useSearchParams } from 'next/navigation';
+import { toast } from 'sonner';
+import { Document } from '@langchain/core/documents';
+import { getSuggestions } from '../actions';
+
+type ChatContext = {
+ messages: Message[];
+ chatHistory: [string, string][];
+ files: File[];
+ fileIds: string[];
+ focusMode: string;
+ chatId: string | undefined;
+ optimizationMode: string;
+ isMessagesLoaded: boolean;
+ loading: boolean;
+ notFound: boolean;
+ messageAppeared: boolean;
+ isReady: boolean;
+ hasError: boolean;
+ setOptimizationMode: (mode: string) => void;
+ setFocusMode: (mode: string) => void;
+ setFiles: (files: File[]) => void;
+ setFileIds: (fileIds: string[]) => void;
+ sendMessage: (
+ message: string,
+ messageId?: string,
+ rewrite?: boolean,
+ ) => Promise
;
+ rewrite: (messageId: string) => void;
+};
+
+export interface File {
+ fileName: string;
+ fileExtension: string;
+ fileId: string;
+}
+
+interface ChatModelProvider {
+ name: string;
+ provider: string;
+}
+
+interface EmbeddingModelProvider {
+ name: string;
+ provider: string;
+}
+
+const checkConfig = async (
+ setChatModelProvider: (provider: ChatModelProvider) => void,
+ setEmbeddingModelProvider: (provider: EmbeddingModelProvider) => void,
+ setIsConfigReady: (ready: boolean) => void,
+ setHasError: (hasError: boolean) => void,
+) => {
+ try {
+ let chatModel = localStorage.getItem('chatModel');
+ let chatModelProvider = localStorage.getItem('chatModelProvider');
+ let embeddingModel = localStorage.getItem('embeddingModel');
+ let embeddingModelProvider = localStorage.getItem('embeddingModelProvider');
+
+ const autoImageSearch = localStorage.getItem('autoImageSearch');
+ const autoVideoSearch = localStorage.getItem('autoVideoSearch');
+
+ if (!autoImageSearch) {
+ localStorage.setItem('autoImageSearch', 'true');
+ }
+
+ if (!autoVideoSearch) {
+ localStorage.setItem('autoVideoSearch', 'false');
+ }
+
+ const providers = await fetch(`/api/models`, {
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ }).then(async (res) => {
+ if (!res.ok)
+ throw new Error(
+ `Failed to fetch models: ${res.status} ${res.statusText}`,
+ );
+ return res.json();
+ });
+
+ if (
+ !chatModel ||
+ !chatModelProvider ||
+ !embeddingModel ||
+ !embeddingModelProvider
+ ) {
+ if (!chatModel || !chatModelProvider) {
+ const chatModelProviders = providers.chatModelProviders;
+ const chatModelProvidersKeys = Object.keys(chatModelProviders);
+
+ if (!chatModelProviders || chatModelProvidersKeys.length === 0) {
+ return toast.error('No chat models available');
+ } else {
+ chatModelProvider =
+ chatModelProvidersKeys.find(
+ (provider) =>
+ Object.keys(chatModelProviders[provider]).length > 0,
+ ) || chatModelProvidersKeys[0];
+ }
+
+ if (
+ chatModelProvider === 'custom_openai' &&
+ Object.keys(chatModelProviders[chatModelProvider]).length === 0
+ ) {
+ toast.error(
+ "Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
+ );
+ return setHasError(true);
+ }
+
+ chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
+ }
+
+ if (!embeddingModel || !embeddingModelProvider) {
+ const embeddingModelProviders = providers.embeddingModelProviders;
+
+ if (
+ !embeddingModelProviders ||
+ Object.keys(embeddingModelProviders).length === 0
+ )
+ return toast.error('No embedding models available');
+
+ embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
+ embeddingModel = Object.keys(
+ embeddingModelProviders[embeddingModelProvider],
+ )[0];
+ }
+
+ localStorage.setItem('chatModel', chatModel!);
+ localStorage.setItem('chatModelProvider', chatModelProvider);
+ localStorage.setItem('embeddingModel', embeddingModel!);
+ localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
+ } else {
+ const chatModelProviders = providers.chatModelProviders;
+ const embeddingModelProviders = providers.embeddingModelProviders;
+
+ if (
+ Object.keys(chatModelProviders).length > 0 &&
+ (!chatModelProviders[chatModelProvider] ||
+ Object.keys(chatModelProviders[chatModelProvider]).length === 0)
+ ) {
+ const chatModelProvidersKeys = Object.keys(chatModelProviders);
+ chatModelProvider =
+ chatModelProvidersKeys.find(
+ (key) => Object.keys(chatModelProviders[key]).length > 0,
+ ) || chatModelProvidersKeys[0];
+
+ localStorage.setItem('chatModelProvider', chatModelProvider);
+ }
+
+ if (
+ chatModelProvider &&
+ !chatModelProviders[chatModelProvider][chatModel]
+ ) {
+ if (
+ chatModelProvider === 'custom_openai' &&
+ Object.keys(chatModelProviders[chatModelProvider]).length === 0
+ ) {
+ toast.error(
+ "Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
+ );
+ return setHasError(true);
+ }
+
+ chatModel = Object.keys(
+ chatModelProviders[
+ Object.keys(chatModelProviders[chatModelProvider]).length > 0
+ ? chatModelProvider
+ : Object.keys(chatModelProviders)[0]
+ ],
+ )[0];
+
+ localStorage.setItem('chatModel', chatModel);
+ }
+
+ if (
+ Object.keys(embeddingModelProviders).length > 0 &&
+ !embeddingModelProviders[embeddingModelProvider]
+ ) {
+ embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
+ localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
+ }
+
+ if (
+ embeddingModelProvider &&
+ !embeddingModelProviders[embeddingModelProvider][embeddingModel]
+ ) {
+ embeddingModel = Object.keys(
+ embeddingModelProviders[embeddingModelProvider],
+ )[0];
+ localStorage.setItem('embeddingModel', embeddingModel);
+ }
+ }
+
+ setChatModelProvider({
+ name: chatModel!,
+ provider: chatModelProvider,
+ });
+
+ setEmbeddingModelProvider({
+ name: embeddingModel!,
+ provider: embeddingModelProvider,
+ });
+
+ setIsConfigReady(true);
+ } catch (err) {
+ console.error('An error occurred while checking the configuration:', err);
+ setIsConfigReady(false);
+ setHasError(true);
+ }
+};
+
+const loadMessages = async (
+ chatId: string,
+ setMessages: (messages: Message[]) => void,
+ setIsMessagesLoaded: (loaded: boolean) => void,
+ setChatHistory: (history: [string, string][]) => void,
+ setFocusMode: (mode: string) => void,
+ setNotFound: (notFound: boolean) => void,
+ setFiles: (files: File[]) => void,
+ setFileIds: (fileIds: string[]) => void,
+) => {
+ const res = await fetch(`/api/chats/${chatId}`, {
+ method: 'GET',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ });
+
+ if (res.status === 404) {
+ setNotFound(true);
+ setIsMessagesLoaded(true);
+ return;
+ }
+
+ const data = await res.json();
+
+ const messages = data.messages.map((msg: any) => {
+ return {
+ ...msg,
+ ...JSON.parse(msg.metadata),
+ };
+ }) as Message[];
+
+ setMessages(messages);
+
+ const history = messages.map((msg) => {
+ return [msg.role, msg.content];
+ }) as [string, string][];
+
+ console.debug(new Date(), 'app:messages_loaded');
+
+ document.title = messages[0].content;
+
+ const files = data.chat.files.map((file: any) => {
+ return {
+ fileName: file.name,
+ fileExtension: file.name.split('.').pop(),
+ fileId: file.fileId,
+ };
+ });
+
+ setFiles(files);
+ setFileIds(files.map((file: File) => file.fileId));
+
+ setChatHistory(history);
+ setFocusMode(data.chat.focusMode);
+ setIsMessagesLoaded(true);
+};
+
+export const chatContext = createContext({
+ chatHistory: [],
+ chatId: '',
+ fileIds: [],
+ files: [],
+ focusMode: '',
+ hasError: false,
+ isMessagesLoaded: false,
+ isReady: false,
+ loading: false,
+ messageAppeared: false,
+ messages: [],
+ notFound: false,
+ optimizationMode: '',
+ rewrite: () => {},
+ sendMessage: async () => {},
+ setFileIds: () => {},
+ setFiles: () => {},
+ setFocusMode: () => {},
+ setOptimizationMode: () => {},
+});
+
+export const ChatProvider = ({
+ children,
+ id,
+}: {
+ children: React.ReactNode;
+ id?: string;
+}) => {
+ const searchParams = useSearchParams();
+ const initialMessage = searchParams.get('q');
+
+ const [chatId, setChatId] = useState(id);
+ const [newChatCreated, setNewChatCreated] = useState(false);
+
+ const [loading, setLoading] = useState(false);
+ const [messageAppeared, setMessageAppeared] = useState(false);
+
+ const [chatHistory, setChatHistory] = useState<[string, string][]>([]);
+ const [messages, setMessages] = useState([]);
+
+ const [files, setFiles] = useState([]);
+ const [fileIds, setFileIds] = useState([]);
+
+ const [focusMode, setFocusMode] = useState('webSearch');
+ const [optimizationMode, setOptimizationMode] = useState('speed');
+
+ const [isMessagesLoaded, setIsMessagesLoaded] = useState(false);
+
+ const [notFound, setNotFound] = useState(false);
+
+ const [chatModelProvider, setChatModelProvider] = useState(
+ {
+ name: '',
+ provider: '',
+ },
+ );
+
+ const [embeddingModelProvider, setEmbeddingModelProvider] =
+ useState({
+ name: '',
+ provider: '',
+ });
+
+ const [isConfigReady, setIsConfigReady] = useState(false);
+ const [hasError, setHasError] = useState(false);
+ const [isReady, setIsReady] = useState(false);
+
+ const messagesRef = useRef([]);
+
+ useEffect(() => {
+ checkConfig(
+ setChatModelProvider,
+ setEmbeddingModelProvider,
+ setIsConfigReady,
+ setHasError,
+ );
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, []);
+
+ useEffect(() => {
+ if (
+ chatId &&
+ !newChatCreated &&
+ !isMessagesLoaded &&
+ messages.length === 0
+ ) {
+ loadMessages(
+ chatId,
+ setMessages,
+ setIsMessagesLoaded,
+ setChatHistory,
+ setFocusMode,
+ setNotFound,
+ setFiles,
+ setFileIds,
+ );
+ } else if (!chatId) {
+ setNewChatCreated(true);
+ setIsMessagesLoaded(true);
+ setChatId(crypto.randomBytes(20).toString('hex'));
+ }
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, []);
+
+ useEffect(() => {
+ messagesRef.current = messages;
+ }, [messages]);
+
+ useEffect(() => {
+ if (isMessagesLoaded && isConfigReady) {
+ setIsReady(true);
+ console.debug(new Date(), 'app:ready');
+ } else {
+ setIsReady(false);
+ }
+ }, [isMessagesLoaded, isConfigReady]);
+
+ const rewrite = (messageId: string) => {
+ const index = messages.findIndex((msg) => msg.messageId === messageId);
+
+ if (index === -1) return;
+
+ const message = messages[index - 1];
+
+ setMessages((prev) => {
+ return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
+ });
+ setChatHistory((prev) => {
+ return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
+ });
+
+ sendMessage(message.content, message.messageId, true);
+ };
+
+ useEffect(() => {
+ if (isReady && initialMessage && isConfigReady) {
+ if (!isConfigReady) {
+ toast.error('Cannot send message before the configuration is ready');
+ return;
+ }
+ sendMessage(initialMessage);
+ }
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [isConfigReady, isReady, initialMessage]);
+
+ const sendMessage: ChatContext['sendMessage'] = async (
+ message,
+ messageId,
+ rewrite = false,
+ ) => {
+ if (loading) return;
+ setLoading(true);
+ setMessageAppeared(false);
+
+ let sources: Document[] | undefined = undefined;
+ let recievedMessage = '';
+ let added = false;
+
+ messageId = messageId ?? crypto.randomBytes(7).toString('hex');
+
+ setMessages((prevMessages) => [
+ ...prevMessages,
+ {
+ content: message,
+ messageId: messageId,
+ chatId: chatId!,
+ role: 'user',
+ createdAt: new Date(),
+ },
+ ]);
+
+ const messageHandler = async (data: any) => {
+ if (data.type === 'error') {
+ toast.error(data.data);
+ setLoading(false);
+ return;
+ }
+
+ if (data.type === 'sources') {
+ sources = data.data;
+ if (!added) {
+ setMessages((prevMessages) => [
+ ...prevMessages,
+ {
+ content: '',
+ messageId: data.messageId,
+ chatId: chatId!,
+ role: 'assistant',
+ sources: sources,
+ createdAt: new Date(),
+ },
+ ]);
+ added = true;
+ }
+ setMessageAppeared(true);
+ }
+
+ if (data.type === 'message') {
+ if (!added) {
+ setMessages((prevMessages) => [
+ ...prevMessages,
+ {
+ content: data.data,
+ messageId: data.messageId,
+ chatId: chatId!,
+ role: 'assistant',
+ sources: sources,
+ createdAt: new Date(),
+ },
+ ]);
+ added = true;
+ }
+
+ setMessages((prev) =>
+ prev.map((message) => {
+ if (message.messageId === data.messageId) {
+ return { ...message, content: message.content + data.data };
+ }
+
+ return message;
+ }),
+ );
+
+ recievedMessage += data.data;
+ setMessageAppeared(true);
+ }
+
+ if (data.type === 'messageEnd') {
+ setChatHistory((prevHistory) => [
+ ...prevHistory,
+ ['human', message],
+ ['assistant', recievedMessage],
+ ]);
+
+ setLoading(false);
+
+ const lastMsg = messagesRef.current[messagesRef.current.length - 1];
+
+ const autoImageSearch = localStorage.getItem('autoImageSearch');
+ const autoVideoSearch = localStorage.getItem('autoVideoSearch');
+
+ if (autoImageSearch === 'true') {
+ document
+ .getElementById(`search-images-${lastMsg.messageId}`)
+ ?.click();
+ }
+
+ if (autoVideoSearch === 'true') {
+ document
+ .getElementById(`search-videos-${lastMsg.messageId}`)
+ ?.click();
+ }
+
+ if (
+ lastMsg.role === 'assistant' &&
+ lastMsg.sources &&
+ lastMsg.sources.length > 0 &&
+ !lastMsg.suggestions
+ ) {
+ const suggestions = await getSuggestions(messagesRef.current);
+ setMessages((prev) =>
+ prev.map((msg) => {
+ if (msg.messageId === lastMsg.messageId) {
+ return { ...msg, suggestions: suggestions };
+ }
+ return msg;
+ }),
+ );
+ }
+ }
+ };
+
+ const messageIndex = messages.findIndex((m) => m.messageId === messageId);
+
+ const res = await fetch('/api/chat', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify({
+ content: message,
+ message: {
+ messageId: messageId,
+ chatId: chatId!,
+ content: message,
+ },
+ chatId: chatId!,
+ files: fileIds,
+ focusMode: focusMode,
+ optimizationMode: optimizationMode,
+ history: rewrite
+ ? chatHistory.slice(0, messageIndex === -1 ? undefined : messageIndex)
+ : chatHistory,
+ chatModel: {
+ name: chatModelProvider.name,
+ provider: chatModelProvider.provider,
+ },
+ embeddingModel: {
+ name: embeddingModelProvider.name,
+ provider: embeddingModelProvider.provider,
+ },
+ systemInstructions: localStorage.getItem('systemInstructions'),
+ }),
+ });
+
+ if (!res.body) throw new Error('No response body');
+
+ const reader = res.body?.getReader();
+ const decoder = new TextDecoder('utf-8');
+
+ let partialChunk = '';
+
+ while (true) {
+ const { value, done } = await reader.read();
+ if (done) break;
+
+ partialChunk += decoder.decode(value, { stream: true });
+
+ try {
+ const messages = partialChunk.split('\n');
+ for (const msg of messages) {
+ if (!msg.trim()) continue;
+ const json = JSON.parse(msg);
+ messageHandler(json);
+ }
+ partialChunk = '';
+ } catch (error) {
+ console.warn('Incomplete JSON, waiting for next chunk...');
+ }
+ }
+ };
+
+  return (
+    <chatContext.Provider
+      value={{
+        chatHistory,
+        chatId: chatId!,
+        fileIds,
+        files,
+        focusMode,
+        hasError,
+        isMessagesLoaded,
+        isReady,
+        loading,
+        messageAppeared,
+        messages,
+        notFound,
+        optimizationMode,
+        rewrite,
+        sendMessage,
+        setFileIds,
+        setFiles,
+        setFocusMode,
+        setOptimizationMode,
+      }}
+    >
+      {children}
+    </chatContext.Provider>
+  );
+};
+
+export const useChat = () => {
+ const ctx = useContext(chatContext);
+ return ctx;
+};
diff --git a/src/lib/providers/anthropic.ts b/src/lib/providers/anthropic.ts
index 2b0f2cc..6af2115 100644
--- a/src/lib/providers/anthropic.ts
+++ b/src/lib/providers/anthropic.ts
@@ -9,6 +9,18 @@ export const PROVIDER_INFO = {
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
const anthropicChatModels: Record<string, any>[] = [
+ {
+ displayName: 'Claude 4.1 Opus',
+ key: 'claude-opus-4-1-20250805',
+ },
+ {
+ displayName: 'Claude 4 Opus',
+ key: 'claude-opus-4-20250514',
+ },
+ {
+ displayName: 'Claude 4 Sonnet',
+ key: 'claude-sonnet-4-20250514',
+ },
{
displayName: 'Claude 3.7 Sonnet',
key: 'claude-3-7-sonnet-20250219',
diff --git a/src/lib/providers/gemini.ts b/src/lib/providers/gemini.ts
index a9ef4d5..418e0a4 100644
--- a/src/lib/providers/gemini.ts
+++ b/src/lib/providers/gemini.ts
@@ -17,6 +17,10 @@ const geminiChatModels: Record<string, any>[] = [
displayName: 'Gemini 2.5 Flash',
key: 'gemini-2.5-flash',
},
+ {
+ displayName: 'Gemini 2.5 Flash-Lite',
+ key: 'gemini-2.5-flash-lite',
+ },
{
displayName: 'Gemini 2.5 Pro',
key: 'gemini-2.5-pro',
diff --git a/src/lib/providers/ollama.ts b/src/lib/providers/ollama.ts
index d5c7899..cb0b848 100644
--- a/src/lib/providers/ollama.ts
+++ b/src/lib/providers/ollama.ts
@@ -1,5 +1,5 @@
import axios from 'axios';
-import { getKeepAlive, getOllamaApiEndpoint } from '../config';
+import { getKeepAlive, getOllamaApiEndpoint, getOllamaApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';
export const PROVIDER_INFO = {
@@ -11,6 +11,7 @@ import { OllamaEmbeddings } from '@langchain/ollama';
export const loadOllamaChatModels = async () => {
const ollamaApiEndpoint = getOllamaApiEndpoint();
+ const ollamaApiKey = getOllamaApiKey();
if (!ollamaApiEndpoint) return {};
@@ -33,6 +34,9 @@ export const loadOllamaChatModels = async () => {
model: model.model,
temperature: 0.7,
keepAlive: getKeepAlive(),
+ ...(ollamaApiKey
+ ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
+ : {}),
}),
};
});
@@ -46,6 +50,7 @@ export const loadOllamaChatModels = async () => {
export const loadOllamaEmbeddingModels = async () => {
const ollamaApiEndpoint = getOllamaApiEndpoint();
+ const ollamaApiKey = getOllamaApiKey();
if (!ollamaApiEndpoint) return {};
@@ -66,6 +71,9 @@ export const loadOllamaEmbeddingModels = async () => {
model: new OllamaEmbeddings({
baseUrl: ollamaApiEndpoint,
model: model.model,
+ ...(ollamaApiKey
+ ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
+ : {}),
}),
};
});
diff --git a/src/lib/providers/openai.ts b/src/lib/providers/openai.ts
index c857b0e..7e26763 100644
--- a/src/lib/providers/openai.ts
+++ b/src/lib/providers/openai.ts
@@ -42,6 +42,18 @@ const openaiChatModels: Record<string, any>[] = [
displayName: 'GPT 4.1',
key: 'gpt-4.1',
},
+ {
+ displayName: 'GPT 5 nano',
+ key: 'gpt-5-nano',
+ },
+ {
+ displayName: 'GPT 5 mini',
+ key: 'gpt-5-mini',
+ },
+ {
+ displayName: 'GPT 5',
+ key: 'gpt-5',
+ },
];
const openaiEmbeddingModels: Record<string, any>[] = [
@@ -69,7 +81,7 @@ export const loadOpenAIChatModels = async () => {
model: new ChatOpenAI({
apiKey: openaiApiKey,
modelName: model.key,
- temperature: 0.7,
+ temperature: model.key.includes('gpt-5') ? 1 : 0.7,
}) as unknown as BaseChatModel,
};
});