diff --git a/package.json b/package.json
index 5715c2a..876361e 100644
--- a/package.json
+++ b/package.json
@@ -20,6 +20,7 @@
"@langchain/core": "^0.3.66",
"@langchain/google-genai": "^0.2.15",
"@langchain/groq": "^0.2.3",
+ "@langchain/langgraph": "^0.4.0",
"@langchain/ollama": "^0.2.3",
"@langchain/openai": "^0.6.2",
"@langchain/textsplitters": "^0.1.0",
@@ -42,6 +43,7 @@
"pdf-parse": "^1.1.1",
"react": "^18",
"react-dom": "^18",
+ "react-syntax-highlighter": "^15.6.1",
"react-text-to-speech": "^0.14.5",
"react-textarea-autosize": "^8.5.3",
"sonner": "^1.4.41",
@@ -58,6 +60,7 @@
"@types/pdf-parse": "^1.1.4",
"@types/react": "^18",
"@types/react-dom": "^18",
+ "@types/react-syntax-highlighter": "^15.5.13",
"autoprefixer": "^10.0.1",
"drizzle-kit": "^0.30.5",
"eslint": "^8",
@@ -65,6 +68,6 @@
"postcss": "^8",
"prettier": "^3.2.5",
"tailwindcss": "^3.3.0",
- "typescript": "^5"
+ "typescript": "5.8.3"
}
}
diff --git a/src/app/api/chat/route.ts b/src/app/api/chat/route.ts
index ba88da6..2d53b75 100644
--- a/src/app/api/chat/route.ts
+++ b/src/app/api/chat/route.ts
@@ -1,7 +1,11 @@
+import prompts from '@/lib/prompts';
+import MetaSearchAgent from '@/lib/search/metaSearchAgent';
import crypto from 'crypto';
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
import { EventEmitter } from 'stream';
import {
+ chatModelProviders,
+ embeddingModelProviders,
getAvailableChatModelProviders,
getAvailableEmbeddingModelProviders,
} from '@/lib/providers';
@@ -134,8 +138,6 @@ const handleHistorySave = async (
where: eq(chats.id, message.chatId),
});
- const fileData = files.map(getFileDetails);
-
if (!chat) {
await db
.insert(chats)
@@ -144,15 +146,9 @@ const handleHistorySave = async (
title: message.content,
createdAt: new Date().toString(),
focusMode: focusMode,
- files: fileData,
- })
- .execute();
- } else if (JSON.stringify(chat.files ?? []) != JSON.stringify(fileData)) {
- db.update(chats)
- .set({
files: files.map(getFileDetails),
})
- .where(eq(chats.id, message.chatId));
+ .execute();
}
const messageExists = await db.query.messages.findFirst({
diff --git a/src/app/api/config/route.ts b/src/app/api/config/route.ts
index f117cce..0c11b23 100644
--- a/src/app/api/config/route.ts
+++ b/src/app/api/config/route.ts
@@ -11,7 +11,6 @@ import {
getAimlApiKey,
getLMStudioApiEndpoint,
updateConfig,
- getOllamaApiKey,
} from '@/lib/config';
import {
getAvailableChatModelProviders,
@@ -54,7 +53,6 @@ export const GET = async (req: Request) => {
config['openaiApiKey'] = getOpenaiApiKey();
config['ollamaApiUrl'] = getOllamaApiEndpoint();
- config['ollamaApiKey'] = getOllamaApiKey();
config['lmStudioApiUrl'] = getLMStudioApiEndpoint();
config['anthropicApiKey'] = getAnthropicApiKey();
config['groqApiKey'] = getGroqApiKey();
@@ -95,7 +93,6 @@ export const POST = async (req: Request) => {
},
OLLAMA: {
API_URL: config.ollamaApiUrl,
- API_KEY: config.ollamaApiKey,
},
DEEPSEEK: {
API_KEY: config.deepseekApiKey,
diff --git a/src/app/c/[chatId]/page.tsx b/src/app/c/[chatId]/page.tsx
index 672107a..aac125a 100644
--- a/src/app/c/[chatId]/page.tsx
+++ b/src/app/c/[chatId]/page.tsx
@@ -1,17 +1,9 @@
-'use client';
-
import ChatWindow from '@/components/ChatWindow';
-import { useParams } from 'next/navigation';
import React from 'react';
-import { ChatProvider } from '@/lib/hooks/useChat';
-const Page = () => {
- const { chatId }: { chatId: string } = useParams();
- return (
-    <ChatProvider>
-      <ChatWindow id={chatId} />
-    </ChatProvider>
- );
+const Page = ({ params }: { params: Promise<{ chatId: string }> }) => {
+ const { chatId } = React.use(params);
+  return <ChatWindow id={chatId} />;
};
export default Page;
diff --git a/src/app/page.tsx b/src/app/page.tsx
index 25981b5..e18aca9 100644
--- a/src/app/page.tsx
+++ b/src/app/page.tsx
@@ -1,5 +1,4 @@
import ChatWindow from '@/components/ChatWindow';
-import { ChatProvider } from '@/lib/hooks/useChat';
import { Metadata } from 'next';
import { Suspense } from 'react';
@@ -12,9 +11,7 @@ const Home = () => {
  return (
    <div>
      <Suspense>
-        <ChatProvider>
-          <ChatWindow />
-        </ChatProvider>
+        <ChatWindow />
      </Suspense>
    </div>
  );
diff --git a/src/app/settings/page.tsx b/src/app/settings/page.tsx
index 6fb8255..1b13c9c 100644
--- a/src/app/settings/page.tsx
+++ b/src/app/settings/page.tsx
@@ -21,7 +21,6 @@ interface SettingsType {
anthropicApiKey: string;
geminiApiKey: string;
ollamaApiUrl: string;
- ollamaApiKey: string;
lmStudioApiUrl: string;
deepseekApiKey: string;
aimlApiKey: string;
@@ -819,25 +818,6 @@ const Page = () => {
/>
-            <div className="flex flex-col space-y-1">
-              <p className="text-black/70 dark:text-white/70 text-sm">
-                Ollama API Key (Can be left blank)
-              </p>
-
-              <Input
-                type="text"
-                placeholder="Ollama API Key"
-                value={config.ollamaApiKey}
-                isSaving={savingStates['ollamaApiKey']}
-                onChange={(e) => {
-                  setConfig((prev) => ({
-                    ...prev!,
-                    ollamaApiKey: e.target.value,
-                  }));
-                }}
-                onSave={(value) => saveConfig('ollamaApiKey', value)}
-              />
-            </div>
GROQ API Key
diff --git a/src/components/Chat.tsx b/src/components/Chat.tsx
index a5d8cf9..0cf125b 100644
--- a/src/components/Chat.tsx
+++ b/src/components/Chat.tsx
@@ -5,11 +5,28 @@ import MessageInput from './MessageInput';
import { File, Message } from './ChatWindow';
import MessageBox from './MessageBox';
import MessageBoxLoading from './MessageBoxLoading';
-import { useChat } from '@/lib/hooks/useChat';
-
-const Chat = () => {
- const { messages, loading, messageAppeared } = useChat();
+const Chat = ({
+ loading,
+ messages,
+ sendMessage,
+ messageAppeared,
+ rewrite,
+ fileIds,
+ setFileIds,
+ files,
+ setFiles,
+}: {
+ messages: Message[];
+ sendMessage: (message: string) => void;
+ loading: boolean;
+ messageAppeared: boolean;
+ rewrite: (messageId: string) => void;
+ fileIds: string[];
+ setFileIds: (fileIds: string[]) => void;
+ files: File[];
+ setFiles: (files: File[]) => void;
+}) => {
const [dividerWidth, setDividerWidth] = useState(0);
  const dividerRef = useRef<HTMLDivElement | null>(null);
  const messageEnd = useRef<HTMLDivElement | null>(null);
@@ -55,8 +72,12 @@ const Chat = () => {
              <MessageBox
                key={i}
message={msg}
messageIndex={i}
+ history={messages}
+ loading={loading}
dividerRef={isLast ? dividerRef : undefined}
isLast={isLast}
+ rewrite={rewrite}
+ sendMessage={sendMessage}
/>
{!isLast && msg.role === 'assistant' && (
@@ -71,7 +92,14 @@ const Chat = () => {
className="bottom-24 lg:bottom-10 fixed z-40"
style={{ width: dividerWidth }}
>
-
+
)}
diff --git a/src/components/ChatWindow.tsx b/src/components/ChatWindow.tsx
index 0d40c83..de6d869 100644
--- a/src/components/ChatWindow.tsx
+++ b/src/components/ChatWindow.tsx
@@ -1,13 +1,17 @@
'use client';
+import { useEffect, useRef, useState } from 'react';
import { Document } from '@langchain/core/documents';
import Navbar from './Navbar';
import Chat from './Chat';
import EmptyChat from './EmptyChat';
+import crypto from 'crypto';
+import { toast } from 'sonner';
+import { useSearchParams } from 'next/navigation';
+import { getSuggestions } from '@/lib/actions';
import { Settings } from 'lucide-react';
import Link from 'next/link';
import NextError from 'next/error';
-import { useChat } from '@/lib/hooks/useChat';
export type Message = {
messageId: string;
@@ -25,8 +29,557 @@ export interface File {
fileId: string;
}
-const ChatWindow = () => {
- const { hasError, isReady, notFound, messages } = useChat();
+interface ChatModelProvider {
+ name: string;
+ provider: string;
+}
+
+interface EmbeddingModelProvider {
+ name: string;
+ provider: string;
+}
+
+const checkConfig = async (
+ setChatModelProvider: (provider: ChatModelProvider) => void,
+ setEmbeddingModelProvider: (provider: EmbeddingModelProvider) => void,
+ setIsConfigReady: (ready: boolean) => void,
+ setHasError: (hasError: boolean) => void,
+) => {
+ try {
+ let chatModel = localStorage.getItem('chatModel');
+ let chatModelProvider = localStorage.getItem('chatModelProvider');
+ let embeddingModel = localStorage.getItem('embeddingModel');
+ let embeddingModelProvider = localStorage.getItem('embeddingModelProvider');
+
+ const autoImageSearch = localStorage.getItem('autoImageSearch');
+ const autoVideoSearch = localStorage.getItem('autoVideoSearch');
+
+ if (!autoImageSearch) {
+ localStorage.setItem('autoImageSearch', 'true');
+ }
+
+ if (!autoVideoSearch) {
+ localStorage.setItem('autoVideoSearch', 'false');
+ }
+
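+    // Ask the server which chat/embedding model providers are configured, then fill in any missing local selections with sensible defaults.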
+ const providers = await fetch(`/api/models`, {
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ }).then(async (res) => {
+ if (!res.ok)
+ throw new Error(
+ `Failed to fetch models: ${res.status} ${res.statusText}`,
+ );
+ return res.json();
+ });
+
+ if (
+ !chatModel ||
+ !chatModelProvider ||
+ !embeddingModel ||
+ !embeddingModelProvider
+ ) {
+ if (!chatModel || !chatModelProvider) {
+ const chatModelProviders = providers.chatModelProviders;
+ const chatModelProvidersKeys = Object.keys(chatModelProviders);
+
+ if (!chatModelProviders || chatModelProvidersKeys.length === 0) {
+ return toast.error('No chat models available');
+ } else {
+ chatModelProvider =
+ chatModelProvidersKeys.find(
+ (provider) =>
+ Object.keys(chatModelProviders[provider]).length > 0,
+ ) || chatModelProvidersKeys[0];
+ }
+
+ if (
+ chatModelProvider === 'custom_openai' &&
+ Object.keys(chatModelProviders[chatModelProvider]).length === 0
+ ) {
+ toast.error(
+ "Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
+ );
+ return setHasError(true);
+ }
+
+ chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
+ }
+
+ if (!embeddingModel || !embeddingModelProvider) {
+ const embeddingModelProviders = providers.embeddingModelProviders;
+
+ if (
+ !embeddingModelProviders ||
+ Object.keys(embeddingModelProviders).length === 0
+ )
+ return toast.error('No embedding models available');
+
+ embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
+ embeddingModel = Object.keys(
+ embeddingModelProviders[embeddingModelProvider],
+ )[0];
+ }
+
+ localStorage.setItem('chatModel', chatModel!);
+ localStorage.setItem('chatModelProvider', chatModelProvider);
+ localStorage.setItem('embeddingModel', embeddingModel!);
+ localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
+ } else {
+ const chatModelProviders = providers.chatModelProviders;
+ const embeddingModelProviders = providers.embeddingModelProviders;
+
+ if (
+ Object.keys(chatModelProviders).length > 0 &&
+ (!chatModelProviders[chatModelProvider] ||
+ Object.keys(chatModelProviders[chatModelProvider]).length === 0)
+ ) {
+ const chatModelProvidersKeys = Object.keys(chatModelProviders);
+ chatModelProvider =
+ chatModelProvidersKeys.find(
+ (key) => Object.keys(chatModelProviders[key]).length > 0,
+ ) || chatModelProvidersKeys[0];
+
+ localStorage.setItem('chatModelProvider', chatModelProvider);
+ }
+
+ if (
+ chatModelProvider &&
+ !chatModelProviders[chatModelProvider][chatModel]
+ ) {
+ if (
+ chatModelProvider === 'custom_openai' &&
+ Object.keys(chatModelProviders[chatModelProvider]).length === 0
+ ) {
+ toast.error(
+ "Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
+ );
+ return setHasError(true);
+ }
+
+ chatModel = Object.keys(
+ chatModelProviders[
+ Object.keys(chatModelProviders[chatModelProvider]).length > 0
+ ? chatModelProvider
+ : Object.keys(chatModelProviders)[0]
+ ],
+ )[0];
+
+ localStorage.setItem('chatModel', chatModel);
+ }
+
+ if (
+ Object.keys(embeddingModelProviders).length > 0 &&
+ !embeddingModelProviders[embeddingModelProvider]
+ ) {
+ embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
+ localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
+ }
+
+ if (
+ embeddingModelProvider &&
+ !embeddingModelProviders[embeddingModelProvider][embeddingModel]
+ ) {
+ embeddingModel = Object.keys(
+ embeddingModelProviders[embeddingModelProvider],
+ )[0];
+ localStorage.setItem('embeddingModel', embeddingModel);
+ }
+ }
+
+ setChatModelProvider({
+ name: chatModel!,
+ provider: chatModelProvider,
+ });
+
+ setEmbeddingModelProvider({
+ name: embeddingModel!,
+ provider: embeddingModelProvider,
+ });
+
+ setIsConfigReady(true);
+ } catch (err) {
+ console.error('An error occurred while checking the configuration:', err);
+ setIsConfigReady(false);
+ setHasError(true);
+ }
+};
+
+const loadMessages = async (
+ chatId: string,
+ setMessages: (messages: Message[]) => void,
+ setIsMessagesLoaded: (loaded: boolean) => void,
+ setChatHistory: (history: [string, string][]) => void,
+ setFocusMode: (mode: string) => void,
+ setNotFound: (notFound: boolean) => void,
+ setFiles: (files: File[]) => void,
+ setFileIds: (fileIds: string[]) => void,
+) => {
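+  // Fetch a saved chat by id and hydrate messages, chat history, attached files, and focus mode into component state.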
+ const res = await fetch(`/api/chats/${chatId}`, {
+ method: 'GET',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ });
+
+ if (res.status === 404) {
+ setNotFound(true);
+ setIsMessagesLoaded(true);
+ return;
+ }
+
+ const data = await res.json();
+
+ const messages = data.messages.map((msg: any) => {
+ return {
+ ...msg,
+ ...JSON.parse(msg.metadata),
+ };
+ }) as Message[];
+
+ setMessages(messages);
+
+ const history = messages.map((msg) => {
+ return [msg.role, msg.content];
+ }) as [string, string][];
+
+ console.debug(new Date(), 'app:messages_loaded');
+
+ document.title = messages[0].content;
+
+ const files = data.chat.files.map((file: any) => {
+ return {
+ fileName: file.name,
+ fileExtension: file.name.split('.').pop(),
+ fileId: file.fileId,
+ };
+ });
+
+ setFiles(files);
+ setFileIds(files.map((file: File) => file.fileId));
+
+ setChatHistory(history);
+ setFocusMode(data.chat.focusMode);
+ setIsMessagesLoaded(true);
+};
+
+const ChatWindow = ({ id }: { id?: string }) => {
+ const searchParams = useSearchParams();
+ const initialMessage = searchParams.get('q');
+
+  const [chatId, setChatId] = useState<string | undefined>(id);
+ const [newChatCreated, setNewChatCreated] = useState(false);
+
+  const [chatModelProvider, setChatModelProvider] = useState<ChatModelProvider>(
+ {
+ name: '',
+ provider: '',
+ },
+ );
+
+ const [embeddingModelProvider, setEmbeddingModelProvider] =
+    useState<EmbeddingModelProvider>({
+ name: '',
+ provider: '',
+ });
+
+ const [isConfigReady, setIsConfigReady] = useState(false);
+ const [hasError, setHasError] = useState(false);
+ const [isReady, setIsReady] = useState(false);
+
+ useEffect(() => {
+ checkConfig(
+ setChatModelProvider,
+ setEmbeddingModelProvider,
+ setIsConfigReady,
+ setHasError,
+ );
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, []);
+
+ const [loading, setLoading] = useState(false);
+ const [messageAppeared, setMessageAppeared] = useState(false);
+
+ const [chatHistory, setChatHistory] = useState<[string, string][]>([]);
+  const [messages, setMessages] = useState<Message[]>([]);
+
+  const [files, setFiles] = useState<File[]>([]);
+  const [fileIds, setFileIds] = useState<string[]>([]);
+
+ const [focusMode, setFocusMode] = useState('webSearch');
+ const [optimizationMode, setOptimizationMode] = useState('speed');
+
+ const [isMessagesLoaded, setIsMessagesLoaded] = useState(false);
+
+ const [notFound, setNotFound] = useState(false);
+
+ useEffect(() => {
+ if (
+ chatId &&
+ !newChatCreated &&
+ !isMessagesLoaded &&
+ messages.length === 0
+ ) {
+ loadMessages(
+ chatId,
+ setMessages,
+ setIsMessagesLoaded,
+ setChatHistory,
+ setFocusMode,
+ setNotFound,
+ setFiles,
+ setFileIds,
+ );
+ } else if (!chatId) {
+ setNewChatCreated(true);
+ setIsMessagesLoaded(true);
+ setChatId(crypto.randomBytes(20).toString('hex'));
+ }
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, []);
+
+  const messagesRef = useRef<Message[]>([]);
+
+ useEffect(() => {
+ messagesRef.current = messages;
+ }, [messages]);
+
+ useEffect(() => {
+ if (isMessagesLoaded && isConfigReady) {
+ setIsReady(true);
+ console.debug(new Date(), 'app:ready');
+ } else {
+ setIsReady(false);
+ }
+ }, [isMessagesLoaded, isConfigReady]);
+
+ const sendMessage = async (
+ message: string,
+ messageId?: string,
+ rewrite = false,
+ ) => {
+ if (loading) return;
+ if (!isConfigReady) {
+ toast.error('Cannot send message before the configuration is ready');
+ return;
+ }
+
+ setLoading(true);
+ setMessageAppeared(false);
+
+ let sources: Document[] | undefined = undefined;
+ let recievedMessage = '';
+ let added = false;
+
+ messageId = messageId ?? crypto.randomBytes(7).toString('hex');
+
+ setMessages((prevMessages) => [
+ ...prevMessages,
+ {
+ content: message,
+ messageId: messageId,
+ chatId: chatId!,
+ role: 'user',
+ createdAt: new Date(),
+ },
+ ]);
+
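+    // Handles streamed events from /api/chat: 'error' aborts, 'sources' and 'message' build the assistant reply incrementally, 'messageEnd' finalizes it and triggers follow-ups.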
+ const messageHandler = async (data: any) => {
+ if (data.type === 'error') {
+ toast.error(data.data);
+ setLoading(false);
+ return;
+ }
+
+ if (data.type === 'sources') {
+ sources = data.data;
+ if (!added) {
+ setMessages((prevMessages) => [
+ ...prevMessages,
+ {
+ content: '',
+ messageId: data.messageId,
+ chatId: chatId!,
+ role: 'assistant',
+ sources: sources,
+ createdAt: new Date(),
+ },
+ ]);
+ added = true;
+ setMessageAppeared(true);
+ } else {
+ setMessages((prev) =>
+ prev.map((message) => {
+ if (message.messageId === data.messageId) {
+ return { ...message, sources: sources };
+ }
+
+ return message;
+ }),
+ );
+ }
+ }
+
+ if (data.type === 'message') {
+ if (!added) {
+ setMessages((prevMessages) => [
+ ...prevMessages,
+ {
+ content: data.data,
+ messageId: data.messageId,
+ chatId: chatId!,
+ role: 'assistant',
+ sources: sources,
+ createdAt: new Date(),
+ },
+ ]);
+ added = true;
+ } else {
+ setMessages((prev) =>
+ prev.map((message) => {
+ if (message.messageId === data.messageId) {
+ return { ...message, content: message.content + data.data };
+ }
+
+ return message;
+ }),
+ );
+
+ recievedMessage += data.data;
+ setMessageAppeared(true);
+ }
+ }
+
+ if (data.type === 'messageEnd') {
+ setChatHistory((prevHistory) => [
+ ...prevHistory,
+ ['human', message],
+ ['assistant', recievedMessage],
+ ]);
+
+ setLoading(false);
+
+ const lastMsg = messagesRef.current[messagesRef.current.length - 1];
+
+ const autoImageSearch = localStorage.getItem('autoImageSearch');
+ const autoVideoSearch = localStorage.getItem('autoVideoSearch');
+
+ if (autoImageSearch === 'true') {
+ document
+ .getElementById(`search-images-${lastMsg.messageId}`)
+ ?.click();
+ }
+
+ if (autoVideoSearch === 'true') {
+ document
+ .getElementById(`search-videos-${lastMsg.messageId}`)
+ ?.click();
+ }
+
+ if (
+ lastMsg.role === 'assistant' &&
+ lastMsg.sources &&
+ lastMsg.sources.length > 0 &&
+ !lastMsg.suggestions
+ ) {
+ const suggestions = await getSuggestions(messagesRef.current);
+ setMessages((prev) =>
+ prev.map((msg) => {
+ if (msg.messageId === lastMsg.messageId) {
+ return { ...msg, suggestions: suggestions };
+ }
+ return msg;
+ }),
+ );
+ }
+ }
+ };
+
+ const messageIndex = messages.findIndex((m) => m.messageId === messageId);
+
+ const res = await fetch('/api/chat', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify({
+ content: message,
+ message: {
+ messageId: messageId,
+ chatId: chatId!,
+ content: message,
+ },
+ chatId: chatId!,
+ files: fileIds,
+ focusMode: focusMode,
+ optimizationMode: optimizationMode,
+ history: rewrite
+ ? chatHistory.slice(0, messageIndex === -1 ? undefined : messageIndex)
+ : chatHistory,
+ chatModel: {
+ name: chatModelProvider.name,
+ provider: chatModelProvider.provider,
+ },
+ embeddingModel: {
+ name: embeddingModelProvider.name,
+ provider: embeddingModelProvider.provider,
+ },
+ systemInstructions: localStorage.getItem('systemInstructions'),
+ }),
+ });
+
+ if (!res.body) throw new Error('No response body');
+
+ const reader = res.body?.getReader();
+ const decoder = new TextDecoder('utf-8');
+
+ let partialChunk = '';
+
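+    // The response streams newline-delimited JSON; buffer partial chunks until a complete line parses.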
+ while (true) {
+ const { value, done } = await reader.read();
+ if (done) break;
+
+ partialChunk += decoder.decode(value, { stream: true });
+
+ try {
+ const messages = partialChunk.split('\n');
+ for (const msg of messages) {
+ if (!msg.trim()) continue;
+ const json = JSON.parse(msg);
+ messageHandler(json);
+ }
+ partialChunk = '';
+ } catch (error) {
+ console.warn('Incomplete JSON, waiting for next chunk...');
+ }
+ }
+ };
+
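+  // Rewrite: remove the selected prompt/response pair (and anything after it) from state, then resend the original prompt.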
+ const rewrite = (messageId: string) => {
+ const index = messages.findIndex((msg) => msg.messageId === messageId);
+
+ if (index === -1) return;
+
+ const message = messages[index - 1];
+
+ setMessages((prev) => {
+ return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
+ });
+ setChatHistory((prev) => {
+ return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
+ });
+
+ sendMessage(message.content, message.messageId, true);
+ };
+
+ useEffect(() => {
+ if (isReady && initialMessage && isConfigReady) {
+ sendMessage(initialMessage);
+ }
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [isConfigReady, isReady, initialMessage]);
+
if (hasError) {
return (
@@ -51,11 +604,31 @@ const ChatWindow = () => {
        {messages.length > 0 ? (
          <>
-            <Navbar />
-            <Chat />
+            <Navbar chatId={chatId!} messages={messages} />
+            <Chat
+              loading={loading}
+              messages={messages}
+              sendMessage={sendMessage}
+              messageAppeared={messageAppeared}
+              rewrite={rewrite}
+              fileIds={fileIds}
+              setFileIds={setFileIds}
+              files={files}
+              setFiles={setFiles}
+            />
          </>
        ) : (
-          <EmptyChat />
+          <EmptyChat
+            sendMessage={sendMessage}
+            focusMode={focusMode}
+            setFocusMode={setFocusMode}
+            optimizationMode={optimizationMode}
+            setOptimizationMode={setOptimizationMode}
+            fileIds={fileIds}
+            setFileIds={setFileIds}
+            files={files}
+            setFiles={setFiles}
+          />
        )}
      </div>
    )
diff --git a/src/components/EmptyChat.tsx b/src/components/EmptyChat.tsx
index e40a338..0eb76ac 100644
--- a/src/components/EmptyChat.tsx
+++ b/src/components/EmptyChat.tsx
@@ -5,7 +5,27 @@ import Link from 'next/link';
import WeatherWidget from './WeatherWidget';
import NewsArticleWidget from './NewsArticleWidget';
-const EmptyChat = () => {
+const EmptyChat = ({
+ sendMessage,
+ focusMode,
+ setFocusMode,
+ optimizationMode,
+ setOptimizationMode,
+ fileIds,
+ setFileIds,
+ files,
+ setFiles,
+}: {
+ sendMessage: (message: string) => void;
+ focusMode: string;
+ setFocusMode: (mode: string) => void;
+ optimizationMode: string;
+ setOptimizationMode: (mode: string) => void;
+ fileIds: string[];
+ setFileIds: (fileIds: string[]) => void;
+ files: File[];
+ setFiles: (files: File[]) => void;
+}) => {
return (
@@ -18,7 +38,17 @@ const EmptyChat = () => {
Research begins here.
-          <EmptyChatMessageInput />
+          <EmptyChatMessageInput
+            sendMessage={sendMessage}
+            focusMode={focusMode}
+            setFocusMode={setFocusMode}
+            optimizationMode={optimizationMode}
+            setOptimizationMode={setOptimizationMode}
+            fileIds={fileIds}
+            setFileIds={setFileIds}
+            files={files}
+            setFiles={setFiles}
+          />
diff --git a/src/components/EmptyChatMessageInput.tsx b/src/components/EmptyChatMessageInput.tsx
index 3c5ff6b..43d1e28 100644
--- a/src/components/EmptyChatMessageInput.tsx
+++ b/src/components/EmptyChatMessageInput.tsx
@@ -1,15 +1,34 @@
import { ArrowRight } from 'lucide-react';
import { useEffect, useRef, useState } from 'react';
import TextareaAutosize from 'react-textarea-autosize';
+import CopilotToggle from './MessageInputActions/Copilot';
import Focus from './MessageInputActions/Focus';
import Optimization from './MessageInputActions/Optimization';
import Attach from './MessageInputActions/Attach';
-import { useChat } from '@/lib/hooks/useChat';
+import { File } from './ChatWindow';
-const EmptyChatMessageInput = () => {
- const { sendMessage } = useChat();
-
- /* const [copilotEnabled, setCopilotEnabled] = useState(false); */
+const EmptyChatMessageInput = ({
+ sendMessage,
+ focusMode,
+ setFocusMode,
+ optimizationMode,
+ setOptimizationMode,
+ fileIds,
+ setFileIds,
+ files,
+ setFiles,
+}: {
+ sendMessage: (message: string) => void;
+ focusMode: string;
+ setFocusMode: (mode: string) => void;
+ optimizationMode: string;
+ setOptimizationMode: (mode: string) => void;
+ fileIds: string[];
+ setFileIds: (fileIds: string[]) => void;
+ files: File[];
+ setFiles: (files: File[]) => void;
+}) => {
+ const [copilotEnabled, setCopilotEnabled] = useState(false);
const [message, setMessage] = useState('');
  const inputRef = useRef<HTMLTextAreaElement | null>(null);
@@ -65,11 +84,20 @@ const EmptyChatMessageInput = () => {
/>
-
+