diff --git a/package.json b/package.json
index 5715c2a..9e9137f 100644
--- a/package.json
+++ b/package.json
@@ -19,7 +19,6 @@
     "@langchain/community": "^0.3.49",
     "@langchain/core": "^0.3.66",
     "@langchain/google-genai": "^0.2.15",
-    "@langchain/groq": "^0.2.3",
     "@langchain/ollama": "^0.2.3",
     "@langchain/openai": "^0.6.2",
     "@langchain/textsplitters": "^0.1.0",
diff --git a/src/app/api/chat/route.ts b/src/app/api/chat/route.ts
index ba88da6..2d53b75 100644
--- a/src/app/api/chat/route.ts
+++ b/src/app/api/chat/route.ts
@@ -1,7 +1,11 @@
+import prompts from '@/lib/prompts';
+import MetaSearchAgent from '@/lib/search/metaSearchAgent';
 import crypto from 'crypto';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
 import { EventEmitter } from 'stream';
 import {
+  chatModelProviders,
+  embeddingModelProviders,
   getAvailableChatModelProviders,
   getAvailableEmbeddingModelProviders,
 } from '@/lib/providers';
@@ -134,8 +138,6 @@ const handleHistorySave = async (
     where: eq(chats.id, message.chatId),
   });
 
-  const fileData = files.map(getFileDetails);
-
   if (!chat) {
     await db
       .insert(chats)
@@ -144,15 +146,9 @@
         title: message.content,
         createdAt: new Date().toString(),
         focusMode: focusMode,
-        files: fileData,
-      })
-      .execute();
-  } else if (JSON.stringify(chat.files ?? []) != JSON.stringify(fileData)) {
-    db.update(chats)
-      .set({
         files: files.map(getFileDetails),
       })
-      .where(eq(chats.id, message.chatId));
+      .execute();
   }
 
   const messageExists = await db.query.messages.findFirst({
diff --git a/src/app/api/config/route.ts b/src/app/api/config/route.ts
index f117cce..0c11b23 100644
--- a/src/app/api/config/route.ts
+++ b/src/app/api/config/route.ts
@@ -11,7 +11,6 @@ import {
   getAimlApiKey,
   getLMStudioApiEndpoint,
   updateConfig,
-  getOllamaApiKey,
 } from '@/lib/config';
 import {
   getAvailableChatModelProviders,
@@ -54,7 +53,6 @@ export const GET = async (req: Request) => {
 
     config['openaiApiKey'] = getOpenaiApiKey();
     config['ollamaApiUrl'] = getOllamaApiEndpoint();
-    config['ollamaApiKey'] = getOllamaApiKey();
     config['lmStudioApiUrl'] = getLMStudioApiEndpoint();
     config['anthropicApiKey'] = getAnthropicApiKey();
     config['groqApiKey'] = getGroqApiKey();
@@ -95,7 +93,6 @@ export const POST = async (req: Request) => {
       },
       OLLAMA: {
         API_URL: config.ollamaApiUrl,
-        API_KEY: config.ollamaApiKey,
       },
       DEEPSEEK: {
         API_KEY: config.deepseekApiKey,
diff --git a/src/app/api/discover/route.ts b/src/app/api/discover/route.ts
index 415aee8..d0e56a6 100644
--- a/src/app/api/discover/route.ts
+++ b/src/app/api/discover/route.ts
@@ -1,72 +1,55 @@
 import { searchSearxng } from '@/lib/searxng';
 
-const websitesForTopic = {
-  tech: {
-    query: ['technology news', 'latest tech', 'AI', 'science and innovation'],
-    links: ['techcrunch.com', 'wired.com', 'theverge.com'],
-  },
-  finance: {
-    query: ['finance news', 'economy', 'stock market', 'investing'],
-    links: ['bloomberg.com', 'cnbc.com', 'marketwatch.com'],
-  },
-  art: {
-    query: ['art news', 'culture', 'modern art', 'cultural events'],
-    links: ['artnews.com', 'hyperallergic.com', 'theartnewspaper.com'],
-  },
-  sports: {
-    query: ['sports news', 'latest sports', 'cricket football tennis'],
-    links: ['espn.com', 'bbc.com/sport', 'skysports.com'],
-  },
-  entertainment: {
-    query: ['entertainment news', 'movies', 'TV shows', 'celebrities'],
-    links: ['hollywoodreporter.com', 'variety.com', 'deadline.com'],
-  },
-};
+const articleWebsites = [
+  'yahoo.com',
+  'www.exchangewire.com',
+  'businessinsider.com',
+  /* 'wired.com',
+  'mashable.com',
+  'theverge.com',
+  'gizmodo.com',
+  'cnet.com',
+  'venturebeat.com', */
+];
 
-type Topic = keyof typeof websitesForTopic;
+const topics = ['AI', 'tech']; /* TODO: Add UI to customize this */
 
 export const GET = async (req: Request) => {
   try {
     const params = new URL(req.url).searchParams;
-
     const mode: 'normal' | 'preview' =
       (params.get('mode') as 'normal' | 'preview') || 'normal';
-    const topic: Topic = (params.get('topic') as Topic) || 'tech';
-
-    const selectedTopic = websitesForTopic[topic];
 
     let data = [];
 
     if (mode === 'normal') {
-      const seenUrls = new Set();
       data = (
-        await Promise.all(
-          selectedTopic.links.flatMap((link) =>
-            selectedTopic.query.map(async (query) => {
+        await Promise.all([
+          ...new Array(articleWebsites.length * topics.length)
+            .fill(0)
+            .map(async (_, i) => {
               return (
-                await searchSearxng(`site:${link} ${query}`, {
-                  engines: ['bing news'],
-                  pageno: 1,
-                  language: 'en',
-                })
+                await searchSearxng(
+                  `site:${articleWebsites[i % articleWebsites.length]} ${
+                    topics[i % topics.length]
+                  }`,
+                  {
+                    engines: ['bing news'],
+                    pageno: 1,
+                    language: 'en',
+                  },
+                )
               ).results;
             }),
-          ),
-        )
+        ])
       )
+        .map((result) => result)
         .flat()
-        .filter((item) => {
-          const url = item.url?.toLowerCase().trim();
-          if (seenUrls.has(url)) return false;
-          seenUrls.add(url);
-          return true;
-        })
         .sort(() => Math.random() - 0.5);
     } else {
       data = (
         await searchSearxng(
-          `site:${selectedTopic.links[Math.floor(Math.random() * selectedTopic.links.length)]} ${selectedTopic.query[Math.floor(Math.random() * selectedTopic.query.length)]}`,
+          `site:${articleWebsites[Math.floor(Math.random() * articleWebsites.length)]} ${topics[Math.floor(Math.random() * topics.length)]}`,
           {
             engines: ['bing news'],
             pageno: 1,
diff --git a/src/app/api/search/route.ts b/src/app/api/search/route.ts
index 5f752ec..16325de 100644
--- a/src/app/api/search/route.ts
+++ b/src/app/api/search/route.ts
@@ -81,7 +81,8 @@ export const POST = async (req: Request) => {
     if (body.chatModel?.provider === 'custom_openai') {
       llm = new ChatOpenAI({
         modelName: body.chatModel?.name || getCustomOpenaiModelName(),
-        apiKey: body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(),
+        apiKey:
+          body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(),
         temperature: 0.7,
         configuration: {
           baseURL:
diff --git a/src/app/api/weather/route.ts b/src/app/api/weather/route.ts
index afaf8a6..1f9867f 100644
--- a/src/app/api/weather/route.ts
+++ b/src/app/api/weather/route.ts
@@ -1,10 +1,7 @@
 export const POST = async (req: Request) => {
   try {
-    const body: {
-      lat: number;
-      lng: number;
-      measureUnit: 'Imperial' | 'Metric';
-    } = await req.json();
+    const body: { lat: number; lng: number; temperatureUnit: 'C' | 'F' } =
+      await req.json();
 
     if (!body.lat || !body.lng) {
       return Response.json(
@@ -16,9 +13,7 @@ export const POST = async (req: Request) => {
     }
 
     const res = await fetch(
-      `https://api.open-meteo.com/v1/forecast?latitude=${body.lat}&longitude=${body.lng}&current=weather_code,temperature_2m,is_day,relative_humidity_2m,wind_speed_10m&timezone=auto${
-        body.measureUnit === 'Metric' ? '' : '&temperature_unit=fahrenheit'
-      }${body.measureUnit === 'Metric' ? '' : '&wind_speed_unit=mph'}`,
+      `https://api.open-meteo.com/v1/forecast?latitude=${body.lat}&longitude=${body.lng}&current=weather_code,temperature_2m,is_day,relative_humidity_2m,wind_speed_10m&timezone=auto${body.temperatureUnit === 'C' ? '' : '&temperature_unit=fahrenheit'}`,
     );
 
     const data = await res.json();
@@ -40,15 +35,13 @@ export const POST = async (req: Request) => {
       windSpeed: number;
       icon: string;
       temperatureUnit: 'C' | 'F';
-      windSpeedUnit: 'm/s' | 'mph';
     } = {
       temperature: data.current.temperature_2m,
       condition: '',
       humidity: data.current.relative_humidity_2m,
       windSpeed: data.current.wind_speed_10m,
       icon: '',
-      temperatureUnit: body.measureUnit === 'Metric' ? 'C' : 'F',
-      windSpeedUnit: body.measureUnit === 'Metric' ? 'm/s' : 'mph',
+      temperatureUnit: body.temperatureUnit,
     };
 
     const code = data.current.weather_code;
diff --git a/src/app/c/[chatId]/page.tsx b/src/app/c/[chatId]/page.tsx
index 672107a..aac125a 100644
--- a/src/app/c/[chatId]/page.tsx
+++ b/src/app/c/[chatId]/page.tsx
@@ -1,17 +1,9 @@
-'use client';
-
 import ChatWindow from '@/components/ChatWindow';
-import { useParams } from 'next/navigation';
 import React from 'react';
-import { ChatProvider } from '@/lib/hooks/useChat';
 
-const Page = () => {
-  const { chatId }: { chatId: string } = useParams();
-
-  return (
-    
-    
-  );
+const Page = ({ params }: { params: Promise<{ chatId: string }> }) => {
+  const { chatId } = React.use(params);
+  return ;
 };
 
 export default Page;
diff --git a/src/app/discover/page.tsx b/src/app/discover/page.tsx
index 8e20e50..eb7de7f 100644
--- a/src/app/discover/page.tsx
+++ b/src/app/discover/page.tsx
@@ -4,7 +4,6 @@ import { Search } from 'lucide-react';
 import { useEffect, useState } from 'react';
 import Link from 'next/link';
 import { toast } from 'sonner';
-import { cn } from '@/lib/utils';
 
 interface Discover {
   title: string;
@@ -13,66 +12,60 @@ interface Discover {
   thumbnail: string;
 }
 
-const topics: { key: string; display: string }[] = [
-  {
-    display: 'Tech & Science',
-    key: 'tech',
-  },
-  {
-    display: 'Finance',
-    key: 'finance',
-  },
-  {
-    display: 'Art & Culture',
-    key: 'art',
-  },
-  {
-    display: 'Sports',
-    key: 'sports',
-  },
-  {
-    display: 'Entertainment',
-    key: 'entertainment',
-  },
-];
-
 const Page = () => {
   const [discover, setDiscover] = useState(null);
   const [loading, setLoading] = useState(true);
-  const [activeTopic, setActiveTopic] = useState(topics[0].key);
-
-  const fetchArticles = async (topic: string) => {
-    setLoading(true);
-    try {
-      const res = await fetch(`/api/discover?topic=${topic}`, {
-        method: 'GET',
-        headers: {
-          'Content-Type': 'application/json',
-        },
-      });
-
-      const data = await res.json();
-
-      if (!res.ok) {
-        throw new Error(data.message);
-      }
-
-      data.blogs = data.blogs.filter((blog: Discover) => blog.thumbnail);
-
-      setDiscover(data.blogs);
-    } catch (err: any) {
-      console.error('Error fetching data:', err.message);
-      toast.error('Error fetching data');
-    } finally {
-      setLoading(false);
-    }
-  };
 
   useEffect(() => {
-    fetchArticles(activeTopic);
-  }, [activeTopic]);
+    const fetchData = async () => {
+      try {
+        const res = await fetch(`/api/discover`, {
+          method: 'GET',
+          headers: {
+            'Content-Type': 'application/json',
+          },
+        });
 
-  return (
+        const data = await res.json();
+
+        if (!res.ok) {
+          throw new Error(data.message);
+        }
+
+        data.blogs = data.blogs.filter((blog: Discover) => blog.thumbnail);
+
+        setDiscover(data.blogs);
+      } catch (err: any) {
+        console.error('Error fetching data:', err.message);
+        toast.error('Error fetching data');
+      } finally {
+        setLoading(false);
+      }
+    };
+
+    fetchData();
+  }, []);
+
+  return loading ? (
+
+ +
+ ) : ( <>
@@ -83,73 +76,35 @@ const Page = () => {
-
- {topics.map((t, i) => ( -
setActiveTopic(t.key)} - > - {t.display} -
- ))} -
- - {loading ? ( -
- -
- ) : ( -
- {discover && - discover?.map((item, i) => ( - - {item.title} -
-
- {item.title.slice(0, 100)}... -
-

- {item.content.slice(0, 100)}... -

+
+ {discover && + discover?.map((item, i) => ( + + {item.title} +
+
+ {item.title.slice(0, 100)}...
- - ))} -
- )} +

+ {item.content.slice(0, 100)}... +

+
+ + ))} +
); diff --git a/src/app/page.tsx b/src/app/page.tsx index 25981b5..e18aca9 100644 --- a/src/app/page.tsx +++ b/src/app/page.tsx @@ -1,5 +1,4 @@ import ChatWindow from '@/components/ChatWindow'; -import { ChatProvider } from '@/lib/hooks/useChat'; import { Metadata } from 'next'; import { Suspense } from 'react'; @@ -12,9 +11,7 @@ const Home = () => { return (
- - - +
); diff --git a/src/app/settings/page.tsx b/src/app/settings/page.tsx index 6fb8255..045226c 100644 --- a/src/app/settings/page.tsx +++ b/src/app/settings/page.tsx @@ -21,7 +21,6 @@ interface SettingsType { anthropicApiKey: string; geminiApiKey: string; ollamaApiUrl: string; - ollamaApiKey: string; lmStudioApiUrl: string; deepseekApiKey: string; aimlApiKey: string; @@ -149,9 +148,7 @@ const Page = () => { const [automaticImageSearch, setAutomaticImageSearch] = useState(false); const [automaticVideoSearch, setAutomaticVideoSearch] = useState(false); const [systemInstructions, setSystemInstructions] = useState(''); - const [measureUnit, setMeasureUnit] = useState<'Imperial' | 'Metric'>( - 'Metric', - ); + const [temperatureUnit, setTemperatureUnit] = useState<'C' | 'F'>('C'); const [savingStates, setSavingStates] = useState>({}); useEffect(() => { @@ -214,9 +211,7 @@ const Page = () => { setSystemInstructions(localStorage.getItem('systemInstructions')!); - setMeasureUnit( - localStorage.getItem('measureUnit')! as 'Imperial' | 'Metric', - ); + setTemperatureUnit(localStorage.getItem('temperatureUnit')! as 'C' | 'F'); setIsLoading(false); }; @@ -376,8 +371,8 @@ const Page = () => { localStorage.setItem('embeddingModel', value); } else if (key === 'systemInstructions') { localStorage.setItem('systemInstructions', value); - } else if (key === 'measureUnit') { - localStorage.setItem('measureUnit', value.toString()); + } else if (key === 'temperatureUnit') { + localStorage.setItem('temperatureUnit', value.toString()); } } catch (err) { console.error('Failed to save:', err); @@ -435,22 +430,22 @@ const Page = () => {

-                  Measurement Units
+                  Temperature Unit

{ - setConfig((prev) => ({ - ...prev!, - ollamaApiKey: e.target.value, - })); - }} - onSave={(value) => saveConfig('ollamaApiKey', value)} - /> -
-

GROQ API Key diff --git a/src/components/Chat.tsx b/src/components/Chat.tsx index a5d8cf9..0cf125b 100644 --- a/src/components/Chat.tsx +++ b/src/components/Chat.tsx @@ -5,11 +5,28 @@ import MessageInput from './MessageInput'; import { File, Message } from './ChatWindow'; import MessageBox from './MessageBox'; import MessageBoxLoading from './MessageBoxLoading'; -import { useChat } from '@/lib/hooks/useChat'; - -const Chat = () => { - const { messages, loading, messageAppeared } = useChat(); +const Chat = ({ + loading, + messages, + sendMessage, + messageAppeared, + rewrite, + fileIds, + setFileIds, + files, + setFiles, +}: { + messages: Message[]; + sendMessage: (message: string) => void; + loading: boolean; + messageAppeared: boolean; + rewrite: (messageId: string) => void; + fileIds: string[]; + setFileIds: (fileIds: string[]) => void; + files: File[]; + setFiles: (files: File[]) => void; +}) => { const [dividerWidth, setDividerWidth] = useState(0); const dividerRef = useRef(null); const messageEnd = useRef(null); @@ -55,8 +72,12 @@ const Chat = () => { key={i} message={msg} messageIndex={i} + history={messages} + loading={loading} dividerRef={isLast ? dividerRef : undefined} isLast={isLast} + rewrite={rewrite} + sendMessage={sendMessage} /> {!isLast && msg.role === 'assistant' && (

@@ -71,7 +92,14 @@ const Chat = () => { className="bottom-24 lg:bottom-10 fixed z-40" style={{ width: dividerWidth }} > - +
)}
diff --git a/src/components/ChatWindow.tsx b/src/components/ChatWindow.tsx index 0d40c83..67a5d0c 100644 --- a/src/components/ChatWindow.tsx +++ b/src/components/ChatWindow.tsx @@ -1,13 +1,17 @@ 'use client'; +import { useEffect, useRef, useState } from 'react'; import { Document } from '@langchain/core/documents'; import Navbar from './Navbar'; import Chat from './Chat'; import EmptyChat from './EmptyChat'; +import crypto from 'crypto'; +import { toast } from 'sonner'; +import { useSearchParams } from 'next/navigation'; +import { getSuggestions } from '@/lib/actions'; import { Settings } from 'lucide-react'; import Link from 'next/link'; import NextError from 'next/error'; -import { useChat } from '@/lib/hooks/useChat'; export type Message = { messageId: string; @@ -25,8 +29,539 @@ export interface File { fileId: string; } -const ChatWindow = () => { - const { hasError, isReady, notFound, messages } = useChat(); +interface ChatModelProvider { + name: string; + provider: string; +} + +interface EmbeddingModelProvider { + name: string; + provider: string; +} + +const checkConfig = async ( + setChatModelProvider: (provider: ChatModelProvider) => void, + setEmbeddingModelProvider: (provider: EmbeddingModelProvider) => void, + setIsConfigReady: (ready: boolean) => void, + setHasError: (hasError: boolean) => void, +) => { + try { + let chatModel = localStorage.getItem('chatModel'); + let chatModelProvider = localStorage.getItem('chatModelProvider'); + let embeddingModel = localStorage.getItem('embeddingModel'); + let embeddingModelProvider = localStorage.getItem('embeddingModelProvider'); + + const autoImageSearch = localStorage.getItem('autoImageSearch'); + const autoVideoSearch = localStorage.getItem('autoVideoSearch'); + + if (!autoImageSearch) { + localStorage.setItem('autoImageSearch', 'true'); + } + + if (!autoVideoSearch) { + localStorage.setItem('autoVideoSearch', 'false'); + } + + const providers = await fetch(`/api/models`, { + headers: { + 'Content-Type': 'application/json', + }, + }).then(async (res) => { + if (!res.ok) + throw new Error( + `Failed to fetch models: ${res.status} ${res.statusText}`, + ); + return res.json(); + }); + + if ( + !chatModel || + !chatModelProvider || + !embeddingModel || + !embeddingModelProvider + ) { + if (!chatModel || !chatModelProvider) { + const chatModelProviders = providers.chatModelProviders; + const chatModelProvidersKeys = Object.keys(chatModelProviders); + + if (!chatModelProviders || chatModelProvidersKeys.length === 0) { + return toast.error('No chat models available'); + } else { + chatModelProvider = + chatModelProvidersKeys.find( + (provider) => + Object.keys(chatModelProviders[provider]).length > 0, + ) || chatModelProvidersKeys[0]; + } + + if ( + chatModelProvider === 'custom_openai' && + Object.keys(chatModelProviders[chatModelProvider]).length === 0 + ) { + toast.error( + "Looks like you haven't configured any chat model providers. 
Please configure them from the settings page or the config file.", + ); + return setHasError(true); + } + + chatModel = Object.keys(chatModelProviders[chatModelProvider])[0]; + } + + if (!embeddingModel || !embeddingModelProvider) { + const embeddingModelProviders = providers.embeddingModelProviders; + + if ( + !embeddingModelProviders || + Object.keys(embeddingModelProviders).length === 0 + ) + return toast.error('No embedding models available'); + + embeddingModelProvider = Object.keys(embeddingModelProviders)[0]; + embeddingModel = Object.keys( + embeddingModelProviders[embeddingModelProvider], + )[0]; + } + + localStorage.setItem('chatModel', chatModel!); + localStorage.setItem('chatModelProvider', chatModelProvider); + localStorage.setItem('embeddingModel', embeddingModel!); + localStorage.setItem('embeddingModelProvider', embeddingModelProvider); + } else { + const chatModelProviders = providers.chatModelProviders; + const embeddingModelProviders = providers.embeddingModelProviders; + + if ( + Object.keys(chatModelProviders).length > 0 && + (!chatModelProviders[chatModelProvider] || + Object.keys(chatModelProviders[chatModelProvider]).length === 0) + ) { + const chatModelProvidersKeys = Object.keys(chatModelProviders); + chatModelProvider = + chatModelProvidersKeys.find( + (key) => Object.keys(chatModelProviders[key]).length > 0, + ) || chatModelProvidersKeys[0]; + + localStorage.setItem('chatModelProvider', chatModelProvider); + } + + if ( + chatModelProvider && + !chatModelProviders[chatModelProvider][chatModel] + ) { + if ( + chatModelProvider === 'custom_openai' && + Object.keys(chatModelProviders[chatModelProvider]).length === 0 + ) { + toast.error( + "Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.", + ); + return setHasError(true); + } + + chatModel = Object.keys( + chatModelProviders[ + Object.keys(chatModelProviders[chatModelProvider]).length > 0 + ? 
chatModelProvider + : Object.keys(chatModelProviders)[0] + ], + )[0]; + + localStorage.setItem('chatModel', chatModel); + } + + if ( + Object.keys(embeddingModelProviders).length > 0 && + !embeddingModelProviders[embeddingModelProvider] + ) { + embeddingModelProvider = Object.keys(embeddingModelProviders)[0]; + localStorage.setItem('embeddingModelProvider', embeddingModelProvider); + } + + if ( + embeddingModelProvider && + !embeddingModelProviders[embeddingModelProvider][embeddingModel] + ) { + embeddingModel = Object.keys( + embeddingModelProviders[embeddingModelProvider], + )[0]; + localStorage.setItem('embeddingModel', embeddingModel); + } + } + + setChatModelProvider({ + name: chatModel!, + provider: chatModelProvider, + }); + + setEmbeddingModelProvider({ + name: embeddingModel!, + provider: embeddingModelProvider, + }); + + setIsConfigReady(true); + } catch (err) { + console.error('An error occurred while checking the configuration:', err); + setIsConfigReady(false); + setHasError(true); + } +}; + +const loadMessages = async ( + chatId: string, + setMessages: (messages: Message[]) => void, + setIsMessagesLoaded: (loaded: boolean) => void, + setChatHistory: (history: [string, string][]) => void, + setFocusMode: (mode: string) => void, + setNotFound: (notFound: boolean) => void, + setFiles: (files: File[]) => void, + setFileIds: (fileIds: string[]) => void, +) => { + const res = await fetch(`/api/chats/${chatId}`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + }, + }); + + if (res.status === 404) { + setNotFound(true); + setIsMessagesLoaded(true); + return; + } + + const data = await res.json(); + + const messages = data.messages.map((msg: any) => { + return { + ...msg, + ...JSON.parse(msg.metadata), + }; + }) as Message[]; + + setMessages(messages); + + const history = messages.map((msg) => { + return [msg.role, msg.content]; + }) as [string, string][]; + + console.debug(new Date(), 'app:messages_loaded'); + + document.title = messages[0].content; + + const files = data.chat.files.map((file: any) => { + return { + fileName: file.name, + fileExtension: file.name.split('.').pop(), + fileId: file.fileId, + }; + }); + + setFiles(files); + setFileIds(files.map((file: File) => file.fileId)); + + setChatHistory(history); + setFocusMode(data.chat.focusMode); + setIsMessagesLoaded(true); +}; + +const ChatWindow = ({ id }: { id?: string }) => { + const searchParams = useSearchParams(); + const initialMessage = searchParams.get('q'); + + const [chatId, setChatId] = useState(id); + const [newChatCreated, setNewChatCreated] = useState(false); + + const [chatModelProvider, setChatModelProvider] = useState( + { + name: '', + provider: '', + }, + ); + + const [embeddingModelProvider, setEmbeddingModelProvider] = + useState({ + name: '', + provider: '', + }); + + const [isConfigReady, setIsConfigReady] = useState(false); + const [hasError, setHasError] = useState(false); + const [isReady, setIsReady] = useState(false); + + useEffect(() => { + checkConfig( + setChatModelProvider, + setEmbeddingModelProvider, + setIsConfigReady, + setHasError, + ); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + + const [loading, setLoading] = useState(false); + const [messageAppeared, setMessageAppeared] = useState(false); + + const [chatHistory, setChatHistory] = useState<[string, string][]>([]); + const [messages, setMessages] = useState([]); + + const [files, setFiles] = useState([]); + const [fileIds, setFileIds] = useState([]); + + const [focusMode, setFocusMode] = 
useState('webSearch'); + const [optimizationMode, setOptimizationMode] = useState('speed'); + + const [isMessagesLoaded, setIsMessagesLoaded] = useState(false); + + const [notFound, setNotFound] = useState(false); + + useEffect(() => { + if ( + chatId && + !newChatCreated && + !isMessagesLoaded && + messages.length === 0 + ) { + loadMessages( + chatId, + setMessages, + setIsMessagesLoaded, + setChatHistory, + setFocusMode, + setNotFound, + setFiles, + setFileIds, + ); + } else if (!chatId) { + setNewChatCreated(true); + setIsMessagesLoaded(true); + setChatId(crypto.randomBytes(20).toString('hex')); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + + const messagesRef = useRef([]); + + useEffect(() => { + messagesRef.current = messages; + }, [messages]); + + useEffect(() => { + if (isMessagesLoaded && isConfigReady) { + setIsReady(true); + console.debug(new Date(), 'app:ready'); + } else { + setIsReady(false); + } + }, [isMessagesLoaded, isConfigReady]); + + const sendMessage = async (message: string, messageId?: string) => { + if (loading) return; + if (!isConfigReady) { + toast.error('Cannot send message before the configuration is ready'); + return; + } + + setLoading(true); + setMessageAppeared(false); + + let sources: Document[] | undefined = undefined; + let recievedMessage = ''; + let added = false; + + messageId = messageId ?? crypto.randomBytes(7).toString('hex'); + + setMessages((prevMessages) => [ + ...prevMessages, + { + content: message, + messageId: messageId, + chatId: chatId!, + role: 'user', + createdAt: new Date(), + }, + ]); + + const messageHandler = async (data: any) => { + if (data.type === 'error') { + toast.error(data.data); + setLoading(false); + return; + } + + if (data.type === 'sources') { + sources = data.data; + if (!added) { + setMessages((prevMessages) => [ + ...prevMessages, + { + content: '', + messageId: data.messageId, + chatId: chatId!, + role: 'assistant', + sources: sources, + createdAt: new Date(), + }, + ]); + added = true; + } + setMessageAppeared(true); + } + + if (data.type === 'message') { + if (!added) { + setMessages((prevMessages) => [ + ...prevMessages, + { + content: data.data, + messageId: data.messageId, + chatId: chatId!, + role: 'assistant', + sources: sources, + createdAt: new Date(), + }, + ]); + added = true; + } + + setMessages((prev) => + prev.map((message) => { + if (message.messageId === data.messageId) { + return { ...message, content: message.content + data.data }; + } + + return message; + }), + ); + + recievedMessage += data.data; + setMessageAppeared(true); + } + + if (data.type === 'messageEnd') { + setChatHistory((prevHistory) => [ + ...prevHistory, + ['human', message], + ['assistant', recievedMessage], + ]); + + setLoading(false); + + const lastMsg = messagesRef.current[messagesRef.current.length - 1]; + + const autoImageSearch = localStorage.getItem('autoImageSearch'); + const autoVideoSearch = localStorage.getItem('autoVideoSearch'); + + if (autoImageSearch === 'true') { + document + .getElementById(`search-images-${lastMsg.messageId}`) + ?.click(); + } + + if (autoVideoSearch === 'true') { + document + .getElementById(`search-videos-${lastMsg.messageId}`) + ?.click(); + } + + if ( + lastMsg.role === 'assistant' && + lastMsg.sources && + lastMsg.sources.length > 0 && + !lastMsg.suggestions + ) { + const suggestions = await getSuggestions(messagesRef.current); + setMessages((prev) => + prev.map((msg) => { + if (msg.messageId === lastMsg.messageId) { + return { ...msg, suggestions: suggestions }; + 
} + return msg; + }), + ); + } + } + }; + + const res = await fetch('/api/chat', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + content: message, + message: { + messageId: messageId, + chatId: chatId!, + content: message, + }, + chatId: chatId!, + files: fileIds, + focusMode: focusMode, + optimizationMode: optimizationMode, + history: chatHistory, + chatModel: { + name: chatModelProvider.name, + provider: chatModelProvider.provider, + }, + embeddingModel: { + name: embeddingModelProvider.name, + provider: embeddingModelProvider.provider, + }, + systemInstructions: localStorage.getItem('systemInstructions'), + }), + }); + + if (!res.body) throw new Error('No response body'); + + const reader = res.body?.getReader(); + const decoder = new TextDecoder('utf-8'); + + let partialChunk = ''; + + while (true) { + const { value, done } = await reader.read(); + if (done) break; + + partialChunk += decoder.decode(value, { stream: true }); + + try { + const messages = partialChunk.split('\n'); + for (const msg of messages) { + if (!msg.trim()) continue; + const json = JSON.parse(msg); + messageHandler(json); + } + partialChunk = ''; + } catch (error) { + console.warn('Incomplete JSON, waiting for next chunk...'); + } + } + }; + + const rewrite = (messageId: string) => { + const index = messages.findIndex((msg) => msg.messageId === messageId); + + if (index === -1) return; + + const message = messages[index - 1]; + + setMessages((prev) => { + return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)]; + }); + setChatHistory((prev) => { + return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)]; + }); + + sendMessage(message.content, message.messageId); + }; + + useEffect(() => { + if (isReady && initialMessage && isConfigReady) { + sendMessage(initialMessage); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [isConfigReady, isReady, initialMessage]); + if (hasError) { return (
@@ -51,11 +586,31 @@ const ChatWindow = () => {
{messages.length > 0 ? ( <> - - + + ) : ( - + )}
) diff --git a/src/components/EmptyChat.tsx b/src/components/EmptyChat.tsx index e40a338..0eb76ac 100644 --- a/src/components/EmptyChat.tsx +++ b/src/components/EmptyChat.tsx @@ -5,7 +5,27 @@ import Link from 'next/link'; import WeatherWidget from './WeatherWidget'; import NewsArticleWidget from './NewsArticleWidget'; -const EmptyChat = () => { +const EmptyChat = ({ + sendMessage, + focusMode, + setFocusMode, + optimizationMode, + setOptimizationMode, + fileIds, + setFileIds, + files, + setFiles, +}: { + sendMessage: (message: string) => void; + focusMode: string; + setFocusMode: (mode: string) => void; + optimizationMode: string; + setOptimizationMode: (mode: string) => void; + fileIds: string[]; + setFileIds: (fileIds: string[]) => void; + files: File[]; + setFiles: (files: File[]) => void; +}) => { return (
@@ -18,7 +38,17 @@ const EmptyChat = () => {

Research begins here.

- +
diff --git a/src/components/EmptyChatMessageInput.tsx b/src/components/EmptyChatMessageInput.tsx index 3c5ff6b..43d1e28 100644 --- a/src/components/EmptyChatMessageInput.tsx +++ b/src/components/EmptyChatMessageInput.tsx @@ -1,15 +1,34 @@ import { ArrowRight } from 'lucide-react'; import { useEffect, useRef, useState } from 'react'; import TextareaAutosize from 'react-textarea-autosize'; +import CopilotToggle from './MessageInputActions/Copilot'; import Focus from './MessageInputActions/Focus'; import Optimization from './MessageInputActions/Optimization'; import Attach from './MessageInputActions/Attach'; -import { useChat } from '@/lib/hooks/useChat'; +import { File } from './ChatWindow'; -const EmptyChatMessageInput = () => { - const { sendMessage } = useChat(); - - /* const [copilotEnabled, setCopilotEnabled] = useState(false); */ +const EmptyChatMessageInput = ({ + sendMessage, + focusMode, + setFocusMode, + optimizationMode, + setOptimizationMode, + fileIds, + setFileIds, + files, + setFiles, +}: { + sendMessage: (message: string) => void; + focusMode: string; + setFocusMode: (mode: string) => void; + optimizationMode: string; + setOptimizationMode: (mode: string) => void; + fileIds: string[]; + setFileIds: (fileIds: string[]) => void; + files: File[]; + setFiles: (files: File[]) => void; +}) => { + const [copilotEnabled, setCopilotEnabled] = useState(false); const [message, setMessage] = useState(''); const inputRef = useRef(null); @@ -65,11 +84,20 @@ const EmptyChatMessageInput = () => { />
- - + +
- +