Add status updates when generating chat completions, add system theme support, add custom OpenAI embedding model support, and fix various bugs.
parent cc821315f8
commit 1077c1b703

13 changed files with 347 additions and 66 deletions
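Note: the status updates introduced here ride on the same newline-delimited JSON stream as the existing 'sources' and 'message' events; each event is serialized as JSON.stringify(event) + '\n'. A minimal sketch of the event shapes implied by the hunks below ('StreamEvent' is an illustrative name, not an identifier from this commit):

    // One JSON object per line (NDJSON); the client splits on '\n'.
    type StreamEvent =
      | { type: 'status'; data: string } // 'Searching web...', 'Generating answer...', 'Chat completion failed.'
      | { type: 'sources'; data: unknown[] }
      | { type: 'message'; data: string }
      | { type: 'messageEnd' }
      | { type: 'error'; data: string };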
@@ -61,10 +61,22 @@ const handleEmitterEvents = async (
 ) => {
   let recievedMessage = '';
   let sources: any[] = [];
+  let sentGeneratingStatus = false;
 
-  stream.on('data', (data) => {
+  stream.on('data', (data: string) => {
     const parsedData = JSON.parse(data);
     if (parsedData.type === 'response') {
+      if (!sentGeneratingStatus) {
+        writer.write(
+          encoder.encode(
+            JSON.stringify({
+              type: 'status',
+              data: 'Generating answer...',
+            }) + '\n',
+          ),
+        );
+        sentGeneratingStatus = true;
+      }
       writer.write(
         encoder.encode(
           JSON.stringify({
@@ -77,6 +89,17 @@ const handleEmitterEvents = async (
 
       recievedMessage += parsedData.data;
     } else if (parsedData.type === 'sources') {
+      if (!sentGeneratingStatus) {
+        writer.write(
+          encoder.encode(
+            JSON.stringify({
+              type: 'status',
+              data: 'Generating answer...',
+            }) + '\n',
+          ),
+        );
+        sentGeneratingStatus = true;
+      }
       writer.write(
         encoder.encode(
           JSON.stringify({
@@ -114,8 +137,16 @@ const handleEmitterEvents = async (
       })
       .execute();
   });
-  stream.on('error', (data) => {
+  stream.on('error', (data: string) => {
     const parsedData = JSON.parse(data);
+    writer.write(
+      encoder.encode(
+        JSON.stringify({
+          type: 'status',
+          data: 'Chat completion failed.',
+        }) + '\n',
+      ),
+    );
     writer.write(
       encoder.encode(
         JSON.stringify({
@@ -218,6 +249,28 @@ export const POST = async (req: Request) => {
       body.embeddingModel?.name || Object.keys(embeddingProvider)[0]
     ];
 
+  const selectedChatProviderKey =
+    body.chatModel?.provider || Object.keys(chatModelProviders)[0];
+  const selectedChatModelKey =
+    body.chatModel?.name || Object.keys(chatModelProvider)[0];
+  const selectedEmbeddingProviderKey =
+    body.embeddingModel?.provider || Object.keys(embeddingModelProviders)[0];
+  const selectedEmbeddingModelKey =
+    body.embeddingModel?.name || Object.keys(embeddingProvider)[0];
+
+  console.log('[Models] Chat request', {
+    chatProvider: selectedChatProviderKey,
+    chatModel: selectedChatModelKey,
+    embeddingProvider: selectedEmbeddingProviderKey,
+    embeddingModel: selectedEmbeddingModelKey,
+    ...(selectedChatProviderKey === 'custom_openai'
+      ? { chatBaseURL: getCustomOpenaiApiUrl() }
+      : {}),
+    ...(selectedEmbeddingProviderKey === 'custom_openai'
+      ? { embeddingBaseURL: getCustomOpenaiApiUrl() }
+      : {}),
+  });
+
   let llm: BaseChatModel | undefined;
   let embedding = embeddingModel.model;
 
@@ -272,11 +325,54 @@ export const POST = async (req: Request) => {
     );
   }
 
+  const llmProxy = new Proxy(llm as any, {
+    get(target, prop, receiver) {
+      if (
+        prop === 'invoke' ||
+        prop === 'stream' ||
+        prop === 'streamEvents' ||
+        prop === 'generate'
+      ) {
+        return (...args: any[]) => {
+          console.log('[Models] Chat model call', {
+            provider: selectedChatProviderKey,
+            model: selectedChatModelKey,
+            method: String(prop),
+          });
+          return (target as any)[prop](...args);
+        };
+      }
+      return Reflect.get(target, prop, receiver);
+    },
+  });
+
+  const embeddingProxy = new Proxy(embedding as any, {
+    get(target, prop, receiver) {
+      if (prop === 'embedQuery' || prop === 'embedDocuments') {
+        return (...args: any[]) => {
+          console.log('[Models] Embedding model call', {
+            provider: selectedEmbeddingProviderKey,
+            model: selectedEmbeddingModelKey,
+            method: String(prop),
+            size:
+              prop === 'embedDocuments'
+                ? Array.isArray(args[0])
+                  ? args[0].length
+                  : undefined
+                : undefined,
+          });
+          return (target as any)[prop](...args);
+        };
+      }
+      return Reflect.get(target, prop, receiver);
    },
+  });
+
   const stream = await handler.searchAndAnswer(
     message.content,
     history,
-    llm,
-    embedding,
+    llmProxy as any,
+    embeddingProxy as any,
     body.optimizationMode,
     body.files,
     body.systemInstructions,
@@ -286,6 +382,18 @@ export const POST = async (req: Request) => {
   const writer = responseStream.writable.getWriter();
   const encoder = new TextEncoder();
 
+  writer.write(
+    encoder.encode(
+      JSON.stringify({
+        type: 'status',
+        data:
+          body.focusMode === 'writingAssistant'
+            ? 'Waiting for chat completion...'
+            : 'Searching web...',
+      }) + '\n',
+    ),
+  );
+
   handleEmitterEvents(stream, writer, encoder, aiMessageId, message.chatId);
   handleHistorySave(message, humanMessageId, body.focusMode, body.files);
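Both API routes wrap the selected chat and embedding models in a logging Proxy before passing them to the handler. A stripped-down sketch of the pattern, assuming only that the wrapped object exposes the named methods ('logCalls' is an illustrative helper, not an identifier from this commit):

    // Intercept a fixed set of methods, log the call, then forward to the
    // real implementation; every other property passes through untouched.
    const logCalls = <T extends object>(target: T, methods: string[], label: string): T =>
      new Proxy(target, {
        get(obj, prop, receiver) {
          const value = Reflect.get(obj, prop, receiver);
          if (typeof prop === 'string' && methods.includes(prop) && typeof value === 'function') {
            return (...args: unknown[]) => {
              console.log(label, { method: prop });
              return (value as Function).apply(obj, args);
            };
          }
          return value;
        },
      });

Because Reflect.get forwards everything else, the wrapped model stays a drop-in replacement for the original, which is why the routes can hand llmProxy and embeddingProxy straight to searchAndAnswer.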
@@ -75,6 +75,19 @@ export const POST = async (req: Request) => {
     body.embeddingModel?.name ||
     Object.keys(embeddingModelProviders[embeddingModelProvider])[0];
 
+  console.log('[Models] Search request', {
+    chatProvider: chatModelProvider,
+    chatModel,
+    embeddingProvider: embeddingModelProvider,
+    embeddingModel,
+    ...(chatModelProvider === 'custom_openai'
+      ? { chatBaseURL: getCustomOpenaiApiUrl() }
+      : {}),
+    ...(embeddingModelProvider === 'custom_openai'
+      ? { embeddingBaseURL: getCustomOpenaiApiUrl() }
+      : {}),
+  });
+
   let llm: BaseChatModel | undefined;
   let embeddings: Embeddings | undefined;
 
@@ -118,11 +131,54 @@ export const POST = async (req: Request) => {
     return Response.json({ message: 'Invalid focus mode' }, { status: 400 });
   }
 
+  const llmProxy = new Proxy(llm as any, {
+    get(target, prop, receiver) {
+      if (
+        prop === 'invoke' ||
+        prop === 'stream' ||
+        prop === 'streamEvents' ||
+        prop === 'generate'
+      ) {
+        return (...args: any[]) => {
+          console.log('[Models] Chat model call', {
+            provider: chatModelProvider,
+            model: chatModel,
+            method: String(prop),
+          });
+          return (target as any)[prop](...args);
+        };
+      }
+      return Reflect.get(target, prop, receiver);
+    },
+  });
+
+  const embeddingProxy = new Proxy(embeddings as any, {
+    get(target, prop, receiver) {
+      if (prop === 'embedQuery' || prop === 'embedDocuments') {
+        return (...args: any[]) => {
+          console.log('[Models] Embedding model call', {
+            provider: embeddingModelProvider,
+            model: embeddingModel,
+            method: String(prop),
+            size:
+              prop === 'embedDocuments'
+                ? Array.isArray(args[0])
+                  ? args[0].length
+                  : undefined
+                : undefined,
+          });
+          return (target as any)[prop](...args);
+        };
+      }
+      return Reflect.get(target, prop, receiver);
+    },
+  });
+
   const emitter = await searchHandler.searchAndAnswer(
     body.query,
     history,
-    llm,
-    embeddings,
+    llmProxy as any,
+    embeddingProxy as any,
     body.optimizationMode,
     [],
     body.systemInstructions || '',
@@ -3,6 +3,7 @@ import fs from 'fs';
 import path from 'path';
 import crypto from 'crypto';
 import { getAvailableEmbeddingModelProviders } from '@/lib/providers';
+import { getCustomOpenaiApiUrl } from '@/lib/config';
 import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf';
 import { DocxLoader } from '@langchain/community/document_loaders/fs/docx';
 import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
@@ -46,6 +47,14 @@ export async function POST(req: Request) {
   const embeddingModel =
     embedding_model ?? Object.keys(embeddingModels[provider as string])[0];
 
+  console.log('[Models] Upload embeddings request', {
+    embeddingProvider: provider,
+    embeddingModel,
+    ...(provider === 'custom_openai'
+      ? { embeddingBaseURL: getCustomOpenaiApiUrl() }
+      : {}),
+  });
+
   let embeddingsModel =
     embeddingModels[provider as string]?.[embeddingModel as string]?.model;
   if (!embeddingsModel) {
@@ -55,6 +64,28 @@ export async function POST(req: Request) {
     );
   }
 
+  const loggedEmbeddings = new Proxy(embeddingsModel as any, {
+    get(target, prop, receiver) {
+      if (prop === 'embedQuery' || prop === 'embedDocuments') {
+        return (...args: any[]) => {
+          console.log('[Models] Upload embedding model call', {
+            provider,
+            model: embeddingModel,
+            method: String(prop),
+            size:
+              prop === 'embedDocuments'
+                ? Array.isArray(args[0])
+                  ? args[0].length
+                  : undefined
+                : undefined,
+          });
+          return (target as any)[prop](...args);
+        };
+      }
+      return Reflect.get(target, prop, receiver);
+    },
+  });
+
   const processedFiles: FileRes[] = [];
 
   await Promise.all(
@@ -98,7 +129,7 @@ export async function POST(req: Request) {
         }),
       );
 
-      const embeddings = await embeddingsModel.embedDocuments(
+      const embeddings = await loggedEmbeddings.embedDocuments(
         splitted.map((doc) => doc.pageContent),
       );
       const embeddingsDataPath = filePath.replace(
@@ -16,6 +16,7 @@ const Chat = ({
   setFileIds,
   files,
   setFiles,
+  statusText,
 }: {
   messages: Message[];
   sendMessage: (message: string) => void;
@@ -26,6 +27,7 @@ const Chat = ({
   setFileIds: (fileIds: string[]) => void;
   files: File[];
   setFiles: (files: File[]) => void;
+  statusText?: string;
 }) => {
   const [dividerWidth, setDividerWidth] = useState(0);
   const dividerRef = useRef<HTMLDivElement | null>(null);
@@ -78,6 +80,7 @@ const Chat = ({
               isLast={isLast}
               rewrite={rewrite}
               sendMessage={sendMessage}
+              statusText={statusText}
             />
             {!isLast && msg.role === 'assistant' && (
               <div className="h-px w-full bg-light-secondary dark:bg-dark-secondary" />
@@ -85,7 +88,9 @@ const Chat = ({
           </Fragment>
         );
       })}
-      {loading && !messageAppeared && <MessageBoxLoading />}
+      {loading && !messageAppeared && (
+        <MessageBoxLoading statusText={statusText} />
+      )}
       <div ref={messageEnd} className="h-0" />
       {dividerWidth > 0 && (
         <div
@@ -313,6 +313,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
   const [isMessagesLoaded, setIsMessagesLoaded] = useState(false);
 
   const [notFound, setNotFound] = useState(false);
+  const [statusText, setStatusText] = useState<string | undefined>(undefined);
 
   useEffect(() => {
     if (
@@ -367,6 +368,11 @@ const ChatWindow = ({ id }: { id?: string }) => {
 
     setLoading(true);
    setMessageAppeared(false);
+    setStatusText(
+      focusMode === 'writingAssistant'
+        ? 'Waiting for chat completion...'
+        : 'Searching web...'
+    );
 
     let sources: Document[] | undefined = undefined;
     let recievedMessage = '';
@@ -386,13 +392,19 @@ const ChatWindow = ({ id }: { id?: string }) => {
     ]);
 
     const messageHandler = async (data: any) => {
+      if (data.type === 'status') {
+        if (typeof data.data === 'string') setStatusText(data.data);
+        return;
+      }
       if (data.type === 'error') {
         toast.error(data.data);
+        setStatusText('Chat completion failed.');
         setLoading(false);
         return;
       }
 
       if (data.type === 'sources') {
+        setStatusText('Generating answer...');
         sources = data.data;
         if (!added) {
           setMessages((prevMessages) => [
@@ -412,6 +424,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
       }
 
       if (data.type === 'message') {
+        setStatusText('Generating answer...');
         if (!added) {
           setMessages((prevMessages) => [
             ...prevMessages,
@@ -442,6 +455,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
       }
 
       if (data.type === 'messageEnd') {
+        setStatusText(undefined);
         setChatHistory((prevHistory) => [
           ...prevHistory,
           ['human', message],
@@ -519,31 +533,61 @@ const ChatWindow = ({ id }: { id?: string }) => {
       }),
     });
 
-    if (!res.body) throw new Error('No response body');
+    if (!res.ok) {
+      const text = await res.text();
+      try {
+        const json = JSON.parse(text);
+        toast.error(json.message || `Request failed: ${res.status} ${res.statusText}`);
+      } catch {
+        toast.error(`Request failed: ${res.status} ${res.statusText}`);
+      }
+      setStatusText('Chat completion failed.');
+      setLoading(false);
+      return;
+    }
+
+    if (!res.body) {
+      toast.error('No response body');
+      setStatusText('Chat completion failed.');
+      setLoading(false);
+      return;
+    }
 
     const reader = res.body?.getReader();
     const decoder = new TextDecoder('utf-8');
 
     let partialChunk = '';
 
-    while (true) {
-      const { value, done } = await reader.read();
-      if (done) break;
-      partialChunk += decoder.decode(value, { stream: true });
-
-      try {
-        const messages = partialChunk.split('\n');
-        for (const msg of messages) {
-          if (!msg.trim()) continue;
-          const json = JSON.parse(msg);
-          messageHandler(json);
-        }
-        partialChunk = '';
-      } catch (error) {
-        console.warn('Incomplete JSON, waiting for next chunk...');
-      }
-    }
+    try {
+      while (true) {
+        const { value, done } = await reader.read();
+        if (done) break;
+
+        partialChunk += decoder.decode(value, { stream: true });
+
+        try {
+          const messages = partialChunk.split('\n');
+          for (const msg of messages) {
+            if (!msg.trim()) continue;
+            const json = JSON.parse(msg);
+            messageHandler(json);
+          }
+          partialChunk = '';
+        } catch (error) {
+          console.warn('Incomplete JSON, waiting for next chunk...');
+        }
+      }
+    } catch (e) {
+      console.error('Streaming error', e);
+      toast.error('Chat streaming failed.');
+      setStatusText('Chat completion failed.');
+      setLoading(false);
+      return;
+    }
+
+    // Fallback: if the stream ended without 'messageEnd' or explicit error,
+    // ensure the UI doesn't stay in a loading state indefinitely.
+    setStatusText(undefined);
+    setLoading(false);
   };
 
   const rewrite = (messageId: string) => {
@@ -605,6 +649,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
             setFileIds={setFileIds}
             files={files}
             setFiles={setFiles}
+            statusText={statusText}
           />
         </>
       ) : (
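The reader loop above re-parses the whole buffer whenever a chunk ends mid-line and only clears it once every line parses. A slightly tighter variant under the same NDJSON framing would keep only the trailing partial segment buffered (an illustrative sketch, not what this commit ships):

    // Split off complete lines; retain the trailing, possibly incomplete
    // segment so each line is parsed exactly once.
    let buffer = '';
    const feed = (chunk: string, onEvent: (e: unknown) => void) => {
      buffer += chunk;
      const lines = buffer.split('\n');
      buffer = lines.pop() ?? ''; // may be an incomplete JSON line
      for (const line of lines) {
        if (!line.trim()) continue;
        // complete lines parse cleanly if the server writes one object per line
        onEvent(JSON.parse(line));
      }
    };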
@@ -36,7 +36,7 @@ const EmptyChat = ({
     <div className="flex flex-col items-center justify-center min-h-screen max-w-screen-sm mx-auto p-2 space-y-4">
       <div className="flex flex-col items-center justify-center w-full space-y-8">
         <h2 className="text-black/70 dark:text-white/70 text-3xl font-medium -mt-8">
-          Research begins here.
+          Ask away...
         </h2>
         <EmptyChatMessageInput
           sendMessage={sendMessage}
@@ -1,3 +1,5 @@
+'use client';
+
 import { Check, ClipboardList } from 'lucide-react';
 import { Message } from '../ChatWindow';
 import { useState } from 'react';
@@ -13,11 +15,37 @@ const Copy = ({
 
   return (
     <button
-      onClick={() => {
-        const contentToCopy = `${initialMessage}${message.sources && message.sources.length > 0 && `\n\nCitations:\n${message.sources?.map((source: any, i: any) => `[${i + 1}] ${source.metadata.url}`).join(`\n`)}`}`;
-        navigator.clipboard.writeText(contentToCopy);
-        setCopied(true);
-        setTimeout(() => setCopied(false), 1000);
+      onClick={async () => {
+        const citations =
+          message.sources && message.sources.length > 0
+            ? `\n\nCitations:\n${message.sources
+                ?.map((source: any, i: number) => {
+                  const url = source?.metadata?.url ?? '';
+                  return `[${i + 1}] ${url}`;
+                })
+                .join('\n')}`
+            : '';
+        const contentToCopy = `${initialMessage}${citations}`;
+
+        try {
+          if (navigator?.clipboard && window.isSecureContext) {
+            await navigator.clipboard.writeText(contentToCopy);
+          } else {
+            const textArea = document.createElement('textarea');
+            textArea.value = contentToCopy;
+            textArea.style.position = 'fixed';
+            textArea.style.left = '-9999px';
+            document.body.appendChild(textArea);
+            textArea.focus();
+            textArea.select();
+            document.execCommand('copy');
+            document.body.removeChild(textArea);
+          }
+          setCopied(true);
+          setTimeout(() => setCopied(false), 1200);
+        } catch (err) {
+          console.error('Copy failed', err);
+        }
       }}
       className="p-2 text-black/70 dark:text-white/70 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white"
     >
@@ -42,6 +42,7 @@ const MessageBox = ({
   isLast,
   rewrite,
   sendMessage,
+  statusText,
 }: {
   message: Message;
   messageIndex: number;
@@ -51,6 +52,7 @@ const MessageBox = ({
   isLast: boolean;
   rewrite: (messageId: string) => void;
   sendMessage: (message: string) => void;
+  statusText?: string;
 }) => {
   const [parsedMessage, setParsedMessage] = useState(message.content);
   const [speechMessage, setSpeechMessage] = useState(message.content);
@@ -182,7 +184,7 @@ const MessageBox = ({
               size={20}
             />
             <h3 className="text-black dark:text-white font-medium text-xl">
-              Answer
+              {loading && isLast && statusText ? statusText : 'Answer'}
             </h3>
           </div>
 
@@ -1,9 +1,14 @@
-const MessageBoxLoading = () => {
+const MessageBoxLoading = ({ statusText }: { statusText?: string }) => {
   return (
     <div className="flex flex-col space-y-2 w-full lg:w-9/12 bg-light-primary dark:bg-dark-primary animate-pulse rounded-lg py-3">
       <div className="h-2 rounded-full w-full bg-light-secondary dark:bg-dark-secondary" />
       <div className="h-2 rounded-full w-9/12 bg-light-secondary dark:bg-dark-secondary" />
       <div className="h-2 rounded-full w-10/12 bg-light-secondary dark:bg-dark-secondary" />
+      {statusText && (
+        <div className="mt-3 text-xs text-black/70 dark:text-white/70 not-italic animate-none">
+          {statusText}
+        </div>
+      )}
     </div>
   );
 };
@@ -1,13 +1,10 @@
 'use client';
 import { ThemeProvider } from 'next-themes';
+import type { ReactNode } from 'react';
 
-const ThemeProviderComponent = ({
-  children,
-}: {
-  children: React.ReactNode;
-}) => {
+const ThemeProviderComponent = ({ children }: { children: ReactNode }) => {
   return (
-    <ThemeProvider attribute="class" enableSystem={false} defaultTheme="dark">
+    <ThemeProvider attribute="class" enableSystem={true} defaultTheme="system">
       {children}
     </ThemeProvider>
   );
@@ -1,44 +1,19 @@
 'use client';
 import { useTheme } from 'next-themes';
-import { useCallback, useEffect, useState } from 'react';
+import { useEffect, useState } from 'react';
+import type { ChangeEvent } from 'react';
 import Select from '../ui/Select';
 
 type Theme = 'dark' | 'light' | 'system';
 
 const ThemeSwitcher = ({ className }: { className?: string }) => {
   const [mounted, setMounted] = useState(false);
 
   const { theme, setTheme } = useTheme();
 
-  const isTheme = useCallback((t: Theme) => t === theme, [theme]);
-
-  const handleThemeSwitch = (theme: Theme) => {
-    setTheme(theme);
-  };
-
   useEffect(() => {
     setMounted(true);
   }, []);
 
-  useEffect(() => {
-    if (isTheme('system')) {
-      const preferDarkScheme = window.matchMedia(
-        '(prefers-color-scheme: dark)',
-      );
-
-      const detectThemeChange = (event: MediaQueryListEvent) => {
-        const theme: Theme = event.matches ? 'dark' : 'light';
-        setTheme(theme);
-      };
-
-      preferDarkScheme.addEventListener('change', detectThemeChange);
-
-      return () => {
-        preferDarkScheme.removeEventListener('change', detectThemeChange);
-      };
-    }
-  }, [isTheme, setTheme, theme]);
-
   // Avoid Hydration Mismatch
   if (!mounted) {
     return null;
@@ -48,8 +23,9 @@ const ThemeSwitcher = ({ className }: { className?: string }) => {
     <Select
       className={className}
       value={theme}
-      onChange={(e) => handleThemeSwitch(e.target.value as Theme)}
+      onChange={(e: ChangeEvent<HTMLSelectElement>) => setTheme(e.target.value as Theme)}
       options={[
         { value: 'system', label: 'System' },
         { value: 'light', label: 'Light' },
         { value: 'dark', label: 'Dark' },
       ]}
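With enableSystem turned on, next-themes tracks the OS preference itself, which is why the manual matchMedia listener above could be deleted. When a component needs the concrete value while 'system' is selected, next-themes exposes resolvedTheme; a small sketch ('ThemeBadge' is illustrative, not a component from this commit):

    import { useTheme } from 'next-themes';

    const ThemeBadge = () => {
      const { theme, resolvedTheme } = useTheme();
      // theme may be 'system'; resolvedTheme is 'light' or 'dark' once mounted
      return <span>{theme === 'system' ? `system (${resolvedTheme})` : theme}</span>;
    };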
@@ -45,6 +45,7 @@ interface Config {
       API_URL: string;
       API_KEY: string;
       MODEL_NAME: string;
+      EMBEDDING_MODEL_NAME: string;
     };
   };
   API_ENDPOINTS: {
@@ -99,6 +100,9 @@ export const getCustomOpenaiApiUrl = () =>
 export const getCustomOpenaiModelName = () =>
   loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
 
+export const getCustomOpenaiEmbeddingModelName = () =>
+  loadConfig().MODELS.CUSTOM_OPENAI.EMBEDDING_MODEL_NAME;
+
 export const getLMStudioApiEndpoint = () =>
   loadConfig().MODELS.LM_STUDIO.API_URL;
@@ -10,8 +10,9 @@ import {
   getCustomOpenaiApiKey,
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
+  getCustomOpenaiEmbeddingModelName,
 } from '../config';
-import { ChatOpenAI } from '@langchain/openai';
+import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
 import {
   loadOllamaChatModels,
   loadOllamaEmbeddingModels,
@@ -143,5 +144,28 @@ export const getAvailableEmbeddingModelProviders = async () => {
     }
   }
 
+  const customOpenAiApiKey = getCustomOpenaiApiKey();
+  const customOpenAiApiUrl = getCustomOpenaiApiUrl();
+  const customOpenAiEmbeddingModelName = getCustomOpenaiEmbeddingModelName();
+
+  models['custom_openai'] = {
+    ...(customOpenAiApiKey &&
+    customOpenAiApiUrl &&
+    customOpenAiEmbeddingModelName
+      ? {
+          [customOpenAiEmbeddingModelName]: {
+            displayName: customOpenAiEmbeddingModelName,
+            model: new OpenAIEmbeddings({
+              apiKey: customOpenAiApiKey,
+              modelName: customOpenAiEmbeddingModelName,
+              configuration: {
+                baseURL: customOpenAiApiUrl,
+              },
+            }) as unknown as Embeddings,
+          },
+        }
+      : {}),
+  };
+
   return models;
 };
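With EMBEDDING_MODEL_NAME set alongside API_URL and API_KEY, the custom_openai entry behaves like any other embedding provider. A hedged usage sketch against an OpenAI-compatible endpoint (the key, model name, and baseURL below are placeholders, not values from this commit):

    import { OpenAIEmbeddings } from '@langchain/openai';

    const embeddings = new OpenAIEmbeddings({
      apiKey: 'sk-placeholder',                 // CUSTOM_OPENAI.API_KEY
      modelName: 'nomic-embed-text',            // CUSTOM_OPENAI.EMBEDDING_MODEL_NAME
      configuration: { baseURL: 'http://localhost:11434/v1' }, // CUSTOM_OPENAI.API_URL
    });

    const vector = await embeddings.embedQuery('hello world'); // number[]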