feat: Show model stats so you know which model was used in a chat. Add a setting for automatically retrieving suggestions.
parent abf9dbb8ba · commit 86a3c59acd
7 changed files with 326 additions and 31 deletions
@@ -92,12 +92,24 @@ const handleEmitterEvents = async (
       sources = parsedData.data;
     }
   });
+  let modelStats = {
+    modelName: '',
+  };
+
+  stream.on('stats', (data) => {
+    const parsedData = JSON.parse(data);
+    if (parsedData.type === 'modelStats') {
+      modelStats = parsedData.data;
+    }
+  });
+
   stream.on('end', () => {
     writer.write(
       encoder.encode(
         JSON.stringify({
           type: 'messageEnd',
           messageId: aiMessageId,
+          modelStats: modelStats,
         }) + '\n',
       ),
     );

@@ -109,10 +121,9 @@ const handleEmitterEvents = async (
         chatId: chatId,
         messageId: aiMessageId,
         role: 'assistant',
-        metadata: JSON.stringify({
-          createdAt: new Date(),
-          ...(sources && sources.length > 0 && { sources }),
-        }),
+        metadata: {
+          modelStats: modelStats,
+        },
       })
       .execute();
   });

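For reference, this handler writes newline-delimited JSON to the response, so a consumer only needs to split on newlines and branch on `type`. A minimal client-side sketch of picking up `modelStats` from the `messageEnd` frame (the function name and logging are illustrative, not part of this commit):

```ts
// Sketch: read an NDJSON chat stream and report which model produced the answer.
async function readChatStream(body: ReadableStream<Uint8Array>) {
  const reader = body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    let newline: number;
    while ((newline = buffer.indexOf('\n')) !== -1) {
      const line = buffer.slice(0, newline).trim();
      buffer = buffer.slice(newline + 1);
      if (!line) continue;

      const event = JSON.parse(line);
      if (event.type === 'messageEnd') {
        // modelStats is attached by the server just before the stream closes.
        console.log('Answered by:', event.modelStats?.modelName ?? 'Unknown');
      }
    }
  }
}
```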
@@ -5,7 +5,7 @@ import { useEffect, useState } from 'react';
 import { cn } from '@/lib/utils';
 import { Switch } from '@headlessui/react';
 import ThemeSwitcher from '@/components/theme/Switcher';
-import { ImagesIcon, VideoIcon } from 'lucide-react';
+import { ImagesIcon, VideoIcon, Layers3 } from 'lucide-react';
 import Link from 'next/link';
 import { PROVIDER_METADATA } from '@/lib/providers';

@@ -147,6 +147,7 @@ const Page = () => {
   const [isLoading, setIsLoading] = useState(false);
   const [automaticImageSearch, setAutomaticImageSearch] = useState(false);
   const [automaticVideoSearch, setAutomaticVideoSearch] = useState(false);
+  const [automaticSuggestions, setAutomaticSuggestions] = useState(true);
   const [systemInstructions, setSystemInstructions] = useState<string>('');
   const [savingStates, setSavingStates] = useState<Record<string, boolean>>({});
   const [contextWindowSize, setContextWindowSize] = useState(2048);

@@ -214,6 +215,9 @@ const Page = () => {
       setAutomaticVideoSearch(
         localStorage.getItem('autoVideoSearch') === 'true',
       );
+      setAutomaticSuggestions(
+        localStorage.getItem('autoSuggestions') !== 'false', // default to true if not set
+      );
       const storedContextWindow = parseInt(
         localStorage.getItem('ollamaContextWindow') ?? '2048',
       );

@@ -372,6 +376,8 @@ const Page = () => {
       localStorage.setItem('autoImageSearch', value.toString());
     } else if (key === 'automaticVideoSearch') {
       localStorage.setItem('autoVideoSearch', value.toString());
+    } else if (key === 'automaticSuggestions') {
+      localStorage.setItem('autoSuggestions', value.toString());
     } else if (key === 'chatModelProvider') {
       localStorage.setItem('chatModelProvider', value);
     } else if (key === 'chatModel') {

@@ -526,6 +532,48 @@ const Page = () => {
                   />
                 </Switch>
               </div>
+
+              <div className="flex items-center justify-between p-3 bg-light-secondary dark:bg-dark-secondary rounded-lg hover:bg-light-200 dark:hover:bg-dark-200 transition-colors">
+                <div className="flex items-center space-x-3">
+                  <div className="p-2 bg-light-200 dark:bg-dark-200 rounded-lg">
+                    <Layers3
+                      size={18}
+                      className="text-black/70 dark:text-white/70"
+                    />
+                  </div>
+                  <div>
+                    <p className="text-sm text-black/90 dark:text-white/90 font-medium">
+                      Automatic Suggestions
+                    </p>
+                    <p className="text-xs text-black/60 dark:text-white/60 mt-0.5">
+                      Automatically show related suggestions after
+                      responses
+                    </p>
+                  </div>
+                </div>
+                <Switch
+                  checked={automaticSuggestions}
+                  onChange={(checked) => {
+                    setAutomaticSuggestions(checked);
+                    saveConfig('automaticSuggestions', checked);
+                  }}
+                  className={cn(
+                    automaticSuggestions
+                      ? 'bg-[#24A0ED]'
+                      : 'bg-light-200 dark:bg-dark-200',
+                    'relative inline-flex h-6 w-11 items-center rounded-full transition-colors focus:outline-none',
+                  )}
+                >
+                  <span
+                    className={cn(
+                      automaticSuggestions
+                        ? 'translate-x-6'
+                        : 'translate-x-1',
+                      'inline-block h-4 w-4 transform rounded-full bg-white transition-transform',
+                    )}
+                  />
+                </Switch>
+              </div>
             </div>
           </SettingsSection>

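The setting is persisted as the strings 'true' / 'false', and a missing key counts as enabled. A small pair of helpers expressing that default-on convention (the helper names are mine, not part of the commit):

```ts
// Sketch: same keys and semantics as the settings page above.
// A missing 'autoSuggestions' key reads as true ("default to true if not set").
const readAutoSuggestions = (): boolean =>
  localStorage.getItem('autoSuggestions') !== 'false';

const writeAutoSuggestions = (enabled: boolean): void => {
  localStorage.setItem('autoSuggestions', enabled.toString());
};
```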
@@ -20,7 +20,14 @@ const Chat = ({
   setOptimizationMode,
 }: {
   messages: Message[];
-  sendMessage: (message: string) => void;
+  sendMessage: (
+    message: string,
+    options?: {
+      messageId?: string;
+      rewriteIndex?: number;
+      suggestions?: string[];
+    },
+  ) => void;
   loading: boolean;
   messageAppeared: boolean;
   rewrite: (messageId: string) => void;

@@ -13,6 +13,14 @@ import { Settings } from 'lucide-react';
 import Link from 'next/link';
 import NextError from 'next/error';

+export type ModelStats = {
+  modelName: string;
+};
+
+export type MessageMetadata = {
+  modelStats?: ModelStats;
+};
+
 export type Message = {
   messageId: string;
   chatId: string;

@@ -21,6 +29,7 @@ export type Message = {
   role: 'user' | 'assistant';
   suggestions?: string[];
   sources?: Document[];
+  metadata?: MessageMetadata;
 };

 export interface File {

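With the optional `metadata` field in place, a hydrated assistant message might look roughly like this (values invented for illustration; fields not shown in this hunk are omitted, hence `Partial`):

```ts
const example: Partial<Message> = {
  messageId: 'a1b2c3d4e5f6aa',
  chatId: 'chat-42',
  role: 'assistant',
  sources: [],
  suggestions: ['Tell me more'],
  metadata: {
    modelStats: { modelName: 'gpt-4o-mini' },
  },
};
```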
@@ -207,7 +216,6 @@ const loadMessages = async (
   const messages = data.messages.map((msg: any) => {
     return {
       ...msg,
-      ...JSON.parse(msg.metadata),
     };
   }) as Message[];

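Since rows written before this commit stored `metadata` as a JSON string while the new insert path stores an object, a defensive normalizer is one way to keep `loadMessages` tolerant of both shapes (this helper is a suggestion, not something the commit adds):

```ts
// Sketch: accept either a serialized string (older rows) or an already-parsed object.
const normalizeMetadata = (raw: unknown): Record<string, unknown> => {
  if (typeof raw === 'string') {
    try {
      return JSON.parse(raw);
    } catch {
      return {};
    }
  }
  return (raw as Record<string, unknown>) ?? {};
};

// e.g. inside the map: return { ...msg, ...normalizeMetadata(msg.metadata) };
```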
@@ -339,9 +347,25 @@ const ChatWindow = ({ id }: { id?: string }) => {

   const sendMessage = async (
     message: string,
-    messageId?: string,
-    options?: { rewriteIndex?: number },
+    options?: {
+      messageId?: string;
+      rewriteIndex?: number;
+      suggestions?: string[];
+    },
   ) => {
+    // Special case: If we're just updating an existing message with suggestions
+    if (options?.suggestions && options.messageId) {
+      setMessages((prev) =>
+        prev.map((msg) => {
+          if (msg.messageId === options.messageId) {
+            return { ...msg, suggestions: options.suggestions };
+          }
+          return msg;
+        }),
+      );
+      return;
+    }
+
     if (loading) return;
     if (!isConfigReady) {
       toast.error('Cannot send message before the configuration is ready');

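The widened signature now serves three distinct call shapes (identifiers such as `existingMessageId` are placeholders):

```ts
// 1. Normal send: IDs are generated internally.
sendMessage('What is quantum entanglement?');

// 2. Rewrite: reuse the assistant message's ID and pass its index.
sendMessage('What is quantum entanglement?', {
  messageId: existingMessageId,
  rewriteIndex: 4,
});

// 3. Attach suggestions to an already-rendered message; the early return above
//    means no new request is made.
sendMessage('', {
  messageId: existingMessageId,
  suggestions: ['Explain Bell inequalities', 'How is it used in QKD?'],
});
```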
@@ -369,7 +393,8 @@ const ChatWindow = ({ id }: { id?: string }) => {
       setChatHistory(messageChatHistory);
     }

-    messageId = messageId ?? crypto.randomBytes(7).toString('hex');
+    const messageId =
+      options?.messageId ?? crypto.randomBytes(7).toString('hex');

     setMessages((prevMessages) => [
       ...prevMessages,

@@ -419,6 +444,12 @@ const ChatWindow = ({ id }: { id?: string }) => {
             role: 'assistant',
             sources: sources,
             createdAt: new Date(),
+            metadata: {
+              // modelStats will be added when we receive messageEnd event
+              modelStats: {
+                modelName: data.modelName,
+              },
+            },
           },
         ]);
         added = true;

@@ -445,12 +476,29 @@ const ChatWindow = ({ id }: { id?: string }) => {
           ['assistant', recievedMessage],
         ]);

+        // Always update the message, adding modelStats if available
+        setMessages((prev) =>
+          prev.map((message) => {
+            if (message.messageId === data.messageId) {
+              return {
+                ...message,
+                metadata: {
+                  // Include model stats if available, otherwise null
+                  modelStats: data.modelStats || null,
+                },
+              };
+            }
+            return message;
+          }),
+        );
+
         setLoading(false);

         const lastMsg = messagesRef.current[messagesRef.current.length - 1];

         const autoImageSearch = localStorage.getItem('autoImageSearch');
         const autoVideoSearch = localStorage.getItem('autoVideoSearch');
+        const autoSuggestions = localStorage.getItem('autoSuggestions');

         if (autoImageSearch === 'true') {
           document

@@ -468,7 +516,8 @@ const ChatWindow = ({ id }: { id?: string }) => {
           lastMsg.role === 'assistant' &&
           lastMsg.sources &&
           lastMsg.sources.length > 0 &&
-          !lastMsg.suggestions
+          !lastMsg.suggestions &&
+          autoSuggestions !== 'false' // Default to true if not set
         ) {
           const suggestions = await getSuggestions(messagesRef.current);
           setMessages((prev) =>

@@ -550,7 +599,8 @@ const ChatWindow = ({ id }: { id?: string }) => {
       (msg) => msg.messageId === messageId,
     );
     if (messageIndex == -1) return;
-    sendMessage(messages[messageIndex - 1].content, messageId, {
+    sendMessage(messages[messageIndex - 1].content, {
+      messageId: messageId,
       rewriteIndex: messageIndex,
     });
   };

src/components/MessageActions/ModelInfo.tsx (new file, 72 lines)
@@ -0,0 +1,72 @@
+'use client';
+
+import React, { useState, useEffect, useRef } from 'react';
+import { Info } from 'lucide-react';
+import { ModelStats } from '../ChatWindow';
+import { cn } from '@/lib/utils';
+
+interface ModelInfoButtonProps {
+  modelStats: ModelStats | null;
+}
+
+const ModelInfoButton: React.FC<ModelInfoButtonProps> = ({ modelStats }) => {
+  const [showPopover, setShowPopover] = useState(false);
+  const popoverRef = useRef<HTMLDivElement>(null);
+  const buttonRef = useRef<HTMLButtonElement>(null);
+
+  // Always render, using "Unknown" as fallback if model info isn't available
+  const modelName = modelStats?.modelName || 'Unknown';
+
+  useEffect(() => {
+    const handleClickOutside = (event: MouseEvent) => {
+      if (
+        popoverRef.current &&
+        !popoverRef.current.contains(event.target as Node) &&
+        buttonRef.current &&
+        !buttonRef.current.contains(event.target as Node)
+      ) {
+        setShowPopover(false);
+      }
+    };
+
+    document.addEventListener('mousedown', handleClickOutside);
+    return () => {
+      document.removeEventListener('mousedown', handleClickOutside);
+    };
+  }, []);
+
+  return (
+    <div className="relative">
+      <button
+        ref={buttonRef}
+        className="p-1 ml-1 text-black/50 dark:text-white/50 rounded-full hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white"
+        onClick={() => setShowPopover(!showPopover)}
+        aria-label="Show model information"
+      >
+        <Info size={14} />
+      </button>
+      {showPopover && (
+        <div
+          ref={popoverRef}
+          className="absolute z-10 left-6 top-0 w-64 rounded-md shadow-lg bg-white dark:bg-dark-secondary border border-light-200 dark:border-dark-200"
+        >
+          <div className="py-2 px-3">
+            <h4 className="text-sm font-medium mb-2 text-black dark:text-white">
+              Model Information
+            </h4>
+            <div className="space-y-1 text-xs">
+              <div className="flex justify-between">
+                <span className="text-black/70 dark:text-white/70">Model:</span>
+                <span className="text-black dark:text-white font-medium">
+                  {modelName}
+                </span>
+              </div>
+            </div>
+          </div>
+        </div>
+      )}
+    </div>
+  );
+};
+
+export default ModelInfoButton;

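Usage mirrors what MessageBox does later in this commit: the button owns its popover and click-outside handling, so a parent only passes the stats. A minimal sketch (import paths assumed from the project layout):

```tsx
import ModelInfoButton from '@/components/MessageActions/ModelInfo';
import type { Message } from '@/components/ChatWindow';

// Sketch: render the info button next to the answer heading when stats exist.
const AnswerHeading = ({ message }: { message: Message }) => (
  <div className="flex flex-row items-center space-x-2">
    <h3 className="text-xl font-medium">Answer</h3>
    {message.metadata?.modelStats && (
      <ModelInfoButton modelStats={message.metadata.modelStats} />
    )}
  </div>
);

export default AnswerHeading;
```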
@@ -4,6 +4,7 @@
 import React, { MutableRefObject, useEffect, useState } from 'react';
 import { Message } from './ChatWindow';
 import { cn } from '@/lib/utils';
+import { getSuggestions } from '@/lib/actions';
 import {
   BookCopy,
   Disc3,

@@ -11,10 +12,12 @@ import {
   StopCircle,
   Layers3,
   Plus,
+  Sparkles,
 } from 'lucide-react';
 import Markdown, { MarkdownToJSX } from 'markdown-to-jsx';
 import Copy from './MessageActions/Copy';
 import Rewrite from './MessageActions/Rewrite';
+import ModelInfoButton from './MessageActions/ModelInfo';
 import MessageSources from './MessageSources';
 import SearchImages from './SearchImages';
 import SearchVideos from './SearchVideos';

@@ -42,10 +45,36 @@ const MessageBox = ({
   dividerRef?: MutableRefObject<HTMLDivElement | null>;
   isLast: boolean;
   rewrite: (messageId: string) => void;
-  sendMessage: (message: string) => void;
+  sendMessage: (
+    message: string,
+    options?: {
+      messageId?: string;
+      rewriteIndex?: number;
+      suggestions?: string[];
+    },
+  ) => void;
 }) => {
   const [parsedMessage, setParsedMessage] = useState(message.content);
   const [speechMessage, setSpeechMessage] = useState(message.content);
+  const [loadingSuggestions, setLoadingSuggestions] = useState(false);
+  const [autoSuggestions, setAutoSuggestions] = useState(
+    localStorage.getItem('autoSuggestions'),
+  );
+
+  const handleLoadSuggestions = async () => {
+    if (loadingSuggestions || (message?.suggestions && message.suggestions.length > 0)) return;
+
+    setLoadingSuggestions(true);
+    try {
+      const suggestions = await getSuggestions([...history]);
+      // We need to update the message.suggestions property through parent component
+      sendMessage('', { messageId: message.messageId, suggestions });
+    } catch (error) {
+      console.error('Error loading suggestions:', error);
+    } finally {
+      setLoadingSuggestions(false);
+    }
+  };
+
   useEffect(() => {
     const citationRegex = /\[([^\]]+)\]/g;

@@ -105,6 +134,18 @@ const MessageBox = ({
     setParsedMessage(processedMessage);
   }, [message.content, message.sources, message.role]);

+  useEffect(() => {
+    const handleStorageChange = () => {
+      setAutoSuggestions(localStorage.getItem('autoSuggestions'));
+    };
+
+    window.addEventListener('storage', handleStorageChange);
+
+    return () => {
+      window.removeEventListener('storage', handleStorageChange);
+    };
+  }, []);
+
   const { speechStatus, start, stop } = useSpeech({ text: speechMessage });

   const markdownOverrides: MarkdownToJSX.Options = {

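One caveat: the `storage` event only fires in other tabs of the same origin, never in the tab that called `localStorage.setItem`, so a toggle flipped on the settings page in the same tab won't reach this listener on its own. A hedged workaround is to dispatch a same-tab signal alongside the write (the event name here is invented):

```ts
// Sketch: notify listeners in the current tab as well as other tabs.
const setAutoSuggestions = (enabled: boolean) => {
  localStorage.setItem('autoSuggestions', enabled.toString());
  window.dispatchEvent(
    new CustomEvent('autoSuggestions-changed', { detail: enabled }),
  );
};
```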
@@ -149,6 +190,7 @@ const MessageBox = ({
               </div>
             )}
             <div className="flex flex-col space-y-2">
+              {' '}
              <div className="flex flex-row items-center space-x-2">
                 <Disc3
                   className={cn(

@@ -160,8 +202,10 @@ const MessageBox = ({
                 <h3 className="text-black dark:text-white font-medium text-xl">
                   Answer
                 </h3>
+                {message.metadata?.modelStats && (
+                  <ModelInfoButton modelStats={message.metadata.modelStats} />
+                )}
               </div>

               <Markdown
                 className={cn(
                   'prose prose-h1:mb-3 prose-h2:mb-2 prose-h2:mt-6 prose-h2:font-[800] prose-h3:mt-4 prose-h3:mb-1.5 prose-h3:font-[600] dark:prose-invert prose-p:leading-relaxed prose-pre:p-0 font-[400]',

@@ -200,18 +244,36 @@ const MessageBox = ({
                 </div>
               </div>
             )}
-            {isLast &&
-              message.suggestions &&
-              message.suggestions.length > 0 &&
-              message.role === 'assistant' &&
-              !loading && (
-                <>
-                  <div className="h-px w-full bg-light-secondary dark:bg-dark-secondary" />
-                  <div className="flex flex-col space-y-3 text-black dark:text-white">
-                    <div className="flex flex-row items-center space-x-2 mt-4">
-                      <Layers3 />
-                      <h3 className="text-xl font-medium">Related</h3>
-                    </div>
+            {isLast && message.role === 'assistant' && !loading && (
+              <>
+                <div className="h-px w-full bg-light-secondary dark:bg-dark-secondary" />
+                <div className="flex flex-col space-y-3 text-black dark:text-white">
+                  <div className="flex flex-row items-center space-x-2 mt-4">
+                    <Layers3 />
+                    <h3 className="text-xl font-medium">Related</h3>{' '}
+                    {(!autoSuggestions || autoSuggestions === 'false') && (!message.suggestions ||
+                      message.suggestions.length === 0) ? (
+                      <div className="bg-light-secondary dark:bg-dark-secondary">
+                        <button
+                          onClick={handleLoadSuggestions}
+                          disabled={loadingSuggestions}
+                          className="px-4 py-2 flex flex-row items-center justify-center space-x-2 rounded-lg bg-light-secondary dark:bg-dark-secondary hover:bg-light-200 dark:hover:bg-dark-200 transition duration-200 text-black/70 dark:text-white/70 hover:text-black dark:hover:text-white"
+                        >
+                          {loadingSuggestions ? (
+                            <div className="w-4 h-4 border-2 border-t-transparent border-gray-400 dark:border-gray-500 rounded-full animate-spin" />
+                          ) : (
+                            <Sparkles size={16} />
+                          )}
+                          <span>
+                            {loadingSuggestions
+                              ? 'Loading suggestions...'
+                              : 'Load suggestions'}
+                          </span>
+                        </button>
+                      </div>
+                    ) : null}
+                  </div>
+                  {message.suggestions && message.suggestions.length > 0 ? (
                     <div className="flex flex-col space-y-3">
                       {message.suggestions.map((suggestion, i) => (
                         <div

@@ -236,9 +298,10 @@ const MessageBox = ({
                         </div>
                       ))}
                     </div>
-                  </div>
-                </>
-              )}
+                  ) : null}
+                </div>
+              </>
+            )}
             </div>
           </div>
           <div className="lg:sticky lg:top-20 flex flex-col items-center space-y-3 w-full lg:w-3/12 z-30 h-full pb-4">

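The condition guarding the manual Load suggestions button packs two checks into one JSX expression; naming it makes the intent easier to scan (a readability sketch only, not part of the commit):

```ts
// Sketch: mirrors the JSX condition above — the autoSuggestions key is absent or
// 'false', and this message has no suggestions loaded yet.
const shouldOfferManualSuggestions =
  (!autoSuggestions || autoSuggestions === 'false') &&
  (!message.suggestions || message.suggestions.length === 0);
```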
@@ -434,13 +434,13 @@ class MetaSearchAgent implements MetaSearchAgentType {
   private async handleStream(
     stream: AsyncGenerator<StreamEvent, any, any>,
     emitter: eventEmitter,
+    llm: BaseChatModel,
   ) {
     for await (const event of stream) {
       if (
         event.event === 'on_chain_end' &&
         event.name === 'FinalSourceRetriever'
       ) {
-        ``;
         emitter.emit(
           'data',
           JSON.stringify({ type: 'sources', data: event.data.output }),

@@ -459,6 +459,50 @@ class MetaSearchAgent implements MetaSearchAgentType {
         event.event === 'on_chain_end' &&
         event.name === 'FinalResponseGenerator'
       ) {
+        // Get model name safely with better detection
+        let modelName = 'Unknown';
+        try {
+          // @ts-ignore - Different LLM implementations have different properties
+          if (llm.modelName) {
+            // @ts-ignore
+            modelName = llm.modelName;
+            // @ts-ignore
+          } else if (llm._llm && llm._llm.modelName) {
+            // @ts-ignore
+            modelName = llm._llm.modelName;
+            // @ts-ignore
+          } else if (llm.model && llm.model.modelName) {
+            // @ts-ignore
+            modelName = llm.model.modelName;
+          } else if ('model' in llm) {
+            // @ts-ignore
+            const model = llm.model;
+            if (typeof model === 'string') {
+              modelName = model;
+              // @ts-ignore
+            } else if (model && model.modelName) {
+              // @ts-ignore
+              modelName = model.modelName;
+            }
+          } else if (llm.constructor && llm.constructor.name) {
+            // Last resort: use the class name
+            modelName = llm.constructor.name;
+          }
+        } catch (e) {
+          console.error('Failed to get model name:', e);
+        }
+
+        // Send model info before ending
+        emitter.emit(
+          'stats',
+          JSON.stringify({
+            type: 'modelStats',
+            data: {
+              modelName,
+            },
+          }),
+        );
+
         emitter.emit('end');
       }
     }

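The chain of `@ts-ignore` probes can be collapsed with optional chaining while keeping the same lookup order and the class-name fallback; a close equivalent as a sketch (not what the commit ships):

```ts
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';

// Sketch: probe the common fields different chat-model wrappers expose.
const getModelName = (llm: BaseChatModel): string => {
  const anyLlm = llm as any;
  const candidate =
    anyLlm.modelName ??
    anyLlm._llm?.modelName ??
    anyLlm.model?.modelName ??
    (typeof anyLlm.model === 'string' ? anyLlm.model : undefined) ??
    llm.constructor?.name;
  return typeof candidate === 'string' && candidate.length > 0
    ? candidate
    : 'Unknown';
};
```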
@@ -493,7 +537,7 @@ class MetaSearchAgent implements MetaSearchAgentType {
       },
     );

-    this.handleStream(stream, emitter);
+    this.handleStream(stream, emitter, llm);

     return emitter;
   }