feat(search): Add ability to set default provider, model, and optimization mode when coming from a search query
parent b392aa2c21
commit 37c93c3c9b

17 changed files with 603 additions and 319 deletions
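At a glance: the settings page gains a "Default Search Settings" section that stores three per-search defaults in localStorage (searchOptimizationMode, searchChatModelProvider, searchChatModel), and ChatWindow applies them before sending the initial message whenever the app is opened with a ?q= search query. The sketch below is a simplified illustration of that contract, not the component code from the diff; applySearchDefaults is a hypothetical helper name.

// Simplified sketch of the localStorage contract introduced by this commit.
// Not the component code itself; applySearchDefaults is a hypothetical helper.

interface SearchDefaults {
  optimizationMode?: string;
  chatModel?: { provider: string; name: string };
}

// Mirrors what ChatWindow does when it sees an initial ?q= message:
// read the saved search defaults and promote them to the active settings.
function applySearchDefaults(): SearchDefaults {
  const mode = localStorage.getItem('searchOptimizationMode');
  const provider = localStorage.getItem('searchChatModelProvider');
  const model = localStorage.getItem('searchChatModel');

  const defaults: SearchDefaults = {};

  // Only 'speed' and 'agent' are accepted, matching the validation in the diff.
  if (mode === 'speed' || mode === 'agent') {
    defaults.optimizationMode = mode;
    localStorage.setItem('optimizationMode', mode);
  }

  // Both provider and model must be present for the override to take effect.
  if (provider && model) {
    defaults.chatModel = { provider, name: model };
    localStorage.setItem('chatModelProvider', provider);
    localStorage.setItem('chatModel', model);
  }

  return defaults;
}

After the overrides are applied and the initial message is sent, the q parameter is stripped from the URL so the query is not re-executed on reload; if no defaults are stored, the global settings are used unchanged.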
@@ -10,6 +10,7 @@ import {
PlusCircle,
Save,
X,
RotateCcw,
} from 'lucide-react';
import { useEffect, useState, useRef } from 'react';
import { cn } from '@/lib/utils';

@@ -18,6 +19,8 @@ import ThemeSwitcher from '@/components/theme/Switcher';
import { ImagesIcon, VideoIcon, Layers3 } from 'lucide-react';
import Link from 'next/link';
import { PROVIDER_METADATA } from '@/lib/providers';
import Optimization from '@/components/MessageInputActions/Optimization';
import ModelSelector from '@/components/MessageInputActions/ModelSelector';

interface SettingsType {
chatModelProviders: {

@@ -242,6 +245,13 @@ export default function SettingsPage() {
);
const [isAddingNewPrompt, setIsAddingNewPrompt] = useState(false);

// Default Search Settings state variables
const [searchOptimizationMode, setSearchOptimizationMode] =
useState<string>('');
const [searchChatModelProvider, setSearchChatModelProvider] =
useState<string>('');
const [searchChatModel, setSearchChatModel] = useState<string>('');

useEffect(() => {
const fetchConfig = async () => {
setIsLoading(true);

@@ -311,6 +321,29 @@ export default function SettingsPage() {
fetchConfig();

// Load search settings from localStorage
const loadSearchSettings = () => {
const storedSearchOptimizationMode = localStorage.getItem(
'searchOptimizationMode',
);
const storedSearchChatModelProvider = localStorage.getItem(
'searchChatModelProvider',
);
const storedSearchChatModel = localStorage.getItem('searchChatModel');

if (storedSearchOptimizationMode) {
setSearchOptimizationMode(storedSearchOptimizationMode);
}
if (storedSearchChatModelProvider) {
setSearchChatModelProvider(storedSearchChatModelProvider);
}
if (storedSearchChatModel) {
setSearchChatModel(storedSearchChatModel);
}
};

loadSearchSettings();

const fetchSystemPrompts = async () => {
setIsLoading(true);
try {

@@ -492,6 +525,10 @@ export default function SettingsPage() {
}
};

const saveSearchSetting = (key: string, value: string) => {
localStorage.setItem(key, value);
};

const handleAddOrUpdateSystemPrompt = async () => {
const currentPrompt = editingPrompt || {
name: newPromptName,

@@ -997,6 +1034,79 @@ export default function SettingsPage() {
</div>
</SettingsSection>

<SettingsSection
title="Default Search Settings"
tooltip='Select the settings that will be used when navigating to the site with a search query, such as "example.com/search?q=your+query".\nThese settings will override the global settings for search queries.\n\nIf settings are not specified, the global settings will be used.'
>
<div className="flex flex-col space-y-4">
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Optimization Mode
</p>
<div className="flex justify-start items-center space-x-2">
<Optimization
optimizationMode={searchOptimizationMode}
setOptimizationMode={(mode) => {
setSearchOptimizationMode(mode);
saveSearchSetting('searchOptimizationMode', mode);
}}
showTitle={true}
/>
{searchOptimizationMode && (
<button
onClick={() => {
setSearchOptimizationMode('');
localStorage.removeItem('searchOptimizationMode');
}}
className="p-1.5 rounded-md hover:bg-light-200 dark:hover:bg-dark-200 text-black/50 dark:text-white/50 hover:text-black/80 dark:hover:text-white/80 transition-colors"
title="Reset optimization mode"
>
<RotateCcw size={16} />
</button>
)}
</div>
</div>

<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Chat Model
</p>
<div className="flex justify-start items-center space-x-2">
<ModelSelector
selectedModel={{
provider: searchChatModelProvider,
model: searchChatModel,
}}
setSelectedModel={(model) => {
setSearchChatModelProvider(model.provider);
setSearchChatModel(model.model);
saveSearchSetting(
'searchChatModelProvider',
model.provider,
);
saveSearchSetting('searchChatModel', model.model);
}}
truncateModelName={false}
/>
{(searchChatModelProvider || searchChatModel) && (
<button
onClick={() => {
setSearchChatModelProvider('');
setSearchChatModel('');
localStorage.removeItem('searchChatModelProvider');
localStorage.removeItem('searchChatModel');
}}
className="p-1.5 rounded-md hover:bg-light-200 dark:hover:bg-dark-200 text-black/50 dark:text-white/50 hover:text-black/80 dark:hover:text-white/80 transition-colors"
title="Reset chat model"
>
<RotateCcw size={16} />
</button>
)}
</div>
</div>
</div>
</SettingsSection>

<SettingsSection title="Model Settings">
{config.chatModelProviders && (
<div className="flex flex-col space-y-4">

@@ -7,7 +7,7 @@ import Chat from './Chat';
import EmptyChat from './EmptyChat';
import crypto from 'crypto';
import { toast } from 'sonner';
import { useSearchParams } from 'next/navigation';
import { useSearchParams, useRouter } from 'next/navigation';
import { getSuggestions } from '@/lib/actions';
import { Settings } from 'lucide-react';
import Link from 'next/link';

@@ -250,6 +250,7 @@ const loadMessages = async (

const ChatWindow = ({ id }: { id?: string }) => {
const searchParams = useSearchParams();
const router = useRouter();
const initialMessage = searchParams.get('q');

const [chatId, setChatId] = useState<string | undefined>(id);

@@ -585,6 +586,9 @@ const ChatWindow = ({ id }: { id?: string }) => {
currentChatModelProvider || chatModelProvider.provider;
const modelName = currentChatModel || chatModelProvider.name;

const currentOptimizationMode =
localStorage.getItem('optimizationMode') || optimizationMode;

const res = await fetch('/api/chat', {
method: 'POST',
headers: {

@@ -600,7 +604,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
chatId: chatId!,
files: fileIds,
focusMode: focusMode,
optimizationMode: optimizationMode,
optimizationMode: currentOptimizationMode,
history: messageChatHistory,
chatModel: {
name: modelName,

@@ -674,7 +678,42 @@ const ChatWindow = ({ id }: { id?: string }) => {

useEffect(() => {
if (isReady && initialMessage && isConfigReady) {
// Check if we have an initial query and apply saved search settings
const searchOptimizationMode = localStorage.getItem(
'searchOptimizationMode',
);
const searchChatModelProvider = localStorage.getItem(
'searchChatModelProvider',
);
const searchChatModel = localStorage.getItem('searchChatModel');

// Apply saved optimization mode if valid
if (
searchOptimizationMode &&
(searchOptimizationMode === 'speed' ||
searchOptimizationMode === 'agent')
) {
setOptimizationMode(searchOptimizationMode);
localStorage.setItem('optimizationMode', searchOptimizationMode);
}

// Apply saved chat model if valid
if (searchChatModelProvider && searchChatModel) {
setChatModelProvider({
name: searchChatModel,
provider: searchChatModelProvider,
});
// Also update localStorage to ensure consistency
localStorage.setItem('chatModelProvider', searchChatModelProvider);
localStorage.setItem('chatModel', searchChatModel);
}

sendMessage(initialMessage);

// Remove the query parameter from the URL to prevent re-execution on page reload
const url = new URL(window.location.href);
url.searchParams.delete('q');
router.replace(url.pathname + url.search, { scroll: false });
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [isConfigReady, isReady, initialMessage]);

@@ -164,7 +164,14 @@ const MessageInput = ({
<div className="flex flex-row items-center space-x-2">
<ModelSelector
selectedModel={selectedModel}
setSelectedModel={setSelectedModel}
setSelectedModel={(selectedModel) => {
setSelectedModel(selectedModel);
localStorage.setItem(
'chatModelProvider',
selectedModel.provider,
);
localStorage.setItem('chatModel', selectedModel.model);
}}
/>
<SystemPromptSelector
selectedPromptIds={systemPromptIds}

@@ -172,7 +179,10 @@ const MessageInput = ({
/>
<Optimization
optimizationMode={optimizationMode}
setOptimizationMode={setOptimizationMode}
setOptimizationMode={(optimizationMode) => {
setOptimizationMode(optimizationMode);
localStorage.setItem('optimizationMode', optimizationMode);
}}
/>
{loading ? (
<button

@@ -90,8 +90,16 @@ const Attach = ({
>
{files.length > 1 && (
<>
<File size={19} className={isDisabled ? 'text-sky-900' : 'text-sky-400'} />
<p className={cn("inline whitespace-nowrap text-xs font-medium", isDisabled ? 'text-sky-900' : 'text-sky-400')}>
<File
size={19}
className={isDisabled ? 'text-sky-900' : 'text-sky-400'}
/>
<p
className={cn(
'inline whitespace-nowrap text-xs font-medium',
isDisabled ? 'text-sky-900' : 'text-sky-400',
)}
>
{files.length} files
</p>
</>

@@ -99,8 +107,16 @@ const Attach = ({

{files.length === 1 && (
<>
<File size={18} className={isDisabled ? 'text-sky-900' : 'text-sky-400'} />
<p className={cn("text-xs font-medium", isDisabled ? 'text-sky-900' : 'text-sky-400')}>
<File
size={18}
className={isDisabled ? 'text-sky-900' : 'text-sky-400'}
/>
<p
className={cn(
'text-xs font-medium',
isDisabled ? 'text-sky-900' : 'text-sky-400',
)}
>
{files[0].fileName.length > 10
? files[0].fileName.replace(/\.\w+$/, '').substring(0, 3) +
'...' +

@@ -25,9 +25,11 @@ interface ProviderModelMap {
const ModelSelector = ({
selectedModel,
setSelectedModel,
truncateModelName = true,
}: {
selectedModel: { provider: string; model: string } | null;
setSelectedModel: (model: { provider: string; model: string }) => void;
truncateModelName?: boolean;
}) => {
const [providerModels, setProviderModels] = useState<ProviderModelMap>({});
const [providersList, setProvidersList] = useState<string[]>([]);

@@ -114,7 +116,13 @@ const ModelSelector = ({
setSelectedModelDisplay(currentModel.displayName);
setSelectedProviderDisplay(provider.displayName);
}
} else {
setSelectedModelDisplay('');
setSelectedProviderDisplay('');
}
} else {
setSelectedModelDisplay('');
setSelectedProviderDisplay('');
}

setLoading(false);

@@ -125,7 +133,7 @@ const ModelSelector = ({
};

fetchModels();
}, [selectedModel, setSelectedModel]);
}, [selectedModel]);

const toggleProviderExpanded = (provider: string) => {
setExpandedProviders((prev) => ({

@@ -144,10 +152,6 @@ const ModelSelector = ({
setSelectedProviderDisplay(
providerModels[option.provider]?.displayName || option.provider,
);

// Save to localStorage for persistence
localStorage.setItem('chatModelProvider', option.provider);
localStorage.setItem('chatModel', option.model);
};

const getDisplayText = () => {

@@ -162,9 +166,19 @@ const ModelSelector = ({
{({ open }) => (
<>
<div className="relative">
<PopoverButton className="group flex items-center justify-center text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white">
<PopoverButton
type="button"
className="p-2 group flex text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
>
<Cpu size={18} />
<span className="mx-2 text-xs font-medium overflow-hidden text-ellipsis whitespace-nowrap max-w-44 hidden lg:block">
<span
className={cn(
'mx-2 text-xs font-medium overflow-hidden text-ellipsis whitespace-nowrap hidden lg:block',
{
'max-w-44': truncateModelName,
},
)}
>
{getDisplayText()}
</span>
<ChevronDown

@@ -46,15 +46,12 @@ const OptimizationModes = [
const Optimization = ({
optimizationMode,
setOptimizationMode,
showTitle = false,
}: {
optimizationMode: string;
setOptimizationMode: (mode: string) => void;
showTitle?: boolean;
}) => {
const handleOptimizationChange = (mode: string) => {
setOptimizationMode(mode);
localStorage.setItem('optimizationMode', mode);
};

return (
<Popover className="relative">
<PopoverButton

@@ -62,16 +59,14 @@ const Optimization = ({
className="p-2 text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
>
<div className="flex flex-row items-center space-x-1">
{
OptimizationModes.find((mode) => mode.key === optimizationMode)
?.icon
}
{/* <p className="text-xs font-medium hidden lg:block">
{
OptimizationModes.find((mode) => mode.key === optimizationMode)
?.title
}
</p> */}
{OptimizationModes.find((mode) => mode.key === optimizationMode)
?.icon || <Minimize2 size={20} className="text-gray-400" />}
{showTitle && (
<p className="text-xs font-medium">
{OptimizationModes.find((mode) => mode.key === optimizationMode)
?.title || 'Select mode'}
</p>
)}
<ChevronDown size={20} />
</div>
</PopoverButton>

@@ -88,7 +83,7 @@ const Optimization = ({
<div className="flex flex-col gap-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-max max-w-[300px] p-4 max-h-[200px] md:max-h-none overflow-y-auto">
{OptimizationModes.map((mode, i) => (
<PopoverButton
onClick={() => handleOptimizationChange(mode.key)}
onClick={() => setOptimizationMode(mode.key)}
key={i}
className={cn(
'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-1 duration-200 cursor-pointer transition',

@@ -84,11 +84,13 @@ export class AnalyzerAgent {
if (!state.urlsToSummarize || state.urlsToSummarize.length === 0) {
const urlRegex = /https?:\/\/[^\s]+/gi;
const urls = [...new Set(state.query.match(urlRegex) || [])];

if (urls.length > 0) {
console.log('URLs detected in initial query, routing to URL summarization');
console.log(
'URLs detected in initial query, routing to URL summarization',
);
console.log(`URLs found: ${urls.join(', ')}`);

// Emit URL detection event
this.emitter.emit('agent_action', {
type: 'agent_action',

@@ -104,10 +104,10 @@ export class ContentRouterAgent {
},
);

const routerDecision = await structuredLlm.invoke(
const routerDecision = (await structuredLlm.invoke(
[...removeThinkingBlocksFromMessages(state.messages), prompt],
{ signal: this.signal },
);
)) as RouterDecision;

console.log(`Router decision: ${routerDecision.decision}`);
console.log(`Router reasoning: ${routerDecision.reasoning}`);

@@ -145,9 +145,9 @@ export class TaskManagerAgent {
name: 'break_down_tasks',
});

const taskBreakdownResult = await structuredLlm.invoke([prompt], {
const taskBreakdownResult = (await structuredLlm.invoke([prompt], {
signal: this.signal,
});
})) as TaskBreakdown;

console.log('Task breakdown response:', taskBreakdownResult);

@@ -35,14 +35,22 @@ export class URLSummarizationAgent {

// Use pre-analyzed URLs from ContentRouterAgent
const urlsToProcess = state.urlsToSummarize || [];
const summarizationIntent = state.summarizationIntent || 'process content to help answer the user query';
const summarizationIntent =
state.summarizationIntent ||
'process content to help answer the user query';

if (urlsToProcess.length === 0) {
console.log('No URLs found for processing, routing back to content router');
console.log(
'No URLs found for processing, routing back to content router',
);
return new Command({
goto: 'content_router',
update: {
messages: [new AIMessage('No URLs found for processing, routing to content router')],
messages: [
new AIMessage(
'No URLs found for processing, routing to content router',
),
],
},
});
}

@@ -93,7 +101,7 @@ export class URLSummarizationAgent {

if (!webContent || !webContent.pageContent) {
console.warn(`No content retrieved from URL: ${url}`);

// Emit URL processing failure event
this.emitter.emit('agent_action', {
type: 'agent_action',

@@ -118,9 +126,11 @@ export class URLSummarizationAgent {
if (contentLength < 4000) {
finalContent = webContent.pageContent;
processingType = 'url-direct-content';

console.log(`Content is short (${contentLength} chars), using directly without summarization`);
console.log(
`Content is short (${contentLength} chars), using directly without summarization`,
);

// Emit direct content usage event
this.emitter.emit('agent_action', {
type: 'agent_action',

@@ -138,8 +148,10 @@ export class URLSummarizationAgent {
});
} else {
// Content is long, summarize using LLM
console.log(`Content is long (${contentLength} chars), generating summary`);
console.log(
`Content is long (${contentLength} chars), generating summary`,
);

const systemPrompt = this.systemInstructions
? `${this.systemInstructions}\n\n`
: '';

@@ -215,7 +227,7 @@ Provide a comprehensive summary of the above web page content, focusing on infor
}
} catch (error) {
console.error(`Error processing URL ${url}:`, error);

// Emit URL processing error event
this.emitter.emit('agent_action', {
type: 'agent_action',

@@ -183,23 +183,26 @@ export class WebSearchAgent {

let previewContents: PreviewContent[] = [];
// Always take the top 3 results for preview content
previewContents.push(...filteredResults.slice(0, 3)
.map((result) => ({
previewContents.push(
...filteredResults.slice(0, 3).map((result) => ({
title: result.title || 'Untitled',
snippet: result.content || '',
url: result.url,
}))
})),
);

// Sort by relevance score and take top 12 results for a total of 15
previewContents.push(...resultsWithSimilarity.slice(3)
.sort((a, b) => b.similarity - a.similarity)
.slice(0, 12)
.map(({ result }) => ({
title: result.title || 'Untitled',
snippet: result.content || '',
url: result.url,
})));
previewContents.push(
...resultsWithSimilarity
.slice(3)
.sort((a, b) => b.similarity - a.similarity)
.slice(0, 12)
.map(({ result }) => ({
title: result.title || 'Untitled',
snippet: result.content || '',
url: result.url,
})),
);

console.log(
`Extracted preview content from ${previewContents.length} search results for analysis`,

@@ -1,4 +1,4 @@
import { ChatOpenAI } from '@langchain/openai';
import { ChatGroq } from '@langchain/groq';
import { getGroqApiKey } from '../config';
import { ChatModel } from '.';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';

@@ -47,7 +47,7 @@ const generateDisplayName = (modelId: string, ownedBy: string): string => {
let displayName = modelId
.replace(/[-_]/g, ' ')
.split(' ')
.map(word => word.charAt(0).toUpperCase() + word.slice(1))
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
.join(' ');

// Add owner info for certain models

@@ -62,24 +62,27 @@ const fetchGroqModels = async (apiKey: string): Promise<GroqModel[]> => {
try {
const response = await fetch('https://api.groq.com/openai/v1/models', {
headers: {
'Authorization': `Bearer ${apiKey}`,
Authorization: `Bearer ${apiKey}`,
'Content-Type': 'application/json',
},
});

if (!response.ok) {
throw new Error(`Failed to fetch models: ${response.status} ${response.statusText}`);
throw new Error(
`Failed to fetch models: ${response.status} ${response.statusText}`,
);
}

const data: GroqModelsResponse = await response.json();

// Filter for active chat completion models (exclude audio/whisper models)
return data.data.filter(model =>
model.active &&
!model.id.includes('whisper') &&
!model.id.includes('tts') &&
!model.id.includes('guard') &&
!model.id.includes('prompt-guard')
return data.data.filter(
(model) =>
model.active &&
!model.id.includes('whisper') &&
!model.id.includes('tts') &&
!model.id.includes('guard') &&
!model.id.includes('prompt-guard'),
);
} catch (error) {
console.error('Error fetching Groq models:', error);

@@ -101,13 +104,9 @@ export const loadGroqChatModels = async () => {
availableModels.forEach((model) => {
chatModels[model.id] = {
displayName: generateDisplayName(model.id, model.owned_by),
model: new ChatOpenAI({
openAIApiKey: groqApiKey,
modelName: model.id,
// temperature: 0.7,
configuration: {
baseURL: 'https://api.groq.com/openai/v1',
},
model: new ChatGroq({
apiKey: groqApiKey,
model: model.id,
}) as unknown as BaseChatModel,
};
});
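The Groq provider change above replaces the OpenAI-compatible ChatOpenAI wrapper with the dedicated ChatGroq client and filters the dynamically fetched model list. A minimal standalone usage sketch, assuming GROQ_API_KEY is set in the environment and using an example model id (neither is taken from this repository):

// Example only: the model id and env var are assumptions, not part of the diff.
import { ChatGroq } from '@langchain/groq';

const llm = new ChatGroq({
  apiKey: process.env.GROQ_API_KEY,
  model: 'llama-3.3-70b-versatile',
});

const reply = await llm.invoke('Reply with one short sentence.');
console.log(reply.content);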

@@ -18,7 +18,7 @@ const RelevanceCheckSchema = z.object({
.describe('Whether the content is relevant to the user query'),
reason: z
.string()
.describe('Brief explanation of why content is or isn\'t relevant'),
.describe("Brief explanation of why content is or isn't relevant"),
});

export const summarizeWebContent = async (

@@ -39,7 +39,8 @@ export const summarizeWebContent = async (
: '';

// Determine content length for short-circuit logic
const contentToAnalyze = content.pageContent || content.metadata.html || '';
const contentToAnalyze =
content.pageContent || content.metadata.html || '';
const isShortContent = contentToAnalyze.length < 4000;

if (isShortContent) {

@@ -66,14 +67,16 @@ Here is the query you need to answer: ${query}

Here is the content to analyze:
${contentToAnalyze}`,
{ signal }
{ signal },
);

if (!relevanceResult) {
console.error(`No relevance result returned for URL ${url}`);
// Fall through to full summarization as fallback
} else if (relevanceResult.relevant) {
console.log(`Short content for URL "${url}" is relevant: ${relevanceResult.reason}`);
console.log(
`Short content for URL "${url}" is relevant: ${relevanceResult.reason}`,
);
return {
document: new Document({
pageContent: content.pageContent,

@@ -86,14 +89,20 @@ ${contentToAnalyze}`,
notRelevantReason: undefined,
};
} else {
console.log(`Short content for URL "${url}" is not relevant: ${relevanceResult.reason}`);
console.log(
`Short content for URL "${url}" is not relevant: ${relevanceResult.reason}`,
);
return {
document: null,
notRelevantReason: relevanceResult.reason || 'Content not relevant to query',
notRelevantReason:
relevanceResult.reason || 'Content not relevant to query',
};
}
} catch (error) {
console.error(`Error checking relevance for short content from URL ${url}:`, error);
console.error(
`Error checking relevance for short content from URL ${url}:`,
error,
);
// Fall through to full summarization as fallback
}
}