fix: Comment out setTemperature calls for deterministic output in agents and utils

This commit is contained in:
Willie Zutz 2025-06-23 09:06:09 -06:00
parent b97383bf0f
commit feec1cd608
5 changed files with 14 additions and 9 deletions

View file

@@ -59,7 +59,7 @@ export class AnalyzerAgent {
   async execute(state: typeof AgentState.State): Promise<Command> {
     try {
-      setTemperature(this.llm, 0.0);
+      //setTemperature(this.llm, 0.0);
       // Initialize originalQuery if not set
       if (!state.originalQuery) {

View file

@@ -39,7 +39,7 @@ export class TaskManagerAgent {
   */
   async execute(state: typeof AgentState.State): Promise<Command> {
     try {
-      setTemperature(this.llm, 0); // Set temperature to 0 for deterministic output
+      //setTemperature(this.llm, 0); // Set temperature to 0 for deterministic output
       // Check if we're in task progression mode (tasks already exist and we're processing them)
       if (state.tasks && state.tasks.length > 0) {

View file

@@ -54,7 +54,7 @@ export class WebSearchAgent {
   */
   async execute(state: typeof AgentState.State): Promise<Command> {
     try {
-      setTemperature(this.llm, 0); // Set temperature to 0 for deterministic output
+      //setTemperature(this.llm, 0); // Set temperature to 0 for deterministic output
       // Determine current task to process
       const currentTask =

View file

@@ -19,7 +19,7 @@ export type PreviewContent = {
 // Zod schema for structured preview analysis output
 const PreviewAnalysisSchema = z.object({
   isSufficient: z.boolean().describe('Whether the preview content is sufficient to answer the task query'),
-  reason: z.string().optional().nullable().describe('Specific reason why full content analysis is required (only if isSufficient is false)')
+  reason: z.string().nullable().describe('Specific reason why full content analysis is required (only if isSufficient is false)')
 });

 export const analyzePreviewContent = async (
@@ -80,11 +80,6 @@ Snippet: ${content.snippet}
 - Be specific in your reasoning when the content is not sufficient
 - The original query is provided for additional context, only use it for clarification of overall expectations and intent. You do **not** need to answer the original query directly or completely
-
-# Response Format
-You must return a JSON object with:
-- isSufficient: boolean indicating whether preview content is sufficient
-- reason: string explaining why full content analysis is required (only if isSufficient is false)

 # Information Context:
 Today's date is ${formatDateForLLM(new Date())}
@@ -99,6 +94,11 @@ ${taskQuery}
 # Search Result Previews to Analyze:
 ${formattedPreviewContent}
+
+# Response Format
+You must return a JSON object with:
+- isSufficient: boolean indicating whether preview content is sufficient
+- reason: string explaining why full content analysis is required (only if isSufficient is false)
 `,
   { signal },
 );

View file

@@ -3,6 +3,7 @@ import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { formatDateForLLM } from '../utils';
 import { getWebContent } from './documents';
 import { removeThinkingBlocks } from './contentUtils';
+import { setTemperature } from './modelUtils';

 export type SummarizeResult = {
   document: Document | null;
@@ -17,6 +18,7 @@ export const summarizeWebContent = async (
   signal: AbortSignal,
 ): Promise<SummarizeResult> => {
   try {
+    setTemperature(llm, 0); // Set temperature to 0 for deterministic output
     // Helper function to summarize content and check relevance
     const summarizeContent = async (
       content: Document,
@@ -140,5 +142,8 @@ ${i === 0 ? content.metadata.html : content.pageContent}`;
       document: null,
       notRelevantReason: `Error processing URL: ${error instanceof Error ? error.message : 'Unknown error'}`,
     };
+  } finally {
+    // Reset temperature to default after processing
+    setTemperature(llm);
   }
 };