Implemented the history retention feature.
### 1. __History Retention Configuration__ - __config.toml__: Added `[HISTORY]` section with `RETENTION_DAYS = 30` setting - __Backend Integration__: Updated configuration handling to support history retention - __API Endpoints__: Modified `/api/config` to read/write history retention settings ### 2. __User Interface__ - __Settings Page__: Added "History Settings" section with number input for retention days - __Real-time Updates__: Settings are saved to config.toml when changed - __Clear Documentation__: Explains that retention only applies when incognito mode is off ### 3. __Automatic History Cleanup__ - __Background Processing__: Cleanup runs automatically when new chats are created (non-incognito mode) - __Smart Logic__: Only deletes chats older than configured retention period - __Complete Cleanup__: Removes both chat records and associated messages - __Performance Optimized__: Non-blocking background execution ### 4. __Manual Cleanup API__ - __Endpoint__: `POST /api/cleanup-history` for manual cleanup triggers - __Utility Functions__: Reusable cleanup logic in dedicated utility file ### 5. __Docker Rebuild__ - __Container Rebuild__: Successfully rebuilt the Docker containers with new features - __Configuration Persistence__: config.toml changes are preserved in Docker volume - __Application Ready__: The application should now be accessible at <http://localhost:3000> ## Key Features: 1. __Incognito Mode Integration__: History retention only applies when incognito mode is OFF 2. __Flexible Configuration__: 0 = keep forever, any positive number = days to retain 3. __Automatic Cleanup__: Runs in background when creating new chats 4. __Manual Control__: API endpoint for manual cleanup triggers 5. __Database Integrity__: Properly removes both chats and associated messages ## Testing the Feature: 1. __Access the Application__: Open <http://localhost:3000> in your browser 2. 
__Configure Settings__: Go to Settings → History Settings → Set retention days 3. __Test Incognito Mode__: Toggle incognito mode on/off to see different behaviors 4. __Create Test Chats__: Create chats in both modes to verify functionality 5. __Manual Cleanup__: Use the `/api/cleanup-history` endpoint to test manual cleanup
This commit is contained in:
parent
26952ff6c8
commit
883e457009
6 changed files with 152 additions and 14 deletions
|
|
@ -21,6 +21,7 @@ import {
|
||||||
getCustomOpenaiModelName,
|
getCustomOpenaiModelName,
|
||||||
} from '@/lib/config';
|
} from '@/lib/config';
|
||||||
import { searchHandlers } from '@/lib/search';
|
import { searchHandlers } from '@/lib/search';
|
||||||
|
import { cleanupOldHistory } from '@/lib/utils/historyCleanup';
|
||||||
|
|
||||||
export const runtime = 'nodejs';
|
export const runtime = 'nodejs';
|
||||||
export const dynamic = 'force-dynamic';
|
export const dynamic = 'force-dynamic';
|
||||||
|
|
@ -50,6 +51,7 @@ type Body = {
|
||||||
chatModel: ChatModel;
|
chatModel: ChatModel;
|
||||||
embeddingModel: EmbeddingModel;
|
embeddingModel: EmbeddingModel;
|
||||||
systemInstructions: string;
|
systemInstructions: string;
|
||||||
|
isIncognito?: boolean;
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleEmitterEvents = async (
|
const handleEmitterEvents = async (
|
||||||
|
|
@ -58,6 +60,7 @@ const handleEmitterEvents = async (
|
||||||
encoder: TextEncoder,
|
encoder: TextEncoder,
|
||||||
aiMessageId: string,
|
aiMessageId: string,
|
||||||
chatId: string,
|
chatId: string,
|
||||||
|
isIncognito: boolean = false,
|
||||||
) => {
|
) => {
|
||||||
let recievedMessage = '';
|
let recievedMessage = '';
|
||||||
let sources: any[] = [];
|
let sources: any[] = [];
|
||||||
|
|
@ -101,18 +104,21 @@ const handleEmitterEvents = async (
|
||||||
);
|
);
|
||||||
writer.close();
|
writer.close();
|
||||||
|
|
||||||
db.insert(messagesSchema)
|
// 在無痕模式下不保存助手回應到數據庫
|
||||||
.values({
|
if (!isIncognito) {
|
||||||
content: recievedMessage,
|
db.insert(messagesSchema)
|
||||||
chatId: chatId,
|
.values({
|
||||||
messageId: aiMessageId,
|
content: recievedMessage,
|
||||||
role: 'assistant',
|
chatId: chatId,
|
||||||
metadata: JSON.stringify({
|
messageId: aiMessageId,
|
||||||
createdAt: new Date(),
|
role: 'assistant',
|
||||||
...(sources && sources.length > 0 && { sources }),
|
metadata: JSON.stringify({
|
||||||
}),
|
createdAt: new Date(),
|
||||||
})
|
...(sources && sources.length > 0 && { sources }),
|
||||||
.execute();
|
}),
|
||||||
|
})
|
||||||
|
.execute();
|
||||||
|
}
|
||||||
});
|
});
|
||||||
stream.on('error', (data) => {
|
stream.on('error', (data) => {
|
||||||
const parsedData = JSON.parse(data);
|
const parsedData = JSON.parse(data);
|
||||||
|
|
@ -149,6 +155,11 @@ const handleHistorySave = async (
|
||||||
files: files.map(getFileDetails),
|
files: files.map(getFileDetails),
|
||||||
})
|
})
|
||||||
.execute();
|
.execute();
|
||||||
|
|
||||||
|
// Trigger history cleanup for new chats (run in background)
|
||||||
|
cleanupOldHistory().catch(err => {
|
||||||
|
console.error('Background history cleanup failed:', err);
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
const messageExists = await db.query.messages.findFirst({
|
const messageExists = await db.query.messages.findFirst({
|
||||||
|
|
@ -286,8 +297,12 @@ export const POST = async (req: Request) => {
|
||||||
const writer = responseStream.writable.getWriter();
|
const writer = responseStream.writable.getWriter();
|
||||||
const encoder = new TextEncoder();
|
const encoder = new TextEncoder();
|
||||||
|
|
||||||
handleEmitterEvents(stream, writer, encoder, aiMessageId, message.chatId);
|
handleEmitterEvents(stream, writer, encoder, aiMessageId, message.chatId, body.isIncognito);
|
||||||
handleHistorySave(message, humanMessageId, body.focusMode, body.files);
|
|
||||||
|
// 在無痕模式下不保存聊天記錄
|
||||||
|
if (!body.isIncognito) {
|
||||||
|
handleHistorySave(message, humanMessageId, body.focusMode, body.files);
|
||||||
|
}
|
||||||
|
|
||||||
return new Response(responseStream.readable, {
|
return new Response(responseStream.readable, {
|
||||||
headers: {
|
headers: {
|
||||||
|
|
|
||||||
19
src/app/api/cleanup-history/route.ts
Normal file
19
src/app/api/cleanup-history/route.ts
Normal file
|
|
@ -0,0 +1,19 @@
|
||||||
|
import { cleanupOldHistory } from '@/lib/utils/historyCleanup';
|
||||||
|
|
||||||
|
export const POST = async (req: Request) => {
|
||||||
|
try {
|
||||||
|
const result = await cleanupOldHistory();
|
||||||
|
|
||||||
|
return Response.json({
|
||||||
|
message: result.message,
|
||||||
|
deletedChats: result.deletedChats
|
||||||
|
}, { status: 200 });
|
||||||
|
|
||||||
|
} catch (err) {
|
||||||
|
console.error('An error occurred while cleaning up history:', err);
|
||||||
|
return Response.json(
|
||||||
|
{ message: 'An error occurred while cleaning up history' },
|
||||||
|
{ status: 500 },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
@ -9,6 +9,7 @@ import {
|
||||||
getOpenaiApiKey,
|
getOpenaiApiKey,
|
||||||
getDeepseekApiKey,
|
getDeepseekApiKey,
|
||||||
getLMStudioApiEndpoint,
|
getLMStudioApiEndpoint,
|
||||||
|
getHistoryRetentionDays,
|
||||||
updateConfig,
|
updateConfig,
|
||||||
} from '@/lib/config';
|
} from '@/lib/config';
|
||||||
import {
|
import {
|
||||||
|
|
@ -60,6 +61,7 @@ export const GET = async (req: Request) => {
|
||||||
config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
|
config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
|
||||||
config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
|
config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
|
||||||
config['customOpenaiModelName'] = getCustomOpenaiModelName();
|
config['customOpenaiModelName'] = getCustomOpenaiModelName();
|
||||||
|
config['historyRetentionDays'] = getHistoryRetentionDays();
|
||||||
|
|
||||||
return Response.json({ ...config }, { status: 200 });
|
return Response.json({ ...config }, { status: 200 });
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
|
|
@ -76,6 +78,9 @@ export const POST = async (req: Request) => {
|
||||||
const config = await req.json();
|
const config = await req.json();
|
||||||
|
|
||||||
const updatedConfig = {
|
const updatedConfig = {
|
||||||
|
HISTORY: {
|
||||||
|
RETENTION_DAYS: config.historyRetentionDays,
|
||||||
|
},
|
||||||
MODELS: {
|
MODELS: {
|
||||||
OPENAI: {
|
OPENAI: {
|
||||||
API_KEY: config.openaiApiKey,
|
API_KEY: config.openaiApiKey,
|
||||||
|
|
|
||||||
|
|
@ -26,6 +26,7 @@ interface SettingsType {
|
||||||
customOpenaiApiKey: string;
|
customOpenaiApiKey: string;
|
||||||
customOpenaiApiUrl: string;
|
customOpenaiApiUrl: string;
|
||||||
customOpenaiModelName: string;
|
customOpenaiModelName: string;
|
||||||
|
historyRetentionDays: number;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {
|
interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {
|
||||||
|
|
@ -512,6 +513,33 @@ const Page = () => {
|
||||||
</div>
|
</div>
|
||||||
</SettingsSection>
|
</SettingsSection>
|
||||||
|
|
||||||
|
<SettingsSection title="History Settings">
|
||||||
|
<div className="flex flex-col space-y-4">
|
||||||
|
<div className="flex flex-col space-y-1">
|
||||||
|
<p className="text-black/70 dark:text-white/70 text-sm">
|
||||||
|
History Retention (Days)
|
||||||
|
</p>
|
||||||
|
<p className="text-xs text-black/60 dark:text-white/60">
|
||||||
|
Number of days to keep chat history when incognito mode is off (0 = keep forever)
|
||||||
|
</p>
|
||||||
|
<Input
|
||||||
|
type="number"
|
||||||
|
placeholder="30"
|
||||||
|
min="0"
|
||||||
|
value={config.historyRetentionDays?.toString() || '30'}
|
||||||
|
isSaving={savingStates['historyRetentionDays']}
|
||||||
|
onChange={(e) => {
|
||||||
|
setConfig((prev) => ({
|
||||||
|
...prev!,
|
||||||
|
historyRetentionDays: parseInt(e.target.value) || 0,
|
||||||
|
}));
|
||||||
|
}}
|
||||||
|
onSave={(value) => saveConfig('historyRetentionDays', parseInt(value) || 0)}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</SettingsSection>
|
||||||
|
|
||||||
<SettingsSection title="System Instructions">
|
<SettingsSection title="System Instructions">
|
||||||
<div className="flex flex-col space-y-4">
|
<div className="flex flex-col space-y-4">
|
||||||
<Textarea
|
<Textarea
|
||||||
|
|
|
||||||
|
|
@ -16,6 +16,9 @@ interface Config {
|
||||||
SIMILARITY_MEASURE: string;
|
SIMILARITY_MEASURE: string;
|
||||||
KEEP_ALIVE: string;
|
KEEP_ALIVE: string;
|
||||||
};
|
};
|
||||||
|
HISTORY: {
|
||||||
|
RETENTION_DAYS: number;
|
||||||
|
};
|
||||||
MODELS: {
|
MODELS: {
|
||||||
OPENAI: {
|
OPENAI: {
|
||||||
API_KEY: string;
|
API_KEY: string;
|
||||||
|
|
@ -70,6 +73,8 @@ export const getSimilarityMeasure = () =>
|
||||||
|
|
||||||
export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE;
|
export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE;
|
||||||
|
|
||||||
|
export const getHistoryRetentionDays = () => loadConfig().HISTORY.RETENTION_DAYS;
|
||||||
|
|
||||||
export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;
|
export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;
|
||||||
|
|
||||||
export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;
|
export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;
|
||||||
|
|
|
||||||
66
src/lib/utils/historyCleanup.ts
Normal file
66
src/lib/utils/historyCleanup.ts
Normal file
|
|
@ -0,0 +1,66 @@
|
||||||
|
import db from '@/lib/db';
import { chats, messages } from '@/lib/db/schema';
import { getHistoryRetentionDays } from '@/lib/config';
import { eq, inArray, lt } from 'drizzle-orm';
|
||||||
|
|
||||||
|
export const cleanupOldHistory = async (): Promise<{ deletedChats: number; message: string }> => {
|
||||||
|
try {
|
||||||
|
const retentionDays = getHistoryRetentionDays();
|
||||||
|
|
||||||
|
// If retention is 0, keep forever
|
||||||
|
if (retentionDays === 0) {
|
||||||
|
return { deletedChats: 0, message: 'History retention disabled, keeping all chats' };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate cutoff date
|
||||||
|
const cutoffDate = new Date();
|
||||||
|
cutoffDate.setDate(cutoffDate.getDate() - retentionDays);
|
||||||
|
const cutoffDateString = cutoffDate.toISOString();
|
||||||
|
|
||||||
|
// Find chats older than retention period
|
||||||
|
const oldChats = await db
|
||||||
|
.select({ id: chats.id })
|
||||||
|
.from(chats)
|
||||||
|
.where(lt(chats.createdAt, cutoffDateString));
|
||||||
|
|
||||||
|
if (oldChats.length === 0) {
|
||||||
|
return { deletedChats: 0, message: 'No old chats to clean up' };
|
||||||
|
}
|
||||||
|
|
||||||
|
const chatIds = oldChats.map((chat: { id: string }) => chat.id);
|
||||||
|
|
||||||
|
// Delete messages for old chats
|
||||||
|
for (const chatId of chatIds) {
|
||||||
|
await db.delete(messages).where(eq(messages.chatId, chatId));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete old chats
|
||||||
|
await db.delete(chats).where(lt(chats.createdAt, cutoffDateString));
|
||||||
|
|
||||||
|
return {
|
||||||
|
deletedChats: oldChats.length,
|
||||||
|
message: `Cleaned up ${oldChats.length} old chats and their messages`
|
||||||
|
};
|
||||||
|
|
||||||
|
} catch (err) {
|
||||||
|
console.error('An error occurred while cleaning up history:', err);
|
||||||
|
throw new Error('Failed to cleanup history');
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Function to check if cleanup should run (run cleanup every 24 hours)
|
||||||
|
export const shouldRunCleanup = (): boolean => {
|
||||||
|
const lastCleanup = localStorage.getItem('lastHistoryCleanup');
|
||||||
|
if (!lastCleanup) return true;
|
||||||
|
|
||||||
|
const lastCleanupTime = new Date(lastCleanup);
|
||||||
|
const now = new Date();
|
||||||
|
const hoursSinceLastCleanup = (now.getTime() - lastCleanupTime.getTime()) / (1000 * 60 * 60);
|
||||||
|
|
||||||
|
return hoursSinceLastCleanup >= 24;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Function to mark cleanup as completed
|
||||||
|
export const markCleanupCompleted = (): void => {
|
||||||
|
localStorage.setItem('lastHistoryCleanup', new Date().toISOString());
|
||||||
|
};
|
||||||
Loading…
Add table
Add a link
Reference in a new issue