feat(models): Implement model visibility management with hidden models configuration

Willie Zutz 2025-07-13 11:50:51 -06:00
parent e47307d1d4
commit 18fdb192d8
8 changed files with 459 additions and 22 deletions

View file

@@ -10,6 +10,7 @@ import {
getOpenaiApiKey,
getDeepseekApiKey,
getLMStudioApiEndpoint,
getHiddenModels,
updateConfig,
} from '@/lib/config';
import {
@@ -70,6 +71,7 @@ export const GET = async (req: Request) => {
config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
config['customOpenaiModelName'] = getCustomOpenaiModelName();
config['baseUrl'] = getBaseUrl();
config['hiddenModels'] = getHiddenModels();
return Response.json({ ...config }, { status: 200 });
} catch (err) {
@@ -96,6 +98,9 @@ export const POST = async (req: Request) => {
};
const updatedConfig = {
GENERAL: {
HIDDEN_MODELS: config.hiddenModels || [],
},
MODELS: {
OPENAI: {
API_KEY: getUpdatedProtectedValue(
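For orientation, here is a minimal client-side sketch of how these two handlers are exercised. The route path (/api/config) and the example model key are assumptions for illustration, not taken from this commit; the GET response now carries hiddenModels, and the hiddenModels array in the POST body is written back to config.toml as GENERAL.HIDDEN_MODELS.

// Illustrative only: route path and model key are assumed.
const current = await fetch('/api/config').then((res) => res.json());
console.log(current.hiddenModels); // e.g. [] on a fresh install

await fetch('/api/config', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ ...current, hiddenModels: ['some-model-key'] }),
});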

View file

@@ -5,9 +5,12 @@ import {
export const GET = async (req: Request) => {
try {
const url = new URL(req.url);
const includeHidden = url.searchParams.get('include_hidden') === 'true';
const [chatModelProviders, embeddingModelProviders] = await Promise.all([
getAvailableChatModelProviders(),
getAvailableEmbeddingModelProviders(),
getAvailableChatModelProviders({ includeHidden }),
getAvailableEmbeddingModelProviders({ includeHidden }),
]);
Object.keys(chatModelProviders).forEach((provider) => {
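A hedged sketch of the new query parameter's effect (the /api/models path is assumed from the project layout): the default listing omits anything named in HIDDEN_MODELS, while include_hidden=true returns the complete catalogue, which is what the settings page requests for its visibility UI.

// Illustrative only: endpoint path assumed.
const visibleOnly = await fetch('/api/models').then((res) => res.json());
const fullCatalogue = await fetch('/api/models?include_hidden=true').then((res) => res.json());
// fullCatalogue.chatModelProviders additionally contains the entries the admin has hidden.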

View file

@@ -11,6 +11,8 @@ import {
Save,
X,
RotateCcw,
ChevronDown,
ChevronRight,
} from 'lucide-react';
import { useEffect, useState, useRef } from 'react';
import { cn } from '@/lib/utils';
@@ -40,6 +42,7 @@ interface SettingsType {
customOpenaiApiUrl: string;
customOpenaiModelName: string;
ollamaContextWindow: number;
hiddenModels: string[];
}
interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {
@@ -245,6 +248,14 @@ export default function SettingsPage() {
);
const [isAddingNewPrompt, setIsAddingNewPrompt] = useState(false);
// Model visibility state variables
const [allModels, setAllModels] = useState<{
chat: Record<string, Record<string, any>>;
embedding: Record<string, Record<string, any>>;
}>({ chat: {}, embedding: {} });
const [hiddenModels, setHiddenModels] = useState<string[]>([]);
const [expandedProviders, setExpandedProviders] = useState<Set<string>>(new Set());
// Default Search Settings state variables
const [searchOptimizationMode, setSearchOptimizationMode] =
useState<string>('');
@@ -265,6 +276,9 @@ export default function SettingsPage() {
setConfig(data);
// Populate hiddenModels state from config
setHiddenModels(data.hiddenModels || []);
const chatModelProvidersKeys = Object.keys(data.chatModelProviders || {});
const embeddingModelProvidersKeys = Object.keys(
data.embeddingModelProviders || {},
@@ -319,7 +333,29 @@ export default function SettingsPage() {
setIsLoading(false);
};
const fetchAllModels = async () => {
try {
// Fetch complete model list including hidden models
const res = await fetch(`/api/models?include_hidden=true`, {
headers: {
'Content-Type': 'application/json',
},
});
if (res.ok) {
const data = await res.json();
setAllModels({
chat: data.chatModelProviders || {},
embedding: data.embeddingModelProviders || {},
});
}
} catch (error) {
console.error('Failed to fetch all models:', error);
}
};
fetchConfig();
fetchAllModels();
// Load search settings from localStorage
const loadSearchSettings = () => {
@@ -529,6 +565,42 @@ export default function SettingsPage() {
localStorage.setItem(key, value);
};
const handleModelVisibilityToggle = async (modelKey: string, isVisible: boolean) => {
let updatedHiddenModels: string[];
if (isVisible) {
// Model should be visible, remove from hidden list
updatedHiddenModels = hiddenModels.filter(m => m !== modelKey);
} else {
// Model should be hidden, add to hidden list
updatedHiddenModels = [...hiddenModels, modelKey];
}
// Update local state immediately
setHiddenModels(updatedHiddenModels);
// Persist changes to backend
try {
await saveConfig('hiddenModels', updatedHiddenModels);
} catch (error) {
console.error('Failed to save hidden models:', error);
// Revert local state on error
setHiddenModels(hiddenModels);
}
};
const toggleProviderExpansion = (providerId: string) => {
setExpandedProviders(prev => {
const newSet = new Set(prev);
if (newSet.has(providerId)) {
newSet.delete(providerId);
} else {
newSet.add(providerId);
}
return newSet;
});
};
const handleAddOrUpdateSystemPrompt = async () => {
const currentPrompt = editingPrompt || {
name: newPromptName,
@@ -1400,6 +1472,123 @@ export default function SettingsPage() {
)}
</SettingsSection>
<SettingsSection
title="Model Visibility"
tooltip="Hide models from the API to prevent them from appearing in model lists.\nHidden models will not be available for selection in the interface.\nThis allows server admins to disable models that may incur large costs or won't work with the application."
>
<div className="flex flex-col space-y-3">
{/* Unified Models List */}
{(() => {
// Combine all models from both chat and embedding providers
const allProviders: Record<string, Record<string, any>> = {};
// Add chat models
Object.entries(allModels.chat).forEach(([provider, models]) => {
if (!allProviders[provider]) {
allProviders[provider] = {};
}
Object.entries(models).forEach(([modelKey, model]) => {
allProviders[provider][modelKey] = model;
});
});
// Add embedding models
Object.entries(allModels.embedding).forEach(([provider, models]) => {
if (!allProviders[provider]) {
allProviders[provider] = {};
}
Object.entries(models).forEach(([modelKey, model]) => {
allProviders[provider][modelKey] = model;
});
});
return Object.keys(allProviders).length > 0 ? (
Object.entries(allProviders).map(([provider, models]) => {
const providerId = `provider-${provider}`;
const isExpanded = expandedProviders.has(providerId);
const modelEntries = Object.entries(models);
const hiddenCount = modelEntries.filter(([modelKey]) => hiddenModels.includes(modelKey)).length;
const totalCount = modelEntries.length;
return (
<div
key={providerId}
className="border border-light-200 dark:border-dark-200 rounded-lg overflow-hidden"
>
<button
onClick={() => toggleProviderExpansion(providerId)}
className="w-full p-3 bg-light-secondary dark:bg-dark-secondary hover:bg-light-200 dark:hover:bg-dark-200 transition-colors flex items-center justify-between"
>
<div className="flex items-center space-x-3">
{isExpanded ? (
<ChevronDown size={16} className="text-black/70 dark:text-white/70" />
) : (
<ChevronRight size={16} className="text-black/70 dark:text-white/70" />
)}
<h4 className="text-sm font-medium text-black/80 dark:text-white/80">
{(PROVIDER_METADATA as any)[provider]?.displayName ||
provider.charAt(0).toUpperCase() + provider.slice(1)}
</h4>
</div>
<div className="flex items-center space-x-2 text-xs text-black/60 dark:text-white/60">
<span>{totalCount - hiddenCount} visible</span>
{hiddenCount > 0 && (
<span className="px-2 py-1 bg-red-100 dark:bg-red-900/30 text-red-700 dark:text-red-400 rounded">
{hiddenCount} hidden
</span>
)}
</div>
</button>
{isExpanded && (
<div className="p-3 bg-light-100 dark:bg-dark-100 border-t border-light-200 dark:border-dark-200">
<div className="grid grid-cols-1 md:grid-cols-2 gap-2">
{modelEntries.map(([modelKey, model]) => (
<div
key={`${provider}-${modelKey}`}
className="flex items-center justify-between p-2 bg-white dark:bg-dark-secondary rounded-md"
>
<span className="text-sm text-black/90 dark:text-white/90">
{model.displayName || modelKey}
</span>
<Switch
checked={!hiddenModels.includes(modelKey)}
onChange={(checked) => {
handleModelVisibilityToggle(modelKey, checked);
}}
className={cn(
!hiddenModels.includes(modelKey)
? 'bg-[#24A0ED]'
: 'bg-light-200 dark:bg-dark-200',
'relative inline-flex h-5 w-9 items-center rounded-full transition-colors focus:outline-none',
)}
>
<span
className={cn(
!hiddenModels.includes(modelKey)
? 'translate-x-5'
: 'translate-x-1',
'inline-block h-3 w-3 transform rounded-full bg-white transition-transform',
)}
/>
</Switch>
</div>
))}
</div>
</div>
)}
</div>
);
})
) : (
<p className="text-sm text-black/60 dark:text-white/60 italic">
No models available
</p>
);
})()}
</div>
</SettingsSection>
<SettingsSection
title="API Keys"
tooltip="API Key values can be viewed in the config.toml file"

View file

@@ -16,6 +16,7 @@ interface Config {
SIMILARITY_MEASURE: string;
KEEP_ALIVE: string;
BASE_URL?: string;
HIDDEN_MODELS: string[];
};
MODELS: {
OPENAI: {
@@ -57,9 +58,35 @@ type RecursivePartial<T> = {
const loadConfig = () => {
// Server-side only
if (typeof window === 'undefined') {
return toml.parse(
const config = toml.parse(
fs.readFileSync(path.join(process.cwd(), `${configFileName}`), 'utf-8'),
) as any as Config;
// Ensure GENERAL section exists
if (!config.GENERAL) {
config.GENERAL = {} as any;
}
// Handle HIDDEN_MODELS - fix malformed table format to proper array
if (!config.GENERAL.HIDDEN_MODELS) {
config.GENERAL.HIDDEN_MODELS = [];
} else if (typeof config.GENERAL.HIDDEN_MODELS === 'object' && !Array.isArray(config.GENERAL.HIDDEN_MODELS)) {
// Convert malformed table format to array
const hiddenModelsObj = config.GENERAL.HIDDEN_MODELS as any;
const hiddenModelsArray: string[] = [];
// Extract values from numeric keys and sort by key
const keys = Object.keys(hiddenModelsObj).map(k => parseInt(k)).filter(k => !isNaN(k)).sort((a, b) => a - b);
for (const key of keys) {
if (typeof hiddenModelsObj[key] === 'string') {
hiddenModelsArray.push(hiddenModelsObj[key]);
}
}
config.GENERAL.HIDDEN_MODELS = hiddenModelsArray;
}
return config;
}
// Client-side fallback - settings will be loaded via API
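To make the normalization above concrete: a well-formed config.toml entry is expected to look like HIDDEN_MODELS = ["model-a", "model-b"] under [GENERAL], whereas the malformed form parses to an object with numeric string keys (presumably left behind by an earlier write path). A standalone restatement of the conversion, with hypothetical model keys:

// Illustrative input; the keys are hypothetical.
const malformed: Record<string, unknown> = { '0': 'model-a', '2': 'model-c', '1': 'model-b' };

const normalized = Object.keys(malformed)
  .map((k) => parseInt(k, 10))
  .filter((k) => !Number.isNaN(k))
  .sort((a, b) => a - b)
  .map((k) => malformed[String(k)])
  .filter((v): v is string => typeof v === 'string');

console.log(normalized); // ['model-a', 'model-b', 'model-c']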
@@ -73,6 +100,8 @@ export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE;
export const getBaseUrl = () => loadConfig().GENERAL.BASE_URL;
export const getHiddenModels = () => loadConfig().GENERAL.HIDDEN_MODELS;
export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;
export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;
@@ -109,17 +138,26 @@ const mergeConfigs = (current: any, update: any): any => {
return update;
}
// Handle arrays specifically - don't merge them, replace them
if (Array.isArray(update)) {
return update;
}
const result = { ...current };
for (const key in update) {
if (Object.prototype.hasOwnProperty.call(update, key)) {
const updateValue = update[key];
if (
// Handle arrays specifically - don't merge them, replace them
if (Array.isArray(updateValue)) {
result[key] = updateValue;
} else if (
typeof updateValue === 'object' &&
updateValue !== null &&
typeof result[key] === 'object' &&
result[key] !== null
result[key] !== null &&
!Array.isArray(result[key])
) {
result[key] = mergeConfigs(result[key], updateValue);
} else if (updateValue !== undefined) {
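The array handling added to mergeConfigs matters for HIDDEN_MODELS in particular: recursing into arrays as if they were objects merges them index by index (and spreads them into plain objects), which is likely how the numerically-keyed table form handled in loadConfig arose, and it would also make it impossible to shrink the hidden list. A small illustration with hypothetical values:

// Illustrative only: model keys are hypothetical.
const currentConfig = { GENERAL: { HIDDEN_MODELS: ['model-a', 'model-b', 'model-c'] } };
const update = { GENERAL: { HIDDEN_MODELS: ['model-d'] } };
// An index-wise object merge would keep ['model-d', 'model-b', 'model-c'] (as a plain object);
// with the Array.isArray checks above, the update replaces the whole array, leaving ['model-d'].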

View file

@@ -10,6 +10,7 @@ import {
getCustomOpenaiApiKey,
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
getHiddenModels,
} from '../config';
import { ChatOpenAI } from '@langchain/openai';
import {
@@ -90,7 +91,10 @@ export const embeddingModelProviders: Record<
lmstudio: loadLMStudioEmbeddingsModels,
};
export const getAvailableChatModelProviders = async () => {
export const getAvailableChatModelProviders = async (
options: { includeHidden?: boolean } = {}
) => {
const { includeHidden = false } = options;
const models: Record<string, Record<string, ChatModel>> = {};
for (const provider in chatModelProviders) {
@@ -111,28 +115,48 @@ export const getAvailableChatModelProviders = async () => {
const customOpenAiApiUrl = getCustomOpenaiApiUrl();
const customOpenAiModelName = getCustomOpenaiModelName();
models['custom_openai'] = {
...(customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName
? {
[customOpenAiModelName]: {
displayName: customOpenAiModelName,
model: new ChatOpenAI({
openAIApiKey: customOpenAiApiKey,
modelName: customOpenAiModelName,
// temperature: 0.7,
configuration: {
baseURL: customOpenAiApiUrl,
},
}) as unknown as BaseChatModel,
// Only add custom_openai provider if all required fields are configured
if (customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName) {
models['custom_openai'] = {
[customOpenAiModelName]: {
displayName: customOpenAiModelName,
model: new ChatOpenAI({
openAIApiKey: customOpenAiApiKey,
modelName: customOpenAiModelName,
// temperature: 0.7,
configuration: {
baseURL: customOpenAiApiUrl,
},
}) as unknown as BaseChatModel,
},
};
}
// Filter out hidden models if includeHidden is false
if (!includeHidden) {
const hiddenModels = getHiddenModels();
if (hiddenModels.length > 0) {
for (const provider in models) {
for (const modelKey in models[provider]) {
if (hiddenModels.includes(modelKey)) {
delete models[provider][modelKey];
}
}
: {}),
};
// Remove provider if all models are hidden
if (Object.keys(models[provider]).length === 0) {
delete models[provider];
}
}
}
}
return models;
};
export const getAvailableEmbeddingModelProviders = async () => {
export const getAvailableEmbeddingModelProviders = async (
options: { includeHidden?: boolean } = {}
) => {
const { includeHidden = false } = options;
const models: Record<string, Record<string, EmbeddingModel>> = {};
for (const provider in embeddingModelProviders) {
@@ -149,5 +173,23 @@ export const getAvailableEmbeddingModelProviders = async () => {
}
}
// Filter out hidden models if includeHidden is false
if (!includeHidden) {
const hiddenModels = getHiddenModels();
if (hiddenModels.length > 0) {
for (const provider in models) {
for (const modelKey in models[provider]) {
if (hiddenModels.includes(modelKey)) {
delete models[provider][modelKey];
}
}
// Remove provider if all models are hidden
if (Object.keys(models[provider]).length === 0) {
delete models[provider];
}
}
}
}
return models;
};
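Finally, a hypothetical usage sketch of the new options parameter; the import path is assumed from the project layout and is not shown in this commit.

// Hypothetical usage: '@/lib/providers' is an assumed import path.
import {
  getAvailableChatModelProviders,
  getAvailableEmbeddingModelProviders,
} from '@/lib/providers';

const visibleChat = await getAvailableChatModelProviders();
const allChat = await getAvailableChatModelProviders({ includeHidden: true });
const allEmbedding = await getAvailableEmbeddingModelProviders({ includeHidden: true });

// Providers whose models are all hidden are dropped entirely from the default listing.
console.log(Object.keys(allChat).filter((p) => !(p in visibleChat)));
console.log(Object.keys(allEmbedding));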