From 8e0a054a841e8899a3eb0bf747876659a158aebb Mon Sep 17 00:00:00 2001
From: Patrick Britton
Date: Mon, 16 Feb 2026 07:58:32 -0600
Subject: [PATCH] Redesign AI Moderation page - clean dropdowns, single engine selector, terminal-style test output

---
 admin/src/app/ai-moderation/page.tsx | 942 +++++++++------------------
 1 file changed, 320 insertions(+), 622 deletions(-)

diff --git a/admin/src/app/ai-moderation/page.tsx b/admin/src/app/ai-moderation/page.tsx
index 689748c..01d502b 100644
--- a/admin/src/app/ai-moderation/page.tsx
+++ b/admin/src/app/ai-moderation/page.tsx
@@ -3,23 +3,34 @@
 import AdminShell from '@/components/AdminShell';
 import { api } from '@/lib/api';
 import { useEffect, useState, useCallback, useRef } from 'react';
-import { Brain, Search, Check, Power, PowerOff, ChevronDown, Play, Loader2, Eye, MessageSquare, Video, Sparkles, Shield, MapPin, Users, AlertTriangle, RefreshCw, Server, Cloud, Cpu, Zap } from 'lucide-react';
+import { Brain, Search, Check, ChevronDown, Play, Loader2, Eye, MessageSquare, Video, Shield, MapPin, Users, AlertTriangle, Server, Cloud, Cpu, Terminal } from 'lucide-react';
 
 const MODERATION_TYPES = [
-  { key: 'text', label: 'Text Moderation', icon: MessageSquare, desc: 'Analyze post text, comments, and captions for policy violations' },
-  { key: 'image', label: 'Image Moderation', icon: Eye, desc: 'Analyze uploaded images for inappropriate content (requires vision model)' },
-  { key: 'video', label: 'Video Moderation', icon: Video, desc: 'Analyze video frames extracted from Quips (requires vision model)' },
-  { key: 'group_text', label: 'Group Chat Moderation', icon: Users, desc: 'AI moderation for private group messages — pre-send check before E2EE encryption' },
-  { key: 'group_image', label: 'Group Image Moderation', icon: Shield, desc: 'AI moderation for images shared in private groups (requires vision model)' },
-  { key: 'beacon_text', label: 'Beacon Text Moderation', icon: MapPin, desc: 'AI moderation for beacon reports — safety/incident content on the map' },
-  { key: 'beacon_image', label: 'Beacon Image Moderation', icon: AlertTriangle, desc: 'AI moderation for beacon images — photos attached to safety reports (requires vision model)' },
+  { key: 'text', label: 'Text Moderation', icon: MessageSquare },
+  { key: 'image', label: 'Image Moderation', icon: Eye },
+  { key: 'video', label: 'Video Moderation', icon: Video },
+  { key: 'group_text', label: 'Group Chat', icon: Users },
+  { key: 'group_image', label: 'Group Image', icon: Shield },
+  { key: 'beacon_text', label: 'Beacon Text', icon: MapPin },
+  { key: 'beacon_image', label: 'Beacon Image', icon: AlertTriangle },
+];
+
+const ENGINES = [
+  { id: 'local_ai', label: 'Local AI (Ollama)', icon: Cpu },
+  { id: 'openrouter', label: 'OpenRouter', icon: Cloud },
+  { id: 'openai', label: 'OpenAI', icon: Server },
+  { id: 'google', label: 'Google Vision', icon: Eye },
+];
+
+const LOCAL_MODELS = [
+  { id: 'llama-guard3:1b', name: 'LLaMA Guard 3 (1B)' },
+  { id: 'qwen2.5:7b-instruct-q4_K_M', name: 'Qwen 2.5 (7B)' },
 ];
 
 interface ModelInfo {
   id: string;
   name: string;
-  description?: string;
-  pricing: { prompt: string; completion: string; image?: string };
+  pricing: { prompt: string; completion: string };
   context_length: number;
   architecture?: Record;
 }
@@ -35,270 +46,93 @@ interface ModerationConfig {
   updated_at: string;
 }
 
-const ALL_ENGINES = [
-  { id: 'local_ai', label: 'Local AI', desc: 'Ollama llama-guard (free, on-server)', icon: Cpu },
-  { id: 'openrouter', label: 'OpenRouter', desc: 'Cloud models (configurable below)', icon: Cloud },
-  { id: 'openai', label: 'OpenAI', desc: 'Three Poisons moderation API', icon: Server },
-  { id: 'google', label: 'Google Vision', desc: 'SafeSearch image moderation API', icon: Eye },
-];
-
 interface EngineInfo {
   id: string;
   name: string;
-  description: string;
   status: string;
-  configured: boolean;
-  details?: any;
 }
 
 export default function AIModerationPage() {
   const [configs, setConfigs] = useState([]);
-  const [loading, setLoading] = useState(true);
-  const [activeType, setActiveType] = useState('text');
   const [engines, setEngines] = useState([]);
-  const [enginesLoading, setEnginesLoading] = useState(true);
-
-  const loadConfigs = useCallback(() => {
-    setLoading(true);
-    api.getAIModerationConfigs()
-      .then((data) => setConfigs(data.configs || []))
-      .catch(() => {})
-      .finally(() => setLoading(false));
-  }, []);
-
-  const loadEngines = useCallback(() => {
-    setEnginesLoading(true);
-    api.getAIEngines()
-      .then((data) => setEngines(data.engines || []))
-      .catch(() => {})
-      .finally(() => setEnginesLoading(false));
-  }, []);
-
-  useEffect(() => { loadConfigs(); loadEngines(); }, [loadConfigs, loadEngines]);
-
-  const getConfig = (type: string) => configs.find(c => c.moderation_type === type);
-
-  return (
-
-
-

- AI Moderation -

-

Configure and monitor AI moderation engines

-
- - {/* Engines Status Panel */} -
-
-

- Moderation Engines -

- -
-
- {engines.map((engine) => { - const Icon = engine.id === 'local_ai' ? Cpu : engine.id === 'openrouter' ? Cloud : engine.id === 'google' ? Eye : Server; - const statusColor = engine.status === 'ready' ? 'text-green-600 bg-green-50 border-green-200' : - engine.status === 'down' ? 'text-red-600 bg-red-50 border-red-200' : - engine.status === 'not_configured' ? 'text-gray-400 bg-gray-50 border-gray-200' : - 'text-amber-600 bg-amber-50 border-amber-200'; - const dotColor = engine.status === 'ready' ? 'bg-green-500' : - engine.status === 'down' ? 'bg-red-500' : - engine.status === 'not_configured' ? 'bg-gray-300' : 'bg-amber-500'; - return ( -
-
-
- - {engine.name} -
- - - {engine.status === 'ready' ? 'Online' : engine.status === 'down' ? 'Down' : engine.status === 'not_configured' ? 'Not Configured' : engine.status} - -
-

{engine.description}

- {engine.details && engine.id === 'local_ai' && engine.status === 'ready' && ( -
- Redis: {engine.details.redis} - Ollama: {engine.details.ollama} - Judge Q: {engine.details.queue_judge} - Writer Q: {engine.details.queue_writer} -
- )} - {engine.details && engine.id === 'openrouter' && ( -
- {engine.details.enabled_configs}/{engine.details.total_configs} configs enabled -
- )} -
- ); - })} - {engines.length === 0 && !enginesLoading && ( -
No engine data available
- )} -
-
- - {/* Config Cards */} -
- {MODERATION_TYPES.map((mt) => { - const config = getConfig(mt.key); - const Icon = mt.icon; - return ( - - ); - })} -
- - {/* Active Config Editor */} - -
- ); -} - -// ─── Local AI models available on Ollama ───────── -const LOCAL_MODELS = [ - { id: 'llama-guard3:1b', name: 'LLaMA Guard 3 (1B)', desc: 'Content safety classifier — fast, accurate moderation', type: 'judge' }, - { id: 'qwen2.5:7b-instruct-q4_K_M', name: 'Qwen 2.5 (7B)', desc: 'General-purpose reasoning model — slower, deeper analysis', type: 'writer' }, -]; - -// ─── Config Editor for a single moderation type ───────── - -function ConfigEditor({ moderationType, config, onSaved }: { - moderationType: string; - config?: ModerationConfig; - onSaved: () => void; -}) { - const [modelId, setModelId] = useState(config?.model_id || ''); - const [modelName, setModelName] = useState(config?.model_name || ''); - const [systemPrompt, setSystemPrompt] = useState(config?.system_prompt || ''); - const [enabled, setEnabled] = useState(config?.enabled || false); - const [engines, setEngines] = useState(config?.engines || ['local_ai', 'openrouter', 'openai']); + const [loading, setLoading] = useState(true); + + // Selection states + const [selectedType, setSelectedType] = useState('text'); + const [selectedEngine, setSelectedEngine] = useState('local_ai'); + + // Config states + const [enabled, setEnabled] = useState(false); + const [modelId, setModelId] = useState(''); + const [modelName, setModelName] = useState(''); + const [systemPrompt, setSystemPrompt] = useState(''); const [saving, setSaving] = useState(false); - const [activeTab, setActiveTab] = useState(engines[0] || 'local_ai'); - - const toggleEngine = (engineId: string) => { - setEngines(prev => { - const next = prev.includes(engineId) ? prev.filter(e => e !== engineId) : [...prev, engineId]; - // If we just enabled this engine, switch tab to it - if (!prev.includes(engineId)) setActiveTab(engineId); - // If we disabled the active tab, switch to first remaining - if (engineId === activeTab && next.length > 0) setActiveTab(next[0]); - return next; - }); - }; - - // Sync testEngine with activeTab when tab changes - useEffect(() => { - setTestEngine(activeTab); - }, [activeTab]); - + // OpenRouter model picker const [showPicker, setShowPicker] = useState(false); const [models, setModels] = useState([]); const [modelsLoading, setModelsLoading] = useState(false); const [searchTerm, setSearchTerm] = useState(''); - const [capability, setCapability] = useState(''); - const searchTimer = useRef(null); - - // Test + + // Test states const [testInput, setTestInput] = useState(''); const [testResponse, setTestResponse] = useState(null); const [testing, setTesting] = useState(false); - const [testEngine, setTestEngine] = useState(engines[0] || 'local_ai'); + const [testHistory, setTestHistory] = useState([]); - const loadModels = useCallback((search?: string, cap?: string) => { - setModelsLoading(true); - api.listOpenRouterModels({ search, capability: cap }) - .then((data) => setModels(data.models || [])) - .catch(() => {}) - .finally(() => setModelsLoading(false)); + const loadConfigs = useCallback(() => { + setLoading(true); + Promise.all([ + api.getAIModerationConfigs(), + api.getAIEngines() + ]) + .then(([configData, engineData]) => { + setConfigs(configData.configs || []); + setEngines(engineData.engines || []); + }) + .finally(() => setLoading(false)); }, []); + useEffect(() => { loadConfigs(); }, [loadConfigs]); + + // Load config when type changes useEffect(() => { - if (showPicker) loadModels(searchTerm || undefined, capability || undefined); - }, [showPicker]); + const config = configs.find(c => c.moderation_type === selectedType); + 
+    if (config) {
+      setEnabled(config.enabled);
+      setModelId(config.model_id || '');
+      setModelName(config.model_name || '');
+      setSystemPrompt(config.system_prompt || '');
+      if (config.engines && config.engines.length > 0) {
+        setSelectedEngine(config.engines[0]);
+      }
+    } else {
+      setEnabled(false);
+      setModelId('');
+      setModelName('');
+      setSystemPrompt('');
+    }
+  }, [selectedType, configs]);
 
-  const onSearchChange = (val: string) => {
-    setSearchTerm(val);
-    if (searchTimer.current) clearTimeout(searchTimer.current);
-    searchTimer.current = setTimeout(() => {
-      loadModels(val || undefined, capability || undefined);
-    }, 400);
-  };
-
-  const onCapabilityChange = (val: string) => {
-    setCapability(val);
-    loadModels(searchTerm || undefined, val || undefined);
-  };
-
-  const selectModel = (m: ModelInfo) => {
-    setModelId(m.id);
-    setModelName(m.name);
-    setShowPicker(false);
-  };
+  const loadModels = useCallback((search?: string) => {
+    setModelsLoading(true);
+    api.listOpenRouterModels({ search })
+      .then((data) => setModels(data.models || []))
+      .finally(() => setModelsLoading(false));
+  }, []);
 
   const handleSave = async () => {
     setSaving(true);
     try {
       await api.setAIModerationConfig({
-        moderation_type: moderationType,
+        moderation_type: selectedType,
         model_id: modelId,
         model_name: modelName,
         system_prompt: systemPrompt,
         enabled,
-        engines,
+        engines: [selectedEngine],
       });
-      onSaved();
+      loadConfigs();
     } catch (e: any) {
       alert(e.message);
     } finally {
@@ -309,12 +143,12 @@ function ConfigEditor({ moderationType, config, onSaved }: {
   const handleTest = async () => {
     if (!testInput.trim()) return;
     setTesting(true);
-    setTestResponse(null);
+    const startTime = Date.now();
     try {
-      const isImage = moderationType.includes('image') || moderationType === 'video';
+      const isImage = selectedType.includes('image') || selectedType === 'video';
       const data: any = {
-        moderation_type: moderationType,
-        engine: testEngine,
+        moderation_type: selectedType,
+        engine: selectedEngine,
       };
       if (isImage) {
         data.image_url = testInput;
@@ -322,418 +156,282 @@ function ConfigEditor({ moderationType, config, onSaved }: {
         data.content = testInput;
       }
       const res = await api.testAIModeration(data);
-      setTestResponse(res);
+      const duration = Date.now() - startTime;
+      const entry = { ...res, timestamp: new Date().toISOString(), duration };
+      setTestResponse(entry);
+      setTestHistory(prev => [entry, ...prev].slice(0, 10));
     } catch (e: any) {
-      setTestResponse({ error: e.message, engine: testEngine, moderation_type: moderationType, input: testInput });
+      const entry = {
+        error: e.message,
+        engine: selectedEngine,
+        moderation_type: selectedType,
+        input: testInput,
+        timestamp: new Date().toISOString(),
+        duration: Date.now() - startTime
+      };
+      setTestResponse(entry);
+      setTestHistory(prev => [entry, ...prev].slice(0, 10));
     } finally {
       setTesting(false);
     }
   };
 
-  const isFree = (m: ModelInfo) => m.pricing.prompt === '0' || m.pricing.prompt === '0.0';
-  const isVision = (m: ModelInfo) => {
-    const modality = m.architecture?.modality;
-    return typeof modality === 'string' && modality.includes('image');
+  const typeLabel = MODERATION_TYPES.find(t => t.key === selectedType)?.label || selectedType;
+  const engineLabel = ENGINES.find(e => e.id === selectedEngine)?.label || selectedEngine;
+
+  const getEngineStatus = (id: string) => {
+    const engine = engines.find(e => e.id === id);
+    if (!engine) return { color: 'text-gray-400', dot: 'bg-gray-300', label: 'Unknown' };
+    if (engine.status === 'ready') return { color: 'text-green-600', dot: 'bg-green-500', label: 'Online' };
+    if (engine.status === 'down') return { color: 'text-red-600', dot: 'bg-red-500', label: 'Down' };
+    return { color: 'text-gray-400', dot: 'bg-gray-300', label: 'Not Configured' };
   };
-  const typeLabel = MODERATION_TYPES.find(t => t.key === moderationType)?.label || moderationType;
-
   return (
-
- {/* Header */} -
-

{typeLabel}

- -
+ +
+

+ AI Moderation +

+

Configure AI moderation engines

+
- {/* Engine Tabs — compact pill toggles */} -
- {ALL_ENGINES.map((eng) => { - const active = engines.includes(eng.id); - const isTab = activeTab === eng.id; - const Icon = eng.icon; + {/* Engine Status - Compact */} +
+
+ Engine Status: + {ENGINES.map(eng => { + const status = getEngineStatus(eng.id); return ( -
- - {/* Active indicator dot */} - {active && ( - - )} - {/* X button to disable */} - {active && engines.length > 1 && ( - - )} +
+ + {eng.label}
); })}
+
- {engines.length === 0 && ( -
Select at least one engine above
- )} - - {/* ─── Local AI Config Panel ─── */} - {activeTab === 'local_ai' && engines.includes('local_ai') && ( -
-
- - Local AI — On-Server Ollama - Free -
-

Runs locally on your server. No data leaves the machine. Select the model to use for this moderation type:

+
+ {/* Left: Configuration */} +
+ {/* Type Selector */} +
+ -

LLaMA Guard 3 is recommended for moderation — fast (~1-2s) and purpose-built for safety classification.

- )} - {/* ─── OpenRouter Config Panel ─── */} - {activeTab === 'openrouter' && engines.includes('openrouter') && ( -
-
- - OpenRouter — Cloud Models - Paid / Free Tier -
- - {/* Model Dropdown */} - -
setShowPicker(!showPicker)} - className="flex items-center justify-between px-3 py-2.5 border border-warm-300 rounded-lg cursor-pointer hover:bg-white transition-colors bg-white mb-2" + {/* Engine Selector */} +
+ + +
- {/* Model Picker */} - {showPicker && ( -
-
-
- - onSearchChange(e.target.value)} - className="w-full pl-9 pr-3 py-2 text-sm border border-warm-300 rounded-lg focus:outline-none focus:ring-2 focus:ring-brand-500" - /> -
- + {LOCAL_MODELS.map(m => ( + + ))} + +
+ )} + + {selectedEngine === 'openrouter' && ( +
+
+ +
setShowPicker(!showPicker)} + className="flex items-center justify-between px-3 py-2 border border-gray-300 rounded-lg cursor-pointer hover:bg-gray-50" > - - - - -
-
- {modelsLoading ? ( -
- Loading... -
- ) : models.length === 0 ? ( -
No models found
- ) : ( - models.map((m) => ( -
selectModel(m)} - className={`px-3 py-2 border-b border-warm-100 cursor-pointer hover:bg-brand-50 transition-colors ${modelId === m.id ? 'bg-brand-50' : ''}`} - > -
-
- {modelId === m.id && } -
-
{m.name}
-
{m.id}
-
-
-
- {isVision(m) && Vision} - {isFree(m) ? ( - Free - ) : ( - ${m.pricing.prompt}/tok - )} - {(m.context_length / 1000).toFixed(0)}k -
-
-
- )) - )} -
-
- )} - - {/* System Prompt */} - -