/**
 * Voice Debug Screen
 *
 * Real-time debugging interface for the voice recognition pipeline.
 * Shows all events, timers, API calls, and state changes.
 */

import React, { useState, useEffect, useRef, useCallback } from 'react';
import {
  View,
  Text,
  ScrollView,
  StyleSheet,
  TouchableOpacity,
  Platform,
} from 'react-native';
import { useSafeAreaInsets } from 'react-native-safe-area-context';
import { Feather } from '@expo/vector-icons';
import { useVoice } from '@/contexts/VoiceContext';
import { useSpeechRecognition } from '@/hooks/useSpeechRecognition';
import { AppColors } from '@/constants/theme';
import { useColorScheme } from '@/hooks/use-color-scheme';

interface LogEntry {
  id: string;
  timestamp: number;
  category: 'stt' | 'api' | 'tts' | 'timer' | 'system';
  message: string;
  level: 'info' | 'warning' | 'error' | 'success';
  data?: any;
}

export default function VoiceDebugScreen() {
  const colorScheme = useColorScheme();
  const isDark = colorScheme === 'dark';
  const insets = useSafeAreaInsets();

  const {
    isListening,
    isSpeaking,
    status,
    startSession,
    stopSession,
    voiceApiType,
  } = useVoice();

  const {
    isListening: sttIsListening,
    partialTranscript,
    recognizedText,
  } = useSpeechRecognition({
    lang: 'en-US',
    continuous: true,
    interimResults: true,
  });

  const [logs, setLogs] = useState<LogEntry[]>([]);
  const [silenceTimer, setSilenceTimer] = useState(0);
  const scrollViewRef = useRef<ScrollView>(null);
  const logIdCounter = useRef(0);
  const lastPartialRef = useRef('');

  // Add a log entry, keep the list bounded, and auto-scroll to the newest entry
  const addLog = useCallback((
    category: LogEntry['category'],
    message: string,
    level: LogEntry['level'] = 'info',
    data?: any
  ) => {
    const entry: LogEntry = {
      id: `log-${logIdCounter.current++}`,
      timestamp: Date.now(),
      category,
      message,
      level,
      data,
    };
    console.log(`[VoiceDebug:${category}]`, message, data || '');
    setLogs(prev => {
      const updated = [...prev, entry];
      // Keep only the last 100 logs
      return updated.slice(-100);
    });
    setTimeout(() => {
      scrollViewRef.current?.scrollToEnd({ animated: true });
    }, 50);
  }, []);

  // Clear logs
  const clearLogs = useCallback(() => {
    setLogs([]);
    logIdCounter.current = 0;
    addLog('system', 'Logs cleared', 'info');
  }, [addLog]);

  // Monitor voice session state
  useEffect(() => {
    if (isListening) {
      addLog('system', '🎤 Voice session STARTED', 'success');
    } else {
      addLog('system', '⏹️ Voice session STOPPED', 'info');
      setSilenceTimer(0);
    }
  }, [isListening, addLog]);

  // Monitor STT state
  useEffect(() => {
    if (sttIsListening) {
      addLog('stt', '▶️ STT listening started', 'success');
    } else if (isListening) {
      addLog('stt', '⏸️ STT stopped (but session active)', 'warning');
    }
  }, [sttIsListening, isListening, addLog]);

  // Monitor status changes
  useEffect(() => {
    if (status === 'processing') {
      addLog('api', '⚙️ Processing transcript → sending to API', 'info');
    } else if (status === 'speaking') {
      addLog('tts', '🔊 TTS playing (Julia speaking)', 'info');
    } else if (status === 'listening') {
      addLog('system', '👂 Ready to listen', 'info');
    }
  }, [status, addLog]);

  // Monitor partial transcripts
  useEffect(() => {
    if (partialTranscript && partialTranscript !== lastPartialRef.current) {
      lastPartialRef.current = partialTranscript;
      addLog(
        'stt',
        `📝 Partial: "${partialTranscript.slice(0, 40)}${partialTranscript.length > 40 ? '...' : ''}"`,
        'info'
      );
      // Reset silence timer
      setSilenceTimer(0);
      addLog('timer', '🔄 Silence timer RESET', 'warning');
    }
  }, [partialTranscript, addLog]);

  // Monitor final transcripts
  useEffect(() => {
    if (recognizedText && recognizedText !== lastPartialRef.current) {
      addLog(
        'stt',
        `✅ FINAL: "${recognizedText.slice(0, 40)}${recognizedText.length > 40 ? '...' : ''}"`,
        'success',
        { length: recognizedText.length, transcript: recognizedText }
      );
      addLog('api', '📤 Sending to API...', 'info');
    }
  }, [recognizedText, addLog]);

  // Silence timer (only when STT is listening and not processing/speaking)
  useEffect(() => {
    let interval: NodeJS.Timeout | null = null;
    if (sttIsListening && status !== 'processing' && status !== 'speaking') {
      interval = setInterval(() => {
        setSilenceTimer(prev => {
          const next = prev + 100;
          // Log milestones
          if (next === 1000) {
            addLog('timer', '⏱️ Silence: 1.0s', 'info');
          } else if (next === 1500) {
            addLog('timer', '⏱️ Silence: 1.5s', 'warning');
          } else if (next === 2000) {
            addLog('timer', '🛑 Silence: 2.0s → AUTO-STOP triggered', 'error');
          }
          return next;
        });
      }, 100);
    } else {
      setSilenceTimer(0);
    }
    return () => {
      if (interval) clearInterval(interval);
    };
  }, [sttIsListening, status, addLog]);

  // Get status indicator
  const getStatusDisplay = () => {
    if (status === 'speaking' || isSpeaking) {
      return { color: '#9333EA', icon: '🔊', text: 'Speaking' };
    }
    if (status === 'processing') {
      return { color: '#F59E0B', icon: '⚙️', text: 'Processing' };
    }
    if (isListening && sttIsListening) {
      return { color: '#10B981', icon: '🟢', text: 'Listening' };
    }
    if (isListening && !sttIsListening) {
      return { color: '#F59E0B', icon: '🟡', text: 'Session Active (STT Off)' };
    }
    return { color: '#6B7280', icon: '⚪', text: 'Idle' };
  };

  const statusDisplay = getStatusDisplay();
  const silenceProgress = Math.min(silenceTimer / 2000, 1);
  const silenceSeconds = (silenceTimer / 1000).toFixed(1);

  // Log level colors
  const getLogColor = (level: LogEntry['level']) => {
    switch (level) {
      case 'error': return '#EF4444';
      case 'warning': return '#F59E0B';
      case 'success': return '#10B981';
      default: return isDark ? '#D1D5DB' : '#374151';
    }
  };

  // Category icons
  const getCategoryIcon = (category: LogEntry['category']) => {
    switch (category) {
      case 'stt': return '🎤';
      case 'api': return '📡';
      case 'tts': return '🔊';
      case 'timer': return '⏱️';
      case 'system': return '⚙️';
      default: return '•';
    }
  };

  // Platform badge
  const platformBadge = Platform.OS === 'ios' ? '🍎 iOS' : '🤖 Android';
  const platformColor = Platform.OS === 'ios' ? '#007AFF' : '#3DDC84';

  return (
    <View style={[styles.container, { backgroundColor: isDark ? '#111827' : '#F9FAFB', paddingTop: insets.top }]}>
      {/* Header */}
      <View style={styles.header}>
        <View style={styles.headerLeft}>
          <Text style={[styles.headerTitle, { color: isDark ? '#F9FAFB' : '#111827' }]}>Voice Debug</Text>
          <View style={[styles.platformBadge, { backgroundColor: platformColor }]}>
            <Text style={styles.platformBadgeText}>{platformBadge}</Text>
          </View>
        </View>
        <TouchableOpacity style={styles.clearButton} onPress={clearLogs}>
          <Feather name="trash-2" size={20} color={isDark ? '#9CA3AF' : '#6B7280'} />
        </TouchableOpacity>
      </View>

      {/* Status Card */}
      <View style={[styles.statusCard, { backgroundColor: isDark ? '#1F2937' : '#FFFFFF', borderLeftColor: statusDisplay.color }]}>
        <View style={styles.statusRow}>
          <Text style={styles.statusIcon}>{statusDisplay.icon}</Text>
          <View style={styles.statusTextContainer}>
            <Text style={[styles.statusLabel, { color: isDark ? '#9CA3AF' : '#6B7280' }]}>Status</Text>
            <Text style={[styles.statusText, { color: statusDisplay.color }]}>{statusDisplay.text}</Text>
          </View>
        </View>

        {/* Voice API Type */}
        <View style={styles.transcriptContainer}>
          <Text style={[styles.transcriptLabel, { color: isDark ? '#9CA3AF' : '#6B7280' }]}>📡 API Function</Text>
          <Text style={[styles.transcriptText, { color: isDark ? '#D1D5DB' : '#374151' }]}>{voiceApiType}</Text>
        </View>

        {/* Silence Timer */}
        {sttIsListening && status !== 'processing' && status !== 'speaking' && (
          <View style={styles.timerContainer}>
            <Text style={[styles.timerLabel, { color: isDark ? '#9CA3AF' : '#6B7280' }]}>
              Silence Timer ({Platform.OS === 'ios' ? 'iOS' : 'Android'} auto-stop at 2.0s)
            </Text>
            <View style={styles.timerRow}>
              <Text style={[styles.timerText, {
                color: silenceTimer >= 2000 ? '#EF4444' : silenceTimer >= 1500 ? '#F59E0B' : isDark ? '#D1D5DB' : '#374151',
              }]}>
                {silenceSeconds}s / 2.0s
              </Text>
            </View>
            <View style={[styles.progressBarContainer, { backgroundColor: 'rgba(156, 163, 175, 0.2)' }]}>
              <View style={[styles.progressBarFill, {
                width: `${silenceProgress * 100}%`,
                backgroundColor: silenceTimer >= 2000 ? '#EF4444' : silenceTimer >= 1500 ? '#F59E0B' : '#10B981',
              }]} />
            </View>
          </View>
        )}

        {/* Current Transcripts */}
        {partialTranscript && (
          <View style={styles.transcriptContainer}>
            <Text style={[styles.transcriptLabel, { color: isDark ? '#9CA3AF' : '#6B7280' }]}>Partial:</Text>
            <Text style={[styles.transcriptText, { color: isDark ? '#D1D5DB' : '#374151' }]}>"{partialTranscript}"</Text>
          </View>
        )}
        {recognizedText && (
          <View style={styles.transcriptContainer}>
            <Text style={[styles.transcriptLabel, { color: isDark ? '#9CA3AF' : '#6B7280' }]}>Final:</Text>
            <Text style={[styles.transcriptText, { color: isDark ? '#D1D5DB' : '#374151' }]}>"{recognizedText}"</Text>
          </View>
        )}
      </View>

      {/* Logs */}
      <View style={styles.logsContainer}>
        <Text style={[styles.logsTitle, { color: isDark ? '#F9FAFB' : '#111827' }]}>Event Log</Text>
        <ScrollView
          ref={scrollViewRef}
          style={[styles.logsScrollView, { backgroundColor: isDark ? '#1F2937' : '#FFFFFF' }]}
          contentContainerStyle={styles.logsContent}
        >
          {logs.length === 0 ? (
            <Text style={[styles.emptyText, { color: isDark ? '#6B7280' : '#9CA3AF' }]}>
              No events yet. Press FAB to start.
            </Text>
          ) : (
            logs.map(log => {
              const time = new Date(log.timestamp);
              const timeStr = `${String(time.getHours()).padStart(2, '0')}:${String(time.getMinutes()).padStart(2, '0')}:${String(time.getSeconds()).padStart(2, '0')}.${String(time.getMilliseconds()).padStart(3, '0')}`;
              return (
                <View key={log.id} style={styles.logEntry}>
                  <Text style={[styles.logTimestamp, { color: isDark ? '#6B7280' : '#9CA3AF' }]}>{timeStr}</Text>
                  <Text style={styles.logIcon}>{getCategoryIcon(log.category)}</Text>
                  <Text style={[styles.logMessage, { color: getLogColor(log.level) }]}>{log.message}</Text>
                </View>
              );
            })
          )}
        </ScrollView>
      </View>

      {/* FAB */}
      <TouchableOpacity
        style={[styles.fab, { bottom: insets.bottom + 20, backgroundColor: isListening ? '#EF4444' : AppColors.primary }]}
        onPress={() => {
          if (isListening) {
            addLog('system', '🛑 User stopped session', 'warning');
            stopSession();
          } else {
            clearLogs();
            addLog('system', '▶️ User started session', 'success');
            startSession();
          }
        }}
      >
        <Feather name={isListening ? 'square' : 'mic'} size={28} color="#FFFFFF" />
      </TouchableOpacity>
    </View>
  );
}

const styles = StyleSheet.create({
  container: {
    flex: 1,
  },
  header: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'space-between',
    paddingHorizontal: 20,
    paddingBottom: 16,
  },
  headerLeft: {
    flexDirection: 'row',
    alignItems: 'center',
    gap: 12,
  },
  headerTitle: {
    fontSize: 28,
    fontWeight: '700',
  },
  platformBadge: {
    paddingHorizontal: 10,
    paddingVertical: 4,
    borderRadius: 12,
  },
  platformBadgeText: {
    color: '#FFFFFF',
    fontSize: 12,
    fontWeight: '700',
  },
  clearButton: {
    padding: 8,
  },
  statusCard: {
    marginHorizontal: 20,
    marginBottom: 16,
    padding: 16,
    borderRadius: 12,
    borderLeftWidth: 4,
  },
  statusRow: {
    flexDirection: 'row',
    alignItems: 'center',
  },
  statusIcon: {
    fontSize: 32,
    marginRight: 12,
  },
  statusTextContainer: {
    flex: 1,
  },
  statusLabel: {
    fontSize: 12,
    fontWeight: '500',
    marginBottom: 2,
  },
  statusText: {
    fontSize: 18,
    fontWeight: '700',
  },
  timerContainer: {
    marginTop: 16,
    paddingTop: 16,
    borderTopWidth: 1,
    borderTopColor: 'rgba(156, 163, 175, 0.2)',
  },
  timerLabel: {
    fontSize: 12,
    fontWeight: '500',
    marginBottom: 8,
  },
  timerRow: {
    marginBottom: 8,
  },
  timerText: {
    fontSize: 24,
    fontWeight: '700',
    fontVariant: ['tabular-nums'],
  },
  progressBarContainer: {
    height: 8,
    borderRadius: 4,
    overflow: 'hidden',
  },
  progressBarFill: {
    height: '100%',
    borderRadius: 4,
  },
  transcriptContainer: {
    marginTop: 12,
    paddingTop: 12,
    borderTopWidth: 1,
    borderTopColor: 'rgba(156, 163, 175, 0.2)',
  },
  transcriptLabel: {
    fontSize: 12,
    fontWeight: '500',
    marginBottom: 4,
  },
  transcriptText: {
    fontSize: 14,
    fontStyle: 'italic',
  },
  logsContainer: {
    flex: 1,
    marginHorizontal: 20,
  },
  logsTitle: {
    fontSize: 16,
    fontWeight: '700',
    marginBottom: 8,
  },
  logsScrollView: {
    flex: 1,
    borderRadius: 8,
  },
  logsContent: {
    padding: 12,
  },
  emptyText: {
    textAlign: 'center',
    fontSize: 14,
    fontStyle: 'italic',
    paddingVertical: 20,
  },
  logEntry: {
    flexDirection: 'row',
    marginBottom: 8,
    alignItems: 'flex-start',
  },
  logTimestamp: {
    fontSize: 11,
    fontVariant: ['tabular-nums'],
    marginRight: 8,
    width: 80,
  },
  logIcon: {
    fontSize: 14,
    marginRight: 6,
  },
  logMessage: {
    fontSize: 13,
    flex: 1,
    lineHeight: 18,
  },
  fab: {
    position: 'absolute',
    right: 20,
    width: 64,
    height: 64,
    borderRadius: 32,
    alignItems: 'center',
    justifyContent: 'center',
    shadowColor: '#000',
    shadowOffset: { width: 0, height: 4 },
    shadowOpacity: 0.3,
    shadowRadius: 8,
    elevation: 8,
  },
});