/**
 * Debug Screen - Voice Call Testing with Detailed Logs
 *
 * All-in-one screen for testing Julia AI voice:
 * - Start/End call buttons
 * - Speaker/Earpiece toggle with logging
 * - Real-time logs of all LiveKit events
 * - Copy logs button
 * - Works on both iOS and Android
 */
import React, { useState, useEffect, useRef, useCallback } from 'react';
import {
  View,
  Text,
  StyleSheet,
  FlatList,
  TouchableOpacity,
  Platform,
  Share,
  AppState,
  AppStateStatus,
} from 'react-native';
import { SafeAreaView } from 'react-native-safe-area-context';
import { Ionicons } from '@expo/vector-icons';
import * as Clipboard from 'expo-clipboard';
import { activateKeepAwakeAsync, deactivateKeepAwake } from 'expo-keep-awake';
import type { Room as RoomType } from 'livekit-client';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import { getToken, VOICE_NAME } from '@/services/livekitService';
import {
  configureAudioForVoiceCall,
  stopAudioSession,
  setAudioOutput,
} from '@/utils/audioSession';
import {
  startVoiceCallService,
  stopVoiceCallService,
  checkAndPromptBatteryOptimization,
  requestNotificationPermission,
} from '@/utils/androidVoiceService';
import Constants from 'expo-constants';

const APP_VERSION = Constants.expoConfig?.version ?? '?.?.?';

type LogEntry = {
  id: string;
  time: string;
  message: string;
  type: 'info' | 'success' | 'error' | 'event';
};
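
// Example of a single entry produced by the `log` helper below (illustrative values, not captured output):
//   { id: '1712345678901-0.42', time: '14:03:07.042', message: 'Connected to room', type: 'success' }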

type CallState = 'idle' | 'connecting' | 'connected' | 'ending';

export default function DebugScreen() {
  const [logs, setLogs] = useState<LogEntry[]>([]);
  const [callState, setCallState] = useState<CallState>('idle');
  const [callDuration, setCallDuration] = useState(0);
  const [isSpeakerOn, setIsSpeakerOn] = useState(true); // Default to speaker

  const flatListRef = useRef<FlatList>(null);
  const roomRef = useRef<RoomType | null>(null);
  const callStartTimeRef = useRef<number | null>(null);
  const appStateRef = useRef<AppStateStatus>(AppState.currentState);

  // Add log entry
  const log = useCallback((message: string, type: LogEntry['type'] = 'info') => {
    const time = new Date().toLocaleTimeString('en-US', {
      hour12: false,
      hour: '2-digit',
      minute: '2-digit',
      second: '2-digit',
    });
    const ms = String(new Date().getMilliseconds()).padStart(3, '0');
    setLogs(prev => [
      ...prev,
      {
        id: `${Date.now()}-${Math.random()}`,
        time: `${time}.${ms}`,
        message,
        type,
      },
    ]);
  }, []);

  // Clear logs
  const clearLogs = useCallback(() => {
    setLogs([]);
  }, []);

  // Copy logs to clipboard
  const copyLogs = useCallback(async () => {
    const text = logs.map(l => `[${l.time}] ${l.message}`).join('\n');
    await Clipboard.setStringAsync(text);
    log('Logs copied to clipboard!', 'success');
  }, [logs, log]);

  // Share logs
  const shareLogs = useCallback(async () => {
    const text = logs.map(l => `[${l.time}] ${l.message}`).join('\n');
    try {
      await Share.share({ message: text, title: 'Voice Debug Logs' });
    } catch (e) {
      log(`Share failed: ${e}`, 'error');
    }
  }, [logs, log]);

  // Auto-scroll to bottom
  useEffect(() => {
    if (logs.length > 0) {
      setTimeout(() => flatListRef.current?.scrollToEnd({ animated: true }), 100);
    }
  }, [logs]);

  // Call duration timer
  useEffect(() => {
    if (callState !== 'connected') return;
    const interval = setInterval(() => {
      if (callStartTimeRef.current) {
        setCallDuration(Math.floor((Date.now() - callStartTimeRef.current) / 1000));
      }
    }, 1000);
    return () => clearInterval(interval);
  }, [callState]);

  // Handle app background/foreground
  useEffect(() => {
    const subscription = AppState.addEventListener('change', (nextAppState) => {
      if (appStateRef.current.match(/inactive|background/) && nextAppState === 'active') {
        log('App returned to foreground', 'event');
      } else if (appStateRef.current === 'active' && nextAppState.match(/inactive|background/)) {
        log('App went to background - call continues', 'event');
      }
      appStateRef.current = nextAppState;
    });
    return () => subscription.remove();
  }, [log]);

  // Toggle speaker
  const toggleSpeaker = useCallback(async () => {
    const newState = !isSpeakerOn;
    log(`=== TOGGLING SPEAKER: ${isSpeakerOn ? 'ON' : 'OFF'} → ${newState ? 'ON' : 'OFF'} ===`, 'info');
    try {
      await setAudioOutput(newState);
      setIsSpeakerOn(newState);
      log(`Speaker toggled to ${newState ? 'ON (loud speaker)' : 'OFF (earpiece)'}`, 'success');
    } catch (err: any) {
      log(`Speaker toggle error: ${err?.message || err}`, 'error');
    }
  }, [isSpeakerOn, log]);

  // Start call
  const startCall = useCallback(async () => {
    if (callState !== 'idle') return;

    clearLogs();
    setCallState('connecting');
    setCallDuration(0);
    setIsSpeakerOn(true); // Reset speaker state
    callStartTimeRef.current = null;

    try {
      log('=== STARTING VOICE CALL ===', 'info');
      log(`Platform: ${Platform.OS} ${Platform.Version}`, 'info');

      // Android: Request notification permission and check battery optimization
      if (Platform.OS === 'android') {
        log('Android: Requesting notification permission...', 'info');
        const notifPermission = await requestNotificationPermission();
        log(
          `Notification permission: ${notifPermission ? 'granted' : 'denied'}`,
          notifPermission ? 'success' : 'info'
        );

        log('Android: Checking battery optimization...', 'info');
        const canProceed = await checkAndPromptBatteryOptimization();
        if (!canProceed) {
          log('User went to battery settings - call postponed', 'info');
          setCallState('idle');
          return;
        }
        log('Battery optimization check passed', 'success');
      }

      // Keep screen awake
      await activateKeepAwakeAsync('voiceCall').catch(() => {});
      log('Screen keep-awake activated', 'info');

      // Step 1: Register WebRTC globals
      log('Step 1: Importing @livekit/react-native...', 'info');
      const { registerGlobals } = await import('@livekit/react-native');
      if (typeof global.RTCPeerConnection === 'undefined') {
        log('Registering WebRTC globals...', 'info');
        registerGlobals();
        log('WebRTC globals registered', 'success');
      } else {
        log('WebRTC globals already registered', 'info');
      }

      // Step 2: Import livekit-client
      log('Step 2: Importing livekit-client...', 'info');
      const { Room, RoomEvent, ConnectionState, Track } = await import('livekit-client');
      log('livekit-client imported', 'success');

      // Step 3: Configure AudioSession (iOS + Android)
      log(`Step 3: Configuring AudioSession for ${Platform.OS}...`, 'info');
      try {
        await configureAudioForVoiceCall();
        log(`AudioSession configured for ${Platform.OS}`, 'success');
      } catch (audioErr: any) {
        log(`AudioSession config error: ${audioErr?.message || audioErr}`, 'error');
        // Continue anyway - might still work
      }

      // Step 4: Get token from server
      log('Step 4: Requesting token from server...', 'info');
      log(`Token server: wellnuo.smartlaunchhub.com/julia/token`, 'info');
      const result = await getToken(`user-${Date.now()}`);
      if (!result.success || !result.data) {
        throw new Error(result.error || 'Failed to get token');
      }
      const { token, wsUrl, roomName } = result.data;
      log(`Token received`, 'success');
      log(`Room: ${roomName}`, 'info');
      log(`WebSocket URL: ${wsUrl}`, 'info');
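
      // The getToken() result is assumed here to have the shape
      //   { success: boolean; error?: string; data?: { token: string; wsUrl: string; roomName: string } }
      // inferred from the checks and destructuring above.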

      // Step 5: Create room and setup listeners
      log('Step 5: Creating Room instance...', 'info');
      const newRoom = new Room();
      roomRef.current = newRoom;
      log('Room instance created', 'success');

      // Setup ALL event listeners
      log('Step 6: Setting up event listeners...', 'info');

      newRoom.on(RoomEvent.ConnectionStateChanged, (state: any) => {
        log(`EVENT: ConnectionStateChanged → ${state}`, 'event');
        if (state === ConnectionState.Connected) {
          setCallState('connected');
          callStartTimeRef.current = Date.now();
        } else if (state === ConnectionState.Disconnected) {
          setCallState('idle');
        }
      });

      newRoom.on(RoomEvent.Connected, () => {
        log('EVENT: Connected to room', 'success');
      });

      newRoom.on(RoomEvent.Disconnected, (reason?: any) => {
        log(`EVENT: Disconnected. Reason: ${reason || 'unknown'}`, 'event');
      });

      newRoom.on(RoomEvent.Reconnecting, () => {
        log('EVENT: Reconnecting...', 'event');
      });

      newRoom.on(RoomEvent.Reconnected, () => {
        log('EVENT: Reconnected', 'success');
      });

      newRoom.on(RoomEvent.ParticipantConnected, (participant: any) => {
        log(`EVENT: Participant connected: ${participant.identity}`, 'event');
      });

      newRoom.on(RoomEvent.ParticipantDisconnected, (participant: any) => {
        log(`EVENT: Participant disconnected: ${participant.identity}`, 'event');
      });

      newRoom.on(RoomEvent.TrackSubscribed, (track: any, publication: any, participant: any) => {
        log(`EVENT: Track subscribed: ${track.kind} from ${participant.identity}`, 'event');
        if (track.kind === Track.Kind.Audio) {
          log('Audio track from Julia AI - should hear voice now', 'success');
        }
      });

      newRoom.on(RoomEvent.TrackUnsubscribed, (track: any, publication: any, participant: any) => {
        log(`EVENT: Track unsubscribed: ${track.kind} from ${participant.identity}`, 'event');
      });

      newRoom.on(RoomEvent.TrackMuted, (publication: any, participant: any) => {
        log(`EVENT: Track muted by ${participant.identity}`, 'event');
      });

      newRoom.on(RoomEvent.TrackUnmuted, (publication: any, participant: any) => {
        log(`EVENT: Track unmuted by ${participant.identity}`, 'event');
      });

      newRoom.on(RoomEvent.ActiveSpeakersChanged, (speakers: any[]) => {
        if (speakers.length > 0) {
          log(`EVENT: Active speakers: ${speakers.map(s => s.identity).join(', ')}`, 'event');
        }
      });

      newRoom.on(RoomEvent.DataReceived, (payload: any, participant: any) => {
        try {
          const data = JSON.parse(new TextDecoder().decode(payload));
          log(`EVENT: Data received: ${JSON.stringify(data).substring(0, 100)}`, 'event');
        } catch (e) {
          log(`EVENT: Data received (binary)`, 'event');
        }
      });

      newRoom.on(RoomEvent.AudioPlaybackStatusChanged, () => {
        log(`EVENT: AudioPlaybackStatusChanged - canPlay: ${newRoom.canPlaybackAudio}`, 'event');
      });

      newRoom.on(RoomEvent.MediaDevicesError, (error: any) => {
        log(`EVENT: MediaDevicesError: ${error?.message || error}`, 'error');
      });

      newRoom.on(RoomEvent.RoomMetadataChanged, (metadata: string) => {
        log(`EVENT: RoomMetadataChanged: ${metadata}`, 'event');
      });

      log('Event listeners set up', 'success');

      // Step 7: Connect to room
      log('Step 7: Connecting to LiveKit room...', 'info');
      await newRoom.connect(wsUrl, token, { autoSubscribe: true });
      log('Connected to room', 'success');

      // Step 7.5: Start audio playback (required for iOS)
      log('Step 7.5: Starting audio playback...', 'info');
      await newRoom.startAudio();
      log(`Audio playback started, canPlay: ${newRoom.canPlaybackAudio}`, 'success');

      // Step 8: Enable microphone
      log('Step 8: Enabling microphone...', 'info');
      await newRoom.localParticipant.setMicrophoneEnabled(true);
      log('Microphone enabled', 'success');
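
      // Step 9 below inspects localParticipant.audioTrackPublications (a Map in
      // livekit-client, hence the .size / .forEach calls) to confirm that the
      // microphone track was actually published.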

      // Step 9: Log local audio track info
      log('Step 9: Checking local audio track...', 'info');
      const localAudioTracks = newRoom.localParticipant.audioTrackPublications;
      log(`Local audio publications: ${localAudioTracks.size}`, 'info');
      localAudioTracks.forEach((pub: any) => {
        log(`Local audio track: ${pub.trackSid}, muted: ${pub.isMuted}, source: ${pub.source}`, 'info');
        if (pub.track) {
          log(`Track mediaStreamTrack: ${pub.track.mediaStreamTrack ? 'exists' : 'NULL'}`, 'info');
          log(`Track enabled: ${pub.track.mediaStreamTrack?.enabled}`, 'info');
        }
      });

      // Listen for local track published
      newRoom.localParticipant.on('localTrackPublished', (pub: any) => {
        log(`MY TRACK PUBLISHED: ${pub.kind} sid=${pub.trackSid}`, 'success');
      });

      // Listen when I become an active speaker (means mic is working)
      newRoom.on(RoomEvent.ActiveSpeakersChanged, (speakers: any[]) => {
        const iAmSpeaking = speakers.some(s => s.identity === newRoom.localParticipant.identity);
        if (iAmSpeaking) {
          log(`*** I AM SPEAKING - MIC WORKS ***`, 'success');
        }
      });

      log(`Local participant: ${newRoom.localParticipant.identity}`, 'info');

      // Android: Start foreground service to keep call alive in background
      if (Platform.OS === 'android') {
        log('Android: Starting foreground service...', 'info');
        try {
          await startVoiceCallService();
          log('Foreground service started - call will continue in background', 'success');
        } catch (fgErr: any) {
          log(`Foreground service error: ${fgErr?.message || fgErr}`, 'error');
          // Continue anyway - call will still work, just may be killed in background
        }
      }

      log('=== CALL ACTIVE ===', 'success');
    } catch (err: any) {
      log(`ERROR: ${err?.message || err}`, 'error');
      log(`Stack: ${err?.stack?.substring(0, 200) || 'no stack'}`, 'error');
      setCallState('idle');
      deactivateKeepAwake('voiceCall');
    }
  }, [callState, log, clearLogs]);

  // End call
  const endCall = useCallback(async () => {
    if (callState === 'idle') return;

    log('=== ENDING CALL ===', 'info');
    setCallState('ending');

    try {
      if (roomRef.current) {
        log('Disconnecting from room...', 'info');
        await roomRef.current.disconnect();
        roomRef.current = null;
        log('Disconnected from room', 'success');
      }

      // Android: Stop foreground service
      if (Platform.OS === 'android') {
        log('Android: Stopping foreground service...', 'info');
        try {
          await stopVoiceCallService();
          log('Foreground service stopped', 'success');
        } catch (fgErr: any) {
          log(`Foreground service stop error: ${fgErr?.message || fgErr}`, 'error');
        }
      }

      // Stop AudioSession (iOS + Android)
      log(`Stopping AudioSession on ${Platform.OS}...`, 'info');
      try {
        await stopAudioSession();
        log('AudioSession stopped', 'success');
      } catch (audioErr: any) {
        log(`AudioSession stop error: ${audioErr?.message || audioErr}`, 'error');
      }

      deactivateKeepAwake('voiceCall');
      log('Screen keep-awake deactivated', 'info');
    } catch (err: any) {
      log(`Error during cleanup: ${err?.message || err}`, 'error');
    }

    setCallState('idle');
    log('=== CALL ENDED ===', 'info');
  }, [callState, log]);

  // Format duration
  const formatDuration = (seconds: number): string => {
    const mins = Math.floor(seconds / 60);
    const secs = seconds % 60;
    return `${mins}:${secs.toString().padStart(2, '0')}`;
  };

  // Get log color
  const getLogColor = (type: LogEntry['type']): string => {
    switch (type) {
      case 'success': return '#4ade80';
      case 'error': return '#f87171';
      case 'event': return '#60a5fa';
      default: return '#e5e5e5';
    }
  };
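
  // Render: header with version badge, call status bar, call/speaker controls,
  // log controls, and the scrolling log list. Ionicons names and sizes below are
  // representative placeholders.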
  return (
    <SafeAreaView style={styles.container}>
      {/* Header */}
      <View style={styles.header}>
        <View style={styles.headerRow}>
          <Text style={styles.title}>Voice Debug</Text>
          <Text style={styles.versionBadge}>v{APP_VERSION}</Text>
        </View>
        <Text style={styles.subtitle}>{VOICE_NAME}</Text>
      </View>

      {/* Call Status */}
      <View style={styles.statusBar}>
        <View style={styles.statusLeft}>
          <View
            style={[
              styles.statusDot,
              {
                // Dot color follows call state (palette reused from the buttons below)
                backgroundColor:
                  callState === 'connected'
                    ? '#22c55e'
                    : callState === 'idle'
                      ? '#6b7280'
                      : '#f59e0b',
              },
            ]}
          />
          <Text style={styles.statusText}>
            {callState === 'idle' && 'Ready'}
            {callState === 'connecting' && 'Connecting...'}
            {callState === 'connected' && `Connected ${formatDuration(callDuration)}`}
            {callState === 'ending' && 'Ending...'}
          </Text>
        </View>
        <Text style={styles.logCount}>{logs.length} logs</Text>
      </View>

      {/* Control Buttons - Row 1: Call controls */}
      <View style={styles.controls}>
        {callState === 'idle' ? (
          <TouchableOpacity style={styles.startButton} onPress={startCall}>
            <Ionicons name="call" size={20} color="#fff" />
            <Text style={styles.buttonText}>Start Call</Text>
          </TouchableOpacity>
        ) : (
          <TouchableOpacity style={styles.endButton} onPress={endCall}>
            <Ionicons name="call" size={20} color="#fff" />
            <Text style={styles.buttonText}>End Call</Text>
          </TouchableOpacity>
        )}

        {/* Speaker Toggle Button */}
        <TouchableOpacity
          style={[styles.speakerButton, isSpeakerOn ? styles.speakerOn : styles.speakerOff]}
          onPress={toggleSpeaker}
        >
          <Ionicons name={isSpeakerOn ? 'volume-high' : 'ear'} size={18} color="#fff" />
          <Text style={styles.smallButtonText}>{isSpeakerOn ? 'Speaker' : 'Ear'}</Text>
        </TouchableOpacity>
      </View>

      {/* Control Buttons - Row 2: Log controls */}
      <View style={styles.controlsRow2}>
        <TouchableOpacity style={styles.copyButton} onPress={copyLogs}>
          <Ionicons name="copy-outline" size={18} color="#fff" />
          <Text style={styles.smallButtonText}>Copy</Text>
        </TouchableOpacity>
        <TouchableOpacity style={styles.shareButton} onPress={shareLogs}>
          <Ionicons name="share-outline" size={18} color="#fff" />
          <Text style={styles.smallButtonText}>Share</Text>
        </TouchableOpacity>
        <TouchableOpacity style={styles.clearButton} onPress={clearLogs}>
          <Ionicons name="trash-outline" size={18} color="#fff" />
          <Text style={styles.smallButtonText}>Clear</Text>
        </TouchableOpacity>
        <View style={styles.platformBadge}>
          <Text style={styles.platformText}>
            {Platform.OS} {Platform.Version}
          </Text>
        </View>
      </View>

      {/* Logs */}
      <FlatList
        ref={flatListRef}
        data={logs}
        keyExtractor={item => item.id}
        style={styles.logsList}
        contentContainerStyle={styles.logsContent}
        renderItem={({ item }) => (
          <Text style={[styles.logEntry, { color: getLogColor(item.type) }]}>
            <Text style={styles.logTime}>[{item.time}]</Text> {item.message}
          </Text>
        )}
        ListEmptyComponent={
          <View style={styles.emptyContainer}>
            <Ionicons name="terminal-outline" size={40} color="#6b7280" />
            <Text style={styles.emptyText}>Press "Start Call" to begin</Text>
          </View>
        }
      />
    </SafeAreaView>
  );
}

const styles = StyleSheet.create({
  container: { flex: 1, backgroundColor: '#0f0f0f' },
  header: { padding: Spacing.md, borderBottomWidth: 1, borderBottomColor: '#333' },
  headerRow: { flexDirection: 'row', alignItems: 'center', justifyContent: 'space-between' },
  title: { fontSize: 24, fontWeight: '700', color: '#fff' },
  versionBadge: {
    fontSize: 14,
    fontWeight: '600',
    color: '#22c55e',
    backgroundColor: 'rgba(34, 197, 94, 0.15)',
    paddingHorizontal: 10,
    paddingVertical: 4,
    borderRadius: 8,
    overflow: 'hidden',
  },
  subtitle: { fontSize: 14, color: '#888', marginTop: 2 },
  statusBar: {
    flexDirection: 'row',
    justifyContent: 'space-between',
    alignItems: 'center',
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.sm,
    backgroundColor: '#1a1a1a',
  },
  statusLeft: { flexDirection: 'row', alignItems: 'center' },
  statusDot: { width: 10, height: 10, borderRadius: 5, marginRight: 8 },
  statusText: { color: '#fff', fontSize: 14, fontWeight: '500' },
  logCount: { color: '#888', fontSize: 12 },
  controls: { flexDirection: 'row', padding: Spacing.md, paddingBottom: Spacing.sm, gap: 10 },
  controlsRow2: {
    flexDirection: 'row',
    paddingHorizontal: Spacing.md,
    paddingBottom: Spacing.md,
    gap: 10,
    borderBottomWidth: 1,
    borderBottomColor: '#333',
  },
  startButton: {
    flex: 1,
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
    backgroundColor: '#22c55e',
    paddingVertical: 14,
    borderRadius: 12,
    gap: 8,
  },
  endButton: {
    flex: 1,
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
    backgroundColor: '#ef4444',
    paddingVertical: 14,
    borderRadius: 12,
    gap: 8,
  },
  buttonText: { color: '#fff', fontSize: 16, fontWeight: '600' },
  copyButton: {
    alignItems: 'center',
    justifyContent: 'center',
    backgroundColor: '#3b82f6',
    paddingVertical: 10,
    paddingHorizontal: 12,
    borderRadius: 10,
  },
  shareButton: {
    alignItems: 'center',
    justifyContent: 'center',
    backgroundColor: '#8b5cf6',
    paddingVertical: 10,
    paddingHorizontal: 12,
    borderRadius: 10,
  },
  clearButton: {
    alignItems: 'center',
    justifyContent: 'center',
    backgroundColor: '#6b7280',
    paddingVertical: 10,
    paddingHorizontal: 12,
    borderRadius: 10,
  },
  speakerButton: {
    alignItems: 'center',
    justifyContent: 'center',
    paddingVertical: 10,
    paddingHorizontal: 16,
    borderRadius: 10,
  },
  speakerOn: { backgroundColor: '#f59e0b' }, // Orange when speaker is ON
  speakerOff: { backgroundColor: '#4b5563' }, // Gray when earpiece
  platformBadge: { flex: 1, alignItems: 'flex-end', justifyContent: 'center' },
  platformText: { color: '#888', fontSize: 11, fontWeight: '500' },
  smallButtonText: { color: '#fff', fontSize: 10, fontWeight: '500', marginTop: 2 },
  logsList: { flex: 1 },
  logsContent: { padding: Spacing.sm, paddingBottom: 100 },
  logEntry: {
    fontSize: 12,
    fontFamily: Platform.OS === 'ios' ? 'Menlo' : 'monospace',
    lineHeight: 18,
    marginBottom: 2,
  },
  logTime: { color: '#888' },
  emptyContainer: { alignItems: 'center', justifyContent: 'center', paddingTop: 100 },
  emptyText: { color: '#6b7280', fontSize: 16, marginTop: 12 },
});