Sergei 57577b42c9 Fix Android audio routing - use music stream for speaker output
- Changed audioStreamType from 'voiceCall' to 'music' on Android
  - voiceCall stream defaults to earpiece
  - music stream defaults to speaker
- Added Debug tab to test voice calls with detailed logs
- Added speaker/earpiece toggle button with proper stream switching
- Full Android AudioSession support for LiveKit voice calls

audioSession.ts:
- configureAudioForVoiceCall: uses music/media for speaker output
- setAudioOutput: switches between music (speaker) and voiceCall (earpiece)
- reconfigureAudioForPlayback: ensures speaker output on Android

debug.tsx:
- Added platform info display
- Added speaker toggle with logging
- Improved UI with control rows
2026-01-19 22:54:59 -08:00

679 lines
21 KiB
TypeScript

/**
* Debug Screen - Voice Call Testing with Detailed Logs
*
* All-in-one screen for testing Julia AI voice:
* - Start/End call buttons
* - Speaker/Earpiece toggle with logging
* - Real-time logs of all LiveKit events
* - Copy logs button
* - Works on both iOS and Android
*/
import React, { useState, useEffect, useRef, useCallback } from 'react';
import {
View,
Text,
StyleSheet,
FlatList,
TouchableOpacity,
Platform,
Share,
AppState,
AppStateStatus,
} from 'react-native';
import { SafeAreaView } from 'react-native-safe-area-context';
import { Ionicons } from '@expo/vector-icons';
import * as Clipboard from 'expo-clipboard';
import { activateKeepAwakeAsync, deactivateKeepAwake } from 'expo-keep-awake';
import type { Room as RoomType } from 'livekit-client';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import { getToken, VOICE_NAME } from '@/services/livekitService';
import {
configureAudioForVoiceCall,
stopAudioSession,
setAudioOutput,
} from '@/utils/audioSession';
import Constants from 'expo-constants';
// App version shown in the header badge; falls back when expo config is unavailable.
const APP_VERSION = Constants.expoConfig?.version ?? '?.?.?';
/** One line in the on-screen debug log. */
type LogEntry = {
id: string; // unique FlatList key (timestamp + random suffix)
time: string; // wall-clock time with milliseconds, e.g. "22:54:59.123"
message: string;
type: 'info' | 'success' | 'error' | 'event'; // drives the log line color
};
/** Lifecycle of the voice call, shown in the status bar. */
type CallState = 'idle' | 'connecting' | 'connected' | 'ending';
export default function DebugScreen() {
const [logs, setLogs] = useState<LogEntry[]>([]);
const [callState, setCallState] = useState<CallState>('idle');
const [callDuration, setCallDuration] = useState(0);
const [isSpeakerOn, setIsSpeakerOn] = useState(true); // Default to speaker
const flatListRef = useRef<FlatList>(null);
const roomRef = useRef<RoomType | null>(null);
const callStartTimeRef = useRef<number | null>(null);
const appStateRef = useRef<AppStateStatus>(AppState.currentState);
// Add log entry
const log = useCallback((message: string, type: LogEntry['type'] = 'info') => {
const time = new Date().toLocaleTimeString('en-US', { hour12: false, hour: '2-digit', minute: '2-digit', second: '2-digit' });
const ms = String(new Date().getMilliseconds()).padStart(3, '0');
setLogs(prev => [...prev, {
id: `${Date.now()}-${Math.random()}`,
time: `${time}.${ms}`,
message,
type,
}]);
}, []);
// Clear logs
const clearLogs = useCallback(() => {
setLogs([]);
}, []);
// Copy logs to clipboard
const copyLogs = useCallback(async () => {
const text = logs.map(l => `[${l.time}] ${l.message}`).join('\n');
await Clipboard.setStringAsync(text);
log('Logs copied to clipboard!', 'success');
}, [logs, log]);
// Share logs
const shareLogs = useCallback(async () => {
const text = logs.map(l => `[${l.time}] ${l.message}`).join('\n');
try {
await Share.share({ message: text, title: 'Voice Debug Logs' });
} catch (e) {
log(`Share failed: ${e}`, 'error');
}
}, [logs, log]);
// Auto-scroll to bottom
useEffect(() => {
if (logs.length > 0) {
setTimeout(() => flatListRef.current?.scrollToEnd({ animated: true }), 100);
}
}, [logs]);
// Call duration timer
useEffect(() => {
if (callState !== 'connected') return;
const interval = setInterval(() => {
if (callStartTimeRef.current) {
setCallDuration(Math.floor((Date.now() - callStartTimeRef.current) / 1000));
}
}, 1000);
return () => clearInterval(interval);
}, [callState]);
// Handle app background/foreground
useEffect(() => {
const subscription = AppState.addEventListener('change', (nextAppState) => {
if (appStateRef.current.match(/inactive|background/) && nextAppState === 'active') {
log('App returned to foreground', 'event');
} else if (appStateRef.current === 'active' && nextAppState.match(/inactive|background/)) {
log('App went to background - call continues', 'event');
}
appStateRef.current = nextAppState;
});
return () => subscription.remove();
}, [log]);
// Toggle speaker
const toggleSpeaker = useCallback(async () => {
const newState = !isSpeakerOn;
log(`=== TOGGLING SPEAKER: ${isSpeakerOn ? 'ON' : 'OFF'}${newState ? 'ON' : 'OFF'} ===`, 'info');
try {
await setAudioOutput(newState);
setIsSpeakerOn(newState);
log(`Speaker toggled to ${newState ? 'ON (loud speaker)' : 'OFF (earpiece)'}`, 'success');
} catch (err: any) {
log(`Speaker toggle error: ${err?.message || err}`, 'error');
}
}, [isSpeakerOn, log]);
// Start call
const startCall = useCallback(async () => {
if (callState !== 'idle') return;
clearLogs();
setCallState('connecting');
setCallDuration(0);
setIsSpeakerOn(true); // Reset speaker state
callStartTimeRef.current = null;
try {
log('=== STARTING VOICE CALL ===', 'info');
log(`Platform: ${Platform.OS} ${Platform.Version}`, 'info');
// Keep screen awake
await activateKeepAwakeAsync('voiceCall').catch(() => {});
log('Screen keep-awake activated', 'info');
// Step 1: Register WebRTC globals
log('Step 1: Importing @livekit/react-native...', 'info');
const { registerGlobals } = await import('@livekit/react-native');
if (typeof global.RTCPeerConnection === 'undefined') {
log('Registering WebRTC globals...', 'info');
registerGlobals();
log('WebRTC globals registered', 'success');
} else {
log('WebRTC globals already registered', 'info');
}
// Step 2: Import livekit-client
log('Step 2: Importing livekit-client...', 'info');
const { Room, RoomEvent, ConnectionState, Track } = await import('livekit-client');
log('livekit-client imported', 'success');
// Step 3: Configure AudioSession (iOS + Android)
log(`Step 3: Configuring AudioSession for ${Platform.OS}...`, 'info');
try {
await configureAudioForVoiceCall();
log(`AudioSession configured for ${Platform.OS}`, 'success');
} catch (audioErr: any) {
log(`AudioSession config error: ${audioErr?.message || audioErr}`, 'error');
// Continue anyway - might still work
}
// Step 4: Get token from server
log('Step 4: Requesting token from server...', 'info');
log(`Token server: wellnuo.smartlaunchhub.com/julia/token`, 'info');
const result = await getToken(`user-${Date.now()}`);
if (!result.success || !result.data) {
throw new Error(result.error || 'Failed to get token');
}
const { token, wsUrl, roomName } = result.data;
log(`Token received`, 'success');
log(`Room: ${roomName}`, 'info');
log(`WebSocket URL: ${wsUrl}`, 'info');
// Step 5: Create room and setup listeners
log('Step 5: Creating Room instance...', 'info');
const newRoom = new Room();
roomRef.current = newRoom;
log('Room instance created', 'success');
// Setup ALL event listeners
log('Step 6: Setting up event listeners...', 'info');
newRoom.on(RoomEvent.ConnectionStateChanged, (state: any) => {
log(`EVENT: ConnectionStateChanged → ${state}`, 'event');
if (state === ConnectionState.Connected) {
setCallState('connected');
callStartTimeRef.current = Date.now();
} else if (state === ConnectionState.Disconnected) {
setCallState('idle');
}
});
newRoom.on(RoomEvent.Connected, () => {
log('EVENT: Connected to room', 'success');
});
newRoom.on(RoomEvent.Disconnected, (reason?: any) => {
log(`EVENT: Disconnected. Reason: ${reason || 'unknown'}`, 'event');
});
newRoom.on(RoomEvent.Reconnecting, () => {
log('EVENT: Reconnecting...', 'event');
});
newRoom.on(RoomEvent.Reconnected, () => {
log('EVENT: Reconnected', 'success');
});
newRoom.on(RoomEvent.ParticipantConnected, (participant: any) => {
log(`EVENT: Participant connected: ${participant.identity}`, 'event');
});
newRoom.on(RoomEvent.ParticipantDisconnected, (participant: any) => {
log(`EVENT: Participant disconnected: ${participant.identity}`, 'event');
});
newRoom.on(RoomEvent.TrackSubscribed, (track: any, publication: any, participant: any) => {
log(`EVENT: Track subscribed: ${track.kind} from ${participant.identity}`, 'event');
if (track.kind === Track.Kind.Audio) {
log('Audio track from Julia AI - should hear voice now', 'success');
}
});
newRoom.on(RoomEvent.TrackUnsubscribed, (track: any, publication: any, participant: any) => {
log(`EVENT: Track unsubscribed: ${track.kind} from ${participant.identity}`, 'event');
});
newRoom.on(RoomEvent.TrackMuted, (publication: any, participant: any) => {
log(`EVENT: Track muted by ${participant.identity}`, 'event');
});
newRoom.on(RoomEvent.TrackUnmuted, (publication: any, participant: any) => {
log(`EVENT: Track unmuted by ${participant.identity}`, 'event');
});
newRoom.on(RoomEvent.ActiveSpeakersChanged, (speakers: any[]) => {
if (speakers.length > 0) {
log(`EVENT: Active speakers: ${speakers.map(s => s.identity).join(', ')}`, 'event');
}
});
newRoom.on(RoomEvent.DataReceived, (payload: any, participant: any) => {
try {
const data = JSON.parse(new TextDecoder().decode(payload));
log(`EVENT: Data received: ${JSON.stringify(data).substring(0, 100)}`, 'event');
} catch (e) {
log(`EVENT: Data received (binary)`, 'event');
}
});
newRoom.on(RoomEvent.AudioPlaybackStatusChanged, () => {
log(`EVENT: AudioPlaybackStatusChanged - canPlay: ${newRoom.canPlaybackAudio}`, 'event');
});
newRoom.on(RoomEvent.MediaDevicesError, (error: any) => {
log(`EVENT: MediaDevicesError: ${error?.message || error}`, 'error');
});
newRoom.on(RoomEvent.RoomMetadataChanged, (metadata: string) => {
log(`EVENT: RoomMetadataChanged: ${metadata}`, 'event');
});
log('Event listeners set up', 'success');
// Step 7: Connect to room
log('Step 7: Connecting to LiveKit room...', 'info');
await newRoom.connect(wsUrl, token, { autoSubscribe: true });
log('Connected to room', 'success');
// Step 7.5: Start audio playback (required for iOS)
log('Step 7.5: Starting audio playback...', 'info');
await newRoom.startAudio();
log(`Audio playback started, canPlay: ${newRoom.canPlaybackAudio}`, 'success');
// Step 8: Enable microphone
log('Step 8: Enabling microphone...', 'info');
await newRoom.localParticipant.setMicrophoneEnabled(true);
log('Microphone enabled', 'success');
// Step 9: Log local audio track info
log('Step 9: Checking local audio track...', 'info');
const localAudioTracks = newRoom.localParticipant.audioTrackPublications;
log(`Local audio publications: ${localAudioTracks.size}`, 'info');
localAudioTracks.forEach((pub: any) => {
log(`Local audio track: ${pub.trackSid}, muted: ${pub.isMuted}, source: ${pub.source}`, 'info');
if (pub.track) {
log(`Track mediaStreamTrack: ${pub.track.mediaStreamTrack ? 'exists' : 'NULL'}`, 'info');
log(`Track enabled: ${pub.track.mediaStreamTrack?.enabled}`, 'info');
}
});
// Listen for local track published
newRoom.localParticipant.on('localTrackPublished', (pub: any) => {
log(`MY TRACK PUBLISHED: ${pub.kind} sid=${pub.trackSid}`, 'success');
});
// Listen when I become an active speaker (means mic is working)
newRoom.on(RoomEvent.ActiveSpeakersChanged, (speakers: any[]) => {
const iAmSpeaking = speakers.some(s => s.identity === newRoom.localParticipant.identity);
if (iAmSpeaking) {
log(`*** I AM SPEAKING - MIC WORKS ***`, 'success');
}
});
log(`Local participant: ${newRoom.localParticipant.identity}`, 'info');
log('=== CALL ACTIVE ===', 'success');
} catch (err: any) {
log(`ERROR: ${err?.message || err}`, 'error');
log(`Stack: ${err?.stack?.substring(0, 200) || 'no stack'}`, 'error');
setCallState('idle');
deactivateKeepAwake('voiceCall');
}
}, [callState, log, clearLogs]);
// End call
const endCall = useCallback(async () => {
if (callState === 'idle') return;
log('=== ENDING CALL ===', 'info');
setCallState('ending');
try {
if (roomRef.current) {
log('Disconnecting from room...', 'info');
await roomRef.current.disconnect();
roomRef.current = null;
log('Disconnected from room', 'success');
}
// Stop AudioSession (iOS + Android)
log(`Stopping AudioSession on ${Platform.OS}...`, 'info');
try {
await stopAudioSession();
log('AudioSession stopped', 'success');
} catch (audioErr: any) {
log(`AudioSession stop error: ${audioErr?.message || audioErr}`, 'error');
}
deactivateKeepAwake('voiceCall');
log('Screen keep-awake deactivated', 'info');
} catch (err: any) {
log(`Error during cleanup: ${err?.message || err}`, 'error');
}
setCallState('idle');
log('=== CALL ENDED ===', 'info');
}, [callState, log]);
// Format duration
const formatDuration = (seconds: number): string => {
const mins = Math.floor(seconds / 60);
const secs = seconds % 60;
return `${mins}:${secs.toString().padStart(2, '0')}`;
};
// Get log color
const getLogColor = (type: LogEntry['type']): string => {
switch (type) {
case 'success': return '#4ade80';
case 'error': return '#f87171';
case 'event': return '#60a5fa';
default: return '#e5e5e5';
}
};
return (
<SafeAreaView style={styles.container} edges={['top']}>
{/* Header */}
<View style={styles.header}>
<View style={styles.headerRow}>
<Text style={styles.title}>Voice Debug</Text>
<Text style={styles.versionBadge}>v{APP_VERSION}</Text>
</View>
<Text style={styles.subtitle}>{VOICE_NAME}</Text>
</View>
{/* Call Status */}
<View style={styles.statusBar}>
<View style={styles.statusLeft}>
<View style={[
styles.statusDot,
{ backgroundColor: callState === 'connected' ? '#4ade80' : callState === 'connecting' ? '#fbbf24' : '#6b7280' }
]} />
<Text style={styles.statusText}>
{callState === 'idle' && 'Ready'}
{callState === 'connecting' && 'Connecting...'}
{callState === 'connected' && `Connected ${formatDuration(callDuration)}`}
{callState === 'ending' && 'Ending...'}
</Text>
</View>
<Text style={styles.logCount}>{logs.length} logs</Text>
</View>
{/* Control Buttons - Row 1: Call controls */}
<View style={styles.controls}>
{callState === 'idle' ? (
<TouchableOpacity style={styles.startButton} onPress={startCall}>
<Ionicons name="call" size={24} color="#fff" />
<Text style={styles.buttonText}>Start Call</Text>
</TouchableOpacity>
) : (
<TouchableOpacity
style={styles.endButton}
onPress={endCall}
disabled={callState === 'ending'}
>
<Ionicons name="call" size={24} color="#fff" style={{ transform: [{ rotate: '135deg' }] }} />
<Text style={styles.buttonText}>End Call</Text>
</TouchableOpacity>
)}
{/* Speaker Toggle Button */}
<TouchableOpacity
style={[styles.speakerButton, isSpeakerOn ? styles.speakerOn : styles.speakerOff]}
onPress={toggleSpeaker}
disabled={callState === 'idle'}
>
<Ionicons
name={isSpeakerOn ? 'volume-high' : 'ear'}
size={20}
color="#fff"
/>
<Text style={styles.smallButtonText}>{isSpeakerOn ? 'Speaker' : 'Ear'}</Text>
</TouchableOpacity>
</View>
{/* Control Buttons - Row 2: Log controls */}
<View style={styles.controlsRow2}>
<TouchableOpacity style={styles.copyButton} onPress={copyLogs}>
<Ionicons name="copy" size={20} color="#fff" />
<Text style={styles.smallButtonText}>Copy</Text>
</TouchableOpacity>
<TouchableOpacity style={styles.shareButton} onPress={shareLogs}>
<Ionicons name="share" size={20} color="#fff" />
<Text style={styles.smallButtonText}>Share</Text>
</TouchableOpacity>
<TouchableOpacity style={styles.clearButton} onPress={clearLogs}>
<Ionicons name="trash" size={20} color="#fff" />
<Text style={styles.smallButtonText}>Clear</Text>
</TouchableOpacity>
<View style={styles.platformBadge}>
<Text style={styles.platformText}>{Platform.OS} {Platform.Version}</Text>
</View>
</View>
{/* Logs */}
<FlatList
ref={flatListRef}
data={logs}
keyExtractor={(item) => item.id}
style={styles.logsList}
contentContainerStyle={styles.logsContent}
renderItem={({ item }) => (
<Text style={[styles.logEntry, { color: getLogColor(item.type) }]}>
<Text style={styles.logTime}>[{item.time}]</Text> {item.message}
</Text>
)}
ListEmptyComponent={
<View style={styles.emptyContainer}>
<Ionicons name="terminal" size={48} color="#6b7280" />
<Text style={styles.emptyText}>Press "Start Call" to begin</Text>
</View>
}
/>
</SafeAreaView>
);
}
// Screen styles: dark theme debug console. Grouped as header → status bar →
// control rows → buttons → log list.
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: '#0f0f0f',
},
// --- Header (title + version badge + voice name) ---
header: {
padding: Spacing.md,
borderBottomWidth: 1,
borderBottomColor: '#333',
},
headerRow: {
flexDirection: 'row',
alignItems: 'center',
justifyContent: 'space-between',
},
title: {
fontSize: 24,
fontWeight: '700',
color: '#fff',
},
versionBadge: {
fontSize: 14,
fontWeight: '600',
color: '#22c55e',
backgroundColor: 'rgba(34, 197, 94, 0.15)',
paddingHorizontal: 10,
paddingVertical: 4,
borderRadius: 8,
overflow: 'hidden',
},
subtitle: {
fontSize: 14,
color: '#888',
marginTop: 2,
},
// --- Status bar (colored dot + state text + log count) ---
statusBar: {
flexDirection: 'row',
justifyContent: 'space-between',
alignItems: 'center',
paddingHorizontal: Spacing.md,
paddingVertical: Spacing.sm,
backgroundColor: '#1a1a1a',
},
statusLeft: {
flexDirection: 'row',
alignItems: 'center',
},
statusDot: {
width: 10,
height: 10,
borderRadius: 5,
marginRight: 8,
},
statusText: {
color: '#fff',
fontSize: 14,
fontWeight: '500',
},
logCount: {
color: '#888',
fontSize: 12,
},
// --- Control rows ---
controls: {
flexDirection: 'row',
padding: Spacing.md,
paddingBottom: Spacing.sm,
gap: 10,
},
controlsRow2: {
flexDirection: 'row',
paddingHorizontal: Spacing.md,
paddingBottom: Spacing.md,
gap: 10,
borderBottomWidth: 1,
borderBottomColor: '#333',
},
// --- Buttons (call, log actions, speaker toggle) ---
startButton: {
flex: 1,
flexDirection: 'row',
alignItems: 'center',
justifyContent: 'center',
backgroundColor: '#22c55e',
paddingVertical: 14,
borderRadius: 12,
gap: 8,
},
endButton: {
flex: 1,
flexDirection: 'row',
alignItems: 'center',
justifyContent: 'center',
backgroundColor: '#ef4444',
paddingVertical: 14,
borderRadius: 12,
gap: 8,
},
buttonText: {
color: '#fff',
fontSize: 16,
fontWeight: '600',
},
copyButton: {
alignItems: 'center',
justifyContent: 'center',
backgroundColor: '#3b82f6',
paddingVertical: 10,
paddingHorizontal: 12,
borderRadius: 10,
},
shareButton: {
alignItems: 'center',
justifyContent: 'center',
backgroundColor: '#8b5cf6',
paddingVertical: 10,
paddingHorizontal: 12,
borderRadius: 10,
},
clearButton: {
alignItems: 'center',
justifyContent: 'center',
backgroundColor: '#6b7280',
paddingVertical: 10,
paddingHorizontal: 12,
borderRadius: 10,
},
speakerButton: {
alignItems: 'center',
justifyContent: 'center',
paddingVertical: 10,
paddingHorizontal: 16,
borderRadius: 10,
},
speakerOn: {
backgroundColor: '#f59e0b', // Orange when speaker is ON
},
speakerOff: {
backgroundColor: '#4b5563', // Gray when earpiece
},
platformBadge: {
flex: 1,
alignItems: 'flex-end',
justifyContent: 'center',
},
platformText: {
color: '#888',
fontSize: 11,
fontWeight: '500',
},
smallButtonText: {
color: '#fff',
fontSize: 10,
fontWeight: '500',
marginTop: 2,
},
// --- Log list ---
logsList: {
flex: 1,
},
logsContent: {
padding: Spacing.sm,
paddingBottom: 100, // keep last entries visible above any floating UI
},
logEntry: {
fontSize: 12,
fontFamily: Platform.OS === 'ios' ? 'Menlo' : 'monospace',
lineHeight: 18,
marginBottom: 2,
},
logTime: {
color: '#888',
},
emptyContainer: {
alignItems: 'center',
justifyContent: 'center',
paddingTop: 100,
},
emptyText: {
color: '#6b7280',
fontSize: 16,
marginTop: 12,
},
});