Fix Android audio routing - use music stream for speaker output

- Changed audioStreamType from 'voiceCall' to 'music' on Android
  - voiceCall stream defaults to earpiece
  - music stream defaults to speaker (see the routing sketch after this list)
- Added Debug tab to test voice calls with detailed logs
- Added speaker/earpiece toggle button with proper stream switching
- Full Android AudioSession support for LiveKit voice calls
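
A condensed sketch of the Android routing switch described above. It mirrors the new setAudioOutput code in the diff below; routeAndroidAudio is only an illustrative name, not part of the codebase:

    import { AudioSession } from '@livekit/react-native';

    // Speaker: 'music' stream + 'media' usage routes audio to the loud speaker.
    // Earpiece: 'voiceCall' stream + 'voiceCommunication' usage routes to the earpiece.
    async function routeAndroidAudio(useSpeaker: boolean): Promise<void> {
      await AudioSession.configureAudio({
        android: {
          audioTypeOptions: {
            manageAudioFocus: true,
            audioMode: useSpeaker ? 'normal' : 'inCommunication',
            audioFocusMode: 'gain',
            audioStreamType: useSpeaker ? 'music' : 'voiceCall',
            audioAttributesUsageType: useSpeaker ? 'media' : 'voiceCommunication',
            audioAttributesContentType: useSpeaker ? 'music' : 'speech',
          },
          preferredOutputList: useSpeaker ? ['speaker'] : ['earpiece'],
          forceHandleAudioRouting: true,
        },
      });
    }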

audioSession.ts:
- configureAudioForVoiceCall: uses music/media for speaker output
- setAudioOutput: switches between music (speaker) and voiceCall (earpiece)
- reconfigureAudioForPlayback: ensures speaker output on Android
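
Typical call order for these helpers over a call's lifetime, a minimal sketch (connectToRoom and runVoiceCall are illustrative placeholders, not part of this change):

    import {
      configureAudioForVoiceCall,
      reconfigureAudioForPlayback,
      setAudioOutput,
      stopAudioSession,
    } from '@/utils/audioSession';

    // Placeholder for the app's own LiveKit logic (create Room, connect, publish mic).
    declare function connectToRoom(): Promise<{ disconnect(): Promise<void> }>;

    async function runVoiceCall(): Promise<void> {
      await configureAudioForVoiceCall();  // must run BEFORE connecting to the room
      const room = await connectToRoom();
      await reconfigureAudioForPlayback(); // after the remote track arrives, re-assert speaker routing
      await setAudioOutput(false);         // user toggles to the earpiece...
      await setAudioOutput(true);          // ...and back to the loud speaker
      await room.disconnect();
      await stopAudioSession();            // release the audio session on hang-up
    }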

debug.tsx:
- Added platform info display
- Added speaker toggle with logging
- Improved UI with control rows
Sergei 2026-01-19 22:54:59 -08:00
parent bbc59e61ce
commit 57577b42c9
3 changed files with 229 additions and 86 deletions


@@ -72,11 +72,14 @@ export default function TabLayout() {
),
}}
/>
{/* Debug tab hidden */}
{/* Debug tab - for testing audio/voice */}
<Tabs.Screen
name="debug"
options={{
href: null,
title: 'Debug',
tabBarIcon: ({ color, size }) => (
<Feather name="terminal" size={22} color={color} />
),
}}
/>
{/* Hide explore tab */}

debug.tsx

@@ -3,8 +3,10 @@
*
* All-in-one screen for testing Julia AI voice:
* - Start/End call buttons
* - Speaker/Earpiece toggle with logging
* - Real-time logs of all LiveKit events
* - Copy logs button
* - Works on both iOS and Android
*/
import React, { useState, useEffect, useRef, useCallback } from 'react';
@@ -26,6 +28,11 @@ import { activateKeepAwakeAsync, deactivateKeepAwake } from 'expo-keep-awake';
import type { Room as RoomType } from 'livekit-client';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import { getToken, VOICE_NAME } from '@/services/livekitService';
import {
configureAudioForVoiceCall,
stopAudioSession,
setAudioOutput,
} from '@/utils/audioSession';
import Constants from 'expo-constants';
const APP_VERSION = Constants.expoConfig?.version ?? '?.?.?';
@@ -43,6 +50,7 @@ export default function DebugScreen() {
const [logs, setLogs] = useState<LogEntry[]>([]);
const [callState, setCallState] = useState<CallState>('idle');
const [callDuration, setCallDuration] = useState(0);
const [isSpeakerOn, setIsSpeakerOn] = useState(true); // Default to speaker
const flatListRef = useRef<FlatList>(null);
const roomRef = useRef<RoomType | null>(null);
const callStartTimeRef = useRef<number | null>(null);
@@ -113,6 +121,20 @@ export default function DebugScreen() {
return () => subscription.remove();
}, [log]);
// Toggle speaker
const toggleSpeaker = useCallback(async () => {
const newState = !isSpeakerOn;
log(`=== TOGGLING SPEAKER: ${isSpeakerOn ? 'ON' : 'OFF'} -> ${newState ? 'ON' : 'OFF'} ===`, 'info');
try {
await setAudioOutput(newState);
setIsSpeakerOn(newState);
log(`Speaker toggled to ${newState ? 'ON (loud speaker)' : 'OFF (earpiece)'}`, 'success');
} catch (err: any) {
log(`Speaker toggle error: ${err?.message || err}`, 'error');
}
}, [isSpeakerOn, log]);
// Start call
const startCall = useCallback(async () => {
if (callState !== 'idle') return;
@@ -120,10 +142,12 @@ export default function DebugScreen() {
clearLogs();
setCallState('connecting');
setCallDuration(0);
setIsSpeakerOn(true); // Reset speaker state
callStartTimeRef.current = null;
try {
log('=== STARTING VOICE CALL ===', 'info');
log(`Platform: ${Platform.OS} ${Platform.Version}`, 'info');
// Keep screen awake
await activateKeepAwakeAsync('voiceCall').catch(() => {});
@@ -131,7 +155,7 @@
// Step 1: Register WebRTC globals
log('Step 1: Importing @livekit/react-native...', 'info');
const { registerGlobals, AudioSession } = await import('@livekit/react-native');
const { registerGlobals } = await import('@livekit/react-native');
if (typeof global.RTCPeerConnection === 'undefined') {
log('Registering WebRTC globals...', 'info');
@@ -146,11 +170,14 @@ export default function DebugScreen() {
const { Room, RoomEvent, ConnectionState, Track } = await import('livekit-client');
log('livekit-client imported', 'success');
// Step 3: Start iOS AudioSession
if (Platform.OS === 'ios') {
log('Step 3: Starting iOS AudioSession...', 'info');
await AudioSession.startAudioSession();
log('iOS AudioSession started', 'success');
// Step 3: Configure AudioSession (iOS + Android)
log(`Step 3: Configuring AudioSession for ${Platform.OS}...`, 'info');
try {
await configureAudioForVoiceCall();
log(`AudioSession configured for ${Platform.OS}`, 'success');
} catch (audioErr: any) {
log(`AudioSession config error: ${audioErr?.message || audioErr}`, 'error');
// Continue anyway - might still work
}
// Step 4: Get token from server
@@ -325,11 +352,13 @@ export default function DebugScreen() {
log('Disconnected from room', 'success');
}
if (Platform.OS === 'ios') {
log('Stopping iOS AudioSession...', 'info');
const { AudioSession } = await import('@livekit/react-native');
await AudioSession.stopAudioSession();
log('iOS AudioSession stopped', 'success');
// Stop AudioSession (iOS + Android)
log(`Stopping AudioSession on ${Platform.OS}...`, 'info');
try {
await stopAudioSession();
log('AudioSession stopped', 'success');
} catch (audioErr: any) {
log(`AudioSession stop error: ${audioErr?.message || audioErr}`, 'error');
}
deactivateKeepAwake('voiceCall');
@@ -388,7 +417,7 @@ export default function DebugScreen() {
<Text style={styles.logCount}>{logs.length} logs</Text>
</View>
{/* Control Buttons */}
{/* Control Buttons - Row 1: Call controls */}
<View style={styles.controls}>
{callState === 'idle' ? (
<TouchableOpacity style={styles.startButton} onPress={startCall}>
@@ -406,6 +435,23 @@ export default function DebugScreen() {
</TouchableOpacity>
)}
{/* Speaker Toggle Button */}
<TouchableOpacity
style={[styles.speakerButton, isSpeakerOn ? styles.speakerOn : styles.speakerOff]}
onPress={toggleSpeaker}
disabled={callState === 'idle'}
>
<Ionicons
name={isSpeakerOn ? 'volume-high' : 'ear'}
size={20}
color="#fff"
/>
<Text style={styles.smallButtonText}>{isSpeakerOn ? 'Speaker' : 'Ear'}</Text>
</TouchableOpacity>
</View>
{/* Control Buttons - Row 2: Log controls */}
<View style={styles.controlsRow2}>
<TouchableOpacity style={styles.copyButton} onPress={copyLogs}>
<Ionicons name="copy" size={20} color="#fff" />
<Text style={styles.smallButtonText}>Copy</Text>
@@ -420,6 +466,10 @@ export default function DebugScreen() {
<Ionicons name="trash" size={20} color="#fff" />
<Text style={styles.smallButtonText}>Clear</Text>
</TouchableOpacity>
<View style={styles.platformBadge}>
<Text style={styles.platformText}>{Platform.OS} {Platform.Version}</Text>
</View>
</View>
{/* Logs */}
@@ -510,6 +560,13 @@ const styles = StyleSheet.create({
controls: {
flexDirection: 'row',
padding: Spacing.md,
paddingBottom: Spacing.sm,
gap: 10,
},
controlsRow2: {
flexDirection: 'row',
paddingHorizontal: Spacing.md,
paddingBottom: Spacing.md,
gap: 10,
borderBottomWidth: 1,
borderBottomColor: '#333',
@@ -563,6 +620,29 @@ const styles = StyleSheet.create({
paddingHorizontal: 12,
borderRadius: 10,
},
speakerButton: {
alignItems: 'center',
justifyContent: 'center',
paddingVertical: 10,
paddingHorizontal: 16,
borderRadius: 10,
},
speakerOn: {
backgroundColor: '#f59e0b', // Orange when speaker is ON
},
speakerOff: {
backgroundColor: '#4b5563', // Gray when earpiece
},
platformBadge: {
flex: 1,
alignItems: 'flex-end',
justifyContent: 'center',
},
platformText: {
color: '#888',
fontSize: 11,
fontWeight: '500',
},
smallButtonText: {
color: '#fff',
fontSize: 10,

audioSession.ts

@@ -1,8 +1,9 @@
/**
* iOS AudioSession Configuration Helpers
* Audio Session Configuration Helpers (iOS + Android)
*
* CRITICAL: This must be configured BEFORE connecting to LiveKit room!
* Without proper AudioSession setup, microphone won't work on iOS.
* On Android, this controls speaker/earpiece routing.
*/
import { Platform } from 'react-native';
@@ -16,8 +17,6 @@ let audioSessionModule: any = null;
* This is needed because @livekit/react-native must be imported after registerGlobals()
*/
async function getAudioSession(): Promise<any | null> {
if (Platform.OS !== 'ios') return null;
if (!audioSessionModule) {
const livekit = await import('@livekit/react-native');
audioSessionModule = livekit.AudioSession;
@@ -27,22 +26,21 @@ async function getAudioSession(): Promise<any | null> {
}
/**
* Configure iOS AudioSession for bidirectional voice call
* Configure AudioSession for bidirectional voice call (iOS + Android)
*
* MUST be called BEFORE connecting to LiveKit room!
*
* Configuration:
* iOS Configuration:
* - Category: playAndRecord (both speaker and mic)
* - Mode: voiceChat (optimized for voice calls)
* - Options: Bluetooth, speaker, mix with others
*
* Android Configuration:
* - audioTypeOptions: communication (for voice calls)
* - forceHandleAudioRouting: true (to control speaker/earpiece)
*/
export async function configureAudioForVoiceCall(): Promise<void> {
if (Platform.OS !== 'ios') {
console.log('[AudioSession] Skipping on non-iOS platform');
return;
}
console.log('[AudioSession] Configuring for voice call...');
console.log(`[AudioSession] Configuring for voice call on ${Platform.OS}...`);
try {
const AudioSession = await getAudioSession();
@@ -51,7 +49,8 @@ export async function configureAudioForVoiceCall(): Promise<void> {
return;
}
// Step 1: Set Apple-specific audio configuration
if (Platform.OS === 'ios') {
// iOS-specific configuration
console.log('[AudioSession] Step 1: Setting Apple audio config...');
await AudioSession.setAppleAudioConfiguration({
audioCategory: 'playAndRecord',
@@ -64,7 +63,6 @@ export async function configureAudioForVoiceCall(): Promise<void> {
audioMode: 'voiceChat',
});
// Step 2: Configure default output to speaker
console.log('[AudioSession] Step 2: Setting default output...');
await AudioSession.configureAudio({
ios: {
@@ -72,9 +70,35 @@ export async function configureAudioForVoiceCall(): Promise<void> {
},
});
// Step 3: Start the audio session
console.log('[AudioSession] Step 3: Starting audio session...');
await AudioSession.startAudioSession();
} else if (Platform.OS === 'android') {
// Android-specific configuration
// IMPORTANT: Using 'music' stream type to force output to speaker
// 'voiceCall' stream type defaults to earpiece on many Android devices
console.log('[AudioSession] Configuring Android audio for SPEAKER...');
await AudioSession.configureAudio({
android: {
// Use MEDIA mode to ensure speaker output
audioTypeOptions: {
manageAudioFocus: true,
audioMode: 'normal',
audioFocusMode: 'gain',
// Use 'music' stream - goes to speaker by default
audioStreamType: 'music',
audioAttributesUsageType: 'media',
audioAttributesContentType: 'music',
},
// Force speaker as output
preferredOutputList: ['speaker'],
// Allow us to control audio routing
forceHandleAudioRouting: true,
},
});
console.log('[AudioSession] Starting Android audio session...');
await AudioSession.startAudioSession();
}
console.log('[AudioSession] Configuration complete!');
} catch (error) {
@@ -84,16 +108,16 @@ export async function configureAudioForVoiceCall(): Promise<void> {
}
/**
* Stop iOS AudioSession
* Stop AudioSession (iOS + Android)
*
* Should be called when disconnecting from voice call
*/
export async function stopAudioSession(): Promise<void> {
if (Platform.OS !== 'ios') {
if (Platform.OS !== 'ios' && Platform.OS !== 'android') {
return;
}
console.log('[AudioSession] Stopping audio session...');
console.log(`[AudioSession] Stopping audio session on ${Platform.OS}...`);
try {
const AudioSession = await getAudioSession();
@@ -110,16 +134,16 @@ export async function stopAudioSession(): Promise<void> {
}
/**
* Reconfigure audio session after remote track arrives
* Reconfigure audio session after remote track arrives (iOS + Android)
*
* Sometimes iOS needs a kick to properly route audio after remote participant joins
* Sometimes the OS needs a kick to properly route audio after remote participant joins
*/
export async function reconfigureAudioForPlayback(): Promise<void> {
if (Platform.OS !== 'ios') {
if (Platform.OS !== 'ios' && Platform.OS !== 'android') {
return;
}
console.log('[AudioSession] Reconfiguring for playback...');
console.log(`[AudioSession] Reconfiguring for playback on ${Platform.OS}...`);
try {
const AudioSession = await getAudioSession();
@@ -127,6 +151,7 @@ export async function reconfigureAudioForPlayback(): Promise<void> {
return;
}
if (Platform.OS === 'ios') {
// Just reconfigure the same settings - this "refreshes" the audio routing
await AudioSession.setAppleAudioConfiguration({
audioCategory: 'playAndRecord',
@@ -138,6 +163,24 @@ export async function reconfigureAudioForPlayback(): Promise<void> {
],
audioMode: 'voiceChat',
});
} else if (Platform.OS === 'android') {
// Reconfigure Android audio to ensure speaker output
// Using 'music' stream type to force speaker
await AudioSession.configureAudio({
android: {
audioTypeOptions: {
manageAudioFocus: true,
audioMode: 'normal',
audioFocusMode: 'gain',
audioStreamType: 'music',
audioAttributesUsageType: 'media',
audioAttributesContentType: 'music',
},
preferredOutputList: ['speaker'],
forceHandleAudioRouting: true,
},
});
}
console.log('[AudioSession] Reconfigured successfully');
} catch (error) {
@@ -147,17 +190,12 @@ export async function reconfigureAudioForPlayback(): Promise<void> {
}
/**
* Switch audio output between speaker and earpiece
* Switch audio output between speaker and earpiece (iOS + Android)
*
* @param useSpeaker - true for speaker, false for earpiece
*/
export async function setAudioOutput(useSpeaker: boolean): Promise<void> {
if (Platform.OS !== 'ios') {
console.log('[AudioSession] setAudioOutput - skipping on non-iOS');
return;
}
console.log(`[AudioSession] Setting audio output to ${useSpeaker ? 'SPEAKER' : 'EARPIECE'}...`);
console.log(`[AudioSession] Setting audio output to ${useSpeaker ? 'SPEAKER' : 'EARPIECE'} on ${Platform.OS}...`);
try {
const AudioSession = await getAudioSession();
@@ -166,7 +204,8 @@ export async function setAudioOutput(useSpeaker: boolean): Promise<void> {
return;
}
// Configure audio output
if (Platform.OS === 'ios') {
// iOS: Configure audio output
await AudioSession.configureAudio({
ios: {
defaultOutput: useSpeaker ? 'speaker' : 'earpiece',
@@ -181,6 +220,27 @@ export async function setAudioOutput(useSpeaker: boolean): Promise<void> {
: ['allowBluetooth', 'allowBluetoothA2DP', 'mixWithOthers'],
audioMode: 'voiceChat',
});
} else if (Platform.OS === 'android') {
// Android: Switch stream type to control speaker/earpiece
// - 'music' stream goes to speaker by default
// - 'voiceCall' stream goes to earpiece by default
await AudioSession.configureAudio({
android: {
audioTypeOptions: {
manageAudioFocus: true,
audioMode: useSpeaker ? 'normal' : 'inCommunication',
audioFocusMode: 'gain',
// Key difference: music→speaker, voiceCall→earpiece
audioStreamType: useSpeaker ? 'music' : 'voiceCall',
audioAttributesUsageType: useSpeaker ? 'media' : 'voiceCommunication',
audioAttributesContentType: useSpeaker ? 'music' : 'speech',
},
// Also set preferred output list
preferredOutputList: useSpeaker ? ['speaker'] : ['earpiece'],
forceHandleAudioRouting: true,
},
});
}
console.log(`[AudioSession] Audio output set to ${useSpeaker ? 'SPEAKER' : 'EARPIECE'}`);
} catch (error) {