Compare commits

...

4 Commits

Author SHA1 Message Date
Sergei
5b5cdf1098 Add audio output switcher for voice calls (Android speaker fix)
- Add Audio button during active calls to switch output
- Fall back to Speaker/Earpiece options when LiveKit API unavailable
- Speaker now works correctly on Android

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2026-01-26 13:25:19 -08:00
Sergei
8dd8590c1c Add audio output device enumeration and selection utils
- Add AudioOutputDevice interface with id, name, type fields
- Add getAvailableAudioOutputs() to list available audio devices
- Add selectAudioOutput(deviceId) to switch to specific device
- Add mapDeviceType() helper for device type normalization

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2026-01-26 13:05:12 -08:00
Sergei
f2e633df99 Fix audio playback: add room.startAudio() call
Root cause: Audio from remote participant (Julia AI) was not playing
because room.startAudio() was never called after connecting.

This is REQUIRED by LiveKit WebRTC to enable audio playback.
The fix matches the working implementation in debug.tsx (Robert version).

Changes:
- Add room.startAudio() call after room.connect()
- Add canPlayAudio state tracking
- Add proper error handling for startAudio

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2026-01-25 18:03:56 -08:00
Sergei
cd4137ef36 Fix Android speaker: use music stream type instead of voiceCall
- audioStreamType: music (routes to SPEAKER by default)
- audioMode: normal (not inCommunication which uses earpiece)
- audioAttributesUsageType: media
- audioAttributesContentType: music

Previous voiceCall stream was routing to earpiece on Android devices.
2026-01-25 13:12:16 -08:00
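
Taken together, the four commits above configure Android audio routing, add the required room.startAudio() call, and expose an audio-output picker. The sketch below is not part of the diff: the function names and the '@/utils/audioSession' path are taken from the changed files, while startCall, switchOutput, wsUrl and token are illustrative assumptions.

// Usage sketch (not part of the diff): how the pieces introduced by these commits fit together.
// Assumes registerGlobals() from '@livekit/react-native' has already run (as in the chat screen).
import { Room } from 'livekit-client';
import {
  configureAudioForVoiceCall,
  getAvailableAudioOutputs,
  selectAudioOutput,
  setAudioOutput,
} from '@/utils/audioSession';

export async function startCall(wsUrl: string, token: string): Promise<Room> {
  // cd4137ef36: configure Android routing (music stream / media usage -> speaker)
  await configureAudioForVoiceCall();

  const room = new Room();
  await room.connect(wsUrl, token);

  // f2e633df99: without startAudio() after connect, remote audio stays silent
  await room.startAudio();
  console.log('canPlaybackAudio:', room.canPlaybackAudio);

  return room;
}

export async function switchOutput(): Promise<void> {
  // 8dd8590c1c / 5b5cdf1098: enumerate devices, or fall back to a speaker/earpiece toggle
  const devices = await getAvailableAudioOutputs();
  if (devices.length > 0) {
    await selectAudioOutput(devices[0].id); // first device chosen only to show the call
  } else {
    await setAudioOutput(true); // true = speaker, false = earpiece
  }
}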
3 changed files with 191 additions and 58 deletions

View File

@@ -45,6 +45,7 @@ import {
 import { ConnectionState, Track } from 'livekit-client';
 import { getToken, type BeneficiaryData } from '@/services/livekitService';
 import { useAuth } from '@/contexts/AuthContext';
+import { getAvailableAudioOutputs, selectAudioOutput, setAudioOutput } from '@/utils/audioSession';

 // Register LiveKit globals (must be called before using LiveKit)
 registerGlobals();
@@ -612,6 +613,40 @@ export default function ChatScreen() {
     endVoiceCallContext();
   }, [endVoiceCallContext, callState.callDuration]);

+  // Audio output picker
+  const showAudioPicker = useCallback(async () => {
+    const devices = await getAvailableAudioOutputs();
+
+    // If devices found from LiveKit API, use them
+    if (devices.length > 0) {
+      const buttons: any[] = devices.map(device => ({
+        text: device.name,
+        onPress: () => selectAudioOutput(device.id),
+      }));
+      buttons.push({ text: 'Cancel', style: 'cancel' });
+      Alert.alert('Audio Output', 'Select audio device:', buttons);
+      return;
+    }
+
+    // Fallback for Android (and iOS if no devices found)
+    // Show simple Speaker/Earpiece toggle using setAudioOutput()
+    Alert.alert(
+      'Audio Output',
+      'Select audio output:',
+      [
+        {
+          text: '🔊 Speaker',
+          onPress: () => setAudioOutput(true),
+        },
+        {
+          text: '📱 Earpiece',
+          onPress: () => setAudioOutput(false),
+        },
+        { text: 'Cancel', style: 'cancel' },
+      ]
+    );
+  }, []);
+
   // Handle voice transcript entries - add to chat in real-time
   const handleVoiceTranscript = useCallback((role: 'user' | 'assistant', text: string) => {
     if (!text.trim()) return;
@@ -1025,6 +1060,15 @@
           </Text>
         </View>
       )}

+      {/* Audio output button - only during active call */}
+      {isCallActive && (
+        <TouchableOpacity
+          style={styles.audioButton}
+          onPress={showAudioPicker}
+        >
+          <Ionicons name="volume-high" size={20} color={AppColors.primary} />
+        </TouchableOpacity>
+      )}
       <TextInput
         style={styles.input}
@@ -1238,6 +1282,17 @@ const styles = StyleSheet.create({
     backgroundColor: AppColors.error,
     borderColor: AppColors.error,
   },
+  audioButton: {
+    width: 44,
+    height: 44,
+    borderRadius: 22,
+    backgroundColor: AppColors.surface,
+    justifyContent: 'center',
+    alignItems: 'center',
+    marginRight: Spacing.sm,
+    borderWidth: 1,
+    borderColor: AppColors.primary,
+  },
   callActiveIndicator: {
     width: '100%',
     height: '100%',

View File

@@ -435,6 +435,19 @@ export function useLiveKitRoom(options: UseLiveKitRoomOptions): UseLiveKitRoomRe
       logSuccess('Connected to room!');

+      // ========== CRITICAL: Start Audio Playback ==========
+      // This is REQUIRED for audio to play on iOS and Android!
+      // Without this call, remote audio tracks will NOT be heard.
+      logInfo('Starting audio playback (room.startAudio)...');
+      try {
+        await lkRoom.startAudio();
+        logSuccess(`Audio playback started! canPlaybackAudio: ${lkRoom.canPlaybackAudio}`);
+        setCanPlayAudio(lkRoom.canPlaybackAudio);
+      } catch (audioPlaybackErr: any) {
+        logError(`startAudio failed: ${audioPlaybackErr.message}`);
+        // Don't fail the whole call - audio might still work on some platforms
+      }
+
       // Check if connection was cancelled after connect
       if (isUnmountingRef.current || currentConnectionId !== connectionIdRef.current) {
         logWarn('Connection cancelled after room.connect()');
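
A hedged follow-on to the startAudio() change (not in the diff): livekit-client also exposes room.canPlaybackAudio and the RoomEvent.AudioPlaybackStatusChanged event, so when startAudio() is rejected (for example by an autoplay or audio-focus policy) the call can be retried from a user tap. watchAudioPlayback and onBlocked below are hypothetical names.

import { Room, RoomEvent } from 'livekit-client';

// Hypothetical helper: notify the UI whenever audio playback becomes blocked,
// so it can show a "tap to enable audio" control whose handler calls room.startAudio().
export function watchAudioPlayback(room: Room, onBlocked: () => void): void {
  room.on(RoomEvent.AudioPlaybackStatusChanged, () => {
    if (!room.canPlaybackAudio) {
      onBlocked();
    }
  });
}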

View File

@@ -8,6 +8,15 @@
 import { Platform } from 'react-native';

+/**
+ * Represents an available audio output device
+ */
+export interface AudioOutputDevice {
+  id: string;
+  name: string;
+  type: 'speaker' | 'earpiece' | 'bluetooth' | 'headphones' | 'unknown';
+}
+
 // AudioSession module - use 'any' to avoid complex typing issues with @livekit/react-native
 // The actual AudioSession from LiveKit has specific enum types that are hard to match statically
 let audioSessionModule: any = null;
@@ -93,50 +102,42 @@ export async function configureAudioForVoiceCall(): Promise<void> {
     }
   } else if (Platform.OS === 'android') {
     // Android-specific configuration - FORCE SPEAKER OUTPUT
-    // SOLUTION: Use 'inCommunication' for echo cancellation + forceHandleAudioRouting + explicit speaker selection
-    console.log('[AudioSession] Configuring Android audio for SPEAKER with echo cancellation...');
+    // CRITICAL: Use 'inCommunication' mode + 'music' stream for speaker
+    // Many Android devices default to earpiece for voice calls
+    console.log('[AudioSession] Configuring Android audio for SPEAKER...');

     await AudioSession.configureAudio({
       android: {
-        // Force speaker as preferred output
-        preferredOutputList: ['speaker'],
-        // CRITICAL: This flag forces audio routing even in communication mode
-        forceHandleAudioRouting: true,
+        // Use inCommunication mode but with music stream for speaker
         audioTypeOptions: {
           manageAudioFocus: true,
-          // Use 'inCommunication' for echo cancellation (important for voice calls!)
+          // inCommunication gives us more control over audio routing
           audioMode: 'inCommunication',
           audioFocusMode: 'gain',
-          // Voice call stream type for proper routing
-          audioStreamType: 'voiceCall',
-          audioAttributesUsageType: 'voiceCommunication',
-          audioAttributesContentType: 'speech',
+          // Use 'music' stream - goes to speaker by default!
+          audioStreamType: 'music',
+          audioAttributesUsageType: 'media',
+          audioAttributesContentType: 'music',
         },
+        // Force speaker as output
+        preferredOutputList: ['speaker'],
+        // Allow us to control audio routing
+        forceHandleAudioRouting: true,
       },
     });

     console.log('[AudioSession] Starting Android audio session...');
     await AudioSession.startAudioSession();

-    // CRITICAL: Explicitly select speaker AFTER session starts
-    // This overrides the default earpiece routing of inCommunication mode
+    // After starting, explicitly set speaker output
+    console.log('[AudioSession] Forcing speaker output...');
     try {
-      console.log('[AudioSession] Explicitly selecting speaker output...');
-      await AudioSession.selectAudioOutput('speaker');
-      console.log('[AudioSession] Speaker output explicitly selected!');
-    } catch (speakerErr) {
-      console.warn('[AudioSession] selectAudioOutput failed, trying showAudioRoutePicker:', speakerErr);
-      // Fallback: try to show audio route picker or use alternative method
-      try {
-        if (AudioSession.showAudioRoutePicker) {
-          await AudioSession.showAudioRoutePicker();
-        }
-      } catch (pickerErr) {
-        console.warn('[AudioSession] showAudioRoutePicker also failed:', pickerErr);
-      }
+      await AudioSession.showAudioRoutePicker?.();
+    } catch {
+      // showAudioRoutePicker may not be available, that's ok
     }

-    console.log('[AudioSession] Android speaker mode with echo cancellation configured!');
+    console.log('[AudioSession] Android speaker mode configured!');
   }

   console.log('[AudioSession] Configuration complete!');
@@ -210,30 +211,22 @@ export async function reconfigureAudioForPlayback(): Promise<void> {
       });
       console.log('[AudioSession] iOS reconfigured for speaker playback');
     } else if (Platform.OS === 'android') {
-      // Reconfigure Android - force speaker while keeping echo cancellation
+      // Reconfigure Android audio to ensure speaker output
+      // Using inCommunication + music stream for reliable speaker routing
       await AudioSession.configureAudio({
         android: {
-          preferredOutputList: ['speaker'],
-          forceHandleAudioRouting: true,
           audioTypeOptions: {
             manageAudioFocus: true,
-            audioMode: 'inCommunication', // Keep for echo cancellation
+            audioMode: 'inCommunication',
             audioFocusMode: 'gain',
-            audioStreamType: 'voiceCall',
-            audioAttributesUsageType: 'voiceCommunication',
-            audioAttributesContentType: 'speech',
+            audioStreamType: 'music',
+            audioAttributesUsageType: 'media',
+            audioAttributesContentType: 'music',
           },
+          preferredOutputList: ['speaker'],
+          forceHandleAudioRouting: true,
         },
       });

-      // Explicitly select speaker output
-      try {
-        await AudioSession.selectAudioOutput('speaker');
-        console.log('[AudioSession] Android speaker explicitly selected');
-      } catch (err) {
-        console.warn('[AudioSession] selectAudioOutput failed in reconfigure:', err);
-      }
-
       console.log('[AudioSession] Android reconfigured for speaker playback');
     }
@@ -244,6 +237,82 @@ export async function reconfigureAudioForPlayback(): Promise<void> {
   }
 }

+/**
+ * Switch audio output between speaker and earpiece (iOS + Android)
+ *
+ * @param useSpeaker - true for speaker, false for earpiece
+ */
+/**
+ * Get list of available audio output devices
+ *
+ * @returns Array of available audio output devices
+ */
+export async function getAvailableAudioOutputs(): Promise<AudioOutputDevice[]> {
+  console.log(`[AudioSession] Getting available audio outputs on ${Platform.OS}...`);
+
+  try {
+    const AudioSession = await getAudioSession();
+    if (!AudioSession) {
+      console.error('[AudioSession] Failed to get AudioSession module');
+      return [];
+    }
+
+    const outputs = await AudioSession.getAudioOutputs();
+    console.log('[AudioSession] Available outputs:', outputs);
+
+    // Map the raw outputs to our AudioOutputDevice interface
+    if (Array.isArray(outputs)) {
+      return outputs.map((output: any) => ({
+        id: output.id || output.deviceId || String(output),
+        name: output.name || output.deviceName || String(output),
+        type: mapDeviceType(output.type || output.deviceType),
+      }));
+    }
+
+    return [];
+  } catch (error) {
+    console.error('[AudioSession] getAvailableAudioOutputs error:', error);
+    return [];
+  }
+}
+
+/**
+ * Select a specific audio output device by ID
+ *
+ * @param deviceId - The ID of the device to select
+ */
+export async function selectAudioOutput(deviceId: string): Promise<void> {
+  console.log(`[AudioSession] Selecting audio output: ${deviceId} on ${Platform.OS}...`);
+
+  try {
+    const AudioSession = await getAudioSession();
+    if (!AudioSession) {
+      console.error('[AudioSession] Failed to get AudioSession module');
+      return;
+    }
+
+    await AudioSession.selectAudioOutput(deviceId);
+    console.log(`[AudioSession] Audio output selected: ${deviceId}`);
+  } catch (error) {
+    console.error('[AudioSession] selectAudioOutput error:', error);
+  }
+}
+
+/**
+ * Map raw device type to our AudioOutputDevice type
+ */
+function mapDeviceType(rawType: string | undefined): AudioOutputDevice['type'] {
+  if (!rawType) return 'unknown';
+
+  const type = rawType.toLowerCase();
+  if (type.includes('speaker')) return 'speaker';
+  if (type.includes('earpiece') || type.includes('receiver')) return 'earpiece';
+  if (type.includes('bluetooth')) return 'bluetooth';
+  if (type.includes('headphone') || type.includes('headset') || type.includes('wired')) return 'headphones';
+  return 'unknown';
+}
+
 /**
  * Switch audio output between speaker and earpiece (iOS + Android)
  *
@@ -276,29 +345,25 @@ export async function setAudioOutput(useSpeaker: boolean): Promise<void> {
         },
       });
     } else if (Platform.OS === 'android') {
-      // Android: Keep inCommunication mode for echo cancellation, use explicit output selection
+      // Android: Switch stream type to control speaker/earpiece
+      // - 'music' stream goes to speaker by default
+      // - 'voiceCall' stream goes to earpiece by default
       await AudioSession.configureAudio({
         android: {
-          preferredOutputList: useSpeaker ? ['speaker'] : ['earpiece'],
-          forceHandleAudioRouting: true,
          audioTypeOptions: {
            manageAudioFocus: true,
-            // Always use inCommunication for echo cancellation
-            audioMode: 'inCommunication',
+            audioMode: useSpeaker ? 'normal' : 'inCommunication',
             audioFocusMode: 'gain',
-            audioStreamType: 'voiceCall',
-            audioAttributesUsageType: 'voiceCommunication',
-            audioAttributesContentType: 'speech',
+            // Key difference: music→speaker, voiceCall→earpiece
+            audioStreamType: useSpeaker ? 'music' : 'voiceCall',
+            audioAttributesUsageType: useSpeaker ? 'media' : 'voiceCommunication',
+            audioAttributesContentType: useSpeaker ? 'music' : 'speech',
           },
+          // Also set preferred output list
+          preferredOutputList: useSpeaker ? ['speaker'] : ['earpiece'],
+          forceHandleAudioRouting: true,
         },
       });
-
-      // Explicitly select output device
-      try {
-        await AudioSession.selectAudioOutput(useSpeaker ? 'speaker' : 'earpiece');
-      } catch (err) {
-        console.warn('[AudioSession] selectAudioOutput failed:', err);
-      }
     }

     console.log(`[AudioSession] Audio output set to ${useSpeaker ? 'SPEAKER' : 'EARPIECE'}`);