Compare commits
No commits in common. "5b5cdf1098d2926fdf5822885ce163f205b27f7f" and "8240e51bc5682013ffb572e36ddf70a33ad84b99" have entirely different histories.
5b5cdf1098...8240e51bc5
@@ -45,7 +45,6 @@ import {
import { ConnectionState, Track } from 'livekit-client';
import { getToken, type BeneficiaryData } from '@/services/livekitService';
import { useAuth } from '@/contexts/AuthContext';
import { getAvailableAudioOutputs, selectAudioOutput, setAudioOutput } from '@/utils/audioSession';

// Register LiveKit globals (must be called before using LiveKit)
registerGlobals();
@@ -613,40 +612,6 @@ export default function ChatScreen() {
    endVoiceCallContext();
  }, [endVoiceCallContext, callState.callDuration]);

  // Audio output picker
  const showAudioPicker = useCallback(async () => {
    const devices = await getAvailableAudioOutputs();

    // If devices found from LiveKit API, use them
    if (devices.length > 0) {
      const buttons: any[] = devices.map(device => ({
        text: device.name,
        onPress: () => selectAudioOutput(device.id),
      }));
      buttons.push({ text: 'Cancel', style: 'cancel' });
      Alert.alert('Audio Output', 'Select audio device:', buttons);
      return;
    }

    // Fallback for Android (and iOS if no devices found)
    // Show simple Speaker/Earpiece toggle using setAudioOutput()
    Alert.alert(
      'Audio Output',
      'Select audio output:',
      [
        {
          text: '🔊 Speaker',
          onPress: () => setAudioOutput(true),
        },
        {
          text: '📱 Earpiece',
          onPress: () => setAudioOutput(false),
        },
        { text: 'Cancel', style: 'cancel' },
      ]
    );
  }, []);

  // Handle voice transcript entries - add to chat in real-time
  const handleVoiceTranscript = useCallback((role: 'user' | 'assistant', text: string) => {
    if (!text.trim()) return;
@@ -1060,15 +1025,6 @@ export default function ChatScreen() {
            </Text>
          </View>
        )}
        {/* Audio output button - only during active call */}
        {isCallActive && (
          <TouchableOpacity
            style={styles.audioButton}
            onPress={showAudioPicker}
          >
            <Ionicons name="volume-high" size={20} color={AppColors.primary} />
          </TouchableOpacity>
        )}

        <TextInput
          style={styles.input}
@@ -1282,17 +1238,6 @@ const styles = StyleSheet.create({
    backgroundColor: AppColors.error,
    borderColor: AppColors.error,
  },
  audioButton: {
    width: 44,
    height: 44,
    borderRadius: 22,
    backgroundColor: AppColors.surface,
    justifyContent: 'center',
    alignItems: 'center',
    marginRight: Spacing.sm,
    borderWidth: 1,
    borderColor: AppColors.primary,
  },
  callActiveIndicator: {
    width: '100%',
    height: '100%',
@@ -435,19 +435,6 @@ export function useLiveKitRoom(options: UseLiveKitRoomOptions): UseLiveKitRoomRe

      logSuccess('Connected to room!');

      // ========== CRITICAL: Start Audio Playback ==========
      // This is REQUIRED for audio to play on iOS and Android!
      // Without this call, remote audio tracks will NOT be heard.
      logInfo('Starting audio playback (room.startAudio)...');
      try {
        await lkRoom.startAudio();
        logSuccess(`Audio playback started! canPlaybackAudio: ${lkRoom.canPlaybackAudio}`);
        setCanPlayAudio(lkRoom.canPlaybackAudio);
      } catch (audioPlaybackErr: any) {
        logError(`startAudio failed: ${audioPlaybackErr.message}`);
        // Don't fail the whole call - audio might still work on some platforms
      }

      // Check if connection was cancelled after connect
      if (isUnmountingRef.current || currentConnectionId !== connectionIdRef.current) {
        logWarn('Connection cancelled after room.connect()');
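As a hedged aside (standard livekit-client API, not part of this compare): when startAudio() is blocked by the platform, the room's canPlaybackAudio flag changes and RoomEvent.AudioPlaybackStatusChanged fires, which gives a place to update UI state or retry playback after a user gesture. A minimal sketch:

// Sketch only: observe playback-permission changes via livekit-client events.
import { Room, RoomEvent } from 'livekit-client';

function watchAudioPlayback(room: Room, onChange: (canPlay: boolean) => void): void {
  room.on(RoomEvent.AudioPlaybackStatusChanged, () => {
    // canPlaybackAudio reflects whether the platform currently allows audio playback.
    onChange(room.canPlaybackAudio);
  });
}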
@@ -8,15 +8,6 @@

import { Platform } from 'react-native';

/**
 * Represents an available audio output device
 */
export interface AudioOutputDevice {
  id: string;
  name: string;
  type: 'speaker' | 'earpiece' | 'bluetooth' | 'headphones' | 'unknown';
}

// AudioSession module - use 'any' to avoid complex typing issues with @livekit/react-native
// The actual AudioSession from LiveKit has specific enum types that are hard to match statically
let audioSessionModule: any = null;
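The helpers later in this diff call a getAudioSession() function that is not shown in any hunk. A minimal sketch of such a lazy loader, assuming it caches the AudioSession export of @livekit/react-native in the audioSessionModule variable above (an assumption, not the repository's actual code):

// Hypothetical sketch: lazily load and cache the LiveKit AudioSession module.
// The real helper is not part of this compare; only the caching idea is shown.
async function getAudioSession(): Promise<any | null> {
  if (audioSessionModule) return audioSessionModule;
  try {
    // Dynamic import so the app still starts when the native module is missing
    // (for example in a web or Expo Go environment).
    const livekit = await import('@livekit/react-native');
    audioSessionModule = livekit.AudioSession;
  } catch (err) {
    console.error('[AudioSession] Failed to load @livekit/react-native:', err);
    audioSessionModule = null;
  }
  return audioSessionModule;
}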
@@ -102,42 +93,50 @@ export async function configureAudioForVoiceCall(): Promise<void> {
    }
  } else if (Platform.OS === 'android') {
    // Android-specific configuration - FORCE SPEAKER OUTPUT
    // CRITICAL: Use 'inCommunication' mode + 'music' stream for speaker
    // Many Android devices default to earpiece for voice calls
    console.log('[AudioSession] Configuring Android audio for SPEAKER...');
    // SOLUTION: Use 'inCommunication' for echo cancellation + forceHandleAudioRouting + explicit speaker selection
    console.log('[AudioSession] Configuring Android audio for SPEAKER with echo cancellation...');

    await AudioSession.configureAudio({
      android: {
        // Use inCommunication mode but with music stream for speaker
        // Force speaker as preferred output
        preferredOutputList: ['speaker'],
        // CRITICAL: This flag forces audio routing even in communication mode
        forceHandleAudioRouting: true,
        audioTypeOptions: {
          manageAudioFocus: true,
          // inCommunication gives us more control over audio routing
          // Use 'inCommunication' for echo cancellation (important for voice calls!)
          audioMode: 'inCommunication',
          audioFocusMode: 'gain',
          // Use 'music' stream - goes to speaker by default!
          audioStreamType: 'music',
          audioAttributesUsageType: 'media',
          audioAttributesContentType: 'music',
          // Voice call stream type for proper routing
          audioStreamType: 'voiceCall',
          audioAttributesUsageType: 'voiceCommunication',
          audioAttributesContentType: 'speech',
        },
        // Force speaker as output
        preferredOutputList: ['speaker'],
        // Allow us to control audio routing
        forceHandleAudioRouting: true,
      },
    });

    console.log('[AudioSession] Starting Android audio session...');
    await AudioSession.startAudioSession();

    // After starting, explicitly set speaker output
    console.log('[AudioSession] Forcing speaker output...');
    // CRITICAL: Explicitly select speaker AFTER session starts
    // This overrides the default earpiece routing of inCommunication mode
    try {
      await AudioSession.showAudioRoutePicker?.();
    } catch {
      // showAudioRoutePicker may not be available, that's ok
      console.log('[AudioSession] Explicitly selecting speaker output...');
      await AudioSession.selectAudioOutput('speaker');
      console.log('[AudioSession] Speaker output explicitly selected!');
    } catch (speakerErr) {
      console.warn('[AudioSession] selectAudioOutput failed, trying showAudioRoutePicker:', speakerErr);
      // Fallback: try to show audio route picker or use alternative method
      try {
        if (AudioSession.showAudioRoutePicker) {
          await AudioSession.showAudioRoutePicker();
        }
      } catch (pickerErr) {
        console.warn('[AudioSession] showAudioRoutePicker also failed:', pickerErr);
      }
    }

    console.log('[AudioSession] Android speaker mode configured!');
    console.log('[AudioSession] Android speaker mode with echo cancellation configured!');
  }

  console.log('[AudioSession] Configuration complete!');
@@ -211,22 +210,30 @@ export async function reconfigureAudioForPlayback(): Promise<void> {
    });
    console.log('[AudioSession] iOS reconfigured for speaker playback');
  } else if (Platform.OS === 'android') {
    // Reconfigure Android audio to ensure speaker output
    // Using inCommunication + music stream for reliable speaker routing
    // Reconfigure Android - force speaker while keeping echo cancellation
    await AudioSession.configureAudio({
      android: {
        audioTypeOptions: {
          manageAudioFocus: true,
          audioMode: 'inCommunication',
          audioFocusMode: 'gain',
          audioStreamType: 'music',
          audioAttributesUsageType: 'media',
          audioAttributesContentType: 'music',
        },
        preferredOutputList: ['speaker'],
        forceHandleAudioRouting: true,
        audioTypeOptions: {
          manageAudioFocus: true,
          audioMode: 'inCommunication', // Keep for echo cancellation
          audioFocusMode: 'gain',
          audioStreamType: 'voiceCall',
          audioAttributesUsageType: 'voiceCommunication',
          audioAttributesContentType: 'speech',
        },
      },
    });

    // Explicitly select speaker output
    try {
      await AudioSession.selectAudioOutput('speaker');
      console.log('[AudioSession] Android speaker explicitly selected');
    } catch (err) {
      console.warn('[AudioSession] selectAudioOutput failed in reconfigure:', err);
    }

    console.log('[AudioSession] Android reconfigured for speaker playback');
  }
@@ -237,82 +244,6 @@ export async function reconfigureAudioForPlayback(): Promise<void> {
  }
}

/**
 * Switch audio output between speaker and earpiece (iOS + Android)
 *
 * @param useSpeaker - true for speaker, false for earpiece
 */
/**
 * Get list of available audio output devices
 *
 * @returns Array of available audio output devices
 */
export async function getAvailableAudioOutputs(): Promise<AudioOutputDevice[]> {
  console.log(`[AudioSession] Getting available audio outputs on ${Platform.OS}...`);

  try {
    const AudioSession = await getAudioSession();
    if (!AudioSession) {
      console.error('[AudioSession] Failed to get AudioSession module');
      return [];
    }

    const outputs = await AudioSession.getAudioOutputs();
    console.log('[AudioSession] Available outputs:', outputs);

    // Map the raw outputs to our AudioOutputDevice interface
    if (Array.isArray(outputs)) {
      return outputs.map((output: any) => ({
        id: output.id || output.deviceId || String(output),
        name: output.name || output.deviceName || String(output),
        type: mapDeviceType(output.type || output.deviceType),
      }));
    }

    return [];
  } catch (error) {
    console.error('[AudioSession] getAvailableAudioOutputs error:', error);
    return [];
  }
}

/**
 * Select a specific audio output device by ID
 *
 * @param deviceId - The ID of the device to select
 */
export async function selectAudioOutput(deviceId: string): Promise<void> {
  console.log(`[AudioSession] Selecting audio output: ${deviceId} on ${Platform.OS}...`);

  try {
    const AudioSession = await getAudioSession();
    if (!AudioSession) {
      console.error('[AudioSession] Failed to get AudioSession module');
      return;
    }

    await AudioSession.selectAudioOutput(deviceId);
    console.log(`[AudioSession] Audio output selected: ${deviceId}`);
  } catch (error) {
    console.error('[AudioSession] selectAudioOutput error:', error);
  }
}

/**
 * Map raw device type to our AudioOutputDevice type
 */
function mapDeviceType(rawType: string | undefined): AudioOutputDevice['type'] {
  if (!rawType) return 'unknown';

  const type = rawType.toLowerCase();
  if (type.includes('speaker')) return 'speaker';
  if (type.includes('earpiece') || type.includes('receiver')) return 'earpiece';
  if (type.includes('bluetooth')) return 'bluetooth';
  if (type.includes('headphone') || type.includes('headset') || type.includes('wired')) return 'headphones';

  return 'unknown';
}

/**
 * Switch audio output between speaker and earpiece (iOS + Android)
 *
@@ -345,25 +276,29 @@ export async function setAudioOutput(useSpeaker: boolean): Promise<void> {
      },
    });
  } else if (Platform.OS === 'android') {
    // Android: Switch stream type to control speaker/earpiece
    // - 'music' stream goes to speaker by default
    // - 'voiceCall' stream goes to earpiece by default
    // Android: Keep inCommunication mode for echo cancellation, use explicit output selection
    await AudioSession.configureAudio({
      android: {
        audioTypeOptions: {
          manageAudioFocus: true,
          audioMode: useSpeaker ? 'normal' : 'inCommunication',
          audioFocusMode: 'gain',
          // Key difference: music→speaker, voiceCall→earpiece
          audioStreamType: useSpeaker ? 'music' : 'voiceCall',
          audioAttributesUsageType: useSpeaker ? 'media' : 'voiceCommunication',
          audioAttributesContentType: useSpeaker ? 'music' : 'speech',
        },
        // Also set preferred output list
        preferredOutputList: useSpeaker ? ['speaker'] : ['earpiece'],
        forceHandleAudioRouting: true,
        audioTypeOptions: {
          manageAudioFocus: true,
          // Always use inCommunication for echo cancellation
          audioMode: 'inCommunication',
          audioFocusMode: 'gain',
          audioStreamType: 'voiceCall',
          audioAttributesUsageType: 'voiceCommunication',
          audioAttributesContentType: 'speech',
        },
      },
    });

    // Explicitly select output device
    try {
      await AudioSession.selectAudioOutput(useSpeaker ? 'speaker' : 'earpiece');
    } catch (err) {
      console.warn('[AudioSession] selectAudioOutput failed:', err);
    }
  }

  console.log(`[AudioSession] Audio output set to ${useSpeaker ? 'SPEAKER' : 'EARPIECE'}`);
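Taken together, the hunks above suggest a call-setup order; the sketch below is an assumption about how these helpers are meant to be combined, not code from either commit:

import { Room } from 'livekit-client';
import { configureAudioForVoiceCall, setAudioOutput } from '@/utils/audioSession';

// Hypothetical wiring: configure the native audio session before connecting,
// start playback right after connect, then route audio to the loudspeaker.
async function startVoiceCall(url: string, token: string): Promise<Room> {
  await configureAudioForVoiceCall(); // Android: inCommunication mode + speaker routing
  const room = new Room();
  await room.connect(url, token);
  await room.startAudio();            // required before remote audio is audible
  await setAudioOutput(true);         // true = speaker, false = earpiece
  return room;
}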