Fix Android audio routing - use music stream for speaker output
- Changed audioStreamType from 'voiceCall' to 'music' on Android
  - voiceCall stream defaults to earpiece
  - music stream defaults to speaker
- Added Debug tab to test voice calls with detailed logs
- Added speaker/earpiece toggle button with proper stream switching
- Full Android AudioSession support for LiveKit voice calls

audioSession.ts:
- configureAudioForVoiceCall: uses music/media for speaker output
- setAudioOutput: switches between music (speaker) and voiceCall (earpiece)
- reconfigureAudioForPlayback: ensures speaker output on Android

debug.tsx:
- Added platform info display
- Added speaker toggle with logging
- Improved UI with control rows
parent bbc59e61ce
commit 57577b42c9
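The change in a nutshell — a minimal sketch of the Android branch added to audioSession.ts below (every field name here is one the diff itself introduces; treat this as an illustration of the routing idea, not a separate API):

    import { AudioSession } from '@livekit/react-native';

    // Android picks the physical output from the stream type:
    // the 'music' stream plays through the loudspeaker by default,
    // while 'voiceCall' defaults to the earpiece.
    async function routeToLoudspeaker(): Promise<void> {
      await AudioSession.configureAudio({
        android: {
          audioTypeOptions: {
            manageAudioFocus: true,
            audioMode: 'normal',
            audioFocusMode: 'gain',
            audioStreamType: 'music',            // speaker by default
            audioAttributesUsageType: 'media',
            audioAttributesContentType: 'music',
          },
          preferredOutputList: ['speaker'],
          forceHandleAudioRouting: true,
        },
      });
      await AudioSession.startAudioSession();
    }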
Tab layout (TabLayout):

@@ -72,11 +72,14 @@ export default function TabLayout() {
           ),
         }}
       />
-      {/* Debug tab hidden */}
+      {/* Debug tab - for testing audio/voice */}
       <Tabs.Screen
         name="debug"
         options={{
-          href: null,
+          title: 'Debug',
+          tabBarIcon: ({ color, size }) => (
+            <Feather name="terminal" size={22} color={color} />
+          ),
         }}
       />
       {/* Hide explore tab */}
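For context: in expo-router, a Tabs.Screen with `href: null` is kept out of the tab bar entirely, which is how the debug tab was hidden before this change; giving it a title and tabBarIcon instead makes it visible. The still-hidden explore screen keeps the old pattern, roughly (screen name assumed from the comment):

    {/* Hide explore tab */}
    <Tabs.Screen name="explore" options={{ href: null }} />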
debug.tsx:

@@ -3,8 +3,10 @@
  *
  * All-in-one screen for testing Julia AI voice:
  * - Start/End call buttons
+ * - Speaker/Earpiece toggle with logging
  * - Real-time logs of all LiveKit events
  * - Copy logs button
+ * - Works on both iOS and Android
  */
 
 import React, { useState, useEffect, useRef, useCallback } from 'react';
@@ -26,6 +28,11 @@ import { activateKeepAwakeAsync, deactivateKeepAwake } from 'expo-keep-awake';
 import type { Room as RoomType } from 'livekit-client';
 import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
 import { getToken, VOICE_NAME } from '@/services/livekitService';
+import {
+  configureAudioForVoiceCall,
+  stopAudioSession,
+  setAudioOutput,
+} from '@/utils/audioSession';
 import Constants from 'expo-constants';
 
 const APP_VERSION = Constants.expoConfig?.version ?? '?.?.?';
@@ -43,6 +50,7 @@ export default function DebugScreen() {
   const [logs, setLogs] = useState<LogEntry[]>([]);
   const [callState, setCallState] = useState<CallState>('idle');
   const [callDuration, setCallDuration] = useState(0);
+  const [isSpeakerOn, setIsSpeakerOn] = useState(true); // Default to speaker
   const flatListRef = useRef<FlatList>(null);
   const roomRef = useRef<RoomType | null>(null);
   const callStartTimeRef = useRef<number | null>(null);
@@ -113,6 +121,20 @@
     return () => subscription.remove();
   }, [log]);
 
+  // Toggle speaker
+  const toggleSpeaker = useCallback(async () => {
+    const newState = !isSpeakerOn;
+    log(`=== TOGGLING SPEAKER: ${isSpeakerOn ? 'ON' : 'OFF'} → ${newState ? 'ON' : 'OFF'} ===`, 'info');
+
+    try {
+      await setAudioOutput(newState);
+      setIsSpeakerOn(newState);
+      log(`Speaker toggled to ${newState ? 'ON (loud speaker)' : 'OFF (earpiece)'}`, 'success');
+    } catch (err: any) {
+      log(`Speaker toggle error: ${err?.message || err}`, 'error');
+    }
+  }, [isSpeakerOn, log]);
+
   // Start call
   const startCall = useCallback(async () => {
     if (callState !== 'idle') return;
@@ -120,10 +142,12 @@
     clearLogs();
     setCallState('connecting');
     setCallDuration(0);
+    setIsSpeakerOn(true); // Reset speaker state
     callStartTimeRef.current = null;
 
     try {
       log('=== STARTING VOICE CALL ===', 'info');
+      log(`Platform: ${Platform.OS} ${Platform.Version}`, 'info');
 
       // Keep screen awake
       await activateKeepAwakeAsync('voiceCall').catch(() => {});
@@ -131,7 +155,7 @@
 
       // Step 1: Register WebRTC globals
       log('Step 1: Importing @livekit/react-native...', 'info');
-      const { registerGlobals, AudioSession } = await import('@livekit/react-native');
+      const { registerGlobals } = await import('@livekit/react-native');
 
       if (typeof global.RTCPeerConnection === 'undefined') {
         log('Registering WebRTC globals...', 'info');
@@ -146,11 +170,14 @@
       const { Room, RoomEvent, ConnectionState, Track } = await import('livekit-client');
       log('livekit-client imported', 'success');
 
-      // Step 3: Start iOS AudioSession
-      if (Platform.OS === 'ios') {
-        log('Step 3: Starting iOS AudioSession...', 'info');
-        await AudioSession.startAudioSession();
-        log('iOS AudioSession started', 'success');
+      // Step 3: Configure AudioSession (iOS + Android)
+      log(`Step 3: Configuring AudioSession for ${Platform.OS}...`, 'info');
+      try {
+        await configureAudioForVoiceCall();
+        log(`AudioSession configured for ${Platform.OS}`, 'success');
+      } catch (audioErr: any) {
+        log(`AudioSession config error: ${audioErr?.message || audioErr}`, 'error');
+        // Continue anyway - might still work
       }
 
       // Step 4: Get token from server
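The try/catch above deliberately swallows configuration failures, but the ordering is the load-bearing part: audioSession.ts insists the session is configured before the room connects. Condensed, the sequence looks like this (a sketch; token fetching and event wiring elided):

    import { configureAudioForVoiceCall } from '@/utils/audioSession';
    import { Room } from 'livekit-client';

    async function connectCall(url: string, token: string): Promise<Room> {
      await configureAudioForVoiceCall(); // must run BEFORE connect
      const room = new Room();
      await room.connect(url, token);     // standard livekit-client API
      return room;
    }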
@@ -325,11 +352,13 @@
         log('Disconnected from room', 'success');
       }
 
-      if (Platform.OS === 'ios') {
-        log('Stopping iOS AudioSession...', 'info');
-        const { AudioSession } = await import('@livekit/react-native');
-        await AudioSession.stopAudioSession();
-        log('iOS AudioSession stopped', 'success');
+      // Stop AudioSession (iOS + Android)
+      log(`Stopping AudioSession on ${Platform.OS}...`, 'info');
+      try {
+        await stopAudioSession();
+        log('AudioSession stopped', 'success');
+      } catch (audioErr: any) {
+        log(`AudioSession stop error: ${audioErr?.message || audioErr}`, 'error');
       }
 
       deactivateKeepAwake('voiceCall');
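Teardown mirrors setup. A sketch of the order used here (room disconnect first, then the audio session, then the keep-awake lock):

    import type { Room } from 'livekit-client';
    import { deactivateKeepAwake } from 'expo-keep-awake';
    import { stopAudioSession } from '@/utils/audioSession';

    async function endCall(room: Room): Promise<void> {
      await room.disconnect();          // leave the LiveKit room
      await stopAudioSession();         // release the audio session on both platforms
      deactivateKeepAwake('voiceCall'); // let the screen sleep again
    }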
@@ -388,7 +417,7 @@
           <Text style={styles.logCount}>{logs.length} logs</Text>
         </View>
 
-      {/* Control Buttons */}
+      {/* Control Buttons - Row 1: Call controls */}
       <View style={styles.controls}>
         {callState === 'idle' ? (
           <TouchableOpacity style={styles.startButton} onPress={startCall}>
@@ -406,6 +435,23 @@
           </TouchableOpacity>
         )}
 
+        {/* Speaker Toggle Button */}
+        <TouchableOpacity
+          style={[styles.speakerButton, isSpeakerOn ? styles.speakerOn : styles.speakerOff]}
+          onPress={toggleSpeaker}
+          disabled={callState === 'idle'}
+        >
+          <Ionicons
+            name={isSpeakerOn ? 'volume-high' : 'ear'}
+            size={20}
+            color="#fff"
+          />
+          <Text style={styles.smallButtonText}>{isSpeakerOn ? 'Speaker' : 'Ear'}</Text>
+        </TouchableOpacity>
+      </View>
+
+      {/* Control Buttons - Row 2: Log controls */}
+      <View style={styles.controlsRow2}>
         <TouchableOpacity style={styles.copyButton} onPress={copyLogs}>
           <Ionicons name="copy" size={20} color="#fff" />
           <Text style={styles.smallButtonText}>Copy</Text>
@@ -420,6 +466,10 @@
           <Ionicons name="trash" size={20} color="#fff" />
           <Text style={styles.smallButtonText}>Clear</Text>
         </TouchableOpacity>
+
+        <View style={styles.platformBadge}>
+          <Text style={styles.platformText}>{Platform.OS} {Platform.Version}</Text>
+        </View>
       </View>
 
       {/* Logs */}
@@ -510,6 +560,13 @@ const styles = StyleSheet.create({
   controls: {
     flexDirection: 'row',
     padding: Spacing.md,
+    paddingBottom: Spacing.sm,
+    gap: 10,
+  },
+  controlsRow2: {
+    flexDirection: 'row',
+    paddingHorizontal: Spacing.md,
+    paddingBottom: Spacing.md,
     gap: 10,
     borderBottomWidth: 1,
     borderBottomColor: '#333',
@@ -563,6 +620,29 @@
     paddingHorizontal: 12,
     borderRadius: 10,
   },
+  speakerButton: {
+    alignItems: 'center',
+    justifyContent: 'center',
+    paddingVertical: 10,
+    paddingHorizontal: 16,
+    borderRadius: 10,
+  },
+  speakerOn: {
+    backgroundColor: '#f59e0b', // Orange when speaker is ON
+  },
+  speakerOff: {
+    backgroundColor: '#4b5563', // Gray when earpiece
+  },
+  platformBadge: {
+    flex: 1,
+    alignItems: 'flex-end',
+    justifyContent: 'center',
+  },
+  platformText: {
+    color: '#888',
+    fontSize: 11,
+    fontWeight: '500',
+  },
   smallButtonText: {
     color: '#fff',
     fontSize: 10,
audioSession.ts:

@@ -1,8 +1,9 @@
 /**
- * iOS AudioSession Configuration Helpers
+ * Audio Session Configuration Helpers (iOS + Android)
  *
  * CRITICAL: This must be configured BEFORE connecting to LiveKit room!
  * Without proper AudioSession setup, microphone won't work on iOS.
+ * On Android, this controls speaker/earpiece routing.
  */
 
 import { Platform } from 'react-native';
@@ -16,8 +17,6 @@ let audioSessionModule: any = null;
  * This is needed because @livekit/react-native must be imported after registerGlobals()
  */
 async function getAudioSession(): Promise<any | null> {
-  if (Platform.OS !== 'ios') return null;
-
   if (!audioSessionModule) {
     const livekit = await import('@livekit/react-native');
     audioSessionModule = livekit.AudioSession;
@@ -27,22 +26,21 @@ async function getAudioSession(): Promise<any | null> {
 }
 
 /**
- * Configure iOS AudioSession for bidirectional voice call
+ * Configure AudioSession for bidirectional voice call (iOS + Android)
  *
  * MUST be called BEFORE connecting to LiveKit room!
  *
- * Configuration:
+ * iOS Configuration:
  * - Category: playAndRecord (both speaker and mic)
 * - Mode: voiceChat (optimized for voice calls)
  * - Options: Bluetooth, speaker, mix with others
+ *
+ * Android Configuration:
+ * - audioTypeOptions: music/media (routes output to the loudspeaker)
+ * - forceHandleAudioRouting: true (to control speaker/earpiece)
  */
 export async function configureAudioForVoiceCall(): Promise<void> {
-  if (Platform.OS !== 'ios') {
-    console.log('[AudioSession] Skipping on non-iOS platform');
-    return;
-  }
-
-  console.log('[AudioSession] Configuring for voice call...');
+  console.log(`[AudioSession] Configuring for voice call on ${Platform.OS}...`);
 
   try {
     const AudioSession = await getAudioSession();
@@ -51,7 +49,8 @@ export async function configureAudioForVoiceCall(): Promise<void> {
       return;
     }
 
-    // Step 1: Set Apple-specific audio configuration
+    if (Platform.OS === 'ios') {
+      // iOS-specific configuration
       console.log('[AudioSession] Step 1: Setting Apple audio config...');
       await AudioSession.setAppleAudioConfiguration({
         audioCategory: 'playAndRecord',
@@ -64,7 +63,6 @@ export async function configureAudioForVoiceCall(): Promise<void> {
         audioMode: 'voiceChat',
       });
 
-    // Step 2: Configure default output to speaker
       console.log('[AudioSession] Step 2: Setting default output...');
       await AudioSession.configureAudio({
         ios: {
@@ -72,9 +70,35 @@ export async function configureAudioForVoiceCall(): Promise<void> {
         },
       });
 
-    // Step 3: Start the audio session
       console.log('[AudioSession] Step 3: Starting audio session...');
       await AudioSession.startAudioSession();
+    } else if (Platform.OS === 'android') {
+      // Android-specific configuration
+      // IMPORTANT: Using 'music' stream type to force output to speaker
+      // 'voiceCall' stream type defaults to earpiece on many Android devices
+      console.log('[AudioSession] Configuring Android audio for SPEAKER...');
+      await AudioSession.configureAudio({
+        android: {
+          // Use MEDIA mode to ensure speaker output
+          audioTypeOptions: {
+            manageAudioFocus: true,
+            audioMode: 'normal',
+            audioFocusMode: 'gain',
+            // Use 'music' stream - goes to speaker by default
+            audioStreamType: 'music',
+            audioAttributesUsageType: 'media',
+            audioAttributesContentType: 'music',
+          },
+          // Force speaker as output
+          preferredOutputList: ['speaker'],
+          // Allow us to control audio routing
+          forceHandleAudioRouting: true,
+        },
+      });
+
+      console.log('[AudioSession] Starting Android audio session...');
+      await AudioSession.startAudioSession();
+    }
 
     console.log('[AudioSession] Configuration complete!');
   } catch (error) {
@@ -84,16 +108,16 @@ export async function configureAudioForVoiceCall(): Promise<void> {
 }
 
 /**
- * Stop iOS AudioSession
+ * Stop AudioSession (iOS + Android)
  *
 * Should be called when disconnecting from voice call
  */
 export async function stopAudioSession(): Promise<void> {
-  if (Platform.OS !== 'ios') {
+  if (Platform.OS !== 'ios' && Platform.OS !== 'android') {
     return;
   }
 
-  console.log('[AudioSession] Stopping audio session...');
+  console.log(`[AudioSession] Stopping audio session on ${Platform.OS}...`);
 
   try {
     const AudioSession = await getAudioSession();
@@ -110,16 +134,16 @@ export async function stopAudioSession(): Promise<void> {
 }
 
 /**
- * Reconfigure audio session after remote track arrives
+ * Reconfigure audio session after remote track arrives (iOS + Android)
  *
- * Sometimes iOS needs a kick to properly route audio after remote participant joins
+ * Sometimes the OS needs a kick to properly route audio after remote participant joins
  */
 export async function reconfigureAudioForPlayback(): Promise<void> {
-  if (Platform.OS !== 'ios') {
+  if (Platform.OS !== 'ios' && Platform.OS !== 'android') {
     return;
   }
 
-  console.log('[AudioSession] Reconfiguring for playback...');
+  console.log(`[AudioSession] Reconfiguring for playback on ${Platform.OS}...`);
 
   try {
     const AudioSession = await getAudioSession();
@@ -127,6 +151,7 @@ export async function reconfigureAudioForPlayback(): Promise<void> {
       return;
     }
 
+    if (Platform.OS === 'ios') {
       // Just reconfigure the same settings - this "refreshes" the audio routing
       await AudioSession.setAppleAudioConfiguration({
         audioCategory: 'playAndRecord',
@@ -138,6 +163,24 @@
         ],
         audioMode: 'voiceChat',
       });
+    } else if (Platform.OS === 'android') {
+      // Reconfigure Android audio to ensure speaker output
+      // Using 'music' stream type to force speaker
+      await AudioSession.configureAudio({
+        android: {
+          audioTypeOptions: {
+            manageAudioFocus: true,
+            audioMode: 'normal',
+            audioFocusMode: 'gain',
+            audioStreamType: 'music',
+            audioAttributesUsageType: 'media',
+            audioAttributesContentType: 'music',
+          },
+          preferredOutputList: ['speaker'],
+          forceHandleAudioRouting: true,
+        },
+      });
+    }
 
     console.log('[AudioSession] Reconfigured successfully');
   } catch (error) {
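One plausible call site for this helper — not part of this diff — is the track-subscription event, so the routing gets its "kick" exactly when the remote audio arrives:

    import { RoomEvent, Track, type Room } from 'livekit-client';
    import { reconfigureAudioForPlayback } from '@/utils/audioSession';

    function wirePlaybackKick(room: Room): void {
      room.on(RoomEvent.TrackSubscribed, (track) => {
        if (track.kind === Track.Kind.Audio) {
          reconfigureAudioForPlayback().catch((err) =>
            console.warn('[AudioSession] reconfigure failed', err),
          );
        }
      });
    }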
@@ -147,17 +190,12 @@ export async function reconfigureAudioForPlayback(): Promise<void> {
 }
 
 /**
- * Switch audio output between speaker and earpiece
+ * Switch audio output between speaker and earpiece (iOS + Android)
  *
  * @param useSpeaker - true for speaker, false for earpiece
  */
 export async function setAudioOutput(useSpeaker: boolean): Promise<void> {
-  if (Platform.OS !== 'ios') {
-    console.log('[AudioSession] setAudioOutput - skipping on non-iOS');
-    return;
-  }
-
-  console.log(`[AudioSession] Setting audio output to ${useSpeaker ? 'SPEAKER' : 'EARPIECE'}...`);
+  console.log(`[AudioSession] Setting audio output to ${useSpeaker ? 'SPEAKER' : 'EARPIECE'} on ${Platform.OS}...`);
 
   try {
     const AudioSession = await getAudioSession();
@@ -166,7 +204,8 @@ export async function setAudioOutput(useSpeaker: boolean): Promise<void> {
       return;
     }
 
-    // Configure audio output
+    if (Platform.OS === 'ios') {
+      // iOS: Configure audio output
       await AudioSession.configureAudio({
         ios: {
           defaultOutput: useSpeaker ? 'speaker' : 'earpiece',
@@ -181,6 +220,27 @@
           : ['allowBluetooth', 'allowBluetoothA2DP', 'mixWithOthers'],
         audioMode: 'voiceChat',
       });
+    } else if (Platform.OS === 'android') {
+      // Android: Switch stream type to control speaker/earpiece
+      // - 'music' stream goes to speaker by default
+      // - 'voiceCall' stream goes to earpiece by default
+      await AudioSession.configureAudio({
+        android: {
+          audioTypeOptions: {
+            manageAudioFocus: true,
+            audioMode: useSpeaker ? 'normal' : 'inCommunication',
+            audioFocusMode: 'gain',
+            // Key difference: music→speaker, voiceCall→earpiece
+            audioStreamType: useSpeaker ? 'music' : 'voiceCall',
+            audioAttributesUsageType: useSpeaker ? 'media' : 'voiceCommunication',
+            audioAttributesContentType: useSpeaker ? 'music' : 'speech',
+          },
+          // Also set preferred output list
+          preferredOutputList: useSpeaker ? ['speaker'] : ['earpiece'],
+          forceHandleAudioRouting: true,
+        },
+      });
+    }
 
     console.log(`[AudioSession] Audio output set to ${useSpeaker ? 'SPEAKER' : 'EARPIECE'}`);
   } catch (error) {
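From the caller's side (see toggleSpeaker in debug.tsx above), the whole routing story collapses to one boolean — inside any async handler:

    import { setAudioOutput } from '@/utils/audioSession';

    await setAudioOutput(true);  // Android: 'music' stream → loudspeaker; iOS: defaultOutput 'speaker'
    await setAudioOutput(false); // Android: 'voiceCall' stream → earpiece; iOS: defaultOutput 'earpiece'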