Add Chat tab with Julia AI + voice call improvements

- Enable Chat tab (replace Debug) - text chat with Julia AI
- Add voice call button in chat header and input area
- Add speaker/earpiece toggle in voice-call screen
- setAudioOutput() function for switching audio output
This commit is contained in:
Sergei 2026-01-18 22:00:26 -08:00
parent 6a6c85f7c3
commit cd9dddda34
3 changed files with 72 additions and 11 deletions

View File

@ -46,11 +46,14 @@ export default function TabLayout() {
href: null, href: null,
}} }}
/> />
{/* Chat hidden for now - testing via debug */} {/* Chat with Julia AI */}
<Tabs.Screen <Tabs.Screen
name="chat" name="chat"
options={{ options={{
href: null, title: 'Julia',
tabBarIcon: ({ color, size }) => (
<Feather name="message-circle" size={22} color={color} />
),
}} }}
/> />
{/* Voice tab hidden - using Debug for testing */} {/* Voice tab hidden - using Debug for testing */}
@ -69,14 +72,11 @@ export default function TabLayout() {
), ),
}} }}
/> />
{/* Debug tab for testing */} {/* Debug tab hidden */}
<Tabs.Screen <Tabs.Screen
name="debug" name="debug"
options={{ options={{
title: 'Debug', href: null,
tabBarIcon: ({ color, size }) => (
<Feather name="code" size={22} color={color} />
),
}} }}
/> />
{/* Hide explore tab */} {/* Hide explore tab */}

View File

@ -35,6 +35,7 @@ import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import { VOICE_NAME } from '@/services/livekitService'; import { VOICE_NAME } from '@/services/livekitService';
import { useVoiceTranscript } from '@/contexts/VoiceTranscriptContext'; import { useVoiceTranscript } from '@/contexts/VoiceTranscriptContext';
import { useLiveKitRoom, ConnectionState } from '@/hooks/useLiveKitRoom'; import { useLiveKitRoom, ConnectionState } from '@/hooks/useLiveKitRoom';
import { setAudioOutput } from '@/utils/audioSession';
const { width: SCREEN_WIDTH } = Dimensions.get('window'); const { width: SCREEN_WIDTH } = Dimensions.get('window');
@ -47,6 +48,9 @@ export default function VoiceCallScreen() {
const [logsMinimized, setLogsMinimized] = React.useState(false); const [logsMinimized, setLogsMinimized] = React.useState(false);
const logsScrollRef = useRef<ScrollView>(null); const logsScrollRef = useRef<ScrollView>(null);
// Speaker/earpiece toggle state
const [isSpeakerOn, setIsSpeakerOn] = React.useState(true);
// LiveKit hook - ALL logic is here // LiveKit hook - ALL logic is here
const { const {
state, state,
@ -159,6 +163,13 @@ export default function VoiceCallScreen() {
router.back(); router.back();
}; };
// Toggle speaker/earpiece
// UI state flips optimistically; setAudioOutput handles its own errors,
// so the label may briefly disagree with the real route if routing fails.
const handleToggleSpeaker = async () => {
  const next = !isSpeakerOn;
  setIsSpeakerOn(next);
  await setAudioOutput(next);
};
// Copy logs to clipboard // Copy logs to clipboard
const copyLogs = async () => { const copyLogs = async () => {
const logsText = logs.map(l => `[${l.timestamp}] ${l.message}`).join('\n'); const logsText = logs.map(l => `[${l.timestamp}] ${l.message}`).join('\n');
@ -376,10 +387,18 @@ export default function VoiceCallScreen() {
<Ionicons name="call" size={32} color={AppColors.white} /> <Ionicons name="call" size={32} color={AppColors.white} />
</TouchableOpacity> </TouchableOpacity>
{/* Speaker button (placeholder for future) */} {/* Speaker/Earpiece toggle */}
<TouchableOpacity style={styles.controlButton} disabled> <TouchableOpacity
<Ionicons name="volume-high" size={28} color={AppColors.white} /> style={[styles.controlButton, isSpeakerOn && styles.controlButtonActive]}
<Text style={styles.controlLabel}>Speaker</Text> onPress={handleToggleSpeaker}
disabled={!isActive}
>
<Ionicons
name={isSpeakerOn ? 'volume-high' : 'ear'}
size={28}
color={isSpeakerOn ? AppColors.success : AppColors.white}
/>
<Text style={styles.controlLabel}>{isSpeakerOn ? 'Speaker' : 'Earpiece'}</Text>
</TouchableOpacity> </TouchableOpacity>
</View> </View>
</SafeAreaView> </SafeAreaView>

View File

@ -145,3 +145,45 @@ export async function reconfigureAudioForPlayback(): Promise<void> {
// Don't throw - this is a best-effort operation // Don't throw - this is a best-effort operation
} }
} }
/**
 * Switch audio output between speaker and earpiece.
 *
 * iOS-only: a no-op on every other platform. Errors are logged and
 * swallowed so a failed route change never interrupts an active call.
 *
 * @param useSpeaker - true for speaker, false for earpiece
 */
export async function setAudioOutput(useSpeaker: boolean): Promise<void> {
  if (Platform.OS !== 'ios') {
    console.log('[AudioSession] setAudioOutput - skipping on non-iOS');
    return;
  }

  const routeLabel = useSpeaker ? 'SPEAKER' : 'EARPIECE';
  console.log(`[AudioSession] Setting audio output to ${routeLabel}...`);

  try {
    const AudioSession = await getAudioSession();
    if (!AudioSession) {
      console.error('[AudioSession] Failed to get AudioSession module');
      return;
    }

    // Configure audio output
    await AudioSession.configureAudio({
      ios: { defaultOutput: useSpeaker ? 'speaker' : 'earpiece' },
    });

    // Also update the full configuration to ensure it takes effect.
    // Option order matches the original: defaultToSpeaker is inserted
    // between the Bluetooth options and mixWithOthers only in speaker mode.
    const categoryOptions = ['allowBluetooth', 'allowBluetoothA2DP'];
    if (useSpeaker) {
      categoryOptions.push('defaultToSpeaker');
    }
    categoryOptions.push('mixWithOthers');

    await AudioSession.setAppleAudioConfiguration({
      audioCategory: 'playAndRecord',
      audioCategoryOptions: categoryOptions,
      audioMode: 'voiceChat',
    });

    console.log(`[AudioSession] Audio output set to ${routeLabel}`);
  } catch (error) {
    console.error('[AudioSession] setAudioOutput error:', error);
  }
}