/**
 * Voice Screen - Ultravox Voice AI Integration
 * Real-time voice conversation with Julia AI using WebRTC
 * Ferdinand context is automatically loaded
 */
import React, { useState, useCallback, useRef, useEffect } from 'react';
import {
  View,
  Text,
  StyleSheet,
  TouchableOpacity,
  ActivityIndicator,
  Animated,
  Easing,
} from 'react-native';
import { SafeAreaView } from 'react-native-safe-area-context';
import { Ionicons, Feather } from '@expo/vector-icons';
import { useRouter } from 'expo-router';
import { useFocusEffect } from '@react-navigation/native';
import {
  useUltravox,
  UltravoxSessionStatus,
  type Transcript,
} from 'ultravox-react-native';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import {
  createCall,
  getSystemPrompt,
  VOICE_NAME,
} from '@/services/ultravoxService';

type CallState = 'idle' | 'connecting' | 'active' | 'ending' | 'error';

export default function VoiceScreen() {
  const router = useRouter();

  // Call state
  const [callState, setCallState] = useState<CallState>('idle');
  const [error, setError] = useState<string | null>(null);
  const [isMuted, setIsMuted] = useState(false);

  // Animation for the voice button
  const pulseAnim = useRef(new Animated.Value(1)).current;
  const rotateAnim = useRef(new Animated.Value(0)).current;

  // Tool implementations for navigation (client-side)
  const toolImplementations = {
    navigateToDashboard: () => {
      console.log('[Voice] Tool: navigateToDashboard');
      router.push('/(tabs)/dashboard');
      return 'Navigating to Dashboard';
    },
    navigateToBeneficiaries: () => {
      console.log('[Voice] Tool: navigateToBeneficiaries');
      router.push('/(tabs)/beneficiaries');
      return 'Navigating to Beneficiaries';
    },
    navigateToProfile: () => {
      console.log('[Voice] Tool: navigateToProfile');
      router.push('/(tabs)/profile');
      return 'Navigating to Profile';
    },
  };

  // Ultravox hook - registers client tools and maps SDK status to local call state
  const { transcripts, joinCall, leaveCall, session } = useUltravox({
    tools: toolImplementations,
    onStatusChange: (event) => {
      console.log('[Voice] Status changed:', event.status);
      switch (event.status) {
        case UltravoxSessionStatus.IDLE:
        case UltravoxSessionStatus.DISCONNECTED:
          setCallState('idle');
          break;
        case UltravoxSessionStatus.CONNECTING:
          setCallState('connecting');
          break;
        case UltravoxSessionStatus.LISTENING:
        case UltravoxSessionStatus.THINKING:
        case UltravoxSessionStatus.SPEAKING:
          setCallState('active');
          break;
        case UltravoxSessionStatus.DISCONNECTING:
          setCallState('ending');
          break;
      }
    },
  });

  // Pulse animation when active
  useEffect(() => {
    if (callState === 'active') {
      const pulse = Animated.loop(
        Animated.sequence([
          Animated.timing(pulseAnim, {
            toValue: 1.15,
            duration: 1000,
            easing: Easing.inOut(Easing.ease),
            useNativeDriver: true,
          }),
          Animated.timing(pulseAnim, {
            toValue: 1,
            duration: 1000,
            easing: Easing.inOut(Easing.ease),
            useNativeDriver: true,
          }),
        ])
      );
      pulse.start();
      return () => pulse.stop();
    } else {
      pulseAnim.setValue(1);
    }
  }, [callState, pulseAnim]);

  // Rotate animation when connecting
  useEffect(() => {
    if (callState === 'connecting') {
      const rotate = Animated.loop(
        Animated.timing(rotateAnim, {
          toValue: 1,
          duration: 1500,
          easing: Easing.linear,
          useNativeDriver: true,
        })
      );
      rotate.start();
      return () => rotate.stop();
    } else {
      rotateAnim.setValue(0);
    }
  }, [callState, rotateAnim]);

  // Start voice call
  const startCall = useCallback(async () => {
    setError(null);
    setCallState('connecting');

    // Get system prompt with Ferdinand context
    const systemPrompt = getSystemPrompt();

    try {
      // Create call via API
      const result = await createCall({
        systemPrompt,
        firstSpeaker: 'FIRST_SPEAKER_AGENT',
      });

      if (!result.success) {
        throw new Error(result.error);
      }

      console.log('[Voice] Call created, joinUrl:', result.data.joinUrl);

      // Join the call using the hook's joinCall
      await joinCall(result.data.joinUrl);
      console.log('[Voice] Joined call');
    } catch (err) {
      console.error('[Voice] Failed to start call:', err);
      setError(err instanceof Error ? err.message : 'Failed to start call');
      setCallState('error');
    }
  }, [joinCall]);

  // End voice call
  const endCall = useCallback(async () => {
    setCallState('ending');
    try {
      await leaveCall();
    } catch (err) {
      console.error('[Voice] Error leaving call:', err);
    }
    setCallState('idle');
  }, [leaveCall]);

  // Toggle mute
  const toggleMute = useCallback(() => {
    if (session) {
      const newMuted = !isMuted;
      if (newMuted) {
        session.muteMic();
      } else {
        session.unmuteMic();
      }
      setIsMuted(newMuted);
    }
  }, [session, isMuted]);

  // Track the latest call state in a ref so the unfocus cleanup below does not
  // re-register (and run its stale cleanup) on every state change, which would
  // end the call as soon as it transitions from 'connecting' to 'active'.
  const callStateRef = useRef<CallState>(callState);
  callStateRef.current = callState;

  // End call when leaving the screen (switching tabs)
  useFocusEffect(
    useCallback(() => {
      // Screen focused - do nothing special
      return () => {
        // Screen unfocused - end the call if active
        if (callStateRef.current === 'active' || callStateRef.current === 'connecting') {
          console.log('[Voice] Screen unfocused, ending call');
          leaveCall().catch(console.error);
          setCallState('idle');
        }
      };
    }, [leaveCall])
  );

  // Get last transcript for display
  const lastTranscript: Transcript | undefined = transcripts[transcripts.length - 1];

  // Render voice button based on state
  // (icon names here and in the JSX below are representative choices)
  const renderVoiceButton = () => {
    const spin = rotateAnim.interpolate({
      inputRange: [0, 1],
      outputRange: ['0deg', '360deg'],
    });

    switch (callState) {
      case 'connecting':
        return (
          <View style={[styles.voiceButton, styles.voiceButtonConnecting]}>
            <Animated.View
              style={[styles.voiceButtonInner, { transform: [{ rotate: spin }] }]}
            >
              <Ionicons name="sync" size={48} color={AppColors.white} />
            </Animated.View>
          </View>
        );
      case 'active':
        return (
          <Animated.View style={{ transform: [{ scale: pulseAnim }] }}>
            <TouchableOpacity
              style={[styles.voiceButton, styles.voiceButtonActive]}
              onPress={endCall}
            >
              <Ionicons name="call" size={48} color={AppColors.white} />
            </TouchableOpacity>
          </Animated.View>
        );
      case 'ending':
        return (
          <View style={[styles.voiceButton, styles.voiceButtonEnding]}>
            <ActivityIndicator size="large" color={AppColors.white} />
          </View>
        );
      case 'error':
        return (
          <TouchableOpacity
            style={[styles.voiceButton, styles.voiceButtonError]}
            onPress={startCall}
          >
            <Ionicons name="refresh" size={48} color={AppColors.white} />
          </TouchableOpacity>
        );
      default: // idle
        return (
          <TouchableOpacity
            style={[styles.voiceButton, styles.voiceButtonIdle]}
            onPress={startCall}
          >
            <Ionicons name="mic" size={48} color={AppColors.white} />
          </TouchableOpacity>
        );
    }
  };

  return (
    <SafeAreaView style={styles.container}>
      {/* Header */}
      <View style={styles.header}>
        <TouchableOpacity
          style={styles.backButton}
          onPress={() => router.push('/(tabs)/dashboard')}
        >
          <Ionicons name="arrow-back" size={24} color={AppColors.textPrimary} />
        </TouchableOpacity>
        <View style={styles.headerCenter}>
          <Text style={styles.headerTitle}>Julia AI</Text>
          <Text style={styles.headerSubtitle}>
            {callState === 'active'
              ? 'In call'
              : callState === 'connecting'
                ? 'Connecting...'
                : `Voice: ${VOICE_NAME}`}
          </Text>
        </View>
        <View style={styles.headerRight}>
          {callState === 'active' && (
            <TouchableOpacity style={styles.muteButton} onPress={toggleMute}>
              <Feather
                name={isMuted ? 'mic-off' : 'mic'}
                size={22}
                color={AppColors.textPrimary}
              />
            </TouchableOpacity>
          )}
        </View>
      </View>

      {/* Main content */}
      <View style={styles.content}>
        {/* Avatar and status */}
        <View style={styles.avatarSection}>
          <View style={styles.avatarContainer}>
            <View style={styles.avatar}>
              <Text style={styles.avatarText}>J</Text>
            </View>
            {callState === 'active' && (
              <View style={styles.statusDot} />
            )}
          </View>
          <Text style={styles.assistantName}>Julia</Text>
          <Text style={styles.assistantRole}>
            Ferdinand Zmrzli's Wellness Assistant
          </Text>
        </View>

        {/* Transcript display */}
        {lastTranscript && callState === 'active' && (
          <View style={styles.transcriptContainer}>
            <Text style={styles.transcriptLabel}>
              {lastTranscript.speaker === 'agent' ? 'Julia' : 'You'}:
            </Text>
            <Text style={styles.transcriptText}>{lastTranscript.text}</Text>
          </View>
        )}

        {/* Error display */}
        {error && (
          <View style={styles.errorContainer}>
            <Ionicons name="alert-circle" size={20} color={AppColors.error} />
            <Text style={styles.errorText}>{error}</Text>
          </View>
        )}

        {/* Voice button */}
        <View style={styles.buttonSection}>
          {renderVoiceButton()}
          <Text style={styles.buttonHint}>
            {callState === 'idle' && 'Tap to start voice call'}
            {callState === 'connecting' && 'Connecting...'}
            {callState === 'active' && 'Tap to end call'}
            {callState === 'ending' && 'Ending call...'}
            {callState === 'error' && 'Tap to retry'}
          </Text>
        </View>

        {/* Info text */}
        {callState === 'idle' && (
          <View style={styles.infoContainer}>
            <Text style={styles.infoText}>
              Ask Julia about Ferdinand's wellness status, alerts, or say "show me
              the dashboard" to navigate.
            </Text>
          </View>
        )}
      </View>
    </SafeAreaView>
  );
}

const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: AppColors.background,
  },
  header: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'space-between',
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.sm,
    borderBottomWidth: 1,
    borderBottomColor: AppColors.border,
  },
  backButton: {
    padding: Spacing.xs,
  },
  headerCenter: {
    alignItems: 'center',
  },
  headerTitle: {
    fontSize: FontSizes.lg,
    fontWeight: '600',
    color: AppColors.textPrimary,
  },
  headerSubtitle: {
    fontSize: FontSizes.sm,
    color: AppColors.success,
    marginTop: 2,
  },
  headerRight: {
    width: 44,
    alignItems: 'flex-end',
  },
  muteButton: {
    padding: Spacing.xs,
  },
  content: {
    flex: 1,
    alignItems: 'center',
    justifyContent: 'space-between',
    paddingVertical: Spacing.xl,
  },
  avatarSection: {
    alignItems: 'center',
    paddingTop: Spacing.xl,
  },
  avatarContainer: {
    position: 'relative',
  },
  avatar: {
    width: 120,
    height: 120,
    borderRadius: 60,
    backgroundColor: AppColors.success,
    justifyContent: 'center',
    alignItems: 'center',
  },
  avatarText: {
    fontSize: 48,
    fontWeight: '600',
    color: AppColors.white,
  },
  statusDot: {
    position: 'absolute',
    bottom: 8,
    right: 8,
    width: 24,
    height: 24,
    borderRadius: 12,
    backgroundColor: AppColors.success,
    borderWidth: 3,
    borderColor: AppColors.background,
  },
  assistantName: {
    fontSize: FontSizes.xxl,
    fontWeight: '700',
    color: AppColors.textPrimary,
    marginTop: Spacing.md,
  },
  assistantRole: {
    fontSize: FontSizes.base,
    color: AppColors.textSecondary,
    marginTop: Spacing.xs,
  },
  transcriptContainer: {
    backgroundColor: AppColors.surface,
    borderRadius: BorderRadius.lg,
    padding: Spacing.md,
    marginHorizontal: Spacing.lg,
    maxWidth: '90%',
  },
  transcriptLabel: {
    fontSize: FontSizes.sm,
    fontWeight: '600',
    color: AppColors.primary,
    marginBottom: Spacing.xs,
  },
  transcriptText: {
    fontSize: FontSizes.base,
    color: AppColors.textPrimary,
    lineHeight: 22,
  },
  errorContainer: {
    flexDirection: 'row',
    alignItems: 'center',
    backgroundColor: 'rgba(229, 57, 53, 0.1)',
    borderRadius: BorderRadius.md,
    padding: Spacing.md,
    marginHorizontal: Spacing.lg,
  },
  errorText: {
    fontSize: FontSizes.sm,
    color: AppColors.error,
    marginLeft: Spacing.sm,
    flex: 1,
  },
  buttonSection: {
    alignItems: 'center',
  },
  voiceButton: {
    width: 120,
    height: 120,
    borderRadius: 60,
    justifyContent: 'center',
    alignItems: 'center',
    shadowColor: '#000',
    shadowOffset: { width: 0, height: 4 },
    shadowOpacity: 0.3,
    shadowRadius: 8,
    elevation: 8,
  },
  voiceButtonInner: {
    width: '100%',
    height: '100%',
    justifyContent: 'center',
    alignItems: 'center',
  },
  voiceButtonIdle: {
    backgroundColor: AppColors.primary,
  },
  voiceButtonConnecting: {
    backgroundColor: AppColors.warning || '#FF9800',
  },
  voiceButtonActive: {
    backgroundColor: AppColors.success,
  },
  voiceButtonEnding: {
    backgroundColor: AppColors.textMuted,
  },
  voiceButtonError: {
    backgroundColor: AppColors.error,
  },
  buttonHint: {
    fontSize: FontSizes.sm,
    color: AppColors.textSecondary,
    marginTop: Spacing.md,
  },
  infoContainer: {
    paddingHorizontal: Spacing.xl,
    paddingBottom: Spacing.lg,
  },
  infoText: {
    fontSize: FontSizes.sm,
    color: AppColors.textMuted,
    textAlign: 'center',
    lineHeight: 20,
  },
});
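
/*
 * Reference sketch (assumption, not part of this screen): the contract that
 * '@/services/ultravoxService' appears to expose, inferred only from how
 * createCall, getSystemPrompt and VOICE_NAME are used above. The env var name,
 * backend route, prompt text and VOICE_NAME value are hypothetical placeholders;
 * check the real service module before relying on any of this.
 *
 *   // services/ultravoxService.ts (illustrative)
 *   const BACKEND_URL = process.env.EXPO_PUBLIC_API_URL ?? ''; // hypothetical env var
 *
 *   export const VOICE_NAME = 'Julia'; // placeholder - the actual Ultravox voice id is not recoverable here
 *
 *   export interface CreateCallParams {
 *     systemPrompt: string;
 *     firstSpeaker: 'FIRST_SPEAKER_AGENT' | 'FIRST_SPEAKER_USER';
 *   }
 *
 *   export type CreateCallResult =
 *     | { success: true; data: { joinUrl: string } }
 *     | { success: false; error: string };
 *
 *   export function getSystemPrompt(): string {
 *     // Assumed to embed Ferdinand's wellness context into Julia's instructions.
 *     return 'You are Julia, the wellness assistant for Ferdinand Zmrzli.';
 *   }
 *
 *   export async function createCall(params: CreateCallParams): Promise<CreateCallResult> {
 *     // Assumed to be proxied through a backend so the Ultravox API key never ships with the app.
 *     const res = await fetch(`${BACKEND_URL}/ultravox/calls`, {
 *       method: 'POST',
 *       headers: { 'Content-Type': 'application/json' },
 *       body: JSON.stringify(params),
 *     });
 *     if (!res.ok) {
 *       return { success: false, error: `Call creation failed (${res.status})` };
 *     }
 *     const data = await res.json();
 *     return { success: true, data: { joinUrl: data.joinUrl } };
 *   }
 */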