Chat screen now supports both: - Text messaging (keyboard input) - High-quality Ultravox voice calls (WebRTC) Features: - Voice call button in input bar (phone icon) - Green status bar when call is active - Transcripts from voice calls appear in chat history - Voice badge on messages from voice conversation - Mute button during calls - Auto-end call when leaving screen Background audio configured for iOS (audio, voip modes)
516 lines
14 KiB
TypeScript
/**
 * Voice Screen - Ultravox Voice AI Integration
 *
 * Real-time voice conversation with Julia AI using WebRTC.
 * Ferdinand context is automatically loaded.
 */
|
|
|
|
import React, { useState, useCallback, useRef, useEffect } from 'react';
|
|
import {
|
|
View,
|
|
Text,
|
|
StyleSheet,
|
|
TouchableOpacity,
|
|
ActivityIndicator,
|
|
Animated,
|
|
Easing,
|
|
} from 'react-native';
|
|
import { SafeAreaView } from 'react-native-safe-area-context';
|
|
import { Ionicons, Feather } from '@expo/vector-icons';
|
|
import { useRouter } from 'expo-router';
|
|
import { useFocusEffect } from '@react-navigation/native';
|
|
import {
|
|
useUltravox,
|
|
UltravoxSessionStatus,
|
|
type Transcript,
|
|
} from 'ultravox-react-native';
|
|
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
|
|
import {
|
|
createCall,
|
|
getSystemPrompt,
|
|
VOICE_NAME,
|
|
} from '@/services/ultravoxService';
|
|
|
|
type CallState = 'idle' | 'connecting' | 'active' | 'ending' | 'error';
|
|
|
|
export default function VoiceScreen() {
|
|
const router = useRouter();
|
|
|
|
// Call state
|
|
const [callState, setCallState] = useState<CallState>('idle');
|
|
const [error, setError] = useState<string | null>(null);
|
|
const [isMuted, setIsMuted] = useState(false);
|
|
|
|
// Animation for the voice button
|
|
const pulseAnim = useRef(new Animated.Value(1)).current;
|
|
const rotateAnim = useRef(new Animated.Value(0)).current;
|
|
|
|
// Tool implementations for navigation (client-side)
|
|
const toolImplementations = {
|
|
navigateToDashboard: () => {
|
|
console.log('[Voice] Tool: navigateToDashboard');
|
|
router.push('/(tabs)/dashboard');
|
|
return 'Navigating to Dashboard';
|
|
},
|
|
navigateToBeneficiaries: () => {
|
|
console.log('[Voice] Tool: navigateToBeneficiaries');
|
|
router.push('/(tabs)/beneficiaries');
|
|
return 'Navigating to Beneficiaries';
|
|
},
|
|
navigateToProfile: () => {
|
|
console.log('[Voice] Tool: navigateToProfile');
|
|
router.push('/(tabs)/profile');
|
|
return 'Navigating to Profile';
|
|
},
|
|
};
|
|
|
|
// Ultravox hook - proper way to use the SDK
|
|
const { transcripts, joinCall, leaveCall, session } = useUltravox({
|
|
tools: toolImplementations,
|
|
onStatusChange: (event) => {
|
|
console.log('[Voice] Status changed:', event.status);
|
|
|
|
switch (event.status) {
|
|
case UltravoxSessionStatus.IDLE:
|
|
case UltravoxSessionStatus.DISCONNECTED:
|
|
setCallState('idle');
|
|
break;
|
|
case UltravoxSessionStatus.CONNECTING:
|
|
setCallState('connecting');
|
|
break;
|
|
case UltravoxSessionStatus.LISTENING:
|
|
case UltravoxSessionStatus.THINKING:
|
|
case UltravoxSessionStatus.SPEAKING:
|
|
setCallState('active');
|
|
break;
|
|
case UltravoxSessionStatus.DISCONNECTING:
|
|
setCallState('ending');
|
|
break;
|
|
}
|
|
},
|
|
});
|
|
|
|
// Pulse animation when active
|
|
useEffect(() => {
|
|
if (callState === 'active') {
|
|
const pulse = Animated.loop(
|
|
Animated.sequence([
|
|
Animated.timing(pulseAnim, {
|
|
toValue: 1.15,
|
|
duration: 1000,
|
|
easing: Easing.inOut(Easing.ease),
|
|
useNativeDriver: true,
|
|
}),
|
|
Animated.timing(pulseAnim, {
|
|
toValue: 1,
|
|
duration: 1000,
|
|
easing: Easing.inOut(Easing.ease),
|
|
useNativeDriver: true,
|
|
}),
|
|
])
|
|
);
|
|
pulse.start();
|
|
return () => pulse.stop();
|
|
} else {
|
|
pulseAnim.setValue(1);
|
|
}
|
|
}, [callState, pulseAnim]);
|
|
|
|
// Rotate animation when connecting
|
|
useEffect(() => {
|
|
if (callState === 'connecting') {
|
|
const rotate = Animated.loop(
|
|
Animated.timing(rotateAnim, {
|
|
toValue: 1,
|
|
duration: 1500,
|
|
easing: Easing.linear,
|
|
useNativeDriver: true,
|
|
})
|
|
);
|
|
rotate.start();
|
|
return () => rotate.stop();
|
|
} else {
|
|
rotateAnim.setValue(0);
|
|
}
|
|
}, [callState, rotateAnim]);
|
|
|
|
// Start voice call
|
|
const startCall = useCallback(async () => {
|
|
setError(null);
|
|
setCallState('connecting');
|
|
|
|
// Get system prompt with Ferdinand context
|
|
const systemPrompt = getSystemPrompt();
|
|
|
|
try {
|
|
// Create call via API
|
|
const result = await createCall({
|
|
systemPrompt,
|
|
firstSpeaker: 'FIRST_SPEAKER_AGENT',
|
|
});
|
|
|
|
if (!result.success) {
|
|
throw new Error(result.error);
|
|
}
|
|
|
|
console.log('[Voice] Call created, joinUrl:', result.data.joinUrl);
|
|
|
|
// Join the call using the hook's joinCall
|
|
await joinCall(result.data.joinUrl);
|
|
console.log('[Voice] Joined call');
|
|
|
|
} catch (err) {
|
|
console.error('[Voice] Failed to start call:', err);
|
|
setError(err instanceof Error ? err.message : 'Failed to start call');
|
|
setCallState('error');
|
|
}
|
|
}, [joinCall]);
|
|
|
|
// End voice call
|
|
const endCall = useCallback(async () => {
|
|
setCallState('ending');
|
|
try {
|
|
await leaveCall();
|
|
} catch (err) {
|
|
console.error('[Voice] Error leaving call:', err);
|
|
}
|
|
setCallState('idle');
|
|
}, [leaveCall]);
|
|
|
|
// Toggle mute
|
|
const toggleMute = useCallback(() => {
|
|
if (session) {
|
|
const newMuted = !isMuted;
|
|
if (newMuted) {
|
|
session.muteMic();
|
|
} else {
|
|
session.unmuteMic();
|
|
}
|
|
setIsMuted(newMuted);
|
|
}
|
|
}, [session, isMuted]);
|
|
|
|
// End call when leaving the screen (switching tabs)
|
|
useFocusEffect(
|
|
useCallback(() => {
|
|
// Screen focused - do nothing special
|
|
return () => {
|
|
// Screen unfocused - end the call if active
|
|
if (callState === 'active' || callState === 'connecting') {
|
|
console.log('[Voice] Screen unfocused, ending call');
|
|
leaveCall().catch(console.error);
|
|
setCallState('idle');
|
|
}
|
|
};
|
|
}, [callState, leaveCall])
|
|
);
|
|
|
|
// Get last transcript for display
|
|
const lastTranscript = transcripts[transcripts.length - 1];
|
|
|
|
// Render voice button based on state
|
|
const renderVoiceButton = () => {
|
|
const spin = rotateAnim.interpolate({
|
|
inputRange: [0, 1],
|
|
outputRange: ['0deg', '360deg'],
|
|
});
|
|
|
|
switch (callState) {
|
|
case 'connecting':
|
|
return (
|
|
<Animated.View style={[styles.voiceButton, styles.voiceButtonConnecting, { transform: [{ rotate: spin }] }]}>
|
|
<Feather name="loader" size={48} color={AppColors.white} />
|
|
</Animated.View>
|
|
);
|
|
|
|
case 'active':
|
|
return (
|
|
<Animated.View style={[styles.voiceButton, styles.voiceButtonActive, { transform: [{ scale: pulseAnim }] }]}>
|
|
<TouchableOpacity onPress={endCall} style={styles.voiceButtonInner}>
|
|
<Ionicons name="call" size={48} color={AppColors.white} />
|
|
</TouchableOpacity>
|
|
</Animated.View>
|
|
);
|
|
|
|
case 'ending':
|
|
return (
|
|
<View style={[styles.voiceButton, styles.voiceButtonEnding]}>
|
|
<ActivityIndicator size="large" color={AppColors.white} />
|
|
</View>
|
|
);
|
|
|
|
case 'error':
|
|
return (
|
|
<TouchableOpacity style={[styles.voiceButton, styles.voiceButtonError]} onPress={startCall}>
|
|
<Ionicons name="refresh" size={48} color={AppColors.white} />
|
|
</TouchableOpacity>
|
|
);
|
|
|
|
default: // idle
|
|
return (
|
|
<TouchableOpacity style={[styles.voiceButton, styles.voiceButtonIdle]} onPress={startCall}>
|
|
<Ionicons name="mic" size={48} color={AppColors.white} />
|
|
</TouchableOpacity>
|
|
);
|
|
}
|
|
};
|
|
|
|
return (
|
|
<SafeAreaView style={styles.container} edges={['top']}>
|
|
{/* Header */}
|
|
<View style={styles.header}>
|
|
<TouchableOpacity
|
|
style={styles.backButton}
|
|
onPress={() => router.push('/(tabs)/dashboard')}
|
|
>
|
|
<Ionicons name="arrow-back" size={24} color={AppColors.textPrimary} />
|
|
</TouchableOpacity>
|
|
<View style={styles.headerCenter}>
|
|
<Text style={styles.headerTitle}>Julia AI</Text>
|
|
<Text style={styles.headerSubtitle}>
|
|
{callState === 'active' ? 'In call' : callState === 'connecting' ? 'Connecting...' : `Voice: ${VOICE_NAME}`}
|
|
</Text>
|
|
</View>
|
|
<View style={styles.headerRight}>
|
|
{callState === 'active' && (
|
|
<TouchableOpacity style={styles.muteButton} onPress={toggleMute}>
|
|
<Ionicons
|
|
name={isMuted ? 'mic-off' : 'mic'}
|
|
size={24}
|
|
color={isMuted ? AppColors.error : AppColors.textPrimary}
|
|
/>
|
|
</TouchableOpacity>
|
|
)}
|
|
</View>
|
|
</View>
|
|
|
|
{/* Main content */}
|
|
<View style={styles.content}>
|
|
{/* Avatar and status */}
|
|
<View style={styles.avatarSection}>
|
|
<View style={styles.avatarContainer}>
|
|
<View style={styles.avatar}>
|
|
<Text style={styles.avatarText}>J</Text>
|
|
</View>
|
|
{callState === 'active' && (
|
|
<View style={styles.statusDot} />
|
|
)}
|
|
</View>
|
|
<Text style={styles.assistantName}>Julia</Text>
|
|
<Text style={styles.assistantRole}>Ferdinand Zmrzli's Wellness Assistant</Text>
|
|
</View>
|
|
|
|
{/* Transcript display */}
|
|
{lastTranscript && callState === 'active' && (
|
|
<View style={styles.transcriptContainer}>
|
|
<Text style={styles.transcriptLabel}>
|
|
{lastTranscript.speaker === 'agent' ? 'Julia' : 'You'}:
|
|
</Text>
|
|
<Text style={styles.transcriptText} numberOfLines={3}>
|
|
{lastTranscript.text}
|
|
</Text>
|
|
</View>
|
|
)}
|
|
|
|
{/* Error display */}
|
|
{error && (
|
|
<View style={styles.errorContainer}>
|
|
<Ionicons name="alert-circle" size={24} color={AppColors.error} />
|
|
<Text style={styles.errorText}>{error}</Text>
|
|
</View>
|
|
)}
|
|
|
|
{/* Voice button */}
|
|
<View style={styles.buttonSection}>
|
|
{renderVoiceButton()}
|
|
<Text style={styles.buttonHint}>
|
|
{callState === 'idle' && 'Tap to start voice call'}
|
|
{callState === 'connecting' && 'Connecting...'}
|
|
{callState === 'active' && 'Tap to end call'}
|
|
{callState === 'ending' && 'Ending call...'}
|
|
{callState === 'error' && 'Tap to retry'}
|
|
</Text>
|
|
</View>
|
|
|
|
{/* Info text */}
|
|
{callState === 'idle' && (
|
|
<View style={styles.infoContainer}>
|
|
<Text style={styles.infoText}>
|
|
Ask Julia about Ferdinand's wellness status, alerts, or say "show me the dashboard" to navigate.
|
|
</Text>
|
|
</View>
|
|
)}
|
|
</View>
|
|
</SafeAreaView>
|
|
);
|
|
}
|
|
|
|
const styles = StyleSheet.create({
|
|
container: {
|
|
flex: 1,
|
|
backgroundColor: AppColors.background,
|
|
},
|
|
header: {
|
|
flexDirection: 'row',
|
|
alignItems: 'center',
|
|
justifyContent: 'space-between',
|
|
paddingHorizontal: Spacing.md,
|
|
paddingVertical: Spacing.sm,
|
|
borderBottomWidth: 1,
|
|
borderBottomColor: AppColors.border,
|
|
},
|
|
backButton: {
|
|
padding: Spacing.xs,
|
|
},
|
|
headerCenter: {
|
|
alignItems: 'center',
|
|
},
|
|
headerTitle: {
|
|
fontSize: FontSizes.lg,
|
|
fontWeight: '600',
|
|
color: AppColors.textPrimary,
|
|
},
|
|
headerSubtitle: {
|
|
fontSize: FontSizes.sm,
|
|
color: AppColors.success,
|
|
marginTop: 2,
|
|
},
|
|
headerRight: {
|
|
width: 44,
|
|
alignItems: 'flex-end',
|
|
},
|
|
muteButton: {
|
|
padding: Spacing.xs,
|
|
},
|
|
content: {
|
|
flex: 1,
|
|
alignItems: 'center',
|
|
justifyContent: 'space-between',
|
|
paddingVertical: Spacing.xl,
|
|
},
|
|
avatarSection: {
|
|
alignItems: 'center',
|
|
paddingTop: Spacing.xl,
|
|
},
|
|
avatarContainer: {
|
|
position: 'relative',
|
|
},
|
|
avatar: {
|
|
width: 120,
|
|
height: 120,
|
|
borderRadius: 60,
|
|
backgroundColor: AppColors.success,
|
|
justifyContent: 'center',
|
|
alignItems: 'center',
|
|
},
|
|
avatarText: {
|
|
fontSize: 48,
|
|
fontWeight: '600',
|
|
color: AppColors.white,
|
|
},
|
|
statusDot: {
|
|
position: 'absolute',
|
|
bottom: 8,
|
|
right: 8,
|
|
width: 24,
|
|
height: 24,
|
|
borderRadius: 12,
|
|
backgroundColor: AppColors.success,
|
|
borderWidth: 3,
|
|
borderColor: AppColors.background,
|
|
},
|
|
assistantName: {
|
|
fontSize: FontSizes.xxl,
|
|
fontWeight: '700',
|
|
color: AppColors.textPrimary,
|
|
marginTop: Spacing.md,
|
|
},
|
|
assistantRole: {
|
|
fontSize: FontSizes.base,
|
|
color: AppColors.textSecondary,
|
|
marginTop: Spacing.xs,
|
|
},
|
|
transcriptContainer: {
|
|
backgroundColor: AppColors.surface,
|
|
borderRadius: BorderRadius.lg,
|
|
padding: Spacing.md,
|
|
marginHorizontal: Spacing.lg,
|
|
maxWidth: '90%',
|
|
},
|
|
transcriptLabel: {
|
|
fontSize: FontSizes.sm,
|
|
fontWeight: '600',
|
|
color: AppColors.primary,
|
|
marginBottom: Spacing.xs,
|
|
},
|
|
transcriptText: {
|
|
fontSize: FontSizes.base,
|
|
color: AppColors.textPrimary,
|
|
lineHeight: 22,
|
|
},
|
|
errorContainer: {
|
|
flexDirection: 'row',
|
|
alignItems: 'center',
|
|
backgroundColor: 'rgba(229, 57, 53, 0.1)',
|
|
borderRadius: BorderRadius.md,
|
|
padding: Spacing.md,
|
|
marginHorizontal: Spacing.lg,
|
|
},
|
|
errorText: {
|
|
fontSize: FontSizes.sm,
|
|
color: AppColors.error,
|
|
marginLeft: Spacing.sm,
|
|
flex: 1,
|
|
},
|
|
buttonSection: {
|
|
alignItems: 'center',
|
|
},
|
|
voiceButton: {
|
|
width: 120,
|
|
height: 120,
|
|
borderRadius: 60,
|
|
justifyContent: 'center',
|
|
alignItems: 'center',
|
|
shadowColor: '#000',
|
|
shadowOffset: { width: 0, height: 4 },
|
|
shadowOpacity: 0.3,
|
|
shadowRadius: 8,
|
|
elevation: 8,
|
|
},
|
|
voiceButtonInner: {
|
|
width: '100%',
|
|
height: '100%',
|
|
justifyContent: 'center',
|
|
alignItems: 'center',
|
|
},
|
|
voiceButtonIdle: {
|
|
backgroundColor: AppColors.primary,
|
|
},
|
|
voiceButtonConnecting: {
|
|
backgroundColor: AppColors.warning || '#FF9800',
|
|
},
|
|
voiceButtonActive: {
|
|
backgroundColor: AppColors.success,
|
|
},
|
|
voiceButtonEnding: {
|
|
backgroundColor: AppColors.textMuted,
|
|
},
|
|
voiceButtonError: {
|
|
backgroundColor: AppColors.error,
|
|
},
|
|
buttonHint: {
|
|
fontSize: FontSizes.sm,
|
|
color: AppColors.textSecondary,
|
|
marginTop: Spacing.md,
|
|
},
|
|
infoContainer: {
|
|
paddingHorizontal: Spacing.xl,
|
|
paddingBottom: Spacing.lg,
|
|
},
|
|
infoText: {
|
|
fontSize: FontSizes.sm,
|
|
color: AppColors.textMuted,
|
|
textAlign: 'center',
|
|
lineHeight: 20,
|
|
},
|
|
});
|