Changes:
- Voice agent now extracts `deploymentId` and `beneficiaryNamesDict` from participant metadata passed via the LiveKit token.
- `WellNuoLLM` class accepts a dynamic `deployment_id` and `beneficiary_names_dict`.
- API calls now include personalized beneficiary names for better responses.
- Text chat already has this functionality (verified).
- Updated LiveKit agent deployed to the cloud.

Also includes:
- Speaker toggle button in the voice call UI.
- Keyboard controller integration for chat.
- Various UI improvements.
512 lines · 14 KiB · TypeScript
/**
 * Voice Call Screen - Fullscreen LiveKit Voice Call with Julia AI
 *
 * ARCHITECTURE:
 * - ALL LiveKit/WebRTC logic is in useLiveKitRoom hook
 * - This component ONLY handles UI rendering
 * - No direct LiveKit imports here!
 *
 * Features:
 * - Phone call-like UI with Julia avatar
 * - Call duration timer
 * - Mute/unmute and speaker toggle
 * - Proper cleanup on unmount
 */
|
|
|
|
import React, { useEffect, useRef, useState, useMemo } from 'react';
|
|
import { View, Text, StyleSheet, TouchableOpacity, Animated, Easing, Dimensions } from 'react-native';
|
|
import { Ionicons } from '@expo/vector-icons';
|
|
import { SafeAreaView } from 'react-native-safe-area-context';
|
|
import { useRouter } from 'expo-router';
|
|
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
|
|
import { useVoiceTranscript } from '@/contexts/VoiceTranscriptContext';
|
|
import { useBeneficiary } from '@/contexts/BeneficiaryContext';
|
|
import { useLiveKitRoom, ConnectionState } from '@/hooks/useLiveKitRoom';
|
|
import { setAudioOutput } from '@/utils/audioSession';
|
|
import { api } from '@/services/api';
|
|
import type { Beneficiary } from '@/types';
|
|
import type { BeneficiaryData } from '@/services/livekitService';
|
|
|
|
const { width: SCREEN_WIDTH } = Dimensions.get('window');
|
|
|
|
export default function VoiceCallScreen() {
|
|
const router = useRouter();
|
|
const { clearTranscript, addTranscriptEntry } = useVoiceTranscript();
|
|
const { currentBeneficiary } = useBeneficiary();
|
|
|
|
// Speaker toggle state (default: speaker ON)
|
|
const [isSpeakerOn, setIsSpeakerOn] = useState(true);
|
|
|
|
// Beneficiary state for building beneficiaryData
|
|
const [beneficiaries, setBeneficiaries] = useState<Beneficiary[]>([]);
|
|
|
|
// Load beneficiaries on mount
|
|
useEffect(() => {
|
|
const loadBeneficiaries = async () => {
|
|
try {
|
|
const data = await api.getAllBeneficiaries();
|
|
if (data) {
|
|
setBeneficiaries(data);
|
|
}
|
|
} catch (error) {
|
|
console.warn('[VoiceCall] Failed to load beneficiaries:', error);
|
|
}
|
|
};
|
|
loadBeneficiaries();
|
|
}, []);
|
|
|
|
// Build beneficiaryData for voice agent
|
|
const beneficiaryData = useMemo((): BeneficiaryData | undefined => {
|
|
if (beneficiaries.length === 0) {
|
|
return undefined;
|
|
}
|
|
|
|
// Build beneficiary_names_dict from all beneficiaries
|
|
// Format: {"21": "papa", "69": "David"}
|
|
const beneficiaryNamesDict: Record<string, string> = {};
|
|
beneficiaries.forEach(b => {
|
|
beneficiaryNamesDict[b.id.toString()] = b.name;
|
|
});
|
|
|
|
// Get deployment_id from current beneficiary or fallback to first one
|
|
const deploymentId = currentBeneficiary?.id?.toString() || beneficiaries[0]?.id?.toString() || '21';
|
|
|
|
console.log('[VoiceCall] BeneficiaryData:', { deploymentId, beneficiaryNamesDict });
|
|
|
|
return {
|
|
deploymentId,
|
|
beneficiaryNamesDict,
|
|
};
|
|
}, [beneficiaries, currentBeneficiary]);
|
|
|
|
// LiveKit hook - ALL logic is here
|
|
const {
|
|
state,
|
|
error,
|
|
callDuration,
|
|
isMuted,
|
|
isAgentSpeaking,
|
|
canPlayAudio,
|
|
participantCount,
|
|
connect,
|
|
disconnect,
|
|
toggleMute,
|
|
} = useLiveKitRoom({
|
|
userId: `user-${Date.now()}`,
|
|
beneficiaryData,
|
|
onTranscript: (role, text) => {
|
|
addTranscriptEntry(role, text);
|
|
},
|
|
});
|
|
|
|
// Animations
|
|
const pulseAnim = useRef(new Animated.Value(1)).current;
|
|
const rotateAnim = useRef(new Animated.Value(0)).current;
|
|
const avatarScale = useRef(new Animated.Value(0.8)).current;
|
|
|
|
// Clear transcript and start call on mount
|
|
useEffect(() => {
|
|
clearTranscript();
|
|
connect();
|
|
|
|
return () => {
|
|
// Cleanup handled by the hook
|
|
};
|
|
}, []);
|
|
|
|
// Navigate back on disconnect or error
|
|
useEffect(() => {
|
|
if (state === 'disconnected' || state === 'error') {
|
|
const timeout = setTimeout(() => {
|
|
router.back();
|
|
}, state === 'error' ? 2000 : 500);
|
|
return () => clearTimeout(timeout);
|
|
}
|
|
}, [state, router]);
|
|
|
|
// Pulse animation for active call
|
|
useEffect(() => {
|
|
if (state === 'connected') {
|
|
const pulse = Animated.loop(
|
|
Animated.sequence([
|
|
Animated.timing(pulseAnim, {
|
|
toValue: 1.1,
|
|
duration: 1500,
|
|
easing: Easing.inOut(Easing.ease),
|
|
useNativeDriver: true,
|
|
}),
|
|
Animated.timing(pulseAnim, {
|
|
toValue: 1,
|
|
duration: 1500,
|
|
easing: Easing.inOut(Easing.ease),
|
|
useNativeDriver: true,
|
|
}),
|
|
])
|
|
);
|
|
pulse.start();
|
|
|
|
// Avatar entrance animation
|
|
Animated.spring(avatarScale, {
|
|
toValue: 1,
|
|
friction: 8,
|
|
tension: 40,
|
|
useNativeDriver: true,
|
|
}).start();
|
|
|
|
return () => pulse.stop();
|
|
}
|
|
}, [state, pulseAnim, avatarScale]);
|
|
|
|
// Rotate animation for connecting states
|
|
useEffect(() => {
|
|
const connectingStates: ConnectionState[] = [
|
|
'initializing',
|
|
'configuring_audio',
|
|
'requesting_token',
|
|
'connecting',
|
|
'reconnecting',
|
|
];
|
|
|
|
if (connectingStates.includes(state)) {
|
|
const rotate = Animated.loop(
|
|
Animated.timing(rotateAnim, {
|
|
toValue: 1,
|
|
duration: 2000,
|
|
easing: Easing.linear,
|
|
useNativeDriver: true,
|
|
})
|
|
);
|
|
rotate.start();
|
|
return () => rotate.stop();
|
|
} else {
|
|
rotateAnim.setValue(0);
|
|
}
|
|
}, [state, rotateAnim]);
|
|
|
|
// End call handler
|
|
const handleEndCall = async () => {
|
|
await disconnect();
|
|
router.back();
|
|
};
|
|
|
|
// Toggle speaker/earpiece
|
|
const handleToggleSpeaker = async () => {
|
|
const newSpeakerState = !isSpeakerOn;
|
|
setIsSpeakerOn(newSpeakerState);
|
|
await setAudioOutput(newSpeakerState);
|
|
};
|
|
|
|
// Format duration as MM:SS
|
|
const formatDuration = (seconds: number): string => {
|
|
const mins = Math.floor(seconds / 60);
|
|
const secs = seconds % 60;
|
|
return `${mins}:${secs.toString().padStart(2, '0')}`;
|
|
};
|
|
|
|
// Get status text based on state
|
|
const getStatusText = (): string => {
|
|
switch (state) {
|
|
case 'idle':
|
|
return 'Starting...';
|
|
case 'initializing':
|
|
return 'Initializing...';
|
|
case 'configuring_audio':
|
|
return 'Configuring audio...';
|
|
case 'requesting_token':
|
|
return 'Requesting token...';
|
|
case 'connecting':
|
|
return 'Connecting...';
|
|
case 'connected':
|
|
if (isAgentSpeaking) return 'Julia is speaking...';
|
|
if (!canPlayAudio) return 'Waiting for audio...';
|
|
return 'Connected';
|
|
case 'reconnecting':
|
|
return 'Reconnecting...';
|
|
case 'disconnected':
|
|
return 'Disconnected';
|
|
case 'error':
|
|
return error || 'Error occurred';
|
|
default:
|
|
return 'Unknown state';
|
|
}
|
|
};
|
|
|
|
// Is call currently connecting?
|
|
const isConnecting = [
|
|
'idle',
|
|
'initializing',
|
|
'configuring_audio',
|
|
'requesting_token',
|
|
'connecting',
|
|
].includes(state);
|
|
|
|
// Is call active?
|
|
const isActive = state === 'connected';
|
|
|
|
// Rotation interpolation
|
|
const spin = rotateAnim.interpolate({
|
|
inputRange: [0, 1],
|
|
outputRange: ['0deg', '360deg'],
|
|
});
|
|
|
|
return (
|
|
<SafeAreaView style={styles.container} edges={['top', 'bottom']}>
|
|
{/* Background gradient effect */}
|
|
<View style={styles.backgroundGradient} />
|
|
|
|
{/* Top bar - minimal */}
|
|
<View style={styles.topBar}>
|
|
<TouchableOpacity style={styles.backButton} onPress={handleEndCall}>
|
|
<Ionicons name="chevron-down" size={28} color={AppColors.white} />
|
|
</TouchableOpacity>
|
|
<View style={styles.topBarCenter} />
|
|
<View style={styles.backButton} />
|
|
</View>
|
|
|
|
{/* Main content */}
|
|
<View style={styles.content}>
|
|
{/* Avatar */}
|
|
<Animated.View
|
|
style={[
|
|
styles.avatarContainer,
|
|
{
|
|
transform: [
|
|
{ scale: isActive ? pulseAnim : avatarScale },
|
|
{ rotate: isConnecting ? spin : '0deg' },
|
|
],
|
|
},
|
|
]}
|
|
>
|
|
<View style={styles.avatar}>
|
|
<Text style={styles.avatarText}>J</Text>
|
|
</View>
|
|
{isActive && <View style={styles.activeIndicator} />}
|
|
</Animated.View>
|
|
|
|
{/* Name and status */}
|
|
<Text style={styles.name}>Julia AI</Text>
|
|
|
|
{isActive ? (
|
|
<View style={styles.statusContainer}>
|
|
<View style={styles.activeDot} />
|
|
<Text style={styles.duration}>{formatDuration(callDuration)}</Text>
|
|
</View>
|
|
) : (
|
|
<Text style={styles.status}>{getStatusText()}</Text>
|
|
)}
|
|
|
|
{/* Additional status info */}
|
|
{isActive && (
|
|
<Text style={styles.listeningStatus}>
|
|
{getStatusText()}
|
|
{participantCount > 1 && ` • ${participantCount} participants`}
|
|
</Text>
|
|
)}
|
|
|
|
{/* Error display */}
|
|
{state === 'error' && error && (
|
|
<View style={styles.errorContainer}>
|
|
<Ionicons name="alert-circle" size={20} color={AppColors.error} />
|
|
<Text style={styles.errorText}>{error}</Text>
|
|
</View>
|
|
)}
|
|
</View>
|
|
|
|
|
|
{/* Bottom controls - centered layout with 3 buttons */}
|
|
<View style={styles.controls}>
|
|
{/* Mute button */}
|
|
<TouchableOpacity
|
|
style={[styles.controlButton, isMuted && styles.controlButtonActive]}
|
|
onPress={toggleMute}
|
|
disabled={!isActive}
|
|
>
|
|
<Ionicons
|
|
name={isMuted ? 'mic-off' : 'mic'}
|
|
size={28}
|
|
color={isMuted ? AppColors.error : AppColors.white}
|
|
/>
|
|
<Text style={styles.controlLabel}>{isMuted ? 'Unmute' : 'Mute'}</Text>
|
|
</TouchableOpacity>
|
|
|
|
{/* Speaker toggle button */}
|
|
<TouchableOpacity
|
|
style={[styles.controlButton, isSpeakerOn && styles.controlButtonActive]}
|
|
onPress={handleToggleSpeaker}
|
|
disabled={!isActive}
|
|
>
|
|
<Ionicons
|
|
name={isSpeakerOn ? 'volume-high' : 'ear'}
|
|
size={28}
|
|
color={isSpeakerOn ? AppColors.success : AppColors.white}
|
|
/>
|
|
<Text style={styles.controlLabel}>{isSpeakerOn ? 'Speaker' : 'Earpiece'}</Text>
|
|
</TouchableOpacity>
|
|
|
|
{/* End call button */}
|
|
<TouchableOpacity style={styles.endCallButton} onPress={handleEndCall}>
|
|
<Ionicons name="call" size={32} color={AppColors.white} />
|
|
</TouchableOpacity>
|
|
</View>
|
|
</SafeAreaView>
|
|
);
|
|
}
|
|
|
|
// Static styles for the voice-call screen. Dark "night call" palette
// (#1a1a2e backdrop, #16213e accent) with theme tokens for spacing/typography.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: '#1a1a2e',
  },
  // Fake radial gradient: a half-height view stretched 2x horizontally with
  // huge bottom radii so its lower edge reads as a smooth curve.
  backgroundGradient: {
    position: 'absolute',
    top: 0,
    left: 0,
    right: 0,
    height: '50%',
    backgroundColor: '#16213e',
    borderBottomLeftRadius: SCREEN_WIDTH,
    borderBottomRightRadius: SCREEN_WIDTH,
    transform: [{ scaleX: 2 }],
  },
  topBar: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'space-between',
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.sm,
  },
  // 44x44 hit target; also reused as an invisible spacer on the right
  backButton: {
    width: 44,
    height: 44,
    justifyContent: 'center',
    alignItems: 'center',
  },
  topBarCenter: {
    flex: 1,
    alignItems: 'center',
  },
  content: {
    flex: 1,
    alignItems: 'center',
    justifyContent: 'center',
    paddingBottom: 100,
  },
  // Wrapper matches the avatar size so scale/rotate transforms pivot on center
  avatarContainer: {
    width: 150,
    height: 150,
    marginBottom: Spacing.xl,
  },
  // Circular avatar with a soft glow (shadow* for iOS, elevation for Android)
  avatar: {
    width: 150,
    height: 150,
    borderRadius: 75,
    backgroundColor: AppColors.success,
    justifyContent: 'center',
    alignItems: 'center',
    shadowColor: AppColors.success,
    shadowOffset: { width: 0, height: 0 },
    shadowOpacity: 0.5,
    shadowRadius: 20,
    elevation: 10,
  },
  avatarText: {
    fontSize: 64,
    fontWeight: '600',
    color: AppColors.white,
  },
  // Small "online" badge pinned to the avatar's lower-right; border matches
  // the screen background so it appears punched out of the circle
  activeIndicator: {
    position: 'absolute',
    bottom: 10,
    right: 10,
    width: 24,
    height: 24,
    borderRadius: 12,
    backgroundColor: AppColors.success,
    borderWidth: 3,
    borderColor: '#1a1a2e',
  },
  name: {
    fontSize: 32,
    fontWeight: '700',
    color: AppColors.white,
    marginBottom: Spacing.xs,
  },
  statusContainer: {
    flexDirection: 'row',
    alignItems: 'center',
  },
  activeDot: {
    width: 8,
    height: 8,
    borderRadius: 4,
    backgroundColor: AppColors.success,
    marginRight: Spacing.sm,
  },
  // tabular-nums keeps the timer from jittering as digits change
  duration: {
    fontSize: FontSizes.lg,
    color: AppColors.white,
    fontVariant: ['tabular-nums'],
  },
  status: {
    fontSize: FontSizes.base,
    color: 'rgba(255,255,255,0.7)',
  },
  listeningStatus: {
    fontSize: FontSizes.sm,
    color: 'rgba(255,255,255,0.5)',
    marginTop: Spacing.md,
    fontStyle: 'italic',
  },
  errorContainer: {
    flexDirection: 'row',
    alignItems: 'center',
    marginTop: Spacing.md,
    paddingHorizontal: Spacing.lg,
  },
  errorText: {
    fontSize: FontSizes.sm,
    color: AppColors.error,
    marginLeft: Spacing.sm,
    flex: 1,
  },
  controls: {
    flexDirection: 'row',
    justifyContent: 'center',
    alignItems: 'center',
    paddingVertical: Spacing.xl,
    paddingHorizontal: Spacing.lg,
    gap: 24, // Space between 3 buttons (Mute, Speaker, End Call)
  },
  controlButton: {
    alignItems: 'center',
    padding: Spacing.md,
    borderRadius: BorderRadius.full,
    backgroundColor: 'rgba(255,255,255,0.1)',
    width: 70,
    height: 70,
    justifyContent: 'center',
  },
  controlButtonActive: {
    backgroundColor: 'rgba(255,255,255,0.2)',
  },
  controlLabel: {
    fontSize: FontSizes.xs,
    color: AppColors.white,
    marginTop: 4,
  },
  // Red hang-up circle; the 135deg rotation flips the phone icon "downward"
  endCallButton: {
    width: 72,
    height: 72,
    borderRadius: 36,
    backgroundColor: AppColors.error,
    justifyContent: 'center',
    alignItems: 'center',
    transform: [{ rotate: '135deg' }],
    shadowColor: AppColors.error,
    shadowOffset: { width: 0, height: 4 },
    shadowOpacity: 0.4,
    shadowRadius: 8,
    elevation: 8,
  },
});
|