wellnua-lite-Robert/app/voice-call.tsx
Sergei a578ec8081 feat: Pass Debug tab deployment ID to voice calls
- Add debugDeploymentId to BeneficiaryContext for sharing between screens
- Sync Debug tab's deploymentId state with global context
- voice-call.tsx now prioritizes debugDeploymentId when starting calls
- Enables testing voice calls with specific deployment IDs from Debug screen
2026-01-24 00:05:47 -08:00

/**
* Voice Call Screen - Fullscreen LiveKit Voice Call with Julia AI
*
* ARCHITECTURE:
* - ALL LiveKit/WebRTC logic is in useLiveKitRoom hook
* - This component ONLY handles UI rendering
* - No direct LiveKit imports here!
*
* Features:
* - Phone call-like UI with Julia avatar
* - Call duration timer
* - Mute/unmute toggle
* - Proper cleanup on unmount
*/
import React, { useEffect, useRef, useState, useMemo } from 'react';
import { View, Text, StyleSheet, TouchableOpacity, Animated, Easing, Dimensions } from 'react-native';
import { Ionicons } from '@expo/vector-icons';
import { SafeAreaView } from 'react-native-safe-area-context';
import { useRouter } from 'expo-router';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import { useVoiceTranscript } from '@/contexts/VoiceTranscriptContext';
import { useBeneficiary } from '@/contexts/BeneficiaryContext';
import { useLiveKitRoom, ConnectionState } from '@/hooks/useLiveKitRoom';
import { api } from '@/services/api';
import type { Beneficiary } from '@/types';
import type { BeneficiaryData } from '@/services/livekitService';
const { width: SCREEN_WIDTH } = Dimensions.get('window');
export default function VoiceCallScreen() {
const router = useRouter();
const { clearTranscript, addTranscriptEntry } = useVoiceTranscript();
const { currentBeneficiary, debugDeploymentId } = useBeneficiary();
// Beneficiary state for building beneficiaryData
const [beneficiaries, setBeneficiaries] = useState<Beneficiary[]>([]);
const [beneficiariesLoaded, setBeneficiariesLoaded] = useState(false);
// Load beneficiaries on mount
useEffect(() => {
const loadBeneficiaries = async () => {
try {
const response = await api.getAllBeneficiaries();
if (response.ok && response.data) {
setBeneficiaries(response.data);
console.log('[VoiceCall] Beneficiaries loaded:', response.data.length);
}
} catch (error) {
console.warn('[VoiceCall] Failed to load beneficiaries:', error);
} finally {
setBeneficiariesLoaded(true);
}
};
loadBeneficiaries();
}, []);
// Build beneficiaryData for voice agent
const beneficiaryData = useMemo((): BeneficiaryData | undefined => {
// PRIORITY 1: If debugDeploymentId is set (from Debug screen), use it
if (debugDeploymentId) {
console.log('[VoiceCall] Using DEBUG deployment ID:', debugDeploymentId);
return {
deploymentId: debugDeploymentId,
beneficiaryNamesDict: {},
};
}
// PRIORITY 2: Use beneficiaries from API
// Safety check - ensure beneficiaries is an array
if (!Array.isArray(beneficiaries) || beneficiaries.length === 0) {
console.log('[VoiceCall] No beneficiaries yet, skipping beneficiaryData');
return undefined;
}
try {
// Build beneficiary_names_dict from all beneficiaries
// Format: {"21": "papa", "69": "David"}
const beneficiaryNamesDict: Record<string, string> = {};
beneficiaries.forEach(b => {
// Safety: check that b exists and has id and name
if (b && b.id != null && b.name) {
beneficiaryNamesDict[String(b.id)] = b.name;
}
});
// Get deployment_id from the current beneficiary, or fall back to the first one
const deploymentId = currentBeneficiary?.id != null
? String(currentBeneficiary.id)
: beneficiaries[0]?.id != null
? String(beneficiaries[0].id)
: '21';
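// NOTE: '21' is a hardcoded last-resort fallback; assumed to be a known default deployment ID.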
console.log('[VoiceCall] BeneficiaryData:', { deploymentId, beneficiaryNamesDict });
return {
deploymentId,
beneficiaryNamesDict,
};
} catch (error) {
console.error('[VoiceCall] Error building beneficiaryData:', error);
return undefined;
}
}, [beneficiaries, currentBeneficiary, debugDeploymentId]);
// LiveKit hook - ALL logic is here
const {
state,
error,
callDuration,
isMuted,
isAgentSpeaking,
canPlayAudio,
participantCount,
connect,
disconnect,
toggleMute,
} = useLiveKitRoom({
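// NOTE: a fresh ephemeral user ID is generated on every render; assumed the hook only reads it when the connection is first established.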
userId: `user-${Date.now()}`,
beneficiaryData,
onTranscript: (role, text) => {
addTranscriptEntry(role, text);
},
});
// Animations
const pulseAnim = useRef(new Animated.Value(1)).current;
const rotateAnim = useRef(new Animated.Value(0)).current;
const avatarScale = useRef(new Animated.Value(0.8)).current;
// Clear transcript on mount
useEffect(() => {
clearTranscript();
}, []);
// Track if connect has been called to prevent duplicate calls
const connectCalledRef = useRef(false);
// Start call ONLY after beneficiaryData is ready
// IMPORTANT: We must wait for beneficiaryData to be populated!
// Without deploymentId, Julia AI agent won't know which beneficiary to talk about.
useEffect(() => {
// Prevent duplicate connect calls
if (connectCalledRef.current) return;
// If debugDeploymentId is set, connect immediately (don't wait for beneficiaries)
if (debugDeploymentId && beneficiaryData?.deploymentId) {
console.log('[VoiceCall] Starting call with DEBUG deploymentId:', debugDeploymentId);
connectCalledRef.current = true;
connect();
return;
}
// Otherwise, only connect when beneficiaries are loaded AND beneficiaryData is ready
if (beneficiariesLoaded && beneficiaryData?.deploymentId) {
console.log('[VoiceCall] Starting call with beneficiaryData:', JSON.stringify(beneficiaryData));
connectCalledRef.current = true;
connect();
} else if (beneficiariesLoaded) {
console.log('[VoiceCall] Waiting for beneficiaryData... Current state:', {
beneficiariesLoaded,
beneficiariesCount: beneficiaries.length,
beneficiaryData: beneficiaryData ? JSON.stringify(beneficiaryData) : 'undefined'
});
}
}, [beneficiariesLoaded, beneficiaryData, beneficiaries.length, connect, debugDeploymentId]);
// Fallback: if beneficiaryData doesn't arrive in 5 seconds, connect anyway
// This handles edge cases where API fails or user has no beneficiaries
useEffect(() => {
if (connectCalledRef.current) return;
const timeout = setTimeout(() => {
if (!connectCalledRef.current && beneficiariesLoaded) {
console.warn('[VoiceCall] Timeout: beneficiaryData not ready after 5s, connecting without it');
connectCalledRef.current = true;
connect();
}
}, 5000);
return () => clearTimeout(timeout);
}, [beneficiariesLoaded, connect]);
// Navigate back on disconnect or error
useEffect(() => {
if (state === 'disconnected' || state === 'error') {
const timeout = setTimeout(() => {
router.back();
}, state === 'error' ? 2000 : 500);
return () => clearTimeout(timeout);
}
}, [state, router]);
// Pulse animation for active call
useEffect(() => {
if (state === 'connected') {
const pulse = Animated.loop(
Animated.sequence([
Animated.timing(pulseAnim, {
toValue: 1.1,
duration: 1500,
easing: Easing.inOut(Easing.ease),
useNativeDriver: true,
}),
Animated.timing(pulseAnim, {
toValue: 1,
duration: 1500,
easing: Easing.inOut(Easing.ease),
useNativeDriver: true,
}),
])
);
pulse.start();
// Avatar entrance animation
Animated.spring(avatarScale, {
toValue: 1,
friction: 8,
tension: 40,
useNativeDriver: true,
}).start();
return () => pulse.stop();
}
}, [state, pulseAnim, avatarScale]);
// Rotate animation for connecting states
useEffect(() => {
const connectingStates: ConnectionState[] = [
'initializing',
'configuring_audio',
'requesting_token',
'connecting',
'reconnecting',
];
if (connectingStates.includes(state)) {
const rotate = Animated.loop(
Animated.timing(rotateAnim, {
toValue: 1,
duration: 2000,
easing: Easing.linear,
useNativeDriver: true,
})
);
rotate.start();
return () => rotate.stop();
} else {
rotateAnim.setValue(0);
}
}, [state, rotateAnim]);
// End call handler
const handleEndCall = async () => {
await disconnect();
router.back();
};
// Format duration as MM:SS
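// NOTE: calls of an hour or longer render as total minutes (e.g. 75:03); assumed acceptable for call lengths here.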
const formatDuration = (seconds: number): string => {
const mins = Math.floor(seconds / 60);
const secs = seconds % 60;
return `${mins}:${secs.toString().padStart(2, '0')}`;
};
// Get status text based on state
const getStatusText = (): string => {
switch (state) {
case 'idle':
return 'Starting...';
case 'initializing':
return 'Initializing...';
case 'configuring_audio':
return 'Configuring audio...';
case 'requesting_token':
return 'Requesting token...';
case 'connecting':
return 'Connecting...';
case 'connected':
if (isAgentSpeaking) return 'Julia is speaking...';
if (!canPlayAudio) return 'Waiting for audio...';
return 'Connected';
case 'reconnecting':
return 'Reconnecting...';
case 'disconnected':
return 'Disconnected';
case 'error':
return error || 'Error occurred';
default:
return 'Unknown state';
}
};
// Is call currently connecting?
const isConnecting = [
'idle',
'initializing',
'configuring_audio',
'requesting_token',
'connecting',
].includes(state);
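// NOTE: 'reconnecting' is excluded here, so the spin transform below is not applied during reconnects even though the rotation loop above runs; assumed intentional.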
// Is call active?
const isActive = state === 'connected';
// Rotation interpolation
const spin = rotateAnim.interpolate({
inputRange: [0, 1],
outputRange: ['0deg', '360deg'],
});
return (
<SafeAreaView style={styles.container} edges={['top', 'bottom']}>
{/* Background gradient effect */}
<View style={styles.backgroundGradient} />
{/* Top bar - minimal */}
<View style={styles.topBar}>
<TouchableOpacity style={styles.backButton} onPress={handleEndCall}>
<Ionicons name="chevron-down" size={28} color={AppColors.white} />
</TouchableOpacity>
<View style={styles.topBarCenter} />
<View style={styles.backButton} />
</View>
{/* Main content */}
<View style={styles.content}>
{/* Avatar */}
<Animated.View
style={[
styles.avatarContainer,
{
transform: [
{ scale: isActive ? pulseAnim : avatarScale },
{ rotate: isConnecting ? spin : '0deg' },
],
},
]}
>
<View style={styles.avatar}>
<Text style={styles.avatarText}>J</Text>
</View>
{isActive && <View style={styles.activeIndicator} />}
</Animated.View>
{/* Name and status */}
<Text style={styles.name}>Julia AI</Text>
{isActive ? (
<View style={styles.statusContainer}>
<View style={styles.activeDot} />
<Text style={styles.duration}>{formatDuration(callDuration)}</Text>
</View>
) : (
<Text style={styles.status}>{getStatusText()}</Text>
)}
{/* Additional status info */}
{isActive && (
<Text style={styles.listeningStatus}>
{getStatusText()}
{participantCount > 1 && ` · ${participantCount} participants`}
</Text>
)}
{/* Error display */}
{state === 'error' && error && (
<View style={styles.errorContainer}>
<Ionicons name="alert-circle" size={20} color={AppColors.error} />
<Text style={styles.errorText}>{error}</Text>
</View>
)}
</View>
{/* Bottom controls - centered layout with 2 buttons */}
<View style={styles.controls}>
{/* Mute button */}
<TouchableOpacity
style={[styles.controlButton, isMuted && styles.controlButtonActive]}
onPress={toggleMute}
disabled={!isActive}
>
<Ionicons
name={isMuted ? 'mic-off' : 'mic'}
size={28}
color={isMuted ? AppColors.error : AppColors.white}
/>
<Text style={styles.controlLabel}>{isMuted ? 'Unmute' : 'Mute'}</Text>
</TouchableOpacity>
{/* End call button */}
<TouchableOpacity style={styles.endCallButton} onPress={handleEndCall}>
<Ionicons name="call" size={32} color={AppColors.white} />
</TouchableOpacity>
</View>
</SafeAreaView>
);
}
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: '#1a1a2e',
},
backgroundGradient: {
position: 'absolute',
top: 0,
left: 0,
right: 0,
height: '50%',
backgroundColor: '#16213e',
borderBottomLeftRadius: SCREEN_WIDTH,
borderBottomRightRadius: SCREEN_WIDTH,
transform: [{ scaleX: 2 }],
},
topBar: {
flexDirection: 'row',
alignItems: 'center',
justifyContent: 'space-between',
paddingHorizontal: Spacing.md,
paddingVertical: Spacing.sm,
},
backButton: {
width: 44,
height: 44,
justifyContent: 'center',
alignItems: 'center',
},
topBarCenter: {
flex: 1,
alignItems: 'center',
},
content: {
flex: 1,
alignItems: 'center',
justifyContent: 'center',
paddingBottom: 100,
},
avatarContainer: {
width: 150,
height: 150,
marginBottom: Spacing.xl,
},
avatar: {
width: 150,
height: 150,
borderRadius: 75,
backgroundColor: AppColors.success,
justifyContent: 'center',
alignItems: 'center',
shadowColor: AppColors.success,
shadowOffset: { width: 0, height: 0 },
shadowOpacity: 0.5,
shadowRadius: 20,
elevation: 10,
},
avatarText: {
fontSize: 64,
fontWeight: '600',
color: AppColors.white,
},
activeIndicator: {
position: 'absolute',
bottom: 10,
right: 10,
width: 24,
height: 24,
borderRadius: 12,
backgroundColor: AppColors.success,
borderWidth: 3,
borderColor: '#1a1a2e',
},
name: {
fontSize: 32,
fontWeight: '700',
color: AppColors.white,
marginBottom: Spacing.xs,
},
statusContainer: {
flexDirection: 'row',
alignItems: 'center',
},
activeDot: {
width: 8,
height: 8,
borderRadius: 4,
backgroundColor: AppColors.success,
marginRight: Spacing.sm,
},
duration: {
fontSize: FontSizes.lg,
color: AppColors.white,
fontVariant: ['tabular-nums'],
},
status: {
fontSize: FontSizes.base,
color: 'rgba(255,255,255,0.7)',
},
listeningStatus: {
fontSize: FontSizes.sm,
color: 'rgba(255,255,255,0.5)',
marginTop: Spacing.md,
fontStyle: 'italic',
},
errorContainer: {
flexDirection: 'row',
alignItems: 'center',
marginTop: Spacing.md,
paddingHorizontal: Spacing.lg,
},
errorText: {
fontSize: FontSizes.sm,
color: AppColors.error,
marginLeft: Spacing.sm,
flex: 1,
},
controls: {
flexDirection: 'row',
justifyContent: 'center',
alignItems: 'center',
paddingVertical: Spacing.xl,
paddingHorizontal: Spacing.lg,
gap: 40, // Space between 2 buttons (Mute, End Call)
},
controlButton: {
alignItems: 'center',
padding: Spacing.md,
borderRadius: BorderRadius.full,
backgroundColor: 'rgba(255,255,255,0.1)',
width: 70,
height: 70,
justifyContent: 'center',
},
controlButtonActive: {
backgroundColor: 'rgba(255,255,255,0.2)',
},
controlLabel: {
fontSize: FontSizes.xs,
color: AppColors.white,
marginTop: 4,
},
endCallButton: {
width: 72,
height: 72,
borderRadius: 36,
backgroundColor: AppColors.error,
justifyContent: 'center',
alignItems: 'center',
transform: [{ rotate: '135deg' }],
shadowColor: AppColors.error,
shadowOffset: { width: 0, height: 4 },
shadowOpacity: 0.4,
shadowRadius: 8,
elevation: 8,
},
});