Remove separate voice call screen

Deleted app/voice-call.tsx fullscreen modal and removed all navigation
references to it from chat.tsx, voice.tsx and _layout.tsx.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
Sergei 2026-01-24 20:19:19 -08:00
parent a23116a796
commit 6f7c79f601
4 changed files with 2 additions and 624 deletions
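In brief, the navigation wiring removed by this commit looked like the sketch below. This is a condensed, illustrative rewrite, not repo code: the 'voice-call' route name, the Stack.Screen options, and the router.push('/voice-call') call are taken from the per-file diffs that follow, while the RootLayoutSketch and useStartVoiceCall wrappers are only framing for the example.

// Illustrative sketch, not repo code: how the removed route was wired.
// Assumes expo-router; the 'voice-call' name, screen options, and
// router.push('/voice-call') call are copied from the diffs below.
import React, { useCallback } from 'react';
import { Stack, useRouter } from 'expo-router';

// app/_layout.tsx registered the fullscreen modal route (now removed):
export function RootLayoutSketch() {
  return (
    <Stack>
      <Stack.Screen
        name="voice-call"
        options={{ presentation: 'fullScreenModal', headerShown: false, gestureEnabled: false }}
      />
    </Stack>
  );
}

// chat.tsx and voice.tsx navigated to it with a handler like this (now removed):
export function useStartVoiceCall() {
  const router = useRouter();
  return useCallback(() => {
    router.push('/voice-call');
  }, [router]);
}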

chat.tsx

@@ -2,7 +2,6 @@
* Chat Screen - Text Chat with Julia AI
*
* Clean text chat interface.
* Voice calls are handled by separate voice-call.tsx screen.
*/
import React, { useState, useCallback, useRef, useEffect } from 'react';
@@ -209,10 +208,6 @@ export default function ChatScreen() {
setShowBeneficiaryPicker(false);
}, [setCurrentBeneficiary]);
// Start voice call - navigate to voice-call screen
const startVoiceCall = useCallback(() => {
router.push('/voice-call');
}, [router]);
// Cached API token for WellNuo
const apiTokenRef = useRef<string | null>(null);
@@ -395,12 +390,7 @@
</Text>
</View>
</View>
<View style={styles.headerButtons}>
<View style={styles.headerButtons} />
{/* Voice Call Button */}
<TouchableOpacity style={styles.callButton} onPress={startVoiceCall}>
<Ionicons name="call" size={22} color={AppColors.white} />
</TouchableOpacity>
</View>
</View>
{/* Beneficiary Picker Modal */}
@@ -476,11 +466,6 @@
{/* Input */}
<View style={styles.inputContainer}>
{/* Voice Call Button in input area */}
<TouchableOpacity style={styles.voiceCallButton} onPress={startVoiceCall}>
<Ionicons name="call-outline" size={20} color={AppColors.primary} />
</TouchableOpacity>
<TextInput
style={styles.input}
placeholder="Type a message..."
@@ -560,14 +545,6 @@ const styles = StyleSheet.create({
alignItems: 'center',
gap: Spacing.sm,
},
callButton: {
width: 40,
height: 40,
borderRadius: 20,
backgroundColor: AppColors.success,
justifyContent: 'center',
alignItems: 'center',
},
headerButton: {
padding: Spacing.xs,
},
@@ -643,17 +620,6 @@ const styles = StyleSheet.create({
borderTopWidth: 1,
borderTopColor: AppColors.border,
},
voiceCallButton: {
width: 44,
height: 44,
borderRadius: 22,
backgroundColor: AppColors.surface,
borderWidth: 1,
borderColor: AppColors.primary,
justifyContent: 'center',
alignItems: 'center',
marginRight: Spacing.sm,
},
input: {
flex: 1,
backgroundColor: AppColors.surface,

voice.tsx

@@ -15,13 +15,11 @@ import {
} from 'react-native';
import { SafeAreaView } from 'react-native-safe-area-context';
import { Ionicons, Feather } from '@expo/vector-icons';
import { useRouter } from 'expo-router';
import * as Clipboard from 'expo-clipboard';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import { useVoiceTranscript } from '@/contexts/VoiceTranscriptContext';
export default function VoiceDebugScreen() {
const router = useRouter();
const { transcript, clearTranscript, hasNewTranscript, markTranscriptAsShown, addTranscriptEntry } = useVoiceTranscript();
// Mark as shown when viewed
@@ -74,10 +72,6 @@ export default function VoiceDebugScreen() {
);
}, [clearTranscript]);
// Start a new voice call
const startVoiceCall = useCallback(() => {
router.push('/voice-call');
}, [router]);
// Add mock data for testing (simulator has no microphone)
const addMockData = useCallback(() => {
@@ -122,13 +116,8 @@
</View>
</View>
{/* Start Call Button */}
<View style={styles.callButtonContainer}>
<TouchableOpacity style={styles.callButton} onPress={startVoiceCall}>
<Ionicons name="call" size={24} color={AppColors.white} />
<Text style={styles.callButtonText}>Start Voice Call</Text>
</TouchableOpacity>
{/* Mock Data Button for simulator testing */}
<View style={styles.callButtonContainer}>
<TouchableOpacity style={styles.mockDataButton} onPress={addMockData}>
<Feather name="plus-circle" size={20} color={AppColors.primary} />
<Text style={styles.mockDataButtonText}>Add Mock Data</Text>
@@ -242,25 +231,6 @@ const styles = StyleSheet.create({
paddingHorizontal: Spacing.md,
paddingVertical: Spacing.md,
},
callButton: {
flexDirection: 'row',
alignItems: 'center',
justifyContent: 'center',
gap: Spacing.sm,
backgroundColor: AppColors.success,
paddingVertical: Spacing.md,
borderRadius: BorderRadius.lg,
shadowColor: AppColors.success,
shadowOffset: { width: 0, height: 4 },
shadowOpacity: 0.3,
shadowRadius: 8,
elevation: 4,
},
callButtonText: {
fontSize: FontSizes.lg,
fontWeight: '600',
color: AppColors.white,
},
mockDataButton: {
flexDirection: 'row',
alignItems: 'center',

_layout.tsx

@@ -50,7 +50,6 @@ function RootLayoutNav() {
<Stack.Screen name="(auth)" />
<Stack.Screen name="(tabs)" />
<Stack.Screen name="modal" options={{ presentation: 'modal', title: 'Modal' }} />
<Stack.Screen name="voice-call" options={{ presentation: 'fullScreenModal', headerShown: false, gestureEnabled: false }} />
<Stack.Screen name="terms" options={{ presentation: 'modal' }} /> <Stack.Screen name="terms" options={{ presentation: 'modal' }} />
<Stack.Screen name="privacy" options={{ presentation: 'modal' }} /> <Stack.Screen name="privacy" options={{ presentation: 'modal' }} />
</Stack> </Stack>

app/voice-call.tsx (deleted)

@ -1,557 +0,0 @@
/**
* Voice Call Screen - Fullscreen LiveKit Voice Call with Julia AI
*
* ARCHITECTURE:
* - ALL LiveKit/WebRTC logic is in useLiveKitRoom hook
* - This component ONLY handles UI rendering
* - No direct LiveKit imports here!
*
* Features:
* - Phone call-like UI with Julia avatar
* - Call duration timer
* - Mute/unmute toggle
* - Proper cleanup on unmount
*/
import React, { useEffect, useRef, useState, useMemo } from 'react';
import { View, Text, StyleSheet, TouchableOpacity, Animated, Easing, Dimensions } from 'react-native';
import { Ionicons } from '@expo/vector-icons';
import { SafeAreaView } from 'react-native-safe-area-context';
import { useRouter } from 'expo-router';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import { useVoiceTranscript } from '@/contexts/VoiceTranscriptContext';
import { useBeneficiary } from '@/contexts/BeneficiaryContext';
import { useLiveKitRoom, ConnectionState } from '@/hooks/useLiveKitRoom';
import { api } from '@/services/api';
import type { Beneficiary } from '@/types';
import type { BeneficiaryData } from '@/services/livekitService';
const { width: SCREEN_WIDTH } = Dimensions.get('window');
export default function VoiceCallScreen() {
const router = useRouter();
const { clearTranscript, addTranscriptEntry } = useVoiceTranscript();
const { currentBeneficiary, debugDeploymentId } = useBeneficiary();
// Beneficiary state for building beneficiaryData
const [beneficiaries, setBeneficiaries] = useState<Beneficiary[]>([]);
const [beneficiariesLoaded, setBeneficiariesLoaded] = useState(false);
// Load beneficiaries on mount
useEffect(() => {
const loadBeneficiaries = async () => {
try {
const response = await api.getAllBeneficiaries();
if (response.ok && response.data) {
setBeneficiaries(response.data);
console.log('[VoiceCall] Beneficiaries loaded:', response.data.length);
}
} catch (error) {
console.warn('[VoiceCall] Failed to load beneficiaries:', error);
} finally {
setBeneficiariesLoaded(true);
}
};
loadBeneficiaries();
}, []);
// Build beneficiaryData for voice agent
const beneficiaryData = useMemo((): BeneficiaryData | undefined => {
// PRIORITY 1: If debugDeploymentId is set (from Debug screen), use it
if (debugDeploymentId) {
console.log('[VoiceCall] Using DEBUG deployment ID:', debugDeploymentId);
return {
deploymentId: debugDeploymentId,
beneficiaryNamesDict: {},
};
}
// PRIORITY 2: Use beneficiaries from API
// Safety check - ensure beneficiaries is an array
if (!Array.isArray(beneficiaries) || beneficiaries.length === 0) {
console.log('[VoiceCall] No beneficiaries yet, skipping beneficiaryData');
return undefined;
}
try {
// Build beneficiary_names_dict from all beneficiaries
// Format: {"21": "papa", "69": "David"}
const beneficiaryNamesDict: Record<string, string> = {};
beneficiaries.forEach(b => {
// Safety: check that b exists and has id and name
if (b && b.id != null && b.name) {
beneficiaryNamesDict[String(b.id)] = b.name;
}
});
// Get deployment_id from current beneficiary or fallback to first one
const deploymentId = currentBeneficiary?.id != null
? String(currentBeneficiary.id)
: beneficiaries[0]?.id != null
? String(beneficiaries[0].id)
: '21';
console.log('[VoiceCall] BeneficiaryData:', { deploymentId, beneficiaryNamesDict });
return {
deploymentId,
beneficiaryNamesDict,
};
} catch (error) {
console.error('[VoiceCall] Error building beneficiaryData:', error);
return undefined;
}
}, [beneficiaries, currentBeneficiary, debugDeploymentId]);
// LiveKit hook - ALL logic is here
const {
state,
error,
callDuration,
isMuted,
isAgentSpeaking,
canPlayAudio,
participantCount,
connect,
disconnect,
toggleMute,
} = useLiveKitRoom({
userId: `user-${Date.now()}`,
beneficiaryData,
onTranscript: (role, text) => {
addTranscriptEntry(role, text);
},
});
// Animations
const pulseAnim = useRef(new Animated.Value(1)).current;
const rotateAnim = useRef(new Animated.Value(0)).current;
const avatarScale = useRef(new Animated.Value(0.8)).current;
// Clear transcript on mount
useEffect(() => {
clearTranscript();
}, []);
// Track if connect has been called to prevent duplicate calls
const connectCalledRef = useRef(false);
// Start call ONLY after beneficiaryData is ready
// IMPORTANT: We must wait for beneficiaryData to be populated!
// Without deploymentId, Julia AI agent won't know which beneficiary to talk about.
useEffect(() => {
// Prevent duplicate connect calls
if (connectCalledRef.current) return;
// If debugDeploymentId is set, connect immediately (don't wait for beneficiaries)
if (debugDeploymentId && beneficiaryData?.deploymentId) {
console.log('[VoiceCall] Starting call with DEBUG deploymentId:', debugDeploymentId);
connectCalledRef.current = true;
connect();
return;
}
// Otherwise, only connect when beneficiaries are loaded AND beneficiaryData is ready
if (beneficiariesLoaded && beneficiaryData?.deploymentId) {
console.log('[VoiceCall] Starting call with beneficiaryData:', JSON.stringify(beneficiaryData));
connectCalledRef.current = true;
connect();
} else if (beneficiariesLoaded) {
console.log('[VoiceCall] Waiting for beneficiaryData... Current state:', {
beneficiariesLoaded,
beneficiariesCount: beneficiaries.length,
beneficiaryData: beneficiaryData ? JSON.stringify(beneficiaryData) : 'undefined'
});
}
}, [beneficiariesLoaded, beneficiaryData, beneficiaries.length, connect, debugDeploymentId]);
// Fallback: if beneficiaryData doesn't arrive in 5 seconds, connect anyway
// This handles edge cases where API fails or user has no beneficiaries
useEffect(() => {
if (connectCalledRef.current) return;
const timeout = setTimeout(() => {
if (!connectCalledRef.current && beneficiariesLoaded) {
console.warn('[VoiceCall] Timeout: beneficiaryData not ready after 5s, connecting without it');
connectCalledRef.current = true;
connect();
}
}, 5000);
return () => clearTimeout(timeout);
}, [beneficiariesLoaded, connect]);
// Navigate back on disconnect or error
useEffect(() => {
if (state === 'disconnected' || state === 'error') {
const timeout = setTimeout(() => {
router.back();
}, state === 'error' ? 2000 : 500);
return () => clearTimeout(timeout);
}
}, [state, router]);
// Pulse animation for active call
useEffect(() => {
if (state === 'connected') {
const pulse = Animated.loop(
Animated.sequence([
Animated.timing(pulseAnim, {
toValue: 1.1,
duration: 1500,
easing: Easing.inOut(Easing.ease),
useNativeDriver: true,
}),
Animated.timing(pulseAnim, {
toValue: 1,
duration: 1500,
easing: Easing.inOut(Easing.ease),
useNativeDriver: true,
}),
])
);
pulse.start();
// Avatar entrance animation
Animated.spring(avatarScale, {
toValue: 1,
friction: 8,
tension: 40,
useNativeDriver: true,
}).start();
return () => pulse.stop();
}
}, [state, pulseAnim, avatarScale]);
// Rotate animation for connecting states
useEffect(() => {
const connectingStates: ConnectionState[] = [
'initializing',
'configuring_audio',
'requesting_token',
'connecting',
'reconnecting',
];
if (connectingStates.includes(state)) {
const rotate = Animated.loop(
Animated.timing(rotateAnim, {
toValue: 1,
duration: 2000,
easing: Easing.linear,
useNativeDriver: true,
})
);
rotate.start();
return () => rotate.stop();
} else {
rotateAnim.setValue(0);
}
}, [state, rotateAnim]);
// End call handler
const handleEndCall = async () => {
await disconnect();
router.back();
};
// Format duration as MM:SS
const formatDuration = (seconds: number): string => {
const mins = Math.floor(seconds / 60);
const secs = seconds % 60;
return `${mins}:${secs.toString().padStart(2, '0')}`;
};
// Get status text based on state
const getStatusText = (): string => {
switch (state) {
case 'idle':
return 'Starting...';
case 'initializing':
return 'Initializing...';
case 'configuring_audio':
return 'Configuring audio...';
case 'requesting_token':
return 'Requesting token...';
case 'connecting':
return 'Connecting...';
case 'connected':
if (isAgentSpeaking) return 'Julia is speaking...';
if (!canPlayAudio) return 'Waiting for audio...';
return 'Connected';
case 'reconnecting':
return 'Reconnecting...';
case 'disconnected':
return 'Disconnected';
case 'error':
return error || 'Error occurred';
default:
return 'Unknown state';
}
};
// Is call currently connecting?
const isConnecting = [
'idle',
'initializing',
'configuring_audio',
'requesting_token',
'connecting',
].includes(state);
// Is call active?
const isActive = state === 'connected';
// Rotation interpolation
const spin = rotateAnim.interpolate({
inputRange: [0, 1],
outputRange: ['0deg', '360deg'],
});
return (
<SafeAreaView style={styles.container} edges={['top', 'bottom']}>
{/* Background gradient effect */}
<View style={styles.backgroundGradient} />
{/* Top bar - minimal */}
<View style={styles.topBar}>
<TouchableOpacity style={styles.backButton} onPress={handleEndCall}>
<Ionicons name="chevron-down" size={28} color={AppColors.white} />
</TouchableOpacity>
<View style={styles.topBarCenter} />
<View style={styles.backButton} />
</View>
{/* Main content */}
<View style={styles.content}>
{/* Avatar */}
<Animated.View
style={[
styles.avatarContainer,
{
transform: [
{ scale: isActive ? pulseAnim : avatarScale },
{ rotate: isConnecting ? spin : '0deg' },
],
},
]}
>
<View style={styles.avatar}>
<Text style={styles.avatarText}>J</Text>
</View>
{isActive && <View style={styles.activeIndicator} />}
</Animated.View>
{/* Name and status */}
<Text style={styles.name}>Julia AI</Text>
{isActive ? (
<View style={styles.statusContainer}>
<View style={styles.activeDot} />
<Text style={styles.duration}>{formatDuration(callDuration)}</Text>
</View>
) : (
<Text style={styles.status}>{getStatusText()}</Text>
)}
{/* Additional status info */}
{isActive && (
<Text style={styles.listeningStatus}>
{getStatusText()}
{participantCount > 1 && `${participantCount} participants`}
</Text>
)}
{/* Error display */}
{state === 'error' && error && (
<View style={styles.errorContainer}>
<Ionicons name="alert-circle" size={20} color={AppColors.error} />
<Text style={styles.errorText}>{error}</Text>
</View>
)}
</View>
{/* Bottom controls - centered layout with 2 buttons */}
<View style={styles.controls}>
{/* Mute button */}
<TouchableOpacity
style={[styles.controlButton, isMuted && styles.controlButtonActive]}
onPress={toggleMute}
disabled={!isActive}
>
<Ionicons
name={isMuted ? 'mic-off' : 'mic'}
size={28}
color={isMuted ? AppColors.error : AppColors.white}
/>
<Text style={styles.controlLabel}>{isMuted ? 'Unmute' : 'Mute'}</Text>
</TouchableOpacity>
{/* End call button */}
<TouchableOpacity style={styles.endCallButton} onPress={handleEndCall}>
<Ionicons name="call" size={32} color={AppColors.white} />
</TouchableOpacity>
</View>
</SafeAreaView>
);
}
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: '#1a1a2e',
},
backgroundGradient: {
position: 'absolute',
top: 0,
left: 0,
right: 0,
height: '50%',
backgroundColor: '#16213e',
borderBottomLeftRadius: SCREEN_WIDTH,
borderBottomRightRadius: SCREEN_WIDTH,
transform: [{ scaleX: 2 }],
},
topBar: {
flexDirection: 'row',
alignItems: 'center',
justifyContent: 'space-between',
paddingHorizontal: Spacing.md,
paddingVertical: Spacing.sm,
},
backButton: {
width: 44,
height: 44,
justifyContent: 'center',
alignItems: 'center',
},
topBarCenter: {
flex: 1,
alignItems: 'center',
},
content: {
flex: 1,
alignItems: 'center',
justifyContent: 'center',
paddingBottom: 100,
},
avatarContainer: {
width: 150,
height: 150,
marginBottom: Spacing.xl,
},
avatar: {
width: 150,
height: 150,
borderRadius: 75,
backgroundColor: AppColors.success,
justifyContent: 'center',
alignItems: 'center',
shadowColor: AppColors.success,
shadowOffset: { width: 0, height: 0 },
shadowOpacity: 0.5,
shadowRadius: 20,
elevation: 10,
},
avatarText: {
fontSize: 64,
fontWeight: '600',
color: AppColors.white,
},
activeIndicator: {
position: 'absolute',
bottom: 10,
right: 10,
width: 24,
height: 24,
borderRadius: 12,
backgroundColor: AppColors.success,
borderWidth: 3,
borderColor: '#1a1a2e',
},
name: {
fontSize: 32,
fontWeight: '700',
color: AppColors.white,
marginBottom: Spacing.xs,
},
statusContainer: {
flexDirection: 'row',
alignItems: 'center',
},
activeDot: {
width: 8,
height: 8,
borderRadius: 4,
backgroundColor: AppColors.success,
marginRight: Spacing.sm,
},
duration: {
fontSize: FontSizes.lg,
color: AppColors.white,
fontVariant: ['tabular-nums'],
},
status: {
fontSize: FontSizes.base,
color: 'rgba(255,255,255,0.7)',
},
listeningStatus: {
fontSize: FontSizes.sm,
color: 'rgba(255,255,255,0.5)',
marginTop: Spacing.md,
fontStyle: 'italic',
},
errorContainer: {
flexDirection: 'row',
alignItems: 'center',
marginTop: Spacing.md,
paddingHorizontal: Spacing.lg,
},
errorText: {
fontSize: FontSizes.sm,
color: AppColors.error,
marginLeft: Spacing.sm,
flex: 1,
},
controls: {
flexDirection: 'row',
justifyContent: 'center',
alignItems: 'center',
paddingVertical: Spacing.xl,
paddingHorizontal: Spacing.lg,
gap: 40, // Space between 2 buttons (Mute, End Call)
},
controlButton: {
alignItems: 'center',
padding: Spacing.md,
borderRadius: BorderRadius.full,
backgroundColor: 'rgba(255,255,255,0.1)',
width: 70,
height: 70,
justifyContent: 'center',
},
controlButtonActive: {
backgroundColor: 'rgba(255,255,255,0.2)',
},
controlLabel: {
fontSize: FontSizes.xs,
color: AppColors.white,
marginTop: 4,
},
endCallButton: {
width: 72,
height: 72,
borderRadius: 36,
backgroundColor: AppColors.error,
justifyContent: 'center',
alignItems: 'center',
transform: [{ rotate: '135deg' }],
shadowColor: AppColors.error,
shadowOffset: { width: 0, height: 4 },
shadowOpacity: 0.4,
shadowRadius: 8,
elevation: 8,
},
});