From 6f7c79f6017e9dd7bc6550677bdc42b1c96fab01 Mon Sep 17 00:00:00 2001
From: Sergei
Date: Sat, 24 Jan 2026 20:19:19 -0800
Subject: [PATCH] Remove separate voice call screen
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Deleted app/voice-call.tsx fullscreen modal and removed all navigation
references to it from chat.tsx, voice.tsx and _layout.tsx.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude
---
 app/(tabs)/chat.tsx  |  36 +--
 app/(tabs)/voice.tsx |  32 +--
 app/_layout.tsx      |   1 -
 app/voice-call.tsx   | 557 -------------------------------------------
 4 files changed, 2 insertions(+), 624 deletions(-)
 delete mode 100644 app/voice-call.tsx

diff --git a/app/(tabs)/chat.tsx b/app/(tabs)/chat.tsx
index 63c4d87..216bf00 100644
--- a/app/(tabs)/chat.tsx
+++ b/app/(tabs)/chat.tsx
@@ -2,7 +2,6 @@
  * Chat Screen - Text Chat with Julia AI
  *
  * Clean text chat interface.
- * Voice calls are handled by separate voice-call.tsx screen.
  */
 
 import React, { useState, useCallback, useRef, useEffect } from 'react';
@@ -209,10 +208,6 @@ export default function ChatScreen() {
     setShowBeneficiaryPicker(false);
   }, [setCurrentBeneficiary]);
 
-  // Start voice call - navigate to voice-call screen
-  const startVoiceCall = useCallback(() => {
-    router.push('/voice-call');
-  }, [router]);
 
   // Cached API token for WellNuo
   const apiTokenRef = useRef(null);
@@ -395,12 +390,7 @@ export default function ChatScreen() {
-
-      {/* Voice Call Button */}
-
-
-
-
+
 
       {/* Beneficiary Picker Modal */}
@@ -476,11 +466,6 @@ export default function ChatScreen() {
       {/* Input */}
-      {/* Voice Call Button in input area */}
-
-
-
-
diff --git a/app/(tabs)/voice.tsx b/app/(tabs)/voice.tsx
--- a/app/(tabs)/voice.tsx
+++ b/app/(tabs)/voice.tsx
 {
-    router.push('/voice-call');
-  }, [router]);
 
   // Add mock data for testing (simulator has no microphone)
   const addMockData = useCallback(() => {
@@ -122,13 +116,8 @@ export default function VoiceDebugScreen() {
-      {/* Start Call Button */}
+      {/* Mock Data Button for simulator testing */}
-
-
-        Start Voice Call
-
-
       {/* Mock Data Button for simulator testing */}
 
         Add Mock Data
 
@@ -242,25 +231,6 @@ const styles = StyleSheet.create({
     paddingHorizontal: Spacing.md,
     paddingVertical: Spacing.md,
   },
-  callButton: {
-    flexDirection: 'row',
-    alignItems: 'center',
-    justifyContent: 'center',
-    gap: Spacing.sm,
-    backgroundColor: AppColors.success,
-    paddingVertical: Spacing.md,
-    borderRadius: BorderRadius.lg,
-    shadowColor: AppColors.success,
-    shadowOffset: { width: 0, height: 4 },
-    shadowOpacity: 0.3,
-    shadowRadius: 8,
-    elevation: 4,
-  },
-  callButtonText: {
-    fontSize: FontSizes.lg,
-    fontWeight: '600',
-    color: AppColors.white,
-  },
   mockDataButton: {
     flexDirection: 'row',
     alignItems: 'center',
diff --git a/app/_layout.tsx b/app/_layout.tsx
index 21e36ce..2d4418f 100644
--- a/app/_layout.tsx
+++ b/app/_layout.tsx
@@ -50,7 +50,6 @@ function RootLayoutNav() {
-
diff --git a/app/voice-call.tsx b/app/voice-call.tsx
deleted file mode 100644
index 2d122e1..0000000
--- a/app/voice-call.tsx
+++ /dev/null
@@ -1,557 +0,0 @@
-/**
- * Voice Call Screen - Fullscreen LiveKit Voice Call with Julia AI
- *
- * ARCHITECTURE:
- * - ALL LiveKit/WebRTC logic is in useLiveKitRoom hook
- * - This component ONLY handles UI rendering
- * - No direct LiveKit imports here!
- *
- * Features:
- * - Phone call-like UI with Julia avatar
- * - Call duration timer
- * - Mute/unmute toggle
- * - Proper cleanup on unmount
- */
-
-import React, { useEffect, useRef, useState, useMemo } from 'react';
-import { View, Text, StyleSheet, TouchableOpacity, Animated, Easing, Dimensions } from 'react-native';
-import { Ionicons } from '@expo/vector-icons';
-import { SafeAreaView } from 'react-native-safe-area-context';
-import { useRouter } from 'expo-router';
-import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
-import { useVoiceTranscript } from '@/contexts/VoiceTranscriptContext';
-import { useBeneficiary } from '@/contexts/BeneficiaryContext';
-import { useLiveKitRoom, ConnectionState } from '@/hooks/useLiveKitRoom';
-import { api } from '@/services/api';
-import type { Beneficiary } from '@/types';
-import type { BeneficiaryData } from '@/services/livekitService';
-
-const { width: SCREEN_WIDTH } = Dimensions.get('window');
-
-export default function VoiceCallScreen() {
-  const router = useRouter();
-  const { clearTranscript, addTranscriptEntry } = useVoiceTranscript();
-  const { currentBeneficiary, debugDeploymentId } = useBeneficiary();
-
-  // Beneficiary state for building beneficiaryData
-  const [beneficiaries, setBeneficiaries] = useState<Beneficiary[]>([]);
-  const [beneficiariesLoaded, setBeneficiariesLoaded] = useState(false);
-
-  // Load beneficiaries on mount
-  useEffect(() => {
-    const loadBeneficiaries = async () => {
-      try {
-        const response = await api.getAllBeneficiaries();
-        if (response.ok && response.data) {
-          setBeneficiaries(response.data);
-          console.log('[VoiceCall] Beneficiaries loaded:', response.data.length);
-        }
-      } catch (error) {
-        console.warn('[VoiceCall] Failed to load beneficiaries:', error);
-      } finally {
-        setBeneficiariesLoaded(true);
-      }
-    };
-    loadBeneficiaries();
-  }, []);
-
-  // Build beneficiaryData for voice agent
-  const beneficiaryData = useMemo((): BeneficiaryData | undefined => {
-    // PRIORITY 1: If debugDeploymentId is set (from Debug screen), use it
-    if (debugDeploymentId) {
-      console.log('[VoiceCall] Using DEBUG deployment ID:', debugDeploymentId);
-      return {
-        deploymentId: debugDeploymentId,
-        beneficiaryNamesDict: {},
-      };
-    }
-
-    // PRIORITY 2: Use beneficiaries from API
-    // Safety check - ensure beneficiaries is an array
-    if (!Array.isArray(beneficiaries) || beneficiaries.length === 0) {
-      console.log('[VoiceCall] No beneficiaries yet, skipping beneficiaryData');
-      return undefined;
-    }
-
-    try {
-      // Build beneficiary_names_dict from all beneficiaries
-      // Format: {"21": "papa", "69": "David"}
-      const beneficiaryNamesDict: Record<string, string> = {};
-      beneficiaries.forEach(b => {
-        // Safety: check that b exists and has id and name
-        if (b && b.id != null && b.name) {
-          beneficiaryNamesDict[String(b.id)] = b.name;
-        }
-      });
-
-      // Get deployment_id from current beneficiary or fallback to first one
-      const deploymentId = currentBeneficiary?.id != null
-        ? String(currentBeneficiary.id)
-        : beneficiaries[0]?.id != null
-          ? String(beneficiaries[0].id)
-          : '21';
-
-      console.log('[VoiceCall] BeneficiaryData:', { deploymentId, beneficiaryNamesDict });
-
-      return {
-        deploymentId,
-        beneficiaryNamesDict,
-      };
-    } catch (error) {
-      console.error('[VoiceCall] Error building beneficiaryData:', error);
-      return undefined;
-    }
-  }, [beneficiaries, currentBeneficiary, debugDeploymentId]);
-
-  // LiveKit hook - ALL logic is here
-  const {
-    state,
-    error,
-    callDuration,
-    isMuted,
-    isAgentSpeaking,
-    canPlayAudio,
-    participantCount,
-    connect,
-    disconnect,
-    toggleMute,
-  } = useLiveKitRoom({
-    userId: `user-${Date.now()}`,
-    beneficiaryData,
-    onTranscript: (role, text) => {
-      addTranscriptEntry(role, text);
-    },
-  });
-
-  // Animations
-  const pulseAnim = useRef(new Animated.Value(1)).current;
-  const rotateAnim = useRef(new Animated.Value(0)).current;
-  const avatarScale = useRef(new Animated.Value(0.8)).current;
-
-  // Clear transcript on mount
-  useEffect(() => {
-    clearTranscript();
-  }, []);
-
-  // Track if connect has been called to prevent duplicate calls
-  const connectCalledRef = useRef(false);
-
-  // Start call ONLY after beneficiaryData is ready
-  // IMPORTANT: We must wait for beneficiaryData to be populated!
-  // Without deploymentId, Julia AI agent won't know which beneficiary to talk about.
-  useEffect(() => {
-    // Prevent duplicate connect calls
-    if (connectCalledRef.current) return;
-
-    // If debugDeploymentId is set, connect immediately (don't wait for beneficiaries)
-    if (debugDeploymentId && beneficiaryData?.deploymentId) {
-      console.log('[VoiceCall] Starting call with DEBUG deploymentId:', debugDeploymentId);
-      connectCalledRef.current = true;
-      connect();
-      return;
-    }
-
-    // Otherwise, only connect when beneficiaries are loaded AND beneficiaryData is ready
-    if (beneficiariesLoaded && beneficiaryData?.deploymentId) {
-      console.log('[VoiceCall] Starting call with beneficiaryData:', JSON.stringify(beneficiaryData));
-      connectCalledRef.current = true;
-      connect();
-    } else if (beneficiariesLoaded) {
-      console.log('[VoiceCall] Waiting for beneficiaryData... Current state:', {
-        beneficiariesLoaded,
-        beneficiariesCount: beneficiaries.length,
-        beneficiaryData: beneficiaryData ? JSON.stringify(beneficiaryData) : 'undefined'
-      });
-    }
-  }, [beneficiariesLoaded, beneficiaryData, beneficiaries.length, connect, debugDeploymentId]);
-
-  // Fallback: if beneficiaryData doesn't arrive in 5 seconds, connect anyway
-  // This handles edge cases where API fails or user has no beneficiaries
-  useEffect(() => {
-    if (connectCalledRef.current) return;
-
-    const timeout = setTimeout(() => {
-      if (!connectCalledRef.current && beneficiariesLoaded) {
-        console.warn('[VoiceCall] Timeout: beneficiaryData not ready after 5s, connecting without it');
-        connectCalledRef.current = true;
-        connect();
-      }
-    }, 5000);
-
-    return () => clearTimeout(timeout);
-  }, [beneficiariesLoaded, connect]);
-
-  // Navigate back on disconnect or error
-  useEffect(() => {
-    if (state === 'disconnected' || state === 'error') {
-      const timeout = setTimeout(() => {
-        router.back();
-      }, state === 'error' ? 2000 : 500);
-      return () => clearTimeout(timeout);
-    }
-  }, [state, router]);
-
-  // Pulse animation for active call
-  useEffect(() => {
-    if (state === 'connected') {
-      const pulse = Animated.loop(
-        Animated.sequence([
-          Animated.timing(pulseAnim, {
-            toValue: 1.1,
-            duration: 1500,
-            easing: Easing.inOut(Easing.ease),
-            useNativeDriver: true,
-          }),
-          Animated.timing(pulseAnim, {
-            toValue: 1,
-            duration: 1500,
-            easing: Easing.inOut(Easing.ease),
-            useNativeDriver: true,
-          }),
-        ])
-      );
-      pulse.start();
-
-      // Avatar entrance animation
-      Animated.spring(avatarScale, {
-        toValue: 1,
-        friction: 8,
-        tension: 40,
-        useNativeDriver: true,
-      }).start();
-
-      return () => pulse.stop();
-    }
-  }, [state, pulseAnim, avatarScale]);
-
-  // Rotate animation for connecting states
-  useEffect(() => {
-    const connectingStates: ConnectionState[] = [
-      'initializing',
-      'configuring_audio',
-      'requesting_token',
-      'connecting',
-      'reconnecting',
-    ];
-
-    if (connectingStates.includes(state)) {
-      const rotate = Animated.loop(
-        Animated.timing(rotateAnim, {
-          toValue: 1,
-          duration: 2000,
-          easing: Easing.linear,
-          useNativeDriver: true,
-        })
-      );
-      rotate.start();
-      return () => rotate.stop();
-    } else {
-      rotateAnim.setValue(0);
-    }
-  }, [state, rotateAnim]);
-
-  // End call handler
-  const handleEndCall = async () => {
-    await disconnect();
-    router.back();
-  };
-
-  // Format duration as MM:SS
-  const formatDuration = (seconds: number): string => {
-    const mins = Math.floor(seconds / 60);
-    const secs = seconds % 60;
-    return `${mins}:${secs.toString().padStart(2, '0')}`;
-  };
-
-  // Get status text based on state
-  const getStatusText = (): string => {
-    switch (state) {
-      case 'idle':
-        return 'Starting...';
-      case 'initializing':
-        return 'Initializing...';
-      case 'configuring_audio':
-        return 'Configuring audio...';
-      case 'requesting_token':
-        return 'Requesting token...';
-      case 'connecting':
-        return 'Connecting...';
-      case 'connected':
-        if (isAgentSpeaking) return 'Julia is speaking...';
-        if (!canPlayAudio) return 'Waiting for audio...';
-        return 'Connected';
-      case 'reconnecting':
-        return 'Reconnecting...';
-      case 'disconnected':
-        return 'Disconnected';
-      case 'error':
-        return error || 'Error occurred';
-      default:
-        return 'Unknown state';
-    }
-  };
-
-  // Is call currently connecting?
-  const isConnecting = [
-    'idle',
-    'initializing',
-    'configuring_audio',
-    'requesting_token',
-    'connecting',
-  ].includes(state);
-
-  // Is call active?
-  const isActive = state === 'connected';
-
-  // Rotation interpolation
-  const spin = rotateAnim.interpolate({
-    inputRange: [0, 1],
-    outputRange: ['0deg', '360deg'],
-  });
-
-  return (
-
-      {/* Background gradient effect */}
-
-      {/* Top bar - minimal */}
-
-
-
-
-      {/* Main content */}
-
-        {/* Avatar */}
-
-
-            J
-
-          {isActive && }
-
-
-        {/* Name and status */}
-        Julia AI
-
-        {isActive ? (
-
-            {formatDuration(callDuration)}
-
-        ) : (
-          {getStatusText()}
-        )}
-
-        {/* Additional status info */}
-        {isActive && (
-
-            {getStatusText()}
-            {participantCount > 1 && ` • ${participantCount} participants`}
-
-        )}
-
-        {/* Error display */}
-        {state === 'error' && error && (
-
-            {error}
-
-        )}
-
-
-      {/* Bottom controls - centered layout with 2 buttons */}
-
-        {/* Mute button */}
-
-          {isMuted ? 'Unmute' : 'Mute'}
-
-        {/* End call button */}
-
-
-  );
-}
-
-const styles = StyleSheet.create({
-  container: {
-    flex: 1,
-    backgroundColor: '#1a1a2e',
-  },
-  backgroundGradient: {
-    position: 'absolute',
-    top: 0,
-    left: 0,
-    right: 0,
-    height: '50%',
-    backgroundColor: '#16213e',
-    borderBottomLeftRadius: SCREEN_WIDTH,
-    borderBottomRightRadius: SCREEN_WIDTH,
-    transform: [{ scaleX: 2 }],
-  },
-  topBar: {
-    flexDirection: 'row',
-    alignItems: 'center',
-    justifyContent: 'space-between',
-    paddingHorizontal: Spacing.md,
-    paddingVertical: Spacing.sm,
-  },
-  backButton: {
-    width: 44,
-    height: 44,
-    justifyContent: 'center',
-    alignItems: 'center',
-  },
-  topBarCenter: {
-    flex: 1,
-    alignItems: 'center',
-  },
-  content: {
-    flex: 1,
-    alignItems: 'center',
-    justifyContent: 'center',
-    paddingBottom: 100,
-  },
-  avatarContainer: {
-    width: 150,
-    height: 150,
-    marginBottom: Spacing.xl,
-  },
-  avatar: {
-    width: 150,
-    height: 150,
-    borderRadius: 75,
-    backgroundColor: AppColors.success,
-    justifyContent: 'center',
-    alignItems: 'center',
-    shadowColor: AppColors.success,
-    shadowOffset: { width: 0, height: 0 },
-    shadowOpacity: 0.5,
-    shadowRadius: 20,
-    elevation: 10,
-  },
-  avatarText: {
-    fontSize: 64,
-    fontWeight: '600',
-    color: AppColors.white,
-  },
-  activeIndicator: {
-    position: 'absolute',
-    bottom: 10,
-    right: 10,
-    width: 24,
-    height: 24,
-    borderRadius: 12,
-    backgroundColor: AppColors.success,
-    borderWidth: 3,
-    borderColor: '#1a1a2e',
-  },
-  name: {
-    fontSize: 32,
-    fontWeight: '700',
-    color: AppColors.white,
-    marginBottom: Spacing.xs,
-  },
-  statusContainer: {
-    flexDirection: 'row',
-    alignItems: 'center',
-  },
-  activeDot: {
-    width: 8,
-    height: 8,
-    borderRadius: 4,
-    backgroundColor: AppColors.success,
-    marginRight: Spacing.sm,
-  },
-  duration: {
-    fontSize: FontSizes.lg,
-    color: AppColors.white,
-    fontVariant: ['tabular-nums'],
-  },
-  status: {
-    fontSize: FontSizes.base,
-    color: 'rgba(255,255,255,0.7)',
-  },
-  listeningStatus: {
-    fontSize: FontSizes.sm,
-    color: 'rgba(255,255,255,0.5)',
-    marginTop: Spacing.md,
-    fontStyle: 'italic',
-  },
-  errorContainer: {
-    flexDirection: 'row',
-    alignItems: 'center',
-    marginTop: Spacing.md,
-    paddingHorizontal: Spacing.lg,
-  },
-  errorText: {
-    fontSize: FontSizes.sm,
-    color: AppColors.error,
-    marginLeft: Spacing.sm,
-    flex: 1,
-  },
-  controls: {
-    flexDirection: 'row',
-    justifyContent: 'center',
-    alignItems: 'center',
-    paddingVertical: Spacing.xl,
-    paddingHorizontal: Spacing.lg,
-    gap: 40, // Space between 2 buttons (Mute, End Call)
-  },
-  controlButton: {
-    alignItems: 'center',
-    padding: Spacing.md,
-    borderRadius: BorderRadius.full,
-    backgroundColor: 'rgba(255,255,255,0.1)',
-    width: 70,
-    height: 70,
-    justifyContent: 'center',
-  },
-  controlButtonActive: {
-    backgroundColor: 'rgba(255,255,255,0.2)',
-  },
-  controlLabel: {
-    fontSize: FontSizes.xs,
-    color: AppColors.white,
-    marginTop: 4,
-  },
-  endCallButton: {
-    width: 72,
-    height: 72,
-    borderRadius: 36,
-    backgroundColor: AppColors.error,
-    justifyContent: 'center',
-    alignItems: 'center',
-    transform: [{ rotate: '135deg' }],
-    shadowColor: AppColors.error,
-    shadowOffset: { width: 0, height: 4 },
-    shadowOpacity: 0.4,
-    shadowRadius: 8,
-    elevation: 8,
-  },
-});
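
Note (not part of the patch): with the /voice-call route and its navigation buttons removed, a screen that previously called router.push('/voice-call') can drive the call in place through useLiveKitRoom, which, per the deleted screen's header comment, already owns all LiveKit/WebRTC logic. The sketch below is illustrative only: it assumes the hook keeps the surface used by the deleted voice-call.tsx (state, connect, disconnect, onTranscript), and the component name InlineVoiceCallButton is hypothetical.

// Illustrative sketch only, not in this patch. Assumes useLiveKitRoom keeps
// the API used by the deleted voice-call.tsx: state, connect, disconnect.
import React, { useCallback } from 'react';
import { Text, TouchableOpacity } from 'react-native';
import { useLiveKitRoom } from '@/hooks/useLiveKitRoom';

// Hypothetical replacement for the removed "navigate to /voice-call" buttons.
export function InlineVoiceCallButton() {
  const { state, connect, disconnect } = useLiveKitRoom({
    userId: `user-${Date.now()}`,
    onTranscript: (role, text) => console.log('[Voice]', role, text),
  });

  const isActive = state === 'connected';

  // Start or end the call in place instead of pushing a separate screen.
  const onPress = useCallback(() => {
    if (isActive) {
      disconnect();
    } else {
      connect();
    }
  }, [isActive, connect, disconnect]);

  return (
    <TouchableOpacity onPress={onPress}>
      <Text>{isActive ? 'End Voice Call' : 'Start Voice Call'}</Text>
    </TouchableOpacity>
  );
}

Whether the removed buttons were replaced by something along these lines is outside the scope of this diff.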