/**
 * Chat Screen - Text Chat with Julia AI
 *
 * Clean text chat interface with integrated voice calls.
 */
import React, { useState, useCallback, useRef, useEffect } from 'react';
import {
  View,
  Text,
  StyleSheet,
  FlatList,
  TextInput,
  TouchableOpacity,
  Modal,
  ActivityIndicator,
  Keyboard,
  Platform,
  Alert,
  Animated,
  ScrollView,
} from 'react-native';
import * as Clipboard from 'expo-clipboard';
import { KeyboardAvoidingView } from 'react-native-keyboard-controller';
import { Ionicons } from '@expo/vector-icons';
import { SafeAreaView } from 'react-native-safe-area-context';
import { useRouter, useFocusEffect } from 'expo-router';
import { activateKeepAwakeAsync, deactivateKeepAwake } from 'expo-keep-awake';
import { api } from '@/services/api';
import { useBeneficiary } from '@/contexts/BeneficiaryContext';
import { useVoiceTranscript } from '@/contexts/VoiceTranscriptContext';
import { useVoiceCall } from '@/contexts/VoiceCallContext';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import type { Message, Beneficiary } from '@/types';

// LiveKit imports
import {
  registerGlobals,
  LiveKitRoom,
  useVoiceAssistant,
  useConnectionState,
  useTrackTranscription,
  useTracks,
} from '@livekit/react-native';
import { ConnectionState, Track } from 'livekit-client';
import { getToken, type BeneficiaryData } from '@/services/livekitService';
import { useAuth } from '@/contexts/AuthContext';
import { getAvailableAudioOutputs, selectAudioOutput, setAudioOutput } from '@/utils/audioSession';

// Register LiveKit globals (must be called before using LiveKit).
// Module-level side effect: runs once at import time, before any component renders.
registerGlobals();

// WellNuo backend endpoint, used below both for authentication
// (function=credentials) and for chat queries (function=ask_wellnuo_ai).
const API_URL = 'https://eluxnetworks.net/function/well-api/api';

// WellNuo API credentials (same as julia-agent).
// SECURITY NOTE(review): credentials are hard-coded into a client bundle and
// can be extracted from the shipped app; consider proxying auth through a
// backend service instead of embedding them here.
const WELLNUO_USER = 'anandk';
const WELLNUO_PASSWORD = 'anandk_8';

// ============================================================================
// SINGLE_DEPLOYMENT_MODE
// When true: sends only deployment_id (no beneficiary_names_dict)
// When false: sends both deployment_id AND beneficiary_names_dict
//
// Use true for WellNuo Lite (single beneficiary per user)
// Use false for full WellNuo app (multiple beneficiaries)
// ============================================================================
const SINGLE_DEPLOYMENT_MODE = true;

// Keywords for question normalization (same as julia-agent/julia-ai/src/agent.py).
// Case-insensitive patterns that mark a message as a "how is X" status query.
const STATUS_KEYWORDS = [
  /\bhow\s+is\b/i,
  /\bhow'?s\b/i,
  /\bhow\s+are\b/i,
  /\btell\s+me\s+about\b/i,
  /\bwhat'?s\s+up\s+with\b/i,
  /\bupdate\s+on\b/i,
  /\bstatus\b/i,
  /\bdoing\b/i,
  /\bfeeling\b/i,
  /\bcheck\s+on\b/i,
  /\bis\s+\w+\s+okay\b/i,
  /\bis\s+\w+\s+alright\b/i,
  /\bis\s+\w+\s+fine\b/i,
  /\bokay\?\b/i,
  /\balright\?\b/i,
];

// Patterns that indicate the question's subject is the care recipient
// (the monitored person), as opposed to some other topic.
const SUBJECT_KEYWORDS = [
  /\bdad\b/i,
  /\bfather\b/i,
  /\bferdinand\b/i,
  /\bhim\b/i,
  /\bhe\b/i,
  /\bmy\s+dad\b/i,
  /\bmy\s+father\b/i,
  /\bthe\s+patient\b/i,
  /\bloved\s+one\b/i,
  /\bparent\b/i,
  /\bgrandpa\b/i,
  /\bgrandfather\b/i,
];

/**
 * Transform user questions into format WellNuo API understands.
 * WellNuo API only responds with real sensor data for very specific phrases.
 * This function maps common user questions to those phrases.
 * (Same logic as julia-agent/julia-ai/src/agent.py normalize_question)
 */
function normalizeQuestion(userMessage: string): string {
  const msgLower = userMessage.toLowerCase().trim();

  // Two independent signals: "is this a status question?" and
  // "is it about the care recipient?"
  const isStatusQuery = STATUS_KEYWORDS.some(pattern => pattern.test(msgLower));
  const isAboutRecipient = SUBJECT_KEYWORDS.some(pattern => pattern.test(msgLower));

  // If asking about the care recipient's general status
  if (isStatusQuery && isAboutRecipient) {
    console.log(`[Chat] Normalized '${userMessage}' -> 'how is dad doing'`);
    return 'how is dad doing';
  }

  // Generic status questions without clear subject - assume they mean the
  // care recipient (same canonical phrase, different log message).
  if (isStatusQuery && !isAboutRecipient) {
    console.log(`[Chat] Normalized '${userMessage}' -> 'how is dad doing' (assumed recipient)`);
    return 'how is dad doing';
  }

  // If no transformation needed, return original
  console.log(`[Chat] No normalization applied to: '${userMessage}'`);
  return userMessage;
}

// ============================================================================
// Voice Call Transcript Handler (invisible - just captures transcripts)
// ============================================================================

// Props for the headless transcript-capture component below.
interface VoiceCallTranscriptHandlerProps {
  // Called once per finalized transcription segment (user mic or agent audio).
  onTranscript: (role: 'user' | 'assistant', text: string) => void;
  // Called every second with the elapsed call time while connected.
  onDurationUpdate: (seconds: number) => void;
  // Optional debug-log sink; mirrors the console.log output.
  onLog?: (message: string) => void;
}

// Debug log entry type
interface DebugLogEntry {
  id: string;
  timestamp: string;
  level: 'info' | 'warn' | 'error' | 'success';
  message: string;
}

// Headless component: must be mounted inside a LiveKitRoom. It renders
// nothing; it only forwards transcription segments, state-change logs, and
// call duration to the parent via the callbacks above.
function VoiceCallTranscriptHandler({ onTranscript, onDurationUpdate, onLog }: VoiceCallTranscriptHandlerProps) {
  const connectionState = useConnectionState();
  const { audioTrack, state: agentState } = useVoiceAssistant();
  // NOTE(review): callDuration/setCallDuration are never read or written in
  // this component (duration is tracked via durationRef below).
  const [callDuration, setCallDuration] = useState(0);
  // Generic type arguments appear to have been stripped from these hooks by
  // extraction — presumably string|null / state-typed refs; confirm against VCS.
  const [lastProcessedId, setLastProcessedId] = useState(null);
  const prevConnectionStateRef = useRef(null);
  const prevAgentStateRef = useRef(null);

  // Track all audio tracks for transcription
  const tracks = useTracks([Track.Source.Microphone, Track.Source.Unknown], { onlySubscribed: false });

  // Get transcription from agent's audio track
  const { segments: agentSegments } = useTrackTranscription(audioTrack);

  // Get transcription from user's microphone
  const localTrack = tracks.find(t => t.participant?.isLocal);
  const { segments: userSegments } = useTrackTranscription(localTrack);

  // Log connection state changes (ref comparison avoids duplicate logs on re-render)
  useEffect(() => {
    if (prevConnectionStateRef.current !== connectionState) {
      const msg = `Connection: ${prevConnectionStateRef.current || 'initial'} -> ${connectionState}`;
      console.log('[VoiceCall]', msg);
      onLog?.(msg);
      prevConnectionStateRef.current = connectionState;
    }
  }, [connectionState, onLog]);

  // Log agent state changes
  useEffect(() => {
    if (agentState && prevAgentStateRef.current !== agentState) {
      const msg = `Agent state: ${prevAgentStateRef.current || 'initial'} -> ${agentState}`;
      console.log('[VoiceCall]', msg);
      onLog?.(msg);
      prevAgentStateRef.current = agentState;
    }
  }, [agentState, onLog]);

  // Log audio track info
  useEffect(() => {
    if (audioTrack) {
      // audioTrack may have different properties depending on LiveKit version,
      // hence the defensive `as any` probing of sid/trackSid/publication.
      const trackInfo = JSON.stringify({
        hasTrack: !!audioTrack,
        publication: (audioTrack as any)?.publication?.sid || 'no-pub',
        trackSid: (audioTrack as any)?.sid || (audioTrack as any)?.trackSid || 'unknown',
      });
      const msg = `Audio track received: ${trackInfo}`;
      console.log('[VoiceCall]', msg);
      onLog?.(msg);
    }
  }, [audioTrack, onLog]);

  // Log all tracks (participant identity, local/remote, and source per track)
  useEffect(() => {
    if (tracks.length > 0) {
      const trackInfo = tracks.map(t => {
        const participant = t.participant?.identity || 'unknown';
        const source = t.source || 'unknown';
        const isLocal = t.participant?.isLocal ? 'local' : 'remote';
        return `${participant}(${isLocal}):${source}`;
      }).join(', ');
      const msg = `Tracks (${tracks.length}): ${trackInfo}`;
      console.log('[VoiceCall]', msg);
      onLog?.(msg);
    }
  }, [tracks, onLog]);

  // Process agent transcription: forward only final segments, and only once
  // each (lastProcessedId dedupes repeated effect runs for the same segment).
  useEffect(() => {
    if (agentSegments && agentSegments.length > 0) {
      const lastSegment = agentSegments[agentSegments.length - 1];
      if (lastSegment && lastSegment.final && lastSegment.id !== lastProcessedId) {
        setLastProcessedId(lastSegment.id);
        onTranscript('assistant', lastSegment.text);
        const msg = `Julia said: "${lastSegment.text}"`;
        console.log('[VoiceCall]', msg);
        onLog?.(msg);
      }
    }
  }, [agentSegments, lastProcessedId, onTranscript, onLog]);

  // Process user transcription (same final-segment + dedupe logic as above)
  const [lastUserSegmentId, setLastUserSegmentId] = useState(null);
  useEffect(() => {
    if (userSegments && userSegments.length > 0) {
      const lastSegment = userSegments[userSegments.length - 1];
      if (lastSegment && lastSegment.final && lastSegment.id !== lastUserSegmentId) {
        setLastUserSegmentId(lastSegment.id);
        onTranscript('user', lastSegment.text);
        const msg = `User said: "${lastSegment.text}"`;
        console.log('[VoiceCall]', msg);
        onLog?.(msg);
      }
    }
  }, [userSegments, lastUserSegmentId, onTranscript, onLog]);

  // Call duration timer - use ref to avoid state updates during render.
  // NOTE(review): durationRef is never reset, so if the connection drops and
  // reconnects within one mount the count continues rather than restarting.
  const durationRef = useRef(0);
  useEffect(() => {
    if (connectionState === ConnectionState.Connected) {
      const interval = setInterval(() => {
        durationRef.current += 1;
        onDurationUpdate(durationRef.current);
      }, 1000);
      return () => clearInterval(interval);
    }
  }, [connectionState, onDurationUpdate]);

  // Keep screen awake during call; released on unmount via the cleanup.
  useEffect(() => {
    activateKeepAwakeAsync('voice-call');
    return () => {
      deactivateKeepAwake('voice-call');
    };
  }, []);

  // This component renders nothing - it just handles transcripts
  return null;
}

// Main chat screen: text chat over the WellNuo API plus an embedded,
// invisible LiveKit room for voice calls.
export default function ChatScreen() {
  const router = useRouter();
  const { currentBeneficiary, setCurrentBeneficiary } = useBeneficiary();
  const { addTranscriptEntry, clearTranscript }
= useVoiceTranscript(); const { user } = useAuth(); const { callState, startCall, endCall: endVoiceCallContext, minimizeCall, maximizeCall, updateDuration, isCallActive, } = useVoiceCall(); // Helper to create initial message with beneficiary name const createInitialMessage = useCallback((beneficiaryName?: string | null): Message => ({ id: '1', role: 'assistant', content: `Hello! I'm Julia, your AI wellness companion.${beneficiaryName ? `\n\nI'm here to help you monitor ${beneficiaryName}.` : ''}\n\nTap the phone button to start a voice call, or type a message below.`, timestamp: new Date(), }), []); // Custom deployment ID and name from settings const [customDeploymentId, setCustomDeploymentId] = useState(null); const [deploymentName, setDeploymentName] = useState(null); // Chat state - initialized after deployment ID is loaded const [messages, setMessages] = useState([createInitialMessage(null)]); const [sortNewestFirst, setSortNewestFirst] = useState(false); // Voice call state (local connecting state only) const [isConnectingVoice, setIsConnectingVoice] = useState(false); // Debug logs state const [debugLogs, setDebugLogs] = useState([]); const [showDebugPanel, setShowDebugPanel] = useState(false); const debugLogIdRef = useRef(0); // Add debug log entry const addDebugLog = useCallback((message: string, level: DebugLogEntry['level'] = 'info') => { const now = new Date(); const timestamp = now.toLocaleTimeString('en-US', { hour12: false, hour: '2-digit', minute: '2-digit', second: '2-digit', }) + '.' 
+ now.getMilliseconds().toString().padStart(3, '0'); const entry: DebugLogEntry = { id: `log-${++debugLogIdRef.current}`, timestamp, level, message, }; setDebugLogs(prev => [...prev.slice(-100), entry]); // Keep last 100 logs }, []); // Copy logs to clipboard const copyLogsToClipboard = useCallback(async () => { const logsText = debugLogs.map(log => `[${log.timestamp}] ${log.level.toUpperCase()}: ${log.message}`).join('\n'); await Clipboard.setStringAsync(logsText); Alert.alert('Copied', `${debugLogs.length} log entries copied to clipboard`); }, [debugLogs]); // Clear debug logs const clearDebugLogs = useCallback(() => { setDebugLogs([]); addDebugLog('Logs cleared', 'info'); }, [addDebugLog]); // Pulsing animation for active call const pulseAnim = useRef(new Animated.Value(1)).current; // Start pulsing animation when call is active useEffect(() => { if (isCallActive) { const pulse = Animated.loop( Animated.sequence([ Animated.timing(pulseAnim, { toValue: 1.15, duration: 600, useNativeDriver: true, }), Animated.timing(pulseAnim, { toValue: 1, duration: 600, useNativeDriver: true, }), ]) ); pulse.start(); return () => pulse.stop(); } else { pulseAnim.setValue(1); } }, [isCallActive, pulseAnim]); // Track if we've shown the voice call separator for current call const [hasShownVoiceSeparator, setHasShownVoiceSeparator] = useState(false); // Reset separator flag when starting a new call useEffect(() => { if (isCallActive && !hasShownVoiceSeparator) { // Will show separator on first voice message } else if (!isCallActive) { setHasShownVoiceSeparator(false); } }, [isCallActive]); const [input, setInput] = useState(''); const [isSending, setIsSending] = useState(false); const inputRef = useRef(''); const flatListRef = useRef(null); // Keep inputRef in sync with input state useEffect(() => { inputRef.current = input; }, [input]); // Beneficiary picker const [showBeneficiaryPicker, setShowBeneficiaryPicker] = useState(false); const [beneficiaries, setBeneficiaries] = 
useState([]); const [loadingBeneficiaries, setLoadingBeneficiaries] = useState(false); // Load custom deployment ID and name from settings // Use useFocusEffect to reload when returning from profile screen useFocusEffect( useCallback(() => { const loadDeploymentData = async () => { const savedId = await api.getDeploymentId(); const savedName = await api.getDeploymentName(); console.log('[Chat] useFocusEffect: loaded deployment ID:', savedId, 'name:', savedName); setCustomDeploymentId(savedId); setDeploymentName(savedName); }; loadDeploymentData(); }, []) ); // When deployment ID changes, end call and clear chat // Track previous value to detect actual changes (not just re-renders) const previousDeploymentIdRef = useRef(undefined); useEffect(() => { // undefined means "not yet initialized" - store current value and skip if (previousDeploymentIdRef.current === undefined) { console.log('[Chat] Initializing deployment tracking:', customDeploymentId, 'name:', deploymentName); previousDeploymentIdRef.current = customDeploymentId; // Update initial message with deployment name if we have one if (customDeploymentId || deploymentName) { setMessages([createInitialMessage(deploymentName)]); } return; } // Check if deployment actually changed if (previousDeploymentIdRef.current !== customDeploymentId) { console.log('[Chat] Deployment changed!', { old: previousDeploymentIdRef.current, new: customDeploymentId, name: deploymentName, isCallActive, }); // End any active call endVoiceCallContext(); // Clear chat with new initial message (use name instead of ID) setMessages([createInitialMessage(deploymentName)]); setHasShownVoiceSeparator(false); // Update ref previousDeploymentIdRef.current = customDeploymentId; } }, [customDeploymentId, deploymentName, createInitialMessage, isCallActive, endVoiceCallContext]); // Update initial message when deploymentName is loaded (but only if chat has just the initial message) useEffect(() => { if (deploymentName && messages.length === 1 && 
messages[0].id === '1') { setMessages([createInitialMessage(deploymentName)]); } }, [deploymentName, createInitialMessage]); // Load beneficiaries const loadBeneficiaries = useCallback(async () => { setLoadingBeneficiaries(true); try { const response = await api.getAllBeneficiaries(); if (response.ok && response.data) { setBeneficiaries(response.data); return response.data; } return []; } catch (error) { console.error('Failed to load beneficiaries:', error); return []; } finally { setLoadingBeneficiaries(false); } }, []); // Auto-select first beneficiary useEffect(() => { const autoSelect = async () => { if (!currentBeneficiary) { const loaded = await loadBeneficiaries(); if (loaded.length > 0) { setCurrentBeneficiary(loaded[0]); } } }; autoSelect(); }, []); // Helper function to scroll to the latest message based on sort mode const scrollToLatestMessage = useCallback((animated = true) => { if (sortNewestFirst) { // When newest first, latest messages are at top (index 0) flatListRef.current?.scrollToOffset({ offset: 0, animated }); } else { // When oldest first, latest messages are at bottom flatListRef.current?.scrollToEnd({ animated }); } }, [sortNewestFirst]); // Scroll to latest when keyboard shows useEffect(() => { const keyboardShowListener = Keyboard.addListener( Platform.OS === 'ios' ? 
'keyboardWillShow' : 'keyboardDidShow', () => { setTimeout(() => { scrollToLatestMessage(true); }, 100); } ); return () => keyboardShowListener.remove(); }, [scrollToLatestMessage]); const openBeneficiaryPicker = useCallback(() => { setShowBeneficiaryPicker(true); loadBeneficiaries(); }, [loadBeneficiaries]); const selectBeneficiary = useCallback((beneficiary: Beneficiary) => { setCurrentBeneficiary(beneficiary); setShowBeneficiaryPicker(false); }, [setCurrentBeneficiary]); // ============================================================================ // Voice Call Functions // ============================================================================ // Start voice call const startVoiceCall = useCallback(async () => { if (isConnectingVoice || isCallActive) return; setIsConnectingVoice(true); addDebugLog('Starting voice call...', 'info'); console.log('[Chat] Starting voice call...'); try { // Build beneficiary data for the agent // Priority: customDeploymentId from settings > currentBeneficiary > first beneficiary > fallback const beneficiaryData: BeneficiaryData = { deploymentId: customDeploymentId || currentBeneficiary?.id?.toString() || beneficiaries[0]?.id?.toString() || '21', beneficiaryNamesDict: {}, }; addDebugLog(`Deployment ID: ${beneficiaryData.deploymentId}`, 'info'); // Add names dict if not in single deployment mode if (!SINGLE_DEPLOYMENT_MODE) { beneficiaries.forEach(b => { beneficiaryData.beneficiaryNamesDict[b.id.toString()] = b.name; }); } // Get LiveKit token addDebugLog('Requesting LiveKit token...', 'info'); const userIdStr = user?.user_id?.toString() || 'user-' + Date.now(); const tokenResponse = await getToken(userIdStr, beneficiaryData); if (!tokenResponse.success || !tokenResponse.data) { throw new Error(tokenResponse.error || 'Failed to get voice token'); } addDebugLog(`Token received! 
Room: ${tokenResponse.data.roomName}`, 'success'); addDebugLog(`WS URL: ${tokenResponse.data.wsUrl}`, 'info'); console.log('[Chat] Got voice token, connecting to room:', tokenResponse.data.roomName); // Add call start message to chat const callStartMessage: Message = { id: `call-start-${Date.now()}`, role: 'assistant', content: 'Voice call started', timestamp: new Date(), isSystem: true, }; setMessages(prev => [...prev, callStartMessage]); // Clear previous transcript and start call via context clearTranscript(); addDebugLog('Calling startCall with token and wsUrl...', 'info'); startCall({ token: tokenResponse.data.token, wsUrl: tokenResponse.data.wsUrl, beneficiaryName: currentBeneficiary?.name, beneficiaryId: currentBeneficiary?.id?.toString(), }); addDebugLog('startCall called, waiting for LiveKitRoom to connect...', 'success'); } catch (error) { const errorMsg = error instanceof Error ? error.message : 'Unknown error'; addDebugLog(`Voice call error: ${errorMsg}`, 'error'); console.error('[Chat] Voice call error:', error); Alert.alert( 'Voice Call Error', error instanceof Error ? 
error.message : 'Failed to start voice call' ); } finally { setIsConnectingVoice(false); } }, [isConnectingVoice, isCallActive, currentBeneficiary, beneficiaries, user, clearTranscript, startCall, customDeploymentId, addDebugLog]); // End voice call and log to chat const endVoiceCall = useCallback(() => { console.log('[Chat] Ending voice call...'); // Add call end message to chat with duration const duration = callState.callDuration; const minutes = Math.floor(duration / 60); const seconds = duration % 60; const durationStr = `${minutes}:${seconds.toString().padStart(2, '0')}`; const callEndMessage: Message = { id: `call-end-${Date.now()}-${Math.random().toString(36).slice(2)}`, role: 'assistant', content: `Call ended (${durationStr})`, timestamp: new Date(), isSystem: true, }; setMessages(prev => [...prev, callEndMessage]); setHasShownVoiceSeparator(false); endVoiceCallContext(); }, [endVoiceCallContext, callState.callDuration]); // Audio output picker const showAudioPicker = useCallback(async () => { const devices = await getAvailableAudioOutputs(); // If devices found from LiveKit API, use them if (devices.length > 0) { const buttons: any[] = devices.map(device => ({ text: device.name, onPress: () => selectAudioOutput(device.id), })); buttons.push({ text: 'Cancel', style: 'cancel' }); Alert.alert('Audio Output', 'Select audio device:', buttons); return; } // Fallback for Android (and iOS if no devices found) // Show simple Speaker/Earpiece toggle using setAudioOutput() Alert.alert( 'Audio Output', 'Select audio output:', [ { text: '🔊 Speaker', onPress: () => setAudioOutput(true), }, { text: '📱 Earpiece', onPress: () => setAudioOutput(false), }, { text: 'Cancel', style: 'cancel' }, ] ); }, []); // Handle voice transcript entries - add to chat in real-time const handleVoiceTranscript = useCallback((role: 'user' | 'assistant', text: string) => { if (!text.trim()) return; // Create voice message and add to chat immediately const voiceMessage: Message = { id: 
`voice-${Date.now()}-${Math.random().toString(36).slice(2)}`, role, content: text.trim(), timestamp: new Date(), isVoice: true, }; setMessages(prev => [...prev, voiceMessage]); // Scroll to latest message (respects sort mode) setTimeout(() => { scrollToLatestMessage(true); }, 100); // Also store in transcript context for persistence addTranscriptEntry(role, text); }, [hasShownVoiceSeparator, addTranscriptEntry, scrollToLatestMessage]); // Cached API token for WellNuo const apiTokenRef = useRef(null); // Get WellNuo API token (same credentials as julia-agent) const getWellNuoToken = useCallback(async (): Promise => { if (apiTokenRef.current) { return apiTokenRef.current; } const nonce = Math.floor(Math.random() * 1000000).toString(); const response = await fetch(API_URL, { method: 'POST', headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, body: new URLSearchParams({ function: 'credentials', clientId: 'MA_001', user_name: WELLNUO_USER, ps: WELLNUO_PASSWORD, nonce: nonce, }).toString(), }); const data = await response.json(); if (data.status === '200 OK' && data.access_token) { apiTokenRef.current = data.access_token; console.log('[Chat] WellNuo token obtained'); return data.access_token; } throw new Error('Failed to authenticate with WellNuo API'); }, []); // Text chat - send message via API (same as julia-agent) const sendTextMessage = useCallback(async () => { const trimmedInput = inputRef.current.trim(); if (!trimmedInput || isSending) return; const userMessage: Message = { id: Date.now().toString(), role: 'user', content: trimmedInput, timestamp: new Date(), }; // Clear input immediately before any async operations setInput(''); inputRef.current = ''; setMessages(prev => [...prev, userMessage]); setIsSending(true); Keyboard.dismiss(); try { // Get WellNuo API token (uses anandk credentials like julia-agent) const token = await getWellNuoToken(); // Normalize question to format WellNuo API understands // (same logic as 
julia-agent/julia-ai/src/agent.py) const normalizedQuestion = normalizeQuestion(trimmedInput); // Build beneficiary_names_dict from all loaded beneficiaries // Format: {"21": "papa", "69": "David"} const beneficiaryNamesDict: Record = {}; beneficiaries.forEach(b => { beneficiaryNamesDict[b.id.toString()] = b.name; }); // Get deployment_id: custom from settings > current beneficiary > first beneficiary > fallback const deploymentId = customDeploymentId || currentBeneficiary?.id?.toString() || beneficiaries[0]?.id?.toString() || '21'; // Call API with EXACT same params as voice agent // SINGLE_DEPLOYMENT_MODE: sends only deployment_id (no beneficiary_names_dict) const requestParams: Record = { function: 'ask_wellnuo_ai', clientId: 'MA_001', user_name: WELLNUO_USER, token: token, question: normalizedQuestion, deployment_id: deploymentId, }; // Only add beneficiary_names_dict if NOT in single deployment mode if (!SINGLE_DEPLOYMENT_MODE) { requestParams.beneficiary_names_dict = JSON.stringify(beneficiaryNamesDict); } const response = await fetch(API_URL, { method: 'POST', headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, body: new URLSearchParams(requestParams).toString(), }); const data = await response.json(); if (data.ok && data.response?.body) { const assistantMessage: Message = { id: (Date.now() + 1).toString(), role: 'assistant', content: data.response.body, timestamp: new Date(), }; setMessages(prev => [...prev, assistantMessage]); } else { // Token might be expired, clear and retry once if (data.status === '401 Unauthorized') { apiTokenRef.current = null; throw new Error('Session expired, please try again'); } throw new Error('Could not get response'); } } catch (error) { const errorMessage: Message = { id: (Date.now() + 1).toString(), role: 'assistant', content: `Sorry, I encountered an error: ${error instanceof Error ? 
error.message : 'Unknown error'}`, timestamp: new Date(), }; setMessages(prev => [...prev, errorMessage]); } finally { setIsSending(false); } }, [isSending, getWellNuoToken, customDeploymentId, currentBeneficiary, beneficiaries]); // Render message bubble const renderMessage = ({ item }: { item: Message }) => { const isUser = item.role === 'user'; const isVoice = item.isVoice; const isSystem = item.isSystem; // System messages (like "Voice Call Transcript" separator) if (isSystem) { return ( {item.content.replace(/---/g, '').trim()} ); } return ( {!isUser && ( J )} {isVoice && ( 🎤 )} {item.content} {item.timestamp.toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' })} ); }; return ( {/* Header */} router.push('/(tabs)')}> J Julia AI {currentBeneficiary ? `About ${currentBeneficiary.name}` : 'Online'} setSortNewestFirst(prev => !prev)} > { Alert.alert( 'Clear Chat', 'Are you sure you want to clear all messages?', [ { text: 'Cancel', style: 'cancel' }, { text: 'Clear', style: 'destructive', onPress: () => { setMessages([ { id: '1', role: 'assistant', content: 'Hello! I\'m Julia, your AI wellness assistant. You can type a message or tap the phone button to start a voice call.', timestamp: new Date(), }, ]); }, }, ] ); }} > {/* Beneficiary Picker Modal */} setShowBeneficiaryPicker(false)} > Select Beneficiary setShowBeneficiaryPicker(false)}> {loadingBeneficiaries ? ( ) : beneficiaries.length === 0 ? ( No beneficiaries found ) : ( item.id.toString()} renderItem={({ item }) => ( selectBeneficiary(item)} > {item.name.split(' ').map(n => n[0]).join('').slice(0, 2)} {item.name} {currentBeneficiary?.id === item.id && ( )} )} style={styles.beneficiaryList} /> )} {/* Debug Logs Modal */} setShowDebugPanel(false)} > Debug Logs ({debugLogs.length}) setShowDebugPanel(false)}> {debugLogs.length === 0 ? ( No logs yet. Start a voice call to see logs. 
) : ( debugLogs.map(log => ( {log.timestamp} {log.message} )) )} {/* Messages */} item.id} renderItem={renderMessage} contentContainerStyle={styles.messagesList} showsVerticalScrollIndicator={false} onContentSizeChange={() => { scrollToLatestMessage(true); }} /> {/* Typing indicator */} {isSending && ( Julia is typing... )} {/* Input */} {/* Voice Call Button - becomes pulsing bubble during call */} {isConnectingVoice ? ( ) : isCallActive ? ( ) : ( )} {/* Call duration badge */} {isCallActive && ( {Math.floor(callState.callDuration / 60).toString().padStart(2, '0')}: {(callState.callDuration % 60).toString().padStart(2, '0')} )} {/* Audio output button - only during active call */} {isCallActive && ( )} {/* Invisible LiveKit Room - runs in background during call */} {isCallActive && callState.token && callState.wsUrl && ( { console.log('[Chat] LiveKit connected'); addDebugLog('LiveKitRoom: CONNECTED to server!', 'success'); }} onDisconnected={() => { addDebugLog('LiveKitRoom: DISCONNECTED', 'warn'); endVoiceCall(); }} onError={(error) => { const errorMsg = error?.message || 'Unknown error'; addDebugLog(`LiveKitRoom ERROR: ${errorMsg}`, 'error'); console.error('[Chat] LiveKit error:', error); Alert.alert('Voice Call Error', error.message); endVoiceCall(); }} > )} ); } const styles = StyleSheet.create({ container: { flex: 1, backgroundColor: AppColors.surface, }, header: { flexDirection: 'row', alignItems: 'center', justifyContent: 'space-between', paddingHorizontal: Spacing.md, paddingVertical: Spacing.sm, backgroundColor: AppColors.background, borderBottomWidth: 1, borderBottomColor: AppColors.border, }, backButton: { padding: Spacing.xs, marginRight: Spacing.sm, }, headerInfo: { flex: 1, flexDirection: 'row', alignItems: 'center', }, headerAvatar: { width: 40, height: 40, borderRadius: BorderRadius.full, backgroundColor: AppColors.success, justifyContent: 'center', alignItems: 'center', marginRight: Spacing.sm, }, headerAvatarText: { fontSize: FontSizes.lg, 
fontWeight: '600', color: AppColors.white, }, headerTitle: { fontSize: FontSizes.lg, fontWeight: '600', color: AppColors.textPrimary, }, headerSubtitle: { fontSize: FontSizes.sm, color: AppColors.success, }, headerButton: { padding: Spacing.xs, marginLeft: Spacing.sm, }, chatContainer: { flex: 1, }, messagesList: { padding: Spacing.md, paddingBottom: Spacing.lg, }, messageContainer: { flexDirection: 'row', marginBottom: Spacing.md, alignItems: 'flex-end', }, userMessageContainer: { justifyContent: 'flex-end', }, assistantMessageContainer: { justifyContent: 'flex-start', }, avatarContainer: { width: 32, height: 32, borderRadius: BorderRadius.full, backgroundColor: AppColors.success, justifyContent: 'center', alignItems: 'center', marginRight: Spacing.xs, }, avatarText: { fontSize: FontSizes.sm, fontWeight: '600', color: AppColors.white, }, messageBubble: { maxWidth: '75%', padding: Spacing.sm + 4, borderRadius: BorderRadius.lg, }, userBubble: { backgroundColor: AppColors.primary, borderBottomRightRadius: BorderRadius.sm, }, assistantBubble: { backgroundColor: AppColors.background, borderBottomLeftRadius: BorderRadius.sm, }, messageText: { fontSize: FontSizes.base, lineHeight: 22, }, userMessageText: { color: AppColors.white, }, assistantMessageText: { color: AppColors.textPrimary, }, timestamp: { fontSize: FontSizes.xs, color: AppColors.textMuted, marginTop: Spacing.xs, alignSelf: 'flex-end', }, userTimestamp: { color: 'rgba(255,255,255,0.7)', }, inputContainer: { flexDirection: 'row', alignItems: 'flex-end', padding: Spacing.md, backgroundColor: AppColors.background, borderTopWidth: 1, borderTopColor: AppColors.border, }, input: { flex: 1, backgroundColor: AppColors.surface, borderRadius: BorderRadius.xl, paddingHorizontal: Spacing.md, paddingVertical: Spacing.sm, fontSize: FontSizes.base, color: AppColors.textPrimary, maxHeight: 100, marginRight: Spacing.sm, }, voiceButton: { width: 44, height: 44, borderRadius: BorderRadius.full, backgroundColor: 
AppColors.surface, justifyContent: 'center', alignItems: 'center', marginRight: Spacing.sm, borderWidth: 1, borderColor: AppColors.primary, }, voiceButtonConnecting: { borderColor: AppColors.success, backgroundColor: 'rgba(90, 200, 168, 0.1)', }, voiceButtonActive: { backgroundColor: AppColors.error, borderColor: AppColors.error, }, audioButton: { width: 44, height: 44, borderRadius: 22, backgroundColor: AppColors.surface, justifyContent: 'center', alignItems: 'center', marginRight: Spacing.sm, borderWidth: 1, borderColor: AppColors.primary, }, callActiveIndicator: { width: '100%', height: '100%', justifyContent: 'center', alignItems: 'center', }, callDurationBadge: { position: 'absolute', left: 32, top: -8, backgroundColor: AppColors.error, paddingHorizontal: 6, paddingVertical: 2, borderRadius: 8, minWidth: 42, alignItems: 'center', }, callDurationText: { fontSize: 10, fontWeight: '600', color: AppColors.white, fontVariant: ['tabular-nums'], }, sendButton: { width: 44, height: 44, borderRadius: BorderRadius.full, backgroundColor: AppColors.primary, justifyContent: 'center', alignItems: 'center', }, sendButtonDisabled: { backgroundColor: AppColors.surface, }, // Typing indicator typingIndicator: { flexDirection: 'row', alignItems: 'center', paddingHorizontal: Spacing.md, paddingVertical: Spacing.sm, gap: 8, }, typingDots: { flexDirection: 'row', alignItems: 'center', gap: 4, }, typingDot: { width: 8, height: 8, borderRadius: 4, backgroundColor: AppColors.primary, opacity: 0.4, }, typingDot1: { opacity: 0.4, }, typingDot2: { opacity: 0.6, }, typingDot3: { opacity: 0.8, }, typingText: { fontSize: 13, color: AppColors.textSecondary, fontStyle: 'italic', }, // Modal styles modalOverlay: { flex: 1, backgroundColor: 'rgba(0, 0, 0, 0.5)', justifyContent: 'flex-end', }, modalContent: { backgroundColor: AppColors.background, borderTopLeftRadius: BorderRadius.xl, borderTopRightRadius: BorderRadius.xl, maxHeight: '70%', paddingBottom: Spacing.xl, }, modalHeader: { 
flexDirection: 'row', justifyContent: 'space-between', alignItems: 'center', padding: Spacing.md, borderBottomWidth: 1, borderBottomColor: AppColors.border, }, modalTitle: { fontSize: FontSizes.lg, fontWeight: '600', color: AppColors.textPrimary, }, modalLoading: { padding: Spacing.xl, alignItems: 'center', }, modalEmpty: { padding: Spacing.xl, alignItems: 'center', }, emptyText: { fontSize: FontSizes.base, color: AppColors.textSecondary, }, beneficiaryList: { paddingHorizontal: Spacing.md, }, beneficiaryItem: { flexDirection: 'row', alignItems: 'center', padding: Spacing.md, backgroundColor: AppColors.surface, borderRadius: BorderRadius.md, marginTop: Spacing.sm, }, beneficiaryItemSelected: { backgroundColor: AppColors.primaryLight || '#E3F2FD', borderWidth: 1, borderColor: AppColors.primary, }, beneficiaryAvatar: { width: 44, height: 44, borderRadius: BorderRadius.full, backgroundColor: AppColors.primary, justifyContent: 'center', alignItems: 'center', marginRight: Spacing.md, }, beneficiaryAvatarText: { fontSize: FontSizes.base, fontWeight: '600', color: AppColors.white, }, beneficiaryInfo: { flex: 1, }, beneficiaryName: { fontSize: FontSizes.base, fontWeight: '500', color: AppColors.textPrimary, }, // Voice message styles voiceBubble: { borderWidth: 1, borderColor: 'rgba(59, 130, 246, 0.3)', }, voiceIndicator: { position: 'absolute', top: 6, right: 6, }, voiceIndicatorEmoji: { fontSize: 10, }, // System message styles systemMessageContainer: { flexDirection: 'row', alignItems: 'center', marginVertical: Spacing.md, paddingHorizontal: Spacing.md, }, systemMessageLine: { flex: 1, height: 1, backgroundColor: AppColors.border, }, systemMessageBadge: { flexDirection: 'row', alignItems: 'center', paddingHorizontal: Spacing.sm, paddingVertical: 4, backgroundColor: AppColors.surface, borderRadius: BorderRadius.sm, marginHorizontal: Spacing.sm, }, systemMessageText: { fontSize: FontSizes.xs, color: AppColors.textMuted, marginLeft: 4, }, // Debug panel styles 
debugButtonActive: { backgroundColor: 'rgba(59, 130, 246, 0.1)', }, debugModalContent: { maxHeight: '80%', }, debugHeaderButtons: { flexDirection: 'row', alignItems: 'center', gap: Spacing.md, }, debugHeaderBtn: { padding: Spacing.xs, }, debugLogsContainer: { flex: 1, padding: Spacing.sm, backgroundColor: '#1a1a2e', }, debugEmptyText: { color: AppColors.textMuted, textAlign: 'center', padding: Spacing.lg, fontSize: FontSizes.sm, }, debugLogEntry: { flexDirection: 'row', paddingVertical: 3, borderBottomWidth: 1, borderBottomColor: 'rgba(255,255,255,0.05)', }, debugTimestamp: { color: '#6b7280', fontSize: 11, fontFamily: Platform.OS === 'ios' ? 'Menlo' : 'monospace', marginRight: Spacing.sm, minWidth: 90, }, debugMessage: { color: '#e5e7eb', fontSize: 11, fontFamily: Platform.OS === 'ios' ? 'Menlo' : 'monospace', flex: 1, flexWrap: 'wrap', }, debugError: { color: '#ef4444', }, debugWarn: { color: '#f59e0b', }, debugSuccess: { color: '#10b981', }, });