Compare commits

..

No commits in common. "ad0fe41ee9f8f3582d3e9b487871fac4d92aaf8b" and "a578ec80815a3164a8c1fb86b06b0a2af81051e1" have entirely different histories.

15 changed files with 2467 additions and 1180 deletions

View File

@ -1,8 +1,6 @@
import { Tabs } from 'expo-router';
import React from 'react';
import { Platform } from 'react-native';
import { Feather } from '@expo/vector-icons';
import { useSafeAreaInsets } from 'react-native-safe-area-context';
import { HapticTab } from '@/components/haptic-tab';
import { AppColors } from '@/constants/theme';
@ -11,17 +9,6 @@ import { useColorScheme } from '@/hooks/use-color-scheme';
export default function TabLayout() {
const colorScheme = useColorScheme();
const isDark = colorScheme === 'dark';
const insets = useSafeAreaInsets();
// Calculate tab bar height based on safe area
// On iOS with home indicator, insets.bottom is ~34px
// On Android with gesture navigation or software buttons (Samsung/Pixel):
// - insets.bottom should reflect the navigation bar height
// - But some devices/modes may return 0, so we add a minimum for Android
// Android minimum: 16px to ensure content doesn't touch system buttons
const androidMinPadding = Platform.OS === 'android' ? 16 : 0;
const bottomPadding = Math.max(insets.bottom, androidMinPadding, 10);
const tabBarHeight = 60 + bottomPadding; // 60px for content + safe area padding
return (
<Tabs
@ -31,8 +18,8 @@ export default function TabLayout() {
tabBarStyle: {
backgroundColor: isDark ? '#151718' : AppColors.background,
borderTopColor: isDark ? '#2D3135' : AppColors.border,
height: tabBarHeight,
paddingBottom: bottomPadding,
height: 85,
paddingBottom: 25,
paddingTop: 10,
},
tabBarLabelStyle: {
@ -69,6 +56,13 @@ export default function TabLayout() {
),
}}
/>
{/* Voice tab - HIDDEN (calls go through Julia tab chat screen) */}
<Tabs.Screen
name="voice"
options={{
href: null,
}}
/>
<Tabs.Screen
name="profile"
options={{
@ -78,6 +72,13 @@ export default function TabLayout() {
),
}}
/>
{/* Debug tab - HIDDEN, no longer needed */}
<Tabs.Screen
name="debug"
options={{
href: null,
}}
/>
{/* Hide explore tab */}
<Tabs.Screen
name="explore"

View File

@ -1,7 +1,8 @@
/**
* Chat Screen - Text Chat with Julia AI
*
* Clean text chat interface with integrated voice calls.
* Clean text chat interface.
* Voice calls are handled by separate voice-call.tsx screen.
*/
import React, { useState, useCallback, useRef, useEffect } from 'react';
@ -16,37 +17,17 @@ import {
ActivityIndicator,
Keyboard,
Platform,
Alert,
Animated,
} from 'react-native';
import { KeyboardAvoidingView } from 'react-native-keyboard-controller';
import { Ionicons } from '@expo/vector-icons';
import { SafeAreaView } from 'react-native-safe-area-context';
import { useRouter } from 'expo-router';
import { activateKeepAwakeAsync, deactivateKeepAwake } from 'expo-keep-awake';
import { api } from '@/services/api';
import { useBeneficiary } from '@/contexts/BeneficiaryContext';
import { useVoiceTranscript } from '@/contexts/VoiceTranscriptContext';
import { useVoiceCall } from '@/contexts/VoiceCallContext';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import type { Message, Beneficiary } from '@/types';
// LiveKit imports
import {
registerGlobals,
LiveKitRoom,
useVoiceAssistant,
useConnectionState,
useTrackTranscription,
useTracks,
} from '@livekit/react-native';
import { ConnectionState, Track } from 'livekit-client';
import { getToken, type BeneficiaryData } from '@/services/livekitService';
import { useAuth } from '@/contexts/AuthContext';
// Register LiveKit globals (must be called before using LiveKit)
registerGlobals();
const API_URL = 'https://eluxnetworks.net/function/well-api/api';
// WellNuo API credentials (same as julia-agent)
@ -126,151 +107,45 @@ function normalizeQuestion(userMessage: string): string {
return userMessage;
}
// ============================================================================
// Voice Call Transcript Handler (invisible - just captures transcripts)
// ============================================================================
/**
 * Props for the invisible transcript-handler component mounted inside
 * the LiveKit room during a voice call.
 */
interface VoiceCallTranscriptHandlerProps {
  /** Called once per finalized transcription segment, for both speakers. */
  onTranscript: (role: 'user' | 'assistant', text: string) => void;
  /** Called roughly once per second with the elapsed call time in seconds. */
  onDurationUpdate: (seconds: number) => void;
}
/**
 * Invisible component rendered inside <LiveKitRoom>. It renders nothing
 * (returns null) and exists only to run LiveKit hooks:
 *  - forwards finalized transcription segments from the agent's audio track
 *    and the local user's microphone to `onTranscript`,
 *  - ticks `onDurationUpdate` once per second while the room is connected,
 *  - keeps the screen awake for the lifetime of the call.
 */
function VoiceCallTranscriptHandler({ onTranscript, onDurationUpdate }: VoiceCallTranscriptHandlerProps) {
  const connectionState = useConnectionState();
  const { audioTrack } = useVoiceAssistant();
  // NOTE(review): `callDuration` state is declared but never read or set in
  // this function — the timer below uses `durationRef` instead. Looks like
  // dead state; confirm and remove.
  const [callDuration, setCallDuration] = useState(0);
  const [lastProcessedId, setLastProcessedId] = useState<string | null>(null);
  // Track all audio tracks for transcription
  const tracks = useTracks([Track.Source.Microphone], { onlySubscribed: true });
  // Get transcription from agent's audio track
  const { segments: agentSegments } = useTrackTranscription(audioTrack);
  // Get transcription from user's microphone
  // NOTE(review): `localTrack` may be undefined until the local mic track is
  // published; assumes useTrackTranscription tolerates an undefined track —
  // verify against the @livekit/react-native API.
  const localTrack = tracks.find(t => t.participant?.isLocal);
  const { segments: userSegments } = useTrackTranscription(localTrack);
  // Process agent transcription.
  // Only the last segment of the array is examined per effect run, and its id
  // is remembered so a finalized segment is forwarded exactly once.
  // NOTE(review): if two or more segments finalize between renders, the
  // intermediate ones are never forwarded — confirm segments finalize one at
  // a time, or iterate over all unseen final segments instead.
  useEffect(() => {
    if (agentSegments && agentSegments.length > 0) {
      const lastSegment = agentSegments[agentSegments.length - 1];
      if (lastSegment && lastSegment.final && lastSegment.id !== lastProcessedId) {
        setLastProcessedId(lastSegment.id);
        onTranscript('assistant', lastSegment.text);
        console.log('[VoiceCall] Agent said:', lastSegment.text);
      }
    }
  }, [agentSegments, lastProcessedId, onTranscript]);
  // Process user transcription (same last-segment/dedupe pattern as above,
  // with its own id tracker so user and agent streams don't interfere).
  const [lastUserSegmentId, setLastUserSegmentId] = useState<string | null>(null);
  useEffect(() => {
    if (userSegments && userSegments.length > 0) {
      const lastSegment = userSegments[userSegments.length - 1];
      if (lastSegment && lastSegment.final && lastSegment.id !== lastUserSegmentId) {
        setLastUserSegmentId(lastSegment.id);
        onTranscript('user', lastSegment.text);
        console.log('[VoiceCall] User said:', lastSegment.text);
      }
    }
  }, [userSegments, lastUserSegmentId, onTranscript]);
  // Call duration timer - use ref to avoid state updates during render.
  // The interval only runs while connected; the cleanup stops it on
  // disconnect or unmount.
  // NOTE(review): `durationRef` is never reset, so if the connection drops
  // and reconnects within one mount the count continues cumulatively —
  // confirm that is the intended behavior.
  const durationRef = useRef(0);
  useEffect(() => {
    if (connectionState === ConnectionState.Connected) {
      const interval = setInterval(() => {
        durationRef.current += 1;
        onDurationUpdate(durationRef.current);
      }, 1000);
      return () => clearInterval(interval);
    }
  }, [connectionState, onDurationUpdate]);
  // Keep screen awake during call; the tag scopes the wake lock so
  // deactivation on unmount releases only this component's hold.
  useEffect(() => {
    activateKeepAwakeAsync('voice-call');
    return () => {
      deactivateKeepAwake('voice-call');
    };
  }, []);
  // This component renders nothing - it just handles transcripts
  return null;
}
export default function ChatScreen() {
const router = useRouter();
const { currentBeneficiary, setCurrentBeneficiary } = useBeneficiary();
const { addTranscriptEntry, clearTranscript } = useVoiceTranscript();
const { user } = useAuth();
const {
callState,
startCall,
endCall: endVoiceCallContext,
minimizeCall,
maximizeCall,
updateDuration,
isCallActive,
} = useVoiceCall();
const { getTranscriptAsMessages, hasNewTranscript, markTranscriptAsShown } = useVoiceTranscript();
// Helper to create initial message with deployment ID
const createInitialMessage = useCallback((deploymentId?: string | null): Message => ({
// Chat state
const [messages, setMessages] = useState<Message[]>([
{
id: '1',
role: 'assistant',
content: `Hello! I'm Julia, your AI wellness companion.${deploymentId ? `\n\nDeployment ID: ${deploymentId}` : ''}\n\nTap the phone button to start a voice call, or type a message below.`,
content: 'Hello! I\'m Julia, your AI wellness assistant. You can type a message or tap the phone button to start a voice call.',
timestamp: new Date(),
}), []);
},
]);
// Custom deployment ID from settings
const [customDeploymentId, setCustomDeploymentId] = useState<string | null>(null);
// Chat state - initialized after deployment ID is loaded
const [messages, setMessages] = useState<Message[]>([createInitialMessage(null)]);
const [sortNewestFirst, setSortNewestFirst] = useState(false);
// Voice call state (local connecting state only)
const [isConnectingVoice, setIsConnectingVoice] = useState(false);
// Pulsing animation for active call
const pulseAnim = useRef(new Animated.Value(1)).current;
// Start pulsing animation when call is active
// Add voice call transcript to messages when returning from call
useEffect(() => {
if (isCallActive) {
const pulse = Animated.loop(
Animated.sequence([
Animated.timing(pulseAnim, {
toValue: 1.15,
duration: 600,
useNativeDriver: true,
}),
Animated.timing(pulseAnim, {
toValue: 1,
duration: 600,
useNativeDriver: true,
}),
])
);
pulse.start();
return () => pulse.stop();
} else {
pulseAnim.setValue(1);
}
}, [isCallActive, pulseAnim]);
if (hasNewTranscript) {
const transcriptMessages = getTranscriptAsMessages();
if (transcriptMessages.length > 0) {
// Add a separator message
const separatorMessage: Message = {
id: `voice-separator-${Date.now()}`,
role: 'assistant',
content: '--- Voice Call Transcript ---',
timestamp: new Date(),
isSystem: true,
};
// Track if we've shown the voice call separator for current call
const [hasShownVoiceSeparator, setHasShownVoiceSeparator] = useState(false);
setMessages(prev => [...prev, separatorMessage, ...transcriptMessages]);
markTranscriptAsShown();
// Reset separator flag when starting a new call
useEffect(() => {
if (isCallActive && !hasShownVoiceSeparator) {
// Will show separator on first voice message
} else if (!isCallActive) {
setHasShownVoiceSeparator(false);
// Scroll to bottom
setTimeout(() => {
flatListRef.current?.scrollToEnd({ animated: true });
}, 100);
}
}, [isCallActive]);
}
}, [hasNewTranscript, getTranscriptAsMessages, markTranscriptAsShown]);
const [input, setInput] = useState('');
const [isSending, setIsSending] = useState(false);
const flatListRef = useRef<FlatList>(null);
@ -280,41 +155,6 @@ export default function ChatScreen() {
const [beneficiaries, setBeneficiaries] = useState<Beneficiary[]>([]);
const [loadingBeneficiaries, setLoadingBeneficiaries] = useState(false);
// Load custom deployment ID from settings and update initial message
useEffect(() => {
const loadCustomDeploymentId = async () => {
const saved = await api.getDeploymentId();
setCustomDeploymentId(saved);
// Update initial message with deployment ID
if (saved) {
setMessages([createInitialMessage(saved)]);
}
};
loadCustomDeploymentId();
}, [createInitialMessage]);
// When deployment ID changes, end call and clear chat
const previousDeploymentId = useRef<string | null>(null);
useEffect(() => {
// Skip initial load
if (previousDeploymentId.current === null) {
previousDeploymentId.current = customDeploymentId;
return;
}
// If deployment ID actually changed
if (previousDeploymentId.current !== customDeploymentId) {
console.log('[Chat] Deployment ID changed, ending call and clearing chat');
// End any active call
if (isCallActive) {
endVoiceCallContext();
}
// Clear chat with new initial message
setMessages([createInitialMessage(customDeploymentId)]);
setHasShownVoiceSeparator(false);
previousDeploymentId.current = customDeploymentId;
}
}, [customDeploymentId, createInitialMessage, isCallActive, endVoiceCallContext]);
// Load beneficiaries
const loadBeneficiaries = useCallback(async () => {
setLoadingBeneficiaries(true);
@ -346,29 +186,18 @@ export default function ChatScreen() {
autoSelect();
}, []);
// Helper function to scroll to the latest message based on sort mode
const scrollToLatestMessage = useCallback((animated = true) => {
if (sortNewestFirst) {
// When newest first, latest messages are at top (index 0)
flatListRef.current?.scrollToOffset({ offset: 0, animated });
} else {
// When oldest first, latest messages are at bottom
flatListRef.current?.scrollToEnd({ animated });
}
}, [sortNewestFirst]);
// Scroll to latest when keyboard shows
// Scroll to end when keyboard shows
useEffect(() => {
const keyboardShowListener = Keyboard.addListener(
Platform.OS === 'ios' ? 'keyboardWillShow' : 'keyboardDidShow',
() => {
setTimeout(() => {
scrollToLatestMessage(true);
flatListRef.current?.scrollToEnd({ animated: true });
}, 100);
}
);
return () => keyboardShowListener.remove();
}, [scrollToLatestMessage]);
}, []);
const openBeneficiaryPicker = useCallback(() => {
setShowBeneficiaryPicker(true);
@ -380,117 +209,10 @@ export default function ChatScreen() {
setShowBeneficiaryPicker(false);
}, [setCurrentBeneficiary]);
// ============================================================================
// Voice Call Functions
// ============================================================================
// Start voice call
const startVoiceCall = useCallback(async () => {
if (isConnectingVoice || isCallActive) return;
setIsConnectingVoice(true);
console.log('[Chat] Starting voice call...');
try {
// Build beneficiary data for the agent
// Priority: customDeploymentId from settings > currentBeneficiary > first beneficiary > fallback
const beneficiaryData: BeneficiaryData = {
deploymentId: customDeploymentId || currentBeneficiary?.id?.toString() || beneficiaries[0]?.id?.toString() || '21',
beneficiaryNamesDict: {},
};
// Add names dict if not in single deployment mode
if (!SINGLE_DEPLOYMENT_MODE) {
beneficiaries.forEach(b => {
beneficiaryData.beneficiaryNamesDict[b.id.toString()] = b.name;
});
}
// Get LiveKit token
const userIdStr = user?.user_id?.toString() || 'user-' + Date.now();
const tokenResponse = await getToken(userIdStr, beneficiaryData);
if (!tokenResponse.success || !tokenResponse.data) {
throw new Error(tokenResponse.error || 'Failed to get voice token');
}
console.log('[Chat] Got voice token, connecting to room:', tokenResponse.data.roomName);
// Add call start message to chat
const callStartMessage: Message = {
id: `call-start-${Date.now()}`,
role: 'assistant',
content: 'Voice call started',
timestamp: new Date(),
isSystem: true,
};
setMessages(prev => [...prev, callStartMessage]);
// Clear previous transcript and start call via context
clearTranscript();
startCall({
token: tokenResponse.data.token,
wsUrl: tokenResponse.data.wsUrl,
beneficiaryName: currentBeneficiary?.name,
beneficiaryId: currentBeneficiary?.id?.toString(),
});
} catch (error) {
console.error('[Chat] Voice call error:', error);
Alert.alert(
'Voice Call Error',
error instanceof Error ? error.message : 'Failed to start voice call'
);
} finally {
setIsConnectingVoice(false);
}
}, [isConnectingVoice, isCallActive, currentBeneficiary, beneficiaries, user, clearTranscript, startCall, customDeploymentId]);
// End voice call and log to chat
const endVoiceCall = useCallback(() => {
console.log('[Chat] Ending voice call...');
// Add call end message to chat with duration
const duration = callState.callDuration;
const minutes = Math.floor(duration / 60);
const seconds = duration % 60;
const durationStr = `${minutes}:${seconds.toString().padStart(2, '0')}`;
const callEndMessage: Message = {
id: `call-end-${Date.now()}`,
role: 'assistant',
content: `Call ended (${durationStr})`,
timestamp: new Date(),
isSystem: true,
};
setMessages(prev => [...prev, callEndMessage]);
setHasShownVoiceSeparator(false);
endVoiceCallContext();
}, [endVoiceCallContext, callState.callDuration]);
// Handle voice transcript entries - add to chat in real-time
const handleVoiceTranscript = useCallback((role: 'user' | 'assistant', text: string) => {
if (!text.trim()) return;
// Create voice message and add to chat immediately
const voiceMessage: Message = {
id: `voice-${Date.now()}-${Math.random().toString(36).slice(2)}`,
role,
content: text.trim(),
timestamp: new Date(),
isVoice: true,
};
setMessages(prev => [...prev, voiceMessage]);
// Scroll to latest message (respects sort mode)
setTimeout(() => {
scrollToLatestMessage(true);
}, 100);
// Also store in transcript context for persistence
addTranscriptEntry(role, text);
}, [hasShownVoiceSeparator, addTranscriptEntry, scrollToLatestMessage]);
// Start voice call - navigate to voice-call screen
const startVoiceCall = useCallback(() => {
router.push('/voice-call');
}, [router]);
// Cached API token for WellNuo
const apiTokenRef = useRef<string | null>(null);
@ -555,8 +277,8 @@ export default function ChatScreen() {
beneficiaryNamesDict[b.id.toString()] = b.name;
});
// Get deployment_id: custom from settings > current beneficiary > first beneficiary > fallback
const deploymentId = customDeploymentId || currentBeneficiary?.id?.toString() || beneficiaries[0]?.id?.toString() || '21';
// Get deployment_id from current beneficiary or fallback to first one
const deploymentId = currentBeneficiary?.id?.toString() || beneficiaries[0]?.id?.toString() || '21';
// Call API with EXACT same params as voice agent
// SINGLE_DEPLOYMENT_MODE: sends only deployment_id (no beneficiary_names_dict)
@ -609,7 +331,7 @@ export default function ChatScreen() {
} finally {
setIsSending(false);
}
}, [input, isSending, getWellNuoToken, customDeploymentId, currentBeneficiary, beneficiaries]);
}, [input, isSending, getWellNuoToken]);
// Render message bubble
const renderMessage = ({ item }: { item: Message }) => {
@ -641,7 +363,7 @@ export default function ChatScreen() {
<View style={[styles.messageBubble, isUser ? styles.userBubble : styles.assistantBubble, isVoice && styles.voiceBubble]}>
{isVoice && (
<View style={styles.voiceIndicator}>
<Text style={styles.voiceIndicatorEmoji}>🎤</Text>
<Ionicons name="mic" size={12} color={isUser ? 'rgba(255,255,255,0.7)' : AppColors.textMuted} />
</View>
)}
<Text style={[styles.messageText, isUser ? styles.userMessageText : styles.assistantMessageText]}>
@ -656,7 +378,7 @@ export default function ChatScreen() {
};
return (
<SafeAreaView style={styles.container} edges={['top']}>
<SafeAreaView style={styles.container} edges={['top', 'bottom']}>
{/* Header */}
<View style={styles.header}>
<TouchableOpacity style={styles.backButton} onPress={() => router.push('/(tabs)')}>
@ -673,45 +395,13 @@ export default function ChatScreen() {
</Text>
</View>
</View>
<TouchableOpacity
style={styles.headerButton}
onPress={() => setSortNewestFirst(prev => !prev)}
>
<Ionicons
name={sortNewestFirst ? 'arrow-up' : 'arrow-down'}
size={22}
color={AppColors.textSecondary}
/>
</TouchableOpacity>
<TouchableOpacity
style={styles.headerButton}
onPress={() => {
Alert.alert(
'Clear Chat',
'Are you sure you want to clear all messages?',
[
{ text: 'Cancel', style: 'cancel' },
{
text: 'Clear',
style: 'destructive',
onPress: () => {
setMessages([
{
id: '1',
role: 'assistant',
content: 'Hello! I\'m Julia, your AI wellness assistant. You can type a message or tap the phone button to start a voice call.',
timestamp: new Date(),
},
]);
},
},
]
);
}}
>
<Ionicons name="trash-outline" size={22} color={AppColors.textSecondary} />
<View style={styles.headerButtons}>
{/* Voice Call Button */}
<TouchableOpacity style={styles.callButton} onPress={startVoiceCall}>
<Ionicons name="call" size={22} color={AppColors.white} />
</TouchableOpacity>
</View>
</View>
{/* Beneficiary Picker Modal */}
<Modal
@ -776,49 +466,20 @@ export default function ChatScreen() {
>
<FlatList
ref={flatListRef}
data={sortNewestFirst ? [...messages].reverse() : messages}
data={messages}
keyExtractor={(item) => item.id}
renderItem={renderMessage}
contentContainerStyle={styles.messagesList}
showsVerticalScrollIndicator={false}
onContentSizeChange={() => {
scrollToLatestMessage(true);
}}
onContentSizeChange={() => flatListRef.current?.scrollToEnd({ animated: true })}
/>
{/* Input */}
<View style={styles.inputContainer}>
{/* Voice Call Button - becomes pulsing bubble during call */}
<Animated.View style={{ transform: [{ scale: pulseAnim }] }}>
<TouchableOpacity
style={[
styles.voiceButton,
isConnectingVoice && styles.voiceButtonConnecting,
isCallActive && styles.voiceButtonActive,
]}
onPress={isCallActive ? endVoiceCall : startVoiceCall}
disabled={isConnectingVoice}
>
{isConnectingVoice ? (
<ActivityIndicator size="small" color={AppColors.primary} />
) : isCallActive ? (
<View style={styles.callActiveIndicator}>
<Ionicons name="call" size={20} color={AppColors.white} />
</View>
) : (
<Ionicons name="call" size={20} color={AppColors.primary} />
)}
{/* Voice Call Button in input area */}
<TouchableOpacity style={styles.voiceCallButton} onPress={startVoiceCall}>
<Ionicons name="call-outline" size={20} color={AppColors.primary} />
</TouchableOpacity>
</Animated.View>
{/* Call duration badge */}
{isCallActive && (
<View style={styles.callDurationBadge}>
<Text style={styles.callDurationText}>
{Math.floor(callState.callDuration / 60).toString().padStart(2, '0')}:
{(callState.callDuration % 60).toString().padStart(2, '0')}
</Text>
</View>
)}
<TextInput
style={styles.input}
@ -843,29 +504,6 @@ export default function ChatScreen() {
</TouchableOpacity>
</View>
</KeyboardAvoidingView>
{/* Invisible LiveKit Room - runs in background during call */}
{isCallActive && callState.token && callState.wsUrl && (
<LiveKitRoom
serverUrl={callState.wsUrl}
token={callState.token}
connect={true}
audio={true}
video={false}
onConnected={() => console.log('[Chat] LiveKit connected')}
onDisconnected={endVoiceCall}
onError={(error) => {
console.error('[Chat] LiveKit error:', error);
Alert.alert('Voice Call Error', error.message);
endVoiceCall();
}}
>
<VoiceCallTranscriptHandler
onTranscript={handleVoiceTranscript}
onDurationUpdate={updateDuration}
/>
</LiveKitRoom>
)}
</SafeAreaView>
);
}
@ -917,9 +555,21 @@ const styles = StyleSheet.create({
fontSize: FontSizes.sm,
color: AppColors.success,
},
headerButtons: {
flexDirection: 'row',
alignItems: 'center',
gap: Spacing.sm,
},
callButton: {
width: 40,
height: 40,
borderRadius: 20,
backgroundColor: AppColors.success,
justifyContent: 'center',
alignItems: 'center',
},
headerButton: {
padding: Spacing.xs,
marginLeft: Spacing.sm,
},
chatContainer: {
flex: 1,
@ -993,6 +643,17 @@ const styles = StyleSheet.create({
borderTopWidth: 1,
borderTopColor: AppColors.border,
},
voiceCallButton: {
width: 44,
height: 44,
borderRadius: 22,
backgroundColor: AppColors.surface,
borderWidth: 1,
borderColor: AppColors.primary,
justifyContent: 'center',
alignItems: 'center',
marginRight: Spacing.sm,
},
input: {
flex: 1,
backgroundColor: AppColors.surface,
@ -1004,48 +665,6 @@ const styles = StyleSheet.create({
maxHeight: 100,
marginRight: Spacing.sm,
},
voiceButton: {
width: 44,
height: 44,
borderRadius: BorderRadius.full,
backgroundColor: AppColors.surface,
justifyContent: 'center',
alignItems: 'center',
marginRight: Spacing.sm,
borderWidth: 1,
borderColor: AppColors.primary,
},
voiceButtonConnecting: {
borderColor: AppColors.success,
backgroundColor: 'rgba(90, 200, 168, 0.1)',
},
voiceButtonActive: {
backgroundColor: AppColors.success,
borderColor: AppColors.success,
},
callActiveIndicator: {
width: '100%',
height: '100%',
justifyContent: 'center',
alignItems: 'center',
},
callDurationBadge: {
position: 'absolute',
left: 32,
top: -8,
backgroundColor: AppColors.success,
paddingHorizontal: 6,
paddingVertical: 2,
borderRadius: 8,
minWidth: 42,
alignItems: 'center',
},
callDurationText: {
fontSize: 10,
fontWeight: '600',
color: AppColors.white,
fontVariant: ['tabular-nums'],
},
sendButton: {
width: 44,
height: 44,
@ -1143,9 +762,6 @@ const styles = StyleSheet.create({
top: 6,
right: 6,
},
voiceIndicatorEmoji: {
fontSize: 10,
},
// System message styles
systemMessageContainer: {
flexDirection: 'row',

1403
app/(tabs)/debug.tsx Normal file

File diff suppressed because it is too large Load Diff

View File

@ -1,4 +1,4 @@
import React, { useState, useEffect, useCallback } from 'react';
import React from 'react';
import {
View,
Text,
@ -6,14 +6,11 @@ import {
ScrollView,
TouchableOpacity,
Alert,
TextInput,
Modal,
} from 'react-native';
import { router } from 'expo-router';
import { Ionicons } from '@expo/vector-icons';
import { SafeAreaView } from 'react-native-safe-area-context';
import { useAuth } from '@/contexts/AuthContext';
import { api } from '@/services/api';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
interface MenuItemProps {
@ -53,65 +50,6 @@ function MenuItem({
export default function ProfileScreen() {
const { user, logout } = useAuth();
const [deploymentId, setDeploymentId] = useState<string>('');
const [deploymentName, setDeploymentName] = useState<string>('');
const [showDeploymentModal, setShowDeploymentModal] = useState(false);
const [tempDeploymentId, setTempDeploymentId] = useState('');
const [isValidating, setIsValidating] = useState(false);
const [validationError, setValidationError] = useState<string | null>(null);
// Load saved deployment ID and validate to get name
useEffect(() => {
const loadDeploymentId = async () => {
const saved = await api.getDeploymentId();
if (saved) {
setDeploymentId(saved);
// Validate to get the deployment name
const result = await api.validateDeploymentId(saved);
if (result.ok && result.data?.valid && result.data.name) {
setDeploymentName(result.data.name);
}
}
};
loadDeploymentId();
}, []);
const openDeploymentModal = useCallback(() => {
setTempDeploymentId(deploymentId);
setValidationError(null);
setShowDeploymentModal(true);
}, [deploymentId]);
const saveDeploymentId = useCallback(async () => {
const trimmed = tempDeploymentId.trim();
setValidationError(null);
if (trimmed) {
setIsValidating(true);
try {
const result = await api.validateDeploymentId(trimmed);
if (result.ok && result.data?.valid) {
await api.setDeploymentId(trimmed);
setDeploymentId(trimmed);
setDeploymentName(result.data.name || '');
setShowDeploymentModal(false);
} else if (result.ok && !result.data?.valid) {
setValidationError('Invalid Deployment ID. Please check and try again.');
} else {
setValidationError(result.error?.message || 'Failed to validate Deployment ID');
}
} catch {
setValidationError('Network error. Please try again.');
} finally {
setIsValidating(false);
}
} else {
await api.clearDeploymentId();
setDeploymentId('');
setDeploymentName('');
setShowDeploymentModal(false);
}
}, [tempDeploymentId]);
const openTerms = () => {
router.push('/terms');
@ -160,19 +98,6 @@ export default function ProfileScreen() {
</View>
</View>
{/* Settings */}
<View style={styles.section}>
<Text style={styles.sectionTitle}>Settings</Text>
<View style={styles.menuCard}>
<MenuItem
icon="server-outline"
title="Deployment"
subtitle={deploymentId ? (deploymentName || `ID: ${deploymentId}`) : 'Auto'}
onPress={openDeploymentModal}
/>
</View>
</View>
{/* Legal - Required for App Store */}
<View style={styles.section}>
<Text style={styles.sectionTitle}>Legal</Text>
@ -202,57 +127,6 @@ export default function ProfileScreen() {
{/* Version */}
<Text style={styles.version}>WellNuo v1.0.0</Text>
</ScrollView>
{/* Deployment ID Modal */}
<Modal
visible={showDeploymentModal}
transparent
animationType="fade"
onRequestClose={() => setShowDeploymentModal(false)}
>
<View style={styles.modalOverlay}>
<View style={styles.modalContent}>
<Text style={styles.modalTitle}>Deployment ID</Text>
<Text style={styles.modalDescription}>
Enter the deployment ID to connect to a specific device. Leave empty for automatic detection.
</Text>
<TextInput
style={[styles.modalInput, validationError && styles.modalInputError]}
placeholder="e.g., 21"
placeholderTextColor={AppColors.textMuted}
value={tempDeploymentId}
onChangeText={(text) => {
setTempDeploymentId(text);
setValidationError(null);
}}
keyboardType="numeric"
autoFocus
editable={!isValidating}
/>
{validationError && (
<Text style={styles.errorText}>{validationError}</Text>
)}
<View style={styles.modalButtons}>
<TouchableOpacity
style={styles.modalButtonCancel}
onPress={() => setShowDeploymentModal(false)}
disabled={isValidating}
>
<Text style={[styles.modalButtonCancelText, isValidating && styles.disabledText]}>Cancel</Text>
</TouchableOpacity>
<TouchableOpacity
style={[styles.modalButtonSave, isValidating && styles.modalButtonDisabled]}
onPress={saveDeploymentId}
disabled={isValidating}
>
<Text style={styles.modalButtonSaveText}>
{isValidating ? 'Validating...' : 'Save'}
</Text>
</TouchableOpacity>
</View>
</View>
</View>
</Modal>
</SafeAreaView>
);
}
@ -378,80 +252,4 @@ const styles = StyleSheet.create({
color: AppColors.textMuted,
paddingVertical: Spacing.xl,
},
// Modal styles
modalOverlay: {
flex: 1,
backgroundColor: 'rgba(0, 0, 0, 0.5)',
justifyContent: 'center',
alignItems: 'center',
padding: Spacing.lg,
},
modalContent: {
backgroundColor: AppColors.background,
borderRadius: BorderRadius.lg,
padding: Spacing.lg,
width: '100%',
maxWidth: 400,
},
modalTitle: {
fontSize: FontSizes.lg,
fontWeight: '600',
color: AppColors.textPrimary,
marginBottom: Spacing.sm,
},
modalDescription: {
fontSize: FontSizes.sm,
color: AppColors.textSecondary,
marginBottom: Spacing.md,
},
modalInput: {
backgroundColor: AppColors.surface,
borderRadius: BorderRadius.md,
paddingHorizontal: Spacing.md,
paddingVertical: Spacing.sm + 4,
fontSize: FontSizes.base,
color: AppColors.textPrimary,
borderWidth: 1,
borderColor: AppColors.border,
marginBottom: Spacing.md,
},
modalInputError: {
borderColor: AppColors.error,
marginBottom: Spacing.xs,
},
errorText: {
color: AppColors.error,
fontSize: FontSizes.sm,
marginBottom: Spacing.md,
},
modalButtons: {
flexDirection: 'row',
justifyContent: 'flex-end',
gap: Spacing.sm,
},
modalButtonCancel: {
paddingHorizontal: Spacing.md,
paddingVertical: Spacing.sm,
},
modalButtonCancelText: {
fontSize: FontSizes.base,
color: AppColors.textSecondary,
},
modalButtonSave: {
backgroundColor: AppColors.primary,
paddingHorizontal: Spacing.lg,
paddingVertical: Spacing.sm,
borderRadius: BorderRadius.md,
},
modalButtonSaveText: {
fontSize: FontSizes.base,
fontWeight: '600',
color: AppColors.white,
},
modalButtonDisabled: {
backgroundColor: AppColors.textMuted,
},
disabledText: {
opacity: 0.5,
},
});

380
app/(tabs)/voice.tsx Normal file
View File

@ -0,0 +1,380 @@
/**
* Voice Debug Screen
* Shows transcript logs from voice calls for debugging
* Allows easy copying of logs
*/
import React, { useCallback } from 'react';
import {
View,
Text,
StyleSheet,
TouchableOpacity,
ScrollView,
Alert,
} from 'react-native';
import { SafeAreaView } from 'react-native-safe-area-context';
import { Ionicons, Feather } from '@expo/vector-icons';
import { useRouter } from 'expo-router';
import * as Clipboard from 'expo-clipboard';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import { useVoiceTranscript } from '@/contexts/VoiceTranscriptContext';
/**
 * Voice Debug screen.
 *
 * Renders the transcript captured during voice calls with Julia AI and offers
 * debug utilities: copy all logs (or a single entry) to the clipboard, clear
 * the log, start a voice call, and inject a scripted mock conversation
 * (useful on simulators, which have no microphone).
 *
 * All transcript state lives in VoiceTranscriptContext; this component only
 * renders UI and delegates mutations to the context.
 */
export default function VoiceDebugScreen() {
  const router = useRouter();
  const { transcript, clearTranscript, hasNewTranscript, markTranscriptAsShown, addTranscriptEntry } = useVoiceTranscript();

  // Mark as shown when viewed, so any "new transcript" badge elsewhere clears.
  React.useEffect(() => {
    if (hasNewTranscript) {
      markTranscriptAsShown();
    }
  }, [hasNewTranscript, markTranscriptAsShown]);

  // Copy all logs to clipboard as plain text, prefixed with a timestamped header.
  const copyAllLogs = useCallback(async () => {
    if (transcript.length === 0) {
      Alert.alert('No logs', 'There are no voice call logs to copy.');
      return;
    }
    const logsText = transcript
      .map((entry) => {
        const time = entry.timestamp.toLocaleTimeString();
        const speaker = entry.role === 'user' ? 'USER' : 'JULIA';
        return `[${time}] ${speaker}: ${entry.text}`;
      })
      .join('\n\n');
    const header = `=== Voice Call Transcript ===\n${new Date().toLocaleString()}\nTotal entries: ${transcript.length}\n\n`;
    await Clipboard.setStringAsync(header + logsText);
    Alert.alert('Copied!', 'Voice call logs copied to clipboard.');
  }, [transcript]);

  // Copy a single entry's text (triggered by long-press on a row).
  const copySingleEntry = useCallback(async (text: string) => {
    await Clipboard.setStringAsync(text);
    Alert.alert('Copied!', 'Message copied to clipboard.');
  }, []);

  // Clear all logs after an explicit confirmation dialog.
  const handleClearLogs = useCallback(() => {
    Alert.alert(
      'Clear Logs',
      'Are you sure you want to clear all voice call logs?',
      [
        { text: 'Cancel', style: 'cancel' },
        {
          text: 'Clear',
          style: 'destructive',
          onPress: clearTranscript,
        },
      ]
    );
  }, [clearTranscript]);

  // Start a new voice call by navigating to the full-screen call route.
  const startVoiceCall = useCallback(() => {
    router.push('/voice-call');
  }, [router]);

  // Add mock data for testing (simulator has no microphone).
  // Entries are staggered 100ms apart so timestamps differ slightly.
  const addMockData = useCallback(() => {
    const mockConversation = [
      { role: 'assistant' as const, text: "Hi! I have some concerns about Ferdinand today - there was an incident this morning. Want me to tell you more?" },
      { role: 'user' as const, text: "Yes, what happened?" },
      { role: 'assistant' as const, text: "Ferdinand had a fall at 6:32 AM in the bathroom. He was able to get up on his own, but I recommend checking in with him. His sleep was also shorter than usual - only 5 hours last night." },
      { role: 'user' as const, text: "Did he take his medications?" },
      { role: 'assistant' as const, text: "Yes, he took his morning medications at 8:15 AM. All on schedule. Would you like me to show you the dashboard with more details?" },
      { role: 'user' as const, text: "Show me the dashboard" },
      { role: 'assistant' as const, text: "Navigating to Dashboard now. You can see the 7-day overview there." },
    ];
    mockConversation.forEach((entry, index) => {
      setTimeout(() => {
        addTranscriptEntry(entry.role, entry.text);
      }, index * 100);
    });
    Alert.alert('Mock Data Added', 'Sample voice conversation added for testing.');
  }, [addTranscriptEntry]);

  return (
    <SafeAreaView style={styles.container} edges={['top']}>
      {/* Header: title on the left, copy/clear actions on the right (only when logs exist) */}
      <View style={styles.header}>
        <View style={styles.headerLeft}>
          <Feather name="terminal" size={24} color={AppColors.primary} />
          <Text style={styles.headerTitle}>Voice Debug</Text>
        </View>
        <View style={styles.headerButtons}>
          {transcript.length > 0 && (
            <>
              <TouchableOpacity style={styles.headerButton} onPress={copyAllLogs}>
                <Ionicons name="copy-outline" size={22} color={AppColors.primary} />
              </TouchableOpacity>
              <TouchableOpacity style={styles.headerButton} onPress={handleClearLogs}>
                <Ionicons name="trash-outline" size={22} color={AppColors.error} />
              </TouchableOpacity>
            </>
          )}
        </View>
      </View>
      {/* Start Call Button */}
      <View style={styles.callButtonContainer}>
        <TouchableOpacity style={styles.callButton} onPress={startVoiceCall}>
          <Ionicons name="call" size={24} color={AppColors.white} />
          <Text style={styles.callButtonText}>Start Voice Call</Text>
        </TouchableOpacity>
        {/* Mock Data Button for simulator testing */}
        <TouchableOpacity style={styles.mockDataButton} onPress={addMockData}>
          <Feather name="plus-circle" size={20} color={AppColors.primary} />
          <Text style={styles.mockDataButtonText}>Add Mock Data</Text>
        </TouchableOpacity>
      </View>
      {/* Logs Section header with entry count */}
      <View style={styles.logsHeader}>
        <Text style={styles.logsTitle}>Call Transcript</Text>
        <Text style={styles.logsCount}>
          {transcript.length} {transcript.length === 1 ? 'entry' : 'entries'}
        </Text>
      </View>
      {/* Transcript List: empty state, or one pressable row per entry */}
      <ScrollView style={styles.logsList} contentContainerStyle={styles.logsContent}>
        {transcript.length === 0 ? (
          <View style={styles.emptyState}>
            <Feather name="mic-off" size={48} color={AppColors.textMuted} />
            <Text style={styles.emptyTitle}>No voice logs yet</Text>
            <Text style={styles.emptySubtitle}>
              Start a voice call with Julia AI to see the transcript here.
            </Text>
          </View>
        ) : (
          transcript.map((entry) => (
            <TouchableOpacity
              key={entry.id}
              style={[
                styles.logEntry,
                entry.role === 'user' ? styles.logEntryUser : styles.logEntryAssistant,
              ]}
              onLongPress={() => copySingleEntry(entry.text)}
              activeOpacity={0.7}
            >
              <View style={styles.logEntryHeader}>
                <View style={styles.logEntrySpeaker}>
                  <Ionicons
                    name={entry.role === 'user' ? 'person' : 'sparkles'}
                    size={14}
                    color={entry.role === 'user' ? AppColors.primary : AppColors.success}
                  />
                  <Text
                    style={[
                      styles.logEntrySpeakerText,
                      { color: entry.role === 'user' ? AppColors.primary : AppColors.success },
                    ]}
                  >
                    {entry.role === 'user' ? 'You' : 'Julia'}
                  </Text>
                </View>
                <Text style={styles.logEntryTime}>
                  {entry.timestamp.toLocaleTimeString()}
                </Text>
              </View>
              <Text style={styles.logEntryText} selectable>
                {entry.text}
              </Text>
              <Text style={styles.logEntryHint}>Long press to copy</Text>
            </TouchableOpacity>
          ))
        )}
      </ScrollView>
      {/* Footer hint */}
      {transcript.length > 0 && (
        <View style={styles.footer}>
          <Text style={styles.footerText}>
            Tap the copy icon to copy all logs
          </Text>
        </View>
      )}
    </SafeAreaView>
  );
}
// Styles for VoiceDebugScreen.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: AppColors.background,
  },
  // --- Header bar (title + action icons) ---
  header: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'space-between',
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.sm,
    borderBottomWidth: 1,
    borderBottomColor: AppColors.border,
  },
  headerLeft: {
    flexDirection: 'row',
    alignItems: 'center',
    gap: Spacing.sm,
  },
  headerTitle: {
    fontSize: FontSizes.xl,
    fontWeight: '700',
    color: AppColors.textPrimary,
  },
  headerButtons: {
    flexDirection: 'row',
    gap: Spacing.sm,
  },
  headerButton: {
    padding: Spacing.xs,
    borderRadius: BorderRadius.md,
    backgroundColor: AppColors.surface,
  },
  // --- "Start Voice Call" / "Add Mock Data" buttons ---
  callButtonContainer: {
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.md,
  },
  callButton: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
    gap: Spacing.sm,
    backgroundColor: AppColors.success,
    paddingVertical: Spacing.md,
    borderRadius: BorderRadius.lg,
    // Soft colored glow (iOS shadow* props; elevation for Android).
    shadowColor: AppColors.success,
    shadowOffset: { width: 0, height: 4 },
    shadowOpacity: 0.3,
    shadowRadius: 8,
    elevation: 4,
  },
  callButtonText: {
    fontSize: FontSizes.lg,
    fontWeight: '600',
    color: AppColors.white,
  },
  mockDataButton: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
    gap: Spacing.xs,
    marginTop: Spacing.sm,
    paddingVertical: Spacing.sm,
    borderRadius: BorderRadius.md,
    borderWidth: 1,
    borderColor: AppColors.primary,
    backgroundColor: 'transparent',
  },
  mockDataButtonText: {
    fontSize: FontSizes.sm,
    fontWeight: '500',
    color: AppColors.primary,
  },
  // --- Transcript section header + scrollable list ---
  logsHeader: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'space-between',
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.sm,
    borderBottomWidth: 1,
    borderBottomColor: AppColors.border,
  },
  logsTitle: {
    fontSize: FontSizes.base,
    fontWeight: '600',
    color: AppColors.textPrimary,
  },
  logsCount: {
    fontSize: FontSizes.sm,
    color: AppColors.textMuted,
  },
  logsList: {
    flex: 1,
  },
  logsContent: {
    padding: Spacing.md,
    gap: Spacing.sm,
  },
  emptyState: {
    flex: 1,
    alignItems: 'center',
    justifyContent: 'center',
    paddingVertical: Spacing.xxl * 2,
  },
  emptyTitle: {
    fontSize: FontSizes.lg,
    fontWeight: '600',
    color: AppColors.textPrimary,
    marginTop: Spacing.md,
  },
  emptySubtitle: {
    fontSize: FontSizes.sm,
    color: AppColors.textMuted,
    textAlign: 'center',
    marginTop: Spacing.xs,
    paddingHorizontal: Spacing.xl,
  },
  // --- Individual transcript rows; user vs assistant differ by tint + left border ---
  logEntry: {
    padding: Spacing.md,
    borderRadius: BorderRadius.lg,
    marginBottom: Spacing.sm,
  },
  logEntryUser: {
    backgroundColor: 'rgba(33, 150, 243, 0.1)',
    borderLeftWidth: 3,
    borderLeftColor: AppColors.primary,
  },
  logEntryAssistant: {
    backgroundColor: 'rgba(76, 175, 80, 0.1)',
    borderLeftWidth: 3,
    borderLeftColor: AppColors.success,
  },
  logEntryHeader: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'space-between',
    marginBottom: Spacing.xs,
  },
  logEntrySpeaker: {
    flexDirection: 'row',
    alignItems: 'center',
    gap: 4,
  },
  logEntrySpeakerText: {
    fontSize: FontSizes.sm,
    fontWeight: '600',
  },
  logEntryTime: {
    fontSize: FontSizes.xs,
    color: AppColors.textMuted,
  },
  logEntryText: {
    fontSize: FontSizes.base,
    color: AppColors.textPrimary,
    lineHeight: 22,
  },
  logEntryHint: {
    fontSize: FontSizes.xs,
    color: AppColors.textMuted,
    marginTop: Spacing.xs,
    fontStyle: 'italic',
  },
  // --- Footer hint bar ---
  footer: {
    padding: Spacing.md,
    alignItems: 'center',
    borderTopWidth: 1,
    borderTopColor: AppColors.border,
  },
  footerText: {
    fontSize: FontSizes.sm,
    color: AppColors.textMuted,
  },
});

View File

@ -8,15 +8,12 @@ import { StatusBar } from 'expo-status-bar';
import * as SplashScreen from 'expo-splash-screen';
import 'react-native-reanimated';
import { KeyboardProvider } from 'react-native-keyboard-controller';
import { SafeAreaProvider } from 'react-native-safe-area-context';
import { useColorScheme } from '@/hooks/use-color-scheme';
import { AuthProvider, useAuth } from '@/contexts/AuthContext';
import { BeneficiaryProvider } from '@/contexts/BeneficiaryContext';
import { VoiceTranscriptProvider } from '@/contexts/VoiceTranscriptContext';
import { VoiceCallProvider } from '@/contexts/VoiceCallContext';
import { LoadingSpinner } from '@/components/ui/LoadingSpinner';
import { FloatingCallBubble } from '@/components/FloatingCallBubble';
// Prevent auto-hiding splash screen
SplashScreen.preventAutoHideAsync();
@ -53,10 +50,10 @@ function RootLayoutNav() {
<Stack.Screen name="(auth)" />
<Stack.Screen name="(tabs)" />
<Stack.Screen name="modal" options={{ presentation: 'modal', title: 'Modal' }} />
<Stack.Screen name="voice-call" options={{ presentation: 'fullScreenModal', headerShown: false, gestureEnabled: false }} />
<Stack.Screen name="terms" options={{ presentation: 'modal' }} />
<Stack.Screen name="privacy" options={{ presentation: 'modal' }} />
</Stack>
<FloatingCallBubble />
<StatusBar style="auto" />
</ThemeProvider>
);
@ -64,18 +61,14 @@ function RootLayoutNav() {
export default function RootLayout() {
return (
<SafeAreaProvider>
<KeyboardProvider>
<AuthProvider>
<BeneficiaryProvider>
<VoiceTranscriptProvider>
<VoiceCallProvider>
<RootLayoutNav />
</VoiceCallProvider>
</VoiceTranscriptProvider>
</BeneficiaryProvider>
</AuthProvider>
</KeyboardProvider>
</SafeAreaProvider>
);
}

557
app/voice-call.tsx Normal file
View File

@ -0,0 +1,557 @@
/**
* Voice Call Screen - Fullscreen LiveKit Voice Call with Julia AI
*
* ARCHITECTURE:
* - ALL LiveKit/WebRTC logic is in useLiveKitRoom hook
* - This component ONLY handles UI rendering
* - No direct LiveKit imports here!
*
* Features:
* - Phone call-like UI with Julia avatar
* - Call duration timer
* - Mute/unmute toggle
* - Proper cleanup on unmount
*/
import React, { useEffect, useRef, useState, useMemo } from 'react';
import { View, Text, StyleSheet, TouchableOpacity, Animated, Easing, Dimensions } from 'react-native';
import { Ionicons } from '@expo/vector-icons';
import { SafeAreaView } from 'react-native-safe-area-context';
import { useRouter } from 'expo-router';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import { useVoiceTranscript } from '@/contexts/VoiceTranscriptContext';
import { useBeneficiary } from '@/contexts/BeneficiaryContext';
import { useLiveKitRoom, ConnectionState } from '@/hooks/useLiveKitRoom';
import { api } from '@/services/api';
import type { Beneficiary } from '@/types';
import type { BeneficiaryData } from '@/services/livekitService';
const { width: SCREEN_WIDTH } = Dimensions.get('window');
/**
 * Full-screen voice call UI for Julia AI.
 *
 * All LiveKit/WebRTC state comes from the useLiveKitRoom hook; this component
 * only decides WHEN to connect (after beneficiary data is assembled) and
 * renders the call UI (avatar, status, duration, mute / end-call controls).
 *
 * Connection gating:
 *  1. Load beneficiaries from the API.
 *  2. Build beneficiaryData (debugDeploymentId wins if set).
 *  3. Call connect() exactly once (guarded by connectCalledRef), or after a
 *     5s fallback timeout if beneficiaryData never materialises.
 */
export default function VoiceCallScreen() {
  const router = useRouter();
  const { clearTranscript, addTranscriptEntry } = useVoiceTranscript();
  const { currentBeneficiary, debugDeploymentId } = useBeneficiary();

  // Beneficiary state for building beneficiaryData
  const [beneficiaries, setBeneficiaries] = useState<Beneficiary[]>([]);
  const [beneficiariesLoaded, setBeneficiariesLoaded] = useState(false);

  // Load beneficiaries on mount. beneficiariesLoaded is set in `finally` so
  // the connect effects can proceed even if the API call fails.
  useEffect(() => {
    const loadBeneficiaries = async () => {
      try {
        const response = await api.getAllBeneficiaries();
        if (response.ok && response.data) {
          setBeneficiaries(response.data);
          console.log('[VoiceCall] Beneficiaries loaded:', response.data.length);
        }
      } catch (error) {
        console.warn('[VoiceCall] Failed to load beneficiaries:', error);
      } finally {
        setBeneficiariesLoaded(true);
      }
    };
    loadBeneficiaries();
  }, []);

  // Build beneficiaryData for the voice agent.
  // Returns undefined while data is not yet available (connect effects wait on it).
  const beneficiaryData = useMemo((): BeneficiaryData | undefined => {
    // PRIORITY 1: If debugDeploymentId is set (from Debug screen), use it
    if (debugDeploymentId) {
      console.log('[VoiceCall] Using DEBUG deployment ID:', debugDeploymentId);
      return {
        deploymentId: debugDeploymentId,
        beneficiaryNamesDict: {},
      };
    }
    // PRIORITY 2: Use beneficiaries from API
    // Safety check - ensure beneficiaries is an array
    if (!Array.isArray(beneficiaries) || beneficiaries.length === 0) {
      console.log('[VoiceCall] No beneficiaries yet, skipping beneficiaryData');
      return undefined;
    }
    try {
      // Build beneficiary_names_dict from all beneficiaries
      // Format: {"21": "papa", "69": "David"}
      const beneficiaryNamesDict: Record<string, string> = {};
      beneficiaries.forEach(b => {
        // Safety: check that b exists and has id and name
        if (b && b.id != null && b.name) {
          beneficiaryNamesDict[String(b.id)] = b.name;
        }
      });
      // Get deployment_id from current beneficiary or fallback to first one.
      // NOTE(review): final fallback is the hard-coded id '21' — confirm this
      // default is still valid for production.
      const deploymentId = currentBeneficiary?.id != null
        ? String(currentBeneficiary.id)
        : beneficiaries[0]?.id != null
          ? String(beneficiaries[0].id)
          : '21';
      console.log('[VoiceCall] BeneficiaryData:', { deploymentId, beneficiaryNamesDict });
      return {
        deploymentId,
        beneficiaryNamesDict,
      };
    } catch (error) {
      console.error('[VoiceCall] Error building beneficiaryData:', error);
      return undefined;
    }
  }, [beneficiaries, currentBeneficiary, debugDeploymentId]);

  // LiveKit hook - ALL connection logic lives here.
  const {
    state,
    error,
    callDuration,
    isMuted,
    isAgentSpeaking,
    canPlayAudio,
    participantCount,
    connect,
    disconnect,
    toggleMute,
  } = useLiveKitRoom({
    // NOTE(review): userId is regenerated per render of this initial object —
    // assumed the hook captures it once on first call; confirm in useLiveKitRoom.
    userId: `user-${Date.now()}`,
    beneficiaryData,
    onTranscript: (role, text) => {
      addTranscriptEntry(role, text);
    },
  });

  // Animation values (native-driver friendly: scale/rotate transforms only).
  const pulseAnim = useRef(new Animated.Value(1)).current;
  const rotateAnim = useRef(new Animated.Value(0)).current;
  const avatarScale = useRef(new Animated.Value(0.8)).current;

  // Clear transcript on mount (intentionally runs once; clearTranscript is
  // omitted from deps — assumed stable from context).
  useEffect(() => {
    clearTranscript();
  }, []);

  // Track if connect has been called to prevent duplicate calls
  const connectCalledRef = useRef(false);

  // Start call ONLY after beneficiaryData is ready
  // IMPORTANT: We must wait for beneficiaryData to be populated!
  // Without deploymentId, Julia AI agent won't know which beneficiary to talk about.
  useEffect(() => {
    // Prevent duplicate connect calls
    if (connectCalledRef.current) return;
    // If debugDeploymentId is set, connect immediately (don't wait for beneficiaries)
    if (debugDeploymentId && beneficiaryData?.deploymentId) {
      console.log('[VoiceCall] Starting call with DEBUG deploymentId:', debugDeploymentId);
      connectCalledRef.current = true;
      connect();
      return;
    }
    // Otherwise, only connect when beneficiaries are loaded AND beneficiaryData is ready
    if (beneficiariesLoaded && beneficiaryData?.deploymentId) {
      console.log('[VoiceCall] Starting call with beneficiaryData:', JSON.stringify(beneficiaryData));
      connectCalledRef.current = true;
      connect();
    } else if (beneficiariesLoaded) {
      console.log('[VoiceCall] Waiting for beneficiaryData... Current state:', {
        beneficiariesLoaded,
        beneficiariesCount: beneficiaries.length,
        beneficiaryData: beneficiaryData ? JSON.stringify(beneficiaryData) : 'undefined'
      });
    }
  }, [beneficiariesLoaded, beneficiaryData, beneficiaries.length, connect, debugDeploymentId]);

  // Fallback: if beneficiaryData doesn't arrive in 5 seconds, connect anyway
  // This handles edge cases where API fails or user has no beneficiaries
  useEffect(() => {
    if (connectCalledRef.current) return;
    const timeout = setTimeout(() => {
      if (!connectCalledRef.current && beneficiariesLoaded) {
        console.warn('[VoiceCall] Timeout: beneficiaryData not ready after 5s, connecting without it');
        connectCalledRef.current = true;
        connect();
      }
    }, 5000);
    return () => clearTimeout(timeout);
  }, [beneficiariesLoaded, connect]);

  // Navigate back on disconnect or error (errors linger 2s so the user can read them).
  useEffect(() => {
    if (state === 'disconnected' || state === 'error') {
      const timeout = setTimeout(() => {
        router.back();
      }, state === 'error' ? 2000 : 500);
      return () => clearTimeout(timeout);
    }
  }, [state, router]);

  // Pulse animation for active call (looped 1 -> 1.1 -> 1 scale breathing).
  useEffect(() => {
    if (state === 'connected') {
      const pulse = Animated.loop(
        Animated.sequence([
          Animated.timing(pulseAnim, {
            toValue: 1.1,
            duration: 1500,
            easing: Easing.inOut(Easing.ease),
            useNativeDriver: true,
          }),
          Animated.timing(pulseAnim, {
            toValue: 1,
            duration: 1500,
            easing: Easing.inOut(Easing.ease),
            useNativeDriver: true,
          }),
        ])
      );
      pulse.start();
      // Avatar entrance animation
      Animated.spring(avatarScale, {
        toValue: 1,
        friction: 8,
        tension: 40,
        useNativeDriver: true,
      }).start();
      return () => pulse.stop();
    }
  }, [state, pulseAnim, avatarScale]);

  // Rotate animation for connecting states (continuous spin while pending).
  useEffect(() => {
    const connectingStates: ConnectionState[] = [
      'initializing',
      'configuring_audio',
      'requesting_token',
      'connecting',
      'reconnecting',
    ];
    if (connectingStates.includes(state)) {
      const rotate = Animated.loop(
        Animated.timing(rotateAnim, {
          toValue: 1,
          duration: 2000,
          easing: Easing.linear,
          useNativeDriver: true,
        })
      );
      rotate.start();
      return () => rotate.stop();
    } else {
      rotateAnim.setValue(0);
    }
  }, [state, rotateAnim]);

  // End call handler: disconnect first, then pop the screen.
  const handleEndCall = async () => {
    await disconnect();
    router.back();
  };

  // Format duration as MM:SS (minutes unpadded, seconds zero-padded).
  const formatDuration = (seconds: number): string => {
    const mins = Math.floor(seconds / 60);
    const secs = seconds % 60;
    return `${mins}:${secs.toString().padStart(2, '0')}`;
  };

  // Get human-readable status text for the current connection state.
  const getStatusText = (): string => {
    switch (state) {
      case 'idle':
        return 'Starting...';
      case 'initializing':
        return 'Initializing...';
      case 'configuring_audio':
        return 'Configuring audio...';
      case 'requesting_token':
        return 'Requesting token...';
      case 'connecting':
        return 'Connecting...';
      case 'connected':
        if (isAgentSpeaking) return 'Julia is speaking...';
        if (!canPlayAudio) return 'Waiting for audio...';
        return 'Connected';
      case 'reconnecting':
        return 'Reconnecting...';
      case 'disconnected':
        return 'Disconnected';
      case 'error':
        return error || 'Error occurred';
      default:
        return 'Unknown state';
    }
  };

  // Is call currently connecting? ('reconnecting' intentionally not included
  // here — the avatar keeps pulsing instead of spinning during reconnects.)
  const isConnecting = [
    'idle',
    'initializing',
    'configuring_audio',
    'requesting_token',
    'connecting',
  ].includes(state);
  // Is call active?
  const isActive = state === 'connected';
  // Rotation interpolation
  const spin = rotateAnim.interpolate({
    inputRange: [0, 1],
    outputRange: ['0deg', '360deg'],
  });

  return (
    <SafeAreaView style={styles.container} edges={['top', 'bottom']}>
      {/* Background gradient effect */}
      <View style={styles.backgroundGradient} />
      {/* Top bar - minimal */}
      <View style={styles.topBar}>
        <TouchableOpacity style={styles.backButton} onPress={handleEndCall}>
          <Ionicons name="chevron-down" size={28} color={AppColors.white} />
        </TouchableOpacity>
        <View style={styles.topBarCenter} />
        {/* Invisible spacer mirroring the back button keeps the bar balanced */}
        <View style={styles.backButton} />
      </View>
      {/* Main content */}
      <View style={styles.content}>
        {/* Avatar: pulses while connected, spins while connecting */}
        <Animated.View
          style={[
            styles.avatarContainer,
            {
              transform: [
                { scale: isActive ? pulseAnim : avatarScale },
                { rotate: isConnecting ? spin : '0deg' },
              ],
            },
          ]}
        >
          <View style={styles.avatar}>
            <Text style={styles.avatarText}>J</Text>
          </View>
          {isActive && <View style={styles.activeIndicator} />}
        </Animated.View>
        {/* Name and status */}
        <Text style={styles.name}>Julia AI</Text>
        {isActive ? (
          <View style={styles.statusContainer}>
            <View style={styles.activeDot} />
            <Text style={styles.duration}>{formatDuration(callDuration)}</Text>
          </View>
        ) : (
          <Text style={styles.status}>{getStatusText()}</Text>
        )}
        {/* Additional status info.
            NOTE(review): there is no separator between the status text and the
            participant count, so this renders e.g. "Connected2 participants" —
            a separator (" · ") may have been intended; confirm. */}
        {isActive && (
          <Text style={styles.listeningStatus}>
            {getStatusText()}
            {participantCount > 1 && `${participantCount} participants`}
          </Text>
        )}
        {/* Error display */}
        {state === 'error' && error && (
          <View style={styles.errorContainer}>
            <Ionicons name="alert-circle" size={20} color={AppColors.error} />
            <Text style={styles.errorText}>{error}</Text>
          </View>
        )}
      </View>
      {/* Bottom controls - centered layout with 2 buttons */}
      <View style={styles.controls}>
        {/* Mute button (disabled until the call is connected) */}
        <TouchableOpacity
          style={[styles.controlButton, isMuted && styles.controlButtonActive]}
          onPress={toggleMute}
          disabled={!isActive}
        >
          <Ionicons
            name={isMuted ? 'mic-off' : 'mic'}
            size={28}
            color={isMuted ? AppColors.error : AppColors.white}
          />
          <Text style={styles.controlLabel}>{isMuted ? 'Unmute' : 'Mute'}</Text>
        </TouchableOpacity>
        {/* End call button */}
        <TouchableOpacity style={styles.endCallButton} onPress={handleEndCall}>
          <Ionicons name="call" size={32} color={AppColors.white} />
        </TouchableOpacity>
      </View>
    </SafeAreaView>
  );
}
// Styles for VoiceCallScreen (dark phone-call look).
const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: '#1a1a2e',
  },
  // Fake "gradient": a half-height panel whose bottom corners are rounded to
  // the full screen width, then stretched 2x horizontally to form an arc.
  backgroundGradient: {
    position: 'absolute',
    top: 0,
    left: 0,
    right: 0,
    height: '50%',
    backgroundColor: '#16213e',
    borderBottomLeftRadius: SCREEN_WIDTH,
    borderBottomRightRadius: SCREEN_WIDTH,
    transform: [{ scaleX: 2 }],
  },
  topBar: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'space-between',
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.sm,
  },
  // Also reused as an invisible right-hand spacer to center the top bar.
  backButton: {
    width: 44,
    height: 44,
    justifyContent: 'center',
    alignItems: 'center',
  },
  topBarCenter: {
    flex: 1,
    alignItems: 'center',
  },
  content: {
    flex: 1,
    alignItems: 'center',
    justifyContent: 'center',
    paddingBottom: 100,
  },
  // --- Avatar ---
  avatarContainer: {
    width: 150,
    height: 150,
    marginBottom: Spacing.xl,
  },
  avatar: {
    width: 150,
    height: 150,
    borderRadius: 75,
    backgroundColor: AppColors.success,
    justifyContent: 'center',
    alignItems: 'center',
    shadowColor: AppColors.success,
    shadowOffset: { width: 0, height: 0 },
    shadowOpacity: 0.5,
    shadowRadius: 20,
    elevation: 10,
  },
  avatarText: {
    fontSize: 64,
    fontWeight: '600',
    color: AppColors.white,
  },
  // Green "online" dot pinned to the avatar's lower-right edge.
  activeIndicator: {
    position: 'absolute',
    bottom: 10,
    right: 10,
    width: 24,
    height: 24,
    borderRadius: 12,
    backgroundColor: AppColors.success,
    borderWidth: 3,
    borderColor: '#1a1a2e',
  },
  // --- Name / status / duration ---
  name: {
    fontSize: 32,
    fontWeight: '700',
    color: AppColors.white,
    marginBottom: Spacing.xs,
  },
  statusContainer: {
    flexDirection: 'row',
    alignItems: 'center',
  },
  activeDot: {
    width: 8,
    height: 8,
    borderRadius: 4,
    backgroundColor: AppColors.success,
    marginRight: Spacing.sm,
  },
  duration: {
    fontSize: FontSizes.lg,
    color: AppColors.white,
    // Tabular figures keep the timer from jittering as digits change.
    fontVariant: ['tabular-nums'],
  },
  status: {
    fontSize: FontSizes.base,
    color: 'rgba(255,255,255,0.7)',
  },
  listeningStatus: {
    fontSize: FontSizes.sm,
    color: 'rgba(255,255,255,0.5)',
    marginTop: Spacing.md,
    fontStyle: 'italic',
  },
  errorContainer: {
    flexDirection: 'row',
    alignItems: 'center',
    marginTop: Spacing.md,
    paddingHorizontal: Spacing.lg,
  },
  errorText: {
    fontSize: FontSizes.sm,
    color: AppColors.error,
    marginLeft: Spacing.sm,
    flex: 1,
  },
  // --- Bottom controls ---
  controls: {
    flexDirection: 'row',
    justifyContent: 'center',
    alignItems: 'center',
    paddingVertical: Spacing.xl,
    paddingHorizontal: Spacing.lg,
    gap: 40, // Space between 2 buttons (Mute, End Call)
  },
  controlButton: {
    alignItems: 'center',
    padding: Spacing.md,
    borderRadius: BorderRadius.full,
    backgroundColor: 'rgba(255,255,255,0.1)',
    width: 70,
    height: 70,
    justifyContent: 'center',
  },
  controlButtonActive: {
    backgroundColor: 'rgba(255,255,255,0.2)',
  },
  controlLabel: {
    fontSize: FontSizes.xs,
    color: AppColors.white,
    marginTop: 4,
  },
  // Red circle with the handset icon rotated 135° to read as "hang up".
  endCallButton: {
    width: 72,
    height: 72,
    borderRadius: 36,
    backgroundColor: AppColors.error,
    justifyContent: 'center',
    alignItems: 'center',
    transform: [{ rotate: '135deg' }],
    shadowColor: AppColors.error,
    shadowOffset: { width: 0, height: 4 },
    shadowOpacity: 0.4,
    shadowRadius: 8,
    elevation: 8,
  },
});

View File

@ -1,243 +0,0 @@
/**
* Floating Call Bubble Component
*
* Shows a floating bubble during active voice calls.
* Can be dragged around the screen.
* Tapping it ends the call.
*/
import React, { useEffect, useRef, useState } from 'react';
import {
View,
Text,
StyleSheet,
TouchableOpacity,
Animated,
PanResponder,
Dimensions,
} from 'react-native';
import { useSafeAreaInsets } from 'react-native-safe-area-context';
import { AppColors, FontSizes, Spacing } from '@/constants/theme';
import { useVoiceCall } from '@/contexts/VoiceCallContext';
// Bubble diameter in dp.
const BUBBLE_SIZE = 70;
const TAB_BAR_HEIGHT = 60; // Tab bar content height (without safe area)
// Captured once at module load — does not track rotation/resize.
const { width: SCREEN_WIDTH, height: SCREEN_HEIGHT } = Dimensions.get('window');

/**
 * Floating call bubble shown while a voice call is active and minimized.
 *
 * Draggable via PanResponder; on release it snaps to the nearest horizontal
 * edge and clamps vertically so it never overlaps the tab bar or safe areas.
 * Tapping the bubble ends the call.
 *
 * Renders null unless callState.isActive && callState.isMinimized.
 */
export function FloatingCallBubble() {
  const { callState, endCall } = useVoiceCall();
  const insets = useSafeAreaInsets();

  // Animation values. Initial position: right edge, below the top inset.
  const pan = useRef(new Animated.ValueXY({
    x: SCREEN_WIDTH - BUBBLE_SIZE - 16,
    y: insets.top + 100,
  })).current;
  const scale = useRef(new Animated.Value(0)).current;
  const pulseAnim = useRef(new Animated.Value(1)).current;

  // Local duration state (updates from context)
  const [displayDuration, setDisplayDuration] = useState(callState.callDuration);

  // Re-sync display duration whenever the context value changes.
  useEffect(() => {
    setDisplayDuration(callState.callDuration);
  }, [callState.callDuration]);

  // Duration timer (local increment for smooth display between context updates).
  useEffect(() => {
    if (callState.isActive && callState.isMinimized) {
      const interval = setInterval(() => {
        setDisplayDuration(prev => prev + 1);
      }, 1000);
      return () => clearInterval(interval);
    }
  }, [callState.isActive, callState.isMinimized]);

  // Show/hide animation: spring in when minimized+active, fade out otherwise.
  useEffect(() => {
    if (callState.isActive && callState.isMinimized) {
      // Show bubble
      Animated.spring(scale, {
        toValue: 1,
        friction: 5,
        tension: 40,
        useNativeDriver: true,
      }).start();
    } else {
      // Hide bubble
      Animated.timing(scale, {
        toValue: 0,
        duration: 200,
        useNativeDriver: true,
      }).start();
    }
  }, [callState.isActive, callState.isMinimized, scale]);

  // Looping pulse animation on the outer ring while visible.
  useEffect(() => {
    if (callState.isActive && callState.isMinimized) {
      const pulse = Animated.loop(
        Animated.sequence([
          Animated.timing(pulseAnim, {
            toValue: 1.1,
            duration: 1000,
            useNativeDriver: true,
          }),
          Animated.timing(pulseAnim, {
            toValue: 1,
            duration: 1000,
            useNativeDriver: true,
          }),
        ])
      );
      pulse.start();
      return () => pulse.stop();
    }
  }, [callState.isActive, callState.isMinimized, pulseAnim]);

  // Pan responder for dragging.
  // NOTE(review): created once via useRef, so it closes over the initial
  // `insets` — if insets ever change (rotation), clamping uses stale values.
  const panResponder = useRef(
    PanResponder.create({
      onStartShouldSetPanResponder: () => true,
      onMoveShouldSetPanResponder: () => true,
      onPanResponderGrant: () => {
        pan.extractOffset();
      },
      onPanResponderMove: Animated.event([null, { dx: pan.x, dy: pan.y }], {
        useNativeDriver: false,
      }),
      onPanResponderRelease: (_, gestureState) => {
        pan.flattenOffset();
        // Snap to edge.
        // NOTE(review): reads Animated's private `_value` through `as any`;
        // the supported alternative is tracking values with addListener.
        const currentX = (pan.x as any)._value;
        const currentY = (pan.y as any)._value;
        const snapToLeft = currentX < SCREEN_WIDTH / 2;
        const targetX = snapToLeft ? 16 : SCREEN_WIDTH - BUBBLE_SIZE - 16;
        // Clamp Y within screen bounds
        // Account for tab bar height + safe area to avoid overlapping navigation
        const minY = insets.top + 16;
        const maxY = SCREEN_HEIGHT - BUBBLE_SIZE - insets.bottom - TAB_BAR_HEIGHT - 16;
        const targetY = Math.max(minY, Math.min(currentY, maxY));
        Animated.spring(pan, {
          toValue: { x: targetX, y: targetY },
          friction: 7,
          useNativeDriver: false,
        }).start();
      },
    })
  ).current;

  // Format duration as mm:ss (both fields zero-padded).
  const formatDuration = (seconds: number) => {
    const mins = Math.floor(seconds / 60);
    const secs = seconds % 60;
    return `${mins.toString().padStart(2, '0')}:${secs.toString().padStart(2, '0')}`;
  };

  // Don't render if not showing
  if (!callState.isActive || !callState.isMinimized) {
    return null;
  }

  return (
    <Animated.View
      style={[
        styles.container,
        {
          transform: [
            { translateX: pan.x },
            { translateY: pan.y },
            { scale },
          ],
        },
      ]}
      {...panResponder.panHandlers}
    >
      {/* Pulse ring */}
      <Animated.View
        style={[
          styles.pulseRing,
          {
            transform: [{ scale: pulseAnim }],
          },
        ]}
      />
      {/* Main bubble - tap to end call */}
      <TouchableOpacity
        style={styles.bubble}
        onPress={endCall}
        activeOpacity={0.9}
      >
        <View style={styles.avatarContainer}>
          <Text style={styles.avatarText}>J</Text>
        </View>
        <View style={styles.durationBadge}>
          <Text style={styles.durationText}>{formatDuration(displayDuration)}</Text>
        </View>
      </TouchableOpacity>
    </Animated.View>
  );
}
// Styles for FloatingCallBubble.
const styles = StyleSheet.create({
  // Absolutely positioned; actual x/y come from the Animated pan transform.
  container: {
    position: 'absolute',
    zIndex: 9999,
    width: BUBBLE_SIZE,
    height: BUBBLE_SIZE,
  },
  // Translucent ring behind the bubble that scales up/down for the pulse.
  pulseRing: {
    position: 'absolute',
    width: BUBBLE_SIZE,
    height: BUBBLE_SIZE,
    borderRadius: BUBBLE_SIZE / 2,
    backgroundColor: 'rgba(90, 200, 168, 0.3)',
  },
  bubble: {
    width: BUBBLE_SIZE,
    height: BUBBLE_SIZE,
    borderRadius: BUBBLE_SIZE / 2,
    backgroundColor: AppColors.success,
    justifyContent: 'center',
    alignItems: 'center',
    shadowColor: '#000',
    shadowOffset: { width: 0, height: 4 },
    shadowOpacity: 0.3,
    shadowRadius: 8,
    elevation: 10,
  },
  avatarContainer: {
    width: 44,
    height: 44,
    borderRadius: 22,
    backgroundColor: 'rgba(255, 255, 255, 0.2)',
    justifyContent: 'center',
    alignItems: 'center',
  },
  avatarText: {
    fontSize: FontSizes.xl,
    fontWeight: '600',
    color: AppColors.white,
  },
  // Small mm:ss badge overlapping the bubble's bottom edge.
  durationBadge: {
    position: 'absolute',
    bottom: -4,
    backgroundColor: 'rgba(0, 0, 0, 0.7)',
    paddingHorizontal: 6,
    paddingVertical: 2,
    borderRadius: 8,
  },
  durationText: {
    fontSize: 10,
    fontWeight: '600',
    color: AppColors.white,
    // Tabular figures keep the ticking timer width-stable.
    fontVariant: ['tabular-nums'],
  },
});

View File

@ -34,8 +34,8 @@ export function Button({
styles.base,
styles[variant],
styles[`size_${size}`],
fullWidth ? styles.fullWidth : {},
isDisabled ? styles.disabled : {},
fullWidth && styles.fullWidth,
isDisabled && styles.disabled,
style as ViewStyle,
];
@ -43,7 +43,7 @@ export function Button({
styles.text,
styles[`text_${variant}`],
styles[`text_${size}`],
isDisabled ? styles.textDisabled : {},
isDisabled && styles.textDisabled,
];
return (

View File

@ -1,137 +0,0 @@
/**
* Voice Call Context
*
* Global state for voice calls that persists across screens.
* Enables floating bubble when call is active and user navigates away.
*/
import React, { createContext, useContext, useState, useCallback, ReactNode } from 'react';
/** Snapshot of the global voice-call state shared across screens. */
interface VoiceCallState {
  // Whether a voice call is currently active
  isActive: boolean;
  // Whether the call UI is minimized (showing bubble instead of full screen)
  isMinimized: boolean;
  // LiveKit connection details (undefined when no call is in progress)
  token: string | undefined;
  wsUrl: string | undefined;
  // Call metadata
  beneficiaryName: string | undefined;
  beneficiaryId: string | undefined;
  // Call duration in seconds
  callDuration: number;
}

/** Value exposed by VoiceCallContext / the useVoiceCall hook. */
interface VoiceCallContextValue {
  // Current call state
  callState: VoiceCallState;
  // Start a new voice call
  startCall: (params: {
    token: string;
    wsUrl: string;
    beneficiaryName?: string;
    beneficiaryId?: string;
  }) => void;
  // End the current call
  endCall: () => void;
  // Minimize call (show floating bubble)
  minimizeCall: () => void;
  // Maximize call (show full screen)
  maximizeCall: () => void;
  // Update call duration
  updateDuration: (seconds: number) => void;
  // Convenience mirror of callState.isActive
  isCallActive: boolean;
}

// Idle state: no call, full-screen presentation, no connection details.
const initialState: VoiceCallState = {
  isActive: false,
  isMinimized: false,
  token: undefined,
  wsUrl: undefined,
  beneficiaryName: undefined,
  beneficiaryId: undefined,
  callDuration: 0,
};

// undefined default lets useVoiceCall detect a missing provider and throw.
const VoiceCallContext = createContext<VoiceCallContextValue | undefined>(undefined);
/**
 * Provides global voice-call state to the component tree.
 *
 * Keeps call lifecycle (start/end), presentation mode (minimized bubble vs
 * full screen), and the running duration in one place so the floating bubble
 * can survive navigation away from the call screen.
 */
export function VoiceCallProvider({ children }: { children: ReactNode }) {
  const [callState, setCallState] = useState<VoiceCallState>(initialState);

  // Begin a new call: reset everything, then apply the connection details.
  const startCall = useCallback(
    (opts: {
      token: string;
      wsUrl: string;
      beneficiaryName?: string;
      beneficiaryId?: string;
    }) => {
      console.log('[VoiceCallContext] Starting call');
      setCallState({
        ...initialState,
        isActive: true,
        token: opts.token,
        wsUrl: opts.wsUrl,
        beneficiaryName: opts.beneficiaryName,
        beneficiaryId: opts.beneficiaryId,
      });
    },
    []
  );

  // Drop back to the idle state entirely.
  const endCall = useCallback(() => {
    console.log('[VoiceCallContext] Ending call');
    setCallState(initialState);
  }, []);

  // Presentation toggles: bubble vs full-screen call UI.
  const minimizeCall = useCallback(() => {
    console.log('[VoiceCallContext] Minimizing call');
    setCallState((current) => ({ ...current, isMinimized: true }));
  }, []);

  const maximizeCall = useCallback(() => {
    console.log('[VoiceCallContext] Maximizing call');
    setCallState((current) => ({ ...current, isMinimized: false }));
  }, []);

  // Record the elapsed call time (seconds), driven by the call screen's timer.
  const updateDuration = useCallback((seconds: number) => {
    setCallState((current) => ({ ...current, callDuration: seconds }));
  }, []);

  const value: VoiceCallContextValue = {
    callState,
    startCall,
    endCall,
    minimizeCall,
    maximizeCall,
    updateDuration,
    isCallActive: callState.isActive,
  };

  return (
    <VoiceCallContext.Provider value={value}>
      {children}
    </VoiceCallContext.Provider>
  );
}
export function useVoiceCall() {
const context = useContext(VoiceCallContext);
if (!context) {
throw new Error('useVoiceCall must be used within VoiceCallProvider');
}
return context;
}

View File

@ -6,9 +6,3 @@ id = "CA_Yd3qcuYEVKKE"
[build]
dockerfile = "Dockerfile"
[env]
# Deepgram for TTS
DEEPGRAM_API_KEY = "cec33b489b0ba12c4e4f1ea888e887e88fba5848"
# AssemblyAI for STT (best accuracy - correctly recognizes "dad" vs "dead")
ASSEMBLYAI_API_KEY = "42e753b65b6a4360ae4a77ac76961857"

View File

@ -12,8 +12,6 @@ dependencies = [
"livekit-agents[silero]~=1.3",
"livekit-plugins-noise-cancellation~=0.2",
"livekit-plugins-deepgram~=1.0",
# Removed assemblyai - was giving garbage transcriptions
# Deepgram Nova-2 is faster and more accurate
"python-dotenv",
"aiohttp",
]

View File

@ -312,14 +312,7 @@ class WellNuoLLMStream(llm.LLMStream):
def prewarm(proc: JobProcess):
"""Preload VAD model for faster startup."""
# Increase min_silence_duration to prevent cutting off user speech during barge-in
# Default is 0.55s which is too short - user pauses between words get interpreted as end of speech
# 0.9s gives user more time to continue speaking without being cut off
proc.userdata["vad"] = silero.VAD.load(
min_silence_duration=0.9, # Wait 0.9s of silence before ending speech (default: 0.55)
min_speech_duration=0.05, # Keep low for quick interruption detection (default: 0.05)
activation_threshold=0.4, # Slightly lower for better sensitivity (default: 0.5)
)
proc.userdata["vad"] = silero.VAD.load()
async def wait_for_participant_with_metadata(
@ -396,39 +389,34 @@ async def entrypoint(ctx: JobContext):
logger.info(f"Starting Julia AI session in room {ctx.room.name}")
# Wait for participant with metadata - short timeout since metadata arrives immediately if present
# The mobile app sends deploymentId via token metadata
# Wait for participant with metadata (fixes race condition)
# The mobile app sends deploymentId and beneficiaryNamesDict in token metadata
deployment_id, beneficiary_names_dict = await wait_for_participant_with_metadata(
ctx, timeout=2.0 # 2 seconds is enough - if metadata exists, it arrives within 0.5s
ctx, timeout=10.0
)
# Use deployment_id from metadata, or fall back to default
# Log what we're using
effective_deployment_id = deployment_id or DEPLOYMENT_ID
logger.info(f"Using deployment_id={effective_deployment_id} (from_metadata={deployment_id is not None})")
logger.info(
f"Using WellNuo ask_wellnuo_ai API with deployment_id: {effective_deployment_id}"
)
if beneficiary_names_dict:
logger.info(f"Beneficiary names dict: {beneficiary_names_dict}")
else:
logger.info("No beneficiary_names_dict provided, using default behavior")
# Deepgram for STT - better accuracy and faster than AssemblyAI
# AssemblyAI was giving garbage like "shambhala balashambal" instead of actual speech
session = AgentSession(
# Deepgram Nova-2 model for best STT accuracy
stt=deepgram.STT(
model="nova-2-general",
language="en-US",
smart_format=True, # Better punctuation and formatting
no_delay=True, # Faster response for real-time
),
# Deepgram Nova-2 for accurate speech-to-text
stt=deepgram.STT(model="nova-2"),
# WellNuo voice_ask API for LLM with dynamic beneficiary data
llm=WellNuoLLM(
deployment_id=effective_deployment_id,
deployment_id=deployment_id,
beneficiary_names_dict=beneficiary_names_dict,
),
# Deepgram Aura Asteria for natural female voice
tts=deepgram.TTS(model="aura-asteria-en"),
# Silero VAD for voice activity detection (prewarmed with tuned settings)
# Silero VAD for voice activity detection
vad=ctx.proc.userdata["vad"],
# INTERRUPTION SETTINGS:
# min_interruption_duration: How long user must speak to trigger interruption (default 0.5s)
# Set to 999.0 to effectively DISABLE interruption - user cannot interrupt the agent
min_interruption_duration=999.0,
)
# Start the session with Julia assistant

View File

@ -198,67 +198,6 @@ class ApiService {
}
}
// Deployment ID management
// Persist the selected deployment id in the device's secure store.
async setDeploymentId(deploymentId: string): Promise<void> {
await SecureStore.setItemAsync('deploymentId', deploymentId);
}
// Read the stored deployment id. Returns null both when no id was stored
// and when the secure-store read fails (errors are deliberately swallowed).
async getDeploymentId(): Promise<string | null> {
try {
return await SecureStore.getItemAsync('deploymentId');
} catch {
return null;
}
}
// Remove the stored deployment id from the secure store.
async clearDeploymentId(): Promise<void> {
await SecureStore.deleteItemAsync('deploymentId');
}
// Check whether `deploymentId` belongs to the authenticated user by scanning
// the backend `deployments_list`. Resolves { valid: true, name } on a match,
// { valid: false } when no deployment has that id, and an error response when
// the user is unauthenticated or the request fails.
// NOTE(review): only the first 100 deployments are fetched (first: '0',
// last: '100') — an id beyond that page would be reported invalid. Confirm
// 100 is a safe upper bound for real accounts.
async validateDeploymentId(deploymentId: string): Promise<ApiResponse<{ valid: boolean; name?: string }>> {
const token = await this.getToken();
const userName = await this.getUserName();
if (!token || !userName) {
return { ok: false, error: { message: 'Not authenticated', code: 'UNAUTHORIZED' } };
}
// Fetch the user's deployments; the API returns numeric deployment_id values.
const response = await this.makeRequest<{ result_list: Array<{
deployment_id: number;
email: string;
first_name: string;
last_name: string;
}> }>({
function: 'deployments_list',
user_name: userName,
token: token,
first: '0',
last: '100',
});
if (!response.ok || !response.data?.result_list) {
return { ok: false, error: response.error || { message: 'Failed to validate deployment ID' } };
}
// The caller passes a string id but the API returns numbers, so compare numerically.
const deploymentIdNum = parseInt(deploymentId, 10);
const deployment = response.data.result_list.find(item => item.deployment_id === deploymentIdNum);
if (deployment) {
return {
ok: true,
data: {
valid: true,
// Display name assembled from the matched deployment's owner fields.
name: `${deployment.first_name} ${deployment.last_name}`.trim(),
},
};
}
return {
ok: true,
data: { valid: false },
};
}
// Beneficiaries (elderly people being monitored)
async getBeneficiaries(): Promise<ApiResponse<{ beneficiaries: Beneficiary[] }>> {
const token = await this.getToken();

View File

@ -88,7 +88,7 @@ CURRENT STATUS (Today - ${todayData?.day || 'Wednesday'}):
const emoji = alert.severity === 'critical' ? '🔴' : alert.severity === 'high' ? '🟠' : alert.severity === 'medium' ? '🟡' : '🟢';
context += ` ${emoji} ${alert.type.replace(/_/g, ' ').toUpperCase()} at ${alert.time}`;
if (alert.note) context += ` - ${alert.note}`;
if ('location' in alert && alert.location) context += ` (${alert.location})`;
if (alert.location) context += ` (${alert.location})`;
context += '\n';
});
}