Integrate voice chat with TTS and speech recognition

App screens:
- chat.tsx: Voice-enabled chat with TTS responses
- debug.tsx: TTS debugging and testing screen
- index.tsx: Updated home with voice indicators
- _layout.tsx: Added TTS and error boundaries

Config:
- app.json: Microphone permissions for voice input
- package.json: Added Sherpa ONNX dependencies
- constants/theme.ts: Voice UI colors

Features:
- Voice input via speech recognition
- TTS voice output for chat responses
- Real-time voice activity indication
- Debug screen for TTS testing
- Error boundaries for stability

User experience:
- Hands-free chat interaction
- Visual feedback during voice processing
- Graceful error handling
This commit is contained in:
Sergei 2026-01-14 19:09:50 -08:00
parent b2639dd540
commit da2c4bebc9
9 changed files with 12792 additions and 336 deletions

View File

@ -12,11 +12,15 @@
"supportsTablet": true, "supportsTablet": true,
"bundleIdentifier": "com.wellnuo.BluetoothScanner", "bundleIdentifier": "com.wellnuo.BluetoothScanner",
"appleTeamId": "UHLZD54ULZ", "appleTeamId": "UHLZD54ULZ",
"deploymentTarget": "16.0",
"infoPlist": { "infoPlist": {
"ITSAppUsesNonExemptEncryption": false "ITSAppUsesNonExemptEncryption": false,
"NSMicrophoneUsageDescription": "WellNuo needs access to your microphone for voice input to the AI assistant.",
"NSSpeechRecognitionUsageDescription": "WellNuo uses speech recognition to convert your voice to text for the AI assistant."
} }
}, },
"android": { "android": {
"package": "com.wellnuo.app",
"adaptiveIcon": { "adaptiveIcon": {
"backgroundColor": "#E6F4FE", "backgroundColor": "#E6F4FE",
"foregroundImage": "./assets/images/android-icon-foreground.png", "foregroundImage": "./assets/images/android-icon-foreground.png",
@ -43,6 +47,13 @@
"backgroundColor": "#000000" "backgroundColor": "#000000"
} }
} }
],
[
"expo-speech-recognition",
{
"microphonePermission": "WellNuo needs access to your microphone for voice commands.",
"speechRecognitionPermission": "WellNuo uses speech recognition to convert your voice to text."
}
] ]
], ],
"experiments": { "experiments": {
@ -52,9 +63,9 @@
"extra": { "extra": {
"router": {}, "router": {},
"eas": { "eas": {
"projectId": "b06920f8-cbe7-4d6e-a5c2-5e60e1791d65" "projectId": "4f415b4b-41c8-4b98-989c-32f6b3f97481"
} }
}, },
"owner": "serter2069" "owner": "serter2069ya"
} }
} }

View File

@ -64,6 +64,15 @@ export default function TabLayout() {
), ),
}} }}
/> />
<Tabs.Screen
name="debug"
options={{
title: 'Debug',
tabBarIcon: ({ color, size }) => (
<Feather name="tool" size={22} color={color} />
),
}}
/>
{/* Hide explore tab */} {/* Hide explore tab */}
<Tabs.Screen <Tabs.Screen
name="explore" name="explore"

View File

@ -11,6 +11,10 @@ import {
Modal, Modal,
ActivityIndicator, ActivityIndicator,
Keyboard, Keyboard,
Animated,
Alert,
Linking,
ScrollView,
} from 'react-native'; } from 'react-native';
import { Ionicons } from '@expo/vector-icons'; import { Ionicons } from '@expo/vector-icons';
import { SafeAreaView } from 'react-native-safe-area-context'; import { SafeAreaView } from 'react-native-safe-area-context';
@ -20,10 +24,14 @@ import { api } from '@/services/api';
import { useBeneficiary } from '@/contexts/BeneficiaryContext'; import { useBeneficiary } from '@/contexts/BeneficiaryContext';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme'; import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import type { Message, Beneficiary } from '@/types'; import type { Message, Beneficiary } from '@/types';
import { useSpeechRecognition } from '@/hooks/useSpeechRecognition';
import sherpaTTS from '@/services/sherpaTTS';
import { VoiceIndicator } from '@/components/VoiceIndicator';
import { TTSErrorBoundary } from '@/components/TTSErrorBoundary';
const API_URL = 'https://eluxnetworks.net/function/well-api/api'; const API_URL = 'https://eluxnetworks.net/function/well-api/api';
export default function ChatScreen() { function ChatScreenContent() {
const router = useRouter(); const router = useRouter();
const { currentBeneficiary, setCurrentBeneficiary, getBeneficiaryContext } = useBeneficiary(); const { currentBeneficiary, setCurrentBeneficiary, getBeneficiaryContext } = useBeneficiary();
const [messages, setMessages] = useState<Message[]>([ const [messages, setMessages] = useState<Message[]>([
@ -38,11 +46,300 @@ export default function ChatScreen() {
const [isSending, setIsSending] = useState(false); const [isSending, setIsSending] = useState(false);
const flatListRef = useRef<FlatList>(null); const flatListRef = useRef<FlatList>(null);
// Voice state
const [isSpeaking, setIsSpeaking] = useState(false);
const [ttsInitialized, setTtsInitialized] = useState(false);
const [voiceFeedback, setVoiceFeedback] = useState<string | null>(null);
const [isVoiceConversation, setIsVoiceConversation] = useState(false); // Auto-listen mode
const pulseAnim = useRef(new Animated.Value(1)).current;
// Speech recognition hook
const {
isListening,
recognizedText,
startListening,
stopListening,
isAvailable: speechRecognitionAvailable,
requestPermission,
} = useSpeechRecognition();
// Beneficiary picker state // Beneficiary picker state
const [showBeneficiaryPicker, setShowBeneficiaryPicker] = useState(false); const [showBeneficiaryPicker, setShowBeneficiaryPicker] = useState(false);
const [beneficiaries, setBeneficiaries] = useState<Beneficiary[]>([]); const [beneficiaries, setBeneficiaries] = useState<Beneficiary[]>([]);
const [loadingBeneficiaries, setLoadingBeneficiaries] = useState(false); const [loadingBeneficiaries, setLoadingBeneficiaries] = useState(false);
// Initialize TTS on mount
useEffect(() => {
const initTTS = async () => {
try {
const success = await sherpaTTS.initialize();
setTtsInitialized(success);
console.log('[Chat] SherpaTTS initialized:', success);
} catch (error) {
console.log('[Chat] SherpaTTS init failed, will use fallback');
}
};
initTTS();
return () => {
sherpaTTS.deinitialize();
};
}, []);
// Pulse animation for listening state
useEffect(() => {
if (isListening) {
const pulse = Animated.loop(
Animated.sequence([
Animated.timing(pulseAnim, {
toValue: 1.3,
duration: 500,
useNativeDriver: true,
}),
Animated.timing(pulseAnim, {
toValue: 1,
duration: 500,
useNativeDriver: true,
}),
])
);
pulse.start();
return () => pulse.stop();
} else {
pulseAnim.setValue(1);
}
}, [isListening, pulseAnim]);
// Track if we were just listening (to show feedback when stopped)
const wasListeningRef = useRef(false);
// Auto-send when speech recognition completes
useEffect(() => {
if (!isListening && wasListeningRef.current) {
// We just stopped listening
wasListeningRef.current = false;
if (recognizedText.trim()) {
// We have text - send it
setInput(recognizedText);
setTimeout(() => {
if (recognizedText.trim()) {
handleVoiceSend(recognizedText.trim());
}
}, 300);
} else {
// No text recognized (C4 scenario) - show brief feedback
setInput('');
setVoiceFeedback("Didn't catch that. Try again.");
// Auto-hide after 2 seconds
setTimeout(() => setVoiceFeedback(null), 2000);
}
}
if (isListening) {
wasListeningRef.current = true;
}
}, [isListening, recognizedText]);
// Auto-start listening after TTS finishes
const autoStartListening = useCallback(async () => {
if (!isVoiceConversation) return;
// IMPORTANT: Wait longer to ensure TTS audio has fully stopped
// This prevents the microphone from capturing TTS output
await new Promise(resolve => setTimeout(resolve, 800));
// Double-check we're not speaking anymore (TTS may have restarted)
const stillSpeaking = await sherpaTTS.isSpeaking().catch(() => false);
if (stillSpeaking) {
console.log('[Chat] TTS still speaking, not starting listening yet');
return;
}
const hasPermission = await requestPermission();
if (hasPermission && isVoiceConversation) {
console.log('[Chat] Auto-starting listening after TTS');
startListening({ continuous: false });
}
}, [isVoiceConversation, requestPermission, startListening]);
// TTS function - use SherpaTTS or fallback to expo-speech.
// Stops any active speech-recognition session first so the microphone
// never captures the TTS audio, then speaks `text`; when
// `shouldAutoListen` is true the screen re-enters listening mode after
// playback finishes (hands-free conversation loop).
const speakText = useCallback(async (text: string, shouldAutoListen = false) => {
  if (isSpeaking) return;
  // CRITICAL: Stop any active listening BEFORE TTS starts
  // This prevents the microphone from capturing TTS audio output
  if (isListening) {
    console.log('[Chat] Stopping listening before TTS');
    stopListening();
    wasListeningRef.current = false; // Prevent auto-send of any partial text
  }
  setIsSpeaking(true);
  if (shouldAutoListen) {
    setIsVoiceConversation(true);
  }
  const handleDone = () => {
    setIsSpeaking(false);
    // Auto-start listening if in voice conversation mode
    if (shouldAutoListen || isVoiceConversation) {
      autoStartListening();
    }
  };
  try {
    if (ttsInitialized && sherpaTTS.isAvailable()) {
      await sherpaTTS.speak(text, {
        onDone: handleDone,
        onError: (error) => {
          console.error('[Chat] TTS speak error:', error);
          setIsSpeaking(false);
        },
      });
    } else {
      console.warn('[Chat] TTS not available');
      setIsSpeaking(false);
    }
  } catch (error) {
    console.error('[Chat] TTS error:', error);
    setIsSpeaking(false);
  }
  // BUG FIX: `isListening` and `stopListening` were read inside this callback
  // but missing from the dependency array, so a memoized stale closure could
  // see isListening === false and start TTS without stopping an active
  // recording (the mic would then capture the TTS output).
}, [isSpeaking, isListening, stopListening, ttsInitialized, isVoiceConversation, autoStartListening]);
// Stop TTS only (without exiting voice mode)
const stopTTS = useCallback(() => {
if (ttsInitialized && sherpaTTS.isAvailable()) {
sherpaTTS.stop();
}
setIsSpeaking(false);
}, [ttsInitialized]);
// Stop TTS and exit voice conversation mode completely
const stopSpeaking = useCallback(() => {
stopTTS();
setIsVoiceConversation(false); // Exit voice mode when user stops TTS
}, [stopTTS]);
// Smart handler for VoiceIndicator tap - behavior depends on current mode
const handleVoiceIndicatorTap = useCallback(async (currentMode: 'listening' | 'speaking') => {
console.log('[Chat] VoiceIndicator tapped in mode:', currentMode);
if (currentMode === 'listening') {
// User tapped while we're recording their voice
// Action: Cancel recording and exit voice mode completely
console.log('[Chat] Cancelling listening, exiting voice mode');
stopListening();
setIsVoiceConversation(false);
wasListeningRef.current = false; // Prevent auto-send of partial text
setInput(''); // Clear any partial text
} else if (currentMode === 'speaking') {
// User tapped while AI is speaking
// Action: Interrupt AI and immediately start listening to user (like interrupting in conversation)
console.log('[Chat] Interrupting AI speech, starting to listen');
stopTTS();
// Small delay then start listening
await new Promise(resolve => setTimeout(resolve, 200));
const hasPermission = await requestPermission();
if (hasPermission) {
startListening({ continuous: false });
}
}
}, [stopListening, stopTTS, requestPermission, startListening]);
// Show permission denied alert
const showPermissionDeniedAlert = useCallback(() => {
Alert.alert(
'Microphone Access Required',
'To use voice input, please allow microphone access in Settings.',
[
{ text: 'Cancel', style: 'cancel' },
{
text: 'Open Settings',
onPress: () => Linking.openSettings(),
},
]
);
}, []);
// Handle voice input toggle
const handleVoiceToggle = useCallback(async () => {
if (isListening) {
// User tapped while listening - stop and check if we have text
stopListening();
// Note: The useEffect below handles auto-send if recognizedText exists
// If no text was recognized, it just cancels (B3 scenario)
} else {
// Stop any ongoing speech first
if (isSpeaking) {
stopSpeaking();
}
// Dismiss keyboard (E1 scenario)
Keyboard.dismiss();
// Request permission if needed
const hasPermission = await requestPermission();
if (!hasPermission) {
// Show alert with option to open settings (C1 scenario)
showPermissionDeniedAlert();
return;
}
startListening({ continuous: false });
}
}, [isListening, isSpeaking, startListening, stopListening, stopSpeaking, requestPermission, showPermissionDeniedAlert]);
// Handle sending voice message
const handleVoiceSend = useCallback(async (text: string) => {
if (!text.trim() || isSending) return;
// Mark that we're in voice conversation mode
setIsVoiceConversation(true);
const userMessage: Message = {
id: Date.now().toString(),
role: 'user',
content: text,
timestamp: new Date(),
};
setMessages((prev) => [...prev, userMessage]);
setInput('');
setIsSending(true);
try {
const aiResponse = await sendWithContext(text);
const assistantMessage: Message = {
id: (Date.now() + 1).toString(),
role: 'assistant',
content: aiResponse,
timestamp: new Date(),
};
setMessages((prev) => [...prev, assistantMessage]);
// Speak the response with auto-listen enabled
speakText(aiResponse, true);
} catch (error) {
const errorText = `Sorry, I encountered an error: ${error instanceof Error ? error.message : 'Unknown error'}`;
const errorMessage: Message = {
id: (Date.now() + 1).toString(),
role: 'assistant',
content: errorText,
timestamp: new Date(),
};
setMessages((prev) => [...prev, errorMessage]);
// Speak error message with auto-listen enabled
speakText(errorText, true);
} finally {
setIsSending(false);
}
}, [isSending, speakText]);
// Load beneficiaries when picker opens // Load beneficiaries when picker opens
const loadBeneficiaries = useCallback(async () => { const loadBeneficiaries = useCallback(async () => {
setLoadingBeneficiaries(true); setLoadingBeneficiaries(true);
@ -177,10 +474,23 @@ export default function ChatScreen() {
const userName = await SecureStore.getItemAsync('userName'); const userName = await SecureStore.getItemAsync('userName');
if (!token || !userName) throw new Error('Please log in'); if (!token || !userName) throw new Error('Please log in');
if (!currentBeneficiary?.id) throw new Error('Please select a beneficiary');
const beneficiaryName = currentBeneficiary.name || 'the patient'; // Auto-select first beneficiary if none selected
const deploymentId = currentBeneficiary.id.toString(); let beneficiary = currentBeneficiary;
if (!beneficiary?.id) {
console.log('[Chat] No beneficiary selected, auto-loading first one...');
const loaded = await loadBeneficiaries();
if (loaded.length > 0) {
beneficiary = loaded[0];
setCurrentBeneficiary(beneficiary);
console.log('[Chat] Auto-selected beneficiary:', beneficiary.name);
} else {
throw new Error('No beneficiaries found. Please add one first.');
}
}
const beneficiaryName = beneficiary.name || 'the patient';
const deploymentId = beneficiary.id.toString();
// Fetch both contexts in PARALLEL for speed // Fetch both contexts in PARALLEL for speed
const [activityContext, dashboardContext] = await Promise.all([ const [activityContext, dashboardContext] = await Promise.all([
@ -340,10 +650,12 @@ Based on this data, please answer the following question: ${question}`;
</Text> </Text>
</View> </View>
</View> </View>
<View style={styles.headerButtons}>
<TouchableOpacity style={styles.headerButton} onPress={openBeneficiaryPicker}> <TouchableOpacity style={styles.headerButton} onPress={openBeneficiaryPicker}>
<Ionicons name="people-outline" size={24} color={AppColors.primary} /> <Ionicons name="people-outline" size={24} color={AppColors.primary} />
</TouchableOpacity> </TouchableOpacity>
</View> </View>
</View>
{/* Beneficiary Picker Modal */} {/* Beneficiary Picker Modal */}
<Modal <Modal
@ -405,6 +717,7 @@ Based on this data, please answer the following question: ${question}`;
</View> </View>
</Modal> </Modal>
{/* Messages */} {/* Messages */}
<KeyboardAvoidingView <KeyboardAvoidingView
style={styles.chatContainer} style={styles.chatContainer}
@ -421,17 +734,66 @@ Based on this data, please answer the following question: ${question}`;
onContentSizeChange={() => flatListRef.current?.scrollToEnd({ animated: true })} onContentSizeChange={() => flatListRef.current?.scrollToEnd({ animated: true })}
/> />
{/* Voice Feedback Text (for errors) */}
{voiceFeedback && !isListening && !isSpeaking && (
<View style={styles.voiceFeedbackContainer}>
<Text style={styles.voiceFeedbackText}>{voiceFeedback}</Text>
</View>
)}
{/* Beautiful Voice Indicator Animation */}
{(isListening || isSpeaking) && (
<VoiceIndicator
mode={isListening ? 'listening' : 'speaking'}
onTap={handleVoiceIndicatorTap}
/>
)}
{/* Input */} {/* Input */}
<View style={styles.inputContainer}> <View style={styles.inputContainer}>
{/* Microphone / Stop Button */}
<Animated.View style={{ transform: [{ scale: isListening ? pulseAnim : 1 }] }}>
<TouchableOpacity
style={[
styles.micButton,
isListening && styles.micButtonActive,
isSpeaking && styles.micButtonSpeaking,
!speechRecognitionAvailable && !isSpeaking && styles.micButtonDisabled,
]}
onPress={isSpeaking ? stopSpeaking : handleVoiceToggle}
disabled={!speechRecognitionAvailable && !isSpeaking || isSending}
>
<Ionicons
name={isSpeaking ? 'stop' : isListening ? 'mic' : 'mic-outline'}
size={22}
color={
isSpeaking
? AppColors.white
: isListening
? AppColors.white
: speechRecognitionAvailable
? AppColors.primary
: AppColors.textMuted
}
/>
</TouchableOpacity>
</Animated.View>
<TextInput <TextInput
style={styles.input} style={styles.input}
placeholder="Type a message..." placeholder={
isSpeaking
? 'AI is speaking... tap stop to interrupt'
: isListening
? 'Listening...'
: 'Type or speak...'
}
placeholderTextColor={AppColors.textMuted} placeholderTextColor={AppColors.textMuted}
value={input} value={input}
onChangeText={setInput} onChangeText={setInput}
multiline multiline
maxLength={1000} maxLength={1000}
editable={true} editable={!isListening && !isSpeaking}
onSubmitEditing={handleSend} onSubmitEditing={handleSend}
/> />
<TouchableOpacity <TouchableOpacity
@ -680,4 +1042,54 @@ const styles = StyleSheet.create({
color: AppColors.textSecondary, color: AppColors.textSecondary,
marginTop: 2, marginTop: 2,
}, },
// Voice UI styles
voiceFeedbackContainer: {
paddingHorizontal: Spacing.md,
paddingVertical: Spacing.sm,
backgroundColor: 'rgba(255, 152, 0, 0.1)',
borderRadius: BorderRadius.md,
marginHorizontal: Spacing.md,
marginBottom: Spacing.sm,
},
voiceFeedbackText: {
fontSize: FontSizes.sm,
color: AppColors.warning || '#FF9800',
textAlign: 'center',
},
micButton: {
width: 44,
height: 44,
borderRadius: 22,
backgroundColor: AppColors.surface,
justifyContent: 'center',
alignItems: 'center',
marginRight: Spacing.sm,
borderWidth: 1,
borderColor: AppColors.border,
},
micButtonActive: {
backgroundColor: AppColors.primary,
borderColor: AppColors.primary,
},
micButtonSpeaking: {
backgroundColor: AppColors.error || '#E53935',
borderColor: AppColors.error || '#E53935',
},
micButtonDisabled: {
opacity: 0.5,
},
// Header buttons (for beneficiary picker)
headerButtons: {
flexDirection: 'row',
gap: Spacing.xs,
},
}); });
/**
 * Screen entry point. Renders the chat UI inside a TTS error boundary so
 * a crash in the native text-to-speech layer degrades gracefully instead
 * of taking down the whole screen.
 */
export default function ChatScreen() {
  const guardedChat = (
    <TTSErrorBoundary>
      <ChatScreenContent />
    </TTSErrorBoundary>
  );
  return guardedChat;
}

525
app/(tabs)/debug.tsx Normal file
View File

@ -0,0 +1,525 @@
import React, { useState, useEffect, useRef } from 'react';
import {
View,
Text,
StyleSheet,
FlatList,
TouchableOpacity,
TextInput,
Share,
Platform,
} from 'react-native';
import { SafeAreaView } from 'react-native-safe-area-context';
import { Ionicons } from '@expo/vector-icons';
import { debugLogger, type LogEntry } from '@/services/DebugLogger';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import sherpaTTS from '@/services/sherpaTTS';
export default function DebugScreen() {
const [logs, setLogs] = useState<LogEntry[]>([]);
const [filter, setFilter] = useState<string>('');
const [selectedCategory, setSelectedCategory] = useState<string>('All');
const [ttsState, setTtsState] = useState<ReturnType<typeof sherpaTTS.getState>>(sherpaTTS.getState());
const flatListRef = useRef<FlatList>(null);
// Initialize TTS and subscribe to state changes
useEffect(() => {
// Subscribe to TTS state changes
const unsubscribeTTS = sherpaTTS.addStateListener(setTtsState);
// Start initialization
sherpaTTS.initialize().catch(e =>
debugLogger.error('TTS', `Init failed: ${e}`)
);
return unsubscribeTTS;
}, []);
// Subscribe to log updates
useEffect(() => {
const unsubscribe = debugLogger.subscribe((newLogs) => {
setLogs(newLogs);
// Auto-scroll to bottom when new logs arrive
setTimeout(() => {
flatListRef.current?.scrollToEnd({ animated: true });
}, 100);
});
// Initial load
setLogs(debugLogger.getLogs());
return unsubscribe;
}, []);
// Get unique categories
const categories = ['All', ...new Set(logs.map(log => log.category))];
// Filter logs
const filteredLogs = logs.filter(log => {
const matchesCategory = selectedCategory === 'All' || log.category === selectedCategory;
const matchesFilter = !filter || log.message.toLowerCase().includes(filter.toLowerCase());
return matchesCategory && matchesFilter;
});
// Clear logs
const handleClear = () => {
debugLogger.clear();
};
// Export logs
const handleExport = async () => {
const text = debugLogger.exportAsText();
try {
await Share.share({
message: text,
title: 'Debug Logs Export',
});
} catch (error) {
console.error('Failed to export logs:', error);
}
};
// Test TTS - check if ready before speaking
const handleTestTTS = () => {
if (!ttsState.initialized) {
debugLogger.warn('TTS', 'Cannot test - TTS not ready yet');
return;
}
debugLogger.info('TTS', 'Testing voice...');
sherpaTTS.speak('Hello, this is a test message', {
onDone: () => debugLogger.info('TTS', 'Voice test complete'),
onError: (e) => debugLogger.error('TTS', `Voice test failed: ${e}`)
});
};
// Get TTS status display
const getTTSStatus = () => {
if (ttsState.initializing) {
return {
text: ttsState.error || 'Downloading voice model...',
color: AppColors.warning || '#FF9800',
icon: 'cloud-download' as const,
};
}
if (ttsState.initialized) {
return {
text: 'Ready',
color: '#4CAF50',
icon: 'checkmark-circle' as const,
};
}
if (ttsState.error) {
return {
text: ttsState.error,
color: AppColors.error || '#E53935',
icon: 'alert-circle' as const,
};
}
return {
text: 'Not initialized',
color: AppColors.textMuted,
icon: 'time' as const,
};
};
const ttsStatus = getTTSStatus();
// Get log level color
const getLevelColor = (level: LogEntry['level']): string => {
switch (level) {
case 'error':
return AppColors.error || '#E53935';
case 'warn':
return AppColors.warning || '#FF9800';
case 'info':
return AppColors.primary;
default:
return AppColors.textSecondary;
}
};
// Get level icon
const getLevelIcon = (level: LogEntry['level']): any => {
switch (level) {
case 'error':
return 'close-circle';
case 'warn':
return 'warning';
case 'info':
return 'information-circle';
default:
return 'chatbubble-ellipses';
}
};
// Render log item
const renderLog = ({ item }: { item: LogEntry }) => {
const time = item.timestamp.toLocaleTimeString();
const levelColor = getLevelColor(item.level);
return (
<View style={styles.logItem}>
<View style={styles.logHeader}>
<View style={styles.logInfo}>
<Ionicons name={getLevelIcon(item.level)} size={16} color={levelColor} />
<Text style={[styles.logLevel, { color: levelColor }]}>
{item.level.toUpperCase()}
</Text>
<Text style={styles.logCategory}>[{item.category}]</Text>
<Text style={styles.logTime}>{time}</Text>
</View>
</View>
<Text style={styles.logMessage}>{item.message}</Text>
{item.data && (
<Text style={styles.logData}>
{typeof item.data === 'object' ? JSON.stringify(item.data, null, 2) : String(item.data)}
</Text>
)}
</View>
);
};
return (
<SafeAreaView style={styles.container} edges={['top']}>
{/* Header */}
<View style={styles.header}>
<Text style={styles.headerTitle}>Debug Console</Text>
<View style={styles.headerButtons}>
<TouchableOpacity style={styles.headerButton} onPress={handleExport}>
<Ionicons name="share-outline" size={22} color={AppColors.primary} />
</TouchableOpacity>
<TouchableOpacity style={styles.headerButton} onPress={handleClear}>
<Ionicons name="trash-outline" size={22} color={AppColors.error || '#E53935'} />
</TouchableOpacity>
</View>
</View>
{/* TTS Test Button */}
<View style={styles.ttsTestContainer}>
{/* TTS Status Indicator */}
<View style={styles.ttsStatusRow}>
<Ionicons name={ttsStatus.icon} size={16} color={ttsStatus.color} />
<Text style={[styles.ttsStatusText, { color: ttsStatus.color }]}>
{ttsStatus.text}
</Text>
</View>
<TouchableOpacity
style={[
styles.testButton,
!ttsState.initialized && styles.testButtonDisabled
]}
onPress={handleTestTTS}
disabled={!ttsState.initialized}
>
<Ionicons
name={ttsState.speaking ? "stop-circle" : "volume-high"}
size={20}
color={ttsState.initialized ? AppColors.white : AppColors.textMuted}
/>
<Text
style={[
styles.testButtonText,
!ttsState.initialized && styles.testButtonTextDisabled
]}
>
{ttsState.speaking ? 'Stop' : 'Test Voice'}
</Text>
</TouchableOpacity>
</View>
{/* Stats Bar */}
<View style={styles.statsBar}>
<View style={styles.statItem}>
<Text style={styles.statLabel}>Total:</Text>
<Text style={styles.statValue}>{logs.length}</Text>
</View>
<View style={styles.statItem}>
<Text style={[styles.statLabel, { color: AppColors.error || '#E53935' }]}>Errors:</Text>
<Text style={[styles.statValue, { color: AppColors.error || '#E53935' }]}>
{logs.filter(l => l.level === 'error').length}
</Text>
</View>
<View style={styles.statItem}>
<Text style={[styles.statLabel, { color: AppColors.warning || '#FF9800' }]}>Warns:</Text>
<Text style={[styles.statValue, { color: AppColors.warning || '#FF9800' }]}>
{logs.filter(l => l.level === 'warn').length}
</Text>
</View>
<View style={styles.statItem}>
<Text style={[styles.statLabel, { color: AppColors.primary }]}>Filtered:</Text>
<Text style={[styles.statValue, { color: AppColors.primary }]}>
{filteredLogs.length}
</Text>
</View>
</View>
{/* Category Filter */}
<View style={styles.filterContainer}>
<FlatList
horizontal
data={categories}
keyExtractor={(item) => item}
renderItem={({ item }) => (
<TouchableOpacity
style={[
styles.categoryChip,
selectedCategory === item && styles.categoryChipActive,
]}
onPress={() => setSelectedCategory(item)}
>
<Text
style={[
styles.categoryChipText,
selectedCategory === item && styles.categoryChipTextActive,
]}
>
{item}
</Text>
</TouchableOpacity>
)}
contentContainerStyle={styles.categoryList}
showsHorizontalScrollIndicator={false}
/>
</View>
{/* Search Filter */}
<View style={styles.searchContainer}>
<Ionicons name="search" size={20} color={AppColors.textMuted} style={styles.searchIcon} />
<TextInput
style={styles.searchInput}
placeholder="Filter logs..."
placeholderTextColor={AppColors.textMuted}
value={filter}
onChangeText={setFilter}
/>
{filter.length > 0 && (
<TouchableOpacity onPress={() => setFilter('')}>
<Ionicons name="close-circle" size={20} color={AppColors.textMuted} />
</TouchableOpacity>
)}
</View>
{/* Logs List */}
<FlatList
ref={flatListRef}
data={filteredLogs}
keyExtractor={(item) => item.id}
renderItem={renderLog}
contentContainerStyle={styles.logsList}
showsVerticalScrollIndicator={true}
ListEmptyComponent={
<View style={styles.emptyContainer}>
<Ionicons name="bug-outline" size={64} color={AppColors.textMuted} />
<Text style={styles.emptyText}>No logs yet</Text>
<Text style={styles.emptySubtext}>
Voice and system logs will appear here
</Text>
</View>
}
/>
</SafeAreaView>
);
}
// Styles for the debug console screen, grouped by UI region.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: AppColors.background,
  },
  // --- Header bar (title + export/clear actions) ---
  header: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'space-between',
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.sm,
    backgroundColor: AppColors.surface,
    borderBottomWidth: 1,
    borderBottomColor: AppColors.border,
  },
  headerTitle: {
    fontSize: FontSizes.xl,
    fontWeight: '600',
    color: AppColors.textPrimary,
  },
  headerButtons: {
    flexDirection: 'row',
    gap: Spacing.sm,
  },
  headerButton: {
    padding: Spacing.xs,
  },
  // --- Stats bar (total / errors / warns / filtered counts) ---
  statsBar: {
    flexDirection: 'row',
    justifyContent: 'space-around',
    paddingVertical: Spacing.sm,
    paddingHorizontal: Spacing.md,
    backgroundColor: AppColors.surface,
    borderBottomWidth: 1,
    borderBottomColor: AppColors.border,
  },
  statItem: {
    alignItems: 'center',
  },
  statLabel: {
    fontSize: FontSizes.xs,
    color: AppColors.textSecondary,
    marginBottom: 2,
  },
  statValue: {
    fontSize: FontSizes.lg,
    fontWeight: '600',
    color: AppColors.textPrimary,
  },
  // --- Horizontal category-filter chips ---
  filterContainer: {
    backgroundColor: AppColors.surface,
    borderBottomWidth: 1,
    borderBottomColor: AppColors.border,
  },
  categoryList: {
    paddingHorizontal: Spacing.sm,
    paddingVertical: Spacing.xs,
  },
  categoryChip: {
    paddingHorizontal: Spacing.sm + 4,
    paddingVertical: Spacing.xs,
    borderRadius: BorderRadius.full,
    backgroundColor: AppColors.background,
    marginHorizontal: 4,
    borderWidth: 1,
    borderColor: AppColors.border,
  },
  categoryChipActive: {
    backgroundColor: AppColors.primary,
    borderColor: AppColors.primary,
  },
  categoryChipText: {
    fontSize: FontSizes.sm,
    color: AppColors.textSecondary,
    fontWeight: '500',
  },
  categoryChipTextActive: {
    color: AppColors.white,
  },
  // --- Free-text search row ---
  searchContainer: {
    flexDirection: 'row',
    alignItems: 'center',
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.sm,
    backgroundColor: AppColors.surface,
    borderBottomWidth: 1,
    borderBottomColor: AppColors.border,
  },
  searchIcon: {
    marginRight: Spacing.xs,
  },
  searchInput: {
    flex: 1,
    fontSize: FontSizes.base,
    color: AppColors.textPrimary,
    paddingVertical: 0,
  },
  // --- Log list and individual entries ---
  logsList: {
    padding: Spacing.sm,
  },
  logItem: {
    backgroundColor: AppColors.surface,
    borderRadius: BorderRadius.md,
    padding: Spacing.sm,
    marginBottom: Spacing.sm,
    borderLeftWidth: 3,
    borderLeftColor: AppColors.primary,
  },
  logHeader: {
    flexDirection: 'row',
    justifyContent: 'space-between',
    alignItems: 'center',
    marginBottom: Spacing.xs,
  },
  logInfo: {
    flexDirection: 'row',
    alignItems: 'center',
    gap: Spacing.xs,
  },
  logLevel: {
    fontSize: FontSizes.xs,
    fontWeight: '600',
  },
  logCategory: {
    fontSize: FontSizes.xs,
    color: AppColors.textSecondary,
    fontWeight: '500',
  },
  logTime: {
    fontSize: FontSizes.xs,
    color: AppColors.textMuted,
  },
  logMessage: {
    fontSize: FontSizes.sm,
    color: AppColors.textPrimary,
    lineHeight: 20,
  },
  logData: {
    fontSize: FontSizes.xs,
    color: AppColors.textSecondary,
    marginTop: Spacing.xs,
    // Monospace font for structured/JSON payloads.
    fontFamily: Platform.OS === 'ios' ? 'Menlo' : 'monospace',
  },
  // --- Empty state (no logs yet) ---
  emptyContainer: {
    flex: 1,
    justifyContent: 'center',
    alignItems: 'center',
    paddingTop: 100,
  },
  emptyText: {
    fontSize: FontSizes.lg,
    fontWeight: '600',
    color: AppColors.textSecondary,
    marginTop: Spacing.md,
  },
  emptySubtext: {
    fontSize: FontSizes.sm,
    color: AppColors.textMuted,
    marginTop: Spacing.xs,
  },
  // --- TTS test area (status row + test/stop button) ---
  ttsTestContainer: {
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.sm,
    backgroundColor: AppColors.surface,
    borderBottomWidth: 1,
    borderBottomColor: AppColors.border,
    gap: Spacing.xs,
  },
  testButton: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
    backgroundColor: AppColors.primary,
    paddingVertical: Spacing.sm,
    paddingHorizontal: Spacing.md,
    borderRadius: BorderRadius.md,
    gap: Spacing.xs,
  },
  testButtonText: {
    fontSize: FontSizes.base,
    fontWeight: '600',
    color: AppColors.white,
  },
  testButtonDisabled: {
    backgroundColor: AppColors.border,
    opacity: 0.6,
  },
  testButtonTextDisabled: {
    color: AppColors.textMuted,
  },
  ttsStatusRow: {
    flexDirection: 'row',
    alignItems: 'center',
    gap: Spacing.xs,
  },
  ttsStatusText: {
    fontSize: FontSizes.sm,
    fontWeight: '500',
  },
});

View File

@ -1,5 +1,5 @@
import React, { useState, useRef, useEffect, useCallback } from 'react'; import React, { useState, useRef, useEffect, useCallback } from 'react';
import { View, Text, StyleSheet, ActivityIndicator, TouchableOpacity } from 'react-native'; import { View, Text, StyleSheet, ActivityIndicator, TouchableOpacity, Image } from 'react-native';
import { WebView, WebViewNavigation } from 'react-native-webview'; import { WebView, WebViewNavigation } from 'react-native-webview';
import { Ionicons } from '@expo/vector-icons'; import { Ionicons } from '@expo/vector-icons';
import { SafeAreaView } from 'react-native-safe-area-context'; import { SafeAreaView } from 'react-native-safe-area-context';
@ -204,9 +204,13 @@ export default function HomeScreen() {
return ( return (
<SafeAreaView style={styles.container} edges={['top']}> <SafeAreaView style={styles.container} edges={['top']}>
<View style={styles.header}> <View style={styles.header}>
<View> <View style={styles.headerLeft}>
<Image
source={require('@/assets/images/icon.png')}
style={styles.logo}
resizeMode="contain"
/>
<Text style={styles.greeting}>Hello, {user?.user_name || 'User'}</Text> <Text style={styles.greeting}>Hello, {user?.user_name || 'User'}</Text>
<Text style={styles.headerTitle}>Dashboard</Text>
</View> </View>
</View> </View>
<View style={styles.loadingContainer}> <View style={styles.loadingContainer}>
@ -221,9 +225,13 @@ export default function HomeScreen() {
return ( return (
<SafeAreaView style={styles.container} edges={['top']}> <SafeAreaView style={styles.container} edges={['top']}>
<View style={styles.header}> <View style={styles.header}>
<View> <View style={styles.headerLeft}>
<Image
source={require('@/assets/images/icon.png')}
style={styles.logo}
resizeMode="contain"
/>
<Text style={styles.greeting}>Hello, {user?.user_name || 'User'}</Text> <Text style={styles.greeting}>Hello, {user?.user_name || 'User'}</Text>
<Text style={styles.headerTitle}>Dashboard</Text>
</View> </View>
<TouchableOpacity style={styles.refreshButton} onPress={handleRefresh}> <TouchableOpacity style={styles.refreshButton} onPress={handleRefresh}>
<Ionicons name="refresh" size={22} color={AppColors.primary} /> <Ionicons name="refresh" size={22} color={AppColors.primary} />
@ -245,9 +253,13 @@ export default function HomeScreen() {
<SafeAreaView style={styles.container} edges={['top']}> <SafeAreaView style={styles.container} edges={['top']}>
{/* Header */} {/* Header */}
<View style={styles.header}> <View style={styles.header}>
<View> <View style={styles.headerLeft}>
<Image
source={require('@/assets/images/icon.png')}
style={styles.logo}
resizeMode="contain"
/>
<Text style={styles.greeting}>Hello, {user?.user_name || 'User'}</Text> <Text style={styles.greeting}>Hello, {user?.user_name || 'User'}</Text>
<Text style={styles.headerTitle}>Dashboard</Text>
</View> </View>
<View style={styles.headerActions}> <View style={styles.headerActions}>
{canGoBack && ( {canGoBack && (
@ -313,6 +325,15 @@ const styles = StyleSheet.create({
borderBottomWidth: 1, borderBottomWidth: 1,
borderBottomColor: AppColors.border, borderBottomColor: AppColors.border,
}, },
headerLeft: {
flexDirection: 'row',
alignItems: 'center',
gap: Spacing.sm,
},
logo: {
width: 36,
height: 36,
},
greeting: { greeting: {
fontSize: FontSizes.sm, fontSize: FontSizes.sm,
color: AppColors.textSecondary, color: AppColors.textSecondary,

View File

@ -7,10 +7,10 @@ import { Platform } from 'react-native';
// WellNuo Brand Colors // WellNuo Brand Colors
export const AppColors = { export const AppColors = {
// Primary // Primary - Blue (#0076BF)
primary: '#4A90D9', primary: '#0076BF',
primaryDark: '#2E5C8A', primaryDark: '#005A94',
primaryLight: '#6BA8E8', primaryLight: '#3391CC',
// Status // Status
success: '#5AC8A8', success: '#5AC8A8',

12078
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -11,18 +11,22 @@
"lint": "expo lint" "lint": "expo lint"
}, },
"dependencies": { "dependencies": {
"@dr.pogodin/react-native-fs": "^2.36.2",
"@expo/vector-icons": "^15.0.3", "@expo/vector-icons": "^15.0.3",
"@react-navigation/bottom-tabs": "^7.4.0", "@react-navigation/bottom-tabs": "^7.4.0",
"@react-navigation/elements": "^2.6.3", "@react-navigation/elements": "^2.6.3",
"@react-navigation/native": "^7.1.8", "@react-navigation/native": "^7.1.8",
"expo": "~54.0.29", "expo": "~54.0.29",
"expo-constants": "~18.0.12", "expo-constants": "~18.0.12",
"expo-file-system": "~19.0.21",
"expo-font": "~14.0.10", "expo-font": "~14.0.10",
"expo-haptics": "~15.0.8", "expo-haptics": "~15.0.8",
"expo-image": "~3.0.11", "expo-image": "~3.0.11",
"expo-linking": "~8.0.10", "expo-linking": "~8.0.10",
"expo-router": "~6.0.19", "expo-router": "~6.0.19",
"expo-secure-store": "^15.0.8", "expo-secure-store": "^15.0.8",
"expo-speech": "~14.0.8",
"expo-speech-recognition": "^3.0.1",
"expo-splash-screen": "~31.0.12", "expo-splash-screen": "~31.0.12",
"expo-status-bar": "~3.0.9", "expo-status-bar": "~3.0.9",
"expo-symbols": "~1.0.8", "expo-symbols": "~1.0.8",
@ -35,6 +39,7 @@
"react-native-reanimated": "~4.1.1", "react-native-reanimated": "~4.1.1",
"react-native-safe-area-context": "~5.6.0", "react-native-safe-area-context": "~5.6.0",
"react-native-screens": "~4.16.0", "react-native-screens": "~4.16.0",
"react-native-sherpa-onnx-offline-tts": "^0.2.6",
"react-native-web": "~0.21.0", "react-native-web": "~0.21.0",
"react-native-webview": "^13.16.0", "react-native-webview": "^13.16.0",
"react-native-worklets": "0.5.1" "react-native-worklets": "0.5.1"

View File

@ -74,12 +74,21 @@ class ApiService {
if (response.ok && response.data) { if (response.ok && response.data) {
// Save credentials to SecureStore (including password for auto-refresh) // Save credentials to SecureStore (including password for auto-refresh)
await SecureStore.setItemAsync('accessToken', response.data.access_token); // ВАЖНО: SecureStore принимает ТОЛЬКО строки, проверяем каждое значение
await SecureStore.setItemAsync('userId', response.data.user_id.toString()); if (response.data.access_token) {
await SecureStore.setItemAsync('userName', username); await SecureStore.setItemAsync('accessToken', String(response.data.access_token));
await SecureStore.setItemAsync('userPassword', password); // Store for token refresh }
await SecureStore.setItemAsync('privileges', response.data.privileges); if (response.data.user_id != null) {
await SecureStore.setItemAsync('maxRole', response.data.max_role.toString()); await SecureStore.setItemAsync('userId', String(response.data.user_id));
}
if (username) {
await SecureStore.setItemAsync('userName', String(username));
}
if (password) {
await SecureStore.setItemAsync('userPassword', String(password)); // Store for token refresh
}
await SecureStore.setItemAsync('privileges', String(response.data.privileges || ''));
await SecureStore.setItemAsync('maxRole', String(response.data.max_role ?? 0));
} }
return response; return response;