ScrollView now properly scrolls through all 35+ voices
import React, { useState, useCallback, useRef, useEffect } from 'react';
import {
  View,
  Text,
  StyleSheet,
  FlatList,
  TextInput,
  TouchableOpacity,
  KeyboardAvoidingView,
  Platform,
  Alert,
  ActivityIndicator,
  Modal,
  ScrollView,
  Animated,
} from 'react-native';
import { Ionicons, Feather } from '@expo/vector-icons';
import { SafeAreaView } from 'react-native-safe-area-context';
import * as SecureStore from 'expo-secure-store';
import * as Speech from 'expo-speech';
import { ExpoSpeechRecognitionModule, useSpeechRecognitionEvent } from 'expo-speech-recognition';
import { useBeneficiary } from '@/contexts/BeneficiaryContext';
import { api } from '@/services/api';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import type { Message, Beneficiary } from '@/types';

const OLD_API_URL = 'https://eluxnetworks.net/function/well-api/api';

// DEV ONLY: Voice options for testing different TTS voices
const DEV_MODE = __DEV__;

interface VoiceOption {
  id: string;
  name: string;
  language: string;
  voice?: string; // iOS voice identifier
}

// Available iOS voices for testing
const AVAILABLE_VOICES: VoiceOption[] = [
  // English voices
  { id: 'en-US-default', name: 'English (US) - Default', language: 'en-US' },
  { id: 'en-US-samantha', name: 'Samantha (US)', language: 'en-US', voice: 'com.apple.ttsbundle.Samantha-compact' },
  { id: 'en-GB-daniel', name: 'Daniel (UK)', language: 'en-GB', voice: 'com.apple.ttsbundle.Daniel-compact' },
  { id: 'en-AU-karen', name: 'Karen (Australia)', language: 'en-AU', voice: 'com.apple.ttsbundle.Karen-compact' },
  { id: 'en-IE-moira', name: 'Moira (Ireland)', language: 'en-IE', voice: 'com.apple.ttsbundle.Moira-compact' },
  { id: 'en-ZA-tessa', name: 'Tessa (South Africa)', language: 'en-ZA', voice: 'com.apple.ttsbundle.Tessa-compact' },
  { id: 'en-IN-rishi', name: 'Rishi (India)', language: 'en-IN', voice: 'com.apple.ttsbundle.Rishi-compact' },

  // European languages
  { id: 'fr-FR', name: 'French (France)', language: 'fr-FR' },
  { id: 'de-DE', name: 'German', language: 'de-DE' },
  { id: 'es-ES', name: 'Spanish (Spain)', language: 'es-ES' },
  { id: 'es-MX', name: 'Spanish (Mexico)', language: 'es-MX' },
  { id: 'it-IT', name: 'Italian', language: 'it-IT' },
  { id: 'pt-BR', name: 'Portuguese (Brazil)', language: 'pt-BR' },
  { id: 'pt-PT', name: 'Portuguese (Portugal)', language: 'pt-PT' },
  { id: 'nl-NL', name: 'Dutch', language: 'nl-NL' },
  { id: 'pl-PL', name: 'Polish', language: 'pl-PL' },
  { id: 'ru-RU', name: 'Russian', language: 'ru-RU' },
  { id: 'uk-UA', name: 'Ukrainian', language: 'uk-UA' },
  { id: 'cs-CZ', name: 'Czech', language: 'cs-CZ' },
  { id: 'da-DK', name: 'Danish', language: 'da-DK' },
  { id: 'fi-FI', name: 'Finnish', language: 'fi-FI' },
  { id: 'el-GR', name: 'Greek', language: 'el-GR' },
  { id: 'hu-HU', name: 'Hungarian', language: 'hu-HU' },
  { id: 'no-NO', name: 'Norwegian', language: 'no-NO' },
  { id: 'ro-RO', name: 'Romanian', language: 'ro-RO' },
  { id: 'sk-SK', name: 'Slovak', language: 'sk-SK' },
  { id: 'sv-SE', name: 'Swedish', language: 'sv-SE' },
  { id: 'tr-TR', name: 'Turkish', language: 'tr-TR' },

  // Asian languages
  { id: 'zh-CN', name: 'Chinese (Mandarin)', language: 'zh-CN' },
  { id: 'zh-TW', name: 'Chinese (Taiwan)', language: 'zh-TW' },
  { id: 'zh-HK', name: 'Chinese (Cantonese)', language: 'zh-HK' },
  { id: 'ja-JP', name: 'Japanese', language: 'ja-JP' },
  { id: 'ko-KR', name: 'Korean', language: 'ko-KR' },
  { id: 'hi-IN', name: 'Hindi', language: 'hi-IN' },
  { id: 'th-TH', name: 'Thai', language: 'th-TH' },
  { id: 'vi-VN', name: 'Vietnamese', language: 'vi-VN' },
  { id: 'id-ID', name: 'Indonesian', language: 'id-ID' },

  // Middle Eastern
  { id: 'ar-SA', name: 'Arabic', language: 'ar-SA' },
  { id: 'he-IL', name: 'Hebrew', language: 'he-IL' },
];
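
// Note: the hard-coded identifiers above use Apple's legacy
// "com.apple.ttsbundle.*-compact" naming and are not guaranteed to exist on every
// device or iOS version; on devices where a voice is missing the option may simply
// be ignored. As a rough sketch (not wired into this screen), the voices actually
// installed could be listed at runtime with expo-speech:
//   const voices = await Speech.getAvailableVoicesAsync();
//   voices.forEach(v => console.log(v.identifier, v.language, v.quality));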

interface ActivityData {
  name: string;
  rooms: Array<{
    name: string;
    data: Array<{
      title: string;
      events: number;
      hours: number;
    }>;
  }>;
}

interface ActivitiesResponse {
  alert_text: string;
  chart_data: ActivityData[];
}

interface VoiceAskResponse {
  ok: boolean;
  response: {
    Command: string;
    body: string;
    name?: string;
    reflected?: string;
    language?: string;
    time?: number;
  };
  status: string;
}
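
// Illustrative response shape only (the values below are hypothetical and not
// taken from API documentation):
//   { "ok": true,
//     "response": { "Command": "answer", "body": "She spent most of today in the kitchen." },
//     "status": "200 OK" }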

export default function VoiceAIScreen() {
  const { currentBeneficiary, setCurrentBeneficiary } = useBeneficiary();
  const [messages, setMessages] = useState<Message[]>([
    {
      id: '1',
      role: 'assistant',
      content: 'Hello! I\'m Julia, your voice assistant for monitoring your loved ones. Select a beneficiary and tap the microphone to ask a question.',
      timestamp: new Date(),
    },
  ]);
  const [input, setInput] = useState('');
  const [isSending, setIsSending] = useState(false);
  const [isSpeaking, setIsSpeaking] = useState(false);
  const [isListening, setIsListening] = useState(false);
  const [recognizedText, setRecognizedText] = useState('');
  const [showBeneficiaryPicker, setShowBeneficiaryPicker] = useState(false);
  const [isContinuousMode, setIsContinuousMode] = useState(false); // Live chat mode
  const [beneficiaries, setBeneficiaries] = useState<Beneficiary[]>([]);
  // DEV ONLY: Voice selection for testing
  const [selectedVoice, setSelectedVoice] = useState<VoiceOption>(AVAILABLE_VOICES[0]);
  const [showVoicePicker, setShowVoicePicker] = useState(false);
  const flatListRef = useRef<FlatList>(null);
  const lastSendTimeRef = useRef<number>(0);
  const pulseAnim = useRef(new Animated.Value(1)).current;
  const SEND_COOLDOWN_MS = 1000;
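  // Sends can come from the send button or be auto-fired after a final speech
  // result, so this cooldown guards against accidental duplicate requests.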

  // Speech recognition event handlers
  useSpeechRecognitionEvent('start', () => {
    setIsListening(true);
    setRecognizedText('');
  });

  useSpeechRecognitionEvent('end', () => {
    setIsListening(false);
  });

  useSpeechRecognitionEvent('result', (event) => {
    const transcript = event.results[0]?.transcript || '';
    setRecognizedText(transcript);

    // If final result, send to AI
    if (event.isFinal && transcript.trim()) {
      setInput(transcript);
      // Auto-send after speech recognition completes
      setTimeout(() => {
        handleSendWithText(transcript);
      }, 300);
    }
  });

  useSpeechRecognitionEvent('error', (event) => {
    console.log('Speech recognition error:', event.error, event.message);
    setIsListening(false);
    if (event.error !== 'no-speech') {
      Alert.alert('Voice Error', event.message || 'Could not recognize speech. Please try again.');
    }
  });

  // Load beneficiaries on mount
  useEffect(() => {
    loadBeneficiaries();
    return () => {
      Speech.stop();
    };
  }, []);

  const loadBeneficiaries = async () => {
    const response = await api.getAllBeneficiaries();
    if (response.ok && response.data) {
      setBeneficiaries(response.data);
    }
  };

  // Pulse animation for speaking
  useEffect(() => {
    if (isSpeaking) {
      const pulse = Animated.loop(
        Animated.sequence([
          Animated.timing(pulseAnim, {
            toValue: 1.3,
            duration: 500,
            useNativeDriver: true,
          }),
          Animated.timing(pulseAnim, {
            toValue: 1,
            duration: 500,
            useNativeDriver: true,
          }),
        ])
      );
      pulse.start();
      return () => pulse.stop();
    }
  }, [isSpeaking]);

  // Fetch activity data and format it as context
  const getActivityContext = async (token: string, userName: string, deploymentId: string): Promise<string> => {
    try {
      console.log('Fetching activity context for deployment:', deploymentId);

      const response = await fetch(OLD_API_URL, {
        method: 'POST',
        headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
        body: new URLSearchParams({
          function: 'activities_report_details',
          user_name: userName,
          token: token,
          deployment_id: deploymentId,
          filter: '0',
        }).toString(),
      });

      const data: ActivitiesResponse = await response.json();
      console.log('Activity API response:', JSON.stringify(data).slice(0, 200));

      if (!data.chart_data || data.chart_data.length === 0) {
        console.log('No chart_data in response');
        return '';
      }

      // Get weekly data (most recent)
      const weeklyData = data.chart_data.find(d => d.name === 'Weekly');
      if (!weeklyData) {
        console.log('No Weekly data found');
        return '';
      }

      // Build context string
      const lines: string[] = [];
      lines.push(`Alert status: ${data.alert_text || 'No alert'}`);

      // Calculate today's data (last item in each room's data)
      const todayStats: string[] = [];
      for (const room of weeklyData.rooms) {
        const todayData = room.data[room.data.length - 1]; // Today is the last entry
        if (todayData && todayData.hours > 0) {
          todayStats.push(`${room.name}: ${todayData.hours.toFixed(1)} hours (${todayData.events} events)`);
        }
      }

      if (todayStats.length > 0) {
        lines.push(`Today's activity: ${todayStats.join(', ')}`);
      }

      // Calculate weekly totals
      const weeklyStats: string[] = [];
      for (const room of weeklyData.rooms) {
        const totalHours = room.data.reduce((sum, d) => sum + d.hours, 0);
        if (totalHours > 0) {
          weeklyStats.push(`${room.name}: ${totalHours.toFixed(1)} hours total this week`);
        }
      }

      if (weeklyStats.length > 0) {
        lines.push(`Weekly summary: ${weeklyStats.join(', ')}`);
      }

      const result = lines.join('. ');
      console.log('Activity context result:', result);
      return result;
    } catch (error) {
      console.log('Failed to fetch activity context:', error);
      return '';
    }
  };
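  // Example of the context string this builds (values illustrative only):
  // "Alert status: No alert. Today's activity: Kitchen: 2.1 hours (14 events),
  //  Bedroom: 8.3 hours (5 events). Weekly summary: Kitchen: 15.0 hours total this week"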

  // Fetch dashboard data as fallback context
  const getDashboardContext = async (token: string, userName: string, deploymentId: string): Promise<string> => {
    try {
      const today = new Date().toISOString().split('T')[0];

      const response = await fetch(OLD_API_URL, {
        method: 'POST',
        headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
        body: new URLSearchParams({
          function: 'dashboard_single',
          user_name: userName,
          token: token,
          deployment_id: deploymentId,
          date: today,
        }).toString(),
      });

      const data = await response.json();
      console.log('Dashboard API response:', JSON.stringify(data).slice(0, 300));

      if (!data.result_list || data.result_list.length === 0) {
        return '';
      }

      const info = data.result_list[0];
      const lines: string[] = [];

      if (info.wellness_descriptor) {
        lines.push(`Current wellness: ${info.wellness_descriptor}`);
      }
      if (info.wellness_score_percent) {
        lines.push(`Wellness score: ${info.wellness_score_percent}%`);
      }
      if (info.last_location) {
        lines.push(`Last seen in: ${info.last_location}`);
      }
      if (info.last_detected_time) {
        lines.push(`Last activity: ${info.last_detected_time}`);
      }
      if (info.sleep_hours) {
        lines.push(`Sleep hours: ${info.sleep_hours}`);
      }
      if (info.temperature) {
        lines.push(`Temperature: ${info.temperature}${info.units === 'F' ? '°F' : '°C'}`);
      }

      return lines.join('. ');
    } catch (error) {
      console.log('Failed to fetch dashboard context:', error);
      return '';
    }
  };

  const sendToVoiceAsk = async (question: string): Promise<string> => {
    const token = await SecureStore.getItemAsync('accessToken');
    const userName = await SecureStore.getItemAsync('userName');

    if (!token || !userName) {
      throw new Error('Please log in to use voice assistant');
    }

    if (!currentBeneficiary?.id) {
      throw new Error('Please select a beneficiary first');
    }

    const beneficiaryName = currentBeneficiary.name || 'the patient';
    const deploymentId = currentBeneficiary.id.toString();

    // Get activity context (primary source)
    let activityContext = await getActivityContext(token, userName, deploymentId);

    // If activity context is empty, try dashboard context as fallback
    if (!activityContext) {
      console.log('Activity context empty, trying dashboard...');
      activityContext = await getDashboardContext(token, userName, deploymentId);
    }

    // Build the question with embedded context
    // Format it clearly so the LLM understands this is data about the person
    let enhancedQuestion: string;

    if (activityContext) {
      enhancedQuestion = `You are a caring assistant helping monitor ${beneficiaryName}'s wellbeing.

Here is the current data about ${beneficiaryName}:
${activityContext}

Based on this data, please answer the following question: ${question}`;
    } else {
      // No context available - still try to answer
      enhancedQuestion = `You are a caring assistant helping monitor ${beneficiaryName}'s wellbeing. Please answer: ${question}`;
    }

    // Debug logging
    console.log('=== Voice API Debug ===');
    console.log('Beneficiary Name:', beneficiaryName);
    console.log('Activity Context Length:', activityContext?.length || 0);
    console.log('Activity Context:', activityContext || 'EMPTY');
    console.log('Deployment ID:', deploymentId);

    const requestBody = new URLSearchParams({
      function: 'voice_ask',
      clientId: '001',
      user_name: userName,
      token: token,
      question: enhancedQuestion,
      deployment_id: deploymentId,
      // Also try sending context as separate parameter in case API supports it
      context: activityContext || '',
    }).toString();

    console.log('Request Body (first 500 chars):', requestBody.slice(0, 500));

    const response = await fetch(OLD_API_URL, {
      method: 'POST',
      headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
      body: requestBody,
    });

    const data: VoiceAskResponse = await response.json();

    console.log('=== Voice API Response ===');
    console.log('Full Response:', JSON.stringify(data, null, 2));

    if (data.ok && data.response?.body) {
      return data.response.body;
    } else if (data.status === '401 Unauthorized') {
      throw new Error('Session expired. Please log in again.');
    } else {
      throw new Error('Could not get response from voice assistant');
    }
  };

  // Text-to-Speech using expo-speech (works out of the box)
  const speakResponse = async (text: string, autoListenAfter: boolean = false) => {
    setIsSpeaking(true);
    try {
      const speechOptions: Speech.SpeechOptions = {
        language: selectedVoice.language,
        pitch: 1.0,
        rate: 0.9,
        onDone: () => {
          setIsSpeaking(false);
          if (autoListenAfter && isContinuousMode && currentBeneficiary?.id) {
            setTimeout(() => {
              startListeningInternal();
            }, 500);
          }
        },
        onError: () => setIsSpeaking(false),
      };
      // Add specific voice if available (iOS only)
      if (selectedVoice.voice) {
        speechOptions.voice = selectedVoice.voice;
      }
      await Speech.speak(text, speechOptions);
    } catch (error) {
      console.error('TTS error:', error);
      setIsSpeaking(false);
    }
  };
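  // Note: Speech.speak() starts playback and returns without waiting for the
  // utterance to finish; completion is signalled via onDone, which is why the
  // continuous-mode "listen again" step is scheduled there rather than after the await.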

  // DEV: Test voice with sample text
  const testVoice = (voice: VoiceOption) => {
    Speech.stop();
    const testText = getTestTextForLanguage(voice.language);
    const speechOptions: Speech.SpeechOptions = {
      language: voice.language,
      pitch: 1.0,
      rate: 0.9,
    };
    if (voice.voice) {
      speechOptions.voice = voice.voice;
    }
    Speech.speak(testText, speechOptions);
  };

  // Get appropriate test text for each language
  const getTestTextForLanguage = (language: string): string => {
    const testTexts: Record<string, string> = {
      'en-US': 'Hello! I am Julia, your voice assistant. How can I help you today?',
      'en-GB': 'Hello! I am Julia, your voice assistant. How can I help you today?',
      'en-AU': 'Hello! I am Julia, your voice assistant. How can I help you today?',
      'en-IE': 'Hello! I am Julia, your voice assistant. How can I help you today?',
      'en-ZA': 'Hello! I am Julia, your voice assistant. How can I help you today?',
      'en-IN': 'Hello! I am Julia, your voice assistant. How can I help you today?',
      'fr-FR': 'Bonjour! Je suis Julia, votre assistante vocale. Comment puis-je vous aider?',
      'de-DE': 'Hallo! Ich bin Julia, Ihre Sprachassistentin. Wie kann ich Ihnen helfen?',
      'es-ES': 'Hola! Soy Julia, tu asistente de voz. ¿Cómo puedo ayudarte?',
      'es-MX': 'Hola! Soy Julia, tu asistente de voz. ¿Cómo puedo ayudarte?',
      'it-IT': 'Ciao! Sono Julia, la tua assistente vocale. Come posso aiutarti?',
      'pt-BR': 'Olá! Sou Julia, sua assistente de voz. Como posso ajudá-lo?',
      'pt-PT': 'Olá! Sou a Julia, a sua assistente de voz. Como posso ajudá-lo?',
      'ru-RU': 'Привет! Я Юлия, ваш голосовой помощник. Чем могу помочь?',
      'uk-UA': 'Привіт! Я Юлія, ваш голосовий помічник. Чим можу допомогти?',
      'zh-CN': '你好!我是朱莉娅,您的语音助手。我能帮您什么?',
      'zh-TW': '你好!我是茱莉亞,您的語音助手。我能幫您什麼?',
      'zh-HK': '你好!我係Julia,你嘅語音助手。有咩可以幫到你?',
      'ja-JP': 'こんにちは!私はジュリア、あなたの音声アシスタントです。何かお手伝いできますか?',
      'ko-KR': '안녕하세요! 저는 줄리아, 당신의 음성 비서입니다. 어떻게 도와드릴까요?',
      'ar-SA': 'مرحبا! أنا جوليا، مساعدتك الصوتية. كيف يمكنني مساعدتك؟',
      'he-IL': 'שלום! אני ג׳וליה, העוזרת הקולית שלך. איך אוכל לעזור לך?',
      'hi-IN': 'नमस्ते! मैं जूलिया हूं, आपकी वॉयस असिस्टेंट। मैं आपकी कैसे मदद कर सकती हूं?',
    };
    return testTexts[language] || testTexts['en-US'];
  };
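  // Languages listed in AVAILABLE_VOICES but missing here (e.g. nl-NL, pl-PL,
  // th-TH, vi-VN, id-ID) fall back to the English sample, so their test playback
  // is English text rendered by a non-English voice.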

  const handleSend = useCallback(async () => {
    const trimmedInput = input.trim();
    if (!trimmedInput || isSending) return;

    // Require beneficiary selection
    if (!currentBeneficiary?.id) {
      Alert.alert(
        'Select Beneficiary',
        'Please select a beneficiary first to ask questions about their wellbeing.',
        [{ text: 'Select', onPress: () => setShowBeneficiaryPicker(true) }, { text: 'Cancel' }]
      );
      return;
    }

    // Debounce
    const now = Date.now();
    if (now - lastSendTimeRef.current < SEND_COOLDOWN_MS) return;
    lastSendTimeRef.current = now;

    const userMessage: Message = {
      id: Date.now().toString(),
      role: 'user',
      content: trimmedInput,
      timestamp: new Date(),
    };

    setMessages(prev => [...prev, userMessage]);
    setInput('');
    setIsSending(true);

    try {
      const aiResponse = await sendToVoiceAsk(trimmedInput);

      const assistantMessage: Message = {
        id: (Date.now() + 1).toString(),
        role: 'assistant',
        content: aiResponse,
        timestamp: new Date(),
      };
      setMessages(prev => [...prev, assistantMessage]);

      // Speak the response
      await speakResponse(aiResponse);
    } catch (error) {
      const errorMessage: Message = {
        id: (Date.now() + 1).toString(),
        role: 'assistant',
        content: `Sorry, I encountered an error: ${error instanceof Error ? error.message : 'Unknown error'}. Please try again.`,
        timestamp: new Date(),
      };
      setMessages(prev => [...prev, errorMessage]);
    } finally {
      setIsSending(false);
    }
  }, [input, isSending, currentBeneficiary]);

  const selectBeneficiary = (beneficiary: Beneficiary) => {
    setCurrentBeneficiary(beneficiary);
    setShowBeneficiaryPicker(false);

    const welcomeMessage: Message = {
      id: Date.now().toString(),
      role: 'assistant',
      content: `Great! I'm now ready to answer questions about ${beneficiary.name}. ${beneficiary.wellness_descriptor ? `Current status: ${beneficiary.wellness_descriptor}.` : ''} Type your question below!`,
      timestamp: new Date(),
    };
    setMessages(prev => [...prev, welcomeMessage]);

    // Speak the welcome message
    speakResponse(`Ready to answer questions about ${beneficiary.name}`);
  };

  const stopSpeaking = async () => {
    Speech.stop();
    setIsSpeaking(false);
    setIsContinuousMode(false); // Also stop continuous mode when user stops speaking
  };

  // Internal function to start listening (no permission check, used for continuous mode)
  const startListeningInternal = () => {
    if (isSending || isSpeaking) return;
    if (!currentBeneficiary?.id) return;

    // Stop any ongoing speech
    Speech.stop();
    setIsSpeaking(false);

    // Start recognition
    ExpoSpeechRecognitionModule.start({
      lang: 'en-US',
      interimResults: true,
      maxAlternatives: 1,
      continuous: false,
    });
  };

  // Start voice recognition (user-initiated)
  const startListening = async () => {
    if (isSending || isSpeaking) return;

    // Require beneficiary selection
    if (!currentBeneficiary?.id) {
      Alert.alert(
        'Select Beneficiary',
        'Please select a beneficiary first to ask questions about their wellbeing.',
        [{ text: 'Select', onPress: () => setShowBeneficiaryPicker(true) }, { text: 'Cancel' }]
      );
      return;
    }

    // Request permissions
    const result = await ExpoSpeechRecognitionModule.requestPermissionsAsync();
    if (!result.granted) {
      Alert.alert(
        'Microphone Permission Required',
        'Please grant microphone permission to use voice input.',
        [{ text: 'OK' }]
      );
      return;
    }

    // Enable continuous mode when user starts listening
    setIsContinuousMode(true);

    // Stop any ongoing speech
    Speech.stop();
    setIsSpeaking(false);

    // Start recognition
    ExpoSpeechRecognitionModule.start({
      lang: 'en-US',
      interimResults: true,
      maxAlternatives: 1,
      continuous: false,
    });
  };

  // Stop voice recognition and disable continuous mode
  const stopListening = () => {
    ExpoSpeechRecognitionModule.stop();
    setIsListening(false);
    setIsContinuousMode(false); // User manually stopped, disable continuous mode
  };

  // Handle send with specific text (used by speech recognition)
  const handleSendWithText = async (text: string) => {
    const trimmedInput = text.trim();
    if (!trimmedInput || isSending) return;

    if (!currentBeneficiary?.id) return;

    // Debounce
    const now = Date.now();
    if (now - lastSendTimeRef.current < SEND_COOLDOWN_MS) return;
    lastSendTimeRef.current = now;

    const userMessage: Message = {
      id: Date.now().toString(),
      role: 'user',
      content: trimmedInput,
      timestamp: new Date(),
    };

    setMessages(prev => [...prev, userMessage]);
    setInput('');
    setRecognizedText('');
    setIsSending(true);

    try {
      const aiResponse = await sendToVoiceAsk(trimmedInput);

      const assistantMessage: Message = {
        id: (Date.now() + 1).toString(),
        role: 'assistant',
        content: aiResponse,
        timestamp: new Date(),
      };
      setMessages(prev => [...prev, assistantMessage]);

      // Speak the response - in continuous mode, auto-listen after speaking
      await speakResponse(aiResponse, true);
    } catch (error) {
      const errorMessage: Message = {
        id: (Date.now() + 1).toString(),
        role: 'assistant',
        content: `Sorry, I encountered an error: ${error instanceof Error ? error.message : 'Unknown error'}. Please try again.`,
        timestamp: new Date(),
      };
      setMessages(prev => [...prev, errorMessage]);
      // Even on error, continue listening in continuous mode
      if (isContinuousMode && currentBeneficiary?.id) {
        setTimeout(() => startListeningInternal(), 500);
      }
    } finally {
      setIsSending(false);
    }
  };

  const renderMessage = ({ item }: { item: Message }) => {
    const isUser = item.role === 'user';

    return (
      <View style={[styles.messageContainer, isUser ? styles.userMessageContainer : styles.assistantMessageContainer]}>
        {!isUser && (
          <View style={styles.avatarContainer}>
            <Feather name="mic" size={16} color={AppColors.white} />
          </View>
        )}
        <View style={[styles.messageBubble, isUser ? styles.userBubble : styles.assistantBubble]}>
          <Text style={[styles.messageText, isUser ? styles.userMessageText : styles.assistantMessageText]}>
            {item.content}
          </Text>
          <Text style={[styles.timestamp, isUser && styles.userTimestamp]}>
            {item.timestamp.toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' })}
          </Text>
        </View>
      </View>
    );
  };

  return (
    <SafeAreaView style={styles.container} edges={['top']}>
      {/* Header */}
      <View style={styles.header}>
        <View style={styles.headerInfo}>
          <View style={styles.headerAvatar}>
            <Feather name="mic" size={20} color={AppColors.white} />
          </View>
          <View>
            <Text style={styles.headerTitle}>Julia AI</Text>
            <Text style={styles.headerSubtitle}>
              {isSending
                ? 'Thinking...'
                : isListening
                ? 'Listening...'
                : isSpeaking
                ? 'Speaking...'
                : currentBeneficiary
                ? `Monitoring ${currentBeneficiary.name}`
                : 'Select a beneficiary'}
            </Text>
          </View>
        </View>
        <View style={styles.headerButtons}>
          {/* DEV ONLY: Voice Settings */}
          {DEV_MODE && (
            <TouchableOpacity
              style={styles.voiceSettingsButton}
              onPress={() => setShowVoicePicker(true)}
            >
              <Feather name="sliders" size={18} color="#9B59B6" />
            </TouchableOpacity>
          )}
          <TouchableOpacity style={styles.beneficiaryButton} onPress={() => setShowBeneficiaryPicker(true)}>
            <Feather name="users" size={20} color={AppColors.primary} />
            <Text style={styles.beneficiaryButtonText}>
              {currentBeneficiary?.name?.split(' ')[0] || 'Select'}
            </Text>
          </TouchableOpacity>
        </View>
      </View>

      {/* Messages */}
      <KeyboardAvoidingView
        style={styles.chatContainer}
        behavior={Platform.OS === 'ios' ? 'padding' : undefined}
        keyboardVerticalOffset={Platform.OS === 'ios' ? 90 : 0}
      >
        <FlatList
          ref={flatListRef}
          data={messages}
          keyExtractor={item => item.id}
          renderItem={renderMessage}
          contentContainerStyle={styles.messagesList}
          showsVerticalScrollIndicator={false}
          onContentSizeChange={() => flatListRef.current?.scrollToEnd({ animated: true })}
        />

        {/* Listening indicator */}
        {isListening && (
          <TouchableOpacity style={styles.listeningIndicator} onPress={stopListening}>
            <Animated.View style={{ transform: [{ scale: pulseAnim }] }}>
              <Feather name="mic" size={20} color="#E74C3C" />
            </Animated.View>
            <Text style={styles.listeningText}>
              {recognizedText || 'Listening... tap to stop'}
            </Text>
          </TouchableOpacity>
        )}

        {/* Speaking indicator */}
        {isSpeaking && !isListening && (
          <TouchableOpacity style={styles.speakingIndicator} onPress={stopSpeaking}>
            <Feather name="volume-2" size={20} color="#9B59B6" />
            <Text style={styles.speakingText}>
              {isContinuousMode ? 'Live mode - Speaking... tap to stop' : 'Speaking... tap to stop'}
            </Text>
          </TouchableOpacity>
        )}

        {/* Continuous mode indicator when idle */}
        {isContinuousMode && !isListening && !isSpeaking && !isSending && (
          <TouchableOpacity style={styles.continuousModeIndicator} onPress={() => setIsContinuousMode(false)}>
            <Feather name="radio" size={20} color="#27AE60" />
            <Text style={styles.continuousModeText}>Live chat active - tap to stop</Text>
          </TouchableOpacity>
        )}

        {/* Input */}
        <View style={styles.inputContainer}>
          <TouchableOpacity
            style={[
              styles.micButton,
              isListening && styles.micButtonActive,
              isContinuousMode && !isListening && styles.micButtonContinuous
            ]}
            onPress={isListening ? stopListening : startListening}
            disabled={isSending}
          >
            <Feather
              name="mic"
              size={24}
              color={isListening ? AppColors.white : (isContinuousMode ? '#27AE60' : AppColors.primary)}
            />
          </TouchableOpacity>

          <TextInput
            style={styles.input}
            placeholder="Type your question..."
            placeholderTextColor={AppColors.textMuted}
            value={input}
            onChangeText={setInput}
            multiline
            maxLength={1000}
            editable={!isSending}
            onSubmitEditing={handleSend}
          />

          <TouchableOpacity
            style={[styles.sendButton, (!input.trim() || isSending) && styles.sendButtonDisabled]}
            onPress={handleSend}
            disabled={!input.trim() || isSending}
          >
            {isSending ? (
              <ActivityIndicator size="small" color={AppColors.white} />
            ) : (
              <Ionicons name="send" size={20} color={input.trim() ? AppColors.white : AppColors.textMuted} />
            )}
          </TouchableOpacity>
        </View>
      </KeyboardAvoidingView>

      {/* Beneficiary Picker Modal */}
      <Modal visible={showBeneficiaryPicker} animationType="slide" transparent>
        <View style={styles.modalOverlay}>
          <View style={styles.modalContent}>
            <View style={styles.modalHeader}>
              <Text style={styles.modalTitle}>Select Beneficiary</Text>
              <TouchableOpacity onPress={() => setShowBeneficiaryPicker(false)}>
                <Ionicons name="close" size={24} color={AppColors.textPrimary} />
              </TouchableOpacity>
            </View>
            <ScrollView style={styles.beneficiaryList}>
              {beneficiaries.length === 0 ? (
                <View style={styles.emptyState}>
                  <ActivityIndicator size="large" color={AppColors.primary} />
                  <Text style={styles.emptyStateText}>Loading beneficiaries...</Text>
                </View>
              ) : (
                beneficiaries.map(beneficiary => (
                  <TouchableOpacity
                    key={beneficiary.id}
                    style={[
                      styles.beneficiaryItem,
                      currentBeneficiary?.id === beneficiary.id && styles.beneficiaryItemSelected,
                    ]}
                    onPress={() => selectBeneficiary(beneficiary)}
                  >
                    <View style={styles.beneficiaryInfo}>
                      <Text style={styles.beneficiaryName}>{beneficiary.name}</Text>
                      <Text style={styles.beneficiaryStatus}>
                        {beneficiary.wellness_descriptor || beneficiary.last_location || 'No data'}
                      </Text>
                    </View>
                    <View style={[styles.statusDot, { backgroundColor: beneficiary.status === 'online' ? AppColors.success : AppColors.offline }]} />
                  </TouchableOpacity>
                ))
              )}
            </ScrollView>
          </View>
        </View>
      </Modal>

      {/* DEV ONLY: Voice Picker Modal */}
      {DEV_MODE && (
        <Modal visible={showVoicePicker} animationType="slide" transparent>
          <View style={styles.modalOverlay}>
            <View style={[styles.modalContent, { maxHeight: '80%' }]}>
              <View style={styles.modalHeader}>
                <View>
                  <Text style={styles.modalTitle}>Voice Settings</Text>
                  <Text style={styles.devBadge}>DEV ONLY</Text>
                </View>
                <TouchableOpacity onPress={() => setShowVoicePicker(false)}>
                  <Ionicons name="close" size={24} color={AppColors.textPrimary} />
                </TouchableOpacity>
              </View>

              {/* Current Voice Info */}
              <View style={styles.currentVoiceInfo}>
                <Text style={styles.currentVoiceLabel}>Current: {selectedVoice.name}</Text>
                <TouchableOpacity
                  style={styles.testVoiceButton}
                  onPress={() => testVoice(selectedVoice)}
                >
                  <Feather name="play" size={16} color={AppColors.white} />
                  <Text style={styles.testVoiceButtonText}>Test</Text>
                </TouchableOpacity>
              </View>
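
              {/* All 35+ voices render inside this nested ScrollView; nestedScrollEnabled
                  together with the modal's bounded maxHeight is what lets the list scroll
                  all the way to the end (this is the fix referenced in the commit message above). */}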
              <ScrollView
                style={styles.voiceList}
                nestedScrollEnabled={true}
                showsVerticalScrollIndicator={true}
                contentContainerStyle={{ paddingBottom: 20 }}
              >
                {/* English Voices Section */}
                <Text style={styles.voiceSectionTitle}>English Voices</Text>
                {AVAILABLE_VOICES.filter(v => v.language.startsWith('en-')).map(voice => (
                  <TouchableOpacity
                    key={voice.id}
                    style={[
                      styles.voiceItem,
                      selectedVoice.id === voice.id && styles.voiceItemSelected,
                    ]}
                    onPress={() => setSelectedVoice(voice)}
                  >
                    <View style={styles.voiceItemInfo}>
                      <Text style={styles.voiceItemName}>{voice.name}</Text>
                      <Text style={styles.voiceItemLang}>{voice.language}</Text>
                    </View>
                    <View style={styles.voiceItemActions}>
                      <TouchableOpacity
                        style={styles.playButton}
                        onPress={() => testVoice(voice)}
                      >
                        <Feather name="play-circle" size={24} color="#9B59B6" />
                      </TouchableOpacity>
                      {selectedVoice.id === voice.id && (
                        <Feather name="check-circle" size={24} color={AppColors.success} />
                      )}
                    </View>
                  </TouchableOpacity>
                ))}

                {/* European Languages Section */}
                <Text style={styles.voiceSectionTitle}>European Languages</Text>
                {AVAILABLE_VOICES.filter(v =>
                  ['fr-FR', 'de-DE', 'es-ES', 'es-MX', 'it-IT', 'pt-BR', 'pt-PT', 'nl-NL', 'pl-PL', 'ru-RU', 'uk-UA', 'cs-CZ', 'da-DK', 'fi-FI', 'el-GR', 'hu-HU', 'no-NO', 'ro-RO', 'sk-SK', 'sv-SE', 'tr-TR'].includes(v.language)
                ).map(voice => (
                  <TouchableOpacity
                    key={voice.id}
                    style={[
                      styles.voiceItem,
                      selectedVoice.id === voice.id && styles.voiceItemSelected,
                    ]}
                    onPress={() => setSelectedVoice(voice)}
                  >
                    <View style={styles.voiceItemInfo}>
                      <Text style={styles.voiceItemName}>{voice.name}</Text>
                      <Text style={styles.voiceItemLang}>{voice.language}</Text>
                    </View>
                    <View style={styles.voiceItemActions}>
                      <TouchableOpacity
                        style={styles.playButton}
                        onPress={() => testVoice(voice)}
                      >
                        <Feather name="play-circle" size={24} color="#9B59B6" />
                      </TouchableOpacity>
                      {selectedVoice.id === voice.id && (
                        <Feather name="check-circle" size={24} color={AppColors.success} />
                      )}
                    </View>
                  </TouchableOpacity>
                ))}

                {/* Asian Languages Section */}
                <Text style={styles.voiceSectionTitle}>Asian Languages</Text>
                {AVAILABLE_VOICES.filter(v =>
                  ['zh-CN', 'zh-TW', 'zh-HK', 'ja-JP', 'ko-KR', 'hi-IN', 'th-TH', 'vi-VN', 'id-ID'].includes(v.language)
                ).map(voice => (
                  <TouchableOpacity
                    key={voice.id}
                    style={[
                      styles.voiceItem,
                      selectedVoice.id === voice.id && styles.voiceItemSelected,
                    ]}
                    onPress={() => setSelectedVoice(voice)}
                  >
                    <View style={styles.voiceItemInfo}>
                      <Text style={styles.voiceItemName}>{voice.name}</Text>
                      <Text style={styles.voiceItemLang}>{voice.language}</Text>
                    </View>
                    <View style={styles.voiceItemActions}>
                      <TouchableOpacity
                        style={styles.playButton}
                        onPress={() => testVoice(voice)}
                      >
                        <Feather name="play-circle" size={24} color="#9B59B6" />
                      </TouchableOpacity>
                      {selectedVoice.id === voice.id && (
                        <Feather name="check-circle" size={24} color={AppColors.success} />
                      )}
                    </View>
                  </TouchableOpacity>
                ))}

                {/* Middle Eastern Languages Section */}
                <Text style={styles.voiceSectionTitle}>Middle Eastern</Text>
                {AVAILABLE_VOICES.filter(v =>
                  ['ar-SA', 'he-IL'].includes(v.language)
                ).map(voice => (
                  <TouchableOpacity
                    key={voice.id}
                    style={[
                      styles.voiceItem,
                      selectedVoice.id === voice.id && styles.voiceItemSelected,
                    ]}
                    onPress={() => setSelectedVoice(voice)}
                  >
                    <View style={styles.voiceItemInfo}>
                      <Text style={styles.voiceItemName}>{voice.name}</Text>
                      <Text style={styles.voiceItemLang}>{voice.language}</Text>
                    </View>
                    <View style={styles.voiceItemActions}>
                      <TouchableOpacity
                        style={styles.playButton}
                        onPress={() => testVoice(voice)}
                      >
                        <Feather name="play-circle" size={24} color="#9B59B6" />
                      </TouchableOpacity>
                      {selectedVoice.id === voice.id && (
                        <Feather name="check-circle" size={24} color={AppColors.success} />
                      )}
                    </View>
                  </TouchableOpacity>
                ))}
              </ScrollView>

              {/* Apply Button */}
              <TouchableOpacity
                style={styles.applyButton}
                onPress={() => {
                  setShowVoicePicker(false);
                  Speech.speak(`Voice changed to ${selectedVoice.name}`, {
                    language: selectedVoice.language,
                    voice: selectedVoice.voice,
                  });
                }}
              >
                <Text style={styles.applyButtonText}>Apply & Close</Text>
              </TouchableOpacity>
            </View>
          </View>
        </Modal>
      )}
    </SafeAreaView>
  );
}

const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: AppColors.surface,
  },
  header: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'space-between',
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.sm,
    backgroundColor: AppColors.background,
    borderBottomWidth: 1,
    borderBottomColor: AppColors.border,
  },
  headerInfo: {
    flexDirection: 'row',
    alignItems: 'center',
  },
  headerAvatar: {
    width: 40,
    height: 40,
    borderRadius: BorderRadius.full,
    backgroundColor: '#9B59B6',
    justifyContent: 'center',
    alignItems: 'center',
    marginRight: Spacing.sm,
  },
  headerTitle: {
    fontSize: FontSizes.lg,
    fontWeight: '600',
    color: AppColors.textPrimary,
  },
  headerSubtitle: {
    fontSize: FontSizes.sm,
    color: AppColors.success,
  },
  beneficiaryButton: {
    flexDirection: 'row',
    alignItems: 'center',
    paddingHorizontal: Spacing.sm,
    paddingVertical: Spacing.xs,
    backgroundColor: AppColors.surface,
    borderRadius: BorderRadius.lg,
    borderWidth: 1,
    borderColor: AppColors.border,
  },
  beneficiaryButtonText: {
    marginLeft: Spacing.xs,
    fontSize: FontSizes.sm,
    color: AppColors.primary,
    fontWeight: '500',
  },
  chatContainer: {
    flex: 1,
  },
  messagesList: {
    padding: Spacing.md,
    paddingBottom: Spacing.lg,
  },
  messageContainer: {
    flexDirection: 'row',
    marginBottom: Spacing.md,
    alignItems: 'flex-end',
  },
  userMessageContainer: {
    justifyContent: 'flex-end',
  },
  assistantMessageContainer: {
    justifyContent: 'flex-start',
  },
  avatarContainer: {
    width: 32,
    height: 32,
    borderRadius: BorderRadius.full,
    backgroundColor: '#9B59B6',
    justifyContent: 'center',
    alignItems: 'center',
    marginRight: Spacing.xs,
  },
  messageBubble: {
    maxWidth: '75%',
    padding: Spacing.sm + 4,
    borderRadius: BorderRadius.lg,
  },
  userBubble: {
    backgroundColor: AppColors.primary,
    borderBottomRightRadius: BorderRadius.sm,
  },
  assistantBubble: {
    backgroundColor: AppColors.background,
    borderBottomLeftRadius: BorderRadius.sm,
  },
  messageText: {
    fontSize: FontSizes.base,
    lineHeight: 22,
  },
  userMessageText: {
    color: AppColors.white,
  },
  assistantMessageText: {
    color: AppColors.textPrimary,
  },
  timestamp: {
    fontSize: FontSizes.xs,
    color: AppColors.textMuted,
    marginTop: Spacing.xs,
    alignSelf: 'flex-end',
  },
  userTimestamp: {
    color: 'rgba(255,255,255,0.7)',
  },
  listeningIndicator: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
    paddingVertical: Spacing.sm,
    paddingHorizontal: Spacing.md,
    backgroundColor: 'rgba(231, 76, 60, 0.1)',
    marginHorizontal: Spacing.md,
    borderRadius: BorderRadius.lg,
    marginBottom: Spacing.sm,
  },
  listeningText: {
    fontSize: FontSizes.sm,
    color: '#E74C3C',
    fontWeight: '500',
    marginLeft: Spacing.sm,
    flex: 1,
  },
  speakingIndicator: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
    paddingVertical: Spacing.sm,
    paddingHorizontal: Spacing.md,
    backgroundColor: 'rgba(155, 89, 182, 0.1)',
    marginHorizontal: Spacing.md,
    borderRadius: BorderRadius.lg,
    marginBottom: Spacing.sm,
  },
  speakingText: {
    fontSize: FontSizes.sm,
    color: '#9B59B6',
    fontWeight: '500',
    marginLeft: Spacing.sm,
  },
  continuousModeIndicator: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
    paddingVertical: Spacing.sm,
    paddingHorizontal: Spacing.md,
    backgroundColor: 'rgba(39, 174, 96, 0.1)',
    marginHorizontal: Spacing.md,
    borderRadius: BorderRadius.lg,
    marginBottom: Spacing.sm,
  },
  continuousModeText: {
    fontSize: FontSizes.sm,
    color: '#27AE60',
    fontWeight: '500',
    marginLeft: Spacing.sm,
  },
  inputContainer: {
    flexDirection: 'row',
    alignItems: 'flex-end',
    padding: Spacing.md,
    backgroundColor: AppColors.background,
    borderTopWidth: 1,
    borderTopColor: AppColors.border,
  },
  micButton: {
    width: 50,
    height: 50,
    borderRadius: BorderRadius.full,
    backgroundColor: AppColors.surface,
    justifyContent: 'center',
    alignItems: 'center',
    marginRight: Spacing.sm,
    borderWidth: 2,
    borderColor: AppColors.primary,
  },
  micButtonActive: {
    backgroundColor: '#E74C3C',
    borderColor: '#E74C3C',
  },
  micButtonContinuous: {
    borderColor: '#27AE60',
    backgroundColor: 'rgba(39, 174, 96, 0.1)',
  },
  input: {
    flex: 1,
    backgroundColor: AppColors.surface,
    borderRadius: BorderRadius.xl,
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.sm,
    fontSize: FontSizes.base,
    color: AppColors.textPrimary,
    maxHeight: 100,
    marginRight: Spacing.sm,
  },
  sendButton: {
    width: 44,
    height: 44,
    borderRadius: BorderRadius.full,
    backgroundColor: '#9B59B6',
    justifyContent: 'center',
    alignItems: 'center',
  },
  sendButtonDisabled: {
    backgroundColor: AppColors.surface,
  },
  // Modal styles
  modalOverlay: {
    flex: 1,
    backgroundColor: 'rgba(0,0,0,0.5)',
    justifyContent: 'flex-end',
  },
  modalContent: {
    backgroundColor: AppColors.background,
    borderTopLeftRadius: BorderRadius.xl,
    borderTopRightRadius: BorderRadius.xl,
    maxHeight: '70%',
  },
  modalHeader: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'space-between',
    padding: Spacing.md,
    borderBottomWidth: 1,
    borderBottomColor: AppColors.border,
  },
  modalTitle: {
    fontSize: FontSizes.lg,
    fontWeight: '600',
    color: AppColors.textPrimary,
  },
  beneficiaryList: {
    padding: Spacing.md,
  },
  beneficiaryItem: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'space-between',
    padding: Spacing.md,
    backgroundColor: AppColors.surface,
    borderRadius: BorderRadius.lg,
    marginBottom: Spacing.sm,
  },
  beneficiaryItemSelected: {
    backgroundColor: '#E8F0FE',
    borderWidth: 2,
    borderColor: AppColors.primary,
  },
  beneficiaryInfo: {
    flex: 1,
  },
  beneficiaryName: {
    fontSize: FontSizes.base,
    fontWeight: '600',
    color: AppColors.textPrimary,
  },
  beneficiaryStatus: {
    fontSize: FontSizes.sm,
    color: AppColors.textMuted,
    marginTop: 2,
  },
  statusDot: {
    width: 10,
    height: 10,
    borderRadius: 5,
    marginLeft: Spacing.sm,
  },
  emptyState: {
    alignItems: 'center',
    padding: Spacing.xl,
  },
  emptyStateText: {
    marginTop: Spacing.md,
    fontSize: FontSizes.base,
    color: AppColors.textMuted,
  },
  // DEV Voice Picker styles
  headerButtons: {
    flexDirection: 'row',
    alignItems: 'center',
    gap: Spacing.sm,
  },
  voiceSettingsButton: {
    width: 36,
    height: 36,
    borderRadius: BorderRadius.full,
    backgroundColor: 'rgba(155, 89, 182, 0.1)',
    justifyContent: 'center',
    alignItems: 'center',
    borderWidth: 1,
    borderColor: 'rgba(155, 89, 182, 0.3)',
  },
  devBadge: {
    fontSize: FontSizes.xs,
    color: '#E74C3C',
    fontWeight: '600',
    marginTop: 2,
  },
  currentVoiceInfo: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'space-between',
    padding: Spacing.md,
    backgroundColor: 'rgba(155, 89, 182, 0.1)',
    borderBottomWidth: 1,
    borderBottomColor: AppColors.border,
  },
  currentVoiceLabel: {
    fontSize: FontSizes.base,
    color: AppColors.textPrimary,
    fontWeight: '500',
    flex: 1,
  },
  testVoiceButton: {
    flexDirection: 'row',
    alignItems: 'center',
    backgroundColor: '#9B59B6',
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.xs,
    borderRadius: BorderRadius.lg,
    gap: Spacing.xs,
  },
  testVoiceButtonText: {
    color: AppColors.white,
    fontSize: FontSizes.sm,
    fontWeight: '600',
  },
  voiceList: {
    flex: 1,
    maxHeight: 400,
    padding: Spacing.sm,
  },
  voiceSectionTitle: {
    fontSize: FontSizes.sm,
    fontWeight: '700',
    color: AppColors.textMuted,
    textTransform: 'uppercase',
    marginTop: Spacing.md,
    marginBottom: Spacing.sm,
    marginLeft: Spacing.xs,
  },
  voiceItem: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'space-between',
    padding: Spacing.sm,
    backgroundColor: AppColors.surface,
    borderRadius: BorderRadius.md,
    marginBottom: Spacing.xs,
  },
  voiceItemSelected: {
    backgroundColor: 'rgba(155, 89, 182, 0.15)',
    borderWidth: 1,
    borderColor: '#9B59B6',
  },
  voiceItemInfo: {
    flex: 1,
  },
  voiceItemName: {
    fontSize: FontSizes.base,
    color: AppColors.textPrimary,
    fontWeight: '500',
  },
  voiceItemLang: {
    fontSize: FontSizes.xs,
    color: AppColors.textMuted,
    marginTop: 2,
  },
  voiceItemActions: {
    flexDirection: 'row',
    alignItems: 'center',
    gap: Spacing.sm,
  },
  playButton: {
    padding: Spacing.xs,
  },
  applyButton: {
    backgroundColor: '#9B59B6',
    margin: Spacing.md,
    padding: Spacing.md,
    borderRadius: BorderRadius.lg,
    alignItems: 'center',
  },
  applyButtonText: {
    color: AppColors.white,
    fontSize: FontSizes.base,
    fontWeight: '600',
  },
});