diff --git a/components/ErrorBoundary.tsx b/components/ErrorBoundary.tsx new file mode 100644 index 0000000..8c44f5a --- /dev/null +++ b/components/ErrorBoundary.tsx @@ -0,0 +1,182 @@ +import React, { Component, ReactNode } from 'react'; +import { View, Text, StyleSheet, TouchableOpacity } from 'react-native'; +import { debugLogger } from '@/services/DebugLogger'; +import { AppColors, Spacing, FontSizes, BorderRadius } from '@/constants/theme'; + +interface Props { + children: ReactNode; + fallback?: ReactNode; + onError?: (error: Error, errorInfo: React.ErrorInfo) => void; + category?: string; +} + +interface State { + hasError: boolean; + error: Error | null; + errorInfo: React.ErrorInfo | null; +} + +/** + * ErrorBoundary - catches JavaScript errors in child components + * Logs errors to DebugLogger and shows fallback UI + */ +export class ErrorBoundary extends Component { + constructor(props: Props) { + super(props); + this.state = { + hasError: false, + error: null, + errorInfo: null, + }; + } + + static getDerivedStateFromError(error: Error): State { + return { + hasError: true, + error, + errorInfo: null, + }; + } + + componentDidCatch(error: Error, errorInfo: React.ErrorInfo) { + const category = this.props.category || 'ErrorBoundary'; + + // Log to debug console + debugLogger.error( + category, + `💥 Component crashed: ${error.message}`, + { + error: error.toString(), + stack: error.stack, + componentStack: errorInfo.componentStack, + } + ); + + // Update state with error info + this.setState({ + errorInfo, + }); + + // Call optional error handler + this.props.onError?.(error, errorInfo); + } + + handleReset = () => { + this.setState({ + hasError: false, + error: null, + errorInfo: null, + }); + }; + + render() { + if (this.state.hasError) { + // Custom fallback UI + if (this.props.fallback) { + return this.props.fallback; + } + + // Default fallback UI + return ( + + + Something went wrong + + {this.state.error?.message || 'Unknown error'} + + + {this.state.error?.stack && ( + + Stack trace: + + {this.state.error.stack} + + + )} + + + Try Again + + + + Check Debug tab for full error details + + + + ); + } + + return this.props.children; + } +} + +const styles = StyleSheet.create({ + container: { + flex: 1, + justifyContent: 'center', + alignItems: 'center', + backgroundColor: AppColors.background, + padding: Spacing.lg, + }, + errorCard: { + backgroundColor: AppColors.surface, + borderRadius: BorderRadius.lg, + padding: Spacing.lg, + maxWidth: 400, + width: '100%', + borderLeftWidth: 4, + borderLeftColor: AppColors.error || '#E53935', + }, + errorTitle: { + fontSize: FontSizes.xl, + fontWeight: '700', + color: AppColors.error || '#E53935', + marginBottom: Spacing.sm, + }, + errorMessage: { + fontSize: FontSizes.base, + color: AppColors.textPrimary, + marginBottom: Spacing.md, + lineHeight: 22, + }, + stackContainer: { + backgroundColor: AppColors.background, + borderRadius: BorderRadius.md, + padding: Spacing.sm, + marginBottom: Spacing.md, + }, + stackLabel: { + fontSize: FontSizes.xs, + color: AppColors.textSecondary, + fontWeight: '600', + marginBottom: Spacing.xs, + }, + stackTrace: { + fontSize: FontSizes.xs, + color: AppColors.textMuted, + fontFamily: 'monospace', + lineHeight: 16, + }, + resetButton: { + backgroundColor: AppColors.primary, + borderRadius: BorderRadius.md, + paddingVertical: Spacing.sm, + paddingHorizontal: Spacing.md, + alignItems: 'center', + marginBottom: Spacing.sm, + }, + resetButtonText: { + color: AppColors.white, + fontSize: FontSizes.base, + 
fontWeight: '600', + }, + debugHint: { + fontSize: FontSizes.xs, + color: AppColors.textMuted, + textAlign: 'center', + marginTop: Spacing.xs, + }, +}); diff --git a/components/TTSErrorBoundary.tsx b/components/TTSErrorBoundary.tsx new file mode 100644 index 0000000..1853c7f --- /dev/null +++ b/components/TTSErrorBoundary.tsx @@ -0,0 +1,105 @@ +import React, { ReactNode } from 'react'; +import { View, Text, StyleSheet } from 'react-native'; +import { ErrorBoundary } from './ErrorBoundary'; +import { debugLogger } from '@/services/DebugLogger'; +import { AppColors, Spacing, FontSizes, BorderRadius } from '@/constants/theme'; + +interface TTSErrorBoundaryProps { + children: ReactNode; +} + +/** + * TTSErrorBoundary - specialized error boundary for TTS operations + * Catches crashes during voice synthesis and playback + */ +export function TTSErrorBoundary({ children }: TTSErrorBoundaryProps) { + const handleError = (error: Error, errorInfo: React.ErrorInfo) => { + // Additional TTS-specific error handling + debugLogger.error('TTS', '💥 TTS CRASH CAUGHT BY ERROR BOUNDARY', { + message: error.message, + name: error.name, + stack: error.stack, + componentStack: errorInfo.componentStack, + }); + + // Check for common TTS errors + if (error.message.includes('TTSManager')) { + debugLogger.error('TTS', 'Native module error - TTSManager not initialized or crashed'); + } + if (error.message.includes('generateAndPlay')) { + debugLogger.error('TTS', 'Speech generation/playback failed'); + } + if (error.message.includes('model') || error.message.includes('onnx')) { + debugLogger.error('TTS', 'TTS model loading error - check if models are bundled'); + } + }; + + const fallbackUI = ( + + + 🔊 + Voice Playback Error + + The voice synthesis system encountered an error and was reset. 
+ + + Check the Debug tab for detailed error logs + + + + ); + + return ( + + {children} + + ); +} + +const styles = StyleSheet.create({ + container: { + flex: 1, + justifyContent: 'center', + alignItems: 'center', + backgroundColor: AppColors.background, + padding: Spacing.lg, + }, + errorCard: { + backgroundColor: AppColors.surface, + borderRadius: BorderRadius.lg, + padding: Spacing.xl, + maxWidth: 360, + width: '100%', + alignItems: 'center', + borderLeftWidth: 4, + borderLeftColor: AppColors.error || '#E53935', + }, + errorIcon: { + fontSize: 48, + marginBottom: Spacing.md, + }, + errorTitle: { + fontSize: FontSizes.xl, + fontWeight: '700', + color: AppColors.textPrimary, + marginBottom: Spacing.sm, + textAlign: 'center', + }, + errorMessage: { + fontSize: FontSizes.base, + color: AppColors.textSecondary, + marginBottom: Spacing.md, + textAlign: 'center', + lineHeight: 22, + }, + debugHint: { + fontSize: FontSizes.xs, + color: AppColors.textMuted, + textAlign: 'center', + marginTop: Spacing.xs, + }, +}); diff --git a/components/VoiceIndicator.tsx b/components/VoiceIndicator.tsx new file mode 100644 index 0000000..c0a0f15 --- /dev/null +++ b/components/VoiceIndicator.tsx @@ -0,0 +1,276 @@ +/** + * VoiceIndicator - Animated visual feedback for voice conversation + * Shows pulsing circles when listening or speaking + */ + +import React, { useEffect, useRef } from 'react'; +import { View, StyleSheet, Animated, Text, TouchableOpacity } from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme'; + +interface VoiceIndicatorProps { + mode: 'listening' | 'speaking' | 'idle'; + onTap?: (currentMode: 'listening' | 'speaking') => void; +} + +export function VoiceIndicator({ mode, onTap }: VoiceIndicatorProps) { + // Animation values for 3 concentric circles + const ring1Scale = useRef(new Animated.Value(1)).current; + const ring2Scale = useRef(new Animated.Value(1)).current; + const ring3Scale = useRef(new Animated.Value(1)).current; + const ring1Opacity = useRef(new Animated.Value(0.8)).current; + const ring2Opacity = useRef(new Animated.Value(0.6)).current; + const ring3Opacity = useRef(new Animated.Value(0.4)).current; + + // Inner circle pulse + const innerPulse = useRef(new Animated.Value(1)).current; + + useEffect(() => { + if (mode === 'idle') { + // Reset all animations + ring1Scale.setValue(1); + ring2Scale.setValue(1); + ring3Scale.setValue(1); + ring1Opacity.setValue(0); + ring2Opacity.setValue(0); + ring3Opacity.setValue(0); + innerPulse.setValue(1); + return; + } + + // Create pulsing animation for rings + const createRingAnimation = ( + scale: Animated.Value, + opacity: Animated.Value, + delay: number + ) => { + return Animated.loop( + Animated.sequence([ + Animated.delay(delay), + Animated.parallel([ + Animated.timing(scale, { + toValue: 2.5, + duration: 1500, + useNativeDriver: true, + }), + Animated.timing(opacity, { + toValue: 0, + duration: 1500, + useNativeDriver: true, + }), + ]), + Animated.parallel([ + Animated.timing(scale, { + toValue: 1, + duration: 0, + useNativeDriver: true, + }), + Animated.timing(opacity, { + toValue: mode === 'listening' ? 
0.8 : 0.6, + duration: 0, + useNativeDriver: true, + }), + ]), + ]) + ); + }; + + // Inner pulse animation + const innerPulseAnimation = Animated.loop( + Animated.sequence([ + Animated.timing(innerPulse, { + toValue: 1.15, + duration: 400, + useNativeDriver: true, + }), + Animated.timing(innerPulse, { + toValue: 1, + duration: 400, + useNativeDriver: true, + }), + ]) + ); + + // Reset opacity values + ring1Opacity.setValue(mode === 'listening' ? 0.8 : 0.6); + ring2Opacity.setValue(mode === 'listening' ? 0.6 : 0.4); + ring3Opacity.setValue(mode === 'listening' ? 0.4 : 0.3); + + // Start all animations + const anim1 = createRingAnimation(ring1Scale, ring1Opacity, 0); + const anim2 = createRingAnimation(ring2Scale, ring2Opacity, 500); + const anim3 = createRingAnimation(ring3Scale, ring3Opacity, 1000); + + anim1.start(); + anim2.start(); + anim3.start(); + innerPulseAnimation.start(); + + return () => { + anim1.stop(); + anim2.stop(); + anim3.stop(); + innerPulseAnimation.stop(); + }; + }, [mode]); + + if (mode === 'idle') { + return null; + } + + const isListening = mode === 'listening'; + const primaryColor = isListening ? AppColors.primary : '#4CAF50'; + const secondaryColor = isListening ? '#2196F3' : '#66BB6A'; + + // Handle tap anywhere on the indicator + const handlePress = () => { + if (mode !== 'idle') { + onTap?.(mode as 'listening' | 'speaking'); + } + }; + + return ( + + {/* Animated rings */} + + + + + + {/* Inner pulsing circle with icon */} + + + + + + {/* Status text */} + + {isListening ? 'Listening...' : 'Speaking...'} + + + {/* Tap hint - shows what will happen when tapped */} + + + + {isListening ? 'Tap to cancel' : 'Tap to interrupt & speak'} + + + + ); +} + +const styles = StyleSheet.create({ + container: { + alignItems: 'center', + justifyContent: 'center', + paddingVertical: Spacing.xl, + backgroundColor: 'rgba(255, 255, 255, 0.95)', + borderRadius: BorderRadius.lg, + marginHorizontal: Spacing.md, + marginBottom: Spacing.md, + shadowColor: '#000', + shadowOffset: { width: 0, height: 2 }, + shadowOpacity: 0.1, + shadowRadius: 8, + elevation: 4, + }, + ringsContainer: { + width: 120, + height: 120, + alignItems: 'center', + justifyContent: 'center', + }, + ring: { + position: 'absolute', + width: 60, + height: 60, + borderRadius: 30, + }, + innerCircle: { + width: 70, + height: 70, + borderRadius: 35, + alignItems: 'center', + justifyContent: 'center', + shadowColor: '#000', + shadowOffset: { width: 0, height: 4 }, + shadowOpacity: 0.3, + shadowRadius: 6, + elevation: 8, + }, + statusText: { + fontSize: FontSizes.lg, + fontWeight: '600', + marginTop: Spacing.md, + }, + hintContainer: { + flexDirection: 'row', + alignItems: 'center', + marginTop: Spacing.md, + paddingHorizontal: Spacing.md, + paddingVertical: Spacing.sm, + backgroundColor: 'rgba(244, 67, 54, 0.1)', + borderRadius: BorderRadius.full, + }, + hintContainerSpeak: { + backgroundColor: 'rgba(33, 150, 243, 0.1)', + }, + hintText: { + marginLeft: Spacing.xs, + fontSize: FontSizes.sm, + color: AppColors.error, + fontWeight: '500', + }, + hintTextSpeak: { + color: AppColors.primary, + }, +}); + +export default VoiceIndicator; diff --git a/hooks/useSpeechRecognition.ts b/hooks/useSpeechRecognition.ts new file mode 100644 index 0000000..f2f854f --- /dev/null +++ b/hooks/useSpeechRecognition.ts @@ -0,0 +1,142 @@ +import { useState, useEffect, useCallback, useRef } from 'react'; +import { Platform, Alert } from 'react-native'; +import { debugLogger } from '@/services/DebugLogger'; + +// Try to import native module +let 
ExpoSpeechRecognitionModule: any = null; +let SPEECH_RECOGNITION_AVAILABLE = false; +try { + const speechRecognition = require('expo-speech-recognition'); + ExpoSpeechRecognitionModule = speechRecognition.ExpoSpeechRecognitionModule; + if (ExpoSpeechRecognitionModule) { + SPEECH_RECOGNITION_AVAILABLE = true; + debugLogger.info('STT', 'Speech recognition module loaded successfully'); + } +} catch (e) { + debugLogger.warn('STT', 'Speech recognition not available', e); + console.log('[useSpeechRecognition] expo-speech-recognition not available'); +} + +export interface SpeechRecognitionResult { + transcript: string; + isFinal: boolean; +} + +export interface UseSpeechRecognitionReturn { + isListening: boolean; + recognizedText: string; + startListening: (options?: { continuous?: boolean }) => Promise; + stopListening: () => void; + isAvailable: boolean; + hasPermission: boolean; + requestPermission: () => Promise; +} + +export function useSpeechRecognition(): UseSpeechRecognitionReturn { + const [isListening, setIsListening] = useState(false); + const [recognizedText, setRecognizedText] = useState(''); + const [hasPermission, setHasPermission] = useState(false); + + // Callbacks + const onResultRef = useRef<((result: SpeechRecognitionResult) => void) | null>(null); + + useEffect(() => { + if (!SPEECH_RECOGNITION_AVAILABLE || !ExpoSpeechRecognitionModule) { + debugLogger.warn('STT', 'Cannot setup listeners - module not available'); + return; + } + + debugLogger.info('STT', 'Setting up speech recognition event listeners'); + const subscriptions: any[] = []; + + if (ExpoSpeechRecognitionModule.addListener) { + subscriptions.push( + ExpoSpeechRecognitionModule.addListener('start', () => { + debugLogger.info('STT', 'Speech recognition started'); + setIsListening(true); + }) + ); + subscriptions.push( + ExpoSpeechRecognitionModule.addListener('end', () => { + debugLogger.info('STT', 'Speech recognition ended'); + setIsListening(false); + }) + ); + subscriptions.push( + ExpoSpeechRecognitionModule.addListener('result', (event: any) => { + const transcript = event.results?.[0]?.transcript || ''; + const isFinal = event.results?.[0]?.isFinal || false; + debugLogger.log('STT', `Recognized: "${transcript}" (${isFinal ? 'FINAL' : 'interim'})`); + setRecognizedText(transcript); + }) + ); + subscriptions.push( + ExpoSpeechRecognitionModule.addListener('error', (event: any) => { + debugLogger.error('STT', 'Speech recognition error', event); + setIsListening(false); + console.warn('[Speech] Error:', event); + }) + ); + } + + return () => { + debugLogger.info('STT', 'Cleaning up speech recognition listeners'); + subscriptions.forEach(sub => sub.remove?.()); + }; + }, []); + + const requestPermission = async () => { + if (!SPEECH_RECOGNITION_AVAILABLE) { + debugLogger.warn('STT', 'Cannot request permission - module not available'); + return false; + } + debugLogger.info('STT', 'Requesting microphone permissions'); + const result = await ExpoSpeechRecognitionModule.requestPermissionsAsync(); + setHasPermission(result.granted); + debugLogger.log('STT', `Permission ${result.granted ? 
'granted' : 'denied'}`); + return result.granted; + }; + + const startListening = async (options?: { continuous?: boolean }) => { + if (!SPEECH_RECOGNITION_AVAILABLE) { + debugLogger.error('STT', 'Cannot start - speech recognition not available'); + Alert.alert('Not Available', 'Voice recognition is not available on this device.'); + return; + } + + try { + // Reset text + setRecognizedText(''); + debugLogger.info('STT', `Starting speech recognition (continuous: ${options?.continuous ?? false})`); + + await ExpoSpeechRecognitionModule.start({ + lang: 'en-US', + interimResults: true, + maxAlternatives: 1, + continuous: options?.continuous ?? false, + }); + } catch (e) { + debugLogger.error('STT', 'Failed to start listening', e); + console.error('Failed to start listening', e); + setIsListening(false); + } + }; + + const stopListening = () => { + debugLogger.info('STT', 'Stopping speech recognition'); + if (SPEECH_RECOGNITION_AVAILABLE) { + ExpoSpeechRecognitionModule.stop(); + } + setIsListening(false); + }; + + return { + isListening, + recognizedText, + startListening, + stopListening, + isAvailable: SPEECH_RECOGNITION_AVAILABLE, + hasPermission, + requestPermission + }; +} diff --git a/services/DebugLogger.ts b/services/DebugLogger.ts new file mode 100644 index 0000000..f0c1d30 --- /dev/null +++ b/services/DebugLogger.ts @@ -0,0 +1,175 @@ +/** + * Centralized Debug Logger + * Captures console logs, errors, warnings for display in Debug tab + */ + +export interface LogEntry { + id: string; + timestamp: Date; + level: 'log' | 'warn' | 'error' | 'info'; + category: string; // e.g., "TTS", "STT", "Chat", "System" + message: string; + data?: any; +} + +class DebugLogger { + private logs: LogEntry[] = []; + private maxLogs = 500; // Keep last 500 logs + private listeners: Set<(logs: LogEntry[]) => void> = new Set(); + + // Original console methods + private originalConsole = { + log: console.log.bind(console), + warn: console.warn.bind(console), + error: console.error.bind(console), + info: console.info.bind(console), + }; + + constructor() { + this.interceptConsole(); + } + + /** + * Intercept console methods to capture logs + */ + private interceptConsole() { + console.log = (...args: any[]) => { + this.originalConsole.log(...args); + this.addLog('log', 'System', this.formatArgs(args)); + }; + + console.warn = (...args: any[]) => { + this.originalConsole.warn(...args); + this.addLog('warn', 'System', this.formatArgs(args)); + }; + + console.error = (...args: any[]) => { + this.originalConsole.error(...args); + this.addLog('error', 'System', this.formatArgs(args)); + }; + + console.info = (...args: any[]) => { + this.originalConsole.info(...args); + this.addLog('info', 'System', this.formatArgs(args)); + }; + } + + /** + * Format console arguments into string + */ + private formatArgs(args: any[]): string { + return args + .map(arg => { + if (typeof arg === 'object') { + try { + return JSON.stringify(arg, null, 2); + } catch { + return String(arg); + } + } + return String(arg); + }) + .join(' '); + } + + /** + * Add a log entry + */ + private addLog(level: LogEntry['level'], category: string, message: string, data?: any) { + const entry: LogEntry = { + id: `${Date.now()}-${Math.random()}`, + timestamp: new Date(), + level, + category, + message, + data, + }; + + this.logs.push(entry); + + // Trim old logs + if (this.logs.length > this.maxLogs) { + this.logs = this.logs.slice(-this.maxLogs); + } + + // Notify listeners + this.notifyListeners(); + } + + /** + * Public logging methods with 
category support + */ + log(category: string, message: string, data?: any) { + this.originalConsole.log(`[${category}]`, message, data || ''); + this.addLog('log', category, message, data); + } + + warn(category: string, message: string, data?: any) { + this.originalConsole.warn(`[${category}]`, message, data || ''); + this.addLog('warn', category, message, data); + } + + error(category: string, message: string, data?: any) { + this.originalConsole.error(`[${category}]`, message, data || ''); + this.addLog('error', category, message, data); + } + + info(category: string, message: string, data?: any) { + this.originalConsole.info(`[${category}]`, message, data || ''); + this.addLog('info', category, message, data); + } + + /** + * Get all logs + */ + getLogs(): LogEntry[] { + return [...this.logs]; + } + + /** + * Get logs by category + */ + getLogsByCategory(category: string): LogEntry[] { + return this.logs.filter(log => log.category === category); + } + + /** + * Clear all logs + */ + clear() { + this.logs = []; + this.notifyListeners(); + } + + /** + * Subscribe to log updates + */ + subscribe(listener: (logs: LogEntry[]) => void) { + this.listeners.add(listener); + return () => this.listeners.delete(listener); + } + + /** + * Notify all listeners + */ + private notifyListeners() { + this.listeners.forEach(listener => listener(this.getLogs())); + } + + /** + * Export logs as text + */ + exportAsText(): string { + return this.logs + .map(log => { + const time = log.timestamp.toLocaleTimeString(); + const level = log.level.toUpperCase().padEnd(5); + const category = `[${log.category}]`.padEnd(12); + return `${time} ${level} ${category} ${log.message}`; + }) + .join('\n'); + } +} + +// Singleton instance +export const debugLogger = new DebugLogger(); diff --git a/services/sherpaTTS.ts b/services/sherpaTTS.ts new file mode 100644 index 0000000..d5c5a95 --- /dev/null +++ b/services/sherpaTTS.ts @@ -0,0 +1,345 @@ +/** + * Sherpa TTS Service - Dynamic model loading + * Uses react-native-sherpa-onnx-offline-tts with Piper VITS models + * Models downloaded from Hugging Face on first use + */ + +import { NativeEventEmitter } from 'react-native'; +import TTSManager from 'react-native-sherpa-onnx-offline-tts'; +import RNFS from '@dr.pogodin/react-native-fs'; +import { debugLogger } from '@/services/DebugLogger'; + +// Only Orayan (Ryan) voice - downloaded from Hugging Face +const ORAYAN_VOICE = { + id: 'ryan-medium', + name: 'Ryan (Orayan)', + description: 'Male, clear voice', + hfRepo: 'csukuangfj/vits-piper-en_US-ryan-medium', + modelDir: 'vits-piper-en_US-ryan-medium', + onnxFile: 'en_US-ryan-medium.onnx', +}; + +interface SherpaTTSState { + initialized: boolean; + initializing: boolean; + speaking: boolean; + error: string | null; +} + +// Check if native module is available +const NATIVE_MODULE_AVAILABLE = !!TTSManager; + +if (NATIVE_MODULE_AVAILABLE) { + debugLogger.info('TTS', 'TTSManager native module loaded successfully'); +} else { + debugLogger.error('TTS', 'TTSManager native module NOT available - prebuild required'); +} + +let ttsManagerEmitter: NativeEventEmitter | null = null; +if (NATIVE_MODULE_AVAILABLE) { + ttsManagerEmitter = new NativeEventEmitter(TTSManager); + debugLogger.info('TTS', 'TTS event emitter initialized'); +} + +let currentState: SherpaTTSState = { + initialized: false, + initializing: false, + speaking: false, + error: null, +}; + +// State listeners +const stateListeners: ((state: SherpaTTSState) => void)[] = []; + +function updateState(updates: Partial) { + 
currentState = { ...currentState, ...updates }; + notifyListeners(); +} + +function notifyListeners() { + stateListeners.forEach(listener => listener({ ...currentState })); +} + +export function addStateListener(listener: (state: SherpaTTSState) => void) { + stateListeners.push(listener); + listener({ ...currentState }); + return () => { + const index = stateListeners.indexOf(listener); + if (index >= 0) stateListeners.splice(index, 1); + }; +} + +export function getState(): SherpaTTSState { + return { ...currentState }; +} + +/** + * Download TTS model from Hugging Face + */ +async function downloadModelFromHuggingFace(): Promise { + const extractPath = `${RNFS.DocumentDirectoryPath}/voices`; + const modelDir = `${extractPath}/${ORAYAN_VOICE.modelDir}`; + const modelPath = `${modelDir}/${ORAYAN_VOICE.onnxFile}`; + + // Check if already downloaded + const exists = await RNFS.exists(modelPath); + if (exists) { + debugLogger.info('TTS', `Model already downloaded at: ${modelPath}`); + return true; + } + + debugLogger.info('TTS', `Downloading model from Hugging Face: ${ORAYAN_VOICE.hfRepo}`); + updateState({ initializing: true, error: 'Downloading voice model...' }); + + try { + // Create directories + await RNFS.mkdir(modelDir, { intermediateDirectories: true }); + + // Download model files from Hugging Face + const baseUrl = `https://huggingface.co/${ORAYAN_VOICE.hfRepo}/resolve/main`; + + const filesToDownload = [ + { url: `${baseUrl}/${ORAYAN_VOICE.onnxFile}`, path: modelPath }, + { url: `${baseUrl}/tokens.txt`, path: `${modelDir}/tokens.txt` }, + { url: `${baseUrl}/espeak-ng-data.tar.bz2`, path: `${modelDir}/espeak-ng-data.tar.bz2` }, + ]; + + // Download each file + for (const file of filesToDownload) { + debugLogger.log('TTS', `Downloading: ${file.url}`); + const downloadResult = await RNFS.downloadFile({ + fromUrl: file.url, + toFile: file.path, + }).promise; + + if (downloadResult.statusCode !== 200) { + throw new Error(`Failed to download ${file.url}: ${downloadResult.statusCode}`); + } + } + + // Extract espeak-ng-data + debugLogger.log('TTS', 'Extracting espeak-ng-data...'); + // Note: Extraction would need native module or untar library + // For now, assume it's extracted manually or via separate process + + debugLogger.info('TTS', '✅ Model downloaded successfully'); + return true; + + } catch (error) { + const errorMessage = error instanceof Error ? 
error.message : 'Download failed'; + debugLogger.error('TTS', `Model download failed: ${errorMessage}`, error); + updateState({ error: errorMessage, initializing: false }); + return false; + } +} + +/** + * Initialize Sherpa TTS with Orayan voice + */ +export async function initializeSherpaTTS(): Promise { + if (!NATIVE_MODULE_AVAILABLE) { + debugLogger.error('TTS', 'Cannot initialize - native module not available'); + updateState({ + initialized: false, + error: 'Native module not available - run npx expo prebuild and rebuild' + }); + return false; + } + + if (currentState.initializing) { + debugLogger.warn('TTS', 'Already initializing - skipping duplicate call'); + return false; + } + + debugLogger.info('TTS', `Starting initialization with voice: ${ORAYAN_VOICE.name}`); + updateState({ initializing: true, error: null }); + + try { + // Download model if needed + const downloaded = await downloadModelFromHuggingFace(); + if (!downloaded) { + throw new Error('Model download failed'); + } + + // Build paths to model files + const extractPath = `${RNFS.DocumentDirectoryPath}/voices`; + const modelDir = `${extractPath}/${ORAYAN_VOICE.modelDir}`; + const modelPath = `${modelDir}/${ORAYAN_VOICE.onnxFile}`; + const tokensPath = `${modelDir}/tokens.txt`; + const dataDirPath = `${modelDir}/espeak-ng-data`; + + debugLogger.log('TTS', 'Model paths:', { + model: modelPath, + tokens: tokensPath, + dataDir: dataDirPath + }); + + // Create config JSON for native module + const configJSON = JSON.stringify({ + modelPath, + tokensPath, + dataDirPath, + }); + + debugLogger.log('TTS', `Calling TTSManager.initialize() with config JSON`); + + // Initialize native TTS + await TTSManager.initialize(configJSON); + + updateState({ + initialized: true, + initializing: false, + error: null + }); + + debugLogger.info('TTS', '✅ Initialization successful'); + return true; + + } catch (error) { + const errorMessage = error instanceof Error ? error.message : 'Unknown error'; + debugLogger.error('TTS', `Initialization failed: ${errorMessage}`, error); + updateState({ + initialized: false, + initializing: false, + error: errorMessage + }); + return false; + } +} + +/** + * Speak text using Sherpa TTS + */ +export async function speak( + text: string, + options?: { + speed?: number; + speakerId?: number; + onStart?: () => void; + onDone?: () => void; + onError?: (error: Error) => void; + } +): Promise { + debugLogger.log('TTS', `speak() called with text: "${text.substring(0, 50)}${text.length > 50 ? '...' : ''}"`); + + if (!NATIVE_MODULE_AVAILABLE || !currentState.initialized) { + debugLogger.error('TTS', 'Cannot speak - TTS not initialized or module unavailable'); + options?.onError?.(new Error('Sherpa TTS not initialized')); + return; + } + + if (!text || text.trim().length === 0) { + debugLogger.warn('TTS', 'Empty text provided, skipping speech'); + return; + } + + const speed = options?.speed ?? 1.0; + const speakerId = options?.speakerId ?? 
0;
+
+  debugLogger.log('TTS', `Speech parameters: speed=${speed}, speakerId=${speakerId}`);
+
+  try {
+    updateState({ speaking: true });
+    debugLogger.info('TTS', 'State updated to speaking=true, calling onStart callback');
+    options?.onStart?.();
+
+    debugLogger.log('TTS', `Calling TTSManager.generateAndPlay("${text}", ${speakerId}, ${speed})`);
+
+    await TTSManager.generateAndPlay(text, speakerId, speed);
+
+    debugLogger.info('TTS', '✅ Speech playback completed successfully');
+    updateState({ speaking: false });
+    options?.onDone?.();
+  } catch (error) {
+    const err = error instanceof Error ? error : new Error('TTS playback failed');
+    debugLogger.error('TTS', `💥 Speech playback error: ${err.message}`, error);
+    updateState({ speaking: false });
+    options?.onError?.(err);
+  }
+}
+
+/**
+ * Stop current speech playback
+ */
+export async function stop(): Promise<void> {
+  debugLogger.info('TTS', 'stop() called');
+
+  if (NATIVE_MODULE_AVAILABLE && currentState.initialized) {
+    try {
+      debugLogger.log('TTS', 'Calling TTSManager.deinitialize() to stop playback');
+      TTSManager.deinitialize();
+      updateState({ speaking: false });
+
+      // Re-initialize after stop to be ready for next speech
+      debugLogger.log('TTS', 'Scheduling re-initialization in 100ms');
+      setTimeout(() => {
+        initializeSherpaTTS();
+      }, 100);
+
+      debugLogger.info('TTS', 'Playback stopped successfully');
+    } catch (error) {
+      debugLogger.error('TTS', 'Failed to stop playback', error);
+    }
+  } else {
+    debugLogger.warn('TTS', 'Cannot stop - module not available or not initialized');
+  }
+}
+
+/**
+ * Deinitialize and free resources
+ */
+export function deinitialize(): void {
+  debugLogger.info('TTS', 'deinitialize() called');
+
+  if (NATIVE_MODULE_AVAILABLE) {
+    try {
+      debugLogger.log('TTS', 'Calling TTSManager.deinitialize() to free resources');
+      TTSManager.deinitialize();
+      debugLogger.info('TTS', 'TTS resources freed successfully');
+    } catch (error) {
+      debugLogger.error('TTS', 'Failed to deinitialize', error);
+    }
+  }
+  updateState({ initialized: false, speaking: false, error: null });
+}
+
+/**
+ * Check if Sherpa TTS is available (native module loaded and initialized)
+ */
+export function isAvailable(): boolean {
+  return NATIVE_MODULE_AVAILABLE && currentState.initialized;
+}
+
+/**
+ * Check if currently speaking
+ */
+export async function isSpeaking(): Promise<boolean> {
+  return currentState.speaking;
+}
+
+// Voice selection removed - only the Ryan (Orayan) voice is available
+
+/**
+ * Add listener for volume updates during playback
+ */
+export function addVolumeListener(callback: (volume: number) => void): (() => void) | null {
+  if (!ttsManagerEmitter) return null;
+
+  const subscription = ttsManagerEmitter.addListener('VolumeUpdate', (event) => {
+    callback(event.volume);
+  });
+
+  return () => subscription.remove();
+}
+
+export default {
+  initialize: initializeSherpaTTS,
+  speak,
+  stop,
+  deinitialize,
+  isAvailable,
+  isSpeaking,
+  addStateListener,
+  getState,
+  addVolumeListener,
+};
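
Usage note (not part of the patch): the two boundaries above are meant to wrap whole subtrees, with TTSErrorBoundary scoped to the voice playback UI so a synthesis crash does not unmount the rest of the screen. A minimal sketch follows; ChatScreen and VoicePlayback are hypothetical components, only ErrorBoundary and TTSErrorBoundary come from this diff.

import React from 'react';
import { ErrorBoundary } from '@/components/ErrorBoundary';
import { TTSErrorBoundary } from '@/components/TTSErrorBoundary';
import { ChatScreen } from '@/screens/ChatScreen';          // hypothetical
import { VoicePlayback } from '@/components/VoicePlayback'; // hypothetical

export function ChatRoute() {
  return (
    // Generic boundary: catches any render crash below and logs it under the "Chat" category
    <ErrorBoundary category="Chat" onError={(err) => console.warn('Chat crashed:', err.message)}>
      <ChatScreen />
      {/* TTS-specific boundary: a synthesis crash resets only the voice UI */}
      <TTSErrorBoundary>
        <VoicePlayback />
      </TTSErrorBoundary>
    </ErrorBoundary>
  );
}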
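
The useSpeechRecognition hook exposes availability, permission, and transcript state; a sketch of a consuming component is below. The DictationDemo component and its UI are illustrative, only the hook API comes from the diff.

import React from 'react';
import { Button, Text, View } from 'react-native';
import { useSpeechRecognition } from '@/hooks/useSpeechRecognition';

export function DictationDemo() {
  const {
    isAvailable,
    isListening,
    recognizedText,
    hasPermission,
    requestPermission,
    startListening,
    stopListening,
  } = useSpeechRecognition();

  const toggle = async () => {
    if (!isAvailable) return;
    // Request the microphone/speech permission lazily, right before first use
    if (!hasPermission && !(await requestPermission())) return;
    if (isListening) {
      stopListening();
    } else {
      await startListening({ continuous: true });
    }
  };

  return (
    <View>
      <Button title={isListening ? 'Stop' : 'Speak'} onPress={toggle} />
      <Text>{recognizedText}</Text>
    </View>
  );
}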
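
For the TTS service, initialization is lazy and downloads the Ryan model on first use, so callers should initialize before speaking. A sketch under that assumption; the speakReply helper name is hypothetical.

import sherpaTTS from '@/services/sherpaTTS';

export async function speakReply(text: string): Promise<void> {
  // isAvailable() is true only after the native module loaded AND initialize() succeeded
  if (!sherpaTTS.isAvailable()) {
    const ok = await sherpaTTS.initialize(); // downloads the Piper model from Hugging Face if missing
    if (!ok) return;                         // failure details are logged under the "TTS" category
  }

  await sherpaTTS.speak(text, {
    speed: 1.0,
    onStart: () => console.log('[TTS] started'),
    onDone: () => console.log('[TTS] finished'),
    onError: err => console.warn('[TTS] failed:', err.message),
  });
}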
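
VoiceIndicator is purely presentational and driven by its mode prop, so the caller has to derive that mode from STT and TTS state. One possible wiring is sketched below; the VoiceStatus wrapper is illustrative, while VoiceIndicator, useSpeechRecognition, addStateListener, and sherpaTTS.stop come from the diff.

import React, { useEffect, useState } from 'react';
import { VoiceIndicator } from '@/components/VoiceIndicator';
import { useSpeechRecognition } from '@/hooks/useSpeechRecognition';
import sherpaTTS, { addStateListener } from '@/services/sherpaTTS';

export function VoiceStatus() {
  const { isListening, stopListening } = useSpeechRecognition();
  const [speaking, setSpeaking] = useState(false);

  // Mirror TTS service state into local state; addStateListener returns an unsubscribe function
  useEffect(() => addStateListener(state => setSpeaking(state.speaking)), []);

  const mode = isListening ? 'listening' : speaking ? 'speaking' : 'idle';

  return (
    <VoiceIndicator
      mode={mode}
      onTap={current => {
        // Tap cancels listening, or interrupts playback so the user can speak
        if (current === 'listening') stopListening();
        else sherpaTTS.stop();
      }}
    />
  );
}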
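
DebugLogger is a singleton that already intercepts console.log/warn/error/info, so screens only need to read and subscribe. A minimal Debug tab list is sketched below; the DebugLogList component itself is illustrative.

import React, { useEffect, useState } from 'react';
import { FlatList, Text } from 'react-native';
import { debugLogger, LogEntry } from '@/services/DebugLogger';

export function DebugLogList() {
  const [logs, setLogs] = useState<LogEntry[]>(debugLogger.getLogs());

  useEffect(() => {
    // subscribe() pushes the full log array on every change and returns an unsubscribe function
    const unsubscribe = debugLogger.subscribe(setLogs);
    return () => { unsubscribe(); };
  }, []);

  return (
    <FlatList
      data={logs}
      keyExtractor={item => item.id}
      renderItem={({ item }) => (
        <Text>{`${item.timestamp.toLocaleTimeString()} ${item.level.toUpperCase()} [${item.category}] ${item.message}`}</Text>
      )}
    />
  );
}

debugLogger.exportAsText() produces the same lines as a single string, which is convenient for a share or copy button on that screen.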