import { Tabs } from 'expo-router';
import React, { useCallback, useEffect } from 'react';
import { Platform, View } from 'react-native';
import { Feather } from '@expo/vector-icons';
import { useSafeAreaInsets } from 'react-native-safe-area-context';

import { HapticTab } from '@/components/haptic-tab';
import { VoiceFAB } from '@/components/VoiceFAB';
import { AppColors } from '@/constants/theme';
import { useColorScheme } from '@/hooks/use-color-scheme';
import { useVoiceCall } from '@/contexts/VoiceCallContext';
import { useVoice } from '@/contexts/VoiceContext';
import { useSpeechRecognition } from '@/hooks/useSpeechRecognition';

export default function TabLayout() {
  const colorScheme = useColorScheme();
  const isDark = colorScheme === 'dark';
  const insets = useSafeAreaInsets();

  // VoiceFAB uses VoiceCallContext internally to hide while a call is active;
  // calling the hook here ensures the context provider is mounted above us.
  useVoiceCall();

  // Voice context for the listening-mode toggle and TTS interruption
  const {
    isListening,
    status,
    startSession,
    stopSession,
    interruptIfSpeaking,
    setTranscript,
    setPartialTranscript,
    sendTranscript,
  } = useVoice();

  // Interrupt TTS when the user starts speaking during the 'speaking' state
  const handleVoiceDetected = useCallback(() => {
    if (status === 'speaking') {
      console.log('[TabLayout] Voice detected during speaking - interrupting TTS');
      interruptIfSpeaking();
    }
  }, [status, interruptIfSpeaking]);

  // Handle STT results: interim results update the live transcript,
  // final results are committed and sent to the API.
  const handleSpeechResult = useCallback(
    (transcript: string, isFinal: boolean) => {
      if (isFinal) {
        setTranscript(transcript);
        sendTranscript(transcript);
      } else {
        setPartialTranscript(transcript);
      }
    },
    [setTranscript, setPartialTranscript, sendTranscript],
  );

  // Speech recognition with the voice-detection callback
  const { startListening, stopListening } = useSpeechRecognition({
    continuous: true,
    interimResults: true,
    onVoiceDetected: handleVoiceDetected,
    onResult: handleSpeechResult,
  });

  // Start/stop STT when the voice session starts/stops
  useEffect(() => {
    if (isListening) {
      startListening();
    } else {
      stopListening();
    }
  }, [isListening, startListening, stopListening]);

  // Voice FAB press toggles listening mode
  const handleVoiceFABPress = useCallback(() => {
    if (isListening) {
      stopSession();
    } else {
      startSession();
    }
  }, [isListening, startSession, stopSession]);

  // Calculate the tab bar height from the safe area:
  // - On iOS with a home indicator, insets.bottom is ~34px.
  // - On Android with gesture navigation or software buttons (Samsung/Pixel),
  //   insets.bottom should reflect the navigation bar height, but some
  //   devices/modes report 0, so enforce a 16px minimum on Android to keep
  //   content clear of the system buttons.
  const androidMinPadding = Platform.OS === 'android' ? 16 : 0;
  const bottomPadding = Math.max(insets.bottom, androidMinPadding, 10);
  const tabBarHeight = 60 + bottomPadding; // 60px of content + safe-area padding

  // NOTE: The JSX below is a reconstruction; the original markup was lost in
  // extraction. Screen names ('dashboard', 'chat', 'settings', ...), titles,
  // icon names, AppColors keys, and VoiceFAB props are assumptions inferred
  // from the imports and the surviving comments.
  return (
    <View style={{ flex: 1 }}>
      <Tabs
        screenOptions={{
          headerShown: false,
          tabBarButton: HapticTab,
          tabBarActiveTintColor: AppColors.primary, // assumed theme key
          tabBarStyle: {
            height: tabBarHeight,
            paddingBottom: bottomPadding,
            backgroundColor: isDark ? AppColors.dark : AppColors.light, // assumed theme keys
          },
        }}
      >
        <Tabs.Screen
          name="index"
          options={{
            title: 'Home', // assumed title
            tabBarIcon: ({ color }) => (
              <Feather name="home" size={24} color={color} />
            ),
          }}
        />
        {/* Hide old dashboard - now index shows WebView dashboard */}
        <Tabs.Screen name="dashboard" options={{ href: null }} />
        {/* Chat with Julia AI */}
        <Tabs.Screen
          name="chat"
          options={{
            title: 'Chat', // assumed title
            tabBarIcon: ({ color }) => (
              <Feather name="message-circle" size={24} color={color} />
            ),
          }}
        />
        <Tabs.Screen
          name="settings" // assumed screen name
          options={{
            title: 'Settings', // assumed title
            tabBarIcon: ({ color }) => (
              <Feather name="settings" size={24} color={color} />
            ),
          }}
        />
        {/* Hide explore tab */}
        <Tabs.Screen name="explore" options={{ href: null }} />
        {/* Audio Debug - hidden */}
        <Tabs.Screen name="audio-debug" options={{ href: null }} />
        {/* Beneficiaries - hidden from tab bar but keeps tab bar visible */}
        <Tabs.Screen name="beneficiaries" options={{ href: null }} />
      </Tabs>
      {/* Voice FAB - toggle listening mode (prop names assumed) */}
      <VoiceFAB onPress={handleVoiceFABPress} isListening={isListening} />
    </View>
  );
}