- Add onVoiceDetected callback to useSpeechRecognition hook - Triggered on first interim result (voice activity detected) - Uses voiceDetectedRef to ensure callback fires only once per session - Reset flag on session start/end - Connect STT to VoiceContext in _layout.tsx - Use useSpeechRecognition with onVoiceDetected callback - Call interruptIfSpeaking() when voice detected during 'speaking' state - Forward STT results to VoiceContext (setTranscript, sendTranscript) - Start/stop STT based on isListening state - Export interruptIfSpeaking from VoiceContext provider 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
177 lines
5.4 KiB
TypeScript
import { Tabs } from 'expo-router';
|
|
import React, { useCallback, useEffect } from 'react';
|
|
import { Platform, View } from 'react-native';
|
|
import { Feather } from '@expo/vector-icons';
|
|
import { useSafeAreaInsets } from 'react-native-safe-area-context';
|
|
|
|
import { HapticTab } from '@/components/haptic-tab';
|
|
import { VoiceFAB } from '@/components/VoiceFAB';
|
|
import { AppColors } from '@/constants/theme';
|
|
import { useColorScheme } from '@/hooks/use-color-scheme';
|
|
import { useVoiceCall } from '@/contexts/VoiceCallContext';
|
|
import { useVoice } from '@/contexts/VoiceContext';
|
|
import { useSpeechRecognition } from '@/hooks/useSpeechRecognition';
|
|
|
|
export default function TabLayout() {
|
|
const colorScheme = useColorScheme();
|
|
const isDark = colorScheme === 'dark';
|
|
const insets = useSafeAreaInsets();
|
|
// VoiceFAB uses VoiceCallContext internally to hide when call is active
|
|
useVoiceCall(); // Ensure context is available
|
|
|
|
// Voice context for listening mode toggle and TTS interruption
|
|
const {
|
|
isListening,
|
|
status,
|
|
startSession,
|
|
stopSession,
|
|
interruptIfSpeaking,
|
|
setTranscript,
|
|
setPartialTranscript,
|
|
sendTranscript,
|
|
} = useVoice();
|
|
|
|
// Callback for voice detection - interrupt TTS when user speaks
|
|
const handleVoiceDetected = useCallback(() => {
|
|
// Interrupt TTS when user starts speaking during 'speaking' state
|
|
if (status === 'speaking') {
|
|
console.log('[TabLayout] Voice detected during speaking - interrupting TTS');
|
|
interruptIfSpeaking();
|
|
}
|
|
}, [status, interruptIfSpeaking]);
|
|
|
|
// Callback for STT results
|
|
const handleSpeechResult = useCallback((transcript: string, isFinal: boolean) => {
|
|
if (isFinal) {
|
|
setTranscript(transcript);
|
|
// Send to API when final result is received
|
|
sendTranscript(transcript);
|
|
} else {
|
|
setPartialTranscript(transcript);
|
|
}
|
|
}, [setTranscript, setPartialTranscript, sendTranscript]);
|
|
|
|
// Speech recognition with voice detection callback
|
|
const {
|
|
startListening,
|
|
stopListening,
|
|
} = useSpeechRecognition({
|
|
continuous: true,
|
|
interimResults: true,
|
|
onVoiceDetected: handleVoiceDetected,
|
|
onResult: handleSpeechResult,
|
|
});
|
|
|
|
// Start/stop STT when voice session starts/stops
|
|
useEffect(() => {
|
|
if (isListening) {
|
|
startListening();
|
|
} else {
|
|
stopListening();
|
|
}
|
|
}, [isListening, startListening, stopListening]);
|
|
|
|
// Handle voice FAB press - toggle listening mode
|
|
const handleVoiceFABPress = useCallback(() => {
|
|
if (isListening) {
|
|
stopSession();
|
|
} else {
|
|
startSession();
|
|
}
|
|
}, [isListening, startSession, stopSession]);
|
|
|
|
// Calculate tab bar height based on safe area
|
|
// On iOS with home indicator, insets.bottom is ~34px
|
|
// On Android with gesture navigation or software buttons (Samsung/Pixel):
|
|
// - insets.bottom should reflect the navigation bar height
|
|
// - But some devices/modes may return 0, so we add a minimum for Android
|
|
// Android minimum: 16px to ensure content doesn't touch system buttons
|
|
const androidMinPadding = Platform.OS === 'android' ? 16 : 0;
|
|
const bottomPadding = Math.max(insets.bottom, androidMinPadding, 10);
|
|
const tabBarHeight = 60 + bottomPadding; // 60px for content + safe area padding
|
|
|
|
return (
|
|
<View style={{ flex: 1 }}>
|
|
<Tabs
|
|
screenOptions={{
|
|
tabBarActiveTintColor: AppColors.primary,
|
|
tabBarInactiveTintColor: isDark ? '#9BA1A6' : '#687076',
|
|
tabBarStyle: {
|
|
backgroundColor: isDark ? '#151718' : AppColors.background,
|
|
borderTopColor: isDark ? '#2D3135' : AppColors.border,
|
|
height: tabBarHeight,
|
|
paddingBottom: bottomPadding,
|
|
paddingTop: 10,
|
|
},
|
|
tabBarLabelStyle: {
|
|
fontSize: 11,
|
|
fontWeight: '500',
|
|
},
|
|
headerShown: false,
|
|
tabBarButton: HapticTab,
|
|
}}
|
|
>
|
|
<Tabs.Screen
|
|
name="index"
|
|
options={{
|
|
title: 'Dashboard',
|
|
tabBarIcon: ({ color, size }) => (
|
|
<Feather name="grid" size={22} color={color} />
|
|
),
|
|
}}
|
|
/>
|
|
{/* Hide old dashboard - now index shows WebView dashboard */}
|
|
<Tabs.Screen
|
|
name="dashboard"
|
|
options={{
|
|
href: null,
|
|
}}
|
|
/>
|
|
{/* Chat with Julia AI */}
|
|
<Tabs.Screen
|
|
name="chat"
|
|
options={{
|
|
title: 'Julia',
|
|
tabBarIcon: ({ color, size }) => (
|
|
<Feather name="message-circle" size={22} color={color} />
|
|
),
|
|
}}
|
|
/>
|
|
<Tabs.Screen
|
|
name="profile"
|
|
options={{
|
|
title: 'Profile',
|
|
tabBarIcon: ({ color, size }) => (
|
|
<Feather name="user" size={22} color={color} />
|
|
),
|
|
}}
|
|
/>
|
|
{/* Hide explore tab */}
|
|
<Tabs.Screen
|
|
name="explore"
|
|
options={{
|
|
href: null,
|
|
}}
|
|
/>
|
|
{/* Audio Debug - hidden */}
|
|
<Tabs.Screen
|
|
name="audio-debug"
|
|
options={{
|
|
href: null,
|
|
}}
|
|
/>
|
|
{/* Beneficiaries - hidden from tab bar but keeps tab bar visible */}
|
|
<Tabs.Screen
|
|
name="beneficiaries"
|
|
options={{
|
|
href: null,
|
|
}}
|
|
/>
|
|
</Tabs>
|
|
|
|
{/* Voice FAB - toggle listening mode */}
|
|
<VoiceFAB onPress={handleVoiceFABPress} isListening={isListening} />
|
|
</View>
|
|
);
|
|
}
|