- FAB button now correctly stops session during speaking/processing states
- Echo prevention: STT stopped during TTS playback; results ignored during speaking
- Chat TTS only speaks when the voice session is active (no auto-speak for text chat)
- Session stop now aborts in-flight API requests and prevents race conditions
- STT restarts after TTS with an 800 ms delay for audio-focus release
- Pending interrupt transcript processed after TTS completion
- ChatContext added for message persistence across tab navigation
- VoiceFAB redesigned with state-based animations
- console.error replaced with console.warn across the voice pipeline
- "no-speech" STT errors silenced (normal silence behavior)
311 lines · 9.4 KiB · TypeScript
/**
 * Voice Floating Action Button Component
 *
 * Positioned at the center of the tab bar.
 * Shows different animations for each voice state:
 * - idle: white mic icon, green background
 * - listening: red background, expanding pulse rings
 * - processing: blue background, spinning indicator
 * - speaking: green background, wave-like pulse
 */

import React, { useRef, useEffect } from 'react';
import {
  ActivityIndicator,
  Animated,
  StyleSheet,
  TouchableOpacity,
  ViewStyle,
} from 'react-native';
import { Ionicons } from '@expo/vector-icons';
import * as Haptics from 'expo-haptics';

import { AppColors, BorderRadius } from '@/constants/theme';
import { useVoiceCall } from '@/contexts/VoiceCallContext';
import { useVoice } from '@/contexts/VoiceContext';

interface VoiceFABProps {
|
|
onPress: () => void;
|
|
style?: ViewStyle;
|
|
disabled?: boolean;
|
|
isListening?: boolean;
|
|
}
|
|
|
|
const FAB_SIZE = 60;
|
|
|
|
export function VoiceFAB({ onPress, style, disabled = false, isListening = false }: VoiceFABProps) {
|
|
const { isCallActive } = useVoiceCall();
|
|
const { status: voiceStatus } = useVoice();
|
|
|
|
// Animation values
|
|
const scale = useRef(new Animated.Value(1)).current;
|
|
const opacity = useRef(new Animated.Value(1)).current;
|
|
|
|
// Pulse ring 1 (main expanding ring)
|
|
const pulse1Scale = useRef(new Animated.Value(1)).current;
|
|
const pulse1Opacity = useRef(new Animated.Value(0)).current;
|
|
|
|
// Pulse ring 2 (second ring, offset timing)
|
|
const pulse2Scale = useRef(new Animated.Value(1)).current;
|
|
const pulse2Opacity = useRef(new Animated.Value(0)).current;
|
|
|
|
// Speaking glow animation
|
|
const glowScale = useRef(new Animated.Value(1)).current;
|
|
|
|
// Processing rotation
|
|
const rotation = useRef(new Animated.Value(0)).current;
|
|
|
|
// Store animation refs for cleanup
|
|
const animationRef = useRef<Animated.CompositeAnimation | null>(null);
|
|
|
|
// Determine effective state
|
|
const effectiveStatus = isListening
|
|
? (voiceStatus === 'processing' ? 'processing' : voiceStatus === 'speaking' ? 'speaking' : 'listening')
|
|
: 'idle';
|
|
|
|
// Hide FAB when call is active
|
|
useEffect(() => {
|
|
if (isCallActive) {
|
|
Animated.parallel([
|
|
Animated.timing(scale, { toValue: 0, duration: 200, useNativeDriver: true }),
|
|
Animated.timing(opacity, { toValue: 0, duration: 200, useNativeDriver: true }),
|
|
]).start();
|
|
} else {
|
|
Animated.parallel([
|
|
Animated.spring(scale, { toValue: 1, friction: 5, tension: 40, useNativeDriver: true }),
|
|
Animated.timing(opacity, { toValue: 1, duration: 200, useNativeDriver: true }),
|
|
]).start();
|
|
}
|
|
}, [isCallActive, scale, opacity]);
|
|
|
|
// Animations based on voice status
|
|
useEffect(() => {
|
|
// Stop previous animation
|
|
if (animationRef.current) {
|
|
animationRef.current.stop();
|
|
animationRef.current = null;
|
|
}
|
|
|
|
// Reset all animation values
|
|
pulse1Scale.setValue(1);
|
|
pulse1Opacity.setValue(0);
|
|
pulse2Scale.setValue(1);
|
|
pulse2Opacity.setValue(0);
|
|
glowScale.setValue(1);
|
|
rotation.setValue(0);
|
|
|
|
if (effectiveStatus === 'listening') {
|
|
// Double pulse ring animation - more active/dynamic
|
|
const pulseAnim = Animated.loop(
|
|
Animated.stagger(500, [
|
|
Animated.parallel([
|
|
Animated.timing(pulse1Scale, { toValue: 2.0, duration: 1200, useNativeDriver: true }),
|
|
Animated.timing(pulse1Opacity, { toValue: 0, duration: 1200, useNativeDriver: true }),
|
|
]),
|
|
Animated.parallel([
|
|
Animated.timing(pulse1Scale, { toValue: 1, duration: 0, useNativeDriver: true }),
|
|
Animated.timing(pulse1Opacity, { toValue: 0.5, duration: 0, useNativeDriver: true }),
|
|
]),
|
|
])
|
|
);
|
|
|
|
const pulse2Anim = Animated.loop(
|
|
Animated.sequence([
|
|
Animated.delay(400),
|
|
Animated.parallel([
|
|
Animated.timing(pulse2Scale, { toValue: 1.8, duration: 1200, useNativeDriver: true }),
|
|
Animated.timing(pulse2Opacity, { toValue: 0, duration: 1200, useNativeDriver: true }),
|
|
]),
|
|
Animated.parallel([
|
|
Animated.timing(pulse2Scale, { toValue: 1, duration: 0, useNativeDriver: true }),
|
|
Animated.timing(pulse2Opacity, { toValue: 0.4, duration: 0, useNativeDriver: true }),
|
|
]),
|
|
])
|
|
);
|
|
|
|
const combined = Animated.parallel([pulseAnim, pulse2Anim]);
|
|
animationRef.current = combined;
|
|
combined.start();
|
|
|
|
} else if (effectiveStatus === 'speaking') {
|
|
// Gentle breathing glow when speaking
|
|
const glowAnim = Animated.loop(
|
|
Animated.sequence([
|
|
Animated.timing(glowScale, { toValue: 1.15, duration: 600, useNativeDriver: true }),
|
|
Animated.timing(glowScale, { toValue: 1.0, duration: 600, useNativeDriver: true }),
|
|
])
|
|
);
|
|
|
|
// Soft outer glow
|
|
const softPulse = Animated.loop(
|
|
Animated.sequence([
|
|
Animated.parallel([
|
|
Animated.timing(pulse1Scale, { toValue: 1.4, duration: 800, useNativeDriver: true }),
|
|
Animated.timing(pulse1Opacity, { toValue: 0.3, duration: 400, useNativeDriver: true }),
|
|
]),
|
|
Animated.parallel([
|
|
Animated.timing(pulse1Scale, { toValue: 1.0, duration: 800, useNativeDriver: true }),
|
|
Animated.timing(pulse1Opacity, { toValue: 0, duration: 400, useNativeDriver: true }),
|
|
]),
|
|
])
|
|
);
|
|
|
|
const combined = Animated.parallel([glowAnim, softPulse]);
|
|
animationRef.current = combined;
|
|
combined.start();
|
|
|
|
} else if (effectiveStatus === 'processing') {
|
|
// Spinning rotation for processing
|
|
const spinAnim = Animated.loop(
|
|
Animated.timing(rotation, { toValue: 1, duration: 1500, useNativeDriver: true })
|
|
);
|
|
animationRef.current = spinAnim;
|
|
spinAnim.start();
|
|
}
|
|
|
|
return () => {
|
|
if (animationRef.current) {
|
|
animationRef.current.stop();
|
|
animationRef.current = null;
|
|
}
|
|
};
|
|
}, [effectiveStatus]); // eslint-disable-line react-hooks/exhaustive-deps
|
|
|
|
// Press animation with haptic feedback
|
|
const handlePressIn = () => {
|
|
Haptics.impactAsync(Haptics.ImpactFeedbackStyle.Medium);
|
|
Animated.spring(scale, { toValue: 0.85, friction: 5, useNativeDriver: true }).start();
|
|
};
|
|
|
|
const handlePressOut = () => {
|
|
Animated.spring(scale, { toValue: 1, friction: 5, useNativeDriver: true }).start();
|
|
};
|
|
|
|
if (isCallActive) {
|
|
return null;
|
|
}
|
|
|
|
// Determine colors and icon based on state
|
|
let fabBgColor = AppColors.success; // idle: green
|
|
let iconName: 'mic-outline' | 'mic' | 'volume-high' = 'mic-outline';
|
|
let pulseColor = AppColors.error;
|
|
|
|
if (effectiveStatus === 'listening') {
|
|
fabBgColor = '#FF3B30'; // red
|
|
iconName = 'mic';
|
|
pulseColor = '#FF3B30';
|
|
} else if (effectiveStatus === 'processing') {
|
|
fabBgColor = AppColors.primary; // blue
|
|
iconName = 'mic';
|
|
pulseColor = AppColors.primary;
|
|
} else if (effectiveStatus === 'speaking') {
|
|
fabBgColor = '#34C759'; // green
|
|
iconName = 'volume-high';
|
|
pulseColor = '#34C759';
|
|
}
|
|
|
|
const spin = rotation.interpolate({
|
|
inputRange: [0, 1],
|
|
outputRange: ['0deg', '360deg'],
|
|
});
|
|
|
|
return (
|
|
<Animated.View
|
|
style={[
|
|
styles.container,
|
|
{
|
|
transform: [{ scale }],
|
|
opacity,
|
|
},
|
|
style,
|
|
]}
|
|
>
|
|
{/* Pulse ring 1 */}
|
|
{(effectiveStatus === 'listening' || effectiveStatus === 'speaking') && (
|
|
<Animated.View
|
|
style={[
|
|
styles.pulseRing,
|
|
{
|
|
backgroundColor: pulseColor,
|
|
transform: [{ scale: pulse1Scale }],
|
|
opacity: pulse1Opacity,
|
|
},
|
|
]}
|
|
/>
|
|
)}
|
|
|
|
{/* Pulse ring 2 (listening only) */}
|
|
{effectiveStatus === 'listening' && (
|
|
<Animated.View
|
|
style={[
|
|
styles.pulseRing,
|
|
{
|
|
backgroundColor: pulseColor,
|
|
transform: [{ scale: pulse2Scale }],
|
|
opacity: pulse2Opacity,
|
|
},
|
|
]}
|
|
/>
|
|
)}
|
|
|
|
<Animated.View
|
|
style={[
|
|
{ transform: [{ scale: effectiveStatus === 'speaking' ? glowScale : 1 }] },
|
|
]}
|
|
>
|
|
<TouchableOpacity
|
|
style={[
|
|
styles.fab,
|
|
{ backgroundColor: disabled ? AppColors.surface : fabBgColor },
|
|
disabled && styles.fabDisabled,
|
|
]}
|
|
onPress={onPress}
|
|
onPressIn={handlePressIn}
|
|
onPressOut={handlePressOut}
|
|
disabled={disabled}
|
|
activeOpacity={0.9}
|
|
>
|
|
{effectiveStatus === 'processing' ? (
|
|
<Animated.View style={{ transform: [{ rotate: spin }] }}>
|
|
<ActivityIndicator size="small" color={AppColors.white} />
|
|
</Animated.View>
|
|
) : (
|
|
<Ionicons
|
|
name={iconName}
|
|
size={28}
|
|
color={disabled ? AppColors.textMuted : AppColors.white}
|
|
/>
|
|
)}
|
|
</TouchableOpacity>
|
|
</Animated.View>
|
|
</Animated.View>
|
|
);
|
|
}
|
|
|
|
const styles = StyleSheet.create({
|
|
container: {
|
|
alignItems: 'center',
|
|
justifyContent: 'center',
|
|
},
|
|
pulseRing: {
|
|
position: 'absolute',
|
|
width: FAB_SIZE,
|
|
height: FAB_SIZE,
|
|
borderRadius: FAB_SIZE / 2,
|
|
},
|
|
fab: {
|
|
width: FAB_SIZE,
|
|
height: FAB_SIZE,
|
|
borderRadius: FAB_SIZE / 2,
|
|
justifyContent: 'center',
|
|
alignItems: 'center',
|
|
shadowColor: '#000',
|
|
shadowOffset: { width: 0, height: 4 },
|
|
shadowOpacity: 0.3,
|
|
shadowRadius: 8,
|
|
elevation: 8,
|
|
},
|
|
fabDisabled: {
|
|
shadowOpacity: 0.1,
|
|
},
|
|
});
|