Voice AI Features:
- LiveKit Agents integration for real-time voice calls
- Julia AI agent (Python) deployed to LiveKit Cloud
- Token server for authentication
- Debug screen with voice call testing
- Voice call screen with full-screen UI

Agent Configuration:
- STT: Deepgram Nova-2
- LLM: OpenAI GPT-4o
- TTS: Deepgram Aura Asteria (female voice)
- Turn Detection: LiveKit Multilingual Model
- VAD: Silero
- Noise Cancellation: LiveKit BVC

Files added:
- julia-agent/ - Complete agent code and token server
- app/voice-call.tsx - Full-screen voice call UI
- services/livekitService.ts - LiveKit client service
- contexts/VoiceTranscriptContext.tsx - Transcript state
- polyfills/livekit-globals.ts - WebRTC polyfills

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
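The screen below obtains its connection details through `getToken()` from `services/livekitService.ts` and expects a result shaped as `{ success, data: { token, wsUrl, roomName }, error }`. A minimal sketch of that service, assuming a plain HTTP token server; the endpoint URL, query parameter, and `VOICE_NAME` value are illustrative, not the actual implementation:

```typescript
// services/livekitService.ts - sketch only, inferred from how voice-call.tsx consumes it
export const VOICE_NAME = 'Asteria'; // assumed: matches the Deepgram Aura Asteria voice named in the commit

// Assumed token server endpoint; the real URL likely comes from env/app config.
const TOKEN_SERVER_URL = 'https://your-token-server.example.com/token';

export interface TokenResult {
  success: boolean;
  data?: { token: string; wsUrl: string; roomName: string };
  error?: string;
}

// Requests a LiveKit access token for the given participant identity.
export async function getToken(identity: string): Promise<TokenResult> {
  try {
    const res = await fetch(`${TOKEN_SERVER_URL}?identity=${encodeURIComponent(identity)}`);
    if (!res.ok) {
      return { success: false, error: `Token server returned ${res.status}` };
    }
    const data = await res.json();
    return { success: true, data };
  } catch (err: any) {
    return { success: false, error: err?.message ?? 'Network error' };
  }
}
```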
/**
 * Voice Call Screen - Fullscreen LiveKit Voice Call
 *
 * Opens as a modal from chat, returns to chat when call ends.
 * Beautiful phone call-like UI with Julia AI.
 * Uses self-hosted LiveKit Server + Deepgram STT/TTS.
 */

import React, { useState, useCallback, useRef, useEffect } from 'react';
import {
  View,
  Text,
  StyleSheet,
  TouchableOpacity,
  Platform,
  Animated,
  Easing,
  Dimensions,
  ScrollView,
  Alert,
  AppState,
  AppStateStatus,
} from 'react-native';
import * as Clipboard from 'expo-clipboard';
import { Ionicons } from '@expo/vector-icons';
import { SafeAreaView } from 'react-native-safe-area-context';
import { useRouter } from 'expo-router';
import { activateKeepAwakeAsync, deactivateKeepAwake } from 'expo-keep-awake';
// NOTE: Room and other core classes must be imported from livekit-client, not @livekit/react-native!
// @livekit/react-native only provides registerGlobals(), React hooks, and components.
import type { Room as RoomType } from 'livekit-client';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import { getToken, VOICE_NAME } from '@/services/livekitService';
import { useVoiceTranscript } from '@/contexts/VoiceTranscriptContext';
import { debugLogger } from '@/services/DebugLogger';

// Polyfill Event class for React Native (livekit-client needs it)
if (typeof global.Event === 'undefined') {
  (global as any).Event = class Event {
    type: string;
    bubbles: boolean;
    cancelable: boolean;
    defaultPrevented: boolean;

    constructor(type: string, options?: { bubbles?: boolean; cancelable?: boolean }) {
      this.type = type;
      this.bubbles = options?.bubbles ?? false;
      this.cancelable = options?.cancelable ?? false;
      this.defaultPrevented = false;
    }

    preventDefault() {
      this.defaultPrevented = true;
    }

    stopPropagation() {}
    stopImmediatePropagation() {}
  };
}

const { width: SCREEN_WIDTH } = Dimensions.get('window');

type CallState = 'connecting' | 'active' | 'ending';

export default function VoiceCallScreen() {
  const router = useRouter();
  const { addTranscriptEntry, clearTranscript } = useVoiceTranscript();

  // Call state
  const [callState, setCallState] = useState<CallState>('connecting');
  const [isMuted, setIsMuted] = useState(false);
  const [callDuration, setCallDuration] = useState(0);
  const [statusText, setStatusText] = useState('Connecting...');
  const callStartTimeRef = useRef<number | null>(null);

  // Debug logs
  const [logs, setLogs] = useState<string[]>([]);
  const [showLogs, setShowLogs] = useState(false);
  const [logsMinimized, setLogsMinimized] = useState(false);
  const logsScrollRef = useRef<ScrollView>(null);

  // Add log entry - both local and global
  const addLog = useCallback((message: string) => {
    const timestamp = new Date().toLocaleTimeString('en-US', { hour12: false });
    setLogs(prev => [...prev, `[${timestamp}] ${message}`]);
    // Also send to global debug logger so it shows on Debug tab
    debugLogger.info('VOICE', message);
  }, []);

  // Copy logs to clipboard
  const copyLogs = useCallback(async () => {
    const logsText = logs.join('\n');
    await Clipboard.setStringAsync(logsText);
    Alert.alert('Copied!', `${logs.length} log entries copied to clipboard`);
  }, [logs]);

  // LiveKit room reference
  const roomRef = useRef<RoomType | null>(null);
  const isUnmountingRef = useRef(false);
  const connectionIdRef = useRef<number>(0);

  // Animations
  const pulseAnim = useRef(new Animated.Value(1)).current;
  const rotateAnim = useRef(new Animated.Value(0)).current;
  const avatarScale = useRef(new Animated.Value(0.8)).current;

  // Background state tracking
  const appStateRef = useRef<AppStateStatus>(AppState.currentState);

  // Keep screen awake during call & handle background mode
  useEffect(() => {
    // Prevent screen from sleeping during call
    activateKeepAwakeAsync('voiceCall').catch(() => {});

    // Handle app going to background/foreground
    const handleAppStateChange = (nextAppState: AppStateStatus) => {
      const prevState = appStateRef.current;
      appStateRef.current = nextAppState;

      if (prevState.match(/inactive|background/) && nextAppState === 'active') {
        // App came back to foreground
        addLog('App returned to foreground');
      } else if (prevState === 'active' && nextAppState.match(/inactive|background/)) {
        // App went to background - DON'T disconnect, keep call alive!
        addLog('App went to background - call continues');
        // The UIBackgroundModes: ["audio", "voip"] in app.json keeps audio alive
      }
    };

    const subscription = AppState.addEventListener('change', handleAppStateChange);

    return () => {
      subscription.remove();
      deactivateKeepAwake('voiceCall');
    };
  }, [addLog]);

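  // Background audio during calls depends on the app config rather than this component.
  // Assumed Expo app.json entry (hypothetical snippet - verify against the project's actual config):
  // {
  //   "expo": {
  //     "ios": {
  //       "infoPlist": { "UIBackgroundModes": ["audio", "voip"] }
  //     }
  //   }
  // }
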
  // Start call on mount
  useEffect(() => {
    // Track current connection attempt
    const currentConnectionId = ++connectionIdRef.current;
    isUnmountingRef.current = false;

    const startCall = async () => {
      try {
        // Clear previous transcript before starting new call
        clearTranscript();

        addLog('Starting voice call...');

        // Check if unmounting
        if (isUnmountingRef.current || currentConnectionId !== connectionIdRef.current) {
          addLog('Aborted: screen is closing');
          return;
        }

        // CRITICAL: Ensure WebRTC globals are registered BEFORE importing livekit-client
        // This MUST happen first, otherwise Room class won't work
        const { registerGlobals, AudioSession } = await import('@livekit/react-native');

        // Check if globals already registered, if not - register them
        if (typeof global.RTCPeerConnection === 'undefined') {
          addLog('Registering WebRTC globals...');
          registerGlobals();
        } else {
          addLog('WebRTC globals already registered');
        }

        // Check again if unmounting after async import
        if (isUnmountingRef.current || currentConnectionId !== connectionIdRef.current) {
          addLog('Aborted: screen is closing');
          return;
        }

        // NOW it's safe to import livekit-client
        addLog('Importing livekit-client...');
        const {
          Room,
          RoomEvent,
          ConnectionState,
          Track,
        } = await import('livekit-client');

        addLog(`Room class: ${typeof Room} ${Room ? 'OK' : 'MISSING'}`);
        addLog('LiveKit imported successfully');

        // Check if unmounting
        if (isUnmountingRef.current || currentConnectionId !== connectionIdRef.current) {
          addLog('Aborted: screen is closing');
          return;
        }

        // Configure iOS audio session
        if (Platform.OS === 'ios') {
          addLog('Starting iOS AudioSession...');
          await AudioSession.startAudioSession();
          addLog('iOS AudioSession started');
        }

        // Get token from our server
        addLog('Requesting token from server...');
        const result = await getToken(`user-${Date.now()}`);

        // Check if unmounting after token request
        if (isUnmountingRef.current || currentConnectionId !== connectionIdRef.current) {
          addLog('Aborted: screen is closing after token request');
          return;
        }

        if (!result.success || !result.data) {
          throw new Error(result.error || 'Failed to get token');
        }

        const { token, wsUrl, roomName } = result.data;

        addLog(`Token received. Room: ${roomName}`);
        addLog(`WebSocket URL: ${wsUrl}`);
        addLog(`Connecting to room: ${roomName}`);

        // Create and connect to room
        const room = new Room();
        roomRef.current = room;

        // Setup event listeners
        room.on(RoomEvent.ConnectionStateChanged, (state: typeof ConnectionState[keyof typeof ConnectionState]) => {
          addLog(`Connection state: ${state}`);

          switch (state) {
            case ConnectionState.Connecting:
              setCallState('connecting');
              setStatusText('Connecting...');
              break;
            case ConnectionState.Connected:
              setCallState('active');
              setStatusText('Connected');
              if (!callStartTimeRef.current) {
                callStartTimeRef.current = Date.now();
              }
              break;
            case ConnectionState.Reconnecting:
              setStatusText('Reconnecting...');
              break;
            case ConnectionState.Disconnected:
              setCallState('ending');
              setStatusText('Disconnected');
              // Go back when disconnected
              setTimeout(() => router.back(), 500);
              break;
          }
        });

        room.on(RoomEvent.TrackSubscribed, (track: any, publication: any, participant: any) => {
          addLog(`Track subscribed: ${track.kind} from ${participant.identity}`);
          if (track.kind === Track.Kind.Audio) {
            addLog('Audio track received - Julia should be speaking');
            setStatusText('Julia is speaking...');
          }
        });

        room.on(RoomEvent.TrackUnsubscribed, (track: any, publication: any, participant: any) => {
          addLog(`Track unsubscribed: ${track.kind}`);
        });

        room.on(RoomEvent.TrackMuted, (publication: any, participant: any) => {
          addLog(`Track muted: ${publication.trackSid} by ${participant.identity}`);
        });

        room.on(RoomEvent.TrackUnmuted, (publication: any, participant: any) => {
          addLog(`Track unmuted: ${publication.trackSid} by ${participant.identity}`);
        });

        room.on(RoomEvent.ParticipantConnected, (participant: any) => {
          addLog(`Participant connected: ${participant.identity}`);
        });

        room.on(RoomEvent.ParticipantDisconnected, (participant: any) => {
          addLog(`Participant disconnected: ${participant.identity}`);
        });

        room.on(RoomEvent.ActiveSpeakersChanged, (speakers: any[]) => {
          if (speakers.length > 0) {
            addLog(`Active speakers: ${speakers.map((s: any) => s.identity).join(', ')}`);
          }
        });

        room.on(RoomEvent.DataReceived, (payload: any, participant: any) => {
          try {
            const data = JSON.parse(new TextDecoder().decode(payload));
            addLog(`Data received: ${JSON.stringify(data).substring(0, 100)}`);

            // Handle transcript data from agent
            if (data.type === 'transcript') {
              if (data.role === 'user' && data.text) {
                addTranscriptEntry('user', data.text);
              } else if (data.role === 'assistant' && data.text) {
                addTranscriptEntry('assistant', data.text);
              }
            }
          } catch (e) {
            // Ignore non-JSON data
          }
        });

        room.on(RoomEvent.AudioPlaybackStatusChanged, () => {
          addLog(`Audio playback can play: ${room.canPlaybackAudio}`);
        });

        // Check if unmounting before connecting
        if (isUnmountingRef.current || currentConnectionId !== connectionIdRef.current) {
          addLog('Aborted: screen is closing before connect');
          return;
        }

        // Connect to room
        await room.connect(wsUrl, token, {
          autoSubscribe: true,
        });

        // Check if unmounting after connect
        if (isUnmountingRef.current || currentConnectionId !== connectionIdRef.current) {
          addLog('Aborted: screen is closing after connect, disconnecting...');
          await room.disconnect().catch(() => {});
          return;
        }

        // Enable microphone
        await room.localParticipant.setMicrophoneEnabled(true);

        addLog('Connected and microphone enabled');
        addLog(`Local participant: ${room.localParticipant.identity}`);
      } catch (err: any) {
        // Ignore errors if screen is unmounting (expected race condition)
        if (isUnmountingRef.current || currentConnectionId !== connectionIdRef.current) {
          console.log('[VoiceCall] Error ignored (screen closing):', err?.message);
          return;
        }

        // Detailed error logging for debugging
        console.error('[VoiceCall] Failed to start call:', err);
        console.error('[VoiceCall] Error name:', err?.name);
        console.error('[VoiceCall] Error message:', err?.message);
        console.error('[VoiceCall] Error stack:', err?.stack);

        const errorMsg = err?.message || String(err);
        setStatusText(`Error: ${errorMsg.substring(0, 50)}`);
        // Go back on error
        setTimeout(() => router.back(), 2000);
      }
    };

    startCall();

    // Cleanup on unmount
    return () => {
      isUnmountingRef.current = true;

      const cleanup = async () => {
        if (roomRef.current) {
          try {
            await roomRef.current.disconnect();
          } catch (e) {
            // Ignore errors during cleanup
          }
          roomRef.current = null;
        }
        if (Platform.OS === 'ios') {
          try {
            const { AudioSession } = await import('@livekit/react-native');
            await AudioSession.stopAudioSession();
          } catch (e) {
            // Ignore errors during cleanup
          }
        }
      };
      cleanup();
    };
  }, []);

  // Call duration timer
  useEffect(() => {
    if (callState !== 'active') return;

    const interval = setInterval(() => {
      if (callStartTimeRef.current) {
        const elapsed = Math.floor((Date.now() - callStartTimeRef.current) / 1000);
        setCallDuration(elapsed);
      }
    }, 1000);

    return () => clearInterval(interval);
  }, [callState]);

  // Pulse animation for active call
  useEffect(() => {
    if (callState === 'active') {
      const pulse = Animated.loop(
        Animated.sequence([
          Animated.timing(pulseAnim, {
            toValue: 1.1,
            duration: 1500,
            easing: Easing.inOut(Easing.ease),
            useNativeDriver: true,
          }),
          Animated.timing(pulseAnim, {
            toValue: 1,
            duration: 1500,
            easing: Easing.inOut(Easing.ease),
            useNativeDriver: true,
          }),
        ])
      );
      pulse.start();

      // Avatar entrance animation
      Animated.spring(avatarScale, {
        toValue: 1,
        friction: 8,
        tension: 40,
        useNativeDriver: true,
      }).start();

      return () => pulse.stop();
    }
  }, [callState]);

  // Rotate animation for connecting
  useEffect(() => {
    if (callState === 'connecting') {
      const rotate = Animated.loop(
        Animated.timing(rotateAnim, {
          toValue: 1,
          duration: 2000,
          easing: Easing.linear,
          useNativeDriver: true,
        })
      );
      rotate.start();
      return () => rotate.stop();
    } else {
      rotateAnim.setValue(0);
    }
  }, [callState]);

  // End call
  const endCall = useCallback(async () => {
    setCallState('ending');
    setStatusText('Ending call...');

    try {
      if (roomRef.current) {
        await roomRef.current.disconnect();
        roomRef.current = null;
      }
    } catch (err) {
      console.error('[VoiceCall] Error ending call:', err);
    }

    if (Platform.OS === 'ios') {
      try {
        const { AudioSession } = await import('@livekit/react-native');
        await AudioSession.stopAudioSession();
        await new Promise(resolve => setTimeout(resolve, 100));
      } catch (err) {
        console.error('[VoiceCall] Error stopping audio:', err);
      }
    }

    router.back();
  }, [router]);

  // Toggle mute
  const toggleMute = useCallback(async () => {
    if (roomRef.current) {
      const newMuted = !isMuted;
      await roomRef.current.localParticipant.setMicrophoneEnabled(!newMuted);
      setIsMuted(newMuted);
    }
  }, [isMuted]);

  // Format duration
  const formatDuration = (seconds: number): string => {
    const mins = Math.floor(seconds / 60);
    const secs = seconds % 60;
    return `${mins}:${secs.toString().padStart(2, '0')}`;
  };

  const spin = rotateAnim.interpolate({
    inputRange: [0, 1],
    outputRange: ['0deg', '360deg'],
  });

  return (
    <SafeAreaView style={styles.container} edges={['top', 'bottom']}>
      {/* Background gradient effect */}
      <View style={styles.backgroundGradient} />

      {/* Top bar with back button */}
      <View style={styles.topBar}>
        <TouchableOpacity style={styles.backButton} onPress={endCall}>
          <Ionicons name="chevron-down" size={28} color={AppColors.white} />
        </TouchableOpacity>
        <View style={styles.topBarCenter}>
          <Text style={styles.encryptedText}>LiveKit + Deepgram</Text>
        </View>
        <TouchableOpacity
          style={styles.logsButton}
          onPress={() => setShowLogs(!showLogs)}
        >
          <Ionicons
            name={showLogs ? 'code-slash' : 'code'}
            size={22}
            color={showLogs ? AppColors.success : AppColors.white}
          />
        </TouchableOpacity>
      </View>

      {/* Main content */}
      <View style={styles.content}>
        {/* Avatar */}
        <Animated.View
          style={[
            styles.avatarContainer,
            {
              transform: [
                { scale: callState === 'active' ? pulseAnim : avatarScale },
                { rotate: callState === 'connecting' ? spin : '0deg' }
              ]
            }
          ]}
        >
          <View style={styles.avatar}>
            <Text style={styles.avatarText}>J</Text>
          </View>
          {callState === 'active' && (
            <View style={styles.activeIndicator} />
          )}
        </Animated.View>

        {/* Name and status */}
        <Text style={styles.name}>Julia AI</Text>
        <Text style={styles.voiceName}>{VOICE_NAME} voice</Text>

        {callState === 'active' ? (
          <View style={styles.statusContainer}>
            <View style={styles.activeDot} />
            <Text style={styles.duration}>{formatDuration(callDuration)}</Text>
          </View>
        ) : (
          <Text style={styles.status}>{statusText}</Text>
        )}

        {/* Status indicator */}
        {callState === 'active' && (
          <Text style={styles.listeningStatus}>{statusText}</Text>
        )}
      </View>

      {/* Debug logs panel */}
      {showLogs && (
        <View style={[styles.logsPanel, logsMinimized && styles.logsPanelMinimized]}>
          <View style={styles.logsPanelHeader}>
            <TouchableOpacity
              style={styles.minimizeButton}
              onPress={() => setLogsMinimized(!logsMinimized)}
            >
              <Ionicons
                name={logsMinimized ? 'chevron-up' : 'chevron-down'}
                size={20}
                color={AppColors.white}
              />
            </TouchableOpacity>
            <Text style={styles.logsPanelTitle}>Logs ({logs.length})</Text>
            <View style={styles.logsPanelButtons}>
              <TouchableOpacity style={styles.copyButton} onPress={copyLogs}>
                <Ionicons name="copy-outline" size={16} color={AppColors.white} />
              </TouchableOpacity>
              <TouchableOpacity
                style={styles.closeLogsButton}
                onPress={() => setShowLogs(false)}
              >
                <Ionicons name="close" size={18} color={AppColors.white} />
              </TouchableOpacity>
            </View>
          </View>
          {!logsMinimized && (
            <ScrollView
              ref={logsScrollRef}
              style={styles.logsScrollView}
              onContentSizeChange={() => logsScrollRef.current?.scrollToEnd()}
            >
              {logs.map((log, index) => (
                <Text key={index} style={styles.logEntry}>{log}</Text>
              ))}
              {logs.length === 0 && (
                <Text style={styles.logEntryEmpty}>Waiting for events...</Text>
              )}
            </ScrollView>
          )}
        </View>
      )}

      {/* Bottom controls */}
      <View style={styles.controls}>
        {/* Mute button */}
        <TouchableOpacity
          style={[styles.controlButton, isMuted && styles.controlButtonActive]}
          onPress={toggleMute}
          disabled={callState !== 'active'}
        >
          <Ionicons
            name={isMuted ? 'mic-off' : 'mic'}
            size={28}
            color={isMuted ? AppColors.error : AppColors.white}
          />
          <Text style={styles.controlLabel}>{isMuted ? 'Unmute' : 'Mute'}</Text>
        </TouchableOpacity>

        {/* End call button */}
        <TouchableOpacity
          style={styles.endCallButton}
          onPress={endCall}
        >
          <Ionicons name="call" size={32} color={AppColors.white} />
        </TouchableOpacity>

        {/* Speaker button (placeholder) */}
        <TouchableOpacity style={styles.controlButton}>
          <Ionicons name="volume-high" size={28} color={AppColors.white} />
          <Text style={styles.controlLabel}>Speaker</Text>
        </TouchableOpacity>
      </View>
    </SafeAreaView>
  );
}

const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: '#1a1a2e',
  },
  backgroundGradient: {
    position: 'absolute',
    top: 0,
    left: 0,
    right: 0,
    height: '50%',
    backgroundColor: '#16213e',
    borderBottomLeftRadius: SCREEN_WIDTH,
    borderBottomRightRadius: SCREEN_WIDTH,
    transform: [{ scaleX: 2 }],
  },
  topBar: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'space-between',
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.sm,
  },
  backButton: {
    width: 44,
    height: 44,
    justifyContent: 'center',
    alignItems: 'center',
  },
  topBarCenter: {
    flex: 1,
    alignItems: 'center',
  },
  encryptedText: {
    fontSize: FontSizes.xs,
    color: 'rgba(255,255,255,0.5)',
  },
  logsButton: {
    width: 44,
    height: 44,
    justifyContent: 'center',
    alignItems: 'center',
  },
  content: {
    flex: 1,
    alignItems: 'center',
    justifyContent: 'center',
    paddingBottom: 100,
  },
  avatarContainer: {
    width: 150,
    height: 150,
    marginBottom: Spacing.xl,
  },
  avatar: {
    width: 150,
    height: 150,
    borderRadius: 75,
    backgroundColor: AppColors.success,
    justifyContent: 'center',
    alignItems: 'center',
    shadowColor: AppColors.success,
    shadowOffset: { width: 0, height: 0 },
    shadowOpacity: 0.5,
    shadowRadius: 20,
    elevation: 10,
  },
  avatarText: {
    fontSize: 64,
    fontWeight: '600',
    color: AppColors.white,
  },
  activeIndicator: {
    position: 'absolute',
    bottom: 10,
    right: 10,
    width: 24,
    height: 24,
    borderRadius: 12,
    backgroundColor: AppColors.success,
    borderWidth: 3,
    borderColor: '#1a1a2e',
  },
  name: {
    fontSize: 32,
    fontWeight: '700',
    color: AppColors.white,
    marginBottom: Spacing.xs,
  },
  voiceName: {
    fontSize: FontSizes.sm,
    color: 'rgba(255,255,255,0.6)',
    marginBottom: Spacing.md,
  },
  statusContainer: {
    flexDirection: 'row',
    alignItems: 'center',
  },
  activeDot: {
    width: 8,
    height: 8,
    borderRadius: 4,
    backgroundColor: AppColors.success,
    marginRight: Spacing.sm,
  },
  duration: {
    fontSize: FontSizes.lg,
    color: AppColors.white,
    fontVariant: ['tabular-nums'],
  },
  status: {
    fontSize: FontSizes.base,
    color: 'rgba(255,255,255,0.7)',
  },
  listeningStatus: {
    fontSize: FontSizes.sm,
    color: 'rgba(255,255,255,0.5)',
    marginTop: Spacing.md,
    fontStyle: 'italic',
  },
  controls: {
    flexDirection: 'row',
    justifyContent: 'space-evenly',
    alignItems: 'center',
    paddingVertical: Spacing.xl,
    paddingHorizontal: Spacing.lg,
  },
  controlButton: {
    alignItems: 'center',
    padding: Spacing.md,
    borderRadius: BorderRadius.full,
    backgroundColor: 'rgba(255,255,255,0.1)',
    width: 70,
    height: 70,
    justifyContent: 'center',
  },
  controlButtonActive: {
    backgroundColor: 'rgba(255,255,255,0.2)',
  },
  controlLabel: {
    fontSize: FontSizes.xs,
    color: AppColors.white,
    marginTop: 4,
  },
  endCallButton: {
    width: 72,
    height: 72,
    borderRadius: 36,
    backgroundColor: AppColors.error,
    justifyContent: 'center',
    alignItems: 'center',
    transform: [{ rotate: '135deg' }],
    shadowColor: AppColors.error,
    shadowOffset: { width: 0, height: 4 },
    shadowOpacity: 0.4,
    shadowRadius: 8,
    elevation: 8,
  },
  // Logs panel styles
  logsPanel: {
    position: 'absolute',
    top: 80,
    left: Spacing.md,
    right: Spacing.md,
    bottom: 180,
    backgroundColor: 'rgba(0,0,0,0.9)',
    borderRadius: BorderRadius.lg,
    padding: Spacing.sm,
    zIndex: 100,
  },
  logsPanelMinimized: {
    bottom: 'auto' as any,
    height: 44,
  },
  logsPanelHeader: {
    flexDirection: 'row',
    justifyContent: 'space-between',
    alignItems: 'center',
    marginBottom: Spacing.sm,
    paddingBottom: Spacing.sm,
    borderBottomWidth: 1,
    borderBottomColor: 'rgba(255,255,255,0.2)',
  },
  minimizeButton: {
    padding: 4,
    marginRight: Spacing.sm,
  },
  logsPanelTitle: {
    flex: 1,
    fontSize: FontSizes.sm,
    fontWeight: '600',
    color: AppColors.white,
  },
  logsPanelButtons: {
    flexDirection: 'row',
    alignItems: 'center',
    gap: 8,
  },
  copyButton: {
    padding: 6,
    backgroundColor: 'rgba(255,255,255,0.15)',
    borderRadius: BorderRadius.sm,
  },
  closeLogsButton: {
    padding: 6,
  },
  logsScrollView: {
    flex: 1,
  },
  logEntry: {
    fontSize: 11,
    fontFamily: Platform.OS === 'ios' ? 'Menlo' : 'monospace',
    color: '#4ade80',
    lineHeight: 16,
    marginBottom: 2,
  },
  logEntryEmpty: {
    fontSize: FontSizes.xs,
    color: 'rgba(255,255,255,0.5)',
    fontStyle: 'italic',
    textAlign: 'center',
    marginTop: Spacing.lg,
  },
});
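
// Usage note (assumed from the header comment and the app/voice-call.tsx location, not confirmed by this diff):
// the chat screen likely opens this as a modal via expo-router, e.g.
//   router.push('/voice-call');
// and the call tears itself down (room disconnect + AudioSession stop) on unmount or via endCall().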