/**
 * Debug Screen - Voice Call Testing with Detailed Logs
 *
 * All-in-one screen for testing Julia AI voice:
 * - Start/End call buttons
 * - Speaker/Earpiece toggle with logging
 * - Real-time logs of all LiveKit events
 * - Copy logs button
 * - Works on both iOS and Android
 */
import React, { useState, useEffect, useRef, useCallback } from 'react';
import {
  View,
  Text,
  StyleSheet,
  FlatList,
  TouchableOpacity,
  Platform,
  Share,
  AppState,
  AppStateStatus,
  TextInput,
  KeyboardAvoidingView,
} from 'react-native';
import { SafeAreaView } from 'react-native-safe-area-context';
import { Ionicons } from '@expo/vector-icons';
import * as Clipboard from 'expo-clipboard';
import { activateKeepAwakeAsync, deactivateKeepAwake } from 'expo-keep-awake';
import type { Room as RoomType } from 'livekit-client';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import { getToken, VOICE_NAME } from '@/services/livekitService';
import { api } from '@/services/api';
import { useBeneficiary } from '@/contexts/BeneficiaryContext';
import {
  configureAudioForVoiceCall,
  stopAudioSession,
} from '@/utils/audioSession';
import {
  startVoiceCallService,
  stopVoiceCallService,
  checkAndPromptBatteryOptimization,
  requestNotificationPermission,
} from '@/utils/androidVoiceService';
import Constants from 'expo-constants';

const APP_VERSION = Constants.expoConfig?.version ?? '?.?.?';

type LogEntry = {
  id: string;
  time: string;
  message: string;
  type: 'info' | 'success' | 'error' | 'event';
};

type CallState = 'idle' | 'connecting' | 'connected' | 'ending';

export default function DebugScreen() {
  const [logs, setLogs] = useState<LogEntry[]>([]);
  const [callState, setCallState] = useState<CallState>('idle');
  const [callDuration, setCallDuration] = useState(0);
  const [agentState, setAgentState] = useState('—'); // listening/thinking/speaking
  const [lastUserText, setLastUserText] = useState(''); // Last recognized user utterance
  const [lastAgentText, setLastAgentText] = useState(''); // Last agent reply
  const [micLevel, setMicLevel] = useState(0); // Microphone level 0-100
  const [deploymentId, setDeploymentIdState] = useState(''); // Custom deployment ID
  const [loadingBeneficiary, setLoadingBeneficiary] = useState(true);
  const [accumulateResponses, setAccumulateResponses] = useState(true); // Accumulate chunks into full responses

  const flatListRef = useRef<FlatList>(null);
  // Refs for accumulating transcription chunks
  const accumulatedUserTextRef = useRef('');
  const accumulatedAgentTextRef = useRef('');
  const lastUserSegmentIdRef = useRef<string | null>(null);
  const lastAgentSegmentIdRef = useRef<string | null>(null);
  const roomRef = useRef<RoomType | null>(null);
  const callStartTimeRef = useRef<number | null>(null);
  const appStateRef = useRef<AppStateStatus>(AppState.currentState);

  const { currentBeneficiary, setDebugDeploymentId } = useBeneficiary();

  // Sync deploymentId with context for voice-call.tsx to use
  const setDeploymentId = useCallback((id: string) => {
    setDeploymentIdState(id);
    // Update context so voice-call.tsx can access it
    setDebugDeploymentId(id.trim() || null);
  }, [setDebugDeploymentId]);
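  // Deployment ID resolution order: prefer currentBeneficiary from
  // BeneficiaryContext, otherwise fall back to the first beneficiary returned
  // by api.getAllBeneficiaries(). Either default can still be overridden
  // manually in the input field below.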
  // Load default deployment ID from first beneficiary
  useEffect(() => {
    const loadDefaultDeploymentId = async () => {
      try {
        // First check if currentBeneficiary is available
        if (currentBeneficiary?.id) {
          const id = currentBeneficiary.id.toString();
          setDeploymentIdState(id);
          setDebugDeploymentId(id); // Also set in context
          setLoadingBeneficiary(false);
          return;
        }
        // Otherwise load from API
        const response = await api.getAllBeneficiaries();
        if (response.ok && response.data && response.data.length > 0) {
          const firstBeneficiary = response.data[0];
          const id = firstBeneficiary.id.toString();
          setDeploymentIdState(id);
          setDebugDeploymentId(id); // Also set in context
        }
      } catch (error) {
        console.error('[Debug] Failed to load beneficiary:', error);
      } finally {
        setLoadingBeneficiary(false);
      }
    };
    loadDefaultDeploymentId();
  }, [currentBeneficiary, setDebugDeploymentId]);

  // Add log entry
  const log = useCallback((message: string, type: LogEntry['type'] = 'info') => {
    const time = new Date().toLocaleTimeString('en-US', { hour12: false, hour: '2-digit', minute: '2-digit', second: '2-digit' });
    const ms = String(new Date().getMilliseconds()).padStart(3, '0');
    setLogs(prev => [...prev, {
      id: `${Date.now()}-${Math.random()}`,
      time: `${time}.${ms}`,
      message,
      type,
    }]);
  }, []);

  // Clear logs
  const clearLogs = useCallback(() => {
    setLogs([]);
  }, []);

  // Copy logs to clipboard
  const copyLogs = useCallback(async () => {
    const text = logs.map(l => `[${l.time}] ${l.message}`).join('\n');
    await Clipboard.setStringAsync(text);
    log('Logs copied to clipboard!', 'success');
  }, [logs, log]);

  // Share logs
  const shareLogs = useCallback(async () => {
    const text = logs.map(l => `[${l.time}] ${l.message}`).join('\n');
    try {
      await Share.share({ message: text, title: 'Voice Debug Logs' });
    } catch (e) {
      log(`Share failed: ${e}`, 'error');
    }
  }, [logs, log]);

  // Auto-scroll to bottom
  useEffect(() => {
    if (logs.length > 0) {
      setTimeout(() => flatListRef.current?.scrollToEnd({ animated: true }), 100);
    }
  }, [logs]);

  // Call duration timer
  useEffect(() => {
    if (callState !== 'connected') return;
    const interval = setInterval(() => {
      if (callStartTimeRef.current) {
        setCallDuration(Math.floor((Date.now() - callStartTimeRef.current) / 1000));
      }
    }, 1000);
    return () => clearInterval(interval);
  }, [callState]);

  // Handle app background/foreground
  useEffect(() => {
    const subscription = AppState.addEventListener('change', (nextAppState) => {
      if (appStateRef.current.match(/inactive|background/) && nextAppState === 'active') {
        log('App returned to foreground', 'event');
      } else if (appStateRef.current === 'active' && nextAppState.match(/inactive|background/)) {
        log('App went to background - call continues', 'event');
      }
      appStateRef.current = nextAppState;
    });
    return () => subscription.remove();
  }, [log]);
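  // Call setup sequence, mirrored by the "Step N" log lines below:
  //   1. register WebRTC globals        2. import livekit-client
  //   3. configure the audio session    4. request a LiveKit token
  //   5. create the Room                6. attach event listeners
  //   7. connect (7.5 start playback)   8. enable the microphone
  //   9. dump local audio track info
  // Each step logs success or failure so a broken call can be localized
  // from the copied logs alone.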
  // Start call
  const startCall = useCallback(async () => {
    if (callState !== 'idle') return;
    clearLogs();
    setCallState('connecting');
    setCallDuration(0);
    callStartTimeRef.current = null;

    try {
      log('=== STARTING VOICE CALL ===', 'info');
      log(`Platform: ${Platform.OS} ${Platform.Version}`, 'info');

      // Android: Request notification permission and check battery optimization
      if (Platform.OS === 'android') {
        log('Android: Requesting notification permission...', 'info');
        const notifPermission = await requestNotificationPermission();
        log(`Notification permission: ${notifPermission ? 'granted' : 'denied'}`, notifPermission ? 'success' : 'info');

        log('Android: Checking battery optimization...', 'info');
        const canProceed = await checkAndPromptBatteryOptimization();
        if (!canProceed) {
          log('User went to battery settings - call postponed', 'info');
          setCallState('idle');
          return;
        }
        log('Battery optimization check passed', 'success');
      }

      // Keep screen awake
      await activateKeepAwakeAsync('voiceCall').catch(() => {});
      log('Screen keep-awake activated', 'info');

      // Step 1: Register WebRTC globals
      log('Step 1: Importing @livekit/react-native...', 'info');
      const { registerGlobals } = await import('@livekit/react-native');
      if (typeof global.RTCPeerConnection === 'undefined') {
        log('Registering WebRTC globals...', 'info');
        registerGlobals();
        log('WebRTC globals registered', 'success');
      } else {
        log('WebRTC globals already registered', 'info');
      }

      // Step 2: Import livekit-client
      log('Step 2: Importing livekit-client...', 'info');
      const { Room, RoomEvent, ConnectionState, Track } = await import('livekit-client');
      log('livekit-client imported', 'success');

      // Step 3: Configure AudioSession (iOS + Android)
      log(`Step 3: Configuring AudioSession for ${Platform.OS}...`, 'info');
      try {
        await configureAudioForVoiceCall();
        log(`AudioSession configured for ${Platform.OS}`, 'success');
      } catch (audioErr: any) {
        log(`AudioSession config error: ${audioErr?.message || audioErr}`, 'error');
        // Continue anyway - might still work
      }

      // Step 4: Get token from server
      log('Step 4: Requesting token from server...', 'info');
      log(`Token server: wellnuo.smartlaunchhub.com/julia/token`, 'info');

      // Pass the deployment ID if one was specified
      const beneficiaryData = deploymentId.trim()
        ? {
            deploymentId: deploymentId.trim(),
            beneficiaryNamesDict: {},
          }
        : undefined;
      if (beneficiaryData) {
        log(`📋 Using custom Deployment ID: ${deploymentId}`, 'success');
      } else {
        log(`📋 No Deployment ID specified (default mode)`, 'info');
      }

      const result = await getToken(`user-${Date.now()}`, beneficiaryData);
      if (!result.success || !result.data) {
        throw new Error(result.error || 'Failed to get token');
      }
      const { token, wsUrl, roomName } = result.data;
      log(`Token received`, 'success');
      log(`Room: ${roomName}`, 'info');
      log(`WebSocket URL: ${wsUrl}`, 'info');

      // Step 5: Create room and setup listeners
      log('Step 5: Creating Room instance...', 'info');
      const newRoom = new Room();
      roomRef.current = newRoom;
      log('Room instance created', 'success');
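      // The listeners below fall into three groups: connection lifecycle
      // (ConnectionStateChanged flips the screen between 'connected' and
      // 'idle' and stamps callStartTimeRef for the duration timer),
      // remote-participant activity, and data/transcription payloads.
      // Every handler only logs and updates local UI state.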
      // Setup ALL event listeners
      log('Step 6: Setting up event listeners...', 'info');

      newRoom.on(RoomEvent.ConnectionStateChanged, (state: any) => {
        log(`EVENT: ConnectionStateChanged → ${state}`, 'event');
        if (state === ConnectionState.Connected) {
          setCallState('connected');
          callStartTimeRef.current = Date.now();
        } else if (state === ConnectionState.Disconnected) {
          setCallState('idle');
        }
      });

      newRoom.on(RoomEvent.Connected, () => {
        log('EVENT: Connected to room', 'success');
      });

      newRoom.on(RoomEvent.Disconnected, (reason?: any) => {
        log(`EVENT: Disconnected. Reason: ${reason || 'unknown'}`, 'event');
      });

      newRoom.on(RoomEvent.Reconnecting, () => {
        log('EVENT: Reconnecting...', 'event');
      });

      newRoom.on(RoomEvent.Reconnected, () => {
        log('EVENT: Reconnected', 'success');
      });

      newRoom.on(RoomEvent.ParticipantConnected, (participant: any) => {
        log(`👋 PARTICIPANT CONNECTED: ${participant.identity}`, 'success');

        // Subscribe to this participant's events (for the Julia agent)
        participant.on('isSpeakingChanged', (speaking: boolean) => {
          if (speaking) {
            log(`🔊 ${participant.identity} STARTED SPEAKING`, 'success');
            setAgentState('speaking');
          } else {
            log(`🔇 ${participant.identity} stopped speaking`, 'info');
          }
        });
        participant.on('trackMuted', (pub: any) => {
          log(`🔇 ${participant.identity} muted ${pub.kind}`, 'event');
        });
        participant.on('trackUnmuted', (pub: any) => {
          log(`🔊 ${participant.identity} unmuted ${pub.kind}`, 'event');
        });
        participant.on('attributesChanged', (attrs: any) => {
          log(`📋 ${participant.identity} ATTRIBUTES:`, 'event');
          Object.entries(attrs || {}).forEach(([k, v]) => {
            log(`  ${k}: ${v}`, 'info');
            if (k === 'lk.agent.state') {
              setAgentState(String(v));
            }
          });
        });
        participant.on('transcriptionReceived', (segments: any[]) => {
          log(`🤖 ${participant.identity} TRANSCRIPTION:`, 'success');
          segments.forEach((seg: any, i: number) => {
            const text = seg.text || seg.final || '';
            log(`  [${i}] "${text}"`, 'info');
            if (text) setLastAgentText(text);
          });
        });

        // Show the participant's current attributes
        const attrs = participant.attributes || {};
        if (Object.keys(attrs).length > 0) {
          log(`  Initial attributes: ${JSON.stringify(attrs)}`, 'info');
        }
      });

      newRoom.on(RoomEvent.ParticipantDisconnected, (participant: any) => {
        log(`👋 PARTICIPANT DISCONNECTED: ${participant.identity}`, 'event');
      });

      newRoom.on(RoomEvent.TrackSubscribed, (track: any, publication: any, participant: any) => {
        log(`EVENT: Track subscribed: ${track.kind} from ${participant.identity}`, 'event');
        if (track.kind === Track.Kind.Audio) {
          log('Audio track from Julia AI - should hear voice now', 'success');
        }
      });

      newRoom.on(RoomEvent.TrackUnsubscribed, (track: any, publication: any, participant: any) => {
        log(`EVENT: Track unsubscribed: ${track.kind} from ${participant.identity}`, 'event');
      });

      newRoom.on(RoomEvent.TrackMuted, (publication: any, participant: any) => {
        log(`EVENT: Track muted by ${participant.identity}`, 'event');
      });

      newRoom.on(RoomEvent.TrackUnmuted, (publication: any, participant: any) => {
        log(`EVENT: Track unmuted by ${participant.identity}`, 'event');
      });

      newRoom.on(RoomEvent.ActiveSpeakersChanged, (speakers: any[]) => {
        if (speakers.length > 0) {
          log(`EVENT: Active speakers: ${speakers.map(s => s.identity).join(', ')}`, 'event');
        }
      });

      newRoom.on(RoomEvent.DataReceived, (payload: any, participant: any, kind: any, topic: any) => {
        log(`📩 DATA RECEIVED from ${participant?.identity || 'unknown'}`, 'event');
        log(`  kind: ${kind}, topic: ${topic || 'none'}`, 'info');
        try {
          const text = new TextDecoder().decode(payload);
          const data = JSON.parse(text);
          log(`  type: ${data.type || 'unknown'}`, 'info');

          // Detailed logging for each known message type
          if (data.type === 'transcript' || data.type === 'transcription') {
            log(`  🗣️ TRANSCRIPT: role=${data.role}`, 'success');
            const text = data.text || data.content || '';
            log(`  📝 TEXT: "${text}"`, 'success');
            // Update the UI
            if (data.role === 'user') {
              setLastUserText(text);
            } else if (data.role === 'assistant' || data.role === 'agent') {
              setLastAgentText(text);
            }
          } else if (data.type === 'state' || data.type === 'agent_state') {
            const stateValue = data.state || JSON.stringify(data);
            log(`  🤖 AGENT STATE: ${stateValue}`, 'success');
            setAgentState(stateValue);
          } else if (data.type === 'function_call' || data.type === 'tool_call') {
            log(`  🔧 FUNCTION CALL: ${data.name || data.function || JSON.stringify(data)}`, 'event');
          } else if (data.type === 'function_result' || data.type === 'tool_result') {
            log(`  ✅ FUNCTION RESULT: ${JSON.stringify(data.result || data).substring(0, 200)}`, 'event');
          } else {
            // Show the full JSON for unknown types
            log(`  📦 FULL DATA: ${JSON.stringify(data)}`, 'info');
          }
        } catch (e) {
          // Try to show the payload as plain text
          try {
            const text = new TextDecoder().decode(payload);
            log(`  📄 RAW TEXT: "${text.substring(0, 300)}"`, 'info');
          } catch {
            log(`  📎 BINARY DATA: ${payload.byteLength} bytes`, 'info');
          }
        }
      });

      newRoom.on(RoomEvent.AudioPlaybackStatusChanged, () => {
        log(`EVENT: AudioPlaybackStatusChanged - canPlay: ${newRoom.canPlaybackAudio}`, 'event');
      });

      newRoom.on(RoomEvent.MediaDevicesError, (error: any) => {
        log(`EVENT: MediaDevicesError: ${error?.message || error}`, 'error');
      });

      newRoom.on(RoomEvent.RoomMetadataChanged, (metadata: string) => {
        log(`EVENT: RoomMetadataChanged: ${metadata}`, 'event');
      });
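      // Transcription note: LiveKit delivers STT results as segments that are
      // usually updated in place - interim text first, then a final revision.
      // The two log modes only differ in how much of that stream is printed:
      // "Clean" logs one line per finished utterance, "Verbose" logs every
      // incoming chunk together with its segment metadata.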
      // ===========================================
      // TRANSCRIPTION - recognized text (STT)
      // ===========================================
      newRoom.on(RoomEvent.TranscriptionReceived, (segments: any[], participant: any) => {
        const isUser = participant?.identity === newRoom.localParticipant.identity;
        const who = isUser ? '👤 USER' : '🤖 AGENT';

        segments.forEach((segment: any, idx: number) => {
          const text = segment.text || segment.final || '';
          const segmentId = segment.id || `seg-${Date.now()}`;
          const isFinalFlag = segment.final !== undefined;

          if (accumulateResponses) {
            // === ACCUMULATION MODE: only show final, complete responses ===
            if (isUser) {
              // New segment or continuation of the current one
              if (lastUserSegmentIdRef.current !== segmentId) {
                // If a previous segment was still pending, log it first
                if (accumulatedUserTextRef.current && lastUserSegmentIdRef.current) {
                  log(`👤 USER FINAL: "${accumulatedUserTextRef.current}"`, 'success');
                }
                accumulatedUserTextRef.current = text;
                lastUserSegmentIdRef.current = segmentId;
              } else {
                // Update the current segment
                accumulatedUserTextRef.current = text;
              }
              // If the segment is final, log it immediately
              if (isFinalFlag && text) {
                log(`👤 USER: "${text}"`, 'success');
                setLastUserText(text);
                accumulatedUserTextRef.current = '';
                lastUserSegmentIdRef.current = null;
              }
            } else {
              // AGENT
              if (lastAgentSegmentIdRef.current !== segmentId) {
                if (accumulatedAgentTextRef.current && lastAgentSegmentIdRef.current) {
                  log(`🤖 AGENT FINAL: "${accumulatedAgentTextRef.current}"`, 'success');
                }
                accumulatedAgentTextRef.current = text;
                lastAgentSegmentIdRef.current = segmentId;
              } else {
                accumulatedAgentTextRef.current = text;
              }
              if (isFinalFlag && text) {
                log(`🤖 JULIA: "${text}"`, 'success');
                setLastAgentText(text);
                accumulatedAgentTextRef.current = '';
                lastAgentSegmentIdRef.current = null;
              }
            }
          } else {
            // === FULL LOGGING MODE: show every chunk ===
            const finalLabel = isFinalFlag ? '(FINAL)' : '(interim)';
            log(`🎤 TRANSCRIPTION from ${who} (${participant?.identity || 'unknown'})`, 'success');
            log(`  [${idx}] ${finalLabel}: "${text}"`, 'event');
            if (segment.id) log(`  segment.id: ${segment.id}`, 'info');
            if (segment.firstReceivedTime) log(`  firstReceivedTime: ${segment.firstReceivedTime}`, 'info');
            if (segment.lastReceivedTime) log(`  lastReceivedTime: ${segment.lastReceivedTime}`, 'info');

            // Update the UI with the latest text
            if (text && (isFinalFlag || !segment.final)) {
              if (isUser) {
                setLastUserText(text);
              } else {
                setLastAgentText(text);
              }
            }
          }
        });
      });

      // ===========================================
      // PARTICIPANT ATTRIBUTES - agent state
      // ===========================================
      newRoom.on(RoomEvent.ParticipantAttributesChanged, (changedAttributes: any, participant: any) => {
        log(`👤 ATTRIBUTES CHANGED for ${participant?.identity || 'unknown'}`, 'event');
        Object.entries(changedAttributes || {}).forEach(([key, value]) => {
          log(`  ${key}: ${value}`, 'info');
          // Most important: lk.agent.state reports listening/thinking/speaking
          if (key === 'lk.agent.state') {
            log(`  🤖 AGENT STATE: ${value}`, 'success');
            // Update the UI
            setAgentState(String(value));
          }
        });
        // Show all current attributes
        const attrs = participant?.attributes || {};
        if (Object.keys(attrs).length > 0) {
          log(`  All attributes: ${JSON.stringify(attrs)}`, 'info');
        }
      });

      // ===========================================
      // SIGNAL CONNECTED/RECONNECTING
      // ===========================================
      newRoom.on(RoomEvent.SignalConnected, () => {
        log('EVENT: SignalConnected - WebSocket connected', 'success');
      });
      newRoom.on(RoomEvent.SignalReconnecting, () => {
        log('EVENT: SignalReconnecting - signal reconnecting...', 'event');
      });

      // ===========================================
      // LOCAL TRACK UNPUBLISHED
      // ===========================================
      newRoom.on(RoomEvent.LocalTrackUnpublished, (publication: any, participant: any) => {
        log(`EVENT: LocalTrackUnpublished - ${publication.trackSid}`, 'event');
      });
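      // The handlers below are attached purely for diagnostics: they only log.
      // In an audio-only agent call many of these events (video playback, SIP
      // DTMF, chat, encryption) may never fire, but wiring them up means any
      // unexpected activity still shows up in the copied logs.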
      // ===========================================
      // ADDITIONAL EVENTS FOR FULL DEBUGGING
      // ===========================================

      // Connection quality
      newRoom.on(RoomEvent.ConnectionQualityChanged, (quality: any, participant: any) => {
        const qualityEmoji = quality === 'excellent' ? '🟢' : quality === 'good' ? '🟡' : '🔴';
        log(`${qualityEmoji} CONNECTION QUALITY: ${participant?.identity || 'local'} → ${quality}`, 'event');
      });

      // Devices changed (microphone/camera connected or disconnected)
      newRoom.on(RoomEvent.MediaDevicesChanged, () => {
        log(`🔌 MEDIA DEVICES CHANGED - device list updated`, 'event');
      });

      // Active device changed
      newRoom.on(RoomEvent.ActiveDeviceChanged, (kind: any, deviceId: any) => {
        log(`🎛️ ACTIVE DEVICE CHANGED: ${kind} → ${deviceId}`, 'event');
      });

      // Track subscription failure
      newRoom.on(RoomEvent.TrackSubscriptionFailed, (trackSid: any, participant: any, reason: any) => {
        log(`❌ TRACK SUBSCRIPTION FAILED: ${trackSid} from ${participant?.identity}`, 'error');
        log(`  Reason: ${reason}`, 'error');
      });

      // Track published (when the agent starts speaking)
      newRoom.on(RoomEvent.TrackPublished, (publication: any, participant: any) => {
        log(`📢 TRACK PUBLISHED by ${participant?.identity}: ${publication.kind} (${publication.source})`, 'event');
      });

      // Track unpublished
      newRoom.on(RoomEvent.TrackUnpublished, (publication: any, participant: any) => {
        log(`📤 TRACK UNPUBLISHED by ${participant?.identity}: ${publication.kind}`, 'event');
      });

      // Participant metadata changed
      newRoom.on(RoomEvent.ParticipantMetadataChanged, (metadata: any, participant: any) => {
        log(`📋 PARTICIPANT METADATA: ${participant?.identity}`, 'event');
        try {
          const parsed = JSON.parse(metadata || '{}');
          log(`  ${JSON.stringify(parsed)}`, 'info');
        } catch {
          log(`  ${metadata}`, 'info');
        }
      });

      // Participant name changed
      newRoom.on(RoomEvent.ParticipantNameChanged, (name: any, participant: any) => {
        log(`👤 PARTICIPANT NAME: ${participant?.identity} → ${name}`, 'event');
      });
      // Recording status (if the room is being recorded)
      newRoom.on(RoomEvent.RecordingStatusChanged, (recording: any) => {
        log(`⏺️ RECORDING STATUS: ${recording ? 'RECORDING' : 'NOT RECORDING'}`, recording ? 'success' : 'info');
      });

      // Track stream state changed
      newRoom.on(RoomEvent.TrackStreamStateChanged, (publication: any, streamState: any, participant: any) => {
        log(`📊 TRACK STREAM STATE: ${participant?.identity}/${publication.trackSid} → ${streamState}`, 'event');
      });

      // Track subscription permission changed
      newRoom.on(RoomEvent.TrackSubscriptionPermissionChanged, (publication: any, status: any, participant: any) => {
        log(`🔐 TRACK PERMISSION: ${participant?.identity}/${publication.trackSid} → ${status}`, 'event');
      });

      // Track subscription status changed
      newRoom.on(RoomEvent.TrackSubscriptionStatusChanged, (publication: any, status: any, participant: any) => {
        log(`📶 TRACK SUBSCRIPTION: ${participant?.identity}/${publication.trackSid} → ${status}`, 'event');
      });

      // Participant permissions changed
      newRoom.on(RoomEvent.ParticipantPermissionsChanged, (prevPermissions: any, participant: any) => {
        log(`🔑 PARTICIPANT PERMISSIONS CHANGED: ${participant?.identity}`, 'event');
        log(`  New permissions: ${JSON.stringify(participant?.permissions || {})}`, 'info');
      });

      // ChatMessage - messages in the room chat
      newRoom.on(RoomEvent.ChatMessage, (message: any, participant: any) => {
        log(`💬 CHAT MESSAGE from ${participant?.identity || 'system'}:`, 'success');
        log(`  ${message.message || JSON.stringify(message)}`, 'info');
      });

      // SIP DTMF - telephone key tones
      newRoom.on(RoomEvent.SipDTMFReceived, (dtmf: any, participant: any) => {
        log(`📞 SIP DTMF: ${dtmf.code} from ${participant?.identity}`, 'event');
      });

      // Microphone silence detection
      newRoom.on(RoomEvent.LocalAudioSilenceDetected, (publication: any) => {
        log(`🔇 LOCAL AUDIO SILENCE DETECTED - microphone is silent`, 'event');
      });

      // DataChannel buffer status changes
      newRoom.on(RoomEvent.DCBufferStatusChanged, (isLow: any, kind: any) => {
        log(`📦 DC BUFFER: ${kind} buffer is ${isLow ? 'LOW' : 'OK'}`, isLow ? 'event' : 'info');
      });

      // Performance metrics
      newRoom.on(RoomEvent.MetricsReceived, (metrics: any) => {
        log(`📈 METRICS RECEIVED:`, 'info');
        if (metrics.audioStats) {
          log(`  Audio: bitrate=${metrics.audioStats.bitrate}, packetsLost=${metrics.audioStats.packetsLost}`, 'info');
        }
        if (metrics.videoStats) {
          log(`  Video: bitrate=${metrics.videoStats.bitrate}, fps=${metrics.videoStats.fps}`, 'info');
        }
      });

      // Video playback status (if any video is present)
      newRoom.on(RoomEvent.VideoPlaybackStatusChanged, () => {
        log(`🎬 VIDEO PLAYBACK STATUS CHANGED`, 'event');
      });

      // Encryption error
      newRoom.on(RoomEvent.EncryptionError, (error: any) => {
        log(`🔒 ENCRYPTION ERROR: ${error?.message || error}`, 'error');
      });
      // Participant encryption status
      newRoom.on(RoomEvent.ParticipantEncryptionStatusChanged, (encrypted: any, participant: any) => {
        log(`🔐 ENCRYPTION STATUS: ${participant?.identity} → ${encrypted ? 'encrypted' : 'not encrypted'}`, 'event');
      });

      // Room moved to another server (rare)
      newRoom.on(RoomEvent.Moved, (room: any) => {
        log(`🚀 ROOM MOVED to new server`, 'event');
      });

      // Participant became active
      newRoom.on(RoomEvent.ParticipantActive, (participant: any) => {
        log(`✅ PARTICIPANT ACTIVE: ${participant?.identity}`, 'success');
        // Make sure this is the Julia agent (not the local participant)
        const isAgent = participant?.identity?.startsWith('agent-') ||
          (participant?.attributes?.['lk.agent_name'] === 'julia-ai');
        if (isAgent) {
          log(``, 'success');
          log(`🟢🟢🟢 AGENT READY 🟢🟢🟢`, 'success');
          log(`🔊 Julia will now speak greeting...`, 'success');
          log(``, 'success');
        }
      });

      log('Event listeners set up (FULL DEBUG MODE)', 'success');

      // Step 7: Connect to room
      log('Step 7: Connecting to LiveKit room...', 'info');
      await newRoom.connect(wsUrl, token, { autoSubscribe: true });
      log('Connected to room', 'success');

      // Step 7.5: Start audio playback (required for iOS)
      log('Step 7.5: Starting audio playback...', 'info');
      await newRoom.startAudio();
      log(`Audio playback started, canPlay: ${newRoom.canPlaybackAudio}`, 'success');

      // Step 8: Enable microphone
      log('Step 8: Enabling microphone...', 'info');
      await newRoom.localParticipant.setMicrophoneEnabled(true);
      log('Microphone enabled', 'success');

      // Step 9: Log local audio track info
      log('Step 9: Checking local audio track...', 'info');
      const localAudioTracks = newRoom.localParticipant.audioTrackPublications;
      log(`Local audio publications: ${localAudioTracks.size}`, 'info');
      localAudioTracks.forEach((pub: any) => {
        log(`Local audio track: ${pub.trackSid}, muted: ${pub.isMuted}, source: ${pub.source}`, 'info');
        if (pub.track) {
          log(`Track mediaStreamTrack: ${pub.track.mediaStreamTrack ? 'exists' : 'NULL'}`, 'info');
          log(`Track enabled: ${pub.track.mediaStreamTrack?.enabled}`, 'info');
        }
      });

      // ===========================================
      // LOCAL PARTICIPANT EVENTS - my microphone
      // ===========================================
      newRoom.localParticipant.on('localTrackPublished', (pub: any) => {
        log(`🎤 MY TRACK PUBLISHED: ${pub.kind} sid=${pub.trackSid}`, 'success');
      });
      newRoom.localParticipant.on('localTrackUnpublished', (pub: any) => {
        log(`🎤 MY TRACK UNPUBLISHED: ${pub.kind} sid=${pub.trackSid}`, 'event');
      });

      // isSpeakingChanged - when I start/stop speaking
      newRoom.localParticipant.on('isSpeakingChanged', (speaking: boolean) => {
        if (speaking) {
          log(`🗣️ >>> I STARTED SPEAKING <<<`, 'success');
        } else {
          log(`🤐 I stopped speaking`, 'info');
        }
      });

      // My track was muted/unmuted
      newRoom.localParticipant.on('trackMuted', (pub: any) => {
        log(`🔇 MY TRACK MUTED: ${pub.kind}`, 'event');
      });
      newRoom.localParticipant.on('trackUnmuted', (pub: any) => {
        log(`🔊 MY TRACK UNMUTED: ${pub.kind}`, 'success');
      });

      // Media device error on my participant
      newRoom.localParticipant.on('mediaDevicesError', (error: any) => {
        log(`❌ MY MEDIA DEVICE ERROR: ${error?.message || error}`, 'error');
      });

      // Audio stream acquired
      newRoom.localParticipant.on('audioStreamAcquired', () => {
        log(`🎙️ AUDIO STREAM ACQUIRED - microphone captured!`, 'success');
      });

      // Transcription on my own track
      newRoom.localParticipant.on('transcriptionReceived', (segments: any[]) => {
        log(`🎤 MY TRANSCRIPTION (${segments.length} segments):`, 'success');
        segments.forEach((seg: any, i: number) => {
          log(`  [${i}] "${seg.text || seg.final}"`, 'info');
        });
      });

      // Listen when I become an active speaker (means mic is working)
      newRoom.on(RoomEvent.ActiveSpeakersChanged, (speakers: any[]) => {
        const iAmSpeaking = speakers.some(s => s.identity === newRoom.localParticipant.identity);
        if (iAmSpeaking) {
          log(`🎙️ *** I AM SPEAKING - MIC WORKS! ***`, 'success');
        }
      });

      log(`Local participant: ${newRoom.localParticipant.identity}`, 'info');
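      // Mic level is polled rather than event-driven: every 200 ms the local
      // microphone publication is inspected and its audioLevel (0..1) is
      // mapped onto the 0-100 meter in the UI. The property is read through
      // an `any` cast and checked for undefined, since it may not be
      // populated until audio frames actually start flowing.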
      // ===========================================
      // AUDIO LEVEL MONITORING - periodic microphone level check
      // ===========================================
      let audioLevelInterval: ReturnType<typeof setInterval> | null = null;
      let lastLoggedLevel = -1;

      const startAudioLevelMonitoring = () => {
        if (audioLevelInterval) return;
        audioLevelInterval = setInterval(() => {
          try {
            // Find the microphone track among all publications
            const audioTracks = newRoom.localParticipant.audioTrackPublications;
            let localAudioTrack: any = null;
            audioTracks.forEach((pub: any) => {
              if (pub.source === 'microphone' || pub.kind === 'audio') {
                localAudioTrack = pub;
              }
            });

            if (localAudioTrack?.track) {
              // Read the audio level via the LiveKit API
              const audioLevel = (localAudioTrack.track as any).audioLevel;
              if (audioLevel !== undefined) {
                const roundedLevel = Math.round(audioLevel * 100);
                // Update the UI
                setMicLevel(roundedLevel);
                // Only log when the level has changed noticeably
                if (Math.abs(roundedLevel - lastLoggedLevel) > 5) {
                  lastLoggedLevel = roundedLevel;
                  const bars = '▓'.repeat(Math.min(20, Math.round(audioLevel * 20))) +
                    '░'.repeat(Math.max(0, 20 - Math.round(audioLevel * 20)));
                  log(`🎚️ MIC LEVEL: [${bars}] ${roundedLevel}%`, audioLevel > 0.1 ? 'success' : 'info');
                }
              }
            }
          } catch (e) {
            // Ignore errors
          }
        }, 200); // Poll every 200 ms for a smooth UI
      };

      // Start audio level monitoring once connected
      newRoom.on(RoomEvent.Connected, () => {
        log('Starting audio level monitoring...', 'info');
        setTimeout(startAudioLevelMonitoring, 1000);
      });

      // Stop it on disconnect
      newRoom.on(RoomEvent.Disconnected, () => {
        if (audioLevelInterval) {
          clearInterval(audioLevelInterval);
          audioLevelInterval = null;
        }
      });

      // Android: Start foreground service to keep call alive in background
      if (Platform.OS === 'android') {
        log('Android: Starting foreground service...', 'info');
        try {
          await startVoiceCallService();
          log('Foreground service started - call will continue in background', 'success');
        } catch (fgErr: any) {
          log(`Foreground service error: ${fgErr?.message || fgErr}`, 'error');
          // Continue anyway - call will still work, just may be killed in background
        }
      }

      log('=== CALL ACTIVE ===', 'success');
    } catch (err: any) {
      log(`ERROR: ${err?.message || err}`, 'error');
      log(`Stack: ${err?.stack?.substring(0, 200) || 'no stack'}`, 'error');
      setCallState('idle');
      deactivateKeepAwake('voiceCall');
    }
  }, [callState, deploymentId, log, clearLogs]);
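  // Teardown mirrors setup in reverse: leave the LiveKit room first, then stop
  // the Android foreground service, release the audio session, and finally
  // drop the keep-awake lock. Each step logs its own failure so a partial
  // cleanup is still visible in the debug output.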
  // End call
  const endCall = useCallback(async () => {
    if (callState === 'idle') return;
    log('=== ENDING CALL ===', 'info');
    setCallState('ending');

    try {
      if (roomRef.current) {
        log('Disconnecting from room...', 'info');
        await roomRef.current.disconnect();
        roomRef.current = null;
        log('Disconnected from room', 'success');
      }

      // Android: Stop foreground service
      if (Platform.OS === 'android') {
        log('Android: Stopping foreground service...', 'info');
        try {
          await stopVoiceCallService();
          log('Foreground service stopped', 'success');
        } catch (fgErr: any) {
          log(`Foreground service stop error: ${fgErr?.message || fgErr}`, 'error');
        }
      }

      // Stop AudioSession (iOS + Android)
      log(`Stopping AudioSession on ${Platform.OS}...`, 'info');
      try {
        await stopAudioSession();
        log('AudioSession stopped', 'success');
      } catch (audioErr: any) {
        log(`AudioSession stop error: ${audioErr?.message || audioErr}`, 'error');
      }

      deactivateKeepAwake('voiceCall');
      log('Screen keep-awake deactivated', 'info');
    } catch (err: any) {
      log(`Error during cleanup: ${err?.message || err}`, 'error');
    }

    setCallState('idle');
    log('=== CALL ENDED ===', 'info');
  }, [callState, log]);

  // Format duration
  const formatDuration = (seconds: number): string => {
    const mins = Math.floor(seconds / 60);
    const secs = seconds % 60;
    return `${mins}:${secs.toString().padStart(2, '0')}`;
  };

  // Get log color
  const getLogColor = (type: LogEntry['type']): string => {
    switch (type) {
      case 'success': return '#4ade80';
      case 'error': return '#f87171';
      case 'event': return '#60a5fa';
      default: return '#e5e5e5';
    }
  };
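  // Screen layout, top to bottom: header (title + version + voice name),
  // status bar, deployment ID input (idle only), log-mode toggle, call and
  // log controls, live status panel (connected only), and the scrolling
  // log list.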
  return (
    <SafeAreaView style={styles.container}>
      <KeyboardAvoidingView
        style={styles.container}
        behavior={Platform.OS === 'ios' ? 'padding' : undefined}
      >
        {/* Header */}
        <View style={styles.header}>
          <View style={styles.headerRow}>
            <Text style={styles.title}>Voice Debug</Text>
            <Text style={styles.versionBadge}>v{APP_VERSION}</Text>
          </View>
          <Text style={styles.subtitle}>{VOICE_NAME}</Text>
        </View>

        {/* Call Status */}
        <View style={styles.statusBar}>
          <View style={styles.statusLeft}>
            <View
              style={[
                styles.statusDot,
                { backgroundColor: callState === 'connected' ? '#22c55e' : callState === 'idle' ? '#6b7280' : '#f59e0b' },
              ]}
            />
            <Text style={styles.statusText}>
              {callState === 'idle' && 'Ready'}
              {callState === 'connecting' && 'Connecting...'}
              {callState === 'connected' && `Connected ${formatDuration(callDuration)}`}
              {callState === 'ending' && 'Ending...'}
            </Text>
          </View>
          <Text style={styles.logCount}>{logs.length} logs</Text>
        </View>

        {/* Deployment ID Input */}
        {callState === 'idle' && (
          <View style={styles.deploymentIdContainer}>
            <Text style={styles.deploymentIdLabel}>Deployment ID (optional):</Text>
            <TextInput
              style={styles.deploymentIdInput}
              value={deploymentId}
              onChangeText={setDeploymentId}
              placeholder={loadingBeneficiary ? 'Loading...' : 'Deployment ID'}
              placeholderTextColor="#666"
            />
            {deploymentId.trim() && (
              <TouchableOpacity
                style={styles.clearDeploymentId}
                onPress={() => setDeploymentId('')}
              >
                <Ionicons name="close-circle" size={20} color="#9ca3af" />
              </TouchableOpacity>
            )}
          </View>
        )}

        {/* Log Mode Toggle */}
        <View style={styles.logModeContainer}>
          <Text style={styles.logModeLabel}>Log mode:</Text>
          <TouchableOpacity
            style={[styles.logModeButton, accumulateResponses && styles.logModeButtonActive]}
            onPress={() => setAccumulateResponses(true)}
          >
            <Text style={[styles.logModeButtonText, accumulateResponses && styles.logModeButtonTextActive]}>
              Clean (final only)
            </Text>
          </TouchableOpacity>
          <TouchableOpacity
            style={[styles.logModeButton, !accumulateResponses && styles.logModeButtonActive]}
            onPress={() => setAccumulateResponses(false)}
          >
            <Text style={[styles.logModeButtonText, !accumulateResponses && styles.logModeButtonTextActive]}>
              Verbose (all chunks)
            </Text>
          </TouchableOpacity>
        </View>

        {/* Control Buttons - Row 1: Call controls */}
        <View style={styles.controls}>
          {callState === 'idle' ? (
            <TouchableOpacity style={styles.startButton} onPress={startCall}>
              <Ionicons name="call" size={20} color="#fff" />
              <Text style={styles.buttonText}>Start Call</Text>
            </TouchableOpacity>
          ) : (
            <TouchableOpacity style={styles.endButton} onPress={endCall}>
              <Ionicons name="call" size={20} color="#fff" />
              <Text style={styles.buttonText}>End Call</Text>
            </TouchableOpacity>
          )}
        </View>

        {/* Control Buttons - Row 2: Log controls */}
        <View style={styles.controlsRow2}>
          <TouchableOpacity style={styles.copyButton} onPress={copyLogs}>
            <Ionicons name="copy-outline" size={18} color="#fff" />
            <Text style={styles.smallButtonText}>Copy</Text>
          </TouchableOpacity>
          <TouchableOpacity style={styles.shareButton} onPress={shareLogs}>
            <Ionicons name="share-outline" size={18} color="#fff" />
            <Text style={styles.smallButtonText}>Share</Text>
          </TouchableOpacity>
          <TouchableOpacity style={styles.clearButton} onPress={clearLogs}>
            <Ionicons name="trash-outline" size={18} color="#fff" />
            <Text style={styles.smallButtonText}>Clear</Text>
          </TouchableOpacity>
          <View style={styles.platformBadge}>
            <Text style={styles.platformText}>{Platform.OS} {Platform.Version}</Text>
          </View>
        </View>

        {/* ========== LIVE STATUS PANEL ========== */}
        {callState === 'connected' && (
          <View style={styles.liveStatusPanel}>
            {/* Agent State */}
            <View style={styles.liveStatusRow}>
              <Text style={styles.liveStatusLabel}>🤖 Agent:</Text>
              <View
                style={[
                  styles.agentStateBadge,
                  agentState === 'speaking' && styles.agentStateSpeaking,
                  agentState === 'thinking' && styles.agentStateThinking,
                  agentState === 'listening' && styles.agentStateListening,
                ]}
              >
                <Text style={styles.agentStateText}>
                  {agentState === 'speaking' ? '🔊 SPEAKING' :
                    agentState === 'thinking' ? '🧠 THINKING' :
                    agentState === 'listening' ? '👂 LISTENING' : agentState}
                </Text>
              </View>
            </View>
            {/* Mic Level */}
            <View style={styles.liveStatusRow}>
              <Text style={styles.liveStatusLabel}>🎙️ Mic:</Text>
              <View style={styles.micLevelContainer}>
                <View style={[styles.micLevelBar, { width: `${micLevel}%` }]} />
              </View>
              <Text style={styles.micLevelText}>{micLevel}%</Text>
            </View>
            {/* Last User Text */}
            {lastUserText ? (
              <View style={styles.liveStatusRow}>
                <Text style={styles.liveStatusLabel}>👤 You:</Text>
                <Text style={styles.transcriptText}>{lastUserText}</Text>
              </View>
            ) : null}
            {/* Last Agent Text */}
            {lastAgentText ? (
              <View style={styles.liveStatusRow}>
                <Text style={styles.liveStatusLabel}>🤖 Julia:</Text>
                <Text style={styles.transcriptText}>{lastAgentText}</Text>
              </View>
            ) : null}
          </View>
        )}

        {/* Logs */}
        <FlatList
          ref={flatListRef}
          data={logs}
          keyExtractor={item => item.id}
          style={styles.logsList}
          contentContainerStyle={styles.logsContent}
          renderItem={({ item }) => (
            <Text style={[styles.logEntry, { color: getLogColor(item.type) }]}>
              <Text style={styles.logTime}>[{item.time}]</Text> {item.message}
            </Text>
          )}
          ListEmptyComponent={
            <View style={styles.emptyContainer}>
              <Ionicons name="terminal-outline" size={48} color="#6b7280" />
              <Text style={styles.emptyText}>Press "Start Call" to begin</Text>
            </View>
          }
        />
      </KeyboardAvoidingView>
    </SafeAreaView>
  );
}

const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: '#0f0f0f',
  },
  header: {
    padding: Spacing.md,
    borderBottomWidth: 1,
    borderBottomColor: '#333',
  },
  headerRow: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'space-between',
  },
  title: {
    fontSize: 24,
    fontWeight: '700',
    color: '#fff',
  },
  versionBadge: {
    fontSize: 14,
    fontWeight: '600',
    color: '#22c55e',
    backgroundColor: 'rgba(34, 197, 94, 0.15)',
    paddingHorizontal: 10,
    paddingVertical: 4,
    borderRadius: 8,
    overflow: 'hidden',
  },
  subtitle: {
    fontSize: 14,
    color: '#888',
    marginTop: 2,
  },
  statusBar: {
    flexDirection: 'row',
    justifyContent: 'space-between',
    alignItems: 'center',
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.sm,
    backgroundColor: '#1a1a1a',
  },
  statusLeft: {
    flexDirection: 'row',
    alignItems: 'center',
  },
  statusDot: {
    width: 10,
    height: 10,
    borderRadius: 5,
    marginRight: 8,
  },
  statusText: {
    color: '#fff',
    fontSize: 14,
    fontWeight: '500',
  },
  logCount: {
    color: '#888',
    fontSize: 12,
  },
  deploymentIdContainer: {
    flexDirection: 'row',
    alignItems: 'center',
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.sm,
    backgroundColor: '#1f1f1f',
    borderBottomWidth: 1,
    borderBottomColor: '#333',
  },
  deploymentIdLabel: {
    color: '#9ca3af',
    fontSize: 12,
    marginRight: 8,
  },
  deploymentIdInput: {
    flex: 1,
    backgroundColor: '#2a2a2a',
    color: '#fff',
    fontSize: 14,
    paddingVertical: 8,
    paddingHorizontal: 12,
    borderRadius: 8,
    borderWidth: 1,
    borderColor: '#404040',
  },
  clearDeploymentId: {
    marginLeft: 8,
    padding: 4,
  },
  logModeContainer: {
    flexDirection: 'row',
    alignItems: 'center',
    paddingHorizontal: Spacing.md,
    paddingVertical: Spacing.xs,
    backgroundColor: '#1a1a1a',
    gap: 8,
  },
  logModeLabel: {
    color: '#9ca3af',
    fontSize: 12,
    marginRight: 4,
  },
  logModeButton: {
    paddingHorizontal: 10,
    paddingVertical: 5,
    borderRadius: 6,
    backgroundColor: '#333',
    borderWidth: 1,
    borderColor: '#404040',
  },
  logModeButtonActive: {
    backgroundColor: '#3b82f6',
    borderColor: '#3b82f6',
  },
  logModeButtonText: {
    color: '#888',
    fontSize: 11,
    fontWeight: '500',
  },
  logModeButtonTextActive: {
    color: '#fff',
  },
  controls: {
    flexDirection: 'row',
    padding: Spacing.md,
    paddingBottom: Spacing.sm,
    gap: 10,
  },
  controlsRow2: {
    flexDirection: 'row',
    paddingHorizontal: Spacing.md,
    paddingBottom: Spacing.md,
    gap: 10,
    borderBottomWidth: 1,
    borderBottomColor: '#333',
  },
  startButton: {
    flex: 1,
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
    backgroundColor: '#22c55e',
    paddingVertical: 14,
    borderRadius: 12,
    gap: 8,
  },
  endButton: {
    flex: 1,
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
    backgroundColor: '#ef4444',
    paddingVertical: 14,
    borderRadius: 12,
    gap: 8,
  },
  buttonText: {
    color: '#fff',
    fontSize: 16,
    fontWeight: '600',
  },
  copyButton: {
    alignItems: 'center',
    justifyContent: 'center',
    backgroundColor: '#3b82f6',
    paddingVertical: 10,
    paddingHorizontal: 12,
    borderRadius: 10,
  },
  shareButton: {
    alignItems: 'center',
    justifyContent: 'center',
    backgroundColor: '#8b5cf6',
    paddingVertical: 10,
    paddingHorizontal: 12,
    borderRadius: 10,
  },
  clearButton: {
    alignItems: 'center',
    justifyContent: 'center',
    backgroundColor: '#6b7280',
    paddingVertical: 10,
    paddingHorizontal: 12,
    borderRadius: 10,
  },
  platformBadge: {
    flex: 1,
    alignItems: 'flex-end',
    justifyContent: 'center',
  },
  platformText: {
    color: '#888',
    fontSize: 11,
    fontWeight: '500',
  },
  smallButtonText: {
    color: '#fff',
    fontSize: 10,
    fontWeight: '500',
    marginTop: 2,
  },
  logsList: {
    flex: 1,
  },
  logsContent: {
    padding: Spacing.sm,
    paddingBottom: 100,
  },
  logEntry: {
    fontSize: 12,
    fontFamily: Platform.OS === 'ios' ? 'Menlo' : 'monospace',
    lineHeight: 18,
    marginBottom: 2,
  },
  logTime: {
    color: '#888',
  },
  emptyContainer: {
    alignItems: 'center',
    justifyContent: 'center',
    paddingTop: 100,
  },
  emptyText: {
    color: '#6b7280',
    fontSize: 16,
    marginTop: 12,
  },
  // ========== LIVE STATUS PANEL STYLES ==========
  liveStatusPanel: {
    backgroundColor: '#1a1a1a',
    borderBottomWidth: 1,
    borderBottomColor: '#333',
    padding: Spacing.sm,
    gap: 6,
  },
  liveStatusRow: {
    flexDirection: 'row',
    alignItems: 'center',
    gap: 8,
  },
  liveStatusLabel: {
    color: '#888',
    fontSize: 11,
    fontWeight: '600',
    width: 55,
  },
  agentStateBadge: {
    paddingHorizontal: 8,
    paddingVertical: 3,
    borderRadius: 6,
    backgroundColor: '#333',
  },
  agentStateSpeaking: {
    backgroundColor: '#22c55e',
  },
  agentStateThinking: {
    backgroundColor: '#f59e0b',
  },
  agentStateListening: {
    backgroundColor: '#3b82f6',
  },
  agentStateText: {
    color: '#fff',
    fontSize: 11,
    fontWeight: '700',
  },
  micLevelContainer: {
    flex: 1,
    height: 8,
    backgroundColor: '#333',
    borderRadius: 4,
    overflow: 'hidden',
  },
  micLevelBar: {
    height: '100%',
    backgroundColor: '#22c55e',
    borderRadius: 4,
  },
  micLevelText: {
    color: '#888',
    fontSize: 11,
    fontWeight: '600',
    width: 35,
    textAlign: 'right',
  },
  transcriptText: {
    flex: 1,
    color: '#e5e5e5',
    fontSize: 11,
    fontStyle: 'italic',
  },
});