Compare commits

...

3 Commits

Author SHA1 Message Date
Sergei
a578ec8081 feat: Pass Debug tab deployment ID to voice calls
- Add debugDeploymentId to BeneficiaryContext for sharing between screens
- Sync Debug tab's deploymentId state with global context
- voice-call.tsx now prioritizes debugDeploymentId when starting calls
- Enables testing voice calls with specific deployment IDs from Debug screen
2026-01-24 00:05:47 -08:00
Sergei
5ecb5f9683 Fix Julia AI voice: use SINGLE_DEPLOYMENT_MODE for Lite
- livekitService.ts: send empty beneficiaryNamesDict in Lite mode
- agent.py: handle None beneficiary_names_dict correctly
- chat.tsx: align text chat with same SINGLE_DEPLOYMENT_MODE flag

This fixes Julia saying "I didn't get the name of beneficiary"
by letting WellNuo API use the default beneficiary for deployment_id.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2026-01-22 16:49:55 -08:00
Sergei
8d98bab3cf Remove expo-speech-recognition plugin from app.json
The package was removed in commit d9fff44 but the plugin
entry was left in app.json, causing EAS Build to fail with
'Unknown error in Read app config' phase.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2026-01-22 10:25:29 -08:00
9 changed files with 839 additions and 85 deletions
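The common thread in these commits is the Lite-mode request shape: with SINGLE_DEPLOYMENT_MODE enabled the app sends only deployment_id and lets the WellNuo API fall back to the default beneficiary for that deployment. A minimal TypeScript sketch of the difference, not part of the diff - parameter names follow chat.tsx below, while the deployment ID, question, and names-dict contents are illustrative (the dict's exact shape is not shown in these diffs):

// Sketch only. Mirrors the conditional parameter added in chat.tsx below.
const SINGLE_DEPLOYMENT_MODE = true;

const params: Record<string, string> = {
  function: 'ask_wellnuo_ai',
  deployment_id: '21',                  // always sent
  question: 'How is she doing today?',  // illustrative
};

if (!SINGLE_DEPLOYMENT_MODE) {
  // Full app only: also send the beneficiary names (shape assumed here)
  params.beneficiary_names_dict = JSON.stringify({ '21': 'Jane' });
}
// Lite mode omits the dict, so the API uses the default beneficiary
// configured for deployment_id.

const body = new URLSearchParams(params).toString();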

View File

@@ -69,13 +69,6 @@
          "backgroundColor": "#000000"
        }
      }
-      ],
-      [
-        "expo-speech-recognition",
-        {
-          "microphonePermission": "WellNuo needs access to your microphone for voice commands.",
-          "speechRecognitionPermission": "WellNuo uses speech recognition to convert your voice to text."
-        }
      ]
    ],
    "experiments": {

View File

@@ -49,12 +49,14 @@ export default function LoginScreen() {
  return (
    <KeyboardAvoidingView
      style={styles.container}
-      behavior={Platform.OS === 'ios' ? 'padding' : 'height'}
      behavior="padding"
      keyboardVerticalOffset={Platform.OS === 'ios' ? 0 : 20}
    >
      <ScrollView
        contentContainerStyle={styles.scrollContent}
        keyboardShouldPersistTaps="handled"
        showsVerticalScrollIndicator={false}
        bounces={false}
      >
        {/* Logo / Header */}
        <View style={styles.header}>
@@ -132,8 +134,9 @@ const styles = StyleSheet.create({
  },
  scrollContent: {
    flexGrow: 1,
    justifyContent: 'center',
    paddingHorizontal: Spacing.lg,
-    paddingTop: Spacing.xxl + Spacing.xl,
    paddingTop: Spacing.xl,
    paddingBottom: Spacing.xl,
  },
  header: {

View File

@@ -34,6 +34,16 @@ const API_URL = 'https://eluxnetworks.net/function/well-api/api';
const WELLNUO_USER = 'anandk';
const WELLNUO_PASSWORD = 'anandk_8';

// ============================================================================
// SINGLE_DEPLOYMENT_MODE
// When true: sends only deployment_id (no beneficiary_names_dict)
// When false: sends both deployment_id AND beneficiary_names_dict
//
// Use true for WellNuo Lite (single beneficiary per user)
// Use false for full WellNuo app (multiple beneficiaries)
// ============================================================================
const SINGLE_DEPLOYMENT_MODE = true;

// Keywords for question normalization (same as julia-agent/julia-ai/src/agent.py)
const STATUS_KEYWORDS = [
  /\bhow\s+is\b/i,
@@ -271,19 +281,25 @@ export default function ChatScreen() {
      const deploymentId = currentBeneficiary?.id?.toString() || beneficiaries[0]?.id?.toString() || '21';

      // Call API with EXACT same params as voice agent
-      // Using ask_wellnuo_ai with new beneficiary_names_dict parameter
-      const response = await fetch(API_URL, {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
-        body: new URLSearchParams({
      // SINGLE_DEPLOYMENT_MODE: sends only deployment_id (no beneficiary_names_dict)
      const requestParams: Record<string, string> = {
        function: 'ask_wellnuo_ai',
        clientId: 'MA_001',
        user_name: WELLNUO_USER,
        token: token,
        question: normalizedQuestion,
        deployment_id: deploymentId,
-        beneficiary_names_dict: JSON.stringify(beneficiaryNamesDict),
-      }).toString(),
      };

      // Only add beneficiary_names_dict if NOT in single deployment mode
      if (!SINGLE_DEPLOYMENT_MODE) {
        requestParams.beneficiary_names_dict = JSON.stringify(beneficiaryNamesDict);
      }

      const response = await fetch(API_URL, {
        method: 'POST',
        headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
        body: new URLSearchParams(requestParams).toString(),
      });

      const data = await response.json();

View File

@@ -20,6 +20,8 @@ import {
Share,
AppState,
AppStateStatus,
TextInput,
KeyboardAvoidingView,
} from 'react-native';
import { SafeAreaView } from 'react-native-safe-area-context';
import { Ionicons } from '@expo/vector-icons';
@@ -28,10 +30,11 @@ import { activateKeepAwakeAsync, deactivateKeepAwake } from 'expo-keep-awake';
import type { Room as RoomType } from 'livekit-client';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import { getToken, VOICE_NAME } from '@/services/livekitService';
import { api } from '@/services/api';
import { useBeneficiary } from '@/contexts/BeneficiaryContext';
import {
configureAudioForVoiceCall,
stopAudioSession,
- setAudioOutput,
} from '@/utils/audioSession';
import {
startVoiceCallService,
@@ -56,12 +59,64 @@ export default function DebugScreen() {
const [logs, setLogs] = useState<LogEntry[]>([]);
const [callState, setCallState] = useState<CallState>('idle');
const [callDuration, setCallDuration] = useState(0);
- const [isSpeakerOn, setIsSpeakerOn] = useState(true); // Default to speaker
const [agentState, setAgentState] = useState<string>('—'); // listening/thinking/speaking
const [lastUserText, setLastUserText] = useState<string>(''); // Last transcribed user utterance
const [lastAgentText, setLastAgentText] = useState<string>(''); // Last agent reply
const [micLevel, setMicLevel] = useState<number>(0); // Microphone level 0-100
const [deploymentId, setDeploymentIdState] = useState<string>(''); // Custom deployment ID
const [loadingBeneficiary, setLoadingBeneficiary] = useState(true);
const [accumulateResponses, setAccumulateResponses] = useState(true); // Accumulate chunks into full responses
const flatListRef = useRef<FlatList>(null);
// Refs for accumulating transcription chunks
const accumulatedUserTextRef = useRef<string>('');
const accumulatedAgentTextRef = useRef<string>('');
const lastUserSegmentIdRef = useRef<string | null>(null);
const lastAgentSegmentIdRef = useRef<string | null>(null);
const roomRef = useRef<RoomType | null>(null);
const callStartTimeRef = useRef<number | null>(null);
const appStateRef = useRef<AppStateStatus>(AppState.currentState);
const { currentBeneficiary, setDebugDeploymentId } = useBeneficiary();
// Sync deploymentId with context for voice-call.tsx to use
const setDeploymentId = useCallback((id: string) => {
setDeploymentIdState(id);
// Update context so voice-call.tsx can access it
setDebugDeploymentId(id.trim() || null);
}, [setDebugDeploymentId]);
// Load default deployment ID from first beneficiary
useEffect(() => {
const loadDefaultDeploymentId = async () => {
try {
// First check if currentBeneficiary is available
if (currentBeneficiary?.id) {
const id = currentBeneficiary.id.toString();
setDeploymentIdState(id);
setDebugDeploymentId(id); // Also set in context
setLoadingBeneficiary(false);
return;
}
// Otherwise load from API
const response = await api.getAllBeneficiaries();
if (response.ok && response.data && response.data.length > 0) {
const firstBeneficiary = response.data[0];
const id = firstBeneficiary.id.toString();
setDeploymentIdState(id);
setDebugDeploymentId(id); // Also set in context
}
} catch (error) {
console.error('[Debug] Failed to load beneficiary:', error);
} finally {
setLoadingBeneficiary(false);
}
};
loadDefaultDeploymentId();
}, [currentBeneficiary, setDebugDeploymentId]);
// Add log entry
const log = useCallback((message: string, type: LogEntry['type'] = 'info') => {
const time = new Date().toLocaleTimeString('en-US', { hour12: false, hour: '2-digit', minute: '2-digit', second: '2-digit' });
@@ -127,20 +182,6 @@
return () => subscription.remove();
}, [log]);
- // Toggle speaker
- const toggleSpeaker = useCallback(async () => {
- const newState = !isSpeakerOn;
- log(`=== TOGGLING SPEAKER: ${isSpeakerOn ? 'ON' : 'OFF'} → ${newState ? 'ON' : 'OFF'} ===`, 'info');
- try {
- await setAudioOutput(newState);
- setIsSpeakerOn(newState);
- log(`Speaker toggled to ${newState ? 'ON (loud speaker)' : 'OFF (earpiece)'}`, 'success');
- } catch (err: any) {
- log(`Speaker toggle error: ${err?.message || err}`, 'error');
- }
- }, [isSpeakerOn, log]);
// Start call
const startCall = useCallback(async () => {
if (callState !== 'idle') return;
@@ -148,7 +189,6 @@
clearLogs();
setCallState('connecting');
setCallDuration(0);
- setIsSpeakerOn(true); // Reset speaker state
callStartTimeRef.current = null;
try {
@@ -205,7 +245,20 @@
// Step 4: Get token from server
log('Step 4: Requesting token from server...', 'info');
log(`Token server: wellnuo.smartlaunchhub.com/julia/token`, 'info');
- const result = await getToken(`user-${Date.now()}`);
// Pass the deployment ID if one was specified
const beneficiaryData = deploymentId.trim() ? {
deploymentId: deploymentId.trim(),
beneficiaryNamesDict: {},
} : undefined;
if (beneficiaryData) {
log(`📋 Using custom Deployment ID: ${deploymentId}`, 'success');
} else {
log(`📋 No Deployment ID specified (default mode)`, 'info');
}
const result = await getToken(`user-${Date.now()}`, beneficiaryData);
if (!result.success || !result.data) {
throw new Error(result.error || 'Failed to get token');
@@ -252,11 +305,54 @@
});
newRoom.on(RoomEvent.ParticipantConnected, (participant: any) => {
- log(`EVENT: Participant connected: ${participant.identity}`, 'event');
log(`👋 PARTICIPANT CONNECTED: ${participant.identity}`, 'success');
// Subscribe to this participant's events (for the Julia agent)
participant.on('isSpeakingChanged', (speaking: boolean) => {
if (speaking) {
log(`🔊 ${participant.identity} STARTED SPEAKING`, 'success');
setAgentState('speaking');
} else {
log(`🔇 ${participant.identity} stopped speaking`, 'info');
}
});
participant.on('trackMuted', (pub: any) => {
log(`🔇 ${participant.identity} muted ${pub.kind}`, 'event');
});
participant.on('trackUnmuted', (pub: any) => {
log(`🔊 ${participant.identity} unmuted ${pub.kind}`, 'event');
});
participant.on('attributesChanged', (attrs: any) => {
log(`📋 ${participant.identity} ATTRIBUTES:`, 'event');
Object.entries(attrs || {}).forEach(([k, v]) => {
log(` ${k}: ${v}`, 'info');
if (k === 'lk.agent.state') {
setAgentState(String(v));
}
});
});
participant.on('transcriptionReceived', (segments: any[]) => {
log(`🤖 ${participant.identity} TRANSCRIPTION:`, 'success');
segments.forEach((seg: any, i: number) => {
const text = seg.text || seg.final || '';
log(` [${i}] "${text}"`, 'info');
if (text) setLastAgentText(text);
});
});
// Show the participant's current attributes
const attrs = participant.attributes || {};
if (Object.keys(attrs).length > 0) {
log(` Initial attributes: ${JSON.stringify(attrs)}`, 'info');
}
});
newRoom.on(RoomEvent.ParticipantDisconnected, (participant: any) => {
- log(`EVENT: Participant disconnected: ${participant.identity}`, 'event');
log(`👋 PARTICIPANT DISCONNECTED: ${participant.identity}`, 'event');
});
newRoom.on(RoomEvent.TrackSubscribed, (track: any, publication: any, participant: any) => {
@@ -284,12 +380,45 @@
}
});
- newRoom.on(RoomEvent.DataReceived, (payload: any, participant: any) => {
newRoom.on(RoomEvent.DataReceived, (payload: any, participant: any, kind: any, topic: any) => {
log(`📩 DATA RECEIVED from ${participant?.identity || 'unknown'}`, 'event');
log(` kind: ${kind}, topic: ${topic || 'none'}`, 'info');
try {
- const data = JSON.parse(new TextDecoder().decode(payload));
- log(`EVENT: Data received: ${JSON.stringify(data).substring(0, 100)}`, 'event');
const text = new TextDecoder().decode(payload);
const data = JSON.parse(text);
log(` type: ${data.type || 'unknown'}`, 'info');
// Detailed logging for the different message types
if (data.type === 'transcript' || data.type === 'transcription') {
log(` 🗣️ TRANSCRIPT: role=${data.role}`, 'success');
const text = data.text || data.content || '';
log(` 📝 TEXT: "${text}"`, 'success');
// Update the UI
if (data.role === 'user') {
setLastUserText(text);
} else if (data.role === 'assistant' || data.role === 'agent') {
setLastAgentText(text);
}
} else if (data.type === 'state' || data.type === 'agent_state') {
const stateValue = data.state || JSON.stringify(data);
log(` 🤖 AGENT STATE: ${stateValue}`, 'success');
setAgentState(stateValue);
} else if (data.type === 'function_call' || data.type === 'tool_call') {
log(` 🔧 FUNCTION CALL: ${data.name || data.function || JSON.stringify(data)}`, 'event');
} else if (data.type === 'function_result' || data.type === 'tool_result') {
log(` ✅ FUNCTION RESULT: ${JSON.stringify(data.result || data).substring(0, 200)}`, 'event');
} else {
// Show the full JSON for unknown message types
log(` 📦 FULL DATA: ${JSON.stringify(data)}`, 'info');
}
} catch (e) {
- log(`EVENT: Data received (binary)`, 'event');
// Fall back to showing the payload as plain text
try {
const text = new TextDecoder().decode(payload);
log(` 📄 RAW TEXT: "${text.substring(0, 300)}"`, 'info');
} catch {
log(` 📎 BINARY DATA: ${payload.byteLength} bytes`, 'info');
}
}
});
@@ -305,7 +434,267 @@
log(`EVENT: RoomMetadataChanged: ${metadata}`, 'event');
});
- log('Event listeners set up', 'success');
// ===========================================
// TRANSCRIPTION - transcribed speech (STT)
// ===========================================
newRoom.on(RoomEvent.TranscriptionReceived, (segments: any[], participant: any) => {
const isUser = participant?.identity === newRoom.localParticipant.identity;
const who = isUser ? '👤 USER' : '🤖 AGENT';
segments.forEach((segment: any, idx: number) => {
const text = segment.text || segment.final || '';
const segmentId = segment.id || `seg-${Date.now()}`;
const isFinalFlag = segment.final !== undefined;
if (accumulateResponses) {
// === ACCUMULATE MODE: only show final, complete responses ===
if (isUser) {
// New segment, or a continuation of the current one
if (lastUserSegmentIdRef.current !== segmentId) {
// If a previous final segment is still pending, log it first
if (accumulatedUserTextRef.current && lastUserSegmentIdRef.current) {
log(`👤 USER FINAL: "${accumulatedUserTextRef.current}"`, 'success');
}
accumulatedUserTextRef.current = text;
lastUserSegmentIdRef.current = segmentId;
} else {
// Update the current segment
accumulatedUserTextRef.current = text;
}
// If this segment is final, log it immediately
if (isFinalFlag && text) {
log(`👤 USER: "${text}"`, 'success');
setLastUserText(text);
accumulatedUserTextRef.current = '';
lastUserSegmentIdRef.current = null;
}
} else {
// AGENT
if (lastAgentSegmentIdRef.current !== segmentId) {
if (accumulatedAgentTextRef.current && lastAgentSegmentIdRef.current) {
log(`🤖 AGENT FINAL: "${accumulatedAgentTextRef.current}"`, 'success');
}
accumulatedAgentTextRef.current = text;
lastAgentSegmentIdRef.current = segmentId;
} else {
accumulatedAgentTextRef.current = text;
}
if (isFinalFlag && text) {
log(`🤖 JULIA: "${text}"`, 'success');
setLastAgentText(text);
accumulatedAgentTextRef.current = '';
lastAgentSegmentIdRef.current = null;
}
}
} else {
// === FULL LOGGING MODE: show every chunk ===
const finalLabel = isFinalFlag ? '(FINAL)' : '(interim)';
log(`🎤 TRANSCRIPTION from ${who} (${participant?.identity || 'unknown'})`, 'success');
log(` [${idx}] ${finalLabel}: "${text}"`, 'event');
if (segment.id) log(` segment.id: ${segment.id}`, 'info');
if (segment.firstReceivedTime) log(` firstReceivedTime: ${segment.firstReceivedTime}`, 'info');
if (segment.lastReceivedTime) log(` lastReceivedTime: ${segment.lastReceivedTime}`, 'info');
// Update the UI with the latest text
if (text && (isFinalFlag || !segment.final)) {
if (isUser) {
setLastUserText(text);
} else {
setLastAgentText(text);
}
}
}
});
});
// ===========================================
// PARTICIPANT ATTRIBUTES - agent state
// ===========================================
newRoom.on(RoomEvent.ParticipantAttributesChanged, (changedAttributes: any, participant: any) => {
log(`👤 ATTRIBUTES CHANGED for ${participant?.identity || 'unknown'}`, 'event');
Object.entries(changedAttributes || {}).forEach(([key, value]) => {
log(` ${key}: ${value}`, 'info');
// Especially important: lk.agent.state shows listening/thinking/speaking
if (key === 'lk.agent.state') {
log(` 🤖 AGENT STATE: ${value}`, 'success');
// Update the UI
setAgentState(String(value));
}
});
// Show all current attributes
const attrs = participant?.attributes || {};
if (Object.keys(attrs).length > 0) {
log(` All attributes: ${JSON.stringify(attrs)}`, 'info');
}
});
// ===========================================
// SIGNAL CONNECTED/RECONNECTING
// ===========================================
newRoom.on(RoomEvent.SignalConnected, () => {
log('EVENT: SignalConnected - WebSocket connected', 'success');
});
newRoom.on(RoomEvent.SignalReconnecting, () => {
log('EVENT: SignalReconnecting - signal reconnecting...', 'event');
});
// ===========================================
// LOCAL TRACK UNPUBLISHED
// ===========================================
newRoom.on(RoomEvent.LocalTrackUnpublished, (publication: any, participant: any) => {
log(`EVENT: LocalTrackUnpublished - ${publication.trackSid}`, 'event');
});
// ===========================================
// ADDITIONAL EVENTS FOR FULL DEBUGGING
// ===========================================
// Connection quality
newRoom.on(RoomEvent.ConnectionQualityChanged, (quality: any, participant: any) => {
const qualityEmoji = quality === 'excellent' ? '🟢' : quality === 'good' ? '🟡' : '🔴';
log(`${qualityEmoji} CONNECTION QUALITY: ${participant?.identity || 'local'} → ${quality}`, 'event');
});
// Device changes (microphone/camera connected/disconnected)
newRoom.on(RoomEvent.MediaDevicesChanged, () => {
log(`🔌 MEDIA DEVICES CHANGED - device list updated`, 'event');
});
// Active device changed
newRoom.on(RoomEvent.ActiveDeviceChanged, (kind: any, deviceId: any) => {
log(`🎛️ ACTIVE DEVICE CHANGED: ${kind} → ${deviceId}`, 'event');
});
// Track subscription failure
newRoom.on(RoomEvent.TrackSubscriptionFailed, (trackSid: any, participant: any, reason: any) => {
log(`❌ TRACK SUBSCRIPTION FAILED: ${trackSid} from ${participant?.identity}`, 'error');
log(` Reason: ${reason}`, 'error');
});
// Track published (when the agent starts speaking)
newRoom.on(RoomEvent.TrackPublished, (publication: any, participant: any) => {
log(`📢 TRACK PUBLISHED by ${participant?.identity}: ${publication.kind} (${publication.source})`, 'event');
});
// Track unpublished
newRoom.on(RoomEvent.TrackUnpublished, (publication: any, participant: any) => {
log(`📤 TRACK UNPUBLISHED by ${participant?.identity}: ${publication.kind}`, 'event');
});
// Participant metadata changed
newRoom.on(RoomEvent.ParticipantMetadataChanged, (metadata: any, participant: any) => {
log(`📋 PARTICIPANT METADATA: ${participant?.identity}`, 'event');
try {
const parsed = JSON.parse(metadata || '{}');
log(` ${JSON.stringify(parsed)}`, 'info');
} catch {
log(` ${metadata}`, 'info');
}
});
// Participant name changed
newRoom.on(RoomEvent.ParticipantNameChanged, (name: any, participant: any) => {
log(`👤 PARTICIPANT NAME: ${participant?.identity} → ${name}`, 'event');
});
// Recording status (if the room is being recorded)
newRoom.on(RoomEvent.RecordingStatusChanged, (recording: any) => {
log(`⏺️ RECORDING STATUS: ${recording ? 'RECORDING' : 'NOT RECORDING'}`, recording ? 'success' : 'info');
});
// Track stream state changed
newRoom.on(RoomEvent.TrackStreamStateChanged, (publication: any, streamState: any, participant: any) => {
log(`📊 TRACK STREAM STATE: ${participant?.identity}/${publication.trackSid} → ${streamState}`, 'event');
});
// Track subscription permissions
newRoom.on(RoomEvent.TrackSubscriptionPermissionChanged, (publication: any, status: any, participant: any) => {
log(`🔐 TRACK PERMISSION: ${participant?.identity}/${publication.trackSid} → ${status}`, 'event');
});
// Track subscription status
newRoom.on(RoomEvent.TrackSubscriptionStatusChanged, (publication: any, status: any, participant: any) => {
log(`📶 TRACK SUBSCRIPTION: ${participant?.identity}/${publication.trackSid} → ${status}`, 'event');
});
// Participant permissions changed
newRoom.on(RoomEvent.ParticipantPermissionsChanged, (prevPermissions: any, participant: any) => {
log(`🔑 PARTICIPANT PERMISSIONS CHANGED: ${participant?.identity}`, 'event');
log(` New permissions: ${JSON.stringify(participant?.permissions || {})}`, 'info');
});
// ChatMessage - messages in the room chat
newRoom.on(RoomEvent.ChatMessage, (message: any, participant: any) => {
log(`💬 CHAT MESSAGE from ${participant?.identity || 'system'}:`, 'success');
log(` ${message.message || JSON.stringify(message)}`, 'info');
});
// SIP DTMF - telephone keypad tones
newRoom.on(RoomEvent.SipDTMFReceived, (dtmf: any, participant: any) => {
log(`📞 SIP DTMF: ${dtmf.code} from ${participant?.identity}`, 'event');
});
// Microphone silence detection
newRoom.on(RoomEvent.LocalAudioSilenceDetected, (publication: any) => {
log(`🔇 LOCAL AUDIO SILENCE DETECTED - microphone is silent`, 'event');
});
// DataChannel buffer status changes
newRoom.on(RoomEvent.DCBufferStatusChanged, (isLow: any, kind: any) => {
log(`📦 DC BUFFER: ${kind} buffer is ${isLow ? 'LOW' : 'OK'}`, isLow ? 'event' : 'info');
});
// Performance metrics
newRoom.on(RoomEvent.MetricsReceived, (metrics: any) => {
log(`📈 METRICS RECEIVED:`, 'info');
if (metrics.audioStats) {
log(` Audio: bitrate=${metrics.audioStats.bitrate}, packetsLost=${metrics.audioStats.packetsLost}`, 'info');
}
if (metrics.videoStats) {
log(` Video: bitrate=${metrics.videoStats.bitrate}, fps=${metrics.videoStats.fps}`, 'info');
}
});
// Video playback status (if any)
newRoom.on(RoomEvent.VideoPlaybackStatusChanged, () => {
log(`🎬 VIDEO PLAYBACK STATUS CHANGED`, 'event');
});
// Encryption error
newRoom.on(RoomEvent.EncryptionError, (error: any) => {
log(`🔒 ENCRYPTION ERROR: ${error?.message || error}`, 'error');
});
// Participant encryption status
newRoom.on(RoomEvent.ParticipantEncryptionStatusChanged, (encrypted: any, participant: any) => {
log(`🔐 ENCRYPTION STATUS: ${participant?.identity} → ${encrypted ? 'encrypted' : 'not encrypted'}`, 'event');
});
// Room moved to a new server (rare)
newRoom.on(RoomEvent.Moved, (room: any) => {
log(`🚀 ROOM MOVED to new server`, 'event');
});
// Participant became active
newRoom.on(RoomEvent.ParticipantActive, (participant: any) => {
log(`✅ PARTICIPANT ACTIVE: ${participant?.identity}`, 'success');
// Verify this is the Julia agent (not the local participant)
const isAgent = participant?.identity?.startsWith('agent-') ||
(participant?.attributes?.['lk.agent_name'] === 'julia-ai');
if (isAgent) {
log(``, 'success');
log(`🟢🟢🟢 AGENT READY 🟢🟢🟢`, 'success');
log(`🔊 Julia will now speak greeting...`, 'success');
log(``, 'success');
}
});
log('Event listeners set up (FULL DEBUG MODE)', 'success');
// Step 7: Connect to room
log('Step 7: Connecting to LiveKit room...', 'info');
@@ -335,21 +724,117 @@
}
});
- // Listen for local track published
// ===========================================
// LOCAL PARTICIPANT EVENTS - my microphone's events
// ===========================================
newRoom.localParticipant.on('localTrackPublished', (pub: any) => {
- log(`MY TRACK PUBLISHED: ${pub.kind} sid=${pub.trackSid}`, 'success');
log(`🎤 MY TRACK PUBLISHED: ${pub.kind} sid=${pub.trackSid}`, 'success');
});
newRoom.localParticipant.on('localTrackUnpublished', (pub: any) => {
log(`🎤 MY TRACK UNPUBLISHED: ${pub.kind} sid=${pub.trackSid}`, 'event');
});
// IsSpeakingChanged - when I start/stop speaking
newRoom.localParticipant.on('isSpeakingChanged', (speaking: boolean) => {
if (speaking) {
log(`🗣️ >>> I STARTED SPEAKING <<<`, 'success');
} else {
log(`🤐 I stopped speaking`, 'info');
}
});
// My track was muted/unmuted
newRoom.localParticipant.on('trackMuted', (pub: any) => {
log(`🔇 MY TRACK MUTED: ${pub.kind}`, 'event');
});
newRoom.localParticipant.on('trackUnmuted', (pub: any) => {
log(`🔊 MY TRACK UNMUTED: ${pub.kind}`, 'success');
});
// Media device error on my local participant
newRoom.localParticipant.on('mediaDevicesError', (error: any) => {
log(`❌ MY MEDIA DEVICE ERROR: ${error?.message || error}`, 'error');
});
// Audio stream acquired
newRoom.localParticipant.on('audioStreamAcquired', () => {
log(`🎙️ AUDIO STREAM ACQUIRED - microphone captured!`, 'success');
});
// Transcription on my own track
newRoom.localParticipant.on('transcriptionReceived', (segments: any[]) => {
log(`🎤 MY TRANSCRIPTION (${segments.length} segments):`, 'success');
segments.forEach((seg: any, i: number) => {
log(` [${i}] "${seg.text || seg.final}"`, 'info');
});
});
// Listen when I become an active speaker (means mic is working)
newRoom.on(RoomEvent.ActiveSpeakersChanged, (speakers: any[]) => {
const iAmSpeaking = speakers.some(s => s.identity === newRoom.localParticipant.identity);
if (iAmSpeaking) {
- log(`*** I AM SPEAKING - MIC WORKS ***`, 'success');
log(`🎙️ *** I AM SPEAKING - MIC WORKS! ***`, 'success');
}
});
log(`Local participant: ${newRoom.localParticipant.identity}`, 'info');
// ===========================================
// AUDIO LEVEL MONITORING - periodic microphone level check
// ===========================================
let audioLevelInterval: ReturnType<typeof setInterval> | null = null;
let lastLoggedLevel = -1;
const startAudioLevelMonitoring = () => {
if (audioLevelInterval) return;
audioLevelInterval = setInterval(() => {
try {
// Find the microphone track among all publications
const audioTracks = newRoom.localParticipant.audioTrackPublications;
let localAudioTrack: any = null;
audioTracks.forEach((pub: any) => {
if (pub.source === 'microphone' || pub.kind === 'audio') {
localAudioTrack = pub;
}
});
if (localAudioTrack?.track) {
// Read the audio level via the LiveKit API
const audioLevel = (localAudioTrack.track as any).audioLevel;
if (audioLevel !== undefined) {
const roundedLevel = Math.round(audioLevel * 100);
// Update the UI
setMicLevel(roundedLevel);
// Log only when the level changes noticeably
if (Math.abs(roundedLevel - lastLoggedLevel) > 5) {
lastLoggedLevel = roundedLevel;
const bars = '▓'.repeat(Math.min(20, Math.round(audioLevel * 20))) + '░'.repeat(Math.max(0, 20 - Math.round(audioLevel * 20)));
log(`🎚️ MIC LEVEL: [${bars}] ${roundedLevel}%`, audioLevel > 0.1 ? 'success' : 'info');
}
}
}
} catch (e) {
// Ignore errors
}
}, 200); // Poll every 200 ms for a smooth UI
};
// Start audio level monitoring once connected
newRoom.on(RoomEvent.Connected, () => {
log('Starting audio level monitoring...', 'info');
setTimeout(startAudioLevelMonitoring, 1000);
});
// Stop monitoring on disconnect
newRoom.on(RoomEvent.Disconnected, () => {
if (audioLevelInterval) {
clearInterval(audioLevelInterval);
audioLevelInterval = null;
}
});
// Android: Start foreground service to keep call alive in background
if (Platform.OS === 'android') {
log('Android: Starting foreground service...', 'info');
@@ -463,6 +948,52 @@
<Text style={styles.logCount}>{logs.length} logs</Text>
</View>
{/* Deployment ID Input */}
{callState === 'idle' && (
<View style={styles.deploymentIdContainer}>
<Text style={styles.deploymentIdLabel}>Deployment ID (optional):</Text>
<TextInput
style={styles.deploymentIdInput}
value={deploymentId}
onChangeText={setDeploymentId}
placeholder="Enter deployment ID..."
placeholderTextColor="#6b7280"
keyboardType="default"
autoCapitalize="none"
autoCorrect={false}
/>
{deploymentId.trim() && (
<TouchableOpacity
style={styles.clearDeploymentId}
onPress={() => setDeploymentId('')}
>
<Ionicons name="close-circle" size={20} color="#6b7280" />
</TouchableOpacity>
)}
</View>
)}
{/* Log Mode Toggle */}
<View style={styles.logModeContainer}>
<Text style={styles.logModeLabel}>Log mode:</Text>
<TouchableOpacity
style={[styles.logModeButton, accumulateResponses && styles.logModeButtonActive]}
onPress={() => setAccumulateResponses(true)}
>
<Text style={[styles.logModeButtonText, accumulateResponses && styles.logModeButtonTextActive]}>
Clean (final only)
</Text>
</TouchableOpacity>
<TouchableOpacity
style={[styles.logModeButton, !accumulateResponses && styles.logModeButtonActive]}
onPress={() => setAccumulateResponses(false)}
>
<Text style={[styles.logModeButtonText, !accumulateResponses && styles.logModeButtonTextActive]}>
Verbose (all chunks)
</Text>
</TouchableOpacity>
</View>
{/* Control Buttons - Row 1: Call controls */}
<View style={styles.controls}>
{callState === 'idle' ? (
@@ -481,19 +1012,6 @@
</TouchableOpacity>
)}
- {/* Speaker Toggle Button */}
- <TouchableOpacity
- style={[styles.speakerButton, isSpeakerOn ? styles.speakerOn : styles.speakerOff]}
- onPress={toggleSpeaker}
- disabled={callState === 'idle'}
- >
- <Ionicons
- name={isSpeakerOn ? 'volume-high' : 'ear'}
- size={20}
- color="#fff"
- />
- <Text style={styles.smallButtonText}>{isSpeakerOn ? 'Speaker' : 'Ear'}</Text>
- </TouchableOpacity>
</View>
{/* Control Buttons - Row 2: Log controls */}
@@ -518,6 +1036,54 @@
</View>
</View>
{/* ========== LIVE STATUS PANEL ========== */}
{callState === 'connected' && (
<View style={styles.liveStatusPanel}>
{/* Agent State */}
<View style={styles.liveStatusRow}>
<Text style={styles.liveStatusLabel}>🤖 Agent:</Text>
<View style={[
styles.agentStateBadge,
agentState === 'speaking' && styles.agentStateSpeaking,
agentState === 'thinking' && styles.agentStateThinking,
agentState === 'listening' && styles.agentStateListening,
]}>
<Text style={styles.agentStateText}>
{agentState === 'speaking' ? '🔊 SPEAKING' :
agentState === 'thinking' ? '🧠 THINKING' :
agentState === 'listening' ? '👂 LISTENING' :
agentState}
</Text>
</View>
</View>
{/* Mic Level */}
<View style={styles.liveStatusRow}>
<Text style={styles.liveStatusLabel}>🎙 Mic:</Text>
<View style={styles.micLevelContainer}>
<View style={[styles.micLevelBar, { width: `${Math.min(100, micLevel)}%` }]} />
</View>
<Text style={styles.micLevelText}>{micLevel}%</Text>
</View>
{/* Last User Text */}
{lastUserText ? (
<View style={styles.liveStatusRow}>
<Text style={styles.liveStatusLabel}>👤 You:</Text>
<Text style={styles.transcriptText} numberOfLines={2}>{lastUserText}</Text>
</View>
) : null}
{/* Last Agent Text */}
{lastAgentText ? (
<View style={styles.liveStatusRow}>
<Text style={styles.liveStatusLabel}>🤖 Julia:</Text>
<Text style={styles.transcriptText} numberOfLines={2}>{lastAgentText}</Text>
</View>
) : null}
</View>
)}
{/* Logs */}
<FlatList
ref={flatListRef}
@@ -603,6 +1169,68 @@
color: '#888',
fontSize: 12,
},
deploymentIdContainer: {
flexDirection: 'row',
alignItems: 'center',
paddingHorizontal: Spacing.md,
paddingVertical: Spacing.sm,
backgroundColor: '#1f1f1f',
borderBottomWidth: 1,
borderBottomColor: '#333',
},
deploymentIdLabel: {
color: '#9ca3af',
fontSize: 12,
marginRight: 8,
},
deploymentIdInput: {
flex: 1,
backgroundColor: '#2a2a2a',
color: '#fff',
fontSize: 14,
paddingVertical: 8,
paddingHorizontal: 12,
borderRadius: 8,
borderWidth: 1,
borderColor: '#404040',
},
clearDeploymentId: {
marginLeft: 8,
padding: 4,
},
logModeContainer: {
flexDirection: 'row',
alignItems: 'center',
paddingHorizontal: Spacing.md,
paddingVertical: Spacing.xs,
backgroundColor: '#1a1a1a',
gap: 8,
},
logModeLabel: {
color: '#9ca3af',
fontSize: 12,
marginRight: 4,
},
logModeButton: {
paddingHorizontal: 10,
paddingVertical: 5,
borderRadius: 6,
backgroundColor: '#333',
borderWidth: 1,
borderColor: '#404040',
},
logModeButtonActive: {
backgroundColor: '#3b82f6',
borderColor: '#3b82f6',
},
logModeButtonText: {
color: '#888',
fontSize: 11,
fontWeight: '500',
},
logModeButtonTextActive: {
color: '#fff',
},
controls: {
flexDirection: 'row',
padding: Spacing.md,
@@ -666,19 +1294,6 @@
paddingHorizontal: 12,
borderRadius: 10,
},
- speakerButton: {
- alignItems: 'center',
- justifyContent: 'center',
- paddingVertical: 10,
- paddingHorizontal: 16,
- borderRadius: 10,
- },
- speakerOn: {
- backgroundColor: '#f59e0b', // Orange when speaker is ON
- },
- speakerOff: {
- backgroundColor: '#4b5563', // Gray when earpiece
- },
platformBadge: {
flex: 1,
alignItems: 'flex-end',
@@ -721,4 +1336,68 @@
fontSize: 16,
marginTop: 12,
},
// ========== LIVE STATUS PANEL STYLES ==========
liveStatusPanel: {
backgroundColor: '#1a1a1a',
borderBottomWidth: 1,
borderBottomColor: '#333',
padding: Spacing.sm,
gap: 6,
},
liveStatusRow: {
flexDirection: 'row',
alignItems: 'center',
gap: 8,
},
liveStatusLabel: {
color: '#888',
fontSize: 11,
fontWeight: '600',
width: 55,
},
agentStateBadge: {
paddingHorizontal: 8,
paddingVertical: 3,
borderRadius: 6,
backgroundColor: '#333',
},
agentStateSpeaking: {
backgroundColor: '#22c55e',
},
agentStateThinking: {
backgroundColor: '#f59e0b',
},
agentStateListening: {
backgroundColor: '#3b82f6',
},
agentStateText: {
color: '#fff',
fontSize: 11,
fontWeight: '700',
},
micLevelContainer: {
flex: 1,
height: 8,
backgroundColor: '#333',
borderRadius: 4,
overflow: 'hidden',
},
micLevelBar: {
height: '100%',
backgroundColor: '#22c55e',
borderRadius: 4,
},
micLevelText: {
color: '#888',
fontSize: 11,
fontWeight: '600',
width: 35,
textAlign: 'right',
},
transcriptText: {
flex: 1,
color: '#e5e5e5',
fontSize: 11,
fontStyle: 'italic',
},
});

View File

@@ -31,7 +31,7 @@ const { width: SCREEN_WIDTH } = Dimensions.get('window');
export default function VoiceCallScreen() {
  const router = useRouter();
  const { clearTranscript, addTranscriptEntry } = useVoiceTranscript();
-  const { currentBeneficiary } = useBeneficiary();
  const { currentBeneficiary, debugDeploymentId } = useBeneficiary();

  // Beneficiary state for building beneficiaryData
  const [beneficiaries, setBeneficiaries] = useState<Beneficiary[]>([]);
@@ -57,6 +57,16 @@ export default function VoiceCallScreen() {
  // Build beneficiaryData for voice agent
  const beneficiaryData = useMemo((): BeneficiaryData | undefined => {
    // PRIORITY 1: If debugDeploymentId is set (from Debug screen), use it
    if (debugDeploymentId) {
      console.log('[VoiceCall] Using DEBUG deployment ID:', debugDeploymentId);
      return {
        deploymentId: debugDeploymentId,
        beneficiaryNamesDict: {},
      };
    }

    // PRIORITY 2: Use beneficiaries from API
    // Safety check - ensure beneficiaries is an array
    if (!Array.isArray(beneficiaries) || beneficiaries.length === 0) {
      console.log('[VoiceCall] No beneficiaries yet, skipping beneficiaryData');
@@ -91,7 +101,7 @@
      console.error('[VoiceCall] Error building beneficiaryData:', error);
      return undefined;
    }
-  }, [beneficiaries, currentBeneficiary]);
  }, [beneficiaries, currentBeneficiary, debugDeploymentId]);

  // LiveKit hook - ALL logic is here
  const {
@@ -126,14 +136,22 @@
  // Track if connect has been called to prevent duplicate calls
  const connectCalledRef = useRef(false);

-  // Start call ONLY after beneficiaries are loaded AND beneficiaryData is ready
  // Start call ONLY after beneficiaryData is ready
  // IMPORTANT: We must wait for beneficiaryData to be populated!
  // Without deploymentId, Julia AI agent won't know which beneficiary to talk about.
  useEffect(() => {
    // Prevent duplicate connect calls
    if (connectCalledRef.current) return;

-    // Only connect when beneficiaryData has a valid deploymentId
    // If debugDeploymentId is set, connect immediately (don't wait for beneficiaries)
    if (debugDeploymentId && beneficiaryData?.deploymentId) {
      console.log('[VoiceCall] Starting call with DEBUG deploymentId:', debugDeploymentId);
      connectCalledRef.current = true;
      connect();
      return;
    }

    // Otherwise, only connect when beneficiaries are loaded AND beneficiaryData is ready
    if (beneficiariesLoaded && beneficiaryData?.deploymentId) {
      console.log('[VoiceCall] Starting call with beneficiaryData:', JSON.stringify(beneficiaryData));
      connectCalledRef.current = true;
@@ -145,7 +163,7 @@
        beneficiaryData: beneficiaryData ? JSON.stringify(beneficiaryData) : 'undefined'
      });
    }
-  }, [beneficiariesLoaded, beneficiaryData, beneficiaries.length, connect]);
  }, [beneficiariesLoaded, beneficiaryData, beneficiaries.length, connect, debugDeploymentId]);

  // Fallback: if beneficiaryData doesn't arrive in 5 seconds, connect anyway
  // This handles edge cases where API fails or user has no beneficiaries

View File

@@ -7,12 +7,17 @@ interface BeneficiaryContextType {
  clearCurrentBeneficiary: () => void;
  // Helper to format beneficiary context for AI
  getBeneficiaryContext: () => string;
  // Debug: Override deployment ID for testing (used by Debug screen)
  debugDeploymentId: string | null;
  setDebugDeploymentId: (id: string | null) => void;
}

const BeneficiaryContext = createContext<BeneficiaryContextType | undefined>(undefined);

export function BeneficiaryProvider({ children }: { children: React.ReactNode }) {
  const [currentBeneficiary, setCurrentBeneficiary] = useState<Beneficiary | null>(null);
  // Debug: Override deployment ID for testing purposes
  const [debugDeploymentId, setDebugDeploymentId] = useState<string | null>(null);

  const clearCurrentBeneficiary = useCallback(() => {
    setCurrentBeneficiary(null);
@@ -70,6 +75,8 @@ export function BeneficiaryProvider({ children }: { children: React.ReactNode })
        setCurrentBeneficiary,
        clearCurrentBeneficiary,
        getBeneficiaryContext,
        debugDeploymentId,
        setDebugDeploymentId,
      }}
    >
      {children}
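
The two context fields added above are the handoff point between screens: the Debug screen writes the override and voice-call.tsx reads it. A hypothetical consumer, only to show the intended precedence (the component is invented; the hook and field names come from this diff):

// Hypothetical consumer of the new context fields (not part of the diff).
import React from 'react';
import { Text } from 'react-native';
import { useBeneficiary } from '@/contexts/BeneficiaryContext';

export function DeploymentBanner() {
  const { currentBeneficiary, debugDeploymentId } = useBeneficiary();

  // Same precedence voice-call.tsx applies: the Debug override wins over
  // the currently selected beneficiary.
  const effectiveId = debugDeploymentId ?? currentBeneficiary?.id?.toString();

  return <Text>{effectiveId ? `Deployment ${effectiveId}` : 'No deployment selected'}</Text>;
}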

View File

@@ -27,7 +27,7 @@
      "credentialsSource": "remote"
    },
    "android": {
-      "buildType": "apk"
      "buildType": "app-bundle"
    }
  }
},

View File

@@ -151,7 +151,10 @@ class WellNuoLLM(llm.LLM):
        self._model_name = "wellnuo-voice-ask"
        # Dynamic values from participant metadata (or fallback to env/defaults)
        self._deployment_id = deployment_id or DEPLOYMENT_ID
-        self._beneficiary_names_dict = beneficiary_names_dict or {}
        # SINGLE_DEPLOYMENT_MODE: if beneficiary_names_dict is empty or None,
        # WellNuo API will automatically use the beneficiary name for this deployment_id
        # This is the Lite mode - we don't need to pass the names dict
        self._beneficiary_names_dict = beneficiary_names_dict if beneficiary_names_dict else None

    @property
    def model(self) -> str:
@@ -209,13 +212,19 @@
            "question": normalized_question,
            "deployment_id": self._deployment_id,
        }

-        # Add beneficiary_names_dict if available
        # Add beneficiary_names_dict ONLY if it's not empty
        # In SINGLE_DEPLOYMENT_MODE (Lite app), we don't send names dict
        # WellNuo API will use the beneficiary name for this deployment_id
        if self._beneficiary_names_dict:
            data["beneficiary_names_dict"] = json.dumps(
                self._beneficiary_names_dict
            )
            logger.info(
-                f"Using beneficiary_names_dict: {self._beneficiary_names_dict}"
                f"Full mode: Using beneficiary_names_dict: {self._beneficiary_names_dict}"
            )
        else:
            logger.info(
                f"Single deployment mode: deployment_id={self._deployment_id}, no beneficiary_names_dict"
            )

        async with session.post(WELLNUO_API_URL, data=data) as resp:
            result = await resp.json()

View File

@@ -11,6 +11,16 @@ const JULIA_TOKEN_SERVER = 'https://wellnuo.smartlaunchhub.com/julia';
export const VOICE_ID = 'Asteria';
export const VOICE_NAME = 'Asteria';

// ============================================================================
// SINGLE_DEPLOYMENT_MODE
// When true: sends only deploymentId (no beneficiaryNamesDict)
// When false: sends both deploymentId AND beneficiaryNamesDict
//
// Use true for WellNuo Lite (single beneficiary per user)
// Use false for full WellNuo app (multiple beneficiaries)
// ============================================================================
export const SINGLE_DEPLOYMENT_MODE = true;

// Beneficiary data to pass to voice agent
export interface BeneficiaryData {
  deploymentId: string;
@@ -40,8 +50,27 @@ export async function getToken(
): Promise<LiveKitTokenResponse> {
  try {
    console.log('[LiveKit] Getting token for user:', userId);
    console.log('[LiveKit] SINGLE_DEPLOYMENT_MODE:', SINGLE_DEPLOYMENT_MODE);

    // Prepare request body based on SINGLE_DEPLOYMENT_MODE
    let requestBody: { userId: string; beneficiaryData?: BeneficiaryData };

    if (SINGLE_DEPLOYMENT_MODE && beneficiaryData) {
      // In single deployment mode: send only deploymentId, no beneficiaryNamesDict
      requestBody = {
        userId,
        beneficiaryData: {
          deploymentId: beneficiaryData.deploymentId,
          beneficiaryNamesDict: {}, // Empty - no list of names
        },
      };
      console.log('[LiveKit] Single deployment mode - sending only deploymentId:', beneficiaryData.deploymentId);
    } else {
      // Full mode: send everything
      requestBody = { userId, beneficiaryData };
      if (beneficiaryData) {
-        console.log('[LiveKit] With beneficiary data:', beneficiaryData);
        console.log('[LiveKit] Full mode - sending beneficiary data:', beneficiaryData);
      }
    }

    // Request LiveKit token from Julia Token Server
@@ -50,7 +79,7 @@
      headers: {
        'Content-Type': 'application/json',
      },
-      body: JSON.stringify({ userId, beneficiaryData }),
      body: JSON.stringify(requestBody),
    });

    if (!response.ok) {
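
For reference, a sketch of calling the updated getToken from application code; the user id is generated the same way debug.tsx does it above, error handling is trimmed, and with SINGLE_DEPLOYMENT_MODE enabled the service strips the names dict before sending the request:

// Sketch only - exercising the updated getToken (not part of the diff).
import { getToken } from '@/services/livekitService';
import type { BeneficiaryData } from '@/services/livekitService';

async function requestJuliaToken(deploymentId: string) {
  const beneficiaryData: BeneficiaryData = {
    deploymentId,
    beneficiaryNamesDict: {}, // ignored in single deployment mode
  };

  const result = await getToken(`user-${Date.now()}`, beneficiaryData);
  if (!result.success || !result.data) {
    throw new Error(result.error || 'Failed to get token');
  }
  // result.data is the token payload returned by the Julia token server.
  return result.data;
}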