feat: Integrate LiveKit voice calls into chat screen

- Add voice call button in chat input area
- Implement LiveKit room connection with Julia AI agent
- Create full-screen voice call modal with:
  - Visual avatar with speaking indicator
  - Call duration timer
  - Agent state display (listening/thinking/speaking)
  - Hang up button
- Add real-time transcription tracking for voice calls
- Keep screen awake during active calls
- Integrate with existing VoiceTranscriptContext for history

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
parent 6f7c79f601
commit 89afe86f54
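
For context: the diff below consumes a getToken(userId, beneficiaryData) helper imported from '@/services/livekitService', which is not part of this commit. A minimal TypeScript sketch follows; the response shape mirrors exactly how startVoiceCall() consumes it below, while TOKEN_ENDPOINT and the request body are placeholder assumptions, not the real service.

// Hypothetical sketch of @/services/livekitService, inferred from usage in this diff.
export interface BeneficiaryData {
  deploymentId: string;
  beneficiaryNamesDict: Record<string, string>;
}

export interface TokenResult {
  success: boolean;
  data?: { token: string; wsUrl: string; roomName: string };
  error?: string;
}

// Placeholder endpoint; the real token server URL is not shown in this commit.
const TOKEN_ENDPOINT = 'https://example.com/livekit/token';

export async function getToken(
  userId: string,
  beneficiaryData: BeneficiaryData
): Promise<TokenResult> {
  try {
    const res = await fetch(TOKEN_ENDPOINT, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ userId, beneficiaryData }),
    });
    if (!res.ok) return { success: false, error: `HTTP ${res.status}` };
    return { success: true, data: await res.json() };
  } catch (e) {
    return { success: false, error: e instanceof Error ? e.message : String(e) };
  }
}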
@@ -1,7 +1,7 @@
/**
 * Chat Screen - Text Chat with Julia AI
 *
- * Clean text chat interface.
+ * Clean text chat interface with integrated voice calls.
 */

import React, { useState, useCallback, useRef, useEffect } from 'react';
@@ -16,17 +16,37 @@ import {
  ActivityIndicator,
  Keyboard,
  Platform,
  Alert,
} from 'react-native';
import { KeyboardAvoidingView } from 'react-native-keyboard-controller';
import { Ionicons } from '@expo/vector-icons';
import { SafeAreaView } from 'react-native-safe-area-context';
import { useRouter } from 'expo-router';
import { activateKeepAwakeAsync, deactivateKeepAwake } from 'expo-keep-awake';
import { api } from '@/services/api';
import { useBeneficiary } from '@/contexts/BeneficiaryContext';
import { useVoiceTranscript } from '@/contexts/VoiceTranscriptContext';
import { AppColors, BorderRadius, FontSizes, Spacing } from '@/constants/theme';
import type { Message, Beneficiary } from '@/types';

// LiveKit imports
import {
  registerGlobals,
  LiveKitRoom,
  useVoiceAssistant,
  useConnectionState,
  useRoomContext,
  BarVisualizer,
  useTrackTranscription,
  useTracks,
} from '@livekit/react-native';
import { ConnectionState, RoomEvent, Track, TranscriptionSegment } from 'livekit-client';
import { getToken, type BeneficiaryData } from '@/services/livekitService';
import { useAuth } from '@/contexts/AuthContext';

// Register LiveKit globals (must be called before using LiveKit)
registerGlobals();

const API_URL = 'https://eluxnetworks.net/function/well-api/api';

// WellNuo API credentials (same as julia-agent)
@@ -106,10 +126,233 @@ function normalizeQuestion(userMessage: string): string {
  return userMessage;
}

// ============================================================================
// Voice Call Overlay Component
// ============================================================================

interface VoiceCallOverlayProps {
  onHangUp: () => void;
  onTranscript: (role: 'user' | 'assistant', text: string) => void;
  beneficiaryName?: string;
}

function VoiceCallContent({ onHangUp, onTranscript, beneficiaryName }: VoiceCallOverlayProps) {
  const room = useRoomContext();
  const connectionState = useConnectionState();
  const { state: agentState, audioTrack } = useVoiceAssistant();
  const [callDuration, setCallDuration] = useState(0);
  const [lastProcessedId, setLastProcessedId] = useState<string | null>(null);

  // Track all audio tracks for transcription
  const tracks = useTracks([Track.Source.Microphone], { onlySubscribed: true });

  // Get transcription from agent's audio track
  const { segments: agentSegments } = useTrackTranscription(audioTrack);

  // Get transcription from user's microphone
  const localTrack = tracks.find(t => t.participant?.isLocal);
  const { segments: userSegments } = useTrackTranscription(localTrack);

  // Process agent transcription
  useEffect(() => {
    if (agentSegments && agentSegments.length > 0) {
      const lastSegment = agentSegments[agentSegments.length - 1];
      if (lastSegment && lastSegment.final && lastSegment.id !== lastProcessedId) {
        setLastProcessedId(lastSegment.id);
        onTranscript('assistant', lastSegment.text);
        console.log('[VoiceCall] Agent said:', lastSegment.text);
      }
    }
  }, [agentSegments, lastProcessedId, onTranscript]);

  // Process user transcription
  const [lastUserSegmentId, setLastUserSegmentId] = useState<string | null>(null);
  useEffect(() => {
    if (userSegments && userSegments.length > 0) {
      const lastSegment = userSegments[userSegments.length - 1];
      if (lastSegment && lastSegment.final && lastSegment.id !== lastUserSegmentId) {
        setLastUserSegmentId(lastSegment.id);
        onTranscript('user', lastSegment.text);
        console.log('[VoiceCall] User said:', lastSegment.text);
      }
    }
  }, [userSegments, lastUserSegmentId, onTranscript]);
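
  // For reference: the `segments` consumed above are livekit-client
  // TranscriptionSegment objects. Only three fields are relied on here
  // (shape paraphrased from usage, not the library's full declaration):
  //   { id: string; text: string; final: boolean }
  // `final` flips to true once a segment's text will no longer change, and
  // `id` identifies a segment across interim updates; the de-duplication in
  // both effects above relies on exactly these two properties.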

  // Call duration timer
  useEffect(() => {
    if (connectionState === ConnectionState.Connected) {
      const interval = setInterval(() => {
        setCallDuration(prev => prev + 1);
      }, 1000);
      return () => clearInterval(interval);
    }
  }, [connectionState]);

  // Keep screen awake during call
  useEffect(() => {
    activateKeepAwakeAsync('voice-call');
    return () => {
      deactivateKeepAwake('voice-call');
    };
  }, []);

  // Format duration as mm:ss
  const formatDuration = (seconds: number) => {
    const mins = Math.floor(seconds / 60);
    const secs = seconds % 60;
    return `${mins.toString().padStart(2, '0')}:${secs.toString().padStart(2, '0')}`;
  };
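  // e.g. formatDuration(65) returns '01:05'; formatDuration(605) returns '10:05'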

  // Get status text based on agent state
  const getStatusText = () => {
    if (connectionState === ConnectionState.Connecting) return 'Connecting...';
    if (connectionState === ConnectionState.Reconnecting) return 'Reconnecting...';
    if (connectionState !== ConnectionState.Connected) return 'Disconnected';

    switch (agentState) {
      case 'listening': return 'Listening...';
      case 'thinking': return 'Thinking...';
      case 'speaking': return 'Speaking...';
      case 'connecting': return 'Connecting to Julia...';
      case 'initializing': return 'Starting...';
      default: return 'Connected';
    }
  };

  return (
    <View style={voiceStyles.container}>
      <View style={voiceStyles.content}>
        {/* Avatar */}
        <View style={voiceStyles.avatarContainer}>
          <View style={[
            voiceStyles.avatar,
            agentState === 'speaking' && voiceStyles.avatarSpeaking,
          ]}>
            <Text style={voiceStyles.avatarText}>J</Text>
          </View>
          {agentState === 'speaking' && (
            <View style={voiceStyles.speakingRing} />
          )}
        </View>

        {/* Name and status */}
        <Text style={voiceStyles.name}>Julia AI</Text>
        {beneficiaryName && (
          <Text style={voiceStyles.beneficiary}>About {beneficiaryName}</Text>
        )}
        <Text style={voiceStyles.status}>{getStatusText()}</Text>

        {/* Duration */}
        {connectionState === ConnectionState.Connected && (
          <Text style={voiceStyles.duration}>{formatDuration(callDuration)}</Text>
        )}

        {/* Audio Visualizer */}
        {audioTrack && agentState === 'speaking' && (
          <View style={voiceStyles.visualizerContainer}>
            <BarVisualizer
              trackRef={{ participant: audioTrack.participant, source: Track.Source.Microphone, publication: audioTrack.publication }}
              barCount={5}
              options={{ minHeight: 10 }}
            />
          </View>
        )}
      </View>

      {/* Hang up button */}
      <TouchableOpacity style={voiceStyles.hangUpButton} onPress={onHangUp}>
        <Ionicons name="call" size={32} color={AppColors.white} style={{ transform: [{ rotate: '135deg' }] }} />
      </TouchableOpacity>
    </View>
  );
}
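
// Note on the BarVisualizer above: its trackRef is assembled by hand from the
// agent's audioTrack. Since audioTrack already exposes .participant and
// .publication, it is likely itself a track reference, so passing
// trackRef={audioTrack} directly may be an equivalent, simpler alternative;
// treat that as an assumption rather than a confirmed API guarantee.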

const voiceStyles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: 'rgba(0, 0, 0, 0.95)',
    justifyContent: 'space-between',
    alignItems: 'center',
    paddingVertical: 60,
  },
  content: {
    flex: 1,
    justifyContent: 'center',
    alignItems: 'center',
  },
  avatarContainer: {
    position: 'relative',
    marginBottom: Spacing.lg,
  },
  avatar: {
    width: 120,
    height: 120,
    borderRadius: 60,
    backgroundColor: AppColors.success,
    justifyContent: 'center',
    alignItems: 'center',
  },
  avatarSpeaking: {
    backgroundColor: AppColors.primary,
  },
  avatarText: {
    fontSize: 48,
    fontWeight: '600',
    color: AppColors.white,
  },
  speakingRing: {
    position: 'absolute',
    top: -10,
    left: -10,
    right: -10,
    bottom: -10,
    borderRadius: 70,
    borderWidth: 3,
    borderColor: AppColors.primary,
    opacity: 0.5,
  },
  name: {
    fontSize: FontSizes['2xl'],
    fontWeight: '600',
    color: AppColors.white,
    marginBottom: Spacing.xs,
  },
  beneficiary: {
    fontSize: FontSizes.base,
    color: 'rgba(255, 255, 255, 0.7)',
    marginBottom: Spacing.sm,
  },
  status: {
    fontSize: FontSizes.base,
    color: AppColors.success,
    marginBottom: Spacing.md,
  },
  duration: {
    fontSize: FontSizes.lg,
    color: 'rgba(255, 255, 255, 0.8)',
    fontVariant: ['tabular-nums'],
  },
  visualizerContainer: {
    marginTop: Spacing.xl,
    height: 60,
    width: 200,
  },
  hangUpButton: {
    width: 72,
    height: 72,
    borderRadius: 36,
    backgroundColor: AppColors.error,
    justifyContent: 'center',
    alignItems: 'center',
    marginBottom: Spacing.xl,
  },
});

export default function ChatScreen() {
  const router = useRouter();
  const { currentBeneficiary, setCurrentBeneficiary } = useBeneficiary();
-  const { getTranscriptAsMessages, hasNewTranscript, markTranscriptAsShown } = useVoiceTranscript();
+  const { getTranscriptAsMessages, hasNewTranscript, markTranscriptAsShown, addTranscriptEntry, clearTranscript } = useVoiceTranscript();
  const { user } = useAuth();

  // Chat state
  const [messages, setMessages] = useState<Message[]>([
@@ -121,6 +364,12 @@ export default function ChatScreen() {
    },
  ]);

  // Voice call state
  const [isVoiceCallActive, setIsVoiceCallActive] = useState(false);
  const [voiceToken, setVoiceToken] = useState<string | undefined>(undefined);
  const [voiceWsUrl, setVoiceWsUrl] = useState<string | undefined>(undefined);
  const [isConnectingVoice, setIsConnectingVoice] = useState(false);

  // Add voice call transcript to messages when returning from call
  useEffect(() => {
    if (hasNewTranscript) {
@@ -208,6 +457,69 @@
    setShowBeneficiaryPicker(false);
  }, [setCurrentBeneficiary]);

  // ============================================================================
  // Voice Call Functions
  // ============================================================================

  // Start voice call
  const startVoiceCall = useCallback(async () => {
    if (isConnectingVoice || isVoiceCallActive) return;

    setIsConnectingVoice(true);
    console.log('[Chat] Starting voice call...');

    try {
      // Build beneficiary data for the agent
      const beneficiaryData: BeneficiaryData = {
        deploymentId: currentBeneficiary?.id?.toString() || beneficiaries[0]?.id?.toString() || '21',
        beneficiaryNamesDict: {},
      };

      // Add names dict if not in single deployment mode
      if (!SINGLE_DEPLOYMENT_MODE) {
        beneficiaries.forEach(b => {
          beneficiaryData.beneficiaryNamesDict[b.id.toString()] = b.name;
        });
      }

      // Get LiveKit token
      const userIdStr = user?.user_id?.toString() || 'user-' + Date.now();
      const tokenResponse = await getToken(userIdStr, beneficiaryData);

      if (!tokenResponse.success || !tokenResponse.data) {
        throw new Error(tokenResponse.error || 'Failed to get voice token');
      }

      console.log('[Chat] Got voice token, connecting to room:', tokenResponse.data.roomName);

      // Clear previous transcript and start call
      clearTranscript();
      setVoiceToken(tokenResponse.data.token);
      setVoiceWsUrl(tokenResponse.data.wsUrl);
      setIsVoiceCallActive(true);
    } catch (error) {
      console.error('[Chat] Voice call error:', error);
      Alert.alert(
        'Voice Call Error',
        error instanceof Error ? error.message : 'Failed to start voice call'
      );
    } finally {
      setIsConnectingVoice(false);
    }
  }, [isConnectingVoice, isVoiceCallActive, currentBeneficiary, beneficiaries, user, clearTranscript]);
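
  // Note: SINGLE_DEPLOYMENT_MODE and `beneficiaries` are defined elsewhere
  // in this file; those definitions fall outside the hunks shown in this diff.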

  // End voice call. Clearing isVoiceCallActive hides the modal, and dropping
  // the token unmounts LiveKitRoom, which tears down the connection.
  const endVoiceCall = useCallback(() => {
    console.log('[Chat] Ending voice call...');
    setIsVoiceCallActive(false);
    setVoiceToken(undefined);
    setVoiceWsUrl(undefined);
  }, []);

  // Handle voice transcript entries from the call overlay
  const handleVoiceTranscript = useCallback((role: 'user' | 'assistant', text: string) => {
    addTranscriptEntry(role, text);
  }, [addTranscriptEntry]);
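
  // The VoiceTranscriptContext implementation is not part of this diff.
  // Inferred from the calls in this file, its value plausibly looks like
  // (method names taken from usage here; return types are assumptions):
  //   interface VoiceTranscriptContextValue {
  //     hasNewTranscript: boolean;
  //     addTranscriptEntry(role: 'user' | 'assistant', text: string): void;
  //     clearTranscript(): void;
  //     getTranscriptAsMessages(): Message[];
  //     markTranscriptAsShown(): void;
  //   }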

  // Cached API token for WellNuo
  const apiTokenRef = useRef<string | null>(null);
@@ -466,6 +778,19 @@ export default function ChatScreen() {

        {/* Input */}
        <View style={styles.inputContainer}>
          {/* Voice Call Button */}
          <TouchableOpacity
            style={[styles.voiceButton, isConnectingVoice && styles.voiceButtonConnecting]}
            onPress={startVoiceCall}
            disabled={isConnectingVoice}
          >
            {isConnectingVoice ? (
              <ActivityIndicator size="small" color={AppColors.primary} />
            ) : (
              <Ionicons name="call" size={20} color={AppColors.primary} />
            )}
          </TouchableOpacity>

          <TextInput
            style={styles.input}
            placeholder="Type a message..."
@@ -489,6 +814,44 @@
          </TouchableOpacity>
        </View>
      </KeyboardAvoidingView>

      {/* Voice Call Modal */}
      <Modal
        visible={isVoiceCallActive}
        animationType="slide"
        presentationStyle="fullScreen"
        onRequestClose={endVoiceCall}
      >
        <SafeAreaView style={{ flex: 1, backgroundColor: 'black' }} edges={['top', 'bottom']}>
          {voiceToken && voiceWsUrl ? (
            <LiveKitRoom
              serverUrl={voiceWsUrl}
              token={voiceToken}
              connect={true}
              audio={true}
              video={false}
              onConnected={() => console.log('[Chat] LiveKit connected')}
              onDisconnected={endVoiceCall}
              onError={(error) => {
                console.error('[Chat] LiveKit error:', error);
                Alert.alert('Voice Call Error', error.message);
                endVoiceCall();
              }}
            >
              <VoiceCallContent
                onHangUp={endVoiceCall}
                onTranscript={handleVoiceTranscript}
                beneficiaryName={currentBeneficiary?.name}
              />
            </LiveKitRoom>
          ) : (
            <View style={{ flex: 1, justifyContent: 'center', alignItems: 'center' }}>
              <ActivityIndicator size="large" color={AppColors.primary} />
              <Text style={{ color: 'white', marginTop: 16 }}>Connecting...</Text>
            </View>
          )}
        </SafeAreaView>
      </Modal>
    </SafeAreaView>
  );
}
@@ -631,6 +994,20 @@ const styles = StyleSheet.create({
    maxHeight: 100,
    marginRight: Spacing.sm,
  },
  voiceButton: {
    width: 44,
    height: 44,
    borderRadius: BorderRadius.full,
    backgroundColor: AppColors.surface,
    justifyContent: 'center',
    alignItems: 'center',
    marginRight: Spacing.sm,
    borderWidth: 1,
    borderColor: AppColors.primary,
  },
  voiceButtonConnecting: {
    opacity: 0.6,
  },
  sendButton: {
    width: 44,
    height: 44,