wellnua-lite-Robert/contexts/VoiceTranscriptContext.tsx
Sergei dde0ecb9cd Add Julia AI voice agent with LiveKit integration
Voice AI Features:
- LiveKit Agents integration for real-time voice calls
- Julia AI agent (Python) deployed to LiveKit Cloud
- Token server for authentication
- Debug screen with voice call testing
- Voice call screen with full-screen UI

Agent Configuration:
- STT: Deepgram Nova-2
- LLM: OpenAI GPT-4o
- TTS: Deepgram Aura Asteria (female voice)
- Turn Detection: LiveKit Multilingual Model
- VAD: Silero
- Noise Cancellation: LiveKit BVC
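As a quick reference, here is the same pipeline restated as a plain TypeScript object. The `JuliaVoicePipeline` type and its string values are illustrative only; the actual agent is configured in the Python code under julia-agent/, not in TypeScript.

```typescript
// Illustrative shape only: mirrors the settings listed above, not the real
// Python configuration in julia-agent/.
interface JuliaVoicePipeline {
  stt: string;               // speech-to-text model
  llm: string;               // language model
  tts: string;               // text-to-speech voice
  turnDetection: string;     // end-of-turn detection model
  vad: string;               // voice activity detection
  noiseCancellation: string; // input noise suppression
}

const juliaPipeline: JuliaVoicePipeline = {
  stt: 'Deepgram Nova-2',
  llm: 'OpenAI GPT-4o',
  tts: 'Deepgram Aura Asteria',
  turnDetection: 'LiveKit Multilingual Model',
  vad: 'Silero',
  noiseCancellation: 'LiveKit BVC',
};
```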

Files added:
- julia-agent/ - Complete agent code and token server
- app/voice-call.tsx - Full-screen voice call UI
- services/livekitService.ts - LiveKit client service
- contexts/VoiceTranscriptContext.tsx - Transcript state
- polyfills/livekit-globals.ts - WebRTC polyfills
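A minimal sketch of the client-side flow that services/livekitService.ts presumably implements: fetch a room token from the token server, then join the LiveKit room. The `connectToVoiceCall` helper, the `/token` route, and the response shape are assumptions made for illustration; `Room.connect` and `setMicrophoneEnabled` are standard livekit-client calls.

```typescript
import { Room } from 'livekit-client';

// Sketch only: the /token route and its response shape are assumed, not taken
// from the julia-agent token server.
export async function connectToVoiceCall(
  tokenServerUrl: string,
  roomName: string,
  identity: string
): Promise<Room> {
  const res = await fetch(
    `${tokenServerUrl}/token?room=${encodeURIComponent(roomName)}&identity=${encodeURIComponent(identity)}`
  );
  const { serverUrl, token } = await res.json();

  const room = new Room();
  await room.connect(serverUrl, token); // join the LiveKit room as this participant
  await room.localParticipant.setMicrophoneEnabled(true); // publish the mic so Julia can hear the user
  return room;
}
```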

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2026-01-17 17:58:31 -08:00

/**
 * Voice Transcript Context
 *
 * Stores the transcript from a voice call so the chat can show what was said
 * during the call after it ends.
 */
import React, { createContext, useContext, useState, useCallback, ReactNode } from 'react';
import type { Message } from '@/types';
interface TranscriptEntry {
  id: string;
  role: 'user' | 'assistant';
  text: string;
  timestamp: Date;
}

interface VoiceTranscriptContextValue {
  // Transcript entries from the last voice call
  transcript: TranscriptEntry[];
  // Add a new transcript entry
  addTranscriptEntry: (role: 'user' | 'assistant', text: string) => void;
  // Clear transcript (call this when starting a new call)
  clearTranscript: () => void;
  // Get transcript as chat messages
  getTranscriptAsMessages: () => Message[];
  // Check if there's a new transcript to show
  hasNewTranscript: boolean;
  // Mark transcript as shown
  markTranscriptAsShown: () => void;
}

const VoiceTranscriptContext = createContext<VoiceTranscriptContextValue | undefined>(undefined);
export function VoiceTranscriptProvider({ children }: { children: ReactNode }) {
  const [transcript, setTranscript] = useState<TranscriptEntry[]>([]);
  const [hasNewTranscript, setHasNewTranscript] = useState(false);

  const addTranscriptEntry = useCallback((role: 'user' | 'assistant', text: string) => {
    if (!text.trim()) return;
    const entry: TranscriptEntry = {
      id: `voice-${Date.now()}-${Math.random().toString(36).slice(2)}`,
      role,
      text: text.trim(),
      timestamp: new Date(),
    };
    setTranscript(prev => [...prev, entry]);
    setHasNewTranscript(true);
    console.log(`[VoiceTranscript] Added: ${role} - ${text.slice(0, 50)}...`);
  }, []);

  const clearTranscript = useCallback(() => {
    setTranscript([]);
    setHasNewTranscript(false);
    console.log('[VoiceTranscript] Cleared');
  }, []);

  const getTranscriptAsMessages = useCallback((): Message[] => {
    return transcript.map(entry => ({
      id: entry.id,
      role: entry.role,
      content: entry.text,
      timestamp: entry.timestamp,
      isVoice: true,
    }));
  }, [transcript]);

  const markTranscriptAsShown = useCallback(() => {
    setHasNewTranscript(false);
  }, []);

  return (
    <VoiceTranscriptContext.Provider
      value={{
        transcript,
        addTranscriptEntry,
        clearTranscript,
        getTranscriptAsMessages,
        hasNewTranscript,
        markTranscriptAsShown,
      }}
    >
      {children}
    </VoiceTranscriptContext.Provider>
  );
}
export function useVoiceTranscript() {
  const context = useContext(VoiceTranscriptContext);
  if (!context) {
    throw new Error('useVoiceTranscript must be used within VoiceTranscriptProvider');
  }
  return context;
}
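Putting the pieces together, here is a rough sketch of how a chat screen might consume this context after a call ends. The `ChatScreen` component and its `appendMessages` prop are hypothetical; only `useVoiceTranscript` and the `Message` type come from the repo.

```typescript
// Hypothetical consumer: only useVoiceTranscript and Message come from the repo.
import React, { useEffect } from 'react';
import { useVoiceTranscript } from '@/contexts/VoiceTranscriptContext';
import type { Message } from '@/types';

function ChatScreen({ appendMessages }: { appendMessages: (messages: Message[]) => void }) {
  const { hasNewTranscript, getTranscriptAsMessages, markTranscriptAsShown } = useVoiceTranscript();

  useEffect(() => {
    // Assumes this runs after the voice call ends: fold the transcript into
    // the chat once, then clear the "new transcript" flag.
    if (hasNewTranscript) {
      appendMessages(getTranscriptAsMessages());
      markTranscriptAsShown();
    }
  }, [hasNewTranscript, getTranscriptAsMessages, markTranscriptAsShown, appendMessages]);

  return null; // chat UI omitted; ChatScreen must render inside VoiceTranscriptProvider
}

export default ChatScreen;
```

On the other side, app/voice-call.tsx presumably calls clearTranscript() when a call starts and addTranscriptEntry() for each finalized transcription segment received during the call.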