⚠️ This is test/experimental code for API integration testing. Do not use in production. Includes: - WellNuo API integration (dashboard, patient context) - Playwright tests for API verification - WebView component for dashboard embedding - API documentation 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
439 lines
12 KiB
TypeScript
439 lines
12 KiB
TypeScript
import React, { useState, useEffect, useCallback } from 'react';
|
|
import {
|
|
StyleSheet,
|
|
View,
|
|
Text,
|
|
StatusBar,
|
|
TouchableOpacity,
|
|
Alert,
|
|
} from 'react-native';
|
|
import { SafeAreaView } from 'react-native-safe-area-context';
|
|
import { Ionicons } from '@expo/vector-icons';
|
|
import { OPENAI_API_KEY, WEBHOOK_URL } from '@env';
|
|
import { VoiceButton, StatusIndicator, TranscriptView } from './src/components';
|
|
import { useVoiceAssistant, PermissionStatus } from './src/hooks/useVoiceAssistant';
|
|
import { fetchWebhookContext, getDefaultContext } from './src/services/webhookService';
|
|
import { WebhookContext } from './src/types';
|
|
|
|
// Wellnuo brand colors
|
|
const COLORS = {
|
|
primary: '#0074be',
|
|
primaryDark: '#005a94',
|
|
teal: '#5db1a8',
|
|
purple: '#ab5b8d',
|
|
white: '#ffffff',
|
|
background: '#f4f6f8',
|
|
textDark: '#515b69',
|
|
textLight: '#7f8795',
|
|
error: '#dc3545',
|
|
};
|
|
|
|
/**
 * A single finalized turn in the conversation transcript.
 * Partial (in-flight) text is held separately in component state and is
 * only promoted to a Message once the turn completes.
 */
interface Message {
  /** Who produced this turn. */
  role: 'user' | 'assistant';
  /** The spoken / streamed text of the turn. */
  content: string;
  /** When the turn was committed to history. */
  timestamp: Date;
}
|
|
|
|
export default function App() {
|
|
const [context, setContext] = useState<WebhookContext>(getDefaultContext());
|
|
const [messages, setMessages] = useState<Message[]>([]);
|
|
const [currentTranscript, setCurrentTranscript] = useState('');
|
|
const [assistantText, setAssistantText] = useState('');
|
|
const [isInitialized, setIsInitialized] = useState(false);
|
|
const [isMuted, setIsMuted] = useState(false);
|
|
const [apiKey] = useState(OPENAI_API_KEY || '');
|
|
|
|
// Fetch context from webhook on mount
|
|
useEffect(() => {
|
|
async function initialize() {
|
|
if (WEBHOOK_URL) {
|
|
const webhookContext = await fetchWebhookContext(WEBHOOK_URL);
|
|
setContext(webhookContext);
|
|
}
|
|
setIsInitialized(true);
|
|
}
|
|
initialize();
|
|
}, []);
|
|
|
|
const handleTranscript = useCallback((text: string, isFinal: boolean) => {
|
|
if (isFinal) {
|
|
setMessages(prev => [
|
|
...prev,
|
|
{ role: 'user', content: text, timestamp: new Date() },
|
|
]);
|
|
setCurrentTranscript('');
|
|
} else {
|
|
setCurrentTranscript(text);
|
|
}
|
|
}, []);
|
|
|
|
const handleAssistantResponse = useCallback((text: string) => {
|
|
setAssistantText(prev => prev + text);
|
|
}, []);
|
|
|
|
const {
|
|
state,
|
|
connectionStatus,
|
|
permissionStatus,
|
|
isInConversation,
|
|
connect,
|
|
disconnect,
|
|
startContinuousListening,
|
|
stopContinuousListening,
|
|
interrupt,
|
|
openSettings,
|
|
} = useVoiceAssistant({
|
|
apiKey,
|
|
context,
|
|
onTranscript: handleTranscript,
|
|
onAssistantResponse: handleAssistantResponse,
|
|
});
|
|
|
|
// Save assistant response when done
|
|
useEffect(() => {
|
|
if (!state.isSpeaking && !state.isProcessing && assistantText) {
|
|
setMessages(prev => [
|
|
...prev,
|
|
{ role: 'assistant', content: assistantText, timestamp: new Date() },
|
|
]);
|
|
setAssistantText('');
|
|
}
|
|
}, [state.isSpeaking, state.isProcessing, assistantText]);
|
|
|
|
// Toggle mute
|
|
const handleToggleMute = useCallback(async () => {
|
|
if (isMuted) {
|
|
// Unmute - resume listening
|
|
setIsMuted(false);
|
|
if (connectionStatus === 'connected') {
|
|
await startContinuousListening();
|
|
}
|
|
} else {
|
|
// Mute - stop listening but keep connection
|
|
setIsMuted(true);
|
|
await stopContinuousListening();
|
|
}
|
|
}, [isMuted, connectionStatus, startContinuousListening, stopContinuousListening]);
|
|
|
|
// Toggle conversation on/off with single tap
|
|
const handleToggleConversation = useCallback(async () => {
|
|
if (!apiKey) {
|
|
Alert.alert(
|
|
'API Key Required',
|
|
'Please add your OpenAI API key in the app configuration.',
|
|
[{ text: 'OK' }]
|
|
);
|
|
return;
|
|
}
|
|
|
|
// If in conversation, end it
|
|
if (isInConversation || connectionStatus === 'connected') {
|
|
await stopContinuousListening();
|
|
disconnect();
|
|
setIsMuted(false);
|
|
return;
|
|
}
|
|
|
|
// Start new conversation
|
|
await connect();
|
|
}, [apiKey, isInConversation, connectionStatus, connect, disconnect, stopContinuousListening]);
|
|
|
|
// Start continuous listening after connected (if not muted)
|
|
useEffect(() => {
|
|
if (connectionStatus === 'connected' && !isInConversation && !isMuted) {
|
|
const timer = setTimeout(() => {
|
|
startContinuousListening();
|
|
}, 500);
|
|
return () => clearTimeout(timer);
|
|
}
|
|
}, [connectionStatus, isInConversation, isMuted, startContinuousListening]);
|
|
|
|
// Interrupt AI when tapping during speech
|
|
const handleInterrupt = useCallback(() => {
|
|
if (state.isSpeaking) {
|
|
interrupt();
|
|
}
|
|
}, [state.isSpeaking, interrupt]);
|
|
|
|
// Main button: Start/End conversation
|
|
const handleMainButtonPress = useCallback(() => {
|
|
handleToggleConversation();
|
|
}, [handleToggleConversation]);
|
|
|
|
// For legacy VoiceButton compatibility
|
|
const handlePressIn = useCallback(() => {
|
|
// Main button now only starts/stops conversation
|
|
handleMainButtonPress();
|
|
}, [handleMainButtonPress]);
|
|
|
|
const handlePressOut = useCallback(() => {
|
|
// No longer needed for toggle mode
|
|
}, []);
|
|
|
|
const handleClearHistory = useCallback(() => {
|
|
Alert.alert(
|
|
'Clear History',
|
|
'Are you sure you want to clear the conversation history?',
|
|
[
|
|
{ text: 'Cancel', style: 'cancel' },
|
|
{
|
|
text: 'Clear',
|
|
style: 'destructive',
|
|
onPress: () => setMessages([]),
|
|
},
|
|
]
|
|
);
|
|
}, []);
|
|
|
|
if (!isInitialized) {
|
|
return (
|
|
<View style={styles.loadingContainer}>
|
|
<Text style={styles.loadingText}>Initializing...</Text>
|
|
</View>
|
|
);
|
|
}
|
|
|
|
return (
|
|
<View style={styles.container}>
|
|
<StatusBar barStyle="dark-content" backgroundColor={COLORS.white} />
|
|
<SafeAreaView style={styles.safeArea}>
|
|
{/* Header */}
|
|
<View style={styles.header}>
|
|
<Text style={styles.title}>Wellnuo</Text>
|
|
<Text style={styles.subtitle}>Your AI Health Assistant</Text>
|
|
{messages.length > 0 && (
|
|
<TouchableOpacity
|
|
style={styles.clearButton}
|
|
onPress={handleClearHistory}
|
|
>
|
|
<Ionicons name="trash-outline" size={18} color={COLORS.textLight} />
|
|
</TouchableOpacity>
|
|
)}
|
|
</View>
|
|
|
|
{/* Transcript View */}
|
|
<View style={styles.transcriptContainer}>
|
|
<TranscriptView
|
|
messages={messages}
|
|
currentTranscript={currentTranscript || assistantText}
|
|
/>
|
|
</View>
|
|
|
|
{/* Voice Controls */}
|
|
<View style={styles.controlsContainer}>
|
|
{/* Permission Denied Message */}
|
|
{permissionStatus === 'denied' && (
|
|
<View style={styles.permissionDenied}>
|
|
<Ionicons name="mic-off" size={48} color={COLORS.error} />
|
|
<Text style={styles.permissionTitle}>Microphone Access Required</Text>
|
|
<Text style={styles.permissionText}>
|
|
To use voice chat, please enable microphone access in Settings.
|
|
</Text>
|
|
<TouchableOpacity style={styles.settingsButton} onPress={openSettings}>
|
|
<Text style={styles.settingsButtonText}>Open Settings</Text>
|
|
</TouchableOpacity>
|
|
</View>
|
|
)}
|
|
|
|
{permissionStatus !== 'denied' && (
|
|
<>
|
|
<StatusIndicator
|
|
connectionStatus={connectionStatus}
|
|
isListening={state.isListening && !isMuted}
|
|
isSpeaking={state.isSpeaking}
|
|
isProcessing={state.isProcessing}
|
|
/>
|
|
|
|
<View style={styles.buttonsRow}>
|
|
{/* Main Call Button - Start/End Call */}
|
|
<VoiceButton
|
|
isListening={(state.isListening || isInConversation) && !isMuted}
|
|
isSpeaking={state.isSpeaking}
|
|
isProcessing={state.isProcessing}
|
|
isInCall={isInConversation || connectionStatus === 'connected'}
|
|
disabled={!apiKey}
|
|
onPressIn={handlePressIn}
|
|
onPressOut={handlePressOut}
|
|
style={styles.voiceButton}
|
|
/>
|
|
</View>
|
|
|
|
{/* Mute Button - only during call */}
|
|
{(isInConversation || connectionStatus === 'connected') && (
|
|
<TouchableOpacity
|
|
style={[styles.muteButton, isMuted && styles.muteButtonActive]}
|
|
onPress={handleToggleMute}
|
|
>
|
|
<Ionicons
|
|
name={isMuted ? 'mic-off' : 'mic'}
|
|
size={20}
|
|
color={isMuted ? COLORS.white : COLORS.textDark}
|
|
/>
|
|
<Text style={[styles.muteButtonText, isMuted && styles.muteButtonTextActive]}>
|
|
{isMuted ? 'Unmute' : 'Mute'}
|
|
</Text>
|
|
</TouchableOpacity>
|
|
)}
|
|
|
|
{state.error && (
|
|
<Text style={styles.errorText}>{state.error}</Text>
|
|
)}
|
|
|
|
<Text style={styles.hint}>
|
|
{connectionStatus !== 'connected'
|
|
? 'Tap to call Julia'
|
|
: isInConversation
|
|
? 'Tap again to end call'
|
|
: ''}
|
|
</Text>
|
|
</>
|
|
)}
|
|
</View>
|
|
</SafeAreaView>
|
|
</View>
|
|
);
|
|
}
|
|
|
|
// Screen styles. Grouped roughly top-to-bottom of the layout:
// shell -> loading -> header -> transcript -> controls -> permission UI.
const styles = StyleSheet.create({
  // --- App shell ---
  container: {
    flex: 1,
    backgroundColor: COLORS.background,
  },
  safeArea: {
    flex: 1,
  },
  // --- Pre-initialization loading screen ---
  loadingContainer: {
    flex: 1,
    alignItems: 'center',
    justifyContent: 'center',
    backgroundColor: COLORS.background,
  },
  loadingText: {
    fontSize: 18,
    color: COLORS.textDark,
  },
  // --- Header (brand title + optional clear-history button) ---
  header: {
    paddingHorizontal: 20,
    paddingTop: 16,
    paddingBottom: 16,
    backgroundColor: COLORS.white,
    alignItems: 'center',
    borderBottomWidth: 1,
    borderBottomColor: '#e5e7eb',
  },
  title: {
    fontSize: 28,
    fontWeight: '600',
    color: COLORS.primary,
    letterSpacing: 0.5,
  },
  subtitle: {
    fontSize: 14,
    color: COLORS.textLight,
    marginTop: 4,
  },
  // Trash icon pinned to the header's top-right corner.
  clearButton: {
    position: 'absolute',
    right: 20,
    top: 20,
    padding: 8,
  },
  // --- Transcript card (elevated white panel) ---
  transcriptContainer: {
    flex: 1,
    backgroundColor: COLORS.white,
    marginHorizontal: 16,
    marginVertical: 12,
    borderRadius: 12,
    overflow: 'hidden',
    shadowColor: '#000',
    shadowOffset: { width: 0, height: 2 },
    shadowOpacity: 0.05,
    shadowRadius: 8,
    elevation: 2, // Android shadow counterpart
  },
  // --- Bottom control strip ---
  controlsContainer: {
    paddingVertical: 20,
    paddingHorizontal: 20,
    backgroundColor: COLORS.white,
    alignItems: 'center',
    borderTopWidth: 1,
    borderTopColor: '#e5e7eb',
  },
  buttonsRow: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
  },
  voiceButton: {
    marginVertical: 8,
  },
  // Mute pill; turns red when active (see muteButtonActive).
  muteButton: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
    paddingHorizontal: 16,
    paddingVertical: 10,
    borderRadius: 20,
    backgroundColor: COLORS.background,
    borderWidth: 1,
    borderColor: '#e5e7eb',
    marginTop: 12,
    gap: 6,
  },
  muteButtonActive: {
    backgroundColor: COLORS.error,
    borderColor: COLORS.error,
  },
  muteButtonText: {
    fontSize: 14,
    fontWeight: '500',
    color: COLORS.textDark,
  },
  muteButtonTextActive: {
    color: COLORS.white,
  },
  errorText: {
    color: COLORS.error,
    fontSize: 14,
    textAlign: 'center',
    marginTop: 8,
  },
  // Short instruction line under the call button.
  hint: {
    color: COLORS.textLight,
    fontSize: 13,
    textAlign: 'center',
    marginTop: 12,
  },
  // --- Microphone-permission-denied panel ---
  permissionDenied: {
    alignItems: 'center',
    paddingVertical: 20,
    paddingHorizontal: 16,
  },
  permissionTitle: {
    color: COLORS.textDark,
    fontSize: 18,
    fontWeight: '600',
    marginTop: 16,
    marginBottom: 8,
    textAlign: 'center',
  },
  permissionText: {
    color: COLORS.textLight,
    fontSize: 14,
    textAlign: 'center',
    marginBottom: 20,
    lineHeight: 20,
  },
  settingsButton: {
    backgroundColor: COLORS.primary,
    paddingHorizontal: 24,
    paddingVertical: 12,
    borderRadius: 8,
  },
  settingsButtonText: {
    color: COLORS.white,
    fontSize: 16,
    fontWeight: '600',
  },
});
|