'use client'; import { useState, useRef, useEffect, useMemo } from 'react'; import { useAuth } from '@/contexts/AuthContext'; import { usePathname, useRouter } from 'next/navigation'; import Link from 'next/link'; import { Loader2, MessageSquare, Mic, MicOff, PhoneCall, Send, Sparkles, } from 'lucide-react'; import { useVapi } from '@/hooks/useVapi'; import toast from 'react-hot-toast'; interface Message { role: 'user' | 'assistant'; content: string; } export default function Chat() { const { user, isLoading: authLoading } = useAuth(); const router = useRouter(); const pathname = usePathname(); const [messages, setMessages] = useState([]); const [input, setInput] = useState(''); const [isSending, setIsSending] = useState(false); const [permissionError, setPermissionError] = useState(null); // Voice state const [isListening, setIsListening] = useState(false); const recognitionRef = useRef(null); const messagesEndRef = useRef(null); const { status, error: callError, durationSeconds, callStats, startCall, endCall } = useVapi(); // Redirect if not logged in useEffect(() => { if (!authLoading && !user) { router.push('/login'); } }, [user, authLoading, router]); // Auto-scroll useEffect(() => { messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' }); }, [messages]); // Init speech recognition (client only) useEffect(() => { if (typeof window === 'undefined') return; const SpeechRecognition = (window as any).SpeechRecognition || (window as any).webkitSpeechRecognition; if (!SpeechRecognition) { console.warn('Speech recognition not supported in this browser'); return; } const recognition = new SpeechRecognition(); recognition.lang = 'en-US'; // change to 'es-ES' etc if you want recognition.continuous = false; recognition.interimResults = true; recognition.onresult = (event: SpeechRecognitionEvent) => { let finalText = ''; let interimText = ''; for (let i = event.resultIndex; i < event.results.length; i++) { const result = event.results[i]; if (result.isFinal) { 
finalText += result[0].transcript; } else { interimText += result[0].transcript; } } // show interim in input while speaking if (interimText) { setInput(interimText); } if (finalText) { const cleaned = finalText.trim(); setInput(cleaned); // If you want auto-send after speech, uncomment: // if (cleaned) { // handleVoiceSend(cleaned); // } } }; recognition.onerror = (event: any) => { console.error('Speech recognition error', event.error); setIsListening(false); }; recognition.onend = () => { setIsListening(false); }; recognitionRef.current = recognition; return () => { try { recognition.stop(); } catch { // ignore } recognitionRef.current = null; }; }, []); const sendMessage = async ( e?: React.FormEvent, overrideText?: string, ) => { if (e) e.preventDefault(); const textToSend = overrideText ?? input; if (!textToSend.trim() || isSending) return; const userMessage: Message = { role: 'user', content: textToSend }; const updatedMessages = [...messages, userMessage]; setMessages(updatedMessages); setInput(''); setIsSending(true); try { const response = await fetch('/api/chat', { method: 'POST', headers: { 'Content-Type': 'application/json', }, body: JSON.stringify({ messages: updatedMessages, }), }); if (!response.ok) { throw new Error('Failed to get response'); } const data = await response.json(); if (data.error) { throw new Error(data.error); } setMessages([ ...updatedMessages, { role: 'assistant', content: data.text }, ]); } catch (error) { console.error('Error sending message:', error); setMessages([ ...updatedMessages, { role: 'assistant', content: 'Sorry, I encountered an error. 
Please try again.', }, ]); toast.error('Unable to send your message.'); } finally { setIsSending(false); } }; // Manual send from voice text (if you want to click after dictation) const handleVoiceSend = (text: string) => { sendMessage(undefined, text); }; const toggleListening = () => { const recognition = recognitionRef.current; if (!recognition) { console.warn('No recognition instance; browser may not support it'); return; } if (!isListening) { setInput(''); setIsListening(true); try { recognition.start(); } catch (err) { console.error('Error starting recognition', err); setIsListening(false); } } else { try { recognition.stop(); } catch (err) { console.error('Error stopping recognition', err); } setIsListening(false); } }; const formatDuration = (seconds: number) => { const mins = Math.floor(seconds / 60); const secs = seconds % 60; return `${mins}m ${secs.toString().padStart(2, '0')}s`; }; const callStatusLabel = useMemo(() => { if (status === 'connecting') return 'Connecting...'; if (status === 'active') return 'On Call'; if (status === 'ended') return 'Call Ended'; if (status === 'error') return 'Error'; return 'Ready to Call'; }, [status]); const handleVoiceCall = async () => { setPermissionError(null); if (status === 'active' || status === 'connecting') { await endCall(); return; } try { await navigator.mediaDevices.getUserMedia({ audio: true }); } catch (error) { console.error('Microphone permission denied', error); setPermissionError( 'Microphone access is required. Please allow permission and use a modern browser.' ); toast.error('Microphone access is required to start a call.'); return; } try { await startCall(); toast.success('Call started.'); } catch (error) { console.error('Unable to start call', error); toast.error('Unable to start the call.'); } }; if (authLoading) { return (
Loading...
); }
// Render nothing while the redirect-to-/login effect runs for logged-out users
if (!user) { return null; }
// Navigation destinations for the dashboard shell
const navLinks = [
  { label: 'Dashboard', href: '/dashboard' },
  { label: 'Scheduled Calls', href: '/dashboard/scheduled-calls' },
  { label: 'Voice Agent', href: '/dashboard/agent-settings' },
  { label: 'Credits', href: '/dashboard/credits' },
  { label: 'Notifications', href: '/dashboard/notifications' },
];
// NOTE(review): the JSX markup below appears truncated/garbled in this copy
// (element tags missing); left byte-identical rather than guessed at.
return (
 
{callStatusLabel}
 
{status === 'connecting' && ( <> Connecting to Vapi... )} {status === 'active' && ( <> Duration: {formatDuration(durationSeconds)} )} {status === 'idle' && 'Tap start to begin a voice call'}
{(permissionError || callError) && (
{permissionError || callError}
)} {status === 'ended' && callStats && (
 
Call Summary
 
Duration
 
{formatDuration(callStats.durationSeconds)}
 
Credits Used
 
{callStats.creditsUsed}
 
Transcript
 
)}
{messages.length === 0 && (
Start a conversation with your AI companion
)} {messages.map((message, index) => (
 
{message.content}
 
))} {isSending && (
)}
sendMessage(e)} className="flex gap-2 items-center">
setInput(e.target.value)} placeholder={isListening ? 'Listening...' : 'Type your message...'} className="flex-1 bg-transparent text-white placeholder:text-[#9ca3af] focus:outline-none" disabled={isSending} />
); }