Add LICENSE, README, and Docs tab to Mission Control

This commit is contained in:
root
2026-02-22 07:33:18 +00:00
parent 3e7b457d5f
commit 0817444dc5
68 changed files with 6677 additions and 1673 deletions
+153 -468
View File
@@ -1,496 +1,181 @@
"use client";
import { useEffect, useMemo, useRef, useState } from "react";
import { useState, useRef } from "react";
import Vapi from "@vapi-ai/web";
// eslint-disable-next-line @typescript-eslint/no-explicit-any
type SpeechRecognitionInstance = any;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
type SpeechRecognitionConstructor = new () => SpeechRecognitionInstance;
const VAPI_PUBLIC_KEY = "d44a0025-24bb-426d-919a-cb0a96416ed4";
const ASSISTANT_ID = "92630ca5-e165-4360-bce0-dd8730882569";
// A single chat bubble shown in the widget transcript.
type ChatMessage = {
  role: "user" | "assistant";
  content: string;
  timestamp: number; // epoch ms; also used as part of the React list key
};
// Response shape of POST /api/chat/agent.
type ApiResponse = {
  response: string;
  shouldCaptureEmail: boolean;
  suggestedActions: string[];
};
// Widget props. This name was previously declared twice (a `type` alias and an
// `interface` with different members), which is a TS2300 duplicate-identifier
// error; the two declarations are merged here. All props remain optional, so
// every existing call site keeps compiling.
type SiteMenteVoiceWidgetProps = {
  initialLang?: "es" | "en";
  businessName?: string;
  businessType?: "restaurant" | "real-estate" | "clinic" | "car-rental" | "default";
  theme?: "dark" | "light";
};
// Suggested prompts shown before the first user message, keyed by language.
const quickActions = {
  es: [
    { label: "¿Cuánto cuesta?", icon: "💰" },
    { label: "Ver casos de éxito", icon: "🎯" },
    { label: "¿Cómo funciona?", icon: "⚙️" },
  ],
  en: [
    { label: "Pricing?", icon: "💰" },
    { label: "Success stories", icon: "🎯" },
    { label: "How it works?", icon: "⚙️" },
  ],
} as const;
// First assistant message, keyed by language.
const initialGreeting = {
  es: "Hola, soy el cerebro de SiteMente. ¿En qué te puedo ayudar hoy?",
  en: "Hi, I'm the SiteMente brain. How can I help you today?",
} as const;
export default function SiteMenteVoiceWidget({
initialLang = "es",
businessName = "SiteMente",
businessType = "default",
theme = "dark"
}: SiteMenteVoiceWidgetProps) {
const [isOpen, setIsOpen] = useState(false);
const [lang, setLang] = useState<"es" | "en">(initialLang);
const [messages, setMessages] = useState<ChatMessage[]>([
{
role: "assistant",
content: initialGreeting[initialLang],
timestamp: Date.now(),
},
]);
const [input, setInput] = useState("");
const [voiceMode, setVoiceMode] = useState(true);
const [isRecording, setIsRecording] = useState(false);
const [isSpeaking, setIsSpeaking] = useState(false);
const [isLoading, setIsLoading] = useState(false);
const [isActive, setIsActive] = useState(false);
const [status, setStatus] = useState<"idle" | "connecting" | "active" | "error">("idle");
const [transcript, setTranscript] = useState("");
const [speechSupported, setSpeechSupported] = useState(true);
const [showTooltip, setShowTooltip] = useState(false);
const recognitionRef = useRef<SpeechRecognitionInstance | null>(null);
const isRecordingRef = useRef(false);
const transcriptRef = useRef("");
const messagesEndRef = useRef<HTMLDivElement | null>(null);
const localeLabel = useMemo(
() => (lang === "es" ? "ES" : "EN"),
[lang]
);
useEffect(() => {
setLang(initialLang);
}, [initialLang]);
useEffect(() => {
const seen = window.localStorage.getItem("sitemente:voice-tooltip");
if (!seen) {
setShowTooltip(true);
window.localStorage.setItem("sitemente:voice-tooltip", "1");
const timeout = window.setTimeout(() => setShowTooltip(false), 4000);
return () => window.clearTimeout(timeout);
}
return undefined;
}, []);
useEffect(() => {
isRecordingRef.current = isRecording;
}, [isRecording]);
useEffect(() => {
transcriptRef.current = transcript;
}, [transcript]);
useEffect(() => {
const SpeechRecognitionImpl =
typeof window !== "undefined"
? ((window as typeof window & {
webkitSpeechRecognition?: SpeechRecognitionConstructor;
}).SpeechRecognition ||
(window as typeof window & {
webkitSpeechRecognition?: SpeechRecognitionConstructor;
}).webkitSpeechRecognition)
: undefined;
if (!SpeechRecognitionImpl) {
setSpeechSupported(false);
return;
}
const recognition = new SpeechRecognitionImpl();
recognition.lang = lang === "es" ? "es-ES" : "en-US";
recognition.interimResults = true;
recognition.continuous = false;
recognition.onresult = (event) => {
const result = Array.from(event.results)
.map((res) => res[0]?.transcript ?? "")
.join(" ");
setTranscript(result.trim());
};
recognition.onerror = () => {
setIsRecording(false);
};
recognition.onend = () => {
if (isRecordingRef.current) {
setIsRecording(false);
const finalTranscript = transcriptRef.current.trim();
if (finalTranscript) {
handleSend(finalTranscript);
}
setTranscript("");
}
};
recognitionRef.current = recognition;
}, [lang]);
useEffect(() => {
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
}, [messages, isOpen]);
useEffect(() => {
if (!isSpeaking) return;
return () => {
window.speechSynthesis?.cancel();
};
}, [isSpeaking]);
useEffect(() => {
setMessages((prev) => {
if (prev.length === 0) return prev;
const updated = [...prev];
if (updated[0].role === "assistant") {
updated[0] = {
...updated[0],
content: initialGreeting[lang],
};
}
return updated;
});
}, [lang]);
const startRecording = () => {
if (!speechSupported || !recognitionRef.current) return;
setTranscript("");
setIsRecording(true);
recognitionRef.current.start();
};
const stopRecording = () => {
if (!recognitionRef.current) return;
recognitionRef.current.stop();
setIsRecording(false);
};
const speak = (text: string) => {
if (!("speechSynthesis" in window)) return;
window.speechSynthesis.cancel();
const utterance = new SpeechSynthesisUtterance(text);
utterance.lang = lang === "es" ? "es-ES" : "en-US";
utterance.onstart = () => setIsSpeaking(true);
utterance.onend = () => setIsSpeaking(false);
utterance.onerror = () => setIsSpeaking(false);
window.speechSynthesis.speak(utterance);
};
const handleSend = async (text: string) => {
if (!text.trim() || isLoading) return;
const userMessage: ChatMessage = {
role: "user",
content: text,
timestamp: Date.now(),
};
setMessages((prev) => [...prev, userMessage]);
setInput("");
setIsLoading(true);
const [errorMsg, setErrorMsg] = useState<string>("");
const vapiRef = useRef<any>(null);
const startCall = async () => {
try {
const response = await fetch("/api/chat/agent", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
message: text,
locale: lang,
history: messages.slice(-6),
}),
console.log("Starting call - initializing Vapi inside click handler...");
setErrorMsg("");
setStatus("connecting");
// Step 1: Verify mic exists
try {
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
console.log("✅ Mic stream created:", stream);
console.log("✅ Audio tracks:", stream.getAudioTracks().length);
console.log("✅ Track enabled:", stream.getAudioTracks()[0]?.enabled);
console.log("✅ Track settings:", stream.getAudioTracks()[0]?.getSettings());
} catch (micErr) {
console.log("❌ Mic error:", micErr);
}
// Initialize Vapi INSIDE the click handler (required for iOS)
const vapi = new Vapi(VAPI_PUBLIC_KEY);
vapiRef.current = vapi;
// Set up event listeners
vapi.on("error", (error: any) => {
console.log("Vapi error:", error);
const msg = String(error?.message || error?.error?.message || JSON.stringify(error) || "Error desconocido");
setErrorMsg(msg);
setStatus("error");
setIsActive(false);
});
if (!response.ok) {
throw new Error("Failed to fetch response.");
}
vapi.on("call-start", () => {
console.log("✅ Call started!");
setStatus("active");
// Check peer connection for audio senders
setTimeout(() => {
try {
// @ts-ignore - internal property
const pc = vapiRef.current?._call?._pc;
if (pc) {
console.log("📡 PeerConnection found");
pc.getSenders().forEach((sender: any, i: number) => {
console.log(`Sender ${i}:`, sender.track?.kind, sender.track?.enabled);
});
} else {
console.log("⚠️ No PeerConnection found");
}
} catch (e) {
console.log("Error checking PC:", e);
}
}, 2000);
});
const data = (await response.json()) as ApiResponse;
const assistantMessage: ChatMessage = {
role: "assistant",
content: data.response,
timestamp: Date.now(),
};
setMessages((prev) => [...prev, assistantMessage]);
vapi.on("call-end", (e: any) => {
console.log("Call ended", e);
setStatus("idle");
setIsActive(false);
});
if (voiceMode) {
speak(data.response);
}
} catch (error) {
const fallbackMessage: ChatMessage = {
role: "assistant",
content:
lang === "es"
? "Hubo un problema al responder. ¿Quieres intentarlo de nuevo?"
: "There was a problem responding. Want to try again?",
timestamp: Date.now(),
};
setMessages((prev) => [...prev, fallbackMessage]);
} finally {
setIsLoading(false);
vapi.on("message", (m: any) => {
console.log("Vapi message:", m);
});
vapi.on("speech-start", () => {
console.log("User speech detected!");
});
vapi.on("speech-end", () => {
console.log("User speech ended");
});
vapi.on("transcript", (transcript: any) => {
console.log("Transcript:", transcript);
if (typeof transcript === "string") {
setTranscript(transcript);
} else if (transcript?.text) {
setTranscript(transcript.text);
}
});
console.log("Calling assistant:", ASSISTANT_ID);
// Start the call
await vapi.start(ASSISTANT_ID);
console.log("Call started successfully");
setIsActive(true);
} catch (error: any) {
console.log("Start error:", error);
const msg = String(error?.message || error?.error?.message || JSON.stringify(error) || "Error al iniciar");
setErrorMsg(msg);
setStatus("error");
}
};
const voiceIndicator = isRecording
? "🎤"
: isSpeaking
? "🔊"
: "🎤";
const endCall = async () => {
try {
if (vapiRef.current) {
await vapiRef.current.stop();
}
setIsActive(false);
setStatus("idle");
setTranscript("");
} catch (error) {
console.error("End call error:", error);
}
};
const buttonColor = theme === "dark" ? "bg-brand-pink" : "bg-blue-600";
return (
<>
{!isOpen && (
<div className="fixed bottom-6 right-6 z-[9999] flex flex-col items-end gap-2">
<div className="relative">
<button
type="button"
onClick={() => setIsOpen(true)}
className="relative flex h-[68px] w-[68px] items-center justify-center rounded-full bg-gradient-to-br from-[#8B5CF6] to-[#EC4899] text-white shadow-lg transition hover:scale-110 hover:shadow-[0_12px_30px_rgba(236,72,153,0.45)]"
>
<div className="flex items-center gap-1">
<span className="h-3 w-1 rounded-full bg-white/80 animate-pulse" />
<span className="h-5 w-1 rounded-full bg-white/90 animate-pulse" />
<span className="h-4 w-1 rounded-full bg-white/80 animate-pulse" />
</div>
</button>
{showTooltip && (
<div className="absolute right-[76px] top-1/2 -translate-y-1/2 rounded-full bg-white px-3 py-1 text-xs font-semibold text-brand-purple-dark shadow-md">
{lang === "es" ? "Prueba la voz" : "Try voice"}
</div>
)}
<div className="absolute -top-6 right-2 rounded-full bg-white/20 px-2 py-1 text-[10px] font-semibold text-white backdrop-blur">
Demo
</div>
<div className="fixed bottom-6 right-6 z-50">
{status === "error" && errorMsg && (
<div className="absolute bottom-16 right-0 w-64 bg-red-600 text-white text-xs p-2 rounded-lg mb-2">
{errorMsg}
</div>
)}
{isActive && (
<div className="absolute bottom-16 right-0 w-80 bg-[#1a1625] border border-white/20 rounded-xl p-4 shadow-2xl mb-2">
<div className="flex items-center justify-between mb-2">
<span className="text-sm font-medium text-white">🤖 AI</span>
<span className={`w-2 h-2 rounded-full ${status === "active" ? "bg-green-500 animate-pulse" : "bg-yellow-500"}`}></span>
</div>
<div className="h-32 overflow-y-auto text-sm text-white/70 bg-white/5 rounded-lg p-2">
{transcript || "Escuchando..."}
</div>
</div>
)}
{isOpen && (
<div className="fixed inset-0 z-[9999] flex items-end justify-end p-4 sm:p-6">
<div className="absolute inset-0 bg-black/50" onClick={() => setIsOpen(false)} />
<div className="relative z-10 flex h-full w-full max-w-[440px] flex-col overflow-hidden rounded-3xl border border-white/15 bg-[#4f3a78] shadow-[0_30px_80px_rgba(0,0,0,0.45)] sm:h-[700px]">
<div className="flex items-center justify-between bg-gradient-to-r from-[#6d4cc2] to-[#ff66b5] px-5 py-4 text-white">
<div>
<div className="flex items-center gap-2 text-lg font-semibold">
<span>SiteMente IA</span>
<span className="flex items-center gap-1">
<span className="h-2 w-1 rounded-full bg-white/80 animate-pulse" />
<span className="h-3 w-1 rounded-full bg-white/90 animate-pulse" />
<span className="h-2 w-1 rounded-full bg-white/80 animate-pulse" />
</span>
</div>
<p className="text-xs text-white/80">
{lang === "es" ? "El cerebro de tu web" : "Your website brain"}
</p>
</div>
<div className="flex items-center gap-3 text-lg">
<span>{voiceIndicator}</span>
<button
type="button"
onClick={() => setLang((prev) => (prev === "es" ? "en" : "es"))}
className="rounded-full border border-white/30 px-2 py-1 text-xs font-semibold"
>
{localeLabel}
</button>
<button
type="button"
onClick={() => setIsOpen(false)}
className="text-xl"
>
</button>
</div>
</div>
<button
onClick={isActive ? endCall : startCall}
className={`${buttonColor} w-14 h-14 rounded-full shadow-lg flex items-center justify-center transition-all hover:scale-110 ${
isActive ? "animate-pulse ring-4 ring-red-500/50" : ""
}`}
title={isActive ? "Colgar" : "Hablar con IA"}
>
{isActive ? (
<svg className="w-6 h-6 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M16 8l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2M5 3a2 2 0 00-2 2v1c0 8.284 6.716 15 15 15h1a2 2 0 002-2v-3.28a1 1 0 00-.684-.948l-4.493-1.498a1 1 0 00-1.21.502l-1.13 2.257a11.042 11.042 0 01-5.516-5.517l2.257-1.128a1 1 0 00.502-1.21L9.228 3.683A1 1 0 008.279 3H5z" />
</svg>
) : (
<svg className="w-6 h-6 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 11a7 7 0 01-7 7m0 0a7 7 0 01-7-7m7 7v4m0 0H8m4 0h4m-4-8a3 3 0 01-3-3V5a3 3 0 116 0v6a3 3 0 01-3 3z" />
</svg>
)}
</button>
<div className="flex-1 overflow-y-auto px-5 py-4">
{messages.length === 1 && (
<div className="mb-4 flex flex-wrap gap-2">
{quickActions[lang].map((action) => (
<button
key={action.label}
type="button"
onClick={() => handleSend(action.label)}
className="rounded-full border border-white/20 bg-white/10 px-3 py-1 text-xs text-white/90 transition hover:bg-white/20"
>
{action.icon} {action.label}
</button>
))}
</div>
)}
<div className="space-y-4">
{messages.map((message) => (
<div
key={`${message.timestamp}-${message.role}`}
className={`group flex ${
message.role === "user" ? "justify-end" : "justify-start"
}`}
>
<div
className={`max-w-[80%] rounded-2xl px-4 py-3 text-sm shadow-md ${
message.role === "user"
? "bg-[#c4a1ff] text-[#3b1c66]"
: "bg-[#6a4bb0] text-white"
}`}
>
<div className="flex items-center gap-2">
{message.role === "assistant" && (
<span className="flex h-6 w-6 items-center justify-center rounded-full bg-white/20 text-xs font-semibold">
SM
</span>
)}
<p>{message.content}</p>
</div>
<span className="mt-2 block text-[10px] text-white/60 opacity-0 transition group-hover:opacity-100">
{new Date(message.timestamp).toLocaleTimeString(
lang === "es" ? "es-ES" : "en-US",
{ hour: "2-digit", minute: "2-digit" }
)}
</span>
</div>
</div>
))}
{isLoading && (
<div className="flex items-center gap-2 text-white/70">
<span className="inline-flex h-2 w-2 animate-bounce rounded-full bg-white/70" />
<span className="inline-flex h-2 w-2 animate-bounce rounded-full bg-white/60 delay-150" />
<span className="inline-flex h-2 w-2 animate-bounce rounded-full bg-white/50 delay-300" />
</div>
)}
{isSpeaking && (
<div className="flex items-center gap-2 text-xs text-white/70">
<span className="flex items-center gap-1">
<span className="h-2 w-1 rounded-full bg-white/80 animate-pulse" />
<span className="h-3 w-1 rounded-full bg-white/90 animate-pulse" />
<span className="h-2 w-1 rounded-full bg-white/80 animate-pulse" />
</span>
{lang === "es" ? "Hablando..." : "Speaking..."}
</div>
)}
<div ref={messagesEndRef} />
</div>
</div>
<div className="border-t border-white/10 bg-[#4a3572] px-5 py-4">
{voiceMode ? (
<div className="flex flex-col items-center gap-3">
{transcript && (
<p className="w-full rounded-xl bg-white/10 px-4 py-2 text-sm text-white/90">
{transcript}
</p>
)}
<button
type="button"
onClick={() => {
if (isRecording) {
stopRecording();
return;
}
if (isSpeaking) {
window.speechSynthesis?.cancel();
setIsSpeaking(false);
return;
}
startRecording();
}}
className={`flex h-14 w-14 items-center justify-center rounded-full text-white transition ${
isRecording
? "bg-red-500 animate-pulse"
: isSpeaking
? "bg-blue-500 animate-pulse"
: "bg-white/20 hover:bg-white/30"
}`}
disabled={!speechSupported}
>
{isRecording ? "🔴" : isSpeaking ? "🔊" : "🎤"}
</button>
<p className="text-xs text-white/80">
{isRecording
? lang === "es"
? "Escuchando..."
: "Listening..."
: isSpeaking
? lang === "es"
? "Hablando..."
: "Speaking..."
: lang === "es"
? "Toca para hablar"
: "Tap to talk"}
</p>
{isSpeaking && (
<div className="h-1 w-full overflow-hidden rounded-full bg-white/10">
<div className="h-full w-1/2 animate-pulse rounded-full bg-brand-pink/80" />
</div>
)}
<div className="flex w-full items-center justify-between text-xs text-white/70">
<button
type="button"
onClick={() => setVoiceMode(false)}
className="flex items-center gap-1"
>
{lang === "es" ? "Texto" : "Text"}
</button>
<button
type="button"
onClick={() => {
window.speechSynthesis?.cancel();
setIsSpeaking(false);
}}
className="flex items-center gap-1"
>
{lang === "es" ? "Pausar" : "Pause"}
</button>
</div>
</div>
) : (
<div className="flex items-center gap-3">
<input
value={input}
onChange={(event) => setInput(event.target.value)}
placeholder={
lang === "es"
? "Escribe tu mensaje..."
: "Type your message..."
}
className="flex-1 rounded-full border border-white/20 bg-white/10 px-4 py-2 text-sm text-white placeholder:text-white/50 focus:border-white/50 focus:outline-none"
/>
<button
type="button"
onClick={() => handleSend(input)}
className="rounded-full bg-brand-pink px-4 py-2 text-sm font-semibold text-white"
>
</button>
<button
type="button"
onClick={() => setVoiceMode(true)}
className="rounded-full border border-white/20 px-3 py-2 text-white/80"
>
🎤
</button>
</div>
)}
{!speechSupported && (
<p className="mt-2 text-center text-xs text-white/60">
{lang === "es"
? "Tu navegador no soporta voz. Usa el modo texto."
: "Your browser doesn't support voice. Use text mode."}
</p>
)}
</div>
</div>
{status === "connecting" && (
<div className="absolute -top-8 right-0 bg-white/10 backdrop-blur px-3 py-1 rounded-full text-xs text-white">
Conectando...
</div>
)}
</>
</div>
);
}