Add custom Synthflow WebSocket widget

This commit is contained in:
root
2026-02-24 15:35:57 +00:00
parent c1464f97df
commit 52ea3044a4
2 changed files with 242 additions and 8 deletions
+7 -8
View File
@@ -1,6 +1,7 @@
"use client"; "use client";
import { useState, useRef, useEffect } from "react"; import { useState, useRef, useEffect } from "react";
import SynthflowWidget from "./SynthflowWidget";
interface SiteMenteVoiceWidgetProps { interface SiteMenteVoiceWidgetProps {
businessName?: string; businessName?: string;
@@ -158,15 +159,13 @@ export default function SiteMenteVoiceWidget({
</button> </button>
</div> </div>
{/* Synthflow Widget - Embedded iframe */} {/* Synthflow Widget - Custom WebSocket */}
{mode === "synthflow" && ( {mode === "synthflow" && (
<div className="absolute bottom-16 right-0 w-[380px] h-[520px] mb-2 rounded-xl overflow-hidden shadow-2xl border-2 border-green-500"> <div className="absolute bottom-16 right-0 mb-2">
<iframe <SynthflowWidget
src="https://widget.synthflow.ai/widget/v2/0ee1b79c-43c2-41e0-aa6a-d2a560e0ca6a/1771945296284x399137457562280600?theme=dark" apiKey="yCNoizRk4kRcLrR4V27iem3XkFKZizWrjSXvkao-MZI"
className="w-full h-full" assistantId="0ee1b79c-43c2-41e0-aa6a-d2a560e0ca6a"
allow="microphone; autoplay; fullscreen" theme={theme}
sandbox="allow-scripts allow-same-origin allow-forms allow-popups allow-modals"
title="Synthflow AI Voice Assistant"
/> />
</div> </div>
)} )}
+235
View File
@@ -0,0 +1,235 @@
"use client";
import { useState, useRef, useEffect, useCallback } from "react";
/**
 * Props for the Synthflow voice-call widget.
 *
 * NOTE(review): `apiKey` is passed to the browser and sent as a Bearer
 * token from client-side code, so it is visible to every visitor —
 * confirm this is a publishable widget key, not a secret server key.
 */
interface SynthflowWidgetProps {
  apiKey: string;       // Synthflow API key used to mint a WebSocket session token
  assistantId: string;  // Synthflow assistant to connect the call to
  theme?: "dark" | "light"; // visual theme; defaults to "dark"
}
/**
 * Custom voice-call widget for a Synthflow assistant.
 *
 * Fetches a short-lived WebSocket session URL, streams microphone audio
 * to the agent as 16-bit PCM, and plays back the agent's binary audio
 * replies. Text frames carry JSON control/transcript messages.
 *
 * Fixes over the previous revision:
 *  - status line no longer shows "Error" for every non-error status
 *    (operator-precedence bug: `a && b || c`),
 *  - agent audio is played through one shared AudioContext instead of
 *    leaking a new context per chunk,
 *  - socket, mic stream, and audio contexts are torn down on unmount,
 *  - `catch` variables are `unknown`, narrowed before use.
 */
export default function SynthflowWidget({
  apiKey,
  assistantId,
  theme = "dark"
}: SynthflowWidgetProps) {
  const [isConnected, setIsConnected] = useState(false);
  const [isTalking, setIsTalking] = useState(false);
  const [status, setStatus] = useState<"idle" | "connecting" | "ready" | "talking" | "error">("idle");
  const [transcript, setTranscript] = useState("");
  const [error, setError] = useState("");

  const wsRef = useRef<WebSocket | null>(null);
  const audioContextRef = useRef<AudioContext | null>(null); // mic capture context (48 kHz)
  const playbackCtxRef = useRef<AudioContext | null>(null);  // shared agent-playback context (16 kHz)
  const mediaStreamRef = useRef<MediaStream | null>(null);

  /** Convert Web Audio float samples in [-1, 1] to 16-bit PCM for the wire. */
  const float32ToInt16 = (float32: Float32Array): Int16Array => {
    const int16 = new Int16Array(float32.length);
    for (let i = 0; i < float32.length; i++) {
      int16[i] = Math.max(-1, Math.min(1, float32[i])) * 0x7FFF;
    }
    return int16;
  };

  /** Inverse of float32ToInt16: 16-bit PCM back to float samples. */
  const int16ToFloat32 = (int16: Int16Array): Float32Array => {
    const float32 = new Float32Array(int16.length);
    for (let i = 0; i < int16.length; i++) {
      float32[i] = int16[i] / 0x7FFF;
    }
    return float32;
  };

  /**
   * Play one chunk of agent audio (assumed 16 kHz mono PCM — TODO confirm
   * against the Synthflow protocol docs).
   *
   * Reuses a single AudioContext: the previous version constructed a new
   * context per chunk and never closed it, which hits the browser's
   * concurrent-context limit during longer calls.
   */
  const playAudio = async (int16Data: Int16Array) => {
    try {
      if (!playbackCtxRef.current || playbackCtxRef.current.state === "closed") {
        playbackCtxRef.current = new AudioContext({ sampleRate: 16000 });
      }
      const ctx = playbackCtxRef.current;
      const buffer = ctx.createBuffer(1, int16Data.length, 16000);
      buffer.getChannelData(0).set(int16ToFloat32(int16Data));
      const source = ctx.createBufferSource();
      source.buffer = buffer;
      source.connect(ctx.destination);
      source.start();
    } catch (err) {
      console.error("Play audio error:", err);
    }
  };

  /** Mint a session token, open the WebSocket, and wire up its handlers. */
  const connect = useCallback(async () => {
    try {
      setStatus("connecting");
      setError("");
      // Get WebSocket token
      const tokenRes = await fetch(`https://widget.synthflow.ai/websocket/token/${assistantId}`, {
        headers: {
          "Authorization": `Bearer ${apiKey}`
        }
      });
      if (!tokenRes.ok) {
        throw new Error("Failed to get token");
      }
      const { sessionURL } = await tokenRes.json();

      // Connect to WebSocket
      const ws = new WebSocket(sessionURL);
      wsRef.current = ws;

      ws.onopen = () => {
        console.log("WebSocket connected");
        setIsConnected(true);
        setStatus("ready");
        // Send ready signal
        ws.send(JSON.stringify({ type: "status_client_ready" }));
      };

      ws.onmessage = async (event) => {
        if (typeof event.data === "string") {
          // Text frames are JSON control/transcript messages.
          const data = JSON.parse(event.data);
          console.log("WS message:", data);
          if (data.type === "transcript") {
            setTranscript(data.text || "");
          } else if (data.type === "status_agent_ready") {
            setStatus("ready");
          }
        } else if (event.data instanceof Blob) {
          // Binary frames are agent audio — play immediately.
          const arrayBuffer = await event.data.arrayBuffer();
          await playAudio(new Int16Array(arrayBuffer));
        }
      };

      ws.onerror = (err) => {
        console.error("WS error:", err);
        setError("Connection error");
        setStatus("error");
      };

      ws.onclose = () => {
        setIsConnected(false);
        setStatus("idle");
      };
    } catch (err: unknown) {
      console.error("Connection failed:", err);
      setError(err instanceof Error ? err.message : "Connection failed");
      setStatus("error");
    }
  }, [apiKey, assistantId]);

  /**
   * Capture the mic and stream 16-bit PCM frames over the socket.
   * NOTE(review): ScriptProcessorNode is deprecated; consider migrating
   * to AudioWorklet when convenient (behavior kept as-is here).
   */
  const startRecording = async () => {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      mediaStreamRef.current = stream;

      const audioContext = new AudioContext({ sampleRate: 48000 });
      audioContextRef.current = audioContext;

      const source = audioContext.createMediaStreamSource(stream);
      const processor = audioContext.createScriptProcessor(4096, 1, 1);
      processor.onaudioprocess = (e) => {
        // Only forward audio while the socket is open.
        if (wsRef.current?.readyState === WebSocket.OPEN) {
          const inputData = e.inputBuffer.getChannelData(0);
          const int16Data = float32ToInt16(inputData);
          wsRef.current.send(int16Data);
        }
      };
      source.connect(processor);
      processor.connect(audioContext.destination);

      setIsTalking(true);
      setStatus("talking");
    } catch (err: unknown) {
      console.error("Recording error:", err);
      setError("Microphone access denied");
    }
  };

  /** Stop the mic stream and release the capture audio context. */
  const stopRecording = () => {
    if (mediaStreamRef.current) {
      mediaStreamRef.current.getTracks().forEach(track => track.stop());
      mediaStreamRef.current = null;
    }
    if (audioContextRef.current && audioContextRef.current.state !== "closed") {
      void audioContextRef.current.close();
      audioContextRef.current = null;
    }
    setIsTalking(false);
    setStatus("ready");
  };

  // Tear down socket, mic, and audio contexts when the widget unmounts;
  // the previous revision leaked all of these.
  useEffect(() => {
    return () => {
      wsRef.current?.close();
      mediaStreamRef.current?.getTracks().forEach(track => track.stop());
      if (audioContextRef.current && audioContextRef.current.state !== "closed") {
        void audioContextRef.current.close();
      }
      if (playbackCtxRef.current && playbackCtxRef.current.state !== "closed") {
        void playbackCtxRef.current.close();
      }
    };
  }, []);

  /** One button drives the call lifecycle: connect → talk → mute. */
  const toggleCall = () => {
    if (!isConnected) {
      connect();
    } else if (isTalking) {
      stopRecording();
    } else {
      startRecording();
    }
  };

  const colors = theme === "dark"
    ? { bg: "#1a1625", text: "#fff", accent: "#ff69b4" }
    : { bg: "#fff", text: "#000", accent: "#0066ff" };

  return (
    <div
      className="rounded-xl p-4 shadow-lg"
      style={{ backgroundColor: colors.bg, color: colors.text }}
    >
      <div className="flex flex-col items-center gap-4">
        {/* Status indicator */}
        <div className="flex items-center gap-2">
          <div
            className={`w-3 h-3 rounded-full ${
              status === "ready" ? "bg-green-500" :
              status === "talking" ? "bg-green-500 animate-pulse" :
              status === "connecting" ? "bg-yellow-500" :
              status === "error" ? "bg-red-500" : "bg-gray-500"
            }`}
          />
          <span className="text-sm">
            {status === "idle" && "Click to start"}
            {status === "connecting" && "Connecting..."}
            {status === "ready" && "Ready"}
            {status === "talking" && "Listening..."}
            {/* Parenthesized: `a && b || c` rendered "Error" for every status. */}
            {status === "error" && (error || "Error")}
          </span>
        </div>

        {/* Call button */}
        <button
          onClick={toggleCall}
          className={`w-16 h-16 rounded-full flex items-center justify-center transition-all ${
            isTalking
              ? "bg-red-500 animate-pulse"
              : "bg-green-500 hover:scale-110"
          }`}
        >
          {isTalking ? (
            <svg className="w-8 h-8 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M16 8l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2M5 3a2 2 0 00-2 2v1c0 8.284 6.716 15 15 15h1a2 2 0 002-2v-3.28a1 1 0 00-.684-.948l-4.493-1.498a1 1 0 00-1.21.502l-1.13 2.257a11.042 11.042 0 01-5.516-5.517l2.257-1.128a1 1 0 00.502-1.21L9.228 3.683A1 1 0 008.279 3H5z" />
            </svg>
          ) : (
            <svg className="w-8 h-8 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 5a2 2 0 012-2h3.28a1 1 0 01.948.684l1.498 4.493a1 1 0 01-.502 1.21l-2.257 1.13a11.042 11.042 0 005.516 5.516l1.13-2.257a1 1 0 011.21-.502l4.493 1.498a1 1 0 01.684.949V19a2 2 0 01-2 2h-1C9.716 21 3 14.284 3 6V5z" />
            </svg>
          )}
        </button>

        {/* Transcript */}
        {transcript && (
          <div className="w-full p-3 rounded-lg bg-white/10 text-sm">
            {transcript}
          </div>
        )}
      </div>
    </div>
  );
}