"use client";

import { useState, useRef, useEffect, useCallback } from "react";

interface SynthflowWidgetProps {
|
|
apiKey: string;
|
|
assistantId: string;
|
|
theme?: "dark" | "light";
|
|
}
export default function SynthflowWidget({
|
|
apiKey,
|
|
assistantId,
|
|
theme = "dark"
|
|
}: SynthflowWidgetProps) {
|
|
const [isConnected, setIsConnected] = useState(false);
|
|
const [isTalking, setIsTalking] = useState(false);
|
|
const [status, setStatus] = useState<"idle" | "connecting" | "ready" | "talking" | "error">("idle");
|
|
const [transcript, setTranscript] = useState("");
|
|
const [error, setError] = useState("");
|
|
|
|
const wsRef = useRef<WebSocket | null>(null);
|
|
const audioContextRef = useRef<AudioContext | null>(null);
|
|
const mediaStreamRef = useRef<MediaStream | null>(null);
|
|
const audioChunksRef = useRef<Int16Array[]>([]);
|
|
|
|
const connect = useCallback(async () => {
|
|
try {
|
|
setStatus("connecting");
|
|
setError("");
|
|
|
|
// Get WebSocket token
|
|
const tokenRes = await fetch(`https://widget.synthflow.ai/websocket/token/${assistantId}`, {
|
|
headers: {
|
|
"Authorization": `Bearer ${apiKey}`
|
|
}
|
|
});
|
|
|
|
if (!tokenRes.ok) {
|
|
throw new Error("Failed to get token");
|
|
}
|
|
|
|
const { sessionURL } = await tokenRes.json();
|
|
|
|
// Connect to WebSocket
|
|
const ws = new WebSocket(sessionURL);
|
|
wsRef.current = ws;
|
|
|
|
ws.onopen = () => {
|
|
console.log("WebSocket connected");
|
|
setIsConnected(true);
|
|
setStatus("ready");
|
|
|
|
// Send ready signal
|
|
ws.send(JSON.stringify({ type: "status_client_ready" }));
|
|
};
|
|
|
|
ws.onmessage = async (event) => {
|
|
if (typeof event.data === "string") {
|
|
const data = JSON.parse(event.data);
|
|
console.log("WS message:", data);
|
|
|
|
if (data.type === "transcript") {
|
|
setTranscript(data.text || "");
|
|
} else if (data.type === "status_agent_ready") {
|
|
setStatus("ready");
|
|
}
|
|
} else if (event.data instanceof Blob) {
|
|
// Audio from agent - play it
|
|
const arrayBuffer = await event.data.arrayBuffer();
|
|
await playAudio(new Int16Array(arrayBuffer));
|
|
}
|
|
};
|
|
|
|
ws.onerror = (err) => {
|
|
console.error("WS error:", err);
|
|
setError("Connection error");
|
|
setStatus("error");
|
|
};
|
|
|
|
ws.onclose = () => {
|
|
setIsConnected(false);
|
|
setStatus("idle");
|
|
};
|
|
|
|
} catch (err: any) {
|
|
console.error("Connection failed:", err);
|
|
setError(err.message);
|
|
setStatus("error");
|
|
}
|
|
}, [apiKey, assistantId]);
|
|
|
|
const startRecording = async () => {
|
|
try {
|
|
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
|
|
mediaStreamRef.current = stream;
|
|
|
|
// Create audio context
|
|
const audioContext = new AudioContext({ sampleRate: 48000 });
|
|
audioContextRef.current = audioContext;
|
|
|
|
const source = audioContext.createMediaStreamSource(stream);
|
|
const processor = audioContext.createScriptProcessor(4096, 1, 1);
|
|
|
|
processor.onaudioprocess = (e) => {
|
|
if (wsRef.current?.readyState === WebSocket.OPEN) {
|
|
const inputData = e.inputBuffer.getChannelData(0);
|
|
const int16Data = float32ToInt16(inputData);
|
|
wsRef.current.send(int16Data);
|
|
}
|
|
};
|
|
|
|
source.connect(processor);
|
|
processor.connect(audioContext.destination);
|
|
|
|
setIsTalking(true);
|
|
setStatus("talking");
|
|
|
|
} catch (err: any) {
|
|
console.error("Recording error:", err);
|
|
setError("Microphone access denied");
|
|
}
|
|
};
|
|
|
|
const stopRecording = () => {
|
|
if (mediaStreamRef.current) {
|
|
mediaStreamRef.current.getTracks().forEach(track => track.stop());
|
|
}
|
|
if (audioContextRef.current) {
|
|
audioContextRef.current.close();
|
|
}
|
|
setIsTalking(false);
|
|
setStatus("ready");
|
|
};
|
|
|
|
const playAudio = async (int16Data: Int16Array) => {
|
|
try {
|
|
const ctx = new AudioContext({ sampleRate: 16000 });
|
|
const buffer = ctx.createBuffer(1, int16Data.length, 16000);
|
|
buffer.getChannelData(0).set(int16ToFloat32(int16Data));
|
|
|
|
const source = ctx.createBufferSource();
|
|
source.buffer = buffer;
|
|
source.connect(ctx.destination);
|
|
source.start();
|
|
} catch (err) {
|
|
console.error("Play audio error:", err);
|
|
}
|
|
};
|
|
|
|
const float32ToInt16 = (float32: Float32Array): Int16Array => {
|
|
const int16 = new Int16Array(float32.length);
|
|
for (let i = 0; i < float32.length; i++) {
|
|
int16[i] = Math.max(-1, Math.min(1, float32[i])) * 0x7FFF;
|
|
}
|
|
return int16;
|
|
};
|
|
|
|
const int16ToFloat32 = (int16: Int16Array): Float32Array => {
|
|
const float32 = new Float32Array(int16.length);
|
|
for (let i = 0; i < int16.length; i++) {
|
|
float32[i] = int16[i] / 0x7FFF;
|
|
}
|
|
return float32;
|
|
};
|
|
|
|
const toggleCall = () => {
|
|
if (!isConnected) {
|
|
connect();
|
|
} else if (isTalking) {
|
|
stopRecording();
|
|
} else {
|
|
startRecording();
|
|
}
|
|
};
|
|
|
|
const colors = theme === "dark"
|
|
? { bg: "#1a1625", text: "#fff", accent: "#ff69b4" }
|
|
: { bg: "#fff", text: "#000", accent: "#0066ff" };
|
|
|
|
return (
|
|
<div
|
|
className="rounded-xl p-4 shadow-lg"
|
|
style={{ backgroundColor: colors.bg, color: colors.text }}
|
|
>
|
|
<div className="flex flex-col items-center gap-4">
|
|
{/* Status indicator */}
|
|
<div className="flex items-center gap-2">
|
|
<div
|
|
className={`w-3 h-3 rounded-full ${
|
|
status === "ready" ? "bg-green-500" :
|
|
status === "talking" ? "bg-green-500 animate-pulse" :
|
|
status === "connecting" ? "bg-yellow-500" :
|
|
status === "error" ? "bg-red-500" : "bg-gray-500"
|
|
}`}
|
|
/>
|
|
<span className="text-sm">
|
|
{status === "idle" && "Click to start"}
|
|
{status === "connecting" && "Connecting..."}
|
|
{status === "ready" && "Ready"}
|
|
{status === "talking" && "Listening..."}
|
|
{status === "error" && error || "Error"}
|
|
</span>
|
|
</div>
|
|
|
|
{/* Call button */}
|
|
<button
|
|
onClick={toggleCall}
|
|
className={`w-16 h-16 rounded-full flex items-center justify-center transition-all ${
|
|
isTalking
|
|
? "bg-red-500 animate-pulse"
|
|
: "bg-green-500 hover:scale-110"
|
|
}`}
|
|
>
|
|
{isTalking ? (
|
|
<svg className="w-8 h-8 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M16 8l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2M5 3a2 2 0 00-2 2v1c0 8.284 6.716 15 15 15h1a2 2 0 002-2v-3.28a1 1 0 00-.684-.948l-4.493-1.498a1 1 0 00-1.21.502l-1.13 2.257a11.042 11.042 0 01-5.516-5.517l2.257-1.128a1 1 0 00.502-1.21L9.228 3.683A1 1 0 008.279 3H5z" />
|
|
</svg>
|
|
) : (
|
|
<svg className="w-8 h-8 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 5a2 2 0 012-2h3.28a1 1 0 01.948.684l1.498 4.493a1 1 0 01-.502 1.21l-2.257 1.13a11.042 11.042 0 005.516 5.516l1.13-2.257a1 1 0 011.21-.502l4.493 1.498a1 1 0 01.684.949V19a2 2 0 01-2 2h-1C9.716 21 3 14.284 3 6V5z" />
|
|
</svg>
|
|
)}
|
|
</button>
|
|
|
|
{/* Transcript */}
|
|
{transcript && (
|
|
<div className="w-full p-3 rounded-lg bg-white/10 text-sm">
|
|
{transcript}
|
|
</div>
|
|
)}
|
|
</div>
|
|
</div>
|
|
);
|
|
}