Files
Haitham Khalifa b538d84e17 Initial commit
2026-02-16 12:18:06 +01:00

67 lines
1.9 KiB
Plaintext

import { NextRequest } from 'next/server';
import { GoogleGenerativeAI } from '@google/generative-ai';
/**
 * POST handler — streams a Gemini chat completion back as Server-Sent Events.
 *
 * Expects a JSON body of the form `{ messages: [{ role, content }, ...] }`,
 * where role is 'user' or 'assistant'. The final message is sent as the new
 * prompt; all earlier messages become the chat history.
 *
 * Responses:
 *  - 200: `text/event-stream` of `data: {"text": ...}` frames, terminated by `data: [DONE]`.
 *  - 400: malformed request body (missing or empty `messages` array).
 *  - 500: missing API key, or an upstream/parsing failure.
 */
export async function POST(req: NextRequest) {
  try {
    const { messages } = await req.json();

    // Validate up front: without this, an absent or empty `messages` array
    // crashes below at `messages[messages.length - 1]` with a TypeError that
    // surfaces as an opaque 500 instead of a client-correctable 400.
    if (!Array.isArray(messages) || messages.length === 0) {
      return new Response(
        JSON.stringify({ error: 'Request body must include a non-empty "messages" array' }),
        { status: 400, headers: { 'Content-Type': 'application/json' } },
      );
    }

    if (!process.env.GEMINI_API_KEY) {
      return new Response(JSON.stringify({ error: 'Gemini API key not configured' }), {
        status: 500,
        headers: { 'Content-Type': 'application/json' },
      });
    }

    const genAI = new GoogleGenerativeAI(process.env.GEMINI_API_KEY);
    const model = genAI.getGenerativeModel({ model: 'gemini-pro' });

    // Convert to Gemini's format: it uses 'model' where OpenAI-style clients
    // use 'assistant'. Everything except the last message is prior history.
    const history = messages.slice(0, -1).map((msg: { role: string; content: string }) => ({
      role: msg.role === 'assistant' ? 'model' : 'user',
      parts: [{ text: msg.content }],
    }));
    const userMessage = messages[messages.length - 1].content;

    const chat = model.startChat({
      history,
      generationConfig: {
        temperature: 0.7,
        maxOutputTokens: 1000,
      },
    });

    const result = await chat.sendMessageStream(userMessage);

    // Re-emit the SDK's async chunk iterator as SSE frames.
    const encoder = new TextEncoder();
    const stream = new ReadableStream({
      async start(controller) {
        try {
          for await (const chunk of result.stream) {
            const text = chunk.text();
            controller.enqueue(encoder.encode(`data: ${JSON.stringify({ text })}\n\n`));
          }
          controller.enqueue(encoder.encode('data: [DONE]\n\n'));
          controller.close();
        } catch (error) {
          // The HTTP status is already 200 once streaming begins, so erroring
          // the stream is the only way to signal a mid-stream failure.
          controller.error(error);
        }
      },
    });

    return new Response(stream, {
      headers: {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
      },
    });
  } catch (error: unknown) {
    console.error('Error in chat API:', error);
    // Strict TS types catch values as `unknown`; narrow before reading .message
    // rather than assuming every thrown value is an Error.
    const message = error instanceof Error ? error.message : 'Internal server error';
    return new Response(JSON.stringify({ error: message }), {
      status: 500,
      headers: { 'Content-Type': 'application/json' },
    });
  }
}