This commit is contained in:
Developers Digest
2025-08-08 09:04:33 -04:00
parent 0e883102ed
commit 1629e12079
73 changed files with 24502 additions and 0 deletions
+177
View File
@@ -0,0 +1,177 @@
import { NextRequest, NextResponse } from 'next/server';
import { createGroq } from '@ai-sdk/groq';
import { createAnthropic } from '@ai-sdk/anthropic';
import { createOpenAI } from '@ai-sdk/openai';
import { generateObject } from 'ai';
import { z } from 'zod';
import type { FileManifest } from '@/types/file-manifest';
// Provider clients are created once at module load. Missing env vars are not
// validated here; an undefined API key only surfaces as an error at call time.
const groq = createGroq({
  apiKey: process.env.GROQ_API_KEY,
});
// Anthropic supports an optional base-URL override (e.g. for a proxy).
const anthropic = createAnthropic({
  apiKey: process.env.ANTHROPIC_API_KEY,
  baseURL: process.env.ANTHROPIC_BASE_URL || 'https://api.anthropic.com/v1',
});
// OPENAI_BASE_URL may be undefined, in which case the SDK default is used.
const openai = createOpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  baseURL: process.env.OPENAI_BASE_URL,
});
// Schema for the AI's search plan - not file selection!
// The model returns a *strategy* (what text/patterns to grep for); the caller
// performs the actual search, so the model never has to guess file paths.
const searchPlanSchema = z.object({
  editType: z.enum([
    'UPDATE_COMPONENT',
    'ADD_FEATURE',
    'FIX_ISSUE',
    'UPDATE_STYLE',
    'REFACTOR',
    'ADD_DEPENDENCY',
    'REMOVE_ELEMENT'
  ]).describe('The type of edit being requested'),
  reasoning: z.string().describe('Explanation of the search strategy'),
  searchTerms: z.array(z.string()).describe('Specific text to search for (case-insensitive). Be VERY specific - exact button text, class names, etc.'),
  // The escaped quotes below are part of the example text shown to the model.
  regexPatterns: z.array(z.string()).optional().describe('Regex patterns for finding code structures (e.g., "className=[\\"\\\'].*header.*[\\"\\\']")'),
  fileTypesToSearch: z.array(z.string()).default(['.jsx', '.tsx', '.js', '.ts']).describe('File extensions to search'),
  expectedMatches: z.number().min(1).max(10).default(1).describe('Expected number of matches (helps validate search worked)'),
  // Secondary strategy the caller can fall back to if the primary terms find nothing.
  fallbackSearch: z.object({
    terms: z.array(z.string()),
    patterns: z.array(z.string()).optional()
  }).optional().describe('Backup search if primary fails')
});
/**
 * POST /api/analyze-edit-intent
 *
 * Given a user edit request (`prompt`) and a project file manifest, asks an
 * LLM to produce a *search plan* (terms/regex patterns that will locate the
 * code to change) instead of guessing which files to edit.
 *
 * Body: { prompt: string; manifest: FileManifest; model?: string }
 * Returns 200 with { success: true, searchPlan }, 400 on missing input or an
 * empty manifest, 500 on unexpected failure.
 */
export async function POST(request: NextRequest) {
  try {
    const { prompt, manifest, model = 'openai/gpt-oss-20b' } = await request.json();
    console.log('[analyze-edit-intent] Request received');
    console.log('[analyze-edit-intent] Prompt:', prompt);
    console.log('[analyze-edit-intent] Model:', model);
    console.log('[analyze-edit-intent] Manifest files count:', manifest?.files ? Object.keys(manifest.files).length : 0);
    if (!prompt || !manifest) {
      return NextResponse.json({
        error: 'prompt and manifest are required'
      }, { status: 400 });
    }
    // Drop manifest entries that are clearly not files: no extension, or a
    // path ending in a purely numeric segment (e.g. "foo/123").
    const validFiles = Object.entries(manifest.files as Record<string, any>)
      .filter(([path]) => path.includes('.') && !path.match(/\/\d+$/));
    // One summary line per file (path, component name, rendered children) so
    // the model gets structural context without full file contents.
    const fileSummary = validFiles
      .map(([path, info]: [string, any]) => {
        const componentName = info.componentInfo?.name || path.split('/').pop();
        const childComponents = info.componentInfo?.childComponents?.join(', ') || 'none';
        return `- ${path} (${componentName}, renders: ${childComponents})`;
      })
      .join('\n');
    console.log('[analyze-edit-intent] Valid files found:', validFiles.length);
    if (validFiles.length === 0) {
      console.error('[analyze-edit-intent] No valid files found in manifest');
      return NextResponse.json({
        success: false,
        error: 'No valid files found in manifest'
      }, { status: 400 });
    }
    console.log('[analyze-edit-intent] Analyzing prompt:', prompt);
    console.log('[analyze-edit-intent] File summary preview:', fileSummary.split('\n').slice(0, 5).join('\n'));
    // Route the model id to the right provider client. "openai/gpt-oss-*"
    // models are served by Groq despite the "openai/" prefix.
    let aiModel;
    if (model.startsWith('anthropic/')) {
      aiModel = anthropic(model.replace('anthropic/', ''));
    } else if (model.startsWith('openai/')) {
      if (model.includes('gpt-oss')) {
        aiModel = groq(model);
      } else {
        aiModel = openai(model.replace('openai/', ''));
      }
    } else {
      // Default to groq if model format is unclear
      aiModel = groq(model);
    }
    console.log('[analyze-edit-intent] Using AI model:', model);
    // Ask the model for a structured search plan validated by searchPlanSchema.
    const result = await generateObject({
      model: aiModel,
      schema: searchPlanSchema,
      messages: [
        {
          role: 'system',
          content: `You are an expert at planning code searches. Your job is to create a search strategy to find the exact code that needs to be edited.
DO NOT GUESS which files to edit. Instead, provide specific search terms that will locate the code.
SEARCH STRATEGY RULES:
1. For text changes (e.g., "change 'Start Deploying' to 'Go Now'"):
- Search for the EXACT text: "Start Deploying"
2. For style changes (e.g., "make header black"):
- Search for component names: "Header", "<header"
- Search for class names: "header", "navbar"
- Search for className attributes containing relevant words
3. For removing elements (e.g., "remove the deploy button"):
- Search for the button text or aria-label
- Search for relevant IDs or data-testids
4. For navigation/header issues:
- Search for: "navigation", "nav", "Header", "navbar"
- Look for Link components or href attributes
5. Be SPECIFIC:
- Use exact capitalization for user-visible text
- Include multiple search terms for redundancy
- Add regex patterns for structural searches
Current project structure for context:
${fileSummary}`
        },
        {
          role: 'user',
          content: `User request: "${prompt}"
Create a search plan to find the exact code that needs to be modified. Include specific search terms and patterns.`
        }
      ]
    });
    console.log('[analyze-edit-intent] Search plan created:', {
      editType: result.object.editType,
      searchTerms: result.object.searchTerms,
      patterns: result.object.regexPatterns?.length || 0,
      reasoning: result.object.reasoning
    });
    // Return the search plan, not file matches
    return NextResponse.json({
      success: true,
      searchPlan: result.object
    });
  } catch (error) {
    console.error('[analyze-edit-intent] Error:', error);
    return NextResponse.json({
      success: false,
      error: (error as Error).message
    }, { status: 500 });
  }
}
+707
View File
@@ -0,0 +1,707 @@
import { NextRequest, NextResponse } from 'next/server';
import { Sandbox } from '@e2b/code-interpreter';
import type { SandboxState } from '@/types/sandbox';
import type { ConversationState } from '@/types/conversation';
// Cross-request state stashed on globalThis. NOTE(review): this only survives
// within a single server process/instance — confirm deployment topology makes
// that assumption safe (the code below also reconnects by sandboxId when not).
declare global {
  var conversationState: ConversationState | null;
  var activeSandbox: any;
  var existingFiles: Set<string>;
  var sandboxState: SandboxState;
}
interface ParsedResponse {
  explanation: string;
  template: string;
  files: Array<{ path: string; content: string }>;
  packages: string[];
  commands: string[];
  structure: string | null;
}

/**
 * Parses a raw AI completion into structured sections: files to write,
 * packages to install, commands to run, plus explanation/structure/template.
 *
 * File contents are recognized in four formats, tried in order:
 *  1. `<file path="...">...</file>` tags (duplicates deduped, preferring
 *     closed-tag and longer versions),
 *  2. markdown ```path="..."``` fenced blocks,
 *  3. a plain-text "Generated Files: a.jsx, b.css" listing followed by code,
 *  4. bare fenced code blocks with a `// File:`/`// Component:` comment.
 * npm packages are additionally inferred from import statements in each file.
 *
 * Fixes vs the prior revision: the markdown and plain-text passes no longer
 * re-add a path already collected from `<file>` tags (which produced duplicate
 * entries), and file names are regex-escaped before being interpolated into a
 * dynamic RegExp (a dot in a filename previously matched any character).
 */
function parseAIResponse(response: string): ParsedResponse {
  const sections = {
    files: [] as Array<{ path: string; content: string }>,
    commands: [] as string[],
    packages: [] as string[],
    structure: null as string | null,
    explanation: '',
    template: ''
  };

  // Escape regex metacharacters so untrusted text can be embedded in a RegExp.
  function escapeRegExp(text: string): string {
    return text.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  }

  // Extract third-party package names from ES6 import statements.
  // Skips relative paths, react/react-dom, and "@/..." project aliases;
  // scoped packages keep their first two segments (e.g. "@heroicons/react").
  function extractPackagesFromCode(content: string): string[] {
    const packages: string[] = [];
    // Match ES6 imports
    const importRegex = /import\s+(?:(?:\{[^}]*\}|\*\s+as\s+\w+|\w+)(?:\s*,\s*(?:\{[^}]*\}|\*\s+as\s+\w+|\w+))*\s+from\s+)?['"]([^'"]+)['"]/g;
    let importMatch;
    while ((importMatch = importRegex.exec(content)) !== null) {
      const importPath = importMatch[1];
      // Skip relative imports and built-in React
      if (!importPath.startsWith('.') && !importPath.startsWith('/') &&
          importPath !== 'react' && importPath !== 'react-dom' &&
          !importPath.startsWith('@/')) {
        // Extract package name (handle scoped packages like @heroicons/react)
        const packageName = importPath.startsWith('@')
          ? importPath.split('/').slice(0, 2).join('/')
          : importPath.split('/')[0];
        if (!packages.includes(packageName)) {
          packages.push(packageName);
          // Log important packages for debugging
          if (packageName === 'react-router-dom' || packageName.includes('router') || packageName.includes('icon')) {
            console.log(`[apply-ai-code-stream] Detected package from imports: ${packageName}`);
          }
        }
      }
    }
    return packages;
  }

  // Pass 1: <file> tags. A path may appear more than once (e.g. the model
  // retried after truncation); keep the best candidate per path.
  const fileMap = new Map<string, { content: string; isComplete: boolean }>();
  const fileRegex = /<file path="([^"]+)">([\s\S]*?)(?:<\/file>|$)/g;
  let match;
  while ((match = fileRegex.exec(response)) !== null) {
    const filePath = match[1];
    const content = match[2].trim();
    const hasClosingTag = match[0].includes('</file>');
    const existing = fileMap.get(filePath);
    // Prefer: first occurrence < incomplete < complete < longer complete.
    let shouldReplace = false;
    if (!existing) {
      shouldReplace = true; // First occurrence
    } else if (!existing.isComplete && hasClosingTag) {
      shouldReplace = true; // Replace incomplete with complete
      console.log(`[apply-ai-code-stream] Replacing incomplete ${filePath} with complete version`);
    } else if (existing.isComplete && hasClosingTag && content.length > existing.content.length) {
      shouldReplace = true; // Replace with longer complete version
      console.log(`[apply-ai-code-stream] Replacing ${filePath} with longer complete version`);
    } else if (!existing.isComplete && !hasClosingTag && content.length > existing.content.length) {
      shouldReplace = true; // Both incomplete, keep longer one
    }
    if (shouldReplace) {
      // A bare ellipsis usually means the model elided code; only accept such
      // content when it is the sole version we have.
      if (content.includes('...') && !content.includes('...props') && !content.includes('...rest')) {
        console.warn(`[apply-ai-code-stream] Warning: ${filePath} contains ellipsis, may be truncated`);
        if (!existing) {
          fileMap.set(filePath, { content, isComplete: hasClosingTag });
        }
      } else {
        fileMap.set(filePath, { content, isComplete: hasClosingTag });
      }
    }
  }
  for (const [path, { content, isComplete }] of fileMap.entries()) {
    if (!isComplete) {
      console.log(`[apply-ai-code-stream] Warning: File ${path} appears to be truncated (no closing tag)`);
    }
    sections.files.push({
      path,
      content
    });
    // Infer packages needed by this file's imports.
    const filePackages = extractPackagesFromCode(content);
    for (const pkg of filePackages) {
      if (!sections.packages.includes(pkg)) {
        sections.packages.push(pkg);
        console.log(`[apply-ai-code-stream] 📦 Package detected from imports: ${pkg}`);
      }
    }
  }

  // Pass 2: markdown code blocks carrying an explicit path attribute.
  const markdownFileRegex = /```(?:file )?path="([^"]+)"\n([\s\S]*?)```/g;
  while ((match = markdownFileRegex.exec(response)) !== null) {
    const filePath = match[1];
    const content = match[2].trim();
    // Skip paths already collected by an earlier pass (previously duplicated).
    if (sections.files.some(f => f.path === filePath)) continue;
    sections.files.push({
      path: filePath,
      content: content
    });
    const filePackages = extractPackagesFromCode(content);
    for (const pkg of filePackages) {
      if (!sections.packages.includes(pkg)) {
        sections.packages.push(pkg);
        console.log(`[apply-ai-code-stream] 📦 Package detected from imports: ${pkg}`);
      }
    }
  }

  // Pass 3: plain text format like "Generated Files: Header.jsx, index.css".
  const generatedFilesMatch = response.match(/Generated Files?:\s*([^\n]+)/i);
  if (generatedFilesMatch) {
    // Split by comma first, then trim whitespace, to preserve filenames with dots
    const filesList = generatedFilesMatch[1]
      .split(',')
      .map(f => f.trim())
      .filter(f => f.endsWith('.jsx') || f.endsWith('.js') || f.endsWith('.tsx') || f.endsWith('.ts') || f.endsWith('.css') || f.endsWith('.json') || f.endsWith('.html'));
    console.log(`[apply-ai-code-stream] Detected generated files from plain text: ${filesList.join(', ')}`);
    for (const fileName of filesList) {
      // Look for the file content after the file name. The name is escaped so
      // its dots match literally in the constructed pattern.
      const fileContentRegex = new RegExp(`${escapeRegExp(fileName)}[\\s\\S]*?(?:import[\\s\\S]+?)(?=Generated Files:|Applying code|$)`, 'i');
      const fileContentMatch = response.match(fileContentRegex);
      if (fileContentMatch) {
        // Extract just the code part (starting from import statements)
        const codeMatch = fileContentMatch[0].match(/^(import[\s\S]+)$/m);
        if (codeMatch) {
          const filePath = fileName.includes('/') ? fileName : `src/components/${fileName}`;
          // Skip paths already collected by an earlier pass.
          if (sections.files.some(f => f.path === filePath)) continue;
          sections.files.push({
            path: filePath,
            content: codeMatch[1].trim()
          });
          console.log(`[apply-ai-code-stream] Extracted content for ${filePath}`);
          const filePackages = extractPackagesFromCode(codeMatch[1]);
          for (const pkg of filePackages) {
            if (!sections.packages.includes(pkg)) {
              sections.packages.push(pkg);
              console.log(`[apply-ai-code-stream] Package detected from imports: ${pkg}`);
            }
          }
        }
      }
    }
  }

  // Pass 4: bare fenced code blocks identified by a File/Component comment.
  const codeBlockRegex = /```(?:jsx?|tsx?|javascript|typescript)?\n([\s\S]*?)```/g;
  while ((match = codeBlockRegex.exec(response)) !== null) {
    const content = match[1].trim();
    const fileNameMatch = content.match(/\/\/\s*(?:File:|Component:)\s*([^\n]+)/);
    if (fileNameMatch) {
      const fileName = fileNameMatch[1].trim();
      const filePath = fileName.includes('/') ? fileName : `src/components/${fileName}`;
      // Don't add duplicate files
      if (!sections.files.some(f => f.path === filePath)) {
        sections.files.push({
          path: filePath,
          content: content
        });
        const filePackages = extractPackagesFromCode(content);
        for (const pkg of filePackages) {
          if (!sections.packages.includes(pkg)) {
            sections.packages.push(pkg);
          }
        }
      }
    }
  }

  // Commands to execute in the sandbox.
  const cmdRegex = /<command>(.*?)<\/command>/g;
  while ((match = cmdRegex.exec(response)) !== null) {
    sections.commands.push(match[1].trim());
  }
  // Packages - support both single <package> tags and a bulk <packages> tag.
  const pkgRegex = /<package>(.*?)<\/package>/g;
  while ((match = pkgRegex.exec(response)) !== null) {
    sections.packages.push(match[1].trim());
  }
  const packagesRegex = /<packages>([\s\S]*?)<\/packages>/;
  const packagesMatch = response.match(packagesRegex);
  if (packagesMatch) {
    const packagesContent = packagesMatch[1].trim();
    // Split by newlines or commas
    const packagesList = packagesContent.split(/[\n,]+/)
      .map(pkg => pkg.trim())
      .filter(pkg => pkg.length > 0);
    sections.packages.push(...packagesList);
  }
  // Free-text sections.
  const structureMatch = /<structure>([\s\S]*?)<\/structure>/;
  const structResult = response.match(structureMatch);
  if (structResult) {
    sections.structure = structResult[1].trim();
  }
  const explanationMatch = /<explanation>([\s\S]*?)<\/explanation>/;
  const explResult = response.match(explanationMatch);
  if (explResult) {
    sections.explanation = explResult[1].trim();
  }
  const templateMatch = /<template>(.*?)<\/template>/;
  const templResult = response.match(templateMatch);
  if (templResult) {
    sections.template = templResult[1].trim();
  }
  return sections;
}
/**
 * POST /api/apply-ai-code-stream
 *
 * Applies a parsed AI response (files, packages, commands) to the active E2B
 * sandbox, streaming progress back as Server-Sent Events.
 *
 * Body: { response: string; isEdit?: boolean; packages?: string[]; sandboxId?: string }
 * Returns a text/event-stream of progress messages; JSON error responses when
 * the input is missing or no sandbox can be obtained.
 */
export async function POST(request: NextRequest) {
  try {
    const { response, isEdit = false, packages = [], sandboxId } = await request.json();
    if (!response) {
      return NextResponse.json({
        error: 'response is required'
      }, { status: 400 });
    }
    // Debug log the response
    console.log('[apply-ai-code-stream] Received response to parse:');
    console.log('[apply-ai-code-stream] Response length:', response.length);
    console.log('[apply-ai-code-stream] Response preview:', response.substring(0, 500));
    console.log('[apply-ai-code-stream] isEdit:', isEdit);
    console.log('[apply-ai-code-stream] packages:', packages);
    // Parse the AI response into files/packages/commands sections.
    const parsed = parseAIResponse(response);
    // Log what was parsed
    console.log('[apply-ai-code-stream] Parsed result:');
    console.log('[apply-ai-code-stream] Files found:', parsed.files.length);
    if (parsed.files.length > 0) {
      parsed.files.forEach(f => {
        console.log(`[apply-ai-code-stream] - ${f.path} (${f.content.length} chars)`);
      });
    }
    console.log('[apply-ai-code-stream] Packages found:', parsed.packages);
    // Initialize existingFiles if not already
    if (!global.existingFiles) {
      global.existingFiles = new Set<string>();
    }
    // First, always check the global state for active sandbox
    let sandbox = global.activeSandbox;
    // If this server instance doesn't hold the sandbox (e.g. a different
    // serverless instance created it), reconnect by id via the E2B API.
    if (!sandbox && sandboxId) {
      console.log(`[apply-ai-code-stream] Sandbox ${sandboxId} not in this instance, attempting reconnect...`);
      try {
        // Reconnect to the existing sandbox using E2B's connect method
        sandbox = await Sandbox.connect(sandboxId, { apiKey: process.env.E2B_API_KEY });
        console.log(`[apply-ai-code-stream] Successfully reconnected to sandbox ${sandboxId}`);
        // Store the reconnected sandbox globally for this instance
        global.activeSandbox = sandbox;
        // Rebuild the public URL for the Vite dev server (port 5173).
        if (!global.sandboxData) {
          const host = (sandbox as any).getHost(5173);
          global.sandboxData = {
            sandboxId,
            url: `https://${host}`
          };
        }
        // Initialize existingFiles if not already
        if (!global.existingFiles) {
          global.existingFiles = new Set<string>();
        }
      } catch (reconnectError) {
        console.error(`[apply-ai-code-stream] Failed to reconnect to sandbox ${sandboxId}:`, reconnectError);
        // Reconnect failed: still return the parse results so the client can
        // show what would have been applied.
        return NextResponse.json({
          success: false,
          error: `Failed to reconnect to sandbox ${sandboxId}. The sandbox may have expired or been terminated.`,
          results: {
            filesCreated: [],
            packagesInstalled: [],
            commandsExecuted: [],
            errors: [`Sandbox reconnection failed: ${(reconnectError as Error).message}`]
          },
          explanation: parsed.explanation,
          structure: parsed.structure,
          parsedFiles: parsed.files,
          message: `Parsed ${parsed.files.length} files but couldn't apply them - sandbox reconnection failed.`
        });
      }
    }
    // If no sandbox at all and no sandboxId provided, return an error
    if (!sandbox && !sandboxId) {
      console.log('[apply-ai-code-stream] No sandbox available and no sandboxId provided');
      return NextResponse.json({
        success: false,
        error: 'No active sandbox found. Please create a sandbox first.',
        results: {
          filesCreated: [],
          packagesInstalled: [],
          commandsExecuted: [],
          errors: ['No sandbox available']
        },
        explanation: parsed.explanation,
        structure: parsed.structure,
        parsedFiles: parsed.files,
        message: `Parsed ${parsed.files.length} files but no sandbox available to apply them.`
      });
    }
    // SSE plumbing: the readable half is returned to the client immediately;
    // the background task below writes "data: {...}\n\n" frames to the writer.
    const encoder = new TextEncoder();
    const stream = new TransformStream();
    const writer = stream.writable.getWriter();
    // Function to send progress updates
    const sendProgress = async (data: any) => {
      const message = `data: ${JSON.stringify(data)}\n\n`;
      await writer.write(encoder.encode(message));
    };
    // Fire-and-forget background task; the HTTP response (the stream) is
    // returned before this completes. Errors are reported via the stream.
    (async (sandboxInstance, req) => {
      const results = {
        filesCreated: [] as string[],
        filesUpdated: [] as string[],
        packagesInstalled: [] as string[],
        packagesAlreadyInstalled: [] as string[],
        packagesFailed: [] as string[],
        commandsExecuted: [] as string[],
        errors: [] as string[]
      };
      try {
        await sendProgress({
          type: 'start',
          message: 'Starting code application...',
          totalSteps: 3
        });
        // Step 1: Install packages (explicit request packages + those inferred
        // from imports by parseAIResponse).
        const packagesArray = Array.isArray(packages) ? packages : [];
        const parsedPackages = Array.isArray(parsed.packages) ? parsed.packages : [];
        // Combine and deduplicate packages
        const allPackages = [...packagesArray.filter(pkg => pkg && typeof pkg === 'string'), ...parsedPackages];
        // Use Set to remove duplicates, then filter out pre-installed packages
        const uniquePackages = [...new Set(allPackages)]
          .filter(pkg => pkg && typeof pkg === 'string' && pkg.trim() !== '') // Remove empty strings
          .filter(pkg => pkg !== 'react' && pkg !== 'react-dom'); // Filter pre-installed
        // Log if we found duplicates
        if (allPackages.length !== uniquePackages.length) {
          console.log(`[apply-ai-code-stream] Removed ${allPackages.length - uniquePackages.length} duplicate packages`);
          console.log(`[apply-ai-code-stream] Original packages:`, allPackages);
          console.log(`[apply-ai-code-stream] Deduplicated packages:`, uniquePackages);
        }
        if (uniquePackages.length > 0) {
          await sendProgress({
            type: 'step',
            step: 1,
            message: `Installing ${uniquePackages.length} packages...`,
            packages: uniquePackages
          });
          // Delegate to the streaming install-packages route and forward its
          // SSE progress frames to our own stream.
          try {
            // Construct the API URL properly for both dev and production
            const protocol = process.env.NODE_ENV === 'production' ? 'https' : 'http';
            const host = req.headers.get('host') || 'localhost:3000';
            const apiUrl = `${protocol}://${host}/api/install-packages`;
            const installResponse = await fetch(apiUrl, {
              method: 'POST',
              headers: { 'Content-Type': 'application/json' },
              body: JSON.stringify({
                packages: uniquePackages,
                sandboxId: sandboxId || (sandboxInstance as any).sandboxId
              })
            });
            if (installResponse.ok && installResponse.body) {
              const reader = installResponse.body.getReader();
              const decoder = new TextDecoder();
              while (true) {
                const { done, value } = await reader.read();
                if (done) break;
                const chunk = decoder.decode(value);
                if (!chunk) continue;
                // NOTE(review): an SSE frame split across two chunks would fail
                // the JSON.parse below and be silently dropped — confirm the
                // install route emits frames small enough for this to be rare.
                const lines = chunk.split('\n');
                for (const line of lines) {
                  if (line.startsWith('data: ')) {
                    try {
                      const data = JSON.parse(line.slice(6));
                      // Forward package installation progress
                      await sendProgress({
                        type: 'package-progress',
                        ...data
                      });
                      // Track results
                      if (data.type === 'success' && data.installedPackages) {
                        results.packagesInstalled = data.installedPackages;
                      }
                    } catch (e) {
                      // Ignore parse errors
                    }
                  }
                }
              }
            }
          } catch (error) {
            // Package install failure is non-fatal: record it and continue
            // with file creation.
            console.error('[apply-ai-code-stream] Error installing packages:', error);
            await sendProgress({
              type: 'warning',
              message: `Package installation skipped (${(error as Error).message}). Continuing with file creation...`
            });
            results.errors.push(`Package installation failed: ${(error as Error).message}`);
          }
        } else {
          await sendProgress({
            type: 'step',
            step: 1,
            message: 'No additional packages to install, skipping...'
          });
        }
        // Step 2: Create/update files
        const filesArray = Array.isArray(parsed.files) ? parsed.files : [];
        await sendProgress({
          type: 'step',
          step: 2,
          message: `Creating ${filesArray.length} files...`
        });
        // Filter out config files that shouldn't be created
        const configFiles = ['tailwind.config.js', 'vite.config.js', 'package.json', 'package-lock.json', 'tsconfig.json', 'postcss.config.js'];
        const filteredFiles = filesArray.filter(file => {
          if (!file || typeof file !== 'object') return false;
          const fileName = (file.path || '').split('/').pop() || '';
          return !configFiles.includes(fileName);
        });
        for (const [index, file] of filteredFiles.entries()) {
          try {
            // Send progress for each file
            await sendProgress({
              type: 'file-progress',
              current: index + 1,
              total: filteredFiles.length,
              fileName: file.path,
              action: 'creating'
            });
            // Normalize the file path: strip a leading slash and root
            // non-src/public paths under src/.
            let normalizedPath = file.path;
            if (normalizedPath.startsWith('/')) {
              normalizedPath = normalizedPath.substring(1);
            }
            if (!normalizedPath.startsWith('src/') &&
                !normalizedPath.startsWith('public/') &&
                normalizedPath !== 'index.html' &&
                !configFiles.includes(normalizedPath.split('/').pop() || '')) {
              normalizedPath = 'src/' + normalizedPath;
            }
            const fullPath = `/home/user/app/${normalizedPath}`;
            const isUpdate = global.existingFiles.has(normalizedPath);
            // Remove any CSS imports from JSX/JS files (we're using Tailwind)
            let fileContent = file.content;
            if (file.path.endsWith('.jsx') || file.path.endsWith('.js') || file.path.endsWith('.tsx') || file.path.endsWith('.ts')) {
              fileContent = fileContent.replace(/import\s+['"]\.\/[^'"]+\.css['"];?\s*\n?/g, '');
            }
            // Write the file using Python (code-interpreter SDK).
            // NOTE(review): replacing "$" with "\$" leaves a literal backslash
            // in the written file — Python triple-quoted strings do not treat
            // \$ as an escape. Verify files containing "$" round-trip intact.
            const escapedContent = fileContent
              .replace(/\\/g, '\\\\')
              .replace(/"""/g, '\\"\\"\\"')
              .replace(/\$/g, '\\$');
            await sandboxInstance.runCode(`
import os
os.makedirs(os.path.dirname("${fullPath}"), exist_ok=True)
with open("${fullPath}", 'w') as f:
    f.write("""${escapedContent}""")
print(f"File written: ${fullPath}")
            `);
            // Keep the in-memory file cache in sync with the sandbox.
            if (global.sandboxState?.fileCache) {
              global.sandboxState.fileCache.files[normalizedPath] = {
                content: fileContent,
                lastModified: Date.now()
              };
            }
            if (isUpdate) {
              if (results.filesUpdated) results.filesUpdated.push(normalizedPath);
            } else {
              if (results.filesCreated) results.filesCreated.push(normalizedPath);
              if (global.existingFiles) global.existingFiles.add(normalizedPath);
            }
            await sendProgress({
              type: 'file-complete',
              fileName: normalizedPath,
              action: isUpdate ? 'updated' : 'created'
            });
          } catch (error) {
            // Per-file failures are recorded and streamed; the loop continues.
            if (results.errors) {
              results.errors.push(`Failed to create ${file.path}: ${(error as Error).message}`);
            }
            await sendProgress({
              type: 'file-error',
              fileName: file.path,
              error: (error as Error).message
            });
          }
        }
        // Step 3: Execute commands
        const commandsArray = Array.isArray(parsed.commands) ? parsed.commands : [];
        if (commandsArray.length > 0) {
          await sendProgress({
            type: 'step',
            step: 3,
            message: `Executing ${commandsArray.length} commands...`
          });
          for (const [index, cmd] of commandsArray.entries()) {
            try {
              await sendProgress({
                type: 'command-progress',
                current: index + 1,
                total: parsed.commands.length,
                command: cmd,
                action: 'executing'
              });
              // NOTE(review): snake_case callbacks (on_stdout/on_stderr) and a
              // bare `timeout` look Python-SDK-style; the E2B JS SDK documents
              // onStdout/onStderr and timeoutMs — confirm these are honored.
              const result = await sandboxInstance.commands.run(cmd, {
                cwd: '/home/user/app',
                timeout: 60,
                on_stdout: async (data: string) => {
                  await sendProgress({
                    type: 'command-output',
                    command: cmd,
                    output: data,
                    stream: 'stdout'
                  });
                },
                on_stderr: async (data: string) => {
                  await sendProgress({
                    type: 'command-output',
                    command: cmd,
                    output: data,
                    stream: 'stderr'
                  });
                }
              });
              if (results.commandsExecuted) {
                results.commandsExecuted.push(cmd);
              }
              await sendProgress({
                type: 'command-complete',
                command: cmd,
                exitCode: result.exitCode,
                success: result.exitCode === 0
              });
            } catch (error) {
              if (results.errors) {
                results.errors.push(`Failed to execute ${cmd}: ${(error as Error).message}`);
              }
              await sendProgress({
                type: 'command-error',
                command: cmd,
                error: (error as Error).message
              });
            }
          }
        }
        // Send final results
        await sendProgress({
          type: 'complete',
          results,
          explanation: parsed.explanation,
          structure: parsed.structure,
          message: `Successfully applied ${results.filesCreated.length} files`
        });
        // Record which files this user message produced, for edit tracking.
        if (global.conversationState && results.filesCreated.length > 0) {
          const messages = global.conversationState.context.messages;
          if (messages.length > 0) {
            const lastMessage = messages[messages.length - 1];
            if (lastMessage.role === 'user') {
              lastMessage.metadata = {
                ...lastMessage.metadata,
                editedFiles: results.filesCreated
              };
            }
          }
          // Track applied code in project evolution
          if (global.conversationState.context.projectEvolution) {
            global.conversationState.context.projectEvolution.majorChanges.push({
              timestamp: Date.now(),
              description: parsed.explanation || 'Code applied',
              filesAffected: results.filesCreated || []
            });
          }
          global.conversationState.lastUpdated = Date.now();
        }
      } catch (error) {
        await sendProgress({
          type: 'error',
          error: (error as Error).message
        });
      } finally {
        // Always close the writer so the client's stream terminates.
        await writer.close();
      }
    })(sandbox, request);
    // Return the stream
    return new Response(stream.readable, {
      headers: {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
      },
    });
  } catch (error) {
    console.error('Apply AI code stream error:', error);
    return NextResponse.json(
      { error: error instanceof Error ? error.message : 'Failed to parse AI code' },
      { status: 500 }
    );
  }
}
+649
View File
@@ -0,0 +1,649 @@
import { NextRequest, NextResponse } from 'next/server';
import type { SandboxState } from '@/types/sandbox';
import type { ConversationState } from '@/types/conversation';
// Conversation context shared across requests on globalThis (per process).
declare global {
  var conversationState: ConversationState | null;
}
/** Structured sections extracted from a raw AI completion. */
interface ParsedResponse {
  explanation: string;
  template: string;
  files: Array<{ path: string; content: string }>;
  packages: string[];
  commands: string[];
  structure: string | null;
}

/**
 * Splits a raw AI completion into its tagged sections.
 *
 * Recognized tags: `<file path="...">`, `<command>`, `<package>`/`<packages>`,
 * `<structure>`, `<explanation>`, `<template>`. When the same file path
 * appears more than once, the best candidate wins: a closed-tag version beats
 * an unterminated one, and among equals the longer content is kept. Content
 * containing a bare ellipsis is treated as suspect and only accepted when no
 * other candidate for that path exists.
 */
function parseAIResponse(response: string): ParsedResponse {
  const result: ParsedResponse = {
    explanation: '',
    template: '',
    files: [],
    packages: [],
    commands: [],
    structure: null
  };

  // Best candidate per file path, tracked with its completion status.
  const candidates = new Map<string, { content: string; isComplete: boolean }>();
  for (const m of response.matchAll(/<file path="([^"]+)">([\s\S]*?)(?:<\/file>|$)/g)) {
    const filePath = m[1];
    const body = m[2].trim();
    const closed = m[0].includes('</file>');
    const prev = candidates.get(filePath);

    const replacesIncomplete = !!prev && !prev.isComplete && closed;
    const longerComplete = !!prev && prev.isComplete && closed && body.length > prev.content.length;
    const longerIncomplete = !!prev && !prev.isComplete && !closed && body.length > prev.content.length;
    const keep = !prev || replacesIncomplete || longerComplete || longerIncomplete;
    if (!keep) continue;

    if (replacesIncomplete) {
      console.log(`[parseAIResponse] Replacing incomplete ${filePath} with complete version`);
    } else if (longerComplete) {
      console.log(`[parseAIResponse] Replacing ${filePath} with longer complete version`);
    }

    // A bare "..." usually means elided code; accept it only as a last resort.
    const suspicious = body.includes('...') && !body.includes('...props') && !body.includes('...rest');
    if (suspicious) {
      console.warn(`[parseAIResponse] Warning: ${filePath} contains ellipsis, may be truncated`);
      if (!prev) {
        candidates.set(filePath, { content: body, isComplete: closed });
      }
    } else {
      candidates.set(filePath, { content: body, isComplete: closed });
    }
  }

  for (const [path, entry] of candidates) {
    if (!entry.isComplete) {
      console.log(`[parseAIResponse] Warning: File ${path} appears to be truncated (no closing tag)`);
    }
    result.files.push({ path, content: entry.content });
  }

  // Single-line command and package tags.
  for (const m of response.matchAll(/<command>(.*?)<\/command>/g)) {
    result.commands.push(m[1].trim());
  }
  for (const m of response.matchAll(/<package>(.*?)<\/package>/g)) {
    result.packages.push(m[1].trim());
  }

  // Bulk <packages> tag: entries separated by newlines and/or commas.
  const bulk = response.match(/<packages>([\s\S]*?)<\/packages>/);
  if (bulk) {
    const entries = bulk[1]
      .trim()
      .split(/[\n,]+/)
      .map(p => p.trim())
      .filter(p => p.length > 0);
    result.packages.push(...entries);
  }

  // Free-text sections; each is only assigned when its tag is present.
  const grab = (re: RegExp): string | null => {
    const m = response.match(re);
    return m ? m[1].trim() : null;
  };
  const structure = grab(/<structure>([\s\S]*?)<\/structure>/);
  if (structure !== null) {
    result.structure = structure;
  }
  const explanation = grab(/<explanation>([\s\S]*?)<\/explanation>/);
  if (explanation !== null) {
    result.explanation = explanation;
  }
  const template = grab(/<template>(.*?)<\/template>/);
  if (template !== null) {
    result.template = template;
  }
  return result;
}
// Sandbox-related state shared across requests on globalThis (per process).
// NOTE(review): this duplicates the conversationState declaration block above
// in spirit; consider consolidating the ambient declarations in one place.
declare global {
  var activeSandbox: any;
  var existingFiles: Set<string>;
  var sandboxState: SandboxState;
}
/**
 * POST /api/apply-ai-code
 *
 * Takes an AI-generated response (XML-ish tags describing files, packages and
 * commands), parses it, and applies it to the active E2B sandbox:
 *   1. installs packages (explicit list, or detected from import statements),
 *   2. writes the generated files (skipping template config files),
 *   3. auto-generates App.jsx / index.css scaffolding on first generation,
 *   4. runs any requested shell commands,
 *   5. auto-completes components that are imported but missing,
 *   6. records the applied files in the conversation state.
 *
 * Body: { response: string, isEdit?: boolean, packages?: string[] }
 * Returns 400 when `response` is missing. When no sandbox is active, returns
 * the parsed plan without applying anything.
 */
export async function POST(request: NextRequest) {
  try {
    const { response, isEdit = false, packages = [] } = await request.json();

    if (!response) {
      return NextResponse.json({
        error: 'response is required'
      }, { status: 400 });
    }

    // Parse the AI response into { files, packages, commands, explanation, structure, template }
    const parsed = parseAIResponse(response);

    // Initialize existingFiles if not already
    if (!global.existingFiles) {
      global.existingFiles = new Set<string>();
    }

    // If no active sandbox, just return parsed results so the caller can
    // create a sandbox first and re-apply.
    if (!global.activeSandbox) {
      return NextResponse.json({
        success: true,
        results: {
          filesCreated: parsed.files.map(f => f.path),
          packagesInstalled: parsed.packages,
          commandsExecuted: parsed.commands,
          errors: []
        },
        explanation: parsed.explanation,
        structure: parsed.structure,
        parsedFiles: parsed.files,
        message: `Parsed ${parsed.files.length} files successfully. Create a sandbox to apply them.`
      });
    }

    // Apply to active sandbox
    console.log('[apply-ai-code] Applying code to sandbox...');
    console.log('[apply-ai-code] Is edit mode:', isEdit);
    console.log('[apply-ai-code] Files to write:', parsed.files.map(f => f.path));
    console.log('[apply-ai-code] Existing files:', Array.from(global.existingFiles));

    const results = {
      filesCreated: [] as string[],
      filesUpdated: [] as string[],
      packagesInstalled: [] as string[],
      packagesAlreadyInstalled: [] as string[],
      packagesFailed: [] as string[],
      commandsExecuted: [] as string[],
      errors: [] as string[]
    };

    // Template config files that must never be (re)created by generated code
    // and are excluded from package detection. Defined once, used in both the
    // detection filter and the file-write filter below.
    const configFiles = ['tailwind.config.js', 'vite.config.js', 'package.json', 'package-lock.json', 'tsconfig.json', 'postcss.config.js'];

    // Combine packages from tool calls and parsed XML tags
    const allPackages = [...packages.filter((pkg: any) => pkg && typeof pkg === 'string'), ...parsed.packages];
    const uniquePackages = [...new Set(allPackages)]; // Remove duplicates

    if (uniquePackages.length > 0) {
      console.log('[apply-ai-code] Installing packages from XML tags and tool calls:', uniquePackages);

      try {
        const installResponse = await fetch(`${process.env.NEXT_PUBLIC_APP_URL || 'http://localhost:3000'}/api/install-packages`, {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({ packages: uniquePackages })
        });

        if (installResponse.ok) {
          const installResult = await installResponse.json();
          console.log('[apply-ai-code] Package installation result:', installResult);

          if (installResult.installed && installResult.installed.length > 0) {
            results.packagesInstalled = installResult.installed;
          }
          if (installResult.failed && installResult.failed.length > 0) {
            results.packagesFailed = installResult.failed;
          }
        }
      } catch (error) {
        // Best-effort: file writing continues even if installation fails.
        console.error('[apply-ai-code] Error installing packages:', error);
      }
    } else {
      // Fallback to detecting packages from code
      console.log('[apply-ai-code] No packages provided, detecting from generated code...');
      console.log('[apply-ai-code] Number of files to scan:', parsed.files.length);

      // Filter out config files first (they belong to the template)
      const filteredFilesForDetection = parsed.files.filter(file => {
        const fileName = file.path.split('/').pop() || '';
        return !configFiles.includes(fileName);
      });

      // Build files object for package detection
      const filesForPackageDetection: Record<string, string> = {};
      for (const file of filteredFilesForDetection) {
        filesForPackageDetection[file.path] = file.content;
        // Log if heroicons is found
        if (file.content.includes('heroicons')) {
          console.log(`[apply-ai-code] Found heroicons import in ${file.path}`);
        }
      }

      try {
        console.log('[apply-ai-code] Calling detect-and-install-packages...');
        const packageResponse = await fetch(`${process.env.NEXT_PUBLIC_APP_URL || 'http://localhost:3000'}/api/detect-and-install-packages`, {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({ files: filesForPackageDetection })
        });

        console.log('[apply-ai-code] Package detection response status:', packageResponse.status);

        if (packageResponse.ok) {
          const packageResult = await packageResponse.json();
          console.log('[apply-ai-code] Package installation result:', JSON.stringify(packageResult, null, 2));

          if (packageResult.packagesInstalled && packageResult.packagesInstalled.length > 0) {
            results.packagesInstalled = packageResult.packagesInstalled;
            console.log(`[apply-ai-code] Installed packages: ${packageResult.packagesInstalled.join(', ')}`);
          }
          if (packageResult.packagesAlreadyInstalled && packageResult.packagesAlreadyInstalled.length > 0) {
            results.packagesAlreadyInstalled = packageResult.packagesAlreadyInstalled;
            console.log(`[apply-ai-code] Already installed: ${packageResult.packagesAlreadyInstalled.join(', ')}`);
          }
          if (packageResult.packagesFailed && packageResult.packagesFailed.length > 0) {
            results.packagesFailed = packageResult.packagesFailed;
            console.error(`[apply-ai-code] Failed to install packages: ${packageResult.packagesFailed.join(', ')}`);
            results.errors.push(`Failed to install packages: ${packageResult.packagesFailed.join(', ')}`);
          }

          // Force Vite restart after package installation so new deps resolve
          if (results.packagesInstalled.length > 0) {
            console.log('[apply-ai-code] Packages were installed, forcing Vite restart...');

            try {
              // Call the restart-vite endpoint
              const restartResponse = await fetch(`${process.env.NEXT_PUBLIC_APP_URL || 'http://localhost:3000'}/api/restart-vite`, {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' }
              });

              if (restartResponse.ok) {
                const restartResult = await restartResponse.json();
                console.log('[apply-ai-code] Vite restart result:', restartResult.message);
              } else {
                console.error('[apply-ai-code] Failed to restart Vite:', await restartResponse.text());
              }
            } catch (e) {
              console.error('[apply-ai-code] Error calling restart-vite:', e);
            }

            // Additional delay to ensure files can be written after restart
            await new Promise(resolve => setTimeout(resolve, 1000));
          }
        } else {
          console.error('[apply-ai-code] Package detection/installation failed:', await packageResponse.text());
        }
      } catch (error) {
        console.error('[apply-ai-code] Error detecting/installing packages:', error);
        // Continue with file writing even if package installation fails
      }
    }

    // Filter out config files that shouldn't be created
    const filteredFiles = parsed.files.filter(file => {
      const fileName = file.path.split('/').pop() || '';
      if (configFiles.includes(fileName)) {
        console.warn(`[apply-ai-code] Skipping config file: ${file.path} - already exists in template`);
        return false;
      }
      return true;
    });

    // Create or update files AFTER package installation
    for (const file of filteredFiles) {
      try {
        // Normalize the file path
        let normalizedPath = file.path;

        // Remove leading slash if present
        if (normalizedPath.startsWith('/')) {
          normalizedPath = normalizedPath.substring(1);
        }

        // Ensure src/ prefix for component files (root-level template files
        // keep their location)
        if (!normalizedPath.startsWith('src/') &&
            !normalizedPath.startsWith('public/') &&
            normalizedPath !== 'index.html' &&
            normalizedPath !== 'package.json' &&
            normalizedPath !== 'vite.config.js' &&
            normalizedPath !== 'tailwind.config.js' &&
            normalizedPath !== 'postcss.config.js') {
          normalizedPath = 'src/' + normalizedPath;
        }

        const fullPath = `/home/user/app/${normalizedPath}`;
        const isUpdate = global.existingFiles.has(normalizedPath);

        // Remove any CSS imports from JSX/JS files (we're using Tailwind)
        let fileContent = file.content;
        if (file.path.endsWith('.jsx') || file.path.endsWith('.js') || file.path.endsWith('.tsx') || file.path.endsWith('.ts')) {
          fileContent = fileContent.replace(/import\s+['"]\.\/[^'"]+\.css['"];?\s*\n?/g, '');
        }

        console.log(`[apply-ai-code] Writing file using E2B files API: ${fullPath}`);

        try {
          // Use the correct E2B API - sandbox.files.write()
          await global.activeSandbox.files.write(fullPath, fileContent);
          console.log(`[apply-ai-code] Successfully wrote file: ${fullPath}`);

          // Update file cache so later reads see the new content
          if (global.sandboxState?.fileCache) {
            global.sandboxState.fileCache.files[normalizedPath] = {
              content: fileContent,
              lastModified: Date.now()
            };
            console.log(`[apply-ai-code] Updated file cache for: ${normalizedPath}`);
          }
        } catch (writeError) {
          console.error(`[apply-ai-code] E2B file write error:`, writeError);
          throw writeError;
        }

        if (isUpdate) {
          results.filesUpdated.push(normalizedPath);
        } else {
          results.filesCreated.push(normalizedPath);
          global.existingFiles.add(normalizedPath);
        }
      } catch (error) {
        results.errors.push(`Failed to create ${file.path}: ${(error as Error).message}`);
      }
    }

    // Only create App.jsx if it's not an edit and doesn't exist
    const appFileInParsed = parsed.files.some(f => {
      const normalized = f.path.replace(/^\//, '').replace(/^src\//, '');
      return normalized === 'App.jsx' || normalized === 'App.tsx';
    });

    const appFileExists = global.existingFiles.has('src/App.jsx') ||
                          global.existingFiles.has('src/App.tsx') ||
                          global.existingFiles.has('App.jsx') ||
                          global.existingFiles.has('App.tsx');

    if (!isEdit && !appFileInParsed && !appFileExists && parsed.files.length > 0) {
      // Find all component files
      const componentFiles = parsed.files.filter(f =>
        (f.path.endsWith('.jsx') || f.path.endsWith('.tsx')) &&
        f.path.includes('component')
      );

      // Generate imports for components
      const imports = componentFiles
        .filter(f => !f.path.includes('App.') && !f.path.includes('main.') && !f.path.includes('index.'))
        .map(f => {
          const pathParts = f.path.split('/');
          const fileName = pathParts[pathParts.length - 1];
          const componentName = fileName.replace(/\.(jsx|tsx)$/, '');
          // Fix import path - components are in src/components/
          const importPath = f.path.startsWith('src/')
            ? f.path.replace('src/', './').replace(/\.(jsx|tsx)$/, '')
            : './' + f.path.replace(/\.(jsx|tsx)$/, '');
          return `import ${componentName} from '${importPath}';`;
        })
        .join('\n');

      // Find the main component (heuristic: common landing-page names)
      const mainComponent = componentFiles.find(f => {
        const name = f.path.toLowerCase();
        return name.includes('header') ||
               name.includes('hero') ||
               name.includes('layout') ||
               name.includes('main') ||
               name.includes('home');
      }) || componentFiles[0];

      const mainComponentName = mainComponent
        ? mainComponent.path.split('/').pop()?.replace(/\.(jsx|tsx)$/, '')
        : null;

      // Create App.jsx with better structure
      const appContent = `import React from 'react';
${imports}
function App() {
  return (
    <div className="min-h-screen bg-gray-900 text-white p-8">
      ${mainComponentName ? `<${mainComponentName} />` : '<div className="text-center">\n <h1 className="text-4xl font-bold mb-4">Welcome to your React App</h1>\n <p className="text-gray-400">Your components have been created but need to be added here.</p>\n </div>'}
      {/* Generated components: ${componentFiles.map(f => f.path).join(', ')} */}
    </div>
  );
}
export default App;`;

      try {
        await global.activeSandbox.runCode(`
file_path = "/home/user/app/src/App.jsx"
file_content = """${appContent.replace(/"/g, '\\"').replace(/\n/g, '\\n')}"""
with open(file_path, 'w') as f:
    f.write(file_content)
print(f"Auto-generated: {file_path}")
        `);
        results.filesCreated.push('src/App.jsx (auto-generated)');
      } catch (error) {
        results.errors.push(`Failed to create App.jsx: ${(error as Error).message}`);
      }

      // Don't auto-generate App.css - we're using Tailwind CSS
      // Only create index.css if it doesn't exist
      // NOTE(review): this index.css block only runs when App.jsx was also
      // auto-generated (it is nested in the outer if) — confirm that nesting
      // is intended rather than a top-level check.
      const indexCssInParsed = parsed.files.some(f => {
        const normalized = f.path.replace(/^\//, '').replace(/^src\//, '');
        return normalized === 'index.css' || f.path === 'src/index.css';
      });
      const indexCssExists = global.existingFiles.has('src/index.css') ||
                             global.existingFiles.has('index.css');

      if (!isEdit && !indexCssInParsed && !indexCssExists) {
        try {
          await global.activeSandbox.runCode(`
file_path = "/home/user/app/src/index.css"
file_content = """@tailwind base;
@tailwind components;
@tailwind utilities;
:root {
  font-family: Inter, system-ui, Avenir, Helvetica, Arial, sans-serif;
  line-height: 1.5;
  font-weight: 400;
  color-scheme: dark;
  color: rgba(255, 255, 255, 0.87);
  background-color: #0a0a0a;
}
* {
  box-sizing: border-box;
}
body {
  margin: 0;
  min-width: 320px;
  min-height: 100vh;
}"""
with open(file_path, 'w') as f:
    f.write(file_content)
print(f"Auto-generated: {file_path}")
          `);
          results.filesCreated.push('src/index.css (with Tailwind)');
        } catch (error) {
          results.errors.push('Failed to create index.css with Tailwind');
        }
      }
    }

    // Execute commands requested by the AI response
    for (const cmd of parsed.commands) {
      try {
        await global.activeSandbox.runCode(`
import subprocess
import os
os.chdir('/home/user/app')
result = subprocess.run(${JSON.stringify(cmd.split(' '))}, capture_output=True, text=True)
print(f"Executed: ${cmd}")
print(result.stdout)
if result.stderr:
    print(f"Errors: {result.stderr}")
        `);
        results.commandsExecuted.push(cmd);
      } catch (error) {
        results.errors.push(`Failed to execute ${cmd}: ${(error as Error).message}`);
      }
    }

    // Check for missing imports in App.jsx
    const missingImports: string[] = [];
    const appFile = parsed.files.find(f =>
      f.path === 'src/App.jsx' || f.path === 'App.jsx'
    );

    if (appFile) {
      // Extract relative imports from App.jsx
      const importRegex = /import\s+(?:\w+|\{[^}]+\})\s+from\s+['"]([^'"]+)['"]/g;
      let match;
      const imports: string[] = [];

      while ((match = importRegex.exec(appFile.content)) !== null) {
        const importPath = match[1];
        if (importPath.startsWith('./') || importPath.startsWith('../')) {
          imports.push(importPath);
        }
      }

      // Check if all imported files exist
      for (const imp of imports) {
        // Skip CSS imports for this check
        if (imp.endsWith('.css')) continue;

        // Convert import path to expected file paths
        const basePath = imp.replace('./', 'src/');
        const possiblePaths = [
          basePath + '.jsx',
          basePath + '.js',
          basePath + '/index.jsx',
          basePath + '/index.js'
        ];

        const fileExists = parsed.files.some(f =>
          possiblePaths.some(path => f.path === path)
        );

        if (!fileExists) {
          missingImports.push(imp);
        }
      }
    }

    // Prepare response
    const responseData: any = {
      success: true,
      results,
      explanation: parsed.explanation,
      structure: parsed.structure,
      message: `Applied ${results.filesCreated.length} files successfully`
    };

    // Handle missing imports automatically
    if (missingImports.length > 0) {
      console.warn('[apply-ai-code] Missing imports detected:', missingImports);

      // Automatically generate missing components
      try {
        console.log('[apply-ai-code] Auto-generating missing components...');
        const autoCompleteResponse = await fetch(
          `${request.nextUrl.origin}/api/auto-complete-components`,
          {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({
              missingImports,
              model: 'claude-sonnet-4-20250514'
            })
          }
        );

        const autoCompleteData = await autoCompleteResponse.json();

        if (autoCompleteData.success) {
          responseData.autoCompleted = true;
          responseData.autoCompletedComponents = autoCompleteData.components;
          responseData.message = `Applied ${results.filesCreated.length} files + auto-generated ${autoCompleteData.files} missing components`;

          // Add auto-completed files to results
          results.filesCreated.push(...autoCompleteData.components);
        } else {
          // If auto-complete fails, still warn the user
          responseData.warning = `Missing ${missingImports.length} imported components: ${missingImports.join(', ')}`;
          responseData.missingImports = missingImports;
        }
      } catch (error) {
        console.error('[apply-ai-code] Auto-complete failed:', error);
        responseData.warning = `Missing ${missingImports.length} imported components: ${missingImports.join(', ')}`;
        responseData.missingImports = missingImports;
      }
    }

    // Track applied files in conversation state
    if (global.conversationState && results.filesCreated.length > 0) {
      // Update the last message metadata with edited files
      const messages = global.conversationState.context.messages;
      if (messages.length > 0) {
        const lastMessage = messages[messages.length - 1];
        if (lastMessage.role === 'user') {
          lastMessage.metadata = {
            ...lastMessage.metadata,
            editedFiles: results.filesCreated
          };
        }
      }

      // Track applied code in project evolution
      if (global.conversationState.context.projectEvolution) {
        global.conversationState.context.projectEvolution.majorChanges.push({
          timestamp: Date.now(),
          description: parsed.explanation || 'Code applied',
          filesAffected: results.filesCreated
        });
      }

      // Update last updated timestamp
      global.conversationState.lastUpdated = Date.now();

      console.log('[apply-ai-code] Updated conversation state with applied files:', results.filesCreated);
    }

    return NextResponse.json(responseData);
  } catch (error) {
    console.error('Apply AI code error:', error);
    return NextResponse.json(
      { error: error instanceof Error ? error.message : 'Failed to parse AI code' },
      { status: 500 }
    );
  }
}
+12
View File
@@ -0,0 +1,12 @@
import { NextResponse } from 'next/server';
// Stub endpoint to prevent 404 errors
// This endpoint is being called but the source is unknown
// Returns empty errors array to satisfy any calling code
/**
 * GET handler — stub that always reports a clean (error-free) Vite state.
 * Exists only so callers of this endpoint get a 200 instead of a 404.
 */
export async function GET() {
  const payload = {
    success: true,
    errors: [],
    message: 'No Vite errors detected'
  };
  return NextResponse.json(payload);
}
+26
View File
@@ -0,0 +1,26 @@
import { NextResponse } from 'next/server';
declare global {
  // Cached Vite error payload plus capture timestamp; null once cleared.
  var viteErrorsCache: { errors: any[], timestamp: number } | null;
}
/**
 * POST /api/clear-vite-errors-cache
 * Drops the module-level Vite error cache so the next read starts fresh.
 */
export async function POST() {
  try {
    // Resetting to null is the whole operation.
    global.viteErrorsCache = null;
    console.log('[clear-vite-errors-cache] Cache cleared');

    const body = {
      success: true,
      message: 'Vite errors cache cleared'
    };
    return NextResponse.json(body);
  } catch (error) {
    console.error('[clear-vite-errors-cache] Error:', error);
    const failure = {
      success: false,
      error: (error as Error).message
    };
    return NextResponse.json(failure, { status: 500 });
  }
}
+144
View File
@@ -0,0 +1,144 @@
import { NextRequest, NextResponse } from 'next/server';
import type { ConversationState } from '@/types/conversation';
declare global {
  // Singleton conversation state for the active session; null when none.
  var conversationState: ConversationState | null;
}
// GET: Retrieve current conversation state
/**
 * GET /api/conversation-state
 * Returns the in-memory conversation state, or a null state with an
 * explanatory message when no conversation is active.
 */
export async function GET() {
  try {
    const current = global.conversationState;

    if (!current) {
      return NextResponse.json({
        success: true,
        state: null,
        message: 'No active conversation'
      });
    }

    return NextResponse.json({ success: true, state: current });
  } catch (error) {
    console.error('[conversation-state] Error getting state:', error);
    return NextResponse.json(
      { success: false, error: (error as Error).message },
      { status: 500 }
    );
  }
}
// POST: Reset or update conversation state
/**
 * POST /api/conversation-state
 *
 * Mutates the in-memory conversation state. Supported actions:
 *  - "reset":     replace the state with a fresh, empty conversation
 *  - "clear-old": trim history (keep last 5 messages, 3 edits, 2 changes)
 *  - "update":    merge `data.currentTopic` / `data.userPreferences` in
 *
 * Body: { action: 'reset' | 'clear-old' | 'update', data?: object }
 * Returns 400 for unknown actions or when there is no state to modify.
 */
export async function POST(request: NextRequest) {
  try {
    const { action, data } = await request.json();

    switch (action) {
      case 'reset':
        // Start a brand-new conversation with empty context.
        global.conversationState = {
          conversationId: `conv-${Date.now()}`,
          startedAt: Date.now(),
          lastUpdated: Date.now(),
          context: {
            messages: [],
            edits: [],
            projectEvolution: { majorChanges: [] },
            userPreferences: {}
          }
        };
        console.log('[conversation-state] Reset conversation state');
        return NextResponse.json({
          success: true,
          message: 'Conversation state reset',
          state: global.conversationState
        });

      case 'clear-old':
        // Clear old conversation data but keep recent context
        if (!global.conversationState) {
          return NextResponse.json({
            success: false,
            error: 'No active conversation to clear'
          }, { status: 400 });
        }

        // Keep only recent data
        global.conversationState.context.messages = global.conversationState.context.messages.slice(-5);
        global.conversationState.context.edits = global.conversationState.context.edits.slice(-3);
        global.conversationState.context.projectEvolution.majorChanges =
          global.conversationState.context.projectEvolution.majorChanges.slice(-2);

        console.log('[conversation-state] Cleared old conversation data');
        return NextResponse.json({
          success: true,
          message: 'Old conversation data cleared',
          state: global.conversationState
        });

      case 'update':
        if (!global.conversationState) {
          return NextResponse.json({
            success: false,
            error: 'No active conversation to update'
          }, { status: 400 });
        }

        // Update specific fields if provided (preferences are merged, not replaced)
        if (data) {
          if (data.currentTopic) {
            global.conversationState.context.currentTopic = data.currentTopic;
          }
          if (data.userPreferences) {
            global.conversationState.context.userPreferences = {
              ...global.conversationState.context.userPreferences,
              ...data.userPreferences
            };
          }
          global.conversationState.lastUpdated = Date.now();
        }

        return NextResponse.json({
          success: true,
          message: 'Conversation state updated',
          state: global.conversationState
        });

      default:
        // Fixed: previous message omitted the valid "clear-old" action.
        return NextResponse.json({
          success: false,
          error: 'Invalid action. Use "reset", "clear-old", or "update"'
        }, { status: 400 });
    }
  } catch (error) {
    console.error('[conversation-state] Error:', error);
    return NextResponse.json({
      success: false,
      error: (error as Error).message
    }, { status: 500 });
  }
}
// DELETE: Clear conversation state
/**
 * DELETE /api/conversation-state
 * Discards the in-memory conversation state entirely.
 */
export async function DELETE() {
  try {
    global.conversationState = null;
    console.log('[conversation-state] Cleared conversation state');

    const body = {
      success: true,
      message: 'Conversation state cleared'
    };
    return NextResponse.json(body);
  } catch (error) {
    console.error('[conversation-state] Error clearing state:', error);
    const failure = {
      success: false,
      error: (error as Error).message
    };
    return NextResponse.json(failure, { status: 500 });
  }
}
+365
View File
@@ -0,0 +1,365 @@
import { NextResponse } from 'next/server';
import { Sandbox } from '@e2b/code-interpreter';
import type { SandboxState } from '@/types/sandbox';
import { appConfig } from '@/config/app.config';
// Store active sandbox globally (persists between requests in this process)
declare global {
  var activeSandbox: any;          // live E2B sandbox instance
  var sandboxData: any;            // { sandboxId, url } for the active sandbox
  var existingFiles: Set<string>;  // relative paths already present in the sandbox
  var sandboxState: SandboxState;  // richer state incl. file cache
}
/**
 * POST /api/create-ai-sandbox
 *
 * Creates a fresh E2B sandbox and bootstraps a Vite + React + Tailwind app
 * inside it: kills any existing sandbox, writes the template files via an
 * embedded Python script, runs `npm install`, starts the Vite dev server,
 * and records the sandbox in the process-wide globals.
 *
 * Returns { success, sandboxId, url } or a 500 with error details. The
 * sandbox is killed on failure so no orphan keeps running.
 */
export async function POST() {
  let sandbox: any = null;

  try {
    console.log('[create-ai-sandbox] Creating base sandbox...');

    // Kill existing sandbox if any — only one sandbox is active at a time.
    if (global.activeSandbox) {
      console.log('[create-ai-sandbox] Killing existing sandbox...');
      try {
        await global.activeSandbox.kill();
      } catch (e) {
        console.error('Failed to close existing sandbox:', e);
      }
      global.activeSandbox = null;
    }

    // Clear existing files tracking
    if (global.existingFiles) {
      global.existingFiles.clear();
    } else {
      global.existingFiles = new Set<string>();
    }

    // Create base sandbox - we'll set up Vite ourselves for full control
    console.log(`[create-ai-sandbox] Creating base E2B sandbox with ${appConfig.e2b.timeoutMinutes} minute timeout...`);
    sandbox = await Sandbox.create({
      apiKey: process.env.E2B_API_KEY,
      timeoutMs: appConfig.e2b.timeoutMs
    });

    // Fall back to a timestamp when the SDK does not expose sandboxId.
    const sandboxId = (sandbox as any).sandboxId || Date.now().toString();
    const host = (sandbox as any).getHost(appConfig.e2b.vitePort);

    console.log(`[create-ai-sandbox] Sandbox created: ${sandboxId}`);
    console.log(`[create-ai-sandbox] Sandbox host: ${host}`);

    // Set up a basic Vite React app using Python to write files
    console.log('[create-ai-sandbox] Setting up Vite React app...');

    // Write all files in a single Python script to avoid multiple executions
    const setupScript = `
import os
import json

print('Setting up React app with Vite and Tailwind...')

# Create directory structure
os.makedirs('/home/user/app/src', exist_ok=True)

# Package.json
package_json = {
    "name": "sandbox-app",
    "version": "1.0.0",
    "type": "module",
    "scripts": {
        "dev": "vite --host",
        "build": "vite build",
        "preview": "vite preview"
    },
    "dependencies": {
        "react": "^18.2.0",
        "react-dom": "^18.2.0"
    },
    "devDependencies": {
        "@vitejs/plugin-react": "^4.0.0",
        "vite": "^4.3.9",
        "tailwindcss": "^3.3.0",
        "postcss": "^8.4.31",
        "autoprefixer": "^10.4.16"
    }
}

with open('/home/user/app/package.json', 'w') as f:
    json.dump(package_json, f, indent=2)
print('✓ package.json')

# Vite config for E2B - with allowedHosts
vite_config = """import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'

// E2B-compatible Vite configuration
export default defineConfig({
  plugins: [react()],
  server: {
    host: '0.0.0.0',
    port: 5173,
    strictPort: true,
    hmr: false,
    allowedHosts: ['.e2b.app', 'localhost', '127.0.0.1']
  }
})"""

with open('/home/user/app/vite.config.js', 'w') as f:
    f.write(vite_config)
print('✓ vite.config.js')

# Tailwind config - standard without custom design tokens
tailwind_config = """/** @type {import('tailwindcss').Config} */
export default {
  content: [
    "./index.html",
    "./src/**/*.{js,ts,jsx,tsx}",
  ],
  theme: {
    extend: {},
  },
  plugins: [],
}"""

with open('/home/user/app/tailwind.config.js', 'w') as f:
    f.write(tailwind_config)
print('✓ tailwind.config.js')

# PostCSS config
postcss_config = """export default {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
}"""

with open('/home/user/app/postcss.config.js', 'w') as f:
    f.write(postcss_config)
print('✓ postcss.config.js')

# Index.html
index_html = """<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Sandbox App</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.jsx"></script>
</body>
</html>"""

with open('/home/user/app/index.html', 'w') as f:
    f.write(index_html)
print('✓ index.html')

# Main.jsx
main_jsx = """import React from 'react'
import ReactDOM from 'react-dom/client'
import App from './App.jsx'
import './index.css'

ReactDOM.createRoot(document.getElementById('root')).render(
  <React.StrictMode>
    <App />
  </React.StrictMode>,
)"""

with open('/home/user/app/src/main.jsx', 'w') as f:
    f.write(main_jsx)
print('✓ src/main.jsx')

# App.jsx with explicit Tailwind test
app_jsx = """function App() {
  return (
    <div className="min-h-screen bg-gray-900 text-white flex items-center justify-center p-4">
      <div className="text-center max-w-2xl">
        <p className="text-lg text-gray-400">
          Sandbox Ready<br/>
          Start building your React app with Vite and Tailwind CSS!
        </p>
      </div>
    </div>
  )
}

export default App"""

with open('/home/user/app/src/App.jsx', 'w') as f:
    f.write(app_jsx)
print('✓ src/App.jsx')

# Index.css with explicit Tailwind directives
index_css = """@tailwind base;
@tailwind components;
@tailwind utilities;

/* Force Tailwind to load */
@layer base {
  :root {
    font-synthesis: none;
    text-rendering: optimizeLegibility;
    -webkit-font-smoothing: antialiased;
    -moz-osx-font-smoothing: grayscale;
    -webkit-text-size-adjust: 100%;
  }
  * {
    margin: 0;
    padding: 0;
    box-sizing: border-box;
  }
}

body {
  font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, sans-serif;
  background-color: rgb(17 24 39);
}"""

with open('/home/user/app/src/index.css', 'w') as f:
    f.write(index_css)
print('✓ src/index.css')

print('\\nAll files created successfully!')
`;

    // Execute the setup script
    await sandbox.runCode(setupScript);

    // Install dependencies
    console.log('[create-ai-sandbox] Installing dependencies...');
    await sandbox.runCode(`
import subprocess
import sys

print('Installing npm packages...')
result = subprocess.run(
    ['npm', 'install'],
    cwd='/home/user/app',
    capture_output=True,
    text=True
)

if result.returncode == 0:
    print('✓ Dependencies installed successfully')
else:
    print(f'⚠ Warning: npm install had issues: {result.stderr}')
    # Continue anyway as it might still work
    `);

    // Start Vite dev server
    console.log('[create-ai-sandbox] Starting Vite dev server...');
    await sandbox.runCode(`
import subprocess
import os
import time

os.chdir('/home/user/app')

# Kill any existing Vite processes
subprocess.run(['pkill', '-f', 'vite'], capture_output=True)
time.sleep(1)

# Start Vite dev server
env = os.environ.copy()
env['FORCE_COLOR'] = '0'

process = subprocess.Popen(
    ['npm', 'run', 'dev'],
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
    env=env
)

print(f'✓ Vite dev server started with PID: {process.pid}')
print('Waiting for server to be ready...')
    `);

    // Wait for Vite to be fully ready (configured startup delay)
    await new Promise(resolve => setTimeout(resolve, appConfig.e2b.viteStartupDelay));

    // Force Tailwind CSS to rebuild by touching the CSS file
    await sandbox.runCode(`
import os
import time

# Touch the CSS file to trigger rebuild
css_file = '/home/user/app/src/index.css'
if os.path.exists(css_file):
    os.utime(css_file, None)
    print('✓ Triggered CSS rebuild')

# Also ensure PostCSS processes it
time.sleep(2)
print('✓ Tailwind CSS should be loaded')
    `);

    // Store sandbox globally
    global.activeSandbox = sandbox;
    global.sandboxData = {
      sandboxId,
      url: `https://${host}`
    };

    // Set extended timeout on the sandbox instance if method available
    if (typeof sandbox.setTimeout === 'function') {
      sandbox.setTimeout(appConfig.e2b.timeoutMs);
      console.log(`[create-ai-sandbox] Set sandbox timeout to ${appConfig.e2b.timeoutMinutes} minutes`);
    }

    // Initialize sandbox state (empty file cache; files tracked below)
    global.sandboxState = {
      fileCache: {
        files: {},
        lastSync: Date.now(),
        sandboxId
      },
      sandbox,
      sandboxData: {
        sandboxId,
        url: `https://${host}`
      }
    };

    // Track initial files so later writes are treated as updates
    global.existingFiles.add('src/App.jsx');
    global.existingFiles.add('src/main.jsx');
    global.existingFiles.add('src/index.css');
    global.existingFiles.add('index.html');
    global.existingFiles.add('package.json');
    global.existingFiles.add('vite.config.js');
    global.existingFiles.add('tailwind.config.js');
    global.existingFiles.add('postcss.config.js');

    console.log('[create-ai-sandbox] Sandbox ready at:', `https://${host}`);

    return NextResponse.json({
      success: true,
      sandboxId,
      url: `https://${host}`,
      message: 'Sandbox created and Vite React app initialized'
    });
  } catch (error) {
    console.error('[create-ai-sandbox] Error:', error);

    // Clean up on error so no orphaned sandbox keeps running
    if (sandbox) {
      try {
        await sandbox.kill();
      } catch (e) {
        console.error('Failed to close sandbox on error:', e);
      }
    }

    return NextResponse.json(
      {
        error: error instanceof Error ? error.message : 'Failed to create sandbox',
        details: error instanceof Error ? error.stack : undefined
      },
      { status: 500 }
    );
  }
}
+71
View File
@@ -0,0 +1,71 @@
import { NextRequest, NextResponse } from 'next/server';
declare global {
  // Live E2B sandbox instance shared across routes; unset when none active.
  var activeSandbox: any;
}
/**
 * POST /api/create-zip
 *
 * Zips the sandbox project at /home/user/app (excluding node_modules, .git,
 * .next and dist), base64-encodes it, and returns it as a data: URL the
 * client can download directly.
 *
 * Returns 400 when no sandbox is active.
 * NOTE(review): the whole zip travels through stdout as base64, so very
 * large projects may hit response-size limits — confirm acceptable.
 */
export async function POST(request: NextRequest) {
  try {
    if (!global.activeSandbox) {
      return NextResponse.json({
        success: false,
        error: 'No active sandbox'
      }, { status: 400 });
    }

    console.log('[create-zip] Creating project zip...');

    // Create zip file in sandbox (return value was unused, so it is discarded)
    await global.activeSandbox.runCode(`
import zipfile
import os
import json

os.chdir('/home/user/app')

# Create zip file
with zipfile.ZipFile('/tmp/project.zip', 'w', zipfile.ZIP_DEFLATED) as zipf:
    for root, dirs, files in os.walk('.'):
        # Skip node_modules and .git
        dirs[:] = [d for d in dirs if d not in ['node_modules', '.git', '.next', 'dist']]
        for file in files:
            file_path = os.path.join(root, file)
            arcname = os.path.relpath(file_path, '.')
            zipf.write(file_path, arcname)

# Get file size
file_size = os.path.getsize('/tmp/project.zip')
print(f" Created project.zip ({file_size} bytes)")
    `);

    // Read the zip file and convert to base64 (printed to stdout)
    const readResult = await global.activeSandbox.runCode(`
import base64

with open('/tmp/project.zip', 'rb') as f:
    content = f.read()
    encoded = base64.b64encode(content).decode('utf-8')
    print(encoded)
    `);

    const base64Content = readResult.logs.stdout.join('').trim();

    // Create a data URL for download
    const dataUrl = `data:application/zip;base64,${base64Content}`;

    return NextResponse.json({
      success: true,
      dataUrl,
      fileName: 'e2b-project.zip',
      message: 'Zip file created successfully'
    });
  } catch (error) {
    console.error('[create-zip] Error:', error);
    return NextResponse.json({
      success: false,
      error: (error as Error).message
    }, { status: 500 });
  }
}
@@ -0,0 +1,260 @@
import { NextRequest, NextResponse } from 'next/server';
declare global {
  // Live E2B sandbox instance shared across routes; unset when none active.
  var activeSandbox: any;
}
/**
 * POST /api/detect-and-install-packages
 *
 * Scans the supplied file contents for import/require statements, derives the
 * npm package names they reference, and installs any that are missing from the
 * sandbox's node_modules.
 *
 * Body: { files: Record<string, string> } — path -> file content.
 * Responds with { success, packagesInstalled, packagesFailed?, packagesAlreadyInstalled?, message, logs? }.
 */
export async function POST(request: NextRequest) {
  try {
    const { files } = await request.json();

    if (!files || typeof files !== 'object') {
      return NextResponse.json({
        success: false,
        error: 'Files object is required'
      }, { status: 400 });
    }

    if (!global.activeSandbox) {
      return NextResponse.json({
        success: false,
        error: 'No active sandbox'
      }, { status: 404 });
    }

    console.log('[detect-and-install-packages] Processing files:', Object.keys(files));

    // Extract all import statements from the files
    const imports = new Set<string>();
    const importRegex = /import\s+(?:(?:\{[^}]*\}|\*\s+as\s+\w+|\w+)\s*,?\s*)*(?:from\s+)?['"]([^'"]+)['"]/g;
    const requireRegex = /require\s*\(['"]([^'"]+)['"]\)/g;

    for (const [filePath, content] of Object.entries(files)) {
      if (typeof content !== 'string') continue;

      // Skip non-JS/JSX/TS/TSX files
      if (!filePath.match(/\.(jsx?|tsx?)$/)) continue;

      // Find ES6 imports (exec loop runs to null, which resets lastIndex)
      let match;
      while ((match = importRegex.exec(content)) !== null) {
        imports.add(match[1]);
      }

      // Find CommonJS requires
      while ((match = requireRegex.exec(content)) !== null) {
        imports.add(match[1]);
      }
    }

    console.log('[detect-and-install-packages] Found imports:', Array.from(imports));

    // Log specific heroicons imports (debug aid for a common failure case)
    const heroiconImports = Array.from(imports).filter(imp => imp.includes('heroicons'));
    if (heroiconImports.length > 0) {
      console.log('[detect-and-install-packages] Heroicon imports:', heroiconImports);
    }

    // Filter out relative imports and built-in Node modules.
    // Fix: the previous version computed an unused `parts` variable and had an
    // if/else where both branches returned true; it also missed 'node:'-prefixed
    // builtin specifiers, which would have been treated as npm packages.
    const builtins = ['fs', 'path', 'http', 'https', 'crypto', 'stream', 'util', 'os', 'url', 'querystring', 'child_process'];
    const packages = Array.from(imports).filter(imp => {
      // Skip relative imports
      if (imp.startsWith('.') || imp.startsWith('/')) return false;
      // Skip built-in Node modules, both bare ('fs') and prefixed ('node:fs')
      if (imp.startsWith('node:')) return false;
      if (builtins.includes(imp)) return false;
      return true;
    });

    // Extract just the package names (without subpaths)
    const packageNames = packages.map(pkg => {
      if (pkg.startsWith('@')) {
        // Scoped package: @scope/package or @scope/package/subpath
        const parts = pkg.split('/');
        return parts.slice(0, 2).join('/');
      } else {
        // Regular package: package or package/subpath
        return pkg.split('/')[0];
      }
    });

    // Remove duplicates
    const uniquePackages = [...new Set(packageNames)];

    console.log('[detect-and-install-packages] Packages to install:', uniquePackages);

    if (uniquePackages.length === 0) {
      return NextResponse.json({
        success: true,
        packagesInstalled: [],
        message: 'No new packages to install'
      });
    }

    // Check which packages are already installed (presence in node_modules)
    const checkResult = await global.activeSandbox.runCode(`
import os
import json

installed = []
missing = []
packages = ${JSON.stringify(uniquePackages)}

for package in packages:
    # Scoped and plain packages both resolve to node_modules/<name>
    package_path = f"/home/user/app/node_modules/{package}"
    if os.path.exists(package_path):
        installed.append(package)
    else:
        missing.append(package)

result = {
    'installed': installed,
    'missing': missing
}
print(json.dumps(result))
`);

    const status = JSON.parse(checkResult.logs.stdout.join(''));
    console.log('[detect-and-install-packages] Package status:', status);

    if (status.missing.length === 0) {
      return NextResponse.json({
        success: true,
        packagesInstalled: [],
        packagesAlreadyInstalled: status.installed,
        message: 'All packages already installed'
      });
    }

    // Install missing packages
    console.log('[detect-and-install-packages] Installing packages:', status.missing);

    const installResult = await global.activeSandbox.runCode(`
import subprocess
import os
import json

os.chdir('/home/user/app')
packages_to_install = ${JSON.stringify(status.missing)}

# Join packages into a single install command
packages_str = ' '.join(packages_to_install)
cmd = f'npm install {packages_str} --save'
print(f"Running: {cmd}")

# Run npm install with explicit save flag
result = subprocess.run(['npm', 'install', '--save'] + packages_to_install,
                        capture_output=True,
                        text=True,
                        cwd='/home/user/app',
                        timeout=60)

print("stdout:", result.stdout)
if result.stderr:
    print("stderr:", result.stderr)

# Verify installation
installed = []
failed = []
for package in packages_to_install:
    # Handle scoped packages correctly
    package_path = f"/home/user/app/node_modules/{package}"
    if os.path.exists(package_path):
        installed.append(package)
        print(f"✓ Verified installation of {package}")
    else:
        # Check if it's a submodule of an installed package
        base_package = package.split('/')[0]
        if package.startswith('@'):
            # For @scope/package, the base is @scope/package
            base_package = '/'.join(package.split('/')[:2])
        base_path = f"/home/user/app/node_modules/{base_package}"
        if os.path.exists(base_path):
            installed.append(package)
            print(f"✓ Verified installation of {package} (via {base_package})")
        else:
            failed.append(package)
            print(f"✗ Failed to verify installation of {package}")

result_data = {
    'installed': installed,
    'failed': failed,
    'returncode': result.returncode
}
print("\\nResult:", json.dumps(result_data))
`, { timeout: 60000 });

    // Parse the result more safely: the script prints "Result: {...}" last
    let installStatus;
    try {
      const stdout = installResult.logs.stdout.join('');
      const resultMatch = stdout.match(/Result:\s*({.*})/);
      if (resultMatch) {
        installStatus = JSON.parse(resultMatch[1]);
      } else {
        // Fallback parsing
        const lines = stdout.split('\n');
        const resultLine = lines.find((line: string) => line.includes('Result:'));
        if (resultLine) {
          installStatus = JSON.parse(resultLine.split('Result:')[1].trim());
        } else {
          throw new Error('Could not find Result in output');
        }
      }
    } catch (parseError) {
      console.error('[detect-and-install-packages] Failed to parse install result:', parseError);
      console.error('[detect-and-install-packages] stdout:', installResult.logs.stdout.join(''));

      // Fallback to assuming all packages were installed
      installStatus = {
        installed: status.missing,
        failed: [],
        returncode: 0
      };
    }

    if (installStatus.failed.length > 0) {
      console.error('[detect-and-install-packages] Failed to install:', installStatus.failed);
    }

    return NextResponse.json({
      success: true,
      packagesInstalled: installStatus.installed,
      packagesFailed: installStatus.failed,
      packagesAlreadyInstalled: status.installed,
      message: `Installed ${installStatus.installed.length} packages`,
      logs: installResult.logs.stdout.join('\n')
    });
  } catch (error) {
    console.error('[detect-and-install-packages] Error:', error);
    return NextResponse.json({
      success: false,
      error: (error as Error).message
    }, { status: 500 });
  }
}
File diff suppressed because it is too large Load Diff
+183
View File
@@ -0,0 +1,183 @@
import { NextResponse } from 'next/server';
import { parseJavaScriptFile, buildComponentTree } from '@/lib/file-parser';
import { FileManifest, FileInfo, RouteInfo } from '@/types/file-manifest';
import type { SandboxState } from '@/types/sandbox';
declare global {
var activeSandbox: any;
}
/**
 * GET /api/get-sandbox-files
 *
 * Reads all React/JS/CSS/JSON sources out of the active sandbox, builds a
 * FileManifest (per-file metadata, entry point, style files, component tree,
 * routes), caches the manifest on global.sandboxState, and returns both the
 * raw files and the manifest.
 */
export async function GET() {
  try {
    if (!global.activeSandbox) {
      return NextResponse.json({
        success: false,
        error: 'No active sandbox'
      }, { status: 404 });
    }

    console.log('[get-sandbox-files] Fetching and analyzing file structure...');

    // Get all React/JS/CSS files. The Python below walks /home/user/app,
    // skips dependency/build dirs, and prints {files, structure} as JSON.
    // NOTE(review): the structure walk below does not prune node_modules
    // directories (only file listing is filtered), so the 50-line cap may be
    // consumed by dependency dirs — confirm whether pruning was intended.
    const result = await global.activeSandbox.runCode(`
import os
import json

def get_files_content(directory='/home/user/app', extensions=['.jsx', '.js', '.tsx', '.ts', '.css', '.json']):
    files_content = {}

    for root, dirs, files in os.walk(directory):
        # Skip node_modules and other unwanted directories
        dirs[:] = [d for d in dirs if d not in ['node_modules', '.git', 'dist', 'build']]

        for file in files:
            if any(file.endswith(ext) for ext in extensions):
                file_path = os.path.join(root, file)
                relative_path = os.path.relpath(file_path, '/home/user/app')
                try:
                    with open(file_path, 'r') as f:
                        content = f.read()
                    # Only include files under 10KB to avoid huge responses
                    if len(content) < 10000:
                        files_content[relative_path] = content
                except:
                    pass

    return files_content

# Get the files
files = get_files_content()

# Also get the directory structure
structure = []
for root, dirs, files in os.walk('/home/user/app'):
    level = root.replace('/home/user/app', '').count(os.sep)
    indent = ' ' * 2 * level
    structure.append(f"{indent}{os.path.basename(root)}/")
    sub_indent = ' ' * 2 * (level + 1)
    for file in files:
        if not any(skip in root for skip in ['node_modules', '.git', 'dist', 'build']):
            structure.append(f"{sub_indent}{file}")

result = {
    'files': files,
    'structure': '\\n'.join(structure[:50])  # Limit structure to 50 lines
}
print(json.dumps(result))
`);

    const output = result.logs.stdout.join('');
    const parsedResult = JSON.parse(output);

    // Build enhanced file manifest (filled in per-file below)
    const fileManifest: FileManifest = {
      files: {},
      routes: [],
      componentTree: {},
      entryPoint: '',
      styleFiles: [],
      timestamp: Date.now(),
    };

    // Process each file
    for (const [relativePath, content] of Object.entries(parsedResult.files)) {
      const fullPath = `/home/user/app/${relativePath}`;

      // Create base file info; type may be overridden below (e.g. 'style')
      const fileInfo: FileInfo = {
        content: content as string,
        type: 'utility',
        path: fullPath,
        relativePath,
        lastModified: Date.now(),
      };

      // Parse JavaScript/JSX files for components/exports
      if (relativePath.match(/\.(jsx?|tsx?)$/)) {
        const parseResult = parseJavaScriptFile(content as string, fullPath);
        Object.assign(fileInfo, parseResult);

        // Identify entry point: main/index take precedence over App
        if (relativePath === 'src/main.jsx' || relativePath === 'src/index.jsx') {
          fileManifest.entryPoint = fullPath;
        }
        // Identify App.jsx as a fallback entry point
        if (relativePath === 'src/App.jsx' || relativePath === 'App.jsx') {
          fileManifest.entryPoint = fileManifest.entryPoint || fullPath;
        }
      }

      // Track style files
      if (relativePath.endsWith('.css')) {
        fileManifest.styleFiles.push(fullPath);
        fileInfo.type = 'style';
      }

      fileManifest.files[fullPath] = fileInfo;
    }

    // Build component tree from the parsed per-file info
    fileManifest.componentTree = buildComponentTree(fileManifest.files);

    // Extract routes (simplified - looks for Route components or page pattern)
    fileManifest.routes = extractRoutes(fileManifest.files);

    // Update global file cache with manifest so other routes can reuse it
    if (global.sandboxState?.fileCache) {
      global.sandboxState.fileCache.manifest = fileManifest;
    }

    return NextResponse.json({
      success: true,
      files: parsedResult.files,
      structure: parsedResult.structure,
      fileCount: Object.keys(parsedResult.files).length,
      manifest: fileManifest,
    });
  } catch (error) {
    console.error('[get-sandbox-files] Error:', error);
    return NextResponse.json({
      success: false,
      error: (error as Error).message
    }, { status: 500 });
  }
}
/**
 * Derive route information from the analyzed files.
 *
 * Two heuristics are applied:
 *  1. React Router: files containing `<Route` or `createBrowserRouter` have
 *     their `path="..."` attributes extracted (only when `element=`/`component=`
 *     appears on the same line — a simplified, best-effort match).
 *  2. Next.js-style pages: files under `pages/` (or `src/pages/`) map their
 *     file path to a URL path, with `index` collapsing to the directory root.
 *
 * The `component` field always records the FILE that declares the route,
 * not the rendered component expression.
 */
function extractRoutes(files: Record<string, FileInfo>): RouteInfo[] {
  const routes: RouteInfo[] = [];

  for (const [path, fileInfo] of Object.entries(files)) {
    // Look for React Router usage
    if (fileInfo.content.includes('<Route') || fileInfo.content.includes('createBrowserRouter')) {
      // Extract route definitions (simplified). Fix: the second capture group
      // (the element/component expression) was destructured but never used.
      const routeMatches = fileInfo.content.matchAll(/path=["']([^"']+)["'].*(?:element|component)={([^}]+)}/g);
      for (const match of routeMatches) {
        routes.push({
          path: match[1],
          component: path,
        });
      }
    }

    // Check for Next.js style pages: pages/foo.jsx -> /foo, pages/index.jsx -> /
    if (fileInfo.relativePath.startsWith('pages/') || fileInfo.relativePath.startsWith('src/pages/')) {
      const routePath = '/' + fileInfo.relativePath
        .replace(/^(src\/)?pages\//, '')
        .replace(/\.(jsx?|tsx?)$/, '')
        .replace(/index$/, '');
      routes.push({
        path: routePath,
        component: path,
      });
    }
  }

  return routes;
}
+369
View File
@@ -0,0 +1,369 @@
import { NextRequest, NextResponse } from 'next/server';
import { Sandbox } from '@e2b/code-interpreter';
declare global {
var activeSandbox: any;
var sandboxData: any;
}
/**
 * POST /api/install-packages
 *
 * Installs npm packages into the active sandbox, streaming progress back to
 * the client as Server-Sent Events. Skips packages already listed in
 * package.json, installs the rest with --legacy-peer-deps, then restarts the
 * Vite dev server so it picks up the new dependencies.
 *
 * Body: { packages: string[], sandboxId?: string } — sandboxId lets the route
 * reconnect when the global sandbox reference was lost (e.g. server restart).
 */
export async function POST(request: NextRequest) {
  try {
    const { packages, sandboxId } = await request.json();

    if (!packages || !Array.isArray(packages) || packages.length === 0) {
      return NextResponse.json({
        success: false,
        error: 'Packages array is required'
      }, { status: 400 });
    }

    // Validate and deduplicate package names
    const validPackages = [...new Set(packages)]
      .filter(pkg => pkg && typeof pkg === 'string' && pkg.trim() !== '')
      .map(pkg => pkg.trim());

    if (validPackages.length === 0) {
      return NextResponse.json({
        success: false,
        error: 'No valid package names provided'
      }, { status: 400 });
    }

    // Log if duplicates were found
    if (packages.length !== validPackages.length) {
      console.log(`[install-packages] Cleaned packages: removed ${packages.length - validPackages.length} invalid/duplicate entries`);
      console.log(`[install-packages] Original:`, packages);
      console.log(`[install-packages] Cleaned:`, validPackages);
    }

    // Try to get sandbox - either from global or reconnect
    let sandbox = global.activeSandbox;

    if (!sandbox && sandboxId) {
      console.log(`[install-packages] Reconnecting to sandbox ${sandboxId}...`);
      try {
        sandbox = await Sandbox.connect(sandboxId, { apiKey: process.env.E2B_API_KEY });
        global.activeSandbox = sandbox;
        console.log(`[install-packages] Successfully reconnected to sandbox ${sandboxId}`);
      } catch (error) {
        console.error(`[install-packages] Failed to reconnect to sandbox:`, error);
        return NextResponse.json({
          success: false,
          error: `Failed to reconnect to sandbox: ${(error as Error).message}`
        }, { status: 500 });
      }
    }

    if (!sandbox) {
      return NextResponse.json({
        success: false,
        error: 'No active sandbox available'
      }, { status: 400 });
    }

    // Fix: log the validated list, not the raw request payload
    console.log('[install-packages] Installing packages:', validPackages);

    // Create a response stream for real-time updates (SSE)
    const encoder = new TextEncoder();
    const stream = new TransformStream();
    const writer = stream.writable.getWriter();

    // Function to send progress updates as one SSE "data:" event
    const sendProgress = async (data: any) => {
      const message = `data: ${JSON.stringify(data)}\n\n`;
      await writer.write(encoder.encode(message));
    };

    // Start installation in background; the stream is returned immediately
    (async (sandboxInstance) => {
      try {
        await sendProgress({
          type: 'start',
          message: `Installing ${validPackages.length} package${validPackages.length > 1 ? 's' : ''}...`,
          packages: validPackages
        });

        // Kill any existing Vite process first so npm can modify node_modules
        await sendProgress({ type: 'status', message: 'Stopping development server...' });

        await sandboxInstance.runCode(`
import subprocess
import os
import signal

# Try to kill any existing Vite process
try:
    with open('/tmp/vite-process.pid', 'r') as f:
        pid = int(f.read().strip())
    os.kill(pid, signal.SIGTERM)
    print("Stopped existing Vite process")
except:
    print("No existing Vite process found")
`);

        // Check which packages are already installed
        await sendProgress({
          type: 'status',
          message: 'Checking installed packages...'
        });

        const checkResult = await sandboxInstance.runCode(`
import os
import json

os.chdir('/home/user/app')

# Fix: bind the package list BEFORE the try block so the except branch can
# still report it if reading package.json fails (previously a NameError).
packages_to_check = ${JSON.stringify(validPackages)}

# Read package.json to check installed packages
try:
    with open('package.json', 'r') as f:
        package_json = json.load(f)

    dependencies = package_json.get('dependencies', {})
    dev_dependencies = package_json.get('devDependencies', {})
    all_deps = {**dependencies, **dev_dependencies}

    # Check which packages need to be installed
    already_installed = []
    need_install = []

    for pkg in packages_to_check:
        # Handle scoped packages
        if pkg.startswith('@'):
            pkg_name = pkg
        else:
            # Extract package name without version
            pkg_name = pkg.split('@')[0]

        if pkg_name in all_deps:
            already_installed.append(pkg_name)
        else:
            need_install.append(pkg)

    print(f"Already installed: {already_installed}")
    print(f"Need to install: {need_install}")
    print(f"NEED_INSTALL:{json.dumps(need_install)}")
except Exception as e:
    print(f"Error checking packages: {e}")
    print(f"NEED_INSTALL:{json.dumps(packages_to_check)}")
`);

        // Parse packages that need installation (marker line "NEED_INSTALL:[...]")
        let packagesToInstall = validPackages;

        // Check if checkResult has the expected structure
        if (checkResult && checkResult.results && checkResult.results[0] && checkResult.results[0].text) {
          const outputLines = checkResult.results[0].text.split('\n');
          for (const line of outputLines) {
            if (line.startsWith('NEED_INSTALL:')) {
              try {
                packagesToInstall = JSON.parse(line.substring('NEED_INSTALL:'.length));
              } catch (e) {
                console.error('Failed to parse packages to install:', e);
              }
            }
          }
        } else {
          console.error('[install-packages] Invalid checkResult structure:', checkResult);
          // If we can't check, just try to install all packages
          packagesToInstall = validPackages;
        }

        if (packagesToInstall.length === 0) {
          await sendProgress({
            type: 'success',
            message: 'All packages are already installed',
            installedPackages: [],
            alreadyInstalled: validPackages
          });
          return;
        }

        // Install only packages that aren't already installed
        const packageList = packagesToInstall.join(' ');
        await sendProgress({
          type: 'command',
          command: `npm install ${packageList}`,
          message: `Installing ${packagesToInstall.length} new package(s)...`
        });

        const installResult = await sandboxInstance.runCode(`
import subprocess
import os

os.chdir('/home/user/app')

# Run npm install with output capture
packages_to_install = ${JSON.stringify(packagesToInstall)}
cmd_args = ['npm', 'install', '--legacy-peer-deps'] + packages_to_install

print(f"Running command: {' '.join(cmd_args)}")

process = subprocess.Popen(
    cmd_args,
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
    text=True
)

# Stream output
while True:
    output = process.stdout.readline()
    if output == '' and process.poll() is not None:
        break
    if output:
        print(output.strip())

# Get the return code
rc = process.poll()

# Capture any stderr
stderr = process.stderr.read()
if stderr:
    print("STDERR:", stderr)
    if 'ERESOLVE' in stderr:
        print("ERESOLVE_ERROR: Dependency conflict detected - using --legacy-peer-deps flag")

print(f"\\nInstallation completed with code: {rc}")

# Verify packages were installed
import json
with open('/home/user/app/package.json', 'r') as f:
    package_json = json.load(f)

installed = []
for pkg in ${JSON.stringify(packagesToInstall)}:
    if pkg in package_json.get('dependencies', {}):
        installed.append(pkg)
        print(f"✓ Verified {pkg}")
    else:
        print(f"✗ Package {pkg} not found in dependencies")

print(f"\\nVerified installed packages: {installed}")
`, { timeout: 60000 }); // 60 second timeout for npm install

        // Send npm output to the client, classifying error/warning lines
        const output = installResult?.output || installResult?.logs?.stdout?.join('\n') || '';
        const npmOutputLines = output.split('\n').filter((line: string) => line.trim());

        for (const line of npmOutputLines) {
          if (line.includes('STDERR:')) {
            const errorMsg = line.replace('STDERR:', '').trim();
            if (errorMsg && errorMsg !== 'undefined') {
              await sendProgress({ type: 'error', message: errorMsg });
            }
          } else if (line.includes('ERESOLVE_ERROR:')) {
            const msg = line.replace('ERESOLVE_ERROR:', '').trim();
            await sendProgress({
              type: 'warning',
              message: `Dependency conflict resolved with --legacy-peer-deps: ${msg}`
            });
          } else if (line.includes('npm WARN')) {
            await sendProgress({ type: 'warning', message: line });
          } else if (line.trim() && !line.includes('undefined')) {
            await sendProgress({ type: 'output', message: line });
          }
        }

        // Check if installation was successful (verification line printed above)
        const installedMatch = output.match(/Verified installed packages: \[(.*?)\]/);
        let installedPackages: string[] = [];
        if (installedMatch && installedMatch[1]) {
          installedPackages = installedMatch[1]
            .split(',')
            .map((p: string) => p.trim().replace(/'/g, ''))
            .filter((p: string) => p.length > 0);
        }

        if (installedPackages.length > 0) {
          await sendProgress({
            type: 'success',
            message: `Successfully installed: ${installedPackages.join(', ')}`,
            installedPackages
          });
        } else {
          await sendProgress({
            type: 'error',
            message: 'Failed to verify package installation'
          });
        }

        // Restart Vite dev server so it picks up the new node_modules
        await sendProgress({ type: 'status', message: 'Restarting development server...' });

        await sandboxInstance.runCode(`
import subprocess
import os
import time

os.chdir('/home/user/app')

# Kill any existing Vite processes
subprocess.run(['pkill', '-f', 'vite'], capture_output=True)
time.sleep(1)

# Start Vite dev server
env = os.environ.copy()
env['FORCE_COLOR'] = '0'

process = subprocess.Popen(
    ['npm', 'run', 'dev'],
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
    env=env
)

print(f'✓ Vite dev server restarted with PID: {process.pid}')

# Store process info for later
with open('/tmp/vite-process.pid', 'w') as f:
    f.write(str(process.pid))

# Wait a bit for Vite to start up
time.sleep(3)

# Touch files to trigger Vite reload
subprocess.run(['touch', '/home/user/app/package.json'])
subprocess.run(['touch', '/home/user/app/vite.config.js'])

print("Vite restarted and should now recognize all packages")
`);

        await sendProgress({
          type: 'complete',
          message: 'Package installation complete and dev server restarted!',
          installedPackages
        });
      } catch (error) {
        const errorMessage = (error as Error).message;
        if (errorMessage && errorMessage !== 'undefined') {
          await sendProgress({
            type: 'error',
            message: errorMessage
          });
        }
      } finally {
        await writer.close();
      }
    })(sandbox);

    // Return the stream immediately; the background task writes into it
    return new Response(stream.readable, {
      headers: {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
      },
    });
  } catch (error) {
    console.error('[install-packages] Error:', error);
    return NextResponse.json({
      success: false,
      error: (error as Error).message
    }, { status: 500 });
  }
}
+49
View File
@@ -0,0 +1,49 @@
import { NextResponse } from 'next/server';
declare global {
var activeSandbox: any;
var sandboxData: any;
var existingFiles: Set<string>;
}
/**
 * POST /api/kill-sandbox
 *
 * Closes the active sandbox (if any), clears the global sandbox references
 * and the tracked-files set, and reports whether a sandbox was actually
 * shut down.
 */
export async function POST() {
  try {
    console.log('[kill-sandbox] Killing active sandbox...');

    let sandboxKilled = false;

    // Shut down the sandbox if one is active, then drop all global references.
    // A failed close is logged but still clears the references so a fresh
    // sandbox can be created afterwards.
    const sandbox = global.activeSandbox;
    if (sandbox) {
      try {
        await sandbox.close();
        sandboxKilled = true;
        console.log('[kill-sandbox] Sandbox closed successfully');
      } catch (e) {
        console.error('[kill-sandbox] Failed to close sandbox:', e);
      }
      global.activeSandbox = null;
      global.sandboxData = null;
    }

    // Forget which files were written to the (now dead) sandbox.
    global.existingFiles?.clear();

    return NextResponse.json({
      success: true,
      sandboxKilled,
      message: 'Sandbox cleaned up successfully'
    });
  } catch (error) {
    console.error('[kill-sandbox] Error:', error);
    return NextResponse.json(
      { success: false, error: (error as Error).message },
      { status: 500 }
    );
  }
}
+118
View File
@@ -0,0 +1,118 @@
import { NextResponse } from 'next/server';
declare global {
var activeSandbox: any;
}
/**
 * GET /api/monitor-vite-logs
 *
 * Collects Vite-related errors from the sandbox: previously recorded errors
 * in /tmp/vite-errors.json plus "Failed to resolve import" lines scraped from
 * any vite *.log files under /tmp. Returns a deduplicated error list.
 */
export async function GET() {
  try {
    if (!global.activeSandbox) {
      return NextResponse.json({
        success: false,
        error: 'No active sandbox'
      }, { status: 400 });
    }

    console.log('[monitor-vite-logs] Checking Vite process logs...');

    // Check both the error file and recent logs inside the sandbox
    const result = await global.activeSandbox.runCode(`
import json
import subprocess
import re

errors = []

# First check the error file
try:
    with open('/tmp/vite-errors.json', 'r') as f:
        data = json.load(f)
        errors.extend(data.get('errors', []))
except:
    pass

# Also check if we can get recent Vite logs
try:
    # Try to get the Vite process PID
    with open('/tmp/vite-process.pid', 'r') as f:
        pid = int(f.read().strip())

    # Check if process is still running and get its logs
    # This is a bit hacky but works for our use case
    result = subprocess.run(['ps', '-p', str(pid)], capture_output=True, text=True)
    if result.returncode == 0:
        # Process is running, try to check for errors in output
        # Note: We can't easily get stdout/stderr from a running process
        # but we can check if there are new errors
        pass
except:
    pass

# Also scan the current console output for any HMR errors
# This won't catch everything but helps with recent errors
try:
    # Check if there's a log file we can read
    import os
    log_files = []
    for root, dirs, files in os.walk('/tmp'):
        for file in files:
            if 'vite' in file.lower() and file.endswith('.log'):
                log_files.append(os.path.join(root, file))

    for log_file in log_files[:5]:  # Check up to 5 log files
        try:
            with open(log_file, 'r') as f:
                content = f.read()

            # Look for import errors
            import_errors = re.findall(r'Failed to resolve import "([^"]+)"', content)
            for pkg in import_errors:
                if not pkg.startswith('.'):
                    # Extract base package name
                    if pkg.startswith('@'):
                        parts = pkg.split('/')
                        final_pkg = '/'.join(parts[:2]) if len(parts) >= 2 else pkg
                    else:
                        final_pkg = pkg.split('/')[0]

                    error_obj = {
                        "type": "npm-missing",
                        "package": final_pkg,
                        "message": f"Failed to resolve import \\"{pkg}\\"",
                        "file": "Unknown"
                    }

                    # Avoid duplicates
                    if not any(e['package'] == error_obj['package'] for e in errors):
                        errors.append(error_obj)
        except:
            pass
except Exception as e:
    print(f"Error scanning logs: {e}")

# Deduplicate errors
unique_errors = []
seen_packages = set()
for error in errors:
    if error.get('package') and error['package'] not in seen_packages:
        seen_packages.add(error['package'])
        unique_errors.append(error)

print(json.dumps({"errors": unique_errors}))
`, { timeout: 5000 });

    // NOTE(review): other routes in this codebase read result.logs.stdout;
    // this one reads result.output — confirm both exist on the runCode result.
    const data = JSON.parse(result.output || '{"errors": []}');

    return NextResponse.json({
      success: true,
      hasErrors: data.errors.length > 0,
      errors: data.errors
    });
  } catch (error) {
    console.error('[monitor-vite-logs] Error:', error);
    return NextResponse.json({
      success: false,
      error: (error as Error).message
    }, { status: 500 });
  }
}
+62
View File
@@ -0,0 +1,62 @@
import { NextRequest, NextResponse } from 'next/server';
declare global {
var viteErrors: any[];
}
// Initialize the shared in-memory error buffer once per server process.
// The POST handler below appends parsed Vite errors here and caps it at the
// 50 most recent entries.
if (!global.viteErrors) {
  global.viteErrors = [];
}
/**
 * POST /api/report-vite-error
 *
 * Records a client-reported Vite error into the in-memory global.viteErrors
 * buffer. Import-resolution failures are recognized and annotated with the
 * offending module specifier and importing file.
 *
 * Body: { error: string, file?: string, type?: string }.
 */
export async function POST(request: NextRequest) {
  try {
    const { error, file, type = 'runtime-error' } = await request.json();

    if (!error) {
      return NextResponse.json({
        success: false,
        error: 'Error message is required'
      }, { status: 400 });
    }

    // Build a structured record from the raw error text.
    const report: any = {
      type,
      message: error,
      file: file || 'unknown',
      timestamp: new Date().toISOString()
    };

    // Vite import-resolution failures carry both the module specifier and the
    // importing file; promote those onto the record when present.
    const importMatch = error.match(/Failed to resolve import ['"]([^'"]+)['"] from ['"]([^'"]+)['"]/);
    if (importMatch) {
      report.type = 'import-error';
      report.import = importMatch[1];
      report.file = importMatch[2];
    }

    // Append to the shared buffer, keeping only the 50 most recent entries.
    global.viteErrors.push(report);
    if (global.viteErrors.length > 50) {
      global.viteErrors = global.viteErrors.slice(-50);
    }

    console.log('[report-vite-error] Error reported:', report);

    return NextResponse.json({
      success: true,
      message: 'Error reported successfully',
      error: report
    });
  } catch (error) {
    console.error('[report-vite-error] Error:', error);
    return NextResponse.json({
      success: false,
      error: (error as Error).message
    }, { status: 500 });
  }
}
+136
View File
@@ -0,0 +1,136 @@
import { NextResponse } from 'next/server';
declare global {
var activeSandbox: any;
}
/**
 * POST /api/restart-vite
 *
 * Forcibly restarts the Vite dev server inside the sandbox: kills the process
 * recorded in /tmp/vite-process.pid, resets /tmp/vite-errors.json, relaunches
 * `npm run dev`, and attaches a daemon thread that scrapes stderr for
 * "Failed to resolve import" errors, recording missing packages in the error
 * file for other routes to pick up.
 */
export async function POST() {
  try {
    if (!global.activeSandbox) {
      return NextResponse.json({
        success: false,
        error: 'No active sandbox'
      }, { status: 400 });
    }

    console.log('[restart-vite] Forcing Vite restart...');

    // Kill existing Vite process and restart (all work happens in the sandbox)
    const result = await global.activeSandbox.runCode(`
import subprocess
import os
import signal
import time
import threading
import json
import sys

# Kill existing Vite process
try:
    with open('/tmp/vite-process.pid', 'r') as f:
        pid = int(f.read().strip())
    os.kill(pid, signal.SIGTERM)
    print("Killed existing Vite process")
    time.sleep(1)
except:
    print("No existing Vite process found")

os.chdir('/home/user/app')

# Clear error file
error_file = '/tmp/vite-errors.json'
with open(error_file, 'w') as f:
    json.dump({"errors": [], "lastChecked": time.time()}, f)

# Function to monitor Vite output for errors
def monitor_output(proc, error_file):
    while True:
        line = proc.stderr.readline()
        if not line:
            break
        sys.stdout.write(line)  # Also print to console

        # Check for import resolution errors
        if "Failed to resolve import" in line:
            try:
                # Extract package name from error
                import_match = line.find('"')
                if import_match != -1:
                    end_match = line.find('"', import_match + 1)
                    if end_match != -1:
                        package_name = line[import_match + 1:end_match]

                        # Skip relative imports
                        if not package_name.startswith('.'):
                            with open(error_file, 'r') as f:
                                data = json.load(f)

                            # Handle scoped packages correctly
                            if package_name.startswith('@'):
                                # For @scope/package, keep the scope
                                pkg_parts = package_name.split('/')
                                if len(pkg_parts) >= 2:
                                    final_package = '/'.join(pkg_parts[:2])
                                else:
                                    final_package = package_name
                            else:
                                # For regular packages, just take the first part
                                final_package = package_name.split('/')[0]

                            error_obj = {
                                "type": "npm-missing",
                                "package": final_package,
                                "message": line.strip(),
                                "timestamp": time.time()
                            }

                            # Avoid duplicates
                            if not any(e['package'] == error_obj['package'] for e in data['errors']):
                                data['errors'].append(error_obj)
                                with open(error_file, 'w') as f:
                                    json.dump(data, f)
                                print(f"WARNING: Detected missing package: {error_obj['package']}")
            except Exception as e:
                print(f"Error parsing Vite error: {e}")

# Start Vite with error monitoring
process = subprocess.Popen(
    ['npm', 'run', 'dev'],
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
    text=True,
    bufsize=1
)

# Start monitoring thread
monitor_thread = threading.Thread(target=monitor_output, args=(process, error_file))
monitor_thread.daemon = True
monitor_thread.start()

print("Vite restarted successfully!")

# Store process info for later
with open('/tmp/vite-process.pid', 'w') as f:
    f.write(str(process.pid))

# Wait for Vite to fully start
time.sleep(5)
print("Vite is ready")
`);

    // NOTE(review): other routes read result.logs.stdout; this reads
    // result.output — confirm which field the runCode result exposes.
    return NextResponse.json({
      success: true,
      message: 'Vite restarted successfully',
      output: result.output
    });
  } catch (error) {
    console.error('[restart-vite] Error:', error);
    return NextResponse.json({
      success: false,
      error: (error as Error).message
    }, { status: 500 });
  }
}
+62
View File
@@ -0,0 +1,62 @@
import { NextRequest, NextResponse } from 'next/server';
import { Sandbox } from '@e2b/code-interpreter';
// Get active sandbox from global state (in production, use a proper state management solution)
declare global {
var activeSandbox: any;
}
/**
 * POST /api/run-command
 *
 * Runs an arbitrary shell-less command inside the sandbox's /home/user/app
 * directory (via Python subprocess, shell=False) and returns its combined
 * stdout/stderr output.
 *
 * Body: { command: string }.
 */
export async function POST(request: NextRequest) {
  try {
    const { command } = await request.json();

    if (!command) {
      return NextResponse.json({
        success: false,
        error: 'Command is required'
      }, { status: 400 });
    }

    if (!global.activeSandbox) {
      return NextResponse.json({
        success: false,
        error: 'No active sandbox'
      }, { status: 400 });
    }

    console.log(`[run-command] Executing: ${command}`);

    // NOTE(review): the command is tokenized by a plain space split, so quoted
    // arguments containing spaces are broken apart — confirm callers only send
    // simple commands.
    const argv = command.split(' ');

    const result = await global.activeSandbox.runCode(`
import subprocess
import os

os.chdir('/home/user/app')
result = subprocess.run(${JSON.stringify(argv)},
                        capture_output=True,
                        text=True,
                        shell=False)

print("STDOUT:")
print(result.stdout)
if result.stderr:
    print("\\nSTDERR:")
    print(result.stderr)
print(f"\\nReturn code: {result.returncode}")
`);

    const output = result.logs.stdout.join('\n');

    return NextResponse.json({
      success: true,
      output,
      message: 'Command executed successfully'
    });
  } catch (error) {
    console.error('[run-command] Error:', error);
    return NextResponse.json({
      success: false,
      error: (error as Error).message
    }, { status: 500 });
  }
}
+74
View File
@@ -0,0 +1,74 @@
import { NextRequest, NextResponse } from 'next/server';
declare global {
var activeSandbox: any;
}
/**
 * GET /api/sandbox-logs
 *
 * Best-effort status probe for the Vite dev server inside the sandbox:
 * checks the process table for a vite process and reports
 * { hasErrors, logs, status }.
 */
export async function GET(request: NextRequest) {
  try {
    if (!global.activeSandbox) {
      return NextResponse.json({
        success: false,
        error: 'No active sandbox'
      }, { status: 400 });
    }

    console.log('[sandbox-logs] Fetching Vite dev server logs...');

    // Get the last N lines of the Vite dev server output.
    // Fix: the script called json.dumps but never imported json, which raised
    // a NameError in both the try body and the except handler.
    const result = await global.activeSandbox.runCode(`
import subprocess
import os
import json

# Try to get the Vite process output
try:
    # Read the last 100 lines of any log files
    log_content = []

    # Check if there are any node processes running
    ps_result = subprocess.run(['ps', 'aux'], capture_output=True, text=True)
    vite_processes = [line for line in ps_result.stdout.split('\\n') if 'vite' in line.lower()]

    if vite_processes:
        log_content.append("Vite is running")
    else:
        log_content.append("Vite process not found")

    # Try to capture recent console output (this is a simplified approach)
    # In a real implementation, you'd want to capture the Vite process output directly
    print(json.dumps({
        "hasErrors": False,
        "logs": log_content,
        "status": "running" if vite_processes else "stopped"
    }))
except Exception as e:
    print(json.dumps({
        "hasErrors": True,
        "logs": [str(e)],
        "status": "error"
    }))
`);

    try {
      const logData = JSON.parse(result.output || '{}');
      return NextResponse.json({
        success: true,
        ...logData
      });
    } catch {
      // Output was not valid JSON — return it raw rather than failing.
      return NextResponse.json({
        success: true,
        hasErrors: false,
        logs: [result.output],
        status: 'unknown'
      });
    }
  } catch (error) {
    console.error('[sandbox-logs] Error:', error);
    return NextResponse.json({
      success: false,
      error: (error as Error).message
    }, { status: 500 });
  }
}
+54
View File
@@ -0,0 +1,54 @@
import { NextResponse } from 'next/server';
declare global {
var activeSandbox: any;
var sandboxData: any;
var existingFiles: Set<string>;
}
/**
 * GET /api/sandbox-status
 *
 * Reports whether a sandbox is currently active and (nominally) healthy,
 * along with its id, URL, and the set of files tracked as written to it.
 */
export async function GET() {
  try {
    const sandboxExists = !!global.activeSandbox;

    let sandboxHealthy = false;
    let sandboxInfo = null;

    if (sandboxExists && global.activeSandbox) {
      try {
        // Python isn't available in the Vite template, so the mere presence
        // of the sandbox object is treated as a passing health check.
        sandboxHealthy = true;
        sandboxInfo = {
          sandboxId: global.sandboxData?.sandboxId,
          url: global.sandboxData?.url,
          filesTracked: global.existingFiles ? Array.from(global.existingFiles) : [],
          lastHealthCheck: new Date().toISOString()
        };
      } catch (error) {
        console.error('[sandbox-status] Health check failed:', error);
        sandboxHealthy = false;
      }
    }

    const message = sandboxHealthy
      ? 'Sandbox is active and healthy'
      : sandboxExists
        ? 'Sandbox exists but is not responding'
        : 'No active sandbox';

    return NextResponse.json({
      success: true,
      active: sandboxExists,
      healthy: sandboxHealthy,
      sandboxData: sandboxInfo,
      message
    });
  } catch (error) {
    console.error('[sandbox-status] Error:', error);
    return NextResponse.json({
      success: false,
      active: false,
      error: (error as Error).message
    }, { status: 500 });
  }
}
+56
View File
@@ -0,0 +1,56 @@
import { NextRequest, NextResponse } from 'next/server';
/**
 * POST handler: capture a viewport screenshot of a URL via the Firecrawl API.
 *
 * Request body: { url: string }
 * Success:  { success: true, screenshot, metadata }
 * Failure:  { error } with status 400 (missing url) or 500 (any other error).
 */
export async function POST(req: NextRequest) {
  try {
    const { url } = await req.json();

    if (!url) {
      return NextResponse.json({ error: 'URL is required' }, { status: 400 });
    }

    // Fail fast with a clear message instead of sending "Bearer undefined" to
    // the API (matches the check done in the scrape-url-enhanced route).
    const apiKey = process.env.FIRECRAWL_API_KEY;
    if (!apiKey) {
      throw new Error('FIRECRAWL_API_KEY environment variable is not set');
    }

    // Use Firecrawl API to capture screenshot
    const firecrawlResponse = await fetch('https://api.firecrawl.dev/v1/scrape', {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${apiKey}`,
        'Content-Type': 'application/json'
      },
      body: JSON.stringify({
        url,
        formats: ['screenshot'], // Regular viewport screenshot, not full page
        waitFor: 3000, // Wait for page to fully load
        timeout: 30000,
        blockAds: true,
        actions: [
          {
            type: 'wait',
            milliseconds: 2000 // Additional wait for dynamic content
          }
        ]
      })
    });

    if (!firecrawlResponse.ok) {
      const error = await firecrawlResponse.text();
      throw new Error(`Firecrawl API error: ${error}`);
    }

    const data = await firecrawlResponse.json();

    if (!data.success || !data.data?.screenshot) {
      throw new Error('Failed to capture screenshot');
    }

    return NextResponse.json({
      success: true,
      screenshot: data.data.screenshot,
      metadata: data.data.metadata
    });
  } catch (error) {
    console.error('Screenshot capture error:', error);
    return NextResponse.json({
      // Narrow the unknown catch value instead of typing it `any`; keep the
      // original fallback message for non-Error throwables.
      error: error instanceof Error ? error.message : 'Failed to capture screenshot'
    }, { status: 500 });
  }
}
+117
View File
@@ -0,0 +1,117 @@
import { NextRequest, NextResponse } from 'next/server';
/**
 * Normalize typographic ("smart") punctuation to plain ASCII equivalents so
 * downstream consumers (JSON payloads, code generation) don't choke on curly
 * quotes, dashes, ellipses, or non-breaking spaces.
 */
function sanitizeQuotes(text: string): string {
  // Ordered (pattern → replacement) table; applied left to right.
  const replacements: ReadonlyArray<readonly [RegExp, string]> = [
    [/[\u2018\u2019\u201A\u201B]/g, "'"], // smart single quotes
    [/[\u201C\u201D\u201E\u201F]/g, '"'], // smart double quotes
    [/[\u00AB\u00BB]/g, '"'],             // guillemets
    [/[\u2039\u203A]/g, "'"],             // single guillemets
    [/[\u2013\u2014]/g, '-'],             // en dash and em dash
    [/\u2026/g, '...'],                   // ellipsis
    [/\u00A0/g, ' '],                     // non-breaking space
  ];

  return replacements.reduce(
    (result, [pattern, replacement]) => result.replace(pattern, replacement),
    text
  );
}
/**
 * POST handler: scrape a URL with Firecrawl and return AI-ready content.
 *
 * Request body: { url: string }
 * Success:  { success, url, content, structured, metadata, message } where
 *           `content` is a formatted text blob and `structured` holds the
 *           sanitized title/description/markdown separately.
 * Failure:  { success: false, error } with status 400 (missing url) or 500.
 */
export async function POST(request: NextRequest) {
  try {
    const { url } = await request.json();

    if (!url) {
      return NextResponse.json({
        success: false,
        error: 'URL is required'
      }, { status: 400 });
    }

    console.log('[scrape-url-enhanced] Scraping with Firecrawl:', url);

    const FIRECRAWL_API_KEY = process.env.FIRECRAWL_API_KEY;

    if (!FIRECRAWL_API_KEY) {
      throw new Error('FIRECRAWL_API_KEY environment variable is not set');
    }

    // Make request to Firecrawl API with maxAge for 500% faster scraping
    const firecrawlResponse = await fetch('https://api.firecrawl.dev/v1/scrape', {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${FIRECRAWL_API_KEY}`,
        'Content-Type': 'application/json'
      },
      body: JSON.stringify({
        url,
        formats: ['markdown', 'html'],
        waitFor: 3000,
        timeout: 30000,
        blockAds: true,
        maxAge: 3600000, // Use cached data if less than 1 hour old (500% faster!)
        actions: [
          {
            type: 'wait',
            milliseconds: 2000
          }
        ]
      })
    });

    if (!firecrawlResponse.ok) {
      const error = await firecrawlResponse.text();
      throw new Error(`Firecrawl API error: ${error}`);
    }

    const data = await firecrawlResponse.json();

    if (!data.success || !data.data) {
      throw new Error('Failed to scrape content');
    }

    // `html` is also returned (requested in `formats`) but unused here,
    // so only the fields we consume are destructured.
    const { markdown, metadata } = data.data;

    // Sanitize the markdown so smart quotes/dashes can't break consumers.
    const sanitizedMarkdown = sanitizeQuotes(markdown || '');

    // Extract structured data from the response
    const title = metadata?.title || '';
    const description = metadata?.description || '';

    // Format content for AI
    const formattedContent = `
Title: ${sanitizeQuotes(title)}
Description: ${sanitizeQuotes(description)}
URL: ${url}
Main Content:
${sanitizedMarkdown}
`.trim();

    return NextResponse.json({
      success: true,
      url,
      content: formattedContent,
      structured: {
        title: sanitizeQuotes(title),
        description: sanitizeQuotes(description),
        content: sanitizedMarkdown,
        url
      },
      metadata: {
        scraper: 'firecrawl-enhanced',
        timestamp: new Date().toISOString(),
        contentLength: formattedContent.length,
        cached: data.data.cached || false, // Indicates if data came from cache
        ...metadata
      },
      message: 'URL scraped successfully with Firecrawl (with caching for 500% faster performance)'
    });
  } catch (error) {
    console.error('[scrape-url-enhanced] Error:', error);
    return NextResponse.json({
      success: false,
      // Narrow the unknown catch value rather than asserting `as Error`.
      error: error instanceof Error ? error.message : String(error)
    }, { status: 500 });
  }
}