diff --git a/.cursor/mcp.json b/.cursor/mcp.json new file mode 100644 index 0000000..c2af9f3 --- /dev/null +++ b/.cursor/mcp.json @@ -0,0 +1,8 @@ +{ + "mcpServers": { + "dev3000": { + "type": "http", + "url": "http://localhost:3684/mcp" + } + } +} diff --git a/.env.example b/.env.example index c8b9f67..d66eef4 100644 --- a/.env.example +++ b/.env.example @@ -1,20 +1,46 @@ -# REQUIRED - Sandboxes for code execution -# Get yours at https://e2b.dev -E2B_API_KEY=your_e2b_api_key_here +# Required +FIRECRAWL_API_KEY=your_firecrawl_api_key # Get from https://firecrawl.dev (Web scraping) -# REQUIRED - Web scraping for cloning websites -# Get yours at https://firecrawl.dev -FIRECRAWL_API_KEY=your_firecrawl_api_key_here +# ================================================================================= +# SANDBOX PROVIDER - Choose Option 1 OR 2 +# ================================================================================= -# OPTIONAL - AI Providers (need at least one) -# Get yours at https://console.anthropic.com -ANTHROPIC_API_KEY=your_anthropic_api_key_here +# Option 1: Vercel Sandbox (recommended - default) +# Set SANDBOX_PROVIDER=vercel and choose authentication method below +SANDBOX_PROVIDER=vercel -# Get yours at https://platform.openai.com -OPENAI_API_KEY=your_openai_api_key_here +# Vercel Authentication - Choose method a OR b +# Method a: OIDC Token (recommended for development) +# Run `vercel link` then `vercel env pull` to get VERCEL_OIDC_TOKEN automatically +VERCEL_OIDC_TOKEN=auto_generated_by_vercel_env_pull -# Get yours at https://aistudio.google.com/app/apikey -GEMINI_API_KEY=your_gemini_api_key_here +# Method b: Personal Access Token (for production or when OIDC unavailable) +# VERCEL_TEAM_ID=team_xxxxxxxxx # Your Vercel team ID +# VERCEL_PROJECT_ID=prj_xxxxxxxxx # Your Vercel project ID +# VERCEL_TOKEN=vercel_xxxxxxxxxxxx # Personal access token from Vercel dashboard # Get yours at https://console.groq.com -GROQ_API_KEY=your_groq_api_key_here \ No 
newline at end of file +GROQ_API_KEY=your_groq_api_key_here + +# ================================================================================= +# Option 2: E2B Sandbox +# Set SANDBOX_PROVIDER=e2b and configure E2B_API_KEY below +# SANDBOX_PROVIDER=e2b +# E2B_API_KEY=your_e2b_api_key # Get from https://e2b.dev + +# ================================================================================= +# AI PROVIDERS - Need at least one +# ================================================================================= + +# Vercel AI Gateway (recommended - provides access to multiple models) +AI_GATEWAY_API_KEY=your_ai_gateway_api_key # Get from https://vercel.com/dashboard/ai-gateway/api-keys + +# Individual provider keys (used when AI_GATEWAY_API_KEY is not set) +ANTHROPIC_API_KEY=your_anthropic_api_key # Get from https://console.anthropic.com +OPENAI_API_KEY=your_openai_api_key # Get from https://platform.openai.com (GPT-5) +GEMINI_API_KEY=your_gemini_api_key # Get from https://aistudio.google.com/app/apikey +GROQ_API_KEY=your_groq_api_key # Get from https://console.groq.com (Fast inference - Kimi K2 recommended) + +# Optional Morph Fast Apply +# Get yours at https://morphllm.com/ +MORPH_API_KEY=your_fast_apply_key diff --git a/.gitignore b/.gitignore index ac59fa8..79f47d8 100644 --- a/.gitignore +++ b/.gitignore @@ -56,3 +56,4 @@ e2b-template-* *.temp repomix-output.txt bun.lockb +.env*.local diff --git a/README.md b/README.md index 803cc92..b5bbbcd 100644 --- a/README.md +++ b/README.md @@ -1,40 +1,67 @@ # Open Lovable -Chat with AI to build React apps instantly. An example app made by the [Firecrawl](https://firecrawl.dev/?ref=open-lovable-github) team. For a complete cloud solution, check out [Lovable.dev ❤️](https://lovable.dev/). +Chat with AI to build React apps instantly. An example app made by the [Firecrawl](https://firecrawl.dev/?ref=open-lovable-github) team. For a complete cloud solution, check out [Lovable.dev](https://lovable.dev/) ❤️. Open Lovable Demo - - ## Setup 1.
**Clone & Install** ```bash -git clone https://github.com/mendableai/open-lovable.git +git clone https://github.com/firecrawl/open-lovable.git cd open-lovable -npm install +pnpm install # or npm install / yarn install ``` 2. **Add `.env.local`** -```env -# Required -E2B_API_KEY=your_e2b_api_key # Get from https://e2b.dev (Sandboxes) -FIRECRAWL_API_KEY=your_firecrawl_api_key # Get from https://firecrawl.dev (Web scraping) -# Optional (need at least one AI provider) -ANTHROPIC_API_KEY=your_anthropic_api_key # Get from https://console.anthropic.com -OPENAI_API_KEY=your_openai_api_key # Get from https://platform.openai.com (GPT-5) -GEMINI_API_KEY=your_gemini_api_key # Get from https://aistudio.google.com/app/apikey -GROQ_API_KEY=your_groq_api_key # Get from https://console.groq.com (Fast inference - Kimi K2 recommended) +```env +# ================================================================= +# REQUIRED +# ================================================================= +FIRECRAWL_API_KEY=your_firecrawl_api_key # https://firecrawl.dev + +# ================================================================= +# AI PROVIDER - Choose your LLM +# ================================================================= +GEMINI_API_KEY=your_gemini_api_key # https://aistudio.google.com/app/apikey +ANTHROPIC_API_KEY=your_anthropic_api_key # https://console.anthropic.com +OPENAI_API_KEY=your_openai_api_key # https://platform.openai.com +GROQ_API_KEY=your_groq_api_key # https://console.groq.com + +# ================================================================= +# FAST APPLY (Optional - for faster edits) +# ================================================================= +MORPH_API_KEY=your_morphllm_api_key # https://morphllm.com/dashboard + +# ================================================================= +# SANDBOX PROVIDER - Choose ONE: Vercel (default) or E2B +# ================================================================= +SANDBOX_PROVIDER=vercel # or 'e2b' + +# 
Option 1: Vercel Sandbox (default) +# Choose one authentication method: + +# Method A: OIDC Token (recommended for development) +# Run `vercel link` then `vercel env pull` to get VERCEL_OIDC_TOKEN automatically +VERCEL_OIDC_TOKEN=auto_generated_by_vercel_env_pull + +# Method B: Personal Access Token (for production or when OIDC unavailable) +# VERCEL_TEAM_ID=team_xxxxxxxxx # Your Vercel team ID +# VERCEL_PROJECT_ID=prj_xxxxxxxxx # Your Vercel project ID +# VERCEL_TOKEN=vercel_xxxxxxxxxxxx # Personal access token from Vercel dashboard + +# Option 2: E2B Sandbox +# E2B_API_KEY=your_e2b_api_key # https://e2b.dev ``` 3. **Run** ```bash -npm run dev +pnpm dev # or npm run dev / yarn dev ``` -Open [http://localhost:3000](http://localhost:3000) +Open [http://localhost:3000](http://localhost:3000) ## License -MIT +MIT \ No newline at end of file diff --git a/app/api/analyze-edit-intent/route.ts b/app/api/analyze-edit-intent/route.ts index 7cf35bc..07798a0 100644 --- a/app/api/analyze-edit-intent/route.ts +++ b/app/api/analyze-edit-intent/route.ts @@ -5,20 +5,30 @@ import { createOpenAI } from '@ai-sdk/openai'; import { createGoogleGenerativeAI } from '@ai-sdk/google'; import { generateObject } from 'ai'; import { z } from 'zod'; -import type { FileManifest } from '@/types/file-manifest'; +// import type { FileManifest } from '@/types/file-manifest'; // Type is used implicitly through manifest parameter + +// Check if we're using Vercel AI Gateway +const isUsingAIGateway = !!process.env.AI_GATEWAY_API_KEY; +const aiGatewayBaseURL = 'https://ai-gateway.vercel.sh/v1'; const groq = createGroq({ - apiKey: process.env.GROQ_API_KEY, + apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.GROQ_API_KEY, + baseURL: isUsingAIGateway ? aiGatewayBaseURL : undefined, }); const anthropic = createAnthropic({ - apiKey: process.env.ANTHROPIC_API_KEY, - baseURL: process.env.ANTHROPIC_BASE_URL || 'https://api.anthropic.com/v1', + apiKey: process.env.AI_GATEWAY_API_KEY ?? 
process.env.ANTHROPIC_API_KEY, + baseURL: isUsingAIGateway ? aiGatewayBaseURL : (process.env.ANTHROPIC_BASE_URL || 'https://api.anthropic.com/v1'), }); const openai = createOpenAI({ - apiKey: process.env.OPENAI_API_KEY, - baseURL: process.env.OPENAI_BASE_URL, + apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.OPENAI_API_KEY, + baseURL: isUsingAIGateway ? aiGatewayBaseURL : process.env.OPENAI_BASE_URL, +}); + +const googleGenerativeAI = createGoogleGenerativeAI({ + apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.GEMINI_API_KEY, + baseURL: isUsingAIGateway ? aiGatewayBaseURL : undefined, }); // Schema for the AI's search plan - not file selection! @@ -66,7 +76,7 @@ export async function POST(request: NextRequest) { // Create a summary of available files for the AI const validFiles = Object.entries(manifest.files as Record) - .filter(([path, info]) => { + .filter(([path]) => { // Filter out invalid paths return path.includes('.') && !path.match(/\/\d+$/); }); @@ -74,7 +84,7 @@ export async function POST(request: NextRequest) { const fileSummary = validFiles .map(([path, info]: [string, any]) => { const componentName = info.componentInfo?.name || path.split('/').pop(); - const hasImports = info.imports?.length > 0; + // const hasImports = info.imports?.length > 0; // Kept for future use const childComponents = info.componentInfo?.childComponents?.join(', ') || 'none'; return `- ${path} (${componentName}, renders: ${childComponents})`; }) @@ -104,7 +114,7 @@ export async function POST(request: NextRequest) { aiModel = openai(model.replace('openai/', '')); } } else if (model.startsWith('google/')) { - aiModel = createGoogleGenerativeAI(model.replace('google/', '')); + aiModel = googleGenerativeAI(model.replace('google/', '')); } else { // Default to groq if model format is unclear aiModel = groq(model); diff --git a/app/api/apply-ai-code-stream/route.ts b/app/api/apply-ai-code-stream/route.ts index c91bf11..1e0d2c4 100644 --- 
a/app/api/apply-ai-code-stream/route.ts +++ b/app/api/apply-ai-code-stream/route.ts @@ -1,11 +1,13 @@ import { NextRequest, NextResponse } from 'next/server'; -import { Sandbox } from '@e2b/code-interpreter'; +import { parseMorphEdits, applyMorphEditToFile } from '@/lib/morph-fast-apply'; +// Sandbox import not needed - using global sandbox from sandbox-manager import type { SandboxState } from '@/types/sandbox'; import type { ConversationState } from '@/types/conversation'; +import { sandboxManager } from '@/lib/sandbox/sandbox-manager'; declare global { var conversationState: ConversationState | null; - var activeSandbox: any; + var activeSandboxProvider: any; var existingFiles: Set; var sandboxState: SandboxState; } @@ -28,28 +30,28 @@ function parseAIResponse(response: string): ParsedResponse { explanation: '', template: '' }; - + // Function to extract packages from import statements function extractPackagesFromCode(content: string): string[] { const packages: string[] = []; // Match ES6 imports const importRegex = /import\s+(?:(?:\{[^}]*\}|\*\s+as\s+\w+|\w+)(?:\s*,\s*(?:\{[^}]*\}|\*\s+as\s+\w+|\w+))*\s+from\s+)?['"]([^'"]+)['"]/g; let importMatch; - + while ((importMatch = importRegex.exec(content)) !== null) { const importPath = importMatch[1]; // Skip relative imports and built-in React - if (!importPath.startsWith('.') && !importPath.startsWith('/') && - importPath !== 'react' && importPath !== 'react-dom' && - !importPath.startsWith('@/')) { + if (!importPath.startsWith('.') && !importPath.startsWith('/') && + importPath !== 'react' && importPath !== 'react-dom' && + !importPath.startsWith('@/')) { // Extract package name (handle scoped packages like @heroicons/react) - const packageName = importPath.startsWith('@') + const packageName = importPath.startsWith('@') ? 
importPath.split('/').slice(0, 2).join('/') : importPath.split('/')[0]; - + if (!packages.includes(packageName)) { packages.push(packageName); - + // Log important packages for debugging if (packageName === 'react-router-dom' || packageName.includes('router') || packageName.includes('icon')) { console.log(`[apply-ai-code-stream] Detected package from imports: ${packageName}`); @@ -57,13 +59,13 @@ function parseAIResponse(response: string): ParsedResponse { } } } - + return packages; } // Parse file sections - handle duplicates and prefer complete versions const fileMap = new Map(); - + // First pass: Find all file declarations const fileRegex = /([\s\S]*?)(?:<\/file>|$)/g; let match; @@ -71,10 +73,10 @@ function parseAIResponse(response: string): ParsedResponse { const filePath = match[1]; const content = match[2].trim(); const hasClosingTag = response.substring(match.index, match.index + match[0].length).includes(''); - + // Check if this file already exists in our map const existing = fileMap.get(filePath); - + // Decide whether to keep this version let shouldReplace = false; if (!existing) { @@ -88,7 +90,7 @@ function parseAIResponse(response: string): ParsedResponse { } else if (!existing.isComplete && !hasClosingTag && content.length > existing.content.length) { shouldReplace = true; // Both incomplete, keep longer one } - + if (shouldReplace) { // Additional validation: reject obviously broken content if (content.includes('...') && !content.includes('...props') && !content.includes('...rest')) { @@ -102,18 +104,18 @@ function parseAIResponse(response: string): ParsedResponse { } } } - + // Convert map to array for sections.files for (const [path, { content, isComplete }] of fileMap.entries()) { if (!isComplete) { console.log(`[apply-ai-code-stream] Warning: File ${path} appears to be truncated (no closing tag)`); } - + sections.files.push({ path, content }); - + // Extract packages from file content const filePackages = extractPackagesFromCode(content); for 
(const pkg of filePackages) { @@ -123,7 +125,7 @@ function parseAIResponse(response: string): ParsedResponse { } } } - + // Also parse markdown code blocks with file paths const markdownFileRegex = /```(?:file )?path="([^"]+)"\n([\s\S]*?)```/g; while ((match = markdownFileRegex.exec(response)) !== null) { @@ -133,7 +135,7 @@ function parseAIResponse(response: string): ParsedResponse { path: filePath, content: content }); - + // Extract packages from file content const filePackages = extractPackagesFromCode(content); for (const pkg of filePackages) { @@ -143,7 +145,7 @@ function parseAIResponse(response: string): ParsedResponse { } } } - + // Parse plain text format like "Generated Files: Header.jsx, index.css" const generatedFilesMatch = response.match(/Generated Files?:\s*([^\n]+)/i); if (generatedFilesMatch) { @@ -153,7 +155,7 @@ function parseAIResponse(response: string): ParsedResponse { .map(f => f.trim()) .filter(f => f.endsWith('.jsx') || f.endsWith('.js') || f.endsWith('.tsx') || f.endsWith('.ts') || f.endsWith('.css') || f.endsWith('.json') || f.endsWith('.html')); console.log(`[apply-ai-code-stream] Detected generated files from plain text: ${filesList.join(', ')}`); - + // Try to extract the actual file content if it follows for (const fileName of filesList) { // Look for the file content after the file name @@ -169,7 +171,7 @@ function parseAIResponse(response: string): ParsedResponse { content: codeMatch[1].trim() }); console.log(`[apply-ai-code-stream] Extracted content for ${filePath}`); - + // Extract packages from this file const filePackages = extractPackagesFromCode(codeMatch[1]); for (const pkg of filePackages) { @@ -182,7 +184,7 @@ function parseAIResponse(response: string): ParsedResponse { } } } - + // Also try to parse if the response contains raw JSX/JS code blocks const codeBlockRegex = /```(?:jsx?|tsx?|javascript|typescript)?\n([\s\S]*?)```/g; while ((match = codeBlockRegex.exec(response)) !== null) { @@ -192,14 +194,14 @@ function 
parseAIResponse(response: string): ParsedResponse { if (fileNameMatch) { const fileName = fileNameMatch[1].trim(); const filePath = fileName.includes('/') ? fileName : `src/components/${fileName}`; - + // Don't add duplicate files if (!sections.files.some(f => f.path === filePath)) { sections.files.push({ path: filePath, content: content }); - + // Extract packages const filePackages = extractPackagesFromCode(content); for (const pkg of filePackages) { @@ -222,7 +224,7 @@ function parseAIResponse(response: string): ParsedResponse { while ((match = pkgRegex.exec(response)) !== null) { sections.packages.push(match[1].trim()); } - + // Also parse tag with multiple packages const packagesRegex = /([\s\S]*?)<\/packages>/; const packagesMatch = response.match(packagesRegex); @@ -262,22 +264,28 @@ function parseAIResponse(response: string): ParsedResponse { export async function POST(request: NextRequest) { try { const { response, isEdit = false, packages = [], sandboxId } = await request.json(); - + if (!response) { return NextResponse.json({ error: 'response is required' }, { status: 400 }); } - + // Debug log the response console.log('[apply-ai-code-stream] Received response to parse:'); console.log('[apply-ai-code-stream] Response length:', response.length); console.log('[apply-ai-code-stream] Response preview:', response.substring(0, 500)); console.log('[apply-ai-code-stream] isEdit:', isEdit); console.log('[apply-ai-code-stream] packages:', packages); - + // Parse the AI response const parsed = parseAIResponse(response); + const morphEnabled = Boolean(isEdit && process.env.MORPH_API_KEY); + const morphEdits = morphEnabled ? 
parseMorphEdits(response) : []; + console.log('[apply-ai-code-stream] Morph Fast Apply mode:', morphEnabled); + if (morphEnabled) { + console.log('[apply-ai-code-stream] Morph edits found:', morphEdits.length); + } // Log what was parsed console.log('[apply-ai-code-stream] Parsed result:'); @@ -288,94 +296,109 @@ export async function POST(request: NextRequest) { }); } console.log('[apply-ai-code-stream] Packages found:', parsed.packages); - + // Initialize existingFiles if not already if (!global.existingFiles) { global.existingFiles = new Set(); } - - // First, always check the global state for active sandbox - let sandbox = global.activeSandbox; - - // If we don't have a sandbox in this instance but we have a sandboxId, - // reconnect to the existing sandbox - if (!sandbox && sandboxId) { - console.log(`[apply-ai-code-stream] Sandbox ${sandboxId} not in this instance, attempting reconnect...`); - + + // Try to get provider from sandbox manager first + let provider = sandboxId ? sandboxManager.getProvider(sandboxId) : sandboxManager.getActiveProvider(); + + // Fall back to global state if not found in manager + if (!provider) { + provider = global.activeSandboxProvider; + } + + // If we have a sandboxId but no provider, try to get or create one + if (!provider && sandboxId) { + console.log(`[apply-ai-code-stream] No provider found for sandbox ${sandboxId}, attempting to get or create...`); + try { - // Reconnect to the existing sandbox using E2B's connect method - sandbox = await Sandbox.connect(sandboxId, { apiKey: process.env.E2B_API_KEY }); - console.log(`[apply-ai-code-stream] Successfully reconnected to sandbox ${sandboxId}`); - - // Store the reconnected sandbox globally for this instance - global.activeSandbox = sandbox; - - // Update sandbox data if needed - if (!global.sandboxData) { - const host = (sandbox as any).getHost(5173); - global.sandboxData = { - sandboxId, - url: `https://${host}` - }; + provider = await 
sandboxManager.getOrCreateProvider(sandboxId); + + // If we got a new provider (not reconnected), we need to create a new sandbox + if (!provider.getSandboxInfo()) { + console.log(`[apply-ai-code-stream] Creating new sandbox since reconnection failed for ${sandboxId}`); + await provider.createSandbox(); + await provider.setupViteApp(); + sandboxManager.registerSandbox(sandboxId, provider); } - - // Initialize existingFiles if not already - if (!global.existingFiles) { - global.existingFiles = new Set(); - } - } catch (reconnectError) { - console.error(`[apply-ai-code-stream] Failed to reconnect to sandbox ${sandboxId}:`, reconnectError); - - // If reconnection fails, we'll still try to return a meaningful response + + // Update legacy global state + global.activeSandboxProvider = provider; + console.log(`[apply-ai-code-stream] Successfully got provider for sandbox ${sandboxId}`); + } catch (providerError) { + console.error(`[apply-ai-code-stream] Failed to get or create provider for sandbox ${sandboxId}:`, providerError); return NextResponse.json({ success: false, - error: `Failed to reconnect to sandbox ${sandboxId}. The sandbox may have expired or been terminated.`, + error: `Failed to create sandbox provider for ${sandboxId}. 
The sandbox may have expired.`, results: { filesCreated: [], packagesInstalled: [], commandsExecuted: [], - errors: [`Sandbox reconnection failed: ${(reconnectError as Error).message}`] + errors: [`Sandbox provider creation failed: ${(providerError as Error).message}`] }, explanation: parsed.explanation, structure: parsed.structure, parsedFiles: parsed.files, message: `Parsed ${parsed.files.length} files but couldn't apply them - sandbox reconnection failed.` - }); + }, { status: 500 }); } } - - // If no sandbox at all and no sandboxId provided, return an error - if (!sandbox && !sandboxId) { - console.log('[apply-ai-code-stream] No sandbox available and no sandboxId provided'); - return NextResponse.json({ - success: false, - error: 'No active sandbox found. Please create a sandbox first.', - results: { - filesCreated: [], - packagesInstalled: [], - commandsExecuted: [], - errors: ['No sandbox available'] - }, - explanation: parsed.explanation, - structure: parsed.structure, - parsedFiles: parsed.files, - message: `Parsed ${parsed.files.length} files but no sandbox available to apply them.` - }); + + // If we still don't have a provider, create a new one + if (!provider) { + console.log(`[apply-ai-code-stream] No active provider found, creating new sandbox...`); + try { + const { SandboxFactory } = await import('@/lib/sandbox/factory'); + provider = SandboxFactory.create(); + const sandboxInfo = await provider.createSandbox(); + await provider.setupViteApp(); + + // Register with sandbox manager + sandboxManager.registerSandbox(sandboxInfo.sandboxId, provider); + + // Store in legacy global state + global.activeSandboxProvider = provider; + global.sandboxData = { + sandboxId: sandboxInfo.sandboxId, + url: sandboxInfo.url + }; + + console.log(`[apply-ai-code-stream] Created new sandbox successfully`); + } catch (createError) { + console.error(`[apply-ai-code-stream] Failed to create new sandbox:`, createError); + return NextResponse.json({ + success: false, + 
error: `Failed to create new sandbox: ${createError instanceof Error ? createError.message : 'Unknown error'}`, + results: { + filesCreated: [], + packagesInstalled: [], + commandsExecuted: [], + errors: [`Sandbox creation failed: ${createError instanceof Error ? createError.message : 'Unknown error'}`] + }, + explanation: parsed.explanation, + structure: parsed.structure, + parsedFiles: parsed.files, + message: `Parsed ${parsed.files.length} files but couldn't apply them - sandbox creation failed.` + }, { status: 500 }); + } } - + // Create a response stream for real-time updates const encoder = new TextEncoder(); const stream = new TransformStream(); const writer = stream.writable.getWriter(); - + // Function to send progress updates const sendProgress = async (data: any) => { const message = `data: ${JSON.stringify(data)}\n\n`; await writer.write(encoder.encode(message)); }; - - // Start processing in background (pass sandbox and request to the async function) - (async (sandboxInstance, req) => { + + // Start processing in background (pass provider and request to the async function) + (async (providerInstance, req) => { const results = { filesCreated: [] as string[], filesUpdated: [] as string[], @@ -385,86 +408,94 @@ export async function POST(request: NextRequest) { commandsExecuted: [] as string[], errors: [] as string[] }; - + try { - await sendProgress({ - type: 'start', + await sendProgress({ + type: 'start', message: 'Starting code application...', totalSteps: 3 }); + if (morphEnabled) { + await sendProgress({ type: 'info', message: 'Morph Fast Apply enabled' }); + await sendProgress({ type: 'info', message: `Parsed ${morphEdits.length} Morph edits` }); + if (morphEdits.length === 0) { + console.warn('[apply-ai-code-stream] Morph enabled but no blocks found; falling back to full-file flow'); + await sendProgress({ type: 'warning', message: 'Morph enabled but no blocks found; falling back to full-file flow' }); + } + } // Step 1: Install packages const 
packagesArray = Array.isArray(packages) ? packages : []; const parsedPackages = Array.isArray(parsed.packages) ? parsed.packages : []; - + // Combine and deduplicate packages const allPackages = [...packagesArray.filter(pkg => pkg && typeof pkg === 'string'), ...parsedPackages]; - + // Use Set to remove duplicates, then filter out pre-installed packages const uniquePackages = [...new Set(allPackages)] .filter(pkg => pkg && typeof pkg === 'string' && pkg.trim() !== '') // Remove empty strings .filter(pkg => pkg !== 'react' && pkg !== 'react-dom'); // Filter pre-installed - + // Log if we found duplicates if (allPackages.length !== uniquePackages.length) { console.log(`[apply-ai-code-stream] Removed ${allPackages.length - uniquePackages.length} duplicate packages`); console.log(`[apply-ai-code-stream] Original packages:`, allPackages); console.log(`[apply-ai-code-stream] Deduplicated packages:`, uniquePackages); } - + if (uniquePackages.length > 0) { - await sendProgress({ - type: 'step', + await sendProgress({ + type: 'step', step: 1, message: `Installing ${uniquePackages.length} packages...`, packages: uniquePackages }); - + // Use streaming package installation try { // Construct the API URL properly for both dev and production const protocol = process.env.NODE_ENV === 'production' ? 
'https' : 'http'; const host = req.headers.get('host') || 'localhost:3000'; const apiUrl = `${protocol}://${host}/api/install-packages`; - + const installResponse = await fetch(apiUrl, { method: 'POST', headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ + body: JSON.stringify({ packages: uniquePackages, - sandboxId: sandboxId || (sandboxInstance as any).sandboxId + sandboxId: sandboxId || providerInstance.getSandboxInfo()?.sandboxId }) }); - + if (installResponse.ok && installResponse.body) { const reader = installResponse.body.getReader(); const decoder = new TextDecoder(); - + while (true) { const { done, value } = await reader.read(); if (done) break; - + const chunk = decoder.decode(value); if (!chunk) continue; const lines = chunk.split('\n'); - + for (const line of lines) { if (line.startsWith('data: ')) { try { const data = JSON.parse(line.slice(6)); - + // Forward package installation progress await sendProgress({ type: 'package-progress', ...data }); - + // Track results if (data.type === 'success' && data.installedPackages) { results.packagesInstalled = data.installedPackages; } - } catch (e) { - // Ignore parse errors + } catch (parseError) { + console.debug('Error parsing terminal output:', parseError); } } } @@ -479,28 +510,83 @@ export async function POST(request: NextRequest) { results.errors.push(`Package installation failed: ${(error as Error).message}`); } } else { - await sendProgress({ - type: 'step', + await sendProgress({ + type: 'step', step: 1, message: 'No additional packages to install, skipping...' }); } - + // Step 2: Create/update files const filesArray = Array.isArray(parsed.files) ? 
parsed.files : []; - await sendProgress({ - type: 'step', + await sendProgress({ + type: 'step', step: 2, message: `Creating ${filesArray.length} files...` }); - + // Filter out config files that shouldn't be created const configFiles = ['tailwind.config.js', 'vite.config.js', 'package.json', 'package-lock.json', 'tsconfig.json', 'postcss.config.js']; - const filteredFiles = filesArray.filter(file => { + let filteredFiles = filesArray.filter(file => { if (!file || typeof file !== 'object') return false; const fileName = (file.path || '').split('/').pop() || ''; return !configFiles.includes(fileName); }); + + // If Morph is enabled and we have edits, apply them before file writes + const morphUpdatedPaths = new Set(); + if (morphEnabled && morphEdits.length > 0) { + const morphSandbox = (global as any).activeSandbox || providerInstance; + if (!morphSandbox) { + console.warn('[apply-ai-code-stream] No sandbox available to apply Morph edits'); + await sendProgress({ type: 'warning', message: 'No sandbox available to apply Morph edits' }); + } else { + await sendProgress({ type: 'info', message: `Applying ${morphEdits.length} fast edits via Morph...` }); + for (const [idx, edit] of morphEdits.entries()) { + try { + await sendProgress({ type: 'file-progress', current: idx + 1, total: morphEdits.length, fileName: edit.targetFile, action: 'morph-applying' }); + const result = await applyMorphEditToFile({ + sandbox: morphSandbox, + targetPath: edit.targetFile, + instructions: edit.instructions, + updateSnippet: edit.update + }); + if (result.success && result.normalizedPath) { + console.log('[apply-ai-code-stream] Morph updated', result.normalizedPath); + morphUpdatedPaths.add(result.normalizedPath); + if (results.filesUpdated) results.filesUpdated.push(result.normalizedPath); + await sendProgress({ type: 'file-complete', fileName: result.normalizedPath, action: 'morph-updated' }); + } else { + const msg = result.error || 'Unknown Morph error'; + 
console.error('[apply-ai-code-stream] Morph apply failed for', edit.targetFile, msg); + if (results.errors) results.errors.push(`Morph apply failed for ${edit.targetFile}: ${msg}`); + await sendProgress({ type: 'file-error', fileName: edit.targetFile, error: msg }); + } + } catch (err) { + const msg = (err as Error).message; + console.error('[apply-ai-code-stream] Morph apply exception for', edit.targetFile, msg); + if (results.errors) results.errors.push(`Morph apply exception for ${edit.targetFile}: ${msg}`); + await sendProgress({ type: 'file-error', fileName: edit.targetFile, error: msg }); + } + } + } + } + + // Avoid overwriting Morph-updated files in the file write loop + if (morphUpdatedPaths.size > 0) { + filteredFiles = filteredFiles.filter(file => { + if (!file?.path) return true; + let normalizedPath = file.path.startsWith('/') ? file.path.slice(1) : file.path; + const fileName = normalizedPath.split('/').pop() || ''; + if (!normalizedPath.startsWith('src/') && + !normalizedPath.startsWith('public/') && + normalizedPath !== 'index.html' && + !configFiles.includes(fileName)) { + normalizedPath = 'src/' + normalizedPath; + } + return !morphUpdatedPaths.has(normalizedPath); + }); + } for (const [index, file] of filteredFiles.entries()) { try { @@ -512,42 +598,45 @@ export async function POST(request: NextRequest) { fileName: file.path, action: 'creating' }); - + // Normalize the file path let normalizedPath = file.path; if (normalizedPath.startsWith('/')) { normalizedPath = normalizedPath.substring(1); } - if (!normalizedPath.startsWith('src/') && - !normalizedPath.startsWith('public/') && - normalizedPath !== 'index.html' && - !configFiles.includes(normalizedPath.split('/').pop() || '')) { + if (!normalizedPath.startsWith('src/') && + !normalizedPath.startsWith('public/') && + normalizedPath !== 'index.html' && + !configFiles.includes(normalizedPath.split('/').pop() || '')) { normalizedPath = 'src/' + normalizedPath; } - - const fullPath = 
`/home/user/app/${normalizedPath}`; + const isUpdate = global.existingFiles.has(normalizedPath); - + // Remove any CSS imports from JSX/JS files (we're using Tailwind) let fileContent = file.content; if (file.path.endsWith('.jsx') || file.path.endsWith('.js') || file.path.endsWith('.tsx') || file.path.endsWith('.ts')) { fileContent = fileContent.replace(/import\s+['"]\.\/[^'"]+\.css['"];?\s*\n?/g, ''); } - - // Write the file using Python (code-interpreter SDK) - const escapedContent = fileContent - .replace(/\\/g, '\\\\') - .replace(/"""/g, '\\"\\"\\"') - .replace(/\$/g, '\\$'); - - await sandboxInstance.runCode(` -import os -os.makedirs(os.path.dirname("${fullPath}"), exist_ok=True) -with open("${fullPath}", 'w') as f: - f.write("""${escapedContent}""") -print(f"File written: ${fullPath}") - `); - + + // Fix common Tailwind CSS errors in CSS files + if (file.path.endsWith('.css')) { + // Replace shadow-3xl with shadow-2xl (shadow-3xl doesn't exist) + fileContent = fileContent.replace(/shadow-3xl/g, 'shadow-2xl'); + // Replace any other non-existent shadow utilities + fileContent = fileContent.replace(/shadow-4xl/g, 'shadow-2xl'); + fileContent = fileContent.replace(/shadow-5xl/g, 'shadow-2xl'); + } + + // Create directory if needed + const dirPath = normalizedPath.includes('/') ? 
normalizedPath.substring(0, normalizedPath.lastIndexOf('/')) : ''; + if (dirPath) { + await providerInstance.runCommand(`mkdir -p ${dirPath}`); + } + + // Write the file using provider + await providerInstance.writeFile(normalizedPath, fileContent); + // Update file cache if (global.sandboxState?.fileCache) { global.sandboxState.fileCache.files[normalizedPath] = { @@ -555,14 +644,14 @@ print(f"File written: ${fullPath}") lastModified: Date.now() }; } - + if (isUpdate) { if (results.filesUpdated) results.filesUpdated.push(normalizedPath); } else { if (results.filesCreated) results.filesCreated.push(normalizedPath); if (global.existingFiles) global.existingFiles.add(normalizedPath); } - + await sendProgress({ type: 'file-complete', fileName: normalizedPath, @@ -579,16 +668,16 @@ print(f"File written: ${fullPath}") }); } } - + // Step 3: Execute commands const commandsArray = Array.isArray(parsed.commands) ? parsed.commands : []; if (commandsArray.length > 0) { - await sendProgress({ - type: 'step', + await sendProgress({ + type: 'step', step: 3, message: `Executing ${commandsArray.length} commands...` }); - + for (const [index, cmd] of commandsArray.entries()) { try { await sendProgress({ @@ -598,33 +687,36 @@ print(f"File written: ${fullPath}") command: cmd, action: 'executing' }); - - // Use E2B commands.run() for cleaner execution - const result = await sandboxInstance.commands.run(cmd, { - cwd: '/home/user/app', - timeout: 60, - on_stdout: async (data: string) => { - await sendProgress({ - type: 'command-output', - command: cmd, - output: data, - stream: 'stdout' - }); - }, - on_stderr: async (data: string) => { - await sendProgress({ - type: 'command-output', - command: cmd, - output: data, - stream: 'stderr' - }); - } - }); - + + // Use provider runCommand + const result = await providerInstance.runCommand(cmd); + + // Get command output from provider result + const stdout = result.stdout; + const stderr = result.stderr; + + if (stdout) { + await sendProgress({ 
+ type: 'command-output', + command: cmd, + output: stdout, + stream: 'stdout' + }); + } + + if (stderr) { + await sendProgress({ + type: 'command-output', + command: cmd, + output: stderr, + stream: 'stderr' + }); + } + if (results.commandsExecuted) { results.commandsExecuted.push(cmd); } - + await sendProgress({ type: 'command-complete', command: cmd, @@ -643,7 +735,7 @@ print(f"File written: ${fullPath}") } } } - + // Send final results await sendProgress({ type: 'complete', @@ -652,7 +744,7 @@ print(f"File written: ${fullPath}") structure: parsed.structure, message: `Successfully applied ${results.filesCreated.length} files` }); - + // Track applied files in conversation state if (global.conversationState && results.filesCreated.length > 0) { const messages = global.conversationState.context.messages; @@ -665,7 +757,7 @@ print(f"File written: ${fullPath}") }; } } - + // Track applied code in project evolution if (global.conversationState.context.projectEvolution) { global.conversationState.context.projectEvolution.majorChanges.push({ @@ -674,10 +766,10 @@ print(f"File written: ${fullPath}") filesAffected: results.filesCreated || [] }); } - + global.conversationState.lastUpdated = Date.now(); } - + } catch (error) { await sendProgress({ type: 'error', @@ -686,8 +778,8 @@ print(f"File written: ${fullPath}") } finally { await writer.close(); } - })(sandbox, request); - + })(provider, request); + // Return the stream return new Response(stream.readable, { headers: { @@ -696,7 +788,7 @@ print(f"File written: ${fullPath}") 'Connection': 'keep-alive', }, }); - + } catch (error) { console.error('Apply AI code stream error:', error); return NextResponse.json( diff --git a/app/api/apply-ai-code/route.ts b/app/api/apply-ai-code/route.ts index f00f08a..aa849b4 100644 --- a/app/api/apply-ai-code/route.ts +++ b/app/api/apply-ai-code/route.ts @@ -1,4 +1,5 @@ import { NextRequest, NextResponse } from 'next/server'; +import { parseMorphEdits, applyMorphEditToFile } from 
'@/lib/morph-fast-apply'; import type { SandboxState } from '@/types/sandbox'; import type { ConversationState } from '@/types/conversation'; @@ -128,6 +129,7 @@ function parseAIResponse(response: string): ParsedResponse { declare global { var activeSandbox: any; + var activeSandboxProvider: any; var existingFiles: Set; var sandboxState: SandboxState; } @@ -144,14 +146,23 @@ export async function POST(request: NextRequest) { // Parse the AI response const parsed = parseAIResponse(response); + const morphEnabled = Boolean(isEdit && process.env.MORPH_API_KEY); + const morphEdits = morphEnabled ? parseMorphEdits(response) : []; + console.log('[apply-ai-code] Morph Fast Apply mode:', morphEnabled); + if (morphEnabled) { + console.log('[apply-ai-code] Morph edits found:', morphEdits.length); + } // Initialize existingFiles if not already if (!global.existingFiles) { global.existingFiles = new Set(); } + // Get the active sandbox or provider + const sandbox = global.activeSandbox || global.activeSandboxProvider; + // If no active sandbox, just return parsed results - if (!global.activeSandbox) { + if (!sandbox) { return NextResponse.json({ success: true, results: { @@ -167,11 +178,43 @@ export async function POST(request: NextRequest) { }); } + // Verify sandbox is ready before applying code + console.log('[apply-ai-code] Verifying sandbox is ready...'); + + // For Vercel sandboxes, check if Vite is running + if (sandbox.constructor?.name === 'VercelProvider' || sandbox.getSandboxInfo?.()?.provider === 'vercel') { + console.log('[apply-ai-code] Detected Vercel sandbox, checking Vite status...'); + try { + // Check if Vite process is running + const checkResult = await sandbox.runCommand('pgrep -f vite'); + if (!checkResult || !checkResult.stdout) { + console.log('[apply-ai-code] Vite not running, starting it...'); + // Start Vite if not running + await sandbox.runCommand('sh -c "cd /vercel/sandbox && nohup npm run dev > /tmp/vite.log 2>&1 &"'); + // Wait for Vite to 
start + await new Promise(resolve => setTimeout(resolve, 5000)); + console.log('[apply-ai-code] Vite started, proceeding with code application'); + } else { + console.log('[apply-ai-code] Vite is already running'); + } + } catch (e) { + console.log('[apply-ai-code] Could not check Vite status, proceeding anyway:', e); + } + } + // Apply to active sandbox console.log('[apply-ai-code] Applying code to sandbox...'); console.log('[apply-ai-code] Is edit mode:', isEdit); console.log('[apply-ai-code] Files to write:', parsed.files.map(f => f.path)); console.log('[apply-ai-code] Existing files:', Array.from(global.existingFiles)); + if (morphEnabled) { + console.log('[apply-ai-code] Morph Fast Apply enabled'); + if (morphEdits.length > 0) { + console.log('[apply-ai-code] Parsed Morph edits:', morphEdits.map(e => e.targetFile)); + } else { + console.log('[apply-ai-code] No blocks found in response'); + } + } const results = { filesCreated: [] as string[], @@ -296,9 +339,46 @@ export async function POST(request: NextRequest) { } } + // Attempt Morph Fast Apply for edits before file creation + const morphUpdatedPaths = new Set(); + + if (morphEnabled && morphEdits.length > 0) { + if (!global.activeSandbox) { + console.warn('[apply-ai-code] Morph edits found but no active sandbox; skipping Morph application'); + } else { + console.log(`[apply-ai-code] Applying ${morphEdits.length} fast edits via Morph...`); + for (const edit of morphEdits) { + try { + const result = await applyMorphEditToFile({ + sandbox: global.activeSandbox, + targetPath: edit.targetFile, + instructions: edit.instructions, + updateSnippet: edit.update + }); + + if (result.success && result.normalizedPath) { + morphUpdatedPaths.add(result.normalizedPath); + results.filesUpdated.push(result.normalizedPath); + console.log('[apply-ai-code] Morph applied to', result.normalizedPath); + } else { + const msg = result.error || 'Unknown Morph error'; + console.error('[apply-ai-code] Morph apply failed:', msg); + 
results.errors.push(`Morph apply failed for ${edit.targetFile}: ${msg}`); + } + } catch (e) { + console.error('[apply-ai-code] Morph apply exception:', e); + results.errors.push(`Morph apply exception for ${edit.targetFile}: ${(e as Error).message}`); + } + } + } + } + if (morphEnabled && morphEdits.length === 0) { + console.warn('[apply-ai-code] Morph enabled but no blocks found; falling back to full-file flow'); + } + // Filter out config files that shouldn't be created const configFiles = ['tailwind.config.js', 'vite.config.js', 'package.json', 'package-lock.json', 'tsconfig.json', 'postcss.config.js']; - const filteredFiles = parsed.files.filter(file => { + let filteredFiles = parsed.files.filter(file => { const fileName = file.path.split('/').pop() || ''; if (configFiles.includes(fileName)) { console.warn(`[apply-ai-code] Skipping config file: ${file.path} - already exists in template`); @@ -306,6 +386,21 @@ export async function POST(request: NextRequest) { } return true; }); + + // Avoid overwriting files already updated by Morph + if (morphUpdatedPaths.size > 0) { + filteredFiles = filteredFiles.filter(file => { + let normalizedPath = file.path.startsWith('/') ? 
file.path.slice(1) : file.path; + const fileName = normalizedPath.split('/').pop() || ''; + if (!normalizedPath.startsWith('src/') && + !normalizedPath.startsWith('public/') && + normalizedPath !== 'index.html' && + !configFiles.includes(fileName)) { + normalizedPath = 'src/' + normalizedPath; + } + return !morphUpdatedPaths.has(normalizedPath); + }); + } // Create or update files AFTER package installation for (const file of filteredFiles) { @@ -336,11 +431,28 @@ export async function POST(request: NextRequest) { fileContent = fileContent.replace(/import\s+['"]\.\/[^'"]+\.css['"];?\s*\n?/g, ''); } + // Fix common Tailwind CSS errors in CSS files + if (file.path.endsWith('.css')) { + // Replace shadow-3xl with shadow-2xl (shadow-3xl doesn't exist) + fileContent = fileContent.replace(/shadow-3xl/g, 'shadow-2xl'); + // Replace any other non-existent shadow utilities + fileContent = fileContent.replace(/shadow-4xl/g, 'shadow-2xl'); + fileContent = fileContent.replace(/shadow-5xl/g, 'shadow-2xl'); + } + console.log(`[apply-ai-code] Writing file using E2B files API: ${fullPath}`); try { - // Use the correct E2B API - sandbox.files.write() - await global.activeSandbox.files.write(fullPath, fileContent); + // Check if we're using provider pattern (v2) or direct sandbox (v1) + if (sandbox.writeFile) { + // V2: Provider pattern (Vercel/E2B provider) + await sandbox.writeFile(file.path, fileContent); + } else if (sandbox.files?.write) { + // V1: Direct E2B sandbox + await sandbox.files.write(fullPath, fileContent); + } else { + throw new Error('Unsupported sandbox type'); + } console.log(`[apply-ai-code] Successfully wrote file: ${fullPath}`); // Update file cache @@ -354,7 +466,7 @@ export async function POST(request: NextRequest) { } catch (writeError) { console.error(`[apply-ai-code] E2B file write error:`, writeError); - throw writeError; + throw writeError as Error; } @@ -432,15 +544,17 @@ function App() { export default App;`; try { - await 
global.activeSandbox.runCode(` -file_path = "/home/user/app/src/App.jsx" -file_content = """${appContent.replace(/"/g, '\\"').replace(/\n/g, '\\n')}""" - -with open(file_path, 'w') as f: - f.write(file_content) - -print(f"Auto-generated: {file_path}") - `); + // Use provider pattern if available + if (sandbox.writeFile) { + await sandbox.writeFile('src/App.jsx', appContent); + } else if (sandbox.writeFiles) { + await sandbox.writeFiles([{ + path: 'src/App.jsx', + content: Buffer.from(appContent) + }]); + } + + console.log('Auto-generated: src/App.jsx'); results.filesCreated.push('src/App.jsx (auto-generated)'); } catch (error) { results.errors.push(`Failed to create App.jsx: ${(error as Error).message}`); @@ -459,9 +573,7 @@ print(f"Auto-generated: {file_path}") if (!isEdit && !indexCssInParsed && !indexCssExists) { try { - await global.activeSandbox.runCode(` -file_path = "/home/user/app/src/index.css" -file_content = """@tailwind base; + const indexCssContent = `@tailwind base; @tailwind components; @tailwind utilities; @@ -483,15 +595,22 @@ body { margin: 0; min-width: 320px; min-height: 100vh; -}""" +}`; -with open(file_path, 'w') as f: - f.write(file_content) - -print(f"Auto-generated: {file_path}") - `); + // Use provider pattern if available + if (sandbox.writeFile) { + await sandbox.writeFile('src/index.css', indexCssContent); + } else if (sandbox.writeFiles) { + await sandbox.writeFiles([{ + path: 'src/index.css', + content: Buffer.from(indexCssContent) + }]); + } + + console.log('Auto-generated: src/index.css'); results.filesCreated.push('src/index.css (with Tailwind)'); } catch (error) { + console.error('Failed to create index.css:', error); results.errors.push('Failed to create index.css with Tailwind'); } } @@ -500,15 +619,47 @@ print(f"Auto-generated: {file_path}") // Execute commands for (const cmd of parsed.commands) { try { - await global.activeSandbox.runCode(` -import subprocess -os.chdir('/home/user/app') -result = 
subprocess.run(${JSON.stringify(cmd.split(' '))}, capture_output=True, text=True) -print(f"Executed: ${cmd}") -print(result.stdout) -if result.stderr: - print(f"Errors: {result.stderr}") - `); + // Parse command and arguments + const commandParts = cmd.trim().split(/\s+/); + const cmdName = commandParts[0]; + const args = commandParts.slice(1); + + // Execute command using sandbox + let result; + if (sandbox.runCommand && typeof sandbox.runCommand === 'function') { + // Check if this is a provider pattern sandbox + const testResult = await sandbox.runCommand(cmd); + if (testResult && typeof testResult === 'object' && 'stdout' in testResult) { + // Provider returns CommandResult directly + result = testResult; + } else { + // Direct sandbox - expects object with cmd and args + result = await sandbox.runCommand({ + cmd: cmdName, + args + }); + } + } + + console.log(`Executed: ${cmd}`); + + // Handle result based on type + let stdout = ''; + let stderr = ''; + + if (result) { + if (typeof result.stdout === 'string') { + stdout = result.stdout; + stderr = result.stderr || ''; + } else if (typeof result.stdout === 'function') { + stdout = await result.stdout(); + stderr = await result.stderr(); + } + } + + if (stdout) console.log(stdout); + if (stderr) console.log(`Errors: ${stderr}`); + results.commandsExecuted.push(cmd); } catch (error) { results.errors.push(`Failed to execute ${cmd}: ${(error as Error).message}`); diff --git a/app/api/conversation-state/route.ts b/app/api/conversation-state/route.ts index 1a37468..969692c 100644 --- a/app/api/conversation-state/route.ts +++ b/app/api/conversation-state/route.ts @@ -59,10 +59,26 @@ export async function POST(request: NextRequest) { case 'clear-old': // Clear old conversation data but keep recent context if (!global.conversationState) { + // Initialize conversation state if it doesn't exist + global.conversationState = { + conversationId: `conv-${Date.now()}`, + startedAt: Date.now(), + lastUpdated: Date.now(), + 
context: { + messages: [], + edits: [], + projectEvolution: { majorChanges: [] }, + userPreferences: {} + } + }; + + console.log('[conversation-state] Initialized new conversation state for clear-old'); + return NextResponse.json({ - success: false, - error: 'No active conversation to clear' - }, { status: 400 }); + success: true, + message: 'New conversation state initialized', + state: global.conversationState + }); } // Keep only recent data diff --git a/app/api/create-ai-sandbox-v2/route.ts b/app/api/create-ai-sandbox-v2/route.ts new file mode 100644 index 0000000..cd72a74 --- /dev/null +++ b/app/api/create-ai-sandbox-v2/route.ts @@ -0,0 +1,103 @@ +import { NextResponse } from 'next/server'; +import { SandboxFactory } from '@/lib/sandbox/factory'; +// SandboxProvider type is used through SandboxFactory +import type { SandboxState } from '@/types/sandbox'; +import { sandboxManager } from '@/lib/sandbox/sandbox-manager'; + +// Store active sandbox globally +declare global { + var activeSandboxProvider: any; + var sandboxData: any; + var existingFiles: Set; + var sandboxState: SandboxState; +} + +export async function POST() { + try { + console.log('[create-ai-sandbox-v2] Creating sandbox...'); + + // Clean up all existing sandboxes + console.log('[create-ai-sandbox-v2] Cleaning up existing sandboxes...'); + await sandboxManager.terminateAll(); + + // Also clean up legacy global state + if (global.activeSandboxProvider) { + try { + await global.activeSandboxProvider.terminate(); + } catch (e) { + console.error('Failed to terminate legacy global sandbox:', e); + } + global.activeSandboxProvider = null; + } + + // Clear existing files tracking + if (global.existingFiles) { + global.existingFiles.clear(); + } else { + global.existingFiles = new Set(); + } + + // Create new sandbox using factory + const provider = SandboxFactory.create(); + const sandboxInfo = await provider.createSandbox(); + + console.log('[create-ai-sandbox-v2] Setting up Vite React app...'); + 
await provider.setupViteApp(); + + // Register with sandbox manager + sandboxManager.registerSandbox(sandboxInfo.sandboxId, provider); + + // Also store in legacy global state for backward compatibility + global.activeSandboxProvider = provider; + global.sandboxData = { + sandboxId: sandboxInfo.sandboxId, + url: sandboxInfo.url + }; + + // Initialize sandbox state + global.sandboxState = { + fileCache: { + files: {}, + lastSync: Date.now(), + sandboxId: sandboxInfo.sandboxId + }, + sandbox: provider, // Store the provider instead of raw sandbox + sandboxData: { + sandboxId: sandboxInfo.sandboxId, + url: sandboxInfo.url + } + }; + + console.log('[create-ai-sandbox-v2] Sandbox ready at:', sandboxInfo.url); + + return NextResponse.json({ + success: true, + sandboxId: sandboxInfo.sandboxId, + url: sandboxInfo.url, + provider: sandboxInfo.provider, + message: 'Sandbox created and Vite React app initialized' + }); + + } catch (error) { + console.error('[create-ai-sandbox-v2] Error:', error); + + // Clean up on error + await sandboxManager.terminateAll(); + if (global.activeSandboxProvider) { + try { + await global.activeSandboxProvider.terminate(); + } catch (e) { + console.error('Failed to terminate sandbox on error:', e); + } + global.activeSandboxProvider = null; + } + + return NextResponse.json( + { + error: error instanceof Error ? error.message : 'Failed to create sandbox', + details: error instanceof Error ? 
error.stack : undefined + }, + { status: 500 } + ); + } +} \ No newline at end of file diff --git a/app/api/create-ai-sandbox/route.ts b/app/api/create-ai-sandbox/route.ts index 257ce1d..daf9b84 100644 --- a/app/api/create-ai-sandbox/route.ts +++ b/app/api/create-ai-sandbox/route.ts @@ -1,5 +1,5 @@ import { NextResponse } from 'next/server'; -import { Sandbox } from '@e2b/code-interpreter'; +import { Sandbox } from '@vercel/sandbox'; import type { SandboxState } from '@/types/sandbox'; import { appConfig } from '@/config/app.config'; @@ -9,23 +9,74 @@ declare global { var sandboxData: any; var existingFiles: Set; var sandboxState: SandboxState; + var sandboxCreationInProgress: boolean; + var sandboxCreationPromise: Promise | null; } export async function POST() { + // Check if sandbox creation is already in progress + if (global.sandboxCreationInProgress && global.sandboxCreationPromise) { + console.log('[create-ai-sandbox] Sandbox creation already in progress, waiting for existing creation...'); + try { + const existingResult = await global.sandboxCreationPromise; + console.log('[create-ai-sandbox] Returning existing sandbox creation result'); + return NextResponse.json(existingResult); + } catch (error) { + console.error('[create-ai-sandbox] Existing sandbox creation failed:', error); + // Continue with new creation if the existing one failed + } + } + + // Check if we already have an active sandbox + if (global.activeSandbox && global.sandboxData) { + console.log('[create-ai-sandbox] Returning existing active sandbox'); + return NextResponse.json({ + success: true, + sandboxId: global.sandboxData.sandboxId, + url: global.sandboxData.url + }); + } + + // Set the creation flag + global.sandboxCreationInProgress = true; + + // Create the promise that other requests can await + global.sandboxCreationPromise = createSandboxInternal(); + + try { + const result = await global.sandboxCreationPromise; + return NextResponse.json(result); + } catch (error) { + 
console.error('[create-ai-sandbox] Sandbox creation failed:', error); + return NextResponse.json( + { + error: error instanceof Error ? error.message : 'Failed to create sandbox', + details: error instanceof Error ? error.stack : undefined + }, + { status: 500 } + ); + } finally { + global.sandboxCreationInProgress = false; + global.sandboxCreationPromise = null; + } +} + +async function createSandboxInternal() { let sandbox: any = null; try { - console.log('[create-ai-sandbox] Creating base sandbox...'); + console.log('[create-ai-sandbox] Creating Vercel sandbox...'); // Kill existing sandbox if any if (global.activeSandbox) { - console.log('[create-ai-sandbox] Killing existing sandbox...'); + console.log('[create-ai-sandbox] Stopping existing sandbox...'); try { - await global.activeSandbox.kill(); + await global.activeSandbox.stop(); } catch (e) { - console.error('Failed to close existing sandbox:', e); + console.error('Failed to stop existing sandbox:', e); } global.activeSandbox = null; + global.sandboxData = null; } // Clear existing files tracking @@ -35,81 +86,102 @@ export async function POST() { global.existingFiles = new Set(); } - // Create base sandbox - we'll set up Vite ourselves for full control - console.log(`[create-ai-sandbox] Creating base E2B sandbox with ${appConfig.e2b.timeoutMinutes} minute timeout...`); - sandbox = await Sandbox.create({ - apiKey: process.env.E2B_API_KEY, - timeoutMs: appConfig.e2b.timeoutMs - }); + // Create Vercel sandbox with flexible authentication + console.log(`[create-ai-sandbox] Creating Vercel sandbox with ${appConfig.vercelSandbox.timeoutMinutes} minute timeout...`); - const sandboxId = (sandbox as any).sandboxId || Date.now().toString(); - const host = (sandbox as any).getHost(appConfig.e2b.vitePort); + // Prepare sandbox configuration + const sandboxConfig: any = { + timeout: appConfig.vercelSandbox.timeoutMs, + runtime: appConfig.vercelSandbox.runtime, + ports: [appConfig.vercelSandbox.devPort] + }; + // Add 
authentication parameters if using personal access token + if (process.env.VERCEL_TOKEN && process.env.VERCEL_TEAM_ID && process.env.VERCEL_PROJECT_ID) { + console.log('[create-ai-sandbox] Using personal access token authentication'); + sandboxConfig.teamId = process.env.VERCEL_TEAM_ID; + sandboxConfig.projectId = process.env.VERCEL_PROJECT_ID; + sandboxConfig.token = process.env.VERCEL_TOKEN; + } else if (process.env.VERCEL_OIDC_TOKEN) { + console.log('[create-ai-sandbox] Using OIDC token authentication'); + } else { + console.log('[create-ai-sandbox] No authentication found - relying on default Vercel authentication'); + } + + sandbox = await Sandbox.create(sandboxConfig); + + const sandboxId = sandbox.sandboxId; console.log(`[create-ai-sandbox] Sandbox created: ${sandboxId}`); - console.log(`[create-ai-sandbox] Sandbox host: ${host}`); - // Set up a basic Vite React app using Python to write files + // Set up a basic Vite React app console.log('[create-ai-sandbox] Setting up Vite React app...'); - // Write all files in a single Python script to avoid multiple executions - const setupScript = ` -import os -import json + // First, change to the working directory + await sandbox.runCommand('pwd'); + // workDir is defined in appConfig - not needed here + + // Get the sandbox URL using the correct Vercel Sandbox API + const sandboxUrl = sandbox.domain(appConfig.vercelSandbox.devPort); + + // Extract the hostname from the sandbox URL for Vite config + const sandboxHostname = new URL(sandboxUrl).hostname; + console.log(`[create-ai-sandbox] Sandbox hostname: ${sandboxHostname}`); -print('Setting up React app with Vite and Tailwind...') - -# Create directory structure -os.makedirs('/home/user/app/src', exist_ok=True) - -# Package.json -package_json = { - "name": "sandbox-app", - "version": "1.0.0", - "type": "module", - "scripts": { - "dev": "vite --host", - "build": "vite build", - "preview": "vite preview" - }, - "dependencies": { - "react": "^18.2.0", - "react-dom": 
"^18.2.0" - }, - "devDependencies": { - "@vitejs/plugin-react": "^4.0.0", - "vite": "^4.3.9", - "tailwindcss": "^3.3.0", - "postcss": "^8.4.31", - "autoprefixer": "^10.4.16" - } -} - -with open('/home/user/app/package.json', 'w') as f: - json.dump(package_json, f, indent=2) -print('✓ package.json') - -# Vite config for E2B - with allowedHosts -vite_config = """import { defineConfig } from 'vite' + // Create the Vite config content with the proper hostname (using string concatenation) + const viteConfigContent = `import { defineConfig } from 'vite' import react from '@vitejs/plugin-react' -// E2B-compatible Vite configuration +// Vercel Sandbox compatible Vite configuration export default defineConfig({ plugins: [react()], server: { host: '0.0.0.0', - port: 5173, + port: ${appConfig.vercelSandbox.devPort}, strictPort: true, - hmr: false, - allowedHosts: ['.e2b.app', 'localhost', '127.0.0.1'] + hmr: true, + allowedHosts: [ + 'localhost', + '127.0.0.1', + '` + sandboxHostname + `', // Allow the Vercel Sandbox domain + '.vercel.run', // Allow all Vercel sandbox domains + '.vercel-sandbox.dev' // Fallback pattern + ] } -})""" +})`; -with open('/home/user/app/vite.config.js', 'w') as f: - f.write(vite_config) -print('✓ vite.config.js') - -# Tailwind config - standard without custom design tokens -tailwind_config = """/** @type {import('tailwindcss').Config} */ + // Create the project files (now we have the sandbox hostname) + const projectFiles = [ + { + path: 'package.json', + content: Buffer.from(JSON.stringify({ + "name": "sandbox-app", + "version": "1.0.0", + "type": "module", + "scripts": { + "dev": "vite --host --port 3000", + "build": "vite build", + "preview": "vite preview" + }, + "dependencies": { + "react": "^18.2.0", + "react-dom": "^18.2.0" + }, + "devDependencies": { + "@vitejs/plugin-react": "^4.0.0", + "vite": "^4.3.9", + "tailwindcss": "^3.3.0", + "postcss": "^8.4.31", + "autoprefixer": "^10.4.16" + } + }, null, 2)) + }, + { + path: 'vite.config.js', + 
content: Buffer.from(viteConfigContent) + }, + { + path: 'tailwind.config.js', + content: Buffer.from(`/** @type {import('tailwindcss').Config} */ export default { content: [ "./index.html", @@ -119,26 +191,20 @@ export default { extend: {}, }, plugins: [], -}""" - -with open('/home/user/app/tailwind.config.js', 'w') as f: - f.write(tailwind_config) -print('✓ tailwind.config.js') - -# PostCSS config -postcss_config = """export default { +}`) + }, + { + path: 'postcss.config.js', + content: Buffer.from(`export default { plugins: { tailwindcss: {}, autoprefixer: {}, }, -}""" - -with open('/home/user/app/postcss.config.js', 'w') as f: - f.write(postcss_config) -print('✓ postcss.config.js') - -# Index.html -index_html = """ +}`) + }, + { + path: 'index.html', + content: Buffer.from(` @@ -149,14 +215,11 @@ index_html = """
-""" - -with open('/home/user/app/index.html', 'w') as f: - f.write(index_html) -print('✓ index.html') - -# Main.jsx -main_jsx = """import React from 'react' +`) + }, + { + path: 'src/main.jsx', + content: Buffer.from(`import React from 'react' import ReactDOM from 'react-dom/client' import App from './App.jsx' import './index.css' @@ -165,19 +228,18 @@ ReactDOM.createRoot(document.getElementById('root')).render( , -)""" - -with open('/home/user/app/src/main.jsx', 'w') as f: - f.write(main_jsx) -print('✓ src/main.jsx') - -# App.jsx with explicit Tailwind test -app_jsx = """function App() { +)`) + }, + { + path: 'src/App.jsx', + content: Buffer.from(`function App() { return (
+

+ Sandbox Ready +

- Sandbox Ready
Start building your React app with Vite and Tailwind CSS!

@@ -185,14 +247,11 @@ app_jsx = """function App() { ) } -export default App""" - -with open('/home/user/app/src/App.jsx', 'w') as f: - f.write(app_jsx) -print('✓ src/App.jsx') - -# Index.css with explicit Tailwind directives -index_css = """@tailwind base; +export default App`) + }, + { + path: 'src/index.css', + content: Buffer.from(`@tailwind base; @tailwind components; @tailwind utilities; @@ -216,99 +275,53 @@ index_css = """@tailwind base; body { font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, sans-serif; background-color: rgb(17 24 39); -}""" +}`) + } + ]; -with open('/home/user/app/src/index.css', 'w') as f: - f.write(index_css) -print('✓ src/index.css') - -print('\\nAll files created successfully!') -`; - - // Execute the setup script - await sandbox.runCode(setupScript); + // Create directory structure first + await sandbox.runCommand({ + cmd: 'mkdir', + args: ['-p', 'src'] + }); + + // Write all files + await sandbox.writeFiles(projectFiles); + console.log('[create-ai-sandbox] ✓ Project files created'); // Install dependencies console.log('[create-ai-sandbox] Installing dependencies...'); - await sandbox.runCode(` -import subprocess -import sys - -print('Installing npm packages...') -result = subprocess.run( - ['npm', 'install'], - cwd='/home/user/app', - capture_output=True, - text=True -) - -if result.returncode == 0: - print('✓ Dependencies installed successfully') -else: - print(f'⚠ Warning: npm install had issues: {result.stderr}') - # Continue anyway as it might still work - `); + const installResult = await sandbox.runCommand({ + cmd: 'npm', + args: ['install', '--loglevel', 'info'] + }); + if (installResult.exitCode === 0) { + console.log('[create-ai-sandbox] ✓ Dependencies installed successfully'); + } else { + console.log('[create-ai-sandbox] ⚠ Warning: npm install had issues but continuing...'); + } - // Start Vite dev server + // Start Vite dev server in detached mode console.log('[create-ai-sandbox] 
Starting Vite dev server...'); - await sandbox.runCode(` -import subprocess -import os -import time - -os.chdir('/home/user/app') - -# Kill any existing Vite processes -subprocess.run(['pkill', '-f', 'vite'], capture_output=True) -time.sleep(1) - -# Start Vite dev server -env = os.environ.copy() -env['FORCE_COLOR'] = '0' - -process = subprocess.Popen( - ['npm', 'run', 'dev'], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - env=env -) - -print(f'✓ Vite dev server started with PID: {process.pid}') -print('Waiting for server to be ready...') - `); + const viteProcess = await sandbox.runCommand({ + cmd: 'npm', + args: ['run', 'dev'], + detached: true + }); + + console.log('[create-ai-sandbox] ✓ Vite dev server started'); // Wait for Vite to be fully ready - await new Promise(resolve => setTimeout(resolve, appConfig.e2b.viteStartupDelay)); - - // Force Tailwind CSS to rebuild by touching the CSS file - await sandbox.runCode(` -import os -import time - -# Touch the CSS file to trigger rebuild -css_file = '/home/user/app/src/index.css' -if os.path.exists(css_file): - os.utime(css_file, None) - print('✓ Triggered CSS rebuild') - -# Also ensure PostCSS processes it -time.sleep(2) -print('✓ Tailwind CSS should be loaded') - `); + await new Promise(resolve => setTimeout(resolve, appConfig.vercelSandbox.devServerStartupDelay)); // Store sandbox globally global.activeSandbox = sandbox; global.sandboxData = { sandboxId, - url: `https://${host}` + url: sandboxUrl, + viteProcess }; - // Set extended timeout on the sandbox instance if method available - if (typeof sandbox.setTimeout === 'function') { - sandbox.setTimeout(appConfig.e2b.timeoutMs); - console.log(`[create-ai-sandbox] Set sandbox timeout to ${appConfig.e2b.timeoutMinutes} minutes`); - } - // Initialize sandbox state global.sandboxState = { fileCache: { @@ -319,7 +332,7 @@ print('✓ Tailwind CSS should be loaded') sandbox, sandboxData: { sandboxId, - url: `https://${host}` + url: sandboxUrl } }; @@ -333,14 +346,22 
@@ print('✓ Tailwind CSS should be loaded') global.existingFiles.add('tailwind.config.js'); global.existingFiles.add('postcss.config.js'); - console.log('[create-ai-sandbox] Sandbox ready at:', `https://${host}`); + console.log('[create-ai-sandbox] Sandbox ready at:', sandboxUrl); - return NextResponse.json({ + const result = { success: true, sandboxId, - url: `https://${host}`, - message: 'Sandbox created and Vite React app initialized' - }); + url: sandboxUrl, + message: 'Vercel sandbox created and Vite React app initialized' + }; + + // Store the result for reuse + global.sandboxData = { + ...global.sandboxData, + ...result + }; + + return result; } catch (error) { console.error('[create-ai-sandbox] Error:', error); @@ -348,18 +369,16 @@ print('✓ Tailwind CSS should be loaded') // Clean up on error if (sandbox) { try { - await sandbox.kill(); + await sandbox.stop(); } catch (e) { - console.error('Failed to close sandbox on error:', e); + console.error('Failed to stop sandbox on error:', e); } } - return NextResponse.json( - { - error: error instanceof Error ? error.message : 'Failed to create sandbox', - details: error instanceof Error ? 
error.stack : undefined - }, - { status: 500 } - ); + // Clear global state on error + global.activeSandbox = null; + global.sandboxData = null; + + throw error; // Throw to be caught by the outer handler } } \ No newline at end of file diff --git a/app/api/create-zip/route.ts b/app/api/create-zip/route.ts index 221c843..91418c4 100644 --- a/app/api/create-zip/route.ts +++ b/app/api/create-zip/route.ts @@ -1,10 +1,10 @@ -import { NextRequest, NextResponse } from 'next/server'; +import { NextResponse } from 'next/server'; declare global { var activeSandbox: any; } -export async function POST(request: NextRequest) { +export async function POST() { try { if (!global.activeSandbox) { return NextResponse.json({ @@ -15,41 +15,37 @@ export async function POST(request: NextRequest) { console.log('[create-zip] Creating project zip...'); - // Create zip file in sandbox - const result = await global.activeSandbox.runCode(` -import zipfile -import os -import json - -os.chdir('/home/user/app') - -# Create zip file -with zipfile.ZipFile('/tmp/project.zip', 'w', zipfile.ZIP_DEFLATED) as zipf: - for root, dirs, files in os.walk('.'): - # Skip node_modules and .git - dirs[:] = [d for d in dirs if d not in ['node_modules', '.git', '.next', 'dist']] - - for file in files: - file_path = os.path.join(root, file) - arcname = os.path.relpath(file_path, '.') - zipf.write(file_path, arcname) - -# Get file size -file_size = os.path.getsize('/tmp/project.zip') -print(f" Created project.zip ({file_size} bytes)") - `); + // Create zip file in sandbox using standard commands + const zipResult = await global.activeSandbox.runCommand({ + cmd: 'bash', + args: ['-c', `zip -r /tmp/project.zip . 
-x "node_modules/*" ".git/*" ".next/*" "dist/*" "build/*" "*.log"`] + }); + + if (zipResult.exitCode !== 0) { + const error = await zipResult.stderr(); + throw new Error(`Failed to create zip: ${error}`); + } + + const sizeResult = await global.activeSandbox.runCommand({ + cmd: 'bash', + args: ['-c', `ls -la /tmp/project.zip | awk '{print $5}'`] + }); + + const fileSize = await sizeResult.stdout(); + console.log(`[create-zip] Created project.zip (${fileSize.trim()} bytes)`); // Read the zip file and convert to base64 - const readResult = await global.activeSandbox.runCode(` -import base64 - -with open('/tmp/project.zip', 'rb') as f: - content = f.read() - encoded = base64.b64encode(content).decode('utf-8') - print(encoded) - `); + const readResult = await global.activeSandbox.runCommand({ + cmd: 'base64', + args: ['/tmp/project.zip'] + }); - const base64Content = readResult.logs.stdout.join('').trim(); + if (readResult.exitCode !== 0) { + const error = await readResult.stderr(); + throw new Error(`Failed to read zip file: ${error}`); + } + + const base64Content = (await readResult.stdout()).trim(); // Create a data URL for download const dataUrl = `data:application/zip;base64,${base64Content}`; @@ -57,15 +53,18 @@ with open('/tmp/project.zip', 'rb') as f: return NextResponse.json({ success: true, dataUrl, - fileName: 'e2b-project.zip', + fileName: 'vercel-sandbox-project.zip', message: 'Zip file created successfully' }); } catch (error) { console.error('[create-zip] Error:', error); - return NextResponse.json({ - success: false, - error: (error as Error).message - }, { status: 500 }); + return NextResponse.json( + { + success: false, + error: (error as Error).message + }, + { status: 500 } + ); } } \ No newline at end of file diff --git a/app/api/detect-and-install-packages/route.ts b/app/api/detect-and-install-packages/route.ts index 12211b6..a2feaf9 100644 --- a/app/api/detect-and-install-packages/route.ts +++ b/app/api/detect-and-install-packages/route.ts @@ 
-64,15 +64,7 @@ export async function POST(request: NextRequest) { const builtins = ['fs', 'path', 'http', 'https', 'crypto', 'stream', 'util', 'os', 'url', 'querystring', 'child_process']; if (builtins.includes(imp)) return false; - // Extract package name (handle scoped packages and subpaths) - const parts = imp.split('/'); - if (imp.startsWith('@')) { - // Scoped package like @vitejs/plugin-react - return true; - } else { - // Regular package, return just the first part - return true; - } + return true; }); // Extract just the package names (without subpaths) @@ -101,153 +93,90 @@ export async function POST(request: NextRequest) { } // Check which packages are already installed - const checkResult = await global.activeSandbox.runCode(` -import os -import json - -installed = [] -missing = [] - -packages = ${JSON.stringify(uniquePackages)} - -for package in packages: - # Handle scoped packages - if package.startswith('@'): - package_path = f"/home/user/app/node_modules/{package}" - else: - package_path = f"/home/user/app/node_modules/{package}" + const installed: string[] = []; + const missing: string[] = []; - if os.path.exists(package_path): - installed.append(package) - else: - missing.append(package) + for (const packageName of uniquePackages) { + try { + const checkResult = await global.activeSandbox.runCommand({ + cmd: 'test', + args: ['-d', `node_modules/${packageName}`] + }); + + if (checkResult.exitCode === 0) { + installed.push(packageName); + } else { + missing.push(packageName); + } + } catch (checkError) { + // If test command fails, assume package is missing + console.debug(`Package check failed for ${packageName}:`, checkError); + missing.push(packageName); + } + } -result = { - 'installed': installed, - 'missing': missing -} + console.log('[detect-and-install-packages] Package status:', { installed, missing }); -print(json.dumps(result)) - `); - - const status = JSON.parse(checkResult.logs.stdout.join('')); - 
console.log('[detect-and-install-packages] Package status:', status); - - if (status.missing.length === 0) { + if (missing.length === 0) { return NextResponse.json({ success: true, packagesInstalled: [], - packagesAlreadyInstalled: status.installed, + packagesAlreadyInstalled: installed, message: 'All packages already installed' }); } // Install missing packages - console.log('[detect-and-install-packages] Installing packages:', status.missing); + console.log('[detect-and-install-packages] Installing packages:', missing); - const installResult = await global.activeSandbox.runCode(` -import subprocess -import os -import json + const installResult = await global.activeSandbox.runCommand({ + cmd: 'npm', + args: ['install', '--save', ...missing] + }); -os.chdir('/home/user/app') -packages_to_install = ${JSON.stringify(status.missing)} - -# Join packages into a single install command -packages_str = ' '.join(packages_to_install) -cmd = f'npm install {packages_str} --save' - -print(f"Running: {cmd}") - -# Run npm install with explicit save flag -result = subprocess.run(['npm', 'install', '--save'] + packages_to_install, - capture_output=True, - text=True, - cwd='/home/user/app', - timeout=60) - -print("stdout:", result.stdout) -if result.stderr: - print("stderr:", result.stderr) - -# Verify installation -installed = [] -failed = [] - -for package in packages_to_install: - # Handle scoped packages correctly - if package.startswith('@'): - # For scoped packages like @heroicons/react - package_path = f"/home/user/app/node_modules/{package}" - else: - package_path = f"/home/user/app/node_modules/{package}" + const stdout = await installResult.stdout(); + const stderr = await installResult.stderr(); - if os.path.exists(package_path): - installed.append(package) - print(f"✓ Verified installation of {package}") - else: - # Check if it's a submodule of an installed package - base_package = package.split('/')[0] - if package.startswith('@'): - # For @scope/package, the base is 
@scope/package - base_package = '/'.join(package.split('/')[:2]) - - base_path = f"/home/user/app/node_modules/{base_package}" - if os.path.exists(base_path): - installed.append(package) - print(f"✓ Verified installation of {package} (via {base_package})") - else: - failed.append(package) - print(f"✗ Failed to verify installation of {package}") - -result_data = { - 'installed': installed, - 'failed': failed, - 'returncode': result.returncode -} - -print("\\nResult:", json.dumps(result_data)) - `, { timeout: 60000 }); - - // Parse the result more safely - let installStatus; - try { - const stdout = installResult.logs.stdout.join(''); - const resultMatch = stdout.match(/Result:\s*({.*})/); - if (resultMatch) { - installStatus = JSON.parse(resultMatch[1]); - } else { - // Fallback parsing - const lines = stdout.split('\n'); - const resultLine = lines.find((line: string) => line.includes('Result:')); - if (resultLine) { - installStatus = JSON.parse(resultLine.split('Result:')[1].trim()); - } else { - throw new Error('Could not find Result in output'); - } - } - } catch (parseError) { - console.error('[detect-and-install-packages] Failed to parse install result:', parseError); - console.error('[detect-and-install-packages] stdout:', installResult.logs.stdout.join('')); - // Fallback to assuming all packages were installed - installStatus = { - installed: status.missing, - failed: [], - returncode: 0 - }; + console.log('[detect-and-install-packages] Install stdout:', stdout); + if (stderr) { + console.log('[detect-and-install-packages] Install stderr:', stderr); } - if (installStatus.failed.length > 0) { - console.error('[detect-and-install-packages] Failed to install:', installStatus.failed); + // Verify installation + const finalInstalled: string[] = []; + const failed: string[] = []; + + for (const packageName of missing) { + try { + const verifyResult = await global.activeSandbox.runCommand({ + cmd: 'test', + args: ['-d', `node_modules/${packageName}`] + }); + + if 
(verifyResult.exitCode === 0) { + finalInstalled.push(packageName); + console.log(`✓ Verified installation of ${packageName}`); + } else { + failed.push(packageName); + console.log(`✗ Failed to verify installation of ${packageName}`); + } + } catch (error) { + failed.push(packageName); + console.log(`✗ Error verifying ${packageName}:`, error); + } + } + + if (failed.length > 0) { + console.error('[detect-and-install-packages] Failed to install:', failed); } return NextResponse.json({ success: true, - packagesInstalled: installStatus.installed, - packagesFailed: installStatus.failed, - packagesAlreadyInstalled: status.installed, - message: `Installed ${installStatus.installed.length} packages`, - logs: installResult.logs.stdout.join('\n') + packagesInstalled: finalInstalled, + packagesFailed: failed, + packagesAlreadyInstalled: installed, + message: `Installed ${finalInstalled.length} packages`, + logs: stdout }); } catch (error) { diff --git a/app/api/extract-brand-styles/route.ts b/app/api/extract-brand-styles/route.ts new file mode 100644 index 0000000..202e837 --- /dev/null +++ b/app/api/extract-brand-styles/route.ts @@ -0,0 +1,72 @@ +import { NextRequest, NextResponse } from 'next/server'; + +export async function POST(request: NextRequest) { + try { + const body = await request.json(); + const url = body.url; + const prompt = body.prompt; + + console.log('[extract-brand-styles] Extracting brand styles for:', url); + console.log('[extract-brand-styles] User prompt:', prompt); + + // Call Firecrawl API to extract branding information + const FIRECRAWL_API_KEY = process.env.FIRECRAWL_API_KEY; + + if (!FIRECRAWL_API_KEY) { + console.error('[extract-brand-styles] No Firecrawl API key found'); + throw new Error('Firecrawl API key not configured'); + } + + console.log('[extract-brand-styles] Calling Firecrawl branding API for:', url); + + const firecrawlResponse = await fetch('https://api.firecrawl.dev/v2/scrape', { + method: 'POST', + headers: { + 'Authorization': 
`Bearer ${FIRECRAWL_API_KEY}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + url: url, + formats: ['branding'], + }), + }); + + if (!firecrawlResponse.ok) { + const errorText = await firecrawlResponse.text(); + console.error('[extract-brand-styles] Firecrawl API error:', firecrawlResponse.status, errorText); + throw new Error(`Firecrawl API returned ${firecrawlResponse.status}`); + } + + const firecrawlData = await firecrawlResponse.json(); + console.log('[extract-brand-styles] Firecrawl response received successfully'); + + // Extract branding data from response + const brandingData = firecrawlData.data?.branding || firecrawlData.branding; + + if (!brandingData) { + console.error('[extract-brand-styles] No branding data in Firecrawl response'); + console.log('[extract-brand-styles] Response structure:', JSON.stringify(firecrawlData, null, 2)); + throw new Error('No branding data in Firecrawl response'); + } + + console.log('[extract-brand-styles] Successfully extracted branding data'); + + // Return the branding data + return NextResponse.json({ + success: true, + url, + styleName: brandingData.name || url, + guidelines: brandingData, + }); + + } catch (error) { + console.error('[extract-brand-styles] Error occurred:', error); + return NextResponse.json( + { + success: false, + error: error instanceof Error ? 
error.message : 'Failed to extract brand styles' + }, + { status: 500 } + ); + } +} diff --git a/app/api/generate-ai-code-stream/route.ts b/app/api/generate-ai-code-stream/route.ts index eaae15d..6a8f6aa 100644 --- a/app/api/generate-ai-code-stream/route.ts +++ b/app/api/generate-ai-code-stream/route.ts @@ -11,21 +11,37 @@ import { FileManifest } from '@/types/file-manifest'; import type { ConversationState, ConversationMessage, ConversationEdit } from '@/types/conversation'; import { appConfig } from '@/config/app.config'; +// Force dynamic route to enable streaming +export const dynamic = 'force-dynamic'; + +// Check if we're using Vercel AI Gateway +const isUsingAIGateway = !!process.env.AI_GATEWAY_API_KEY; +const aiGatewayBaseURL = 'https://ai-gateway.vercel.sh/v1'; + +console.log('[generate-ai-code-stream] AI Gateway config:', { + isUsingAIGateway, + hasGroqKey: !!process.env.GROQ_API_KEY, + hasAIGatewayKey: !!process.env.AI_GATEWAY_API_KEY +}); + const groq = createGroq({ - apiKey: process.env.GROQ_API_KEY, + apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.GROQ_API_KEY, + baseURL: isUsingAIGateway ? aiGatewayBaseURL : undefined, }); const anthropic = createAnthropic({ - apiKey: process.env.ANTHROPIC_API_KEY, - baseURL: process.env.ANTHROPIC_BASE_URL || 'https://api.anthropic.com/v1', + apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.ANTHROPIC_API_KEY, + baseURL: isUsingAIGateway ? aiGatewayBaseURL : (process.env.ANTHROPIC_BASE_URL || 'https://api.anthropic.com/v1'), }); const googleGenerativeAI = createGoogleGenerativeAI({ - apiKey: process.env.GEMINI_API_KEY, + apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.GEMINI_API_KEY, + baseURL: isUsingAIGateway ? aiGatewayBaseURL : undefined, }); const openai = createOpenAI({ - apiKey: process.env.OPENAI_API_KEY, + apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.OPENAI_API_KEY, + baseURL: isUsingAIGateway ? 
aiGatewayBaseURL : process.env.OPENAI_BASE_URL, }); // Helper function to analyze user preferences from conversation history @@ -142,10 +158,18 @@ export async function POST(request: NextRequest) { const stream = new TransformStream(); const writer = stream.writable.getWriter(); - // Function to send progress updates + // Function to send progress updates with flushing const sendProgress = async (data: any) => { const message = `data: ${JSON.stringify(data)}\n\n`; - await writer.write(encoder.encode(message)); + try { + await writer.write(encoder.encode(message)); + // Force flush by writing a keep-alive comment + if (data.type === 'stream' || data.type === 'conversation') { + await writer.write(encoder.encode(': keepalive\n\n')); + } + } catch (error) { + console.error('[generate-ai-code-stream] Error writing to stream:', error); + } }; // Start processing in background @@ -170,7 +194,7 @@ export async function POST(request: NextRequest) { if (manifest) { await sendProgress({ type: 'status', message: '🔍 Creating search plan...' 
}); - const fileContents = global.sandboxState.fileCache.files; + const fileContents = global.sandboxState.fileCache?.files || {}; console.log('[generate-ai-code-stream] Files available for search:', Object.keys(fileContents).length); // STEP 1: Get search plan from AI @@ -220,8 +244,9 @@ export async function POST(request: NextRequest) { console.log('[generate-ai-code-stream] Target selected:', target); // Create surgical edit context with exact location - const normalizedPath = target.filePath.replace('/home/user/app/', ''); - const fileContent = fileContents[normalizedPath]?.content || ''; + // normalizedPath would be: target.filePath.replace('/home/user/app/', ''); + // fileContent available but not used in current implementation + // const fileContent = fileContents[normalizedPath]?.content || ''; // Build enhanced context with search results enhancedSystemPrompt = ` @@ -331,7 +356,7 @@ User request: "${prompt}"`; // For now, fall back to keyword search since we don't have file contents for search execution // This path happens when no manifest was initially available - let targetFiles = []; + let targetFiles: any[] = []; if (!searchPlan || searchPlan.searchTerms.length === 0) { console.warn('[generate-ai-code-stream] No target files after fetch, searching for relevant files'); @@ -551,7 +576,7 @@ Remember: You are a SURGEON making a precise incision, not an artist repainting } // Build system prompt with conversation awareness - const systemPrompt = `You are an expert React developer with perfect memory of the conversation. You maintain context across messages and remember scraped websites, generated components, and applied code. Generate clean, modern React code for Vite applications. + let systemPrompt = `You are an expert React developer with perfect memory of the conversation. You maintain context across messages and remember scraped websites, generated components, and applied code. Generate clean, modern React code for Vite applications. 
${conversationContext} 🚨 CRITICAL RULES - YOUR MOST IMPORTANT INSTRUCTIONS: @@ -569,6 +594,11 @@ ${conversationContext} - Simple style/text change = 1 file ONLY - New component = 2 files MAX (component + parent) - If >3 files, YOU'RE DOING TOO MUCH +6. **DO NOT CREATE SVGs FROM SCRATCH**: + - NEVER generate custom SVG code unless explicitly asked + - Use existing icon libraries (lucide-react, heroicons, etc.) + - Or use placeholder elements/text if icons are not critical + - Only create custom SVGs when user specifically requests "create an SVG" or "draw an SVG" COMPONENT RELATIONSHIPS (CHECK THESE FIRST): - Navigation usually lives INSIDE Header.jsx, not separate Nav.jsx @@ -897,6 +927,24 @@ CRITICAL: When files are provided in the context: 4. Do NOT ask to see files - they are already provided in the context above 5. Make the requested change immediately`; + // If Morph Fast Apply is enabled (edit mode + MORPH_API_KEY), force block output + const morphFastApplyEnabled = Boolean(isEdit && process.env.MORPH_API_KEY); + if (morphFastApplyEnabled) { + systemPrompt += ` + +MORPH FAST APPLY MODE (EDIT-ONLY): +- Output edits as blocks, not full blocks, for files that already exist. +- Format for each edit: + + Describe the minimal change, single sentence. + Provide the SMALLEST code snippet necessary to perform the change. + +- Only use blocks when you must CREATE a brand-new file. +- Prefer ONE edit block for a simple change; multiple edits only if absolutely needed for separate files. +- Keep updates minimal and precise; do not rewrite entire files. 
+`; + } + // Build full prompt with context let fullPrompt = prompt; if (context) { @@ -955,13 +1003,15 @@ CRITICAL: When files are provided in the context: // Store files in cache for (const [path, content] of Object.entries(filesData.files)) { const normalizedPath = path.replace('/home/user/app/', ''); - global.sandboxState.fileCache.files[normalizedPath] = { - content: content as string, - lastModified: Date.now() - }; + if (global.sandboxState.fileCache) { + global.sandboxState.fileCache.files[normalizedPath] = { + content: content as string, + lastModified: Date.now() + }; + } } - if (filesData.manifest) { + if (filesData.manifest && global.sandboxState.fileCache) { global.sandboxState.fileCache.manifest = filesData.manifest; // Now try to analyze edit intent with the fetched manifest @@ -993,7 +1043,7 @@ CRITICAL: When files are provided in the context: } // Update variables - backendFiles = global.sandboxState.fileCache.files; + backendFiles = global.sandboxState.fileCache?.files || {}; hasBackendFiles = Object.keys(backendFiles).length > 0; console.log('[generate-ai-code-stream] Updated backend cache with fetched files'); } @@ -1140,6 +1190,17 @@ CRITICAL: When files are provided in the context: } if (contextParts.length > 0) { + if (morphFastApplyEnabled) { + contextParts.push('\nOUTPUT FORMAT (REQUIRED IN MORPH MODE):'); + contextParts.push(''); + contextParts.push('Minimal, precise instruction.'); + contextParts.push('// Smallest necessary snippet'); + contextParts.push(''); + contextParts.push('\nIf you need to create a NEW file, then and only then output a full file:'); + contextParts.push(''); + contextParts.push('// Full file content when creating new files'); + contextParts.push(''); + } fullPrompt = `CONTEXT:\n${contextParts.join('\n')}\n\nUSER REQUEST:\n${prompt}`; } } @@ -1154,11 +1215,32 @@ CRITICAL: When files are provided in the context: // Determine which provider to use based on model const isAnthropic = model.startsWith('anthropic/'); const 
isGoogle = model.startsWith('google/'); - const isOpenAI = model.startsWith('openai/gpt-5'); - const modelProvider = isAnthropic ? anthropic : (isOpenAI ? openai : (isGoogle ? googleGenerativeAI : groq)); - const actualModel = isAnthropic ? model.replace('anthropic/', '') : - (model === 'openai/gpt-5') ? 'gpt-5' : - (isGoogle ? model.replace('google/', '') : model); + const isOpenAI = model.startsWith('openai/'); + const isKimiGroq = model === 'moonshotai/kimi-k2-instruct-0905'; + const modelProvider = isAnthropic ? anthropic : + (isOpenAI ? openai : + (isGoogle ? googleGenerativeAI : + (isKimiGroq ? groq : groq))); + + // Fix model name transformation for different providers + let actualModel: string; + if (isAnthropic) { + actualModel = model.replace('anthropic/', ''); + } else if (isOpenAI) { + actualModel = model.replace('openai/', ''); + } else if (isKimiGroq) { + // Kimi on Groq - use full model string + actualModel = 'moonshotai/kimi-k2-instruct-0905'; + } else if (isGoogle) { + // Google uses specific model names - convert our naming to theirs + actualModel = model.replace('google/', ''); + } else { + actualModel = model; + } + + console.log(`[generate-ai-code-stream] Using provider: ${isAnthropic ? 'Anthropic' : isGoogle ? 'Google' : isOpenAI ? 
'OpenAI' : 'Groq'}, model: ${actualModel}`); + console.log(`[generate-ai-code-stream] AI Gateway enabled: ${isUsingAIGateway}`); + console.log(`[generate-ai-code-stream] Model string: ${model}`); // Make streaming API call with appropriate provider const streamOptions: any = { @@ -1243,7 +1325,61 @@ It's better to have 3 complete files than 10 incomplete files.` }; } - const result = await streamText(streamOptions); + let result; + let retryCount = 0; + const maxRetries = 2; + + while (retryCount <= maxRetries) { + try { + result = await streamText(streamOptions); + break; // Success, exit retry loop + } catch (streamError: any) { + console.error(`[generate-ai-code-stream] Error calling streamText (attempt ${retryCount + 1}/${maxRetries + 1}):`, streamError); + + // Check if this is a Groq service unavailable error + const isGroqServiceError = isKimiGroq && streamError.message?.includes('Service unavailable'); + const isRetryableError = streamError.message?.includes('Service unavailable') || + streamError.message?.includes('rate limit') || + streamError.message?.includes('timeout'); + + if (retryCount < maxRetries && isRetryableError) { + retryCount++; + console.log(`[generate-ai-code-stream] Retrying in ${retryCount * 2} seconds...`); + + // Send progress update about retry + await sendProgress({ + type: 'info', + message: `Service temporarily unavailable, retrying (attempt ${retryCount + 1}/${maxRetries + 1})...` + }); + + // Wait before retry with exponential backoff + await new Promise(resolve => setTimeout(resolve, retryCount * 2000)); + + // If Groq fails, try switching to a fallback model + if (isGroqServiceError && retryCount === maxRetries) { + console.log('[generate-ai-code-stream] Groq service unavailable, falling back to GPT-4'); + streamOptions.model = openai('gpt-4-turbo'); + actualModel = 'gpt-4-turbo'; + } + } else { + // Final error, send to user + await sendProgress({ + type: 'error', + message: `Failed to initialize ${isGoogle ? 
'Gemini' : isAnthropic ? 'Claude' : isOpenAI ? 'GPT-5' : isKimiGroq ? 'Kimi (Groq)' : 'Groq'} streaming: ${streamError.message}` + }); + + // If this is a Google model error, provide helpful info + if (isGoogle) { + await sendProgress({ + type: 'info', + message: 'Tip: Make sure your GEMINI_API_KEY is set correctly and has proper permissions.' + }); + } + + throw streamError; + } + } + } // Stream the response and parse in real-time let generatedCode = ''; @@ -1258,7 +1394,7 @@ It's better to have 3 complete files than 10 incomplete files.` let tagBuffer = ''; // Stream the response and parse for packages in real-time - for await (const textPart of result.textStream) { + for await (const textPart of result?.textStream || []) { const text = textPart || ''; generatedCode += text; currentFile += text; @@ -1301,6 +1437,11 @@ It's better to have 3 complete files than 10 incomplete files.` raw: true }); + // Debug: Log every 100 characters streamed + if (generatedCode.length % 100 < text.length) { + console.log(`[generate-ai-code-stream] Streamed ${generatedCode.length} chars`); + } + // Check for package tags in buffered text (ONLY for edits, not initial generation) let lastIndex = 0; if (isEdit) { @@ -1590,12 +1731,28 @@ Provide the complete file content without any truncation. 
Include all necessary completionClient = openai; } else if (model.includes('claude')) { completionClient = anthropic; + } else if (model === 'moonshotai/kimi-k2-instruct-0905') { + completionClient = groq; } else { completionClient = groq; } + // Determine the correct model name for the completion + let completionModelName: string; + if (model === 'moonshotai/kimi-k2-instruct-0905') { + completionModelName = 'moonshotai/kimi-k2-instruct-0905'; + } else if (model.includes('openai')) { + completionModelName = model.replace('openai/', ''); + } else if (model.includes('anthropic')) { + completionModelName = model.replace('anthropic/', ''); + } else if (model.includes('google')) { + completionModelName = model.replace('google/', ''); + } else { + completionModelName = model; + } + const completionResult = await streamText({ - model: completionClient(modelMapping[model] || model), + model: completionClient(completionModelName), messages: [ { role: 'system', @@ -1603,8 +1760,7 @@ Provide the complete file content without any truncation. Include all necessary }, { role: 'user', content: completionPrompt } ], - temperature: isGPT5 ? undefined : appConfig.ai.defaultTemperature, - maxTokens: appConfig.ai.truncationRecoveryMaxTokens + temperature: model.startsWith('openai/gpt-5') ? undefined : appConfig.ai.defaultTemperature }); // Get the full text from the stream @@ -1715,12 +1871,18 @@ Provide the complete file content without any truncation. 
Include all necessary } })(); - // Return the stream + // Return the stream with proper headers for streaming support return new Response(stream.readable, { headers: { 'Content-Type': 'text/event-stream', 'Cache-Control': 'no-cache', 'Connection': 'keep-alive', + 'Transfer-Encoding': 'chunked', + 'Content-Encoding': 'none', // Prevent compression that can break streaming + 'X-Accel-Buffering': 'no', // Disable nginx buffering + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Methods': 'GET, POST, OPTIONS', + 'Access-Control-Allow-Headers': 'Content-Type, Authorization', }, }); diff --git a/app/api/get-sandbox-files/route.ts b/app/api/get-sandbox-files/route.ts index d892046..94df887 100644 --- a/app/api/get-sandbox-files/route.ts +++ b/app/api/get-sandbox-files/route.ts @@ -1,7 +1,7 @@ import { NextResponse } from 'next/server'; import { parseJavaScriptFile, buildComponentTree } from '@/lib/file-parser'; import { FileManifest, FileInfo, RouteInfo } from '@/types/file-manifest'; -import type { SandboxState } from '@/types/sandbox'; +// SandboxState type used implicitly through global.activeSandbox declare global { var activeSandbox: any; @@ -18,58 +18,82 @@ export async function GET() { console.log('[get-sandbox-files] Fetching and analyzing file structure...'); - // Get all React/JS/CSS files - const result = await global.activeSandbox.runCode(` -import os -import json - -def get_files_content(directory='/home/user/app', extensions=['.jsx', '.js', '.tsx', '.ts', '.css', '.json']): - files_content = {} + // Get list of all relevant files + const findResult = await global.activeSandbox.runCommand({ + cmd: 'find', + args: [ + '.', + '-name', 'node_modules', '-prune', '-o', + '-name', '.git', '-prune', '-o', + '-name', 'dist', '-prune', '-o', + '-name', 'build', '-prune', '-o', + '-type', 'f', + '(', + '-name', '*.jsx', + '-o', '-name', '*.js', + '-o', '-name', '*.tsx', + '-o', '-name', '*.ts', + '-o', '-name', '*.css', + '-o', '-name', '*.json', + ')', + 
'-print' + ] + }); - for root, dirs, files in os.walk(directory): - # Skip node_modules and other unwanted directories - dirs[:] = [d for d in dirs if d not in ['node_modules', '.git', 'dist', 'build']] + if (findResult.exitCode !== 0) { + throw new Error('Failed to list files'); + } + + const fileList = (await findResult.stdout()).split('\n').filter((f: string) => f.trim()); + console.log('[get-sandbox-files] Found', fileList.length, 'files'); + + // Read content of each file (limit to reasonable sizes) + const filesContent: Record = {}; + + for (const filePath of fileList) { + try { + // Check file size first + const statResult = await global.activeSandbox.runCommand({ + cmd: 'stat', + args: ['-f', '%z', filePath] + }); - for file in files: - if any(file.endswith(ext) for ext in extensions): - file_path = os.path.join(root, file) - relative_path = os.path.relpath(file_path, '/home/user/app') - - try: - with open(file_path, 'r') as f: - content = f.read() - # Only include files under 10KB to avoid huge responses - if len(content) < 10000: - files_content[relative_path] = content - except: - pass + if (statResult.exitCode === 0) { + const fileSize = parseInt(await statResult.stdout()); + + // Only read files smaller than 10KB + if (fileSize < 10000) { + const catResult = await global.activeSandbox.runCommand({ + cmd: 'cat', + args: [filePath] + }); + + if (catResult.exitCode === 0) { + const content = await catResult.stdout(); + // Remove leading './' from path + const relativePath = filePath.replace(/^\.\//, ''); + filesContent[relativePath] = content; + } + } + } + } catch (parseError) { + console.debug('Error parsing component info:', parseError); + // Skip files that can't be read + continue; + } + } - return files_content - -# Get the files -files = get_files_content() - -# Also get the directory structure -structure = [] -for root, dirs, files in os.walk('/home/user/app'): - level = root.replace('/home/user/app', '').count(os.sep) - indent = ' ' * 2 * level - 
structure.append(f"{indent}{os.path.basename(root)}/") - sub_indent = ' ' * 2 * (level + 1) - for file in files: - if not any(skip in root for skip in ['node_modules', '.git', 'dist', 'build']): - structure.append(f"{sub_indent}{file}") - -result = { - 'files': files, - 'structure': '\\n'.join(structure[:50]) # Limit structure to 50 lines -} - -print(json.dumps(result)) - `); - - const output = result.logs.stdout.join(''); - const parsedResult = JSON.parse(output); + // Get directory structure + const treeResult = await global.activeSandbox.runCommand({ + cmd: 'find', + args: ['.', '-type', 'd', '-not', '-path', '*/node_modules*', '-not', '-path', '*/.git*'] + }); + + let structure = ''; + if (treeResult.exitCode === 0) { + const dirs = (await treeResult.stdout()).split('\n').filter((d: string) => d.trim()); + structure = dirs.slice(0, 50).join('\n'); // Limit to 50 lines + } // Build enhanced file manifest const fileManifest: FileManifest = { @@ -82,12 +106,12 @@ print(json.dumps(result)) }; // Process each file - for (const [relativePath, content] of Object.entries(parsedResult.files)) { - const fullPath = `/home/user/app/${relativePath}`; + for (const [relativePath, content] of Object.entries(filesContent)) { + const fullPath = `/${relativePath}`; // Create base file info const fileInfo: FileInfo = { - content: content as string, + content: content, type: 'utility', path: fullPath, relativePath, @@ -96,7 +120,7 @@ print(json.dumps(result)) // Parse JavaScript/JSX files if (relativePath.match(/\.(jsx?|tsx?)$/)) { - const parseResult = parseJavaScriptFile(content as string, fullPath); + const parseResult = parseJavaScriptFile(content, fullPath); Object.assign(fileInfo, parseResult); // Identify entry point @@ -132,9 +156,9 @@ print(json.dumps(result)) return NextResponse.json({ success: true, - files: parsedResult.files, - structure: parsedResult.structure, - fileCount: Object.keys(parsedResult.files).length, + files: filesContent, + structure, + fileCount: 
Object.keys(filesContent).length, manifest: fileManifest, }); @@ -157,7 +181,8 @@ function extractRoutes(files: Record): RouteInfo[] { const routeMatches = fileInfo.content.matchAll(/path=["']([^"']+)["'].*(?:element|component)={([^}]+)}/g); for (const match of routeMatches) { - const [, routePath, componentRef] = match; + const [, routePath] = match; + // componentRef available in match but not used currently routes.push({ path: routePath, component: path, diff --git a/app/api/install-packages-v2/route.ts b/app/api/install-packages-v2/route.ts new file mode 100644 index 0000000..3797898 --- /dev/null +++ b/app/api/install-packages-v2/route.ts @@ -0,0 +1,48 @@ +import { NextRequest, NextResponse } from 'next/server'; +import { SandboxProvider } from '@/lib/sandbox/types'; +import { sandboxManager } from '@/lib/sandbox/sandbox-manager'; + +declare global { + var activeSandboxProvider: any; +} + +export async function POST(request: NextRequest) { + try { + const { packages } = await request.json(); + + if (!packages || !Array.isArray(packages) || packages.length === 0) { + return NextResponse.json({ + success: false, + error: 'Packages array is required' + }, { status: 400 }); + } + + // Get provider from sandbox manager or global state + const provider = sandboxManager.getActiveProvider() || global.activeSandboxProvider; + + if (!provider) { + return NextResponse.json({ + success: false, + error: 'No active sandbox' + }, { status: 400 }); + } + + console.log(`[install-packages-v2] Installing: ${packages.join(', ')}`); + + const result = await provider.installPackages(packages); + + return NextResponse.json({ + success: result.success, + output: result.stdout, + error: result.stderr, + message: result.success ? 
'Packages installed successfully' : 'Package installation failed' + }); + + } catch (error) { + console.error('[install-packages-v2] Error:', error); + return NextResponse.json({ + success: false, + error: (error as Error).message + }, { status: 500 }); + } +} \ No newline at end of file diff --git a/app/api/install-packages/route.ts b/app/api/install-packages/route.ts index 59d305e..6e36da4 100644 --- a/app/api/install-packages/route.ts +++ b/app/api/install-packages/route.ts @@ -1,14 +1,15 @@ import { NextRequest, NextResponse } from 'next/server'; -import { Sandbox } from '@e2b/code-interpreter'; declare global { var activeSandbox: any; + var activeSandboxProvider: any; var sandboxData: any; } export async function POST(request: NextRequest) { try { - const { packages, sandboxId } = await request.json(); + const { packages } = await request.json(); + // sandboxId not used - using global sandbox if (!packages || !Array.isArray(packages) || packages.length === 0) { return NextResponse.json({ @@ -36,32 +37,17 @@ export async function POST(request: NextRequest) { console.log(`[install-packages] Cleaned:`, validPackages); } - // Try to get sandbox - either from global or reconnect - let sandbox = global.activeSandbox; + // Get active sandbox provider + const provider = global.activeSandboxProvider; - if (!sandbox && sandboxId) { - console.log(`[install-packages] Reconnecting to sandbox ${sandboxId}...`); - try { - sandbox = await Sandbox.connect(sandboxId, { apiKey: process.env.E2B_API_KEY }); - global.activeSandbox = sandbox; - console.log(`[install-packages] Successfully reconnected to sandbox ${sandboxId}`); - } catch (error) { - console.error(`[install-packages] Failed to reconnect to sandbox:`, error); - return NextResponse.json({ - success: false, - error: `Failed to reconnect to sandbox: ${(error as Error).message}` - }, { status: 500 }); - } - } - - if (!sandbox) { + if (!provider) { return NextResponse.json({ success: false, - error: 'No active sandbox 
available' + error: 'No active sandbox provider available' }, { status: 400 }); } - console.log('[install-packages] Installing packages:', packages); + console.log('[install-packages] Installing packages:', validPackages); // Create a response stream for real-time updates const encoder = new TextEncoder(); @@ -75,7 +61,7 @@ export async function POST(request: NextRequest) { }; // Start installation in background - (async (sandboxInstance) => { + (async (providerInstance) => { try { await sendProgress({ type: 'start', @@ -83,23 +69,17 @@ export async function POST(request: NextRequest) { packages: validPackages }); - // Kill any existing Vite process first + // Stop any existing development server first await sendProgress({ type: 'status', message: 'Stopping development server...' }); - await sandboxInstance.runCode(` -import subprocess -import os -import signal - -# Try to kill any existing Vite process -try: - with open('/tmp/vite-process.pid', 'r') as f: - pid = int(f.read().strip()) - os.kill(pid, signal.SIGTERM) - print("Stopped existing Vite process") -except: - print("No existing Vite process found") - `); + try { + // Try to kill any running dev server processes + await providerInstance.runCommand('pkill -f vite'); + await new Promise(resolve => setTimeout(resolve, 1000)); // Wait a bit + } catch (killError) { + // It's OK if no process is found + console.debug('[install-packages] No existing dev server found:', killError); + } // Check which packages are already installed await sendProgress({ @@ -107,70 +87,52 @@ except: message: 'Checking installed packages...' 
}); - const checkResult = await sandboxInstance.runCode(` -import os -import json - -os.chdir('/home/user/app') - -# Read package.json to check installed packages -try: - with open('package.json', 'r') as f: - package_json = json.load(f) - - dependencies = package_json.get('dependencies', {}) - dev_dependencies = package_json.get('devDependencies', {}) - all_deps = {**dependencies, **dev_dependencies} - - # Check which packages need to be installed - packages_to_check = ${JSON.stringify(validPackages)} - already_installed = [] - need_install = [] - - for pkg in packages_to_check: - # Handle scoped packages - if pkg.startswith('@'): - pkg_name = pkg - else: - # Extract package name without version - pkg_name = pkg.split('@')[0] - - if pkg_name in all_deps: - already_installed.append(pkg_name) - else: - need_install.append(pkg) - - print(f"Already installed: {already_installed}") - print(f"Need to install: {need_install}") - print(f"NEED_INSTALL:{json.dumps(need_install)}") - -except Exception as e: - print(f"Error checking packages: {e}") - print(f"NEED_INSTALL:{json.dumps(packages_to_check)}") - `); - - // Parse packages that need installation let packagesToInstall = validPackages; - // Check if checkResult has the expected structure - if (checkResult && checkResult.results && checkResult.results[0] && checkResult.results[0].text) { - const outputLines = checkResult.results[0].text.split('\n'); - for (const line of outputLines) { - if (line.startsWith('NEED_INSTALL:')) { - try { - packagesToInstall = JSON.parse(line.substring('NEED_INSTALL:'.length)); - } catch (e) { - console.error('Failed to parse packages to install:', e); + try { + // Read package.json to check existing dependencies + let packageJsonContent = ''; + try { + packageJsonContent = await providerInstance.readFile('package.json'); + } catch (error) { + console.log('[install-packages] Error reading package.json:', error); + } + if (packageJsonContent) { + const packageJson = 
JSON.parse(packageJsonContent); + + const dependencies = packageJson.dependencies || {}; + const devDependencies = packageJson.devDependencies || {}; + const allDeps = { ...dependencies, ...devDependencies }; + + const alreadyInstalled = []; + const needInstall = []; + + for (const pkg of validPackages) { + // Handle scoped packages + const pkgName = pkg.startsWith('@') ? pkg : pkg.split('@')[0]; + + if (allDeps[pkgName]) { + alreadyInstalled.push(pkgName); + } else { + needInstall.push(pkg); } } + + packagesToInstall = needInstall; + + if (alreadyInstalled.length > 0) { + await sendProgress({ + type: 'info', + message: `Already installed: ${alreadyInstalled.join(', ')}` + }); + } } - } else { - console.error('[install-packages] Invalid checkResult structure:', checkResult); + } catch (error) { + console.error('[install-packages] Error checking existing packages:', error); // If we can't check, just try to install all packages packagesToInstall = validPackages; } - if (packagesToInstall.length === 0) { await sendProgress({ type: 'success', @@ -178,164 +140,92 @@ except Exception as e: installedPackages: [], alreadyInstalled: validPackages }); + + // Restart dev server + await sendProgress({ type: 'status', message: 'Restarting development server...' 
}); + + await providerInstance.restartViteServer(); + + await sendProgress({ + type: 'complete', + message: 'Dev server restarted!', + installedPackages: [] + }); + return; } // Install only packages that aren't already installed - const packageList = packagesToInstall.join(' '); - // Only send the npm install command message if we're actually installing new packages await sendProgress({ type: 'info', message: `Installing ${packagesToInstall.length} new package(s): ${packagesToInstall.join(', ')}` }); - const installResult = await sandboxInstance.runCode(` -import subprocess -import os - -os.chdir('/home/user/app') - -# Run npm install with output capture -packages_to_install = ${JSON.stringify(packagesToInstall)} -cmd_args = ['npm', 'install', '--legacy-peer-deps'] + packages_to_install - -print(f"Running command: {' '.join(cmd_args)}") - -process = subprocess.Popen( - cmd_args, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True -) - -# Stream output -while True: - output = process.stdout.readline() - if output == '' and process.poll() is not None: - break - if output: - print(output.strip()) - -# Get the return code -rc = process.poll() - -# Capture any stderr -stderr = process.stderr.read() -if stderr: - print("STDERR:", stderr) - if 'ERESOLVE' in stderr: - print("ERESOLVE_ERROR: Dependency conflict detected - using --legacy-peer-deps flag") - -print(f"\\nInstallation completed with code: {rc}") - -# Verify packages were installed -import json -with open('/home/user/app/package.json', 'r') as f: - package_json = json.load(f) - -installed = [] -for pkg in ${JSON.stringify(packagesToInstall)}: - if pkg in package_json.get('dependencies', {}): - installed.append(pkg) - print(f"✓ Verified {pkg}") - else: - print(f"✗ Package {pkg} not found in dependencies") + // Install packages using provider method + const installResult = await providerInstance.installPackages(packagesToInstall); -print(f"\\nVerified installed packages: {installed}") - `, { timeout: 
60000 }); // 60 second timeout for npm install + // Get install output - ensure stdout/stderr are strings + const stdout = String(installResult.stdout || ''); + const stderr = String(installResult.stderr || ''); - // Send npm output - const output = installResult?.output || installResult?.logs?.stdout?.join('\n') || ''; - const npmOutputLines = output.split('\n').filter((line: string) => line.trim()); - for (const line of npmOutputLines) { - if (line.includes('STDERR:')) { - const errorMsg = line.replace('STDERR:', '').trim(); - if (errorMsg && errorMsg !== 'undefined') { - await sendProgress({ type: 'error', message: errorMsg }); + if (stdout) { + const lines = stdout.split('\n').filter(line => line.trim()); + for (const line of lines) { + if (line.includes('npm WARN')) { + await sendProgress({ type: 'warning', message: line }); + } else if (line.trim()) { + await sendProgress({ type: 'output', message: line }); } - } else if (line.includes('ERESOLVE_ERROR:')) { - const msg = line.replace('ERESOLVE_ERROR:', '').trim(); - await sendProgress({ - type: 'warning', - message: `Dependency conflict resolved with --legacy-peer-deps: ${msg}` - }); - } else if (line.includes('npm WARN')) { - await sendProgress({ type: 'warning', message: line }); - } else if (line.trim() && !line.includes('undefined')) { - await sendProgress({ type: 'output', message: line }); } } - // Check if installation was successful - const installedMatch = output.match(/Verified installed packages: \[(.*?)\]/); - let installedPackages: string[] = []; - - if (installedMatch && installedMatch[1]) { - installedPackages = installedMatch[1] - .split(',') - .map((p: string) => p.trim().replace(/'/g, '')) - .filter((p: string) => p.length > 0); + if (stderr) { + const errorLines = stderr.split('\n').filter(line => line.trim()); + for (const line of errorLines) { + if (line.includes('ERESOLVE')) { + await sendProgress({ + type: 'warning', + message: `Dependency conflict resolved with --legacy-peer-deps: 
${line}` + }); + } else if (line.trim()) { + await sendProgress({ type: 'error', message: line }); + } + } } - if (installedPackages.length > 0) { + if (installResult.exitCode === 0) { await sendProgress({ type: 'success', - message: `Successfully installed: ${installedPackages.join(', ')}`, - installedPackages + message: `Successfully installed: ${packagesToInstall.join(', ')}`, + installedPackages: packagesToInstall }); } else { await sendProgress({ type: 'error', - message: 'Failed to verify package installation' + message: 'Package installation failed' }); } - // Restart Vite dev server + // Restart development server await sendProgress({ type: 'status', message: 'Restarting development server...' }); - await sandboxInstance.runCode(` -import subprocess -import os -import time - -os.chdir('/home/user/app') - -# Kill any existing Vite processes -subprocess.run(['pkill', '-f', 'vite'], capture_output=True) -time.sleep(1) - -# Start Vite dev server -env = os.environ.copy() -env['FORCE_COLOR'] = '0' - -process = subprocess.Popen( - ['npm', 'run', 'dev'], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - env=env -) - -print(f'✓ Vite dev server restarted with PID: {process.pid}') - -# Store process info for later -with open('/tmp/vite-process.pid', 'w') as f: - f.write(str(process.pid)) - -# Wait a bit for Vite to start up -time.sleep(3) - -# Touch files to trigger Vite reload -subprocess.run(['touch', '/home/user/app/package.json']) -subprocess.run(['touch', '/home/user/app/vite.config.js']) - -print("Vite restarted and should now recognize all packages") - `); - - await sendProgress({ - type: 'complete', - message: 'Package installation complete and dev server restarted!', - installedPackages - }); + try { + await providerInstance.restartViteServer(); + + // Wait a bit for the server to start + await new Promise(resolve => setTimeout(resolve, 3000)); + + await sendProgress({ + type: 'complete', + message: 'Package installation complete and dev server 
restarted!', + installedPackages: packagesToInstall + }); + } catch (error) { + await sendProgress({ + type: 'error', + message: `Failed to restart dev server: ${(error as Error).message}` + }); + } } catch (error) { const errorMessage = (error as Error).message; @@ -348,7 +238,7 @@ print("Vite restarted and should now recognize all packages") } finally { await writer.close(); } - })(sandbox); + })(provider); // Return the stream return new Response(stream.readable, { diff --git a/app/api/kill-sandbox/route.ts b/app/api/kill-sandbox/route.ts index 70d005a..f23550b 100644 --- a/app/api/kill-sandbox/route.ts +++ b/app/api/kill-sandbox/route.ts @@ -1,27 +1,27 @@ import { NextResponse } from 'next/server'; declare global { - var activeSandbox: any; + var activeSandboxProvider: any; var sandboxData: any; var existingFiles: Set; } export async function POST() { try { - console.log('[kill-sandbox] Killing active sandbox...'); - + console.log('[kill-sandbox] Stopping active sandbox...'); + let sandboxKilled = false; - - // Kill existing sandbox if any - if (global.activeSandbox) { + + // Stop existing sandbox if any + if (global.activeSandboxProvider) { try { - await global.activeSandbox.close(); + await global.activeSandboxProvider.terminate(); sandboxKilled = true; - console.log('[kill-sandbox] Sandbox closed successfully'); + console.log('[kill-sandbox] Sandbox stopped successfully'); } catch (e) { - console.error('[kill-sandbox] Failed to close sandbox:', e); + console.error('[kill-sandbox] Failed to stop sandbox:', e); } - global.activeSandbox = null; + global.activeSandboxProvider = null; global.sandboxData = null; } diff --git a/app/api/monitor-vite-logs/route.ts b/app/api/monitor-vite-logs/route.ts index ef537f0..a74a010 100644 --- a/app/api/monitor-vite-logs/route.ts +++ b/app/api/monitor-vite-logs/route.ts @@ -15,97 +15,100 @@ export async function GET() { console.log('[monitor-vite-logs] Checking Vite process logs...'); - // Check both the error file and recent 
logs - const result = await global.activeSandbox.runCode(` -import json -import subprocess -import re - -errors = [] - -# First check the error file -try: - with open('/tmp/vite-errors.json', 'r') as f: - data = json.load(f) - errors.extend(data.get('errors', [])) -except: - pass - -# Also check if we can get recent Vite logs -try: - # Try to get the Vite process PID - with open('/tmp/vite-process.pid', 'r') as f: - pid = int(f.read().strip()) + const errors: any[] = []; - # Check if process is still running and get its logs - # This is a bit hacky but works for our use case - result = subprocess.run(['ps', '-p', str(pid)], capture_output=True, text=True) - if result.returncode == 0: - # Process is running, try to check for errors in output - # Note: We can't easily get stdout/stderr from a running process - # but we can check if there are new errors - pass -except: - pass - -# Also scan the current console output for any HMR errors -# This won't catch everything but helps with recent errors -try: - # Check if there's a log file we can read - import os - log_files = [] - for root, dirs, files in os.walk('/tmp'): - for file in files: - if 'vite' in file.lower() and file.endswith('.log'): - log_files.append(os.path.join(root, file)) + // Check if there's an error file from previous runs + try { + const catResult = await global.activeSandbox.runCommand({ + cmd: 'cat', + args: ['/tmp/vite-errors.json'] + }); + + if (catResult.exitCode === 0) { + const errorFileContent = await catResult.stdout(); + const data = JSON.parse(errorFileContent); + errors.push(...(data.errors || [])); + } + } catch { + // No error file exists, that's OK + } - for log_file in log_files[:5]: # Check up to 5 log files - try: - with open(log_file, 'r') as f: - content = f.read() - # Look for import errors - import_errors = re.findall(r'Failed to resolve import "([^"]+)"', content) - for pkg in import_errors: - if not pkg.startswith('.'): - # Extract base package name - if pkg.startswith('@'): - 
parts = pkg.split('/') - final_pkg = '/'.join(parts[:2]) if len(parts) >= 2 else pkg - else: - final_pkg = pkg.split('/')[0] - - error_obj = { - "type": "npm-missing", - "package": final_pkg, - "message": f"Failed to resolve import \\"{pkg}\\"", - "file": "Unknown" - } - - # Avoid duplicates - if not any(e['package'] == error_obj['package'] for e in errors): - errors.append(error_obj) - except: - pass -except Exception as e: - print(f"Error scanning logs: {e}") - -# Deduplicate errors -unique_errors = [] -seen_packages = set() -for error in errors: - if error.get('package') and error['package'] not in seen_packages: - seen_packages.add(error['package']) - unique_errors.append(error) - -print(json.dumps({"errors": unique_errors})) - `, { timeout: 5000 }); + // Look for any Vite-related log files that might contain errors + try { + const findResult = await global.activeSandbox.runCommand({ + cmd: 'find', + args: ['/tmp', '-name', '*vite*', '-type', 'f'] + }); + + if (findResult.exitCode === 0) { + const logFiles = (await findResult.stdout()).split('\n').filter((f: string) => f.trim()); + + for (const logFile of logFiles.slice(0, 3)) { + try { + const grepResult = await global.activeSandbox.runCommand({ + cmd: 'grep', + args: ['-i', 'failed to resolve import', logFile] + }); + + if (grepResult.exitCode === 0) { + const errorLines = (await grepResult.stdout()).split('\n').filter((line: string) => line.trim()); + + for (const line of errorLines) { + // Extract package name from error line + const importMatch = line.match(/"([^"]+)"/); + if (importMatch) { + const importPath = importMatch[1]; + + // Skip relative imports + if (!importPath.startsWith('.')) { + // Extract base package name + let packageName; + if (importPath.startsWith('@')) { + const parts = importPath.split('/'); + packageName = parts.length >= 2 ? 
parts.slice(0, 2).join('/') : importPath; + } else { + packageName = importPath.split('/')[0]; + } + + const errorObj = { + type: "npm-missing", + package: packageName, + message: `Failed to resolve import "${importPath}"`, + file: "Unknown" + }; + + // Avoid duplicates + if (!errors.some(e => e.package === errorObj.package)) { + errors.push(errorObj); + } + } + } + } + } + } catch { + // Skip if grep fails + } + } + } + } catch { + // No log files found, that's OK + } - const data = JSON.parse(result.output || '{"errors": []}'); + // Deduplicate errors by package name + const uniqueErrors: any[] = []; + const seenPackages = new Set(); + + for (const error of errors) { + if (error.package && !seenPackages.has(error.package)) { + seenPackages.add(error.package); + uniqueErrors.push(error); + } + } return NextResponse.json({ success: true, - hasErrors: data.errors.length > 0, - errors: data.errors + hasErrors: uniqueErrors.length > 0, + errors: uniqueErrors }); } catch (error) { diff --git a/app/api/restart-vite/route.ts b/app/api/restart-vite/route.ts index ca6b4ba..6d97bbb 100644 --- a/app/api/restart-vite/route.ts +++ b/app/api/restart-vite/route.ts @@ -2,132 +2,99 @@ import { NextResponse } from 'next/server'; declare global { var activeSandbox: any; + var activeSandboxProvider: any; + var lastViteRestartTime: number; + var viteRestartInProgress: boolean; } +const RESTART_COOLDOWN_MS = 5000; // 5 second cooldown between restarts + export async function POST() { try { - if (!global.activeSandbox) { + // Check both v1 and v2 global references + const provider = global.activeSandbox || global.activeSandboxProvider; + + if (!provider) { return NextResponse.json({ success: false, error: 'No active sandbox' }, { status: 400 }); } - console.log('[restart-vite] Forcing Vite restart...'); + // Check if restart is already in progress + if (global.viteRestartInProgress) { + console.log('[restart-vite] Vite restart already in progress, skipping...'); + return 
NextResponse.json({ + success: true, + message: 'Vite restart already in progress' + }); + } - // Kill existing Vite process and restart - const result = await global.activeSandbox.runCode(` -import subprocess -import os -import signal -import time -import threading -import json -import sys - -# Kill existing Vite process -try: - with open('/tmp/vite-process.pid', 'r') as f: - pid = int(f.read().strip()) - os.kill(pid, signal.SIGTERM) - print("Killed existing Vite process") - time.sleep(1) -except: - print("No existing Vite process found") - -os.chdir('/home/user/app') - -# Clear error file -error_file = '/tmp/vite-errors.json' -with open(error_file, 'w') as f: - json.dump({"errors": [], "lastChecked": time.time()}, f) - -# Function to monitor Vite output for errors -def monitor_output(proc, error_file): - while True: - line = proc.stderr.readline() - if not line: - break + // Check cooldown + const now = Date.now(); + if (global.lastViteRestartTime && (now - global.lastViteRestartTime) < RESTART_COOLDOWN_MS) { + const remainingTime = Math.ceil((RESTART_COOLDOWN_MS - (now - global.lastViteRestartTime)) / 1000); + console.log(`[restart-vite] Cooldown active, ${remainingTime}s remaining`); + return NextResponse.json({ + success: true, + message: `Vite was recently restarted, cooldown active (${remainingTime}s remaining)` + }); + } + + // Set the restart flag + global.viteRestartInProgress = true; + + console.log('[restart-vite] Using provider method to restart Vite...'); + + // Use the provider's restartViteServer method if available + if (typeof provider.restartViteServer === 'function') { + await provider.restartViteServer(); + console.log('[restart-vite] Vite restarted via provider method'); + } else { + // Fallback to manual restart using provider's runCommand + console.log('[restart-vite] Fallback to manual Vite restart...'); + + // Kill existing Vite processes + try { + await provider.runCommand('pkill -f vite'); + console.log('[restart-vite] Killed existing 
Vite processes'); - sys.stdout.write(line) # Also print to console - - # Check for import resolution errors - if "Failed to resolve import" in line: - try: - # Extract package name from error - import_match = line.find('"') - if import_match != -1: - end_match = line.find('"', import_match + 1) - if end_match != -1: - package_name = line[import_match + 1:end_match] - # Skip relative imports - if not package_name.startswith('.'): - with open(error_file, 'r') as f: - data = json.load(f) - - # Handle scoped packages correctly - if package_name.startswith('@'): - # For @scope/package, keep the scope - pkg_parts = package_name.split('/') - if len(pkg_parts) >= 2: - final_package = '/'.join(pkg_parts[:2]) - else: - final_package = package_name - else: - # For regular packages, just take the first part - final_package = package_name.split('/')[0] - - error_obj = { - "type": "npm-missing", - "package": final_package, - "message": line.strip(), - "timestamp": time.time() - } - - # Avoid duplicates - if not any(e['package'] == error_obj['package'] for e in data['errors']): - data['errors'].append(error_obj) - - with open(error_file, 'w') as f: - json.dump(data, f) - - print(f"WARNING: Detected missing package: {error_obj['package']}") - except Exception as e: - print(f"Error parsing Vite error: {e}") - -# Start Vite with error monitoring -process = subprocess.Popen( - ['npm', 'run', 'dev'], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True, - bufsize=1 -) - -# Start monitoring thread -monitor_thread = threading.Thread(target=monitor_output, args=(process, error_file)) -monitor_thread.daemon = True -monitor_thread.start() - -print("Vite restarted successfully!") - -# Store process info for later -with open('/tmp/vite-process.pid', 'w') as f: - f.write(str(process.pid)) - -# Wait for Vite to fully start -time.sleep(5) -print("Vite is ready") - `); + // Wait a moment for processes to terminate + await new Promise(resolve => setTimeout(resolve, 2000)); + } catch { 
+ console.log('[restart-vite] No existing Vite processes found'); + } + + // Clear any error tracking files + try { + await provider.runCommand('bash -c "echo \'{\\"errors\\": [], \\"lastChecked\\": '+ Date.now() +'}\' > /tmp/vite-errors.json"'); + } catch { + // Ignore if this fails + } + + // Start Vite dev server in background + await provider.runCommand('sh -c "nohup npm run dev > /tmp/vite.log 2>&1 &"'); + console.log('[restart-vite] Vite dev server restarted'); + + // Wait for Vite to start up + await new Promise(resolve => setTimeout(resolve, 3000)); + } + + // Update global state + global.lastViteRestartTime = Date.now(); + global.viteRestartInProgress = false; return NextResponse.json({ success: true, - message: 'Vite restarted successfully', - output: result.output + message: 'Vite restarted successfully' }); } catch (error) { console.error('[restart-vite] Error:', error); + + // Clear the restart flag on error + global.viteRestartInProgress = false; + return NextResponse.json({ success: false, error: (error as Error).message diff --git a/app/api/run-command-v2/route.ts b/app/api/run-command-v2/route.ts new file mode 100644 index 0000000..18cd15f --- /dev/null +++ b/app/api/run-command-v2/route.ts @@ -0,0 +1,50 @@ +import { NextRequest, NextResponse } from 'next/server'; +import { SandboxProvider } from '@/lib/sandbox/types'; +import { sandboxManager } from '@/lib/sandbox/sandbox-manager'; + +// Get active sandbox provider from global state +declare global { + var activeSandboxProvider: any; +} + +export async function POST(request: NextRequest) { + try { + const { command } = await request.json(); + + if (!command) { + return NextResponse.json({ + success: false, + error: 'Command is required' + }, { status: 400 }); + } + + // Get provider from sandbox manager or global state + const provider = sandboxManager.getActiveProvider() || global.activeSandboxProvider; + + if (!provider) { + return NextResponse.json({ + success: false, + error: 'No active 
sandbox' + }, { status: 400 }); + } + + console.log(`[run-command-v2] Executing: ${command}`); + + const result = await provider.runCommand(command); + + return NextResponse.json({ + success: result.success, + output: result.stdout, + error: result.stderr, + exitCode: result.exitCode, + message: result.success ? 'Command executed successfully' : 'Command failed' + }); + + } catch (error) { + console.error('[run-command-v2] Error:', error); + return NextResponse.json({ + success: false, + error: (error as Error).message + }, { status: 500 }); + } +} \ No newline at end of file diff --git a/app/api/run-command/route.ts b/app/api/run-command/route.ts index 53e7e7b..76ffaff 100644 --- a/app/api/run-command/route.ts +++ b/app/api/run-command/route.ts @@ -1,5 +1,4 @@ import { NextRequest, NextResponse } from 'next/server'; -import { Sandbox } from '@e2b/code-interpreter'; // Get active sandbox from global state (in production, use a proper state management solution) declare global { @@ -26,30 +25,32 @@ export async function POST(request: NextRequest) { console.log(`[run-command] Executing: ${command}`); - const result = await global.activeSandbox.runCode(` -import subprocess -import os - -os.chdir('/home/user/app') -result = subprocess.run(${JSON.stringify(command.split(' '))}, - capture_output=True, - text=True, - shell=False) - -print("STDOUT:") -print(result.stdout) -if result.stderr: - print("\\nSTDERR:") - print(result.stderr) -print(f"\\nReturn code: {result.returncode}") - `); + // Parse command and arguments + const commandParts = command.trim().split(/\s+/); + const cmd = commandParts[0]; + const args = commandParts.slice(1); - const output = result.logs.stdout.join('\n'); + // Execute command using Vercel Sandbox + const result = await global.activeSandbox.runCommand({ + cmd, + args + }); + + // Get output streams + const stdout = await result.stdout(); + const stderr = await result.stderr(); + + const output = [ + stdout ? `STDOUT:\n${stdout}` : '', + stderr ? 
`\nSTDERR:\n${stderr}` : '', + `\nExit code: ${result.exitCode}` + ].filter(Boolean).join(''); return NextResponse.json({ success: true, output, - message: 'Command executed successfully' + exitCode: result.exitCode, + message: result.exitCode === 0 ? 'Command executed successfully' : 'Command completed with non-zero exit code' }); } catch (error) { diff --git a/app/api/sandbox-logs/route.ts b/app/api/sandbox-logs/route.ts index 84d0208..2a7f2fe 100644 --- a/app/api/sandbox-logs/route.ts +++ b/app/api/sandbox-logs/route.ts @@ -1,10 +1,10 @@ -import { NextRequest, NextResponse } from 'next/server'; +import { NextResponse } from 'next/server'; declare global { var activeSandbox: any; } -export async function GET(request: NextRequest) { +export async function GET() { try { if (!global.activeSandbox) { return NextResponse.json({ @@ -15,55 +15,70 @@ export async function GET(request: NextRequest) { console.log('[sandbox-logs] Fetching Vite dev server logs...'); - // Get the last N lines of the Vite dev server output - const result = await global.activeSandbox.runCode(` -import subprocess -import os - -# Try to get the Vite process output -try: - # Read the last 100 lines of any log files - log_content = [] + // Check if Vite processes are running + const psResult = await global.activeSandbox.runCommand({ + cmd: 'ps', + args: ['aux'] + }); - # Check if there are any node processes running - ps_result = subprocess.run(['ps', 'aux'], capture_output=True, text=True) - vite_processes = [line for line in ps_result.stdout.split('\\n') if 'vite' in line.lower()] + let viteRunning = false; + const logContent: string[] = []; - if vite_processes: - log_content.append("Vite is running") - else: - log_content.append("Vite process not found") - - # Try to capture recent console output (this is a simplified approach) - # In a real implementation, you'd want to capture the Vite process output directly - print(json.dumps({ - "hasErrors": False, - "logs": log_content, - "status": 
"running" if vite_processes else "stopped" - })) -except Exception as e: - print(json.dumps({ - "hasErrors": True, - "logs": [str(e)], - "status": "error" - })) - `); - - try { - const logData = JSON.parse(result.output || '{}'); - return NextResponse.json({ - success: true, - ...logData - }); - } catch { - return NextResponse.json({ - success: true, - hasErrors: false, - logs: [result.output], - status: 'unknown' - }); + if (psResult.exitCode === 0) { + const psOutput = await psResult.stdout(); + const viteProcesses = psOutput.split('\n').filter((line: string) => + line.toLowerCase().includes('vite') || + line.toLowerCase().includes('npm run dev') + ); + + viteRunning = viteProcesses.length > 0; + + if (viteRunning) { + logContent.push("Vite is running"); + logContent.push(...viteProcesses.slice(0, 3)); // Show first 3 processes + } else { + logContent.push("Vite process not found"); + } } + // Try to read any recent log files + try { + const findResult = await global.activeSandbox.runCommand({ + cmd: 'find', + args: ['/tmp', '-name', '*vite*', '-name', '*.log', '-type', 'f'] + }); + + if (findResult.exitCode === 0) { + const logFiles = (await findResult.stdout()).split('\n').filter((f: string) => f.trim()); + + for (const logFile of logFiles.slice(0, 2)) { + try { + const catResult = await global.activeSandbox.runCommand({ + cmd: 'tail', + args: ['-n', '10', logFile] + }); + + if (catResult.exitCode === 0) { + const logFileContent = await catResult.stdout(); + logContent.push(`--- ${logFile} ---`); + logContent.push(logFileContent); + } + } catch { + // Skip if can't read log file + } + } + } + } catch { + // No log files found, that's OK + } + + return NextResponse.json({ + success: true, + hasErrors: false, + logs: logContent, + status: viteRunning ? 
'running' : 'stopped' + }); + } catch (error) { console.error('[sandbox-logs] Error:', error); return NextResponse.json({ diff --git a/app/api/sandbox-status/route.ts b/app/api/sandbox-status/route.ts index 7f5e0b5..0d7bde0 100644 --- a/app/api/sandbox-status/route.ts +++ b/app/api/sandbox-status/route.ts @@ -1,27 +1,30 @@ import { NextResponse } from 'next/server'; +import { sandboxManager } from '@/lib/sandbox/sandbox-manager'; declare global { - var activeSandbox: any; + var activeSandboxProvider: any; var sandboxData: any; var existingFiles: Set; } export async function GET() { try { - // Check if sandbox exists - const sandboxExists = !!global.activeSandbox; - + // Check sandbox manager first, then fall back to global state + const provider = sandboxManager.getActiveProvider() || global.activeSandboxProvider; + const sandboxExists = !!provider; + let sandboxHealthy = false; let sandboxInfo = null; - - if (sandboxExists && global.activeSandbox) { + + if (sandboxExists && provider) { try { - // Since Python isn't available in the Vite template, just check if sandbox exists - // The sandbox object existing is enough to confirm it's healthy - sandboxHealthy = true; + // Check if sandbox is healthy by getting its info + const providerInfo = provider.getSandboxInfo(); + sandboxHealthy = !!providerInfo; + sandboxInfo = { - sandboxId: global.sandboxData?.sandboxId, - url: global.sandboxData?.url, + sandboxId: providerInfo?.sandboxId || global.sandboxData?.sandboxId, + url: providerInfo?.url || global.sandboxData?.url, filesTracked: global.existingFiles ? 
Array.from(global.existingFiles) : [], lastHealthCheck: new Date().toISOString() }; diff --git a/app/api/scrape-screenshot/route.ts b/app/api/scrape-screenshot/route.ts index b77820a..bd681ac 100644 --- a/app/api/scrape-screenshot/route.ts +++ b/app/api/scrape-screenshot/route.ts @@ -1,4 +1,5 @@ import { NextRequest, NextResponse } from 'next/server'; +import FirecrawlApp from '@mendable/firecrawl-js'; export async function POST(req: NextRequest) { try { @@ -8,49 +9,73 @@ export async function POST(req: NextRequest) { return NextResponse.json({ error: 'URL is required' }, { status: 400 }); } - // Use Firecrawl API to capture screenshot - const firecrawlResponse = await fetch('https://api.firecrawl.dev/v1/scrape', { - method: 'POST', - headers: { - 'Authorization': `Bearer ${process.env.FIRECRAWL_API_KEY}`, - 'Content-Type': 'application/json' - }, - body: JSON.stringify({ - url, - formats: ['screenshot'], // Regular viewport screenshot, not full page - waitFor: 3000, // Wait for page to fully load - timeout: 30000, - blockAds: true, - actions: [ - { - type: 'wait', - milliseconds: 2000 // Additional wait for dynamic content - } - ] - }) - }); - - if (!firecrawlResponse.ok) { - const error = await firecrawlResponse.text(); - throw new Error(`Firecrawl API error: ${error}`); - } - - const data = await firecrawlResponse.json(); + // Initialize Firecrawl with API key from environment + const apiKey = process.env.FIRECRAWL_API_KEY; - if (!data.success || !data.data?.screenshot) { - throw new Error('Failed to capture screenshot'); + if (!apiKey) { + console.error("FIRECRAWL_API_KEY not configured"); + return NextResponse.json({ + error: 'Firecrawl API key not configured' + }, { status: 500 }); } + + const app = new FirecrawlApp({ apiKey }); - return NextResponse.json({ - success: true, - screenshot: data.data.screenshot, - metadata: data.data.metadata + console.log('[scrape-screenshot] Attempting to capture screenshot for:', url); + console.log('[scrape-screenshot] Using 
Firecrawl API key:', apiKey ? 'Present' : 'Missing'); + + // Use the new v4 scrape method (not scrapeUrl) + const scrapeResult = await app.scrape(url, { + formats: ['screenshot'], // Request screenshot format + waitFor: 3000, // Wait for page to fully load + timeout: 30000, + onlyMainContent: false, // Get full page for screenshot + actions: [ + { + type: 'wait', + milliseconds: 2000 // Additional wait for dynamic content + } + ] }); + console.log('[scrape-screenshot] Full scrape result:', JSON.stringify(scrapeResult, null, 2)); + console.log('[scrape-screenshot] Scrape result type:', typeof scrapeResult); + console.log('[scrape-screenshot] Scrape result keys:', Object.keys(scrapeResult)); + + // The Firecrawl v4 API might return data directly without a success flag + // Check if we have data with screenshot + if (scrapeResult && scrapeResult.screenshot) { + // Direct screenshot response + return NextResponse.json({ + success: true, + screenshot: scrapeResult.screenshot, + metadata: scrapeResult.metadata || {} + }); + } else if ((scrapeResult as any)?.data?.screenshot) { + // Nested data structure + return NextResponse.json({ + success: true, + screenshot: (scrapeResult as any).data.screenshot, + metadata: (scrapeResult as any).data.metadata || {} + }); + } else if ((scrapeResult as any)?.success === false) { + // Explicit failure + console.error('[scrape-screenshot] Firecrawl API error:', (scrapeResult as any).error); + throw new Error((scrapeResult as any).error || 'Failed to capture screenshot'); + } else { + // No screenshot in response + console.error('[scrape-screenshot] No screenshot in response. 
Full response:', JSON.stringify(scrapeResult, null, 2)); + throw new Error('Screenshot not available in response - check console for full response structure'); + } + } catch (error: any) { - console.error('Screenshot capture error:', error); + console.error('[scrape-screenshot] Screenshot capture error:', error); + console.error('[scrape-screenshot] Error stack:', error.stack); + + // Provide fallback response for development - removed NODE_ENV check as it doesn't work in Next.js production builds + return NextResponse.json({ - error: error.message || 'Failed to capture screenshot' + error: error.message || 'Failed to capture screenshot' }, { status: 500 }); } } \ No newline at end of file diff --git a/app/api/scrape-url-enhanced/route.ts b/app/api/scrape-url-enhanced/route.ts index 2c74278..1baa682 100644 --- a/app/api/scrape-url-enhanced/route.ts +++ b/app/api/scrape-url-enhanced/route.ts @@ -43,7 +43,7 @@ export async function POST(request: NextRequest) { }, body: JSON.stringify({ url, - formats: ['markdown', 'html'], + formats: ['markdown', 'html', 'screenshot'], waitFor: 3000, timeout: 30000, blockAds: true, @@ -52,6 +52,10 @@ export async function POST(request: NextRequest) { { type: 'wait', milliseconds: 2000 + }, + { + type: 'screenshot', + fullPage: false // Just visible viewport for performance } ] }) @@ -68,7 +72,11 @@ export async function POST(request: NextRequest) { throw new Error('Failed to scrape content'); } - const { markdown, html, metadata } = data.data; + const { markdown, metadata, screenshot, actions } = data.data; + // html available but not used in current implementation + + // Get screenshot from either direct field or actions result + const screenshotUrl = screenshot || actions?.screenshots?.[0] || null; // Sanitize the markdown content const sanitizedMarkdown = sanitizeQuotes(markdown || ''); @@ -91,11 +99,13 @@ ${sanitizedMarkdown} success: true, url, content: formattedContent, + screenshot: screenshotUrl, structured: { title: 
sanitizeQuotes(title), description: sanitizeQuotes(description), content: sanitizedMarkdown, - url + url, + screenshot: screenshotUrl }, metadata: { scraper: 'firecrawl-enhanced', diff --git a/app/api/scrape-website/route.ts b/app/api/scrape-website/route.ts new file mode 100644 index 0000000..af461ea --- /dev/null +++ b/app/api/scrape-website/route.ts @@ -0,0 +1,110 @@ +import { NextRequest, NextResponse } from "next/server"; +import FirecrawlApp from '@mendable/firecrawl-js'; + +export async function POST(request: NextRequest) { + try { + const { url, formats = ['markdown', 'html'], options = {} } = await request.json(); + + if (!url) { + return NextResponse.json( + { error: "URL is required" }, + { status: 400 } + ); + } + + // Initialize Firecrawl with API key from environment + const apiKey = process.env.FIRECRAWL_API_KEY; + + if (!apiKey) { + console.error("FIRECRAWL_API_KEY not configured"); + // For demo purposes, return mock data if API key is not set + return NextResponse.json({ + success: true, + data: { + title: "Example Website", + content: `This is a mock response for ${url}. Configure FIRECRAWL_API_KEY to enable real scraping.`, + description: "A sample website", + markdown: `# Example Website\n\nThis is mock content for demonstration purposes.`, + html: `

Example Website

This is mock content for demonstration purposes.

`, + metadata: { + title: "Example Website", + description: "A sample website", + sourceURL: url, + statusCode: 200 + } + } + }); + } + + const app = new FirecrawlApp({ apiKey }); + + // Scrape the website using the latest SDK patterns + // Include screenshot if requested in formats + const scrapeResult = await app.scrape(url, { + formats: formats, + onlyMainContent: options.onlyMainContent !== false, // Default to true for cleaner content + waitFor: options.waitFor || 2000, // Wait for dynamic content + timeout: options.timeout || 30000, + ...options // Pass through any additional options + }); + + // Handle the response according to the latest SDK structure + const result = scrapeResult as any; + if (result.success === false) { + throw new Error(result.error || "Failed to scrape website"); + } + + // The SDK may return data directly or nested + const data = result.data || result; + + return NextResponse.json({ + success: true, + data: { + title: data?.metadata?.title || "Untitled", + content: data?.markdown || data?.html || "", + description: data?.metadata?.description || "", + markdown: data?.markdown || "", + html: data?.html || "", + metadata: data?.metadata || {}, + screenshot: data?.screenshot || null, + links: data?.links || [], + // Include raw data for flexibility + raw: data + } + }); + + } catch (error) { + console.error("Error scraping website:", error); + + // Return a more detailed error response + return NextResponse.json({ + success: false, + error: error instanceof Error ? error.message : "Failed to scrape website", + // Provide mock data as fallback for development + data: { + title: "Example Website", + content: "This is fallback content due to an error. Please check your configuration.", + description: "Error occurred while scraping", + markdown: `# Error\n\n${error instanceof Error ? error.message : 'Unknown error occurred'}`, + html: `

Error

${error instanceof Error ? error.message : 'Unknown error occurred'}

`, + metadata: { + title: "Error", + description: "Failed to scrape website", + statusCode: 500 + } + } + }, { status: 500 }); + } +} + +// Optional: Add OPTIONS handler for CORS if needed +export async function OPTIONS() { + return new NextResponse(null, { + status: 200, + headers: { + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Methods': 'POST, OPTIONS', + 'Access-Control-Allow-Headers': 'Content-Type', + }, + }); +} \ No newline at end of file diff --git a/app/api/search/route.ts b/app/api/search/route.ts new file mode 100644 index 0000000..b93b1a0 --- /dev/null +++ b/app/api/search/route.ts @@ -0,0 +1,51 @@ +import { NextRequest, NextResponse } from 'next/server'; + +export async function POST(req: NextRequest) { + try { + const { query } = await req.json(); + + if (!query) { + return NextResponse.json({ error: 'Query is required' }, { status: 400 }); + } + + // Use Firecrawl search to get top 10 results with screenshots + const searchResponse = await fetch('https://api.firecrawl.dev/v1/search', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${process.env.FIRECRAWL_API_KEY}`, + }, + body: JSON.stringify({ + query, + limit: 10, + scrapeOptions: { + formats: ['markdown', 'screenshot'], + onlyMainContent: true, + }, + }), + }); + + if (!searchResponse.ok) { + throw new Error('Search failed'); + } + + const searchData = await searchResponse.json(); + + // Format results with screenshots and markdown + const results = searchData.data?.map((result: any) => ({ + url: result.url, + title: result.title || result.url, + description: result.description || '', + screenshot: result.screenshot || null, + markdown: result.markdown || '', + })) || []; + + return NextResponse.json({ results }); + } catch (error) { + console.error('Search error:', error); + return NextResponse.json( + { error: 'Failed to perform search' }, + { status: 500 } + ); + } +} \ No newline at end of file diff --git a/app/builder/page.tsx 
b/app/builder/page.tsx new file mode 100644 index 0000000..48fb724 --- /dev/null +++ b/app/builder/page.tsx @@ -0,0 +1,286 @@ +"use client"; + +import { useEffect, useState } from "react"; +import { useRouter } from "next/navigation"; +import { toast } from "sonner"; + +export default function BuilderPage() { + const [targetUrl, setTargetUrl] = useState(""); + const [selectedStyle, setSelectedStyle] = useState("modern"); + const [isLoading, setIsLoading] = useState(true); + const [previewUrl, setPreviewUrl] = useState(""); + const [progress, setProgress] = useState("Initializing..."); + const [generatedCode, setGeneratedCode] = useState(""); + const router = useRouter(); + + useEffect(() => { + // Get the URL and style from sessionStorage + const url = sessionStorage.getItem('targetUrl'); + const style = sessionStorage.getItem('selectedStyle'); + + if (!url) { + router.push('/'); + return; + } + + setTargetUrl(url); + setSelectedStyle(style || "modern"); + + // Start the website generation process + generateWebsite(url, style || "modern"); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [router]); + + const generateWebsite = async (url: string, style: string) => { + try { + setProgress("Analyzing website..."); + + // For demo purposes, we'll generate a simple HTML template + // In production, this would call the actual scraping and generation APIs + const mockGeneratedCode = ` + + + + + + ${style} Website - Reimagined + + + +
+ +
+ +
+
+

Welcome to Your ${style === 'modern' ? 'Modern' : style === 'playful' ? 'Playful' : style === 'professional' ? 'Professional' : 'Artistic'} Website

+

Reimagined from ${url}

+ Get Started +
+ +
+
+

Fast

+

Lightning-fast performance optimized for modern web standards.

+
+
+

Responsive

+

Looks great on all devices, from mobile to desktop.

+
+
+

Beautiful

+

Stunning design that captures attention and drives engagement.

+
+
+
+ +`; + + setGeneratedCode(mockGeneratedCode); + + // Create a blob URL for the preview + const blob = new Blob([mockGeneratedCode], { type: 'text/html' }); + const blobUrl = URL.createObjectURL(blob); + setPreviewUrl(blobUrl); + + setProgress("Website ready!"); + setIsLoading(false); + + // Show success message + toast.success("Website generated successfully!"); + + } catch (error) { + console.error("Error generating website:", error); + toast.error("Failed to generate website. Please try again."); + setProgress("Error occurred"); + setTimeout(() => router.push('/'), 2000); + } + }; + + const downloadCode = () => { + const blob = new Blob([generatedCode], { type: 'text/html' }); + const url = URL.createObjectURL(blob); + const a = document.createElement('a'); + a.href = url; + a.download = 'website.html'; + document.body.appendChild(a); + a.click(); + document.body.removeChild(a); + URL.revokeObjectURL(url); + toast.success("Code downloaded!"); + }; + + return ( +
+
+ {/* Sidebar */} +
+

Building Your Website

+ +
+
+
Target URL
+
{targetUrl}
+
+ +
+
Style
+
{selectedStyle}
+
+ +
+
Status
+
{progress}
+
+
+ +
+ {!isLoading && ( + + )} + + +
+
+ + {/* Preview */} +
+ {isLoading ? ( +
+
+
+

{progress}

+
+
+ ) : ( + previewUrl && ( +