diff --git a/.env.example b/.env.example index e32df34..c8b9f67 100644 --- a/.env.example +++ b/.env.example @@ -1,20 +1,20 @@ -# Required -FIRECRAWL_API_KEY=your_firecrawl_api_key # Get from https://firecrawl.dev (Web scraping) +# REQUIRED - Sandboxes for code execution +# Get yours at https://e2b.dev +E2B_API_KEY=your_e2b_api_key_here -# Vercel Sandbox Authentication (choose one method) -# See: https://vercel.com/docs/vercel-sandbox#authentication +# REQUIRED - Web scraping for cloning websites +# Get yours at https://firecrawl.dev +FIRECRAWL_API_KEY=your_firecrawl_api_key_here -# Method 1: OIDC Token (recommended for development) -# Run `vercel link` then `vercel env pull` to get VERCEL_OIDC_TOKEN automatically -# VERCEL_OIDC_TOKEN=auto_generated_by_vercel_env_pull +# OPTIONAL - AI Providers (need at least one) +# Get yours at https://console.anthropic.com +ANTHROPIC_API_KEY=your_anthropic_api_key_here -# Method 2: Personal Access Token (for production or when OIDC unavailable) -# VERCEL_TEAM_ID=team_xxxxxxxxx # Your Vercel team ID -# VERCEL_PROJECT_ID=prj_xxxxxxxxx # Your Vercel project ID -# VERCEL_TOKEN=vercel_xxxxxxxxxxxx # Personal access token from Vercel dashboard +# Get yours at https://platform.openai.com +OPENAI_API_KEY=your_openai_api_key_here -# Optional (need at least one AI provider) -ANTHROPIC_API_KEY=your_anthropic_api_key # Get from https://console.anthropic.com -OPENAI_API_KEY=your_openai_api_key # Get from https://platform.openai.com (GPT-5) -GEMINI_API_KEY=your_gemini_api_key # Get from https://aistudio.google.com/app/apikey -GROQ_API_KEY=your_groq_api_key # Get from https://console.groq.com (Fast inference - Kimi K2 recommended) \ No newline at end of file +# Get yours at https://aistudio.google.com/app/apikey +GEMINI_API_KEY=your_gemini_api_key_here + +# Get yours at https://console.groq.com +GROQ_API_KEY=your_groq_api_key_here \ No newline at end of file diff --git a/.gitignore b/.gitignore index 79f47d8..ac59fa8 100644 --- a/.gitignore 
+++ b/.gitignore
@@ -56,4 +56,4 @@ e2b-template-*
 *.temp
 repomix-output.txt
 bun.lockb
 .env*.local
diff --git a/README.md b/README.md
index a5c9800..803cc92 100644
--- a/README.md
+++ b/README.md
@@ -16,23 +16,11 @@
 npm install
 ```
 
 2. **Add `.env.local`**
 ```env
 # Required
+E2B_API_KEY=your_e2b_api_key # Get from https://e2b.dev (Sandboxes)
 FIRECRAWL_API_KEY=your_firecrawl_api_key # Get from https://firecrawl.dev (Web scraping)
 
-# Vercel Sandbox Authentication (choose one method)
-# See: https://vercel.com/docs/vercel-sandbox#authentication
-
-# Method 1: OIDC Token (recommended for development)
-# Run `vercel link` then `vercel env pull` to get VERCEL_OIDC_TOKEN automatically
-# VERCEL_OIDC_TOKEN=auto_generated_by_vercel_env_pull
-
-# Method 2: Personal Access Token (for production or when OIDC unavailable)
-# VERCEL_TEAM_ID=team_xxxxxxxxx # Your Vercel team ID
-# VERCEL_PROJECT_ID=prj_xxxxxxxxx # Your Vercel project ID
-# VERCEL_TOKEN=vercel_xxxxxxxxxxxx # Personal access token from Vercel dashboard
-
 # Optional (need at least one AI provider)
 ANTHROPIC_API_KEY=your_anthropic_api_key # Get from https://console.anthropic.com
 OPENAI_API_KEY=your_openai_api_key # Get from https://platform.openai.com (GPT-5)
diff --git a/app/api/apply-ai-code-stream/route.ts b/app/api/apply-ai-code-stream/route.ts
index ac382d6..c91bf11 100644
--- a/app/api/apply-ai-code-stream/route.ts
+++ b/app/api/apply-ai-code-stream/route.ts
@@ -1,5 +1,5 @@
 import { NextRequest, NextResponse } from 'next/server';
-import { Sandbox } from '@vercel/sandbox';
+import { Sandbox } from '@e2b/code-interpreter';
 import type { SandboxState } from '@/types/sandbox';
 import type { ConversationState } from '@/types/conversation';
@@ -525,6 +525,7 @@ export async function POST(request: NextRequest) {
             normalizedPath = 'src/' + normalizedPath;
           }
 
+          const fullPath = `/home/user/app/${normalizedPath}`;
           const isUpdate = global.existingFiles.has(normalizedPath);
 
           // Remove any CSS imports from JSX/JS files (we're
using Tailwind)
@@ -533,20 +534,18 @@ export async function POST(request: NextRequest) {
             fileContent = fileContent.replace(/import\s+['"]\.\/[^'"]+\.css['"];?\s*\n?/g, '');
           }
 
-          // Create directory if needed
-          const dirPath = normalizedPath.includes('/') ? normalizedPath.substring(0, normalizedPath.lastIndexOf('/')) : '';
-          if (dirPath) {
-            await sandboxInstance.runCommand({
-              cmd: 'mkdir',
-              args: ['-p', dirPath]
-            });
-          }
+          // Write the file using Python (code-interpreter SDK).
+          // Only backslashes and triple-quotes need escaping inside a Python
+          // triple-quoted string; '$' is NOT special in Python, and writing
+          // '\$' would corrupt files that contain JS template literals.
+          const escapedContent = fileContent
+            .replace(/\\/g, '\\\\')
+            .replace(/"""/g, '\\"\\"\\"');
 
-          // Write the file using Vercel Sandbox writeFiles
-          await sandboxInstance.writeFiles([{
-            path: normalizedPath,
-            content: Buffer.from(fileContent)
-          }]);
+          await sandboxInstance.runCode(`
+import os
+os.makedirs(os.path.dirname("${fullPath}"), exist_ok=True)
+with open("${fullPath}", 'w') as f:
+    f.write("""${escapedContent}""")
+print(f"File written: ${fullPath}")
+          `);
 
           // Update file cache
           if (global.sandboxState?.fileCache) {
@@ -599,39 +599,28 @@ export async function POST(request: NextRequest) {
             action: 'executing'
           });
 
-          // Parse command and arguments for Vercel Sandbox
-          const commandParts = cmd.trim().split(/\s+/);
-          const cmdName = commandParts[0];
-          const args = commandParts.slice(1);
-
-          // Use Vercel Sandbox runCommand
-          const result = await sandboxInstance.runCommand({
-            cmd: cmdName,
-            args
+          // Use E2B commands.run() for cleaner execution.
+          // NOTE: the E2B JS SDK uses camelCase options (onStdout/onStderr)
+          // and timeoutMs in milliseconds; snake_case names are Python-SDK
+          // only and would be silently ignored here.
+          const result = await sandboxInstance.commands.run(cmd, {
+            cwd: '/home/user/app',
+            timeoutMs: 60000,
+            onStdout: async (data: string) => {
+              await sendProgress({
+                type: 'command-output',
+                command: cmd,
+                output: data,
+                stream: 'stdout'
+              });
+            },
+            onStderr: async (data: string) => {
+              await sendProgress({
+                type: 'command-output',
+                command: cmd,
+                output: data,
+                stream: 'stderr'
+              });
+            }
           });
 
-          // Get command output
-          const stdout = await result.stdout();
-          const stderr = await result.stderr();
-
-          if (stdout) {
-            
await sendProgress({ - type: 'command-output', - command: cmd, - output: stdout, - stream: 'stdout' - }); - } - - if (stderr) { - await sendProgress({ - type: 'command-output', - command: cmd, - output: stderr, - stream: 'stderr' - }); - } - if (results.commandsExecuted) { results.commandsExecuted.push(cmd); } diff --git a/app/api/apply-ai-code/route.ts b/app/api/apply-ai-code/route.ts index f051da4..f00f08a 100644 --- a/app/api/apply-ai-code/route.ts +++ b/app/api/apply-ai-code/route.ts @@ -432,12 +432,15 @@ function App() { export default App;`; try { - await global.activeSandbox.writeFiles([{ - path: 'src/App.jsx', - content: Buffer.from(appContent) - }]); - - console.log('Auto-generated: src/App.jsx'); + await global.activeSandbox.runCode(` +file_path = "/home/user/app/src/App.jsx" +file_content = """${appContent.replace(/"/g, '\\"').replace(/\n/g, '\\n')}""" + +with open(file_path, 'w') as f: + f.write(file_content) + +print(f"Auto-generated: {file_path}") + `); results.filesCreated.push('src/App.jsx (auto-generated)'); } catch (error) { results.errors.push(`Failed to create App.jsx: ${(error as Error).message}`); @@ -456,7 +459,9 @@ export default App;`; if (!isEdit && !indexCssInParsed && !indexCssExists) { try { - const indexCssContent = `@tailwind base; + await global.activeSandbox.runCode(` +file_path = "/home/user/app/src/index.css" +file_content = """@tailwind base; @tailwind components; @tailwind utilities; @@ -478,14 +483,13 @@ body { margin: 0; min-width: 320px; min-height: 100vh; -}`; +}""" - await global.activeSandbox.writeFiles([{ - path: 'src/index.css', - content: Buffer.from(indexCssContent) - }]); - - console.log('Auto-generated: src/index.css'); +with open(file_path, 'w') as f: + f.write(file_content) + +print(f"Auto-generated: {file_path}") + `); results.filesCreated.push('src/index.css (with Tailwind)'); } catch (error) { results.errors.push('Failed to create index.css with Tailwind'); @@ -496,24 +500,15 @@ body { // Execute commands for 
(const cmd of parsed.commands) {
       try {
-        // Parse command and arguments
-        const commandParts = cmd.trim().split(/\s+/);
-        const cmdName = commandParts[0];
-        const args = commandParts.slice(1);
-
-        // Execute command using Vercel Sandbox
-        const result = await global.activeSandbox.runCommand({
-          cmd: cmdName,
-          args
-        });
-
-        console.log(`Executed: ${cmd}`);
-        const stdout = await result.stdout();
-        const stderr = await result.stderr();
-
-        if (stdout) console.log(stdout);
-        if (stderr) console.log(`Errors: ${stderr}`);
-
+        await global.activeSandbox.runCode(`
+import subprocess
+import os
+os.chdir('/home/user/app')
+result = subprocess.run(${JSON.stringify(cmd.split(' '))}, capture_output=True, text=True)
+print(f"Executed: ${cmd}")
+print(result.stdout)
+if result.stderr:
+    print(f"Errors: {result.stderr}")
+        `);
         results.commandsExecuted.push(cmd);
       } catch (error) {
         results.errors.push(`Failed to execute ${cmd}: ${(error as Error).message}`);
diff --git a/app/api/create-ai-sandbox/route.ts b/app/api/create-ai-sandbox/route.ts
index 6fc9ebe..257ce1d 100644
--- a/app/api/create-ai-sandbox/route.ts
+++ b/app/api/create-ai-sandbox/route.ts
@@ -1,5 +1,5 @@
 import { NextResponse } from 'next/server';
-import { Sandbox } from '@vercel/sandbox';
+import { Sandbox } from '@e2b/code-interpreter';
 import type { SandboxState } from '@/types/sandbox';
 import { appConfig } from '@/config/app.config';
@@ -15,15 +15,15 @@ export async function POST() {
   let sandbox: any = null;

   try {
-    console.log('[create-ai-sandbox] Creating Vercel sandbox...');
+    console.log('[create-ai-sandbox] Creating base sandbox...');

     // Kill existing sandbox if any
     if (global.activeSandbox) {
-      console.log('[create-ai-sandbox] Stopping existing sandbox...');
+      console.log('[create-ai-sandbox] Killing existing sandbox...');
       try {
-        await global.activeSandbox.stop();
+        await global.activeSandbox.kill();
       } catch (e) {
-        console.error('Failed to stop existing sandbox:', e);
+        console.error('Failed to close existing
sandbox:', e); } global.activeSandbox = null; } @@ -35,102 +35,81 @@ export async function POST() { global.existingFiles = new Set(); } - // Create Vercel sandbox with flexible authentication - console.log(`[create-ai-sandbox] Creating Vercel sandbox with ${appConfig.vercelSandbox.timeoutMinutes} minute timeout...`); + // Create base sandbox - we'll set up Vite ourselves for full control + console.log(`[create-ai-sandbox] Creating base E2B sandbox with ${appConfig.e2b.timeoutMinutes} minute timeout...`); + sandbox = await Sandbox.create({ + apiKey: process.env.E2B_API_KEY, + timeoutMs: appConfig.e2b.timeoutMs + }); - // Prepare sandbox configuration - const sandboxConfig: any = { - timeout: appConfig.vercelSandbox.timeoutMs, - runtime: appConfig.vercelSandbox.runtime, - ports: [appConfig.vercelSandbox.devPort] - }; + const sandboxId = (sandbox as any).sandboxId || Date.now().toString(); + const host = (sandbox as any).getHost(appConfig.e2b.vitePort); - // Add authentication parameters if using personal access token - if (process.env.VERCEL_TOKEN && process.env.VERCEL_TEAM_ID && process.env.VERCEL_PROJECT_ID) { - console.log('[create-ai-sandbox] Using personal access token authentication'); - sandboxConfig.teamId = process.env.VERCEL_TEAM_ID; - sandboxConfig.projectId = process.env.VERCEL_PROJECT_ID; - sandboxConfig.token = process.env.VERCEL_TOKEN; - } else if (process.env.VERCEL_OIDC_TOKEN) { - console.log('[create-ai-sandbox] Using OIDC token authentication'); - } else { - console.log('[create-ai-sandbox] No authentication found - relying on default Vercel authentication'); - } - - sandbox = await Sandbox.create(sandboxConfig); - - const sandboxId = sandbox.sandboxId; console.log(`[create-ai-sandbox] Sandbox created: ${sandboxId}`); + console.log(`[create-ai-sandbox] Sandbox host: ${host}`); - // Set up a basic Vite React app + // Set up a basic Vite React app using Python to write files console.log('[create-ai-sandbox] Setting up Vite React app...'); - // First, 
change to the working directory - await sandbox.runCommand('pwd'); - const workDir = appConfig.vercelSandbox.workingDirectory; - - // Get the sandbox URL using the correct Vercel Sandbox API - const sandboxUrl = sandbox.domain(appConfig.vercelSandbox.devPort); - - // Extract the hostname from the sandbox URL for Vite config - const sandboxHostname = new URL(sandboxUrl).hostname; - console.log(`[create-ai-sandbox] Sandbox hostname: ${sandboxHostname}`); + // Write all files in a single Python script to avoid multiple executions + const setupScript = ` +import os +import json - // Create the Vite config content with the proper hostname (using string concatenation) - const viteConfigContent = `import { defineConfig } from 'vite' +print('Setting up React app with Vite and Tailwind...') + +# Create directory structure +os.makedirs('/home/user/app/src', exist_ok=True) + +# Package.json +package_json = { + "name": "sandbox-app", + "version": "1.0.0", + "type": "module", + "scripts": { + "dev": "vite --host", + "build": "vite build", + "preview": "vite preview" + }, + "dependencies": { + "react": "^18.2.0", + "react-dom": "^18.2.0" + }, + "devDependencies": { + "@vitejs/plugin-react": "^4.0.0", + "vite": "^4.3.9", + "tailwindcss": "^3.3.0", + "postcss": "^8.4.31", + "autoprefixer": "^10.4.16" + } +} + +with open('/home/user/app/package.json', 'w') as f: + json.dump(package_json, f, indent=2) +print('✓ package.json') + +# Vite config for E2B - with allowedHosts +vite_config = """import { defineConfig } from 'vite' import react from '@vitejs/plugin-react' -// Vercel Sandbox compatible Vite configuration +// E2B-compatible Vite configuration export default defineConfig({ plugins: [react()], server: { host: '0.0.0.0', - port: ${appConfig.vercelSandbox.devPort}, + port: 5173, strictPort: true, - hmr: true, - allowedHosts: [ - 'localhost', - '127.0.0.1', - '` + sandboxHostname + `', // Allow the Vercel Sandbox domain - '.vercel.run', // Allow all Vercel sandbox domains - 
'.vercel-sandbox.dev' // Fallback pattern - ] + hmr: false, + allowedHosts: ['.e2b.app', 'localhost', '127.0.0.1'] } -})`; +})""" - // Create the project files (now we have the sandbox hostname) - const projectFiles = [ - { - path: 'package.json', - content: Buffer.from(JSON.stringify({ - "name": "sandbox-app", - "version": "1.0.0", - "type": "module", - "scripts": { - "dev": "vite --host --port 3000", - "build": "vite build", - "preview": "vite preview" - }, - "dependencies": { - "react": "^18.2.0", - "react-dom": "^18.2.0" - }, - "devDependencies": { - "@vitejs/plugin-react": "^4.0.0", - "vite": "^4.3.9", - "tailwindcss": "^3.3.0", - "postcss": "^8.4.31", - "autoprefixer": "^10.4.16" - } - }, null, 2)) - }, - { - path: 'vite.config.js', - content: Buffer.from(viteConfigContent) - }, - { - path: 'tailwind.config.js', - content: Buffer.from(`/** @type {import('tailwindcss').Config} */ +with open('/home/user/app/vite.config.js', 'w') as f: + f.write(vite_config) +print('✓ vite.config.js') + +# Tailwind config - standard without custom design tokens +tailwind_config = """/** @type {import('tailwindcss').Config} */ export default { content: [ "./index.html", @@ -140,20 +119,26 @@ export default { extend: {}, }, plugins: [], -}`) - }, - { - path: 'postcss.config.js', - content: Buffer.from(`export default { +}""" + +with open('/home/user/app/tailwind.config.js', 'w') as f: + f.write(tailwind_config) +print('✓ tailwind.config.js') + +# PostCSS config +postcss_config = """export default { plugins: { tailwindcss: {}, autoprefixer: {}, }, -}`) - }, - { - path: 'index.html', - content: Buffer.from(` +}""" + +with open('/home/user/app/postcss.config.js', 'w') as f: + f.write(postcss_config) +print('✓ postcss.config.js') + +# Index.html +index_html = """ @@ -164,11 +149,14 @@ export default {
-`) - }, - { - path: 'src/main.jsx', - content: Buffer.from(`import React from 'react' +""" + +with open('/home/user/app/index.html', 'w') as f: + f.write(index_html) +print('✓ index.html') + +# Main.jsx +main_jsx = """import React from 'react' import ReactDOM from 'react-dom/client' import App from './App.jsx' import './index.css' @@ -177,18 +165,19 @@ ReactDOM.createRoot(document.getElementById('root')).render( , -)`) - }, - { - path: 'src/App.jsx', - content: Buffer.from(`function App() { +)""" + +with open('/home/user/app/src/main.jsx', 'w') as f: + f.write(main_jsx) +print('✓ src/main.jsx') + +# App.jsx with explicit Tailwind test +app_jsx = """function App() { return (
-

- Sandbox Ready -

+ Sandbox Ready
Start building your React app with Vite and Tailwind CSS!

@@ -196,11 +185,14 @@ ReactDOM.createRoot(document.getElementById('root')).render( ) } -export default App`) - }, - { - path: 'src/index.css', - content: Buffer.from(`@tailwind base; +export default App""" + +with open('/home/user/app/src/App.jsx', 'w') as f: + f.write(app_jsx) +print('✓ src/App.jsx') + +# Index.css with explicit Tailwind directives +index_css = """@tailwind base; @tailwind components; @tailwind utilities; @@ -224,53 +216,99 @@ export default App`) body { font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, sans-serif; background-color: rgb(17 24 39); -}`) - } - ]; +}""" - // Create directory structure first - await sandbox.runCommand({ - cmd: 'mkdir', - args: ['-p', 'src'] - }); - - // Write all files - await sandbox.writeFiles(projectFiles); - console.log('[create-ai-sandbox] ✓ Project files created'); +with open('/home/user/app/src/index.css', 'w') as f: + f.write(index_css) +print('✓ src/index.css') + +print('\\nAll files created successfully!') +`; + + // Execute the setup script + await sandbox.runCode(setupScript); // Install dependencies console.log('[create-ai-sandbox] Installing dependencies...'); - const installResult = await sandbox.runCommand({ - cmd: 'npm', - args: ['install', '--loglevel', 'info'] - }); - if (installResult.exitCode === 0) { - console.log('[create-ai-sandbox] ✓ Dependencies installed successfully'); - } else { - console.log('[create-ai-sandbox] ⚠ Warning: npm install had issues but continuing...'); - } + await sandbox.runCode(` +import subprocess +import sys + +print('Installing npm packages...') +result = subprocess.run( + ['npm', 'install'], + cwd='/home/user/app', + capture_output=True, + text=True +) + +if result.returncode == 0: + print('✓ Dependencies installed successfully') +else: + print(f'⚠ Warning: npm install had issues: {result.stderr}') + # Continue anyway as it might still work + `); - // Start Vite dev server in detached mode + // Start Vite dev server 
console.log('[create-ai-sandbox] Starting Vite dev server...'); - const viteProcess = await sandbox.runCommand({ - cmd: 'npm', - args: ['run', 'dev'], - detached: true - }); - - console.log('[create-ai-sandbox] ✓ Vite dev server started'); + await sandbox.runCode(` +import subprocess +import os +import time + +os.chdir('/home/user/app') + +# Kill any existing Vite processes +subprocess.run(['pkill', '-f', 'vite'], capture_output=True) +time.sleep(1) + +# Start Vite dev server +env = os.environ.copy() +env['FORCE_COLOR'] = '0' + +process = subprocess.Popen( + ['npm', 'run', 'dev'], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=env +) + +print(f'✓ Vite dev server started with PID: {process.pid}') +print('Waiting for server to be ready...') + `); // Wait for Vite to be fully ready - await new Promise(resolve => setTimeout(resolve, appConfig.vercelSandbox.devServerStartupDelay)); + await new Promise(resolve => setTimeout(resolve, appConfig.e2b.viteStartupDelay)); + + // Force Tailwind CSS to rebuild by touching the CSS file + await sandbox.runCode(` +import os +import time + +# Touch the CSS file to trigger rebuild +css_file = '/home/user/app/src/index.css' +if os.path.exists(css_file): + os.utime(css_file, None) + print('✓ Triggered CSS rebuild') + +# Also ensure PostCSS processes it +time.sleep(2) +print('✓ Tailwind CSS should be loaded') + `); // Store sandbox globally global.activeSandbox = sandbox; global.sandboxData = { sandboxId, - url: sandboxUrl, - viteProcess + url: `https://${host}` }; + // Set extended timeout on the sandbox instance if method available + if (typeof sandbox.setTimeout === 'function') { + sandbox.setTimeout(appConfig.e2b.timeoutMs); + console.log(`[create-ai-sandbox] Set sandbox timeout to ${appConfig.e2b.timeoutMinutes} minutes`); + } + // Initialize sandbox state global.sandboxState = { fileCache: { @@ -281,7 +319,7 @@ body { sandbox, sandboxData: { sandboxId, - url: sandboxUrl + url: `https://${host}` } }; @@ -295,13 +333,13 
@@ body { global.existingFiles.add('tailwind.config.js'); global.existingFiles.add('postcss.config.js'); - console.log('[create-ai-sandbox] Sandbox ready at:', sandboxUrl); + console.log('[create-ai-sandbox] Sandbox ready at:', `https://${host}`); return NextResponse.json({ success: true, sandboxId, - url: sandboxUrl, - message: 'Vercel sandbox created and Vite React app initialized' + url: `https://${host}`, + message: 'Sandbox created and Vite React app initialized' }); } catch (error) { @@ -310,9 +348,9 @@ body { // Clean up on error if (sandbox) { try { - await sandbox.stop(); + await sandbox.kill(); } catch (e) { - console.error('Failed to stop sandbox on error:', e); + console.error('Failed to close sandbox on error:', e); } } diff --git a/app/api/create-zip/route.ts b/app/api/create-zip/route.ts index 2030a39..221c843 100644 --- a/app/api/create-zip/route.ts +++ b/app/api/create-zip/route.ts @@ -15,37 +15,41 @@ export async function POST(request: NextRequest) { console.log('[create-zip] Creating project zip...'); - // Create zip file in sandbox using standard commands - const zipResult = await global.activeSandbox.runCommand({ - cmd: 'bash', - args: ['-c', `zip -r /tmp/project.zip . 
-x "node_modules/*" ".git/*" ".next/*" "dist/*" "build/*" "*.log"`] - }); - - if (zipResult.exitCode !== 0) { - const error = await zipResult.stderr(); - throw new Error(`Failed to create zip: ${error}`); - } - - const sizeResult = await global.activeSandbox.runCommand({ - cmd: 'bash', - args: ['-c', `ls -la /tmp/project.zip | awk '{print $5}'`] - }); - - const fileSize = await sizeResult.stdout(); - console.log(`[create-zip] Created project.zip (${fileSize.trim()} bytes)`); + // Create zip file in sandbox + const result = await global.activeSandbox.runCode(` +import zipfile +import os +import json + +os.chdir('/home/user/app') + +# Create zip file +with zipfile.ZipFile('/tmp/project.zip', 'w', zipfile.ZIP_DEFLATED) as zipf: + for root, dirs, files in os.walk('.'): + # Skip node_modules and .git + dirs[:] = [d for d in dirs if d not in ['node_modules', '.git', '.next', 'dist']] + + for file in files: + file_path = os.path.join(root, file) + arcname = os.path.relpath(file_path, '.') + zipf.write(file_path, arcname) + +# Get file size +file_size = os.path.getsize('/tmp/project.zip') +print(f" Created project.zip ({file_size} bytes)") + `); // Read the zip file and convert to base64 - const readResult = await global.activeSandbox.runCommand({ - cmd: 'base64', - args: ['/tmp/project.zip'] - }); + const readResult = await global.activeSandbox.runCode(` +import base64 + +with open('/tmp/project.zip', 'rb') as f: + content = f.read() + encoded = base64.b64encode(content).decode('utf-8') + print(encoded) + `); - if (readResult.exitCode !== 0) { - const error = await readResult.stderr(); - throw new Error(`Failed to read zip file: ${error}`); - } - - const base64Content = (await readResult.stdout()).trim(); + const base64Content = readResult.logs.stdout.join('').trim(); // Create a data URL for download const dataUrl = `data:application/zip;base64,${base64Content}`; @@ -53,18 +57,15 @@ export async function POST(request: NextRequest) { return NextResponse.json({ success: 
true, dataUrl, - fileName: 'vercel-sandbox-project.zip', + fileName: 'e2b-project.zip', message: 'Zip file created successfully' }); } catch (error) { console.error('[create-zip] Error:', error); - return NextResponse.json( - { - success: false, - error: (error as Error).message - }, - { status: 500 } - ); + return NextResponse.json({ + success: false, + error: (error as Error).message + }, { status: 500 }); } } \ No newline at end of file diff --git a/app/api/detect-and-install-packages/route.ts b/app/api/detect-and-install-packages/route.ts index facbd51..12211b6 100644 --- a/app/api/detect-and-install-packages/route.ts +++ b/app/api/detect-and-install-packages/route.ts @@ -64,7 +64,15 @@ export async function POST(request: NextRequest) { const builtins = ['fs', 'path', 'http', 'https', 'crypto', 'stream', 'util', 'os', 'url', 'querystring', 'child_process']; if (builtins.includes(imp)) return false; - return true; + // Extract package name (handle scoped packages and subpaths) + const parts = imp.split('/'); + if (imp.startsWith('@')) { + // Scoped package like @vitejs/plugin-react + return true; + } else { + // Regular package, return just the first part + return true; + } }); // Extract just the package names (without subpaths) @@ -93,89 +101,153 @@ export async function POST(request: NextRequest) { } // Check which packages are already installed - const installed: string[] = []; - const missing: string[] = []; + const checkResult = await global.activeSandbox.runCode(` +import os +import json + +installed = [] +missing = [] + +packages = ${JSON.stringify(uniquePackages)} + +for package in packages: + # Handle scoped packages + if package.startswith('@'): + package_path = f"/home/user/app/node_modules/{package}" + else: + package_path = f"/home/user/app/node_modules/{package}" - for (const packageName of uniquePackages) { - try { - const checkResult = await global.activeSandbox.runCommand({ - cmd: 'test', - args: ['-d', `node_modules/${packageName}`] - }); - - 
if (checkResult.exitCode === 0) { - installed.push(packageName); - } else { - missing.push(packageName); - } - } catch (error) { - // If test command fails, assume package is missing - missing.push(packageName); - } - } + if os.path.exists(package_path): + installed.append(package) + else: + missing.append(package) - console.log('[detect-and-install-packages] Package status:', { installed, missing }); +result = { + 'installed': installed, + 'missing': missing +} - if (missing.length === 0) { +print(json.dumps(result)) + `); + + const status = JSON.parse(checkResult.logs.stdout.join('')); + console.log('[detect-and-install-packages] Package status:', status); + + if (status.missing.length === 0) { return NextResponse.json({ success: true, packagesInstalled: [], - packagesAlreadyInstalled: installed, + packagesAlreadyInstalled: status.installed, message: 'All packages already installed' }); } // Install missing packages - console.log('[detect-and-install-packages] Installing packages:', missing); + console.log('[detect-and-install-packages] Installing packages:', status.missing); - const installResult = await global.activeSandbox.runCommand({ - cmd: 'npm', - args: ['install', '--save', ...missing] - }); + const installResult = await global.activeSandbox.runCode(` +import subprocess +import os +import json - const stdout = await installResult.stdout(); - const stderr = await installResult.stderr(); +os.chdir('/home/user/app') +packages_to_install = ${JSON.stringify(status.missing)} + +# Join packages into a single install command +packages_str = ' '.join(packages_to_install) +cmd = f'npm install {packages_str} --save' + +print(f"Running: {cmd}") + +# Run npm install with explicit save flag +result = subprocess.run(['npm', 'install', '--save'] + packages_to_install, + capture_output=True, + text=True, + cwd='/home/user/app', + timeout=60) + +print("stdout:", result.stdout) +if result.stderr: + print("stderr:", result.stderr) + +# Verify installation +installed = [] 
+failed = [] + +for package in packages_to_install: + # Handle scoped packages correctly + if package.startswith('@'): + # For scoped packages like @heroicons/react + package_path = f"/home/user/app/node_modules/{package}" + else: + package_path = f"/home/user/app/node_modules/{package}" - console.log('[detect-and-install-packages] Install stdout:', stdout); - if (stderr) { - console.log('[detect-and-install-packages] Install stderr:', stderr); - } - - // Verify installation - const finalInstalled: string[] = []; - const failed: string[] = []; - - for (const packageName of missing) { - try { - const verifyResult = await global.activeSandbox.runCommand({ - cmd: 'test', - args: ['-d', `node_modules/${packageName}`] - }); + if os.path.exists(package_path): + installed.append(package) + print(f"✓ Verified installation of {package}") + else: + # Check if it's a submodule of an installed package + base_package = package.split('/')[0] + if package.startswith('@'): + # For @scope/package, the base is @scope/package + base_package = '/'.join(package.split('/')[:2]) - if (verifyResult.exitCode === 0) { - finalInstalled.push(packageName); - console.log(`✓ Verified installation of ${packageName}`); + base_path = f"/home/user/app/node_modules/{base_package}" + if os.path.exists(base_path): + installed.append(package) + print(f"✓ Verified installation of {package} (via {base_package})") + else: + failed.append(package) + print(f"✗ Failed to verify installation of {package}") + +result_data = { + 'installed': installed, + 'failed': failed, + 'returncode': result.returncode +} + +print("\\nResult:", json.dumps(result_data)) + `, { timeout: 60000 }); + + // Parse the result more safely + let installStatus; + try { + const stdout = installResult.logs.stdout.join(''); + const resultMatch = stdout.match(/Result:\s*({.*})/); + if (resultMatch) { + installStatus = JSON.parse(resultMatch[1]); + } else { + // Fallback parsing + const lines = stdout.split('\n'); + const resultLine = 
lines.find((line: string) => line.includes('Result:')); + if (resultLine) { + installStatus = JSON.parse(resultLine.split('Result:')[1].trim()); } else { - failed.push(packageName); - console.log(`✗ Failed to verify installation of ${packageName}`); + throw new Error('Could not find Result in output'); } - } catch (error) { - failed.push(packageName); - console.log(`✗ Error verifying ${packageName}:`, error); } + } catch (parseError) { + console.error('[detect-and-install-packages] Failed to parse install result:', parseError); + console.error('[detect-and-install-packages] stdout:', installResult.logs.stdout.join('')); + // Fallback to assuming all packages were installed + installStatus = { + installed: status.missing, + failed: [], + returncode: 0 + }; } - if (failed.length > 0) { - console.error('[detect-and-install-packages] Failed to install:', failed); + if (installStatus.failed.length > 0) { + console.error('[detect-and-install-packages] Failed to install:', installStatus.failed); } return NextResponse.json({ success: true, - packagesInstalled: finalInstalled, - packagesFailed: failed, - packagesAlreadyInstalled: installed, - message: `Installed ${finalInstalled.length} packages`, - logs: stdout + packagesInstalled: installStatus.installed, + packagesFailed: installStatus.failed, + packagesAlreadyInstalled: status.installed, + message: `Installed ${installStatus.installed.length} packages`, + logs: installResult.logs.stdout.join('\n') }); } catch (error) { diff --git a/app/api/generate-ai-code-stream/route.ts b/app/api/generate-ai-code-stream/route.ts index 9a7fa4a..eaae15d 100644 --- a/app/api/generate-ai-code-stream/route.ts +++ b/app/api/generate-ai-code-stream/route.ts @@ -11,9 +11,6 @@ import { FileManifest } from '@/types/file-manifest'; import type { ConversationState, ConversationMessage, ConversationEdit } from '@/types/conversation'; import { appConfig } from '@/config/app.config'; -// Force dynamic route to enable streaming -export const dynamic 
= 'force-dynamic'; - const groq = createGroq({ apiKey: process.env.GROQ_API_KEY, }); @@ -1159,21 +1156,9 @@ CRITICAL: When files are provided in the context: const isGoogle = model.startsWith('google/'); const isOpenAI = model.startsWith('openai/gpt-5'); const modelProvider = isAnthropic ? anthropic : (isOpenAI ? openai : (isGoogle ? googleGenerativeAI : groq)); - - // Fix model name transformation for different providers - let actualModel: string; - if (isAnthropic) { - actualModel = model.replace('anthropic/', ''); - } else if (model === 'openai/gpt-5') { - actualModel = 'gpt-5'; - } else if (isGoogle) { - // Google uses specific model names - convert our naming to theirs - actualModel = model.replace('google/', ''); - } else { - actualModel = model; - } - - console.log(`[generate-ai-code-stream] Using provider: ${isAnthropic ? 'Anthropic' : isGoogle ? 'Google' : isOpenAI ? 'OpenAI' : 'Groq'}, model: ${actualModel}`); + const actualModel = isAnthropic ? model.replace('anthropic/', '') : + (model === 'openai/gpt-5') ? 'gpt-5' : + (isGoogle ? model.replace('google/', '') : model); // Make streaming API call with appropriate provider const streamOptions: any = { @@ -1258,28 +1243,7 @@ It's better to have 3 complete files than 10 incomplete files.` }; } - let result; - try { - result = await streamText(streamOptions); - } catch (streamError) { - console.error('[generate-ai-code-stream] Error calling streamText:', streamError); - - // Send specific error for debugging - await sendProgress({ - type: 'error', - message: `Failed to initialize ${isGoogle ? 'Gemini' : isAnthropic ? 'Claude' : isOpenAI ? 'GPT-5' : 'Groq'} streaming: ${(streamError as Error).message}` - }); - - // If this is a Google model error, provide helpful info - if (isGoogle) { - await sendProgress({ - type: 'info', - message: 'Tip: Make sure your GEMINI_API_KEY is set correctly and has proper permissions.' 
- }); - } - - throw streamError; - } + const result = await streamText(streamOptions); // Stream the response and parse in real-time let generatedCode = ''; @@ -1751,18 +1715,12 @@ Provide the complete file content without any truncation. Include all necessary } })(); - // Return the stream with proper headers for streaming support + // Return the stream return new Response(stream.readable, { headers: { 'Content-Type': 'text/event-stream', 'Cache-Control': 'no-cache', 'Connection': 'keep-alive', - 'Transfer-Encoding': 'chunked', - 'Content-Encoding': 'none', // Prevent compression that can break streaming - 'X-Accel-Buffering': 'no', // Disable nginx buffering - 'Access-Control-Allow-Origin': '*', - 'Access-Control-Allow-Methods': 'GET, POST, OPTIONS', - 'Access-Control-Allow-Headers': 'Content-Type, Authorization', }, }); diff --git a/app/api/get-sandbox-files/route.ts b/app/api/get-sandbox-files/route.ts index 9dad8b1..d892046 100644 --- a/app/api/get-sandbox-files/route.ts +++ b/app/api/get-sandbox-files/route.ts @@ -18,81 +18,58 @@ export async function GET() { console.log('[get-sandbox-files] Fetching and analyzing file structure...'); - // Get list of all relevant files - const findResult = await global.activeSandbox.runCommand({ - cmd: 'find', - args: [ - '.', - '-name', 'node_modules', '-prune', '-o', - '-name', '.git', '-prune', '-o', - '-name', 'dist', '-prune', '-o', - '-name', 'build', '-prune', '-o', - '-type', 'f', - '(', - '-name', '*.jsx', - '-o', '-name', '*.js', - '-o', '-name', '*.tsx', - '-o', '-name', '*.ts', - '-o', '-name', '*.css', - '-o', '-name', '*.json', - ')', - '-print' - ] - }); + // Get all React/JS/CSS files + const result = await global.activeSandbox.runCode(` +import os +import json + +def get_files_content(directory='/home/user/app', extensions=['.jsx', '.js', '.tsx', '.ts', '.css', '.json']): + files_content = {} - if (findResult.exitCode !== 0) { - throw new Error('Failed to list files'); - } - - const fileList = (await 
findResult.stdout()).split('\n').filter(f => f.trim()); - console.log('[get-sandbox-files] Found', fileList.length, 'files'); - - // Read content of each file (limit to reasonable sizes) - const filesContent: Record = {}; - - for (const filePath of fileList) { - try { - // Check file size first - const statResult = await global.activeSandbox.runCommand({ - cmd: 'stat', - args: ['-f', '%z', filePath] - }); + for root, dirs, files in os.walk(directory): + # Skip node_modules and other unwanted directories + dirs[:] = [d for d in dirs if d not in ['node_modules', '.git', 'dist', 'build']] - if (statResult.exitCode === 0) { - const fileSize = parseInt(await statResult.stdout()); - - // Only read files smaller than 10KB - if (fileSize < 10000) { - const catResult = await global.activeSandbox.runCommand({ - cmd: 'cat', - args: [filePath] - }); - - if (catResult.exitCode === 0) { - const content = await catResult.stdout(); - // Remove leading './' from path - const relativePath = filePath.replace(/^\.\//, ''); - filesContent[relativePath] = content; - } - } - } - } catch (error) { - // Skip files that can't be read - continue; - } - } + for file in files: + if any(file.endswith(ext) for ext in extensions): + file_path = os.path.join(root, file) + relative_path = os.path.relpath(file_path, '/home/user/app') + + try: + with open(file_path, 'r') as f: + content = f.read() + # Only include files under 10KB to avoid huge responses + if len(content) < 10000: + files_content[relative_path] = content + except: + pass - // Get directory structure - const treeResult = await global.activeSandbox.runCommand({ - cmd: 'find', - args: ['.', '-type', 'd', '-not', '-path', '*/node_modules*', '-not', '-path', '*/.git*'] - }); - - let structure = ''; - if (treeResult.exitCode === 0) { - const dirs = (await treeResult.stdout()).split('\n').filter(d => d.trim()); - structure = dirs.slice(0, 50).join('\n'); // Limit to 50 lines - } + return files_content + +# Get the files +files = 
get_files_content() + +# Also get the directory structure +structure = [] +for root, dirs, files in os.walk('/home/user/app'): + level = root.replace('/home/user/app', '').count(os.sep) + indent = ' ' * 2 * level + structure.append(f"{indent}{os.path.basename(root)}/") + sub_indent = ' ' * 2 * (level + 1) + for file in files: + if not any(skip in root for skip in ['node_modules', '.git', 'dist', 'build']): + structure.append(f"{sub_indent}{file}") + +result = { + 'files': files, + 'structure': '\\n'.join(structure[:50]) # Limit structure to 50 lines +} + +print(json.dumps(result)) + `); + + const output = result.logs.stdout.join(''); + const parsedResult = JSON.parse(output); // Build enhanced file manifest const fileManifest: FileManifest = { @@ -105,12 +82,12 @@ export async function GET() { }; // Process each file - for (const [relativePath, content] of Object.entries(filesContent)) { - const fullPath = `/${relativePath}`; + for (const [relativePath, content] of Object.entries(parsedResult.files)) { + const fullPath = `/home/user/app/${relativePath}`; // Create base file info const fileInfo: FileInfo = { - content: content, + content: content as string, type: 'utility', path: fullPath, relativePath, @@ -119,7 +96,7 @@ export async function GET() { // Parse JavaScript/JSX files if (relativePath.match(/\.(jsx?|tsx?)$/)) { - const parseResult = parseJavaScriptFile(content, fullPath); + const parseResult = parseJavaScriptFile(content as string, fullPath); Object.assign(fileInfo, parseResult); // Identify entry point @@ -155,9 +132,9 @@ export async function GET() { return NextResponse.json({ success: true, - files: filesContent, - structure, - fileCount: Object.keys(filesContent).length, + files: parsedResult.files, + structure: parsedResult.structure, + fileCount: Object.keys(parsedResult.files).length, manifest: fileManifest, }); diff --git a/app/api/install-packages/route.ts b/app/api/install-packages/route.ts index dd8eb82..59d305e 100644 --- 
a/app/api/install-packages/route.ts +++ b/app/api/install-packages/route.ts @@ -1,4 +1,5 @@ import { NextRequest, NextResponse } from 'next/server'; +import { Sandbox } from '@e2b/code-interpreter'; declare global { var activeSandbox: any; @@ -35,8 +36,23 @@ export async function POST(request: NextRequest) { console.log(`[install-packages] Cleaned:`, validPackages); } - // Get active sandbox - const sandbox = global.activeSandbox; + // Try to get sandbox - either from global or reconnect + let sandbox = global.activeSandbox; + + if (!sandbox && sandboxId) { + console.log(`[install-packages] Reconnecting to sandbox ${sandboxId}...`); + try { + sandbox = await Sandbox.connect(sandboxId, { apiKey: process.env.E2B_API_KEY }); + global.activeSandbox = sandbox; + console.log(`[install-packages] Successfully reconnected to sandbox ${sandboxId}`); + } catch (error) { + console.error(`[install-packages] Failed to reconnect to sandbox:`, error); + return NextResponse.json({ + success: false, + error: `Failed to reconnect to sandbox: ${(error as Error).message}` + }, { status: 500 }); + } + } if (!sandbox) { return NextResponse.json({ @@ -45,7 +61,7 @@ export async function POST(request: NextRequest) { }, { status: 400 }); } - console.log('[install-packages] Installing packages:', validPackages); + console.log('[install-packages] Installing packages:', packages); // Create a response stream for real-time updates const encoder = new TextEncoder(); @@ -67,20 +83,23 @@ export async function POST(request: NextRequest) { packages: validPackages }); - // Stop any existing development server first + // Kill any existing Vite process first await sendProgress({ type: 'status', message: 'Stopping development server...' 
}); - try { - // Try to kill any running dev server processes - await sandboxInstance.runCommand({ - cmd: 'pkill', - args: ['-f', 'vite'] - }); - await new Promise(resolve => setTimeout(resolve, 1000)); // Wait a bit - } catch (error) { - // It's OK if no process is found - console.log('[install-packages] No existing dev server found'); - } + await sandboxInstance.runCode(` +import subprocess +import os +import signal + +# Try to kill any existing Vite process +try: + with open('/tmp/vite-process.pid', 'r') as f: + pid = int(f.read().strip()) + os.kill(pid, signal.SIGTERM) + print("Stopped existing Vite process") +except: + print("No existing Vite process found") + `); // Check which packages are already installed await sendProgress({ @@ -88,51 +107,70 @@ export async function POST(request: NextRequest) { message: 'Checking installed packages...' }); + const checkResult = await sandboxInstance.runCode(` +import os +import json + +os.chdir('/home/user/app') + +# Read package.json to check installed packages +try: + with open('package.json', 'r') as f: + package_json = json.load(f) + + dependencies = package_json.get('dependencies', {}) + dev_dependencies = package_json.get('devDependencies', {}) + all_deps = {**dependencies, **dev_dependencies} + + # Check which packages need to be installed + packages_to_check = ${JSON.stringify(validPackages)} + already_installed = [] + need_install = [] + + for pkg in packages_to_check: + # Handle scoped packages + if pkg.startswith('@'): + pkg_name = pkg + else: + # Extract package name without version + pkg_name = pkg.split('@')[0] + + if pkg_name in all_deps: + already_installed.append(pkg_name) + else: + need_install.append(pkg) + + print(f"Already installed: {already_installed}") + print(f"Need to install: {need_install}") + print(f"NEED_INSTALL:{json.dumps(need_install)}") + +except Exception as e: + print(f"Error checking packages: {e}") + print(f"NEED_INSTALL:{json.dumps(packages_to_check)}") + `); + + // Parse packages 
that need installation let packagesToInstall = validPackages; - try { - // Read package.json to check existing dependencies - const catResult = await sandboxInstance.runCommand({ - cmd: 'cat', - args: ['package.json'] - }); - if (catResult.exitCode === 0) { - const packageJsonContent = await catResult.stdout(); - const packageJson = JSON.parse(packageJsonContent); - - const dependencies = packageJson.dependencies || {}; - const devDependencies = packageJson.devDependencies || {}; - const allDeps = { ...dependencies, ...devDependencies }; - - const alreadyInstalled = []; - const needInstall = []; - - for (const pkg of validPackages) { - // Handle scoped packages - const pkgName = pkg.startsWith('@') ? pkg : pkg.split('@')[0]; - - if (allDeps[pkgName]) { - alreadyInstalled.push(pkgName); - } else { - needInstall.push(pkg); + // Check if checkResult has the expected structure + if (checkResult && checkResult.results && checkResult.results[0] && checkResult.results[0].text) { + const outputLines = checkResult.results[0].text.split('\n'); + for (const line of outputLines) { + if (line.startsWith('NEED_INSTALL:')) { + try { + packagesToInstall = JSON.parse(line.substring('NEED_INSTALL:'.length)); + } catch (e) { + console.error('Failed to parse packages to install:', e); } } - - packagesToInstall = needInstall; - - if (alreadyInstalled.length > 0) { - await sendProgress({ - type: 'info', - message: `Already installed: ${alreadyInstalled.join(', ')}` - }); - } } - } catch (error) { - console.error('[install-packages] Error checking existing packages:', error); + } else { + console.error('[install-packages] Invalid checkResult structure:', checkResult); // If we can't check, just try to install all packages packagesToInstall = validPackages; } + if (packagesToInstall.length === 0) { await sendProgress({ type: 'success', @@ -140,104 +178,164 @@ export async function POST(request: NextRequest) { installedPackages: [], alreadyInstalled: validPackages }); - - // Restart dev 
server - await sendProgress({ type: 'status', message: 'Restarting development server...' }); - - const devServerProcess = await sandboxInstance.runCommand({ - cmd: 'npm', - args: ['run', 'dev'], - detached: true - }); - - await sendProgress({ - type: 'complete', - message: 'Dev server restarted!', - installedPackages: [] - }); - return; } // Install only packages that aren't already installed + const packageList = packagesToInstall.join(' '); + // Only send the npm install command message if we're actually installing new packages await sendProgress({ type: 'info', message: `Installing ${packagesToInstall.length} new package(s): ${packagesToInstall.join(', ')}` }); - // Run npm install - const installArgs = ['install', '--legacy-peer-deps', ...packagesToInstall]; - const installResult = await sandboxInstance.runCommand({ - cmd: 'npm', - args: installArgs - }); + const installResult = await sandboxInstance.runCode(` +import subprocess +import os + +os.chdir('/home/user/app') + +# Run npm install with output capture +packages_to_install = ${JSON.stringify(packagesToInstall)} +cmd_args = ['npm', 'install', '--legacy-peer-deps'] + packages_to_install + +print(f"Running command: {' '.join(cmd_args)}") + +process = subprocess.Popen( + cmd_args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True +) + +# Stream output +while True: + output = process.stdout.readline() + if output == '' and process.poll() is not None: + break + if output: + print(output.strip()) + +# Get the return code +rc = process.poll() + +# Capture any stderr +stderr = process.stderr.read() +if stderr: + print("STDERR:", stderr) + if 'ERESOLVE' in stderr: + print("ERESOLVE_ERROR: Dependency conflict detected - using --legacy-peer-deps flag") + +print(f"\\nInstallation completed with code: {rc}") + +# Verify packages were installed +import json +with open('/home/user/app/package.json', 'r') as f: + package_json = json.load(f) + +installed = [] +for pkg in 
${JSON.stringify(packagesToInstall)}: + if pkg in package_json.get('dependencies', {}): + installed.append(pkg) + print(f"✓ Verified {pkg}") + else: + print(f"✗ Package {pkg} not found in dependencies") - // Get install output - const stdout = await installResult.stdout(); - const stderr = await installResult.stderr(); +print(f"\\nVerified installed packages: {installed}") + `, { timeout: 60000 }); // 60 second timeout for npm install - if (stdout) { - const lines = stdout.split('\n').filter(line => line.trim()); - for (const line of lines) { - if (line.includes('npm WARN')) { - await sendProgress({ type: 'warning', message: line }); - } else if (line.trim()) { - await sendProgress({ type: 'output', message: line }); + // Send npm output + const output = installResult?.output || installResult?.logs?.stdout?.join('\n') || ''; + const npmOutputLines = output.split('\n').filter((line: string) => line.trim()); + for (const line of npmOutputLines) { + if (line.includes('STDERR:')) { + const errorMsg = line.replace('STDERR:', '').trim(); + if (errorMsg && errorMsg !== 'undefined') { + await sendProgress({ type: 'error', message: errorMsg }); } + } else if (line.includes('ERESOLVE_ERROR:')) { + const msg = line.replace('ERESOLVE_ERROR:', '').trim(); + await sendProgress({ + type: 'warning', + message: `Dependency conflict resolved with --legacy-peer-deps: ${msg}` + }); + } else if (line.includes('npm WARN')) { + await sendProgress({ type: 'warning', message: line }); + } else if (line.trim() && !line.includes('undefined')) { + await sendProgress({ type: 'output', message: line }); } } - if (stderr) { - const errorLines = stderr.split('\n').filter(line => line.trim()); - for (const line of errorLines) { - if (line.includes('ERESOLVE')) { - await sendProgress({ - type: 'warning', - message: `Dependency conflict resolved with --legacy-peer-deps: ${line}` - }); - } else if (line.trim()) { - await sendProgress({ type: 'error', message: line }); - } - } + // Check if 
installation was successful + const installedMatch = output.match(/Verified installed packages: \[(.*?)\]/); + let installedPackages: string[] = []; + + if (installedMatch && installedMatch[1]) { + installedPackages = installedMatch[1] + .split(',') + .map((p: string) => p.trim().replace(/'/g, '')) + .filter((p: string) => p.length > 0); } - if (installResult.exitCode === 0) { + if (installedPackages.length > 0) { await sendProgress({ type: 'success', - message: `Successfully installed: ${packagesToInstall.join(', ')}`, - installedPackages: packagesToInstall + message: `Successfully installed: ${installedPackages.join(', ')}`, + installedPackages }); } else { await sendProgress({ type: 'error', - message: 'Package installation failed' + message: 'Failed to verify package installation' }); } - // Restart development server + // Restart Vite dev server await sendProgress({ type: 'status', message: 'Restarting development server...' }); - try { - const devServerProcess = await sandboxInstance.runCommand({ - cmd: 'npm', - args: ['run', 'dev'], - detached: true - }); - - // Wait a bit for the server to start - await new Promise(resolve => setTimeout(resolve, 3000)); - - await sendProgress({ - type: 'complete', - message: 'Package installation complete and dev server restarted!', - installedPackages: packagesToInstall - }); - } catch (error) { - await sendProgress({ - type: 'error', - message: `Failed to restart dev server: ${(error as Error).message}` - }); - } + await sandboxInstance.runCode(` +import subprocess +import os +import time + +os.chdir('/home/user/app') + +# Kill any existing Vite processes +subprocess.run(['pkill', '-f', 'vite'], capture_output=True) +time.sleep(1) + +# Start Vite dev server +env = os.environ.copy() +env['FORCE_COLOR'] = '0' + +process = subprocess.Popen( + ['npm', 'run', 'dev'], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=env +) + +print(f'✓ Vite dev server restarted with PID: {process.pid}') + +# Store process info for 
later +with open('/tmp/vite-process.pid', 'w') as f: + f.write(str(process.pid)) + +# Wait a bit for Vite to start up +time.sleep(3) + +# Touch files to trigger Vite reload +subprocess.run(['touch', '/home/user/app/package.json']) +subprocess.run(['touch', '/home/user/app/vite.config.js']) + +print("Vite restarted and should now recognize all packages") + `); + + await sendProgress({ + type: 'complete', + message: 'Package installation complete and dev server restarted!', + installedPackages + }); } catch (error) { const errorMessage = (error as Error).message; diff --git a/app/api/kill-sandbox/route.ts b/app/api/kill-sandbox/route.ts index accaf7e..70d005a 100644 --- a/app/api/kill-sandbox/route.ts +++ b/app/api/kill-sandbox/route.ts @@ -8,18 +8,18 @@ declare global { export async function POST() { try { - console.log('[kill-sandbox] Stopping active sandbox...'); + console.log('[kill-sandbox] Killing active sandbox...'); let sandboxKilled = false; - // Stop existing sandbox if any + // Kill existing sandbox if any if (global.activeSandbox) { try { - await global.activeSandbox.stop(); + await global.activeSandbox.close(); sandboxKilled = true; - console.log('[kill-sandbox] Sandbox stopped successfully'); + console.log('[kill-sandbox] Sandbox closed successfully'); } catch (e) { - console.error('[kill-sandbox] Failed to stop sandbox:', e); + console.error('[kill-sandbox] Failed to close sandbox:', e); } global.activeSandbox = null; global.sandboxData = null; diff --git a/app/api/monitor-vite-logs/route.ts b/app/api/monitor-vite-logs/route.ts index 3cb2a9b..ef537f0 100644 --- a/app/api/monitor-vite-logs/route.ts +++ b/app/api/monitor-vite-logs/route.ts @@ -15,100 +15,97 @@ export async function GET() { console.log('[monitor-vite-logs] Checking Vite process logs...'); - const errors: any[] = []; + // Check both the error file and recent logs + const result = await global.activeSandbox.runCode(` +import json +import subprocess +import re + +errors = [] + +# First check 
the error file +try: + with open('/tmp/vite-errors.json', 'r') as f: + data = json.load(f) + errors.extend(data.get('errors', [])) +except: + pass + +# Also check if we can get recent Vite logs +try: + # Try to get the Vite process PID + with open('/tmp/vite-process.pid', 'r') as f: + pid = int(f.read().strip()) - // Check if there's an error file from previous runs - try { - const catResult = await global.activeSandbox.runCommand({ - cmd: 'cat', - args: ['/tmp/vite-errors.json'] - }); - - if (catResult.exitCode === 0) { - const errorFileContent = await catResult.stdout(); - const data = JSON.parse(errorFileContent); - errors.push(...(data.errors || [])); - } - } catch (error) { - // No error file exists, that's OK - } + # Check if process is still running and get its logs + # This is a bit hacky but works for our use case + result = subprocess.run(['ps', '-p', str(pid)], capture_output=True, text=True) + if result.returncode == 0: + # Process is running, try to check for errors in output + # Note: We can't easily get stdout/stderr from a running process + # but we can check if there are new errors + pass +except: + pass + +# Also scan the current console output for any HMR errors +# This won't catch everything but helps with recent errors +try: + # Check if there's a log file we can read + import os + log_files = [] + for root, dirs, files in os.walk('/tmp'): + for file in files: + if 'vite' in file.lower() and file.endswith('.log'): + log_files.append(os.path.join(root, file)) - // Look for any Vite-related log files that might contain errors - try { - const findResult = await global.activeSandbox.runCommand({ - cmd: 'find', - args: ['/tmp', '-name', '*vite*', '-type', 'f'] - }); - - if (findResult.exitCode === 0) { - const logFiles = (await findResult.stdout()).split('\n').filter(f => f.trim()); - - for (const logFile of logFiles.slice(0, 3)) { - try { - const grepResult = await global.activeSandbox.runCommand({ - cmd: 'grep', - args: ['-i', 'failed to resolve 
import', logFile] - }); - - if (grepResult.exitCode === 0) { - const errorLines = (await grepResult.stdout()).split('\n').filter(line => line.trim()); - - for (const line of errorLines) { - // Extract package name from error line - const importMatch = line.match(/"([^"]+)"/); - if (importMatch) { - const importPath = importMatch[1]; - - // Skip relative imports - if (!importPath.startsWith('.')) { - // Extract base package name - let packageName; - if (importPath.startsWith('@')) { - const parts = importPath.split('/'); - packageName = parts.length >= 2 ? parts.slice(0, 2).join('/') : importPath; - } else { - packageName = importPath.split('/')[0]; - } - - const errorObj = { - type: "npm-missing", - package: packageName, - message: `Failed to resolve import "${importPath}"`, - file: "Unknown" - }; - - // Avoid duplicates - if (!errors.some(e => e.package === errorObj.package)) { - errors.push(errorObj); - } - } - } - } - } - } catch (error) { - // Skip if grep fails - } - } - } - } catch (error) { - // No log files found, that's OK - } + for log_file in log_files[:5]: # Check up to 5 log files + try: + with open(log_file, 'r') as f: + content = f.read() + # Look for import errors + import_errors = re.findall(r'Failed to resolve import "([^"]+)"', content) + for pkg in import_errors: + if not pkg.startswith('.'): + # Extract base package name + if pkg.startswith('@'): + parts = pkg.split('/') + final_pkg = '/'.join(parts[:2]) if len(parts) >= 2 else pkg + else: + final_pkg = pkg.split('/')[0] + + error_obj = { + "type": "npm-missing", + "package": final_pkg, + "message": f"Failed to resolve import \\"{pkg}\\"", + "file": "Unknown" + } + + # Avoid duplicates + if not any(e['package'] == error_obj['package'] for e in errors): + errors.append(error_obj) + except: + pass +except Exception as e: + print(f"Error scanning logs: {e}") + +# Deduplicate errors +unique_errors = [] +seen_packages = set() +for error in errors: + if error.get('package') and error['package'] not 
in seen_packages: + seen_packages.add(error['package']) + unique_errors.append(error) + +print(json.dumps({"errors": unique_errors})) + `, { timeout: 5000 }); - // Deduplicate errors by package name - const uniqueErrors: any[] = []; - const seenPackages = new Set(); - - for (const error of errors) { - if (error.package && !seenPackages.has(error.package)) { - seenPackages.add(error.package); - uniqueErrors.push(error); - } - } + const data = JSON.parse(result.output || '{"errors": []}'); return NextResponse.json({ success: true, - hasErrors: uniqueErrors.length > 0, - errors: uniqueErrors + hasErrors: data.errors.length > 0, + errors: data.errors }); } catch (error) { diff --git a/app/api/restart-vite/route.ts b/app/api/restart-vite/route.ts index 64bf973..ca6b4ba 100644 --- a/app/api/restart-vite/route.ts +++ b/app/api/restart-vite/route.ts @@ -15,45 +15,115 @@ export async function POST() { console.log('[restart-vite] Forcing Vite restart...'); - // Kill existing Vite processes - try { - await global.activeSandbox.runCommand({ - cmd: 'pkill', - args: ['-f', 'vite'] - }); - console.log('[restart-vite] Killed existing Vite processes'); - - // Wait a moment for processes to terminate - await new Promise(resolve => setTimeout(resolve, 2000)); - } catch (error) { - console.log('[restart-vite] No existing Vite processes found'); - } - - // Clear any error tracking files - try { - await global.activeSandbox.runCommand({ - cmd: 'bash', - args: ['-c', 'echo \'{"errors": [], "lastChecked": '+ Date.now() +'}\' > /tmp/vite-errors.json'] - }); - } catch (error) { - // Ignore if this fails - } - - // Start Vite dev server in detached mode - const viteProcess = await global.activeSandbox.runCommand({ - cmd: 'npm', - args: ['run', 'dev'], - detached: true - }); - - console.log('[restart-vite] Vite dev server restarted'); - - // Wait for Vite to start up - await new Promise(resolve => setTimeout(resolve, 3000)); + // Kill existing Vite process and restart + const result = await 
global.activeSandbox.runCode(` +import subprocess +import os +import signal +import time +import threading +import json +import sys + +# Kill existing Vite process +try: + with open('/tmp/vite-process.pid', 'r') as f: + pid = int(f.read().strip()) + os.kill(pid, signal.SIGTERM) + print("Killed existing Vite process") + time.sleep(1) +except: + print("No existing Vite process found") + +os.chdir('/home/user/app') + +# Clear error file +error_file = '/tmp/vite-errors.json' +with open(error_file, 'w') as f: + json.dump({"errors": [], "lastChecked": time.time()}, f) + +# Function to monitor Vite output for errors +def monitor_output(proc, error_file): + while True: + line = proc.stderr.readline() + if not line: + break + + sys.stdout.write(line) # Also print to console + + # Check for import resolution errors + if "Failed to resolve import" in line: + try: + # Extract package name from error + import_match = line.find('"') + if import_match != -1: + end_match = line.find('"', import_match + 1) + if end_match != -1: + package_name = line[import_match + 1:end_match] + # Skip relative imports + if not package_name.startswith('.'): + with open(error_file, 'r') as f: + data = json.load(f) + + # Handle scoped packages correctly + if package_name.startswith('@'): + # For @scope/package, keep the scope + pkg_parts = package_name.split('/') + if len(pkg_parts) >= 2: + final_package = '/'.join(pkg_parts[:2]) + else: + final_package = package_name + else: + # For regular packages, just take the first part + final_package = package_name.split('/')[0] + + error_obj = { + "type": "npm-missing", + "package": final_package, + "message": line.strip(), + "timestamp": time.time() + } + + # Avoid duplicates + if not any(e['package'] == error_obj['package'] for e in data['errors']): + data['errors'].append(error_obj) + + with open(error_file, 'w') as f: + json.dump(data, f) + + print(f"WARNING: Detected missing package: {error_obj['package']}") + except Exception as e: + print(f"Error 
parsing Vite error: {e}") + +# Start Vite with error monitoring +process = subprocess.Popen( + ['npm', 'run', 'dev'], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + bufsize=1 +) + +# Start monitoring thread +monitor_thread = threading.Thread(target=monitor_output, args=(process, error_file)) +monitor_thread.daemon = True +monitor_thread.start() + +print("Vite restarted successfully!") + +# Store process info for later +with open('/tmp/vite-process.pid', 'w') as f: + f.write(str(process.pid)) + +# Wait for Vite to fully start +time.sleep(5) +print("Vite is ready") + `); return NextResponse.json({ success: true, - message: 'Vite restarted successfully' + message: 'Vite restarted successfully', + output: result.output }); } catch (error) { diff --git a/app/api/run-command/route.ts b/app/api/run-command/route.ts index 76ffaff..53e7e7b 100644 --- a/app/api/run-command/route.ts +++ b/app/api/run-command/route.ts @@ -1,4 +1,5 @@ import { NextRequest, NextResponse } from 'next/server'; +import { Sandbox } from '@e2b/code-interpreter'; // Get active sandbox from global state (in production, use a proper state management solution) declare global { @@ -25,32 +26,30 @@ export async function POST(request: NextRequest) { console.log(`[run-command] Executing: ${command}`); - // Parse command and arguments - const commandParts = command.trim().split(/\s+/); - const cmd = commandParts[0]; - const args = commandParts.slice(1); + const result = await global.activeSandbox.runCode(` +import subprocess +import os + +os.chdir('/home/user/app') +result = subprocess.run(${JSON.stringify(command.split(' '))}, + capture_output=True, + text=True, + shell=False) + +print("STDOUT:") +print(result.stdout) +if result.stderr: + print("\\nSTDERR:") + print(result.stderr) +print(f"\\nReturn code: {result.returncode}") + `); - // Execute command using Vercel Sandbox - const result = await global.activeSandbox.runCommand({ - cmd, - args - }); - - // Get output streams - const 
stdout = await result.stdout(); - const stderr = await result.stderr(); - - const output = [ - stdout ? `STDOUT:\n${stdout}` : '', - stderr ? `\nSTDERR:\n${stderr}` : '', - `\nExit code: ${result.exitCode}` - ].filter(Boolean).join(''); + const output = result.logs.stdout.join('\n'); return NextResponse.json({ success: true, output, - exitCode: result.exitCode, - message: result.exitCode === 0 ? 'Command executed successfully' : 'Command completed with non-zero exit code' + message: 'Command executed successfully' }); } catch (error) { diff --git a/app/api/sandbox-logs/route.ts b/app/api/sandbox-logs/route.ts index 177c370..84d0208 100644 --- a/app/api/sandbox-logs/route.ts +++ b/app/api/sandbox-logs/route.ts @@ -15,70 +15,55 @@ export async function GET(request: NextRequest) { console.log('[sandbox-logs] Fetching Vite dev server logs...'); - // Check if Vite processes are running - const psResult = await global.activeSandbox.runCommand({ - cmd: 'ps', - args: ['aux'] - }); + // Get the last N lines of the Vite dev server output + const result = await global.activeSandbox.runCode(` +import subprocess +import os + +# Try to get the Vite process output +try: + # Read the last 100 lines of any log files + log_content = [] - let viteRunning = false; - let logContent: string[] = []; + # Check if there are any node processes running + ps_result = subprocess.run(['ps', 'aux'], capture_output=True, text=True) + vite_processes = [line for line in ps_result.stdout.split('\\n') if 'vite' in line.lower()] - if (psResult.exitCode === 0) { - const psOutput = await psResult.stdout(); - const viteProcesses = psOutput.split('\n').filter(line => - line.toLowerCase().includes('vite') || - line.toLowerCase().includes('npm run dev') - ); - - viteRunning = viteProcesses.length > 0; - - if (viteRunning) { - logContent.push("Vite is running"); - logContent.push(...viteProcesses.slice(0, 3)); // Show first 3 processes - } else { - logContent.push("Vite process not found"); - } - } + if 
vite_processes: + log_content.append("Vite is running") + else: + log_content.append("Vite process not found") + + # Try to capture recent console output (this is a simplified approach) + # In a real implementation, you'd want to capture the Vite process output directly + print(json.dumps({ + "hasErrors": False, + "logs": log_content, + "status": "running" if vite_processes else "stopped" + })) +except Exception as e: + print(json.dumps({ + "hasErrors": True, + "logs": [str(e)], + "status": "error" + })) + `); - // Try to read any recent log files try { - const findResult = await global.activeSandbox.runCommand({ - cmd: 'find', - args: ['/tmp', '-name', '*vite*', '-name', '*.log', '-type', 'f'] + const logData = JSON.parse(result.output || '{}'); + return NextResponse.json({ + success: true, + ...logData + }); + } catch { + return NextResponse.json({ + success: true, + hasErrors: false, + logs: [result.output], + status: 'unknown' }); - - if (findResult.exitCode === 0) { - const logFiles = (await findResult.stdout()).split('\n').filter(f => f.trim()); - - for (const logFile of logFiles.slice(0, 2)) { - try { - const catResult = await global.activeSandbox.runCommand({ - cmd: 'tail', - args: ['-n', '10', logFile] - }); - - if (catResult.exitCode === 0) { - const logFileContent = await catResult.stdout(); - logContent.push(`--- ${logFile} ---`); - logContent.push(logFileContent); - } - } catch (error) { - // Skip if can't read log file - } - } - } - } catch (error) { - // No log files found, that's OK } - return NextResponse.json({ - success: true, - hasErrors: false, - logs: logContent, - status: viteRunning ? 
'running' : 'stopped' - }); - } catch (error) { console.error('[sandbox-logs] Error:', error); return NextResponse.json({ diff --git a/components/SandboxPreview.tsx b/components/SandboxPreview.tsx index 502873e..3808c26 100644 --- a/components/SandboxPreview.tsx +++ b/components/SandboxPreview.tsx @@ -1,24 +1,32 @@ -import { useState } from 'react'; +import { useState, useEffect } from 'react'; import { Loader2, ExternalLink, RefreshCw, Terminal } from 'lucide-react'; interface SandboxPreviewProps { + sandboxId: string; + port: number; type: 'vite' | 'nextjs' | 'console'; output?: string; isLoading?: boolean; - sandboxUrl?: string; // Real URL from Vercel Sandbox API } export default function SandboxPreview({ + sandboxId, + port, type, output, - isLoading = false, - sandboxUrl + isLoading = false }: SandboxPreviewProps) { + const [previewUrl, setPreviewUrl] = useState(''); const [showConsole, setShowConsole] = useState(false); const [iframeKey, setIframeKey] = useState(0); - // Use the real sandbox URL passed from the API - const previewUrl = sandboxUrl || ''; + useEffect(() => { + if (sandboxId && type !== 'console') { + // In production, this would be the actual E2B sandbox URL + // Format: https://{sandboxId}-{port}.e2b.dev + setPreviewUrl(`https://${sandboxId}-${port}.e2b.dev`); + } + }, [sandboxId, port, type]); const handleRefresh = () => { setIframeKey(prev => prev + 1); @@ -42,13 +50,9 @@ export default function SandboxPreview({ {type === 'vite' ? '⚡ Vite' : '▲ Next.js'} Preview - {previewUrl ? ( - - {previewUrl} - - ) : ( - Waiting for sandbox URL... - )} + + {previewUrl} +
- {previewUrl && ( - - - - )} + + +
{/* Main Preview */}
- {(isLoading || !previewUrl) && ( + {isLoading && (

- {!previewUrl - ? 'Setting up sandbox environment...' - : type === 'vite' - ? 'Starting Vite dev server...' - : 'Starting Next.js dev server...' - } + {type === 'vite' ? 'Starting Vite dev server...' : 'Starting Next.js dev server...'}

)} - {previewUrl && ( -