diff --git a/.cursor/mcp.json b/.cursor/mcp.json
new file mode 100644
index 0000000..c2af9f3
--- /dev/null
+++ b/.cursor/mcp.json
@@ -0,0 +1,8 @@
+{
+ "mcpServers": {
+ "dev3000": {
+ "type": "http",
+ "url": "http://localhost:3684/mcp"
+ }
+ }
+}
diff --git a/.env.example b/.env.example
index c8b9f67..d66eef4 100644
--- a/.env.example
+++ b/.env.example
@@ -1,20 +1,46 @@
-# REQUIRED - Sandboxes for code execution
-# Get yours at https://e2b.dev
-E2B_API_KEY=your_e2b_api_key_here
+# Required
+FIRECRAWL_API_KEY=your_firecrawl_api_key # Get from https://firecrawl.dev (Web scraping)
-# REQUIRED - Web scraping for cloning websites
-# Get yours at https://firecrawl.dev
-FIRECRAWL_API_KEY=your_firecrawl_api_key_here
+# =================================================================================
+# SANDBOX PROVIDER - Choose Option 1 OR 2
+# =================================================================================
-# OPTIONAL - AI Providers (need at least one)
-# Get yours at https://console.anthropic.com
-ANTHROPIC_API_KEY=your_anthropic_api_key_here
+# Option 1: Vercel Sandbox (recommended - default)
+# Set SANDBOX_PROVIDER=vercel and choose authentication method below
+SANDBOX_PROVIDER=vercel
-# Get yours at https://platform.openai.com
-OPENAI_API_KEY=your_openai_api_key_here
+# Vercel Authentication - Choose method a OR b
+# Method a: OIDC Token (recommended for development)
+# Run `vercel link` then `vercel env pull` to get VERCEL_OIDC_TOKEN automatically
+VERCEL_OIDC_TOKEN=auto_generated_by_vercel_env_pull
-# Get yours at https://aistudio.google.com/app/apikey
-GEMINI_API_KEY=your_gemini_api_key_here
+# Method b: Personal Access Token (for production or when OIDC unavailable)
+# VERCEL_TEAM_ID=team_xxxxxxxxx # Your Vercel team ID
+# VERCEL_PROJECT_ID=prj_xxxxxxxxx # Your Vercel project ID
+# VERCEL_TOKEN=vercel_xxxxxxxxxxxx # Personal access token from Vercel dashboard
# Get yours at https://console.groq.com
-GROQ_API_KEY=your_groq_api_key_here
\ No newline at end of file
+# NOTE: GROQ_API_KEY is configured in the AI PROVIDERS section below.
+
+
+# Option 2: E2B Sandbox
+# Set SANDBOX_PROVIDER=e2b and configure E2B_API_KEY below
+# SANDBOX_PROVIDER=e2b
+# E2B_API_KEY=your_e2b_api_key # Get from https://e2b.dev
+
+# =================================================================================
+# AI PROVIDERS - Need at least one
+# =================================================================================
+
+# Vercel AI Gateway (recommended - provides access to multiple models)
+AI_GATEWAY_API_KEY=your_ai_gateway_api_key # Get from https://vercel.com/dashboard/ai-gateway/api-keys
+
+# Individual provider keys (used when AI_GATEWAY_API_KEY is not set)
+ANTHROPIC_API_KEY=your_anthropic_api_key # Get from https://console.anthropic.com
+OPENAI_API_KEY=your_openai_api_key # Get from https://platform.openai.com (GPT-5)
+GEMINI_API_KEY=your_gemini_api_key # Get from https://aistudio.google.com/app/apikey
+GROQ_API_KEY=your_groq_api_key # Get from https://console.groq.com (Fast inference - Kimi K2 recommended)
+
+# Optional Morph Fast Apply
+# Get yours at https://morphllm.com/
+MORPH_API_KEY=your_fast_apply_key
diff --git a/.gitignore b/.gitignore
index ac59fa8..79f47d8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -56,3 +56,4 @@ e2b-template-*
*.temp
repomix-output.txt
bun.lockb
+.env*.local
diff --git a/README.md b/README.md
index 803cc92..b5bbbcd 100644
--- a/README.md
+++ b/README.md
@@ -1,40 +1,67 @@
# Open Lovable
-Chat with AI to build React apps instantly. An example app made by the [Firecrawl](https://firecrawl.dev/?ref=open-lovable-github) team. For a complete cloud solution, check out [Lovable.dev ❤️](https://lovable.dev/).
+Chat with AI to build React apps instantly. An example app made by the [Firecrawl](https://firecrawl.dev/?ref=open-lovable-github) team. For a complete cloud solution, check out [Lovable.dev](https://lovable.dev/) ❤️.
-
-
## Setup
1. **Clone & Install**
```bash
-git clone https://github.com/mendableai/open-lovable.git
+git clone https://github.com/firecrawl/open-lovable.git
cd open-lovable
-npm install
+pnpm install # or npm install / yarn install
```
2. **Add `.env.local`**
-```env
-# Required
-E2B_API_KEY=your_e2b_api_key # Get from https://e2b.dev (Sandboxes)
-FIRECRAWL_API_KEY=your_firecrawl_api_key # Get from https://firecrawl.dev (Web scraping)
-# Optional (need at least one AI provider)
-ANTHROPIC_API_KEY=your_anthropic_api_key # Get from https://console.anthropic.com
-OPENAI_API_KEY=your_openai_api_key # Get from https://platform.openai.com (GPT-5)
-GEMINI_API_KEY=your_gemini_api_key # Get from https://aistudio.google.com/app/apikey
-GROQ_API_KEY=your_groq_api_key # Get from https://console.groq.com (Fast inference - Kimi K2 recommended)
+```env
+# =================================================================
+# REQUIRED
+# =================================================================
+FIRECRAWL_API_KEY=your_firecrawl_api_key # https://firecrawl.dev
+
+# =================================================================
+# AI PROVIDER - Choose your LLM
+# =================================================================
+GEMINI_API_KEY=your_gemini_api_key # https://aistudio.google.com/app/apikey
+ANTHROPIC_API_KEY=your_anthropic_api_key # https://console.anthropic.com
+OPENAI_API_KEY=your_openai_api_key # https://platform.openai.com
+GROQ_API_KEY=your_groq_api_key # https://console.groq.com
+
+# =================================================================
+# FAST APPLY (Optional - for faster edits)
+# =================================================================
+MORPH_API_KEY=your_morphllm_api_key # https://morphllm.com/dashboard
+
+# =================================================================
+# SANDBOX PROVIDER - Choose ONE: Vercel (default) or E2B
+# =================================================================
+SANDBOX_PROVIDER=vercel # or 'e2b'
+
+# Option 1: Vercel Sandbox (default)
+# Choose one authentication method:
+
+# Method A: OIDC Token (recommended for development)
+# Run `vercel link` then `vercel env pull` to get VERCEL_OIDC_TOKEN automatically
+VERCEL_OIDC_TOKEN=auto_generated_by_vercel_env_pull
+
+# Method B: Personal Access Token (for production or when OIDC unavailable)
+# VERCEL_TEAM_ID=team_xxxxxxxxx # Your Vercel team ID
+# VERCEL_PROJECT_ID=prj_xxxxxxxxx # Your Vercel project ID
+# VERCEL_TOKEN=vercel_xxxxxxxxxxxx # Personal access token from Vercel dashboard
+
+# Option 2: E2B Sandbox
+# E2B_API_KEY=your_e2b_api_key # https://e2b.dev
```
3. **Run**
```bash
-npm run dev
+pnpm dev # or npm run dev / yarn dev
```
-Open [http://localhost:3000](http://localhost:3000)
+Open [http://localhost:3000](http://localhost:3000)
## License
-MIT
+MIT
\ No newline at end of file
diff --git a/app/api/analyze-edit-intent/route.ts b/app/api/analyze-edit-intent/route.ts
index 7cf35bc..07798a0 100644
--- a/app/api/analyze-edit-intent/route.ts
+++ b/app/api/analyze-edit-intent/route.ts
@@ -5,20 +5,30 @@ import { createOpenAI } from '@ai-sdk/openai';
import { createGoogleGenerativeAI } from '@ai-sdk/google';
import { generateObject } from 'ai';
import { z } from 'zod';
-import type { FileManifest } from '@/types/file-manifest';
+// import type { FileManifest } from '@/types/file-manifest'; // Type is used implicitly through manifest parameter
+
+// Check if we're using Vercel AI Gateway
+const isUsingAIGateway = !!process.env.AI_GATEWAY_API_KEY;
+const aiGatewayBaseURL = 'https://ai-gateway.vercel.sh/v1';
const groq = createGroq({
- apiKey: process.env.GROQ_API_KEY,
+ apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.GROQ_API_KEY,
+ baseURL: isUsingAIGateway ? aiGatewayBaseURL : undefined,
});
const anthropic = createAnthropic({
- apiKey: process.env.ANTHROPIC_API_KEY,
- baseURL: process.env.ANTHROPIC_BASE_URL || 'https://api.anthropic.com/v1',
+ apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.ANTHROPIC_API_KEY,
+ baseURL: isUsingAIGateway ? aiGatewayBaseURL : (process.env.ANTHROPIC_BASE_URL || 'https://api.anthropic.com/v1'),
});
const openai = createOpenAI({
- apiKey: process.env.OPENAI_API_KEY,
- baseURL: process.env.OPENAI_BASE_URL,
+ apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.OPENAI_API_KEY,
+ baseURL: isUsingAIGateway ? aiGatewayBaseURL : process.env.OPENAI_BASE_URL,
+});
+
+const googleGenerativeAI = createGoogleGenerativeAI({
+ apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.GEMINI_API_KEY,
+ baseURL: isUsingAIGateway ? aiGatewayBaseURL : undefined,
});
// Schema for the AI's search plan - not file selection!
@@ -66,7 +76,7 @@ export async function POST(request: NextRequest) {
// Create a summary of available files for the AI
  const validFiles = Object.entries(manifest.files as Record<string, any>)
- .filter(([path, info]) => {
+ .filter(([path]) => {
// Filter out invalid paths
return path.includes('.') && !path.match(/\/\d+$/);
});
@@ -74,7 +84,7 @@ export async function POST(request: NextRequest) {
const fileSummary = validFiles
.map(([path, info]: [string, any]) => {
const componentName = info.componentInfo?.name || path.split('/').pop();
- const hasImports = info.imports?.length > 0;
+ // const hasImports = info.imports?.length > 0; // Kept for future use
const childComponents = info.componentInfo?.childComponents?.join(', ') || 'none';
return `- ${path} (${componentName}, renders: ${childComponents})`;
})
@@ -104,7 +114,7 @@ export async function POST(request: NextRequest) {
aiModel = openai(model.replace('openai/', ''));
}
} else if (model.startsWith('google/')) {
- aiModel = createGoogleGenerativeAI(model.replace('google/', ''));
+ aiModel = googleGenerativeAI(model.replace('google/', ''));
} else {
// Default to groq if model format is unclear
aiModel = groq(model);
diff --git a/app/api/apply-ai-code-stream/route.ts b/app/api/apply-ai-code-stream/route.ts
index c91bf11..1e0d2c4 100644
--- a/app/api/apply-ai-code-stream/route.ts
+++ b/app/api/apply-ai-code-stream/route.ts
@@ -1,11 +1,13 @@
import { NextRequest, NextResponse } from 'next/server';
-import { Sandbox } from '@e2b/code-interpreter';
+import { parseMorphEdits, applyMorphEditToFile } from '@/lib/morph-fast-apply';
+// Sandbox import not needed - using global sandbox from sandbox-manager
import type { SandboxState } from '@/types/sandbox';
import type { ConversationState } from '@/types/conversation';
+import { sandboxManager } from '@/lib/sandbox/sandbox-manager';
declare global {
var conversationState: ConversationState | null;
- var activeSandbox: any;
+ var activeSandboxProvider: any;
  var existingFiles: Set<string>;
var sandboxState: SandboxState;
}
@@ -28,28 +30,28 @@ function parseAIResponse(response: string): ParsedResponse {
explanation: '',
template: ''
};
-
+
// Function to extract packages from import statements
function extractPackagesFromCode(content: string): string[] {
const packages: string[] = [];
// Match ES6 imports
const importRegex = /import\s+(?:(?:\{[^}]*\}|\*\s+as\s+\w+|\w+)(?:\s*,\s*(?:\{[^}]*\}|\*\s+as\s+\w+|\w+))*\s+from\s+)?['"]([^'"]+)['"]/g;
let importMatch;
-
+
while ((importMatch = importRegex.exec(content)) !== null) {
const importPath = importMatch[1];
// Skip relative imports and built-in React
- if (!importPath.startsWith('.') && !importPath.startsWith('/') &&
- importPath !== 'react' && importPath !== 'react-dom' &&
- !importPath.startsWith('@/')) {
+ if (!importPath.startsWith('.') && !importPath.startsWith('/') &&
+ importPath !== 'react' && importPath !== 'react-dom' &&
+ !importPath.startsWith('@/')) {
// Extract package name (handle scoped packages like @heroicons/react)
- const packageName = importPath.startsWith('@')
+ const packageName = importPath.startsWith('@')
? importPath.split('/').slice(0, 2).join('/')
: importPath.split('/')[0];
-
+
if (!packages.includes(packageName)) {
packages.push(packageName);
-
+
// Log important packages for debugging
if (packageName === 'react-router-dom' || packageName.includes('router') || packageName.includes('icon')) {
console.log(`[apply-ai-code-stream] Detected package from imports: ${packageName}`);
@@ -57,13 +59,13 @@ function parseAIResponse(response: string): ParsedResponse {
}
}
}
-
+
return packages;
}
// Parse file sections - handle duplicates and prefer complete versions
const fileMap = new Map();
-
+
// First pass: Find all file declarations
  const fileRegex = /<file path="([^"]+)">([\s\S]*?)(?:<\/file>|$)/g;
let match;
@@ -71,10 +73,10 @@ function parseAIResponse(response: string): ParsedResponse {
const filePath = match[1];
const content = match[2].trim();
    const hasClosingTag = response.substring(match.index, match.index + match[0].length).includes('</file>');
-
+
// Check if this file already exists in our map
const existing = fileMap.get(filePath);
-
+
// Decide whether to keep this version
let shouldReplace = false;
if (!existing) {
@@ -88,7 +90,7 @@ function parseAIResponse(response: string): ParsedResponse {
} else if (!existing.isComplete && !hasClosingTag && content.length > existing.content.length) {
shouldReplace = true; // Both incomplete, keep longer one
}
-
+
if (shouldReplace) {
// Additional validation: reject obviously broken content
if (content.includes('...') && !content.includes('...props') && !content.includes('...rest')) {
@@ -102,18 +104,18 @@ function parseAIResponse(response: string): ParsedResponse {
}
}
}
-
+
// Convert map to array for sections.files
for (const [path, { content, isComplete }] of fileMap.entries()) {
if (!isComplete) {
console.log(`[apply-ai-code-stream] Warning: File ${path} appears to be truncated (no closing tag)`);
}
-
+
sections.files.push({
path,
content
});
-
+
// Extract packages from file content
const filePackages = extractPackagesFromCode(content);
for (const pkg of filePackages) {
@@ -123,7 +125,7 @@ function parseAIResponse(response: string): ParsedResponse {
}
}
}
-
+
// Also parse markdown code blocks with file paths
const markdownFileRegex = /```(?:file )?path="([^"]+)"\n([\s\S]*?)```/g;
while ((match = markdownFileRegex.exec(response)) !== null) {
@@ -133,7 +135,7 @@ function parseAIResponse(response: string): ParsedResponse {
path: filePath,
content: content
});
-
+
// Extract packages from file content
const filePackages = extractPackagesFromCode(content);
for (const pkg of filePackages) {
@@ -143,7 +145,7 @@ function parseAIResponse(response: string): ParsedResponse {
}
}
}
-
+
// Parse plain text format like "Generated Files: Header.jsx, index.css"
const generatedFilesMatch = response.match(/Generated Files?:\s*([^\n]+)/i);
if (generatedFilesMatch) {
@@ -153,7 +155,7 @@ function parseAIResponse(response: string): ParsedResponse {
.map(f => f.trim())
.filter(f => f.endsWith('.jsx') || f.endsWith('.js') || f.endsWith('.tsx') || f.endsWith('.ts') || f.endsWith('.css') || f.endsWith('.json') || f.endsWith('.html'));
console.log(`[apply-ai-code-stream] Detected generated files from plain text: ${filesList.join(', ')}`);
-
+
// Try to extract the actual file content if it follows
for (const fileName of filesList) {
// Look for the file content after the file name
@@ -169,7 +171,7 @@ function parseAIResponse(response: string): ParsedResponse {
content: codeMatch[1].trim()
});
console.log(`[apply-ai-code-stream] Extracted content for ${filePath}`);
-
+
// Extract packages from this file
const filePackages = extractPackagesFromCode(codeMatch[1]);
for (const pkg of filePackages) {
@@ -182,7 +184,7 @@ function parseAIResponse(response: string): ParsedResponse {
}
}
}
-
+
// Also try to parse if the response contains raw JSX/JS code blocks
const codeBlockRegex = /```(?:jsx?|tsx?|javascript|typescript)?\n([\s\S]*?)```/g;
while ((match = codeBlockRegex.exec(response)) !== null) {
@@ -192,14 +194,14 @@ function parseAIResponse(response: string): ParsedResponse {
if (fileNameMatch) {
const fileName = fileNameMatch[1].trim();
const filePath = fileName.includes('/') ? fileName : `src/components/${fileName}`;
-
+
// Don't add duplicate files
if (!sections.files.some(f => f.path === filePath)) {
sections.files.push({
path: filePath,
content: content
});
-
+
// Extract packages
const filePackages = extractPackagesFromCode(content);
for (const pkg of filePackages) {
@@ -222,7 +224,7 @@ function parseAIResponse(response: string): ParsedResponse {
while ((match = pkgRegex.exec(response)) !== null) {
sections.packages.push(match[1].trim());
}
-
+
  // Also parse <packages> tag with multiple packages
  const packagesRegex = /<packages>([\s\S]*?)<\/packages>/;
const packagesMatch = response.match(packagesRegex);
@@ -262,22 +264,28 @@ function parseAIResponse(response: string): ParsedResponse {
export async function POST(request: NextRequest) {
try {
const { response, isEdit = false, packages = [], sandboxId } = await request.json();
-
+
if (!response) {
return NextResponse.json({
error: 'response is required'
}, { status: 400 });
}
-
+
// Debug log the response
console.log('[apply-ai-code-stream] Received response to parse:');
console.log('[apply-ai-code-stream] Response length:', response.length);
console.log('[apply-ai-code-stream] Response preview:', response.substring(0, 500));
console.log('[apply-ai-code-stream] isEdit:', isEdit);
console.log('[apply-ai-code-stream] packages:', packages);
-
+
// Parse the AI response
const parsed = parseAIResponse(response);
+ const morphEnabled = Boolean(isEdit && process.env.MORPH_API_KEY);
+ const morphEdits = morphEnabled ? parseMorphEdits(response) : [];
+ console.log('[apply-ai-code-stream] Morph Fast Apply mode:', morphEnabled);
+ if (morphEnabled) {
+ console.log('[apply-ai-code-stream] Morph edits found:', morphEdits.length);
+ }
// Log what was parsed
console.log('[apply-ai-code-stream] Parsed result:');
@@ -288,94 +296,109 @@ export async function POST(request: NextRequest) {
});
}
console.log('[apply-ai-code-stream] Packages found:', parsed.packages);
-
+
// Initialize existingFiles if not already
if (!global.existingFiles) {
global.existingFiles = new Set();
}
-
- // First, always check the global state for active sandbox
- let sandbox = global.activeSandbox;
-
- // If we don't have a sandbox in this instance but we have a sandboxId,
- // reconnect to the existing sandbox
- if (!sandbox && sandboxId) {
- console.log(`[apply-ai-code-stream] Sandbox ${sandboxId} not in this instance, attempting reconnect...`);
-
+
+ // Try to get provider from sandbox manager first
+ let provider = sandboxId ? sandboxManager.getProvider(sandboxId) : sandboxManager.getActiveProvider();
+
+ // Fall back to global state if not found in manager
+ if (!provider) {
+ provider = global.activeSandboxProvider;
+ }
+
+ // If we have a sandboxId but no provider, try to get or create one
+ if (!provider && sandboxId) {
+ console.log(`[apply-ai-code-stream] No provider found for sandbox ${sandboxId}, attempting to get or create...`);
+
try {
- // Reconnect to the existing sandbox using E2B's connect method
- sandbox = await Sandbox.connect(sandboxId, { apiKey: process.env.E2B_API_KEY });
- console.log(`[apply-ai-code-stream] Successfully reconnected to sandbox ${sandboxId}`);
-
- // Store the reconnected sandbox globally for this instance
- global.activeSandbox = sandbox;
-
- // Update sandbox data if needed
- if (!global.sandboxData) {
- const host = (sandbox as any).getHost(5173);
- global.sandboxData = {
- sandboxId,
- url: `https://${host}`
- };
+ provider = await sandboxManager.getOrCreateProvider(sandboxId);
+
+ // If we got a new provider (not reconnected), we need to create a new sandbox
+ if (!provider.getSandboxInfo()) {
+ console.log(`[apply-ai-code-stream] Creating new sandbox since reconnection failed for ${sandboxId}`);
+ await provider.createSandbox();
+ await provider.setupViteApp();
+ sandboxManager.registerSandbox(sandboxId, provider);
}
-
- // Initialize existingFiles if not already
- if (!global.existingFiles) {
- global.existingFiles = new Set();
- }
- } catch (reconnectError) {
- console.error(`[apply-ai-code-stream] Failed to reconnect to sandbox ${sandboxId}:`, reconnectError);
-
- // If reconnection fails, we'll still try to return a meaningful response
+
+ // Update legacy global state
+ global.activeSandboxProvider = provider;
+ console.log(`[apply-ai-code-stream] Successfully got provider for sandbox ${sandboxId}`);
+ } catch (providerError) {
+ console.error(`[apply-ai-code-stream] Failed to get or create provider for sandbox ${sandboxId}:`, providerError);
return NextResponse.json({
success: false,
- error: `Failed to reconnect to sandbox ${sandboxId}. The sandbox may have expired or been terminated.`,
+ error: `Failed to create sandbox provider for ${sandboxId}. The sandbox may have expired.`,
results: {
filesCreated: [],
packagesInstalled: [],
commandsExecuted: [],
- errors: [`Sandbox reconnection failed: ${(reconnectError as Error).message}`]
+ errors: [`Sandbox provider creation failed: ${(providerError as Error).message}`]
},
explanation: parsed.explanation,
structure: parsed.structure,
parsedFiles: parsed.files,
message: `Parsed ${parsed.files.length} files but couldn't apply them - sandbox reconnection failed.`
- });
+ }, { status: 500 });
}
}
-
- // If no sandbox at all and no sandboxId provided, return an error
- if (!sandbox && !sandboxId) {
- console.log('[apply-ai-code-stream] No sandbox available and no sandboxId provided');
- return NextResponse.json({
- success: false,
- error: 'No active sandbox found. Please create a sandbox first.',
- results: {
- filesCreated: [],
- packagesInstalled: [],
- commandsExecuted: [],
- errors: ['No sandbox available']
- },
- explanation: parsed.explanation,
- structure: parsed.structure,
- parsedFiles: parsed.files,
- message: `Parsed ${parsed.files.length} files but no sandbox available to apply them.`
- });
+
+ // If we still don't have a provider, create a new one
+ if (!provider) {
+ console.log(`[apply-ai-code-stream] No active provider found, creating new sandbox...`);
+ try {
+ const { SandboxFactory } = await import('@/lib/sandbox/factory');
+ provider = SandboxFactory.create();
+ const sandboxInfo = await provider.createSandbox();
+ await provider.setupViteApp();
+
+ // Register with sandbox manager
+ sandboxManager.registerSandbox(sandboxInfo.sandboxId, provider);
+
+ // Store in legacy global state
+ global.activeSandboxProvider = provider;
+ global.sandboxData = {
+ sandboxId: sandboxInfo.sandboxId,
+ url: sandboxInfo.url
+ };
+
+ console.log(`[apply-ai-code-stream] Created new sandbox successfully`);
+ } catch (createError) {
+ console.error(`[apply-ai-code-stream] Failed to create new sandbox:`, createError);
+ return NextResponse.json({
+ success: false,
+ error: `Failed to create new sandbox: ${createError instanceof Error ? createError.message : 'Unknown error'}`,
+ results: {
+ filesCreated: [],
+ packagesInstalled: [],
+ commandsExecuted: [],
+ errors: [`Sandbox creation failed: ${createError instanceof Error ? createError.message : 'Unknown error'}`]
+ },
+ explanation: parsed.explanation,
+ structure: parsed.structure,
+ parsedFiles: parsed.files,
+ message: `Parsed ${parsed.files.length} files but couldn't apply them - sandbox creation failed.`
+ }, { status: 500 });
+ }
}
-
+
// Create a response stream for real-time updates
const encoder = new TextEncoder();
const stream = new TransformStream();
const writer = stream.writable.getWriter();
-
+
// Function to send progress updates
const sendProgress = async (data: any) => {
const message = `data: ${JSON.stringify(data)}\n\n`;
await writer.write(encoder.encode(message));
};
-
- // Start processing in background (pass sandbox and request to the async function)
- (async (sandboxInstance, req) => {
+
+ // Start processing in background (pass provider and request to the async function)
+ (async (providerInstance, req) => {
const results = {
filesCreated: [] as string[],
filesUpdated: [] as string[],
@@ -385,86 +408,94 @@ export async function POST(request: NextRequest) {
commandsExecuted: [] as string[],
errors: [] as string[]
};
-
+
try {
- await sendProgress({
- type: 'start',
+ await sendProgress({
+ type: 'start',
message: 'Starting code application...',
totalSteps: 3
});
+ if (morphEnabled) {
+ await sendProgress({ type: 'info', message: 'Morph Fast Apply enabled' });
+ await sendProgress({ type: 'info', message: `Parsed ${morphEdits.length} Morph edits` });
+ if (morphEdits.length === 0) {
+ console.warn('[apply-ai-code-stream] Morph enabled but no blocks found; falling back to full-file flow');
+ await sendProgress({ type: 'warning', message: 'Morph enabled but no blocks found; falling back to full-file flow' });
+ }
+ }
// Step 1: Install packages
const packagesArray = Array.isArray(packages) ? packages : [];
const parsedPackages = Array.isArray(parsed.packages) ? parsed.packages : [];
-
+
// Combine and deduplicate packages
const allPackages = [...packagesArray.filter(pkg => pkg && typeof pkg === 'string'), ...parsedPackages];
-
+
// Use Set to remove duplicates, then filter out pre-installed packages
const uniquePackages = [...new Set(allPackages)]
.filter(pkg => pkg && typeof pkg === 'string' && pkg.trim() !== '') // Remove empty strings
.filter(pkg => pkg !== 'react' && pkg !== 'react-dom'); // Filter pre-installed
-
+
// Log if we found duplicates
if (allPackages.length !== uniquePackages.length) {
console.log(`[apply-ai-code-stream] Removed ${allPackages.length - uniquePackages.length} duplicate packages`);
console.log(`[apply-ai-code-stream] Original packages:`, allPackages);
console.log(`[apply-ai-code-stream] Deduplicated packages:`, uniquePackages);
}
-
+
if (uniquePackages.length > 0) {
- await sendProgress({
- type: 'step',
+ await sendProgress({
+ type: 'step',
step: 1,
message: `Installing ${uniquePackages.length} packages...`,
packages: uniquePackages
});
-
+
// Use streaming package installation
try {
// Construct the API URL properly for both dev and production
const protocol = process.env.NODE_ENV === 'production' ? 'https' : 'http';
const host = req.headers.get('host') || 'localhost:3000';
const apiUrl = `${protocol}://${host}/api/install-packages`;
-
+
const installResponse = await fetch(apiUrl, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
- body: JSON.stringify({
+ body: JSON.stringify({
packages: uniquePackages,
- sandboxId: sandboxId || (sandboxInstance as any).sandboxId
+ sandboxId: sandboxId || providerInstance.getSandboxInfo()?.sandboxId
})
});
-
+
if (installResponse.ok && installResponse.body) {
const reader = installResponse.body.getReader();
const decoder = new TextDecoder();
-
+
while (true) {
const { done, value } = await reader.read();
if (done) break;
-
+
const chunk = decoder.decode(value);
if (!chunk) continue;
const lines = chunk.split('\n');
-
+
for (const line of lines) {
if (line.startsWith('data: ')) {
try {
const data = JSON.parse(line.slice(6));
-
+
// Forward package installation progress
await sendProgress({
type: 'package-progress',
...data
});
-
+
// Track results
if (data.type === 'success' && data.installedPackages) {
results.packagesInstalled = data.installedPackages;
}
- } catch (e) {
- // Ignore parse errors
+ } catch (parseError) {
+ console.debug('Error parsing terminal output:', parseError);
}
}
}
@@ -479,28 +510,83 @@ export async function POST(request: NextRequest) {
results.errors.push(`Package installation failed: ${(error as Error).message}`);
}
} else {
- await sendProgress({
- type: 'step',
+ await sendProgress({
+ type: 'step',
step: 1,
message: 'No additional packages to install, skipping...'
});
}
-
+
// Step 2: Create/update files
const filesArray = Array.isArray(parsed.files) ? parsed.files : [];
- await sendProgress({
- type: 'step',
+ await sendProgress({
+ type: 'step',
step: 2,
message: `Creating ${filesArray.length} files...`
});
-
+
// Filter out config files that shouldn't be created
const configFiles = ['tailwind.config.js', 'vite.config.js', 'package.json', 'package-lock.json', 'tsconfig.json', 'postcss.config.js'];
- const filteredFiles = filesArray.filter(file => {
+ let filteredFiles = filesArray.filter(file => {
if (!file || typeof file !== 'object') return false;
const fileName = (file.path || '').split('/').pop() || '';
return !configFiles.includes(fileName);
});
+
+ // If Morph is enabled and we have edits, apply them before file writes
+  const morphUpdatedPaths = new Set<string>();
+ if (morphEnabled && morphEdits.length > 0) {
+ const morphSandbox = (global as any).activeSandbox || providerInstance;
+ if (!morphSandbox) {
+ console.warn('[apply-ai-code-stream] No sandbox available to apply Morph edits');
+ await sendProgress({ type: 'warning', message: 'No sandbox available to apply Morph edits' });
+ } else {
+ await sendProgress({ type: 'info', message: `Applying ${morphEdits.length} fast edits via Morph...` });
+ for (const [idx, edit] of morphEdits.entries()) {
+ try {
+ await sendProgress({ type: 'file-progress', current: idx + 1, total: morphEdits.length, fileName: edit.targetFile, action: 'morph-applying' });
+ const result = await applyMorphEditToFile({
+ sandbox: morphSandbox,
+ targetPath: edit.targetFile,
+ instructions: edit.instructions,
+ updateSnippet: edit.update
+ });
+ if (result.success && result.normalizedPath) {
+ console.log('[apply-ai-code-stream] Morph updated', result.normalizedPath);
+ morphUpdatedPaths.add(result.normalizedPath);
+ if (results.filesUpdated) results.filesUpdated.push(result.normalizedPath);
+ await sendProgress({ type: 'file-complete', fileName: result.normalizedPath, action: 'morph-updated' });
+ } else {
+ const msg = result.error || 'Unknown Morph error';
+ console.error('[apply-ai-code-stream] Morph apply failed for', edit.targetFile, msg);
+ if (results.errors) results.errors.push(`Morph apply failed for ${edit.targetFile}: ${msg}`);
+ await sendProgress({ type: 'file-error', fileName: edit.targetFile, error: msg });
+ }
+ } catch (err) {
+ const msg = (err as Error).message;
+ console.error('[apply-ai-code-stream] Morph apply exception for', edit.targetFile, msg);
+ if (results.errors) results.errors.push(`Morph apply exception for ${edit.targetFile}: ${msg}`);
+ await sendProgress({ type: 'file-error', fileName: edit.targetFile, error: msg });
+ }
+ }
+ }
+ }
+
+ // Avoid overwriting Morph-updated files in the file write loop
+ if (morphUpdatedPaths.size > 0) {
+ filteredFiles = filteredFiles.filter(file => {
+ if (!file?.path) return true;
+ let normalizedPath = file.path.startsWith('/') ? file.path.slice(1) : file.path;
+ const fileName = normalizedPath.split('/').pop() || '';
+ if (!normalizedPath.startsWith('src/') &&
+ !normalizedPath.startsWith('public/') &&
+ normalizedPath !== 'index.html' &&
+ !configFiles.includes(fileName)) {
+ normalizedPath = 'src/' + normalizedPath;
+ }
+ return !morphUpdatedPaths.has(normalizedPath);
+ });
+ }
for (const [index, file] of filteredFiles.entries()) {
try {
@@ -512,42 +598,45 @@ export async function POST(request: NextRequest) {
fileName: file.path,
action: 'creating'
});
-
+
// Normalize the file path
let normalizedPath = file.path;
if (normalizedPath.startsWith('/')) {
normalizedPath = normalizedPath.substring(1);
}
- if (!normalizedPath.startsWith('src/') &&
- !normalizedPath.startsWith('public/') &&
- normalizedPath !== 'index.html' &&
- !configFiles.includes(normalizedPath.split('/').pop() || '')) {
+ if (!normalizedPath.startsWith('src/') &&
+ !normalizedPath.startsWith('public/') &&
+ normalizedPath !== 'index.html' &&
+ !configFiles.includes(normalizedPath.split('/').pop() || '')) {
normalizedPath = 'src/' + normalizedPath;
}
-
- const fullPath = `/home/user/app/${normalizedPath}`;
+
const isUpdate = global.existingFiles.has(normalizedPath);
-
+
// Remove any CSS imports from JSX/JS files (we're using Tailwind)
let fileContent = file.content;
if (file.path.endsWith('.jsx') || file.path.endsWith('.js') || file.path.endsWith('.tsx') || file.path.endsWith('.ts')) {
fileContent = fileContent.replace(/import\s+['"]\.\/[^'"]+\.css['"];?\s*\n?/g, '');
}
-
- // Write the file using Python (code-interpreter SDK)
- const escapedContent = fileContent
- .replace(/\\/g, '\\\\')
- .replace(/"""/g, '\\"\\"\\"')
- .replace(/\$/g, '\\$');
-
- await sandboxInstance.runCode(`
-import os
-os.makedirs(os.path.dirname("${fullPath}"), exist_ok=True)
-with open("${fullPath}", 'w') as f:
- f.write("""${escapedContent}""")
-print(f"File written: ${fullPath}")
- `);
-
+
+ // Fix common Tailwind CSS errors in CSS files
+ if (file.path.endsWith('.css')) {
+ // Replace shadow-3xl with shadow-2xl (shadow-3xl doesn't exist)
+ fileContent = fileContent.replace(/shadow-3xl/g, 'shadow-2xl');
+ // Replace any other non-existent shadow utilities
+ fileContent = fileContent.replace(/shadow-4xl/g, 'shadow-2xl');
+ fileContent = fileContent.replace(/shadow-5xl/g, 'shadow-2xl');
+ }
+
+ // Create directory if needed
+ const dirPath = normalizedPath.includes('/') ? normalizedPath.substring(0, normalizedPath.lastIndexOf('/')) : '';
+ if (dirPath) {
+ await providerInstance.runCommand(`mkdir -p ${dirPath}`);
+ }
+
+ // Write the file using provider
+ await providerInstance.writeFile(normalizedPath, fileContent);
+
// Update file cache
if (global.sandboxState?.fileCache) {
global.sandboxState.fileCache.files[normalizedPath] = {
@@ -555,14 +644,14 @@ print(f"File written: ${fullPath}")
lastModified: Date.now()
};
}
-
+
if (isUpdate) {
if (results.filesUpdated) results.filesUpdated.push(normalizedPath);
} else {
if (results.filesCreated) results.filesCreated.push(normalizedPath);
if (global.existingFiles) global.existingFiles.add(normalizedPath);
}
-
+
await sendProgress({
type: 'file-complete',
fileName: normalizedPath,
@@ -579,16 +668,16 @@ print(f"File written: ${fullPath}")
});
}
}
-
+
// Step 3: Execute commands
const commandsArray = Array.isArray(parsed.commands) ? parsed.commands : [];
if (commandsArray.length > 0) {
- await sendProgress({
- type: 'step',
+ await sendProgress({
+ type: 'step',
step: 3,
message: `Executing ${commandsArray.length} commands...`
});
-
+
for (const [index, cmd] of commandsArray.entries()) {
try {
await sendProgress({
@@ -598,33 +687,36 @@ print(f"File written: ${fullPath}")
command: cmd,
action: 'executing'
});
-
- // Use E2B commands.run() for cleaner execution
- const result = await sandboxInstance.commands.run(cmd, {
- cwd: '/home/user/app',
- timeout: 60,
- on_stdout: async (data: string) => {
- await sendProgress({
- type: 'command-output',
- command: cmd,
- output: data,
- stream: 'stdout'
- });
- },
- on_stderr: async (data: string) => {
- await sendProgress({
- type: 'command-output',
- command: cmd,
- output: data,
- stream: 'stderr'
- });
- }
- });
-
+
+ // Use provider runCommand
+ const result = await providerInstance.runCommand(cmd);
+
+ // Get command output from provider result
+ const stdout = result.stdout;
+ const stderr = result.stderr;
+
+ if (stdout) {
+ await sendProgress({
+ type: 'command-output',
+ command: cmd,
+ output: stdout,
+ stream: 'stdout'
+ });
+ }
+
+ if (stderr) {
+ await sendProgress({
+ type: 'command-output',
+ command: cmd,
+ output: stderr,
+ stream: 'stderr'
+ });
+ }
+
if (results.commandsExecuted) {
results.commandsExecuted.push(cmd);
}
-
+
await sendProgress({
type: 'command-complete',
command: cmd,
@@ -643,7 +735,7 @@ print(f"File written: ${fullPath}")
}
}
}
-
+
// Send final results
await sendProgress({
type: 'complete',
@@ -652,7 +744,7 @@ print(f"File written: ${fullPath}")
structure: parsed.structure,
message: `Successfully applied ${results.filesCreated.length} files`
});
-
+
// Track applied files in conversation state
if (global.conversationState && results.filesCreated.length > 0) {
const messages = global.conversationState.context.messages;
@@ -665,7 +757,7 @@ print(f"File written: ${fullPath}")
};
}
}
-
+
// Track applied code in project evolution
if (global.conversationState.context.projectEvolution) {
global.conversationState.context.projectEvolution.majorChanges.push({
@@ -674,10 +766,10 @@ print(f"File written: ${fullPath}")
filesAffected: results.filesCreated || []
});
}
-
+
global.conversationState.lastUpdated = Date.now();
}
-
+
} catch (error) {
await sendProgress({
type: 'error',
@@ -686,8 +778,8 @@ print(f"File written: ${fullPath}")
} finally {
await writer.close();
}
- })(sandbox, request);
-
+ })(provider, request);
+
// Return the stream
return new Response(stream.readable, {
headers: {
@@ -696,7 +788,7 @@ print(f"File written: ${fullPath}")
'Connection': 'keep-alive',
},
});
-
+
} catch (error) {
console.error('Apply AI code stream error:', error);
return NextResponse.json(
diff --git a/app/api/apply-ai-code/route.ts b/app/api/apply-ai-code/route.ts
index f00f08a..aa849b4 100644
--- a/app/api/apply-ai-code/route.ts
+++ b/app/api/apply-ai-code/route.ts
@@ -1,4 +1,5 @@
import { NextRequest, NextResponse } from 'next/server';
+import { parseMorphEdits, applyMorphEditToFile } from '@/lib/morph-fast-apply';
import type { SandboxState } from '@/types/sandbox';
import type { ConversationState } from '@/types/conversation';
@@ -128,6 +129,7 @@ function parseAIResponse(response: string): ParsedResponse {
declare global {
var activeSandbox: any;
+ var activeSandboxProvider: any;
var existingFiles: Set;
var sandboxState: SandboxState;
}
@@ -144,14 +146,23 @@ export async function POST(request: NextRequest) {
// Parse the AI response
const parsed = parseAIResponse(response);
+ const morphEnabled = Boolean(isEdit && process.env.MORPH_API_KEY);
+ const morphEdits = morphEnabled ? parseMorphEdits(response) : [];
+ console.log('[apply-ai-code] Morph Fast Apply mode:', morphEnabled);
+ if (morphEnabled) {
+ console.log('[apply-ai-code] Morph edits found:', morphEdits.length);
+ }
// Initialize existingFiles if not already
if (!global.existingFiles) {
global.existingFiles = new Set();
}
+ // Get the active sandbox or provider
+ const sandbox = global.activeSandbox || global.activeSandboxProvider;
+
// If no active sandbox, just return parsed results
- if (!global.activeSandbox) {
+ if (!sandbox) {
return NextResponse.json({
success: true,
results: {
@@ -167,11 +178,43 @@ export async function POST(request: NextRequest) {
});
}
+ // Verify sandbox is ready before applying code
+ console.log('[apply-ai-code] Verifying sandbox is ready...');
+
+ // For Vercel sandboxes, check if Vite is running
+ if (sandbox.constructor?.name === 'VercelProvider' || sandbox.getSandboxInfo?.()?.provider === 'vercel') {
+ console.log('[apply-ai-code] Detected Vercel sandbox, checking Vite status...');
+ try {
+ // Check if Vite process is running
+ const checkResult = await sandbox.runCommand('pgrep -f vite');
+ if (!checkResult || !checkResult.stdout) {
+ console.log('[apply-ai-code] Vite not running, starting it...');
+ // Start Vite if not running
+ await sandbox.runCommand('sh -c "cd /vercel/sandbox && nohup npm run dev > /tmp/vite.log 2>&1 &"');
+ // Wait for Vite to start
+ await new Promise(resolve => setTimeout(resolve, 5000));
+ console.log('[apply-ai-code] Vite started, proceeding with code application');
+ } else {
+ console.log('[apply-ai-code] Vite is already running');
+ }
+ } catch (e) {
+ console.log('[apply-ai-code] Could not check Vite status, proceeding anyway:', e);
+ }
+ }
+
// Apply to active sandbox
console.log('[apply-ai-code] Applying code to sandbox...');
console.log('[apply-ai-code] Is edit mode:', isEdit);
console.log('[apply-ai-code] Files to write:', parsed.files.map(f => f.path));
console.log('[apply-ai-code] Existing files:', Array.from(global.existingFiles));
+ if (morphEnabled) {
+ console.log('[apply-ai-code] Morph Fast Apply enabled');
+ if (morphEdits.length > 0) {
+ console.log('[apply-ai-code] Parsed Morph edits:', morphEdits.map(e => e.targetFile));
+ } else {
+ console.log('[apply-ai-code] No blocks found in response');
+ }
+ }
const results = {
filesCreated: [] as string[],
@@ -296,9 +339,46 @@ export async function POST(request: NextRequest) {
}
}
+ // Attempt Morph Fast Apply for edits before file creation
+ const morphUpdatedPaths = new Set<string>();
+
+ if (morphEnabled && morphEdits.length > 0) {
+ if (!global.activeSandbox) {
+ console.warn('[apply-ai-code] Morph edits found but no active sandbox; skipping Morph application');
+ } else {
+ console.log(`[apply-ai-code] Applying ${morphEdits.length} fast edits via Morph...`);
+ for (const edit of morphEdits) {
+ try {
+ const result = await applyMorphEditToFile({
+ sandbox: global.activeSandbox,
+ targetPath: edit.targetFile,
+ instructions: edit.instructions,
+ updateSnippet: edit.update
+ });
+
+ if (result.success && result.normalizedPath) {
+ morphUpdatedPaths.add(result.normalizedPath);
+ results.filesUpdated.push(result.normalizedPath);
+ console.log('[apply-ai-code] Morph applied to', result.normalizedPath);
+ } else {
+ const msg = result.error || 'Unknown Morph error';
+ console.error('[apply-ai-code] Morph apply failed:', msg);
+ results.errors.push(`Morph apply failed for ${edit.targetFile}: ${msg}`);
+ }
+ } catch (e) {
+ console.error('[apply-ai-code] Morph apply exception:', e);
+ results.errors.push(`Morph apply exception for ${edit.targetFile}: ${(e as Error).message}`);
+ }
+ }
+ }
+ }
+ if (morphEnabled && morphEdits.length === 0) {
+ console.warn('[apply-ai-code] Morph enabled but no blocks found; falling back to full-file flow');
+ }
+
// Filter out config files that shouldn't be created
const configFiles = ['tailwind.config.js', 'vite.config.js', 'package.json', 'package-lock.json', 'tsconfig.json', 'postcss.config.js'];
- const filteredFiles = parsed.files.filter(file => {
+ let filteredFiles = parsed.files.filter(file => {
const fileName = file.path.split('/').pop() || '';
if (configFiles.includes(fileName)) {
console.warn(`[apply-ai-code] Skipping config file: ${file.path} - already exists in template`);
@@ -306,6 +386,21 @@ export async function POST(request: NextRequest) {
}
return true;
});
+
+ // Avoid overwriting files already updated by Morph
+ if (morphUpdatedPaths.size > 0) {
+ filteredFiles = filteredFiles.filter(file => {
+ let normalizedPath = file.path.startsWith('/') ? file.path.slice(1) : file.path;
+ const fileName = normalizedPath.split('/').pop() || '';
+ if (!normalizedPath.startsWith('src/') &&
+ !normalizedPath.startsWith('public/') &&
+ normalizedPath !== 'index.html' &&
+ !configFiles.includes(fileName)) {
+ normalizedPath = 'src/' + normalizedPath;
+ }
+ return !morphUpdatedPaths.has(normalizedPath);
+ });
+ }
// Create or update files AFTER package installation
for (const file of filteredFiles) {
@@ -336,11 +431,28 @@ export async function POST(request: NextRequest) {
fileContent = fileContent.replace(/import\s+['"]\.\/[^'"]+\.css['"];?\s*\n?/g, '');
}
+ // Fix common Tailwind CSS errors in CSS files
+ if (file.path.endsWith('.css')) {
+ // Replace shadow-3xl with shadow-2xl (shadow-3xl doesn't exist)
+ fileContent = fileContent.replace(/shadow-3xl/g, 'shadow-2xl');
+ // Replace any other non-existent shadow utilities
+ fileContent = fileContent.replace(/shadow-4xl/g, 'shadow-2xl');
+ fileContent = fileContent.replace(/shadow-5xl/g, 'shadow-2xl');
+ }
+
console.log(`[apply-ai-code] Writing file using E2B files API: ${fullPath}`);
try {
- // Use the correct E2B API - sandbox.files.write()
- await global.activeSandbox.files.write(fullPath, fileContent);
+ // Check if we're using provider pattern (v2) or direct sandbox (v1)
+ if (sandbox.writeFile) {
+ // V2: Provider pattern (Vercel/E2B provider)
+ await sandbox.writeFile(file.path, fileContent);
+ } else if (sandbox.files?.write) {
+ // V1: Direct E2B sandbox
+ await sandbox.files.write(fullPath, fileContent);
+ } else {
+ throw new Error('Unsupported sandbox type');
+ }
console.log(`[apply-ai-code] Successfully wrote file: ${fullPath}`);
// Update file cache
@@ -354,7 +466,7 @@ export async function POST(request: NextRequest) {
} catch (writeError) {
console.error(`[apply-ai-code] E2B file write error:`, writeError);
- throw writeError;
+ throw writeError as Error;
}
@@ -432,15 +544,17 @@ function App() {
export default App;`;
try {
- await global.activeSandbox.runCode(`
-file_path = "/home/user/app/src/App.jsx"
-file_content = """${appContent.replace(/"/g, '\\"').replace(/\n/g, '\\n')}"""
-
-with open(file_path, 'w') as f:
- f.write(file_content)
-
-print(f"Auto-generated: {file_path}")
- `);
+ // Use provider pattern if available
+ if (sandbox.writeFile) {
+ await sandbox.writeFile('src/App.jsx', appContent);
+ } else if (sandbox.writeFiles) {
+ await sandbox.writeFiles([{
+ path: 'src/App.jsx',
+ content: Buffer.from(appContent)
+ }]);
+ }
+
+ console.log('Auto-generated: src/App.jsx');
results.filesCreated.push('src/App.jsx (auto-generated)');
} catch (error) {
results.errors.push(`Failed to create App.jsx: ${(error as Error).message}`);
@@ -459,9 +573,7 @@ print(f"Auto-generated: {file_path}")
if (!isEdit && !indexCssInParsed && !indexCssExists) {
try {
- await global.activeSandbox.runCode(`
-file_path = "/home/user/app/src/index.css"
-file_content = """@tailwind base;
+ const indexCssContent = `@tailwind base;
@tailwind components;
@tailwind utilities;
@@ -483,15 +595,22 @@ body {
margin: 0;
min-width: 320px;
min-height: 100vh;
-}"""
+}`;
-with open(file_path, 'w') as f:
- f.write(file_content)
-
-print(f"Auto-generated: {file_path}")
- `);
+ // Use provider pattern if available
+ if (sandbox.writeFile) {
+ await sandbox.writeFile('src/index.css', indexCssContent);
+ } else if (sandbox.writeFiles) {
+ await sandbox.writeFiles([{
+ path: 'src/index.css',
+ content: Buffer.from(indexCssContent)
+ }]);
+ }
+
+ console.log('Auto-generated: src/index.css');
results.filesCreated.push('src/index.css (with Tailwind)');
} catch (error) {
+ console.error('Failed to create index.css:', error);
results.errors.push('Failed to create index.css with Tailwind');
}
}
@@ -500,15 +619,47 @@ print(f"Auto-generated: {file_path}")
// Execute commands
for (const cmd of parsed.commands) {
try {
- await global.activeSandbox.runCode(`
-import subprocess
-os.chdir('/home/user/app')
-result = subprocess.run(${JSON.stringify(cmd.split(' '))}, capture_output=True, text=True)
-print(f"Executed: ${cmd}")
-print(result.stdout)
-if result.stderr:
- print(f"Errors: {result.stderr}")
- `);
+ // Parse command and arguments
+ const commandParts = cmd.trim().split(/\s+/);
+ const cmdName = commandParts[0];
+ const args = commandParts.slice(1);
+
+ // Execute command using sandbox
+ let result;
+ if (sandbox.runCommand && typeof sandbox.runCommand === 'function') {
+ // Check if this is a provider pattern sandbox
+ const testResult = await sandbox.runCommand(cmd);
+ if (testResult && typeof testResult === 'object' && 'stdout' in testResult) {
+ // Provider returns CommandResult directly
+ result = testResult;
+ } else {
+ // Direct sandbox - expects object with cmd and args
+ result = await sandbox.runCommand({
+ cmd: cmdName,
+ args
+ });
+ }
+ }
+
+ console.log(`Executed: ${cmd}`);
+
+ // Handle result based on type
+ let stdout = '';
+ let stderr = '';
+
+ if (result) {
+ if (typeof result.stdout === 'string') {
+ stdout = result.stdout;
+ stderr = result.stderr || '';
+ } else if (typeof result.stdout === 'function') {
+ stdout = await result.stdout();
+ stderr = await result.stderr();
+ }
+ }
+
+ if (stdout) console.log(stdout);
+ if (stderr) console.log(`Errors: ${stderr}`);
+
results.commandsExecuted.push(cmd);
} catch (error) {
results.errors.push(`Failed to execute ${cmd}: ${(error as Error).message}`);
diff --git a/app/api/conversation-state/route.ts b/app/api/conversation-state/route.ts
index 1a37468..969692c 100644
--- a/app/api/conversation-state/route.ts
+++ b/app/api/conversation-state/route.ts
@@ -59,10 +59,26 @@ export async function POST(request: NextRequest) {
case 'clear-old':
// Clear old conversation data but keep recent context
if (!global.conversationState) {
+ // Initialize conversation state if it doesn't exist
+ global.conversationState = {
+ conversationId: `conv-${Date.now()}`,
+ startedAt: Date.now(),
+ lastUpdated: Date.now(),
+ context: {
+ messages: [],
+ edits: [],
+ projectEvolution: { majorChanges: [] },
+ userPreferences: {}
+ }
+ };
+
+ console.log('[conversation-state] Initialized new conversation state for clear-old');
+
return NextResponse.json({
- success: false,
- error: 'No active conversation to clear'
- }, { status: 400 });
+ success: true,
+ message: 'New conversation state initialized',
+ state: global.conversationState
+ });
}
// Keep only recent data
diff --git a/app/api/create-ai-sandbox-v2/route.ts b/app/api/create-ai-sandbox-v2/route.ts
new file mode 100644
index 0000000..cd72a74
--- /dev/null
+++ b/app/api/create-ai-sandbox-v2/route.ts
@@ -0,0 +1,103 @@
+import { NextResponse } from 'next/server';
+import { SandboxFactory } from '@/lib/sandbox/factory';
+// SandboxProvider type is used through SandboxFactory
+import type { SandboxState } from '@/types/sandbox';
+import { sandboxManager } from '@/lib/sandbox/sandbox-manager';
+
+// Store active sandbox globally
+declare global {
+ var activeSandboxProvider: any;
+ var sandboxData: any;
+ var existingFiles: Set<string>;
+ var sandboxState: SandboxState;
+}
+
+export async function POST() {
+ try {
+ console.log('[create-ai-sandbox-v2] Creating sandbox...');
+
+ // Clean up all existing sandboxes
+ console.log('[create-ai-sandbox-v2] Cleaning up existing sandboxes...');
+ await sandboxManager.terminateAll();
+
+ // Also clean up legacy global state
+ if (global.activeSandboxProvider) {
+ try {
+ await global.activeSandboxProvider.terminate();
+ } catch (e) {
+ console.error('Failed to terminate legacy global sandbox:', e);
+ }
+ global.activeSandboxProvider = null;
+ }
+
+ // Clear existing files tracking
+ if (global.existingFiles) {
+ global.existingFiles.clear();
+ } else {
+ global.existingFiles = new Set<string>();
+ }
+
+ // Create new sandbox using factory
+ const provider = SandboxFactory.create();
+ const sandboxInfo = await provider.createSandbox();
+
+ console.log('[create-ai-sandbox-v2] Setting up Vite React app...');
+ await provider.setupViteApp();
+
+ // Register with sandbox manager
+ sandboxManager.registerSandbox(sandboxInfo.sandboxId, provider);
+
+ // Also store in legacy global state for backward compatibility
+ global.activeSandboxProvider = provider;
+ global.sandboxData = {
+ sandboxId: sandboxInfo.sandboxId,
+ url: sandboxInfo.url
+ };
+
+ // Initialize sandbox state
+ global.sandboxState = {
+ fileCache: {
+ files: {},
+ lastSync: Date.now(),
+ sandboxId: sandboxInfo.sandboxId
+ },
+ sandbox: provider, // Store the provider instead of raw sandbox
+ sandboxData: {
+ sandboxId: sandboxInfo.sandboxId,
+ url: sandboxInfo.url
+ }
+ };
+
+ console.log('[create-ai-sandbox-v2] Sandbox ready at:', sandboxInfo.url);
+
+ return NextResponse.json({
+ success: true,
+ sandboxId: sandboxInfo.sandboxId,
+ url: sandboxInfo.url,
+ provider: sandboxInfo.provider,
+ message: 'Sandbox created and Vite React app initialized'
+ });
+
+ } catch (error) {
+ console.error('[create-ai-sandbox-v2] Error:', error);
+
+ // Clean up on error
+ await sandboxManager.terminateAll();
+ if (global.activeSandboxProvider) {
+ try {
+ await global.activeSandboxProvider.terminate();
+ } catch (e) {
+ console.error('Failed to terminate sandbox on error:', e);
+ }
+ global.activeSandboxProvider = null;
+ }
+
+ return NextResponse.json(
+ {
+ error: error instanceof Error ? error.message : 'Failed to create sandbox',
+ details: error instanceof Error ? error.stack : undefined
+ },
+ { status: 500 }
+ );
+ }
+}
\ No newline at end of file
diff --git a/app/api/create-ai-sandbox/route.ts b/app/api/create-ai-sandbox/route.ts
index 257ce1d..daf9b84 100644
--- a/app/api/create-ai-sandbox/route.ts
+++ b/app/api/create-ai-sandbox/route.ts
@@ -1,5 +1,5 @@
import { NextResponse } from 'next/server';
-import { Sandbox } from '@e2b/code-interpreter';
+import { Sandbox } from '@vercel/sandbox';
import type { SandboxState } from '@/types/sandbox';
import { appConfig } from '@/config/app.config';
@@ -9,23 +9,74 @@ declare global {
var sandboxData: any;
var existingFiles: Set;
var sandboxState: SandboxState;
+ var sandboxCreationInProgress: boolean;
+ var sandboxCreationPromise: Promise<any> | null;
}
export async function POST() {
+ // Check if sandbox creation is already in progress
+ if (global.sandboxCreationInProgress && global.sandboxCreationPromise) {
+ console.log('[create-ai-sandbox] Sandbox creation already in progress, waiting for existing creation...');
+ try {
+ const existingResult = await global.sandboxCreationPromise;
+ console.log('[create-ai-sandbox] Returning existing sandbox creation result');
+ return NextResponse.json(existingResult);
+ } catch (error) {
+ console.error('[create-ai-sandbox] Existing sandbox creation failed:', error);
+ // Continue with new creation if the existing one failed
+ }
+ }
+
+ // Check if we already have an active sandbox
+ if (global.activeSandbox && global.sandboxData) {
+ console.log('[create-ai-sandbox] Returning existing active sandbox');
+ return NextResponse.json({
+ success: true,
+ sandboxId: global.sandboxData.sandboxId,
+ url: global.sandboxData.url
+ });
+ }
+
+ // Set the creation flag
+ global.sandboxCreationInProgress = true;
+
+ // Create the promise that other requests can await
+ global.sandboxCreationPromise = createSandboxInternal();
+
+ try {
+ const result = await global.sandboxCreationPromise;
+ return NextResponse.json(result);
+ } catch (error) {
+ console.error('[create-ai-sandbox] Sandbox creation failed:', error);
+ return NextResponse.json(
+ {
+ error: error instanceof Error ? error.message : 'Failed to create sandbox',
+ details: error instanceof Error ? error.stack : undefined
+ },
+ { status: 500 }
+ );
+ } finally {
+ global.sandboxCreationInProgress = false;
+ global.sandboxCreationPromise = null;
+ }
+}
+
+async function createSandboxInternal() {
let sandbox: any = null;
try {
- console.log('[create-ai-sandbox] Creating base sandbox...');
+ console.log('[create-ai-sandbox] Creating Vercel sandbox...');
// Kill existing sandbox if any
if (global.activeSandbox) {
- console.log('[create-ai-sandbox] Killing existing sandbox...');
+ console.log('[create-ai-sandbox] Stopping existing sandbox...');
try {
- await global.activeSandbox.kill();
+ await global.activeSandbox.stop();
} catch (e) {
- console.error('Failed to close existing sandbox:', e);
+ console.error('Failed to stop existing sandbox:', e);
}
global.activeSandbox = null;
+ global.sandboxData = null;
}
// Clear existing files tracking
@@ -35,81 +86,102 @@ export async function POST() {
global.existingFiles = new Set();
}
- // Create base sandbox - we'll set up Vite ourselves for full control
- console.log(`[create-ai-sandbox] Creating base E2B sandbox with ${appConfig.e2b.timeoutMinutes} minute timeout...`);
- sandbox = await Sandbox.create({
- apiKey: process.env.E2B_API_KEY,
- timeoutMs: appConfig.e2b.timeoutMs
- });
+ // Create Vercel sandbox with flexible authentication
+ console.log(`[create-ai-sandbox] Creating Vercel sandbox with ${appConfig.vercelSandbox.timeoutMinutes} minute timeout...`);
- const sandboxId = (sandbox as any).sandboxId || Date.now().toString();
- const host = (sandbox as any).getHost(appConfig.e2b.vitePort);
+ // Prepare sandbox configuration
+ const sandboxConfig: any = {
+ timeout: appConfig.vercelSandbox.timeoutMs,
+ runtime: appConfig.vercelSandbox.runtime,
+ ports: [appConfig.vercelSandbox.devPort]
+ };
+ // Add authentication parameters if using personal access token
+ if (process.env.VERCEL_TOKEN && process.env.VERCEL_TEAM_ID && process.env.VERCEL_PROJECT_ID) {
+ console.log('[create-ai-sandbox] Using personal access token authentication');
+ sandboxConfig.teamId = process.env.VERCEL_TEAM_ID;
+ sandboxConfig.projectId = process.env.VERCEL_PROJECT_ID;
+ sandboxConfig.token = process.env.VERCEL_TOKEN;
+ } else if (process.env.VERCEL_OIDC_TOKEN) {
+ console.log('[create-ai-sandbox] Using OIDC token authentication');
+ } else {
+ console.log('[create-ai-sandbox] No authentication found - relying on default Vercel authentication');
+ }
+
+ sandbox = await Sandbox.create(sandboxConfig);
+
+ const sandboxId = sandbox.sandboxId;
console.log(`[create-ai-sandbox] Sandbox created: ${sandboxId}`);
- console.log(`[create-ai-sandbox] Sandbox host: ${host}`);
- // Set up a basic Vite React app using Python to write files
+ // Set up a basic Vite React app
console.log('[create-ai-sandbox] Setting up Vite React app...');
- // Write all files in a single Python script to avoid multiple executions
- const setupScript = `
-import os
-import json
+ // First, change to the working directory
+ await sandbox.runCommand('pwd');
+ // workDir is defined in appConfig - not needed here
+
+ // Get the sandbox URL using the correct Vercel Sandbox API
+ const sandboxUrl = sandbox.domain(appConfig.vercelSandbox.devPort);
+
+ // Extract the hostname from the sandbox URL for Vite config
+ const sandboxHostname = new URL(sandboxUrl).hostname;
+ console.log(`[create-ai-sandbox] Sandbox hostname: ${sandboxHostname}`);
-print('Setting up React app with Vite and Tailwind...')
-
-# Create directory structure
-os.makedirs('/home/user/app/src', exist_ok=True)
-
-# Package.json
-package_json = {
- "name": "sandbox-app",
- "version": "1.0.0",
- "type": "module",
- "scripts": {
- "dev": "vite --host",
- "build": "vite build",
- "preview": "vite preview"
- },
- "dependencies": {
- "react": "^18.2.0",
- "react-dom": "^18.2.0"
- },
- "devDependencies": {
- "@vitejs/plugin-react": "^4.0.0",
- "vite": "^4.3.9",
- "tailwindcss": "^3.3.0",
- "postcss": "^8.4.31",
- "autoprefixer": "^10.4.16"
- }
-}
-
-with open('/home/user/app/package.json', 'w') as f:
- json.dump(package_json, f, indent=2)
-print('✓ package.json')
-
-# Vite config for E2B - with allowedHosts
-vite_config = """import { defineConfig } from 'vite'
+ // Create the Vite config content with the proper hostname (using string concatenation)
+ const viteConfigContent = `import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'
-// E2B-compatible Vite configuration
+// Vercel Sandbox compatible Vite configuration
export default defineConfig({
plugins: [react()],
server: {
host: '0.0.0.0',
- port: 5173,
+ port: ${appConfig.vercelSandbox.devPort},
strictPort: true,
- hmr: false,
- allowedHosts: ['.e2b.app', 'localhost', '127.0.0.1']
+ hmr: true,
+ allowedHosts: [
+ 'localhost',
+ '127.0.0.1',
+ '` + sandboxHostname + `', // Allow the Vercel Sandbox domain
+ '.vercel.run', // Allow all Vercel sandbox domains
+ '.vercel-sandbox.dev' // Fallback pattern
+ ]
}
-})"""
+})`;
-with open('/home/user/app/vite.config.js', 'w') as f:
- f.write(vite_config)
-print('✓ vite.config.js')
-
-# Tailwind config - standard without custom design tokens
-tailwind_config = """/** @type {import('tailwindcss').Config} */
+ // Create the project files (now we have the sandbox hostname)
+ const projectFiles = [
+ {
+ path: 'package.json',
+ content: Buffer.from(JSON.stringify({
+ "name": "sandbox-app",
+ "version": "1.0.0",
+ "type": "module",
+ "scripts": {
+ "dev": "vite --host --port 3000",
+ "build": "vite build",
+ "preview": "vite preview"
+ },
+ "dependencies": {
+ "react": "^18.2.0",
+ "react-dom": "^18.2.0"
+ },
+ "devDependencies": {
+ "@vitejs/plugin-react": "^4.0.0",
+ "vite": "^4.3.9",
+ "tailwindcss": "^3.3.0",
+ "postcss": "^8.4.31",
+ "autoprefixer": "^10.4.16"
+ }
+ }, null, 2))
+ },
+ {
+ path: 'vite.config.js',
+ content: Buffer.from(viteConfigContent)
+ },
+ {
+ path: 'tailwind.config.js',
+ content: Buffer.from(`/** @type {import('tailwindcss').Config} */
export default {
content: [
"./index.html",
@@ -119,26 +191,20 @@ export default {
extend: {},
},
plugins: [],
-}"""
-
-with open('/home/user/app/tailwind.config.js', 'w') as f:
- f.write(tailwind_config)
-print('✓ tailwind.config.js')
-
-# PostCSS config
-postcss_config = """export default {
+}`)
+ },
+ {
+ path: 'postcss.config.js',
+ content: Buffer.from(`export default {
plugins: {
tailwindcss: {},
autoprefixer: {},
},
-}"""
-
-with open('/home/user/app/postcss.config.js', 'w') as f:
- f.write(postcss_config)
-print('✓ postcss.config.js')
-
-# Index.html
-index_html = """
+}`)
+ },
+ {
+ path: 'index.html',
+ content: Buffer.from(`
@@ -149,14 +215,11 @@ index_html = """
-"""
-
-with open('/home/user/app/index.html', 'w') as f:
- f.write(index_html)
-print('✓ index.html')
-
-# Main.jsx
-main_jsx = """import React from 'react'
+