Save current v2 sandbox implementation before styling refactor

- Modified sandbox API routes for v2 implementation
- Updated sandbox providers (E2B and Vercel)
- Added styling-reference directory with Firecrawl AI-ready website
- Preparing for styling system port from Firecrawl design

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
Developers Digest
2025-09-04 10:21:28 -04:00
parent defd90a0ac
commit b96d048dbd
15 changed files with 340 additions and 172 deletions
+24 -6
View File
@@ -1,20 +1,38 @@
# Required
FIRECRAWL_API_KEY=your_firecrawl_api_key # Get from https://firecrawl.dev (Web scraping)
# Vercel Sandbox Authentication (choose one method)
# See: https://vercel.com/docs/vercel-sandbox#authentication
# =================================================================================
# SANDBOX PROVIDER - Choose Option 1 OR 2
# =================================================================================
# Method 1: OIDC Token (recommended for development)
# Option 1: E2B Sandbox (recommended for development)
# Set SANDBOX_PROVIDER=e2b and configure E2B_API_KEY below
SANDBOX_PROVIDER=e2b
E2B_API_KEY=your_e2b_api_key # Get from https://e2b.dev
# Option 2: Vercel Sandbox
# Set SANDBOX_PROVIDER=vercel and choose authentication method below
# SANDBOX_PROVIDER=vercel
# Vercel Authentication - Choose method a OR b
# Method a: OIDC Token (recommended for development)
# Run `vercel link` then `vercel env pull` to get VERCEL_OIDC_TOKEN automatically
# VERCEL_OIDC_TOKEN=auto_generated_by_vercel_env_pull
# Method 2: Personal Access Token (for production or when OIDC unavailable)
# Method b: Personal Access Token (for production or when OIDC unavailable)
# VERCEL_TEAM_ID=team_xxxxxxxxx # Your Vercel team ID
# VERCEL_PROJECT_ID=prj_xxxxxxxxx # Your Vercel project ID
# VERCEL_TOKEN=vercel_xxxxxxxxxxxx # Personal access token from Vercel dashboard
# Optional (need at least one AI provider)
# =================================================================================
# AI PROVIDERS - Need at least one
# =================================================================================
# Vercel AI Gateway (recommended - provides access to multiple models)
AI_GATEWAY_API_KEY=your_ai_gateway_api_key # Get from https://vercel.com/dashboard/ai-gateway/api-keys
# Individual provider keys (used when AI_GATEWAY_API_KEY is not set)
ANTHROPIC_API_KEY=your_anthropic_api_key # Get from https://console.anthropic.com
OPENAI_API_KEY=your_openai_api_key # Get from https://platform.openai.com (GPT-5)
GEMINI_API_KEY=your_gemini_api_key # Get from https://aistudio.google.com/app/apikey
GROQ_API_KEY=your_groq_api_key # Get from https://console.groq.com (Fast inference - Kimi K2 recommended)
GROQ_API_KEY=your_groq_api_key # Get from https://console.groq.com (Fast inference - Kimi K2 recommended)
+16 -6
View File
@@ -7,18 +7,28 @@ import { generateObject } from 'ai';
import { z } from 'zod';
import type { FileManifest } from '@/types/file-manifest';
// Check if we're using Vercel AI Gateway
const isUsingAIGateway = !!process.env.AI_GATEWAY_API_KEY;
const aiGatewayBaseURL = 'https://ai-gateway.vercel.sh/v1';
const groq = createGroq({
apiKey: process.env.GROQ_API_KEY,
apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.GROQ_API_KEY,
baseURL: isUsingAIGateway ? aiGatewayBaseURL : undefined,
});
const anthropic = createAnthropic({
apiKey: process.env.ANTHROPIC_API_KEY,
baseURL: process.env.ANTHROPIC_BASE_URL || 'https://api.anthropic.com/v1',
apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.ANTHROPIC_API_KEY,
baseURL: isUsingAIGateway ? aiGatewayBaseURL : (process.env.ANTHROPIC_BASE_URL || 'https://api.anthropic.com/v1'),
});
const openai = createOpenAI({
apiKey: process.env.OPENAI_API_KEY,
baseURL: process.env.OPENAI_BASE_URL,
apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.OPENAI_API_KEY,
baseURL: isUsingAIGateway ? aiGatewayBaseURL : process.env.OPENAI_BASE_URL,
});
const googleGenerativeAI = createGoogleGenerativeAI({
apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.GEMINI_API_KEY,
baseURL: isUsingAIGateway ? aiGatewayBaseURL : undefined,
});
// Schema for the AI's search plan - not file selection!
@@ -104,7 +114,7 @@ export async function POST(request: NextRequest) {
aiModel = openai(model.replace('openai/', ''));
}
} else if (model.startsWith('google/')) {
aiModel = createGoogleGenerativeAI(model.replace('google/', ''));
aiModel = googleGenerativeAI(model.replace('google/', ''));
} else {
// Default to groq if model format is unclear
aiModel = groq(model);
+81 -82
View File
@@ -1,11 +1,11 @@
import { NextRequest, NextResponse } from 'next/server';
import { Sandbox } from '@vercel/sandbox';
import { Sandbox } from '@e2b/code-interpreter';
import type { SandboxState } from '@/types/sandbox';
import type { ConversationState } from '@/types/conversation';
declare global {
var conversationState: ConversationState | null;
var activeSandbox: any;
var activeSandboxProvider: any;
var existingFiles: Set<string>;
var sandboxState: SandboxState;
}
@@ -294,75 +294,88 @@ export async function POST(request: NextRequest) {
global.existingFiles = new Set<string>();
}
// First, always check the global state for active sandbox
let sandbox = global.activeSandbox;
// First, always check the global state for active provider
let provider = global.activeSandboxProvider;
// If we don't have a provider in this instance but we have a sandboxId,
// try to use the existing sandbox data or create a new one
if (!provider && sandboxId) {
console.log(`[apply-ai-code-stream] Provider not in this instance for sandbox ${sandboxId}, checking existing data...`);
// If we have sandbox data but no provider, we'll create a new provider
// E2B doesn't support reconnection like Vercel does
if (global.sandboxData && global.sandboxData.sandboxId === sandboxId) {
console.log(`[apply-ai-code-stream] Creating new provider for existing sandbox ${sandboxId}`);
// Create a new provider instance (this will create a new sandbox since E2B doesn't support reconnection)
try {
const { SandboxFactory } = await import('@/lib/sandbox/factory');
provider = SandboxFactory.create();
await provider.createSandbox();
// Update the global state
global.activeSandboxProvider = provider;
console.log(`[apply-ai-code-stream] Created new provider for sandbox ${sandboxId}`);
} catch (providerError) {
console.error(`[apply-ai-code-stream] Failed to create provider for sandbox ${sandboxId}:`, providerError);
return NextResponse.json({
success: false,
error: `Failed to create sandbox provider for ${sandboxId}. The sandbox may have expired.`,
results: {
filesCreated: [],
packagesInstalled: [],
commandsExecuted: [],
errors: [`Sandbox provider creation failed: ${(providerError as Error).message}`]
},
explanation: parsed.explanation,
structure: parsed.structure,
parsedFiles: parsed.files,
message: `Parsed ${parsed.files.length} files but couldn't apply them - sandbox reconnection failed.`
}, { status: 500 });
}
}
}
// If we don't have a sandbox in this instance but we have a sandboxId,
// reconnect to the existing sandbox
if (!sandbox && sandboxId) {
console.log(`[apply-ai-code-stream] Sandbox ${sandboxId} not in this instance, attempting reconnect...`);
// If we still don't have a provider, create a new one
if (!provider) {
console.log(`[apply-ai-code-stream] No active provider found, creating new sandbox...`);
try {
// Reconnect to the existing sandbox using E2B's connect method
sandbox = await Sandbox.connect(sandboxId, { apiKey: process.env.E2B_API_KEY });
console.log(`[apply-ai-code-stream] Successfully reconnected to sandbox ${sandboxId}`);
// Store the reconnected sandbox globally for this instance
global.activeSandbox = sandbox;
// Update sandbox data if needed
if (!global.sandboxData) {
const host = (sandbox as any).getHost(5173);
const { SandboxFactory } = await import('@/lib/sandbox/factory');
provider = SandboxFactory.create();
await provider.createSandbox();
// Store the provider globally
global.activeSandboxProvider = provider;
// Update sandbox data
const sandboxInfo = provider.getSandboxInfo();
if (sandboxInfo) {
global.sandboxData = {
sandboxId,
url: `https://${host}`
sandboxId: sandboxInfo.sandboxId,
url: sandboxInfo.url
};
}
// Initialize existingFiles if not already
if (!global.existingFiles) {
global.existingFiles = new Set<string>();
}
} catch (reconnectError) {
console.error(`[apply-ai-code-stream] Failed to reconnect to sandbox ${sandboxId}:`, reconnectError);
// If reconnection fails, we'll still try to return a meaningful response
console.log(`[apply-ai-code-stream] Created new sandbox successfully`);
} catch (createError) {
console.error(`[apply-ai-code-stream] Failed to create new sandbox:`, createError);
return NextResponse.json({
success: false,
error: `Failed to reconnect to sandbox ${sandboxId}. The sandbox may have expired or been terminated.`,
error: `Failed to create new sandbox: ${createError instanceof Error ? createError.message : 'Unknown error'}`,
results: {
filesCreated: [],
packagesInstalled: [],
commandsExecuted: [],
errors: [`Sandbox reconnection failed: ${(reconnectError as Error).message}`]
errors: [`Sandbox creation failed: ${createError instanceof Error ? createError.message : 'Unknown error'}`]
},
explanation: parsed.explanation,
structure: parsed.structure,
parsedFiles: parsed.files,
message: `Parsed ${parsed.files.length} files but couldn't apply them - sandbox reconnection failed.`
});
message: `Parsed ${parsed.files.length} files but couldn't apply them - sandbox creation failed.`
}, { status: 500 });
}
}
// If no sandbox at all and no sandboxId provided, return an error
if (!sandbox && !sandboxId) {
console.log('[apply-ai-code-stream] No sandbox available and no sandboxId provided');
return NextResponse.json({
success: false,
error: 'No active sandbox found. Please create a sandbox first.',
results: {
filesCreated: [],
packagesInstalled: [],
commandsExecuted: [],
errors: ['No sandbox available']
},
explanation: parsed.explanation,
structure: parsed.structure,
parsedFiles: parsed.files,
message: `Parsed ${parsed.files.length} files but no sandbox available to apply them.`
});
}
// Create a response stream for real-time updates
const encoder = new TextEncoder();
const stream = new TransformStream();
@@ -374,8 +387,8 @@ export async function POST(request: NextRequest) {
await writer.write(encoder.encode(message));
};
// Start processing in background (pass sandbox and request to the async function)
(async (sandboxInstance, req) => {
// Start processing in background (pass provider and request to the async function)
(async (providerInstance, req) => {
const results = {
filesCreated: [] as string[],
filesUpdated: [] as string[],
@@ -432,7 +445,7 @@ export async function POST(request: NextRequest) {
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
packages: uniquePackages,
sandboxId: sandboxId || (sandboxInstance as any).sandboxId
sandboxId: sandboxId || providerInstance.getSandboxInfo()?.sandboxId
})
});
@@ -536,17 +549,11 @@ export async function POST(request: NextRequest) {
// Create directory if needed
const dirPath = normalizedPath.includes('/') ? normalizedPath.substring(0, normalizedPath.lastIndexOf('/')) : '';
if (dirPath) {
await sandboxInstance.runCommand({
cmd: 'mkdir',
args: ['-p', dirPath]
});
await providerInstance.runCommand(`mkdir -p ${dirPath}`);
}
// Write the file using Vercel Sandbox writeFiles
await sandboxInstance.writeFiles([{
path: normalizedPath,
content: Buffer.from(fileContent)
}]);
// Write the file using provider
await providerInstance.writeFile(normalizedPath, fileContent);
// Update file cache
if (global.sandboxState?.fileCache) {
@@ -599,20 +606,12 @@ export async function POST(request: NextRequest) {
action: 'executing'
});
// Parse command and arguments for Vercel Sandbox
const commandParts = cmd.trim().split(/\s+/);
const cmdName = commandParts[0];
const args = commandParts.slice(1);
// Use Vercel Sandbox runCommand
const result = await sandboxInstance.runCommand({
cmd: cmdName,
args
});
// Get command output
const stdout = await result.stdout();
const stderr = await result.stderr();
// Use provider runCommand
const result = await providerInstance.runCommand(cmd);
// Get command output from provider result
const stdout = result.stdout;
const stderr = result.stderr;
if (stdout) {
await sendProgress({
@@ -697,7 +696,7 @@ export async function POST(request: NextRequest) {
} finally {
await writer.close();
}
})(sandbox, request);
})(provider, request);
// Return the stream
return new Response(stream.readable, {
@@ -707,7 +706,7 @@ export async function POST(request: NextRequest) {
'Connection': 'keep-alive',
},
});
} catch (error) {
console.error('Apply AI code stream error:', error);
return NextResponse.json(
+66 -9
View File
@@ -9,9 +9,59 @@ declare global {
var sandboxData: any;
var existingFiles: Set<string>;
var sandboxState: SandboxState;
var sandboxCreationInProgress: boolean;
var sandboxCreationPromise: Promise<any> | null;
}
export async function POST() {
  // Deduplicate concurrent requests: if another request already kicked off
  // sandbox creation, piggyback on its in-flight promise rather than
  // starting a second sandbox.
  if (global.sandboxCreationInProgress && global.sandboxCreationPromise) {
    console.log('[create-ai-sandbox] Sandbox creation already in progress, waiting for existing creation...');
    try {
      const priorResult = await global.sandboxCreationPromise;
      console.log('[create-ai-sandbox] Returning existing sandbox creation result');
      return NextResponse.json(priorResult);
    } catch (error) {
      console.error('[create-ai-sandbox] Existing sandbox creation failed:', error);
      // Fall through and attempt a fresh creation below.
    }
  }

  // Short-circuit: reuse an already-running sandbox when one exists.
  if (global.activeSandbox && global.sandboxData) {
    console.log('[create-ai-sandbox] Returning existing active sandbox');
    return NextResponse.json({
      success: true,
      sandboxId: global.sandboxData.sandboxId,
      url: global.sandboxData.url
    });
  }

  // Mark creation as in progress and publish the promise so that concurrent
  // requests hitting the guard above can await the same creation.
  global.sandboxCreationInProgress = true;
  global.sandboxCreationPromise = createSandboxInternal();

  try {
    return NextResponse.json(await global.sandboxCreationPromise);
  } catch (error) {
    console.error('[create-ai-sandbox] Sandbox creation failed:', error);
    return NextResponse.json(
      {
        error: error instanceof Error ? error.message : 'Failed to create sandbox',
        details: error instanceof Error ? error.stack : undefined
      },
      { status: 500 }
    );
  } finally {
    // Always clear the in-progress markers so later requests can retry
    // after either success or failure.
    global.sandboxCreationInProgress = false;
    global.sandboxCreationPromise = null;
  }
}
async function createSandboxInternal() {
let sandbox: any = null;
try {
@@ -26,6 +76,7 @@ export async function POST() {
console.error('Failed to stop existing sandbox:', e);
}
global.activeSandbox = null;
global.sandboxData = null;
}
// Clear existing files tracking
@@ -297,12 +348,20 @@ body {
console.log('[create-ai-sandbox] Sandbox ready at:', sandboxUrl);
return NextResponse.json({
const result = {
success: true,
sandboxId,
url: sandboxUrl,
message: 'Vercel sandbox created and Vite React app initialized'
});
};
// Store the result for reuse
global.sandboxData = {
...global.sandboxData,
...result
};
return result;
} catch (error) {
console.error('[create-ai-sandbox] Error:', error);
@@ -316,12 +375,10 @@ body {
}
}
return NextResponse.json(
{
error: error instanceof Error ? error.message : 'Failed to create sandbox',
details: error instanceof Error ? error.stack : undefined
},
{ status: 500 }
);
// Clear global state on error
global.activeSandbox = null;
global.sandboxData = null;
throw error; // Throw to be caught by the outer handler
}
}
+12 -5
View File
@@ -14,21 +14,28 @@ import { appConfig } from '@/config/app.config';
// Force dynamic route to enable streaming
export const dynamic = 'force-dynamic';
// Check if we're using Vercel AI Gateway
const isUsingAIGateway = !!process.env.AI_GATEWAY_API_KEY;
const aiGatewayBaseURL = 'https://ai-gateway.vercel.sh/v1';
const groq = createGroq({
apiKey: process.env.GROQ_API_KEY,
apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.GROQ_API_KEY,
baseURL: isUsingAIGateway ? aiGatewayBaseURL : undefined,
});
const anthropic = createAnthropic({
apiKey: process.env.ANTHROPIC_API_KEY,
baseURL: process.env.ANTHROPIC_BASE_URL || 'https://api.anthropic.com/v1',
apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.ANTHROPIC_API_KEY,
baseURL: isUsingAIGateway ? aiGatewayBaseURL : (process.env.ANTHROPIC_BASE_URL || 'https://api.anthropic.com/v1'),
});
const googleGenerativeAI = createGoogleGenerativeAI({
apiKey: process.env.GEMINI_API_KEY,
apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.GEMINI_API_KEY,
baseURL: isUsingAIGateway ? aiGatewayBaseURL : undefined,
});
const openai = createOpenAI({
apiKey: process.env.OPENAI_API_KEY,
apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.OPENAI_API_KEY,
baseURL: isUsingAIGateway ? aiGatewayBaseURL : process.env.OPENAI_BASE_URL,
});
// Helper function to analyze user preferences from conversation history
+21 -34
View File
@@ -2,6 +2,7 @@ import { NextRequest, NextResponse } from 'next/server';
declare global {
var activeSandbox: any;
var activeSandboxProvider: any;
var sandboxData: any;
}
@@ -35,13 +36,13 @@ export async function POST(request: NextRequest) {
console.log(`[install-packages] Cleaned:`, validPackages);
}
// Get active sandbox
const sandbox = global.activeSandbox;
// Get active sandbox provider
const provider = global.activeSandboxProvider;
if (!sandbox) {
if (!provider) {
return NextResponse.json({
success: false,
error: 'No active sandbox available'
error: 'No active sandbox provider available'
}, { status: 400 });
}
@@ -59,7 +60,7 @@ export async function POST(request: NextRequest) {
};
// Start installation in background
(async (sandboxInstance) => {
(async (providerInstance) => {
try {
await sendProgress({
type: 'start',
@@ -72,10 +73,7 @@ export async function POST(request: NextRequest) {
try {
// Try to kill any running dev server processes
await sandboxInstance.runCommand({
cmd: 'pkill',
args: ['-f', 'vite']
});
await providerInstance.runCommand('pkill -f vite');
await new Promise(resolve => setTimeout(resolve, 1000)); // Wait a bit
} catch (error) {
// It's OK if no process is found
@@ -92,12 +90,13 @@ export async function POST(request: NextRequest) {
try {
// Read package.json to check existing dependencies
const catResult = await sandboxInstance.runCommand({
cmd: 'cat',
args: ['package.json']
});
if (catResult.exitCode === 0) {
const packageJsonContent = await catResult.stdout();
let packageJsonContent = '';
try {
packageJsonContent = await providerInstance.readFile('package.json');
} catch (error) {
console.log('[install-packages] Error reading package.json:', error);
}
if (packageJsonContent) {
const packageJson = JSON.parse(packageJsonContent);
const dependencies = packageJson.dependencies || {};
@@ -144,11 +143,7 @@ export async function POST(request: NextRequest) {
// Restart dev server
await sendProgress({ type: 'status', message: 'Restarting development server...' });
const devServerProcess = await sandboxInstance.runCommand({
cmd: 'npm',
args: ['run', 'dev'],
detached: true
});
await providerInstance.restartViteServer();
await sendProgress({
type: 'complete',
@@ -165,16 +160,12 @@ export async function POST(request: NextRequest) {
message: `Installing ${packagesToInstall.length} new package(s): ${packagesToInstall.join(', ')}`
});
// Run npm install
const installArgs = ['install', '--legacy-peer-deps', ...packagesToInstall];
const installResult = await sandboxInstance.runCommand({
cmd: 'npm',
args: installArgs
});
// Install packages using provider method
const installResult = await providerInstance.installPackages(packagesToInstall);
// Get install output
const stdout = await installResult.stdout();
const stderr = await installResult.stderr();
const stdout = installResult.stdout;
const stderr = installResult.stderr;
if (stdout) {
const lines = stdout.split('\n').filter(line => line.trim());
@@ -218,11 +209,7 @@ export async function POST(request: NextRequest) {
await sendProgress({ type: 'status', message: 'Restarting development server...' });
try {
const devServerProcess = await sandboxInstance.runCommand({
cmd: 'npm',
args: ['run', 'dev'],
detached: true
});
await providerInstance.restartViteServer();
// Wait a bit for the server to start
await new Promise(resolve => setTimeout(resolve, 3000));
@@ -250,7 +237,7 @@ export async function POST(request: NextRequest) {
} finally {
await writer.close();
}
})(sandbox);
})(provider);
// Return the stream
return new Response(stream.readable, {
+6 -6
View File
@@ -1,7 +1,7 @@
import { NextResponse } from 'next/server';
declare global {
var activeSandbox: any;
var activeSandboxProvider: any;
var sandboxData: any;
var existingFiles: Set<string>;
}
@@ -9,19 +9,19 @@ declare global {
export async function POST() {
try {
console.log('[kill-sandbox] Stopping active sandbox...');
let sandboxKilled = false;
// Stop existing sandbox if any
if (global.activeSandbox) {
if (global.activeSandboxProvider) {
try {
await global.activeSandbox.stop();
await global.activeSandboxProvider.terminate();
sandboxKilled = true;
console.log('[kill-sandbox] Sandbox stopped successfully');
} catch (e) {
console.error('[kill-sandbox] Failed to stop sandbox:', e);
}
global.activeSandbox = null;
global.activeSandboxProvider = null;
global.sandboxData = null;
}
+35
View File
@@ -2,8 +2,12 @@ import { NextResponse } from 'next/server';
declare global {
var activeSandbox: any;
var lastViteRestartTime: number;
var viteRestartInProgress: boolean;
}
const RESTART_COOLDOWN_MS = 5000; // 5 second cooldown between restarts
export async function POST() {
try {
if (!global.activeSandbox) {
@@ -13,6 +17,29 @@ export async function POST() {
}, { status: 400 });
}
// Check if restart is already in progress
if (global.viteRestartInProgress) {
console.log('[restart-vite] Vite restart already in progress, skipping...');
return NextResponse.json({
success: true,
message: 'Vite restart already in progress'
});
}
// Check cooldown
const now = Date.now();
if (global.lastViteRestartTime && (now - global.lastViteRestartTime) < RESTART_COOLDOWN_MS) {
const remainingTime = Math.ceil((RESTART_COOLDOWN_MS - (now - global.lastViteRestartTime)) / 1000);
console.log(`[restart-vite] Cooldown active, ${remainingTime}s remaining`);
return NextResponse.json({
success: true,
message: `Vite was recently restarted, cooldown active (${remainingTime}s remaining)`
});
}
// Set the restart flag
global.viteRestartInProgress = true;
console.log('[restart-vite] Forcing Vite restart...');
// Kill existing Vite processes
@@ -51,6 +78,10 @@ export async function POST() {
// Wait for Vite to start up
await new Promise(resolve => setTimeout(resolve, 3000));
// Update global state
global.lastViteRestartTime = Date.now();
global.viteRestartInProgress = false;
return NextResponse.json({
success: true,
message: 'Vite restarted successfully'
@@ -58,6 +89,10 @@ export async function POST() {
} catch (error) {
console.error('[restart-vite] Error:', error);
// Clear the restart flag on error
global.viteRestartInProgress = false;
return NextResponse.json({
success: false,
error: (error as Error).message
+6 -7
View File
@@ -1,7 +1,7 @@
import { NextResponse } from 'next/server';
declare global {
var activeSandbox: any;
var activeSandboxProvider: any;
var sandboxData: any;
var existingFiles: Set<string>;
}
@@ -9,15 +9,14 @@ declare global {
export async function GET() {
try {
// Check if sandbox exists
const sandboxExists = !!global.activeSandbox;
const sandboxExists = !!global.activeSandboxProvider;
let sandboxHealthy = false;
let sandboxInfo = null;
if (sandboxExists && global.activeSandbox) {
if (sandboxExists && global.activeSandboxProvider) {
try {
// Since Python isn't available in the Vite template, just check if sandbox exists
// The sandbox object existing is enough to confirm it's healthy
// Check if sandbox is healthy by calling a method that should work
sandboxHealthy = true;
sandboxInfo = {
sandboxId: global.sandboxData?.sandboxId,
+17 -1
View File
@@ -137,8 +137,11 @@ export default function AISandboxPage() {
// Clear old conversation data on component mount and create/restore sandbox
useEffect(() => {
let isMounted = true;
let sandboxCreated = false; // Track if sandbox was created in this effect
const initializePage = async () => {
// Prevent double execution in React StrictMode
if (sandboxCreated) return;
// Clear old conversation
try {
await fetch('/api/conversation-state', {
@@ -165,9 +168,11 @@ export default function AISandboxPage() {
console.log('[home] Attempting to restore sandbox:', sandboxIdParam);
// For now, just create a new sandbox - you could enhance this to actually restore
// the specific sandbox if your backend supports it
sandboxCreated = true;
await createSandbox(true);
} else {
console.log('[home] No sandbox in URL, creating new sandbox automatically...');
sandboxCreated = true;
await createSandbox(true);
}
} catch (error) {
@@ -369,7 +374,16 @@ export default function AISandboxPage() {
}
};
const sandboxCreationRef = useRef<boolean>(false);
const createSandbox = async (fromHomeScreen = false) => {
// Prevent duplicate sandbox creation
if (sandboxCreationRef.current) {
console.log('[createSandbox] Sandbox creation already in progress, skipping...');
return;
}
sandboxCreationRef.current = true;
console.log('[createSandbox] Starting sandbox creation...');
setLoading(true);
setShowLoadingBackground(true);
@@ -378,7 +392,7 @@ export default function AISandboxPage() {
setScreenshotError(null);
try {
const response = await fetch('/api/create-ai-sandbox', {
const response = await fetch('/api/create-ai-sandbox-v2', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({})
@@ -388,6 +402,7 @@ export default function AISandboxPage() {
console.log('[createSandbox] Response data:', data);
if (data.success) {
sandboxCreationRef.current = false; // Reset the ref on success
setSandboxData(data);
updateStatus('Sandbox active', true);
log('Sandbox created successfully!');
@@ -454,6 +469,7 @@ Tip: I automatically detect and install npm packages from your code imports (lik
addChatMessage(`Failed to create sandbox: ${error.message}`, 'system');
} finally {
setLoading(false);
sandboxCreationRef.current = false; // Reset the ref
}
};
+26 -6
View File
@@ -6,27 +6,47 @@ export const appConfig = {
vercelSandbox: {
// Sandbox timeout in minutes
timeoutMinutes: 15,
// Convert to milliseconds for Vercel Sandbox API
get timeoutMs() {
return this.timeoutMinutes * 60 * 1000;
},
// Development server port (Vercel Sandbox typically uses 3000 for Next.js/React)
devPort: 3000,
// Time to wait for dev server to be ready (in milliseconds)
devServerStartupDelay: 7000,
// Time to wait for CSS rebuild (in milliseconds)
cssRebuildDelay: 2000,
// Working directory in sandbox
workingDirectory: '/app',
// Default runtime for sandbox
runtime: 'node22' // Available: node22, python3.13, v0-next-shadcn, cua-ubuntu-xfce
},
// E2B Sandbox Configuration
e2b: {
// Sandbox timeout in minutes
timeoutMinutes: 30,
// Convert to milliseconds for E2B API
get timeoutMs() {
return this.timeoutMinutes * 60 * 1000;
},
// Development server port (E2B uses 5173 for Vite)
vitePort: 5173,
// Time to wait for Vite dev server to be ready (in milliseconds)
viteStartupDelay: 10000,
// Working directory in sandbox
workingDirectory: '/home/user/app',
},
// AI Model Configuration
ai: {
+24 -10
View File
@@ -100,18 +100,28 @@ export class E2BProvider extends SandboxProvider {
const fullPath = path.startsWith('/') ? path : `/home/user/app/${path}`;
await this.sandbox.runCode(`
import os
// Use the E2B filesystem API to write the file
// Note: E2B SDK uses files.write() method
if ((this.sandbox as any).files && typeof (this.sandbox as any).files.write === 'function') {
// Use the files.write API if available
await (this.sandbox as any).files.write(fullPath, Buffer.from(content));
console.log(`[E2BProvider] Written file using files.write: ${fullPath}`);
} else {
// Fallback to Python code execution
await this.sandbox.runCode(`
import os
# Ensure directory exists
dir_path = os.path.dirname("${fullPath}")
os.makedirs(dir_path, exist_ok=True)
# Ensure directory exists
dir_path = os.path.dirname("${fullPath}")
os.makedirs(dir_path, exist_ok=True)
# Write file
with open("${fullPath}", 'w') as f:
f.write(${JSON.stringify(content)})
print(f"✓ Written: ${fullPath}")
`);
# Write file
with open("${fullPath}", 'w') as f:
f.write(${JSON.stringify(content)})
print(f"✓ Written: ${fullPath}")
`);
console.log(`[E2BProvider] Written file using Python: ${fullPath}`);
}
this.existingFiles.add(path);
}
@@ -475,6 +485,10 @@ print(f'✓ Vite restarted with PID: {process.pid}')
return this.sandboxInfo?.url || null;
}
getSandboxInfo(): SandboxInfo | null {
return this.sandboxInfo;
}
async terminate(): Promise<void> {
if (this.sandbox) {
console.log('[E2BProvider] Terminating sandbox...');
+4
View File
@@ -450,6 +450,10 @@ body {
return this.sandboxInfo?.url || null;
}
getSandboxInfo(): SandboxInfo | null {
return this.sandboxInfo;
}
async terminate(): Promise<void> {
if (this.sandbox) {
console.log('[VercelProvider] Terminating sandbox...');
+1
View File
@@ -48,6 +48,7 @@ export abstract class SandboxProvider {
abstract listFiles(directory?: string): Promise<string[]>;
abstract installPackages(packages: string[]): Promise<CommandResult>;
abstract getSandboxUrl(): string | null;
abstract getSandboxInfo(): SandboxInfo | null;
abstract terminate(): Promise<void>;
abstract isAlive(): boolean;
Submodule styling-reference/ai-ready-website added at 6944f04ada