Merge pull request #123 from bekbull/morph-fast-apply

Optional Fast Apply model for better/faster code edits #120
This commit is contained in:
Developers Digest
2025-09-27 11:07:07 +03:00
committed by GitHub
6 changed files with 1838 additions and 96 deletions
+8
View File
@@ -19,6 +19,10 @@ VERCEL_OIDC_TOKEN=auto_generated_by_vercel_env_pull
# VERCEL_PROJECT_ID=prj_xxxxxxxxx # Your Vercel project ID # VERCEL_PROJECT_ID=prj_xxxxxxxxx # Your Vercel project ID
# VERCEL_TOKEN=vercel_xxxxxxxxxxxx # Personal access token from Vercel dashboard # VERCEL_TOKEN=vercel_xxxxxxxxxxxx # Personal access token from Vercel dashboard
# Get yours at https://console.groq.com
GROQ_API_KEY=your_groq_api_key_here
=======
# Option 2: E2B Sandbox # Option 2: E2B Sandbox
# Set SANDBOX_PROVIDER=e2b and configure E2B_API_KEY below # Set SANDBOX_PROVIDER=e2b and configure E2B_API_KEY below
# SANDBOX_PROVIDER=e2b # SANDBOX_PROVIDER=e2b
@@ -36,3 +40,7 @@ ANTHROPIC_API_KEY=your_anthropic_api_key # Get from https://console.anthropic.c
OPENAI_API_KEY=your_openai_api_key # Get from https://platform.openai.com (GPT-5) OPENAI_API_KEY=your_openai_api_key # Get from https://platform.openai.com (GPT-5)
GEMINI_API_KEY=your_gemini_api_key # Get from https://aistudio.google.com/app/apikey GEMINI_API_KEY=your_gemini_api_key # Get from https://aistudio.google.com/app/apikey
GROQ_API_KEY=your_groq_api_key # Get from https://console.groq.com (Fast inference - Kimi K2 recommended) GROQ_API_KEY=your_groq_api_key # Get from https://console.groq.com (Fast inference - Kimi K2 recommended)
# Optional Morph Fast Apply
# Get yours at https://morphllm.com/
MORPH_API_KEY=your_fast_apply_key
+162 -93
View File
@@ -1,4 +1,5 @@
import { NextRequest, NextResponse } from 'next/server'; import { NextRequest, NextResponse } from 'next/server';
import { parseMorphEdits, applyMorphEditToFile } from '@/lib/morph-fast-apply';
// Sandbox import not needed - using global sandbox from sandbox-manager // Sandbox import not needed - using global sandbox from sandbox-manager
import type { SandboxState } from '@/types/sandbox'; import type { SandboxState } from '@/types/sandbox';
import type { ConversationState } from '@/types/conversation'; import type { ConversationState } from '@/types/conversation';
@@ -29,28 +30,28 @@ function parseAIResponse(response: string): ParsedResponse {
explanation: '', explanation: '',
template: '' template: ''
}; };
// Function to extract packages from import statements // Function to extract packages from import statements
function extractPackagesFromCode(content: string): string[] { function extractPackagesFromCode(content: string): string[] {
const packages: string[] = []; const packages: string[] = [];
// Match ES6 imports // Match ES6 imports
const importRegex = /import\s+(?:(?:\{[^}]*\}|\*\s+as\s+\w+|\w+)(?:\s*,\s*(?:\{[^}]*\}|\*\s+as\s+\w+|\w+))*\s+from\s+)?['"]([^'"]+)['"]/g; const importRegex = /import\s+(?:(?:\{[^}]*\}|\*\s+as\s+\w+|\w+)(?:\s*,\s*(?:\{[^}]*\}|\*\s+as\s+\w+|\w+))*\s+from\s+)?['"]([^'"]+)['"]/g;
let importMatch; let importMatch;
while ((importMatch = importRegex.exec(content)) !== null) { while ((importMatch = importRegex.exec(content)) !== null) {
const importPath = importMatch[1]; const importPath = importMatch[1];
// Skip relative imports and built-in React // Skip relative imports and built-in React
if (!importPath.startsWith('.') && !importPath.startsWith('/') && if (!importPath.startsWith('.') && !importPath.startsWith('/') &&
importPath !== 'react' && importPath !== 'react-dom' && importPath !== 'react' && importPath !== 'react-dom' &&
!importPath.startsWith('@/')) { !importPath.startsWith('@/')) {
// Extract package name (handle scoped packages like @heroicons/react) // Extract package name (handle scoped packages like @heroicons/react)
const packageName = importPath.startsWith('@') const packageName = importPath.startsWith('@')
? importPath.split('/').slice(0, 2).join('/') ? importPath.split('/').slice(0, 2).join('/')
: importPath.split('/')[0]; : importPath.split('/')[0];
if (!packages.includes(packageName)) { if (!packages.includes(packageName)) {
packages.push(packageName); packages.push(packageName);
// Log important packages for debugging // Log important packages for debugging
if (packageName === 'react-router-dom' || packageName.includes('router') || packageName.includes('icon')) { if (packageName === 'react-router-dom' || packageName.includes('router') || packageName.includes('icon')) {
console.log(`[apply-ai-code-stream] Detected package from imports: ${packageName}`); console.log(`[apply-ai-code-stream] Detected package from imports: ${packageName}`);
@@ -58,13 +59,13 @@ function parseAIResponse(response: string): ParsedResponse {
} }
} }
} }
return packages; return packages;
} }
// Parse file sections - handle duplicates and prefer complete versions // Parse file sections - handle duplicates and prefer complete versions
const fileMap = new Map<string, { content: string; isComplete: boolean }>(); const fileMap = new Map<string, { content: string; isComplete: boolean }>();
// First pass: Find all file declarations // First pass: Find all file declarations
const fileRegex = /<file path="([^"]+)">([\s\S]*?)(?:<\/file>|$)/g; const fileRegex = /<file path="([^"]+)">([\s\S]*?)(?:<\/file>|$)/g;
let match; let match;
@@ -72,10 +73,10 @@ function parseAIResponse(response: string): ParsedResponse {
const filePath = match[1]; const filePath = match[1];
const content = match[2].trim(); const content = match[2].trim();
const hasClosingTag = response.substring(match.index, match.index + match[0].length).includes('</file>'); const hasClosingTag = response.substring(match.index, match.index + match[0].length).includes('</file>');
// Check if this file already exists in our map // Check if this file already exists in our map
const existing = fileMap.get(filePath); const existing = fileMap.get(filePath);
// Decide whether to keep this version // Decide whether to keep this version
let shouldReplace = false; let shouldReplace = false;
if (!existing) { if (!existing) {
@@ -89,7 +90,7 @@ function parseAIResponse(response: string): ParsedResponse {
} else if (!existing.isComplete && !hasClosingTag && content.length > existing.content.length) { } else if (!existing.isComplete && !hasClosingTag && content.length > existing.content.length) {
shouldReplace = true; // Both incomplete, keep longer one shouldReplace = true; // Both incomplete, keep longer one
} }
if (shouldReplace) { if (shouldReplace) {
// Additional validation: reject obviously broken content // Additional validation: reject obviously broken content
if (content.includes('...') && !content.includes('...props') && !content.includes('...rest')) { if (content.includes('...') && !content.includes('...props') && !content.includes('...rest')) {
@@ -103,18 +104,18 @@ function parseAIResponse(response: string): ParsedResponse {
} }
} }
} }
// Convert map to array for sections.files // Convert map to array for sections.files
for (const [path, { content, isComplete }] of fileMap.entries()) { for (const [path, { content, isComplete }] of fileMap.entries()) {
if (!isComplete) { if (!isComplete) {
console.log(`[apply-ai-code-stream] Warning: File ${path} appears to be truncated (no closing tag)`); console.log(`[apply-ai-code-stream] Warning: File ${path} appears to be truncated (no closing tag)`);
} }
sections.files.push({ sections.files.push({
path, path,
content content
}); });
// Extract packages from file content // Extract packages from file content
const filePackages = extractPackagesFromCode(content); const filePackages = extractPackagesFromCode(content);
for (const pkg of filePackages) { for (const pkg of filePackages) {
@@ -124,7 +125,7 @@ function parseAIResponse(response: string): ParsedResponse {
} }
} }
} }
// Also parse markdown code blocks with file paths // Also parse markdown code blocks with file paths
const markdownFileRegex = /```(?:file )?path="([^"]+)"\n([\s\S]*?)```/g; const markdownFileRegex = /```(?:file )?path="([^"]+)"\n([\s\S]*?)```/g;
while ((match = markdownFileRegex.exec(response)) !== null) { while ((match = markdownFileRegex.exec(response)) !== null) {
@@ -134,7 +135,7 @@ function parseAIResponse(response: string): ParsedResponse {
path: filePath, path: filePath,
content: content content: content
}); });
// Extract packages from file content // Extract packages from file content
const filePackages = extractPackagesFromCode(content); const filePackages = extractPackagesFromCode(content);
for (const pkg of filePackages) { for (const pkg of filePackages) {
@@ -144,7 +145,7 @@ function parseAIResponse(response: string): ParsedResponse {
} }
} }
} }
// Parse plain text format like "Generated Files: Header.jsx, index.css" // Parse plain text format like "Generated Files: Header.jsx, index.css"
const generatedFilesMatch = response.match(/Generated Files?:\s*([^\n]+)/i); const generatedFilesMatch = response.match(/Generated Files?:\s*([^\n]+)/i);
if (generatedFilesMatch) { if (generatedFilesMatch) {
@@ -154,7 +155,7 @@ function parseAIResponse(response: string): ParsedResponse {
.map(f => f.trim()) .map(f => f.trim())
.filter(f => f.endsWith('.jsx') || f.endsWith('.js') || f.endsWith('.tsx') || f.endsWith('.ts') || f.endsWith('.css') || f.endsWith('.json') || f.endsWith('.html')); .filter(f => f.endsWith('.jsx') || f.endsWith('.js') || f.endsWith('.tsx') || f.endsWith('.ts') || f.endsWith('.css') || f.endsWith('.json') || f.endsWith('.html'));
console.log(`[apply-ai-code-stream] Detected generated files from plain text: ${filesList.join(', ')}`); console.log(`[apply-ai-code-stream] Detected generated files from plain text: ${filesList.join(', ')}`);
// Try to extract the actual file content if it follows // Try to extract the actual file content if it follows
for (const fileName of filesList) { for (const fileName of filesList) {
// Look for the file content after the file name // Look for the file content after the file name
@@ -170,7 +171,7 @@ function parseAIResponse(response: string): ParsedResponse {
content: codeMatch[1].trim() content: codeMatch[1].trim()
}); });
console.log(`[apply-ai-code-stream] Extracted content for ${filePath}`); console.log(`[apply-ai-code-stream] Extracted content for ${filePath}`);
// Extract packages from this file // Extract packages from this file
const filePackages = extractPackagesFromCode(codeMatch[1]); const filePackages = extractPackagesFromCode(codeMatch[1]);
for (const pkg of filePackages) { for (const pkg of filePackages) {
@@ -183,7 +184,7 @@ function parseAIResponse(response: string): ParsedResponse {
} }
} }
} }
// Also try to parse if the response contains raw JSX/JS code blocks // Also try to parse if the response contains raw JSX/JS code blocks
const codeBlockRegex = /```(?:jsx?|tsx?|javascript|typescript)?\n([\s\S]*?)```/g; const codeBlockRegex = /```(?:jsx?|tsx?|javascript|typescript)?\n([\s\S]*?)```/g;
while ((match = codeBlockRegex.exec(response)) !== null) { while ((match = codeBlockRegex.exec(response)) !== null) {
@@ -193,14 +194,14 @@ function parseAIResponse(response: string): ParsedResponse {
if (fileNameMatch) { if (fileNameMatch) {
const fileName = fileNameMatch[1].trim(); const fileName = fileNameMatch[1].trim();
const filePath = fileName.includes('/') ? fileName : `src/components/${fileName}`; const filePath = fileName.includes('/') ? fileName : `src/components/${fileName}`;
// Don't add duplicate files // Don't add duplicate files
if (!sections.files.some(f => f.path === filePath)) { if (!sections.files.some(f => f.path === filePath)) {
sections.files.push({ sections.files.push({
path: filePath, path: filePath,
content: content content: content
}); });
// Extract packages // Extract packages
const filePackages = extractPackagesFromCode(content); const filePackages = extractPackagesFromCode(content);
for (const pkg of filePackages) { for (const pkg of filePackages) {
@@ -223,7 +224,7 @@ function parseAIResponse(response: string): ParsedResponse {
while ((match = pkgRegex.exec(response)) !== null) { while ((match = pkgRegex.exec(response)) !== null) {
sections.packages.push(match[1].trim()); sections.packages.push(match[1].trim());
} }
// Also parse <packages> tag with multiple packages // Also parse <packages> tag with multiple packages
const packagesRegex = /<packages>([\s\S]*?)<\/packages>/; const packagesRegex = /<packages>([\s\S]*?)<\/packages>/;
const packagesMatch = response.match(packagesRegex); const packagesMatch = response.match(packagesRegex);
@@ -263,22 +264,28 @@ function parseAIResponse(response: string): ParsedResponse {
export async function POST(request: NextRequest) { export async function POST(request: NextRequest) {
try { try {
const { response, isEdit = false, packages = [], sandboxId } = await request.json(); const { response, isEdit = false, packages = [], sandboxId } = await request.json();
if (!response) { if (!response) {
return NextResponse.json({ return NextResponse.json({
error: 'response is required' error: 'response is required'
}, { status: 400 }); }, { status: 400 });
} }
// Debug log the response // Debug log the response
console.log('[apply-ai-code-stream] Received response to parse:'); console.log('[apply-ai-code-stream] Received response to parse:');
console.log('[apply-ai-code-stream] Response length:', response.length); console.log('[apply-ai-code-stream] Response length:', response.length);
console.log('[apply-ai-code-stream] Response preview:', response.substring(0, 500)); console.log('[apply-ai-code-stream] Response preview:', response.substring(0, 500));
console.log('[apply-ai-code-stream] isEdit:', isEdit); console.log('[apply-ai-code-stream] isEdit:', isEdit);
console.log('[apply-ai-code-stream] packages:', packages); console.log('[apply-ai-code-stream] packages:', packages);
// Parse the AI response // Parse the AI response
const parsed = parseAIResponse(response); const parsed = parseAIResponse(response);
const morphEnabled = Boolean(isEdit && process.env.MORPH_API_KEY);
const morphEdits = morphEnabled ? parseMorphEdits(response) : [];
console.log('[apply-ai-code-stream] Morph Fast Apply mode:', morphEnabled);
if (morphEnabled) {
console.log('[apply-ai-code-stream] Morph edits found:', morphEdits.length);
}
// Log what was parsed // Log what was parsed
console.log('[apply-ai-code-stream] Parsed result:'); console.log('[apply-ai-code-stream] Parsed result:');
@@ -289,15 +296,15 @@ export async function POST(request: NextRequest) {
}); });
} }
console.log('[apply-ai-code-stream] Packages found:', parsed.packages); console.log('[apply-ai-code-stream] Packages found:', parsed.packages);
// Initialize existingFiles if not already // Initialize existingFiles if not already
if (!global.existingFiles) { if (!global.existingFiles) {
global.existingFiles = new Set<string>(); global.existingFiles = new Set<string>();
} }
// Try to get provider from sandbox manager first // Try to get provider from sandbox manager first
let provider = sandboxId ? sandboxManager.getProvider(sandboxId) : sandboxManager.getActiveProvider(); let provider = sandboxId ? sandboxManager.getProvider(sandboxId) : sandboxManager.getActiveProvider();
// Fall back to global state if not found in manager // Fall back to global state if not found in manager
if (!provider) { if (!provider) {
provider = global.activeSandboxProvider; provider = global.activeSandboxProvider;
@@ -306,10 +313,10 @@ export async function POST(request: NextRequest) {
// If we have a sandboxId but no provider, try to get or create one // If we have a sandboxId but no provider, try to get or create one
if (!provider && sandboxId) { if (!provider && sandboxId) {
console.log(`[apply-ai-code-stream] No provider found for sandbox ${sandboxId}, attempting to get or create...`); console.log(`[apply-ai-code-stream] No provider found for sandbox ${sandboxId}, attempting to get or create...`);
try { try {
provider = await sandboxManager.getOrCreateProvider(sandboxId); provider = await sandboxManager.getOrCreateProvider(sandboxId);
// If we got a new provider (not reconnected), we need to create a new sandbox // If we got a new provider (not reconnected), we need to create a new sandbox
if (!provider.getSandboxInfo()) { if (!provider.getSandboxInfo()) {
console.log(`[apply-ai-code-stream] Creating new sandbox since reconnection failed for ${sandboxId}`); console.log(`[apply-ai-code-stream] Creating new sandbox since reconnection failed for ${sandboxId}`);
@@ -317,7 +324,7 @@ export async function POST(request: NextRequest) {
await provider.setupViteApp(); await provider.setupViteApp();
sandboxManager.registerSandbox(sandboxId, provider); sandboxManager.registerSandbox(sandboxId, provider);
} }
// Update legacy global state // Update legacy global state
global.activeSandboxProvider = provider; global.activeSandboxProvider = provider;
console.log(`[apply-ai-code-stream] Successfully got provider for sandbox ${sandboxId}`); console.log(`[apply-ai-code-stream] Successfully got provider for sandbox ${sandboxId}`);
@@ -339,7 +346,7 @@ export async function POST(request: NextRequest) {
}, { status: 500 }); }, { status: 500 });
} }
} }
// If we still don't have a provider, create a new one // If we still don't have a provider, create a new one
if (!provider) { if (!provider) {
console.log(`[apply-ai-code-stream] No active provider found, creating new sandbox...`); console.log(`[apply-ai-code-stream] No active provider found, creating new sandbox...`);
@@ -351,7 +358,7 @@ export async function POST(request: NextRequest) {
// Register with sandbox manager // Register with sandbox manager
sandboxManager.registerSandbox(sandboxInfo.sandboxId, provider); sandboxManager.registerSandbox(sandboxInfo.sandboxId, provider);
// Store in legacy global state // Store in legacy global state
global.activeSandboxProvider = provider; global.activeSandboxProvider = provider;
global.sandboxData = { global.sandboxData = {
@@ -378,18 +385,18 @@ export async function POST(request: NextRequest) {
}, { status: 500 }); }, { status: 500 });
} }
} }
// Create a response stream for real-time updates // Create a response stream for real-time updates
const encoder = new TextEncoder(); const encoder = new TextEncoder();
const stream = new TransformStream(); const stream = new TransformStream();
const writer = stream.writable.getWriter(); const writer = stream.writable.getWriter();
// Function to send progress updates // Function to send progress updates
const sendProgress = async (data: any) => { const sendProgress = async (data: any) => {
const message = `data: ${JSON.stringify(data)}\n\n`; const message = `data: ${JSON.stringify(data)}\n\n`;
await writer.write(encoder.encode(message)); await writer.write(encoder.encode(message));
}; };
// Start processing in background (pass provider and request to the async function) // Start processing in background (pass provider and request to the async function)
(async (providerInstance, req) => { (async (providerInstance, req) => {
const results = { const results = {
@@ -401,87 +408,94 @@ export async function POST(request: NextRequest) {
commandsExecuted: [] as string[], commandsExecuted: [] as string[],
errors: [] as string[] errors: [] as string[]
}; };
try { try {
await sendProgress({ await sendProgress({
type: 'start', type: 'start',
message: 'Starting code application...', message: 'Starting code application...',
totalSteps: 3 totalSteps: 3
}); });
if (morphEnabled) {
await sendProgress({ type: 'info', message: 'Morph Fast Apply enabled' });
await sendProgress({ type: 'info', message: `Parsed ${morphEdits.length} Morph edits` });
if (morphEdits.length === 0) {
console.warn('[apply-ai-code-stream] Morph enabled but no <edit> blocks found; falling back to full-file flow');
await sendProgress({ type: 'warning', message: 'Morph enabled but no <edit> blocks found; falling back to full-file flow' });
}
}
// Step 1: Install packages // Step 1: Install packages
const packagesArray = Array.isArray(packages) ? packages : []; const packagesArray = Array.isArray(packages) ? packages : [];
const parsedPackages = Array.isArray(parsed.packages) ? parsed.packages : []; const parsedPackages = Array.isArray(parsed.packages) ? parsed.packages : [];
// Combine and deduplicate packages // Combine and deduplicate packages
const allPackages = [...packagesArray.filter(pkg => pkg && typeof pkg === 'string'), ...parsedPackages]; const allPackages = [...packagesArray.filter(pkg => pkg && typeof pkg === 'string'), ...parsedPackages];
// Use Set to remove duplicates, then filter out pre-installed packages // Use Set to remove duplicates, then filter out pre-installed packages
const uniquePackages = [...new Set(allPackages)] const uniquePackages = [...new Set(allPackages)]
.filter(pkg => pkg && typeof pkg === 'string' && pkg.trim() !== '') // Remove empty strings .filter(pkg => pkg && typeof pkg === 'string' && pkg.trim() !== '') // Remove empty strings
.filter(pkg => pkg !== 'react' && pkg !== 'react-dom'); // Filter pre-installed .filter(pkg => pkg !== 'react' && pkg !== 'react-dom'); // Filter pre-installed
// Log if we found duplicates // Log if we found duplicates
if (allPackages.length !== uniquePackages.length) { if (allPackages.length !== uniquePackages.length) {
console.log(`[apply-ai-code-stream] Removed ${allPackages.length - uniquePackages.length} duplicate packages`); console.log(`[apply-ai-code-stream] Removed ${allPackages.length - uniquePackages.length} duplicate packages`);
console.log(`[apply-ai-code-stream] Original packages:`, allPackages); console.log(`[apply-ai-code-stream] Original packages:`, allPackages);
console.log(`[apply-ai-code-stream] Deduplicated packages:`, uniquePackages); console.log(`[apply-ai-code-stream] Deduplicated packages:`, uniquePackages);
} }
if (uniquePackages.length > 0) { if (uniquePackages.length > 0) {
await sendProgress({ await sendProgress({
type: 'step', type: 'step',
step: 1, step: 1,
message: `Installing ${uniquePackages.length} packages...`, message: `Installing ${uniquePackages.length} packages...`,
packages: uniquePackages packages: uniquePackages
}); });
// Use streaming package installation // Use streaming package installation
try { try {
// Construct the API URL properly for both dev and production // Construct the API URL properly for both dev and production
const protocol = process.env.NODE_ENV === 'production' ? 'https' : 'http'; const protocol = process.env.NODE_ENV === 'production' ? 'https' : 'http';
const host = req.headers.get('host') || 'localhost:3000'; const host = req.headers.get('host') || 'localhost:3000';
const apiUrl = `${protocol}://${host}/api/install-packages`; const apiUrl = `${protocol}://${host}/api/install-packages`;
const installResponse = await fetch(apiUrl, { const installResponse = await fetch(apiUrl, {
method: 'POST', method: 'POST',
headers: { 'Content-Type': 'application/json' }, headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ body: JSON.stringify({
packages: uniquePackages, packages: uniquePackages,
sandboxId: sandboxId || providerInstance.getSandboxInfo()?.sandboxId sandboxId: sandboxId || providerInstance.getSandboxInfo()?.sandboxId
}) })
}); });
if (installResponse.ok && installResponse.body) { if (installResponse.ok && installResponse.body) {
const reader = installResponse.body.getReader(); const reader = installResponse.body.getReader();
const decoder = new TextDecoder(); const decoder = new TextDecoder();
while (true) { while (true) {
const { done, value } = await reader.read(); const { done, value } = await reader.read();
if (done) break; if (done) break;
const chunk = decoder.decode(value); const chunk = decoder.decode(value);
if (!chunk) continue; if (!chunk) continue;
const lines = chunk.split('\n'); const lines = chunk.split('\n');
for (const line of lines) { for (const line of lines) {
if (line.startsWith('data: ')) { if (line.startsWith('data: ')) {
try { try {
const data = JSON.parse(line.slice(6)); const data = JSON.parse(line.slice(6));
// Forward package installation progress // Forward package installation progress
await sendProgress({ await sendProgress({
type: 'package-progress', type: 'package-progress',
...data ...data
}); });
// Track results // Track results
if (data.type === 'success' && data.installedPackages) { if (data.type === 'success' && data.installedPackages) {
results.packagesInstalled = data.installedPackages; results.packagesInstalled = data.installedPackages;
} }
} catch (parseError) { } catch (parseError) {
console.debug('Error parsing terminal output:', parseError); console.debug('Error parsing terminal output:', parseError);
// Ignore parse errors
} }
} }
} }
@@ -496,28 +510,83 @@ export async function POST(request: NextRequest) {
results.errors.push(`Package installation failed: ${(error as Error).message}`); results.errors.push(`Package installation failed: ${(error as Error).message}`);
} }
} else { } else {
await sendProgress({ await sendProgress({
type: 'step', type: 'step',
step: 1, step: 1,
message: 'No additional packages to install, skipping...' message: 'No additional packages to install, skipping...'
}); });
} }
// Step 2: Create/update files // Step 2: Create/update files
const filesArray = Array.isArray(parsed.files) ? parsed.files : []; const filesArray = Array.isArray(parsed.files) ? parsed.files : [];
await sendProgress({ await sendProgress({
type: 'step', type: 'step',
step: 2, step: 2,
message: `Creating ${filesArray.length} files...` message: `Creating ${filesArray.length} files...`
}); });
// Filter out config files that shouldn't be created // Filter out config files that shouldn't be created
const configFiles = ['tailwind.config.js', 'vite.config.js', 'package.json', 'package-lock.json', 'tsconfig.json', 'postcss.config.js']; const configFiles = ['tailwind.config.js', 'vite.config.js', 'package.json', 'package-lock.json', 'tsconfig.json', 'postcss.config.js'];
const filteredFiles = filesArray.filter(file => { let filteredFiles = filesArray.filter(file => {
if (!file || typeof file !== 'object') return false; if (!file || typeof file !== 'object') return false;
const fileName = (file.path || '').split('/').pop() || ''; const fileName = (file.path || '').split('/').pop() || '';
return !configFiles.includes(fileName); return !configFiles.includes(fileName);
}); });
// If Morph is enabled and we have edits, apply them before file writes
const morphUpdatedPaths = new Set<string>();
if (morphEnabled && morphEdits.length > 0) {
const morphSandbox = (global as any).activeSandbox || providerInstance;
if (!morphSandbox) {
console.warn('[apply-ai-code-stream] No sandbox available to apply Morph edits');
await sendProgress({ type: 'warning', message: 'No sandbox available to apply Morph edits' });
} else {
await sendProgress({ type: 'info', message: `Applying ${morphEdits.length} fast edits via Morph...` });
for (const [idx, edit] of morphEdits.entries()) {
try {
await sendProgress({ type: 'file-progress', current: idx + 1, total: morphEdits.length, fileName: edit.targetFile, action: 'morph-applying' });
const result = await applyMorphEditToFile({
sandbox: morphSandbox,
targetPath: edit.targetFile,
instructions: edit.instructions,
updateSnippet: edit.update
});
if (result.success && result.normalizedPath) {
console.log('[apply-ai-code-stream] Morph updated', result.normalizedPath);
morphUpdatedPaths.add(result.normalizedPath);
if (results.filesUpdated) results.filesUpdated.push(result.normalizedPath);
await sendProgress({ type: 'file-complete', fileName: result.normalizedPath, action: 'morph-updated' });
} else {
const msg = result.error || 'Unknown Morph error';
console.error('[apply-ai-code-stream] Morph apply failed for', edit.targetFile, msg);
if (results.errors) results.errors.push(`Morph apply failed for ${edit.targetFile}: ${msg}`);
await sendProgress({ type: 'file-error', fileName: edit.targetFile, error: msg });
}
} catch (err) {
const msg = (err as Error).message;
console.error('[apply-ai-code-stream] Morph apply exception for', edit.targetFile, msg);
if (results.errors) results.errors.push(`Morph apply exception for ${edit.targetFile}: ${msg}`);
await sendProgress({ type: 'file-error', fileName: edit.targetFile, error: msg });
}
}
}
}
// Avoid overwriting Morph-updated files in the file write loop
if (morphUpdatedPaths.size > 0) {
filteredFiles = filteredFiles.filter(file => {
if (!file?.path) return true;
let normalizedPath = file.path.startsWith('/') ? file.path.slice(1) : file.path;
const fileName = normalizedPath.split('/').pop() || '';
if (!normalizedPath.startsWith('src/') &&
!normalizedPath.startsWith('public/') &&
normalizedPath !== 'index.html' &&
!configFiles.includes(fileName)) {
normalizedPath = 'src/' + normalizedPath;
}
return !morphUpdatedPaths.has(normalizedPath);
});
}
for (const [index, file] of filteredFiles.entries()) { for (const [index, file] of filteredFiles.entries()) {
try { try {
@@ -529,27 +598,27 @@ export async function POST(request: NextRequest) {
fileName: file.path, fileName: file.path,
action: 'creating' action: 'creating'
}); });
// Normalize the file path // Normalize the file path
let normalizedPath = file.path; let normalizedPath = file.path;
if (normalizedPath.startsWith('/')) { if (normalizedPath.startsWith('/')) {
normalizedPath = normalizedPath.substring(1); normalizedPath = normalizedPath.substring(1);
} }
if (!normalizedPath.startsWith('src/') && if (!normalizedPath.startsWith('src/') &&
!normalizedPath.startsWith('public/') && !normalizedPath.startsWith('public/') &&
normalizedPath !== 'index.html' && normalizedPath !== 'index.html' &&
!configFiles.includes(normalizedPath.split('/').pop() || '')) { !configFiles.includes(normalizedPath.split('/').pop() || '')) {
normalizedPath = 'src/' + normalizedPath; normalizedPath = 'src/' + normalizedPath;
} }
const isUpdate = global.existingFiles.has(normalizedPath); const isUpdate = global.existingFiles.has(normalizedPath);
// Remove any CSS imports from JSX/JS files (we're using Tailwind) // Remove any CSS imports from JSX/JS files (we're using Tailwind)
let fileContent = file.content; let fileContent = file.content;
if (file.path.endsWith('.jsx') || file.path.endsWith('.js') || file.path.endsWith('.tsx') || file.path.endsWith('.ts')) { if (file.path.endsWith('.jsx') || file.path.endsWith('.js') || file.path.endsWith('.tsx') || file.path.endsWith('.ts')) {
fileContent = fileContent.replace(/import\s+['"]\.\/[^'"]+\.css['"];?\s*\n?/g, ''); fileContent = fileContent.replace(/import\s+['"]\.\/[^'"]+\.css['"];?\s*\n?/g, '');
} }
// Fix common Tailwind CSS errors in CSS files // Fix common Tailwind CSS errors in CSS files
if (file.path.endsWith('.css')) { if (file.path.endsWith('.css')) {
// Replace shadow-3xl with shadow-2xl (shadow-3xl doesn't exist) // Replace shadow-3xl with shadow-2xl (shadow-3xl doesn't exist)
@@ -558,7 +627,7 @@ export async function POST(request: NextRequest) {
fileContent = fileContent.replace(/shadow-4xl/g, 'shadow-2xl'); fileContent = fileContent.replace(/shadow-4xl/g, 'shadow-2xl');
fileContent = fileContent.replace(/shadow-5xl/g, 'shadow-2xl'); fileContent = fileContent.replace(/shadow-5xl/g, 'shadow-2xl');
} }
// Create directory if needed // Create directory if needed
const dirPath = normalizedPath.includes('/') ? normalizedPath.substring(0, normalizedPath.lastIndexOf('/')) : ''; const dirPath = normalizedPath.includes('/') ? normalizedPath.substring(0, normalizedPath.lastIndexOf('/')) : '';
if (dirPath) { if (dirPath) {
@@ -567,7 +636,7 @@ export async function POST(request: NextRequest) {
// Write the file using provider // Write the file using provider
await providerInstance.writeFile(normalizedPath, fileContent); await providerInstance.writeFile(normalizedPath, fileContent);
// Update file cache // Update file cache
if (global.sandboxState?.fileCache) { if (global.sandboxState?.fileCache) {
global.sandboxState.fileCache.files[normalizedPath] = { global.sandboxState.fileCache.files[normalizedPath] = {
@@ -575,14 +644,14 @@ export async function POST(request: NextRequest) {
lastModified: Date.now() lastModified: Date.now()
}; };
} }
if (isUpdate) { if (isUpdate) {
if (results.filesUpdated) results.filesUpdated.push(normalizedPath); if (results.filesUpdated) results.filesUpdated.push(normalizedPath);
} else { } else {
if (results.filesCreated) results.filesCreated.push(normalizedPath); if (results.filesCreated) results.filesCreated.push(normalizedPath);
if (global.existingFiles) global.existingFiles.add(normalizedPath); if (global.existingFiles) global.existingFiles.add(normalizedPath);
} }
await sendProgress({ await sendProgress({
type: 'file-complete', type: 'file-complete',
fileName: normalizedPath, fileName: normalizedPath,
@@ -599,16 +668,16 @@ export async function POST(request: NextRequest) {
}); });
} }
} }
// Step 3: Execute commands // Step 3: Execute commands
const commandsArray = Array.isArray(parsed.commands) ? parsed.commands : []; const commandsArray = Array.isArray(parsed.commands) ? parsed.commands : [];
if (commandsArray.length > 0) { if (commandsArray.length > 0) {
await sendProgress({ await sendProgress({
type: 'step', type: 'step',
step: 3, step: 3,
message: `Executing ${commandsArray.length} commands...` message: `Executing ${commandsArray.length} commands...`
}); });
for (const [index, cmd] of commandsArray.entries()) { for (const [index, cmd] of commandsArray.entries()) {
try { try {
await sendProgress({ await sendProgress({
@@ -618,14 +687,14 @@ export async function POST(request: NextRequest) {
command: cmd, command: cmd,
action: 'executing' action: 'executing'
}); });
// Use provider runCommand // Use provider runCommand
const result = await providerInstance.runCommand(cmd); const result = await providerInstance.runCommand(cmd);
// Get command output from provider result // Get command output from provider result
const stdout = result.stdout; const stdout = result.stdout;
const stderr = result.stderr; const stderr = result.stderr;
if (stdout) { if (stdout) {
await sendProgress({ await sendProgress({
type: 'command-output', type: 'command-output',
@@ -634,7 +703,7 @@ export async function POST(request: NextRequest) {
stream: 'stdout' stream: 'stdout'
}); });
} }
if (stderr) { if (stderr) {
await sendProgress({ await sendProgress({
type: 'command-output', type: 'command-output',
@@ -643,11 +712,11 @@ export async function POST(request: NextRequest) {
stream: 'stderr' stream: 'stderr'
}); });
} }
if (results.commandsExecuted) { if (results.commandsExecuted) {
results.commandsExecuted.push(cmd); results.commandsExecuted.push(cmd);
} }
await sendProgress({ await sendProgress({
type: 'command-complete', type: 'command-complete',
command: cmd, command: cmd,
@@ -666,7 +735,7 @@ export async function POST(request: NextRequest) {
} }
} }
} }
// Send final results // Send final results
await sendProgress({ await sendProgress({
type: 'complete', type: 'complete',
@@ -675,7 +744,7 @@ export async function POST(request: NextRequest) {
structure: parsed.structure, structure: parsed.structure,
message: `Successfully applied ${results.filesCreated.length} files` message: `Successfully applied ${results.filesCreated.length} files`
}); });
// Track applied files in conversation state // Track applied files in conversation state
if (global.conversationState && results.filesCreated.length > 0) { if (global.conversationState && results.filesCreated.length > 0) {
const messages = global.conversationState.context.messages; const messages = global.conversationState.context.messages;
@@ -688,7 +757,7 @@ export async function POST(request: NextRequest) {
}; };
} }
} }
// Track applied code in project evolution // Track applied code in project evolution
if (global.conversationState.context.projectEvolution) { if (global.conversationState.context.projectEvolution) {
global.conversationState.context.projectEvolution.majorChanges.push({ global.conversationState.context.projectEvolution.majorChanges.push({
@@ -697,10 +766,10 @@ export async function POST(request: NextRequest) {
filesAffected: results.filesCreated || [] filesAffected: results.filesCreated || []
}); });
} }
global.conversationState.lastUpdated = Date.now(); global.conversationState.lastUpdated = Date.now();
} }
} catch (error) { } catch (error) {
await sendProgress({ await sendProgress({
type: 'error', type: 'error',
@@ -710,7 +779,7 @@ export async function POST(request: NextRequest) {
await writer.close(); await writer.close();
} }
})(provider, request); })(provider, request);
// Return the stream // Return the stream
return new Response(stream.readable, { return new Response(stream.readable, {
headers: { headers: {
+69 -2
View File
@@ -1,4 +1,5 @@
import { NextRequest, NextResponse } from 'next/server'; import { NextRequest, NextResponse } from 'next/server';
import { parseMorphEdits, applyMorphEditToFile } from '@/lib/morph-fast-apply';
import type { SandboxState } from '@/types/sandbox'; import type { SandboxState } from '@/types/sandbox';
import type { ConversationState } from '@/types/conversation'; import type { ConversationState } from '@/types/conversation';
@@ -145,6 +146,12 @@ export async function POST(request: NextRequest) {
// Parse the AI response // Parse the AI response
const parsed = parseAIResponse(response); const parsed = parseAIResponse(response);
const morphEnabled = Boolean(isEdit && process.env.MORPH_API_KEY);
const morphEdits = morphEnabled ? parseMorphEdits(response) : [];
console.log('[apply-ai-code] Morph Fast Apply mode:', morphEnabled);
if (morphEnabled) {
console.log('[apply-ai-code] Morph edits found:', morphEdits.length);
}
// Initialize existingFiles if not already // Initialize existingFiles if not already
if (!global.existingFiles) { if (!global.existingFiles) {
@@ -200,6 +207,14 @@ export async function POST(request: NextRequest) {
console.log('[apply-ai-code] Is edit mode:', isEdit); console.log('[apply-ai-code] Is edit mode:', isEdit);
console.log('[apply-ai-code] Files to write:', parsed.files.map(f => f.path)); console.log('[apply-ai-code] Files to write:', parsed.files.map(f => f.path));
console.log('[apply-ai-code] Existing files:', Array.from(global.existingFiles)); console.log('[apply-ai-code] Existing files:', Array.from(global.existingFiles));
if (morphEnabled) {
console.log('[apply-ai-code] Morph Fast Apply enabled');
if (morphEdits.length > 0) {
console.log('[apply-ai-code] Parsed Morph edits:', morphEdits.map(e => e.targetFile));
} else {
console.log('[apply-ai-code] No <edit> blocks found in response');
}
}
const results = { const results = {
filesCreated: [] as string[], filesCreated: [] as string[],
@@ -324,9 +339,46 @@ export async function POST(request: NextRequest) {
} }
} }
// Attempt Morph Fast Apply for edits before file creation
const morphUpdatedPaths = new Set<string>();
if (morphEnabled && morphEdits.length > 0) {
if (!global.activeSandbox) {
console.warn('[apply-ai-code] Morph edits found but no active sandbox; skipping Morph application');
} else {
console.log(`[apply-ai-code] Applying ${morphEdits.length} fast edits via Morph...`);
for (const edit of morphEdits) {
try {
const result = await applyMorphEditToFile({
sandbox: global.activeSandbox,
targetPath: edit.targetFile,
instructions: edit.instructions,
updateSnippet: edit.update
});
if (result.success && result.normalizedPath) {
morphUpdatedPaths.add(result.normalizedPath);
results.filesUpdated.push(result.normalizedPath);
console.log('[apply-ai-code] Morph applied to', result.normalizedPath);
} else {
const msg = result.error || 'Unknown Morph error';
console.error('[apply-ai-code] Morph apply failed:', msg);
results.errors.push(`Morph apply failed for ${edit.targetFile}: ${msg}`);
}
} catch (e) {
console.error('[apply-ai-code] Morph apply exception:', e);
results.errors.push(`Morph apply exception for ${edit.targetFile}: ${(e as Error).message}`);
}
}
}
}
if (morphEnabled && morphEdits.length === 0) {
console.warn('[apply-ai-code] Morph enabled but no <edit> blocks found; falling back to full-file flow');
}
// Filter out config files that shouldn't be created // Filter out config files that shouldn't be created
const configFiles = ['tailwind.config.js', 'vite.config.js', 'package.json', 'package-lock.json', 'tsconfig.json', 'postcss.config.js']; const configFiles = ['tailwind.config.js', 'vite.config.js', 'package.json', 'package-lock.json', 'tsconfig.json', 'postcss.config.js'];
const filteredFiles = parsed.files.filter(file => { let filteredFiles = parsed.files.filter(file => {
const fileName = file.path.split('/').pop() || ''; const fileName = file.path.split('/').pop() || '';
if (configFiles.includes(fileName)) { if (configFiles.includes(fileName)) {
console.warn(`[apply-ai-code] Skipping config file: ${file.path} - already exists in template`); console.warn(`[apply-ai-code] Skipping config file: ${file.path} - already exists in template`);
@@ -334,6 +386,21 @@ export async function POST(request: NextRequest) {
} }
return true; return true;
}); });
// Avoid overwriting files already updated by Morph
if (morphUpdatedPaths.size > 0) {
filteredFiles = filteredFiles.filter(file => {
let normalizedPath = file.path.startsWith('/') ? file.path.slice(1) : file.path;
const fileName = normalizedPath.split('/').pop() || '';
if (!normalizedPath.startsWith('src/') &&
!normalizedPath.startsWith('public/') &&
normalizedPath !== 'index.html' &&
!configFiles.includes(fileName)) {
normalizedPath = 'src/' + normalizedPath;
}
return !morphUpdatedPaths.has(normalizedPath);
});
}
// Create or update files AFTER package installation // Create or update files AFTER package installation
for (const file of filteredFiles) { for (const file of filteredFiles) {
@@ -399,7 +466,7 @@ export async function POST(request: NextRequest) {
} catch (writeError) { } catch (writeError) {
console.error(`[apply-ai-code] E2B file write error:`, writeError); console.error(`[apply-ai-code] E2B file write error:`, writeError);
throw writeError; throw writeError as Error;
} }
+30 -1
View File
@@ -576,7 +576,7 @@ Remember: You are a SURGEON making a precise incision, not an artist repainting
} }
// Build system prompt with conversation awareness // Build system prompt with conversation awareness
const systemPrompt = `You are an expert React developer with perfect memory of the conversation. You maintain context across messages and remember scraped websites, generated components, and applied code. Generate clean, modern React code for Vite applications. let systemPrompt = `You are an expert React developer with perfect memory of the conversation. You maintain context across messages and remember scraped websites, generated components, and applied code. Generate clean, modern React code for Vite applications.
${conversationContext} ${conversationContext}
🚨 CRITICAL RULES - YOUR MOST IMPORTANT INSTRUCTIONS: 🚨 CRITICAL RULES - YOUR MOST IMPORTANT INSTRUCTIONS:
@@ -927,6 +927,24 @@ CRITICAL: When files are provided in the context:
4. Do NOT ask to see files - they are already provided in the context above 4. Do NOT ask to see files - they are already provided in the context above
5. Make the requested change immediately`; 5. Make the requested change immediately`;
// If Morph Fast Apply is enabled (edit mode + MORPH_API_KEY), force <edit> block output
const morphFastApplyEnabled = Boolean(isEdit && process.env.MORPH_API_KEY);
if (morphFastApplyEnabled) {
systemPrompt += `
MORPH FAST APPLY MODE (EDIT-ONLY):
- Output edits as <edit> blocks, not full <file> blocks, for files that already exist.
- Format for each edit:
<edit target_file="src/components/Header.jsx">
<instructions>Describe the minimal change, single sentence.</instructions>
<update>Provide the SMALLEST code snippet necessary to perform the change.</update>
</edit>
- Only use <file> blocks when you must CREATE a brand-new file.
- Prefer ONE edit block for a simple change; multiple edits only if absolutely needed for separate files.
- Keep updates minimal and precise; do not rewrite entire files.
`;
}
// Build full prompt with context // Build full prompt with context
let fullPrompt = prompt; let fullPrompt = prompt;
if (context) { if (context) {
@@ -1172,6 +1190,17 @@ CRITICAL: When files are provided in the context:
} }
if (contextParts.length > 0) { if (contextParts.length > 0) {
if (morphFastApplyEnabled) {
contextParts.push('\nOUTPUT FORMAT (REQUIRED IN MORPH MODE):');
contextParts.push('<edit target_file="src/components/Component.jsx">');
contextParts.push('<instructions>Minimal, precise instruction.</instructions>');
contextParts.push('<update>// Smallest necessary snippet</update>');
contextParts.push('</edit>');
contextParts.push('\nIf you need to create a NEW file, then and only then output a full file:');
contextParts.push('<file path="src/components/NewComponent.jsx">');
contextParts.push('// Full file content when creating new files');
contextParts.push('</file>');
}
fullPrompt = `CONTEXT:\n${contextParts.join('\n')}\n\nUSER REQUEST:\n${prompt}`; fullPrompt = `CONTEXT:\n${contextParts.join('\n')}\n\nUSER REQUEST:\n${prompt}`;
} }
} }
+1350
View File
File diff suppressed because it is too large Load Diff
+219
View File
@@ -0,0 +1,219 @@
// Using direct fetch to Morph's OpenAI-compatible API to avoid SDK type issues
// A single <edit> block parsed from LLM output: which file to change,
// a short natural-language instruction, and the minimal code snippet
// that Morph should merge into that file.
export interface MorphEditBlock {
  targetFile: string; // project-relative path taken from the edit's target_file attribute
  instructions: string; // one-sentence description of the change (may be empty if the block omitted <instructions>)
  update: string; // smallest code snippet needed to express the change
}
// Outcome of applying one Morph edit to a sandbox file. On success,
// `normalizedPath` and `mergedCode` are populated; on failure, `error`
// carries the reason (and `normalizedPath` is present when the path was
// resolved before the failure occurred).
export interface MorphApplyResult {
  success: boolean; // true only when merged code was written back to the sandbox
  normalizedPath?: string; // sandbox-relative path after normalizeProjectPath
  mergedCode?: string; // full merged file content returned by Morph
  error?: string; // failure reason when success is false
}
// Map a project-relative path coming from the LLM onto the sandbox's
// directory layout. Anything that is not already under src/ or public/,
// is not index.html, and is not a known root-level config file is moved
// under src/. Returns both the sandbox-relative and the absolute path.
export function normalizeProjectPath(inputPath: string): { normalizedPath: string; fullPath: string } {
  // Config files that live at the project root and must never be relocated
  const ROOT_CONFIG_FILES = new Set([
    'tailwind.config.js',
    'vite.config.js',
    'package.json',
    'package-lock.json',
    'tsconfig.json',
    'postcss.config.js'
  ]);

  const trimmed = inputPath.trim();
  let relative = trimmed.startsWith('/') ? trimmed.slice(1) : trimmed;

  const baseName = relative.split('/').pop() || '';
  const staysAtRoot =
    relative.startsWith('src/') ||
    relative.startsWith('public/') ||
    relative === 'index.html' ||
    ROOT_CONFIG_FILES.has(baseName);

  if (!staysAtRoot) {
    relative = `src/${relative}`;
  }

  return { normalizedPath: relative, fullPath: `/home/user/app/${relative}` };
}
// POST a chat-completions payload to Morph's OpenAI-compatible endpoint.
// Returns the parsed JSON response body.
// Throws if MORPH_API_KEY is missing or the API responds with a non-2xx
// status (the response body is included in the error to aid debugging).
// Typed as unknown (not any) so callers must narrow before use.
async function morphChatCompletionsCreate(payload: unknown): Promise<unknown> {
  if (!process.env.MORPH_API_KEY) throw new Error('MORPH_API_KEY is not set');
  const res = await fetch('https://api.morphllm.com/v1/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${process.env.MORPH_API_KEY}`
    },
    body: JSON.stringify(payload)
  });
  if (!res.ok) {
    const text = await res.text();
    throw new Error(`Morph API error ${res.status}: ${text}`);
  }
  return res.json();
}
// Extract every <edit target_file="...">...</edit> block from raw LLM
// output. A block is kept only when it has both a target file and a
// non-empty <update> snippet; a missing <instructions> element yields ''.
export function parseMorphEdits(text: string): MorphEditBlock[] {
  const editPattern = /<edit\s+target_file="([^"]+)">([\s\S]*?)<\/edit>/g;
  const blocks: MorphEditBlock[] = [];

  for (const match of text.matchAll(editPattern)) {
    const targetFile = match[1].trim();
    const body = match[2];
    const instructions = (body.match(/<instructions>([\s\S]*?)<\/instructions>/)?.[1] ?? '').trim();
    const update = (body.match(/<update>([\s\S]*?)<\/update>/)?.[1] ?? '').trim();

    if (targetFile && update) {
      blocks.push({ targetFile, instructions, update });
    }
  }

  return blocks;
}
// Read a file's current content from the sandbox. Resolution order:
// backend file cache, E2B files API, provider runCommand (relative path,
// then absolute), and finally a shell `cat` via commands.run. Throws when
// no strategy yields content.
// Fixes vs. original: paths are shell-quoted (target paths come from LLM
// output and must not break or inject into the command line), and a
// runCommand result is only trusted when the command did not report a
// failing exit code — previously a missing file silently read as ''.
async function readFileFromSandbox(sandbox: any, normalizedPath: string, fullPath: string): Promise<string> {
  // Single-quote a path for safe interpolation into a shell command
  const quote = (p: string) => `'${p.replace(/'/g, `'\\''`)}'`;

  // 1) Backend cache first (cheapest, already keyed by normalized path)
  const cached = (global as any).sandboxState?.fileCache?.files?.[normalizedPath]?.content;
  if (cached) {
    return cached as string;
  }

  // 2) E2B files API
  if (sandbox?.files?.read) {
    return await sandbox.files.read(fullPath);
  }

  // 3) Provider runCommand (preferred for the provider pattern):
  // try the relative path, then fall back to the absolute path.
  if (typeof sandbox?.runCommand === 'function') {
    for (const candidate of [normalizedPath, fullPath]) {
      try {
        const res = await sandbox.runCommand(`cat ${quote(candidate)}`);
        // Only accept stdout when the command succeeded (or the provider
        // does not report exit codes at all)
        if (res && typeof res.stdout === 'string' && (res.exitCode === undefined || res.exitCode === 0)) {
          return res.stdout as string;
        }
      } catch {}
    }
  }

  // 4) Shell cat via commands.run
  if (sandbox?.commands?.run) {
    const result = await sandbox.commands.run(`cat ${quote(fullPath)}`, { cwd: '/home/user/app', timeout: 30 });
    if (result?.exitCode === 0 && typeof result?.stdout === 'string') {
      return result.stdout as string;
    }
  }

  throw new Error(`Unable to read file: ${normalizedPath}`);
}
// Write `content` to the sandbox at the given path, then refresh the backend
// cache. Transport order: provider writeFile, provider runCommand, E2B files
// API, python runCode, and finally a shell redirect via commands.run.
// Fixes vs. original: shell and python transports now move the content as
// base64, because the previous heredoc escaping was broken — the \n and $
// replacements were no-ops, backslash doubling corrupted content inside a
// quoted heredoc, and a single quote or a literal EOF line in the content
// broke the command. The Python '"""' escape was likewise a no-op. Also,
// the cache update now runs for ALL transports (the provider writeFile /
// runCommand paths previously returned early and skipped it).
async function writeFileToSandbox(sandbox: any, normalizedPath: string, fullPath: string, content: string): Promise<void> {
  // Base64 payload: pure [A-Za-z0-9+/=], so it is inert in any shell context
  const base64 = Buffer.from(content, 'utf8').toString('base64');
  // Escape a string for inclusion inside bash double quotes
  const dq = (p: string) => `"${p.replace(/[\\"$`]/g, (c: string) => '\\' + c)}"`;
  // Escape a full command for inclusion inside bash single quotes (bash -lc '...')
  const sq = (s: string) => s.replace(/'/g, `'\\''`);

  if (typeof sandbox?.writeFile === 'function') {
    // Provider pattern (writeFile)
    await sandbox.writeFile(normalizedPath, content);
  } else if (typeof sandbox?.runCommand === 'function') {
    // Provider pattern (runCommand): ensure the directory exists, then
    // decode the base64 payload into the target file
    const dir = normalizedPath.includes('/') ? normalizedPath.substring(0, normalizedPath.lastIndexOf('/')) : '';
    if (dir) {
      try { await sandbox.runCommand(`bash -lc '${sq(`mkdir -p ${dq(dir)}`)}'`); } catch {}
    }
    await sandbox.runCommand(`bash -lc '${sq(`echo ${base64} | base64 -d > ${dq(normalizedPath)}`)}'`);
  } else if (sandbox?.files?.write) {
    // E2B files API
    await sandbox.files.write(fullPath, content);
  } else if (sandbox?.runCode) {
    // Python fallback: decode base64 so arbitrary quotes/backslashes are
    // written verbatim; JSON.stringify yields valid Python string literals
    await sandbox.runCode(`
import base64, os
os.makedirs(os.path.dirname(${JSON.stringify(fullPath)}) or '.', exist_ok=True)
with open(${JSON.stringify(fullPath)}, 'wb') as f:
    f.write(base64.b64decode(${JSON.stringify(base64)}))
print("WROTE:" + ${JSON.stringify(fullPath)})
`);
  } else if (sandbox?.commands?.run) {
    // Last-resort shell redirect via commands.run, still base64-encoded
    const cmd = `mkdir -p "$(dirname ${dq(fullPath)})" && echo ${base64} | base64 -d > ${dq(fullPath)}`;
    const result = await sandbox.commands.run(`bash -lc '${sq(cmd)}'`, { cwd: '/home/user/app', timeout: 60 });
    if (result?.exitCode !== 0) {
      throw new Error(`Failed to write file via shell: ${normalizedPath}`);
    }
  } else {
    throw new Error('No available method to write files to sandbox');
  }

  // Keep the backend cache in sync regardless of which transport ran
  if ((global as any).sandboxState?.fileCache) {
    (global as any).sandboxState.fileCache.files[normalizedPath] = {
      content,
      lastModified: Date.now()
    };
  }
  if ((global as any).existingFiles) {
    (global as any).existingFiles.add(normalizedPath);
  }
}
// Apply one Morph <edit> block to a sandbox file: read the current content,
// ask Morph to merge the update snippet into it, and write the result back.
// Never throws — all failures are reported through the result object.
export async function applyMorphEditToFile(params: {
  sandbox: any;
  targetPath: string;
  instructions: string;
  updateSnippet: string;
}): Promise<MorphApplyResult> {
  try {
    if (!process.env.MORPH_API_KEY) {
      return { success: false, error: 'MORPH_API_KEY not set' };
    }

    const { normalizedPath, fullPath } = normalizeProjectPath(params.targetPath);

    // Reading the original code doubles as an existence check for the target
    const initialCode = await readFileFromSandbox(params.sandbox, normalizedPath, fullPath);

    const mergePrompt = [
      `<instruction>${params.instructions || ''}</instruction>`,
      `<code>${initialCode}</code>`,
      `<update>${params.updateSnippet}</update>`
    ].join('\n');

    const resp = await morphChatCompletionsCreate({
      model: 'morph-v3-large',
      messages: [{ role: 'user', content: mergePrompt }]
    });

    const mergedCode = (resp as any)?.choices?.[0]?.message?.content || '';
    if (!mergedCode) {
      return { success: false, error: 'Morph returned empty content', normalizedPath };
    }

    await writeFileToSandbox(params.sandbox, normalizedPath, fullPath, mergedCode);
    return { success: true, normalizedPath, mergedCode };
  } catch (error) {
    return { success: false, error: (error as Error).message };
  }
}