From 3e0d0c1e4d014410d4b80b82a92ea3e6a130b2c0 Mon Sep 17 00:00:00 2001 From: Nick Bradley Date: Thu, 29 Jan 2026 14:22:21 +0000 Subject: [PATCH] fix(upload): implement chunked upload for large files --- src/commands/uploadWidget.ts | 64 +++++++++++++++++++++++++++--------- 1 file changed, 49 insertions(+), 15 deletions(-) diff --git a/src/commands/uploadWidget.ts b/src/commands/uploadWidget.ts index 231ac08..c0103d9 100644 --- a/src/commands/uploadWidget.ts +++ b/src/commands/uploadWidget.ts @@ -101,8 +101,22 @@ function openOrRevealUploadPanel( }); } +/** + * Size threshold (in bytes) above which to use chunked upload. + * Files over 100 MB require chunked upload to avoid 413 errors. + * We use a lower threshold (20 MB) for better reliability. + */ +const CHUNKED_UPLOAD_THRESHOLD = 20 * 1024 * 1024; // 20 MB + +/** + * Chunk size for chunked uploads. 6 MB is a good balance between + * network resilience and upload speed. + */ +const UPLOAD_CHUNK_SIZE = 6 * 1024 * 1024; // 6 MB + /** * Uploads a file to Cloudinary with progress tracking. + * Uses chunked upload for large files to avoid 413 errors. 
*/ async function uploadWithProgress( panel: vscode.WebviewPanel, @@ -110,30 +124,50 @@ async function uploadWithProgress( options: Record<string, any>, fileId: string ): Promise<any> { + // Convert data URI to buffer + const base64Data = dataUri.split(",")[1]; + const buffer = Buffer.from(base64Data, "base64"); + + // Use chunked upload for large files to avoid 413 errors + const useChunkedUpload = buffer.length > CHUNKED_UPLOAD_THRESHOLD; + return new Promise((resolve, reject) => { - const uploadStream = cloudinary.uploader.upload_stream( - options, - (error, result) => { - if (error) { - reject(error); - } else { - resolve(result); + let uploadStream; + + if (useChunkedUpload) { + // Use chunked upload for large files + uploadStream = cloudinary.uploader.upload_chunked_stream( + { ...options, chunk_size: UPLOAD_CHUNK_SIZE }, + (error, result) => { + if (error) { + reject(error); + } else { + resolve(result); + } } - } - ); - - // Convert data URI to buffer - const base64Data = dataUri.split(",")[1]; - const buffer = Buffer.from(base64Data, "base64"); + ); + } else { + // Use standard upload stream for smaller files + uploadStream = cloudinary.uploader.upload_stream( + options, + (error, result) => { + if (error) { + reject(error); + } else { + resolve(result); + } + } + ); + } // Create readable stream with progress tracking let uploaded = 0; const total = buffer.length; - const chunkSize = 64 * 1024; // 64KB chunks + const progressChunkSize = 64 * 1024; // 64KB chunks for progress reporting const readable = new Readable({ read() { - const chunk = buffer.slice(uploaded, uploaded + chunkSize); + const chunk = buffer.slice(uploaded, uploaded + progressChunkSize); if (chunk.length > 0) { uploaded += chunk.length; const percent = Math.round((uploaded / total) * 100);