diff --git a/test/utils/chunkingUtils.test.js b/test/utils/chunkingUtils.test.js index b32fb9f46..d3027fabd 100644 --- a/test/utils/chunkingUtils.test.js +++ b/test/utils/chunkingUtils.test.js @@ -10,7 +10,8 @@ import { createChunkUploadErrorMessage, createChunkAnalyticsData, DEFAULT_CHUNK_CONFIG, - ChunkingUtils, + getAssetId, + executeInBatches, } from '../../unitylibs/utils/chunkingUtils.js'; describe('Chunking Utils', () => { @@ -150,6 +151,86 @@ describe('Chunking Utils', () => { }); }); + describe('calculateChunkProgress', () => { + it('should calculate progress correctly', () => { + const progress = calculateChunkProgress(5, 10, 20); + expect(progress).to.equal(60); // 20 + (5/10) * 80 = 20 + 40 = 60 + }); + + it('should not exceed 100%', () => { + const progress = calculateChunkProgress(10, 10, 90); + expect(progress).to.equal(100); + }); + + it('should handle zero completed chunks', () => { + const progress = calculateChunkProgress(0, 10, 0); + expect(progress).to.equal(0); + }); + }); + + describe('createChunkUploadErrorMessage', () => { + it('should create proper error message', () => { + const message = createChunkUploadErrorMessage('asset123', 1024, 'text/plain', 2); + expect(message).to.equal('One or more chunks failed to upload for asset: asset123, 1024 bytes, text/plain. 
Failed chunks: 2'); + }); + }); + + describe('createChunkAnalyticsData', () => { + it('should create analytics data with timestamp', () => { + const data = createChunkAnalyticsData('Test Event', { assetId: 'test' }); + expect(data.event).to.equal('Test Event'); + expect(data.assetId).to.equal('test'); + expect(data.timestamp).to.be.a('string'); + }); + }); + + describe('getAssetId', () => { + it('should return id when id is present', () => { + const assetData = { id: 'asset-123', assetId: 'asset-456' }; + expect(getAssetId(assetData)).to.equal('asset-123'); + }); + + it('should return assetId when id is not present', () => { + const assetData = { assetId: 'asset-456' }; + expect(getAssetId(assetData)).to.equal('asset-456'); + }); + + it('should return undefined when neither is present', () => { + const assetData = {}; + expect(getAssetId(assetData)).to.be.undefined; + }); + }); + + describe('executeInBatches', () => { + it('should execute all items with concurrency limit', async () => { + const items = [1, 2, 3, 4, 5]; + const results = []; + const processFn = async (item) => { + results.push(item); + }; + await executeInBatches(items, 2, processFn); + expect(results).to.have.length(5); + expect(results).to.include.members([1, 2, 3, 4, 5]); + }); + + it('should handle empty items array', async () => { + const results = []; + await executeInBatches([], 2, async (item) => { results.push(item); }); + expect(results).to.have.length(0); + }); + + it('should handle errors gracefully', async () => { + const items = [1, 2, 3]; + let errorCount = 0; + const processFn = async (item) => { + if (item === 2) throw new Error('Test error'); + errorCount += 1; + }; + await executeInBatches(items, 2, processFn); + expect(errorCount).to.equal(2); + }); + }); + describe('batchChunkUpload', () => { let mockUploadFunction; let mockSignal; @@ -159,116 +240,188 @@ describe('Chunking Utils', () => { mockSignal = { aborted: false }; }); - it('should handle batch upload with multiple 
files', async () => { - const fileData = [ - { assetId: 'asset1', blocksize: 1024, uploadUrls: ['https://upload.com/chunk1?partNumber=1', 'https://upload.com/chunk2?partNumber=2'] }, - { assetId: 'asset2', blocksize: 1024, uploadUrls: ['https://upload.com/chunk3?partNumber=1', 'https://upload.com/chunk4?partNumber=2'] }, + it('should upload chunks for multiple files with flat batching', async () => { + const assetDataArray = [ + { id: 'asset1', blocksize: 1024, uploadUrls: [{ href: 'https://upload.com/chunk1?partNumber=1' }] }, + { id: 'asset2', blocksize: 1024, uploadUrls: [{ href: 'https://upload.com/chunk2?partNumber=1' }] }, ]; const blobDataArray = [ - new File(['x'.repeat(2048)], 'file1.txt'), - new File(['y'.repeat(2048)], 'file2.txt'), + new File(['x'.repeat(512)], 'file1.txt'), + new File(['y'.repeat(512)], 'file2.txt'), ]; const filetypeArray = ['text/plain', 'text/plain']; - mockUploadFunction.resolves({ response: 'success', attempt: 1 }); + mockUploadFunction.resolves({ attempt: 1 }); const result = await batchChunkUpload( - fileData, + assetDataArray, blobDataArray, filetypeArray, 2, mockUploadFunction, mockSignal, - {}, ); expect(result.failedFiles.size).to.equal(0); - expect(mockUploadFunction.callCount).to.equal(4); // Should be called 4 times - expect(result.attemptMap.size).to.equal(4); // 2 files * 2 chunks each + expect(mockUploadFunction.callCount).to.equal(2); + expect(result.attemptMap.size).to.equal(2); }); - it('should handle file upload failures', async () => { - const fileData = [{ assetId: 'asset1', blocksize: 1024, uploadUrls: ['https://upload.com/chunk1?partNumber=1'] }]; + it('should handle assetData.id (Acrobat style)', async () => { + const assetDataArray = [ + { id: 'asset-with-id', blocksize: 1024, uploadUrls: [{ href: 'https://upload.com/chunk?partNumber=1' }] }, + ]; const blobDataArray = [new File(['test'], 'file.txt')]; const filetypeArray = ['text/plain']; - const uploadError = new Error('Upload failed'); - 
mockUploadFunction.rejects(uploadError); - const result = await batchChunkUpload( - fileData, + mockUploadFunction.resolves({ attempt: 1 }); + await batchChunkUpload( + assetDataArray, blobDataArray, filetypeArray, 1, mockUploadFunction, mockSignal, - {}, ); - expect(result.failedFiles.size).to.equal(1); - expect(mockUploadFunction.callCount).to.equal(1); // Should be called once before failing - }); - }); - - describe('calculateChunkProgress', () => { - it('should calculate progress correctly', () => { - const progress = calculateChunkProgress(5, 10, 20); - expect(progress).to.equal(60); // 20 + (5/10) * 80 = 20 + 40 = 60 - }); - - it('should not exceed 100%', () => { - const progress = calculateChunkProgress(10, 10, 90); - expect(progress).to.equal(100); + expect(mockUploadFunction.calledOnce).to.be.true; + const callArgs = mockUploadFunction.firstCall.args; + expect(callArgs[3]).to.equal('asset-with-id'); // assetId argument }); - it('should handle zero completed chunks', () => { - const progress = calculateChunkProgress(0, 10, 0); - expect(progress).to.equal(0); + it('should skip files with mismatched chunk count', async () => { + const assetDataArray = [ + { id: 'asset1', blocksize: 512, uploadUrls: [{ href: 'https://upload.com/chunk1' }] }, // expects 2 chunks but only 1 URL + ]; + const blobDataArray = [new File(['x'.repeat(1024)], 'file1.txt')]; // 1024 bytes / 512 blocksize = 2 chunks + const filetypeArray = ['text/plain']; + mockUploadFunction.resolves({ attempt: 1 }); + const result = await batchChunkUpload( + assetDataArray, + blobDataArray, + filetypeArray, + 1, + mockUploadFunction, + mockSignal, + ); + expect(mockUploadFunction.called).to.be.false; + expect(result.failedFiles.size).to.equal(0); }); - }); - describe('createChunkUploadErrorMessage', () => { - it('should create proper error message', () => { - const message = createChunkUploadErrorMessage('asset123', 1024, 'text/plain', 2); - expect(message).to.equal('One or more chunks failed to upload 
for asset: asset123, 1024 bytes, text/plain. Failed chunks: 2'); + it('should handle upload failures and mark file as failed', async () => { + const assetDataArray = [ + { id: 'asset1', blocksize: 1024, uploadUrls: [{ href: 'https://upload.com/chunk1?partNumber=1' }] }, + ]; + const blobDataArray = [new File(['test'], 'file.txt')]; + const filetypeArray = ['text/plain']; + mockUploadFunction.rejects(new Error('Upload failed')); + const result = await batchChunkUpload( + assetDataArray, + blobDataArray, + filetypeArray, + 1, + mockUploadFunction, + mockSignal, + ); + expect(result.failedFiles.size).to.equal(1); }); - }); - describe('createChunkAnalyticsData', () => { - it('should create analytics data with timestamp', () => { - const data = createChunkAnalyticsData('Test Event', { assetId: 'test' }); - expect(data.event).to.equal('Test Event'); - expect(data.assetId).to.equal('test'); - expect(data.timestamp).to.be.a('string'); + it('should stop uploading file chunks after first failure', async () => { + const assetDataArray = [ + { id: 'asset1', blocksize: 512, uploadUrls: [{ href: 'https://upload.com/chunk1?partNumber=1' }, { href: 'https://upload.com/chunk2?partNumber=2' }] }, + ]; + const blobDataArray = [new File(['x'.repeat(1024)], 'file1.txt')]; // 2 chunks + const filetypeArray = ['text/plain']; + mockUploadFunction.onFirstCall().rejects(new Error('First chunk failed')); + mockUploadFunction.onSecondCall().resolves({ attempt: 1 }); + const result = await batchChunkUpload( + assetDataArray, + blobDataArray, + filetypeArray, + 1, + mockUploadFunction, + mockSignal, + ); + expect(result.failedFiles.size).to.equal(1); + // Second chunk should not be uploaded due to fileUploadFailed flag }); - }); - describe('ChunkingUtils class', () => { - let chunkingUtils; - - beforeEach(() => { - chunkingUtils = new ChunkingUtils(); + it('should handle aborted signal', async () => { + const assetDataArray = [ + { id: 'asset1', blocksize: 1024, uploadUrls: [{ href: 
'https://upload.com/chunk1' }] }, + ]; + const blobDataArray = [new File(['test'], 'file.txt')]; + const filetypeArray = ['text/plain']; + mockSignal.aborted = true; + const result = await batchChunkUpload( + assetDataArray, + blobDataArray, + filetypeArray, + 1, + mockUploadFunction, + mockSignal, + ); + expect(mockUploadFunction.called).to.be.false; + expect(result.failedFiles.size).to.equal(0); }); - it('should use default config', () => { - expect(chunkingUtils.config.blockSize).to.equal(DEFAULT_CHUNK_CONFIG.blockSize); - expect(chunkingUtils.config.maxRetries).to.equal(DEFAULT_CHUNK_CONFIG.maxRetries); + it('should pass chunkContext to upload function', async () => { + const assetDataArray = [ + { id: 'asset1', blocksize: 1024, uploadUrls: [{ href: 'https://upload.com/chunk?partNumber=5' }] }, + ]; + const blobDataArray = [new File(['test'], 'file.txt')]; + const filetypeArray = ['text/plain']; + mockUploadFunction.resolves({ attempt: 2 }); + await batchChunkUpload( + assetDataArray, + blobDataArray, + filetypeArray, + 1, + mockUploadFunction, + mockSignal, + ); + const callArgs = mockUploadFunction.firstCall.args; + const chunkContext = callArgs[6]; // 7th argument + expect(chunkContext.assetId).to.equal('asset1'); + expect(chunkContext.chunkNumber).to.equal(5); + expect(chunkContext.fileType).to.equal('text/plain'); + expect(chunkContext.fileIndex).to.equal(0); + expect(chunkContext.chunkIndex).to.equal(0); }); - it('should allow custom config', () => { - const customConfig = { blockSize: 2048, maxRetries: 5 }; - const customUtils = new ChunkingUtils(customConfig); - expect(customUtils.config.blockSize).to.equal(2048); - expect(customUtils.config.maxRetries).to.equal(5); + it('should call onChunkSuccess callback on successful upload', async () => { + const assetDataArray = [ + { id: 'asset1', blocksize: 1024, uploadUrls: [{ href: 'https://upload.com/chunk' }] }, + ]; + const blobDataArray = [new File(['test'], 'file.txt')]; + const filetypeArray = 
['text/plain']; + mockUploadFunction.resolves({ attempt: 1 }); + const onChunkSuccess = sinon.stub(); + await batchChunkUpload( + assetDataArray, + blobDataArray, + filetypeArray, + 1, + mockUploadFunction, + mockSignal, + { onChunkSuccess }, + ); + expect(onChunkSuccess.calledOnce).to.be.true; }); - it('should upload file with chunking', async () => { - const mockUploadFunction = sinon.stub().resolves({ response: 'success', attempt: 1 }); - const mockFile = new File(['test'], 'test.txt'); - const uploadUrls = ['https://example.com/upload']; - const result = await chunkingUtils.uploadFile({ - uploadUrls, - file: mockFile, - blockSize: 1024, - uploadFunction: mockUploadFunction, - signal: { aborted: false }, - }); - expect(result.failedChunks.size).to.equal(0); - expect(mockUploadFunction.calledOnce).to.be.true; + it('should call onChunkError callback on failed upload', async () => { + const assetDataArray = [ + { id: 'asset1', blocksize: 1024, uploadUrls: [{ href: 'https://upload.com/chunk' }] }, + ]; + const blobDataArray = [new File(['test'], 'file.txt')]; + const filetypeArray = ['text/plain']; + mockUploadFunction.rejects(new Error('Upload failed')); + const onChunkError = sinon.stub(); + await batchChunkUpload( + assetDataArray, + blobDataArray, + filetypeArray, + 1, + mockUploadFunction, + mockSignal, + { onChunkError }, + ); + expect(onChunkError.calledOnce).to.be.true; }); }); }); diff --git a/unitylibs/core/workflow/workflow-acrobat/upload-handler.js b/unitylibs/core/workflow/workflow-acrobat/upload-handler.js index cd029a50f..7518800e7 100644 --- a/unitylibs/core/workflow/workflow-acrobat/upload-handler.js +++ b/unitylibs/core/workflow/workflow-acrobat/upload-handler.js @@ -105,88 +105,43 @@ export default class UploadHandler { return 'MID_RANGE'; } - async executeInBatches(items, maxConcurrent, processFn) { - const executing = new Set(); - for (const item of items) { - const promise = processFn(item) - .then(() => { executing.delete(promise); }) - 
.catch(() => { executing.delete(promise); }); - executing.add(promise); - if (executing.size >= maxConcurrent) await Promise.any(executing); - } - if (executing.size > 0) { - await Promise.all(executing); - } - } - - async batchUpload(tasks, batchSize) { - await this.executeInBatches(tasks, batchSize, async (task) => { await task(); }); + async uploadChunkToStorage(urlString, chunk, fileType, assetId, signal, chunkNumber, chunkContext) { + const putOpts = { + method: 'PUT', + headers: { 'Content-Type': fileType }, + body: chunk, + signal, + }; + const modifiedRetryConfig = { ...this.actionBinder.workflowCfg.targetCfg.fetchApiConfig.default }; + modifiedRetryConfig.extraRetryCheck = async (response) => !response.ok; + + const { attempt } = await this.networkUtils.fetchFromServiceWithRetry( + urlString, + putOpts, + modifiedRetryConfig, + (response, attemptNum) => this.afterUploadFileToUnity({ + ...chunkContext, + response, + attempt: attemptNum, + }), + (error) => this.errorAfterUploadFileToUnity({ + ...chunkContext, + error, + }), + ); + return { attempt }; } async chunkPdf(assetDataArray, blobDataArray, filetypeArray, batchSize, signal) { - const uploadTasks = []; - const failedFiles = new Set(); - const attemptMap = new Map(); - assetDataArray.forEach((assetData, fileIndex) => { - if (signal?.aborted) return; - const blobData = blobDataArray[fileIndex]; - const fileType = filetypeArray[fileIndex]; - const totalChunks = Math.ceil(blobData.size / assetData.blocksize); - if (assetData.uploadUrls.length !== totalChunks) return; - let fileUploadFailed = false; - let maxAttempts = 0; - const chunkTasks = Array.from({ length: totalChunks }, (_, i) => { - const start = i * assetData.blocksize; - const end = Math.min(start + assetData.blocksize, blobData.size); - const chunk = blobData.slice(start, end); - const url = assetData.uploadUrls[i]; - return async () => { - if (fileUploadFailed || signal?.aborted) return; - const urlObj = new URL(url.href); - const chunkNumber = 
urlObj.searchParams.get('partNumber') || 0; - try { - const putOpts = { - method: 'PUT', - headers: { 'Content-Type': fileType }, - body: chunk, - signal, - }; - const chunkNumberInt = parseInt(chunkNumber, 10); - const modifiedRetryConfig = { ...this.actionBinder.workflowCfg.targetCfg.fetchApiConfig.default }; - modifiedRetryConfig.extraRetryCheck = async (response) => !response.ok; - const chunkContext = { - assetId: assetData.id, - blobData: chunk, - chunkNumber: chunkNumberInt, - fileType, - }; - const { attempt } = await this.networkUtils.fetchFromServiceWithRetry( - url.href, - putOpts, - modifiedRetryConfig, - (response, attempt) => this.afterUploadFileToUnity({ - ...chunkContext, - response, - attempt, - }), - (error) => this.errorAfterUploadFileToUnity({ - ...chunkContext, - error, - }), - ); - if (attempt > maxAttempts) maxAttempts = attempt; - attemptMap.set(fileIndex, maxAttempts); - } catch (err) { - failedFiles.add({ fileIndex, chunkNumber }); - fileUploadFailed = true; - } - }; - }); - uploadTasks.push(...chunkTasks); - }); - if (signal?.aborted) return { failedFiles, attemptMap }; - await this.batchUpload(uploadTasks, batchSize); - return { failedFiles, attemptMap }; + const { batchChunkUpload } = await import(`${getUnityLibs()}/utils/chunkingUtils.js`); + return batchChunkUpload( + assetDataArray, + blobDataArray, + filetypeArray, + batchSize, + this.uploadChunkToStorage.bind(this), + signal, + ); } async verifyContent(assetData, signal) { @@ -557,10 +512,11 @@ export default class UploadHandler { } async createInitialAssets(files, workflowId, maxConcurrentFiles) { + const { executeInBatches } = await import(`${getUnityLibs()}/utils/chunkingUtils.js`); const blobDataArray = []; const assetDataArray = []; const fileTypeArray = []; - await this.executeInBatches(files, maxConcurrentFiles, async (file) => { + await executeInBatches(files, maxConcurrentFiles, async (file) => { try { const [blobData, assetData] = await Promise.all([ 
/**
 * Resolves the asset identifier from asset metadata, supporting both the
 * Acrobat shape ({ id }) and the legacy shape ({ assetId }).
 * Null-safe: returns undefined when assetData is null/undefined or has
 * neither key, instead of throwing a TypeError.
 *
 * @param {Object|null|undefined} assetData - Asset metadata.
 * @returns {string|undefined} The asset id, or undefined.
 */
export function getAssetId(assetData) {
  return assetData?.id ?? assetData?.assetId;
}

/**
 * Maps completed-chunk count onto a 0-100 progress value starting from
 * baseProgress. Guards totalChunks === 0 (no chunks means no chunk-derived
 * progress) so the result is never NaN; output is clamped to 100.
 *
 * @param {number} completedChunks - Chunks finished so far.
 * @param {number} totalChunks - Total chunks expected.
 * @param {number} [baseProgress=0] - Progress already earned before chunking.
 * @returns {number} Progress in [0, 100].
 */
export function calculateChunkProgress(completedChunks, totalChunks, baseProgress = 0) {
  if (totalChunks === 0) return Math.min(baseProgress, 100);
  const chunkProgress = (completedChunks / totalChunks) * (100 - baseProgress);
  return Math.min(baseProgress + chunkProgress, 100);
}

/**
 * Builds the human-readable error message for a failed chunked upload.
 */
export function createChunkUploadErrorMessage(assetId, fileSize, fileType, failedChunkCount) {
  return `One or more chunks failed to upload for asset: ${assetId}, ${fileSize} bytes, ${fileType}. Failed chunks: ${failedChunkCount}`;
}

/**
 * Creates an analytics payload stamped with the current ISO-8601 timestamp.
 * Keys in `data` override nothing built-in except by spreading last.
 */
export function createChunkAnalyticsData(eventName, data = {}) {
  return {
    event: eventName,
    timestamp: new Date().toISOString(),
    ...data,
  };
}

export const DEFAULT_CHUNK_CONFIG = {
  maxRetries: 3,
  retryDelay: 1000,
  batchSize: 5,
};

/**
 * Runs processFn over items with at most maxConcurrent promises in flight.
 * Per-item rejections are swallowed (best-effort semantics); callers that
 * need failure information must record it inside processFn.
 *
 * @param {Array} items - Work items.
 * @param {number} maxConcurrent - Concurrency limit (>= 1).
 * @param {Function} processFn - async (item) => void.
 */
export async function executeInBatches(items, maxConcurrent, processFn) {
  const executing = new Set();
  for (const item of items) {
    const promise = processFn(item)
      .then(() => { executing.delete(promise); })
      .catch(() => { executing.delete(promise); });
    executing.add(promise);
    // Wait for any in-flight task to settle before scheduling more work.
    if (executing.size >= maxConcurrent) await Promise.any(executing);
  }
  if (executing.size > 0) {
    await Promise.all(executing);
  }
}

/**
 * Uploads every file's chunks through uploadFunction with a single flat,
 * cross-file concurrency limit of batchSize. A file whose uploadUrls count
 * does not match its computed chunk count is skipped entirely. The first
 * chunk failure marks the file failed and suppresses its remaining chunks.
 *
 * @param {Array<Object>} assetDataArray - Per-file { id|assetId, blocksize, uploadUrls }.
 * @param {Array<Blob>} blobDataArray - Per-file blob data (index-aligned).
 * @param {Array<string>} filetypeArray - Per-file MIME types (index-aligned).
 * @param {number} batchSize - Max chunks in flight across all files.
 * @param {Function} uploadFunction - async (urlString, chunk, fileType, assetId, signal, chunkNumber, chunkContext).
 * @param {AbortSignal|null} [signal] - Aborts scheduling when signal.aborted.
 * @param {Object} [options] - Optional { onChunkSuccess, onChunkError } callbacks.
 * @returns {Promise<{failedFiles: Set, attemptMap: Map}>} Failed {fileIndex, chunkNumber}
 *   records and the max attempt count observed per fileIndex.
 */
export async function batchChunkUpload(
  assetDataArray,
  blobDataArray,
  filetypeArray,
  batchSize,
  uploadFunction,
  signal = null,
  options = {},
) {
  const { onChunkSuccess, onChunkError } = options;
  const uploadTasks = [];
  const failedFiles = new Set();
  const attemptMap = new Map();

  assetDataArray.forEach((assetData, fileIndex) => {
    if (signal?.aborted) return;
    const blobData = blobDataArray[fileIndex];
    const fileType = filetypeArray[fileIndex];
    const totalChunks = Math.ceil(blobData.size / assetData.blocksize);

    // The service must have issued exactly one URL per chunk; otherwise skip.
    if (assetData.uploadUrls.length !== totalChunks) return;

    let fileUploadFailed = false;
    let maxAttempts = 0;

    const chunkTasks = Array.from({ length: totalChunks }, (_, i) => {
      const start = i * assetData.blocksize;
      const end = Math.min(start + assetData.blocksize, blobData.size);
      const chunk = blobData.slice(start, end);
      const url = assetData.uploadUrls[i];

      return async () => {
        // Suppress remaining chunks of a file that already failed, or on abort.
        if (fileUploadFailed || signal?.aborted) return null;
        const urlString = typeof url === 'object' ? url.href : url;
        const chunkNumber = extractChunkNumber(url, i);
        const assetId = getAssetId(assetData);

        const chunkContext = {
          assetId,
          blobData: chunk,
          chunkNumber,
          fileType,
          fileIndex,
          chunkIndex: i,
        };

        try {
          const result = await uploadFunction(urlString, chunk, fileType, assetId, signal, chunkNumber, chunkContext);
          const attempt = result?.attempt || 1;
          if (attempt > maxAttempts) maxAttempts = attempt;
          attemptMap.set(fileIndex, maxAttempts);
          if (onChunkSuccess) onChunkSuccess(chunkContext, result);
          return result;
        } catch (err) {
          failedFiles.add({ fileIndex, chunkNumber });
          fileUploadFailed = true;
          if (onChunkError) onChunkError(chunkContext, err);
          return null;
        }
      };
    });

    uploadTasks.push(...chunkTasks);
  });

  if (signal?.aborted) return { failedFiles, attemptMap };
  await executeInBatches(uploadTasks, batchSize, async (task) => { await task(); });
  return { failedFiles, attemptMap };
}