diff --git a/nala/assets/1-PDF-merge-pdf-1.pdf b/nala/assets/1-PDF-merge-pdf-1.pdf new file mode 100644 index 000000000..8f9647e21 Binary files /dev/null and b/nala/assets/1-PDF-merge-pdf-1.pdf differ diff --git a/nala/assets/1-PDF-merge-pdf-2.pdf b/nala/assets/1-PDF-merge-pdf-2.pdf new file mode 100644 index 000000000..8f9647e21 Binary files /dev/null and b/nala/assets/1-PDF-merge-pdf-2.pdf differ diff --git a/nala/assets/lightroom.jpg b/nala/assets/lightroom.jpg new file mode 100644 index 000000000..ec1070bed Binary files /dev/null and b/nala/assets/lightroom.jpg differ diff --git a/nala/features/firefly/firefly.test.cjs b/nala/features/firefly/firefly.test.cjs index 4c773c688..9733a2c40 100644 --- a/nala/features/firefly/firefly.test.cjs +++ b/nala/features/firefly/firefly.test.cjs @@ -25,7 +25,6 @@ test.describe('Firefly test suite', () => { await test.step('step-2: Verify Firefly Image verb content/specs', async () => { await expect(fireflyPage.unityWrapper).toBeVisible(); - await expect(fireflyPage.imageVerbButton).toBeVisible(); await expect(fireflyPage.promptInput).toBeVisible(); await expect(fireflyPage.generateImageButton).toBeVisible(); const actualText = await fireflyPage.promptInput.getAttribute('placeholder'); diff --git a/nala/features/lightroom/unitywidget.page.cjs b/nala/features/lightroom/unitywidget.page.cjs new file mode 100644 index 000000000..16e7017a9 --- /dev/null +++ b/nala/features/lightroom/unitywidget.page.cjs @@ -0,0 +1,14 @@ +export default class psUnityWidget { + constructor(page) { + this.page = page; + this.unityWidgetContainer = page.locator('.upload.upload-block.con-block.unity-enabled'); + this.unityVideo = this.unityWidgetContainer.locator('.video-container.video-holder').nth(0); + this.dropZone = this.unityWidgetContainer.locator('.drop-zone-container').nth(0); + this.dropZoneText = this.dropZone.locator('//div[@class="drop-zone-container"]/div[@class="drop-zone"]/p[1]').nth(2); + this.dropZoneFileText = 
this.dropZone.locator('//div[@class="drop-zone-container"]/div[@class="drop-zone"]/p[2]').nth(2); + this.fileUploadCta = this.unityWidgetContainer.locator('.con-button.blue.action-button.button-xl').nth(2); + this.legelTerms = this.unityWidgetContainer.locator('//a[@daa-ll="Terms of Use-11--"]'); + this.privacyPolicy = this.unityWidgetContainer.locator('//a[@daa-ll="Privacy Policy-12--"]'); + this.splashScreen = this.unityWidgetContainer.locator('//div[@class="fragment splash -loader show" and @style="display: none"]'); + } +} diff --git a/nala/features/lightroom/unitywidget.spec.cjs b/nala/features/lightroom/unitywidget.spec.cjs new file mode 100644 index 000000000..d48d1d866 --- /dev/null +++ b/nala/features/lightroom/unitywidget.spec.cjs @@ -0,0 +1,31 @@ +module.exports = { + FeatureName: 'Lr Unity Widget', + features: [ + { + tcid: '0', + name: '@lr-unityUI', + path: '/drafts/nala/unity/lightroom', + data: { + CTATxt: 'Upload your photo', + fileFormatTxt: 'File must be JPEG or JPG and up to 40MB', + dropZoneTxt: 'Drag and drop an image to try it today.', + }, + tags: '@lr-unity @smoke @regression @unity', + }, + + { + tcid: '1', + name: '@lr-unityFileUpload', + path: '/drafts/nala/unity/lightroom', + tags: '@lr-unity @smoke @regression @unity', + }, + + { + tcid: '2', + name: '@lr-unityLrProductpage', + path: '/drafts/nala/unity/lightroom', + url: 'f0.lightroom.adobe.com', + tags: '@lr-unity @smoke @regression @unity', + }, + ], +}; diff --git a/nala/features/lightroom/unitywidget.test.cjs b/nala/features/lightroom/unitywidget.test.cjs new file mode 100644 index 000000000..c0ddb794a --- /dev/null +++ b/nala/features/lightroom/unitywidget.test.cjs @@ -0,0 +1,76 @@ +import path from 'path'; +import { expect, test } from '@playwright/test'; +import { features } from './unitywidget.spec.cjs'; +import UnityWidget from './unitywidget.page.cjs'; + +const imageFilePath = path.resolve(__dirname, '../../assets/lightroom.jpg'); +console.log(__dirname); + +let unityWidget; 
+const unityLibs = process.env.UNITY_LIBS || ''; + +test.describe('Unity Widget Lr test suite', () => { + test.beforeEach(async ({ page }) => { + unityWidget = new UnityWidget(page); + await page.setViewportSize({ width: 1250, height: 850 }); + await page.context().clearCookies(); + }); + + // Test 0 : Unity Widget Lr UI checks + test(`${features[0].name},${features[0].tags}`, async ({ page, baseURL }) => { + const ccBaseURL = baseURL.replace('--dc--', '--cc--'); + console.info(`[Test Page]: ${ccBaseURL}${features[0].path}${unityLibs}`); + + await test.step('step-1: Go to Unity Widget Lr test page', async () => { + await page.goto(`${ccBaseURL}${features[0].path}${unityLibs}`); + await page.waitForLoadState('domcontentloaded'); + await expect(page).toHaveURL(`${ccBaseURL}${features[0].path}${unityLibs}`); + }); + + await test.step('step-2: Verify Unity Widget Lr verb user interface', async () => { + await page.waitForTimeout(3000); + await expect(await unityWidget.unityWidgetContainer).toBeTruthy(); + await expect(await unityWidget.unityVideo).toBeTruthy(); + await expect(await unityWidget.dropZone).toBeTruthy(); + await expect(await unityWidget.dropZoneText).toBeTruthy(); + }); + }); + // Test 1 : Unity Widget File Upload & splash screen display + test(`${features[1].name},${features[1].tags}`, async ({ page, baseURL }) => { + const ccBaseURL = baseURL.replace('--dc--', '--cc--'); + console.info(`[Test Page]: ${ccBaseURL}${features[1].path}${unityLibs}`); + + await test.step('check lightroom file upload', async () => { + await page.goto(`${ccBaseURL}${features[1].path}${unityLibs}`); + await page.waitForLoadState('domcontentloaded'); + await expect(page).toHaveURL(`${ccBaseURL}${features[1].path}${unityLibs}`); + }); + await test.step('jpg image file upload and splash screen display', async () => { + const fileInput = page.locator('//input[@type="file" and @id="file-upload"]').nth(0); + console.log('fileinput', fileInput); + await page.waitForTimeout(10000); + 
await fileInput.setInputFiles(imageFilePath); + await page.waitForTimeout(3000); + await expect(unityWidget.splashScreen).toBeTruthy(); + }); + }); + // Test 2 : Unity Widget user navigation to Lightroom Product Page + test(`${features[2].name},${features[2].tags}`, async ({ page, baseURL }) => { + const ccBaseURL = baseURL.replace('--dc--', '--cc--'); + console.info(`[Test Page]: ${ccBaseURL}${features[2].path}${unityLibs}`); + + await test.step('check user landing on Lr product page post file upload', async () => { + await page.goto(`${ccBaseURL}${features[2].path}${unityLibs}`); + await page.waitForLoadState('domcontentloaded'); + await expect(page).toHaveURL(`${ccBaseURL}${features[2].path}${unityLibs}`); + }); + await test.step('jpg image file upload and user navigation to product page', async () => { + const fileInput = page.locator('//input[@type="file" and @id="file-upload"]').nth(0); + await page.waitForTimeout(10000); + await fileInput.setInputFiles(imageFilePath); + await page.waitForTimeout(10000); + const productPageUrl = await page.url(); + expect(productPageUrl).toContain(features[2].url); + }); + }); +}); diff --git a/nala/features/merge-pdf/merge-pdf.page.cjs b/nala/features/merge-pdf/merge-pdf.page.cjs new file mode 100644 index 000000000..ea4a8abed --- /dev/null +++ b/nala/features/merge-pdf/merge-pdf.page.cjs @@ -0,0 +1,7 @@ +import AcrobatWidget from '../../widget/acrobat-widget.cjs'; + +export default class MergePdf extends AcrobatWidget { + constructor(page, nth = 0) { + super(page, '.combine-pdf.unity-enabled', nth); + } +} diff --git a/nala/features/merge-pdf/merge-pdf.spec.cjs b/nala/features/merge-pdf/merge-pdf.spec.cjs new file mode 100644 index 000000000..6aa14ea85 --- /dev/null +++ b/nala/features/merge-pdf/merge-pdf.spec.cjs @@ -0,0 +1,16 @@ +module.exports = { + FeatureName: 'Merge PDF', + features: [ + { + tcid: '0', + name: '@merge-pdf', + path: '/drafts/nala/acrobat/online/test/merge-pdf', + data: { + verbTitle: 'Adobe Acrobat', 
verbHeading: 'Merge PDF files', + verbCopy: 'Drag and drop PDFs to merge them into one file with our PDF combiner.', + }, + tags: '@merge-pdf @smoke @regression @unity', + }, + ], +}; diff --git a/nala/features/merge-pdf/merge-pdf.test.cjs b/nala/features/merge-pdf/merge-pdf.test.cjs new file mode 100644 index 000000000..f8352a5a0 --- /dev/null +++ b/nala/features/merge-pdf/merge-pdf.test.cjs @@ -0,0 +1,63 @@ +import path from 'path'; +import { expect, test } from '@playwright/test'; +import { features } from './merge-pdf.spec.cjs'; +import MergePdf from './merge-pdf.page.cjs'; + +const pdfFilePath1 = path.resolve(__dirname, '../../assets/1-PDF-merge-pdf-1.pdf'); +const pdfFilePath2 = path.resolve(__dirname, '../../assets/1-PDF-merge-pdf-2.pdf'); + +let mergePdf; + +const unityLibs = process.env.UNITY_LIBS || ''; + +test.describe('Unity Merge PDF test suite', () => { + test.beforeEach(async ({ page }) => { + mergePdf = new MergePdf(page); + }); + + // Test 0 : Merge PDF + test(`${features[0].name},${features[0].tags}`, async ({ page, baseURL }) => { + console.info(`[Test Page]: ${baseURL}${features[0].path}${unityLibs}`); + const { data } = features[0]; + + await test.step('step-1: Go to Merge PDF test page', async () => { + await page.goto(`${baseURL}${features[0].path}${unityLibs}`); + await page.waitForLoadState('domcontentloaded'); + await expect(page).toHaveURL(`${baseURL}${features[0].path}${unityLibs}`); + }); + + await test.step('step-2: Verify Merge PDF widget content/specs', async () => { + await expect(await mergePdf.widget).toBeVisible(); + await expect(await mergePdf.dropZone).toBeVisible(); + await expect(await mergePdf.verbImage).toBeVisible(); + await expect(await mergePdf.acrobatIcon).toBeVisible(); + const actualText = await mergePdf.verbHeader.textContent(); + expect(actualText.trim()).toBe(data.verbHeading); + await expect(await mergePdf.verbTitle).toContainText(data.verbTitle); + await expect(await 
mergePdf.verbCopy).toContainText(data.verbCopy); + }); + + await test.step('step-3: Upload a sample PDF files to be merged', async () => { + // upload and wait for some page change indicator (like a new element or URL change) + const fileInput = page.locator('input[type="file"]#file-upload'); + await page.waitForTimeout(10000); + await fileInput.setInputFiles([pdfFilePath1, pdfFilePath2]); + await page.waitForTimeout(15000); + + // Verify the URL parameters + const currentUrl = page.url(); + console.log(`[Post-upload URL]: ${currentUrl}`); + const urlObj = new URL(currentUrl); + expect(urlObj.searchParams.get('x_api_client_id')).toBe('unity'); + expect(urlObj.searchParams.get('x_api_client_location')).toBe('combine-pdf'); + expect(urlObj.searchParams.get('user')).toBe('frictionless_new_user'); + expect(urlObj.searchParams.get('attempts')).toBe('1st'); + console.log({ + x_api_client_id: urlObj.searchParams.get('x_api_client_id'), + x_api_client_location: urlObj.searchParams.get('x_api_client_location'), + user: urlObj.searchParams.get('user'), + attempts: urlObj.searchParams.get('attempts'), + }); + }); + }); +}); diff --git a/nala/features/word-pdf/word-to-pdf.spec.cjs b/nala/features/word-pdf/word-to-pdf.spec.cjs index 41ae10764..84c936d28 100644 --- a/nala/features/word-pdf/word-to-pdf.spec.cjs +++ b/nala/features/word-pdf/word-to-pdf.spec.cjs @@ -8,7 +8,7 @@ module.exports = { data: { verbTitle: 'Adobe Acrobat', verbHeading: 'Word to PDF converter', - verbCopy: 'Drag and drop a Microsoft Word document (DOCX or DOC) to convert to PDF.', + verbCopy: 'Drag and drop a Microsoft Word (DOCX or DOC), RTF, or TXT file to convert to PDF.', }, tags: '@word-to-pdf @smoke @regression @unity', }, diff --git a/test/core/workflow/workflow.upload.test.js b/test/core/workflow/workflow-upload/action-binder.test.js similarity index 98% rename from test/core/workflow/workflow.upload.test.js rename to test/core/workflow/workflow-upload/action-binder.test.js index 
196651708..c2a10c24f 100644 --- a/test/core/workflow/workflow.upload.test.js +++ b/test/core/workflow/workflow-upload/action-binder.test.js @@ -1,7 +1,7 @@ import { expect } from '@esm-bundle/chai'; import sinon from 'sinon'; import { readFile } from '@web/test-runner-commands'; -import { setUnityLibs } from '../../../unitylibs/scripts/utils.js'; +import { setUnityLibs } from '../../../../unitylibs/scripts/utils.js'; setUnityLibs('/unitylibs'); @@ -14,8 +14,8 @@ window.lana = { log: sinon.stub() }; window.sendAnalyticsEvent = sinon.stub(); -const { default: init } = await import('../../../unitylibs/blocks/unity/unity.js'); -document.body.innerHTML = await readFile({ path: './mocks/upload-body.html' }); +const { default: init } = await import('../../../../unitylibs/blocks/unity/unity.js'); +document.body.innerHTML = await readFile({ path: '../mocks/upload-body.html' }); function delay(ms) { return new Promise((resolve) => { @@ -87,7 +87,7 @@ describe('Unity Upload Block', () => { await init(unityEl); await delay(100); - const module = await import('../../../unitylibs/core/workflow/workflow-upload/action-binder.js'); + const module = await import('../../../../unitylibs/core/workflow/workflow-upload/action-binder.js'); ActionBinder = module.default; workflowCfg = { @@ -115,7 +115,7 @@ describe('Unity Upload Block', () => { }); beforeEach(async () => { - document.body.innerHTML = await readFile({ path: './mocks/upload-body.html' }); + document.body.innerHTML = await readFile({ path: '../mocks/upload-body.html' }); unityEl = document.querySelector('.unity.workflow-upload'); await delay(50); }); @@ -540,7 +540,7 @@ describe('Unity Upload Block', () => { actionBinder.continueInApp = async () => {}; const file = new File(['test'], 'test.jpg', { type: 'image/jpeg' }); - await actionBinder.photoshopActionMaps('upload', [file]); + await actionBinder.executeActionMaps('upload', [file]); window.fetch = originalFetch; actionBinder.checkImageDimensions = 
originalCheckImageDimensions; @@ -556,7 +556,7 @@ describe('Unity Upload Block', () => { }; try { - await actionBinder.photoshopActionMaps('interrupt'); + await actionBinder.executeActionMaps('interrupt'); expect.fail('Should have thrown an error'); } catch (error) { expect(error).to.be.instanceOf(Error); @@ -566,7 +566,7 @@ describe('Unity Upload Block', () => { it('should handle photoshop action maps for unknown action', async () => { const actionBinder = new ActionBinder(unityEl, workflowCfg, unityEl, [unityEl]); - await actionBinder.photoshopActionMaps('unknown', []); + await actionBinder.executeActionMaps('unknown', []); }); it('should initialize action listeners', async () => { @@ -1457,7 +1457,7 @@ describe('Unity Upload Block', () => { this.showErrorToast(errorCallbackOptions, err, this.lanaOptions); throw err; } - } + }, }; const originalFetch = window.fetch; @@ -1500,7 +1500,7 @@ describe('Unity Upload Block', () => { this.showErrorToast(errorCallbackOptions, err, this.lanaOptions); throw err; } - } + }, }; const originalFetch = window.fetch; @@ -1843,9 +1843,9 @@ describe('Unity Upload Block', () => { dataTransfer, }); - // Mock the photoshopActionMaps method - const originalPhotoshopActionMaps = actionBinder.photoshopActionMaps; - actionBinder.photoshopActionMaps = async (action, files) => { + // Mock the executeActionMaps method + const originalExecuteActionMaps = actionBinder.executeActionMaps; + actionBinder.executeActionMaps = async (action, files) => { expect(action).to.equal('upload'); expect(files).to.have.length(1); expect(files[0]).to.equal(mockFile); @@ -1853,7 +1853,7 @@ describe('Unity Upload Block', () => { testDiv.dispatchEvent(dropEvent); - actionBinder.photoshopActionMaps = originalPhotoshopActionMaps; + actionBinder.executeActionMaps = originalExecuteActionMaps; }); it('should handle click event on DIV element', async () => { diff --git a/test/core/workflow/workflow-upload/upload-handler.test.js 
b/test/core/workflow/workflow-upload/upload-handler.test.js new file mode 100644 index 000000000..f5a47c3f9 --- /dev/null +++ b/test/core/workflow/workflow-upload/upload-handler.test.js @@ -0,0 +1,392 @@ +import { expect } from '@esm-bundle/chai'; +import sinon from 'sinon'; + +describe('UploadHandler', () => { + let UploadHandler; + let uploadHandler; + let mockActionBinder; + let mockServiceHandler; + let utilsModule; + + before(async () => { + window.unityConfig = { + surfaceId: 'test-surface', + apiEndPoint: 'https://test-api.adobe.com', + apiKey: 'test-api-key', + }; + + // Mock adobeIMS with a valid, non-expired token + window.adobeIMS = { + getAccessToken: () => ({ + token: 'mock-token', + expire: Date.now() + (10 * 60 * 1000), // expires in 10 minutes + isGuestToken: false, + }), + }; + + utilsModule = await import('../../../../unitylibs/scripts/utils.js'); + utilsModule.setUnityLibs('../../../../unitylibs'); + const module = await import('../../../../unitylibs/core/workflow/workflow-upload/upload-handler.js'); + UploadHandler = module.default; + }); + + beforeEach(() => { + window.unityConfig = { + surfaceId: 'test-surface', + apiEndPoint: 'https://test-api.adobe.com', + apiKey: 'test-api-key', + }; + + mockActionBinder = { + assetId: 'test-asset-123', + workflowCfg: { + productName: 'test-product', + supportedFeatures: { values: () => ({ next: () => ({ value: 'test-feature' }) }) }, + }, + apiConfig: { endPoint: { acmpCheck: '/api/asset/finalize' } }, + errorToastEl: document.createElement('div'), + lanaOptions: { sampleRate: 100, tags: 'Unity-PS-Upload' }, + logAnalyticsinSplunk: sinon.stub(), + getAdditionalHeaders: () => ({ + 'x-unity-product': 'test-product', + 'x-unity-action': 'test-feature', + }), + }; + + mockServiceHandler = { showErrorToast: sinon.stub() }; + + uploadHandler = new UploadHandler(mockActionBinder, mockServiceHandler); + }); + + describe('Constructor', () => { + it('should initialize with actionBinder and serviceHandler', () => { + 
expect(uploadHandler.actionBinder).to.equal(mockActionBinder); + expect(uploadHandler.serviceHandler).to.equal(mockServiceHandler); + }); + }); + + describe('uploadFileToUnity', () => { + let originalFetchFromServiceWithRetry; + + beforeEach(() => { + originalFetchFromServiceWithRetry = uploadHandler.networkUtils.fetchFromServiceWithRetry; + }); + + afterEach(() => { + uploadHandler.networkUtils.fetchFromServiceWithRetry = originalFetchFromServiceWithRetry; + }); + + it('should upload file chunk successfully', async () => { + const mockResponse = { ok: true, status: 200 }; + uploadHandler.networkUtils.fetchFromServiceWithRetry = sinon.stub().resolves(mockResponse); + + const blob = new Blob(['test data'], { type: 'text/plain' }); + const result = await uploadHandler.uploadFileToUnity('http://upload.com', blob, 'text/plain', 'asset-123'); + + expect(uploadHandler.networkUtils.fetchFromServiceWithRetry.calledOnce).to.be.true; + expect(result).to.equal(mockResponse); + }); + + it('should throw error for failed upload', async () => { + uploadHandler.networkUtils.fetchFromServiceWithRetry = sinon.stub().rejects(new Error('Max retry delay exceeded')); + + const blob = new Blob(['test data'], { type: 'text/plain' }); + + try { + await uploadHandler.uploadFileToUnity('http://upload.com', blob, 'text/plain', 'asset-123'); + expect.fail('Should have thrown error'); + } catch (error) { + expect(error.message).to.include('Max retry delay exceeded'); + } + }); + + it('should handle network errors', async () => { + uploadHandler.networkUtils.fetchFromServiceWithRetry = sinon.stub().rejects(new Error('Network error')); + + const blob = new Blob(['test data'], { type: 'text/plain' }); + + try { + await uploadHandler.uploadFileToUnity('http://upload.com', blob, 'text/plain', 'asset-123'); + expect.fail('Should have thrown error'); + } catch (error) { + expect(error.message).to.include('Network error'); + } + }); + + it('should handle abort signal', async () => { + 
uploadHandler.networkUtils.fetchFromServiceWithRetry = sinon.stub().rejects(new Error('Request aborted')); + + const signal = { aborted: true }; + const blob = new Blob(['test data'], { type: 'text/plain' }); + + try { + await uploadHandler.uploadFileToUnity('http://upload.com', blob, 'text/plain', 'asset-123', signal); + expect.fail('Should have thrown error'); + } catch (error) { + expect(error.message).to.include('Request aborted'); + } + }); + }); + + describe('uploadFileToUnityWithRetry', () => { + let originalFetch; + + beforeEach(() => { + originalFetch = window.fetch; + }); + + afterEach(() => { + window.fetch = originalFetch; + }); + }); + + describe('uploadChunksToUnity', () => { + let originalFetch; + + beforeEach(() => { + originalFetch = window.fetch; + }); + + afterEach(() => { + window.fetch = originalFetch; + }); + + it('should upload all chunks successfully', async () => { + const mockResponse = { ok: true, status: 200 }; + window.fetch = sinon.stub().resolves(mockResponse); + + // Create a file that will result in exactly 2 chunks + const file = new File(['test data for chunking that is long enough to create exactly two chunks with more content'], 'test.txt', { type: 'text/plain' }); + const uploadUrls = ['http://upload1.com', 'http://upload2.com']; + const blockSize = 50; // This will create exactly 2 chunks (file is ~80 chars) + + const result = await uploadHandler.uploadChunksToUnity(uploadUrls, file, blockSize); + + expect(window.fetch.calledTwice).to.be.true; + expect(result.failedChunks.size).to.equal(0); + expect(mockActionBinder.logAnalyticsinSplunk.calledWith('Chunked Upload Completed|UnityWidget')).to.be.true; + }); + + it('should handle empty file', async () => { + const mockResponse = { ok: true, status: 200 }; + window.fetch = sinon.stub().resolves(mockResponse); + + const file = new File([], 'empty.txt', { type: 'text/plain' }); + const uploadUrls = []; // Empty URLs array for empty file + const blockSize = 10; + + const result = 
await uploadHandler.uploadChunksToUnity(uploadUrls, file, blockSize); + + expect(window.fetch.called).to.be.false; // No chunks to upload + expect(result.failedChunks.size).to.equal(0); + }); + + it('should throw error for URL count mismatch', async () => { + const file = new File(['test data for chunking that is long enough to create exactly two chunks with more content'], 'test.txt', { type: 'text/plain' }); + const uploadUrls = ['http://upload1.com']; // Only 1 URL but file needs 2 chunks + const blockSize = 50; + + try { + await uploadHandler.uploadChunksToUnity(uploadUrls, file, blockSize); + expect.fail('Should have thrown error'); + } catch (error) { + expect(error.message).to.include('Mismatch between number of chunks'); + } + }); + + it('should handle URL objects with href property', async () => { + const mockResponse = { ok: true, status: 200 }; + window.fetch = sinon.stub().resolves(mockResponse); + + const file = new File(['test data'], 'test.txt', { type: 'text/plain' }); + const uploadUrls = [{ href: 'http://upload1.com' }]; + const blockSize = 20; + + const result = await uploadHandler.uploadChunksToUnity(uploadUrls, file, blockSize); + + expect(window.fetch.calledOnce).to.be.true; + expect(result.failedChunks.size).to.equal(0); + }); + + it('should handle abort signal', async () => { + const signal = { aborted: true }; + const file = new File(['test data'], 'test.txt', { type: 'text/plain' }); + const uploadUrls = ['http://upload1.com']; + const blockSize = 20; + + // Set up fetch stub + window.fetch = sinon.stub(); + + const result = await uploadHandler.uploadChunksToUnity(uploadUrls, file, blockSize, signal); + + expect(result.failedChunks.size).to.equal(0); + expect(window.fetch.called).to.be.false; + }); + + it('should handle single chunk file', async () => { + const mockResponse = { ok: true, status: 200 }; + window.fetch = sinon.stub().resolves(mockResponse); + + const file = new File(['small data'], 'small.txt', { type: 'text/plain' }); + 
const uploadUrls = ['http://upload1.com']; + const blockSize = 100; // Larger than file size + + const result = await uploadHandler.uploadChunksToUnity(uploadUrls, file, blockSize); + + expect(window.fetch.calledOnce).to.be.true; + expect(result.failedChunks.size).to.equal(0); + }); + + it('should handle large file with many chunks', async () => { + const mockResponse = { ok: true, status: 200 }; + window.fetch = sinon.stub().resolves(mockResponse); + + // Create a larger file content + const largeContent = 'x'.repeat(200); // 200 characters + const file = new File([largeContent], 'large.txt', { type: 'text/plain' }); + const uploadUrls = ['http://upload1.com', 'http://upload2.com', 'http://upload3.com', 'http://upload4.com']; + const blockSize = 50; // 4 chunks + + const result = await uploadHandler.uploadChunksToUnity(uploadUrls, file, blockSize); + + expect(window.fetch.callCount).to.equal(4); + expect(result.failedChunks.size).to.equal(0); + }); + }); + + describe('uploadFileToUnityWithRetry', () => { + let originalFetch; + + beforeEach(() => { + originalFetch = window.fetch; + }); + + afterEach(() => { + window.fetch = originalFetch; + }); + }); + + describe('uploadFileToUnity Error Handling', () => { + let originalFetch; + + beforeEach(() => { + originalFetch = window.fetch; + }); + + afterEach(() => { + window.fetch = originalFetch; + }); + + it('should handle upload failure with no statusText', async () => { + const mockResponse = { ok: false, status: 500 }; + window.fetch = sinon.stub().resolves(mockResponse); + + const blob = new Blob(['test data'], { type: 'text/plain' }); + + try { + await uploadHandler.uploadFileToUnity('http://upload.com', blob, 'text/plain', 'asset-123'); + expect.fail('Should have thrown error'); + } catch (error) { + expect(error.message).to.include('Max retry delay exceeded'); + } + }); + + it('should handle AbortError during upload', async () => { + const abortError = new Error('Request aborted'); + abortError.name = 
'AbortError'; + window.fetch = sinon.stub().rejects(abortError); + + const blob = new Blob(['test data'], { type: 'text/plain' }); + + try { + await uploadHandler.uploadFileToUnity('http://upload.com', blob, 'text/plain', 'asset-123'); + expect.fail('Should have thrown AbortError'); + } catch (error) { + expect(error.message).to.include('Max retry delay exceeded'); + } + }); + + it('should handle Timeout error during upload', async () => { + const timeoutError = new Error('Request timed out'); + timeoutError.name = 'Timeout'; + window.fetch = sinon.stub().rejects(timeoutError); + + const blob = new Blob(['test data'], { type: 'text/plain' }); + + try { + await uploadHandler.uploadFileToUnity('http://upload.com', blob, 'text/plain', 'asset-123'); + expect.fail('Should have thrown error'); + } catch (error) { + expect(error.message).to.include('Max retry delay exceeded'); + } + }); + }); + + describe('uploadChunksToUnity Error Handling', () => { + let originalFetch; + + beforeEach(() => { + originalFetch = window.fetch; + }); + + afterEach(() => { + window.fetch = originalFetch; + }); + + it('should log chunk errors when upload fails', async () => { + const mockError = new Error('Network error'); + window.fetch = sinon.stub().rejects(mockError); + const file = new File(['test data'], 'test.txt', { type: 'text/plain' }); + const uploadUrls = ['http://upload1.com']; + const blockSize = 10; + const result = await uploadHandler.uploadChunksToUnity(uploadUrls, file, blockSize); + expect(result.failedChunks.size).to.equal(1); + expect(mockActionBinder.logAnalyticsinSplunk.calledWith('Upload Chunk Error|UnityWidget')).to.be.true; + expect(mockActionBinder.logAnalyticsinSplunk.calledWith('Chunked Upload Failed|UnityWidget')).to.be.true; + }); + }); + + describe('scanImgForSafetyWithRetry', () => { + let originalFetchFromServiceWithRetry; + + beforeEach(() => { + originalFetchFromServiceWithRetry = uploadHandler.networkUtils.fetchFromServiceWithRetry; + }); + + afterEach(() 
=> { + uploadHandler.networkUtils.fetchFromServiceWithRetry = originalFetchFromServiceWithRetry; + }); + + it('should call fetchFromServiceWithRetry for safety scan', async () => { + // Mock the fetchFromServiceWithRetry method + uploadHandler.networkUtils.fetchFromServiceWithRetry = sinon.stub().resolves({ success: true }); + + await uploadHandler.scanImgForSafetyWithRetry('test-asset-id'); + + expect(uploadHandler.networkUtils.fetchFromServiceWithRetry.calledOnce).to.be.true; + const callArgs = uploadHandler.networkUtils.fetchFromServiceWithRetry.firstCall.args; + expect(callArgs[0]).to.equal(mockActionBinder.apiConfig.endPoint.acmpCheck); + expect(callArgs[1].method).to.equal('POST'); + expect(callArgs[1].body).to.equal(JSON.stringify({ assetId: 'test-asset-id', targetProduct: 'test-product' })); + expect(callArgs[2]).to.deep.include({ + retryType: 'polling', + retryParams: { + maxRetryDelay: 300000, + defaultRetryDelay: 5000, + }, + }); + }); + + it('should handle fetchFromServiceWithRetry error', async () => { + const serviceError = new Error('Service error'); + uploadHandler.networkUtils.fetchFromServiceWithRetry = sinon.stub().rejects(serviceError); + + try { + await uploadHandler.scanImgForSafetyWithRetry('test-asset-id'); + expect.fail('Should have thrown error'); + } catch (error) { + expect(error.message).to.equal('Service error'); + } + }); + }); +}); diff --git a/test/utils/chunkingUtils.test.js b/test/utils/chunkingUtils.test.js new file mode 100644 index 000000000..b32fb9f46 --- /dev/null +++ b/test/utils/chunkingUtils.test.js @@ -0,0 +1,274 @@ +import { expect } from '@esm-bundle/chai'; +import sinon from 'sinon'; +import { + createFileChunks, + validateChunkUrls, + extractChunkNumber, + createChunkUploadTasks, + batchChunkUpload, + calculateChunkProgress, + createChunkUploadErrorMessage, + createChunkAnalyticsData, + DEFAULT_CHUNK_CONFIG, + ChunkingUtils, +} from '../../unitylibs/utils/chunkingUtils.js'; + +describe('Chunking Utils', () => { + 
describe('createFileChunks', () => { + it('should create correct number of chunks for small file', () => { + const file = new File(['test data'], 'test.txt', { type: 'text/plain' }); + const chunks = createFileChunks(file, 1024); + expect(chunks).to.have.length(1); + expect(chunks[0].size).to.equal(9); // 'test data' length + }); + + it('should create multiple chunks for large file', () => { + const largeData = 'x'.repeat(2048); // 2KB + const file = new File([largeData], 'large.txt', { type: 'text/plain' }); + const chunks = createFileChunks(file, 1024); // 1KB chunks + expect(chunks).to.have.length(2); + expect(chunks[0].size).to.equal(1024); + expect(chunks[1].size).to.equal(1024); + }); + + it('should handle edge case where file size equals block size', () => { + const data = 'x'.repeat(1024); + const file = new File([data], 'exact.txt', { type: 'text/plain' }); + const chunks = createFileChunks(file, 1024); + expect(chunks).to.have.length(1); + expect(chunks[0].size).to.equal(1024); + }); + }); + + describe('validateChunkUrls', () => { + it('should not throw error for matching URLs and chunks', () => { + const uploadUrls = ['url1', 'url2', 'url3']; + expect(() => validateChunkUrls(uploadUrls, 3)).to.not.throw(); + }); + + it('should throw error for mismatched URLs and chunks', () => { + const uploadUrls = ['url1', 'url2']; + expect(() => validateChunkUrls(uploadUrls, 3)).to.throw('Mismatch between number of chunks (3) and upload URLs (2)'); + }); + }); + + describe('extractChunkNumber', () => { + it('should extract chunk number from URL with partNumber param', () => { + const url = 'https://example.com/upload?partNumber=5'; + const chunkNumber = extractChunkNumber(url, 0); + expect(chunkNumber).to.equal(5); + }); + + it('should use fallback index when partNumber not found', () => { + const url = 'https://example.com/upload'; + const chunkNumber = extractChunkNumber(url, 3); + expect(chunkNumber).to.equal(3); + }); + + it('should handle URL object', () => { + 
const url = new URL('https://example.com/upload?partNumber=7'); + const chunkNumber = extractChunkNumber(url, 0); + expect(chunkNumber).to.equal(7); + }); + }); + + describe('createChunkUploadTasks', () => { + let mockUploadFunction; + let mockFile; + let mockSignal; + + beforeEach(() => { + mockUploadFunction = sinon.stub(); + mockFile = new File(['test data'], 'test.txt', { type: 'text/plain' }); + mockSignal = { aborted: false }; + }); + + it('should create upload tasks for single chunk', async () => { + const uploadUrls = ['https://example.com/upload']; + mockUploadFunction.resolves({ response: 'success', attempt: 1 }); + const result = await createChunkUploadTasks( + uploadUrls, + mockFile, + 1024, + mockUploadFunction, + mockSignal, + { assetId: 'test-asset' }, + ); + expect(result.failedChunks.size).to.equal(0); + expect(result.attemptMap.size).to.equal(1); + expect(mockUploadFunction.calledOnce).to.be.true; + }); + + it('should handle multiple chunks', async () => { + const largeData = 'x'.repeat(2048); + const largeFile = new File([largeData], 'large.txt', { type: 'text/plain' }); + const uploadUrls = ['https://example.com/upload1', 'https://example.com/upload2']; + mockUploadFunction.resolves({ response: 'success', attempt: 1 }); + const result = await createChunkUploadTasks( + uploadUrls, + largeFile, + 1024, + mockUploadFunction, + mockSignal, + { assetId: 'test-asset' }, + ); + expect(result.failedChunks.size).to.equal(0); + expect(result.attemptMap.size).to.equal(2); + expect(mockUploadFunction.calledTwice).to.be.true; + }); + + it('should handle upload failures', async () => { + const uploadUrls = ['https://example.com/upload']; + const uploadError = new Error('Upload failed'); + mockUploadFunction.rejects(uploadError); + const result = await createChunkUploadTasks( + uploadUrls, + mockFile, + 1024, + mockUploadFunction, + mockSignal, + { assetId: 'test-asset' }, + ); + expect(result.failedChunks.size).to.equal(1); + 
expect(result.attemptMap.size).to.equal(0); + }); + + it('should handle aborted signal', async () => { + const uploadUrls = ['https://example.com/upload']; + mockSignal.aborted = true; + const result = await createChunkUploadTasks( + uploadUrls, + mockFile, + 1024, + mockUploadFunction, + mockSignal, + { assetId: 'test-asset' }, + ); + expect(result.failedChunks.size).to.equal(0); + expect(result.attemptMap.size).to.equal(0); + expect(mockUploadFunction.called).to.be.false; + }); + }); + + describe('batchChunkUpload', () => { + let mockUploadFunction; + let mockSignal; + + beforeEach(() => { + mockUploadFunction = sinon.stub(); + mockSignal = { aborted: false }; + }); + + it('should handle batch upload with multiple files', async () => { + const fileData = [ + { assetId: 'asset1', blocksize: 1024, uploadUrls: ['https://upload.com/chunk1?partNumber=1', 'https://upload.com/chunk2?partNumber=2'] }, + { assetId: 'asset2', blocksize: 1024, uploadUrls: ['https://upload.com/chunk3?partNumber=1', 'https://upload.com/chunk4?partNumber=2'] }, + ]; + const blobDataArray = [ + new File(['x'.repeat(2048)], 'file1.txt'), + new File(['y'.repeat(2048)], 'file2.txt'), + ]; + const filetypeArray = ['text/plain', 'text/plain']; + mockUploadFunction.resolves({ response: 'success', attempt: 1 }); + const result = await batchChunkUpload( + fileData, + blobDataArray, + filetypeArray, + 2, + mockUploadFunction, + mockSignal, + {}, + ); + expect(result.failedFiles.size).to.equal(0); + expect(mockUploadFunction.callCount).to.equal(4); // Should be called 4 times + expect(result.attemptMap.size).to.equal(4); // 2 files * 2 chunks each + }); + + it('should handle file upload failures', async () => { + const fileData = [{ assetId: 'asset1', blocksize: 1024, uploadUrls: ['https://upload.com/chunk1?partNumber=1'] }]; + const blobDataArray = [new File(['test'], 'file.txt')]; + const filetypeArray = ['text/plain']; + const uploadError = new Error('Upload failed'); + 
mockUploadFunction.rejects(uploadError); + const result = await batchChunkUpload( + fileData, + blobDataArray, + filetypeArray, + 1, + mockUploadFunction, + mockSignal, + {}, + ); + expect(result.failedFiles.size).to.equal(1); + expect(mockUploadFunction.callCount).to.equal(1); // Should be called once before failing + }); + }); + + describe('calculateChunkProgress', () => { + it('should calculate progress correctly', () => { + const progress = calculateChunkProgress(5, 10, 20); + expect(progress).to.equal(60); // 20 + (5/10) * 80 = 20 + 40 = 60 + }); + + it('should not exceed 100%', () => { + const progress = calculateChunkProgress(10, 10, 90); + expect(progress).to.equal(100); + }); + + it('should handle zero completed chunks', () => { + const progress = calculateChunkProgress(0, 10, 0); + expect(progress).to.equal(0); + }); + }); + + describe('createChunkUploadErrorMessage', () => { + it('should create proper error message', () => { + const message = createChunkUploadErrorMessage('asset123', 1024, 'text/plain', 2); + expect(message).to.equal('One or more chunks failed to upload for asset: asset123, 1024 bytes, text/plain. 
Failed chunks: 2'); + }); + }); + + describe('createChunkAnalyticsData', () => { + it('should create analytics data with timestamp', () => { + const data = createChunkAnalyticsData('Test Event', { assetId: 'test' }); + expect(data.event).to.equal('Test Event'); + expect(data.assetId).to.equal('test'); + expect(data.timestamp).to.be.a('string'); + }); + }); + + describe('ChunkingUtils class', () => { + let chunkingUtils; + + beforeEach(() => { + chunkingUtils = new ChunkingUtils(); + }); + + it('should use default config', () => { + expect(chunkingUtils.config.blockSize).to.equal(DEFAULT_CHUNK_CONFIG.blockSize); + expect(chunkingUtils.config.maxRetries).to.equal(DEFAULT_CHUNK_CONFIG.maxRetries); + }); + + it('should allow custom config', () => { + const customConfig = { blockSize: 2048, maxRetries: 5 }; + const customUtils = new ChunkingUtils(customConfig); + expect(customUtils.config.blockSize).to.equal(2048); + expect(customUtils.config.maxRetries).to.equal(5); + }); + + it('should upload file with chunking', async () => { + const mockUploadFunction = sinon.stub().resolves({ response: 'success', attempt: 1 }); + const mockFile = new File(['test'], 'test.txt'); + const uploadUrls = ['https://example.com/upload']; + const result = await chunkingUtils.uploadFile({ + uploadUrls, + file: mockFile, + blockSize: 1024, + uploadFunction: mockUploadFunction, + signal: { aborted: false }, + }); + expect(result.failedChunks.size).to.equal(0); + expect(mockUploadFunction.calledOnce).to.be.true; + }); + }); +}); diff --git a/unitylibs/core/styles/splash-screen.css b/unitylibs/core/styles/splash-screen.css index da330a27e..133e48b9e 100644 --- a/unitylibs/core/styles/splash-screen.css +++ b/unitylibs/core/styles/splash-screen.css @@ -100,7 +100,7 @@ body > .splash-loader { background: #FA0F00; } -:root:has(.workflow-upload.product-photoshop) .progress-holder .spectrum-ProgressBar .spectrum-ProgressBar-fill { +:root:has(.workflow-upload.product-photoshop, 
.workflow-upload.product-lightroom) .progress-holder .spectrum-ProgressBar .spectrum-ProgressBar-fill { background: #1273E6; } diff --git a/unitylibs/core/workflow/workflow-upload/action-binder.js b/unitylibs/core/workflow/workflow-upload/action-binder.js index dbec6bf3f..01e0c0533 100644 --- a/unitylibs/core/workflow/workflow-upload/action-binder.js +++ b/unitylibs/core/workflow/workflow-upload/action-binder.js @@ -16,20 +16,18 @@ import { } from '../../../scripts/utils.js'; class ServiceHandler { - constructor(renderWidget = false, canvasArea = null, unityEl = null, workflowCfg = {}) { + constructor(renderWidget = false, canvasArea = null, unityEl = null, workflowCfg = {}, getAdditionalHeaders = null) { this.renderWidget = renderWidget; this.canvasArea = canvasArea; this.unityEl = unityEl; this.workflowCfg = workflowCfg; + this.getAdditionalHeaders = getAdditionalHeaders; } async postCallToService(api, options, errorCallbackOptions = {}, failOnError = true) { const postOpts = { method: 'POST', - headers: await getHeaders(unityConfig.apiKey, { - 'x-unity-product': this.workflowCfg?.productName, - 'x-unity-action': this.workflowCfg?.supportedFeatures?.values()?.next()?.value, - }), + headers: await getHeaders(unityConfig.apiKey, this.getAdditionalHeaders?.() || {}), ...options, }; try { @@ -74,28 +72,38 @@ export default class ActionBinder { this.actionMap = actionMap; this.canvasArea = canvasArea; this.errorToastEl = null; - this.psApiConfig = this.getPsApiConfig(); + this.apiConfig = this.getApiConfig(); this.serviceHandler = null; this.splashScreenEl = null; this.transitionScreen = null; this.LOADER_LIMIT = 95; - this.limits = workflowCfg.targetCfg.limits; + const commonLimits = workflowCfg.targetCfg.limits || {}; + const productLimits = workflowCfg.targetCfg[`limits-${workflowCfg.productName.toLowerCase()}`] || {}; + this.limits = { ...commonLimits, ...productLimits }; this.promiseStack = []; this.initActionListeners = this.initActionListeners.bind(this); - 
this.lanaOptions = { sampleRate: 100, tags: 'Unity-PS-Upload' }; + const productTag = workflowCfg.targetCfg[`productTag-${workflowCfg.productName.toLowerCase()}`] || 'UNKNOWN'; + this.lanaOptions = { sampleRate: 100, tags: `Unity-${productTag}-Upload` }; this.desktop = false; this.sendAnalyticsToSplunk = null; this.assetId = null; } - getPsApiConfig() { - unityConfig.psEndPoint = { + getApiConfig() { + unityConfig.endPoint = { assetUpload: `${unityConfig.apiEndPoint}/asset`, acmpCheck: `${unityConfig.apiEndPoint}/asset/finalize`, }; return unityConfig; } + getAdditionalHeaders() { + return { + 'x-unity-product': this.workflowCfg?.productName, + 'x-unity-action': this.workflowCfg?.supportedFeatures?.values()?.next()?.value, + }; + } + async handlePreloads() { const parr = []; if (this.workflowCfg.targetCfg.showSplashScreen) { @@ -155,7 +163,7 @@ export default class ActionBinder { const assetData = { assetId, targetProduct: this.workflowCfg.productName }; const optionsBody = { body: JSON.stringify(assetData) }; const res = await this.serviceHandler.postCallToService( - this.psApiConfig.psEndPoint.acmpCheck, + this.apiConfig.endPoint.acmpCheck, optionsBody, {}, false, @@ -166,22 +174,46 @@ export default class ActionBinder { } async uploadAsset(file) { + const assetDetails = { + targetProduct: this.workflowCfg.productName, + name: file.name, + size: file.size, + format: file.type, + }; try { const resJson = await this.serviceHandler.postCallToService( - this.psApiConfig.psEndPoint.assetUpload, - {}, + this.apiConfig.endPoint.assetUpload, + { body: JSON.stringify(assetDetails) }, { errorToastEl: this.errorToastEl, errorType: '.icon-error-request' }, ); - const { id, href } = resJson; + const { id, href, blocksize, uploadUrls } = resJson; this.assetId = id; this.logAnalyticsinSplunk('Asset Created|UnityWidget', { assetId: this.assetId }); - await this.uploadImgToUnity(href, id, file, file.type); - this.scanImgForSafety(this.assetId); + if (blocksize && uploadUrls && 
Array.isArray(uploadUrls)) { + const { default: UploadHandler } = await import(`${getUnityLibs()}/core/workflow/workflow-upload/upload-handler.js`); + const uploadHandler = new UploadHandler(this, this.serviceHandler); + const { failedChunks, attemptMap } = await uploadHandler.uploadChunksToUnity(uploadUrls, file, blocksize); + if (failedChunks && failedChunks.size > 0) { + const error = new Error(`One or more chunks failed to upload for asset: ${id}, ${file.size} bytes, ${file.type}`); + error.status = 504; + this.logAnalyticsinSplunk('Chunked Upload Failed|UnityWidget', { + assetId: this.assetId, + failedChunks: failedChunks.size, + maxRetryCount: Math.max(...Array.from(attemptMap.values())), + }); + throw error; + } + await uploadHandler.scanImgForSafetyWithRetry(this.assetId); + } else { + await this.uploadImgToUnity(href, id, file, file.type); + this.scanImgForSafety(this.assetId); + } } catch (e) { const { default: TransitionScreen } = await import(`${getUnityLibs()}/scripts/transition-screen.js`); this.transitionScreen = new TransitionScreen(this.transitionScreen.splashScreenEl, this.initActionListeners, this.LOADER_LIMIT, this.workflowCfg, this.desktop); await this.transitionScreen.showSplashScreen(); this.serviceHandler.showErrorToast({ errorToastEl: this.errorToastEl, errorType: '.icon-error-request' }, e, this.lanaOptions); + sendAnalyticsEvent(new CustomEvent('Upload or Transition error|UnityWidget')); this.logAnalyticsinSplunk('Upload server error|UnityWidget', { errorData: { code: 'error-request', @@ -214,8 +246,7 @@ export default class ActionBinder { const alertToast = createTag('div', { class: 'alert-toast' }, alertContent); const errholder = createTag('div', { class: 'alert-holder' }, alertToast); alertClose.addEventListener('click', (e) => { - e.preventDefault(); - e.stopPropagation(); + this.preventDefault(e); errholder.classList.remove('show'); element.style.pointerEvents = 'auto'; }); @@ -229,7 +260,7 @@ export default class ActionBinder { } } 
- async continueInApp(assetId) { + async continueInApp(assetId, file) { const cgen = this.unityEl.querySelector('.icon-cgen')?.nextSibling?.textContent?.trim(); const queryParams = {}; if (cgen) { @@ -240,16 +271,20 @@ export default class ActionBinder { } }); } + const payload = { + locale: getLocale(), + additionalQueryParams: queryParams, + workflow: this.workflowCfg.supportedFeatures.values().next().value, + type: file.type, + }; + if (this.workflowCfg.productName.toLowerCase() === 'photoshop') { + payload.referer = window.location.href; + payload.desktopDevice = this.desktop; + } const cOpts = { assetId, targetProduct: this.workflowCfg.productName, - payload: { - locale: getLocale(), - workflow: this.workflowCfg.supportedFeatures.values().next().value, - referer: window.location.href, - desktopDevice: this.desktop, - additionalQueryParams: queryParams, - }, + payload, }; try { const { default: TransitionScreen } = await import(`${getUnityLibs()}/scripts/transition-screen.js`); @@ -257,7 +292,7 @@ export default class ActionBinder { this.transitionScreen = new TransitionScreen(this.transitionScreen.splashScreenEl, this.initActionListeners, this.LOADER_LIMIT, this.workflowCfg, this.desktop); this.transitionScreen.updateProgressBar(this.transitionScreen.splashScreenEl, 100); const servicePromise = this.serviceHandler.postCallToService( - this.psApiConfig.connectorApiEndPoint, + this.apiConfig.connectorApiEndPoint, { body: JSON.stringify(cOpts) }, { errorToastEl: this.errorToastEl, errorType: '.icon-error-request' }, ); @@ -275,6 +310,7 @@ export default class ActionBinder { if (e.message === 'Operation termination requested.') return; await this.transitionScreen.showSplashScreen(); this.serviceHandler.showErrorToast({ errorToastEl: this.errorToastEl, errorType: '.icon-error-request' }, e, this.lanaOptions); + sendAnalyticsEvent(new CustomEvent('Upload or Transition error|UnityWidget')); this.logAnalyticsinSplunk('Upload server error|UnityWidget', { errorData: { 
code: 'error-request', @@ -352,7 +388,7 @@ export default class ActionBinder { this.transitionScreen = new TransitionScreen(this.transitionScreen.splashScreenEl, this.initActionListeners, this.LOADER_LIMIT, this.workflowCfg, this.desktop); await this.transitionScreen.showSplashScreen(true); await this.uploadAsset(file); - await this.continueInApp(this.assetId); + await this.continueInApp(this.assetId, file); } async loadTransitionScreen() { @@ -368,7 +404,7 @@ export default class ActionBinder { } } - async photoshopActionMaps(value, files) { + async executeActionMaps(value, files) { await this.loadTransitionScreen(); await this.handlePreloads(); if (!this.errorToastEl) this.errorToastEl = await this.createErrorToast(); @@ -391,21 +427,21 @@ export default class ActionBinder { this.canvasArea, this.unityEl, this.workflowCfg, + this.getAdditionalHeaders.bind(this), ); const actions = { A: (el, key) => { el.addEventListener('click', async (e) => { e.preventDefault(); - await this.photoshopActionMaps(actMap[key]); + await this.executeActionMaps(actMap[key]); }); }, DIV: (el, key) => { el.addEventListener('drop', async (e) => { sendAnalyticsEvent(new CustomEvent('Drag and drop|UnityWidget')); - e.preventDefault(); - e.stopPropagation(); + this.preventDefault(e); const files = this.extractFiles(e); - await this.photoshopActionMaps(actMap[key], files); + await this.executeActionMaps(actMap[key], files); }); el.addEventListener('click', () => { sendAnalyticsEvent(new CustomEvent('Click Drag and drop|UnityWidget')); @@ -423,7 +459,7 @@ export default class ActionBinder { }); el.addEventListener('change', async (e) => { const files = this.extractFiles(e); - await this.photoshopActionMaps(actMap[key], files); + await this.executeActionMaps(actMap[key], files); e.target.value = ''; }); }, @@ -452,6 +488,8 @@ export default class ActionBinder { window.location.reload(); } }); + window.addEventListener('dragover', this.preventDefault.bind(this), false); + 
window.addEventListener('drop', this.preventDefault.bind(this), false); } preventDefault(e) { diff --git a/unitylibs/core/workflow/workflow-upload/target-config.json b/unitylibs/core/workflow/workflow-upload/target-config.json index 6d28269b5..68c43cee0 100644 --- a/unitylibs/core/workflow/workflow-upload/target-config.json +++ b/unitylibs/core/workflow/workflow-upload/target-config.json @@ -4,16 +4,24 @@ "renderWidget": false, "source": ".drop-zone", "target": ".drop-zone", + "productTag-photoshop": "PS", + "productTag-lightroom": "LR", "limits": { "maxNumFiles": 1, "maxFileSize": 40000000, "maxHeignt": 8000, - "maxWidth": 8000, + "maxWidth": 8000 + }, + "limits-photoshop": { "allowedFileTypes": ["image/jpeg", "image/png", "image/jpg"] }, + "limits-lightroom": { + "allowedFileTypes": ["image/jpeg", "image/jpg"] + }, "showSplashScreen": true, "splashScreenConfig": { - "fragmentLink": "/cc-shared/fragments/products/photoshop/unity/splash-page/splashscreen", + "fragmentLink-photoshop": "/cc-shared/fragments/products/photoshop/unity/splash-page/splashscreen", + "fragmentLink-lightroom": "/creativecloud/animation/testdoc/unity/lightroom/fragments/splash-page/splashscreen", "splashScreenParent": "body" }, "actionMap": { diff --git a/unitylibs/core/workflow/workflow-upload/upload-handler.js b/unitylibs/core/workflow/workflow-upload/upload-handler.js new file mode 100644 index 000000000..eaa95f6ca --- /dev/null +++ b/unitylibs/core/workflow/workflow-upload/upload-handler.js @@ -0,0 +1,119 @@ +/* eslint-disable no-await-in-loop */ +/* eslint-disable class-methods-use-this */ +/* eslint-disable no-restricted-syntax */ +/* eslint-disable no-loop-func */ + +import { unityConfig, getApiCallOptions } from '../../../scripts/utils.js'; +import NetworkUtils from '../../../utils/NetworkUtils.js'; +import { createChunkUploadTasks, createChunkAnalyticsData } from '../../../utils/chunkingUtils.js'; + +export default class UploadHandler { + constructor(actionBinder, serviceHandler) { + 
this.actionBinder = actionBinder; + this.serviceHandler = serviceHandler; + this.networkUtils = new NetworkUtils(); + } + + logError(eventName, errorData, debugMessage) { + if (debugMessage) { + window.lana?.log(debugMessage, this.actionBinder.lanaOptions); + } + this.actionBinder.logAnalyticsinSplunk(eventName, { + ...errorData, + assetId: this.actionBinder.assetId, + }); + } + + async uploadFileToUnity(storageUrl, blobData, fileType, assetId, signal, chunkNumber = 'unknown') { + const uploadOptions = { + method: 'PUT', + headers: { 'Content-Type': fileType }, + body: blobData, + signal, + }; + const retryConfig = { + retryType: 'exponential', + retryParams: { + maxRetries: 4, + retryDelay: 1000, + }, + }; + const onSuccess = (response) => { + if (response.ok) { + return response; + } + const error = new Error(response.statusText || 'Upload request failed'); + error.status = response.status; + throw error; + }; + const onError = (error) => { + this.logError('Upload Chunk Error|UnityWidget', { + chunkNumber, + size: blobData.size, + fileType, + errorData: { + code: 'upload-chunk-error', + desc: `Exception during chunk ${chunkNumber} upload: ${error.message}`, + }, + }, `Message: Exception raised when uploading chunk to Unity, Error: ${error.message}, Asset ID: ${assetId}, ${blobData.size} bytes`); + throw error; + }; + return this.networkUtils.fetchFromServiceWithRetry(storageUrl, uploadOptions, retryConfig, onSuccess, onError); + } + + async uploadChunksToUnity(uploadUrls, file, blockSize, signal = null) { + const options = { + assetId: this.actionBinder.assetId, + fileType: file.type, + }; + const result = await createChunkUploadTasks( + uploadUrls, + file, + blockSize, + this.uploadFileToUnity.bind(this), + signal, + options, + ); + const { failedChunks, attemptMap } = result; + const totalChunks = Math.ceil(file.size / blockSize); + if (failedChunks.size === 0) { + this.actionBinder.logAnalyticsinSplunk( + 'Chunked Upload Completed|UnityWidget', + 
createChunkAnalyticsData('Chunked Upload Completed|UnityWidget', { + assetId: this.actionBinder.assetId, + chunkCount: totalChunks, + totalFileSize: file.size, + fileType: file.type, + }), + ); + } else { + this.actionBinder.logAnalyticsinSplunk( + 'Chunked Upload Failed|UnityWidget', + createChunkAnalyticsData('Chunked Upload Failed|UnityWidget', { + assetId: this.actionBinder.assetId, + error: 'One or more chunks failed', + failedChunks: failedChunks.size, + totalChunks, + }), + ); + } + return { failedChunks, attemptMap }; + } + + async scanImgForSafetyWithRetry(assetId) { + const assetData = { assetId, targetProduct: this.actionBinder.workflowCfg.productName }; + const postOpts = await getApiCallOptions('POST', unityConfig.apiKey, this.actionBinder.getAdditionalHeaders() || {}, { body: JSON.stringify(assetData) }); + const retryConfig = { + retryType: 'polling', + retryParams: { + maxRetryDelay: 300000, + defaultRetryDelay: 5000, + }, + }; + return this.networkUtils.fetchFromServiceWithRetry( + this.actionBinder.apiConfig.endPoint.acmpCheck, + postOpts, + retryConfig, + ); + } +} diff --git a/unitylibs/scripts/transition-screen.js b/unitylibs/scripts/transition-screen.js index 0fa6cce69..31ce5093b 100644 --- a/unitylibs/scripts/transition-screen.js +++ b/unitylibs/scripts/transition-screen.js @@ -73,7 +73,11 @@ export default class TransitionScreen { async loadSplashFragment() { if (!this.workflowCfg.targetCfg.showSplashScreen) return; - this.splashFragmentLink = localizeLink(`${window.location.origin}${this.workflowCfg.targetCfg.splashScreenConfig.fragmentLink}`); + const productName = this.workflowCfg.productName.toLowerCase(); + const fragmentLink = this.workflowCfg.name === 'workflow-upload' + ? 
this.workflowCfg.targetCfg.splashScreenConfig[`fragmentLink-${productName}`] + : this.workflowCfg.targetCfg.splashScreenConfig.fragmentLink; + this.splashFragmentLink = localizeLink(`${window.location.origin}${fragmentLink}`); const resp = await fetch(`${this.splashFragmentLink}.plain.html`); const html = await resp.text(); const doc = new DOMParser().parseFromString(html, 'text/html'); diff --git a/unitylibs/utils/chunkingUtils.js b/unitylibs/utils/chunkingUtils.js new file mode 100644 index 000000000..d6361c084 --- /dev/null +++ b/unitylibs/utils/chunkingUtils.js @@ -0,0 +1,178 @@ +/* eslint-disable no-await-in-loop */ + +export function createFileChunks(file, blockSize) { + const totalChunks = Math.ceil(file.size / blockSize); + const chunks = []; + for (let i = 0; i < totalChunks; i++) { + const start = i * blockSize; + const end = Math.min(start + blockSize, file.size); + const chunk = file.slice(start, end); + chunks.push(chunk); + } + return chunks; +} + +export function validateChunkUrls(uploadUrls, totalChunks) { + if (uploadUrls.length !== totalChunks) { + throw new Error(`Mismatch between number of chunks (${totalChunks}) and upload URLs (${uploadUrls.length})`); + } +} + +export function extractChunkNumber(url, fallbackIndex = 0) { + const urlString = typeof url === 'object' ? url.href : url; + const urlObj = new URL(urlString); + const chunkNumber = urlObj.searchParams.get('partNumber'); + return chunkNumber ? 
parseInt(chunkNumber, 10) : fallbackIndex; +} + +export async function createChunkUploadTasks(uploadUrls, file, blockSize, uploadFunction, signal = null, options = {}) { + const { assetId, fileType, onChunkComplete, onChunkError } = options; + const totalChunks = Math.ceil(file.size / blockSize); + validateChunkUrls(uploadUrls, totalChunks); + const failedChunks = new Set(); + const attemptMap = new Map(); + const uploadPromises = []; + for (let i = 0; i < totalChunks; i++) { + const start = i * blockSize; + const end = Math.min(start + blockSize, file.size); + const chunk = file.slice(start, end); + const url = uploadUrls[i]; + const uploadPromise = (async () => { + if (signal?.aborted) return null; + const urlString = typeof url === 'object' ? url.href : url; + const chunkNumber = extractChunkNumber(url, i); + try { + const result = await uploadFunction(urlString, chunk, fileType || file.type, assetId, signal, chunkNumber); + const attempt = result?.attempt || 1; + attemptMap.set(i, attempt); + if (onChunkComplete) onChunkComplete(i, chunkNumber, result); + return result; + } catch (err) { + const chunkInfo = { chunkIndex: i, chunkNumber }; + failedChunks.add(chunkInfo); + if (onChunkError) onChunkError(chunkInfo, err); + throw err; + } + })(); + uploadPromises.push(uploadPromise); + } + if (signal?.aborted) return { failedChunks, attemptMap }; + try { + await Promise.all(uploadPromises); + return { failedChunks, attemptMap }; + } catch (error) { + return { failedChunks, attemptMap }; + } +} + +export async function batchChunkUpload(fileData, blobDataArray, filetypeArray, batchSize, uploadFunction, signal = null, options = {}) { + const { onFileComplete, onFileError } = options; + const failedFiles = new Set(); + const attemptMap = new Map(); + const uploadTasks = []; + fileData.forEach((assetData, fileIndex) => { + if (signal?.aborted) return; + const blobData = blobDataArray[fileIndex]; + const fileType = filetypeArray[fileIndex]; + const totalChunks = 
Math.ceil(blobData.size / assetData.blocksize); + if (assetData.uploadUrls.length !== totalChunks) { + const error = new Error(`Mismatch between chunks and URLs for file ${fileIndex}`); + failedFiles.add({ fileIndex, error }); + return; + } + let fileUploadFailed = false; + let maxAttempts = 0; + const chunkTasks = Array.from({ length: totalChunks }, (_, i) => { + const start = i * assetData.blocksize; + const end = Math.min(start + assetData.blocksize, blobData.size); + const chunk = blobData.slice(start, end); + const url = assetData.uploadUrls[i]; + return async () => { + if (fileUploadFailed || signal?.aborted) return null; + const urlString = typeof url === 'object' ? url.href : url; + const chunkNumber = extractChunkNumber(url, i); + try { + const result = await uploadFunction(urlString, chunk, fileType, assetData.assetId, signal, chunkNumber); + const attempt = result?.attempt || 1; + if (attempt > maxAttempts) maxAttempts = attempt; + attemptMap.set(`${fileIndex}-${i}`, attempt); + return result; + } catch (err) { + fileUploadFailed = true; + failedFiles.add({ fileIndex, chunkIndex: i, error: err }); + throw err; + } + }; + }); + uploadTasks.push({ + fileIndex, + assetData, + chunkTasks, + maxAttempts: () => maxAttempts, + }); + }); + if (signal?.aborted) return { failedFiles, attemptMap }; + try { + for (let i = 0; i < uploadTasks.length; i += batchSize) { + const batch = uploadTasks.slice(i, i + batchSize); + const batchPromises = batch.map(async (task) => { + try { + await Promise.all(task.chunkTasks.map((chunkTask) => chunkTask())); + if (onFileComplete) onFileComplete(task.fileIndex, task.assetData); + } catch (error) { + if (onFileError) onFileError(task.fileIndex, error); + throw error; + } + }); + await Promise.all(batchPromises); + } + + return { failedFiles, attemptMap }; + } catch (error) { + return { failedFiles, attemptMap }; + } +} + +export function calculateChunkProgress(completedChunks, totalChunks, baseProgress = 0) { + const chunkProgress 
= (completedChunks / totalChunks) * (100 - baseProgress); + return Math.min(baseProgress + chunkProgress, 100); +} + +export function createChunkUploadErrorMessage(assetId, fileSize, fileType, failedChunkCount) { + return `One or more chunks failed to upload for asset: ${assetId}, ${fileSize} bytes, ${fileType}. Failed chunks: ${failedChunkCount}`; +} + +export function createChunkAnalyticsData(eventName, data = {}) { + return { + event: eventName, + timestamp: new Date().toISOString(), + ...data, + }; +} + +export const DEFAULT_CHUNK_CONFIG = { + maxRetries: 3, + retryDelay: 1000, + batchSize: 5, +}; + +export class ChunkingUtils { + constructor(config = {}) { + this.config = { ...DEFAULT_CHUNK_CONFIG, ...config }; + } + + // eslint-disable-next-line class-methods-use-this + async uploadFile(params) { + const { + uploadUrls, file, blockSize, uploadFunction, signal, options = {}, + } = params; + return createChunkUploadTasks(uploadUrls, file, blockSize, uploadFunction, signal, options); + } + + async batchUpload(params) { + const { + fileData, blobDataArray, filetypeArray, batchSize = this.config.batchSize, uploadFunction, signal, options = {}, + } = params; + return batchChunkUpload(fileData, blobDataArray, filetypeArray, batchSize, uploadFunction, signal, options); + } +}