From db0b26aaed9ea7a231cd769daae58293ad0b4fbe Mon Sep 17 00:00:00 2001
From: Adithyan Dinesh
Date: Fri, 20 Sep 2024 14:59:12 +0530
Subject: [PATCH 1/5] Supports relative filepaths and added FileOffsets to the instance metadata

---
 .../src/DicomJSONDataSource/index.js | 68 ++++++++++++++++---
 1 file changed, 57 insertions(+), 11 deletions(-)

diff --git a/extensions/ohif-gradienthealth-extension/src/DicomJSONDataSource/index.js b/extensions/ohif-gradienthealth-extension/src/DicomJSONDataSource/index.js
index e780fc6..a5dda4c 100644
--- a/extensions/ohif-gradienthealth-extension/src/DicomJSONDataSource/index.js
+++ b/extensions/ohif-gradienthealth-extension/src/DicomJSONDataSource/index.js
@@ -37,13 +37,23 @@ const getMetaDataByURL = url => {
   return _store.urls.find(metaData => metaData.url === url);
 };
 
-const getInstanceUrl = (url, prefix) => {
-  let modifiedUrl = prefix
-    ? url.replace(
+const getInstanceUrl = (url, prefix, bucket, bucketPrefix) => {
+  let modifiedUrl = url;
+
+  const schemaPresent = !!url.match(/^(dicomweb:|dicomzip:|wadouri:)/)
+  if (!schemaPresent) {
+    const filePath = url.split('studies/')[1];
+    modifiedUrl = `dicomweb:https://storage.googleapis.com/${bucket}/${
+      bucketPrefix ? bucketPrefix + '/' : ''
+    }studies/${filePath}`;
+  }
+
+  modifiedUrl = prefix
+    ? modifiedUrl.replace(
         'https://storage.googleapis.com',
         `https://storage.googleapis.com/${prefix}`
       )
-    : url;
+    : modifiedUrl;
 
   const dicomwebRegex = /^dicomweb:/
   modifiedUrl = modifiedUrl.includes(":zip//")
@@ -53,16 +63,40 @@ const getInstanceUrl = (url, prefix) => {
   return modifiedUrl;
 }
 
+const mergeInstanceProperties = (instance) => {
+  return {
+    ...instance.metadata,
+    ...(instance.headers && {
+      FileOffsets: {
+        startByte: instance.headers.start_byte,
+        endByte: instance.headers.end_byte,
+      },
+    }),
+  };
+};
+
 const getProperty = (serieMetadata, property) => {
   return (
     serieMetadata[property] || serieMetadata.instances[0].metadata[property]
   );
 };
 
-const getMetadataFromRows = (rows, prefix, seriesuidArray) => {
+const getMetadataFromRows = (data, prefix, seriesuidArray) => {
+  const rows = data.flatMap(({ metadata }) => metadata);
+  const bucketMap = data.reduce(
+    (dataMap, { bucket, bucketPrefix, metadata }) => {
+      metadata.forEach(({ instances }) =>
+        instances.forEach(({ url, uri }) => {
+          dataMap[url || uri] = { bucket, bucketPrefix };
+        })
+      );
+      return dataMap;
+    },
+    {}
+  );
   // TODO: bq should not have dups
   let filteredRows = rows.map(row => {
-    row.instances = _.uniqBy(row.instances, (x)=>x.url)
+    row.instances = _.uniqBy(row.instances, (x) => x.url || x.uri);
     return row
   });
 
@@ -99,9 +133,15 @@ const getMetadataFromRows = (rows, prefix, seriesuidArray) => {
           ? 0
           : parseInt(row['NumInstances']),
         instances: row['instances'].map(instance => {
+          const url = instance.url || instance.uri;
           return {
-            metadata: instance.metadata,
-            url: getInstanceUrl(instance.url, prefix),
+            metadata: mergeInstanceProperties(instance),
+            url: getInstanceUrl(
+              url,
+              prefix,
+              bucketMap[url].bucket,
+              bucketMap[url].bucketPrefix
+            ),
           };
         }),
       };
@@ -381,14 +421,20 @@ function createDicomJSONApi(dicomJsonConfig, servicesManager) {
     const studyMetadata = [];
 
     for (let i = 0; i < buckets.length; i++) {
+      const bucket = buckets[i],
+        bucketPrefix = query.get('bucket-prefix') || 'dicomweb';
       const metadataPerBucket = await filesFromStudyInstanceUID({
-        bucketName: buckets[i],
-        prefix: query.get('bucket-prefix') || 'dicomweb',
+        bucketName: bucket,
+        prefix: bucketPrefix,
         studyuids: query.getAll('StudyInstanceUID'),
        headers: UserAuthenticationService.getAuthorizationHeader(),
       });
 
-      studyMetadata.push(...metadataPerBucket);
+      studyMetadata.push({
+        bucket,
+        bucketPrefix,
+        metadata: metadataPerBucket.flatMap(e=>e),
+      });
     }
 
     const data = getMetadataFromRows(

From 6b83cff3dd3aa45909f16aeca1d3dc66d4186f49 Mon Sep 17 00:00:00 2001
From: Adithyan Dinesh
Date: Tue, 1 Oct 2024 18:07:28 +0530
Subject: [PATCH 2/5] Changed the usage of ExtendedOffsetTable to CustomOffsetTable and changed the name of the metadata JSON to metadata.json

---
 .../src/DicomJSONDataSource/index.js | 71 +++++++++++++------
 1 file changed, 48 insertions(+), 23 deletions(-)

diff --git a/extensions/ohif-gradienthealth-extension/src/DicomJSONDataSource/index.js b/extensions/ohif-gradienthealth-extension/src/DicomJSONDataSource/index.js
index a5dda4c..5e4399a 100644
--- a/extensions/ohif-gradienthealth-extension/src/DicomJSONDataSource/index.js
+++ b/extensions/ohif-gradienthealth-extension/src/DicomJSONDataSource/index.js
@@ -12,13 +12,18 @@ import getImageId from '../DicomWebDataSource/utils/getImageId';
 import _ from 'lodash';
 
 const metadataProvider = classes.MetadataProvider;
-const { datasetToBlob } = dcmjs.data;
+const { datasetToBlob, DicomMetaDictionary } = dcmjs.data;
 
 const mappings = {
   studyInstanceUid: 'StudyInstanceUID',
   patientId: 'PatientID',
 };
 
+const GH_CUSTOM_TAGS = {
+  CustomOffsetTable: '60011002',
+  CustomOffsetTableLengths: '60011003',
+};
+
 let _store = {
   urls: [],
   studyInstanceUIDMap: new Map(), // map of urls to array of study instance UIDs
@@ -63,15 +68,25 @@ const getInstanceUrl = (url, prefix, bucket, bucketPrefix) => {
   return modifiedUrl;
 }
 
+const naturalizeMetadata = (metadata) => {
+  return {
+    ...DicomMetaDictionary.naturalizeDataset(metadata),
+    CustomOffsetTable: metadata[GH_CUSTOM_TAGS.CustomOffsetTable]?.Value,
+    CustomOffsetTableLengths:
+      metadata[GH_CUSTOM_TAGS.CustomOffsetTableLengths]?.Value,
+  };
+};
+
 const mergeInstanceProperties = (instance) => {
   return {
     ...instance.metadata,
-    ...(instance.headers && {
-      FileOffsets: {
-        startByte: instance.headers.start_byte,
-        endByte: instance.headers.end_byte,
-      },
-    }),
+    ...(instance.headers.start_byte &&
+      instance.headers.end_byte && {
+        FileOffsets: {
+          startByte: instance.headers.start_byte,
+          endByte: instance.headers.end_byte,
+        },
+      }),
   };
 };
 
@@ -82,7 +97,16 @@ const getProperty = (serieMetadata, property) => {
 };
 
 const getMetadataFromRows = (data, prefix, seriesuidArray) => {
-  const rows = data.flatMap(({ metadata }) => metadata);
+  const rows = data.flatMap(({ metadata }) =>
+    metadata.map((seriesMetadata) => ({
+      ...seriesMetadata,
+      instances: seriesMetadata.instances.map((instance) => ({
+        ...instance,
+        metadata: naturalizeMetadata(instance.metadata),
+        url: instance.url || instance.uri,
+      })),
+    }))
+  );
   const bucketMap = data.reduce(
     (dataMap, { bucket, bucketPrefix, metadata }) => {
       metadata.forEach(({ instances }) =>
@@ -96,7 +120,7 @@ const getMetadataFromRows = (data, prefix, seriesuidArray) => {
   );
   // TODO: bq should not have dups
   let filteredRows = rows.map(row => {
-    row.instances = _.uniqBy(row.instances, (x) => x.url || x.uri);
+    row.instances = _.uniqBy(row.instances, (x)=>x.url)
     return row
   });
 
@@ -108,7 +132,7 @@ const getMetadataFromRows = (data, prefix, seriesuidArray) => {
 
   const rowsByStudy = Object.values(
     filteredRows.reduce((rowsByStudy, row) => {
-      const studyuid = row['StudyInstanceUID'];
+      const studyuid = getProperty(row, 'StudyInstanceUID');
       if (!rowsByStudy[studyuid]) rowsByStudy[studyuid] = [];
       rowsByStudy[studyuid].push(row);
       return rowsByStudy;
@@ -122,18 +146,19 @@ const getMetadataFromRows = (data, prefix, seriesuidArray) => {
 
     const series = rows.map(row => {
       return {
-        SeriesInstanceUID: row['SeriesInstanceUID'],
-        Modality: row['Modality'],
-        SeriesDescription: row['SeriesDescription'] || 'No description',
-        StudyInstanceUID: row['StudyInstanceUID'],
-        SeriesNumber: row['SeriesNumber'],
-        SeriesDate: row['SeriesDate'],
-        SeriesTime: row['SeriesTime'],
-        NumInstances: isNaN(parseInt(row['NumInstances']))
+        SeriesInstanceUID: getProperty(row, 'SeriesInstanceUID'),
+        Modality: getProperty(row, 'Modality'),
+        SeriesDescription:
+          getProperty(row, 'SeriesDescription') || 'No description',
+        StudyInstanceUID: getProperty(row, 'StudyInstanceUID'),
+        SeriesNumber: getProperty(row, 'SeriesNumber'),
+        SeriesDate: getProperty(row, 'SeriesDate'),
+        SeriesTime: getProperty(row, 'SeriesTime'),
+        NumInstances: isNaN(parseInt(getProperty(row, 'NumInstances')))
           ? 0
-          : parseInt(row['NumInstances']),
-        instances: row['instances'].map(instance => {
-          const url = instance.url || instance.uri;
+          : parseInt(getProperty(row, 'NumInstances')),
+        instances: row['instances'].map((instance) => {
+          const url = instance.url;
           return {
             metadata: mergeInstanceProperties(instance),
             url: getInstanceUrl(
@@ -235,7 +260,7 @@ const filesFromStudyInstanceUID = async ({bucketName, prefix, studyuids, headers
     const files = res.items || [];
     const folders = res.prefixes || [];
     const series = folders.map(async (folderPath)=>{
-      const objectName = `${folderPath}metadata`;
+      const objectName = `${folderPath}metadata.json`;
       const apiUrl = `https://storage.googleapis.com/storage/v1/b/${bucketName}/o/${encodeURIComponent(objectName)}?alt=media`;
       const response = await fetch(apiUrl, { headers });
       return response.json()
@@ -353,7 +378,7 @@ const storeDicomSeg = async (naturalizedReport, headers, displaySetService) => {
   const compressedFile = pako.gzip(JSON.stringify(segSeries));
 
   return fetch(
-    `https://storage.googleapis.com/upload/storage/v1/b/${segBucket}/o?uploadType=media&name=${segPrefix}/studies/${StudyInstanceUID}/series/${SeriesInstanceUID}/metadata&contentEncoding=gzip`,
+    `https://storage.googleapis.com/upload/storage/v1/b/${segBucket}/o?uploadType=media&name=${segPrefix}/studies/${StudyInstanceUID}/series/${SeriesInstanceUID}/metadata.json&contentEncoding=gzip`,
     {
       method: 'POST',
       headers: {

From 73398a07738ae0905f55a6715aebe1befb937ad2 Mon Sep 17 00:00:00 2001
From: Adithyan Dinesh
Date: Mon, 21 Oct 2024 15:08:13 +0530
Subject: [PATCH 3/5] Added support for dicomtar scheme

---
 .../src/DicomJSONDataSource/index.js | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/extensions/ohif-gradienthealth-extension/src/DicomJSONDataSource/index.js b/extensions/ohif-gradienthealth-extension/src/DicomJSONDataSource/index.js
index 5e4399a..c596b56 100644
--- a/extensions/ohif-gradienthealth-extension/src/DicomJSONDataSource/index.js
+++ b/extensions/ohif-gradienthealth-extension/src/DicomJSONDataSource/index.js
@@ -45,7 +45,9 @@ const getMetaDataByURL = url => {
 const getInstanceUrl = (url, prefix, bucket, bucketPrefix) => {
   let modifiedUrl = url;
 
-  const schemaPresent = !!url.match(/^(dicomweb:|dicomzip:|wadouri:)/)
+  const schemaPresent = !!url.match(
+    /^(dicomweb:|dicomzip:|wadouri:|dicomtar:)/
+  );
   if (!schemaPresent) {
     const filePath = url.split('studies/')[1];
     modifiedUrl = `dicomweb:https://storage.googleapis.com/${bucket}/${
@@ -65,6 +67,10 @@ const getInstanceUrl = (url, prefix, bucket, bucketPrefix) => {
     ? modifiedUrl.replace(dicomwebRegex, 'dicomzip:')
     : modifiedUrl;
 
+  modifiedUrl = modifiedUrl.includes('.tar://')
+    ? modifiedUrl.replace(dicomwebRegex, 'dicomtar:')
+    : modifiedUrl;
+
   return modifiedUrl;
 }

From 459807d071c3a84c6146af1d75452cfe2e8004e9 Mon Sep 17 00:00:00 2001
From: Adithyan Dinesh
Date: Fri, 1 Nov 2024 12:24:49 +0530
Subject: [PATCH 4/5] Remove the previous study tar files when another study is loaded and handle Maximum size error

---
 .../CacheAPIService/CacheAPIService.ts        | 12 ++++-
 .../GoogleSheetsService.js                    | 48 ++++++++++++++-----
 .../src/services/utils.ts                     | 41 ++++++++++++++++
 3 files changed, 88 insertions(+), 13 deletions(-)

diff --git a/extensions/ohif-gradienthealth-extension/src/services/CacheAPIService/CacheAPIService.ts b/extensions/ohif-gradienthealth-extension/src/services/CacheAPIService/CacheAPIService.ts
index a2ff803..382dbc9 100644
--- a/extensions/ohif-gradienthealth-extension/src/services/CacheAPIService/CacheAPIService.ts
+++ b/extensions/ohif-gradienthealth-extension/src/services/CacheAPIService/CacheAPIService.ts
@@ -137,7 +137,7 @@ export default class CacheAPIService {
         (serie) => !segSOPClassUIDs.includes(serie.instances[0].SOPClassUID)
       )
       .flatMap((serie) => utils.getImageIdsFromInstances(serie.instances));
-    await Promise.all([
+    return await Promise.all([
       this.cacheImageIds(imageIds),
       this.cacheSegFiles(StudyInstanceUID),
     ]);
@@ -190,7 +190,15 @@ export default class CacheAPIService {
       );
     });
 
-    await Promise.all(promises)
+    return new Promise((resolve, reject) => {
+      const id = setInterval(async () => {
+        if (promises.length === imageIds.length) {
+          clearInterval(id);
+          await Promise.all(promises).catch((error) => reject(error));
+          resolve();
+        }
+      }, 1000);
+    });
   }
 
   public async cacheSegFiles(studyInstanceUID) {
diff --git a/extensions/ohif-gradienthealth-extension/src/services/GoogleSheetsService/GoogleSheetsService.js b/extensions/ohif-gradienthealth-extension/src/services/GoogleSheetsService/GoogleSheetsService.js
index 86f7cba..3e48788 100644
--- a/extensions/ohif-gradienthealth-extension/src/services/GoogleSheetsService/GoogleSheetsService.js
+++ b/extensions/ohif-gradienthealth-extension/src/services/GoogleSheetsService/GoogleSheetsService.js
@@ -2,6 +2,7 @@ import { eventTarget, Enums, cache } from '@cornerstonejs/core';
 import { utilities as csToolsUtils } from '@cornerstonejs/tools';
 import { DicomMetadataStore, pubSubServiceInterface, utils } from '@ohif/core';
 import { alphabet } from './utils';
+import { removePreviousFilesArraybuffer } from '../utils';
 
 const MAX_ROWS = 100000;
 
@@ -50,7 +51,8 @@ export default class GoogleSheetsService {
   }
 
   cacheNearbyStudyInstanceUIDs(id, bufferBack, bufferFront) {
-    const { CacheAPIService } = this.serviceManager.services;
+    const { CacheAPIService, uiNotificationService } =
+      this.serviceManager.services;
     const index = this.studyUIDToIndex[id];
     const min = index - bufferBack < 2 ? 2 : index - bufferBack;
     const max = index + bufferFront;
@@ -64,16 +66,35 @@ export default class GoogleSheetsService {
     const element = rowsToCache.splice(indexOfCurrentId, 1);
     rowsToCache.unshift(element[0]); // making the current studyid as first element
 
+    let hadMaxSizeError = false;
     rowsToCache.reduce((promise, row) => {
-      return promise.then(() => {
-        const url = row[urlIndex];
-        const params = new URLSearchParams('?' + url.split('?')[1]);
-        const StudyInstanceUID = getStudyInstanceUIDFromParams(params);
-        return CacheAPIService.cacheStudy(
-          StudyInstanceUID,
-          params.getAll('bucket')
-        );
-      });
+      return promise
+        .then(() => {
+          if (hadMaxSizeError) return Promise.resolve();
+
+          const url = row[urlIndex];
+          const params = new URLSearchParams('?' + url.split('?')[1]);
+          const StudyInstanceUID = getStudyInstanceUIDFromParams(params);
+          return CacheAPIService.cacheStudy(
+            StudyInstanceUID,
+            params.getAll('bucket')
+          );
+        })
+        .catch((error) => {
+          if (error.message?.includes('Maximum size')) {
+            hadMaxSizeError = true;
+            uiNotificationService.show({
+              title: 'Maximum size reached',
+              message:
+                error.message ||
+                'You have reached the maximum size for fetching files for this study.',
+              type: 'error',
+              duration: 10000,
+            });
+          }
+
+          return;
+        });
     }, Promise.resolve());
   }
 
@@ -320,6 +341,7 @@ export default class GoogleSheetsService {
     );
 
     const nextParams = new URLSearchParams(window.location.search);
+    const prevStudyUID = getStudyInstanceUIDFromParams(nextParams);
     if (nextParams.get('StudyInstanceUIDs'))
       nextParams.set('StudyInstanceUIDs', StudyInstanceUID);
     else {
@@ -330,6 +352,10 @@ export default class GoogleSheetsService {
       nextParams.append('bucket', bucket);
     });
 
+    if (prevStudyUID !== StudyInstanceUID) {
+      removePreviousFilesArraybuffer(prevStudyUID, this.serviceManager);
+    }
+
     const nextURL =
       window.location.href.split('?')[0] + '?' + nextParams.toString();
     window.history.replaceState({}, null, nextURL);
@@ -440,7 +466,7 @@ function loadSegFiles(serviceManager) {
           segmentationsOfLoadedImage[0].displaySetInstanceUID
         );
       });
-
+      unsubscribe?.();
     }
   };
diff --git a/extensions/ohif-gradienthealth-extension/src/services/utils.ts b/extensions/ohif-gradienthealth-extension/src/services/utils.ts
index 7ea1241..c40027e 100644
--- a/extensions/ohif-gradienthealth-extension/src/services/utils.ts
+++ b/extensions/ohif-gradienthealth-extension/src/services/utils.ts
@@ -1,3 +1,6 @@
+// @ts-ignore
+import dicomImageLoader from '@cornerstonejs/dicom-image-loader';
+
 export const getSegDisplaysetsOfReferencedImagesIds = (
   imageIds: string[] = [],
   displaySetService: any
@@ -11,3 +14,41 @@ export const getSegDisplaysetsOfReferencedImagesIds = (
     (ds) => ds.referencedSeriesInstanceUID === referencedSeriesInstanceUID
   );
 };
+
+export const removePreviousFilesArraybuffer = (
+  studyInstanceUID: string,
+  servicesManager: Record
+) => {
+  const { displaySetService } = servicesManager.services;
+  const studyDisplaySets = displaySetService.getDisplaySetsBy(
+    (ds) => ds.StudyInstanceUID === studyInstanceUID
+  );
+  const urls = studyDisplaySets.flatMap((displaySet) =>
+    displaySet.instances.reduce((imageIds, instance) => {
+      const handledUrl = instance.imageId.split(
+        /dicomweb:|dicomtar:|dicomzip:/
+      )[1];
+      return [...imageIds, ...(handledUrl ? [handledUrl] : [])];
+    }, [])
+  );
+
+  const fileUrls = new Set();
+  for (const url of urls) {
+    // Handles .tar files
+    const urlParts = url.split('.tar');
+
+    if (urlParts.length > 1) {
+      fileUrls.add(urlParts[0] + '.tar');
+    }
+  }
+
+  fileUrls.forEach((fileUrl) => {
+    if (fileUrl.includes('.tar')) {
+      try {
+        dicomImageLoader.wadors.tarFileManager.remove(fileUrl);
+      } catch (error) {
+        console.warn(error);
+      }
+    }
+  });
+};

From 9a9951faa7200508a64b8c77d929e7bf186858e2 Mon Sep 17 00:00:00 2001
From: Adithyan Dinesh
Date: Thu, 7 Nov 2024 16:22:24 +0530
Subject: [PATCH 5/5] Changing the request type of precaching next studies to PreCache

---
 .../src/services/CacheAPIService/CacheAPIService.ts     | 2 +-
 .../services/GoogleSheetsService/GoogleSheetsService.js | 6 ++++--
 .../ohif-gradienthealth-extension/src/services/utils.ts | 7 ++++---
 3 files changed, 9 insertions(+), 6 deletions(-)

diff --git a/extensions/ohif-gradienthealth-extension/src/services/CacheAPIService/CacheAPIService.ts b/extensions/ohif-gradienthealth-extension/src/services/CacheAPIService/CacheAPIService.ts
index 382dbc9..11dceab 100644
--- a/extensions/ohif-gradienthealth-extension/src/services/CacheAPIService/CacheAPIService.ts
+++ b/extensions/ohif-gradienthealth-extension/src/services/CacheAPIService/CacheAPIService.ts
@@ -172,7 +172,7 @@ export default class CacheAPIService {
     }
 
     const priority = 0;
-    const requestType = Enums.RequestType.Prefetch;
+    const requestType = Enums.RequestType.PreCache;
     const options = {
       preScale: {
         enabled: true,
diff --git a/extensions/ohif-gradienthealth-extension/src/services/GoogleSheetsService/GoogleSheetsService.js b/extensions/ohif-gradienthealth-extension/src/services/GoogleSheetsService/GoogleSheetsService.js
index 3e48788..33cd023 100644
--- a/extensions/ohif-gradienthealth-extension/src/services/GoogleSheetsService/GoogleSheetsService.js
+++ b/extensions/ohif-gradienthealth-extension/src/services/GoogleSheetsService/GoogleSheetsService.js
@@ -2,7 +2,7 @@ import { eventTarget, Enums, cache } from '@cornerstonejs/core';
 import { utilities as csToolsUtils } from '@cornerstonejs/tools';
 import { DicomMetadataStore, pubSubServiceInterface, utils } from '@ohif/core';
 import { alphabet } from './utils';
-import { removePreviousFilesArraybuffer } from '../utils';
+import { removeStudyFilesFromCache } from '../utils';
 
 const MAX_ROWS = 100000;
 
@@ -353,7 +353,9 @@ export default class GoogleSheetsService {
     });
 
     if (prevStudyUID !== StudyInstanceUID) {
-      removePreviousFilesArraybuffer(prevStudyUID, this.serviceManager);
+      // Remove the file arraybuffers (currently TAR files) of the previous
+      // study when switching to another study.
+      removeStudyFilesFromCache(prevStudyUID, this.serviceManager);
     }
 
     const nextURL =
diff --git a/extensions/ohif-gradienthealth-extension/src/services/utils.ts b/extensions/ohif-gradienthealth-extension/src/services/utils.ts
index c40027e..f682504 100644
--- a/extensions/ohif-gradienthealth-extension/src/services/utils.ts
+++ b/extensions/ohif-gradienthealth-extension/src/services/utils.ts
@@ -15,7 +15,7 @@ export const getSegDisplaysetsOfReferencedImagesIds = (
   );
 };
 
-export const removePreviousFilesArraybuffer = (
+export const removeStudyFilesFromCache = (
   studyInstanceUID: string,
   servicesManager: Record
 ) => {
@@ -25,10 +25,10 @@ export const removeStudyFilesFromCache = (
   );
   const urls = studyDisplaySets.flatMap((displaySet) =>
     displaySet.instances.reduce((imageIds, instance) => {
-      const handledUrl = instance.imageId.split(
+      const instanceUrl = instance.imageId.split(
         /dicomweb:|dicomtar:|dicomzip:/
       )[1];
-      return [...imageIds, ...(handledUrl ? [handledUrl] : [])];
+      return [...imageIds, ...(instanceUrl ? [instanceUrl] : [])];
     }, [])
   );
 
@@ -38,6 +38,7 @@ export const removeStudyFilesFromCache = (
     const urlParts = url.split('.tar');
 
     if (urlParts.length > 1) {
+      // Add the '.tar' back to the part since splitting on it removes it from the parts.
       fileUrls.add(urlParts[0] + '.tar');
     }
   }