Skip to content

Commit

Permalink
fix(load-data): avoid circular import
Browse files Browse the repository at this point in the history
Moves the loadDataSources logic into a composable,
keeping the store focused on state.
  • Loading branch information
PaulHax committed Apr 29, 2024
1 parent 72c920c commit 10097fe
Show file tree
Hide file tree
Showing 2 changed files with 264 additions and 248 deletions.
264 changes: 259 additions & 5 deletions src/actions/loadUserFiles.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,263 @@
import { fileToDataSource, uriToDataSource } from '@/src/io/import/dataSource';
import { UrlParams } from '@vueuse/core';
import {
fileToDataSource,
uriToDataSource,
DataSource,
getDataSourceName,
} from '@/src/io/import/dataSource';
import useLoadDataStore from '@/src/store/load-data';
import { wrapInArray } from '@/src/utils';
import { useDatasetStore } from '@/src/store/datasets';
import { useDICOMStore } from '@/src/store/datasets-dicom';
import { useLayersStore } from '@/src/store/datasets-layers';
import { useSegmentGroupStore } from '@/src/store/segmentGroups';
import { wrapInArray, nonNullable } from '@/src/utils';
import { basename } from '@/src/utils/path';
import { parseUrl } from '@/src/utils/url';
import { UrlParams } from '@vueuse/core';
import { logError } from '@/src/utils/loggers';
import { PipelineResultSuccess, partitionResults } from '@/src/core/pipeline';
import {
ImportDataSourcesResult,
importDataSources,
toDataSelection,
} from '@/src/io/import/importDataSources';
import {
ImportResult,
LoadableResult,
VolumeResult,
isLoadableResult,
isVolumeResult,
} from '@/src/io/import/common';

// higher value priority is preferred for picking a primary selection
// Only these modalities are considered as base/primary volumes; any other
// modality is excluded from primary-selection ranking (see findBaseDicom).
const BASE_MODALITY_TYPES = {
  CT: { priority: 3 },
  MR: { priority: 3 },
  US: { priority: 2 },
  DX: { priority: 1 },
} as const;

/**
 * Picks the preferred DICOM volume to use as the primary selection.
 *
 * Ranks DICOM sources by modality priority (see BASE_MODALITY_TYPES), then by
 * slice count (more slices preferred). Returns undefined when no DICOM source
 * has a preferred modality.
 *
 * @param loadableDataSources all successfully imported loadable sources
 * @returns the best DICOM LoadableResult, or undefined
 */
function findBaseDicom(loadableDataSources: Array<LoadableResult>) {
  // find dicom dataset for primary selection if available
  const dicoms = loadableDataSources.filter(
    ({ dataType }) => dataType === 'dicom'
  );
  // prefer some modalities as base
  const dicomStore = useDICOMStore();
  const baseDicomVolumes = dicoms
    .map((dicomSource) => {
      const volumeInfo = dicomStore.volumeInfo[dicomSource.dataID];
      const modality = volumeInfo?.Modality as keyof typeof BASE_MODALITY_TYPES;
      if (modality in BASE_MODALITY_TYPES)
        return {
          dicomSource,
          priority: BASE_MODALITY_TYPES[modality]?.priority,
          volumeInfo,
        };
      return undefined;
    })
    .filter(nonNullable)
    .sort(
      (
        { priority: a, volumeInfo: infoA },
        { priority: b, volumeInfo: infoB }
      ) => {
        // BUGFIX: sort descending (b - a) so the highest-priority modality
        // lands at index 0; the previous `a - b` sorted ascending and the
        // lowest-priority modality was picked, contradicting the
        // "higher value priority is preferred" contract.
        const priorityDiff = b - a;
        if (priorityDiff !== 0) return priorityDiff;
        // same modality, then more slices preferred
        if (!infoA.NumberOfSlices) return 1;
        if (!infoB.NumberOfSlices) return -1;
        return infoB.NumberOfSlices - infoA.NumberOfSlices;
      }
    );
  if (baseDicomVolumes.length) return baseDicomVolumes[0].dicomSource;
  return undefined;
}

// Flattens all successful pipeline results into one list and keeps only the
// loadable entries (images and DICOM); config-file results are dropped.
function filterLoadableDataSources(
  succeeded: Array<PipelineResultSuccess<ImportResult>>
) {
  return succeeded.flatMap(({ data }) => data).filter(isLoadableResult);
}

// Returns list of dataSources with file names where the name has the extension argument
// and the start of the file name matches the primary file name.
function filterMatchingNames(
primaryDataSource: VolumeResult,
succeeded: Array<PipelineResultSuccess<ImportResult>>,
extension: string
) {
const primaryName = getDataSourceName(primaryDataSource.dataSource);
if (!primaryName) return [];
const primaryNamePrefix = primaryName.split('.').slice(0, 1).join();
return filterLoadableDataSources(succeeded)
.filter((ds) => ds !== primaryDataSource)
.map((importResult) => ({
importResult,
name: getDataSourceName(importResult.dataSource),
}))
.filter(({ name }) => {
if (!name) return false;
const extensions = name.split('.').slice(1);
const hasExtension = extensions.includes(extension);
const nameMatchesPrimary = name.startsWith(primaryNamePrefix);
return hasExtension && nameMatchesPrimary;
})
.map(({ importResult }) => importResult);
}

function getStudyUID(volumeID: string) {
const dicomStore = useDICOMStore();
const studyKey = dicomStore.volumeStudy[volumeID];
return dicomStore.studyInfo[studyKey]?.StudyInstanceUID;
}

function findBaseDataSource(
succeeded: Array<PipelineResultSuccess<ImportResult>>
) {
const loadableDataSources = filterLoadableDataSources(succeeded);
const baseDicom = findBaseDicom(loadableDataSources);
return baseDicom ?? loadableDataSources[0];
}

// Collects the other DICOM volumes that belong to the same study as the
// given volume (the volume itself is excluded).
function filterOtherVolumesInStudy(
  volumeID: string,
  succeeded: Array<PipelineResultSuccess<ImportResult>>
) {
  const targetStudyUID = getStudyUID(volumeID);
  return filterLoadableDataSources(succeeded).filter((ds) => {
    if (ds.dataType !== 'dicom') return false;
    if (ds.dataID === volumeID) return false;
    return getStudyUID(ds.dataID) === targetStudyUID;
  }) as Array<VolumeResult>;
}

// Layers a DICOM PET on a CT if found
function loadLayers(
primaryDataSource: VolumeResult,
succeeded: Array<PipelineResultSuccess<ImportResult>>
) {
if (primaryDataSource.dataType !== 'dicom') return;
const otherVolumesInStudy = filterOtherVolumesInStudy(
primaryDataSource.dataID,
succeeded
);
const dicomStore = useDICOMStore();
const primaryModality =
dicomStore.volumeInfo[primaryDataSource.dataID].Modality;
if (primaryModality !== 'CT') return;
// Look for one PET volume to layer with CT. Only one as there are often multiple "White Balance" corrected PET volumes.
const toLayer = otherVolumesInStudy.find((ds) => {
const otherModality = dicomStore.volumeInfo[ds.dataID].Modality;
return otherModality === 'PT';
});
if (!toLayer) return;

const primarySelection = toDataSelection(primaryDataSource);
const layersStore = useLayersStore();
const layerSelection = toDataSelection(toLayer);
layersStore.addLayer(primarySelection, layerSelection);
}

// Loads other DataSources Segment Groups:
// - DICOM SEG modalities with matching StudyUIDs.
// - DataSources that have a name like foo.segmentation.bar and the primary DataSource is named foo.baz
function loadSegmentations(
primaryDataSource: VolumeResult,
succeeded: Array<PipelineResultSuccess<ImportResult>>,
matchNames: boolean
) {
const matchingNames = matchNames
? filterMatchingNames(primaryDataSource, succeeded, 'segmentation').filter(
isVolumeResult // filter out models
)
: [];

const dicomStore = useDICOMStore();
const otherSegVolumesInStudy = filterOtherVolumesInStudy(
primaryDataSource.dataID,
succeeded
).filter((ds) => {
const modality = dicomStore.volumeInfo[ds.dataID].Modality;
if (!modality) return false;
return modality.trim() === 'SEG';
});

const segmentGroupStore = useSegmentGroupStore();
[...otherSegVolumesInStudy, ...matchingNames].forEach((ds) => {
const loadable = toDataSelection(ds);
segmentGroupStore.convertImageToLabelmap(
loadable,
toDataSelection(primaryDataSource)
);
});
}

function loadDataSources(sources: DataSource[]) {
const load = async () => {
const loadDataStore = useLoadDataStore();
const dataStore = useDatasetStore();

let results: ImportDataSourcesResult[];
try {
results = await importDataSources(sources);
} catch (error) {
loadDataStore.setError(error as Error);
return;
}

const [succeeded, errored] = partitionResults(results);

if (!dataStore.primarySelection && succeeded.length) {
const primaryDataSource = findBaseDataSource(succeeded);
if (isVolumeResult(primaryDataSource)) {
const selection = toDataSelection(primaryDataSource);
dataStore.setPrimarySelection(selection);
loadLayers(primaryDataSource, succeeded);
loadSegmentations(
primaryDataSource,
succeeded,
loadDataStore.matchNames
);
} // then must be primaryDataSource.type === 'model'
}

if (errored.length) {
const errorMessages = errored.map((errResult) => {
// pick first error
const [firstError] = errResult.errors;
// pick innermost dataset that errored
const name = getDataSourceName(firstError.inputDataStackTrace[0]);
// log error for debugging
logError(firstError.cause);
return `- ${name}: ${firstError.message}`;
});
const failedError = new Error(
`These files failed to load:\n${errorMessages.join('\n')}`
);

loadDataStore.setError(failedError);
}
};

const wrapWithLoading = <T extends (...args: any[]) => void>(fn: T) => {
const { startLoading, stopLoading } = useLoadDataStore();
return async function wrapper(...args: any[]) {
try {
startLoading();
await fn(...args);
} finally {
stopLoading();
}
};
};

return wrapWithLoading(load)();
}

export function openFileDialog() {
return new Promise<File[]>((resolve) => {
Expand All @@ -21,7 +275,7 @@ export function openFileDialog() {

export async function loadFiles(files: File[]) {
const dataSources = files.map(fileToDataSource);
return useLoadDataStore().loadDataSources(dataSources);
return loadDataSources(dataSources);
}

export async function loadUserPromptedFiles() {
Expand All @@ -41,5 +295,5 @@ export async function loadUrls(params: UrlParams) {
)
);

return useLoadDataStore().loadDataSources(sources);
return loadDataSources(sources);
}
Loading

0 comments on commit 10097fe

Please sign in to comment.