feat(import): simpler import + state restore logic
The pipeline code has been removed in favor of a simpler
chain-of-responsibility approach, using `evaluateChain` and
`asyncSelect`.

`evaluateChain` is responsible for evaluating a data source against a
chain of import handlers until one of them returns a new data source.

To keep processing a data source the way the old pipeline code
supported nested executions, `evaluateChain` is invoked inside a loop
for every data source. `asyncSelect` drives the loop, selecting
`evaluateChain` promises as they complete.
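
A rough sketch of that loop, for illustration only: the `DataSource`,
`Handler`, `evaluateChain`, and `asyncSelect` shapes below are simplified
stand-ins for the ideas named above, not the repository's actual signatures.

```ts
// Sketch only: simplified stand-ins, not this repo's real API.
type DataSource = { uri: string; parent?: DataSource };

// A handler either produces a new data source to keep processing, or declines.
type Handler = (src: DataSource) => Promise<DataSource | undefined>;

// Chain of responsibility: the first handler that returns a new source wins.
// Returning null means no handler fired and `src` is fully processed.
async function evaluateChain(
  src: DataSource,
  handlers: Handler[]
): Promise<DataSource | null> {
  for (const handler of handlers) {
    const next = await handler(src);
    if (next) return next;
  }
  return null;
}

// Resolve whichever pending promise settles first, along with its index.
function asyncSelect<T>(promises: Promise<T>[]): Promise<[T, number]> {
  return Promise.race(
    promises.map(async (p, i): Promise<[T, number]> => [await p, i])
  );
}

// Drive the loop: whenever an evaluation finishes, either requeue the new
// data source it produced or record the original source as done.
async function importAll(sources: DataSource[], handlers: Handler[]) {
  let pending = sources.map((src) =>
    evaluateChain(src, handlers).then((next) => ({ src, next }))
  );
  const finished: DataSource[] = [];
  while (pending.length > 0) {
    const [{ src, next }, index] = await asyncSelect(pending);
    pending = pending.filter((_, i) => i !== index);
    if (next) {
      // the chain produced a new data source; feed it back into the chain
      pending.push(
        evaluateChain(next, handlers).then((n) => ({ src: next, next: n }))
      );
    } else {
      finished.push(src);
    }
  }
  return finished;
}
```

Feeding each chain result back into the pending pool is what replaces the
old pipeline's nested executions.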

The state schema is updated to operate generically on serialized data
sources. Instead of special-casing remote files, the serialized
DataSource type encodes this state.
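
As one way to picture that, a serialized DataSource could be a tagged union
along these lines (the tags and fields are assumptions for illustration, not
the exact schema introduced here):

```ts
// Hypothetical serialized shapes; the commit's real schema may differ.
type SerializedDataSource =
  // a user-provided file whose bytes live in the state file
  | { type: 'file'; fileId: number; fileType: string }
  // a remote file, encoded by the type rather than special-cased elsewhere
  | { type: 'uri'; uri: string; name: string }
  // a file extracted from an archive, pointing back at its parent source
  | { type: 'archive'; path: string; parent: SerializedDataSource };
```

Under this model, restore would rebuild DataSources from these entries and
feed them through the same import chain sketched above.
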
floryst committed Aug 8, 2024
1 parent 19e8af5 commit f59f77d
Showing 34 changed files with 863 additions and 1,319 deletions.
2 changes: 1 addition & 1 deletion src/actions/importDicomChunks.ts
@@ -23,5 +23,5 @@ export async function importDicomChunks(chunks: Chunk[]) {
     })
   );
 
-  return Object.keys(chunksByVolume);
+  return chunksByVolume;
 }
65 changes: 33 additions & 32 deletions src/actions/loadUserFiles.ts
@@ -10,23 +10,24 @@ import { useDatasetStore } from '@/src/store/datasets';
 import { useDICOMStore } from '@/src/store/datasets-dicom';
 import { useLayersStore } from '@/src/store/datasets-layers';
 import { useSegmentGroupStore } from '@/src/store/segmentGroups';
-import { wrapInArray, nonNullable } from '@/src/utils';
+import { wrapInArray, nonNullable, partition } from '@/src/utils';
 import { basename } from '@/src/utils/path';
 import { parseUrl } from '@/src/utils/url';
 import { logError } from '@/src/utils/loggers';
-import { PipelineResultSuccess, partitionResults } from '@/src/core/pipeline';
 import {
-  ImportDataSourcesResult,
   importDataSources,
   toDataSelection,
 } from '@/src/io/import/importDataSources';
 import {
+  ErrorResult,
   ImportResult,
   LoadableResult,
-  VolumeResult,
+  LoadableVolumeResult,
   isLoadableResult,
   isVolumeResult,
+  ImportDataSourcesResult,
 } from '@/src/io/import/common';
+import { isDicomImage } from '@/src/utils/dataSelection';
 
 // higher value priority is preferred for picking a primary selection
 const BASE_MODALITY_TYPES = {
@@ -38,8 +38,8 @@ const BASE_MODALITY_TYPES = {
 
 function findBaseDicom(loadableDataSources: Array<LoadableResult>) {
   // find dicom dataset for primary selection if available
-  const dicoms = loadableDataSources.filter(
-    ({ dataType }) => dataType === 'dicom'
+  const dicoms = loadableDataSources.filter(({ dataID }) =>
+    isDicomImage(dataID)
   );
   // prefer some modalities as base
   const dicomStore = useDICOMStore();
@@ -97,19 +98,15 @@ function findBaseImage(
 }
 
 // returns image and dicom sources, no config files
-function filterLoadableDataSources(
-  succeeded: Array<PipelineResultSuccess<ImportResult>>
-) {
-  return succeeded.flatMap((result) => {
-    return result.data.filter(isLoadableResult);
-  });
+function filterLoadableDataSources(succeeded: Array<ImportResult>) {
+  return succeeded.filter(isLoadableResult);
 }
 
 // Returns list of dataSources with file names where the name has the extension argument
 // and the start of the file name matches the primary file name.
 function filterMatchingNames(
-  primaryDataSource: VolumeResult,
-  succeeded: Array<PipelineResultSuccess<ImportResult>>,
+  primaryDataSource: LoadableVolumeResult,
+  succeeded: Array<ImportResult>,
   extension: string
 ) {
   const primaryName = getDataSourceName(primaryDataSource.dataSource);
@@ -137,7 +134,7 @@ function getStudyUID(volumeID: string) {
 }
 
 function findBaseDataSource(
-  succeeded: Array<PipelineResultSuccess<ImportResult>>,
+  succeeded: Array<ImportResult>,
   segmentGroupExtension: string
 ) {
   const loadableDataSources = filterLoadableDataSources(succeeded);
@@ -151,24 +148,24 @@
 
 function filterOtherVolumesInStudy(
   volumeID: string,
-  succeeded: Array<PipelineResultSuccess<ImportResult>>
+  succeeded: Array<ImportResult>
 ) {
   const targetStudyUID = getStudyUID(volumeID);
   const dicomDataSources = filterLoadableDataSources(succeeded).filter(
-    ({ dataType }) => dataType === 'dicom'
+    ({ dataID }) => isDicomImage(dataID)
   );
   return dicomDataSources.filter((ds) => {
     const sourceStudyUID = getStudyUID(ds.dataID);
     return sourceStudyUID === targetStudyUID && ds.dataID !== volumeID;
-  }) as Array<VolumeResult>;
+  }) as Array<LoadableVolumeResult>;
 }
 
 // Layers a DICOM PET on a CT if found
 function loadLayers(
-  primaryDataSource: VolumeResult,
-  succeeded: Array<PipelineResultSuccess<ImportResult>>
+  primaryDataSource: LoadableVolumeResult,
+  succeeded: Array<ImportResult>
 ) {
-  if (primaryDataSource.dataType !== 'dicom') return;
+  if (!isDicomImage(primaryDataSource.dataID)) return;
   const otherVolumesInStudy = filterOtherVolumesInStudy(
     primaryDataSource.dataID,
     succeeded
@@ -194,8 +191,8 @@ function loadLayers(
 // - DICOM SEG modalities with matching StudyUIDs.
 // - DataSources that have a name like foo.segmentation.bar and the primary DataSource is named foo.baz
 function loadSegmentations(
-  primaryDataSource: VolumeResult,
-  succeeded: Array<PipelineResultSuccess<ImportResult>>,
+  primaryDataSource: LoadableVolumeResult,
+  succeeded: Array<ImportResult>,
   segmentGroupExtension: string
 ) {
   const matchingNames = filterMatchingNames(
@@ -233,13 +230,19 @@ function loadDataSources(sources: DataSource[]) {
 
   let results: ImportDataSourcesResult[];
   try {
-    results = await importDataSources(sources);
+    results = (await importDataSources(sources)).filter((result) =>
+      // only look at data and error results
+      ['data', 'error'].includes(result.type)
+    );
   } catch (error) {
     loadDataStore.setError(error as Error);
     return;
   }
 
-  const [succeeded, errored] = partitionResults(results);
+  const [succeeded, errored] = partition(
+    (result) => result.type !== 'error',
+    results
+  );
 
   if (!dataStore.primarySelection && succeeded.length) {
     const primaryDataSource = findBaseDataSource(
@@ -260,14 +263,12 @@ function loadDataSources(sources: DataSource[]) {
   }
 
   if (errored.length) {
-    const errorMessages = errored.map((errResult) => {
-      // pick first error
-      const [firstError] = errResult.errors;
-      // pick innermost dataset that errored
-      const name = getDataSourceName(firstError.inputDataStackTrace[0]);
+    const errorMessages = (errored as ErrorResult[]).map((errResult) => {
+      const { dataSource, error } = errResult;
+      const name = getDataSourceName(dataSource);
       // log error for debugging
-      logError(firstError.cause);
-      return `- ${name}: ${firstError.message}`;
+      logError(error);
+      return `- ${name}: ${error.message}`;
     });
     const failedError = new Error(
       `These files failed to load:\n${errorMessages.join('\n')}`
4 changes: 2 additions & 2 deletions src/components/SampleDataBrowser.vue
@@ -96,8 +96,8 @@ export default defineComponent({
       if (!loadResult) {
         throw new Error('Did not receive a load result');
       }
-      if (!loadResult.ok) {
-        throw loadResult.errors[0].cause;
+      if (loadResult.type === 'error') {
+        throw loadResult.error;
       }
       const selection = convertSuccessResultToDataSelection(loadResult);