From c4a763c5f494aaf7a6aa8f7b98228a0b97c2b166 Mon Sep 17 00:00:00 2001 From: Paul Elliott Date: Tue, 24 Oct 2023 21:21:13 -0400 Subject: [PATCH] fix(importDataSources): get error result of error in URL fetch --- src/components/App.vue | 5 +- src/core/__tests__/pipeline.spec.ts | 83 +++++---- src/core/pipeline.ts | 96 ++++++----- .../__tests__/importDataSources.spec.ts | 22 +++ src/io/import/importDataSources.ts | 162 ++++++++++-------- src/io/import/processors/prioritize.ts | 33 +--- src/utils/index.ts | 11 ++ tests/pageobjects/volview.page.ts | 2 - tests/specs/session-zip.e2e.ts | 6 +- 9 files changed, 239 insertions(+), 181 deletions(-) create mode 100644 src/io/import/__tests__/importDataSources.spec.ts diff --git a/src/components/App.vue b/src/components/App.vue index 03e20525c..bea4a24c0 100644 --- a/src/components/App.vue +++ b/src/components/App.vue @@ -273,8 +273,9 @@ import SaveSession from './SaveSession.vue'; import { useGlobalErrorHook } from '../composables/useGlobalErrorHook'; import { useWebGLWatchdog } from '../composables/useWebGLWatchdog'; import { useAppLoadingNotifications } from '../composables/useAppLoadingNotifications'; -import { partition, wrapInArray } from '../utils'; +import { wrapInArray } from '../utils'; import { useKeyboardShortcuts } from '../composables/useKeyboardShortcuts'; +import { partitionResults } from '../core/pipeline'; async function loadFiles( sources: DataSource[], @@ -290,7 +291,7 @@ async function loadFiles( return; } - const [succeeded, errored] = partition((result) => result.ok, results); + const [succeeded, errored] = partitionResults(results); if (!dataStore.primarySelection && succeeded.length) { const selection = convertSuccessResultToDataSelection(succeeded[0]); diff --git a/src/core/__tests__/pipeline.spec.ts b/src/core/__tests__/pipeline.spec.ts index 88fcb53af..7b4aaeaae 100644 --- a/src/core/__tests__/pipeline.spec.ts +++ b/src/core/__tests__/pipeline.spec.ts @@ -33,8 +33,9 @@ describe('Pipeline', () => { const result = await pipeline.execute(); expect(result.ok).to.be.true; - expect(result.errors).to.have.length(0); - expect(result.data).to.deep.equal([42]); + if (result.ok) { + expect(result.data).to.deep.equal([42]); + } expect(callOrder).to.deep.equal([1, 2, 3]); }); @@ -56,8 +57,9 @@ describe('Pipeline', () => { const result = await pipeline.execute(); expect(result.ok).to.be.true; - expect(result.errors).to.have.length(0); - expect(result.data).to.have.length(0); + if (result.ok) { + expect(result.data).to.have.length(0); + } expect(callOrder).to.deep.equal([1, 2, 3]); }); @@ -82,7 +84,6 @@ describe('Pipeline', () => { const result = await pipeline.execute(5); expect(result.ok).to.be.true; - expect(result.errors).to.have.length(0); expect(calc).to.equal(8); }); @@ -111,7 +112,6 @@ describe('Pipeline', () => { const result = await pipeline.execute(); expect(result.ok).to.be.true; - expect(result.errors).to.have.length(0); expect(callOrder).to.deep.equal([1, 2, 3]); }); @@ -127,8 +127,9 @@ describe('Pipeline', () => { const result = await pipeline.execute(); expect(result.ok).to.be.true; - expect(result.errors).to.have.length(0); - expect(result.data).to.deep.equal([]); + if (result.ok) { + expect(result.data).to.deep.equal([]); + } }); it('should detect double done()', async () => { @@ -142,7 +143,9 @@ describe('Pipeline', () => { const result = await pipeline.execute(); expect(result.ok).to.be.false; - expect(result.errors).to.have.length(1); + if (!result.ok) { + expect(result.errors).to.have.length(1); + } }); it('should 
handle top-level errors', async () => { @@ -156,8 +159,10 @@ describe('Pipeline', () => { const result = await pipeline.execute(); expect(result.ok).to.be.false; - expect(result.errors).to.have.length(1); - expect(result.errors[0].message).to.equal(error.message); + if (!result.ok) { + expect(result.errors).to.have.length(1); + expect(result.errors[0].message).to.equal(error.message); + } }); it('should handle top-level async errors', async () => { @@ -172,8 +177,10 @@ describe('Pipeline', () => { const result = await pipeline.execute(); expect(result.ok).to.be.false; - expect(result.errors).to.have.length(1); - expect(result.errors[0].message).to.equal(error.message); + if (!result.ok) { + expect(result.errors).to.have.length(1); + expect(result.errors[0].message).to.equal(error.message); + } }); it('should handle nested executions', async () => { @@ -186,11 +193,17 @@ describe('Pipeline', () => { return idx; }, async (idx, { execute, done }) => { - let fnum = (await execute(idx - 1)).data[0]; - if (idx > 1) { - fnum += (await execute(idx - 2)).data[0]; + const result = await execute(idx - 1); + if (result.ok) { + let fnum = result.data[0]; + if (idx > 1) { + const r = await execute(idx - 2); + if (r.ok) fnum += r.data[0]; + else throw new Error('error'); + } + return done(fnum); } - return done(fnum); + throw new Error('error'); }, ]; @@ -199,8 +212,10 @@ describe('Pipeline', () => { const result = await pipeline.execute(N); expect(result.ok).to.be.true; - // pick first result data, which is the top-level pipeline result - expect(result.data[0]).to.equal(8); + if (result.ok) { + // pick first result data, which is the top-level pipeline result + expect(result.data[0]).to.equal(8); + } }); it('should handle allow extra context overriding', async () => { @@ -219,7 +234,9 @@ describe('Pipeline', () => { const result = await pipeline.execute(0, 21); expect(result.ok).to.be.true; - expect(result.data).to.deep.equal([42]); + if (result.ok) { + expect(result.data).to.deep.equal([42]); + } }); it('should handle nested async errors', async () => { @@ -248,16 +265,20 @@ describe('Pipeline', () => { const result = await pipeline.execute(N); expect(result.ok).to.be.false; - // we expect there to be fib(N+1) errors - expect(result.errors).to.have.length(8); - - result.errors.forEach((err) => { - const { message, inputDataStackTrace } = err; - expect(message).to.equal(error.message); - // first object should be the input passed to the erroring handler - expect(inputDataStackTrace[0]).to.equal(0); - // last object should be the input passed to the pipeline. - expect(inputDataStackTrace.at(-1)).to.equal(N); - }); + if (!result.ok) { + // we expect there to be fib(N+1) errors + expect(result.errors).to.have.length(8); + + result.errors.forEach((err) => { + const { message, inputDataStackTrace } = err; + expect(message).to.equal(error.message); + // first object should be the input passed to the erroring handler + expect(inputDataStackTrace[0]).to.equal(0); + // last object should be the input passed to the pipeline. + expect(inputDataStackTrace.at(-1)).to.equal(N); + }); + } else { + expect.fail('Expected not ok result'); + } }); }); diff --git a/src/core/pipeline.ts b/src/core/pipeline.ts index a6cf2f007..9771ffb6e 100644 --- a/src/core/pipeline.ts +++ b/src/core/pipeline.ts @@ -1,6 +1,6 @@ import { Awaitable } from '@vueuse/core'; import { Maybe } from '@/src/types'; -import { defer, partition } from '../utils'; +import { defer, partitionByType } from '../utils'; /** * Represents a pipeline error. 
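// Hypothetical helper, illustrative only (not part of this patch): a minimal
// sketch of how the discriminated-union PipelineResult added in the next hunk
// is meant to be consumed. Checking `ok` (or running partitionResults over a
// batch) narrows the type, so `data` is only reachable on the success variant
// and `errors` only on the error variant, the same pattern the updated
// pipeline tests and App.vue rely on.
import { PipelineResult, partitionResults } from '@/src/core/pipeline';

function collectResults<T>(results: Array<PipelineResult<T>>) {
  const [succeeded, errored] = partitionResults(results);
  return {
    data: succeeded.flatMap((r) => r.data), // success variant exposes data only
    errors: errored.flatMap((r) => r.errors), // error variant exposes errors only
  };
}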
@@ -18,6 +18,16 @@ export interface PipelineError { cause: unknown; } +export interface PipelineResultSuccess { + ok: true; + data: ResultType[]; +} + +export interface PipelineResultError { + ok: false; + errors: PipelineError[]; +} + /** * Represents a pipeline's execution result. * @@ -26,11 +36,15 @@ export interface PipelineError { * The errors property holds any errors reported from (potentially nested) * executions. */ -export interface PipelineResult { - ok: boolean; - data: ResultType[]; - errors: PipelineError[]; -} +export type PipelineResult = + | PipelineResultSuccess + | PipelineResultError; + +export const partitionResults = (arr: Array>) => + partitionByType( + (r: PipelineResult): r is PipelineResultSuccess => r.ok, + arr + ); function createPipelineError( message: string, @@ -228,48 +242,50 @@ export default class Pipeline< invokeHandler(output as DataType, index + 1); }; - const result: PipelineResult = { - ok: true, - data: [], - errors: [], - }; - - try { - await invokeHandler(input, 0); - const ret = await execution.promise; - if (ret != null) { - result.data.push(ret); + const result: PipelineResult = await (async () => { + try { + await invokeHandler(input, 0); + const ret = await execution.promise; + if (ret != null) { + return { + ok: true as const, + data: [ret] as Array, + }; + } + return { ok: true as const, data: [] }; + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + return { + ok: false as const, + errors: [createPipelineError(message, input, err)], + }; } - } catch (err) { - const message = err instanceof Error ? err.message : String(err); - result.ok = false; - result.errors.push(createPipelineError(message, input, err)); - } + })(); const innerResults = await Promise.all(nestedExecutions); - const [succeededInner, failedInner] = partition( - (res) => res.ok, - innerResults - ); + const [succeededInner, failedInner] = partitionResults(innerResults); if (failedInner.length > 0) { - result.ok = false; - } - - succeededInner.forEach((okResult) => { - result.data.push(...okResult.data); - }); + const errors = failedInner.flatMap((failedResult) => { + const { errors: innerErrors } = failedResult; + // add current input to the input stack trace + innerErrors.forEach((err) => { + err.inputDataStackTrace.push(input); + }); + return innerErrors; + }); - failedInner.forEach((failedResult) => { - const { errors } = failedResult; + return { + ok: false as const, + errors, + }; + } - // add current input to the input stack trace - errors.forEach((err) => { - err.inputDataStackTrace.push(input); + if (result.ok) { + succeededInner.forEach((okResult) => { + result.data.push(...okResult.data); }); - - result.errors.push(...errors); - }); + } return result; } diff --git a/src/io/import/__tests__/importDataSources.spec.ts b/src/io/import/__tests__/importDataSources.spec.ts new file mode 100644 index 000000000..172b16354 --- /dev/null +++ b/src/io/import/__tests__/importDataSources.spec.ts @@ -0,0 +1,22 @@ +import { describe, it } from 'vitest'; +import { expect } from 'chai'; +import { DataSource } from '../dataSource'; +import { importDataSources } from '../importDataSources'; + +describe('importDataSources', () => { + it('should return error if illegal URI', async () => { + const input: DataSource = { + uriSrc: { + uri: '// asdf asdf', + name: 'image.jpg', + }, + }; + const output = await importDataSources([input]); + + const firstResult = output[0]; + expect(firstResult.ok).to.equals(false); + if (!firstResult.ok) { + 
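+      // `!firstResult.ok` doubles as a type guard: inside this branch the
+      // result is narrowed to the error variant, so `errors` is accessible
+      // and the malformed URI is expected to produce at least one error.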
expect(firstResult.errors.length).to.greaterThan(0); + } + }); +}); diff --git a/src/io/import/importDataSources.ts b/src/io/import/importDataSources.ts index 054af08b9..698bfd9af 100644 --- a/src/io/import/importDataSources.ts +++ b/src/io/import/importDataSources.ts @@ -1,4 +1,9 @@ -import Pipeline, { PipelineResult } from '@/src/core/pipeline'; +import Pipeline, { + PipelineResult, + PipelineResultError, + PipelineResultSuccess, + partitionResults, +} from '@/src/core/pipeline'; import { ImportHandler, ImportResult } from '@/src/io/import/common'; import { DataSource, DataSourceWithFile } from '@/src/io/import/dataSource'; import handleDicomFile from '@/src/io/import/processors/handleDicomFile'; @@ -15,39 +20,13 @@ import handleConfig from '@/src/io/import/processors/handleConfig'; import { useDICOMStore } from '@/src/store/datasets-dicom'; import { makeDICOMSelection, makeImageSelection } from '@/src/store/datasets'; import { + EARLIEST_PRIORITY, earliestPriority, - earliestPriorityForImportResult, prioritizeJSON, prioritizeStateFile, - priorityIdentity, PriorityResult, } from './processors/prioritize'; -export type ImportDataSourcesResult = PipelineResult; - -export function convertSuccessResultToDataSelection( - result: ImportDataSourcesResult -) { - if (result.data.length === 0) { - return null; - } - - const { dataID, dataType } = result.data[0]; - if (!dataID) { - return null; - } - - if (dataType === 'dicom') { - return makeDICOMSelection(dataID); - } - - if (dataType === 'image') { - return makeImageSelection(dataID); - } - - return null; -} - /** * Tries to turn a thrown object into a meaningful error string. * @param error @@ -99,28 +78,32 @@ export async function importDataSources(dataSources: DataSource[]) { dataSources.map((r) => prioritizer.execute(r, importContext)) ); - const importResultsToPriorityResults = new Pipeline([ - priorityIdentity, - earliestPriorityForImportResult, - ]); - const withPriority = await Promise.all( + const [fetchSuccess, errorOnDataFetches] = partitionResults( priorityOrImportResult - .flatMap((r) => r.data) - .map((r) => importResultsToPriorityResults.execute(r)) ); - // Group same priorities, lowest priority first - const priorityBuckets = withPriority + const withPriority = fetchSuccess .flatMap((r) => r.data) - .reduce((buckets, result) => { - const { priority = 0 } = result; - if (!buckets[priority]) { - // eslint-disable-next-line no-param-reassign - buckets[priority] = []; - } - buckets[priority].push(result.dataSource); - return buckets; - }, [] as Array>); + .map((importOrPriority) => { + return { + dataSource: importOrPriority.dataSource, + priority: + 'priority' in importOrPriority + ? 
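+            // PriorityResult entries carry an explicit `priority`; plain
+            // import results (no `priority` field) fall back to EARLIEST_PRIORITY.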
importOrPriority.priority + : EARLIEST_PRIORITY, + }; + }); + + // Group same priorities, lowest priority first + const priorityBuckets = withPriority.reduce((buckets, result) => { + const { priority = 0 } = result; + if (!buckets[priority]) { + // eslint-disable-next-line no-param-reassign + buckets[priority] = []; + } + buckets[priority].push(result.dataSource); + return buckets; + }, [] as Array>); const loaders = [ restoreStateFile, @@ -151,42 +134,75 @@ export async function importDataSources(dataSources: DataSource[]) { } if (!importContext.dicomDataSources.length) { - return results; + return [...errorOnDataFetches, ...results]; } // handle DICOM loading - const dicomDataSource: DataSource = { dicomSrc: { sources: importContext.dicomDataSources, }, }; - const dicomResult: PipelineResult = { - ok: true, - data: [], - errors: [], - }; + const dicomResult = await (async () => { + try { + const volumeKeys = await useDICOMStore().importFiles( + importContext.dicomDataSources + ); + return { + ok: true, + data: volumeKeys.map((key) => ({ + dataID: key, + dataType: 'dicom' as const, + dataSource: dicomDataSource, + })), + } as PipelineResultSuccess; + } catch (err) { + return { + ok: false, + errors: [ + { + message: toMeaningfulErrorString(err), + cause: err, + inputDataStackTrace: [dicomDataSource], + }, + ], + } as PipelineResultError; + } + })(); - try { - const volumeKeys = await useDICOMStore().importFiles( - importContext.dicomDataSources - ); - dicomResult.data.push( - ...volumeKeys.map((key) => ({ - dataID: key, - dataType: 'dicom' as const, - dataSource: dicomDataSource, - })) - ); - } catch (err) { - dicomResult.ok = false; - dicomResult.errors.push({ - message: toMeaningfulErrorString(err), - cause: err, - inputDataStackTrace: [dicomDataSource], - }); + return [ + ...errorOnDataFetches, + // remove all results that have no result data + ...results.filter((result) => !result.ok || result.data.length), + dicomResult, + ]; +} + +export type ImportDataSourcesResult = Awaited< + ReturnType +>[number]; + +export function convertSuccessResultToDataSelection( + result: ImportDataSourcesResult +) { + if (!result.ok) return null; + + if (result.data.length === 0) { + return null; } - // remove all results that have no result data - return [...results.filter((result) => result.data.length), dicomResult]; + const { dataID, dataType } = result.data[0]; + if (!dataID) { + return null; + } + + if (dataType === 'dicom') { + return makeDICOMSelection(dataID); + } + + if (dataType === 'image') { + return makeImageSelection(dataID); + } + + return null; } diff --git a/src/io/import/processors/prioritize.ts b/src/io/import/processors/prioritize.ts index 2ba9ce58f..2b8ee7893 100644 --- a/src/io/import/processors/prioritize.ts +++ b/src/io/import/processors/prioritize.ts @@ -1,9 +1,9 @@ import { isStateFile } from '@/src/io/state-file'; -import { ImportContext, ImportResult } from '@/src/io/import/common'; +import { ImportContext } from '@/src/io/import/common'; import { Handler } from '@/src/core/pipeline'; import { DataSource } from '../dataSource'; -const EARLIEST_PRIORITY = 0; +export const EARLIEST_PRIORITY = 0; const CONFIG_PRIORITY = 1; export interface PriorityResult { @@ -55,32 +55,3 @@ export const prioritizeStateFile: PriorityHandler = async ( } return dataSource; }; - -type ImportOrPriorityResultHandler = Handler< - ImportResult | PriorityResult, - PriorityResult ->; - -/** - * Passthrough PriorityResult - */ -export const priorityIdentity: ImportOrPriorityResultHandler = async ( - 
dataSource, - { done } -) => { - if ('priority' in dataSource) { - done(dataSource); - } - return dataSource; -}; - -/** - * Assigns first in line priority - */ -export const earliestPriorityForImportResult: ImportOrPriorityResultHandler = - async (src, { done }) => { - return done({ - dataSource: src.dataSource, - priority: EARLIEST_PRIORITY, - }); - }; diff --git a/src/utils/index.ts b/src/utils/index.ts index c1ac4e746..4c509e24c 100644 --- a/src/utils/index.ts +++ b/src/utils/index.ts @@ -136,6 +136,17 @@ export function partition( return partitioned; } +export function partitionByType( + guard: (x: T) => x is U, + arr: T[] +): [U[], Exclude[]] { + const ret: [U[], Exclude[]] = [[], []]; + arr.forEach((el) => + guard(el) ? ret[0].push(el) : ret[1].push(el as Exclude) + ); + return ret; +} + export const chunk = (arr: T[], size: number) => Array.from({ length: Math.ceil(arr.length / size) }, (_: any, i: number) => arr.slice(i * size, i * size + size) diff --git a/tests/pageobjects/volview.page.ts b/tests/pageobjects/volview.page.ts index 5e348fdeb..8cf5166b1 100644 --- a/tests/pageobjects/volview.page.ts +++ b/tests/pageobjects/volview.page.ts @@ -22,8 +22,6 @@ export const setValueVueInput = async ( await browser.keys([Key.ArrowRight, ...backspaces]); } await input.setValue(value); - - // await browser.keys([Key.Enter]); }; class VolViewPage extends Page { diff --git a/tests/specs/session-zip.e2e.ts b/tests/specs/session-zip.e2e.ts index f4d8ac8f5..46d644556 100644 --- a/tests/specs/session-zip.e2e.ts +++ b/tests/specs/session-zip.e2e.ts @@ -5,6 +5,8 @@ import { cleanuptotal } from 'wdio-cleanuptotal-service'; import { setValueVueInput, volViewPage } from '../pageobjects/volview.page'; import { TEMP_DIR } from '../../wdio.shared.conf'; +const SESSION_SAVE_TIMEOUT = 40000; + // from https://stackoverflow.com/a/47764403 function waitForFileExists(filePath: string, timeout: number) { return new Promise((resolve, reject) => { @@ -41,7 +43,7 @@ function waitForFileExists(filePath: string, timeout: number) { const saveSession = async () => { const sessionFileName = await volViewPage.saveSession(); const downloadedPath = path.join(TEMP_DIR, sessionFileName); - await waitForFileExists(downloadedPath, 5000); + await waitForFileExists(downloadedPath, SESSION_SAVE_TIMEOUT); return sessionFileName; }; @@ -80,7 +82,7 @@ describe('VolView config and deserialization', () => { const { manifest, session } = await saveGetManifest(); expect(manifest.tools.rectangles.tools.length).toEqual(1); - expect(manifest.tools.rectangles.tools?.[0].color).not.toEqual('green'); + expect(manifest.tools.rectangles.tools?.[0].color).not.toEqual(newColor); const config = { labels: {