diff --git a/packages/jbrowse-plugin-apollo/src/OntologyManager/OntologyStore/indexeddb-storage.ts b/packages/jbrowse-plugin-apollo/src/OntologyManager/OntologyStore/indexeddb-storage.ts
index 746ad349..d4795883 100644
--- a/packages/jbrowse-plugin-apollo/src/OntologyManager/OntologyStore/indexeddb-storage.ts
+++ b/packages/jbrowse-plugin-apollo/src/OntologyManager/OntologyStore/indexeddb-storage.ts
@@ -81,7 +81,8 @@ function serializeWords(foundWords: Iterable<[string, string]>): string[] {
 
 export async function loadOboGraphJson(this: OntologyStore, db: Database) {
   const startTime = Date.now()
-  this.options.update?.('Parsing JSON', 1)
+  let percent_progress = 1
+  this.options.update?.('Parsing JSON', percent_progress)
   // TODO: using file streaming along with an event-based json parser
   // instead of JSON.parse and .readFile could probably make this faster
   // and less memory intensive
@@ -93,7 +94,9 @@ export async function loadOboGraphJson(this: OntologyStore, db: Database) {
   } catch {
     throw new Error('Error in loading ontology')
   }
-  this.options.update?.('Parsing JSON complete', 10)
+
+  percent_progress += 5
+  this.options.update?.('Parsing JSON complete', percent_progress)
 
   const parseTime = Date.now()
 
@@ -116,25 +119,46 @@ export async function loadOboGraphJson(this: OntologyStore, db: Database) {
   const fullTextIndexPaths = getTextIndexFields
     .call(this)
     .map((def) => def.jsonPath)
-  for (const node of graph.nodes ?? []) {
-    if (isOntologyDBNode(node)) {
-      await nodeStore.add({
-        ...node,
-        fullTextWords: serializeWords(
-          getWords(node, fullTextIndexPaths, this.prefixes),
-        ),
-      })
+  if (graph.nodes) {
+    let last_progress = Math.round(percent_progress)
+    for (const [, node] of graph.nodes.entries()) {
+      percent_progress += 64 * (1 / graph.nodes.length)
+      if (
+        Math.round(percent_progress) != last_progress &&
+        percent_progress < 100
+      ) {
+        this.options.update?.('Processing nodes', percent_progress)
+        last_progress = Math.round(percent_progress)
+      }
+      if (isOntologyDBNode(node)) {
+        await nodeStore.add({
+          ...node,
+          fullTextWords: serializeWords(
+            getWords(node, fullTextIndexPaths, this.prefixes),
+          ),
+        })
+      }
     }
   }
 
   // load edges
   const edgeStore = tx.objectStore('edges')
-  for (const edge of graph.edges ?? []) {
-    if (isOntologyDBEdge(edge)) {
-      await edgeStore.add(edge)
+  if (graph.edges) {
+    let last_progress = Math.round(percent_progress)
+    for (const [, edge] of graph.edges.entries()) {
+      percent_progress += 30 * (1 / graph.edges.length)
+      if (
+        Math.round(percent_progress) != last_progress &&
+        percent_progress < 100
+      ) {
+        this.options.update?.('Processing edges', percent_progress)
+        last_progress = Math.round(percent_progress)
+      }
+      if (isOntologyDBEdge(edge)) {
+        await edgeStore.add(edge)
+      }
     }
   }
-
   await tx.done
 
   // record some metadata about this ontology and load operation
diff --git a/packages/jbrowse-plugin-apollo/src/session/ClientDataStore.ts b/packages/jbrowse-plugin-apollo/src/session/ClientDataStore.ts
index ba0719b9..3fb2ab50 100644
--- a/packages/jbrowse-plugin-apollo/src/session/ClientDataStore.ts
+++ b/packages/jbrowse-plugin-apollo/src/session/ClientDataStore.ts
@@ -165,7 +165,6 @@ export function clientDataStoreFactory(
         ) as TextIndexFieldDefinition[],
       ]
       if (!ontologyManager.findOntology(name)) {
-        // eslint-disable-next-line no-inner-declarations
         const session = getSession(
           self,
         ) as unknown as ApolloSessionModel
@@ -181,7 +180,7 @@ export function clientDataStoreFactory(
            jobsManager.abortJob(job.name)
          },
        }
-        const update = (message: string, progress: number) => {
+        const update = (message: string, progress: number): void => {
          if (progress === 0) {
            jobsManager.runJob(job)
            return
@@ -191,6 +190,7 @@ export function clientDataStoreFactory(
            return
          }
          jobsManager.update(jobName, message, progress)
+          return
        }
        ontologyManager.addOntology(name, version, source, {
          textIndexing: { indexFields },