From 696583f5258504fcac146fba699f4567ddde098e Mon Sep 17 00:00:00 2001 From: Christophe Diederichs Date: Thu, 9 Jan 2025 19:20:13 +0000 Subject: [PATCH 01/41] dependency is stored individually --- build.js | 3 +-- index.js | 1 + lib/tx.js | 12 ++++++------ spec/hyperschema/index.js | 8 ++++---- spec/hyperschema/schema.json | 2 +- 5 files changed, 13 insertions(+), 13 deletions(-) diff --git a/build.js b/build.js index 91e76ca..5483154 100644 --- a/build.js +++ b/build.js @@ -246,9 +246,8 @@ core.register({ }) core.register({ - name: 'dependencies', + name: 'dependency', compact: true, - array: true, fields: [{ name: 'dataPointer', type: 'uint', diff --git a/index.js b/index.js index 15551dc..548bab8 100644 --- a/index.js +++ b/index.js @@ -77,6 +77,7 @@ class HypercoreStorage { if (deps[i].length >= length) continue deps[i].length = length this.core.dependencies = deps.slice(0, i + 1) + return } throw new Error('Dependency not found') diff --git a/lib/tx.js b/lib/tx.js index 0a2f226..0410104 100644 --- a/lib/tx.js +++ b/lib/tx.js @@ -11,7 +11,7 @@ const CORE_AUTH = schema.getEncoding('@core/auth') const CORE_BATCHES = schema.getEncoding('@core/batches') const CORE_HEAD = schema.getEncoding('@core/head') const CORE_TREE_NODE = schema.getEncoding('@core/tree-node') -const CORE_DEPENDENCIES = schema.getEncoding('@core/dependencies') +const CORE_DEPENDENCY = schema.getEncoding('@core/dependency') const CORE_HINTS = schema.getEncoding('@core/hints') class CoreTX { @@ -34,12 +34,12 @@ class CoreTX { this.changes.push([core.head(this.core.dataPointer), encode(CORE_HEAD, head), null]) } - setDependencies (deps) { - this.changes.push([core.dependencies(this.core.dataPointer), encode(CORE_DEPENDENCIES, deps), null]) + setDependency (dep) { + this.changes.push([core.dependency(this.core.dataPointer), encode(CORE_DEPENDENCY, dep), null]) } setHints (hints) { - this.changes.push([core.dependencies(this.core.dataPointer), encode(CORE_HINTS, hints), null]) + this.changes.push([core.hints(this.core.dataPointer), encode(CORE_HINTS, hints), null]) } putBlock (index, data) { @@ -126,8 +126,8 @@ class CoreRX { return await decode(CORE_HEAD, await this.view.get(this.read, core.head(this.core.dataPointer))) } - async getDependencies () { - return await decode(CORE_DEPENDENCIES, await this.view.get(this.read, core.dependencies(this.core.dataPointer))) + async getDependency () { + return await decode(CORE_DEPENDENCY, await this.view.get(this.read, core.dependency(this.core.dataPointer))) } async getHints () { diff --git a/spec/hyperschema/index.js b/spec/hyperschema/index.js index 609cb4a..b56de44 100644 --- a/spec/hyperschema/index.js +++ b/spec/hyperschema/index.js @@ -421,8 +421,8 @@ const encoding14 = c.array({ } }) -// @core/dependencies -const encoding15 = c.array({ +// @core/dependency +const encoding15 = { preencode (state, m) { c.uint.preencode(state, m.dataPointer) c.uint.preencode(state, m.length) @@ -440,7 +440,7 @@ const encoding15 = c.array({ length: r1 } } -}) +} function setVersion (v) { version = v @@ -481,7 +481,7 @@ function getEncoding (name) { case '@core/head': return encoding12 case '@core/hints': return encoding13 case '@core/batches': return encoding14 - case '@core/dependencies': return encoding15 + case '@core/dependency': return encoding15 default: throw new Error('Encoder not found ' + name) } } diff --git a/spec/hyperschema/schema.json b/spec/hyperschema/schema.json index 295fa9c..e6e0f4b 100644 --- a/spec/hyperschema/schema.json +++ b/spec/hyperschema/schema.json @@ -353,7 +353,7 @@ 
] }, { - "name": "dependencies", + "name": "dependency", "namespace": "core", "compact": true, "flagsPosition": -1, From 66e996068d07a33f2421e45da87d027167302792 Mon Sep 17 00:00:00 2001 From: Christophe Diederichs Date: Thu, 9 Jan 2025 19:21:36 +0000 Subject: [PATCH 02/41] add missing methods and correct typos --- index.js | 16 ++++++++++++++-- lib/streams.js | 18 +++++++++--------- lib/tx.js | 12 ++++++++++++ 3 files changed, 35 insertions(+), 11 deletions(-) diff --git a/index.js b/index.js index 548bab8..4f594a7 100644 --- a/index.js +++ b/index.js @@ -125,7 +125,7 @@ class HypercoreStorage { dependencies: [] } - const batchRx = new CoreRX(this.core, this.db, this.view) + const batchRx = new CoreRX(core, this.db, this.view) const dependencyPromise = batchRx.getDependency() batchRx.tryFlush() @@ -224,6 +224,10 @@ class CorestoreStorage { this.flushing = null } + static isCoreStorage (db) { + return isCorestoreStorage(db) + } + static from (db) { if (isCorestoreStorage(db)) return db return new this(db) @@ -379,6 +383,11 @@ class CorestoreStorage { } async resume (discoveryKey) { + if (!discoveryKey) { + discoveryKey = await this.getDefaultKey() + if (!discoveryKey) return null + } + const rx = new CorestoreRX(this.db, EMPTY) const corePromise = rx.getCore(discoveryKey) @@ -417,7 +426,10 @@ class CorestoreStorage { let [core, head] = await Promise.all([corePromise, headPromise]) if (core) return this._resumeFromPointers(tx.view, core) - if (head === null) head = initStoreHead() + if (head === null) { + head = initStoreHead() + head.defaultKey = discoveryKey + } const corePointer = head.allocated.cores++ const dataPointer = head.allocated.datas++ diff --git a/lib/streams.js b/lib/streams.js index fb7fd1c..13820a0 100644 --- a/lib/streams.js +++ b/lib/streams.js @@ -32,22 +32,22 @@ function createAliasStream (db, updates, namespace) { return ite } -function createBlockStream (core, db, updates, start, end) { - return new BlockStream(core, db, updates, start, end) +function createBlockStream (ptr, db, updates, start, end, reverse) { + return new BlockStream(ptr, db, updates, start, end, reverse) } -function createBitfieldStream (core, db, updates, start, end) { - const s = core.bitfield(core.dataPointer, start, 0) - const e = core.bitfield(core.dataPointer, end === -1 ? Infinity : end, 0) +function createBitfieldStream (ptr, db, updates, start, end) { + const s = core.bitfield(ptr.dataPointer, start, 0) + const e = core.bitfield(ptr.dataPointer, end === -1 ? Infinity : end, 0) const ite = updates.iterator(db, s, e) ite._readableState.map = mapBitfield return ite } -function createUserDataStream (core, db, updates, start, end) { - const s = core.userData(core.dataPointer, start || '') - const e = end === null ? core.userDataEnd(core.dataPointer) : core.userData(core.dataPointer, end) +function createUserDataStream (ptr, db, updates, start, end) { + const s = core.userData(ptr.dataPointer, start || '') + const e = end === null ? 
core.userDataEnd(ptr.dataPointer) : core.userData(ptr.dataPointer, end) const ite = updates.iterator(db, s, e) ite._readableState.map = mapUserData @@ -57,7 +57,7 @@ function createUserDataStream (core, db, updates, start, end) { function mapBitfield (data) { const [index, type] = core.bitfieldIndexAndType(data.key) if (type !== 0) return null // ignore for now - return { index, value: data.value } + return { index, page: data.value } } function mapUserData (data) { diff --git a/lib/tx.js b/lib/tx.js index 0410104..2f32b45 100644 --- a/lib/tx.js +++ b/lib/tx.js @@ -66,6 +66,14 @@ class CoreTX { this.changes.push([core.bitfield(this.core.dataPointer, index, 0), null, null]) } + deleteBitfieldPageRange (start, end) { + this.changes.push([ + core.bitfield(this.core.dataPointer, start, 0), + null, + core.bitfield(this.core.dataPointer, end === -1 ? Infinity : end, 0) + ]) + } + putTreeNode (node) { this.changes.push([core.tree(this.core.dataPointer, node.index), encode(CORE_TREE_NODE, node), null]) } @@ -150,6 +158,10 @@ class CoreRX { return decode(CORE_TREE_NODE, await this.view.get(this.read, core.tree(data, index))) } + async hasTreeNode (index) { + return (await this.getTreeNode(index)) !== null + } + getUserData (key) { return this.view.get(this.read, core.userData(this.core.dataPointer, key)) } From f5e050496f52d9e5ffec8ceb8de93b4669e46d59 Mon Sep 17 00:00:00 2001 From: Christophe Diederichs Date: Thu, 9 Jan 2025 19:22:04 +0000 Subject: [PATCH 03/41] support reverse block stream --- index.js | 6 +++--- lib/block-stream.js | 8 +++++--- lib/view.js | 4 ++-- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/index.js b/index.js index 4f594a7..0c5a688 100644 --- a/index.js +++ b/index.js @@ -95,15 +95,15 @@ class HypercoreStorage { return this.store.atom() } - createBlockStream (start, end) { - return createBlockStream(this.core, this.db, this.view, start, end) + createBlockStream (start, end, reverse) { + return createBlockStream(this.core, this.db, this.view, start, end, reverse) } createBitfieldStream (start, end) { return createBitfieldStream(this.core, this.db, this.view, start, end) } - createUserDataStream (start, end) { + createUserDataStream (start, end = null) { return createUserDataStream(this.core, this.db, this.view, start, end) } diff --git a/lib/block-stream.js b/lib/block-stream.js index 2446de5..1b2944e 100644 --- a/lib/block-stream.js +++ b/lib/block-stream.js @@ -2,13 +2,14 @@ const { Readable, getStreamError } = require('streamx') const { core } = require('./keys') module.exports = class BlockStream extends Readable { - constructor (core, db, updates, start, end) { + constructor (core, db, updates, start, end, reverse) { super({ mapReadable }) this.core = core this.db = db this.updates = updates this.end = end + this.reverse = reverse === true this._drained = true this._consumed = 0 @@ -22,7 +23,8 @@ module.exports = class BlockStream extends Readable { _update () { if (this._consumed > this.core.dependencies.length) return - const offset = this._consumed === 0 ? 0 : this.core.dependencies[this._consumed - 1].length + const index = this.reverse ? this.core.dependencies.length - this._consumed : this._consumed + const offset = index === 0 ? 
0 : this.core.dependencies[index - 1].length let end = 0 let ptr = 0 @@ -95,7 +97,7 @@ module.exports = class BlockStream extends Readable { } _makeStream (start, end) { - this._stream = this.updates.iterator(this.db, start, end) + this._stream = this.updates.iterator(this.db, start, end, this.reverse) this._stream.on('readable', this._maybeDrainBound) this._stream.on('error', noop) this._stream.on('close', this._oncloseBound) diff --git a/lib/view.js b/lib/view.js index abb771c..c17763a 100644 --- a/lib/view.js +++ b/lib/view.js @@ -170,8 +170,8 @@ class View { this.snap = this.map = this.changes = this.ranges = null } - iterator (db, start, end) { - const stream = db.iterator({ gte: start, lt: end }) + iterator (db, start, end, reverse) { + const stream = db.iterator({ gte: start, lt: end, reverse }) if (this.changes === null) return stream const changes = this.map === null ? [] : [...this.map.values()] From ea5ed53291bb153c960def06b712e220a80e9775 Mon Sep 17 00:00:00 2001 From: Christophe Diederichs Date: Thu, 9 Jan 2025 19:22:18 +0000 Subject: [PATCH 04/41] close returns if already closed --- index.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/index.js b/index.js index 0c5a688..de418eb 100644 --- a/index.js +++ b/index.js @@ -293,6 +293,8 @@ class CorestoreStorage { } async close () { + if (this.db.closed) return + await this._enter() await this._exit() await this.db.close() From 8a2885b927a0f1622e28dd6b34b6f2122d30d27f Mon Sep 17 00:00:00 2001 From: Christophe Diederichs Date: Thu, 9 Jan 2025 19:22:40 +0000 Subject: [PATCH 05/41] changes array reset to empty array --- lib/tx.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/tx.js b/lib/tx.js index 2f32b45..9c5a23f 100644 --- a/lib/tx.js +++ b/lib/tx.js @@ -210,7 +210,7 @@ class CorestoreTX { apply () { if (this.changes === null) return this.view.apply(this.changes) - this.changes = null + this.changes = [] } } From 45322296b12c7f3dea014cc4e51b22bca4528d89 Mon Sep 17 00:00:00 2001 From: Christophe Diederichs Date: Thu, 9 Jan 2025 19:22:53 +0000 Subject: [PATCH 06/41] fix package.json --- package.json | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index 09a3314..961758d 100644 --- a/package.json +++ b/package.json @@ -1,5 +1,5 @@ { - "name": "2", + "name": "hypercore-storage", "version": "1.0.0", "main": "index.js", "directories": { @@ -12,9 +12,14 @@ "license": "ISC", "description": "", "dependencies": { + "b4a": "^1.6.7", + "compact-encoding": "^2.16.0", + "flat-tree": "^1.12.1", "hyperschema": "^1.3.3", + "index-encoder": "^3.3.2", "resolve-reject-promise": "^1.0.0", - "rocksdb-native": "^3.1.0", - "scope-lock": "^1.2.4" + "rocksdb-native": "^3.1.1", + "scope-lock": "^1.2.4", + "streamx": "^2.21.1" } } From cb377809f3e231d38454bd7c3aaeb9f6eddfa89c Mon Sep 17 00:00:00 2001 From: Mathias Buus Date: Thu, 9 Jan 2025 20:44:04 +0100 Subject: [PATCH 07/41] move defaultKey to init store --- index.js | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/index.js b/index.js index de418eb..981691d 100644 --- a/index.js +++ b/index.js @@ -339,7 +339,7 @@ class CorestoreStorage { rx.tryFlush() - const head = (await headPromise) || initStoreHead() + const head = (await headPromise) || initStoreHead(null) head.seed = seed tx.setHead(head) @@ -366,7 +366,7 @@ class CorestoreStorage { rx.tryFlush() - const head = (await headPromise) || initStoreHead() + const head = (await headPromise) || initStoreHead(null) head.defaultKey = 
defaultKey tx.setHead(head) @@ -428,10 +428,7 @@ class CorestoreStorage { let [core, head] = await Promise.all([corePromise, headPromise]) if (core) return this._resumeFromPointers(tx.view, core) - if (head === null) { - head = initStoreHead() - head.defaultKey = discoveryKey - } + if (head === null) head = initStoreHead(discoveryKey) const corePointer = head.allocated.cores++ const dataPointer = head.allocated.datas++ @@ -475,14 +472,15 @@ class CorestoreStorage { module.exports = CorestoreStorage -function initStoreHead () { +function initStoreHead (defaultKey) { return { version: 0, allocated: { datas: 0, cores: 0 }, - seed: null + seed: null, + defaultKey } } From 2fb76f7dbb6cc4e96818a8fc9fbb1c4712e57950 Mon Sep 17 00:00:00 2001 From: Mathias Buus Date: Thu, 9 Jan 2025 20:49:09 +0100 Subject: [PATCH 08/41] make proper flush method --- index.js | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/index.js b/index.js index 981691d..c713b59 100644 --- a/index.js +++ b/index.js @@ -233,6 +233,13 @@ class CorestoreStorage { return new this(db) } + async _flush () { + while (this.enters > 0) { + await this.lock.lock() + await this.lock.unlock() + } + } + async _enter () { this.enters++ await this.lock.lock() @@ -294,9 +301,7 @@ class CorestoreStorage { async close () { if (this.db.closed) return - - await this._enter() - await this._exit() + await this._flush() await this.db.close() } From 374eb22eb2d27b8d8eadad061f913206e1d28af5 Mon Sep 17 00:00:00 2001 From: Mathias Buus Date: Thu, 9 Jan 2025 22:17:10 +0100 Subject: [PATCH 09/41] fix tons of iterator bugs --- index.js | 18 +- ...k-stream.js => block-dependency-stream.js} | 6 +- lib/streams.js | 39 +++-- lib/view.js | 58 ++++++- package.json | 19 +- test/helpers/index.js | 27 +++ test/streams.js | 164 ++++++++++++++++++ 7 files changed, 291 insertions(+), 40 deletions(-) rename lib/{block-stream.js => block-dependency-stream.js} (94%) create mode 100644 test/helpers/index.js create mode 100644 test/streams.js diff --git a/index.js b/index.js index c713b59..98557b4 100644 --- a/index.js +++ b/index.js @@ -39,12 +39,12 @@ class Atom { } class HypercoreStorage { - constructor (store, db, core, view, atom) { + constructor (store, db, core, view, atomic) { this.store = store this.db = db this.core = core this.view = view - this.atom = atom + this.atomic = atomic this.view.readStart() store.opened++ @@ -84,11 +84,11 @@ class HypercoreStorage { } snapshot () { - return new HypercoreStorage(this.store, this.db.snapshot(), this.core, this.view.snapshot(), null) + return new HypercoreStorage(this.store, this.db.snapshot(), this.core, this.view.snapshot(), this.atomic) } atomize (atom) { - return new HypercoreStorage(this.store, this.db.session(), this.core, atom.view, atom) + return new HypercoreStorage(this.store, this.db.session(), this.core, atom.view, true) } atom () { @@ -133,7 +133,7 @@ class HypercoreStorage { const dependency = await dependencyPromise if (dependency) core.dependencies = this._addDependency(dependency) - return new HypercoreStorage(this.store, this.db.session(), core, this.atom ? this.view : new View(), this.atom) + return new HypercoreStorage(this.store, this.db.session(), core, this.atomic ? this.view : new View(), this.atomic) } async createBatch (name, head) { @@ -174,7 +174,7 @@ class HypercoreStorage { await tx.flush() - return new HypercoreStorage(this.store, this.db.session(), core, this.atom ? 
this.view : new View(), this.atom) + return new HypercoreStorage(this.store, this.db.session(), core, this.atomic ? this.view : new View(), this.atomic) } _addDependency (dep) { @@ -200,7 +200,7 @@ class HypercoreStorage { } write () { - return new CoreTX(this.core, this.db, this.atom ? this.view : null, []) + return new CoreTX(this.core, this.db, this.atomic ? this.view : null, []) } close () { @@ -418,7 +418,7 @@ class CorestoreStorage { dataPointer = dependency.dataPointer } - return new HypercoreStorage(this, this.db.session(), core, EMPTY, null) + return new HypercoreStorage(this, this.db.session(), core, EMPTY, false) } // not allowed to throw validation errors as its a shared tx! @@ -461,7 +461,7 @@ class CorestoreStorage { } } - return new HypercoreStorage(this, this.db.session(), ptr, EMPTY, null) + return new HypercoreStorage(this, this.db.session(), ptr, EMPTY, false) } async create (data) { diff --git a/lib/block-stream.js b/lib/block-dependency-stream.js similarity index 94% rename from lib/block-stream.js rename to lib/block-dependency-stream.js index 1b2944e..ddfd05f 100644 --- a/lib/block-stream.js +++ b/lib/block-dependency-stream.js @@ -3,7 +3,7 @@ const { core } = require('./keys') module.exports = class BlockStream extends Readable { constructor (core, db, updates, start, end, reverse) { - super({ mapReadable }) + super() this.core = core this.db = db @@ -105,7 +105,3 @@ module.exports = class BlockStream extends Readable { } function noop () {} - -function mapReadable (entry) { - return { index: core.blockIndex(entry.key), value: entry.value } -} diff --git a/lib/streams.js b/lib/streams.js index 13820a0..f2383ff 100644 --- a/lib/streams.js +++ b/lib/streams.js @@ -1,4 +1,4 @@ -const BlockStream = require('./block-stream.js') +const BlockDependencyStream = require('./block-dependency-stream.js') const { core, store } = require('./keys.js') const schema = require('../spec/hyperschema') @@ -12,43 +12,56 @@ module.exports = { createAliasStream } -function createCoreStream (db, updates) { +function createCoreStream (db, view) { const start = store.coreStart() const end = store.coreEnd() - const ite = updates.iterator(db, start, end) + const ite = view.iterator(db, start, end, false) ite._readableState.map = mapCore return ite } -function createAliasStream (db, updates, namespace) { +function createAliasStream (db, view, namespace) { const start = store.coreByAliasStart(namespace) const end = store.coreByAliasEnd(namespace) - const ite = updates.iterator(db, start, end) + const ite = view.iterator(db, start, end, false) ite._readableState.map = mapAlias return ite } -function createBlockStream (ptr, db, updates, start, end, reverse) { - return new BlockStream(ptr, db, updates, start, end, reverse) +function createBlockIterator (ptr, db, view, start, end, reverse) { + if (ptr.dependencies.length > 0) { + return new BlockDependencyStream(ptr, db, view, start, end, reverse) + } + + const s = core.block(ptr.dataPointer, start) + const e = core.block(ptr.dataPointer, end === -1 ? Infinity : end) + return view.iterator(db, s, e, reverse) } -function createBitfieldStream (ptr, db, updates, start, end) { +function createBlockStream (ptr, db, view, start, end, reverse) { + const ite = createBlockIterator(ptr, db, view, start, end, reverse) + + ite._readableState.map = mapBlock + return ite +} + +function createBitfieldStream (ptr, db, view, start, end) { const s = core.bitfield(ptr.dataPointer, start, 0) const e = core.bitfield(ptr.dataPointer, end === -1 ? 
Infinity : end, 0) - const ite = updates.iterator(db, s, e) + const ite = view.iterator(db, s, e, false) ite._readableState.map = mapBitfield return ite } -function createUserDataStream (ptr, db, updates, start, end) { +function createUserDataStream (ptr, db, view, start, end) { const s = core.userData(ptr.dataPointer, start || '') const e = end === null ? core.userDataEnd(ptr.dataPointer) : core.userData(ptr.dataPointer, end) - const ite = updates.iterator(db, s, e) + const ite = view.iterator(db, s, e, false) ite._readableState.map = mapUserData return ite @@ -75,3 +88,7 @@ function mapAlias (data) { const alias = store.alias(data.key) return { alias, discoveryKey: data.value } } + +function mapBlock (data) { + return { index: core.blockIndex(data.key), value: data.value } +} diff --git a/lib/view.js b/lib/view.js index c17763a..1da43da 100644 --- a/lib/view.js +++ b/lib/view.js @@ -2,11 +2,12 @@ const { Readable, getStreamError } = require('streamx') const b4a = require('b4a') class OverlayStream extends Readable { - constructor (stream, start, end, changes, ranges) { + constructor (stream, start, end, reverse, changes, ranges) { super() this.start = start this.end = end + this.reverse = reverse this.changes = changes this.ranges = ranges this.change = 0 @@ -70,17 +71,19 @@ class OverlayStream extends Readable { } _push (entry) { + const key = entry.key + while (this.range < this.ranges.length) { const r = this.ranges[this.range] // we moved past the range - if (b4a.compare(r[2], entry.key) <= 0) { + if (this.reverse ? b4a.compare(key, r[0]) < 0 : b4a.compare(r[2], key) <= 0) { this.range++ continue } // we didnt move past and are in, drop - if (b4a.compare(r[0], entry.key) <= 0) { + if (b4a.compare(r[0], key) <= 0 && b4a.compare(key, r[2]) < 0) { return true } @@ -100,7 +103,7 @@ class OverlayStream extends Readable { } // we moved past the change, push it - if (cmp < 0) { + if (this.reverse ? cmp > 0 : cmp < 0) { this.change++ if (value !== null && this._inRange(key) === true) this.push({ key, value }) continue @@ -123,6 +126,7 @@ class View { this.indexed = 0 this.changes = null this.ranges = null + this.overlay = null this.snap = null this.readers = 0 } @@ -174,13 +178,39 @@ class View { const stream = db.iterator({ gte: start, lt: end, reverse }) if (this.changes === null) return stream - const changes = this.map === null ? [] : [...this.map.values()] - const ranges = this.ranges === null ? [] : this.ranges.slice(0) + this._index() + + if (this.overlay === null || this.overlay.indexed !== this.indexed) { + const changes = this.map === null ? [] : [...this.map.values()] + const ranges = this.ranges === null ? [] : this.ranges.slice(0) + + const cmp = reverse ? cmpChangeReverse : cmpChange + + changes.sort(cmp) + ranges.sort(cmp) + + this.overlay = { indexed: this.indexed, changes, ranges, reverse } + } - changes.sort(cmpChange) - ranges.sort(cmpChange) + if (this.overlay.reverse !== reverse) { + return new OverlayStream( + stream, + start, + end, + reverse, + reverseArray(this.overlay.changes), + reverseArray(this.overlay.ranges) + ) + } - return new OverlayStream(stream, start, end, changes, ranges) + return new OverlayStream( + stream, + start, + end, + reverse, + this.overlay.changes, + this.overlay.ranges + ) } _indexAndGet (read, key) { @@ -273,4 +303,14 @@ function cmpChange (a, b) { return c === 0 ? 
b4a.compare(a[2], b[2]) : c } +function cmpChangeReverse (a, b) { + return cmpChange(b, a) +} + function noop () {} + +function reverseArray (list) { + const r = new Array(list.length) + for (let i = 0; i < list.length; i++) r[r.length - 1 - i] = list[i] + return r +} diff --git a/package.json b/package.json index 961758d..572fc70 100644 --- a/package.json +++ b/package.json @@ -2,14 +2,16 @@ "name": "hypercore-storage", "version": "1.0.0", "main": "index.js", - "directories": { - "lib": "lib" - }, + "files": [ + "index.js", + "lib/*.js", + "spec/hyperschema/*.js" + ], "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" + "test": "standard && brittle test/*.js" }, - "author": "", - "license": "ISC", + "author": "Holepunch Inc.", + "license": "Apache-2.0", "description": "", "dependencies": { "b4a": "^1.6.7", @@ -21,5 +23,10 @@ "rocksdb-native": "^3.1.1", "scope-lock": "^1.2.4", "streamx": "^2.21.1" + }, + "devDependencies": { + "brittle": "^3.7.0", + "standard": "^17.1.2", + "test-tmp": "^1.3.1" } } diff --git a/test/helpers/index.js b/test/helpers/index.js new file mode 100644 index 0000000..ee8412c --- /dev/null +++ b/test/helpers/index.js @@ -0,0 +1,27 @@ +const b4a = require('b4a') +const tmp = require('test-tmp') +const Storage = require('../../') + +module.exports = { createCore, create, toArray } + +async function createCore (t) { + const s = await create(t) + const core = await s.create({ key: b4a.alloc(32), discoveryKey: b4a.alloc(32) }) + + t.teardown(async function () { + await core.close() + await s.close() + }) + + return core +} + +async function create (t) { + return new Storage(await tmp(t)) +} + +async function toArray (stream) { + const all = [] + for await (const data of stream) all.push(data) + return all +} diff --git a/test/streams.js b/test/streams.js new file mode 100644 index 0000000..d6f5468 --- /dev/null +++ b/test/streams.js @@ -0,0 +1,164 @@ +const test = require('brittle') +const b4a = require('b4a') +const { createCore, toArray } = require('./helpers') + +test('block stream', async function (t) { + const core = await createCore(t) + + const tx = core.write() + const expected = [] + + for (let i = 0; i < 10; i++) { + tx.putBlock(i, b4a.from([i])) + expected.push({ index: i, value: b4a.from([i]) }) + } + + await tx.flush() + + const blocks = await toArray(core.createBlockStream(0, 10, false)) + + t.alike(blocks, expected) +}) + +test('reverse block stream', async function (t) { + const core = await createCore(t) + + const tx = core.write() + const expected = [] + + for (let i = 0; i < 10; i++) { + tx.putBlock(i, b4a.from([i])) + expected.push({ index: i, value: b4a.from([i]) }) + } + + await tx.flush() + + const blocks = await toArray(core.createBlockStream(0, 10, true)) + + t.alike(blocks, expected.reverse()) +}) + +test('block stream (atom)', async function (t) { + const core = await createCore(t) + const atom = core.atom() + + const a = core.atomize(atom) + + const expected = [] + + { + const tx = a.write() + + for (let i = 0; i < 5; i++) { + const index = 2 * i + tx.putBlock(index, b4a.from([index])) + expected.push({ index, value: b4a.from([index]) }) + } + + await tx.flush() + } + + await atom.flush() + + { + const tx = a.write() + + for (let i = 0; i < 5; i++) { + const index = 2 * i + 1 + tx.putBlock(index, b4a.from([index])) + expected.push({ index, value: b4a.from([index]) }) + } + + await tx.flush() + } + + { + const blocks = await toArray(a.createBlockStream(0, 10, false)) + t.alike(blocks, expected.sort(cmpBlock)) + } + + { + const 
blocks = await toArray(a.createBlockStream(0, 10, true)) + t.alike(blocks, expected.sort(cmpBlock).reverse()) + } + + { + const tx = a.write() + tx.deleteBlockRange(4, 6) + await tx.flush() + } + + expected.sort(cmpBlock).splice(4, 2) + + { + const blocks = await toArray(a.createBlockStream(0, 10, false)) + t.alike(blocks, expected.sort(cmpBlock)) + } + + { + const blocks = await toArray(a.createBlockStream(0, 10, true)) + t.alike(blocks, expected.sort(cmpBlock).reverse()) + } + + { + const tx = a.write() + tx.deleteBlockRange(0, 2) + tx.deleteBlockRange(8, 9) + await tx.flush() + } + + expected.sort(cmpBlock) + + expected.shift() + expected.shift() + const tmp = expected.pop() + expected.pop() + expected.push(tmp) + + { + const blocks = await toArray(a.createBlockStream(0, 10, false)) + t.alike(blocks, expected.sort(cmpBlock)) + } + + { + const blocks = await toArray(a.createBlockStream(0, 10, true)) + t.alike(blocks, expected.sort(cmpBlock).reverse()) + } + + { + const tx = a.write() + tx.deleteBlockRange(8, 10) + await tx.flush() + } + + expected.sort(cmpBlock) + expected.pop() + + { + const blocks = await toArray(a.createBlockStream(0, 10, false)) + t.alike(blocks, expected.sort(cmpBlock)) + } + + { + const blocks = await toArray(a.createBlockStream(0, 10, true)) + t.alike(blocks, expected.sort(cmpBlock).reverse()) + } + + t.ok(a.view.changes, 'used atomic view') + + await atom.flush() + + { + const blocks = await toArray(a.createBlockStream(0, 10, false)) + t.alike(blocks, expected.sort(cmpBlock)) + } + + { + const blocks = await toArray(a.createBlockStream(0, 10, true)) + t.alike(blocks, expected.sort(cmpBlock).reverse()) + } +}) + +function cmpBlock (a, b) { + return a.index - b.index +} From e08ccb3027878d43abca3df8edb8bf35bdb07699 Mon Sep 17 00:00:00 2001 From: Mathias Buus Date: Thu, 9 Jan 2025 22:27:28 +0100 Subject: [PATCH 10/41] cleanup --- lib/view.js | 71 ++++++++++++++++++++++++++++++----------------------- 1 file changed, 40 insertions(+), 31 deletions(-) diff --git a/lib/view.js b/lib/view.js index 1da43da..32c342b 100644 --- a/lib/view.js +++ b/lib/view.js @@ -120,6 +120,43 @@ class OverlayStream extends Readable { } } +class Overlay { + constructor () { + this.indexed = 0 + this.changes = null + this.ranges = null + this.reverse = false + } + + update (view, reverse) { + if (view.indexed === this.indexed) return + + const changes = view.map === null ? [] : [...view.map.values()] + const ranges = view.ranges === null ? [] : view.ranges.slice(0) + + const cmp = reverse ? cmpChangeReverse : cmpChange + + changes.sort(cmp) + ranges.sort(cmp) + + this.indexed = view.indexed + this.changes = changes + this.ranges = ranges + this.reverse = reverse + } + + createStream (stream, start, end, reverse) { + return new OverlayStream( + stream, + start, + end, + reverse, + this.reverse === reverse ? this.changes : reverseArray(this.changes), + this.reverse === reverse ? this.ranges : reverseArray(this.ranges) + ) + } +} + class View { constructor () { this.map = null @@ -180,37 +217,9 @@ class View { this._index() - if (this.overlay === null || this.overlay.indexed !== this.indexed) { - const changes = this.map === null ? [] : [...this.map.values()] - const ranges = this.ranges === null ? [] : this.ranges.slice(0) - - const cmp = reverse ? 
cmpChangeReverse : cmpChange - - changes.sort(cmp) - ranges.sort(cmp) - - this.overlay = { indexed: this.indexed, changes, ranges, reverse } - } - - if (this.overlay.reverse !== reverse) { - return new OverlayStream( - stream, - start, - end, - reverse, - reverseArray(this.overlay.changes), - reverseArray(this.overlay.ranges) - ) - } - - return new OverlayStream( - stream, - start, - end, - reverse, - this.overlay.changes, - this.overlay.ranges - ) + if (this.overlay === null) this.overlay = new Overlay() + this.overlay.update(this, reverse) + return this.overlay.createStream(stream, start, end, reverse) } _indexAndGet (read, key) { From 32194a339a93c27f11726f6a740e29b81194bab6 Mon Sep 17 00:00:00 2001 From: Mathias Buus Date: Thu, 9 Jan 2025 22:29:02 +0100 Subject: [PATCH 11/41] safety --- index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/index.js b/index.js index 98557b4..0f4278c 100644 --- a/index.js +++ b/index.js @@ -96,7 +96,7 @@ class HypercoreStorage { } createBlockStream (start, end, reverse) { - return createBlockStream(this.core, this.db, this.view, start, end, reverse) + return createBlockStream(this.core, this.db, this.view, start, end, !!reverse) } createBitfieldStream (start, end) { From c7d8eb8649e059d208af63a6fe638882aa64b538 Mon Sep 17 00:00:00 2001 From: Christophe Diederichs Date: Fri, 10 Jan 2025 00:04:28 +0000 Subject: [PATCH 12/41] add snapshotted getter --- index.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/index.js b/index.js index 0f4278c..2ae20cb 100644 --- a/index.js +++ b/index.js @@ -83,6 +83,10 @@ class HypercoreStorage { throw new Error('Dependency not found') } + get snapshotted () { + return this.db._snapshot !== null + } + snapshot () { return new HypercoreStorage(this.store, this.db.snapshot(), this.core, this.view.snapshot(), this.atomic) } From 66da47c113f154c4697aca53ca20bca9743ee6fe Mon Sep 17 00:00:00 2001 From: Mathias Buus Date: Fri, 10 Jan 2025 10:14:11 +0100 Subject: [PATCH 13/41] view should be shared, not tx in corestore storage --- index.js | 54 +++++++++++++++++++++++++++++++++------------------ lib/tx.js | 5 ++--- test/basic.js | 49 ++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 86 insertions(+), 22 deletions(-) create mode 100644 test/basic.js diff --git a/index.js b/index.js index 2ae20cb..5ab4bb2 100644 --- a/index.js +++ b/index.js @@ -222,6 +222,7 @@ class CorestoreStorage { constructor (db) { this.db = typeof db === 'string' ? 
new RocksDB(db) : db this.opened = 0 + this.view = null this.tx = null this.enters = 0 this.lock = new ScopeLock() @@ -247,26 +248,25 @@ class CorestoreStorage { async _enter () { this.enters++ await this.lock.lock() - if (this.tx === null) this.tx = new CorestoreTX(this.db, new View()) - return this.tx + if (this.view === null) this.view = new View() + return this.view } async _exit () { this.enters-- - this.tx.apply() if (this.flushing === null) this.flushing = rrp() const flushed = this.flushing.promise - if (this.enters === 0 || this.tx.view.size() > 128) { + if (this.enters === 0 || this.view.size() > 128) { try { - await View.flush(this.tx.view.changes, this.db) + await View.flush(this.view.changes, this.db) this.flushing.resolve() } catch (err) { this.flushing.reject(err) } finally { this.flushing = null - this.tx = null + this.view = null } } @@ -279,10 +279,11 @@ class CorestoreStorage { async _allocData () { let dataPointer = 0 - const tx = await this._enter() + const view = await this._enter() + const tx = new CorestoreTX(view) try { - const rx = new CorestoreRX(this.db, tx.view) + const rx = new CorestoreRX(this.db, view) const headPromise = rx.getHead() rx.tryFlush() @@ -291,7 +292,9 @@ class CorestoreStorage { if (head === null) head = initStoreHead() dataPointer = head.allocated.datas++ + tx.setHead(head) + tx.apply() } finally { await this._exit() } @@ -310,8 +313,12 @@ class CorestoreStorage { } async clear () { - const tx = await this._enter() + const view = await this._enter() + const tx = new CorestoreTX(view) + tx.clear() + tx.apply() + await this._exit() } @@ -341,9 +348,11 @@ class CorestoreStorage { } async setSeed (seed) { - const tx = await this._enter() + const view = await this._enter() + const tx = new CorestoreTX(view) + try { - const rx = new CorestoreRX(this.db, tx.view) + const rx = new CorestoreRX(this.db, view) const headPromise = rx.getHead() rx.tryFlush() @@ -352,6 +361,7 @@ class CorestoreStorage { head.seed = seed tx.setHead(head) + tx.apply() } finally { await this._exit() } @@ -368,9 +378,11 @@ class CorestoreStorage { } async setDefaultKey (defaultKey) { - const tx = await this._enter() + const view = await this._enter() + const tx = new CorestoreTX(view) + try { - const rx = new CorestoreRX(this.db, tx.view) + const rx = new CorestoreRX(this.db, view) const headPromise = rx.getHead() rx.tryFlush() @@ -379,6 +391,7 @@ class CorestoreStorage { head.defaultKey = defaultKey tx.setHead(head) + tx.apply() } finally { await this._exit() } @@ -426,8 +439,9 @@ class CorestoreStorage { } // not allowed to throw validation errors as its a shared tx! 
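
// Illustrative sketch of the pattern this patch moves to (hypothetical helper;
// `withStoreTx` and `store` are names invented here for illustration): the View
// is shared per store, and each operation builds its own short-lived
// CorestoreTX against that view, applying it before the lock is released.
async function withStoreTx (store, fn) {
  const view = await store._enter() // takes the scope lock, lazily creating the shared View
  const tx = new CorestoreTX(view)  // per-operation staging, replacing the old shared tx
  try {
    await fn(tx)  // stage changes, e.g. tx.setHead(head)
    tx.apply()    // fold the staged changes into the shared view
  } finally {
    await store._exit() // the last caller out flushes the view to RocksDB
  }
}
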
- async _create (tx, { key, manifest, keyPair, encryptionKey, discoveryKey, alias, userData }) { - const rx = new CorestoreRX(this.db, tx.view) + async _create (view, { key, manifest, keyPair, encryptionKey, discoveryKey, alias, userData }) { + const rx = new CorestoreRX(this.db, view) + const tx = new CorestoreTX(view) const corePromise = rx.getCore(discoveryKey) const headPromise = rx.getHead() @@ -435,7 +449,7 @@ class CorestoreStorage { rx.tryFlush() let [core, head] = await Promise.all([corePromise, headPromise]) - if (core) return this._resumeFromPointers(tx.view, core) + if (core) return this._resumeFromPointers(view, core) if (head === null) head = initStoreHead(discoveryKey) @@ -449,7 +463,7 @@ class CorestoreStorage { if (alias) tx.putCoreByAlias(alias, discoveryKey) const ptr = { corePointer, dataPointer, dependencies: [] } - const ctx = new CoreTX(ptr, this.db, tx.view, tx.changes) + const ctx = new CoreTX(ptr, this.db, view, tx.changes) ctx.setAuth({ key, @@ -465,14 +479,16 @@ class CorestoreStorage { } } + tx.apply() + return new HypercoreStorage(this, this.db.session(), ptr, EMPTY, false) } async create (data) { - const tx = await this._enter() + const view = await this._enter() try { - return await this._create(tx, data) + return await this._create(view, data) } finally { await this._exit() } diff --git a/lib/tx.js b/lib/tx.js index 9c5a23f..b346c74 100644 --- a/lib/tx.js +++ b/lib/tx.js @@ -184,8 +184,7 @@ class CoreRX { } class CorestoreTX { - constructor (db, view) { - this.db = db + constructor (view) { this.view = view this.changes = [] } @@ -210,7 +209,7 @@ class CorestoreTX { apply () { if (this.changes === null) return this.view.apply(this.changes) - this.changes = [] + this.changes = null } } diff --git a/test/basic.js b/test/basic.js new file mode 100644 index 0000000..4902bec --- /dev/null +++ b/test/basic.js @@ -0,0 +1,49 @@ +const test = require('brittle') +const b4a = require('b4a') +const { create } = require('./helpers') + +test('make storage and core', async function (t) { + const s = await create(t) + + t.is(await s.has(b4a.alloc(32)), false) + t.is(await s.resume(b4a.alloc(32)), null) + + const c = await s.create({ key: b4a.alloc(32), discoveryKey: b4a.alloc(32) }) + + t.is(await s.has(b4a.alloc(32)), true) + + await c.close() + + t.is(await s.has(b4a.alloc(32)), true) + + const r = await s.resume(b4a.alloc(32)) + + t.ok(!!r) + + await r.close() + await s.close() +}) + +test('make many in parallel', async function (t) { + const s = await create(t) + + const all = [] + for (let i = 0; i < 50; i++) { + const c = s.create({ key: b4a.alloc(32, i), discoveryKey: b4a.alloc(32, i) }) + all.push(c) + } + + const cores = await Promise.all(all) + const ptrs = new Set() + + for (const c of cores) { + ptrs.add(c.core.corePointer) + } + + // all unique allocations + t.is(ptrs.size, cores.length) + + for (const c of cores) await c.close() + + await s.close() +}) From 41896778f1bfb9e56d7fa4019a2f8af3de673cd2 Mon Sep 17 00:00:00 2001 From: Mathias Buus Date: Fri, 10 Jan 2025 10:31:04 +0100 Subject: [PATCH 14/41] defaultKey -> defaultDiscoveryKey and fix some internals --- build.js | 2 +- index.js | 46 +++++++++++++++++++++--------------- spec/hyperschema/index.js | 8 +++---- spec/hyperschema/schema.json | 2 +- test/basic.js | 17 +++++++++++++ 5 files changed, 50 insertions(+), 25 deletions(-) diff --git a/build.js b/build.js index 5483154..ef2306e 100644 --- a/build.js +++ b/build.js @@ -32,7 +32,7 @@ corestore.register({ name: 'seed', type: 'fixed32' }, { - name: 
'defaultKey', + name: 'defaultDiscoveryKey', type: 'fixed32' }] }) diff --git a/index.js b/index.js index 5ab4bb2..c9a8a99 100644 --- a/index.js +++ b/index.js @@ -47,7 +47,6 @@ class HypercoreStorage { this.atomic = atomic this.view.readStart() - store.opened++ } get dependencies () { @@ -209,7 +208,6 @@ class HypercoreStorage { close () { if (this.view !== null) { - this.store.opened-- this.view.readStop() this.view = null } @@ -221,14 +219,20 @@ class HypercoreStorage { class CorestoreStorage { constructor (db) { this.db = typeof db === 'string' ? new RocksDB(db) : db - this.opened = 0 this.view = null - this.tx = null this.enters = 0 this.lock = new ScopeLock() this.flushing = null } + get opened () { + return this.db.opened + } + + get closed () { + return this.db.closed + } + static isCoreStorage (db) { return isCorestoreStorage(db) } @@ -289,7 +293,7 @@ class CorestoreStorage { rx.tryFlush() let head = await headPromise - if (head === null) head = initStoreHead() + if (head === null) head = initStoreHead(null, null) dataPointer = head.allocated.datas++ @@ -347,7 +351,7 @@ class CorestoreStorage { return head === null ? null : head.seed } - async setSeed (seed) { + async setSeed (seed, { overwrite = true } = {}) { const view = await this._enter() const tx = new CorestoreTX(view) @@ -357,27 +361,29 @@ class CorestoreStorage { rx.tryFlush() - const head = (await headPromise) || initStoreHead(null) + const head = (await headPromise) || initStoreHead(null, null) - head.seed = seed + if (head.seed === null || overwrite) head.seed = seed tx.setHead(head) tx.apply() + + return head.seed } finally { await this._exit() } } - async getDefaultKey () { + async getDefaultDiscoveryKey () { const rx = new CorestoreRX(this.db, EMPTY) const headPromise = rx.getHead() rx.tryFlush() const head = await headPromise - return head === null ? null : head.defaultKey + return head === null ? 
null : head.defaultDiscoveryKey } - async setDefaultKey (defaultKey) { + async setDefaultDiscoveryKey (discoveryKey, { overwrite = true } = {}) { const view = await this._enter() const tx = new CorestoreTX(view) @@ -387,11 +393,13 @@ class CorestoreStorage { rx.tryFlush() - const head = (await headPromise) || initStoreHead(null) + const head = (await headPromise) || initStoreHead(null, null) - head.defaultKey = defaultKey + if (head.defaultDiscoveryKey === null || overwrite) head.defaultDiscoveryKey = discoveryKey tx.setHead(head) tx.apply() + + return head.defaultDiscoveryKey } finally { await this._exit() } @@ -408,7 +416,7 @@ class CorestoreStorage { async resume (discoveryKey) { if (!discoveryKey) { - discoveryKey = await this.getDefaultKey() + discoveryKey = await this.getDefaultDiscoveryKey() if (!discoveryKey) return null } @@ -451,7 +459,7 @@ class CorestoreStorage { let [core, head] = await Promise.all([corePromise, headPromise]) if (core) return this._resumeFromPointers(view, core) - if (head === null) head = initStoreHead(discoveryKey) + if (head === null) head = initStoreHead(null, discoveryKey) const corePointer = head.allocated.cores++ const dataPointer = head.allocated.datas++ @@ -497,15 +505,15 @@ class CorestoreStorage { module.exports = CorestoreStorage -function initStoreHead (defaultKey) { +function initStoreHead (seed, defaultDiscoveryKey) { return { version: 0, allocated: { datas: 0, cores: 0 }, - seed: null, - defaultKey + seed, + defaultDiscoveryKey } } @@ -522,5 +530,5 @@ function getBatch (batches, name, alloc) { } function isCorestoreStorage (s) { - return typeof s === 'object' && !!s && typeof s.setDefaultKey === 'function' + return typeof s === 'object' && !!s && typeof s.setDefaultDiscoveryKey === 'function' } diff --git a/spec/hyperschema/index.js b/spec/hyperschema/index.js index b56de44..ab60d95 100644 --- a/spec/hyperschema/index.js +++ b/spec/hyperschema/index.js @@ -38,20 +38,20 @@ const encoding1 = { if (m.allocated) encoding0.preencode(state, m.allocated) if (m.seed) c.fixed32.preencode(state, m.seed) - if (m.defaultKey) c.fixed32.preencode(state, m.defaultKey) + if (m.defaultDiscoveryKey) c.fixed32.preencode(state, m.defaultDiscoveryKey) }, encode (state, m) { const flags = (m.allocated ? 1 : 0) | (m.seed ? 2 : 0) | - (m.defaultKey ? 4 : 0) + (m.defaultDiscoveryKey ? 4 : 0) c.uint.encode(state, m.version) c.uint.encode(state, flags) if (m.allocated) encoding0.encode(state, m.allocated) if (m.seed) c.fixed32.encode(state, m.seed) - if (m.defaultKey) c.fixed32.encode(state, m.defaultKey) + if (m.defaultDiscoveryKey) c.fixed32.encode(state, m.defaultDiscoveryKey) }, decode (state) { const r0 = c.uint.decode(state) @@ -61,7 +61,7 @@ const encoding1 = { version: r0, allocated: (flags & 1) !== 0 ? encoding0.decode(state) : null, seed: (flags & 2) !== 0 ? c.fixed32.decode(state) : null, - defaultKey: (flags & 4) !== 0 ? c.fixed32.decode(state) : null + defaultDiscoveryKey: (flags & 4) !== 0 ? 
c.fixed32.decode(state) : null } } } diff --git a/spec/hyperschema/schema.json b/spec/hyperschema/schema.json index e6e0f4b..b4cae6d 100644 --- a/spec/hyperschema/schema.json +++ b/spec/hyperschema/schema.json @@ -44,7 +44,7 @@ "version": 1 }, { - "name": "defaultKey", + "name": "defaultDiscoveryKey", "type": "fixed32", "version": 1 } diff --git a/test/basic.js b/test/basic.js index 4902bec..009c5cb 100644 --- a/test/basic.js +++ b/test/basic.js @@ -47,3 +47,20 @@ test('make many in parallel', async function (t) { await s.close() }) + +test('first core created is the default core', async function (t) { + const s = await create(t) + + t.is(await s.getDefaultDiscoveryKey(), null) + const c = await s.create({ key: b4a.alloc(32), discoveryKey: b4a.alloc(32) }) + + t.alike(await s.getDefaultDiscoveryKey(), b4a.alloc(32)) + + const c1 = await s.create({ key: b4a.alloc(32, 1), discoveryKey: b4a.alloc(32, 1) }) + + t.alike(await s.getDefaultDiscoveryKey(), b4a.alloc(32)) + + await c.close() + await c1.close() + await s.close() +}) From 9035ef10e6e42986a7c4aabdad3e647c39bbabb7 Mon Sep 17 00:00:00 2001 From: Christophe Diederichs Date: Fri, 10 Jan 2025 09:33:54 +0000 Subject: [PATCH 15/41] add method for creating atomic batch --- index.js | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/index.js b/index.js index c9a8a99..4b329d0 100644 --- a/index.js +++ b/index.js @@ -139,7 +139,9 @@ class HypercoreStorage { return new HypercoreStorage(this.store, this.db.session(), core, this.atomic ? this.view : new View(), this.atomic) } - async createBatch (name, head) { + async createBatch (name, head, atom) { + if (atom) return this._createAtomicBatch(atom, head) + const rx = this.read() const existingBatchesPromise = rx.getBatches() @@ -180,6 +182,24 @@ class HypercoreStorage { return new HypercoreStorage(this.store, this.db.session(), core, this.atomic ? this.view : new View(), this.atomic) } + async _createAtomicBatch (atom, head) { + const length = head === null ? 0 : head.length + const core = { + corePointer: this.core.corePointer, + dataPointer: this.core.dataPointer, + dependencies: this._addDependency({ dataPointer: this.core.dataPointer, length }) + } + + const batchTx = new CoreTX(core, this.db, atom.view, []) + + if (length > 0) batchTx.setHead(head) + batchTx.setDependency(core.dependencies[core.dependencies.length - 1]) + + await batchTx.flush() + + return this.atomize() + } + _addDependency (dep) { const deps = [] From 959b41489d5f17658e87e90037a7c9dfbfec453e Mon Sep 17 00:00:00 2001 From: Christophe Diederichs Date: Fri, 10 Jan 2025 09:45:51 +0000 Subject: [PATCH 16/41] createAtomicBatch is separate method --- index.js | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/index.js b/index.js index 4b329d0..b4bb3e2 100644 --- a/index.js +++ b/index.js @@ -140,8 +140,6 @@ class HypercoreStorage { } async createBatch (name, head, atom) { - if (atom) return this._createAtomicBatch(atom, head) - const rx = this.read() const existingBatchesPromise = rx.getBatches() @@ -182,7 +180,7 @@ class HypercoreStorage { return new HypercoreStorage(this.store, this.db.session(), core, this.atomic ? this.view : new View(), this.atomic) } - async _createAtomicBatch (atom, head) { + async createAtomicBatch (atom, head) { const length = head === null ? 
0 : head.length const core = { corePointer: this.core.corePointer, @@ -197,7 +195,7 @@ class HypercoreStorage { await batchTx.flush() - return this.atomize() + return this.atomize(atom) } _addDependency (dep) { From 30819cb1d38d1d27e7e135b53ed01557da98e180 Mon Sep 17 00:00:00 2001 From: Christophe Diederichs Date: Fri, 10 Jan 2025 09:52:22 +0000 Subject: [PATCH 17/41] rename batch to session --- build.js | 2 +- index.js | 60 ++++++++++++++++++------------------ lib/keys.js | 6 ++-- lib/tx.js | 10 +++--- spec/hyperschema/index.js | 4 +-- spec/hyperschema/schema.json | 2 +- 6 files changed, 42 insertions(+), 42 deletions(-) diff --git a/build.js b/build.js index ef2306e..3db669b 100644 --- a/build.js +++ b/build.js @@ -231,7 +231,7 @@ core.register({ }) core.register({ - name: 'batches', + name: 'sessions', compact: true, array: true, fields: [{ diff --git a/index.js b/index.js index b4bb3e2..de1d1a0 100644 --- a/index.js +++ b/index.js @@ -110,28 +110,28 @@ class HypercoreStorage { return createUserDataStream(this.core, this.db, this.view, start, end) } - async resumeBatch (name) { + async resumeSession (name) { const rx = this.read() - const existingBatchesPromise = rx.getBatches() + const existingSessionsPromise = rx.getSessions() rx.tryFlush() - const existingBatches = await existingBatchesPromise + const existingSessions = await existingSessionsPromise - const batches = existingBatches || [] - const batch = getBatch(batches, name, false) + const sessions = existingSessions || [] + const session = getBatch(sessions, name, false) - if (batch === null) return null + if (session === null) return null const core = { corePointer: this.core.corePointer, - dataPointer: batch.dataPointer, + dataPointer: session.dataPointer, dependencies: [] } - const batchRx = new CoreRX(core, this.db, this.view) + const coreRx = new CoreRX(core, this.db, this.view) - const dependencyPromise = batchRx.getDependency() - batchRx.tryFlush() + const dependencyPromise = coreRx.getDependency() + coreRx.tryFlush() const dependency = await dependencyPromise if (dependency) core.dependencies = this._addDependency(dependency) @@ -139,48 +139,48 @@ class HypercoreStorage { return new HypercoreStorage(this.store, this.db.session(), core, this.atomic ? this.view : new View(), this.atomic) } - async createBatch (name, head, atom) { + async createSession (name, head, atom) { const rx = this.read() - const existingBatchesPromise = rx.getBatches() + const existingSessionsPromise = rx.getSessions() const existingHeadPromise = rx.getHead() rx.tryFlush() - const [existingBatches, existingHead] = await Promise.all([existingBatchesPromise, existingHeadPromise]) + const [existingSessions, existingHead] = await Promise.all([existingSessionsPromise, existingHeadPromise]) if (head === null) head = existingHead if (existingHead !== null && head.length > existingHead.length) { throw new Error('Invalid head passed, ahead of core') } - const batches = existingBatches || [] - const batch = getBatch(batches, name, true) + const sessions = existingSessions || [] + const session = getBatch(sessions, name, true) - batch.dataPointer = await this.store._allocData() + session.dataPointer = await this.store._allocData() const tx = this.write() - tx.setBatches(batches) + tx.setSessions(sessions) const length = head === null ? 
0 : head.length const core = { corePointer: this.core.corePointer, - dataPointer: batch.dataPointer, + dataPointer: session.dataPointer, dependencies: this._addDependency({ dataPointer: this.core.dataPointer, length }) } - const batchTx = new CoreTX(core, this.db, tx.view, tx.changes) + const coreTx = new CoreTX(core, this.db, tx.view, tx.changes) - if (length > 0) batchTx.setHead(head) - batchTx.setDependency(core.dependencies[core.dependencies.length - 1]) + if (length > 0) coreTx.setHead(head) + coreTx.setDependency(core.dependencies[core.dependencies.length - 1]) await tx.flush() return new HypercoreStorage(this.store, this.db.session(), core, this.atomic ? this.view : new View(), this.atomic) } - async createAtomicBatch (atom, head) { + async createAtomicSession (atom, head) { const length = head === null ? 0 : head.length const core = { corePointer: this.core.corePointer, @@ -188,12 +188,12 @@ class HypercoreStorage { dependencies: this._addDependency({ dataPointer: this.core.dataPointer, length }) } - const batchTx = new CoreTX(core, this.db, atom.view, []) + const coreTx = new CoreTX(core, this.db, atom.view, []) - if (length > 0) batchTx.setHead(head) - batchTx.setDependency(core.dependencies[core.dependencies.length - 1]) + if (length > 0) coreTx.setHead(head) + coreTx.setDependency(core.dependencies[core.dependencies.length - 1]) - await batchTx.flush() + await coreTx.flush() return this.atomize(atom) } @@ -535,15 +535,15 @@ function initStoreHead (seed, defaultDiscoveryKey) { } } -function getBatch (batches, name, alloc) { - for (let i = 0; i < batches.length; i++) { - if (batches[i].name === name) return batches[i] +function getBatch (sessions, name, alloc) { + for (let i = 0; i < sessions.length; i++) { + if (sessions[i].name === name) return sessions[i] } if (!alloc) return null const result = { name, dataPointer: 0 } - batches.push(result) + sessions.push(result) return result } diff --git a/lib/keys.js b/lib/keys.js index 9f06a4d..5780031 100644 --- a/lib/keys.js +++ b/lib/keys.js @@ -11,7 +11,7 @@ const TL_DATA = 4 const TL_END = TL_DATA + 1 const CORE_AUTH = 0 -const CORE_BATCHES = 1 +const CORE_SESSIONS = 1 const DATA_HEAD = 0 const DATA_DEPENDENCY = 1 @@ -121,12 +121,12 @@ core.auth = function (ptr) { return state.buffer.subarray(start, state.start) } -core.batches = function (ptr) { +core.sessions = function (ptr) { const state = alloc() const start = state.start UINT.encode(state, TL_CORE) UINT.encode(state, ptr) - UINT.encode(state, CORE_BATCHES) + UINT.encode(state, CORE_SESSIONS) return state.buffer.subarray(start, state.start) } diff --git a/lib/tx.js b/lib/tx.js index b346c74..6667d34 100644 --- a/lib/tx.js +++ b/lib/tx.js @@ -8,7 +8,7 @@ const CORESTORE_HEAD = schema.getEncoding('@corestore/head') const CORESTORE_CORE = schema.getEncoding('@corestore/core') const CORE_AUTH = schema.getEncoding('@core/auth') -const CORE_BATCHES = schema.getEncoding('@core/batches') +const CORE_SESSIONS = schema.getEncoding('@core/sessions') const CORE_HEAD = schema.getEncoding('@core/head') const CORE_TREE_NODE = schema.getEncoding('@core/tree-node') const CORE_DEPENDENCY = schema.getEncoding('@core/dependency') @@ -26,8 +26,8 @@ class CoreTX { this.changes.push([core.auth(this.core.corePointer), encode(CORE_AUTH, auth), null]) } - setBatches (batches) { - this.changes.push([core.batches(this.core.corePointer), encode(CORE_BATCHES, batches), null]) + setSessions (sessions) { + this.changes.push([core.sessions(this.core.corePointer), encode(CORE_SESSIONS, sessions), null]) } 
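
// Usage sketch for the renamed API (assumptions: `storage` is an open
// HypercoreStorage and b4a is required as in the rest of this repo): named
// sessions replace the old "batches" and are persisted under @core/sessions,
// so they can be reopened by name later.
const draft = await storage.createSession('draft', null) // null head: fork at the current head
const tx = draft.write()
tx.putBlock(0, b4a.from('hello'))
await tx.flush()
const again = await storage.resumeSession('draft') // resolves the stored dataPointer, or null
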
setHead (head) { @@ -126,8 +126,8 @@ class CoreRX { return await decode(CORE_AUTH, await this.view.get(this.read, core.auth(this.core.corePointer))) } - async getBatches () { - return await decode(CORE_BATCHES, await this.view.get(this.read, core.batches(this.core.corePointer))) + async getSessions () { + return await decode(CORE_SESSIONS, await this.view.get(this.read, core.sessions(this.core.corePointer))) } async getHead () { diff --git a/spec/hyperschema/index.js b/spec/hyperschema/index.js index ab60d95..dbd36a3 100644 --- a/spec/hyperschema/index.js +++ b/spec/hyperschema/index.js @@ -400,7 +400,7 @@ const encoding13 = { } } -// @core/batches +// @core/sessions const encoding14 = c.array({ preencode (state, m) { c.string.preencode(state, m.name) @@ -480,7 +480,7 @@ function getEncoding (name) { case '@core/auth': return encoding11 case '@core/head': return encoding12 case '@core/hints': return encoding13 - case '@core/batches': return encoding14 + case '@core/sessions': return encoding14 case '@core/dependency': return encoding15 default: throw new Error('Encoder not found ' + name) } diff --git a/spec/hyperschema/schema.json b/spec/hyperschema/schema.json index b4cae6d..4ff6ac9 100644 --- a/spec/hyperschema/schema.json +++ b/spec/hyperschema/schema.json @@ -333,7 +333,7 @@ ] }, { - "name": "batches", + "name": "sessions", "namespace": "core", "compact": true, "flagsPosition": -1, From 9b457488ed5bd11b5d0f1eee971e1d999f92d464 Mon Sep 17 00:00:00 2001 From: Mathias Buus Date: Fri, 10 Jan 2025 10:56:01 +0100 Subject: [PATCH 18/41] remove unused arg --- index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/index.js b/index.js index de1d1a0..b7c0dce 100644 --- a/index.js +++ b/index.js @@ -139,7 +139,7 @@ class HypercoreStorage { return new HypercoreStorage(this.store, this.db.session(), core, this.atomic ? 
this.view : new View(), this.atomic) } - async createSession (name, head, atom) { + async createSession (name, head) { const rx = this.read() const existingSessionsPromise = rx.getSessions() From e60dfae2e569361c64a53231eac10de594ee50eb Mon Sep 17 00:00:00 2001 From: Mathias Buus Date: Fri, 10 Jan 2025 11:08:17 +0100 Subject: [PATCH 19/41] streams take gt ranges now --- index.js | 12 ++++++------ lib/streams.js | 18 ++++++++++-------- test/streams.js | 24 ++++++++++++------------ 3 files changed, 28 insertions(+), 26 deletions(-) diff --git a/index.js b/index.js index b7c0dce..191db7e 100644 --- a/index.js +++ b/index.js @@ -98,16 +98,16 @@ class HypercoreStorage { return this.store.atom() } - createBlockStream (start, end, reverse) { - return createBlockStream(this.core, this.db, this.view, start, end, !!reverse) + createBlockStream (opts) { + return createBlockStream(this.core, this.db, this.view, opts) } - createBitfieldStream (start, end) { - return createBitfieldStream(this.core, this.db, this.view, start, end) + createBitfieldStream (opts) { + return createBitfieldStream(this.core, this.db, this.view, opts) } - createUserDataStream (start, end = null) { - return createUserDataStream(this.core, this.db, this.view, start, end) + createUserDataStream (opts) { + return createUserDataStream(this.core, this.db, this.view, opts) } async resumeSession (name) { diff --git a/lib/streams.js b/lib/streams.js index f2383ff..b72f10f 100644 --- a/lib/streams.js +++ b/lib/streams.js @@ -42,25 +42,27 @@ function createBlockIterator (ptr, db, view, start, end, reverse) { return view.iterator(db, s, e, reverse) } -function createBlockStream (ptr, db, view, start, end, reverse) { - const ite = createBlockIterator(ptr, db, view, start, end, reverse) +function createBlockStream (ptr, db, view, { gt = -1, gte = gt + 1, lte = -1, lt = lte === -1 ? -1 : lte + 1, reverse = false } = {}) { + const ite = createBlockIterator(ptr, db, view, gte, lt, reverse) ite._readableState.map = mapBlock return ite } -function createBitfieldStream (ptr, db, view, start, end) { - const s = core.bitfield(ptr.dataPointer, start, 0) - const e = core.bitfield(ptr.dataPointer, end === -1 ? Infinity : end, 0) +function createBitfieldStream (ptr, db, view, { gt = -1, gte = gt + 1, lte = -1, lt = lte === -1 ? -1 : lte + 1, reverse = false } = {}) { + const s = core.bitfield(ptr.dataPointer, gte, 0) + const e = core.bitfield(ptr.dataPointer, lt === -1 ? Infinity : lt, 0) const ite = view.iterator(db, s, e, false) ite._readableState.map = mapBitfield return ite } -function createUserDataStream (ptr, db, view, start, end) { - const s = core.userData(ptr.dataPointer, start || '') - const e = end === null ? core.userDataEnd(ptr.dataPointer) : core.userData(ptr.dataPointer, end) +function createUserDataStream (ptr, db, view, { gt = null, gte = '', lte = null, lt = null, reverse = false }) { + if (gt !== null || lte !== null) throw new Error('gt and lte not yet supported for user data streams') + + const s = core.userData(ptr.dataPointer, gte) + const e = lt === null ? 
core.userDataEnd(ptr.dataPointer) : core.userData(ptr.dataPointer, lt) const ite = view.iterator(db, s, e, false) ite._readableState.map = mapUserData diff --git a/test/streams.js b/test/streams.js index d6f5468..1bbf520 100644 --- a/test/streams.js +++ b/test/streams.js @@ -15,7 +15,7 @@ test('block stream', async function (t) { await tx.flush() - const blocks = await toArray(core.createBlockStream(0, 10, false)) + const blocks = await toArray(core.createBlockStream({ gte: 0, lt: 10 })) t.alike(blocks, expected) }) @@ -33,7 +33,7 @@ test('reverse block stream', async function (t) { await tx.flush() - const blocks = await toArray(core.createBlockStream(0, 10, true)) + const blocks = await toArray(core.createBlockStream({ gte: 0, lt: 10, reverse: true })) t.alike(blocks, expected.reverse()) }) @@ -73,12 +73,12 @@ test('block stream (atom)', async function (t) { } { - const blocks = await toArray(a.createBlockStream(0, 10, false)) + const blocks = await toArray(a.createBlockStream({ gte: 0, lt: 10 })) t.alike(blocks, expected.sort(cmpBlock)) } { - const blocks = await toArray(a.createBlockStream(0, 10, true)) + const blocks = await toArray(a.createBlockStream({ gte: 0, lt: 10, reverse: true })) t.alike(blocks, expected.sort(cmpBlock).reverse()) } @@ -91,12 +91,12 @@ test('block stream (atom)', async function (t) { expected.sort(cmpBlock).splice(4, 2) { - const blocks = await toArray(a.createBlockStream(0, 10, false)) + const blocks = await toArray(a.createBlockStream({ gte: 0, lt: 10 })) t.alike(blocks, expected.sort(cmpBlock)) } { - const blocks = await toArray(a.createBlockStream(0, 10, true)) + const blocks = await toArray(a.createBlockStream({ gte: 0, lt: 10, reverse: true })) t.alike(blocks, expected.sort(cmpBlock).reverse()) } @@ -116,12 +116,12 @@ test('block stream (atom)', async function (t) { expected.push(tmp) { - const blocks = await toArray(a.createBlockStream(0, 10, false)) + const blocks = await toArray(a.createBlockStream({ gte: 0, lt: 10 })) t.alike(blocks, expected.sort(cmpBlock)) } { - const blocks = await toArray(a.createBlockStream(0, 10, true)) + const blocks = await toArray(a.createBlockStream({ gte: 0, lt: 10, reverse: true })) t.alike(blocks, expected.sort(cmpBlock).reverse()) } @@ -135,12 +135,12 @@ test('block stream (atom)', async function (t) { expected.pop() { - const blocks = await toArray(a.createBlockStream(0, 10, false)) + const blocks = await toArray(a.createBlockStream({ gte: 0, lt: 10 })) t.alike(blocks, expected.sort(cmpBlock)) } { - const blocks = await toArray(a.createBlockStream(0, 10, true)) + const blocks = await toArray(a.createBlockStream({ gte: 0, lt: 10, reverse: true })) t.alike(blocks, expected.sort(cmpBlock).reverse()) } @@ -149,12 +149,12 @@ test('block stream (atom)', async function (t) { await atom.flush() { - const blocks = await toArray(a.createBlockStream(0, 10, false)) + const blocks = await toArray(a.createBlockStream({ gte: 0, lt: 10 })) t.alike(blocks, expected.sort(cmpBlock)) } { - const blocks = await toArray(a.createBlockStream(0, 10, true)) + const blocks = await toArray(a.createBlockStream({ gte: 0, lt: 10, reverse: true })) t.alike(blocks, expected.sort(cmpBlock).reverse()) } }) From 1e1978ceaa2f91a531d42a170ace076cedff798e Mon Sep 17 00:00:00 2001 From: Mathias Buus Date: Fri, 10 Jan 2025 11:21:13 +0100 Subject: [PATCH 20/41] version cores also for migration --- build.js | 5 +++++ index.js | 15 ++++++++++----- spec/hyperschema/index.js | 17 +++++++++-------- spec/hyperschema/schema.json | 10 ++++++++-- 4 files changed, 32 
insertions(+), 15 deletions(-) diff --git a/build.js b/build.js index 3db669b..6ce7607 100644 --- a/build.js +++ b/build.js @@ -39,6 +39,7 @@ corestore.register({ corestore.register({ name: 'alias', + compact: true, fields: [{ name: 'name', type: 'string', @@ -53,6 +54,10 @@ corestore.register({ corestore.register({ name: 'core', fields: [{ + name: 'version', + type: 'uint', + required: true + }, { name: 'corePointer', type: 'uint', required: true diff --git a/index.js b/index.js index 191db7e..ee5f662 100644 --- a/index.js +++ b/index.js @@ -3,6 +3,8 @@ const rrp = require('resolve-reject-promise') const ScopeLock = require('scope-lock') const View = require('./lib/view.js') +const VERSION = 1 + const { CorestoreRX, CorestoreTX, @@ -123,6 +125,7 @@ class HypercoreStorage { if (session === null) return null const core = { + version: this.core.version, corePointer: this.core.corePointer, dataPointer: session.dataPointer, dependencies: [] @@ -165,6 +168,7 @@ class HypercoreStorage { const length = head === null ? 0 : head.length const core = { + version: this.core.version, corePointer: this.core.corePointer, dataPointer: session.dataPointer, dependencies: this._addDependency({ dataPointer: this.core.dataPointer, length }) @@ -183,6 +187,7 @@ class HypercoreStorage { async createAtomicSession (atom, head) { const length = head === null ? 0 : head.length const core = { + version: this.core.version, corePointer: this.core.corePointer, dataPointer: this.core.dataPointer, dependencies: this._addDependency({ dataPointer: this.core.dataPointer, length }) @@ -448,11 +453,11 @@ class CorestoreStorage { return this._resumeFromPointers(EMPTY, core) } - async _resumeFromPointers (view, { corePointer, dataPointer }) { - const core = { corePointer, dataPointer, dependencies: [] } + async _resumeFromPointers (view, { version, corePointer, dataPointer }) { + const core = { version, corePointer, dataPointer, dependencies: [] } while (true) { - const rx = new CoreRX({ dataPointer, corePointer: 0, dependencies: [] }, this.db, view) + const rx = new CoreRX({ version, dataPointer, corePointer: 0, dependencies: [] }, this.db, view) const dependencyPromise = rx.getDependency() rx.tryFlush() const dependency = await dependencyPromise @@ -482,7 +487,7 @@ class CorestoreStorage { const corePointer = head.allocated.cores++ const dataPointer = head.allocated.datas++ - core = { corePointer, dataPointer, alias } + core = { version: VERSION, corePointer, dataPointer, alias } tx.setHead(head) tx.putCore(discoveryKey, core) @@ -525,7 +530,7 @@ module.exports = CorestoreStorage function initStoreHead (seed, defaultDiscoveryKey) { return { - version: 0, + version: VERSION, allocated: { datas: 0, cores: 0 diff --git a/spec/hyperschema/index.js b/spec/hyperschema/index.js index dbd36a3..b344a52 100644 --- a/spec/hyperschema/index.js +++ b/spec/hyperschema/index.js @@ -87,36 +87,37 @@ const encoding2 = { } } -// @corestore/core.alias -const encoding3_2 = c.frame(encoding2) - // @corestore/core const encoding3 = { preencode (state, m) { + c.uint.preencode(state, m.version) c.uint.preencode(state, m.corePointer) c.uint.preencode(state, m.dataPointer) state.end++ // max flag is 1 so always one byte - if (m.alias) encoding3_2.preencode(state, m.alias) + if (m.alias) encoding2.preencode(state, m.alias) }, encode (state, m) { const flags = m.alias ? 
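
// An aside on the record being encoded here: from this patch on every stored
// @corestore/core record leads with a schema `version`, so a store can detect
// cores written by an older format and migrate them individually. A hedged
// sketch of the record shape that _resumeFromPointers rebuilds:
//
//   const core = {
//     version,           // schema version the core was written with
//     corePointer,       // shared core state (auth, head, sessions)
//     dataPointer,       // this session's data namespace
//     dependencies: []   // filled by following @core/dependency records
//   }
//
// The resume loop reads one dependency record per data pointer and hops to
// the parent data namespace until none is found.
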
1 : 0 + c.uint.encode(state, m.version) c.uint.encode(state, m.corePointer) c.uint.encode(state, m.dataPointer) c.uint.encode(state, flags) - if (m.alias) encoding3_2.encode(state, m.alias) + if (m.alias) encoding2.encode(state, m.alias) }, decode (state) { const r0 = c.uint.decode(state) const r1 = c.uint.decode(state) + const r2 = c.uint.decode(state) const flags = c.uint.decode(state) return { - corePointer: r0, - dataPointer: r1, - alias: (flags & 1) !== 0 ? encoding3_2.decode(state) : null + version: r0, + corePointer: r1, + dataPointer: r2, + alias: (flags & 1) !== 0 ? encoding2.decode(state) : null } } } diff --git a/spec/hyperschema/schema.json b/spec/hyperschema/schema.json index 4ff6ac9..2663111 100644 --- a/spec/hyperschema/schema.json +++ b/spec/hyperschema/schema.json @@ -53,7 +53,7 @@ { "name": "alias", "namespace": "corestore", - "compact": false, + "compact": true, "flagsPosition": -1, "fields": [ { @@ -74,8 +74,14 @@ "name": "core", "namespace": "corestore", "compact": false, - "flagsPosition": 2, + "flagsPosition": 3, "fields": [ + { + "name": "version", + "required": true, + "type": "uint", + "version": 1 + }, { "name": "corePointer", "required": true, From b04c6fb2e04ad2e4cc169a8c3a9fa8849087c54f Mon Sep 17 00:00:00 2001 From: HDegroote <75906619+HDegroote@users.noreply.github.com> Date: Fri, 10 Jan 2025 13:44:40 +0100 Subject: [PATCH 21/41] Add hypercore put/get/del and put/get tree-node tests + ci (#49) * Add hypercore put/get/del and put/get tree-node tests * Add CI * Temp run CI on rewrite-integration branch too * Fix promise usage * Run tests on bare too * Use generated test/all --- .github/workflows/ci.yml | 28 ++++++++++++ .gitignore | 1 + package.json | 4 +- test/all.js | 15 +++++++ test/core.js | 97 ++++++++++++++++++++++++++++++++++++++++ 5 files changed, 144 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/ci.yml create mode 100644 test/all.js create mode 100644 test/core.js diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..1b6d828 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,28 @@ +name: ci +on: + push: + branches: + - main + - rewrite-integration # temp + pull_request: + branches: + - main + - rewrite-integration # temp + +jobs: + test: + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + timeout-minutes: 5 + steps: + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 https://github.com/actions/checkout/releases/tag/v4.1.1 + - name: install node + uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 # v3.8.2 https://github.com/actions/setup-node/releases/tag/v3.8.2 + with: + node-version: 20 + - run: npm install + - run: npm test + - run: npm install -g bare-runtime + - run: npm run test:bare diff --git a/.gitignore b/.gitignore index d5f19d8..72f406a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ node_modules package-lock.json +coverage/ diff --git a/package.json b/package.json index 572fc70..6143d26 100644 --- a/package.json +++ b/package.json @@ -8,7 +8,9 @@ "spec/hyperschema/*.js" ], "scripts": { - "test": "standard && brittle test/*.js" + "test": "standard && node test/all.js", + "test:bare": "bare test/all.js", + "test:generate": "brittle -r test/all.js test/*.js" }, "author": "Holepunch Inc.", "license": "Apache-2.0", diff --git a/test/all.js b/test/all.js new file mode 100644 index 0000000..2b4b8f0 --- /dev/null +++ b/test/all.js @@ -0,0 +1,15 @@ +// This runner is 
auto-generated by Brittle + +runTests() + +async function runTests () { + const test = (await import('brittle')).default + + test.pause() + + await import('./basic.js') + await import('./core.js') + await import('./streams.js') + + test.resume() +} diff --git a/test/core.js b/test/core.js new file mode 100644 index 0000000..8585300 --- /dev/null +++ b/test/core.js @@ -0,0 +1,97 @@ +const test = require('brittle') +const b4a = require('b4a') +const { createCore } = require('./helpers') + +test('read and write hypercore blocks', async (t) => { + const core = await createCore(t) + await writeBlocks(core, 2) + + const rx = core.read() + const proms = [rx.getBlock(0), rx.getBlock(1), rx.getBlock(2)] + rx.tryFlush() + const res = await Promise.all(proms) + t.is(b4a.toString(res[0]), 'block0') + t.is(b4a.toString(res[1]), 'block1') + t.is(res[2], null) +}) + +test('read and write hypercore blocks from snapshot', async (t) => { + const core = await createCore(t) + await writeBlocks(core, 2) + + const snap = core.snapshot() + await writeBlocks(core, 2, { start: 2 }) + + { + const rx = snap.read() + const proms = [rx.getBlock(0), rx.getBlock(1), rx.getBlock(2)] + rx.tryFlush() + const res = await Promise.all(proms) + t.is(b4a.toString(res[0]), 'block0') + t.is(b4a.toString(res[1]), 'block1') + t.is(res[2], null) + } + + { + const rx = core.read() + const p = rx.getBlock(2) + rx.tryFlush() + t.is(b4a.toString(await p), 'block2', 'sanity check: does exist in non-snapshot core') + } +}) + +test('delete hypercore block', async (t) => { + const core = await createCore(t) + await writeBlocks(core, 2) + + const tx = core.write() + + tx.deleteBlock(0) + tx.deleteBlock(2) // doesn't exist + await tx.flush() + + const rx = core.read() + const p = Promise.all([rx.getBlock(0), rx.getBlock(1), rx.getBlock(2)]) + rx.tryFlush() + const [res0, res1, res2] = await p + t.is(res0, null) + t.is(b4a.toString(res1), 'block1') + t.is(res2, null) +}) + +test('put and get tree node', async (t) => { + const core = await createCore(t) + + const node1 = { + index: 0, + size: 1, + hash: b4a.from('a'.repeat(64), 'hex') + } + const node2 = { + index: 1, + size: 10, + hash: b4a.from('b'.repeat(64), 'hex') + } + + const tx = core.write() + tx.putTreeNode(node1) + tx.putTreeNode(node2) + await tx.flush() + + const rx = core.read() + const p = Promise.all([rx.getTreeNode(0), rx.getTreeNode(1), rx.getTreeNode(2)]) + rx.tryFlush() + const [res0, res1, res2] = await p + + t.alike(res0, node1) + t.alike(res1, node2) + t.is(res2, null) +}) + +async function writeBlocks (core, amount, { start = 0 } = {}) { + const tx = core.write() + for (let i = start; i < amount + start; i++) { + tx.putBlock(i, `block${i}`) + } + await tx.flush() +} From 0767ddee6ad45bb5717ad00a91c5f783a1ee8678 Mon Sep 17 00:00:00 2001 From: HDegroote <75906619+HDegroote@users.noreply.github.com> Date: Fri, 10 Jan 2025 14:47:14 +0100 Subject: [PATCH 22/41] Add block and tree-node (range) delete tests + a multi-hypercore test (#50) * Add block and tree-node (range) delete tests + a multi-hypercore test * Add teardown to mulit-core test --- test/core.js | 171 ++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 168 insertions(+), 3 deletions(-) diff --git a/test/core.js b/test/core.js index 8585300..15fd458 100644 --- a/test/core.js +++ b/test/core.js @@ -1,6 +1,6 @@ const test = require('brittle') const b4a = require('b4a') -const { createCore } = require('./helpers') +const { createCore, create } = require('./helpers') test('read and write hypercore 
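
// A minimal round trip showing the batching contract these tests rely on: a
// write batch (tx) buffers mutations until flush(), and a read batch (rx)
// queues gets that resolve together once tryFlush() dispatches them. Assuming
// `core` came from the createCore helper:
//
//   const tx = core.write()
//   tx.putBlock(0, 'hello')
//   await tx.flush()
//
//   const rx = core.read()
//   const block = rx.getBlock(0) // a promise; not dispatched yet
//   rx.tryFlush()                // dispatch every queued read
//   console.log(await block)     // <Buffer 68 65 6c 6c 6f>
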
blocks', async (t) => { const core = await createCore(t) @@ -15,6 +15,60 @@ test('read and write hypercore blocks', async (t) => { t.is(res[2], null) }) +test('read and write hypercore blocks across multiple cores', async (t) => { + const storage = await create(t) + const keys0 = { + key: b4a.from('0'.repeat(64), 'hex'), + discoveryKey: b4a.from('a'.repeat(64), 'hex') + } + const keys1 = { + key: b4a.from('1'.repeat(64), 'hex'), + discoveryKey: b4a.from('b'.repeat(64), 'hex') + } + const keys2 = { + key: b4a.from('2'.repeat(64), 'hex'), + discoveryKey: b4a.from('c'.repeat(64), 'hex') + } + + const [core0, core1, core2] = await Promise.all([ + storage.create(keys0), + storage.create(keys1), + storage.create(keys2) + ]) + + await Promise.all([ + writeBlocks(core0, 2, { pre: 'core0-' }), + writeBlocks(core1, 2, { pre: 'core1-' }), + writeBlocks(core2, 2, { pre: 'core2-' }) + ]) + + const rx0 = core0.read() + const rx1 = core1.read() + const rx2 = core2.read() + const p = Promise.all([ + rx0.getBlock(0), + rx0.getBlock(1), + rx1.getBlock(0), + rx1.getBlock(1), + rx2.getBlock(0), + rx2.getBlock(1) + ]) + rx0.tryFlush() + rx1.tryFlush() + rx2.tryFlush() + + const [c0Block0, c0Block1, c1Block0, c1Block1, c2Block0, c2Block1] = await p + t.is(b4a.toString(c0Block0), 'core0-block0') + t.is(b4a.toString(c0Block1), 'core0-block1') + t.is(b4a.toString(c1Block0), 'core1-block0') + t.is(b4a.toString(c1Block1), 'core1-block1') + t.is(b4a.toString(c2Block0), 'core2-block0') + t.is(b4a.toString(c2Block1), 'core2-block1') + + await Promise.all([core0.close(), core1.close(), core2.close()]) + await storage.close() +}) + test('read and write hypercore blocks from snapshot', async (t) => { const core = await createCore(t) await writeBlocks(core, 2) @@ -59,6 +113,30 @@ test('delete hypercore block', async (t) => { t.is(res2, null) }) +test('delete hypercore block range', async (t) => { + const core = await createCore(t) + await writeBlocks(core, 4) + + const tx = core.write() + + tx.deleteBlockRange(1, 3) + await tx.flush() + + const rx = core.read() + const p = Promise.all([ + rx.getBlock(0), + rx.getBlock(1), + rx.getBlock(2), + rx.getBlock(3) + ]) + rx.tryFlush() + const [res0, res1, res2, res3] = await p + t.is(b4a.toString(res0), 'block0') + t.is(res1, null) + t.is(res2, null) + t.is(b4a.toString(res3), 'block3') +}) + test('put and get tree node', async (t) => { const core = await createCore(t) @@ -88,10 +166,97 @@ test('put and get tree node', async (t) => { t.is(res2, null) }) -async function writeBlocks (core, amount, { start = 0 } = {}) { +test('delete tree node', async (t) => { + const core = await createCore(t) + + const node0 = { + index: 0, + size: 1, + hash: b4a.from('a'.repeat(64), 'hex') + } + const node1 = { + index: 1, + size: 10, + hash: b4a.from('b'.repeat(64), 'hex') + } + + { + const tx = core.write() + tx.putTreeNode(node0) + tx.putTreeNode(node1) + await tx.flush() + } + + { + const tx = core.write() + tx.deleteTreeNode(0) + tx.deleteTreeNode(10) // Doesn't exist + await tx.flush() + } + + const rx = core.read() + const p = Promise.all([rx.getTreeNode(0), rx.getTreeNode(1), rx.getTreeNode(2)]) + rx.tryFlush() + const [res0, res1] = await p + + t.is(res0, null) + t.alike(res1, node1) +}) + +test('delete tree node range', async (t) => { + const core = await createCore(t) + + const node0 = { + index: 0, + size: 1, + hash: b4a.from('a'.repeat(64), 'hex') + } + const node1 = { + index: 1, + size: 10, + hash: b4a.from('b'.repeat(64), 'hex') + } + const node2 = { + index: 2, + size: 20, + hash: 
b4a.from('c'.repeat(64), 'hex') + } + const node3 = { + index: 3, + size: 30, + hash: b4a.from('d'.repeat(64), 'hex') + } + + { + const tx = core.write() + tx.putTreeNode(node0) + tx.putTreeNode(node1) + tx.putTreeNode(node2) + tx.putTreeNode(node3) + await tx.flush() + } + + { + const tx = core.write() + tx.deleteTreeNodeRange(1, 3) + await tx.flush() + } + + const rx = core.read() + const p = Promise.all([rx.getTreeNode(0), rx.getTreeNode(1), rx.getTreeNode(2), rx.getTreeNode(3)]) + rx.tryFlush() + const [res0, res1, res2, res3] = await p + + t.alike(res0, node0) + t.is(res1, null) + t.is(res2, null) + t.alike(res3, node3) +}) + +async function writeBlocks (core, amount, { start = 0, pre = '' } = {}) { const tx = core.write() for (let i = start; i < amount + start; i++) { - tx.putBlock(i, `block${i}`) + tx.putBlock(i, `${pre}block${i}`) } await tx.flush() } From db6b56143aca3d46b32607c40f9fcadd2a2cb68a Mon Sep 17 00:00:00 2001 From: Mathias Buus Date: Fri, 10 Jan 2025 15:06:57 +0100 Subject: [PATCH 23/41] support corestore metadata migration --- index.js | 77 ++- migrations/0/index.js | 403 ++++++++++++++ migrations/0/messages.js | 1069 ++++++++++++++++++++++++++++++++++++++ package.json | 4 +- 4 files changed, 1544 insertions(+), 9 deletions(-) create mode 100644 migrations/0/index.js create mode 100644 migrations/0/messages.js diff --git a/index.js b/index.js index ee5f662..e5ccd0a 100644 --- a/index.js +++ b/index.js @@ -241,11 +241,14 @@ class HypercoreStorage { class CorestoreStorage { constructor (db) { + this.path = typeof db === 'string' ? db : db.path this.db = typeof db === 'string' ? new RocksDB(db) : db this.view = null this.enters = 0 this.lock = new ScopeLock() this.flushing = null + this.version = 0 + this.migrating = null } get opened () { @@ -256,6 +259,10 @@ class CorestoreStorage { return this.db.closed } + async ready () { + if (this.version === 0) await this._migrate() + } + static isCoreStorage (db) { return isCorestoreStorage(db) } @@ -272,6 +279,39 @@ class CorestoreStorage { } } + async _migrate () { + const view = await this._enter() + + try { + if (this.version === VERSION) return + + const rx = new CorestoreRX(this.db, view) + const headPromise = rx.getHead() + + rx.tryFlush() + const head = await headPromise + + const version = head === null ? 
0 : head.version
+
+      if (version === VERSION) return
+
+      const target = { version: VERSION, dryRun: false }
+
+      switch (version) {
+        case 0: {
+          await require('./migrations/0').store(this, target)
+          break
+        }
+        default: {
+          throw new Error('Unsupported version: ' + version + ' - you should probably upgrade your dependencies')
+        }
+      }
+
+      this.version = VERSION
+    } finally {
+      await this._exit()
+    }
+  }
+
   async _enter () {
     this.enters++
     await this.lock.lock()
@@ -310,13 +350,7 @@ class CorestoreStorage {
     const tx = new CorestoreTX(view)
 
     try {
-      const rx = new CorestoreRX(this.db, view)
-
-      const headPromise = rx.getHead()
-      rx.tryFlush()
-
-      let head = await headPromise
-      if (head === null) head = initStoreHead(null, null)
+      const head = await this._getHead(view)
 
       dataPointer = head.allocated.datas++
 
@@ -329,6 +363,17 @@ class CorestoreStorage {
     return dataPointer
   }
 
+  // exposed here so migrations can easily access the head in an init state
+  async _getHead (view) {
+    const rx = new CorestoreRX(this.db, view)
+    const headPromise = rx.getHead()
+    rx.tryFlush()
+
+    let head = await headPromise
+    if (head === null) head = initStoreHead(null, null)
+    return head
+  }
+
   atom () {
     return new Atom(this.db)
   }
@@ -340,6 +385,8 @@ class CorestoreStorage {
   }
 
   async clear () {
+    if (this.version === 0) await this._migrate()
+
     const view = await this._enter()
     const tx = new CorestoreTX(view)
@@ -365,6 +412,8 @@ class CorestoreStorage {
   }
 
   async getSeed () {
+    if (this.version === 0) await this._migrate()
+
     const rx = new CorestoreRX(this.db, EMPTY)
 
     const headPromise = rx.getHead()
@@ -375,6 +424,8 @@ class CorestoreStorage {
   }
 
   async setSeed (seed, { overwrite = true } = {}) {
+    if (this.version === 0) await this._migrate()
+
     const view = await this._enter()
     const tx = new CorestoreTX(view)
@@ -397,6 +448,8 @@ class CorestoreStorage {
   }
 
   async getDefaultDiscoveryKey () {
+    if (this.version === 0) await this._migrate()
+
     const rx = new CorestoreRX(this.db, EMPTY)
 
     const headPromise = rx.getHead()
@@ -407,6 +460,8 @@ class CorestoreStorage {
   }
 
   async setDefaultDiscoveryKey (discoveryKey, { overwrite = true } = {}) {
+    if (this.version === 0) await this._migrate()
+
     const view = await this._enter()
     const tx = new CorestoreTX(view)
@@ -429,6 +484,8 @@ class CorestoreStorage {
   }
 
   async has (discoveryKey) {
+    if (this.version === 0) await this._migrate()
+
     const rx = new CorestoreRX(this.db, EMPTY)
 
     const promise = rx.getCore(discoveryKey)
@@ -438,6 +495,8 @@ class CorestoreStorage {
   }
 
   async resume (discoveryKey) {
+    if (this.version === 0) await this._migrate()
+
     if (!discoveryKey) {
       discoveryKey = await this.getDefaultDiscoveryKey()
       if (!discoveryKey) return null
@@ -516,6 +575,8 @@ class CorestoreStorage {
   }
 
   async create (data) {
+    if (this.version === 0) await this._migrate()
+
     const view = await this._enter()
 
     try {
@@ -530,7 +591,7 @@ module.exports = CorestoreStorage
 
 function initStoreHead (seed, defaultDiscoveryKey) {
   return {
-    version: VERSION,
+    version: 0, // cause we wanna run the migration
     allocated: {
       datas: 0,
       cores: 0
diff --git a/migrations/0/index.js b/migrations/0/index.js
new file mode 100644
index 0000000..025b97e
--- /dev/null
+++ b/migrations/0/index.js
@@ -0,0 +1,403 @@
+const fs = require('fs')
+const path = require('path')
+const { Readable } = require('streamx')
+const b4a = require('b4a')
+const flat = require('flat-tree')
+const crypto = require('hypercore-crypto')
+const c = require('compact-encoding')
+const m = require('./messages.js')
+const View = require('../../lib/view.js')
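
// A note on the gating pattern added throughout CorestoreStorage above: every
// public entry point now starts with `if (this.version === 0) await
// this._migrate()`, so the on-disk migration runs lazily on first use, and
// _migrate re-checks the stored head version under the lock so concurrent
// callers migrate at most once. Hedged illustration:
//
//   async getSeed () {
//     if (this.version === 0) await this._migrate() // no-op once version is current
//     // ...read the head as before...
//   }
//
+const {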
CorestoreTX, CoreTX } = require('../../lib/tx.js') + +class CoreListStream extends Readable { + constructor (storage) { + super() + + this.storage = storage + this.stack = [] + } + + async _open (cb) { + for (const a of await readdir(path.join(this.storage, 'cores'))) { + for (const b of await readdir(path.join(this.storage, 'cores', a))) { + for (const dkey of await readdir(path.join(this.storage, 'cores', a, b))) { + this.stack.push(path.join(this.storage, 'cores', a, b, dkey)) + } + } + } + + cb(null) + } + + _read (cb) { + const next = this.stack.pop() + if (!next) { + this.push(null) + cb(null) + return + } + + const oplog = path.join(next, 'oplog') + fs.readFile(oplog, (err, buffer) => { + if (err) return this._read(cb) // next + + const state = { start: 0, end: buffer.byteLength, buffer } + const headers = [1, 0] + + const h1 = decodeOplogHeader(state) + state.start = 4096 + + const h2 = decodeOplogHeader(state) + state.start = 4096 * 2 + + if (!h1 && !h2) return this._read(cb) + + if (h1 && !h2) { + headers[0] = h1.header + headers[1] = h1.header + } else if (!h1 && h2) { + headers[0] = (h2.header + 1) & 1 + headers[1] = h2.header + } else { + headers[0] = h1.header + headers[1] = h2.header + } + + const header = (headers[0] + headers[1]) & 1 + const result = { path: next, header: null, entries: [] } + const decoded = [] + + result.header = header ? h2.message : h1.message + + if (result.header.external) { + throw new Error('External headers not migrate-able atm') + } + + while (true) { + const entry = decodeOplogEntry(state) + if (!entry) break + if (entry.header !== header) break + + decoded.push(entry) + } + + while (decoded.length > 0 && decoded[decoded.length - 1].partial) decoded.pop() + + for (const e of decoded) { + result.entries.push(e.message) + } + + this.push(result) + + cb(null) + }) + } +} + +function decodeOplogHeader (state) { + c.uint32.decode(state) // cksum, ignore for now + + const l = c.uint32.decode(state) + const length = l >> 2 + const headerBit = l & 1 + const partialBit = l & 2 + + if (state.end - state.start < length) return null + + const end = state.start + length + const result = { header: headerBit, partial: partialBit !== 0, byteLength: length + 8, message: null } + + try { + result.message = m.oplog.header.decode({ start: state.start, end, buffer: state.buffer }) + } catch { + return null + } + + state.start = end + return result +} + +function decodeOplogEntry (state) { + if (state.end - state.start < 8) return null + + c.uint32.decode(state) // cksum, ignore for now + + const l = c.uint32.decode(state) + const length = l >>> 2 + const headerBit = l & 1 + const partialBit = l & 2 + + if (state.end - state.start < length) return null + + const end = state.start + length + + const result = { header: headerBit, partial: partialBit !== 0, byteLength: length + 8, message: null } + + try { + result.message = m.oplog.entry.decode({ start: state.start, end, buffer: state.buffer }) + } catch { + return null + } + + state.start = end + + return result +} + +module.exports = { store, core } + +async function store (storage, { version, dryRun = true }) { + const stream = new CoreListStream(storage.path) + const view = new View() + + const tx = new CorestoreTX(view) + const head = await storage._getHead(view) + + const primaryKey = await readFile(path.join(storage.path, 'primary-key')) + + if (!head.seed) head.seed = primaryKey + + for await (const data of stream) { + const key = data.header.key + const discoveryKey = crypto.discoveryKey(data.header.key) + 
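
// An aside on the recovery logic in _read above: a legacy oplog keeps two
// alternating headers at byte offsets 0 and 4096, each framed by a checksum
// and a length word whose two low bits are a header bit and a partial-write
// bit. The active generation is picked from whichever headers survived, and
// only entries whose header bit matches it are replayed (trailing partial
// entries are dropped). Restated as a small sketch:
//
//   // h1, h2: the decoded headers; either may be null after a torn write
//   if (h1 && !h2) headers = [h1.header, h1.header]
//   else if (!h1 && h2) headers = [(h2.header + 1) & 1, h2.header]
//   else headers = [h1.header, h2.header]
//
//   const active = (headers[0] + headers[1]) & 1
//   // ...decode entries while entry.header === active...
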
const files = getFiles(data.path) + + if (head.defaultDiscoveryKey === null) head.defaultDiscoveryKey = discoveryKey + + const core = { + version: 0, // need later migration + corePointer: head.allocated.cores++, + dataPointer: head.allocated.datas++, + alias: null + } + + const ptr = { version: 0, corePointer: core.corePointer, dataPointer: core.dataPointer, dependencies: [] } + const ctx = new CoreTX(ptr, storage.db, view, []) + const userData = new Map() + const treeNodes = new Map() + + const auth = { + key, + discoveryKey, + manifest: data.header.manifest, + keyPair: data.header.keyPair, + encryptionKey: null + } + + const blocks = [] + const tree = { + length: 0, + fork: 0, + rootHash: null, + signature: null + } + + let contiguousLength = 0 + + if (data.header.tree && data.header.tree.length) { + tree.length = data.header.tree.length + tree.fork = data.header.tree.fork + tree.rootHash = data.header.tree.rootHash + tree.signature = data.header.tree.signature + } + + if (data.header.hints) { + contiguousLength = data.header.hints.contiguousLength + } + + for (const { key, value } of data.header.userData) { + userData.set(key, value) + } + + for (const e of data.entries) { + if (e.userData) userData.set(e.userData.key, e.userData.value) + + if (e.treeNodes) { + for (const node of e.treeNodes) { + treeNodes.set(node.index, node) + ctx.putTreeNode(node) + } + } + + if (e.treeUpgrade) { + if (e.treeUpgrade.ancestors !== tree.length) { + throw new Error('Unflushed truncations not migrate-able atm') + } + + tree.length = e.treeUpgrade.length + tree.fork = e.treeUpgrade.fork + tree.rootHash = null + tree.signature = e.treeUpgrade.signature + } + + if (e.bitfield) { + if (e.bitfield.drop) { + throw new Error('Unflushed truncations not migrate-able atm') + } + + for (let i = e.bitfield.start; i < e.bitfield.start + e.bitfield.length; i++) { + blocks.push(i) + } + } + } + + if (userData.has('corestore/name') && userData.has('corestore/namespace')) { + core.alias = { + name: b4a.toString(userData.get('corestore/name')), + namespace: userData.get('corestore/namespace') + } + userData.delete('corestore/name') + userData.delete('corestore/namespace') + } + + for (const [key, value] of userData) { + ctx.putUserData(key, value) + } + + ctx.setAuth(auth) + + const getTreeNode = (index) => (treeNodes.get(index) || getTreeNodeFromFile(files.tree, index)) + const roots = tree.rootHash === null || blocks.length > 0 ? 
await getRoots(tree.length, getTreeNode) : null
+
+    if (tree.length) {
+      if (tree.rootHash === null) tree.rootHash = crypto.tree(roots)
+      ctx.setHead(tree)
+    }
+
+    blocks.sort((a, b) => a - b)
+
+    for (const index of blocks) {
+      if (index === contiguousLength) contiguousLength++
+      const blk = await getBlockFromFile(files.data, index, tree.length, roots, getTreeNode)
+      ctx.putBlock(index, blk)
+    }
+
+    if (contiguousLength > 0) {
+      ctx.setHints({ contiguousLength })
+    }
+
+    tx.putCore(discoveryKey, core)
+    if (core.alias) tx.putCoreByAlias(core.alias, discoveryKey)
+
+    await ctx.flush()
+  }
+
+  head.version = version
+  tx.setHead(head)
+  tx.apply()
+
+  await View.flush(view.changes, storage.db)
+}
+
+function getFiles (dir) {
+  return {
+    oplog: path.join(dir, 'oplog'),
+    data: path.join(dir, 'data'),
+    tree: path.join(dir, 'tree'),
+    bitfield: path.join(dir, 'bitfield')
+  }
+}
+
+async function core () {
+
+}
+
+async function getRoots (length, getTreeNode) {
+  const all = []
+  for (const index of flat.fullRoots(2 * length)) {
+    all.push(await getTreeNode(index))
+  }
+  return all
+}
+
+async function getBlockFromFile (file, index, length, roots, getTreeNode) {
+  const size = (await getTreeNode(2 * index)).size
+  const offset = await getByteOffset(2 * index, length, roots, getTreeNode)
+
+  return new Promise(function (resolve) {
+    readAll(file, size, offset, function (err, buf) {
+      if (err) return resolve(null)
+      resolve(buf)
+    })
+  })
+}
+
+async function getByteOffset (index, length, roots, getTreeNode) {
+  if (index === 0) return 0
+  if (index === length) return roots.map(r => r.size).reduce((a, b) => a + b)
+  if ((index & 1) === 1) index = flat.leftSpan(index)
+
+  let head = 0
+  let offset = 0
+
+  for (const node of roots) { // all async ticks happen once we find the root so safe
+    head += 2 * ((node.index - head) + 1)
+
+    if (index >= head) {
+      offset += node.size
+      continue
+    }
+
+    const ite = flat.iterator(node.index)
+
+    while (ite.index !== index) {
+      if (index < ite.index) {
+        ite.leftChild()
+      } else {
+        offset += (await getTreeNode(ite.leftChild())).size
+        ite.sibling()
+      }
+    }
+
+    return offset
+  }
+
+  throw new Error('Failed to find offset')
+}
+
+async function getTreeNodeFromFile (file, index) {
+  return new Promise(function (resolve) {
+    readAll(file, 40, index * 40, function (err, buf) {
+      if (err) return resolve(null)
+      resolve({ index, size: c.decode(c.uint64, buf), hash: buf.subarray(8) })
+    })
+  })
+}
+
+function readAll (filename, length, pos, cb) {
+  const buf = b4a.alloc(length)
+
+  fs.open(filename, 'r', function (err, fd) {
+    if (err) return cb(err)
+
+    let offset = 0
+
+    fs.read(fd, buf, offset, buf.byteLength, pos, function loop (err, read) {
+      if (err) return done(err)
+      if (read === 0) return done(new Error('Partial read'))
+      offset += read
+      if (offset === buf.byteLength) return done(null, buf)
+      fs.read(fd, buf, offset, buf.byteLength - offset, pos + offset, loop)
+    })
+
+    function done (err, value) {
+      fs.close(fd, () => cb(err, value))
+    }
+  })
+}
+
+async function readdir (dir) {
+  try {
+    return fs.promises.readdir(dir)
+  } catch {
+    return []
+  }
+}
+
+async function readFile (file) {
+  try {
+    return fs.promises.readFile(file)
+  } catch {
+    return []
+  }
+}
diff --git a/migrations/0/messages.js b/migrations/0/messages.js
new file mode 100644
index 0000000..4e1798c
--- /dev/null
+++ b/migrations/0/messages.js
@@ -0,0 +1,1069 @@
+// needed here for compat, copied from old hypercore, do not change this
+
+const c = require('compact-encoding')
+const b4a =
require('b4a') + +const EMPTY = b4a.alloc(0) +const DEFAULT_NAMESPACE = b4a.from('4144eea531e483d54e0c14f4ca68e0644f355343ff6fcb0f005200e12cd747cb', 'hex') + +const hashes = { + preencode (state, m) { + state.end++ // small uint + }, + encode (state, m) { + if (m === 'blake2b') { + c.uint.encode(state, 0) + return + } + + throw new Error('Unknown hash: ' + m) + }, + decode (state) { + const n = c.uint.decode(state) + if (n === 0) return 'blake2b' + throw new Error('Unknown hash id: ' + n) + } +} + +const signatures = { + preencode (state, m) { + state.end++ // small uint + }, + encode (state, m) { + if (m === 'ed25519') { + c.uint.encode(state, 0) + return + } + + throw new Error('Unknown signature: ' + m) + }, + decode (state) { + const n = c.uint.decode(state) + if (n === 0) return 'ed25519' + throw new Error('Unknown signature id: ' + n) + } +} + +const signer = { + preencode (state, m) { + signatures.preencode(state, m.signature) + c.fixed32.preencode(state, m.namespace) + c.fixed32.preencode(state, m.publicKey) + }, + encode (state, m) { + signatures.encode(state, m.signature) + c.fixed32.encode(state, m.namespace) + c.fixed32.encode(state, m.publicKey) + }, + decode (state) { + return { + signature: signatures.decode(state), + namespace: c.fixed32.decode(state), + publicKey: c.fixed32.decode(state) + } + } +} + +const signerArray = c.array(signer) + +const prologue = { + preencode (state, p) { + c.fixed32.preencode(state, p.hash) + c.uint.preencode(state, p.length) + }, + encode (state, p) { + c.fixed32.encode(state, p.hash) + c.uint.encode(state, p.length) + }, + decode (state) { + return { + hash: c.fixed32.decode(state), + length: c.uint.decode(state) + } + } +} + +const manifestv0 = { + preencode (state, m) { + hashes.preencode(state, m.hash) + state.end++ // type + + if (m.prologue && m.signers.length === 0) { + c.fixed32.preencode(state, m.prologue.hash) + return + } + + if (m.quorum === 1 && m.signers.length === 1 && !m.allowPatch) { + signer.preencode(state, m.signers[0]) + } else { + state.end++ // flags + c.uint.preencode(state, m.quorum) + signerArray.preencode(state, m.signers) + } + }, + encode (state, m) { + hashes.encode(state, m.hash) + + if (m.prologue && m.signers.length === 0) { + c.uint.encode(state, 0) + c.fixed32.encode(state, m.prologue.hash) + return + } + + if (m.quorum === 1 && m.signers.length === 1 && !m.allowPatch) { + c.uint.encode(state, 1) + signer.encode(state, m.signers[0]) + } else { + c.uint.encode(state, 2) + c.uint.encode(state, m.allowPatch ? 
1 : 0) + c.uint.encode(state, m.quorum) + signerArray.encode(state, m.signers) + } + }, + decode (state) { + const hash = hashes.decode(state) + const type = c.uint.decode(state) + + if (type > 2) throw new Error('Unknown type: ' + type) + + if (type === 0) { + return { + version: 0, + hash, + allowPatch: false, + quorum: 0, + signers: [], + prologue: { + hash: c.fixed32.decode(state), + length: 0 + } + } + } + + if (type === 1) { + return { + version: 0, + hash, + allowPatch: false, + quorum: 1, + signers: [signer.decode(state)], + prologue: null + } + } + + const flags = c.uint.decode(state) + + return { + version: 0, + hash, + allowPatch: (flags & 1) !== 0, + quorum: c.uint.decode(state), + signers: signerArray.decode(state), + prologue: null + } + } +} + +const manifest = exports.manifest = { + preencode (state, m) { + state.end++ // version + if (m.version === 0) return manifestv0.preencode(state, m) + + state.end++ // flags + hashes.preencode(state, m.hash) + + c.uint.preencode(state, m.quorum) + signerArray.preencode(state, m.signers) + if (m.prologue) prologue.preencode(state, m.prologue) + }, + encode (state, m) { + c.uint.encode(state, m.version) + if (m.version === 0) return manifestv0.encode(state, m) + + c.uint.encode(state, (m.allowPatch ? 1 : 0) | (m.prologue ? 2 : 0)) + hashes.encode(state, m.hash) + + c.uint.encode(state, m.quorum) + signerArray.encode(state, m.signers) + if (m.prologue) prologue.encode(state, m.prologue) + }, + decode (state) { + const v = c.uint.decode(state) + if (v === 0) return manifestv0.decode(state) + if (v !== 1) throw new Error('Unknown version: ' + v) + + const flags = c.uint.decode(state) + const hash = hashes.decode(state) + const quorum = c.uint.decode(state) + const signers = signerArray.decode(state) + + return { + version: 1, + hash, + allowPatch: (flags & 1) !== 0, + quorum, + signers, + prologue: (flags & 2) === 0 ? null : prologue.decode(state) + } + } +} + +const node = { + preencode (state, n) { + c.uint.preencode(state, n.index) + c.uint.preencode(state, n.size) + c.fixed32.preencode(state, n.hash) + }, + encode (state, n) { + c.uint.encode(state, n.index) + c.uint.encode(state, n.size) + c.fixed32.encode(state, n.hash) + }, + decode (state) { + return { + index: c.uint.decode(state), + size: c.uint.decode(state), + hash: c.fixed32.decode(state) + } + } +} + +const nodeArray = c.array(node) + +const wire = exports.wire = {} + +wire.handshake = { + preencode (state, m) { + c.uint.preencode(state, 1) + c.fixed32.preencode(state, m.capability) + }, + encode (state, m) { + c.uint.encode(state, m.seeks ? 
1 : 0) + c.fixed32.encode(state, m.capability) + }, + decode (state) { + const flags = c.uint.decode(state) + return { + seeks: (flags & 1) !== 0, + capability: c.fixed32.decode(state) + } + } +} + +const requestBlock = { + preencode (state, b) { + c.uint.preencode(state, b.index) + c.uint.preencode(state, b.nodes) + }, + encode (state, b) { + c.uint.encode(state, b.index) + c.uint.encode(state, b.nodes) + }, + decode (state) { + return { + index: c.uint.decode(state), + nodes: c.uint.decode(state) + } + } +} + +const requestSeek = { + preencode (state, s) { + c.uint.preencode(state, s.bytes) + c.uint.preencode(state, s.padding) + }, + encode (state, s) { + c.uint.encode(state, s.bytes) + c.uint.encode(state, s.padding) + }, + decode (state) { + return { + bytes: c.uint.decode(state), + padding: c.uint.decode(state) + } + } +} + +const requestUpgrade = { + preencode (state, u) { + c.uint.preencode(state, u.start) + c.uint.preencode(state, u.length) + }, + encode (state, u) { + c.uint.encode(state, u.start) + c.uint.encode(state, u.length) + }, + decode (state) { + return { + start: c.uint.decode(state), + length: c.uint.decode(state) + } + } +} + +wire.request = { + preencode (state, m) { + state.end++ // flags + c.uint.preencode(state, m.id) + c.uint.preencode(state, m.fork) + + if (m.block) requestBlock.preencode(state, m.block) + if (m.hash) requestBlock.preencode(state, m.hash) + if (m.seek) requestSeek.preencode(state, m.seek) + if (m.upgrade) requestUpgrade.preencode(state, m.upgrade) + if (m.priority) c.uint.preencode(state, m.priority) + }, + encode (state, m) { + const flags = (m.block ? 1 : 0) | (m.hash ? 2 : 0) | (m.seek ? 4 : 0) | (m.upgrade ? 8 : 0) | (m.manifest ? 16 : 0) | (m.priority ? 32 : 0) + + c.uint.encode(state, flags) + c.uint.encode(state, m.id) + c.uint.encode(state, m.fork) + + if (m.block) requestBlock.encode(state, m.block) + if (m.hash) requestBlock.encode(state, m.hash) + if (m.seek) requestSeek.encode(state, m.seek) + if (m.upgrade) requestUpgrade.encode(state, m.upgrade) + if (m.priority) c.uint.encode(state, m.priority) + }, + decode (state) { + const flags = c.uint.decode(state) + + return { + id: c.uint.decode(state), + fork: c.uint.decode(state), + block: flags & 1 ? requestBlock.decode(state) : null, + hash: flags & 2 ? requestBlock.decode(state) : null, + seek: flags & 4 ? requestSeek.decode(state) : null, + upgrade: flags & 8 ? requestUpgrade.decode(state) : null, + manifest: (flags & 16) !== 0, + priority: flags & 32 ? 
c.uint.decode(state) : 0 + } + } +} + +wire.cancel = { + preencode (state, m) { + c.uint.preencode(state, m.request) + }, + encode (state, m) { + c.uint.encode(state, m.request) + }, + decode (state, m) { + return { + request: c.uint.decode(state) + } + } +} + +const dataUpgrade = { + preencode (state, u) { + c.uint.preencode(state, u.start) + c.uint.preencode(state, u.length) + nodeArray.preencode(state, u.nodes) + nodeArray.preencode(state, u.additionalNodes) + c.buffer.preencode(state, u.signature) + }, + encode (state, u) { + c.uint.encode(state, u.start) + c.uint.encode(state, u.length) + nodeArray.encode(state, u.nodes) + nodeArray.encode(state, u.additionalNodes) + c.buffer.encode(state, u.signature) + }, + decode (state) { + return { + start: c.uint.decode(state), + length: c.uint.decode(state), + nodes: nodeArray.decode(state), + additionalNodes: nodeArray.decode(state), + signature: c.buffer.decode(state) + } + } +} + +const dataSeek = { + preencode (state, s) { + c.uint.preencode(state, s.bytes) + nodeArray.preencode(state, s.nodes) + }, + encode (state, s) { + c.uint.encode(state, s.bytes) + nodeArray.encode(state, s.nodes) + }, + decode (state) { + return { + bytes: c.uint.decode(state), + nodes: nodeArray.decode(state) + } + } +} + +const dataBlock = { + preencode (state, b) { + c.uint.preencode(state, b.index) + c.buffer.preencode(state, b.value) + nodeArray.preencode(state, b.nodes) + }, + encode (state, b) { + c.uint.encode(state, b.index) + c.buffer.encode(state, b.value) + nodeArray.encode(state, b.nodes) + }, + decode (state) { + return { + index: c.uint.decode(state), + value: c.buffer.decode(state) || EMPTY, + nodes: nodeArray.decode(state) + } + } +} + +const dataHash = { + preencode (state, b) { + c.uint.preencode(state, b.index) + nodeArray.preencode(state, b.nodes) + }, + encode (state, b) { + c.uint.encode(state, b.index) + nodeArray.encode(state, b.nodes) + }, + decode (state) { + return { + index: c.uint.decode(state), + nodes: nodeArray.decode(state) + } + } +} + +wire.data = { + preencode (state, m) { + state.end++ // flags + c.uint.preencode(state, m.request) + c.uint.preencode(state, m.fork) + + if (m.block) dataBlock.preencode(state, m.block) + if (m.hash) dataHash.preencode(state, m.hash) + if (m.seek) dataSeek.preencode(state, m.seek) + if (m.upgrade) dataUpgrade.preencode(state, m.upgrade) + if (m.manifest) manifest.preencode(state, m.manifest) + }, + encode (state, m) { + const flags = (m.block ? 1 : 0) | (m.hash ? 2 : 0) | (m.seek ? 4 : 0) | (m.upgrade ? 8 : 0) | (m.manifest ? 16 : 0) + + c.uint.encode(state, flags) + c.uint.encode(state, m.request) + c.uint.encode(state, m.fork) + + if (m.block) dataBlock.encode(state, m.block) + if (m.hash) dataHash.encode(state, m.hash) + if (m.seek) dataSeek.encode(state, m.seek) + if (m.upgrade) dataUpgrade.encode(state, m.upgrade) + if (m.manifest) manifest.encode(state, m.manifest) + }, + decode (state) { + const flags = c.uint.decode(state) + + return { + request: c.uint.decode(state), + fork: c.uint.decode(state), + block: flags & 1 ? dataBlock.decode(state) : null, + hash: flags & 2 ? dataHash.decode(state) : null, + seek: flags & 4 ? dataSeek.decode(state) : null, + upgrade: flags & 8 ? dataUpgrade.decode(state) : null, + manifest: flags & 16 ? 
manifest.decode(state) : null + } + } +} + +wire.noData = { + preencode (state, m) { + c.uint.preencode(state, m.request) + }, + encode (state, m) { + c.uint.encode(state, m.request) + }, + decode (state, m) { + return { + request: c.uint.decode(state) + } + } +} + +wire.want = { + preencode (state, m) { + c.uint.preencode(state, m.start) + c.uint.preencode(state, m.length) + }, + encode (state, m) { + c.uint.encode(state, m.start) + c.uint.encode(state, m.length) + }, + decode (state) { + return { + start: c.uint.decode(state), + length: c.uint.decode(state) + } + } +} + +wire.unwant = { + preencode (state, m) { + c.uint.preencode(state, m.start) + c.uint.preencode(state, m.length) + }, + encode (state, m) { + c.uint.encode(state, m.start) + c.uint.encode(state, m.length) + }, + decode (state, m) { + return { + start: c.uint.decode(state), + length: c.uint.decode(state) + } + } +} + +wire.range = { + preencode (state, m) { + state.end++ // flags + c.uint.preencode(state, m.start) + if (m.length !== 1) c.uint.preencode(state, m.length) + }, + encode (state, m) { + c.uint.encode(state, (m.drop ? 1 : 0) | (m.length === 1 ? 2 : 0)) + c.uint.encode(state, m.start) + if (m.length !== 1) c.uint.encode(state, m.length) + }, + decode (state) { + const flags = c.uint.decode(state) + + return { + drop: (flags & 1) !== 0, + start: c.uint.decode(state), + length: (flags & 2) !== 0 ? 1 : c.uint.decode(state) + } + } +} + +wire.bitfield = { + preencode (state, m) { + c.uint.preencode(state, m.start) + c.uint32array.preencode(state, m.bitfield) + }, + encode (state, m) { + c.uint.encode(state, m.start) + c.uint32array.encode(state, m.bitfield) + }, + decode (state, m) { + return { + start: c.uint.decode(state), + bitfield: c.uint32array.decode(state) + } + } +} + +wire.sync = { + preencode (state, m) { + state.end++ // flags + c.uint.preencode(state, m.fork) + c.uint.preencode(state, m.length) + c.uint.preencode(state, m.remoteLength) + }, + encode (state, m) { + c.uint.encode(state, (m.canUpgrade ? 1 : 0) | (m.uploading ? 2 : 0) | (m.downloading ? 4 : 0) | (m.hasManifest ? 
8 : 0)) + c.uint.encode(state, m.fork) + c.uint.encode(state, m.length) + c.uint.encode(state, m.remoteLength) + }, + decode (state) { + const flags = c.uint.decode(state) + + return { + fork: c.uint.decode(state), + length: c.uint.decode(state), + remoteLength: c.uint.decode(state), + canUpgrade: (flags & 1) !== 0, + uploading: (flags & 2) !== 0, + downloading: (flags & 4) !== 0, + hasManifest: (flags & 8) !== 0 + } + } +} + +wire.reorgHint = { + preencode (state, m) { + c.uint.preencode(state, m.from) + c.uint.preencode(state, m.to) + c.uint.preencode(state, m.ancestors) + }, + encode (state, m) { + c.uint.encode(state, m.from) + c.uint.encode(state, m.to) + c.uint.encode(state, m.ancestors) + }, + decode (state) { + return { + from: c.uint.encode(state), + to: c.uint.encode(state), + ancestors: c.uint.encode(state) + } + } +} + +wire.extension = { + preencode (state, m) { + c.string.preencode(state, m.name) + c.raw.preencode(state, m.message) + }, + encode (state, m) { + c.string.encode(state, m.name) + c.raw.encode(state, m.message) + }, + decode (state) { + return { + name: c.string.decode(state), + message: c.raw.decode(state) + } + } +} + +const keyValue = { + preencode (state, p) { + c.string.preencode(state, p.key) + c.buffer.preencode(state, p.value) + }, + encode (state, p) { + c.string.encode(state, p.key) + c.buffer.encode(state, p.value) + }, + decode (state) { + return { + key: c.string.decode(state), + value: c.buffer.decode(state) + } + } +} + +const treeUpgrade = { + preencode (state, u) { + c.uint.preencode(state, u.fork) + c.uint.preencode(state, u.ancestors) + c.uint.preencode(state, u.length) + c.buffer.preencode(state, u.signature) + }, + encode (state, u) { + c.uint.encode(state, u.fork) + c.uint.encode(state, u.ancestors) + c.uint.encode(state, u.length) + c.buffer.encode(state, u.signature) + }, + decode (state) { + return { + fork: c.uint.decode(state), + ancestors: c.uint.decode(state), + length: c.uint.decode(state), + signature: c.buffer.decode(state) + } + } +} + +const bitfieldUpdate = { // TODO: can maybe be folded into a HAVE later on with the most recent spec + preencode (state, b) { + state.end++ // flags + c.uint.preencode(state, b.start) + c.uint.preencode(state, b.length) + }, + encode (state, b) { + state.buffer[state.start++] = b.drop ? 1 : 0 + c.uint.encode(state, b.start) + c.uint.encode(state, b.length) + }, + decode (state) { + const flags = c.uint.decode(state) + return { + drop: (flags & 1) !== 0, + start: c.uint.decode(state), + length: c.uint.decode(state) + } + } +} + +const oplog = exports.oplog = {} + +oplog.entry = { + preencode (state, m) { + state.end++ // flags + if (m.userData) keyValue.preencode(state, m.userData) + if (m.treeNodes) nodeArray.preencode(state, m.treeNodes) + if (m.treeUpgrade) treeUpgrade.preencode(state, m.treeUpgrade) + if (m.bitfield) bitfieldUpdate.preencode(state, m.bitfield) + }, + encode (state, m) { + const s = state.start++ + let flags = 0 + + if (m.userData) { + flags |= 1 + keyValue.encode(state, m.userData) + } + if (m.treeNodes) { + flags |= 2 + nodeArray.encode(state, m.treeNodes) + } + if (m.treeUpgrade) { + flags |= 4 + treeUpgrade.encode(state, m.treeUpgrade) + } + if (m.bitfield) { + flags |= 8 + bitfieldUpdate.encode(state, m.bitfield) + } + + state.buffer[s] = flags + }, + decode (state) { + const flags = c.uint.decode(state) + return { + userData: (flags & 1) !== 0 ? keyValue.decode(state) : null, + treeNodes: (flags & 2) !== 0 ? nodeArray.decode(state) : null, + treeUpgrade: (flags & 4) !== 0 ? 
treeUpgrade.decode(state) : null, + bitfield: (flags & 8) !== 0 ? bitfieldUpdate.decode(state) : null + } + } +} + +const keyPair = { + preencode (state, kp) { + c.buffer.preencode(state, kp.publicKey) + c.buffer.preencode(state, kp.secretKey) + }, + encode (state, kp) { + c.buffer.encode(state, kp.publicKey) + c.buffer.encode(state, kp.secretKey) + }, + decode (state) { + return { + publicKey: c.buffer.decode(state), + secretKey: c.buffer.decode(state) + } + } +} + +const reorgHint = { + preencode (state, r) { + c.uint.preencode(state, r.from) + c.uint.preencode(state, r.to) + c.uint.preencode(state, r.ancestors) + }, + encode (state, r) { + c.uint.encode(state, r.from) + c.uint.encode(state, r.to) + c.uint.encode(state, r.ancestors) + }, + decode (state) { + return { + from: c.uint.decode(state), + to: c.uint.decode(state), + ancestors: c.uint.decode(state) + } + } +} + +const reorgHintArray = c.array(reorgHint) + +const hints = { + preencode (state, h) { + reorgHintArray.preencode(state, h.reorgs) + c.uint.preencode(state, h.contiguousLength) + }, + encode (state, h) { + reorgHintArray.encode(state, h.reorgs) + c.uint.encode(state, h.contiguousLength) + }, + decode (state) { + return { + reorgs: reorgHintArray.decode(state), + contiguousLength: state.start < state.end ? c.uint.decode(state) : 0 + } + } +} + +const treeHeader = { + preencode (state, t) { + c.uint.preencode(state, t.fork) + c.uint.preencode(state, t.length) + c.buffer.preencode(state, t.rootHash) + c.buffer.preencode(state, t.signature) + }, + encode (state, t) { + c.uint.encode(state, t.fork) + c.uint.encode(state, t.length) + c.buffer.encode(state, t.rootHash) + c.buffer.encode(state, t.signature) + }, + decode (state) { + return { + fork: c.uint.decode(state), + length: c.uint.decode(state), + rootHash: c.buffer.decode(state), + signature: c.buffer.decode(state) + } + } +} + +const types = { + preencode (state, t) { + c.string.preencode(state, t.tree) + c.string.preencode(state, t.bitfield) + c.string.preencode(state, t.signer) + }, + encode (state, t) { + c.string.encode(state, t.tree) + c.string.encode(state, t.bitfield) + c.string.encode(state, t.signer) + }, + decode (state) { + return { + tree: c.string.decode(state), + bitfield: c.string.decode(state), + signer: c.string.decode(state) + } + } +} + +const externalHeader = { + preencode (state, m) { + c.uint.preencode(state, m.start) + c.uint.preencode(state, m.length) + }, + encode (state, m) { + c.uint.encode(state, m.start) + c.uint.encode(state, m.length) + }, + decode (state) { + return { + start: c.uint.decode(state), + length: c.uint.decode(state) + } + } +} + +const keyValueArray = c.array(keyValue) + +oplog.header = { + preencode (state, h) { + state.end += 2 // version + flags + if (h.external) { + externalHeader.preencode(state, h.external) + return + } + c.fixed32.preencode(state, h.key) + if (h.manifest) manifest.preencode(state, h.manifest) + if (h.keyPair) keyPair.preencode(state, h.keyPair) + keyValueArray.preencode(state, h.userData) + treeHeader.preencode(state, h.tree) + hints.preencode(state, h.hints) + }, + encode (state, h) { + c.uint.encode(state, 1) + if (h.external) { + c.uint.encode(state, 1) // ONLY set the first big for clarity + externalHeader.encode(state, h.external) + return + } + c.uint.encode(state, (h.manifest ? 2 : 0) | (h.keyPair ? 
4 : 0))
+    c.fixed32.encode(state, h.key)
+    if (h.manifest) manifest.encode(state, h.manifest)
+    if (h.keyPair) keyPair.encode(state, h.keyPair)
+    keyValueArray.encode(state, h.userData)
+    treeHeader.encode(state, h.tree)
+    hints.encode(state, h.hints)
+  },
+  decode (state) {
+    const version = c.uint.decode(state)
+
+    if (version > 1) {
+      throw new Error('Invalid header version. Expected <= 1, got ' + version)
+    }
+
+    if (version === 0) {
+      const old = {
+        types: types.decode(state),
+        userData: keyValueArray.decode(state),
+        tree: treeHeader.decode(state),
+        signer: keyPair.decode(state),
+        hints: hints.decode(state)
+      }
+
+      return {
+        external: null,
+        key: old.signer.publicKey,
+        manifest: {
+          version: 0,
+          hash: old.types.tree,
+          allowPatch: false,
+          quorum: 1,
+          signers: [{
+            signature: old.types.signer,
+            namespace: DEFAULT_NAMESPACE,
+            publicKey: old.signer.publicKey
+          }],
+          prologue: null
+        },
+        keyPair: old.signer.secretKey ? old.signer : null,
+        userData: old.userData,
+        tree: old.tree,
+        hints: old.hints
+      }
+    }
+
+    const flags = c.uint.decode(state)
+
+    if (flags & 1) {
+      return {
+        external: externalHeader.decode(state),
+        key: null,
+        manifest: null,
+        keyPair: null,
+        userData: null,
+        tree: null,
+        hints: null
+      }
+    }
+
+    return {
+      external: null,
+      key: c.fixed32.decode(state),
+      manifest: (flags & 2) !== 0 ? manifest.decode(state) : null,
+      keyPair: (flags & 4) !== 0 ? keyPair.decode(state) : null,
+      userData: keyValueArray.decode(state),
+      tree: treeHeader.decode(state),
+      hints: hints.decode(state)
+    }
+  }
+}
+
+const uintArray = c.array(c.uint)
+
+const multisigInput = {
+  preencode (state, inp) {
+    c.uint.preencode(state, inp.signer)
+    c.fixed64.preencode(state, inp.signature)
+    c.uint.preencode(state, inp.patch)
+  },
+  encode (state, inp) {
+    c.uint.encode(state, inp.signer)
+    c.fixed64.encode(state, inp.signature)
+    c.uint.encode(state, inp.patch)
+  },
+  decode (state) {
+    return {
+      signer: c.uint.decode(state),
+      signature: c.fixed64.decode(state),
+      patch: c.uint.decode(state)
+    }
+  }
+}
+
+const patchEncodingv0 = {
+  preencode (state, n) {
+    c.uint.preencode(state, n.start)
+    c.uint.preencode(state, n.length)
+    uintArray.preencode(state, n.nodes)
+  },
+  encode (state, n) {
+    c.uint.encode(state, n.start)
+    c.uint.encode(state, n.length)
+    uintArray.encode(state, n.nodes)
+  },
+  decode (state) {
+    return {
+      start: c.uint.decode(state),
+      length: c.uint.decode(state),
+      nodes: uintArray.decode(state)
+    }
+  }
+}
+
+const multisigInputv0 = {
+  preencode (state, n) {
+    state.end++
+    c.uint.preencode(state, n.signer)
+    c.fixed64.preencode(state, n.signature)
+    if (n.patch) patchEncodingv0.preencode(state, n.patch)
+  },
+  encode (state, n) {
+    c.uint.encode(state, n.patch ? 1 : 0)
+    c.uint.encode(state, n.signer)
+    c.fixed64.encode(state, n.signature)
+    if (n.patch) patchEncodingv0.encode(state, n.patch)
+  },
+  decode (state) {
+    const flags = c.uint.decode(state)
+    return {
+      signer: c.uint.decode(state),
+      signature: c.fixed64.decode(state),
+      patch: (flags & 1) ? patchEncodingv0.decode(state) : null
+    }
+  }
+}
+
+const multisigInputArrayv0 = c.array(multisigInputv0)
+const multisigInputArray = c.array(multisigInput)
+
+const compactNode = {
+  preencode (state, n) {
+    c.uint.preencode(state, n.index)
+    c.uint.preencode(state, n.size)
+    c.fixed32.preencode(state, n.hash)
+  },
+  encode (state, n) {
+    c.uint.encode(state, n.index)
+    c.uint.encode(state, n.size)
+    c.fixed32.encode(state, n.hash)
+  },
+  decode (state) {
+    return {
+      index: c.uint.decode(state),
+      size: c.uint.decode(state),
+      hash: c.fixed32.decode(state)
+    }
+  }
+}
+
+const compactNodeArray = c.array(compactNode)
+
+exports.multiSignaturev0 = {
+  preencode (state, s) {
+    multisigInputArrayv0.preencode(state, s.proofs)
+    compactNodeArray.preencode(state, s.patch)
+  },
+  encode (state, s) {
+    multisigInputArrayv0.encode(state, s.proofs)
+    compactNodeArray.encode(state, s.patch)
+  },
+  decode (state) {
+    return {
+      proofs: multisigInputArrayv0.decode(state),
+      patch: compactNodeArray.decode(state)
+    }
+  }
+}
+
+exports.multiSignature = {
+  preencode (state, s) {
+    multisigInputArray.preencode(state, s.proofs)
+    compactNodeArray.preencode(state, s.patch)
+  },
+  encode (state, s) {
+    multisigInputArray.encode(state, s.proofs)
+    compactNodeArray.encode(state, s.patch)
+  },
+  decode (state) {
+    return {
+      proofs: multisigInputArray.decode(state),
+      patch: compactNodeArray.decode(state)
+    }
+  }
+}
diff --git a/package.json b/package.json
index 6143d26..1c14877 100644
--- a/package.json
+++ b/package.json
@@ -5,7 +5,8 @@
   "files": [
     "index.js",
     "lib/*.js",
-    "spec/hyperschema/*.js"
+    "spec/hyperschema/*.js",
+    "migrations/0/*.js"
   ],
   "scripts": {
     "test": "standard && node test/all.js",
@@ -19,6 +20,7 @@
     "b4a": "^1.6.7",
     "compact-encoding": "^2.16.0",
     "flat-tree": "^1.12.1",
+    "hypercore-crypto": "^3.4.2",
     "hyperschema": "^1.3.3",
     "index-encoder": "^3.3.2",
     "resolve-reject-promise": "^1.0.0",

From 6893958b26438066824f03c1c1dfa11ce247cfe8 Mon Sep 17 00:00:00 2001
From: Mathias Buus
Date: Fri, 10 Jan 2025 15:49:24 +0100
Subject: [PATCH 24/41] fix migrations when nothing to migrate

---
 index.js              | 18 +++++++++---------
 migrations/0/index.js |  6 +++---
 2 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/index.js b/index.js
index e5ccd0a..e37f5d3 100644
--- a/index.js
+++ b/index.js
@@ -369,9 +369,8 @@ class CorestoreStorage {
     const headPromise = rx.getHead()
     rx.tryFlush()

-    let head = await headPromise
-    if (head === null) head = initStoreHead(null, null)
-    return head
+    const head = await headPromise
+    return head === null ? initStoreHead() : head
   }

   atom () {
@@ -435,7 +434,7 @@ class CorestoreStorage {

     rx.tryFlush()

-    const head = (await headPromise) || initStoreHead(null, null)
+    const head = (await headPromise) || initStoreHead()
     if (head.seed === null || overwrite) head.seed = seed
     tx.setHead(head)

@@ -471,7 +470,7 @@ class CorestoreStorage {

     rx.tryFlush()

-    const head = (await headPromise) || initStoreHead(null, null)
+    const head = (await headPromise) || initStoreHead()
     if (head.defaultDiscoveryKey === null || overwrite) head.defaultDiscoveryKey = discoveryKey
     tx.setHead(head)

@@ -541,7 +540,8 @@ class CorestoreStorage {
     let [core, head] = await Promise.all([corePromise, headPromise])
     if (core) return this._resumeFromPointers(view, core)

-    if (head === null) head = initStoreHead(null, discoveryKey)
+    if (head === null) head = initStoreHead()
+    if (head.defaultDiscoveryKey === null) head.defaultDiscoveryKey = discoveryKey

     const corePointer = head.allocated.cores++
     const dataPointer = head.allocated.datas++
@@ -589,15 +589,15 @@ class CorestoreStorage {

 module.exports = CorestoreStorage

-function initStoreHead (seed, defaultDiscoveryKey) {
+function initStoreHead () {
   return {
     version: 0, // cause we wanna run the migration
     allocated: {
       datas: 0,
       cores: 0
     },
-    seed,
-    defaultDiscoveryKey
+    seed: null,
+    defaultDiscoveryKey: null
   }
 }
diff --git a/migrations/0/index.js b/migrations/0/index.js
index 025b97e..559dd2a 100644
--- a/migrations/0/index.js
+++ b/migrations/0/index.js
@@ -388,7 +388,7 @@ function readAll (filename, length, pos, cb) {

 async function readdir (dir) {
   try {
-    return fs.promises.readdir(dir)
+    return await fs.promises.readdir(dir)
   } catch {
     return []
   }
@@ -396,8 +396,8 @@ async function readdir (dir) {

 async function readFile (file) {
   try {
-    return fs.promises.readFile(file)
+    return await fs.promises.readFile(file)
   } catch {
-    return []
+    return null
   }
 }

From be855aa302d80411b203584b11e19d0748fe7930 Mon Sep 17 00:00:00 2001
From: Christophe Diederichs
Date: Fri, 10 Jan 2025 15:28:35 +0000
Subject: [PATCH 25/41] atom can be injected for registration

---
 index.js | 22 ++++++++++++----------
 1 file changed, 12 insertions(+), 10 deletions(-)

diff --git a/index.js b/index.js
index e37f5d3..8b9d5af 100644
--- a/index.js
+++ b/index.js
@@ -142,8 +142,8 @@ class HypercoreStorage {
     return new HypercoreStorage(this.store, this.db.session(), core, this.atomic ? this.view : new View(), this.atomic)
   }

-  async createSession (name, head) {
-    const rx = this.read()
+  async createSession (name, head, atom) {
+    const rx = this.read(atom)

     const existingSessionsPromise = rx.getSessions()
     const existingHeadPromise = rx.getHead()
@@ -160,9 +160,11 @@ class HypercoreStorage {
     const sessions = existingSessions || []
     const session = getBatch(sessions, name, true)

-    session.dataPointer = await this.store._allocData()
+    if (session.dataPointer === -1) {
+      session.dataPointer = await this.store._allocData()
+    }

-    const tx = this.write()
+    const tx = this.write(atom)

     tx.setSessions(sessions)

@@ -181,7 +183,7 @@ class HypercoreStorage {

     await tx.flush()

-    return new HypercoreStorage(this.store, this.db.session(), core, this.atomic ? this.view : new View(), this.atomic)
+    return new HypercoreStorage(this.store, this.db.session(), core, atom ? atom.view : this.atomic ? this.view : new View(), !!atom || this.atomic)
   }

   async createAtomicSession (atom, head) {
@@ -221,12 +223,12 @@ class HypercoreStorage {
     return deps
   }

-  read () {
-    return new CoreRX(this.core, this.db, this.view)
+  read (atom) {
+    return new CoreRX(this.core, this.db, atom ? atom.view : this.view)
   }

-  write () {
-    return new CoreTX(this.core, this.db, this.atomic ? this.view : null, [])
+  write (atom) {
+    return new CoreTX(this.core, this.db, atom ? atom.view : this.atomic ? this.view : null, [])
   }

   close () {
@@ -608,7 +610,7 @@ function getBatch (sessions, name, alloc) {

   if (!alloc) return null

-  const result = { name, dataPointer: 0 }
+  const result = { name, dataPointer: -1 }
   sessions.push(result)
   return result
 }

From dbb9eb6f474dd311287d328fa795943e581304f1 Mon Sep 17 00:00:00 2001
From: Christophe Diederichs
Date: Fri, 10 Jan 2025 15:29:24 +0000
Subject: [PATCH 26/41] add unsafe assume session api

---
 index.js | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/index.js b/index.js
index 8b9d5af..201ef2a 100644
--- a/index.js
+++ b/index.js
@@ -112,6 +112,17 @@ class HypercoreStorage {
     return createUserDataStream(this.core, this.db, this.view, opts)
   }

+  async assumeSessionUnsafe (session) {
+    const core = {
+      version: this.core.version,
+      corePointer: this.core.corePointer,
+      dataPointer: session.dataPointer,
+      dependencies: []
+    }
+
+    return new HypercoreStorage(this.store, this.db.session(), core, new View(), false)
+  }
+
   async resumeSession (name) {
     const rx = this.read()
     const existingSessionsPromise = rx.getSessions()

From 1cbf48bd72be0a5efd3555f8cbca0b3c504d9da2 Mon Sep 17 00:00:00 2001
From: Christophe Diederichs
Date: Fri, 10 Jan 2025 15:29:43 +0000
Subject: [PATCH 27/41] userData fixes: return buffers like rocks

---
 lib/streams.js | 2 +-
 lib/view.js    | 6 ++++--
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/lib/streams.js b/lib/streams.js
index b72f10f..3e2fa51 100644
--- a/lib/streams.js
+++ b/lib/streams.js
@@ -58,7 +58,7 @@ function createBitfieldStream (ptr, db, view, { gt = -1, gte = gt + 1, lte = -1,
   return ite
 }

-function createUserDataStream (ptr, db, view, { gt = null, gte = '', lte = null, lt = null, reverse = false }) {
+function createUserDataStream (ptr, db, view, { gt = null, gte = '', lte = null, lt = null, reverse = false } = {}) {
   if (gt !== null || lte !== null) throw new Error('gt and lte not yet supported for user data streams')

   const s = core.userData(ptr.dataPointer, gte)
diff --git a/lib/view.js b/lib/view.js
index 32c342b..315561a 100644
--- a/lib/view.js
+++ b/lib/view.js
@@ -93,7 +93,7 @@ class OverlayStream extends Readable {
     while (this.change < this.changes.length) {
       const c = this.changes[this.change]
       const key = c[0]
-      const value = c[1]
+      const value = typeof c[1] === 'string' ? b4a.from(c[1]) : c[1]
       const cmp = b4a.compare(key, entry.key)

       // same value, if not deleted, return new one
@@ -225,7 +225,9 @@ class View {
   _indexAndGet (read, key) {
     this._index()
     const change = this.map.get(b4a.toString(key, 'hex'))
-    return change === undefined ? read.get(key) : Promise.resolve(change[1])
+    if (change === undefined) return read.get(key)
+    if (typeof change[1] === 'string') return Promise.resolve(b4a.from(change[1]))
+    return Promise.resolve(change[1])
   }

   _attached () {

From e3f6828486a715f715da94e9b37d2e3531501dcc Mon Sep 17 00:00:00 2001
From: HDegroote <75906619+HDegroote@users.noreply.github.com>
Date: Fri, 10 Jan 2025 17:24:10 +0100
Subject: [PATCH 28/41] Fix bare CI (#52)

---
 package.json | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/package.json b/package.json
index 1c14877..b94ff7b 100644
--- a/package.json
+++ b/package.json
@@ -16,8 +16,20 @@
   "author": "Holepunch Inc.",
   "license": "Apache-2.0",
   "description": "",
+  "imports": {
+    "fs": {
+      "bare": "bare-fs",
+      "default": "fs"
+    },
+    "path": {
+      "bare": "bare-path",
+      "default": "path"
+    }
+  },
   "dependencies": {
     "b4a": "^1.6.7",
+    "bare-fs": "^4.0.1",
+    "bare-path": "^3.0.0",
     "compact-encoding": "^2.16.0",
     "flat-tree": "^1.12.1",
     "hypercore-crypto": "^3.4.2",

From c47696bd1f23e4940d9cc6a772315dc23a558c3f Mon Sep 17 00:00:00 2001
From: Christophe Diederichs
Date: Fri, 10 Jan 2025 16:23:37 +0000
Subject: [PATCH 29/41] missing await

---
 index.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/index.js b/index.js
index 201ef2a..abe9b73 100644
--- a/index.js
+++ b/index.js
@@ -363,7 +363,7 @@ class CorestoreStorage {
     const tx = new CorestoreTX(view)

     try {
-      const head = this._getHead(view)
+      const head = await this._getHead(view)

       dataPointer = head.allocated.datas++

From a5eecdbccc2cda4d81a1c272dfd4bdf998bc263d Mon Sep 17 00:00:00 2001
From: Christophe Diederichs
Date: Fri, 10 Jan 2025 16:24:23 +0000
Subject: [PATCH 30/41] support async onflush handlers

---
 index.js | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/index.js b/index.js
index abe9b73..968e685 100644
--- a/index.js
+++ b/index.js
@@ -36,7 +36,11 @@ class Atom {
   async flush () {
     await View.flush(this.view.changes, this.db)
     this.view.reset()
-    while (this.flushes.length) this.flushes.pop()()
+
+    const promises = []
+    while (this.flushes.length) promises.push(this.flushes.pop()())
+
+    return Promise.all(promises)
   }
 }

From a7becfeb69b91424a034687e33029bb3015725d1 Mon Sep 17 00:00:00 2001
From: Christophe Diederichs
Date: Fri, 10 Jan 2025 18:36:52 +0000
Subject: [PATCH 31/41] remove unnecessary method

---
 index.js | 11 -----------
 1 file changed, 11 deletions(-)

diff --git a/index.js b/index.js
index 968e685..4b6aaed 100644
--- a/index.js
+++ b/index.js
@@ -116,17 +116,6 @@ class HypercoreStorage {
     return createUserDataStream(this.core, this.db, this.view, opts)
   }

-  async assumeSessionUnsafe (session) {
-    const core = {
-      version: this.core.version,
-      corePointer: this.core.corePointer,
-      dataPointer: session.dataPointer,
-      dependencies: []
-    }
-
-    return new HypercoreStorage(this.store, this.db.session(), core, new View(), false)
-  }
-
   async resumeSession (name) {
     const rx = this.read()
     const existingSessionsPromise = rx.getSessions()

From 28c55afc63e8ed15e3614f50477589b34853805f Mon Sep 17 00:00:00 2001
From: Mathias Buus
Date: Fri, 10 Jan 2025 21:05:31 +0100
Subject: [PATCH 32/41] core migrations are in

---
 index.js              |  86 ++++++++++++---
 lib/streams.js        |  18 +++-
 migrations/0/index.js | 244 +++++++++++++++++++++++++++++++++++++++---
 3 files changed, 318 insertions(+), 30 deletions(-)

diff --git a/index.js b/index.js
index 4b6aaed..86fd01d 100644
--- a/index.js
+++ b/index.js
@@ -17,7 +17,8 @@ const {
   createAliasStream,
   createBlockStream,
   createBitfieldStream,
-  createUserDataStream
+  createUserDataStream,
+  createTreeNodeStream
 } = require('./lib/streams.js')

 const EMPTY = new View()
@@ -108,6 +109,10 @@ class HypercoreStorage {
     return createBlockStream(this.core, this.db, this.view, opts)
   }

+  createTreeNodeStream (opts) {
+    return createTreeNodeStream(this.core, this.db, this.view, opts)
+  }
+
   createBitfieldStream (opts) {
     return createBitfieldStream(this.core, this.db, this.view, opts)
   }
@@ -266,7 +271,7 @@ class CorestoreStorage {
   }

   async ready () {
-    if (this.version === 0) await this._migrate()
+    if (this.version === 0) await this._migrateStore()
   }

   static isCoreStorage (db) {
@@ -285,7 +290,8 @@ class CorestoreStorage {
     }
   }

-  async _migrate () {
+  // runs before any other mutation and read
+  async _migrateStore () {
     const view = await this._enter()

     try {
@@ -318,6 +324,47 @@ class CorestoreStorage {
     }
   }

+  // runs before the core is returned to the user
+  async _migrateCore (core, discoveryKey, locked) {
+    const view = locked ? this.view : await this._enter()
+
+    const version = core.core.version
+
+    try {
+      if (version === VERSION) return
+
+      const target = { version: VERSION, dryRun: false }
+
+      switch (version) {
+        case 0: {
+          await require('./migrations/0').core(core, target)
+          break
+        }
+        default: {
+          throw new Error('Unsupported version: ' + version + ' - you should probably upgrade your dependencies')
+        }
+      }
+
+      core.core.version = VERSION
+
+      if (locked === false) return
+
+      // if it's locked, then move the core state into the memview
+      // in case the core is reopened from the memview, pre-flush
+
+      const rx = new CorestoreRX(this.db, EMPTY)
+      const tx = new CorestoreTX(view)
+
+      const corePromise = rx.getCore(discoveryKey)
+      rx.tryFlush()
+
+      tx.putCore(discoveryKey, await corePromise)
+      tx.apply()
+    } finally {
+      if (!locked) await this._exit()
+    }
+  }
+
   async _enter () {
     this.enters++
     await this.lock.lock()
@@ -390,7 +437,7 @@ class CorestoreStorage {
   }

   async clear () {
-    if (this.version === 0) await this._migrate()
+    if (this.version === 0) await this._migrateStore()

     const view = await this._enter()
     const tx = new CorestoreTX(view)
@@ -402,14 +449,18 @@ class CorestoreStorage {
   }

   createCoreStream () {
+    // TODO: be nice to run the migration here also, but too much plumbing atm
     return createCoreStream(this.db, EMPTY)
   }

   createAliasStream (namespace) {
+    // TODO: be nice to run the migration here also, but too much plumbing atm
     return createAliasStream(this.db, EMPTY, namespace)
   }

-  getAlias (alias) {
+  async getAlias (alias) {
+    if (this.version === 0) await this._migrateStore()
+
     const rx = new CorestoreRX(this.db, EMPTY)
     const discoveryKeyPromise = rx.getCoreByAlias(alias)
     rx.tryFlush()
@@ -417,7 +468,7 @@ class CorestoreStorage {
   }

   async getSeed () {
-    if (this.version === 0) await this._migrate()
+    if (this.version === 0) await this._migrateStore()

     const rx = new CorestoreRX(this.db, EMPTY)
     const headPromise = rx.getHead()
@@ -429,7 +480,7 @@ class CorestoreStorage {
   }

   async setSeed (seed, { overwrite = true } = {}) {
-    if (this.version === 0) await this._migrate()
+    if (this.version === 0) await this._migrateStore()

     const view = await this._enter()
     const tx = new CorestoreTX(view)
@@ -453,7 +504,7 @@ class CorestoreStorage {
   }

   async getDefaultDiscoveryKey () {
-    if (this.version === 0) await this._migrate()
+    if (this.version === 0) await this._migrateStore()

     const rx = new CorestoreRX(this.db, EMPTY)
     const headPromise = rx.getHead()
@@ -465,7 +516,7 @@ class CorestoreStorage {
   }

   async setDefaultDiscoveryKey (discoveryKey, { overwrite = true } = {}) {
-    if (this.version === 0) await this._migrate()
+    if (this.version === 0) await this._migrateStore()

     const view = await this._enter()
     const tx = new CorestoreTX(view)
@@ -489,7 +540,7 @@ class CorestoreStorage {
   }

   async has (discoveryKey) {
-    if (this.version === 0) await this._migrate()
+    if (this.version === 0) await this._migrateStore()

     const rx = new CorestoreRX(this.db, EMPTY)
     const promise = rx.getCore(discoveryKey)
@@ -500,7 +551,7 @@ class CorestoreStorage {
   }

   async resume (discoveryKey) {
-    if (this.version === 0) await this._migrate()
+    if (this.version === 0) await this._migrateStore()

     if (!discoveryKey) {
       discoveryKey = await this.getDefaultDiscoveryKey()
@@ -514,10 +565,10 @@ class CorestoreStorage {
     const core = await corePromise
     if (core === null) return null

-    return this._resumeFromPointers(EMPTY, core)
+    return this._resumeFromPointers(EMPTY, discoveryKey, false, core)
   }

-  async _resumeFromPointers (view, { version, corePointer, dataPointer }) {
+  async _resumeFromPointers (view, discoveryKey, create, { version, corePointer, dataPointer }) {
     const core = { version, corePointer, dataPointer, dependencies: [] }

     while (true) {
@@ -530,7 +581,10 @@ class CorestoreStorage {
       dataPointer = dependency.dataPointer
     }

-    return new HypercoreStorage(this, this.db.session(), core, EMPTY, false)
+    const result = new HypercoreStorage(this, this.db.session(), core, EMPTY, false)
+
+    if (result.core.version === 0) await this._migrateCore(result, discoveryKey, create)
+    return result
   }

   // not allowed to throw validation errors as its a shared tx!
@@ -544,7 +598,7 @@ class CorestoreStorage {
     rx.tryFlush()

     let [core, head] = await Promise.all([corePromise, headPromise])
-    if (core) return this._resumeFromPointers(view, core)
+    if (core) return this._resumeFromPointers(view, discoveryKey, true, core)

     if (head === null) head = initStoreHead()
     if (head.defaultDiscoveryKey === null) head.defaultDiscoveryKey = discoveryKey
@@ -581,7 +635,7 @@ class CorestoreStorage {
   }

   async create (data) {
-    if (this.version === 0) await this._migrate()
+    if (this.version === 0) await this._migrateStore()

     const view = await this._enter()

diff --git a/lib/streams.js b/lib/streams.js
index 3e2fa51..0c46738 100644
--- a/lib/streams.js
+++ b/lib/streams.js
@@ -3,13 +3,15 @@ const { core, store } = require('./keys.js')
 const schema = require('../spec/hyperschema')

 const CORESTORE_CORE = schema.getEncoding('@corestore/core')
+const CORE_TREE_NODE = schema.getEncoding('@core/tree-node')

 module.exports = {
   createBlockStream,
   createBitfieldStream,
   createUserDataStream,
   createCoreStream,
-  createAliasStream
+  createAliasStream,
+  createTreeNodeStream
 }

 function createCoreStream (db, view) {
@@ -58,6 +60,16 @@ function createBitfieldStream (ptr, db, view, { gt = -1, gte = gt + 1, lte = -1,
   return ite
 }

+// NOTE: this does not do dependency lookups atm
+function createTreeNodeStream (ptr, db, view, { gt = -1, gte = gt + 1, lte = -1, lt = lte === -1 ? -1 : lte + 1, reverse = false } = {}) {
+  const s = core.tree(ptr.dataPointer, gte, 0)
+  const e = core.tree(ptr.dataPointer, lt === -1 ? Infinity : lt, 0)
+  const ite = view.iterator(db, s, e, false)
+
+  ite._readableState.map = mapTreeNode
+  return ite
+}
+
 function createUserDataStream (ptr, db, view, { gt = null, gte = '', lte = null, lt = null, reverse = false } = {}) {
   if (gt !== null || lte !== null) throw new Error('gt and lte not yet supported for user data streams')

   const s = core.userData(ptr.dataPointer, gte)
@@ -94,3 +106,7 @@ function mapAlias (data) {
 function mapBlock (data) {
   return { index: core.blockIndex(data.key), value: data.value }
 }
+
+function mapTreeNode (data) {
+  return CORE_TREE_NODE.decode({ start: 0, end: data.value.byteLength, buffer: data.value })
+}
diff --git a/migrations/0/index.js b/migrations/0/index.js
index 559dd2a..e9179e9 100644
--- a/migrations/0/index.js
+++ b/migrations/0/index.js
@@ -7,7 +7,10 @@ const crypto = require('hypercore-crypto')
 const c = require('compact-encoding')
 const m = require('./messages.js')
 const View = require('../../lib/view.js')
-const { CorestoreTX, CoreTX } = require('../../lib/tx.js')
+const { CorestoreTX, CoreTX, CorestoreRX } = require('../../lib/tx.js')
+
+const EMPTY_NODE = b4a.alloc(40)
+const EMPTY_PAGE = b4a.alloc(4096)

 class CoreListStream extends Readable {
   constructor (storage) {
@@ -146,14 +149,15 @@ function decodeOplogEntry (state) {

 module.exports = { store, core }

-async function store (storage, { version, dryRun = true }) {
+async function store (storage, { version, dryRun = true, gc = true }) {
   const stream = new CoreListStream(storage.path)
   const view = new View()
   const tx = new CorestoreTX(view)

   const head = await storage._getHead(view)
+  const primaryKeyFile = path.join(storage.path, 'primary-key')

-  const primaryKey = await readFile(path.join(storage.path, 'primary-key'))
+  const primaryKey = await readFile(primaryKeyFile)

   if (!head.seed) head.seed = primaryKey

@@ -268,7 +272,7 @@ async function store (storage, { version, dryRun = true }) {

     for (const index of blocks) {
       if (index === contiguousLength) contiguousLength++
-      const blk = await getBlockFromFile(files.data, index, tree.length, roots, getTreeNode)
+      const blk = await getBlockFromFile(files.data, index, roots, getTreeNode)
       ctx.putBlock(index, blk)
     }

@@ -286,11 +290,178 @@ async function store (storage, { version, dryRun = true }) {
   tx.setHead(head)
   tx.apply()

+  if (dryRun) return
+
   await View.flush(view.changes, storage.db)
+
+  if (gc) await rm(primaryKeyFile)
+}
+
+class Slicer {
+  constructor () {
+    this.buffer = null
+    this.offset = 0
+  }
+
+  get size () {
+    return this.buffer === null ? 0 : this.buffer.byteLength
+  }
+
+  push (data) {
+    if (this.buffer === null) this.buffer = data
+    else this.buffer = b4a.concat([this.buffer, data])
+    this.offset += data.byteLength
+  }
+
+  take (len) {
+    if (len <= this.size) {
+      const chunk = this.buffer.subarray(0, len)
+      this.buffer = this.buffer.subarray(len)
+      return chunk
+    }
+
+    return null
+  }
+}
+
+async function core (core, { version, dryRun = true, gc = true }) {
+  if (dryRun) return // dryRun mode not supported atm
+
+  const rx = core.read()
+
+  const promises = [rx.getAuth(), rx.getHead()]
+  rx.tryFlush()
+
+  const [auth, head] = await Promise.all(promises)
+
+  if (!auth) return
+
+  const dk = b4a.toString(auth.discoveryKey, 'hex')
+  const files = getFiles(path.join(core.store.path, 'cores', dk.slice(0, 2), dk.slice(2, 4), dk))
+
+  if (head === null || head.length === 0) {
+    if (gc) await runGC()
+    return // no data
+  }
+
+  const treeData = new Slicer()
+
+  let treeIndex = 0
+
+  if (await exists(files.tree)) {
+    for await (const data of fs.createReadStream(files.tree)) {
+      treeData.push(data)
+
+      const write = core.write()
+
+      while (true) {
+        const buf = treeData.take(40)
+        if (buf === null) break
+
+        const index = treeIndex++
+        if (b4a.equals(buf, EMPTY_NODE)) continue
+
+        write.putTreeNode(decodeTreeNode(index, buf))
+      }
+
+      await write.flush()
+    }
+  }
+
+  const buf = []
+  if (await exists(files.bitfield)) {
+    for await (const data of fs.createReadStream(files.bitfield)) {
+      buf.push(data)
+    }
+  }
+
+  let bitfield = b4a.concat(buf)
+  if (bitfield.byteLength & 4095) bitfield = b4a.concat([bitfield, b4a.alloc(4096 - (bitfield.byteLength & 4095))])
+
+  const pages = new Map()
+
+  for await (const data of core.createBlockStream()) {
+    const { page, n } = getPage(data.index)
+    setBit(page, n)
+  }
+
+  const roots = await getRoots(head.length, getTreeNode)
+
+  let w = core.write()
+  for (const index of allBits(bitfield)) {
+    const { page, n } = getPage(index)
+    setBit(page, n)
+
+    const blk = await getBlockFromFile(files.data, index, roots, getTreeNode)
+
+    if (w.changes.length > 1024) {
+      await w.flush()
+      w = core.write()
+    }
+
+    w.putBlock(index, blk)
+  }
+
+  for (const [index, page] of pages) {
+    w.putBitfieldPage(index, b4a.from(page.buffer, page.byteOffset, page.byteLength))
+  }
+
+  await w.flush()
+
+  await commitCoreMigration(auth, core, version)
+
+  if (gc) await runGC()
+
+  async function runGC () {
+    await rm(files.path)
+    await rmdir(path.join(files.path, '..'))
+    await rmdir(path.join(files.path, '../..'))
+    await rmdir(path.join(core.store.path, 'cores'))
+  }
+
+  function getPage (index) {
+    const n = index & 32767
+    const p = (index - n) / 32768
+
+    let page = pages.get(p)
+    if (page) return { n, page }
+
+    page = new Uint32Array(1024)
+    pages.set(p, page)
+
+    return { n, page }
+  }
+
+  function getTreeNode (index) {
+    const read = core.read()
+    const promise = read.getTreeNode(index)
+    read.tryFlush()
+    return promise
+  }
+}
+
+async function commitCoreMigration (auth, core, version) {
+  const view = new View()
+  const rx = new CorestoreRX(core.db, view)
+
+  const storeCorePromise = rx.getCore(auth.discoveryKey)
+  rx.tryFlush()
+
+  const storeCore = await storeCorePromise
+
+  storeCore.version = version
+
+  const tx = new CorestoreTX(view)
+
+  tx.putCore(auth.discoveryKey, storeCore)
+  tx.apply()
+
+  await View.flush(view.changes, core.db)
 }

 function getFiles (dir) {
   return {
+    path: dir,
     oplog: path.join(dir, 'oplog'),
     data: path.join(dir, 'data'),
     tree: path.join(dir, 'tree'),
@@ -298,10 +469,6 @@ function getFiles (dir) {
   }
 }

-async function core () {
-
-}
-
 async function getRoots (length, getTreeNode) {
   const all = []
   for (const index of flat.fullRoots(2 * length)) {
@@ -310,9 +477,9 @@ async function getRoots (length, getTreeNode) {
   return all
 }

-async function getBlockFromFile (file, index, length, roots, getTreeNode) {
+async function getBlockFromFile (file, index, roots, getTreeNode) {
   const size = (await getTreeNode(2 * index)).size
-  const offset = await getByteOffset(2 * index, length, roots, getTreeNode)
+  const offset = await getByteOffset(2 * index, roots, getTreeNode)

   return new Promise(function (resolve) {
     readAll(file, size, offset, function (err, buf) {
@@ -322,9 +489,8 @@ async function getBlockFromFile (file, index, roots, getTreeNode) {
   })
 }

-async function getByteOffset (index, length, roots, getTreeNode) {
+async function getByteOffset (index, roots, getTreeNode) {
   if (index === 0) return 0
-  if (index === length) return roots.map(r => r.size).reduce((a, b) => a + b)
   if ((index & 1) === 1) index = flat.leftSpan(index)

   let head = 0
@@ -355,11 +521,15 @@ async function getByteOffset (index, roots, getTreeNode) {
   throw new Error('Failed to find offset')
 }

+function decodeTreeNode (index, buf) {
+  return { index, size: c.decode(c.uint64, buf), hash: buf.subarray(8) }
+}
+
 async function getTreeNodeFromFile (file, index) {
   return new Promise(function (resolve) {
     readAll(file, 40, index * 40, function (err, buf) {
       if (err) return resolve(null)
-      resolve({ index, size: c.decode(c.uint64, buf), hash: buf.subarray(8) })
+      resolve(decodeTreeNode(index, buf))
     })
   })
 }
@@ -394,6 +564,15 @@ async function readdir (dir) {
   }
 }

+async function exists (file) {
+  try {
+    await fs.promises.stat(file)
+    return true
+  } catch {
+    return false
+  }
+}
+
 async function readFile (file) {
   try {
     return await fs.promises.readFile(file)
@@ -401,3 +580,42 @@ async function readFile (file) {
     return null
   }
 }
+
+async function rm (dir) {
+  try {
+    await fs.promises.rm(dir, { recursive: true })
+  } catch {}
+}
+
+async function rmdir (dir) {
+  try {
+    await fs.promises.rmdir(dir)
+  } catch {}
+}
+
+function * allBits (buffer) {
+  for (let i = 0; i < buffer.byteLength; i += EMPTY_PAGE.byteLength) {
+    const page = buffer.subarray(i, i + EMPTY_PAGE.byteLength)
+    if (b4a.equals(page, EMPTY_PAGE)) continue
+
+    const view = new Uint32Array(page.buffer, page.byteOffset, EMPTY_PAGE.byteLength / 4)
+
+    for (let j = 0; j < view.length; j++) {
+      const n = view[j]
+      if (n === 0) continue
+
+      for (let k = 0; k < 32; k++) {
+        const m = 1 << k
+        if (n & m) yield i * EMPTY_PAGE.byteLength * 8 + j * 32 + k
+      }
+    }
+  }
+}
+
+function setBit (page, n) {
+  const o = n & 31
+  const b = (n - o) / 32
+  const v = 1 << o
+
+  page[b] |= v
+}

From ae4b90c9f15bf221e51bc940fbe953ebcbcab501 Mon Sep 17 00:00:00 2001
From: HDegroote <75906619+HDegroote@users.noreply.github.com>
Date: Fri, 10 Jan 2025 21:06:50 +0100
Subject: [PATCH 33/41] Add tests for untested tx/rx APIs (#51)

* Add tests for untested tx/rx APIs

* rm logs
---
 test/core.js | 378 +++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 378 insertions(+)

diff --git a/test/core.js b/test/core.js
index 15fd458..57da80d 100644
--- a/test/core.js
+++ b/test/core.js
@@ -253,6 +253,384 @@ test('delete tree node range', async (t) => {
   t.alike(res3, node3)
 })

+test('set and get auth', async (t) => {
+  const core = await createCore(t)
+
+  {
+    const rx = core.read()
+    const p = rx.getAuth()
+    rx.tryFlush()
+    const initAuth = await p
+    t.alike(
+      initAuth,
+      {
+        key: b4a.alloc(32),
+        discoveryKey: b4a.alloc(32),
+        manifest: null,
+        keyPair: null,
+        encryptionKey: null
+      },
+      'fresh core auth'
+    )
+  }
+
+  {
+    const tx = core.write()
+    tx.setAuth({
+      key: b4a.alloc(32),
+      discoveryKey: b4a.alloc(32),
+      manifest: null,
+      keyPair: null,
+      encryptionKey: b4a.from('a'.repeat(64), 'hex')
+    })
+    await tx.flush()
+  }
+
+  {
+    const rx = core.read()
+    const p = rx.getAuth()
+    rx.tryFlush()
+    t.alike(
+      await p,
+      {
+        key: b4a.alloc(32),
+        discoveryKey: b4a.alloc(32),
+        manifest: null,
+        keyPair: null,
+        encryptionKey: b4a.from('a'.repeat(64), 'hex')
+      },
+      'updated auth'
+    )
+  }
+})
+
+test('set and get hypercore sessions', async (t) => {
+  const core = await createCore(t)
+  {
+    const rx = core.read()
+    const p = rx.getSessions()
+    rx.tryFlush()
+    t.alike(await p, null, 'No sessions on init core')
+  }
+
+  {
+    const tx = core.write()
+    tx.setSessions([
+      { name: 'session0', dataPointer: 0 },
+      { name: 'session1', dataPointer: 1 }
+    ])
+    await tx.flush()
+  }
+
+  {
+    const rx = core.read()
+    const p = rx.getSessions()
+    rx.tryFlush()
+    t.alike(
+      await p,
+      [
+        { name: 'session0', dataPointer: 0 },
+        { name: 'session1', dataPointer: 1 }
+      ]
+    )
+  }
+})
+
+test('set and get hypercore head', async (t) => {
+  const core = await createCore(t)
+  {
+    const rx = core.read()
+    const p = rx.getHead()
+    rx.tryFlush()
+    t.alike(await p, null, 'No head on init core')
+  }
+
+  {
+    const tx = core.write()
+    tx.setHead({
+      fork: 1,
+      length: 3,
+      rootHash: b4a.from('a'.repeat(64), 'hex'),
+      signature: b4a.from('b'.repeat(64), 'hex')
+    })
+    await tx.flush()
+  }
+
+  {
+    const rx = core.read()
+    const p = rx.getHead()
+    rx.tryFlush()
+    t.alike(
+      await p,
+      {
+        fork: 1,
+        length: 3,
+        rootHash: b4a.from('a'.repeat(64), 'hex'),
+        signature: b4a.from('b'.repeat(64), 'hex')
+      },
+      'updated head')
+  }
+})
+
+test('set and get hypercore dependency', async (t) => {
+  const core = await createCore(t)
+  {
+    const rx = core.read()
+    const p = rx.getDependency()
+    rx.tryFlush()
+    t.alike(await p, null, 'No dependency on init core')
+  }
+
+  {
+    const tx = core.write()
+    tx.setDependency({
+      dataPointer: 1,
+      length: 3
+    })
+    await tx.flush()
+  }
+
+  {
+    const rx = core.read()
+    const p = rx.getDependency()
+    rx.tryFlush()
+    t.alike(
+      await p,
+      {
+        dataPointer: 1,
+        length: 3
+      },
+      'updated dependency')
+  }
+})
+
+test('set and get hypercore hints', async (t) => {
+  const core = await createCore(t)
+  {
+    const rx = core.read()
+    const p = rx.getHints()
+    rx.tryFlush()
+    t.alike(await p, null, 'No hints on init core')
+  }
+
+  {
+    const tx = core.write()
+    tx.setHints({
+      contiguousLength: 1
+    })
+    await tx.flush()
+  }
+
+  {
+    const rx = core.read()
+    const p = rx.getHints()
+    rx.tryFlush()
+    t.alike(
+      await p,
+      { contiguousLength: 1 },
+      'updated hints')
+  }
+})
+
+test('set and get hypercore userdata', async (t) => {
+  const core = await createCore(t)
+  {
+    const rx = core.read()
+    const p = rx.getUserData('key')
+    rx.tryFlush()
+    t.alike(await p, null, 'No userdata on init core')
+  }
+
+  {
+    const tx = core.write()
+    tx.putUserData('key', 'value')
+    tx.putUserData('key2', 'value2')
+    await tx.flush()
+  }
+
+  {
+    const rx = core.read()
+    const p = Promise.all([
+      rx.getUserData('key'),
+      rx.getUserData('key2'),
+      rx.getUserData('no-key')
+    ])
+    rx.tryFlush()
+    const [data1, data2, data3] = await p
+
+    t.is(b4a.toString(data1), 'value')
+    t.is(b4a.toString(data2), 'value2')
+    t.is(data3, null)
+  }
+})
+
+test('delete hypercore userdata', async (t) => {
+  const core = await createCore(t)
+
+  {
+    const tx = core.write()
+    tx.putUserData('key', 'value')
+    tx.putUserData('key2', 'value2')
+    await tx.flush()
+  }
+
+  {
+    const rx = core.read()
+    const p = Promise.all([
+      rx.getUserData('key'),
+      rx.getUserData('key2')
+    ])
+    rx.tryFlush()
+    const [data1, data2] = await p
+
+    t.is(b4a.toString(data1), 'value', 'sanity check')
+    t.is(b4a.toString(data2), 'value2', 'sanity check')
+  }
+
+  {
+    const tx = core.write()
+    tx.deleteUserData('key')
+    await tx.flush()
+  }
+
+  {
+    const rx = core.read()
+    const p = Promise.all([
+      rx.getUserData('key'),
+      rx.getUserData('key2')
+    ])
+    rx.tryFlush()
+    const [data1, data2] = await p
+
+    t.is(data1, null, 'deleted')
+    t.is(b4a.toString(data2), 'value2')
+  }
+})
+
+test('set and get bitfield page', async (t) => {
+  const core = await createCore(t)
+
+  {
+    // Note: not sure these values are valid bitfield data
+    // but the API seems to accept generic buffers
+    const tx = core.write()
+    tx.putBitfieldPage(0, 'bitfield-data-1')
+    tx.putBitfieldPage(1, 'bitfield-data-2')
+    await tx.flush()
+  }
+
+  {
+    const rx = core.read()
+    const p = Promise.all([
+      rx.getBitfieldPage(0),
+      rx.getBitfieldPage(1),
+      rx.getBitfieldPage(2)
+    ])
+    rx.tryFlush()
+    const [data1, data2, data3] = await p
+
+    t.is(b4a.toString(data1), 'bitfield-data-1')
+    t.is(b4a.toString(data2), 'bitfield-data-2')
+    t.is(data3, null)
+  }
+})
+
+test('delete bitfield page', async (t) => {
+  const core = await createCore(t)
+
+  {
+    const tx = core.write()
+    tx.putBitfieldPage(0, 'bitfield-data-1')
+    tx.putBitfieldPage(1, 'bitfield-data-2')
+    await tx.flush()
+  }
+
+  {
+    const rx = core.read()
+    const p = Promise.all([
+      rx.getBitfieldPage(0),
+      rx.getBitfieldPage(1)
+    ])
+    rx.tryFlush()
+    const [data1, data2] = await p
+
+    t.is(b4a.toString(data1), 'bitfield-data-1', 'sanity check')
+    t.is(b4a.toString(data2), 'bitfield-data-2', 'sanity check')
+  }
+
+  {
+    const tx = core.write()
+    tx.deleteBitfieldPage(0)
+    await tx.flush()
+  }
+
+  {
+    const rx = core.read()
+    const p = Promise.all([
+      rx.getBitfieldPage(0),
+      rx.getBitfieldPage(1)
+    ])
+    rx.tryFlush()
+    const [data1, data2] = await p
+
+    t.is(data1, null, 'deleted')
+    t.is(b4a.toString(data2), 'bitfield-data-2', 'sanity check')
+  }
+})
+
+test('delete bitfield page range', async (t) => {
+  const core = await createCore(t)
+
+  {
+    const tx = core.write()
+    tx.putBitfieldPage(0, 'bitfield-data-1')
+    tx.putBitfieldPage(1, 'bitfield-data-2')
+    tx.putBitfieldPage(2, 'bitfield-data-3')
+    tx.putBitfieldPage(3, 'bitfield-data-4')
+    await tx.flush()
+  }
+
+  {
+    const rx = core.read()
+    const p = Promise.all([
+      rx.getBitfieldPage(0),
+      rx.getBitfieldPage(1),
+      rx.getBitfieldPage(2),
+      rx.getBitfieldPage(3)
+    ])
+    rx.tryFlush()
+    const [data1, data2, data3, data4] = await p
+
+    t.is(b4a.toString(data1), 'bitfield-data-1', 'sanity check')
+    t.is(b4a.toString(data2), 'bitfield-data-2', 'sanity check')
+    t.is(b4a.toString(data3), 'bitfield-data-3', 'sanity check')
+    t.is(b4a.toString(data4), 'bitfield-data-4', 'sanity check')
+  }
+
+  {
+    const tx = core.write()
+    tx.deleteBitfieldPageRange(1, 3)
+    await tx.flush()
+  }
+
+  {
+    const rx = core.read()
+    const p = Promise.all([
+      rx.getBitfieldPage(0),
+      rx.getBitfieldPage(1),
+      rx.getBitfieldPage(2),
+      rx.getBitfieldPage(3)
+    ])
+    rx.tryFlush()
+    const [data1, data2, data3, data4] = await p
+
+    t.is(b4a.toString(data1), 'bitfield-data-1')
+    t.is(data2, null)
+    t.is(data3, null)
+    t.is(b4a.toString(data4), 'bitfield-data-4')
+  }
+})
+
 async function writeBlocks (core, amount, { start = 0, pre = '' } = {}) {
   const tx = core.write()
   for (let i = start; i < amount + start; i++) {
From 9337da186917d4dd2783dd4dbde666d6ee0fe2d5 Mon Sep 17 00:00:00 2001
From: Mathias Buus
Date: Fri, 10 Jan 2025 21:15:25 +0100
Subject: [PATCH 34/41] ready proxies db.ready and do not return array

---
 index.js | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/index.js b/index.js
index 86fd01d..fbe7cf6 100644
--- a/index.js
+++ b/index.js
@@ -41,7 +41,7 @@ class Atom {

     const promises = []
     while (this.flushes.length) promises.push(this.flushes.pop()())

-    return Promise.all(promises)
+    await Promise.all(promises)
   }
 }

@@ -272,6 +272,7 @@ class CorestoreStorage {

   async ready () {
     if (this.version === 0) await this._migrateStore()
+    return this.db.ready()
   }

   static isCoreStorage (db) {

From b5ca1b9e5b60a1474095f977cd42ab8a21acc09c Mon Sep 17 00:00:00 2001
From: Mathias Buus
Date: Fri, 10 Jan 2025 22:06:54 +0100
Subject: [PATCH 35/41] support dropped blocks in migrations and fix some bugs

---
 migrations/0/index.js | 219 ++++++++++++++++++++++--------------
 1 file changed, 113 insertions(+), 106 deletions(-)

diff --git a/migrations/0/index.js b/migrations/0/index.js
index e9179e9..818fc6a 100644
--- a/migrations/0/index.js
+++ b/migrations/0/index.js
@@ -32,68 +32,23 @@ class CoreListStream extends Readable {
     cb(null)
   }

-  _read (cb) {
-    const next = this.stack.pop()
-    if (!next) {
-      this.push(null)
-      cb(null)
-      return
-    }
-
-    const oplog = path.join(next, 'oplog')
-    fs.readFile(oplog, (err, buffer) => {
-      if (err) return this._read(cb) // next
-
-      const state = { start: 0, end: buffer.byteLength, buffer }
-      const headers = [1, 0]
-
-      const h1 = decodeOplogHeader(state)
-      state.start = 4096
-
-      const h2 = decodeOplogHeader(state)
-      state.start = 4096 * 2
-
-      if (!h1 && !h2) return this._read(cb)
-
-      if (h1 && !h2) {
-        headers[0] = h1.header
-        headers[1] = h1.header
-      } else if (!h1 && h2) {
-        headers[0] = (h2.header + 1) & 1
-        headers[1] = h2.header
-      } else {
-        headers[0] = h1.header
-        headers[1] = h2.header
-      }
-
-      const header = (headers[0] + headers[1]) & 1
-      const result = { path: next, header: null, entries: [] }
-      const decoded = []
-
-      result.header = header ? h2.message : h1.message
-
-      if (result.header.external) {
-        throw new Error('External headers not migrate-able atm')
-      }
-
-      while (true) {
-        const entry = decodeOplogEntry(state)
-        if (!entry) break
-        if (entry.header !== header) break
-
-        decoded.push(entry)
+  async _read (cb) {
+    while (true) {
+      const next = this.stack.pop()
+      if (!next) {
+        this.push(null)
+        break
       }

-      while (decoded.length > 0 && decoded[decoded.length - 1].partial) decoded.pop()
-
-      for (const e of decoded) {
-        result.entries.push(e.message)
-      }
+      const oplog = path.join(next, 'oplog')
+      const result = await readOplog(oplog)
+      if (!result) continue

       this.push(result)
+      break
+    }

-      cb(null)
-    })
+    cb(null)
   }
 }

@@ -188,7 +143,6 @@ async function store (storage, { version, dryRun = true, gc = true }) {
       encryptionKey: null
     }

-    const blocks = []
     const tree = {
       length: 0,
       fork: 0,
@@ -196,8 +150,6 @@ async function store (storage, { version, dryRun = true, gc = true }) {
       signature: null
     }

-    let contiguousLength = 0
-
     if (data.header.tree && data.header.tree.length) {
       tree.length = data.header.tree.length
       tree.fork = data.header.tree.fork
@@ -205,10 +157,6 @@ async function store (storage, { version, dryRun = true, gc = true }) {
       tree.signature = data.header.tree.signature
     }

-    if (data.header.hints) {
-      contiguousLength = data.header.hints.contiguousLength
-    }
-
    for (const { key, value } of data.header.userData) {
       userData.set(key, value)
     }
@@ -233,16 +181,6 @@ async function store (storage, { version, dryRun = true, gc = true }) {
         tree.rootHash = null
         tree.signature = e.treeUpgrade.signature
       }
-
-      if (e.bitfield) {
-        if (e.bitfield.drop) {
-          throw new Error('Unflushed truncations not migrate-able atm')
-        }
-
-        for (let i = e.bitfield.start; i < e.bitfield.start + e.bitfield.length; i++) {
-          blocks.push(i)
-        }
-      }
     }

     if (userData.has('corestore/name') && userData.has('corestore/namespace')) {
@@ -261,25 +199,12 @@ async function store (storage, { version, dryRun = true, gc = true }) {
     ctx.setAuth(auth)

     const getTreeNode = (index) => (treeNodes.get(index) || getTreeNodeFromFile(files.tree, index))
-    const roots = tree.rootHash === null || blocks.length > 0 ? await getRoots(tree.length, getTreeNode) : null

     if (tree.length) {
-      if (tree.rootHash === null) tree.rootHash = crypto.tree(roots)
+      if (tree.rootHash === null) tree.rootHash = crypto.tree(await getRoots(tree.length, getTreeNode))
       ctx.setHead(tree)
     }

-    blocks.sort((a, b) => a - b)
-
-    for (const index of blocks) {
-      if (index === contiguousLength) contiguousLength++
-      const blk = await getBlockFromFile(files.data, index, roots, getTreeNode)
-      ctx.putBlock(index, blk)
-    }
-
-    if (contiguousLength > 0) {
-      ctx.setHints({ contiguousLength })
-    }
-
     tx.putCore(discoveryKey, core)
     if (core.alias) tx.putCoreByAlias(core.alias, discoveryKey)

@@ -337,6 +262,9 @@ async function core (core, { version, dryRun = true, gc = true }) {
     return // no data
   }

+  const oplog = await readOplog(files.oplog)
+  if (!oplog) throw new Error('No oplog available')
+
   const treeData = new Slicer()

   let treeIndex = 0
@@ -346,22 +274,27 @@ async function core (core, { version, dryRun = true, gc = true }) {
   let bitfield = b4a.concat(buf)
   if (bitfield.byteLength & 4095) bitfield = b4a.concat([bitfield, b4a.alloc(4096 - (bitfield.byteLength & 4095))])

   const pages = new Map()
-
-  for await (const data of core.createBlockStream()) {
-    const { page, n } = getPage(data.index)
-    setBit(page, n)
-  }
+  const headerBits = new Map()

   const roots = await getRoots(head.length, getTreeNode)

+  for (const e of oplog.entries) {
+    if (!e.bitfield) continue
+
+    for (let i = 0; i < e.bitfield.length; i++) {
+      headerBits.set(i + e.bitfield.start, !e.bitfield.drop)
+    }
+  }
+
   let w = core.write()
   for (const index of allBits(bitfield)) {
-    const { page, n } = getPage(index)
-    setBit(page, n)
+    if (headerBits.get(index) === false) continue
+
+    setBitInPage(index)

     const blk = await getBlockFromFile(files.data, index, roots, getTreeNode)

@@ -374,13 +307,34 @@ async function core (core, { version, dryRun = true, gc = true }) {
     w.putBlock(index, blk)
   }

+  for (const [index, bit] of headerBits) {
+    if (!bit) continue
+
+    setBitInPage(index)
+
+    const blk = await getBlockFromFile(files.data, index, roots, getTreeNode)
+    w.putBlock(index, blk)
+  }
+
   for (const [index, page] of pages) {
     w.putBitfieldPage(index, b4a.from(page.buffer, page.byteOffset, page.byteLength))
   }

   await w.flush()

+  let contiguousLength = 0
+  for await (const data of core.createBlockStream()) {
+    if (data.index === contiguousLength) contiguousLength++
+    else break
+  }
+
+  if (contiguousLength) {
+    const w = core.write()
+    w.setHints({ contiguousLength })
+    await w.flush()
+  }
+
   await commitCoreMigration(auth, core, version)

   if (gc) await runGC()
@@ -395,15 +349,22 @@ async function core (core, { version, dryRun = true, gc = true }) {

-  function getPage (index) {
+  function setBitInPage (index) {
     const n = index & 32767
     const p = (index - n) / 32768

     let page = pages.get(p)
-    if (page) return { n, page }

-    page = new Uint32Array(1024)
-    pages.set(p, page)
+    if (!page) {
+      page = new Uint32Array(1024)
+      pages.set(p, page)
+    }
+
+    const o = n & 31
+    const b = (n - o) / 32
+    const v = 1 << o

-    return { n, page }
+    page[b] |= v
   }

   function getTreeNode (index) {
@@ -606,16 +565,64 @@ function * allBits (buffer) {

       for (let k = 0; k < 32; k++) {
         const m = 1 << k
-        if (n & m) yield i * EMPTY_PAGE.byteLength * 8 + j * 32 + k
+        if (n & m) yield i * 8 + j * 32 + k
       }
     }
   }
 }

-function setBit (page, n) {
-  const o = n & 31
-  const b = (n - o) / 32
-  const v = 1 << o
-
-  page[b] |= v
+function readOplog (oplog) {
+  return new Promise(function (resolve) {
+    fs.readFile(oplog, function (err, buffer) {
+      if (err) return resolve(null)
+
+      const state = { start: 0, end: buffer.byteLength, buffer }
+      const headers = [1, 0]
+
+      const h1 = decodeOplogHeader(state)
+      state.start = 4096
+
+      const h2 = decodeOplogHeader(state)
+      state.start = 4096 * 2
+
+      if (!h1 && !h2) return resolve(null)
+
+      if (h1 && !h2) {
+        headers[0] = h1.header
+        headers[1] = h1.header
+      } else if (!h1 && h2) {
+        headers[0] = (h2.header + 1) & 1
+        headers[1] = h2.header
+      } else {
+        headers[0] = h1.header
+        headers[1] = h2.header
+      }
+
+      const header = (headers[0] + headers[1]) & 1
+      const result = { path: path.dirname(oplog), header: null, entries: [] }
+      const decoded = []
+
+      result.header = header ? h2.message : h1.message
+
+      if (result.header.external) {
+        throw new Error('External headers not migrate-able atm')
+      }
+
+      while (true) {
+        const entry = decodeOplogEntry(state)
+        if (!entry) break
+        if (entry.header !== header) break
+
+        decoded.push(entry)
+      }
+
+      while (decoded.length > 0 && decoded[decoded.length - 1].partial) decoded.pop()
+
+      for (const e of decoded) {
+        result.entries.push(e.message)
+      }
+
+      resolve(result)
+    })
+  })
 }

From eda7389217baf4ca5a1bf08c02fafb85f69a2943 Mon Sep 17 00:00:00 2001
From: Mathias Buus
Date: Sat, 11 Jan 2025 12:44:53 +0100
Subject: [PATCH 36/41] latest hyperschema

---
 spec/hyperschema/index.js | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/spec/hyperschema/index.js b/spec/hyperschema/index.js
index b344a52..a252d24 100644
--- a/spec/hyperschema/index.js
+++ b/spec/hyperschema/index.js
@@ -505,4 +505,6 @@ function getStruct (name, v = VERSION) {
   }
 }

-module.exports = { resolveStruct: getStruct, getStruct, getEnum, getEncoding, encode, decode, setVersion, version }
+const resolveStruct = getStruct // compat
+
+module.exports = { resolveStruct, getStruct, getEnum, getEncoding, encode, decode, setVersion, version }

From bedf95949dbde1e277f176d149409a8409e7930f Mon Sep 17 00:00:00 2001
From: Mathias Buus
Date: Sat, 11 Jan 2025 12:47:58 +0100
Subject: [PATCH 37/41] schema is not versioned

---
 build.js     | 2 +-
 package.json | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/build.js b/build.js
index 6ce7607..86f9a71 100644
--- a/build.js
+++ b/build.js
@@ -2,7 +2,7 @@ const Hyperschema = require('hyperschema')

 const SPEC = './spec/hyperschema'

-const schema = Hyperschema.from(SPEC)
+const schema = Hyperschema.from(SPEC, { versioned: false })
 const corestore = schema.namespace('corestore')

 corestore.register({
diff --git a/package.json b/package.json
index b94ff7b..9c6c14c 100644
--- a/package.json
+++ b/package.json
@@ -33,7 +33,7 @@
     "compact-encoding": "^2.16.0",
     "flat-tree": "^1.12.1",
     "hypercore-crypto": "^3.4.2",
-    "hyperschema": "^1.3.3",
+    "hyperschema": "^1.7.0",
     "index-encoder": "^3.3.2",
     "resolve-reject-promise": "^1.0.0",
     "rocksdb-native": "^3.1.1",

From ad596f874f49f21888528ad72d7ebfbae091ac48 Mon Sep 17 00:00:00 2001
From: Mathias Buus
Date: Mon, 13 Jan 2025 09:53:26 +0100
Subject: [PATCH 38/41] storage takes ownership of the column family

---
 index.js | 20 +++++++++++++++++++-
 1 file changed, 19 insertions(+), 1 deletion(-)

diff --git a/index.js b/index.js
index fbe7cf6..3f1a868 100644
--- a/index.js
+++ b/index.js
@@ -4,6 +4,7 @@ const ScopeLock = require('scope-lock')
 const View = require('./lib/view.js')

 const VERSION = 1
+const COLUMN_FAMILY = 'corestore'

 const {
   CorestoreRX,
@@ -253,7 +254,8 @@ class HypercoreStorage {
 class CorestoreStorage {
   constructor (db) {
     this.path = typeof db === 'string' ? db : db.path
-    this.db = typeof db === 'string' ? new RocksDB(db) : db
+    this.rocks = typeof db === 'string' ? new RocksDB(db) : db
+    this.db = createColumnFamily(this.rocks)
     this.view = null
     this.enters = 0
     this.lock = new ScopeLock()
@@ -435,6 +437,7 @@ class CorestoreStorage {
     if (this.db.closed) return
     await this._flush()
     await this.db.close()
+    await this.rocks.close()
   }

   async clear () {
@@ -677,3 +680,18 @@ function getBatch (sessions, name, alloc) {
 function isCorestoreStorage (s) {
   return typeof s === 'object' && !!s && typeof s.setDefaultDiscoveryKey === 'function'
 }
+
+function createColumnFamily (db) {
+  const col = new RocksDB.ColumnFamily(COLUMN_FAMILY, {
+    // tuning! atm just the default tuning from rocks, TODO: tweak
+    enableBlobFiles: false,
+    minBlobSize: 0,
+    blobFileSize: 0,
+    enableBlobGarbageCollection: true,
+    tableBlockSize: 16384,
+    tableCacheIndexAndFilterBlocks: true,
+    tableFormatVersion: 4
+  })
+
+  return db.columnFamily(col)
+}

From 890f2c72290ea059ae92652e24859511e14c9ee1 Mon Sep 17 00:00:00 2001
From: Christophe Diederichs
Date: Mon, 13 Jan 2025 13:04:25 +0000
Subject: [PATCH 39/41] view should check ranges during get

---
 lib/view.js | 21 ++++++++++++++++++---
 1 file changed, 18 insertions(+), 3 deletions(-)

diff --git a/lib/view.js b/lib/view.js
index 315561a..bfecf49 100644
--- a/lib/view.js
+++ b/lib/view.js
@@ -225,9 +225,10 @@ class View {
   _indexAndGet (read, key) {
     this._index()
     const change = this.map.get(b4a.toString(key, 'hex'))
-    if (change === undefined) return read.get(key)
-    if (typeof change[1] === 'string') return Promise.resolve(b4a.from(change[1]))
-    return Promise.resolve(change[1])
+    if (change !== undefined) return Promise.resolve(ensureBuffer(change[1]))
+    const range = this._findRange(key)
+    if (range) return Promise.resolve(range.value)
+    return read.get(key)
   }

   _attached () {
@@ -271,6 +272,15 @@ class View {
     this.ranges.push(range)
   }

+  // unsorted so writes are cheap and reads are expensive
+  _findRange (key) {
+    if (this.ranges === null) return null
+    for (const r of this.ranges) {
+      if (b4a.compare(r[0], key) <= 0 && b4a.compare(r[2], key) > 0) return r
+    }
+    return null
+  }
+
   apply (changes) {
     if (this.snap !== null) throw new Error('Illegal to push changes to a snapshot')

@@ -325,3 +335,8 @@ function reverseArray (list) {
   for (let i = 0; i < list.length; i++) r[r.length - 1 - i] = list[i]
   return r
 }
+
+function ensureBuffer (value) {
+  if (typeof value === 'string') return b4a.from(value)
+  return value
+}

From ef77a2e3791a5f0f745ca6a9ba2872b391c94b7e Mon Sep 17 00:00:00 2001
From: Mathias Buus
Date: Mon, 13 Jan 2025 15:44:38 +0100
Subject: [PATCH 40/41] fix dropped range issues and user data being mb strings

---
 lib/tx.js   |  3 ++-
 lib/view.js | 31 ++++++++++++-------------------
 2 files changed, 14 insertions(+), 20 deletions(-)

diff --git a/lib/tx.js b/lib/tx.js
index 6667d34..83885c6 100644
--- a/lib/tx.js
+++ b/lib/tx.js
@@ -91,7 +91,8 @@ class CoreTX {
   }

   putUserData (key, value) {
-    this.changes.push([core.userData(this.core.dataPointer, key), value, null])
+    const buffer = typeof value === 'string' ? b4a.from(value) : value
+    this.changes.push([core.userData(this.core.dataPointer, key), buffer, null])
   }

   deleteUserData (key) {
diff --git a/lib/view.js b/lib/view.js
index bfecf49..cc7252f 100644
--- a/lib/view.js
+++ b/lib/view.js
@@ -1,6 +1,8 @@
 const { Readable, getStreamError } = require('streamx')
 const b4a = require('b4a')

+const DROPPED = [null, null, null]
+
 class OverlayStream extends Readable {
   constructor (stream, start, end, reverse, changes, ranges) {
     super()
@@ -139,6 +141,10 @@ class Overlay {
     changes.sort(cmp)
     ranges.sort(cmp)

+    while (changes.length > 0 && changes[changes.length - 1] === DROPPED) {
+      changes.pop()
+    }
+
     this.indexed = view.indexed
     this.changes = changes
     this.ranges = ranges
@@ -225,10 +231,8 @@ class View {
   _indexAndGet (read, key) {
     this._index()
     const change = this.map.get(b4a.toString(key, 'hex'))
-    if (change !== undefined) return Promise.resolve(ensureBuffer(change[1]))
-    const range = this._findRange(key)
-    if (range) return Promise.resolve(range.value)
-    return read.get(key)
+    if (change === undefined) return read.get(key)
+    return Promise.resolve(change[1])
   }

   _attached () {
@@ -265,22 +269,13 @@ class View {
     const s = b4a.toString(range[0], 'hex')
     const e = b4a.toString(range[2], 'hex')

     for (const key of this.map.keys()) {
-      if (s <= key && key < e) this.map.delete(key)
+      if (s <= key && key < e) this.map.set(key, DROPPED)
     }

     if (this.ranges === null) this.ranges = []
     this.ranges.push(range)
   }

-  // unsorted so writes are cheap and reads are expensive
-  _findRange (key) {
-    if (this.ranges === null) return null
-    for (const r of this.ranges) {
-      if (b4a.compare(r[0], key) <= 0 && b4a.compare(r[2], key) > 0) return r
-    }
-    return null
-  }
-
   apply (changes) {
     if (this.snap !== null) throw new Error('Illegal to push changes to a snapshot')

@@ -320,6 +315,9 @@ class View {
 module.exports = View

 function cmpChange (a, b) {
+  if (a === DROPPED) return b === DROPPED ? 0 : 1
+  if (b === DROPPED) return a === DROPPED ? 0 : -1
+
   const c = b4a.compare(a[0], b[0])
   return c === 0 ? b4a.compare(a[2], b[2]) : c
 }
@@ -335,8 +333,3 @@ function reverseArray (list) {
   for (let i = 0; i < list.length; i++) r[r.length - 1 - i] = list[i]
   return r
 }
-
-function ensureBuffer (value) {
-  if (typeof value === 'string') return b4a.from(value)
-  return value
-}

From ec4b2f0cfc616bdd3efd38429ead3ccad87ed89b Mon Sep 17 00:00:00 2001
From: Mathias Buus
Date: Mon, 13 Jan 2025 15:53:34 +0100
Subject: [PATCH 41/41] no need for special casing it

---
 lib/view.js | 13 ++-----------
 1 file changed, 2 insertions(+), 11 deletions(-)

diff --git a/lib/view.js b/lib/view.js
index cc7252f..5287877 100644
--- a/lib/view.js
+++ b/lib/view.js
@@ -1,8 +1,6 @@
 const { Readable, getStreamError } = require('streamx')
 const b4a = require('b4a')

-const DROPPED = [null, null, null]
-
 class OverlayStream extends Readable {
   constructor (stream, start, end, reverse, changes, ranges) {
     super()
@@ -141,10 +139,6 @@ class Overlay {
     changes.sort(cmp)
     ranges.sort(cmp)

-    while (changes.length > 0 && changes[changes.length - 1] === DROPPED) {
-      changes.pop()
-    }
-
     this.indexed = view.indexed
     this.changes = changes
     this.ranges = ranges
@@ -268,8 +262,8 @@ class View {
     const s = b4a.toString(range[0], 'hex')
     const e = b4a.toString(range[2], 'hex')

-    for (const key of this.map.keys()) {
-      if (s <= key && key < e) this.map.set(key, DROPPED)
+    for (const [key, c] of this.map) {
+      if (s <= key && key < e) this.map.set(key, [c[0], null, null])
     }

     if (this.ranges === null) this.ranges = []
@@ -315,9 +309,6 @@ class View {
 module.exports = View

 function cmpChange (a, b) {
-  if (a === DROPPED) return b === DROPPED ? 0 : 1
-  if (b === DROPPED) return a === DROPPED ? 0 : -1
-
   const c = b4a.compare(a[0], b[0])
   return c === 0 ? b4a.compare(a[2], b[2]) : c
 }
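
For orientation, a brief sketch of how the atom flow from patches 25 and 30 is meant to compose end to end. This is a hedged illustration, not code from the patches: the store path and session name are placeholders, and the onflush registration method on Atom is an assumption (the series shows Atom draining its flushes array, but not how handlers are registered).

  // Minimal sketch, assuming CorestoreStorage is the module export of index.js
  // and that Atom exposes an onflush(fn) hook feeding the `flushes` array.
  const CorestoreStorage = require('./index.js')

  async function example () {
    const store = new CorestoreStorage('/tmp/example-store') // placeholder path
    const core = await store.resume() // default core; null on a fresh store
    if (!core) return

    const atom = store.atom()

    // With an atom injected, createSession routes its reads and writes through
    // the atom's view instead of flushing straight to the db (patch 25).
    const session = await core.createSession('example', null, atom)

    // onflush handlers may now be async; Atom#flush awaits them all (patch 30).
    atom.onflush(async () => {
      // e.g. swap in in-memory state that depends on the flushed data
    })

    // Nothing is visible in the db until the atom flushes.
    await atom.flush()
    return session
  }

  example().catch(console.error)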
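The bitfield paging arithmetic used by the migration in patches 32 and 35 also deserves a worked rendering: a page is 4096 bytes, that is 1024 uint32 words, or 32768 bits. The sketch below restates that math in isolation; locate is a hypothetical helper name introduced here for illustration, not part of the patched code.

  // Sketch of the page math behind setBitInPage/allBits in migrations/0.
  function locate (index) {
    const n = index & 32767       // bit offset inside the page
    const p = (index - n) / 32768 // page number
    const o = n & 31              // bit offset inside the uint32 word
    const b = (n - o) / 32        // word offset inside the page
    return { p, b, o }
  }

  // Round trip: allBits yields i * 8 + j * 32 + k, where i is the page's byte
  // offset (4096 bytes -> 32768 bits), j the word index and k the bit index.
  const { p, b, o } = locate(40000) // -> p = 1, b = 226, o = 0
  console.log(p * 4096 * 8 + b * 32 + o) // prints 40000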