From 2fe769d82936bc857808c548db4cf0b4caf41969 Mon Sep 17 00:00:00 2001
From: Sam Gwilym
Date: Fri, 2 Feb 2024 15:39:36 +0100
Subject: [PATCH 1/7] Refactor for string-of-string Paths, Areas replacing 3dProducts, Entry

Paths used to be single bytestrings; they are now sequences of bytestrings.
This changes how entries are queried, and how entries are deleted by prefix
pruning. The classes implementing PrefixIterator were simplified (and, in
KeyHopTree's case, removed entirely).

Areas replace the more complex 3dProducts. Stores are now queried with Areas,
and fingerprints are now generated (via Storage.summarise) using Areas.

Entry used to have two properties: identifier and record. These have been
removed, and the properties they each held have been pulled up into the Entry
type itself.

This commit also adopts the willow_utils module. This lets us drop the
outdated Entry encoding utilities this module used to have, along with a few
other miscellaneous utilities.

KV drivers' list method now lets you skip specifying a lower and upper bound
and instead specify just a prefix to fetch with.
---
 .nova/Configuration.json | 5 +-
 deno.json | 1 +
 deps.ts | 3 +-
 mod.universal.ts | 3 -
 src/entries/encode_decode.test.ts | 100 --
 src/entries/encode_decode.ts | 159 ---
 src/entries/types.ts | 22 -
 src/replica/events.ts | 2 +-
 src/replica/replica.test.ts | 193 +--
 src/replica/replica.ts | 189 ++-
 src/replica/storage/entry_drivers/kv_store.ts | 49 +-
 src/replica/storage/entry_drivers/memory.ts | 15 +-
 src/replica/storage/kv/kv_driver_deno.ts | 14 +-
 src/replica/storage/kv/prefixed_driver.ts | 33 +-
 src/replica/storage/kv/types.ts | 6 +-
 .../storage/payload_drivers/filesystem.ts | 10 +-
 src/replica/storage/payload_drivers/memory.ts | 12 +-
 .../storage/prefix_iterators/key_hop_tree.ts | 395 ------
 .../prefix_iterators/prefix_iterator.test.ts | 105 +-
 .../storage/prefix_iterators/radix_tree.ts | 226 ++++
 .../storage/prefix_iterators/radixish_tree.ts | 286 -----
 .../prefix_iterators/simple_key_iterator.ts | 81 +-
 src/replica/storage/prefix_iterators/types.ts | 12 +-
 .../storage/storage_3d/storage_3d.test.ts | 1127 ++++-------------
 .../storage/storage_3d/triple_storage.ts | 805 ++++--------
 src/replica/storage/storage_3d/types.ts | 36 +-
 .../summarisable_storage/monoid_skiplist.ts | 5 +-
 .../storage/summarisable_storage/simple_kv.ts | 3 +-
 src/replica/storage/types.ts | 6 +-
 src/replica/types.ts | 107 +-
 src/replica/util.ts | 180 ++-
 src/test/test_schemes.ts | 55 +-
 src/test/utils.ts | 27 +
 src/util/bytes.test.ts | 34 -
 src/util/bytes.ts | 56 -
 35 files changed, 1306 insertions(+), 3056 deletions(-)
 delete mode 100644 src/entries/encode_decode.test.ts
 delete mode 100644 src/entries/encode_decode.ts
 delete mode 100644 src/entries/types.ts
 delete mode 100644 src/replica/storage/prefix_iterators/key_hop_tree.ts
 create mode 100644 src/replica/storage/prefix_iterators/radix_tree.ts
 delete mode 100644 src/replica/storage/prefix_iterators/radixish_tree.ts
 delete mode 100644 src/util/bytes.test.ts
 delete mode 100644 src/util/bytes.ts

diff --git a/.nova/Configuration.json b/.nova/Configuration.json
index 0a35e99..74dc3eb 100644
--- a/.nova/Configuration.json
+++ b/.nova/Configuration.json
@@ -5,8 +5,9 @@
  "\/Users\/gwil\/Projects\/willow-js\/debug"
  ],
  "co.gwil.deno.config.enableLinting" : "true",
- "co.gwil.deno.config.enableUnstable" : "true",
+ "co.gwil.deno.config.enableUnstable" : true,
  "deno.disablePaths" : [
  "dist"
- ]
+ ],
+ "deno.unstable" : true
 }
diff --git a/deno.json b/deno.json
index c9e99a8..a259b60 100644
--- a/deno.json
+++ b/deno.json
@@ -1,6 +1,7 @@
 {
"tasks": { "test": "deno test --unstable src", + "test-watch": "deno test --unstable src --watch", "bundle": "deno run --allow-all scripts/build_web_bundle.ts" }, "lock": false, diff --git a/deps.ts b/deps.ts index af29a45..1ff6de0 100644 --- a/deps.ts +++ b/deps.ts @@ -1,4 +1,5 @@ -export * as Products from "https://deno.land/x/willow_3d_products@0.1.0/mod.ts"; +export * from "https://deno.land/x/willow_utils@0.2.0/mod.ts"; + export { deferred } from "https://deno.land/std@0.202.0/async/deferred.ts"; export { concat } from "https://deno.land/std@0.202.0/bytes/concat.ts"; export { equals as equalsBytes } from "https://deno.land/std@0.202.0/bytes/equals.ts"; diff --git a/mod.universal.ts b/mod.universal.ts index 7a66748..6748856 100644 --- a/mod.universal.ts +++ b/mod.universal.ts @@ -1,6 +1,3 @@ -export * from "./src/entries/types.ts"; -export * from "./src/entries/encode_decode.ts"; - export * from "./src/replica/types.ts"; export * from "./src/replica/replica.ts"; diff --git a/src/entries/encode_decode.test.ts b/src/entries/encode_decode.test.ts deleted file mode 100644 index 130477f..0000000 --- a/src/entries/encode_decode.test.ts +++ /dev/null @@ -1,100 +0,0 @@ -import { - NamespaceScheme, - PathLengthScheme, - PayloadScheme, - SubspaceScheme, -} from "../replica/types.ts"; -import { decodeEntry, encodeEntry } from "./encode_decode.ts"; -import { Entry } from "./types.ts"; -import { assertEquals } from "https://deno.land/std@0.188.0/testing/asserts.ts"; - -Deno.test("Encodes and decodes", () => { - const entry: Entry = { - identifier: { - namespace: 1, - subspace: 2, - path: new Uint8Array([3, 3, 3, 3]), - }, - record: { - timestamp: BigInt(1679467892424), - length: BigInt(256), - hash: 4, - }, - }; - - const encoded = encodeEntry(entry, { - namespaceScheme: namespaceScheme, - subspaceScheme: subspaceScheme, - pathLengthScheme: pathLengthScheme, - payloadScheme: payloadScheme, - }); - - const decoded = decodeEntry(encoded, { - namespaceScheme: namespaceScheme, - subspaceScheme: subspaceScheme, - pathLengthScheme: pathLengthScheme, - payloadScheme: payloadScheme, - }); - - assertEquals(decoded, entry); -}); - -const namespaceScheme: NamespaceScheme = { - encode(namespace) { - return new Uint8Array([namespace]); - }, - decode(encoded) { - return encoded[0]; - }, - encodedLength: () => 1, - isEqual: (a, b) => a === b, -}; - -const subspaceScheme: SubspaceScheme = { - encode(namespace) { - return new Uint8Array([0, namespace]); - }, - decode(encoded) { - return encoded[1]; - }, - encodedLength: () => 2, - isEqual: (a, b) => a === b, - minimalSubspaceKey: 0, - order: (a, b) => { - if (a < b) return -1; - else if (a > b) return 1; - - return 0; - }, - successor: (a) => a + 1, -}; - -const pathLengthScheme: PathLengthScheme = { - encode(length) { - return new Uint8Array([length]); - }, - decode(encoded) { - return encoded[0]; - }, - encodedLength() { - return 1; - }, - maxLength: 4, -}; - -const payloadScheme: PayloadScheme = { - encode(value) { - return new Uint8Array([0, 0, 0, value]); - }, - decode(encoded) { - return encoded[3]; - }, - encodedLength: () => 4, - fromBytes: () => Promise.resolve(1), - order: (a, b) => { - if (a < b) return -1; - else if (a > b) return 1; - - return 0; - }, -}; diff --git a/src/entries/encode_decode.ts b/src/entries/encode_decode.ts deleted file mode 100644 index a227621..0000000 --- a/src/entries/encode_decode.ts +++ /dev/null @@ -1,159 +0,0 @@ -import { - NamespaceScheme, - PathLengthScheme, - PayloadScheme, - SubspaceScheme, -} from "../replica/types.ts"; 
-import { Entry } from "./types.ts"; - -export function encodeEntry< - NamespacePublicKey, - SubspacePublicKey, - PayloadDigest, ->( - entry: Entry, - opts: { - namespaceScheme: NamespaceScheme; - subspaceScheme: SubspaceScheme; - pathLengthScheme: PathLengthScheme; - payloadScheme: PayloadScheme; - }, -): Uint8Array { - if (entry.identifier.path.byteLength > 256) { - throw new Error("Record identifier path is longer than 2048 bits"); - } - - // Namespace pubkey + Author pubkey + 64 bit uint + path bytelength - const encodedNamespace = opts.namespaceScheme.encode( - entry.identifier.namespace, - ); - const encodedSubspace = opts.subspaceScheme.encode( - entry.identifier.subspace, - ); - const encodedPathLength = opts.pathLengthScheme.encode( - entry.identifier.path.byteLength, - ); - - //const encodedPath = concat(encodedPathLength, entry.identifier.path); - - const encodedPayloadDigest = opts.payloadScheme.encode( - entry.record.hash, - ); - - const recordIdentifierLength = encodedNamespace.byteLength + - encodedSubspace.byteLength + - +encodedPathLength.byteLength + - entry.identifier.path.byteLength; - - // time (uint64) + length (uint64) + digest - const recordLength = 8 + 8 + encodedPayloadDigest.byteLength; - - const totalLength = recordIdentifierLength + recordLength; - - const ui8 = new Uint8Array(totalLength); - const dataView = new DataView(ui8.buffer); - - let currentPosition = 0; - - // Record identifier - - // Namespace pubkey - ui8.set(encodedNamespace, currentPosition); - - currentPosition += encodedNamespace.byteLength; - - // Subspace pubkey - - ui8.set(encodedSubspace, currentPosition); - - currentPosition += encodedSubspace.byteLength; - - // Path - ui8.set(encodedPathLength, currentPosition); - - currentPosition += encodedPathLength.byteLength; - - ui8.set(entry.identifier.path, currentPosition); - - currentPosition += entry.identifier.path.byteLength; - - // Record - - // Timestamp - dataView.setBigUint64(currentPosition, entry.record.timestamp); - - currentPosition += 8; - - // Length - dataView.setBigUint64(currentPosition, entry.record.length); - - currentPosition += 8; - - // Hash - ui8.set(encodedPayloadDigest, currentPosition); - - return ui8; -} - -export function decodeEntry< - NamespacePublicKey, - SubspacePublicKey, - PayloadDigest, ->( - encodedEntry: Uint8Array, - opts: { - namespaceScheme: NamespaceScheme; - subspaceScheme: SubspaceScheme; - pathLengthScheme: PathLengthScheme; - payloadScheme: PayloadScheme; - }, -): Entry { - const dataView = new DataView(encodedEntry.buffer); - - const namespaceKey = opts.namespaceScheme.decode( - encodedEntry.subarray(0), - ); - const encodedNamespaceLength = opts.namespaceScheme.encodedLength( - namespaceKey, - ); - - const subspaceKey = opts.subspaceScheme.decode( - encodedEntry.subarray(encodedNamespaceLength), - ); - const encodedSubspaceLength = opts.subspaceScheme.encodedLength( - subspaceKey, - ); - - const pathLength = opts.pathLengthScheme.decode( - encodedEntry.subarray(encodedNamespaceLength + encodedSubspaceLength), - ); - - const encodedPathLengthLength = opts.pathLengthScheme.encodedLength( - pathLength, - ); - - const path = encodedEntry.subarray( - encodedNamespaceLength + encodedSubspaceLength + encodedPathLengthLength, - encodedNamespaceLength + encodedSubspaceLength + encodedPathLengthLength + - pathLength, - ); - - const identifierLength = encodedNamespaceLength + encodedSubspaceLength + - encodedPathLengthLength + - pathLength; - - return { - identifier: { - namespace: namespaceKey, - subspace: 
subspaceKey, - path: path, - }, - record: { - timestamp: dataView.getBigUint64(identifierLength), - length: dataView.getBigUint64(identifierLength + 8), - hash: opts.payloadScheme.decode( - encodedEntry.subarray(identifierLength + 8 + 8), - ), - }, - }; -} diff --git a/src/entries/types.ts b/src/entries/types.ts deleted file mode 100644 index 55ba7ec..0000000 --- a/src/entries/types.ts +++ /dev/null @@ -1,22 +0,0 @@ -export type RecordIdentifier = { - /** The namespace's public key as a fixed-width integer */ - namespace: NamespacePublicKey; - /** The author's public key as a fixed-width integer*/ - subspace: SubspacePublicKey; - /** Bit string of length at most 2048 */ - path: Uint8Array; -}; - -export type Record = { - /** 64 bit integer (interpreted as microseconds since the Unix epoch). Big-endian. */ - timestamp: bigint; - /** 64 bit integer */ - length: bigint; - /** digest-length bit integer*/ - hash: PayloadDigest; -}; - -export type Entry = { - identifier: RecordIdentifier; - record: Record; -}; diff --git a/src/replica/events.ts b/src/replica/events.ts index ad0bb4c..05a4f55 100644 --- a/src/replica/events.ts +++ b/src/replica/events.ts @@ -1,4 +1,4 @@ -import { Entry } from "../entries/types.ts"; +import { Entry } from "../../deps.ts"; import { Payload } from "./types.ts"; export class EntryPayloadSetEvent< diff --git a/src/replica/replica.test.ts b/src/replica/replica.test.ts index 9334066..79f6109 100644 --- a/src/replica/replica.test.ts +++ b/src/replica/replica.test.ts @@ -2,7 +2,6 @@ import { assert, assertEquals, } from "https://deno.land/std@0.177.0/testing/asserts.ts"; -import { compareBytes } from "../util/bytes.ts"; import { Replica } from "./replica.ts"; import { crypto } from "https://deno.land/std@0.188.0/crypto/crypto.ts"; import { encodeEntryKeys, encodeSummarisableStorageValue } from "./util.ts"; @@ -10,11 +9,12 @@ import { testSchemeAuthorisation, testSchemeFingerprint, testSchemeNamespace, - testSchemePathLength, + testSchemePath, testSchemePayload, testSchemeSubspace, } from "../test/test_schemes.ts"; import { makeSubspaceKeypair } from "../test/crypto.ts"; +import { fullArea, orderBytes, orderPath } from "../../deps.ts"; class TestReplica extends Replica< Uint8Array, @@ -30,7 +30,7 @@ class TestReplica extends Replica< protocolParameters: { namespaceScheme: testSchemeNamespace, subspaceScheme: testSchemeSubspace, - pathLengthScheme: testSchemePathLength, + pathScheme: testSchemePath, payloadScheme: testSchemePayload, authorisationScheme: testSchemeAuthorisation, fingerprintScheme: testSchemeFingerprint, @@ -64,7 +64,7 @@ Deno.test("Replica.set", async (test) => { // Returns an error and does not ingest payload if the entry is invalid const badKeypairRes = await replica.set( { - path: new Uint8Array([1, 2, 3, 4]), + path: [new Uint8Array([1, 2, 3, 4])], payload: new Uint8Array([1, 1, 1, 1]), subspace: authorKeypair.subspace, }, @@ -76,7 +76,13 @@ Deno.test("Replica.set", async (test) => { const entries = []; - for await (const entry of replica.query({ order: "path" })) { + for await ( + const entry of replica.query({ + area: fullArea(), + maxCount: 0, + maxSize: BigInt(0), + }, "subspace") + ) { entries.push(entry); } @@ -88,7 +94,7 @@ Deno.test("Replica.set", async (test) => { const goodKeypairRes = await replica.set( { - path: new Uint8Array([1, 2, 3, 4]), + path: [new Uint8Array([1, 2, 3, 4])], payload: new Uint8Array([1, 1, 1, 1]), subspace: authorKeypair.subspace, }, @@ -99,7 +105,13 @@ Deno.test("Replica.set", async (test) => { const entries = []; - for 
await (const entry of replica.query({ order: "path" })) { + for await ( + const entry of replica.query({ + area: fullArea(), + maxCount: 0, + maxSize: BigInt(0), + }, "subspace") + ) { entries.push(entry); } @@ -112,7 +124,7 @@ Deno.test("Replica.set", async (test) => { const res = await replica.set( { - path: new Uint8Array([1, 2, 3, 4]), + path: [new Uint8Array([1, 2, 3, 4])], payload: new Uint8Array([1, 1, 1, 1]), timestamp: BigInt(0), subspace: authorKeypair.subspace, @@ -121,7 +133,7 @@ Deno.test("Replica.set", async (test) => { ); assert(res.kind === "success"); - assertEquals(res.entry.record.timestamp, BigInt(0)); + assertEquals(res.entry.timestamp, BigInt(0)); }); await test.step("If no timestamp is set, and there is nothing else at the same path, use the current time.", async () => { @@ -131,7 +143,7 @@ Deno.test("Replica.set", async (test) => { const res = await replica.set( { - path: new Uint8Array([1, 2, 3, 4]), + path: [new Uint8Array([1, 2, 3, 4])], payload: new Uint8Array([1, 1, 1, 1]), subspace: authorKeypair.subspace, }, @@ -139,11 +151,9 @@ Deno.test("Replica.set", async (test) => { ); assert(res.kind === "success"); - assert(res.entry.record.timestamp >= timestampBefore); - assert(res.entry.record.timestamp <= BigInt(Date.now() * 1000)); + assert(res.entry.timestamp >= timestampBefore); + assert(res.entry.timestamp <= BigInt(Date.now() * 1000)); }); - - // if a timestamp is set, }); // ================================== @@ -160,7 +170,7 @@ Deno.test("Replica.ingestEntry", async (test) => { const otherReplicaRes = await otherReplica.set( { - path: new Uint8Array([0, 0, 0, 0]), + path: [new Uint8Array([0, 0, 0, 0])], payload: new Uint8Array(), subspace: authorKeypair.subspace, }, @@ -184,7 +194,7 @@ Deno.test("Replica.ingestEntry", async (test) => { const otherReplicaRes = await otherReplica.set( { - path: new Uint8Array([0, 0, 0, 0]), + path: [new Uint8Array([0, 0, 0, 0])], payload: new Uint8Array(), subspace: authorKeypair.subspace, }, @@ -208,7 +218,7 @@ Deno.test("Replica.ingestEntry", async (test) => { await replica.set( { - path: new Uint8Array([0, 0, 0, 0]), + path: [new Uint8Array([0, 0, 0, 0])], payload: new Uint8Array([0, 1, 2, 1]), timestamp: BigInt(2000), subspace: authorKeypair.subspace, @@ -218,7 +228,7 @@ Deno.test("Replica.ingestEntry", async (test) => { const secondRes = await replica.set( { - path: new Uint8Array([0, 0, 0, 0, 1]), + path: [new Uint8Array([0, 0, 0, 0]), new Uint8Array([1])], payload: new Uint8Array([0, 1, 2, 3]), timestamp: BigInt(1000), subspace: authorKeypair.subspace, @@ -235,7 +245,7 @@ Deno.test("Replica.ingestEntry", async (test) => { await replica.set( { - path: new Uint8Array([0, 0, 0, 0]), + path: [new Uint8Array([0, 0, 0, 0])], payload: new Uint8Array([0, 1, 2, 1]), timestamp: BigInt(2000), subspace: authorKeypair.subspace, @@ -245,7 +255,7 @@ Deno.test("Replica.ingestEntry", async (test) => { const secondRes = await replica.set( { - path: new Uint8Array([0, 0, 0, 0]), + path: [new Uint8Array([0, 0, 0, 0])], payload: new Uint8Array([0, 1, 2, 3]), timestamp: BigInt(1000), subspace: authorKeypair.subspace, @@ -262,7 +272,7 @@ Deno.test("Replica.ingestEntry", async (test) => { await replica.set( { - path: new Uint8Array([0, 0, 0, 0]), + path: [new Uint8Array([0, 0, 0, 0])], payload: new Uint8Array([0, 1, 2, 1]), timestamp: BigInt(2000), subspace: authorKeypair.subspace, @@ -272,7 +282,7 @@ Deno.test("Replica.ingestEntry", async (test) => { const secondRes = await replica.set( { - path: new Uint8Array([0, 0, 0, 0]), + path: [new 
Uint8Array([0, 0, 0, 0])], payload: new Uint8Array([0, 1, 2, 3]), timestamp: BigInt(2000), subspace: authorKeypair.subspace, @@ -298,7 +308,7 @@ Deno.test("Replica.ingestEntry", async (test) => { await replica.set( { - path: new Uint8Array([0, 0, 0, 0]), + path: [new Uint8Array([0, 0, 0, 0])], payload: new Uint8Array([0, 1, 2, 1]), timestamp: BigInt(1000), subspace: authorKeypair.subspace, @@ -308,7 +318,7 @@ Deno.test("Replica.ingestEntry", async (test) => { const secondRes = await replica.set( { - path: new Uint8Array([0, 0, 0, 0]), + path: [new Uint8Array([0, 0, 0, 0])], payload: new Uint8Array([0, 1, 2, 3]), timestamp: BigInt(2000), subspace: authorKeypair.subspace, @@ -320,7 +330,13 @@ Deno.test("Replica.ingestEntry", async (test) => { const entries = []; - for await (const entry of replica.query({ order: "path" })) { + for await ( + const entry of replica.query({ + area: fullArea(), + maxCount: 0, + maxSize: BigInt(0), + }, "path") + ) { entries.push(entry); } @@ -335,7 +351,7 @@ Deno.test("Replica.ingestEntry", async (test) => { await replica.set( { - path: new Uint8Array([0, 1]), + path: [new Uint8Array([0]), new Uint8Array([1])], payload: new Uint8Array([0, 1, 2, 1]), timestamp: BigInt(0), subspace: authorKeypair.subspace, @@ -345,7 +361,7 @@ Deno.test("Replica.ingestEntry", async (test) => { await replica.set( { - path: new Uint8Array([0, 2]), + path: [new Uint8Array([0]), new Uint8Array([2])], payload: new Uint8Array([0, 1, 2, 1]), timestamp: BigInt(0), subspace: authorKeypair.subspace, @@ -355,7 +371,7 @@ Deno.test("Replica.ingestEntry", async (test) => { const prefixRes = await replica.set( { - path: new Uint8Array([0]), + path: [new Uint8Array([0])], payload: new Uint8Array([0, 1, 2, 3]), timestamp: BigInt(1), subspace: authorKeypair.subspace, @@ -367,13 +383,19 @@ Deno.test("Replica.ingestEntry", async (test) => { const entries = []; - for await (const entry of replica.query({ order: "path" })) { + for await ( + const entry of replica.query({ + area: fullArea(), + maxCount: 0, + maxSize: BigInt(0), + }, "path") + ) { entries.push(entry); } assertEquals(entries.length, 1); assert(entries[0]); - assertEquals(entries[0][0].identifier.path, new Uint8Array([0])); + assertEquals(entries[0][0].path, [new Uint8Array([0])]); assert(entries[0][1]); assertEquals(await entries[0][1].bytes(), new Uint8Array([0, 1, 2, 3])); }); @@ -383,7 +405,7 @@ Deno.test("Replica.ingestEntry", async (test) => { await replica.set( { - path: new Uint8Array([0, 1]), + path: [new Uint8Array([0]), new Uint8Array([1])], payload: new Uint8Array([0, 1, 2, 1]), timestamp: BigInt(0), subspace: authorKeypair.subspace, @@ -393,7 +415,7 @@ Deno.test("Replica.ingestEntry", async (test) => { await replica.set( { - path: new Uint8Array([0, 1]), + path: [new Uint8Array([0]), new Uint8Array([1])], payload: new Uint8Array([0, 1, 2, 3]), timestamp: BigInt(1), subspace: authorKeypair.subspace, @@ -403,7 +425,7 @@ Deno.test("Replica.ingestEntry", async (test) => { const prefixRes = await replica.set( { - path: new Uint8Array([0]), + path: [new Uint8Array([0])], payload: new Uint8Array([0, 1, 2, 3]), timestamp: BigInt(2), subspace: authorKeypair.subspace, @@ -415,13 +437,19 @@ Deno.test("Replica.ingestEntry", async (test) => { const entries = []; - for await (const entry of replica.query({ order: "path" })) { + for await ( + const entry of replica.query({ + area: fullArea(), + maxCount: 0, + maxSize: BigInt(0), + }, "path") + ) { entries.push(entry); } assertEquals(entries.length, 1); assert(entries[0]); - 
assertEquals(entries[0][0].identifier.path, new Uint8Array([0])); + assertEquals(entries[0][0].path, [new Uint8Array([0])]); assert(entries[0][1]); assertEquals(await entries[0][1].bytes(), new Uint8Array([0, 1, 2, 3])); }); @@ -437,7 +465,7 @@ Deno.test("Replica.ingestPayload", async (test) => { const replica = new TestReplica(); const res = await replica.ingestPayload({ - path: new Uint8Array([0]), + path: [new Uint8Array([0])], subspace: new Uint8Array([0]), timestamp: BigInt(0), }, new Uint8Array()); @@ -455,7 +483,7 @@ Deno.test("Replica.ingestPayload", async (test) => { crypto.getRandomValues(payload); const res = await otherReplica.set({ - path: new Uint8Array([0, 2]), + path: [new Uint8Array([0, 2])], payload, subspace: authorKeypair.subspace, }, authorKeypair.privateKey); @@ -467,17 +495,17 @@ Deno.test("Replica.ingestPayload", async (test) => { assert(res2.kind === "success"); const res3 = await replica.ingestPayload({ - path: res.entry.identifier.path, - subspace: res.entry.identifier.subspace, - timestamp: res.entry.record.timestamp, + path: res.entry.path, + subspace: res.entry.subspaceId, + timestamp: res.entry.timestamp, }, payload); assert(res3.kind === "success"); const res4 = await replica.ingestPayload({ - path: new Uint8Array(res.entry.identifier.path), - subspace: new Uint8Array(res.entry.identifier.subspace), - timestamp: res.entry.record.timestamp, + path: res.entry.path, + subspace: new Uint8Array(res.entry.subspaceId), + timestamp: res.entry.timestamp, }, payload); assert(res4.kind === "no_op"); @@ -492,7 +520,7 @@ Deno.test("Replica.ingestPayload", async (test) => { crypto.getRandomValues(payload); const res = await otherReplica.set({ - path: new Uint8Array([0, 2]), + path: [new Uint8Array([0, 2])], payload, subspace: authorKeypair.subspace, }, authorKeypair.privateKey); @@ -504,9 +532,9 @@ Deno.test("Replica.ingestPayload", async (test) => { assert(res2.kind === "success"); const res3 = await replica.ingestPayload({ - path: new Uint8Array(res.entry.identifier.path), - subspace: new Uint8Array(res.entry.identifier.subspace), - timestamp: res.entry.record.timestamp, + path: res.entry.path, + subspace: res.entry.subspaceId, + timestamp: res.entry.timestamp, }, new Uint8Array(32)); assert(res3.kind === "failure"); @@ -522,7 +550,7 @@ Deno.test("Replica.ingestPayload", async (test) => { crypto.getRandomValues(payload); const res = await otherReplica.set({ - path: new Uint8Array([0, 2]), + path: [new Uint8Array([0, 2])], payload, subspace: authorKeypair.subspace, }, authorKeypair.privateKey); @@ -534,22 +562,28 @@ Deno.test("Replica.ingestPayload", async (test) => { assert(res2.kind === "success"); const res3 = await replica.ingestPayload({ - path: new Uint8Array(res.entry.identifier.path), - subspace: new Uint8Array(res.entry.identifier.subspace), - timestamp: res.entry.record.timestamp, + path: res.entry.path, + subspace: res.entry.subspaceId, + timestamp: res.entry.timestamp, }, payload); assert(res3.kind === "success"); let retrievedPayload; - for await (const [_entry, payload] of replica.query({ order: "path" })) { + for await ( + const [_entry, payload] of replica.query({ + area: fullArea(), + maxCount: 0, + maxSize: BigInt(0), + }, "path") + ) { retrievedPayload = await payload?.bytes(); } assert(retrievedPayload); - assert(compareBytes(payload, retrievedPayload) === 0); + assert(orderBytes(payload, retrievedPayload) === 0); }); }); @@ -568,7 +602,7 @@ Deno.test("Write-ahead flags", async (test) => { const res = await otherReplica.set( { - path: new 
Uint8Array([0, 0, 0, 0]), + path: [new Uint8Array([0, 0, 0, 0])], payload: new Uint8Array(32), timestamp: BigInt(1000), subspace: authorKeypair.subspace, @@ -581,9 +615,9 @@ Deno.test("Write-ahead flags", async (test) => { // Create PTA flag. const keys = encodeEntryKeys( { - path: new Uint8Array(res.entry.identifier.path), - timestamp: res.entry.record.timestamp, - subspace: new Uint8Array(res.entry.identifier.subspace), + path: res.entry.path, + timestamp: res.entry.timestamp, + subspace: res.entry.subspaceId, subspaceEncoding: { encode: (v) => v, @@ -595,31 +629,20 @@ Deno.test("Write-ahead flags", async (test) => { // Create storage value. const storageValue = encodeSummarisableStorageValue({ - payloadHash: res.entry.record.hash, - payloadLength: res.entry.record.length, - authTokenHash: new Uint8Array( + payloadDigest: res.entry.payloadDigest, + payloadLength: res.entry.payloadLength, + authTokenDigest: new Uint8Array( await crypto.subtle.digest("SHA-256", res.authToken), ), payloadScheme: testSchemePayload, - pathLength: res.entry.identifier.path.byteLength, - pathLengthEncoding: { - encode(length) { - return new Uint8Array([length]); - }, - decode(bytes) { - return bytes[0]; - }, - encodedLength() { - return 1; - }, - }, + encodedPathLength: keys.encodedPathLength, }); // Insert const result = await replica.set( { - path: new Uint8Array([0, 0, 0, 0, 1]), + path: [new Uint8Array([0, 0, 0, 0, 1])], payload: new Uint8Array(32), timestamp: BigInt(500), subspace: authorKeypair.subspace, @@ -638,7 +661,13 @@ Deno.test("Write-ahead flags", async (test) => { const entries = []; - for await (const [entry] of replica.query({ order: "path" })) { + for await ( + const [entry] of replica.query({ + area: fullArea(), + maxCount: 0, + maxSize: BigInt(0), + }, "path") + ) { entries.push(entry); } @@ -646,9 +675,9 @@ Deno.test("Write-ahead flags", async (test) => { assert(entries[0]); assert( - compareBytes( - new Uint8Array(entries[0].identifier.path), - new Uint8Array([0, 0, 0, 0, 1]), + orderPath( + entries[0].path, + [new Uint8Array([0, 0, 0, 0, 1])], ) === 0, ); }); @@ -658,7 +687,7 @@ Deno.test("Write-ahead flags", async (test) => { const res = await replica.set( { - path: new Uint8Array([0, 0, 0, 0]), + path: [new Uint8Array([0, 0, 0, 0])], payload: new Uint8Array(32), timestamp: BigInt(1000), subspace: authorKeypair.subspace, @@ -674,7 +703,13 @@ Deno.test("Write-ahead flags", async (test) => { const entries = []; - for await (const [entry] of replica.query({ order: "path" })) { + for await ( + const [entry] of replica.query({ + area: fullArea(), + maxCount: 0, + maxSize: BigInt(0), + }, "path") + ) { entries.push(entry); } diff --git a/src/replica/replica.ts b/src/replica/replica.ts index ce139cc..8b0abd1 100644 --- a/src/replica/replica.ts +++ b/src/replica/replica.ts @@ -1,24 +1,30 @@ import { EntryDriverMemory } from "./storage/entry_drivers/memory.ts"; import { EntryDriver, PayloadDriver } from "./storage/types.ts"; -import { bigintToBytes, compareBytes } from "../util/bytes.ts"; import { EntryInput, IngestEvent, IngestPayloadEvent, Payload, ProtocolParameters, - Query, + QueryOrder, ReplicaOpts, } from "./types.ts"; import { PayloadDriverMemory } from "./storage/payload_drivers/memory.ts"; -import { Entry } from "../entries/types.ts"; import { EntryIngestEvent, EntryPayloadSetEvent, EntryRemoveEvent, PayloadIngestEvent, } from "./events.ts"; -import { concat, deferred, Products } from "../../deps.ts"; +import { + AreaOfInterest, + bigintToBytes, + deferred, + Entry, + OPEN_END, + 
orderPath, + Path, +} from "../../deps.ts"; import { Storage3d } from "./storage/storage_3d/types.ts"; /** A local snapshot of a namespace to be written to, queried from, and synced with other replicas. @@ -79,7 +85,7 @@ export class Replica< this.protocolParams = opts.protocolParameters; const entryDriver = opts.entryDriver || new EntryDriverMemory({ - pathLengthScheme: opts.protocolParameters.pathLengthScheme, + pathScheme: opts.protocolParameters.pathScheme, payloadScheme: opts.protocolParameters.payloadScheme, subspaceScheme: opts.protocolParameters.subspaceScheme, fingerprintScheme: opts.protocolParameters.fingerprintScheme, @@ -111,11 +117,11 @@ export class Replica< .tokenEncoding.decode(await encodedAuthToken?.bytes()); await this.insertEntry({ - path: existingInsert.entry.identifier.path, - subspace: existingInsert.entry.identifier.subspace, - hash: existingInsert.entry.record.hash, - length: existingInsert.entry.record.length, - timestamp: existingInsert.entry.record.timestamp, + path: existingInsert.entry.path, + subspace: existingInsert.entry.subspaceId, + hash: existingInsert.entry.payloadDigest, + length: existingInsert.entry.payloadLength, + timestamp: existingInsert.entry.timestamp, authToken: decodedToken, }); } @@ -139,12 +145,6 @@ export class Replica< input: EntryInput, authorisation: AuthorisationOpts, ) { - const identifier = { - namespace: this.namespace, - subspace: input.subspace, - path: input.path, - }; - const timestamp = input.timestamp !== undefined ? input.timestamp : BigInt(Date.now() * 1000); @@ -152,14 +152,15 @@ export class Replica< // Stage it with the driver const stagedResult = await this.payloadDriver.stage(input.payload); - const record = { - timestamp, - length: BigInt(stagedResult.length), - hash: stagedResult.hash, + const entry: Entry = { + namespaceId: this.namespace, + subspaceId: input.subspace, + path: input.path, + timestamp: timestamp, + payloadLength: stagedResult.length, + payloadDigest: stagedResult.hash, }; - const entry = { identifier, record }; - const authToken = await this.protocolParams.authorisationScheme.authorise( entry, authorisation, @@ -207,7 +208,7 @@ export class Replica< if ( !this.protocolParams.namespaceScheme.isEqual( this.namespace, - entry.identifier.namespace, + entry.namespaceId, ) ) { return { @@ -219,7 +220,7 @@ export class Replica< } if ( - await this.protocolParams.authorisationScheme.isAuthorised( + await this.protocolParams.authorisationScheme.isAuthorisedWrite( entry, authorisation, ) === false @@ -232,20 +233,20 @@ export class Replica< }; } - const prefixKey = concat( - this.protocolParams.subspaceScheme.encode(entry.identifier.subspace), - entry.identifier.path, - ); + const subspacePath = [ + this.protocolParams.subspaceScheme.encode(entry.subspaceId), + ...entry.path, + ]; // Check if we have any newer entries with this prefix. 
for await ( const [_path, timestampBytes] of this.entryDriver.prefixIterator - .prefixesOf(prefixKey) + .prefixesOf(subspacePath) ) { const view = new DataView(timestampBytes.buffer); const prefixTimestamp = view.getBigUint64(0); - if (prefixTimestamp >= entry.record.timestamp) { + if (prefixTimestamp >= entry.timestamp) { return { kind: "no_op", reason: "newer_prefix_found", @@ -256,35 +257,34 @@ export class Replica< // Check for collisions with stored entries for await ( - const { entry: otherEntry } of this.storage.entriesByQuery( + const { entry: otherEntry } of this.storage.query( { - order: "subspace", - subspace: { - lowerBound: entry.identifier.subspace, - upperBound: this.protocolParams.subspaceScheme.successor( - entry.identifier.subspace, - ), - }, - path: { - lowerBound: entry.identifier.path, - upperBound: Products.makeSuccessorPath( - this.protocolParams.pathLengthScheme.maxLength, - )(entry.identifier.path), + area: { + pathPrefix: entry.path, + + includedSubspaceId: entry.subspaceId, + timeRange: { + start: BigInt(0), + end: OPEN_END, + }, }, + maxCount: 1, + maxSize: BigInt(0), }, + "subspace", ) ) { if ( - compareBytes( - entry.identifier.path, - otherEntry.identifier.path, + orderPath( + entry.path, + otherEntry.path, ) !== 0 ) { break; } // If there is something existing and the timestamp is greater than ours, we have a no-op. - if (otherEntry.record.timestamp > entry.record.timestamp) { + if (otherEntry.timestamp > entry.timestamp) { return { kind: "no_op", reason: "obsolete_from_same_subspace", @@ -292,13 +292,13 @@ export class Replica< } const payloadDigestOrder = this.protocolParams.payloadScheme.order( - entry.record.hash, - otherEntry.record.hash, + entry.payloadDigest, + otherEntry.payloadDigest, ); // If the timestamps are the same, and our hash is less, we have a no-op. if ( - otherEntry.record.timestamp === entry.record.timestamp && + otherEntry.timestamp === entry.timestamp && payloadDigestOrder === -1 ) { return { @@ -308,11 +308,11 @@ export class Replica< } const otherPayloadLengthIsGreater = - entry.record.length < otherEntry.record.timestamp; + entry.payloadLength < otherEntry.payloadLength; // If the timestamps and hashes are the same, and the other payload's length is greater, we have a no-op. 
if ( - otherEntry.record.timestamp === entry.record.timestamp && + otherEntry.timestamp === entry.timestamp && payloadDigestOrder === 0 && otherPayloadLengthIsGreater ) { return { @@ -323,15 +323,15 @@ export class Replica< await this.storage.remove(otherEntry); - const toRemovePrefixKey = concat( + const toRemovePrefixPath = [ this.protocolParams.subspaceScheme.encode( - otherEntry.identifier.subspace, + otherEntry.subspaceId, ), - otherEntry.identifier.path, - ); + ...otherEntry.path, + ]; await this.entryDriver.prefixIterator.remove( - toRemovePrefixKey, + toRemovePrefixPath, ); this.dispatchEvent( @@ -340,11 +340,11 @@ export class Replica< } await this.insertEntry({ - path: entry.identifier.path, - subspace: entry.identifier.subspace, - hash: entry.record.hash, - timestamp: entry.record.timestamp, - length: entry.record.length, + path: entry.path, + subspace: entry.subspaceId, + hash: entry.payloadDigest, + timestamp: entry.timestamp, + length: entry.payloadLength, authToken: authorisation, }); @@ -371,7 +371,7 @@ export class Replica< length, authToken, }: { - path: Uint8Array; + path: Path; subspace: SubspacePublicKey; timestamp: bigint; hash: PayloadDigest; @@ -385,27 +385,23 @@ export class Replica< const stagingResult = await this.payloadDriver.stage(encodedToken); await this.entryDriver.writeAheadFlag.flagInsertion({ - identifier: { - namespace: this.namespace, - subspace: subspace, - path: path, - }, - record: { - hash, - length, - timestamp, - }, + namespaceId: this.namespace, + subspaceId: subspace, + path: path, + payloadDigest: hash, + payloadLength: length, + timestamp, }, stagingResult.hash); - const prefixKey = concat( + const prefixKey = [ this.protocolParams.subspaceScheme.encode(subspace), - path, - ); + ...path, + ]; await Promise.all([ this.storage.insert({ - payloadHash: hash, - authTokenHash: stagingResult.hash, + payloadDigest: hash, + authTokenDigest: stagingResult.hash, length, path, subspace, @@ -420,7 +416,7 @@ export class Replica< // And remove all prefixes with smaller timestamps. 
for await ( - const [prefixedByKey, prefixedByTimestamp] of this.entryDriver + const [prefixedBySubspacePath, prefixedByTimestamp] of this.entryDriver .prefixIterator .prefixedBy( prefixKey, @@ -429,19 +425,14 @@ export class Replica< const view = new DataView(prefixedByTimestamp.buffer); const prefixTimestamp = view.getBigUint64(prefixedByTimestamp.byteOffset); + const [prefixedBySubspace, ...prefixedByPath] = prefixedBySubspacePath; + if (prefixTimestamp < timestamp) { const subspace = this.protocolParams.subspaceScheme.decode( - prefixedByKey, + prefixedBySubspace, ); - const encodedSubspaceLength = this.protocolParams.subspaceScheme - .encodedLength(subspace); - - const prefixedPath = prefixedByKey.subarray( - encodedSubspaceLength, - ); - - const toDeleteResult = await this.storage.get(subspace, prefixedPath); + const toDeleteResult = await this.storage.get(subspace, prefixedByPath); if (toDeleteResult) { await this.entryDriver.writeAheadFlag.flagRemoval( @@ -450,8 +441,8 @@ export class Replica< await Promise.all([ this.storage.remove(toDeleteResult.entry), - this.payloadDriver.erase(toDeleteResult.entry.record.hash), - this.entryDriver.prefixIterator.remove(prefixedByKey), + this.payloadDriver.erase(toDeleteResult.entry.payloadDigest), + this.entryDriver.prefixIterator.remove(prefixedBySubspacePath), ]); await this.entryDriver.writeAheadFlag.unflagRemoval(); @@ -472,7 +463,7 @@ export class Replica< */ async ingestPayload( entryDetails: { - path: Uint8Array; + path: Path; timestamp: bigint; subspace: SubspacePublicKey; }, @@ -492,7 +483,7 @@ export class Replica< const { entry } = getResult; - const existingPayload = await this.payloadDriver.get(entry.record.hash); + const existingPayload = await this.payloadDriver.get(entry.payloadDigest); if (existingPayload) { return { @@ -506,7 +497,7 @@ export class Replica< if ( this.protocolParams.payloadScheme.order( stagedResult.hash, - entry.record.hash, + entry.payloadDigest, ) !== 0 ) { await stagedResult.reject(); @@ -530,7 +521,9 @@ export class Replica< /** Retrieve a list of entry-payload pairs from the replica for a given {@link Query}. 
*/ async *query( - query: Query, + areaOfInterest: AreaOfInterest, + order: QueryOrder, + reverse = false, ): AsyncIterable< [ Entry, @@ -539,9 +532,13 @@ export class Replica< ] > { for await ( - const { entry, authTokenHash } of this.storage.entriesByQuery(query) + const { entry, authTokenHash } of this.storage.query( + areaOfInterest, + order, + reverse, + ) ) { - const payload = await this.payloadDriver.get(entry.record.hash); + const payload = await this.payloadDriver.get(entry.payloadDigest); const authTokenPayload = await this.payloadDriver.get(authTokenHash); diff --git a/src/replica/storage/entry_drivers/kv_store.ts b/src/replica/storage/entry_drivers/kv_store.ts index 469098c..21e9347 100644 --- a/src/replica/storage/entry_drivers/kv_store.ts +++ b/src/replica/storage/entry_drivers/kv_store.ts @@ -1,15 +1,18 @@ -import { decodeEntry, encodeEntry } from "../../../entries/encode_decode.ts"; -import { Entry } from "../../../entries/types.ts"; -import { compareBytes } from "../../../util/bytes.ts"; +import { + decodeEntry, + encodeEntry, + Entry, + orderBytes, + PathScheme, +} from "../../../../deps.ts"; import { FingerprintScheme, NamespaceScheme, - PathLengthScheme, PayloadScheme, SubspaceScheme, } from "../../types.ts"; import { KvDriver } from "../kv/types.ts"; -import { KeyHopTree } from "../prefix_iterators/key_hop_tree.ts"; +import { SimpleKeyIterator } from "../prefix_iterators/simple_key_iterator.ts"; import { PrefixIterator } from "../prefix_iterators/types.ts"; import { TripleStorage } from "../storage_3d/triple_storage.ts"; import { Storage3d } from "../storage_3d/types.ts"; @@ -27,7 +30,7 @@ type EntryDriverKvOpts< namespaceScheme: NamespaceScheme; subspaceScheme: SubspaceScheme; payloadScheme: PayloadScheme; - pathLengthScheme: PathLengthScheme; + pathScheme: PathScheme; fingerprintScheme: FingerprintScheme< NamespaceKey, SubspaceKey, @@ -52,7 +55,7 @@ export class EntryDriverKvStore< private namespaceScheme: NamespaceScheme; private subspaceScheme: SubspaceScheme; private payloadScheme: PayloadScheme; - private pathLengthScheme: PathLengthScheme; + private pathScheme: PathScheme; private fingerprintScheme: FingerprintScheme< NamespaceKey, SubspaceKey, @@ -74,11 +77,11 @@ export class EntryDriverKvStore< this.namespaceScheme = opts.namespaceScheme; this.subspaceScheme = opts.subspaceScheme; this.payloadScheme = opts.payloadScheme; - this.pathLengthScheme = opts.pathLengthScheme; + this.pathScheme = opts.pathScheme; this.fingerprintScheme = opts.fingerprintScheme; this.kvDriver = opts.kvDriver; - this.prefixIterator = new KeyHopTree(this.kvDriver); + this.prefixIterator = new SimpleKeyIterator(this.kvDriver); } makeStorage( @@ -92,11 +95,11 @@ export class EntryDriverKvStore< return new Skiplist({ kv: this.kvDriver, monoid, - compare: compareBytes, + compare: orderBytes, }); }, fingerprintScheme: this.fingerprintScheme, - pathLengthScheme: this.pathLengthScheme, + pathScheme: this.pathScheme, payloadScheme: this.payloadScheme, subspaceScheme: this.subspaceScheme, }); @@ -127,12 +130,12 @@ export class EntryDriverKvStore< return; } - const entry = decodeEntry(maybeInsertion, { + const entry = decodeEntry({ namespaceScheme: this.namespaceScheme, subspaceScheme: this.subspaceScheme, payloadScheme: this.payloadScheme, - pathLengthScheme: this.pathLengthScheme, - }); + pathScheme: this.pathScheme, + }, maybeInsertion); const authTokenHash = this.payloadScheme.decode(probablyAuthTokenHash); @@ -151,12 +154,12 @@ export class EntryDriverKvStore< return; } - const entry = 
decodeEntry(maybeRemoval, { + const entry = decodeEntry({ namespaceScheme: this.namespaceScheme, subspaceScheme: this.subspaceScheme, payloadScheme: this.payloadScheme, - pathLengthScheme: this.pathLengthScheme, - }); + pathScheme: this.pathScheme, + }, maybeRemoval); return entry; }, @@ -164,12 +167,12 @@ export class EntryDriverKvStore< entry: Entry, authTokenHash: PayloadDigest, ) => { - const entryEncoded = encodeEntry(entry, { + const entryEncoded = encodeEntry({ namespaceScheme: this.namespaceScheme, subspaceScheme: this.subspaceScheme, - pathLengthScheme: this.pathLengthScheme, + pathScheme: this.pathScheme, payloadScheme: this.payloadScheme, - }); + }, entry); const authHashEncoded = this.payloadScheme.encode(authTokenHash); @@ -185,12 +188,12 @@ export class EntryDriverKvStore< }, flagRemoval: (entry: Entry) => { - const entryEncoded = encodeEntry(entry, { + const entryEncoded = encodeEntry({ namespaceScheme: this.namespaceScheme, subspaceScheme: this.subspaceScheme, - pathLengthScheme: this.pathLengthScheme, + pathScheme: this.pathScheme, payloadScheme: this.payloadScheme, - }); + }, entry); return this.kvDriver.set(["waf", "remove"], entryEncoded); }, diff --git a/src/replica/storage/entry_drivers/memory.ts b/src/replica/storage/entry_drivers/memory.ts index 35fb33e..5fa9adc 100644 --- a/src/replica/storage/entry_drivers/memory.ts +++ b/src/replica/storage/entry_drivers/memory.ts @@ -1,18 +1,15 @@ -import { compareBytes } from "../../../util/bytes.ts"; import { FingerprintScheme, - PathLengthScheme, PayloadScheme, SubspaceScheme, } from "../../types.ts"; -import { RadixishTree } from "../prefix_iterators/radixish_tree.ts"; - import { LiftingMonoid } from "../summarisable_storage/lifting_monoid.ts"; import { MonoidRbTree } from "../summarisable_storage/monoid_rbtree.ts"; import { EntryDriver } from "../types.ts"; import { Storage3d } from "../storage_3d/types.ts"; import { TripleStorage } from "../storage_3d/triple_storage.ts"; -import { Entry } from "../../../entries/types.ts"; +import { Entry, orderBytes, PathScheme } from "../../../../deps.ts"; +import { RadixTree } from "../prefix_iterators/radix_tree.ts"; type EntryDriverMemoryOpts< NamespaceKey, @@ -22,7 +19,7 @@ type EntryDriverMemoryOpts< > = { subspaceScheme: SubspaceScheme; payloadScheme: PayloadScheme; - pathLengthScheme: PathLengthScheme; + pathScheme: PathScheme; fingerprintScheme: FingerprintScheme< NamespaceKey, SubspaceKey, @@ -65,11 +62,11 @@ export class EntryDriverMemory< ) => { return new MonoidRbTree({ monoid, - compare: compareBytes, + compare: orderBytes, }); }, fingerprintScheme: this.opts.fingerprintScheme, - pathLengthScheme: this.opts.pathLengthScheme, + pathScheme: this.opts.pathScheme, payloadScheme: this.opts.payloadScheme, subspaceScheme: this.opts.subspaceScheme, }); @@ -109,5 +106,5 @@ export class EntryDriverMemory< return Promise.resolve(); }, }; - prefixIterator = new RadixishTree(); + prefixIterator = new RadixTree(); } diff --git a/src/replica/storage/kv/kv_driver_deno.ts b/src/replica/storage/kv/kv_driver_deno.ts index 5bfa7c9..5c24d32 100644 --- a/src/replica/storage/kv/kv_driver_deno.ts +++ b/src/replica/storage/kv/kv_driver_deno.ts @@ -26,24 +26,14 @@ export class KvDriverDeno implements KvDriver { } async *list( - range: { start: Key; end: Key }, + selector: { start: Key; end: Key } | { prefix: Key }, opts?: { - prefix?: Key; reverse?: boolean; limit?: number; batchSize?: number; }, ): AsyncIterable<{ key: Key; value: ValueType }> { - const rangeArg = opts?.prefix - ? 
{ - start: [...opts.prefix, ...range.start], - end: [...opts.prefix, ...range.end], - } - : range; - - const iter = this.kv.list({ - ...rangeArg, - }, { + const iter = this.kv.list(selector, { reverse: opts?.reverse, limit: opts?.limit, batchSize: opts?.batchSize, diff --git a/src/replica/storage/kv/prefixed_driver.ts b/src/replica/storage/kv/prefixed_driver.ts index 85cb181..2e7a350 100644 --- a/src/replica/storage/kv/prefixed_driver.ts +++ b/src/replica/storage/kv/prefixed_driver.ts @@ -22,39 +22,26 @@ export class PrefixedDriver implements KvDriver { } async *list( - range: { start: Key; end: Key }, + selector: { start: Key; end: Key } | { prefix: Key }, opts?: { - prefix?: Key; reverse?: boolean; limit?: number; batchSize?: number; }, ): AsyncIterable<{ key: Key; value: ValueType }> { - if (opts) { - for await ( - const entry of this.parentDriver.list( - range, - { - prefix: opts.prefix - ? [...this.prefix, ...opts.prefix] - : opts.prefix, - ...opts, - }, - ) - ) { - yield { - key: entry.key.slice(this.prefix.length), - value: entry.value, - }; + const selectorPrefixed = "start" in selector + ? { + start: [...this.prefix, ...selector.start], + end: [...this.prefix, ...selector.end], } - } + : { + prefix: [...this.prefix, ...selector.prefix], + }; for await ( const entry of this.parentDriver.list( - range, - { - prefix: this.prefix, - }, + selectorPrefixed, + opts, ) ) { yield { diff --git a/src/replica/storage/kv/types.ts b/src/replica/storage/kv/types.ts index be75b44..bd22f73 100644 --- a/src/replica/storage/kv/types.ts +++ b/src/replica/storage/kv/types.ts @@ -3,7 +3,8 @@ export type KeyPart = | string | number | bigint - | boolean; + | boolean + | symbol; export type Key = KeyPart[]; @@ -18,9 +19,8 @@ export interface KvDriver { set(key: Key, value: unknown): Promise; delete(key: Key): Promise; list( - range: { start: Key; end: Key }, + selector: { start: Key; end: Key } | { prefix: Key }, opts?: { - prefix?: Key; reverse?: boolean; limit?: number; batchSize?: number; diff --git a/src/replica/storage/payload_drivers/filesystem.ts b/src/replica/storage/payload_drivers/filesystem.ts index 0c06334..6c2e795 100644 --- a/src/replica/storage/payload_drivers/filesystem.ts +++ b/src/replica/storage/payload_drivers/filesystem.ts @@ -1,10 +1,10 @@ import { ValidationError, WillowError } from "../../../errors.ts"; -import { EncodingScheme, Payload } from "../../types.ts"; +import { Payload } from "../../types.ts"; import { PayloadDriver } from "../types.ts"; import { join } from "https://deno.land/std@0.188.0/path/mod.ts"; import { ensureDir } from "https://deno.land/std@0.188.0/fs/ensure_dir.ts"; import { move } from "https://deno.land/std@0.188.0/fs/move.ts"; -import { encodeBase32 } from "../../../../deps.ts"; +import { encodeBase32, EncodingScheme } from "../../../../deps.ts"; /** Stores and retrieves payloads from the filesystem. 
*/ export class PayloadDriverFilesystem @@ -87,7 +87,7 @@ export class PayloadDriverFilesystem ): Promise< { hash: PayloadDigest; - length: number; + length: bigint; commit: () => Promise; reject: () => Promise; } @@ -106,7 +106,7 @@ export class PayloadDriverFilesystem return { hash, - length: payload.byteLength, + length: BigInt(payload.byteLength), commit: async () => { await this.ensureDir(); @@ -161,7 +161,7 @@ export class PayloadDriverFilesystem return { hash, - length, + length: BigInt(length), commit: async () => { await this.ensureDir(); diff --git a/src/replica/storage/payload_drivers/memory.ts b/src/replica/storage/payload_drivers/memory.ts index 82b1506..59fa098 100644 --- a/src/replica/storage/payload_drivers/memory.ts +++ b/src/replica/storage/payload_drivers/memory.ts @@ -1,6 +1,10 @@ -import { encodeBase64, toArrayBuffer } from "../../../../deps.ts"; +import { + encodeBase64, + EncodingScheme, + toArrayBuffer, +} from "../../../../deps.ts"; import { ValidationError } from "../../../errors.ts"; -import { EncodingScheme, Payload } from "../../types.ts"; +import { Payload } from "../../types.ts"; import { PayloadDriver } from "../types.ts"; /** Store and retrieve payloads in memory. */ @@ -72,7 +76,7 @@ export class PayloadDriverMemory ): Promise< { hash: PayloadDigest; - length: number; + length: bigint; commit: () => Promise; reject: () => Promise; } @@ -91,7 +95,7 @@ export class PayloadDriverMemory return Promise.resolve({ hash, - length: newPayload.size, + length: BigInt(newPayload.size), commit: () => { this.payloadMap.set(key, newPayload); this.stagingMap.delete(key); diff --git a/src/replica/storage/prefix_iterators/key_hop_tree.ts b/src/replica/storage/prefix_iterators/key_hop_tree.ts deleted file mode 100644 index 80122d8..0000000 --- a/src/replica/storage/prefix_iterators/key_hop_tree.ts +++ /dev/null @@ -1,395 +0,0 @@ -import { concat, equalsBytes } from "../../../../deps.ts"; -import { compareBytes, incrementLastByte } from "../../../util/bytes.ts"; -import { KvBatch, KvDriver } from "../kv/types.ts"; - -enum Phantomness { - Phantom, - Real, - RealWithPhantom, -} - -export type KeyHopTreeNode = [Phantomness, Uint8Array, ValueType]; - -export class KeyHopTree { - private kv: KvDriver; - - constructor(kv: KvDriver) { - this.kv = kv; - } - - private addBackingPhantom( - key: Uint8Array, - value: KeyHopTreeNode, - batch: KvBatch, - ) { - if (value[0] === Phantomness.Real) { - batch.set([key], [ - Phantomness.RealWithPhantom, - value[1], - value[2], - ]); - } - } - - async insert( - key: Uint8Array, - value: ValueType, - position = 1, - lastPassedNode?: { key: Uint8Array; value: KeyHopTreeNode }, - ): Promise { - // Check if first element exists in - const searchKey = key.slice(0, position); - - const existingNode = await this.kv.get>([ - searchKey, - ]); - - // Create a new node with what's left over. - if (!existingNode) { - const vector = key.slice(position); - - const node: KeyHopTreeNode = [Phantomness.Real, vector, value]; - - const batch = this.kv.batch(); - - if (lastPassedNode) { - this.addBackingPhantom(lastPassedNode.key, lastPassedNode.value, batch); - } - - batch.set([searchKey], node); - - await batch.commit(); - - return; - } - - // There is something here! Complexity begins. - - // First check if the key + vector is a prefix of ours. - const completeValue = concat(searchKey, existingNode[1]); - const foundIsPrefix = isPrefix(completeValue, key); - - // If it is, we check from the next position. Buck passed. 
- if (foundIsPrefix) { - if (completeValue.byteLength === key.byteLength) { - return; - } - - return this.insert(key, value, completeValue.byteLength + 1, { - key: searchKey, - value: existingNode, - }); - } - - // If it's not, we need to fuck with this node. - - // Deal with the special case where the inserted node is a prefix of the one we just found. - const newIsPrefix = isPrefix(key, completeValue); - - if (newIsPrefix) { - const batch = this.kv.batch(); - - // The new vector is the new key's suffix compared to this key. - const newVector = key.slice(searchKey.byteLength); - - // Set the search key's new vector and value (the new one) - batch.set([searchKey], [Phantomness.RealWithPhantom, newVector, value]); - - const newNodeKey = completeValue.slice( - 0, - searchKey.byteLength + newVector.byteLength + 1, - ); - const newNodeVector = completeValue.slice( - searchKey.byteLength + newVector.byteLength + 1, - ); - - // Set the new node's key with - - batch.set([newNodeKey], [ - existingNode[0], - newNodeVector, - existingNode[2], - ]); - - await batch.commit(); - - return; - } - - // Fork the node, creating a PHANTOM - // Get the common vector - const batch = this.kv.batch(); - - const newVectorFoundNode: number[] = []; - - for (let i = searchKey.byteLength; i < key.byteLength; i++) { - if (key[i] === completeValue[i]) { - newVectorFoundNode.push(key[i]); - } else { - break; - } - } - - const newVectorFoundNodeBytes = new Uint8Array(newVectorFoundNode); - - // And this node is a phantom!!! Whooooo - batch.set([searchKey], [ - Phantomness.Phantom, - newVectorFoundNodeBytes, - null as ValueType, - ]); - - // Deal with the old (real) node that used to be here - - const newValue = concat(searchKey, newVectorFoundNodeBytes); - - const foundNodeNewKey = completeValue.slice(0, newValue.byteLength + 1); - const foundNodeNewVector = completeValue.slice(newValue.byteLength + 1); - - batch.set([foundNodeNewKey], [ - existingNode[0], - foundNodeNewVector, - existingNode[2], - ]); - - // And finally insert the new value. Yay. - - const newKey = key.slice(0, newValue.byteLength + 1); - const newVector = key.slice(newValue.byteLength + 1); - - batch.set([newKey], [Phantomness.Real, newVector, value]); - - await batch.commit(); - } - - async remove( - key: Uint8Array, - position = 1, - lastPassedNode?: [Uint8Array, KeyHopTreeNode], - ): Promise { - // Try and find the value, and remove it. - const searchKey = key.slice(0, position); - - const existingNode = await this.kv.get>([ - searchKey, - ]); - - // No path, no nothing. - if (!existingNode) { - return false; - } - - // First check if the key + vector is a prefix of ours. - const completeValue = concat(searchKey, existingNode[1]); - - if (equalsBytes(completeValue, key)) { - const batch = this.kv.batch(); - - if (existingNode[0] === Phantomness.RealWithPhantom) { - batch.set([searchKey], [ - Phantomness.Phantom, - existingNode[1], - null, - ]); - } else { - batch.delete([searchKey]); - } - - // Extremely expensive healing op. - - if (lastPassedNode && lastPassedNode[1][0] === Phantomness.Phantom) { - // Sibling is a phantom... - const parentCompleteVal = concat( - lastPassedNode[0], - lastPassedNode[1][1], - ); - - // Time to do something expensive. 
- - let soleSibling: [number, KeyHopTreeNode] | null = null; - - for (let i = 0; i < 256; i++) { - const maybeSiblingKey = concat( - parentCompleteVal, - new Uint8Array([i]), - ); - - if (compareBytes(maybeSiblingKey, searchKey) === 0) { - continue; - } - - const siblingNode = await this.kv.get>([ - maybeSiblingKey, - ]); - - if (soleSibling && siblingNode) { - // If there is more than one sibling, we abort. - soleSibling = null; - break; - } else if (!soleSibling && siblingNode) { - soleSibling = [i, siblingNode]; - } else if (!siblingNode) { - continue; - } - } - - if (soleSibling) { - // Merge the sole sibling with the phantom parent. - - // Delete the sole sibling - - const soleSiblingKey = concat( - parentCompleteVal, - new Uint8Array([soleSibling[0]]), - ); - batch.delete([soleSiblingKey]); - - // Append the last bit of its key and its vector to the phantom parent - batch.set( - [lastPassedNode[0]], - [ - soleSibling[1][0], - concat( - lastPassedNode[1][1], - new Uint8Array([soleSibling[0]]), - soleSibling[1][1], - ), - soleSibling[1][2], - ], - ); - } - } - - await batch.commit(); - - return true; - } - - const foundIsPrefix = isPrefix(completeValue, key); - - // If it is, we check from the next position. Buck passed. - if (foundIsPrefix) { - return this.remove(key, completeValue.byteLength + 1, [ - searchKey, - existingNode, - ]); - } - - return false; - } - - async *prefixesOf(key: Uint8Array): AsyncIterable<[Uint8Array, ValueType]> { - let searchLength = 1; - - while (true) { - const searchKey = key.slice(0, searchLength); - - const node = await this.kv.get>([searchKey]); - - if (!node) { - break; - } - - const completeVal = concat(searchKey, node[1]); - - if (completeVal.byteLength >= key.byteLength) { - break; - } - - // Only do this if not a phantom. - if (node[0] !== Phantomness.Phantom && isPrefix(completeVal, key)) { - yield [completeVal, node[2]]; - } - - searchLength = completeVal.byteLength + 1; - } - } - - async *prefixedBy(key: Uint8Array): AsyncIterable<[Uint8Array, ValueType]> { - // The annoying bit. Find items that are prefixed by the given key but have shorter keys in the store than the key we're searching for. - let searchLength = 1; - - while (true) { - const searchKey = key.slice(0, searchLength); - - const node = await this.kv.get>([searchKey]); - - if (!node) { - break; - } - - const completeVal = concat(searchKey, node[1]); - - if (equalsBytes(completeVal, key)) { - break; - } - - if (completeVal.byteLength <= key.byteLength) { - searchLength = completeVal.byteLength + 1; - continue; - } - - // Only do this if not a phantom. - if (node[0] !== Phantomness.Phantom && isPrefix(key, completeVal)) { - yield [completeVal, node[2]]; - } - - searchLength = completeVal.byteLength + 1; - } - - // The easy bit - for await ( - const entry of this.kv.list>({ - start: [key], - end: [incrementLastByte(key)], - }) - ) { - if (entry.value[0] === Phantomness.Phantom) { - continue; - } - - const completeVal = concat( - entry.key[0] as Uint8Array, - entry.value[1], - ); - - if (equalsBytes(completeVal, key)) { - continue; - } - - yield [completeVal, entry.value[2]]; - } - } - - async print() { - for await ( - const { key, value } of this.kv.list>({ - start: [], - end: [Number.MAX_SAFE_INTEGER], - }) - ) { - console.log( - value[0] === Phantomness.Phantom - ? "👻" - : value[0] === Phantomness.Real - ? 
"🔑" - : "🗝", - `${key[0]}(${value[1]}`, - "-", - value[2], - ); - } - - console.groupEnd(); - } -} - -function isPrefix(maybePrefix: Uint8Array, against: Uint8Array) { - for (let i = 0; i < maybePrefix.byteLength; i++) { - if (maybePrefix[i] !== against[i]) { - return false; - } - } - - return true; -} diff --git a/src/replica/storage/prefix_iterators/prefix_iterator.test.ts b/src/replica/storage/prefix_iterators/prefix_iterator.test.ts index 1ec9b48..3ed752c 100644 --- a/src/replica/storage/prefix_iterators/prefix_iterator.test.ts +++ b/src/replica/storage/prefix_iterators/prefix_iterator.test.ts @@ -1,47 +1,18 @@ import { assertEquals } from "https://deno.land/std@0.202.0/testing/asserts.ts"; -import { RadixishTree } from "./radixish_tree.ts"; +import { RadixTree } from "./radix_tree.ts"; import { PrefixIterator } from "./types.ts"; -import { KeyHopTree } from "./key_hop_tree.ts"; import { KvDriverDeno } from "../kv/kv_driver_deno.ts"; import { SimpleKeyIterator } from "./simple_key_iterator.ts"; +import { randomPath } from "../../../test/utils.ts"; +import { concat, Path, prefixesOf } from "../../../../deps.ts"; -const MAX_PATH_LENGTH = 100; const MAX_PATH_SETS = 64; -function generateRandomPath() { - const pathLength = Math.floor(Math.random() * MAX_PATH_LENGTH + 1); - - const path = new Uint8Array(pathLength); - - for (let i = 0; i < pathLength; i++) { - const byte = Math.floor(Math.random() * (255)); - - path.set([byte], i); - } - - return path; -} - -function prefixesFromPath(path: Uint8Array) { - const prefixes: Uint8Array[] = []; - - for (let i = 1; i < path.byteLength; i++) { - if (Math.random() > 0.5) { - const prefix = path.subarray(0, i); - - prefixes.push(prefix); - } - } - - return prefixes; -} - -type PathSet = Uint8Array[]; +type PathSet = Path[]; function getRandomPathAndPrefixes() { - const path = generateRandomPath(); - const prefixes = prefixesFromPath(path); - return [...prefixes, path]; + const path = randomPath(); + return prefixesOf(path); } function getPaths() { @@ -64,31 +35,16 @@ type PrefixIteratorScenario = { >; }; -const radixishTreeScenario: PrefixIteratorScenario = { - name: "Radixish tree", +const radixTreeScenario: PrefixIteratorScenario = { + name: "Radix tree", makeScenario: () => { return Promise.resolve({ - iterator: new RadixishTree(), + iterator: new RadixTree(), dispose: () => Promise.resolve(), }); }, }; -const keyhopTreeScenario: PrefixIteratorScenario = { - name: "KeyHop tree", - makeScenario: async () => { - const kv = await Deno.openKv(); - const kvDriver = new KvDriverDeno(kv); - const keyhopTree = new KeyHopTree(kvDriver); - await kvDriver.clear(); - - return { - iterator: keyhopTree, - dispose: () => Promise.resolve(kv.close()), - }; - }, -}; - const simpleKeyIteratorScenario: PrefixIteratorScenario = { name: "Simple key iterator", makeScenario: async () => { @@ -107,9 +63,8 @@ const simpleKeyIteratorScenario: PrefixIteratorScenario = { }; const scenarios = [ - radixishTreeScenario, + radixTreeScenario, simpleKeyIteratorScenario, - keyhopTreeScenario, ]; Deno.test("Prefix Iterator", async (test) => { @@ -126,15 +81,16 @@ Deno.test("Prefix Iterator", async (test) => { // Get a random path const idx = Math.floor(Math.random() * (remaining.size - 1)); const remainingArr = Array.from(remaining); - const itemToInsert = remainingArr[idx]; + const pathToInsert = remainingArr[idx]; + const valueToInsert = concat(...pathToInsert); - await iterator.insert(itemToInsert, itemToInsert); + await iterator.insert(pathToInsert, valueToInsert); if 
(Math.random() > 0.75) { - await iterator.insert(itemToInsert, itemToInsert); + await iterator.insert(pathToInsert, valueToInsert); } - remaining.delete(itemToInsert); + remaining.delete(pathToInsert); } // Find a random index in the pathset length that is not zero or the path set length. @@ -145,14 +101,16 @@ Deno.test("Prefix Iterator", async (test) => { const expectedPrefixes = pathSet.slice(0, splitPoint); const expectedPrefixedBy = pathSet.slice(splitPoint + 1); - const actualPrefixes: Uint8Array[] = []; + const actualPrefixes: Path[] = []; + + const pathToTest = pathSet[splitPoint]; for await ( - const [key, value] of iterator.prefixesOf(pathSet[splitPoint]) + const [path, value] of iterator.prefixesOf(pathToTest) ) { - assertEquals(key, value); + assertEquals(concat(...path), value); - actualPrefixes.push(key); + actualPrefixes.push(path); } assertEquals(actualPrefixes, expectedPrefixes); @@ -161,11 +119,11 @@ Deno.test("Prefix Iterator", async (test) => { const actualPrefixedBy = []; for await ( - const [key, value] of iterator.prefixedBy(pathSet[splitPoint]) + const [path, value] of iterator.prefixedBy(pathToTest) ) { - assertEquals(key, value); + assertEquals(concat(...path), value); - actualPrefixedBy.push(key); + actualPrefixedBy.push(path); } assertEquals(actualPrefixedBy, expectedPrefixedBy); @@ -184,14 +142,13 @@ Deno.test("Prefix Iterator", async (test) => { } } - const actualPrefixesAfterRemoval: Uint8Array[] = []; + const actualPrefixesAfterRemoval: Path[] = []; for await ( - const [key, value] of iterator.prefixesOf(pathSet[splitPoint]) + const [path, value] of iterator.prefixesOf(pathToTest) ) { - assertEquals(key, value); - - actualPrefixesAfterRemoval.push(key); + assertEquals(concat(...path), value); + actualPrefixesAfterRemoval.push(path); } assertEquals( @@ -203,11 +160,11 @@ Deno.test("Prefix Iterator", async (test) => { const actualPrefixedByAfterRemoval = []; for await ( - const [key, value] of iterator.prefixedBy(pathSet[splitPoint]) + const [path, value] of iterator.prefixedBy(pathSet[splitPoint]) ) { - assertEquals(key, value); + assertEquals(concat(...path), value); - actualPrefixedByAfterRemoval.push(key); + actualPrefixedByAfterRemoval.push(path); } assertEquals( diff --git a/src/replica/storage/prefix_iterators/radix_tree.ts b/src/replica/storage/prefix_iterators/radix_tree.ts new file mode 100644 index 0000000..23fa6d2 --- /dev/null +++ b/src/replica/storage/prefix_iterators/radix_tree.ts @@ -0,0 +1,226 @@ +import { encodeBase64, orderBytes, Path } from "../../../../deps.ts"; +import { PrefixIterator } from "./types.ts"; + +type RootNode = { + value: ValueType | null; + children: Map>; +}; + +type MemoryNode = { + pathComponent: Uint8Array; + /** Null signifies there is no actual entry here. */ + value: ValueType | null; + /** The keys are the base64 encoding of the path component. 
*/ + children: Map>; +}; + +export class RadixTree implements PrefixIterator { + private root: RootNode = { + value: null, + children: new Map>(), + }; + + print() { + const printNode = (node: RootNode | MemoryNode) => { + if ("pathComponent" in node) { + console.group(node.pathComponent, node.value); + } else { + console.group("Empty string", node.value); + } + + for (const [key, child] of node.children) { + console.log(key, "->"); + printNode(child); + } + + console.groupEnd(); + }; + + printNode(this.root); + } + + insert(path: Path, value: ValueType): Promise { + // Start at root node + if (path.length === 0) { + this.root.value = value; + return Promise.resolve(); + } + + let node: RootNode | MemoryNode = this.root; + + for (let i = 0; i < path.length; i++) { + const isLast = i === path.length - 1; + + const component = path[i]; + + const componentKey = encodeBase64(component); + + const edge: MemoryNode | undefined = node.children.get( + componentKey, + ); + + if (!edge) { + const newNode = { + pathComponent: component, + value: isLast ? value : null, + children: new Map(), + }; + + node.children.set(componentKey, newNode); + + node = newNode; + + continue; + } + + if (isLast) { + edge.value = value; + } + + node = edge; + } + + return Promise.resolve(); + } + + remove(path: Path): Promise { + if (path.length === 0) { + this.root.value = null; + return Promise.resolve(true); + } + + let node = this.root; + + let remove: (() => void) | null = null; + + for (let i = 0; i < path.length; i++) { + const isLast = i === path.length - 1; + + const component = path[i]; + + const componentKey = encodeBase64(component); + + const edge = node.children.get(componentKey); + + const thisIterationsNode = node; + + // If there's no edge, there's nothing to remove. 
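+        // Otherwise, note a possible prune point. Below, `remove` is
+        // (re)assigned whenever the node just walked through holds no value
+        // and has exactly one child, and cleared again whenever that is not
+        // the case, so by the time the final component is reached it is
+        // either null or detaches the target's value-less, single-child
+        // parent. That way removing a childless leaf does not leave an empty
+        // branch node dangling behind it.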
+ if (!edge) { + break; + } + + if (!isLast) { + if (edge.value === null && edge.children.size === 1) { + remove = () => { + thisIterationsNode.children.delete(componentKey); + }; + } else { + remove = null; + } + + node = edge; + + continue; + } + + if (edge.children.size > 0) { + edge.value = null; + return Promise.resolve(true); + } + + if (remove) { + remove(); + + return Promise.resolve(true); + } + + node.children.delete(componentKey); + + return Promise.resolve(true); + } + + return Promise.resolve(false); + } + + async *prefixesOf(path: Path): AsyncIterable<[Path, ValueType]> { + if (path.length === 0) { + return; + } + + let node = this.root; + + for (let i = 0; i < path.length; i++) { + const currentComponent = path[i]; + + if (node.value) { + yield [path.slice(0, i), node.value]; + } + + const edge = node.children.get(encodeBase64(currentComponent)); + + if (!edge) { + break; + } + + node = edge; + } + } + + async *prefixedBy(path: Path): AsyncIterable<[Path, ValueType]> { + // Find the node + + let result: MemoryNode | null = null; + let node = this.root; + + for (let i = 0; i < path.length; i++) { + const component = path[i]; + const isLast = i === path.length - 1; + const edge = node.children.get(encodeBase64(component)); + + if (!edge) { + break; + } + + if (isLast) { + result = edge; + } + + node = edge; + } + + if (!result) { + return; + } + + for ( + const [prefixedByPath, prefixedByValue] of this.allChildPrefixes(result) + ) { + yield [[...path, ...prefixedByPath], prefixedByValue]; + } + } + + private *allChildPrefixes( + node: MemoryNode, + ): Iterable<[Path, ValueType]> { + const childrenArr = Array.from(node.children.values()); + + childrenArr.sort((a, b) => { + return orderBytes( + a.pathComponent, + b.pathComponent, + ); + }); + + for (const child of childrenArr) { + if (child.value) { + yield [[child.pathComponent], child.value]; + } + + for ( + const [childChildPath, childChildValue] of this.allChildPrefixes(child) + ) { + yield [[child.pathComponent, ...childChildPath], childChildValue]; + } + } + } +} diff --git a/src/replica/storage/prefix_iterators/radixish_tree.ts b/src/replica/storage/prefix_iterators/radixish_tree.ts deleted file mode 100644 index 92aa904..0000000 --- a/src/replica/storage/prefix_iterators/radixish_tree.ts +++ /dev/null @@ -1,286 +0,0 @@ -import { compareBytes } from "../../../util/bytes.ts"; -import { PrefixIterator } from "./types.ts"; - -type MemoryNode = { - key: Uint8Array; - value: ValueType | null; - children: Map>; -}; - -export class RadixishTree implements PrefixIterator { - private root: MemoryNode = { - key: new Uint8Array(), - value: null, - children: new Map>(), - }; - - print() { - const printNode = (node: MemoryNode) => { - console.group(node.key, node.value); - - for (const [key, child] of node.children) { - console.log(key, "->"); - printNode(child); - } - - console.groupEnd(); - }; - - printNode(this.root); - } - - insert(key: Uint8Array, value: ValueType): Promise { - // Start at root node - - let node = this.root; - - while (true) { - const lcp = getLongestCommonPrefix(node.key, key); - - if ( - lcp.byteLength === key.byteLength && - key.byteLength === node.key.byteLength && node.value - ) { - break; - } else if (lcp.byteLength === key.byteLength && node.value === null) { - node.value = value; - - break; - } else if (lcp.byteLength === key.byteLength && node.value) { - const splitNode = { - key: node.key, - value: node.value, - children: new Map(node.children), - }; - - node.key = key; - node.value = value; - 
node.children = new Map(); - - node.children.set(splitNode.key[lcp.byteLength], splitNode); - - break; - } else if ( - node.key.byteLength === 0 || - lcp.byteLength === node.key.byteLength - ) { - // Check if any children along its edges - const edge = key[lcp.byteLength]; - const childAtEdge = node.children.get(edge); - - if (childAtEdge) { - // Node is now that child. - node = childAtEdge; - continue; - } else { - // Insert child at that edge - - node.children.set(edge, { - key, - value, - children: new Map(), - }); - - break; - } - } else if (lcp.byteLength > 0) { - const splitNode = { - key: node.key, - value: node.value, - children: new Map(node.children), - }; - - const newNode = { - key: key, - value: value, - children: new Map(), - }; - - node.key = lcp; - node.value = null; - node.children = new Map(); - - node.children.set(splitNode.key[lcp.byteLength], splitNode); - node.children.set(newNode.key[lcp.byteLength], newNode); - - break; - } else { - break; - } - } - - return Promise.resolve(); - } - - remove(key: Uint8Array): Promise { - let node = this.root; - - while (true) { - const lcp = getLongestCommonPrefix(node.key, key); - - if ( - node.key.byteLength === 0 || lcp.byteLength === node.key.byteLength - ) { - // Check if any children along its edges - const edge = key[lcp.byteLength]; - const childAtEdge = node.children.get(edge); - - if (childAtEdge && compareBytes(childAtEdge.key, key) === 0) { - // We found it, noice... - // Absorb any children of the node to be deleted. - if (childAtEdge.children.size === 0) { - node.children.delete(edge); - - if (node.children.size === 1 && node.value !== null) { - // Merge them. - const nodeToMerge = Array.from(node.children.values())[0]; - - node.key = nodeToMerge.key; - node.children = nodeToMerge.children; - node.value = nodeToMerge.value; - } - } else if (childAtEdge.children.size === 1) { - node.children.set( - edge, - Array.from(childAtEdge.children.values())[0], - ); - } else { - childAtEdge.value = null; - } - - return Promise.resolve(true); - } else if (childAtEdge) { - // Node is now that child. - node = childAtEdge; - continue; - } else { - break; - } - } else { - break; - } - } - - return Promise.resolve(false); - } - - async *prefixesOf(key: Uint8Array): AsyncIterable<[Uint8Array, ValueType]> { - let node = this.root; - - while (true) { - const lcp = getLongestCommonPrefix(node.key, key); - - // Is this thing a prefix of ours? - if (lcp.byteLength === key.byteLength) { - break; - } else if ( - node.key.byteLength === 0 || - lcp.byteLength === node.key.byteLength - ) { - if (node.value !== null) { - yield [node.key, node.value]; - } - - // Check if any children along its edges - const edge = key[lcp.byteLength]; - const childAtEdge = node.children.get(edge); - - if (childAtEdge) { - // Node is now that child. - node = childAtEdge; - continue; - } else { - // Nothing left, stop iterating. - break; - } - } else { - break; - } - } - } - - async *prefixedBy(key: Uint8Array): AsyncIterable<[Uint8Array, ValueType]> { - let searchNode = this.root; - let firstPrefixed: MemoryNode | null = null; - - // find the first thing that is the key, or the prefix of the key - - // and then all the children of that are prefixed by us - - while (true) { - const lcp = getLongestCommonPrefix(key, searchNode.key); - - // Is this thing a prefix of ours? 
- if ( - lcp.byteLength === key.byteLength - ) { - firstPrefixed = searchNode; - } - - if ( - searchNode.key.byteLength === 0 || - lcp.byteLength > 0 - ) { - // Check if any children along its edges - const edge = key[lcp.byteLength]; - const childAtEdge = searchNode.children.get(edge); - - if (childAtEdge) { - // Node is now that child. - searchNode = childAtEdge; - continue; - } else { - // Nothing left, stop iterating. - break; - } - } else { - break; - } - } - - if (firstPrefixed) { - if ( - firstPrefixed.value !== null && - firstPrefixed.key.byteLength > key.byteLength - ) { - yield [firstPrefixed.key, firstPrefixed.value]; - } - - // iterate through all children. - for (const node of this.allNodesLnr(firstPrefixed)) { - if (node.value !== null) { - yield [node.key, node.value]; - } - } - } - } - - private *allNodesLnr( - node: MemoryNode, - ): Iterable> { - for (const [_key, child] of (node.children)) { - yield child; - - for (const node of this.allNodesLnr(child)) { - yield node; - } - } - } -} - -function getLongestCommonPrefix( - candidate: Uint8Array, - target: Uint8Array, -): Uint8Array { - const bytes: number[] = []; - - for (let i = 0; i < candidate.byteLength; i++) { - if (candidate[i] !== target[i]) { - break; - } - - bytes.push(candidate[i]); - } - - return new Uint8Array(bytes); -} diff --git a/src/replica/storage/prefix_iterators/simple_key_iterator.ts b/src/replica/storage/prefix_iterators/simple_key_iterator.ts index 1714a4d..00bc5e4 100644 --- a/src/replica/storage/prefix_iterators/simple_key_iterator.ts +++ b/src/replica/storage/prefix_iterators/simple_key_iterator.ts @@ -1,58 +1,51 @@ -import { equalsBytes } from "../../../../deps.ts"; -import { compareBytes, incrementLastByte } from "../../../util/bytes.ts"; -import { Key, KvDriver } from "../kv/types.ts"; +import { isPathPrefixed, orderPath, Path } from "../../../../deps.ts"; +import { KvDriver } from "../kv/types.ts"; +import { PrefixIterator } from "./types.ts"; -export class SimpleKeyIterator { +export class SimpleKeyIterator implements PrefixIterator { private kv: KvDriver; constructor(kv: KvDriver) { this.kv = kv; } - insert(key: Uint8Array, value: ValueType) { - return this.kv.set([key], value); + insert(path: Path, value: ValueType) { + return this.kv.set([0, ...path], value); } - async remove(key: Uint8Array) { - await this.kv.delete([key]); + async remove(path: Path) { + await this.kv.delete([0, ...path]); return true; } async *prefixesOf( - key: Uint8Array, - atLeast: Key = [], - ): AsyncIterable<[Uint8Array, ValueType]> { + path: Path, + atLeast: Path = [], + ): AsyncIterable<[Path, ValueType]> { for await ( const entry of this.kv.list({ - start: atLeast, - end: [key], + start: [0, ...atLeast], + end: [0, ...path], }, { - batchSize: key.length === 0 ? 1 : undefined, - limit: key.length === 0 ? 1 : undefined, + batchSize: path.length === 0 ? 1 : undefined, + limit: path.length === 0 ? 1 : undefined, }) ) { - const candidate = entry.key[0] as Uint8Array; - - if (compareBytes(candidate, key) >= 0) { + const candidate = entry.key.slice(1) as Path; + // If the candidate is greater than or equal to the current path, we've reached the end of the line. 
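+        // (Under `orderPath`, a proper prefix always sorts strictly before
+        // the path it prefixes, so nothing at or beyond `path` in this scan
+        // can still be one of its prefixes.)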
+ if (orderPath(candidate, path) >= 0) { break; } - const longestCommonPrefix = getLongestCommonPrefix( - candidate, - key, - ); - - if (longestCommonPrefix.byteLength === candidate.byteLength) { + if (isPathPrefixed(candidate, path)) { yield [candidate, entry.value]; - const nextAtLeast = new Uint8Array(longestCommonPrefix.byteLength + 1); - nextAtLeast.set(longestCommonPrefix); - for await ( - const result of this.prefixesOf(key, [ - nextAtLeast, - ]) + const result of this.prefixesOf( + path, + path.slice(0, candidate.length + 1), + ) ) { yield result; } @@ -62,35 +55,13 @@ export class SimpleKeyIterator { } } - async *prefixedBy(key: Uint8Array): AsyncIterable<[Uint8Array, ValueType]> { + async *prefixedBy(path: Path): AsyncIterable<[Path, ValueType]> { for await ( const entry of this.kv.list({ - start: [key], - end: [incrementLastByte(key)], + prefix: [0, ...path], }) ) { - if (equalsBytes(entry.key[0] as Uint8Array, key)) { - continue; - } - - yield [entry.key[0] as Uint8Array, entry.value]; - } - } -} - -function getLongestCommonPrefix( - candidate: Uint8Array, - target: Uint8Array, -): Uint8Array { - const bytes: number[] = []; - - for (let i = 0; i < candidate.byteLength; i++) { - if (candidate[i] !== target[i]) { - break; + yield [entry.key.slice(1) as Path, entry.value]; } - - bytes.push(candidate[i]); } - - return new Uint8Array(bytes); } diff --git a/src/replica/storage/prefix_iterators/types.ts b/src/replica/storage/prefix_iterators/types.ts index fe45cfd..0f1551f 100644 --- a/src/replica/storage/prefix_iterators/types.ts +++ b/src/replica/storage/prefix_iterators/types.ts @@ -1,6 +1,10 @@ +import { Path } from "../../../../deps.ts"; + export interface PrefixIterator { - insert(key: Uint8Array, value: ValueType): Promise; - remove(key: Uint8Array): Promise; - prefixesOf(key: Uint8Array): AsyncIterable<[Uint8Array, ValueType]>; - prefixedBy(key: Uint8Array): AsyncIterable<[Uint8Array, ValueType]>; + insert(path: Path, value: ValueType): Promise; + remove(path: Path): Promise; + /** Return all paths that are prefixes of the given path */ + prefixesOf(path: Path): AsyncIterable<[Path, ValueType]>; + /** Returns all paths that are prefixed by the given path */ + prefixedBy(path: Path): AsyncIterable<[Path, ValueType]>; } diff --git a/src/replica/storage/storage_3d/storage_3d.test.ts b/src/replica/storage/storage_3d/storage_3d.test.ts index e11af5a..2fdb071 100644 --- a/src/replica/storage/storage_3d/storage_3d.test.ts +++ b/src/replica/storage/storage_3d/storage_3d.test.ts @@ -1,5 +1,19 @@ import { assert } from "https://deno.land/std@0.202.0/assert/assert.ts"; -import { concat, encodeBase64, Products } from "../../../../deps.ts"; +import { + ANY_SUBSPACE, + AreaOfInterest, + bigintToBytes, + concat, + Entry, + isIncludedRange, + isPathPrefixed, + OPEN_END, + orderBytes, + orderPath, + orderTimestamp, + Path, + Range, +} from "../../../../deps.ts"; import { makeNamespaceKeypair, makeSubspaceKeypair, @@ -8,33 +22,18 @@ import { testSchemeAuthorisation, testSchemeFingerprint, testSchemeNamespace, - testSchemePathLength, + testSchemePath, testSchemePayload, testSchemeSubspace, } from "../../../test/test_schemes.ts"; -import { getSubspaces, randomTimestamp } from "../../../test/utils.ts"; -import { bigintToBytes, compareBytes } from "../../../util/bytes.ts"; -import { Replica } from "../../replica.ts"; -import { - FingerprintScheme, - NamespaceScheme, - OptionalBounds, - ProtocolParameters, - Query, - SubspaceScheme, -} from "../../types.ts"; -import { RadixishTree } from 
"../prefix_iterators/radixish_tree.ts"; +import { randomPath, randomTimestamp } from "../../../test/utils.ts"; +import { ProtocolParameters } from "../../types.ts"; import { MonoidRbTree } from "../summarisable_storage/monoid_rbtree.ts"; import { TripleStorage } from "./triple_storage.ts"; import { Storage3d } from "./types.ts"; -import { sample } from "https://deno.land/std@0.198.0/collections/sample.ts"; -import { encodeEntry } from "../../../entries/encode_decode.ts"; import { assertEquals } from "https://deno.land/std@0.202.0/assert/assert_equals.ts"; -import { Entry } from "../../../entries/types.ts"; - -const emptyUi8 = new Uint8Array(); -type Storage3dScenario< +export type Storage3dScenario< NamespaceKey, SubspaceKey, PayloadDigest, @@ -92,7 +91,7 @@ const tripleStorageScenario = { createSummarisableStorage: (monoid) => { return new MonoidRbTree({ monoid, - compare: compareBytes, + compare: orderBytes, }); }, }); @@ -112,7 +111,7 @@ Deno.test("Storage3d.insert, get, and remove", async (test) => { { namespaceScheme: testSchemeNamespace, subspaceScheme: testSchemeSubspace, - pathLengthScheme: testSchemePathLength, + pathScheme: testSchemePath, payloadScheme: testSchemePayload, fingerprintScheme: testSchemeFingerprint, authorisationScheme: testSchemeAuthorisation, @@ -121,32 +120,32 @@ Deno.test("Storage3d.insert, get, and remove", async (test) => { await test.step(scenario.name, async () => { const subspace = await makeSubspaceKeypair(); - const pathAndPayload = crypto.getRandomValues(new Uint8Array(8)); + const path = randomPath(); - const payloadHash = crypto.getRandomValues(new Uint8Array(32)); - const authTokenHash = crypto.getRandomValues(new Uint8Array(32)); + const payloadDigest = crypto.getRandomValues(new Uint8Array(32)); + const authTokenDigest = crypto.getRandomValues(new Uint8Array(32)); await storage.insert({ - path: pathAndPayload, - payloadHash, - authTokenHash, + path, + payloadDigest, + authTokenDigest, length: BigInt(8), subspace: subspace.subspace, timestamp: BigInt(1000), }); - const res = await storage.get(subspace.subspace, pathAndPayload); + const res = await storage.get(subspace.subspace, path); assert(res); - assertEquals(res.entry.identifier.subspace, subspace.subspace); - assertEquals(res.entry.identifier.path, pathAndPayload); - assertEquals(res.entry.record.hash, payloadHash); - assertEquals(res.authTokenHash, authTokenHash); + assertEquals(res.entry.subspaceId, subspace.subspace); + assertEquals(res.entry.path, path); + assertEquals(res.entry.payloadDigest, payloadDigest); + assertEquals(res.authTokenHash, authTokenDigest); await storage.remove(res.entry); - const res2 = await storage.get(subspace.subspace, pathAndPayload); + const res2 = await storage.get(subspace.subspace, path); assert(res2 === undefined); }); @@ -160,27 +159,33 @@ Deno.test("Storage3d.summarise", async () => { const specialFingerprintScheme = { fingerprintSingleton( entry: Entry, - ): Promise<[number, Uint8Array, bigint][]> { + ): Promise<[number, Path, bigint, bigint][]> { return Promise.resolve([[ - entry.identifier.subspace, - entry.identifier.path, - entry.record.timestamp, + entry.subspaceId, + entry.path, + entry.timestamp, + entry.payloadLength, ]]); }, fingerprintCombine( - a: Array<[number, Uint8Array, bigint]>, - b: Array<[number, Uint8Array, bigint]>, + a: Array<[number, Path, bigint, bigint]>, + b: Array<[number, Path, bigint, bigint]>, ) { const newFingerprint = [...a]; + // Remove dupleicates + for (const element of b) { const existing = newFingerprint.find( - 
([subspaceA, pathA, timestampA]) => { - const [subspaceB, pathB, timestampB] = element; + ([subspaceA, pathA, timestampA, lengthA]) => { + const [subspaceB, pathB, timestampB, lengthB] = element; if (subspaceA !== subspaceB) return false; - if (Products.orderPaths(pathA, pathB) !== 0) return false; - if (Products.orderTimestamps(timestampA, timestampB) !== 0) { + if (orderPath(pathA, pathB) !== 0) return false; + if (orderTimestamp(timestampA, timestampB) !== 0) { + return false; + } + if (orderTimestamp(lengthA, lengthB) !== 0) { return false; } @@ -196,17 +201,23 @@ Deno.test("Storage3d.summarise", async () => { } newFingerprint.sort((a, b) => { - const [subspaceA, pathA, timestampA] = a; - const [subspaceB, pathB, timestampB] = b; + const [subspaceA, pathA, timestampA, lengthA] = a; + const [subspaceB, pathB, timestampB, lengthB] = b; if (subspaceA < subspaceB) return -1; if (subspaceA > subspaceB) return 1; - if (Products.orderPaths(pathA, pathB) === -1) return -1; - if (Products.orderPaths(pathA, pathB) === 1) return 1; - if (Products.orderTimestamps(timestampA, timestampB) === -1) { + if (orderPath(pathA, pathB) === -1) return -1; + if (orderPath(pathA, pathB) === 1) return 1; + if (orderTimestamp(timestampA, timestampB) === -1) { + return -1; + } + if (orderTimestamp(timestampA, timestampB) === 1) { + return 1; + } + if (orderTimestamp(lengthA, lengthB) === -1) { return -1; } - if (Products.orderTimestamps(timestampA, timestampB) === 1) { + if (orderTimestamp(lengthA, lengthB) === 1) { return 1; } @@ -215,7 +226,7 @@ Deno.test("Storage3d.summarise", async () => { return newFingerprint; }, - neutral: [] as Array<[number, Uint8Array, bigint]>, + neutral: [] as Array<[number, Path, bigint, bigint]>, }; for (const scenario of scenarios) { @@ -246,48 +257,20 @@ Deno.test("Storage3d.summarise", async () => { encodedLength() { return 1; }, - isEqual(a, b) { - return a === b; - }, order(a: number, b: number) { if (a < b) return -1; if (a > b) return 1; return 0; }, minimalSubspaceKey: 0, - successor(a) { + successor(a: number) { return a + 1; }, }, - payloadScheme: { - encode(value: Uint8Array) { - return value; - }, - decode(encoded) { - return encoded; - }, - encodedLength(value: Uint8Array) { - return value.byteLength; - }, - fromBytes(bytes: Uint8Array | ReadableStream) { - return Promise.resolve(bytes as Uint8Array); - }, - order: compareBytes, - }, - pathLengthScheme: { - encode(value: number) { - return new Uint8Array([value]); - }, - decode(encoded) { - return encoded[0]; - }, - encodedLength() { - return 1; - }, - maxLength: 4, - }, + payloadScheme: testSchemePayload, + pathScheme: testSchemePath, authorisationScheme: { - isAuthorised() { + isAuthorisedWrite() { return Promise.resolve(true); }, authorise() { @@ -311,115 +294,101 @@ Deno.test("Storage3d.summarise", async () => { // Create some random products using these (pull from Meadowcap) - const summariseParams: { - product: Products.ThreeDimensionalProduct; - countLimits?: { subspace?: number; path?: number; time?: number }; - sizeLimits?: { subspace?: bigint; path?: bigint; time?: bigint }; - }[] = []; + const areaParams: AreaOfInterest[] = []; for (let i = 0; i < 100; i++) { const randomCount = () => { return Math.random() > 0.5 ? Math.floor(Math.random() * (3 - 1 + 1) + 1) - : undefined; + : 0; }; const randomSize = () => { return Math.random() > 0.5 ? 
BigInt(Math.floor(Math.random() * (64 - 16 + 1) + 16)) - : undefined; + : BigInt(0); }; - const randomCounts = () => { + const randomSubspaceId = () => { return Math.random() > 0.5 - ? { - subspace: randomCount(), - path: randomCount(), - time: randomCount(), - } - : undefined; + ? Math.floor(Math.random() * 255) + : ANY_SUBSPACE; }; - const randomSizes = () => { - return Math.random() > 0.5 - ? { - subspace: randomSize(), - path: randomSize(), - time: randomSize(), - } - : undefined; + const randomTimeRange = () => { + const isOpen = Math.random() > 0.5; + + const start = BigInt(Math.floor(Math.random() * 1000)); + + if (isOpen) { + return { + start, + end: OPEN_END, + } as Range; + } + + const end = start + BigInt(Math.floor(Math.random() * 1000)); + + return { start, end }; }; - summariseParams.push({ - product: getRandom3dProduct({ - noEmpty: true, - }), - countLimits: randomCounts(), - sizeLimits: randomSizes(), + areaParams.push({ + area: { + includedSubspaceId: randomSubspaceId(), + pathPrefix: randomPath(), + timeRange: randomTimeRange(), + }, + maxCount: randomCount(), + maxSize: randomSize(), }); } - // Define includedByProduct fn - const includedBySummariseParams = ( + // A function which returns all the areas a given spt is included by + const isIncludedByAreas = ( subspace: number, - path: Uint8Array, + path: Path, time: bigint, - ): { - product: Products.ThreeDimensionalProduct; - countLimits?: { subspace?: number; path?: number; time?: number }; - sizeLimits?: { subspace?: bigint; path?: bigint; time?: bigint }; - }[] => { - const includedProducts = []; - - for (const { product, countLimits, sizeLimits } of summariseParams) { + ): AreaOfInterest[] => { + const inclusiveAreas: AreaOfInterest[] = []; + + for (const aoi of areaParams) { if ( - Products.disjointIntervalIncludesValue( - { order: orderNumbers }, - product[0], - subspace, - ) === false + aoi.area.includedSubspaceId !== ANY_SUBSPACE && + aoi.area.includedSubspaceId !== subspace ) { continue; } if ( - Products.disjointIntervalIncludesValue( - { order: Products.orderPaths }, - product[1], - path, - ) === false + isPathPrefixed(aoi.area.pathPrefix, path) === false ) { continue; } if ( - Products.disjointIntervalIncludesValue( - { order: Products.orderTimestamps }, - product[2], - time, - ) === false + isIncludedRange(orderTimestamp, aoi.area.timeRange, time) === false ) { continue; } - includedProducts.push({ product, countLimits, sizeLimits }); + inclusiveAreas.push(aoi); } - return includedProducts; + return inclusiveAreas; }; // Define expected fingerprint map const actualFingerprintMap = new Map< - Products.ThreeDimensionalProduct, + AreaOfInterest, { - fingerprint: [number, Uint8Array, bigint][]; + fingerprint: [number, Path, bigint, bigint][]; count: number; size: bigint; } >(); - for (const { product } of summariseParams) { - actualFingerprintMap.set(product, { + for (const areaOfInterest of areaParams) { + actualFingerprintMap.set(areaOfInterest, { fingerprint: specialFingerprintScheme.neutral, count: 0, size: BigInt(0), @@ -436,12 +405,22 @@ Deno.test("Storage3d.summarise", async () => { const pathLastByte = Math.floor(Math.random() * 256); - const pathAndPayload = new Uint8Array([ - 0, - 0, - 0, - pathLastByte, - ]); + const path = randomPath(); + + const authTokenDigest = new Uint8Array( + await crypto.subtle.digest( + "SHA-256", + new Uint8Array(0), + ), + ); + + const payloadDigest = new Uint8Array( + await crypto.subtle.digest( + "SHA-256", + crypto.getRandomValues(new Uint8Array(16)), + ), + ); + const 
timestamp = randomTimestamp(); if (occupiedPaths.get(subspace)?.has(pathLastByte)) { @@ -450,24 +429,20 @@ Deno.test("Storage3d.summarise", async () => { await storage.insert({ subspace, - path: pathAndPayload, + path: path, timestamp: timestamp, length: BigInt(4), - authTokenHash: new Uint8Array(), - payloadHash: pathAndPayload, + authTokenDigest: authTokenDigest, + payloadDigest: payloadDigest, }); const entry: Entry = { - identifier: { - namespace: null, - subspace: subspace, - path: pathAndPayload, - }, - record: { - hash: pathAndPayload, - length: BigInt(4), - timestamp, - }, + namespaceId: null, + subspaceId: subspace, + path: path, + payloadDigest: payloadDigest, + payloadLength: BigInt(4), + timestamp, }; entries.push(entry); @@ -483,45 +458,38 @@ Deno.test("Storage3d.summarise", async () => { entries.sort((a, b) => { const aKey = concat( - new Uint8Array([a.identifier.subspace]), - a.identifier.path, - bigintToBytes(a.record.timestamp), + new Uint8Array([a.subspaceId]), + ...a.path, + bigintToBytes(a.timestamp), ); const bKey = concat( - new Uint8Array([b.identifier.subspace]), - b.identifier.path, - bigintToBytes(b.record.timestamp), + new Uint8Array([b.subspaceId]), + ...b.path, + bigintToBytes(b.timestamp), ); - return Products.orderPaths(aKey, bKey) * -1; + return orderBytes(aKey, bKey) * -1; }); for (const entry of entries) { - const includedBy = includedBySummariseParams( - entry.identifier.subspace, - entry.identifier.path, - entry.record.timestamp, + const includedBy = isIncludedByAreas( + entry.subspaceId, + entry.path, + entry.timestamp, ); - for (const { product, countLimits, sizeLimits } of includedBy) { - const { fingerprint, count, size } = actualFingerprintMap.get(product)!; + for (const aoi of includedBy) { + const { fingerprint, count, size } = actualFingerprintMap.get(aoi)!; const nextCount = count + 1; - const nextSize = size + entry.record.length; - - const sclExceeded = countLimits?.subspace && - nextCount > countLimits.subspace; - const pclExceeded = countLimits?.path && nextCount > countLimits.path; - const tclExceeded = countLimits?.time && nextCount > countLimits.time; + const nextSize = size + entry.payloadLength; - const sslExceeded = sizeLimits?.subspace && - nextSize > sizeLimits.subspace; - const pslExceeded = sizeLimits?.path && nextSize > sizeLimits.path; - const tslExceeded = sizeLimits?.time && nextSize > sizeLimits.time; + const countExceeded = aoi.maxCount !== 0 && nextCount > aoi.maxCount; + const sizeExceeded = aoi.maxSize !== BigInt(0) && + nextSize > aoi.maxSize; if ( - sclExceeded || pclExceeded || tclExceeded || sslExceeded || - pslExceeded || tslExceeded + countExceeded || sizeExceeded ) { continue; } @@ -531,7 +499,7 @@ Deno.test("Storage3d.summarise", async () => { ); actualFingerprintMap.set( - product, + aoi, { fingerprint: specialFingerprintScheme.fingerprintCombine( fingerprint, @@ -545,17 +513,17 @@ Deno.test("Storage3d.summarise", async () => { } // For all products, see if fingerprint matches the expected one. 
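+      // For orientation, a concrete area of interest with the shape generated
+      // above and consumed by `storage.summarise` below. This is an
+      // illustrative sketch only (`_exampleAoi` is deliberately unused),
+      // relying on the willow_utils values imported at the top of this file.
+      // A `maxCount` of 0 or `maxSize` of BigInt(0) stands for "no limit",
+      // matching the countExceeded/sizeExceeded checks above.
+      const _exampleAoi = {
+        area: {
+          // A specific subspace, or ANY_SUBSPACE to include every subspace.
+          includedSubspaceId: 7,
+          // Only entries whose path starts with this single component.
+          pathPrefix: [new Uint8Array([1])],
+          // Entries timestamped at or after 500, with no upper bound.
+          timeRange: { start: BigInt(500), end: OPEN_END },
+        },
+        maxCount: 10,
+        maxSize: BigInt(1024),
+      };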
- for (const { product, countLimits, sizeLimits } of summariseParams) { - const actual = await storage.summarise(product, countLimits, sizeLimits); - const expected = actualFingerprintMap.get(product)!; + for (const aoi of areaParams) { + const actual = await storage.summarise(aoi); + const expected = actualFingerprintMap.get(aoi)!; assertEquals(actual.fingerprint, expected.fingerprint); assertEquals(actual.size, expected.count); - let actualPayloadSize = 0; + let actualPayloadSize = BigInt(0); for (const element of actual.fingerprint) { - actualPayloadSize += element[1].byteLength; + actualPayloadSize += element[3]; } assertEquals(BigInt(actualPayloadSize), expected.size); @@ -565,414 +533,8 @@ Deno.test("Storage3d.summarise", async () => { } }); -Deno.test("Storage3d.entriesByProduct", async () => { - const namespaceScheme: NamespaceScheme = { - encode() { - return new Uint8Array(); - }, - decode() { - return null; - }, - encodedLength() { - return 0; - }, - isEqual() { - return true; - }, - }; - - const subspaceScheme: SubspaceScheme = { - encode(value: number) { - return new Uint8Array([value]); - }, - decode(encoded) { - return encoded[0]; - }, - encodedLength() { - return 1; - }, - isEqual(a, b) { - return a === b; - }, - order(a: number, b: number) { - if (a < b) return -1; - if (a > b) return 1; - return 0; - }, - minimalSubspaceKey: 0, - successor(a) { - return a + 1; - }, - }; - - // A 'special' fingerprint which really just lists all the items it is made from. - const fingeprintScheme: FingerprintScheme< - null, - number, - Uint8Array, - Uint8Array - > = { - neutral: new Uint8Array(32), - async fingerprintSingleton(entry) { - const encodedEntry = encodeEntry(entry, { - namespaceScheme, - subspaceScheme, - pathLengthScheme: testSchemePathLength, - payloadScheme: testSchemePayload, - }); - - return new Uint8Array( - await crypto.subtle.digest("SHA-256", encodedEntry), - ); - }, - fingerprintCombine(a, b) { - const bytes = new Uint8Array(32); - - for (let i = 0; i < 32; i++) { - bytes.set([a[i] ^ b[i]], i); - } - - return bytes; - }, - }; - - for (const scenario of scenarios) { - const { storage, dispose } = await scenario.makeScenario( - null, - { - namespaceScheme, - subspaceScheme, - payloadScheme: { - encode(value: Uint8Array) { - return value; - }, - decode(encoded) { - return encoded; - }, - encodedLength(value: Uint8Array) { - return value.byteLength; - }, - fromBytes(bytes: Uint8Array | ReadableStream) { - return Promise.resolve(bytes as Uint8Array); - }, - order: compareBytes, - }, - pathLengthScheme: { - encode(value: number) { - return new Uint8Array([value]); - }, - decode(encoded) { - return encoded[0]; - }, - encodedLength() { - return 1; - }, - maxLength: 4, - }, - authorisationScheme: { - isAuthorised() { - return Promise.resolve(true); - }, - authorise() { - return Promise.resolve(null); - }, - tokenEncoding: { - encode() { - return new Uint8Array(); - }, - decode() { - return null; - }, - encodedLength() { - return 0; - }, - }, - }, - fingerprintScheme: fingeprintScheme, - }, - ); - - // Create some random products using these (pull from Meadowcap) - - const summariseParams: { - product: Products.ThreeDimensionalProduct; - countLimits?: { subspace?: number; path?: number; time?: number }; - sizeLimits?: { subspace?: bigint; path?: bigint; time?: bigint }; - }[] = []; - - for (let i = 0; i < 100; i++) { - const randomCount = () => { - return Math.random() > 0.5 - ? 
Math.floor(Math.random() * (3 - 1 + 1) + 1) - : undefined; - }; - - const randomSize = () => { - return Math.random() > 0.5 - ? BigInt(Math.floor(Math.random() * (64 - 16 + 1) + 16)) - : undefined; - }; - - const randomCounts = () => { - return Math.random() > 0.5 - ? { - subspace: randomCount(), - path: randomCount(), - time: randomCount(), - } - : undefined; - }; - - const randomSizes = () => { - return Math.random() > 0.5 - ? { - subspace: randomSize(), - path: randomSize(), - time: randomSize(), - } - : undefined; - }; - - summariseParams.push({ - product: getRandom3dProduct({ - noEmpty: true, - }), - countLimits: randomCounts(), - sizeLimits: randomSizes(), - }); - } - - // Define includedByProduct fn - const includedByQueryProductParams = ( - subspace: number, - path: Uint8Array, - time: bigint, - ): { - product: Products.ThreeDimensionalProduct; - countLimits?: { subspace?: number; path?: number; time?: number }; - sizeLimits?: { subspace?: bigint; path?: bigint; time?: bigint }; - }[] => { - const includedProducts = []; - - for (const { product, countLimits, sizeLimits } of summariseParams) { - if ( - Products.disjointIntervalIncludesValue( - { order: orderNumbers }, - product[0], - subspace, - ) === false - ) { - continue; - } - - if ( - Products.disjointIntervalIncludesValue( - { order: Products.orderPaths }, - product[1], - path, - ) === false - ) { - continue; - } - - if ( - Products.disjointIntervalIncludesValue( - { order: Products.orderTimestamps }, - product[2], - time, - ) === false - ) { - continue; - } - - includedProducts.push({ product, countLimits, sizeLimits }); - } - - return includedProducts; - }; - - const actualResultsMap = new Map< - Products.ThreeDimensionalProduct, - { - entries: Set; - count: number; - size: bigint; - } - >(); - - for (const { product } of summariseParams) { - actualResultsMap.set(product, { - entries: new Set(), - count: 0, - size: BigInt(0), - }); - } - - const occupiedPaths = new Map>(); - - const entries: Entry[] = []; - - // Generate some entries - for (let i = 0; i < 100; i++) { - const subspace = Math.floor(Math.random() * 100); - - const pathLastByte = Math.floor(Math.random() * 256); - - const pathAndPayload = new Uint8Array([ - 0, - 0, - 0, - pathLastByte, - ]); - const timestamp = randomTimestamp(); - - if (occupiedPaths.get(subspace)?.has(pathLastByte)) { - continue; - } - - await storage.insert({ - subspace, - path: pathAndPayload, - timestamp: timestamp, - length: BigInt(4), - authTokenHash: new Uint8Array(), - payloadHash: pathAndPayload, - }); - - const entry: Entry = { - identifier: { - namespace: null, - subspace: subspace, - path: pathAndPayload, - }, - record: { - hash: pathAndPayload, - length: BigInt(4), - timestamp, - }, - }; - - entries.push(entry); - - const usedPaths = occupiedPaths.get(subspace); - - if (!usedPaths) { - occupiedPaths.set(subspace, new Set([pathLastByte])); - } else { - usedPaths.add(pathLastByte); - } - } - - entries.sort((a, b) => { - const aKey = concat( - new Uint8Array([a.identifier.subspace]), - a.identifier.path, - bigintToBytes(a.record.timestamp), - ); - const bKey = concat( - new Uint8Array([b.identifier.subspace]), - b.identifier.path, - bigintToBytes(b.record.timestamp), - ); - - return Products.orderPaths(aKey, bKey) * -1; - }); - - for (const entry of entries) { - const includedBy = includedByQueryProductParams( - entry.identifier.subspace, - entry.identifier.path, - entry.record.timestamp, - ); - - for (const { product, countLimits, sizeLimits } of includedBy) { - const { entries, 
count, size } = actualResultsMap.get(product)!; - - const nextCount = count + 1; - const nextSize = size + entry.record.length; - - const sclExceeded = countLimits?.subspace && - nextCount > countLimits.subspace; - const pclExceeded = countLimits?.path && nextCount > countLimits.path; - const tclExceeded = countLimits?.time && nextCount > countLimits.time; - - const sslExceeded = sizeLimits?.subspace && - nextSize > sizeLimits.subspace; - const pslExceeded = sizeLimits?.path && nextSize > sizeLimits.path; - const tslExceeded = sizeLimits?.time && nextSize > sizeLimits.time; - - if ( - sclExceeded || pclExceeded || tclExceeded || sslExceeded || - pslExceeded || tslExceeded - ) { - continue; - } - - const encodedEntry = encodeEntry(entry, { - namespaceScheme, - subspaceScheme, - pathLengthScheme: testSchemePathLength, - payloadScheme: testSchemePayload, - }); - - const entryHash = await testSchemePayload.fromBytes(encodedEntry); - - const b64EntryHash = encodeBase64(entryHash); - - entries.add(b64EntryHash); - - actualResultsMap.set( - product, - { - entries: entries, - size: nextSize, - count: nextCount, - }, - ); - } - } - - // For all products, see if fingerprint matches the expected one. - for (const { product, countLimits, sizeLimits } of summariseParams) { - const expected = actualResultsMap.get(product)!; - - let countUsed = 0; - let sizeUsed = BigInt(0); - - for await ( - const { entry } of storage.entriesByProduct( - product, - countLimits, - sizeLimits, - ) - ) { - countUsed += 1; - sizeUsed += entry.record.length; - - // Check for presence of entry. - const encodedEntry = encodeEntry(entry, { - namespaceScheme, - subspaceScheme, - pathLengthScheme: testSchemePathLength, - payloadScheme: testSchemePayload, - }); - - const entryHash = await testSchemePayload.fromBytes(encodedEntry); - - const b64EntryHash = encodeBase64(entryHash); - - expected.entries.has(b64EntryHash); - } - - assertEquals(countUsed, expected.count); - assertEquals(sizeUsed, expected.size); - } - - await dispose(); - } -}); - -Deno.test("Storage3d.entriesByQuery", async (test) => { +/* +Deno.test("Storage3d.query", async (test) => { for (const scenario of scenarios) { const namespaceKeypair = await makeNamespaceKeypair(); @@ -981,13 +543,14 @@ Deno.test("Storage3d.entriesByQuery", async (test) => { { namespaceScheme: testSchemeNamespace, subspaceScheme: testSchemeSubspace, - pathLengthScheme: testSchemePathLength, + pathScheme: testSchemePath, payloadScheme: testSchemePayload, fingerprintScheme: testSchemeFingerprint, authorisationScheme: testSchemeAuthorisation, }, ); + const replica = new Replica({ namespace: namespaceKeypair.namespace, protocolParameters: { @@ -1014,122 +577,100 @@ Deno.test("Storage3d.entriesByQuery", async (test) => { }, }); + await test.step(scenario.name, async () => { // Generate the test queries - const subspaces = await getSubspaces(10); - const bytes = []; + const areaParams: AreaOfInterest[] = []; - for (let i = 0; i < 50; i++) { - bytes.push(crypto.getRandomValues(new Uint8Array(4))); - } + for (let i = 0; i < 100; i++) { + const randomCount = () => { + return Math.random() > 0.5 + ? Math.floor(Math.random() * (3 - 1 + 1) + 1) + : 0; + }; - const timestamps = []; + const randomSize = () => { + return Math.random() > 0.5 + ? BigInt(Math.floor(Math.random() * (64 - 16 + 1) + 16)) + : BigInt(0); + }; - for (let i = 0; i < 25; i++) { - timestamps.push(randomTimestamp()); - } + const randomSubspaceId = () => { + return Math.random() > 0.5 + ? 
Math.floor(Math.random() * 255) + : ANY_SUBSPACE; + }; - // Bounds + const randomTimeRange = () => { + const isOpen = Math.random() > 0.5; - const subspaceBounds = manyRandomBounds( - 100, - subspaces.map((s) => s.subspace), - compareBytes, - ); - const pathBounds = manyRandomBounds(100, bytes, Products.orderPaths); - const timeBounds = manyRandomBounds( - 100, - timestamps, - Products.orderTimestamps, - ); + const start = BigInt(Math.floor(Math.random() * 1000)); - const queries: Query[] = []; + if (isOpen) { + return { + start, + end: OPEN_END, + } as Range; + } - const includedByQueries = ( - subspace: Uint8Array, - path: Uint8Array, - time: bigint, - ): Query[] => { - const includedQueries = []; + const end = start + BigInt(Math.floor(Math.random() * 1000)); - for (const query of queries) { - if (query.subspace) { - const range = rangeFromOptionalBounds(query.subspace, emptyUi8); + return { start, end }; + }; - const isIncluded = Products.rangeIncludesValue( - { order: compareBytes }, - range, - subspace, - ); + areaParams.push({ + area: { + includedSubspaceId: randomSubspaceId(), + pathPrefix: randomPath(), + timeRange: randomTimeRange(), + }, + maxCount: randomCount(), + maxSize: randomSize(), + }); + } - if (!isIncluded) { - continue; - } + // A function which returns all the areas a given spt is included by + const isIncludedByAreas = ( + subspace: number, + path: Path, + time: bigint, + ): AreaOfInterest[] => { + const inclusiveAreas: AreaOfInterest[] = []; + + for (const aoi of areaParams) { + if ( + aoi.area.includedSubspaceId !== ANY_SUBSPACE && + aoi.area.includedSubspaceId !== subspace + ) { + continue; } - if (query.path) { - const range = rangeFromOptionalBounds(query.path, emptyUi8); - - const isIncluded = Products.rangeIncludesValue( - { order: Products.orderPaths }, - range, - path, - ); - - if (!isIncluded) { - continue; - } + if ( + isPathPrefixed(aoi.area.pathPrefix, path) === false + ) { + continue; } - if (query.time) { - const range = rangeFromOptionalBounds(query.time, BigInt(0)); - - const isIncluded = Products.rangeIncludesValue( - { order: Products.orderTimestamps }, - range, - time, - ); - - if (!isIncluded) { - continue; - } + if ( + isIncludedRange(orderTimestamp, aoi.area.timeRange, time) === false + ) { + continue; } - includedQueries.push(query); + inclusiveAreas.push(aoi); } - return includedQueries; + return inclusiveAreas; }; - for (let i = 0; i < 500; i++) { - const orderRoll = Math.random(); - - const query: Query = { - limit: Math.random() < 0.1 - ? Math.floor(Math.random() * 10) - : undefined, - reverse: Math.random() < 0.25 ? true : false, - order: orderRoll < 0.33 - ? "subspace" - : orderRoll < 0.66 - ? "path" - : "timestamp", - subspace: Math.random() < 0.5 ? sample(subspaceBounds) : undefined, - path: Math.random() < 0.5 ? sample(pathBounds) : undefined, - time: Math.random() < 0.5 ? 
sample(timeBounds) : undefined, - }; - - queries.push(query); - } - - const queryInclusionMap = new Map< - Query, + const actualResultMap = new Map< + AreaOfInterest, Set >(); - for (const query of queries) { - queryInclusionMap.set(query, new Set()); + for (const areaOfInterest of areaParams) { + actualResultMap.set(areaOfInterest, new Set()); } replica.addEventListener("entryremove", (event) => { @@ -1147,7 +688,7 @@ Deno.test("Storage3d.entriesByQuery", async (test) => { testSchemePayload.fromBytes(encodedEntry).then((hash) => { const b64 = encodeBase64(hash); - for (const [, set] of queryInclusionMap) { + for (const [, set] of actualResultMap) { set.delete(b64); } }); @@ -1199,7 +740,7 @@ Deno.test("Storage3d.entriesByQuery", async (test) => { entryAuthHashMap.set(b64EntryHash, b64AuthHash); for (const query of correspondingQueries) { - const set = queryInclusionMap.get(query)!; + const set = actualResultMap.get(query)!; set.add(b64EntryHash); } @@ -1208,7 +749,7 @@ Deno.test("Storage3d.entriesByQuery", async (test) => { for (const query of queries) { let entriesRead = 0; - const awaiting = new Set(queryInclusionMap.get(query)); + const awaiting = new Set(actualResultMap.get(query)); const prevIsCorrectOrder = ( prev: Entry, @@ -1293,7 +834,7 @@ Deno.test("Storage3d.entriesByQuery", async (test) => { assert(prevIsCorrectOrder(prevEntry, entry, query.order)); } - assert(queryInclusionMap.get(query)?.has(b64EntryHash)); + assert(actualResultMap.get(query)?.has(b64EntryHash)); entriesRead += 1; prevEntry = entry; @@ -1307,10 +848,10 @@ Deno.test("Storage3d.entriesByQuery", async (test) => { if (query.limit) { assertEquals( entriesRead, - Math.min(query.limit, queryInclusionMap.get(query)!.size), + Math.min(query.limit, actualResultMap.get(query)!.size), ); } else { - assertEquals(entriesRead, queryInclusionMap.get(query)!.size); + assertEquals(entriesRead, actualResultMap.get(query)!.size); } } }); @@ -1319,195 +860,7 @@ Deno.test("Storage3d.entriesByQuery", async (test) => { } }); -function manyRandomBounds( - size: number, - sampleFrom: Array, - order: Products.TotalOrder, -) { - const bounds = []; - - for (let i = 0; i < size; i++) { - bounds.push(randomBounds(sampleFrom, order)); - } - - return bounds; -} - -function randomBounds( - sampleFrom: Array, - order: Products.TotalOrder, -): OptionalBounds { - const kindRoll = Math.random(); - - if (kindRoll < 0.33) { - return { - lowerBound: sample(sampleFrom)!, - }; - } else if (kindRoll < 0.66) { - return { - upperBound: sample(sampleFrom)!, - }; - } - - while (true) { - const fst = sample(sampleFrom)!; - const snd = sample(sampleFrom)!; - - const fstSndOrder = order(fst, snd); - - if (fstSndOrder === 0) { - continue; - } - - if (fstSndOrder === -1) { - return { - lowerBound: fst, - upperBound: snd, - }; - } - - return { - lowerBound: snd, - upperBound: fst, - }; - } -} - -function rangeFromOptionalBounds( - bounds: OptionalBounds, - leastValue: ValueType, -): Products.Range { - if (bounds.lowerBound && !bounds.upperBound) { - return { - kind: "open", - start: bounds.lowerBound, - }; - } - - if (bounds.upperBound && !bounds.lowerBound) { - return { - kind: "closed_exclusive", - start: leastValue, - end: bounds.upperBound, - }; - } - - return { - kind: "closed_exclusive", - start: bounds.lowerBound!, - end: bounds.upperBound!, - }; -} - -// Product stuff - -function getRandomDisjointInterval( - { minValue, successor, order, maxSize }: { - minValue: ValueType; - successor: Products.SuccessorFn; - maxSize: ValueType; - order: 
Products.TotalOrder; - }, -): Products.DisjointInterval { - let disjointInterval: Products.DisjointInterval = []; - - let start = minValue; - let end = minValue; - - while (true) { - start = end; - - while (true) { - start = successor(start); - - if (Math.random() > 0.8) { - break; - } - } - - end = start; - - while (true) { - end = successor(end); - - if ((order(end, maxSize) >= 0) || Math.random() > 0.8) { - break; - } - } - - if ((order(end, maxSize) >= 0)) { - break; - } - - disjointInterval = Products.addToDisjointInterval({ order: order }, { - kind: "closed_exclusive", - start, - end, - }, disjointInterval); - - if (Math.random() > 0.95) { - break; - } - } - - const isOpen = order(end, maxSize) < 0 && Math.random() > 0.8; - - if (isOpen) { - let openStart = end; - - while (true) { - openStart = successor(openStart); - - if (order(end, maxSize) >= 0 || Math.random() > 0.9) { - break; - } - } - - disjointInterval = Products.addToDisjointInterval({ order: order }, { - kind: "open", - start, - }, disjointInterval); - } - - return disjointInterval; -} - -function getRandom3dProduct( - { noEmpty }: { - noEmpty?: boolean; - }, -): Products.ThreeDimensionalProduct { - const isEmpty = Math.random() > 0.75; - - if (!noEmpty && isEmpty) { - return [[], [], []]; - } - - return [ - getRandomDisjointInterval({ - minValue: 0, - maxSize: 255, - order: (a, b) => { - if (a < b) return -1; - if (a > b) return 1; - return 0; - }, - successor: (a) => a + 1, - }), - getRandomDisjointInterval({ - minValue: new Uint8Array(), - maxSize: new Uint8Array([0, 0, 0, 255]), - order: Products.orderPaths, - successor: Products.makeSuccessorPath(4), - }), - getRandomDisjointInterval({ - minValue: BigInt(0), - maxSize: BigInt(1000), - order: Products.orderTimestamps, - successor: Products.successorTimestamp, - }), - ]; -} +*/ function orderNumbers(a: number, b: number) { if (a < b) return -1; diff --git a/src/replica/storage/storage_3d/triple_storage.ts b/src/replica/storage/storage_3d/triple_storage.ts index 9eef789..bdf89cd 100644 --- a/src/replica/storage/storage_3d/triple_storage.ts +++ b/src/replica/storage/storage_3d/triple_storage.ts @@ -1,12 +1,21 @@ -import { Products } from "../../../../deps.ts"; -import { Entry } from "../../../entries/types.ts"; -import { bigintToBytes } from "../../../util/bytes.ts"; +import { + ANY_SUBSPACE, + AreaOfInterest, + bigintToBytes, + concat, + Entry, + isIncludedRange, + isPathPrefixed, + OPEN_END, + orderTimestamp, + Path, + PathScheme, + successorPrefix, +} from "../../../../deps.ts"; import { FingerprintScheme, - OptionalBounds, - PathLengthScheme, PayloadScheme, - Query, + QueryOrder, SubspaceScheme, } from "../../types.ts"; import { @@ -33,7 +42,7 @@ export type TripleStorageOpts< ) => SummarisableStorage; subspaceScheme: SubspaceScheme; payloadScheme: PayloadScheme; - pathLengthScheme: PathLengthScheme; + pathScheme: PathScheme; fingerprintScheme: FingerprintScheme< NamespaceKey, SubspaceKey, @@ -59,10 +68,9 @@ export class TripleStorage< private ptsStorage: SummarisableStorage; private sptStorage: SummarisableStorage; private tspStorage: SummarisableStorage; - private subspaceScheme: SubspaceScheme; private payloadScheme: PayloadScheme; - private pathLengthScheme: PathLengthScheme; + private pathScheme: PathScheme; private fingerprintScheme: FingerprintScheme< NamespaceKey, SubspaceKey, @@ -88,7 +96,6 @@ export class TripleStorage< const values = decodeSummarisableStorageValue( value, this.payloadScheme, - this.pathLengthScheme, ); // Decode the key. 
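+  // The key layout assumed throughout this class (a sketch; the exact
+  // byte-level encoding is produced by encodeEntryKeys and read back by
+  // decodeEntryKey): each entry is written to all three summarisable
+  // storages, keyed by a different permutation of its coordinates, so that
+  // queries ordered by path, subspace, or timestamp can each be served by a
+  // single contiguous range scan.
+  //
+  //   ptsStorage:  encode(path)      ++ encode(timestamp) ++ encode(subspace)
+  //   sptStorage:  encode(subspace)  ++ encode(path)      ++ encode(timestamp)
+  //   tspStorage:  encode(timestamp) ++ encode(subspace)  ++ encode(path)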
@@ -96,20 +103,16 @@ export class TripleStorage< key, order, this.subspaceScheme, - values.pathLength, + values.encodedPathLength, ); const entry: Entry = { - identifier: { - namespace: this.namespace, - path, - subspace, - }, - record: { - timestamp, - hash: values.payloadHash, - length: values.payloadLength, - }, + namespaceId: this.namespace, + subspaceId: subspace, + path, + timestamp, + payloadDigest: values.payloadHash, + payloadLength: values.payloadLength, }; return opts.fingerprintScheme.fingerprintSingleton(entry); @@ -133,34 +136,32 @@ export class TripleStorage< this.subspaceScheme = opts.subspaceScheme; this.payloadScheme = opts.payloadScheme; - this.pathLengthScheme = opts.pathLengthScheme; + this.pathScheme = opts.pathScheme; this.fingerprintScheme = opts.fingerprintScheme; } async get( subspace: SubspaceKey, - path: Uint8Array, + path: Path, ): Promise< { entry: Entry; authTokenHash: PayloadDigest; } | undefined > { - const firstResult = this.entriesByQuery({ - subspace: { - lowerBound: subspace, - upperBound: this.subspaceScheme.successor(subspace), - }, - path: { - lowerBound: path, - upperBound: Products.makeSuccessorPath(this.pathLengthScheme.maxLength)( - path, - ), + const firstResult = this.query({ + area: { + includedSubspaceId: subspace, + pathPrefix: path, + timeRange: { + start: BigInt(0), + end: OPEN_END, + }, }, - limit: 1, - order: "subspace", - }); + maxCount: 1, + maxSize: BigInt(0), + }, "subspace"); for await (const result of firstResult) { return result; @@ -168,13 +169,13 @@ export class TripleStorage< } async insert( - { path, subspace, payloadHash, timestamp, length, authTokenHash }: { - path: Uint8Array; + { path, subspace, payloadDigest, timestamp, length, authTokenDigest }: { + path: Path; subspace: SubspaceKey; - payloadHash: PayloadDigest; + payloadDigest: PayloadDigest; timestamp: bigint; length: bigint; - authTokenHash: PayloadDigest; + authTokenDigest: PayloadDigest; }, ): Promise { const keys = encodeEntryKeys( @@ -186,14 +187,15 @@ export class TripleStorage< }, ); + // console.log(keys.spt); + const toStore = encodeSummarisableStorageValue( { - payloadHash, + payloadDigest, payloadLength: length, - authTokenHash: authTokenHash, + authTokenDigest: authTokenDigest, payloadScheme: this.payloadScheme, - pathLength: path.byteLength, - pathLengthEncoding: this.pathLengthScheme, + encodedPathLength: keys.encodedPathLength, }, ); @@ -209,9 +211,9 @@ export class TripleStorage< ): Promise { const keys = encodeEntryKeys( { - path: entry.identifier.path, - timestamp: entry.record.timestamp, - subspace: entry.identifier.subspace, + path: entry.path, + timestamp: entry.timestamp, + subspace: entry.subspaceId, subspaceEncoding: this.subspaceScheme, }, ); @@ -226,234 +228,160 @@ export class TripleStorage< } async summarise( - product: Products.CanonicProduct, - countLimits?: { subspace?: number; path?: number; time?: number }, - sizeLimits?: { subspace?: bigint; path?: bigint; time?: bigint }, + areaOfInterest: AreaOfInterest, ): Promise<{ fingerprint: Fingerprint; size: number }> { - const [subspaceDisjoint, pathDisjoint, timeDisjoint] = product; - - // Get the empty product out the way. - if ( - subspaceDisjoint.length === 0 && pathDisjoint.length === 0 && - timeDisjoint.length === 0 - ) { - return { - fingerprint: this.fingerprintScheme.neutral, - size: 0, - }; - } - let fingerprint = this.fingerprintScheme.neutral; + /** The size of the fingerprint. */ let size = 0; - // These keep track of how much of the count and size limits we've used. 
- let sclUsed = 0; - let sslUsed = BigInt(0); - let pclUsed = 0; - let pslUsed = BigInt(0); - let tclUsed = 0; - let tslUsed = BigInt(0); - - let limitsExceeded = false; - - // Go backwards through each range of the subspace disjoint, - // As we need to return greatest items first. - for ( - let subspaceDjIdx = subspaceDisjoint.length - 1; - subspaceDjIdx >= 0; - subspaceDjIdx-- - ) { - const subspaceRange = subspaceDisjoint[subspaceDjIdx]; + let countUsed = 0; + let sizeUsed = BigInt(0); - // Iterate through all the entries of each range. - const subspaceEntriesLowerBound = subspaceRange.start; - const subspaceEntriesUpperBound = subspaceRange.kind === "open" + // Iterate through all the entries of each range. + const subspaceEntriesLowerBound = + areaOfInterest.area.includedSubspaceId === ANY_SUBSPACE ? undefined - : subspaceRange.kind === "closed_exclusive" - ? subspaceRange.end - : this.subspaceScheme.successor(subspaceRange.end); - - const subspaceEntries = this.sptStorage.entries( - this.subspaceScheme.encode(subspaceEntriesLowerBound), - subspaceEntriesUpperBound - ? this.subspaceScheme.encode(subspaceEntriesUpperBound) - : undefined, - { - reverse: true, - }, - ); + : areaOfInterest.area.includedSubspaceId; + const subspaceEntriesUpperBound = + areaOfInterest.area.includedSubspaceId === ANY_SUBSPACE + ? undefined + : this.subspaceScheme.successor(areaOfInterest.area.includedSubspaceId); + + const subspaceEntries = this.sptStorage.entries( + subspaceEntriesLowerBound + ? this.subspaceScheme.encode(subspaceEntriesLowerBound) + : undefined, + subspaceEntriesUpperBound + ? this.subspaceScheme.encode(subspaceEntriesUpperBound) + : undefined, + { + reverse: true, + }, + ); - /** The least excluded item we've run into. - * This is going to be the upper bound of a summarise op we run when we detect a contiguous range of included entries. - */ - let leastExcluded = this.subspaceScheme.encode( - subspaceEntriesUpperBound !== undefined - ? subspaceEntriesUpperBound - : this.subspaceScheme.minimalSubspaceKey, - ); + /** The least excluded item we've run into. + * This is going to be the upper bound of a summarise op we run when we detect a contiguous range of included entries. + */ + let leastExcluded = this.subspaceScheme.encode( + subspaceEntriesUpperBound + ? subspaceEntriesUpperBound + : this.subspaceScheme.minimalSubspaceKey, + ); - /** The least included item we've run into. - * This is going to be the lower bound of a summarise op we run when we detect a contiguous range of included entries. - */ - let leastIncluded: Uint8Array | undefined; - - /** Run this when we detect a contiguous range of included entries. */ - const updateFingerprint = async (start: Uint8Array) => { - const { fingerprint: includedFp, size: includedSize } = await this - .sptStorage.summarise( - start, - leastExcluded, - ); - - fingerprint = this.fingerprintScheme.fingerprintCombine( - fingerprint, - includedFp, + /** The least included item we've run into. + * This is going to be the lower bound of a summarise op we run when we detect a contiguous range of included entries. + */ + let leastIncluded: Uint8Array | undefined; + + /** Run this when we detect a contiguous range of included entries. */ + const updateFingerprint = async (start: Uint8Array) => { + const { fingerprint: includedFp, size: includedSize } = await this + .sptStorage.summarise( + start, + leastExcluded, ); - size += includedSize; - - // Prevent this from running again until we run into another included entry. 
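updateFingerprint and the leastIncluded/leastExcluded bookkeeping implement a run-folding scheme over the loop that follows: entries are scanned in descending key order, and whenever an excluded entry ends a run of included ones, the half-open run [leastIncluded, leastExcluded) is summarised in one call and folded into the running fingerprint. The same pattern in isolation (illustrative only; generic callbacks stand in for the spt index and the fingerprint scheme):

async function summariseIncludedRuns<F>(
  descendingKeys: Uint8Array[],
  isIncluded: (key: Uint8Array) => boolean,
  summariseRange: (start: Uint8Array, end: Uint8Array) => Promise<F>,
  combine: (a: F, b: F) => F,
  neutral: F,
  upperBound: Uint8Array,
): Promise<F> {
  let fingerprint = neutral;
  // Exclusive upper bound of the run we are currently extending downwards.
  let leastExcluded = upperBound;
  // Lowest included key seen since the last excluded one, if any.
  let leastIncluded: Uint8Array | undefined;

  for (const key of descendingKeys) {
    if (!isIncluded(key)) {
      if (leastIncluded) {
        // A run of included keys just ended: summarise [leastIncluded, leastExcluded).
        fingerprint = combine(
          fingerprint,
          await summariseRange(leastIncluded, leastExcluded),
        );
        leastIncluded = undefined;
      }
      leastExcluded = key;
      continue;
    }

    leastIncluded = key;
  }

  if (leastIncluded) {
    fingerprint = combine(
      fingerprint,
      await summariseRange(leastIncluded, leastExcluded),
    );
  }

  return fingerprint;
}
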
- leastIncluded = undefined; - }; - - for await (const subspaceEntry of subspaceEntries) { - // Decode the key. - const values = decodeSummarisableStorageValue( - subspaceEntry.value, - this.payloadScheme, - this.pathLengthScheme, - ); + fingerprint = this.fingerprintScheme.fingerprintCombine( + fingerprint, + includedFp, + ); - // Decode the key. - const { timestamp, path } = decodeEntryKey( - subspaceEntry.key, - "subspace", - this.subspaceScheme, - values.pathLength, - ); + size += includedSize; - // Check that decoded time and subspace are included by both other dimensions - let pathIncluded = false; - - for ( - let pathDisjointIdx = pathDisjoint.length - 1; - pathDisjointIdx >= 0; - pathDisjointIdx-- - ) { - if ( - Products.rangeIncludesValue( - { order: Products.orderPaths }, - pathDisjoint[pathDisjointIdx], - path, - ) - ) { - pathIncluded = true; - // If we're included in one, we don't need to check the others. - break; - } - } + // Prevent this from running again until we run into another included entry. + leastIncluded = undefined; + }; - // If it's not included, and we ran into an included item earlier, - // that indicates the end of a contiguous range. - // Recalculate the fingerprint! - if (!pathIncluded) { - if (leastIncluded) { - await updateFingerprint(leastIncluded); - } + for await (const subspaceEntry of subspaceEntries) { + // Decode the key. + const values = decodeSummarisableStorageValue( + subspaceEntry.value, + this.payloadScheme, + ); - // This entry is now the least excluded entry we've run into. - leastExcluded = subspaceEntry.key; - continue; - } + // Decode the key. + const { timestamp, path } = decodeEntryKey( + subspaceEntry.key, + "subspace", + this.subspaceScheme, + values.encodedPathLength, + ); - let timeIncluded = false; - - for ( - let timeDisjointIdx = timeDisjoint.length - 1; - timeDisjointIdx >= 0; - timeDisjointIdx-- - ) { - if ( - Products.rangeIncludesValue( - { order: Products.orderTimestamps }, - timeDisjoint[timeDisjointIdx], - timestamp, - ) - ) { - timeIncluded = true; - // If we're included in one, we don't need to check the others. - break; - } - } + // Check that decoded time and subspace are included by both other dimensions + let pathIncluded = false; - // If it's not included, and we ran into an included item earlier, - // that indicates the end of a contiguous range. - // Recalculate the fingerprint! - if (!timeIncluded) { - if (leastIncluded) { - await updateFingerprint(leastIncluded); - } + if (isPathPrefixed(areaOfInterest.area.pathPrefix, path)) { + pathIncluded = true; + } - // This entry is now the least excluded entry we've run into. - leastExcluded = subspaceEntry.key; - continue; + // If it's not included, and we ran into an included item earlier, + // that indicates the end of a contiguous range. + // Recalculate the fingerprint! + if (!pathIncluded) { + if (leastIncluded) { + await updateFingerprint(leastIncluded); } - // Now we know this entry is included. - - // Check all dimension count and size limits. - // If any limits have been exceeded, we have to stop here. + // This entry is now the least excluded entry we've run into. + leastExcluded = subspaceEntry.key; + continue; + } - // Boring. 
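Path inclusion is now a prefix test against the area's pathPrefix instead of a range check over path intervals. Roughly (illustrative values; this assumes the usual component-wise prefix semantics of isPathPrefixed):

import { isPathPrefixed } from "../../../../deps.ts";

const prefix = [new Uint8Array([1])];

isPathPrefixed(prefix, [new Uint8Array([1]), new Uint8Array([2])]); // true
isPathPrefixed(prefix, [new Uint8Array([2])]); // false
isPathPrefixed([], [new Uint8Array([2])]); // true: the empty path prefixes every path
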
+ let timeIncluded = false; - const nextSclUsed = sclUsed + 1; - const nextPclUsed = pclUsed + 1; - const nextTclUsed = tclUsed + 1; + if ( + isIncludedRange( + orderTimestamp, + areaOfInterest.area.timeRange, + timestamp, + ) + ) { + timeIncluded = true; + } - const nextSslUsed = sslUsed + values.payloadLength; - const nextPslUsed = pslUsed + values.payloadLength; - const nextTslUsed = tslUsed + values.payloadLength; + // If it's not included, and we ran into an included item earlier, + // that indicates the end of a contiguous range. + // Recalculate the fingerprint! + if (!timeIncluded) { + if (leastIncluded) { + await updateFingerprint(leastIncluded); + } - const sclExceeded = countLimits?.subspace && - nextSclUsed > countLimits.subspace; - const pclExceeded = countLimits?.path && nextPclUsed > countLimits.path; - const tclExceeded = countLimits?.time && nextTclUsed > countLimits.time; + // This entry is now the least excluded entry we've run into. + leastExcluded = subspaceEntry.key; + continue; + } - const sslExceeded = sizeLimits?.subspace && - nextSslUsed > sizeLimits.subspace; - const pslExceeded = sizeLimits?.path && nextPslUsed > sizeLimits.path; - const tslExceeded = sizeLimits?.time && nextTslUsed > sizeLimits.time; + // Now we know this entry is included. - if ( - sclExceeded || pclExceeded || tclExceeded || sslExceeded || - pslExceeded || tslExceeded - ) { - limitsExceeded = true; - break; - } + // Check all dimension count and size limits. + // If any limits have been exceeded, we have to stop here. - sclUsed = nextSclUsed; - pclUsed = nextPclUsed; - tclUsed = nextTclUsed; + // Boring. - sslUsed = nextSslUsed; - pslUsed = nextPslUsed; - tslUsed = nextTslUsed; + const nextCountUsed = countUsed + 1; + const nextSizeUsed = sizeUsed + values.payloadLength; - // This entry is part of a contiguous range of included entries, - // and it's the least included key we've encountered so far. - leastIncluded = subspaceEntry.key; + if ( + (areaOfInterest.maxCount !== 0 && + nextCountUsed > areaOfInterest.maxCount) || + (areaOfInterest.maxSize !== BigInt(0) && + nextSizeUsed > areaOfInterest.maxSize) + ) { + break; } - // Calculate a range that was left over, if any. - if (leastIncluded) { - await updateFingerprint(leastIncluded); - } + countUsed = nextCountUsed; + sizeUsed = nextSizeUsed; - // If the limits have been exceeded, we don't need to go through all the other ranges. - if (limitsExceeded) { - break; - } + // This entry is part of a contiguous range of included entries, + // and it's the least included key we've encountered so far. + leastIncluded = subspaceEntry.key; + } + + // Calculate a range that was left over, if any. + if (leastIncluded) { + await updateFingerprint(leastIncluded); } return { @@ -462,238 +390,61 @@ export class TripleStorage< }; } - async *entriesByProduct( - product: Products.CanonicProduct, - countLimits?: { subspace?: number; path?: number; time?: number }, - sizeLimits?: { subspace?: bigint; path?: bigint; time?: bigint }, - ): AsyncIterable<{ - entry: Entry; - authTokenHash: PayloadDigest; - }> { - const [subspaceDisjoint, pathDisjoint, timeDisjoint] = product; - - // Get the empty product out the way. - if ( - subspaceDisjoint.length === 0 && pathDisjoint.length === 0 && - timeDisjoint.length === 0 - ) { - return; - } - - // These keep track of how much of the count and size limits we've used. 
- let sclUsed = 0; - let sslUsed = BigInt(0); - let pclUsed = 0; - let pslUsed = BigInt(0); - let tclUsed = 0; - let tslUsed = BigInt(0); - - let limitsExceeded = false; - - // Go backwards through each range of the subspace disjoint, - // As we need to return greatest items first. - for ( - let subspaceDjIdx = subspaceDisjoint.length - 1; - subspaceDjIdx >= 0; - subspaceDjIdx-- - ) { - const subspaceRange = subspaceDisjoint[subspaceDjIdx]; - - // Iterate through all the entries of each range. - const subspaceEntriesLowerBound = subspaceRange.start; - const subspaceEntriesUpperBound = subspaceRange.kind === "open" - ? undefined - : subspaceRange.kind === "closed_exclusive" - ? subspaceRange.end - : this.subspaceScheme.successor(subspaceRange.end); - - const subspaceEntries = this.sptStorage.entries( - this.subspaceScheme.encode(subspaceEntriesLowerBound), - subspaceEntriesUpperBound - ? this.subspaceScheme.encode(subspaceEntriesUpperBound) - : undefined, - { - reverse: true, - }, - ); - - for await (const subspaceEntry of subspaceEntries) { - // Decode the key. - const values = decodeSummarisableStorageValue( - subspaceEntry.value, - this.payloadScheme, - this.pathLengthScheme, - ); - - // Decode the key. - const { timestamp, path, subspace } = decodeEntryKey( - subspaceEntry.key, - "subspace", - this.subspaceScheme, - values.pathLength, - ); - - // Check that decoded time and subspace are included by both other dimensions - let pathIncluded = false; - - for ( - let pathDisjointIdx = pathDisjoint.length - 1; - pathDisjointIdx >= 0; - pathDisjointIdx-- - ) { - if ( - Products.rangeIncludesValue( - { order: Products.orderPaths }, - pathDisjoint[pathDisjointIdx], - path, - ) - ) { - pathIncluded = true; - // If we're included in one, we don't need to check the others. - break; - } - } - - // Not included, continue to the next entry. - if (!pathIncluded) { - continue; - } - - let timeIncluded = false; - - for ( - let timeDisjointIdx = timeDisjoint.length - 1; - timeDisjointIdx >= 0; - timeDisjointIdx-- - ) { - if ( - Products.rangeIncludesValue( - { order: Products.orderTimestamps }, - timeDisjoint[timeDisjointIdx], - timestamp, - ) - ) { - timeIncluded = true; - // If we're included in one, we don't need to check the others. - break; - } - } - - // Not included, continue to the next entry. - if (!timeIncluded) { - continue; - } - - // Now we know this entry is included. - - // Check all dimension count and size limits. - // If any limits have been exceeded, we have to stop here. - - // Boring. 
- - const nextSclUsed = sclUsed + 1; - const nextPclUsed = pclUsed + 1; - const nextTclUsed = tclUsed + 1; - - const nextSslUsed = sslUsed + values.payloadLength; - const nextPslUsed = pslUsed + values.payloadLength; - const nextTslUsed = tslUsed + values.payloadLength; - - const sclExceeded = countLimits?.subspace && - nextSclUsed > countLimits.subspace; - const pclExceeded = countLimits?.path && nextPclUsed > countLimits.path; - const tclExceeded = countLimits?.time && nextTclUsed > countLimits.time; - - const sslExceeded = sizeLimits?.subspace && - nextSslUsed > sizeLimits.subspace; - const pslExceeded = sizeLimits?.path && nextPslUsed > sizeLimits.path; - const tslExceeded = sizeLimits?.time && nextTslUsed > sizeLimits.time; - - if ( - sclExceeded || pclExceeded || tclExceeded || sslExceeded || - pslExceeded || tslExceeded - ) { - limitsExceeded = true; - break; - } - - sclUsed = nextSclUsed; - pclUsed = nextPclUsed; - tclUsed = nextTclUsed; - - sslUsed = nextSslUsed; - pslUsed = nextPslUsed; - tslUsed = nextTslUsed; - - yield { - entry: { - identifier: { - namespace: this.namespace, - subspace, - path, - }, - record: { - hash: values.payloadHash, - length: values.payloadLength, - timestamp: timestamp, - }, - }, - authTokenHash: values.authTokenHash, - }; - } - - // If the limits have been exceeded, we don't need to go through all the other ranges. - if (limitsExceeded) { - break; - } - } - } - - async *entriesByQuery( - query: Query, + async *query( + areaOfInterest: AreaOfInterest, + order: QueryOrder, + reverse = false, ): AsyncIterable<{ entry: Entry; authTokenHash: PayloadDigest; }> { - const storage = query.order === "subspace" + const storage = order === "subspace" ? this.sptStorage - : query.order === "path" + : order === "path" ? this.ptsStorage : this.tspStorage; - if (!query.subspace && !query.path && !query.time) { + const includesAllTime = areaOfInterest.area.timeRange.start === BigInt(0) && + areaOfInterest.area.timeRange.end === OPEN_END; + const includesAllPaths = areaOfInterest.area.pathPrefix.length === 0; + const includesAllSubspaces = + areaOfInterest.area.includedSubspaceId === ANY_SUBSPACE; + + // Do the simplest thing if the area starts from the lowest value and is open ended in all dimensions. + if ( + includesAllTime && + includesAllPaths && + includesAllSubspaces && + areaOfInterest.maxSize === BigInt(0) && + areaOfInterest.maxCount === 0 + ) { const allEntriesOnOrder = storage.entries(undefined, undefined, { - limit: query.limit, - reverse: query.reverse, + limit: areaOfInterest.maxCount, + reverse: reverse, }); for await (const { key, value } of allEntriesOnOrder) { const values = decodeSummarisableStorageValue( value, this.payloadScheme, - this.pathLengthScheme, ); // Decode the key. 
const { subspace, timestamp, path } = decodeEntryKey( key, - query.order, + order, this.subspaceScheme, - values.pathLength, + values.encodedPathLength, ); yield { entry: { - identifier: { - namespace: this.namespace, - subspace, - path, - }, - record: { - hash: values.payloadHash, - length: values.payloadLength, - timestamp, - }, + namespaceId: this.namespace, + subspaceId: subspace, + path, + payloadDigest: values.payloadHash, + payloadLength: values.payloadLength, + timestamp, }, authTokenHash: values.authTokenHash, }; @@ -705,74 +456,80 @@ export class TripleStorage< let lowerBound: Uint8Array | undefined; let upperBound: Uint8Array | undefined; - const leastPath = new Uint8Array(); - - if (query.order === "path" && query.path) { - lowerBound = query.path.lowerBound; - upperBound = query.path.upperBound; - } else if (query.order === "subspace" && query.subspace) { - lowerBound = query.subspace.lowerBound - ? this.subspaceScheme.encode(query.subspace.lowerBound) - : undefined; - upperBound = query.subspace.upperBound - ? this.subspaceScheme.encode(query.subspace.upperBound) - : undefined; - } else if (query.order === "timestamp" && query.time) { - lowerBound = query.time.lowerBound - ? bigintToBytes(query.time.lowerBound) - : undefined; - upperBound = query.time.upperBound - ? bigintToBytes(query.time.upperBound) - : undefined; + if (order === "path") { + lowerBound = concat(...areaOfInterest.area.pathPrefix); + + const maybeSuccessorPrefix = successorPrefix( + areaOfInterest.area.pathPrefix, + ); + + if (maybeSuccessorPrefix) { + upperBound = concat(...maybeSuccessorPrefix); + } + } else if ( + order === "subspace" && + areaOfInterest.area.includedSubspaceId !== ANY_SUBSPACE + ) { + lowerBound = this.subspaceScheme.encode( + areaOfInterest.area.includedSubspaceId, + ); + + const maybeSuccessorSubspace = this.subspaceScheme.successor( + areaOfInterest.area.includedSubspaceId, + ); + + if (maybeSuccessorSubspace) { + upperBound = this.subspaceScheme.encode(maybeSuccessorSubspace); + } + } else if (order === "timestamp") { + if (areaOfInterest.area.timeRange.start > BigInt(0)) { + lowerBound = bigintToBytes(areaOfInterest.area.timeRange.start); + } + + if (areaOfInterest.area.timeRange.end !== OPEN_END) { + upperBound = bigintToBytes(areaOfInterest.area.timeRange.start); + } } let entriesYielded = 0; + let payloadBytesYielded = BigInt(0); const iterator = storage.entries(lowerBound, upperBound, { - reverse: query.reverse, + reverse, }); for await (const { key, value } of iterator) { const values = decodeSummarisableStorageValue( value, this.payloadScheme, - this.pathLengthScheme, ); // Decode the key. 
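Callers of the new query method pass an AreaOfInterest plus a QueryOrder and an optional reverse flag; the method picks the matching spt, pts, or tsp index and bounds its scan using the subspace successor, the path prefix's successorPrefix, or the time range. A usage sketch (illustrative only; `storage` and `everything` are the placeholders from the earlier sketches):

// Ordered by path, then timestamp, then subspace.
for await (const { entry, authTokenHash } of storage.query(everything, "path")) {
  console.log(entry.path, entry.timestamp, authTokenHash);
}

// The same area, newest entries first.
for await (const { entry } of storage.query(everything, "timestamp", true)) {
  console.log(entry.timestamp);
}
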
const { subspace, timestamp, path } = decodeEntryKey( key, - query.order, + order, this.subspaceScheme, - values.pathLength, + values.encodedPathLength, ); if ( - (query.order === "path" || query.order === "timestamp") && - query.subspace + (order === "path" || order === "timestamp") && + areaOfInterest.area.includedSubspaceId !== ANY_SUBSPACE ) { - const isIncludedInSubspaceRange = Products.rangeIncludesValue( - { - order: this.subspaceScheme.order, - }, - rangeFromOptionalBounds( - query.subspace, - this.subspaceScheme.minimalSubspaceKey, - ), + const isSubspace = this.subspaceScheme.order( subspace, + areaOfInterest.area.includedSubspaceId, ); - if (!isIncludedInSubspaceRange) { + if (!isSubspace) { continue; } } - if ( - (query.order === "path" || query.order === "subspace") && query.time - ) { - const isIncluded = Products.rangeIncludesValue( - { order: Products.orderTimestamps }, - rangeFromOptionalBounds(query.time, BigInt(0)), + if ((order === "path" || order === "subspace") && !includesAllTime) { + const isIncluded = isIncludedRange( + orderTimestamp, + areaOfInterest.area.timeRange, timestamp, ); @@ -782,67 +539,43 @@ export class TripleStorage< } if ( - (query.order === "subspace" || query.order === "timestamp") && - query.path + (order === "subspace" || order === "timestamp") && !includesAllPaths ) { - const isIncludedInPathRange = Products.rangeIncludesValue( - { order: Products.orderPaths }, - rangeFromOptionalBounds(query.path, leastPath), - path, - ); + const isIncluded = isPathPrefixed(areaOfInterest.area.pathPrefix, path); - if (!isIncludedInPathRange) { + if (!isIncluded) { continue; } } entriesYielded += 1; + payloadBytesYielded += values.payloadLength; + + if ( + areaOfInterest.maxSize !== BigInt(0) && + payloadBytesYielded >= areaOfInterest.maxSize + ) { + break; + } yield { entry: { - identifier: { - namespace: this.namespace, - subspace, - path, - }, - record: { - hash: values.payloadHash, - length: values.payloadLength, - timestamp, - }, + namespaceId: this.namespace, + subspaceId: subspace, + path, + payloadDigest: values.payloadHash, + payloadLength: values.payloadLength, + timestamp, }, authTokenHash: values.authTokenHash, }; - if (query.limit && entriesYielded === query.limit) { + if ( + areaOfInterest.maxCount !== 0 && + entriesYielded >= areaOfInterest.maxCount + ) { break; } } } } - -function rangeFromOptionalBounds( - bounds: OptionalBounds, - leastValue: ValueType, -): Products.Range { - if (bounds.lowerBound && !bounds.upperBound) { - return { - kind: "open", - start: bounds.lowerBound, - }; - } - - if (bounds.upperBound && !bounds.lowerBound) { - return { - kind: "closed_exclusive", - start: leastValue, - end: bounds.upperBound, - }; - } - - return { - kind: "closed_exclusive", - start: bounds.lowerBound!, - end: bounds.upperBound!, - }; -} diff --git a/src/replica/storage/storage_3d/types.ts b/src/replica/storage/storage_3d/types.ts index f4c0ceb..1de4541 100644 --- a/src/replica/storage/storage_3d/types.ts +++ b/src/replica/storage/storage_3d/types.ts @@ -1,6 +1,5 @@ -import { Products } from "../../../../deps.ts"; -import { Entry } from "../../../entries/types.ts"; -import { Query } from "../../types.ts"; +import { AreaOfInterest, Entry, Path } from "../../../../deps.ts"; +import { QueryOrder } from "../../types.ts"; export interface Storage3d< NamespaceKey, @@ -11,7 +10,7 @@ export interface Storage3d< /** Retrieve a value */ get( subspace: SubspaceKey, - path: Uint8Array, + path: Path, ): Promise< { entry: Entry; @@ -20,12 +19,12 @@ export interface 
Storage3d< >; insert(opts: { - path: Uint8Array; + path: Path; subspace: SubspaceKey; - payloadHash: PayloadDigest; + payloadDigest: PayloadDigest; timestamp: bigint; length: bigint; - authTokenHash: PayloadDigest; + authTokenDigest: PayloadDigest; }): Promise; remove( @@ -34,28 +33,15 @@ export interface Storage3d< // Used during sync summarise( - product: Products.CanonicProduct, - countLimits?: { subspace?: number; path?: number; time?: number }, - sizeLimits?: { subspace?: bigint; path?: bigint; time?: bigint }, + areaOfInterest: AreaOfInterest, ): Promise<{ fingerprint: Fingerprint; size: number }>; // Used to fetch entries for transfer during sync. // All three dimensions are defined - entriesByProduct( - product: Products.CanonicProduct, - countLimits?: { subspace?: number; path?: number; time?: number }, - sizeLimits?: { subspace?: bigint; path?: bigint; time?: bigint }, - ): AsyncIterable< - { - entry: Entry; - authTokenHash: PayloadDigest; - } - >; - - // Used to fetch entries when user is making query through replica - // 0 - 3 dimensions may be defined - entriesByQuery( - query: Query, + query( + areaOfInterest: AreaOfInterest, + order: QueryOrder, + reverse?: boolean, ): AsyncIterable< { entry: Entry; diff --git a/src/replica/storage/summarisable_storage/monoid_skiplist.ts b/src/replica/storage/summarisable_storage/monoid_skiplist.ts index 410a2df..d3c0402 100644 --- a/src/replica/storage/summarisable_storage/monoid_skiplist.ts +++ b/src/replica/storage/summarisable_storage/monoid_skiplist.ts @@ -12,7 +12,8 @@ type SkiplistOpts< | string | number | bigint - | boolean, + | boolean + | symbol, LiftedType, > = { compare: (a: ValueType, b: ValueType) => number; @@ -96,7 +97,7 @@ export class Skiplist< for (const key of map.keys()) { divider.push("------------"); - line.push(`${key}`.padEnd(12)); + line.push(`${String(key)}`.padEnd(12)); } console.log(divider.join("-+-")); diff --git a/src/replica/storage/summarisable_storage/simple_kv.ts b/src/replica/storage/summarisable_storage/simple_kv.ts index 5a9cba9..b9e31ad 100644 --- a/src/replica/storage/summarisable_storage/simple_kv.ts +++ b/src/replica/storage/summarisable_storage/simple_kv.ts @@ -8,7 +8,8 @@ type SimpleKvOpts< | string | number | bigint - | boolean, + | boolean + | symbol, LiftedType, > = { kv: KvDriver; diff --git a/src/replica/storage/types.ts b/src/replica/storage/types.ts index a649f55..88711c0 100644 --- a/src/replica/storage/types.ts +++ b/src/replica/storage/types.ts @@ -1,4 +1,4 @@ -import { Entry } from "../../entries/types.ts"; +import { Entry } from "../../../deps.ts"; import { ValidationError } from "../../errors.ts"; import { Payload } from "../types.ts"; @@ -55,7 +55,7 @@ export interface EntryDriver< /** */ export interface PayloadDriver { - /** Returns an payload for a given format and hash.*/ + /** Returns a payload for a given format and hash.*/ get( payloadHash: PayloadDigest, opts?: { @@ -69,7 +69,7 @@ export interface PayloadDriver { ): Promise< { hash: PayloadDigest; - length: number; + length: bigint; /** Commit the staged attachment to storage. */ commit: () => Promise; /** Reject the staged attachment, erasing it. 
*/ diff --git a/src/replica/types.ts b/src/replica/types.ts index 6e14084..11baace 100644 --- a/src/replica/types.ts +++ b/src/replica/types.ts @@ -1,20 +1,21 @@ -import { Products } from "../../deps.ts"; -import { Entry } from "../entries/types.ts"; +import { + EncodingScheme, + Entry, + Path, + PathScheme, + SuccessorFn, + TotalOrder, +} from "../../deps.ts"; import { EntryDriver, PayloadDriver } from "./storage/types.ts"; -export type PathLengthScheme = EncodingScheme & { - maxLength: number; +export type NamespaceScheme = EncodingScheme & { + isEqual: EqualityFn; }; -export type NamespaceScheme = EncodingScheme & { - isEqual: EqualityFn; -}; - -export type SubspaceScheme = EncodingScheme & { - isEqual: EqualityFn; - order: Products.TotalOrder; - successor: Products.SuccessorFn; - minimalSubspaceKey: SubspaceKey; +export type SubspaceScheme = EncodingScheme & { + successor: SuccessorFn; + order: TotalOrder; + minimalSubspaceKey: SubspaceId; }; export type PayloadScheme = EncodingScheme & { @@ -29,11 +30,13 @@ export type AuthorisationScheme< AuthorisationOpts, AuthorisationToken, > = { + /** Produce an authorisation token from an entry */ authorise( entry: Entry, opts: AuthorisationOpts, ): Promise; - isAuthorised: ( + /** Verify if an entry is authorised to be written */ + isAuthorisedWrite: ( entry: Entry, token: AuthorisationToken, ) => Promise; @@ -65,12 +68,10 @@ export interface ProtocolParameters< AuthorisationToken, Fingerprint, > { - pathLengthScheme: PathLengthScheme; + pathScheme: PathScheme; - // Namespace encoding scheme namespaceScheme: NamespaceScheme; - // Learn what subspaceScheme: SubspaceScheme; // Learn about payloads and producing them from bytes @@ -122,68 +123,6 @@ export type ReplicaOpts< payloadDriver?: PayloadDriver; }; -export type QueryOrder = - /** By path, then timestamp, then subspace */ - | "path" - /** By timestamp, then subspace, then path */ - | "timestamp" - /** By subspace, then path, then timestamp */ - | "subspace"; - -export type OptionalBounds = { - /** The value to start returning results from, inclusive. Starts from the first entry in the replica if left undefined. */ - lowerBound?: ValueType; - /** The value to stop returning results at, exclusive. Stops after the last entry in the replica if undefined. */ - upperBound: ValueType; -} | { - /** The value to start returning results from, inclusive. Starts from the first entry in the replica if left undefined. */ - lowerBound: ValueType; - /** The value to stop returning results at, exclusive. Stops after the last entry in the replica if undefined. */ - upperBound?: ValueType; -}; - -export interface Query { - order: QueryOrder; - subspace?: OptionalBounds; - path?: OptionalBounds; - time?: OptionalBounds; - /** The maximum number of results to return. */ - limit?: number; - /** Whether the results should be returned in reverse order. */ - reverse?: boolean; -} - -export type EncodingScheme = { - /** A function to encode a given `ValueType`. */ - encode(value: ValueType): Uint8Array; - /** A function to decode a given `ValueType` */ - decode(encoded: Uint8Array): ValueType; - /** A function which returns the bytelength for a given `ValueType` when encoded. */ - encodedLength(value: ValueType): number; -}; - -export type KeypairEncodingScheme = { - /** The encoding scheme for a key pair's public key type. */ - publicKey: EncodingScheme; - /** The encoding scheme for a key pair's signature type. */ - signature: EncodingScheme; -}; - -/** A scheme for signing and verifying data using key pairs. 
*/ -export type SignatureScheme = { - sign: (secretKey: SecretKey, bytestring: Uint8Array) => Promise; - verify: ( - publicKey: PublicKey, - signature: Signature, - bytestring: Uint8Array, - ) => Promise; -}; - -export type KeypairScheme = { - signatureScheme: SignatureScheme; - encodingScheme: KeypairEncodingScheme; -}; - export type EqualityFn = (a: ValueType, b: ValueType) => boolean; // Events @@ -215,6 +154,14 @@ export type IngestEventSuccess< externalSourceId?: string; }; +export type QueryOrder = + /** By path, then timestamp, then subspace */ + | "path" + /** By timestamp, then subspace, then path */ + | "timestamp" + /** By subspace, then path, then timestamp */ + | "subspace"; + export type IngestEvent< NamespacePublicKey, SubspacePublicKey, @@ -239,7 +186,7 @@ export type Payload = { }; export type EntryInput = { - path: Uint8Array; + path: Path; subspace: SubspacePublicKey; payload: Uint8Array | ReadableStream; /** The desired timestamp for the new entry. If left undefined, uses the current time, OR if another entry exists at the same path will be that entry's timestamp + 1. */ diff --git a/src/replica/util.ts b/src/replica/util.ts index a29aff2..5d257f7 100644 --- a/src/replica/util.ts +++ b/src/replica/util.ts @@ -1,10 +1,9 @@ import { join } from "https://deno.land/std@0.188.0/path/mod.ts"; import { EntryDriverKvStore } from "./storage/entry_drivers/kv_store.ts"; import { PayloadDriverFilesystem } from "./storage/payload_drivers/filesystem.ts"; -import { EncodingScheme, PayloadScheme, ProtocolParameters } from "./types.ts"; +import { PayloadScheme, ProtocolParameters } from "./types.ts"; import { ensureDir } from "https://deno.land/std@0.188.0/fs/ensure_dir.ts"; -import { bigintToBytes } from "../util/bytes.ts"; -import { concat } from "../../deps.ts"; +import { bigintToBytes, concat, EncodingScheme, Path } from "../../deps.ts"; import { KvDriverDeno } from "./storage/kv/kv_driver_deno.ts"; /** Create a pair of entry and payload drivers for use with a {@link Replica} which will store their data at a given filesystem path. */ @@ -49,17 +48,88 @@ export async function getPersistedDrivers< // Keys +export function encodePathWithSeparators(path: Path): Uint8Array { + const encodedComponents: Uint8Array[] = []; + + for (const component of path) { + const bytes: number[] = []; + + for (const byte of component) { + if (byte !== 0) { + bytes.push(byte); + continue; + } + + bytes.push(0, 1); + } + + bytes.push(0, 0); + const encodedComponent = new Uint8Array(bytes); + encodedComponents.push(encodedComponent); + } + + return concat(...encodedComponents); +} + +export function decodePathWithSeparators( + encoded: Uint8Array, +): Path { + const path: Path = []; + + let currentComponentBytes = []; + let previousWasZero = false; + + for (const byte of encoded) { + if (previousWasZero && byte === 0) { + // Separator + previousWasZero = false; + + const component = new Uint8Array(currentComponentBytes); + + path.push(component); + + currentComponentBytes = []; + + continue; + } + + if (previousWasZero && byte === 1) { + // Encoded zero. 
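encodePathWithSeparators escapes a zero byte inside a component as 0x00 0x01 and terminates every component with 0x00 0x00, so component boundaries survive inside a flat byte key; decodePathWithSeparators reverses the escape. A concrete round-trip (an illustrative check, not part of the patch; import paths assumed):

import { assertEquals } from "https://deno.land/std@0.202.0/assert/assert_equals.ts";
import { decodePathWithSeparators, encodePathWithSeparators } from "./util.ts";

const path = [new Uint8Array([1, 0, 2]), new Uint8Array([3])];
const encoded = encodePathWithSeparators(path);

// [1, 0 1, 2, 0 0] for the first component, then [3, 0 0] for the second.
assertEquals(encoded, new Uint8Array([1, 0, 1, 2, 0, 0, 3, 0, 0]));
assertEquals(decodePathWithSeparators(encoded), path);
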
+ currentComponentBytes.push(0); + previousWasZero = false; + continue; + } + + if (byte === 0) { + previousWasZero = true; + continue; + } + + currentComponentBytes.push(byte); + previousWasZero = false; + } + + return path; +} + export function encodeEntryKeys( opts: { - path: Uint8Array; + path: Path; timestamp: bigint; subspace: SubspacePublicKey; subspaceEncoding: EncodingScheme; }, -): { spt: Uint8Array; pts: Uint8Array; tsp: Uint8Array } { +): { + spt: Uint8Array; + pts: Uint8Array; + tsp: Uint8Array; + encodedPathLength: number; +} { const encodedSubspace = opts.subspaceEncoding.encode(opts.subspace); - const keyLength = 8 + opts.path.byteLength + + const encodedPath = encodePathWithSeparators(opts.path); + + const keyLength = 8 + encodedPath.byteLength + encodedSubspace.byteLength; const sptBytes = new Uint8Array(keyLength); @@ -69,23 +139,23 @@ export function encodeEntryKeys( // Subspace, path, timestamp sptBytes.set(encodedSubspace, 0); sptBytes.set( - opts.path, + encodedPath, encodedSubspace.byteLength, ); const sptDv = new DataView(sptBytes.buffer); sptDv.setBigUint64( - encodedSubspace.byteLength + opts.path.byteLength, + encodedSubspace.byteLength + encodedPath.byteLength, opts.timestamp, ); // Path, timestamp, subspace - ptsBytes.set(opts.path, 0); + ptsBytes.set(encodedPath, 0); const ptsDv = new DataView(ptsBytes.buffer); ptsDv.setBigUint64( - opts.path.byteLength, + encodedPath.byteLength, opts.timestamp, ); - ptsBytes.set(encodedSubspace, opts.path.byteLength + 8); + ptsBytes.set(encodedSubspace, encodedPath.byteLength + 8); // Timestamp, subspace, path const tapDv = new DataView(tspBytes.buffer); @@ -94,24 +164,29 @@ export function encodeEntryKeys( opts.timestamp, ); tspBytes.set(encodedSubspace, 8); - tspBytes.set(opts.path, 8 + encodedSubspace.byteLength); + tspBytes.set(encodedPath, 8 + encodedSubspace.byteLength); - return { spt: sptBytes, pts: ptsBytes, tsp: tspBytes }; + return { + spt: sptBytes, + pts: ptsBytes, + tsp: tspBytes, + encodedPathLength: encodedPath.byteLength, + }; } export function decodeEntryKey( encoded: Uint8Array, order: "subspace" | "path" | "timestamp", subspaceEncoding: EncodingScheme, - pathLength: number, + encodedPathLength: number, ): { subspace: SubspacePublicKey; - path: Uint8Array; + path: Path; timestamp: bigint; } { - let subspace; - let timestamp; - let path; + let subspace: SubspacePublicKey; + let timestamp: bigint; + let path: Path; switch (order) { case "subspace": { @@ -119,9 +194,13 @@ export function decodeEntryKey( const encodedSubspaceLength = subspaceEncoding.encodedLength(subspace); - path = encoded.subarray( - encodedSubspaceLength, - encodedSubspaceLength + pathLength, + const pathComponentPos = encodedSubspaceLength; + + path = decodePathWithSeparators( + encoded.subarray( + pathComponentPos, + pathComponentPos + encodedPathLength, + ), ); const dataView = new DataView(encoded.buffer); @@ -130,16 +209,21 @@ export function decodeEntryKey( break; } case "path": { - path = encoded.subarray(0, pathLength); + path = decodePathWithSeparators( + encoded.subarray( + 0, + encodedPathLength, + ), + ); const dataView = new DataView(encoded.buffer); timestamp = dataView.getBigUint64( - pathLength, + encodedPathLength, ); subspace = subspaceEncoding.decode(encoded.subarray( - pathLength + 8, + encodedPathLength + 8, )); break; @@ -156,7 +240,12 @@ export function decodeEntryKey( const encodedSubspaceLength = subspaceEncoding.encodedLength(subspace); - path = encoded.subarray(8 + encodedSubspaceLength); + path = 
decodePathWithSeparators( + encoded.subarray( + encodedSubspaceLength, + encodedSubspaceLength + encodedPathLength, + ), + ); } } @@ -169,59 +258,58 @@ export function decodeEntryKey( export function encodeSummarisableStorageValue( { - authTokenHash, - payloadHash, + authTokenDigest, + payloadDigest, payloadLength, payloadScheme, - pathLength, - pathLengthEncoding, + encodedPathLength, }: { - authTokenHash: PayloadDigest; - payloadHash: PayloadDigest; + authTokenDigest: PayloadDigest; + payloadDigest: PayloadDigest; payloadLength: bigint; payloadScheme: PayloadScheme; - pathLength: number; - pathLengthEncoding: EncodingScheme; + encodedPathLength: number; }, ): Uint8Array { + const pathLengthBytes = new Uint8Array(4); + const view = new DataView(pathLengthBytes.buffer); + view.setUint32(0, encodedPathLength); + return concat( - pathLengthEncoding.encode(pathLength), + pathLengthBytes, bigintToBytes(payloadLength), - payloadScheme.encode(payloadHash), - payloadScheme.encode(authTokenHash), + payloadScheme.encode(payloadDigest), + payloadScheme.encode(authTokenDigest), ); } export function decodeSummarisableStorageValue( encoded: Uint8Array, payloadEncoding: EncodingScheme, - pathLengthEncoding: EncodingScheme, ): { - pathLength: number; + encodedPathLength: number; payloadLength: bigint; payloadHash: PayloadDigest; authTokenHash: PayloadDigest; } { - const pathLength = pathLengthEncoding.decode(encoded); - - const pathLengthWidth = pathLengthEncoding.encodedLength(pathLength); - const dataView = new DataView(encoded.buffer); - const payloadLength = dataView.getBigUint64(pathLengthWidth); + const encodedPathLength = dataView.getUint32(0); + + const payloadLength = dataView.getBigUint64(4); const payloadHash = payloadEncoding.decode( - encoded.subarray(pathLengthWidth + 8), + encoded.subarray(4 + 8), ); const payloadHashLength = payloadEncoding.encodedLength(payloadHash); const authTokenHash = payloadEncoding.decode( - encoded.subarray(pathLengthWidth + 8 + payloadHashLength), + encoded.subarray(4 + 8 + payloadHashLength), ); return { - pathLength, + encodedPathLength, payloadLength, payloadHash, authTokenHash, diff --git a/src/test/test_schemes.ts b/src/test/test_schemes.ts index 51a16eb..47965ba 100644 --- a/src/test/test_schemes.ts +++ b/src/test/test_schemes.ts @@ -1,15 +1,18 @@ -import { equalsBytes, Products } from "../../deps.ts"; +import { + encodeEntry, + equalsBytes, + orderBytes, + PathScheme, + successorBytesFixedWidth, +} from "../../deps.ts"; import { crypto } from "https://deno.land/std@0.188.0/crypto/crypto.ts"; -import { encodeEntry } from "../entries/encode_decode.ts"; import { AuthorisationScheme, FingerprintScheme, NamespaceScheme, - PathLengthScheme, PayloadScheme, SubspaceScheme, } from "../replica/types.ts"; -import { compareBytes } from "../util/bytes.ts"; import { importPublicKey } from "./crypto.ts"; export const testSchemeNamespace: NamespaceScheme = { @@ -23,23 +26,15 @@ export const testSchemeSubspace: SubspaceScheme = { encode: (v) => v, decode: (v) => v.subarray(0, 65), encodedLength: () => 65, - isEqual: equalsBytes, minimalSubspaceKey: new Uint8Array(65), - order: Products.orderPaths, - successor: Products.makeSuccessorPath(65), + order: orderBytes, + successor: successorBytesFixedWidth, }; -export const testSchemePathLength: PathLengthScheme = { - encode(length) { - return new Uint8Array([length]); - }, - decode(bytes) { - return bytes[0]; - }, - encodedLength() { - return 1; - }, - maxLength: 8, +export const testSchemePath: PathScheme = { + maxPathLength: 
8, + maxComponentCount: 4, + maxComponentLength: 3, }; export const testSchemePayload: PayloadScheme = { @@ -56,7 +51,7 @@ export const testSchemePayload: PayloadScheme = { return new Uint8Array(await crypto.subtle.digest("SHA-256", bytes)); }, order(a, b) { - return compareBytes(new Uint8Array(a), new Uint8Array(b)) as + return orderBytes(new Uint8Array(a), new Uint8Array(b)) as | 1 | 0 | -1; @@ -71,12 +66,12 @@ export const testSchemeFingerprint: FingerprintScheme< > = { neutral: new Uint8Array(32), async fingerprintSingleton(entry) { - const encodedEntry = encodeEntry(entry, { + const encodedEntry = encodeEntry({ namespaceScheme: testSchemeNamespace, subspaceScheme: testSchemeSubspace, - pathLengthScheme: testSchemePathLength, + pathScheme: testSchemePath, payloadScheme: testSchemePayload, - }); + }, entry); return new Uint8Array(await crypto.subtle.digest("SHA-256", encodedEntry)); }, @@ -99,12 +94,12 @@ export const testSchemeAuthorisation: AuthorisationScheme< ArrayBuffer > = { async authorise(entry, secretKey) { - const encodedEntry = encodeEntry(entry, { + const encodedEntry = encodeEntry({ namespaceScheme: testSchemeNamespace, subspaceScheme: testSchemeSubspace, - pathLengthScheme: testSchemePathLength, + pathScheme: testSchemePath, payloadScheme: testSchemePayload, - }); + }, entry); const res = await crypto.subtle.sign( { @@ -117,15 +112,15 @@ export const testSchemeAuthorisation: AuthorisationScheme< return new Uint8Array(res); }, - async isAuthorised(entry, token) { - const cryptoKey = await importPublicKey(entry.identifier.subspace); + async isAuthorisedWrite(entry, token) { + const cryptoKey = await importPublicKey(entry.subspaceId); - const encodedEntry = encodeEntry(entry, { + const encodedEntry = encodeEntry({ namespaceScheme: testSchemeNamespace, subspaceScheme: testSchemeSubspace, - pathLengthScheme: testSchemePathLength, + pathScheme: testSchemePath, payloadScheme: testSchemePayload, - }); + }, entry); return crypto.subtle.verify( { diff --git a/src/test/utils.ts b/src/test/utils.ts index 9e489dc..5892d6c 100644 --- a/src/test/utils.ts +++ b/src/test/utils.ts @@ -1,3 +1,4 @@ +import { Path } from "../../deps.ts"; import { makeSubspaceKeypair } from "./crypto.ts"; export async function getSubspaces(size: number) { @@ -15,3 +16,29 @@ export async function getSubspaces(size: number) { export function randomTimestamp() { return BigInt(Math.floor(Math.random() * 1000)); } + +/** Makes a random path which fits within the constraints of `testSchemePath`. */ +export function randomPath(): Path { + const pathLength = Math.floor(Math.random() * 4); + + const maxComponentLength = pathLength === 4 + ? 2 + : pathLength === 3 + ? 2 + : pathLength === 2 + ? 4 + : pathLength === 1 + ? 8 + : 0; + + const path = []; + + // Now create components with random uint. 
+ for (let i = 0; i < pathLength; i++) { + const pathLength = Math.floor(Math.random() * maxComponentLength); + + path.push(crypto.getRandomValues(new Uint8Array(pathLength))); + } + + return path; +} diff --git a/src/util/bytes.test.ts b/src/util/bytes.test.ts deleted file mode 100644 index bd7d60b..0000000 --- a/src/util/bytes.test.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { assertEquals } from "https://deno.land/std@0.158.0/testing/asserts.ts"; -import { compareBytes } from "./bytes.ts"; - -type TestVector = [number[], number[], number]; - -const testVectors: TestVector[] = [ - // Equal - [[0], [0], 0], - [[0, 1], [0, 1], 0], - // Greater than - [[1], [0], 1], - [[1], [0, 1], 1], - [[1, 1], [0, 1], 1], - [[1, 1], [1], 1], - // Less than - [[0], [1], -1], - [[0], [1, 2], -1], - [[0, 1, 2], [1, 2, 3], -1], - [[0, 1, 2], [1], -1], -]; - -Deno.test("compareBytes", () => { - for (const vector of testVectors) { - const a = new Uint8Array(vector[0]); - const b = new Uint8Array(vector[1]); - const res = compareBytes(a, b); - - assertEquals( - res, - vector[2], - `[${a}] <> [${b}] should be ${vector[2]}, but is ${res}`, - ); - } -}); diff --git a/src/util/bytes.ts b/src/util/bytes.ts deleted file mode 100644 index 72a53d9..0000000 --- a/src/util/bytes.ts +++ /dev/null @@ -1,56 +0,0 @@ -export function compareBytes(a: Uint8Array, b: Uint8Array): -1 | 0 | 1 { - const shorter = a.byteLength < b.byteLength ? a : b; - - for (let i = 0; i < shorter.byteLength; i++) { - const aByte = a[i]; - const bByte = b[i]; - - if (aByte === bByte) { - continue; - } - - if (aByte < bByte) { - return -1; - } - - if (aByte > bByte) { - return 1; - } - } - - if (a.byteLength < b.byteLength) { - return -1; - } else if (a.byteLength > b.byteLength) { - return 1; - } - - return 0; -} - -export function bigintToBytes(bigint: bigint): Uint8Array { - const bytes = new Uint8Array(8); - const view = new DataView(bytes.buffer); - - view.setBigUint64(0, bigint); - - return bytes; -} - -export function incrementLastByte(bytes: Uint8Array) { - const last = bytes[bytes.byteLength - 1]; - - if (last === 255) { - const newBytes = new Uint8Array(bytes.byteLength + 1); - - newBytes.set(bytes, 0); - newBytes.set([0], bytes.byteLength); - - return newBytes; - } else { - const newBytes = new Uint8Array(bytes); - - newBytes.set([last + 1], bytes.byteLength - 1); - - return newBytes; - } -} From eadaf2c5b40ad5e49b96115a47557f0a9bdda971 Mon Sep 17 00:00:00 2001 From: Sam Gwilym Date: Mon, 5 Feb 2024 16:58:58 +0100 Subject: [PATCH 2/7] Add encryptPath, decryptPath utilities --- src/utils/encryption.test.ts | 123 +++++++++++++++++++++++++++++++++++ src/utils/encryption.ts | 107 ++++++++++++++++++++++++++++++ 2 files changed, 230 insertions(+) create mode 100644 src/utils/encryption.test.ts create mode 100644 src/utils/encryption.ts diff --git a/src/utils/encryption.test.ts b/src/utils/encryption.test.ts new file mode 100644 index 0000000..83057e6 --- /dev/null +++ b/src/utils/encryption.test.ts @@ -0,0 +1,123 @@ +import { assertEquals } from "https://deno.land/std@0.202.0/assert/assert_equals.ts"; +import { Path } from "../../deps.ts"; +import { decryptPath, encryptPath } from "./encryption.ts"; +import { assertNotEquals } from "https://deno.land/std@0.202.0/assert/assert_not_equals.ts"; + +type EncryptPathVector = Path; + +const encryptPathVectors: EncryptPathVector[] = [ + [], + [new Uint8Array([1, 2, 3, 4])], + [new Uint8Array([1]), new Uint8Array([2]), new Uint8Array([3])], +]; + +Deno.test("encryptPath and decryptPath", async () => { + 
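The test body below drives encryptPath and decryptPath with an HKDF and AES-GCM pair from WebCrypto. The property being exercised is that each component is encrypted under a key derived from all of the preceding plaintext components, so a component's ciphertext depends on its whole prefix. A deliberately toy, non-cryptographic sketch of that chaining (illustrative only; numeric keys and byte shifting stand in for real primitives, and the import path is assumed):

import { decryptPath, encryptPath } from "./encryption.ts";

const toy = {
  key: 1,
  encryptFn: (key: number, bytes: Uint8Array) =>
    Promise.resolve(bytes.map((b) => (b + key) % 256)),
  decryptFn: (key: number, bytes: Uint8Array) =>
    Promise.resolve(bytes.map((b) => ((b - key) % 256 + 256) % 256)),
  deriveKey: (key: number, component: Uint8Array) =>
    Promise.resolve(key + component.length),
};

const [encrypted] = await encryptPath(toy, [
  new Uint8Array([10]),
  new Uint8Array([20, 30]),
]);
// encrypted is [[11], [22, 32]]: the first component is shifted by the root key (1),
// the second by the key derived from the first component (1 + 1 = 2).

const [decrypted] = await decryptPath(toy, encrypted);
// decrypted round-trips back to [[10], [20, 30]].
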
const key = await crypto.subtle.importKey( + "raw", + crypto.getRandomValues(new Uint8Array(64)), + "HKDF", + false, // KDF keys cannot be exported + ["deriveKey", "deriveBits"], + ); + + const iv = crypto.getRandomValues(new Uint8Array(16)); + + const encryptFn = async (key: CryptoKey, component: Uint8Array) => { + const encryptionKey = await crypto.subtle.deriveKey( + { + name: "HKDF", + hash: "SHA-256", + salt: new Uint8Array(), + info: new Uint8Array(), + }, + key, + { + name: "AES-GCM", + length: 256, + }, + true, + ["encrypt"], + ); + + const encrypted = await crypto.subtle.encrypt( + { + name: "AES-GCM", + iv, + }, + encryptionKey, + component, + ); + + return new Uint8Array(encrypted); + }; + + const deriveKey = async (key: CryptoKey, component: Uint8Array) => { + const bits = await crypto.subtle.deriveBits( + { + name: "HKDF", + hash: "SHA-256", + salt: component, + info: new Uint8Array(), + }, + key, + 64, + ); + + return crypto.subtle.importKey( + "raw", + bits, + "HKDF", + false, + ["deriveKey", "deriveBits"], + ); + }; + + const decryptFn = async ( + key: CryptoKey, + encrypted: Uint8Array, + ) => { + const decryptionKey = await crypto.subtle.deriveKey( + { + name: "HKDF", + hash: "SHA-256", + salt: new Uint8Array(), + info: new Uint8Array(), + }, + key, + { + name: "AES-GCM", + length: 256, + }, + true, + ["decrypt"], + ); + + const decrypted = await crypto.subtle.decrypt( + { + name: "AES-GCM", + iv, + }, + decryptionKey, + encrypted, + ); + + return new Uint8Array(decrypted); + }; + + for (const path of encryptPathVectors) { + const [encryptedPath] = await encryptPath({ + key, + encryptFn, + deriveKey, + }, path); + + const [decryptedPath] = await decryptPath({ + key, + decryptFn, + deriveKey, + }, encryptedPath); + + assertNotEquals(encryptedPath, path); + assertEquals(decryptedPath, path); + } +}); diff --git a/src/utils/encryption.ts b/src/utils/encryption.ts new file mode 100644 index 0000000..93666f4 --- /dev/null +++ b/src/utils/encryption.ts @@ -0,0 +1,107 @@ +import { Path } from "../../deps.ts"; + +export async function encryptPath(opts: { + key: EncryptionKey; + /** Output must be of size maximum component length, input ... just a path component or MORE? */ + encryptFn: (key: EncryptionKey, bytes: Uint8Array) => Promise; + deriveKey: ( + key: EncryptionKey, + component: Uint8Array, + ) => Promise; +}, path: Path): Promise<[Path, EncryptionKey]> { + if (path.length === 0) { + return [[], opts.key]; + } + + if (path.length === 1) { + const encryptedComponent = await encryptComponent( + { key: opts.key, encryptFn: opts.encryptFn }, + path[0], + ); + + const derivedKey = await opts.deriveKey(opts.key, path[0]); + + return [[encryptedComponent], derivedKey]; + } + + const [encryptedSoFar, derivedKey] = await encryptPath( + opts, + path.slice(0, path.length - 1), + ); + + const encryptedComponentLast = await encryptComponent({ + key: derivedKey, + encryptFn: opts.encryptFn, + }, path[path.length - 1]); + + const finalDerivedKey = await opts.deriveKey( + derivedKey, + path[path.length - 1], + ); + + return [[...encryptedSoFar, encryptedComponentLast], finalDerivedKey]; +} + +export function encryptComponent( + opts: { + key: EncryptionKey; + /** Output must be of size maximum component length, input ... just a path component or MORE? 
*/ + encryptFn: (key: EncryptionKey, bytes: Uint8Array) => Promise; + }, + component: Uint8Array, +): Promise { + return opts.encryptFn(opts.key, component); +} + +export async function decryptPath(opts: { + key: EncryptionKey; + /** Output must be of size maximum component length, input ... just a path component or MORE? */ + decryptFn: (key: EncryptionKey, bytes: Uint8Array) => Promise; + deriveKey: ( + key: EncryptionKey, + component: Uint8Array, + ) => Promise; +}, path: Path): Promise<[Path, EncryptionKey]> { + if (path.length === 0) { + return [[], opts.key]; + } + + if (path.length === 1) { + const decryptedComponent = await decryptComponent( + { key: opts.key, decryptFn: opts.decryptFn }, + path[0], + ); + + const derivedKey = await opts.deriveKey(opts.key, decryptedComponent); + + return [[decryptedComponent], derivedKey]; + } + + const [decryptedSoFar, derivedKey] = await decryptPath( + opts, + path.slice(0, path.length - 1), + ); + + const decryptedComponentLast = await decryptComponent({ + key: derivedKey, + decryptFn: opts.decryptFn, + }, path[path.length - 1]); + + const finalDerivedKey = await opts.deriveKey( + derivedKey, + decryptedComponentLast, + ); + + return [[...decryptedSoFar, decryptedComponentLast], finalDerivedKey]; +} + +export function decryptComponent( + opts: { + key: EncryptionKey; + /** Output must be of size maximum component length, input ... just a path component or MORE? */ + decryptFn: (key: EncryptionKey, bytes: Uint8Array) => Promise; + }, + component: Uint8Array, +): Promise { + return opts.decryptFn(opts.key, component); +} From f4c11c1df9dabe7fc935ccaecbd919368299e183 Mon Sep 17 00:00:00 2001 From: Sam Gwilym Date: Tue, 6 Feb 2024 13:35:09 +0100 Subject: [PATCH 3/7] Don't use --unstable flag in tasks --- deno.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deno.json b/deno.json index a259b60..abba864 100644 --- a/deno.json +++ b/deno.json @@ -1,7 +1,7 @@ { "tasks": { - "test": "deno test --unstable src", - "test-watch": "deno test --unstable src --watch", + "test": "deno test --unstable-kv src", + "test-watch": "deno test --unstable-kv src --watch", "bundle": "deno run --allow-all scripts/build_web_bundle.ts" }, "lock": false, From 04d4b8ed1f2d73f9f5daafc8fe7c720fcaaf88d1 Mon Sep 17 00:00:00 2001 From: Sam Gwilym Date: Tue, 6 Feb 2024 13:35:37 +0100 Subject: [PATCH 4/7] Add functions for de/encrypting at offset --- src/utils/encryption.test.ts | 5 ++- src/utils/encryption.ts | 62 +++++++++++++++++++++++++++++------- 2 files changed, 54 insertions(+), 13 deletions(-) diff --git a/src/utils/encryption.test.ts b/src/utils/encryption.test.ts index 83057e6..1c83241 100644 --- a/src/utils/encryption.test.ts +++ b/src/utils/encryption.test.ts @@ -117,7 +117,10 @@ Deno.test("encryptPath and decryptPath", async () => { deriveKey, }, encryptedPath); - assertNotEquals(encryptedPath, path); + if (path.length > 0) { + assertNotEquals(encryptedPath, path); + } + assertEquals(decryptedPath, path); } }); diff --git a/src/utils/encryption.ts b/src/utils/encryption.ts index 93666f4..a1131e6 100644 --- a/src/utils/encryption.ts +++ b/src/utils/encryption.ts @@ -1,14 +1,20 @@ import { Path } from "../../deps.ts"; -export async function encryptPath(opts: { - key: EncryptionKey; - /** Output must be of size maximum component length, input ... just a path component or MORE? 
*/ - encryptFn: (key: EncryptionKey, bytes: Uint8Array) => Promise; - deriveKey: ( - key: EncryptionKey, - component: Uint8Array, - ) => Promise; -}, path: Path): Promise<[Path, EncryptionKey]> { +/** Encrypt a `Path`. + * + * https://willowprotocol.org/specs/e2e/index.html#e2e_paths + */ +export async function encryptPath( + opts: { + key: EncryptionKey; + encryptFn: (key: EncryptionKey, bytes: Uint8Array) => Promise; + deriveKey: ( + key: EncryptionKey, + component: Uint8Array, + ) => Promise; + }, + path: Path, +): Promise<[Path, EncryptionKey]> { if (path.length === 0) { return [[], opts.key]; } @@ -45,7 +51,6 @@ export async function encryptPath(opts: { export function encryptComponent( opts: { key: EncryptionKey; - /** Output must be of size maximum component length, input ... just a path component or MORE? */ encryptFn: (key: EncryptionKey, bytes: Uint8Array) => Promise; }, component: Uint8Array, @@ -53,9 +58,12 @@ export function encryptComponent( return opts.encryptFn(opts.key, component); } +/** Decrypt a `Path`. + * + * https://willowprotocol.org/specs/e2e/index.html#e2e_paths + */ export async function decryptPath(opts: { key: EncryptionKey; - /** Output must be of size maximum component length, input ... just a path component or MORE? */ decryptFn: (key: EncryptionKey, bytes: Uint8Array) => Promise; deriveKey: ( key: EncryptionKey, @@ -98,10 +106,40 @@ export async function decryptPath(opts: { export function decryptComponent( opts: { key: EncryptionKey; - /** Output must be of size maximum component length, input ... just a path component or MORE? */ decryptFn: (key: EncryptionKey, bytes: Uint8Array) => Promise; }, component: Uint8Array, ): Promise { return opts.decryptFn(opts.key, component); } + +export function encryptPathAtOffset( + opts: { + key: EncryptionKey; + encryptFn: (key: EncryptionKey, bytes: Uint8Array) => Promise; + deriveKey: ( + key: EncryptionKey, + component: Uint8Array, + ) => Promise; + offset: number; + }, + path: Path, +): Promise<[Path, EncryptionKey]> { + const offsetPath = path.slice(0, opts.offset); + + return encryptPath(opts, offsetPath); +} + +export function decryptPathAtOffset(opts: { + key: EncryptionKey; + decryptFn: (key: EncryptionKey, bytes: Uint8Array) => Promise; + deriveKey: ( + key: EncryptionKey, + component: Uint8Array, + ) => Promise; + offset: number; +}, path: Path): Promise<[Path, EncryptionKey]> { + const offsetPath = path.slice(0, opts.offset); + + return decryptPath(opts, offsetPath); +} From 66df00224cd0f5e70fb99b6a9e861c99140553ed Mon Sep 17 00:00:00 2001 From: Sam Gwilym Date: Tue, 6 Feb 2024 13:36:07 +0100 Subject: [PATCH 5/7] kv_store entry driver: use prefixed drivers for prefix iterators, 3d storage --- src/replica/storage/entry_drivers/kv_store.ts | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/src/replica/storage/entry_drivers/kv_store.ts b/src/replica/storage/entry_drivers/kv_store.ts index 21e9347..f8b2dc8 100644 --- a/src/replica/storage/entry_drivers/kv_store.ts +++ b/src/replica/storage/entry_drivers/kv_store.ts @@ -11,6 +11,7 @@ import { PayloadScheme, SubspaceScheme, } from "../../types.ts"; +import { PrefixedDriver } from "../kv/prefixed_driver.ts"; import { KvDriver } from "../kv/types.ts"; import { SimpleKeyIterator } from "../prefix_iterators/simple_key_iterator.ts"; import { PrefixIterator } from "../prefix_iterators/types.ts"; @@ -81,19 +82,27 @@ export class EntryDriverKvStore< this.fingerprintScheme = opts.fingerprintScheme; this.kvDriver = opts.kvDriver; - 
this.prefixIterator = new SimpleKeyIterator(this.kvDriver); + + const prefixedKvDriver = new PrefixedDriver(["prefix"], this.kvDriver); + + this.prefixIterator = new SimpleKeyIterator(prefixedKvDriver); } makeStorage( namespace: NamespaceKey, ): Storage3d { + const prefixedStorageDriver = new PrefixedDriver( + ["entries"], + this.kvDriver, + ); + return new TripleStorage({ namespace, createSummarisableStorage: ( monoid: LiftingMonoid, ) => { return new Skiplist({ - kv: this.kvDriver, + kv: prefixedStorageDriver, monoid, compare: orderBytes, }); From f0e1df3908362fef95cc8a9c4d71ce7b4b488879 Mon Sep 17 00:00:00 2001 From: Sam Gwilym Date: Tue, 6 Feb 2024 13:53:23 +0100 Subject: [PATCH 6/7] Update Deno version for CI --- .github/workflows/deno.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deno.yml b/.github/workflows/deno.yml index dc266c3..4aca536 100644 --- a/.github/workflows/deno.yml +++ b/.github/workflows/deno.yml @@ -12,7 +12,7 @@ jobs: strategy: matrix: - deno-version: ["1.37.1"] + deno-version: ["1.40.3"] steps: - uses: actions/checkout@v2 From 830984f4a2bcf23aec850838ce6969818259adcd Mon Sep 17 00:00:00 2001 From: Sam Gwilym Date: Tue, 6 Feb 2024 14:44:24 +0100 Subject: [PATCH 7/7] Fix storage3d.summarise test Expected fingerprints were being ordered differently than they should have due to not using a proper encoding. --- .../storage/storage_3d/storage_3d.test.ts | 24 +++++++++++-------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/src/replica/storage/storage_3d/storage_3d.test.ts b/src/replica/storage/storage_3d/storage_3d.test.ts index 2fdb071..94fe823 100644 --- a/src/replica/storage/storage_3d/storage_3d.test.ts +++ b/src/replica/storage/storage_3d/storage_3d.test.ts @@ -32,6 +32,7 @@ import { MonoidRbTree } from "../summarisable_storage/monoid_rbtree.ts"; import { TripleStorage } from "./triple_storage.ts"; import { Storage3d } from "./types.ts"; import { assertEquals } from "https://deno.land/std@0.202.0/assert/assert_equals.ts"; +import { encodePathWithSeparators } from "../../util.ts"; export type Storage3dScenario< NamespaceKey, @@ -173,7 +174,7 @@ Deno.test("Storage3d.summarise", async () => { ) { const newFingerprint = [...a]; - // Remove dupleicates + // Remove duplicates for (const element of b) { const existing = newFingerprint.find( @@ -182,10 +183,10 @@ Deno.test("Storage3d.summarise", async () => { if (subspaceA !== subspaceB) return false; if (orderPath(pathA, pathB) !== 0) return false; - if (orderTimestamp(timestampA, timestampB) !== 0) { + if (timestampA !== timestampB) { return false; } - if (orderTimestamp(lengthA, lengthB) !== 0) { + if (lengthA !== lengthB) { return false; } @@ -208,16 +209,16 @@ Deno.test("Storage3d.summarise", async () => { if (subspaceA > subspaceB) return 1; if (orderPath(pathA, pathB) === -1) return -1; if (orderPath(pathA, pathB) === 1) return 1; - if (orderTimestamp(timestampA, timestampB) === -1) { + if (timestampA < timestampB) { return -1; } - if (orderTimestamp(timestampA, timestampB) === 1) { + if (timestampA > timestampB) { return 1; } - if (orderTimestamp(lengthA, lengthB) === -1) { + if (lengthA < lengthB) { return -1; } - if (orderTimestamp(lengthA, lengthB) === 1) { + if (lengthA > lengthB) { return 1; } @@ -459,12 +460,12 @@ Deno.test("Storage3d.summarise", async () => { entries.sort((a, b) => { const aKey = concat( new Uint8Array([a.subspaceId]), - ...a.path, + encodePathWithSeparators(a.path), bigintToBytes(a.timestamp), ); const bKey = concat( new 
Uint8Array([b.subspaceId]), - ...b.path, + encodePathWithSeparators(b.path), bigintToBytes(b.timestamp), ); @@ -517,7 +518,10 @@ Deno.test("Storage3d.summarise", async () => { const actual = await storage.summarise(aoi); const expected = actualFingerprintMap.get(aoi)!; - assertEquals(actual.fingerprint, expected.fingerprint); + assertEquals( + actual.fingerprint, + expected.fingerprint, + ); assertEquals(actual.size, expected.count); let actualPayloadSize = BigInt(0);
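The fix sorts the expected entries by the same byte key the spt index stores them under: encoded subspace, then the path encoded with separators, then the big-endian timestamp, so that the expected fingerprint is built in the same order the storage itself visits entries. A compact sketch of such a comparator (illustrative only; it assumes orderBytes from deps.ts as the byte-wise total order, single-byte subspace ids as in this test, and import paths matching this test file's location):

import { bigintToBytes, concat, orderBytes } from "../../../../deps.ts";
import { encodePathWithSeparators } from "../../util.ts";

type TestEntry = { subspaceId: number; path: Uint8Array[]; timestamp: bigint };

function compareBySptKey(a: TestEntry, b: TestEntry): number {
  const key = (e: TestEntry) =>
    concat(
      new Uint8Array([e.subspaceId]),
      encodePathWithSeparators(e.path),
      bigintToBytes(e.timestamp),
    );

  return orderBytes(key(a), key(b));
}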