Skip to content

Commit

Permalink
Fixes #84 Unable to use record.data.blob() for files > 10KB
Browse files Browse the repository at this point in the history
Signed-off-by: Frank Hinek <[email protected]>
  • Loading branch information
frankhinek committed May 23, 2023
1 parent 97a761b commit 382ec71
Show file tree
Hide file tree
Showing 4 changed files with 286 additions and 34 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ describe('Web5UserAgent', () => {
expect(response.reply.status).to.exist;
expect(response.reply.entries).to.exist;
expect(response.reply.status.code).to.equal(200);
});
}).timeout(10_000);

it('handles RecordsDelete Messages', async () => {
const { did: aliceDid } = await testAgent.createProfile({
Expand Down
23 changes: 2 additions & 21 deletions packages/web5/src/dwn-api.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
import type { Web5Agent } from '@tbd54566975/web5-agent';
import type {
MessageReply,
ProtocolDefinition,
ProtocolsConfigureDescriptor,
ProtocolsConfigureOptions,
ProtocolsQueryOptions,
RecordsDeleteOptions,
RecordsQueryOptions,
RecordsQueryReplyEntry,
RecordsReadOptions,
RecordsWriteDescriptor,
RecordsWriteMessage,
RecordsWriteOptions,
ProtocolsConfigureMessage
Expand All @@ -19,15 +19,6 @@ import { Record } from './record.js';
import { Protocol } from './protocol.js';
import { dataToBlob, isEmptyObject } from './utils.js';

// TODO: Export type ProtocolsConfigureDescriptor from dwn-sdk-js.
export type ProtocolsConfigureDescriptor = {
dateCreated: string;
definition: ProtocolDefinition;
interface : DwnInterfaceName.Protocols;
method: DwnMethodName.Configure;
protocol: string;
};

export type ProtocolsConfigureRequest = {
message: Omit<ProtocolsConfigureOptions, 'authorizationSignatureInput'>;
}
Expand Down Expand Up @@ -71,16 +62,6 @@ export type RecordsDeleteResponse = {
status: MessageReply['status'];
};

// TODO: Export type RecordsQueryReplyEntry and EncryptionProperty from dwn-sdk-js.
export type RecordsQueryReplyEntry = {
recordId: string,
contextId?: string;
descriptor: RecordsWriteDescriptor;
encryption?: RecordsWriteMessage['encryption'];
encodedData?: string;
};


export type RecordsQueryRequest = {
/** The from property indicates the DID to query from and return results. */
from?: string;
Expand Down
25 changes: 13 additions & 12 deletions packages/web5/src/record.ts
Original file line number Diff line number Diff line change
Expand Up @@ -103,8 +103,8 @@ export class Record implements RecordModel {
if (this.isDeleted) throw new Error('Operation failed: Attempted to access `data` of a record that has already been deleted.');

if (!this.#encodedData && !this.#readableStream) {
// `encodedData` will be set if `dataSize` <= DwnConstant.maxDataSizeAllowedToBeEncoded. (10KB as of April 2023)
// `readableStream` will be set if Record was instantiated from a RecordsRead reply.
// `encodedData` will be set if the Record was instantiated by dwn.records.create()/write().
// `readableStream` will be set if Record was instantiated by dwn.records.read().
// If neither of the above are true, then the record must be fetched from the DWN.
this.#readableStream = this.#web5Agent.processDwnRequest({
author : this.author,
Expand All @@ -119,34 +119,35 @@ export class Record implements RecordModel {

if (typeof this.#encodedData === 'string') {
// If `encodedData` is set, then it is expected that:
// `dataSize` <= DwnConstant.maxDataSizeAllowedToBeEncoded (10KB as of April 2023)
// type is Uint8Array bytes if the Record object was instantiated from a RecordsWrite response
// type is Base64 URL encoded string if the Record object was instantiated from a RecordsQuery response
// If it is a string, we need to Base64 URL decode to bytes
// type is Blob if the Record object was instantiated by dwn.records.create()/write().
// type is Base64 URL encoded string if the Record object was instantiated by dwn.records.query().
// If it is a string, we need to Base64 URL decode to bytes and instantiate a Blob.
const dataBytes = Encoder.base64UrlToBytes(this.#encodedData);
this.#encodedData = new Blob([dataBytes], { type: this.dataFormat });
}

// Explicitly cast #encodedData as a Blob since if non-null, it has been converted from string to Blob.
const dataBlob = this.#encodedData as Blob;

// eslint-disable-next-line @typescript-eslint/no-this-alias
const self = this; // Capture the context of the `Record` instance.
const dataBlob = this.#encodedData as Blob;
const dataObj = {
async blob(): Promise<Blob> {
if (self.#encodedData) return self.#encodedData as Blob;
if (self.#readableStream) return new Blob([this.stream().then(DataStream.toBytes)], { type: self.dataFormat });
if (dataBlob) return dataBlob;
if (self.#readableStream) return new Blob([await this.stream().then(DataStream.toBytes)], { type: self.dataFormat });
},
async json() {
if (self.#encodedData) return this.text().then(JSON.parse);
if (dataBlob) return this.text().then(JSON.parse);
if (self.#readableStream) return this.text().then(JSON.parse);
return null;
},
async text() {
if (self.#encodedData) return dataBlob.text();
if (dataBlob) return dataBlob.text();
if (self.#readableStream) return this.stream().then(DataStream.toBytes).then(Encoder.bytesToString);
return null;
},
async stream() {
if (self.#encodedData) return new ReadableWebToNodeStream(dataBlob.stream());
if (dataBlob) return new ReadableWebToNodeStream(dataBlob.stream());
if (self.#readableStream) return self.#readableStream;
return null;
},
Expand Down
270 changes: 270 additions & 0 deletions packages/web5/tests/record.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -228,6 +228,276 @@ describe('Record', () => {
});
});

describe('record.data', () => {
  // Shared helpers for byte-level payload comparisons.
  // Serializes a JSON value to its UTF-8 byte representation.
  const jsonToBytes = (value: unknown): Uint8Array =>
    new TextEncoder().encode(JSON.stringify(value));
  // Extracts the raw bytes from a Blob.
  const blobToBytes = async (blob: Blob): Promise<Uint8Array> =>
    new Uint8Array(await blob.arrayBuffer());

  describe('blob()', () => {
    it('returns small data payloads after dwn.records.write()', async () => {
      // A 500B payload stays below the DWN encoded-data limit, so no
      // RecordsRead round-trip is required when record.data.blob() runs.
      const inputJson = TestDataGenerator.randomJson(500);
      const expectedBytes = jsonToBytes(inputJson);

      // Write the 500B record to the agent-connected DWN.
      const { record, status } = await dwn.records.write({ data: inputJson });
      expect(status.code).to.equal(202);

      // The Blob must match the original payload in size and, byte for byte, in content.
      const blob = await record!.data.blob();
      expect(blob.size).to.equal(expectedBytes.length);
      expect(await blobToBytes(blob)).to.deep.equal(expectedBytes);
    });

    it('returns small data payloads after dwn.records.read()', async () => {
      // A 500B payload stays below the DWN encoded-data limit, so no
      // RecordsRead round-trip is required when record.data.blob() runs.
      const inputJson = TestDataGenerator.randomJson(500);
      const expectedBytes = jsonToBytes(inputJson);

      // Write the 500B record to the agent-connected DWN.
      const { record, status } = await dwn.records.write({ data: inputJson });
      expect(status.code).to.equal(202);

      // Fetch the record that was just written.
      const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ message: { recordId: record!.id }});
      expect(readRecordStatus.code).to.equal(200);

      // The Blob must match the original payload in size and, byte for byte, in content.
      const blob = await readRecord.data.blob();
      expect(blob.size).to.equal(expectedBytes.length);
      expect(await blobToBytes(blob)).to.deep.equal(expectedBytes);
    });

    it('returns large data payloads after dwn.records.write()', async () => {
      // An 11KB payload exceeds the DWN encoded-data limit, forcing a
      // RecordsRead round-trip when record.data.blob() runs.
      const inputJson = TestDataGenerator.randomJson(11_000);
      const expectedBytes = jsonToBytes(inputJson);

      // Write the 11KB record to the agent-connected DWN.
      const { record, status } = await dwn.records.write({ data: inputJson });
      expect(status.code).to.equal(202);

      // The Blob must match the original payload in size and, byte for byte, in content.
      const blob = await record!.data.blob();
      expect(blob.size).to.equal(expectedBytes.length);
      expect(await blobToBytes(blob)).to.deep.equal(expectedBytes);
    });

    it('returns large data payloads after dwn.records.read()', async () => {
      // An 11KB payload exceeds the DWN encoded-data limit, forcing a
      // RecordsRead round-trip when record.data.blob() runs.
      const inputJson = TestDataGenerator.randomJson(11_000);
      const expectedBytes = jsonToBytes(inputJson);

      // Write the 11KB record to the agent-connected DWN.
      const { record, status } = await dwn.records.write({ data: inputJson });
      expect(status.code).to.equal(202);

      // Fetch the record that was just written.
      const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ message: { recordId: record!.id }});
      expect(readRecordStatus.code).to.equal(200);

      // The Blob must match the original payload in size and, byte for byte, in content.
      const blob = await readRecord.data.blob();
      expect(blob.size).to.equal(expectedBytes.length);
      expect(await blobToBytes(blob)).to.deep.equal(expectedBytes);
    });
  });

  describe('json()', () => {
    it('returns small data payloads after dwn.records.write()', async () => {
      // A 500B payload stays below the DWN encoded-data limit, so no
      // RecordsRead round-trip is required when record.data.json() runs.
      const inputJson = TestDataGenerator.randomJson(500);
      const expectedBytes = jsonToBytes(inputJson);

      // Write the 500B record to the agent-connected DWN.
      const { record, status } = await dwn.records.write({ data: inputJson });
      expect(status.code).to.equal(202);

      // Round-trip the parsed JSON back to bytes and compare with the input,
      // first by length and then byte for byte.
      const actualBytes = jsonToBytes(await record!.data.json());
      expect(actualBytes.length).to.equal(expectedBytes.length);
      expect(actualBytes).to.deep.equal(expectedBytes);
    });

    it('returns small data payloads after dwn.records.read()', async () => {
      // A 500B payload stays below the DWN encoded-data limit, so no
      // RecordsRead round-trip is required when record.data.json() runs.
      const inputJson = TestDataGenerator.randomJson(500);
      const expectedBytes = jsonToBytes(inputJson);

      // Write the 500B record to the agent-connected DWN.
      const { record, status } = await dwn.records.write({ data: inputJson });
      expect(status.code).to.equal(202);

      // Fetch the record that was just written.
      const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ message: { recordId: record!.id }});
      expect(readRecordStatus.code).to.equal(200);

      // Round-trip the parsed JSON back to bytes and compare with the input,
      // first by length and then byte for byte.
      const actualBytes = jsonToBytes(await readRecord!.data.json());
      expect(actualBytes.length).to.equal(expectedBytes.length);
      expect(actualBytes).to.deep.equal(expectedBytes);
    });

    it('returns large data payloads after dwn.records.write()', async () => {
      // An 11KB payload exceeds the DWN encoded-data limit, forcing a
      // RecordsRead round-trip when record.data.json() runs.
      const inputJson = TestDataGenerator.randomJson(11_000);
      const expectedBytes = jsonToBytes(inputJson);

      // Write the 11KB record to the agent-connected DWN.
      const { record, status } = await dwn.records.write({ data: inputJson });
      expect(status.code).to.equal(202);

      // Round-trip the parsed JSON back to bytes and compare with the input,
      // first by length and then byte for byte.
      const actualBytes = jsonToBytes(await record!.data.json());
      expect(actualBytes.length).to.equal(expectedBytes.length);
      expect(actualBytes).to.deep.equal(expectedBytes);
    });

    it('returns large data payloads after dwn.records.read()', async () => {
      // An 11KB payload exceeds the DWN encoded-data limit, forcing a
      // RecordsRead round-trip when record.data.json() runs.
      const inputJson = TestDataGenerator.randomJson(11_000);
      const expectedBytes = jsonToBytes(inputJson);

      // Write the 11KB record to the agent-connected DWN.
      const { record, status } = await dwn.records.write({ data: inputJson });
      expect(status.code).to.equal(202);

      // Fetch the record that was just written.
      const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ message: { recordId: record!.id }});
      expect(readRecordStatus.code).to.equal(200);

      // Round-trip the parsed JSON back to bytes and compare with the input,
      // first by length and then byte for byte.
      const actualBytes = jsonToBytes(await readRecord!.data.json());
      expect(actualBytes.length).to.equal(expectedBytes.length);
      expect(actualBytes).to.deep.equal(expectedBytes);
    });
  });

  describe('text()', () => {
    it('returns small data payloads after dwn.records.write()', async () => {
      // A 500B payload stays below the DWN encoded-data limit, so no
      // RecordsRead round-trip is required when record.data.text() runs.
      const inputText = TestDataGenerator.randomString(500);

      // Write the 500B record to the agent-connected DWN.
      const { record, status } = await dwn.records.write({ data: inputText });
      expect(status.code).to.equal(202);

      // The text read back must match the input in length and char for char.
      const actualText = await record!.data.text();
      expect(actualText.length).to.equal(inputText.length);
      expect(actualText).to.deep.equal(inputText);
    });

    it('returns small data payloads after dwn.records.read()', async () => {
      // A 500B payload stays below the DWN encoded-data limit, so no
      // RecordsRead round-trip is required when record.data.text() runs.
      const inputText = TestDataGenerator.randomString(500);

      // Write the 500B record to the agent-connected DWN.
      const { record, status } = await dwn.records.write({ data: inputText });
      expect(status.code).to.equal(202);

      // Fetch the record that was just written.
      const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ message: { recordId: record!.id }});
      expect(readRecordStatus.code).to.equal(200);

      // The text read back must match the input in length and char for char.
      const actualText = await readRecord!.data.text();
      expect(actualText.length).to.equal(inputText.length);
      expect(actualText).to.deep.equal(inputText);
    });

    it('returns large data payloads after dwn.records.write()', async () => {
      // An 11KB payload exceeds the DWN encoded-data limit, forcing a
      // RecordsRead round-trip when record.data.text() runs.
      const inputText = TestDataGenerator.randomString(11_000);

      // Write the 11KB record to the agent-connected DWN.
      const { record, status } = await dwn.records.write({ data: inputText });
      expect(status.code).to.equal(202);

      // The text read back must match the input in length and char for char.
      const actualText = await record!.data.text();
      expect(actualText.length).to.equal(inputText.length);
      expect(actualText).to.deep.equal(inputText);
    });

    it('returns large data payloads after dwn.records.read()', async () => {
      // An 11KB payload exceeds the DWN encoded-data limit, forcing a
      // RecordsRead round-trip when record.data.text() runs.
      const inputText = TestDataGenerator.randomString(11_000);

      // Write the 11KB record to the agent-connected DWN.
      const { record, status } = await dwn.records.write({ data: inputText });
      expect(status.code).to.equal(202);

      // Fetch the record that was just written.
      const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ message: { recordId: record!.id }});
      expect(readRecordStatus.code).to.equal(200);

      // The text read back must match the input in length and char for char.
      const actualText = await readRecord!.data.text();
      expect(actualText.length).to.equal(inputText.length);
      expect(actualText).to.deep.equal(inputText);
    });
  });
});

describe('record.delete', () => {
it('deletes the record', async () => {
const { status, record } = await dwn.records.write({
Expand Down

0 comments on commit 382ec71

Please sign in to comment.