diff --git a/src/app/neural-network/serialization/binary.ts b/src/app/neural-network/serialization/binary.ts
index d8ea705..2763b0f 100644
--- a/src/app/neural-network/serialization/binary.ts
+++ b/src/app/neural-network/serialization/binary.ts
@@ -2,7 +2,7 @@ import {ILoss, IModel, IOptimizer} from "../engine/base";
 import {OptimizerT} from "../engine/optimizers";
 import {LossT} from "../engine/loss";
 import {Matrix1D, Matrix2D} from "../engine/matrix";
-import {ChunkedArrayBuffer, TypedArray, TypedArrayT} from "../utils/array-buffer";
+import {ChunkedArrayBuffer, TypedArrayT} from "../utils/array-buffer";
 import {Activations, Layers, Matrix, Models, ModelSerialization} from "../neural-network";
 import {SerializationEntry, SerializedParams} from "./base";
 
@@ -53,7 +53,7 @@ export type TensorConfigHeader = {
 }
 
 export class BinarySerializer {
-    static save(model: IModel, dataType = TensorType.F32): ArrayBuffer {
+    static save(model: IModel, dataType = TensorType.F64): ArrayBuffer {
         if (!model.isCompiled) throw new Error("Model should be compiled");
 
         const tensorsHeader: TensorConfigHeader = {};
@@ -119,6 +119,11 @@
         const resultChunks = [headerSize, headerBytes].concat(dataChunks);
 
         const chunkedArray = new ChunkedArrayBuffer(resultChunks.map(c => c.buffer));
+
+        if (dataChunks.reduce((p, c) => p + c.byteLength, 0) !== tensorsHeader[metadata.layers[metadata.layers.length - 1].weightsKey].offsets[1]) {
+            throw new Error("Serialized data size doesn't match tensor offsets in header");
+        }
+
         return chunkedArray.toTypedArray(Uint8Array).buffer;
     }
 
@@ -140,22 +145,38 @@
         const model = new modelT(optimizer, loss);
 
         const dataOffset = BigInt64Array.BYTES_PER_ELEMENT + metaSize;
-        const tensorsDataArray = new ChunkedArrayBuffer([data]).createTypedArray(Float64Array, dataOffset);
+        const tensorsDataArray = new ChunkedArrayBuffer([data], dataOffset);
 
         let layerIndex = 0;
         for (const layerConf of headerMeta.layers) {
             const biasesMeta = headerTensors[layerConf.biasesKey];
-            const biases = tensorsDataArray.subarray(
-                biasesMeta.offsets[0] / Float64Array.BYTES_PER_ELEMENT,
-                biasesMeta.offsets[1] / Float64Array.BYTES_PER_ELEMENT
+            const biasesDataType = this._getArrayT(biasesMeta.dtype);
+
+            const biasesExpectedSize = biasesMeta.shape[0];
+            const biases = tensorsDataArray.createTypedArray(
+                biasesDataType,
+                biasesMeta.offsets[0],
+                biasesExpectedSize
             );
 
+            if (biases.length !== biasesExpectedSize) {
+                throw new Error(`Not enough data for layer ${layerIndex} biases. Read ${biases.length} of ${biasesExpectedSize}`);
+            }
+
             const weightsMeta = headerTensors[layerConf.weightsKey];
-            const weightsData = tensorsDataArray.subarray(
-                weightsMeta.offsets[0] / Float64Array.BYTES_PER_ELEMENT,
-                weightsMeta.offsets[1] / Float64Array.BYTES_PER_ELEMENT
+            const weightsDataType = this._getArrayT(weightsMeta.dtype);
+
+            const weightExpectedSize = weightsMeta.shape[0] * weightsMeta.shape[1];
+            const weightsData = tensorsDataArray.createTypedArray(
+                weightsDataType,
+                weightsMeta.offsets[0],
+                weightExpectedSize
             );
 
+            if (weightsData.length !== weightExpectedSize) {
+                throw new Error(`Not enough data for layer ${layerIndex} weights. Read ${weightsData.length} of ${weightExpectedSize}`);
+            }
+
             const prevSize = layerIndex > 0 ? headerMeta.layers[layerIndex - 1].size : 0
             const weights = Matrix.fill(
                 i => weightsData.subarray(i * prevSize, (i + 1) * prevSize),
@@ -196,7 +217,7 @@
         return chunks;
     }
 
-    private static _getArrayT(dataType: TensorType): TypedArrayT<TypedArray> {
+    private static _getArrayT(dataType: TensorType): TypedArrayT {
         switch (dataType) {
             case TensorType.F32:
                 return Float32Array;
diff --git a/src/app/neural-network/utils/array-buffer.ts b/src/app/neural-network/utils/array-buffer.ts
index 7f73e20..d94abfb 100644
--- a/src/app/neural-network/utils/array-buffer.ts
+++ b/src/app/neural-network/utils/array-buffer.ts
@@ -17,7 +17,7 @@ export class ChunkedArrayBuffer {
             this._addChunk(chunk);
         }
 
-        byteLength = byteLength > 0 ? byteLength : this.byteLength;
+        byteLength = byteLength >= 0 ? byteLength : this.byteLength;
         this.bytesOffset = Math.max(0, Math.min(bytesOffset, this.chunks[0].size));
         this.byteLength = Math.max(0, Math.min(this.byteLength - this.bytesOffset, byteLength));
     }
@@ -52,7 +52,7 @@
     createTypedArray<T extends TypedArray>(type: TypedArrayT<T>, bytesOffset = 0, count = -1): T {
         const itemSize = type.BYTES_PER_ELEMENT;
 
-        const totalSize = count > 0 ? count : Math.floor((this.byteLength - bytesOffset) / itemSize);
+        const totalSize = count >= 0 ? count : Math.floor((this.byteLength - bytesOffset) / itemSize);
         return this.slice(bytesOffset, totalSize * itemSize).toTypedArray(type);
     }
 
diff --git a/src/app/pages/demo3/demo3.component.ts b/src/app/pages/demo3/demo3.component.ts
index 08dbbd9..30807ad 100644
--- a/src/app/pages/demo3/demo3.component.ts
+++ b/src/app/pages/demo3/demo3.component.ts
@@ -93,6 +93,8 @@ export class Demo3Component implements AfterViewInit {
     }
 
     resetDrawing() {
+        this.drawingContext.lineCap = "round";
+        this.drawingContext.lineJoin = "round";
         this.drawingContext.fillStyle = "white";
         this.drawingContext.fillRect(0, 0, this.drawingCanvasRef.nativeElement.width, this.drawingCanvasRef.nativeElement.height);
     }
diff --git a/tests/fixture/common.ts b/tests/fixture/common.ts
new file mode 100644
index 0000000..790ebf2
--- /dev/null
+++ b/tests/fixture/common.ts
@@ -0,0 +1,4 @@
+export const RandomSimpleMockData = [
+    0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9,
+    0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1, 0,
+];
\ No newline at end of file
diff --git a/tests/model.test.ts b/tests/model.test.ts
index eba8c46..a130aad 100644
--- a/tests/model.test.ts
+++ b/tests/model.test.ts
@@ -1,5 +1,6 @@
 import * as Models from "./mock/models";
 import {SetupMockRandom} from "./mock/common";
+import {RandomSimpleMockData} from "./fixture/common";
 import {
     SequentialModel,
     Dense,
@@ -8,10 +9,7 @@ import {
 } from "../src/app/neural-network/neural-network";
 import * as ArrayUtils from "./utils/array";
 
-SetupMockRandom([
-    0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9,
-    0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1, 0,
-], true);
+SetupMockRandom(RandomSimpleMockData, true);
 
 const TrainInput = [0.1, 0.5];
 const TrainExpected = [0.5, 0.1];
diff --git a/tests/serialization.test.ts b/tests/serialization.test.ts
index 5a49275..d58344c 100644
--- a/tests/serialization.test.ts
+++ b/tests/serialization.test.ts
@@ -1,3 +1,5 @@
+import {SetupMockRandom} from "./mock/common";
+import {RandomSimpleMockData} from "./fixture/common";
 import * as ModelsMock from "./mock/models";
 import * as ArrayUtils from "./utils/array";
 
@@ -20,10 +22,9 @@ import {
     TensorConfigHeader,
     TensorType
 } from "../src/app/neural-network/serialization/binary";
+
"../src/app/neural-network/serialization/utils"; import {ChunkedArrayBuffer} from "../src/app/neural-network/utils/array-buffer"; -import {min} from "rxjs/operators"; -import {AbstractMomentAcceleratedOptimizer} from "../src/app/neural-network/engine/optimizers"; describe("Should correctly serialize model", () => { test("Sequential", () => { @@ -180,6 +181,9 @@ describe("Should fail when invalid data passed", () => { describe("Binary serialization", () => { + const eps = 1e-5; + SetupMockRandom(RandomSimpleMockData, true); + test("Should correctly serialize to binary format", () => { const model = new SequentialModel() .addLayer(new Dense(2)) @@ -247,15 +251,27 @@ describe("Binary serialization", () => { const model = ModelsMock.sequential(); model.compile(); - const sModel = BinarySerializer.load(BinarySerializer.save(model, TensorType.F64)); + const sModel = BinarySerializer.load(BinarySerializer.save(model, tensorType)); for (let i = 0; i < model.layers.length; i++) { expect(sModel.layers[i].activation).toEqual(model.layers[i].activation); - ArrayUtils.arrayCloseTo(sModel.layers[i].biases, model.layers[i].biases) - ArrayUtils.arrayCloseTo_2d(sModel.layers[i].weights, model.layers[i].weights) + ArrayUtils.arrayCloseTo(sModel.layers[i].biases, model.layers[i].biases, eps) + ArrayUtils.arrayCloseTo_2d(sModel.layers[i].weights, model.layers[i].weights, eps) } }) }) + test("Should serialize/deserialize chain model", () => { + const model = ModelsMock.chain(); + model.compile(); + + const sModel = BinarySerializer.load(BinarySerializer.save(model)); + for (let i = 0; i < model.layers.length; i++) { + expect(sModel.layers[i].activation).toEqual(model.layers[i].activation); + ArrayUtils.arrayCloseTo(sModel.layers[i].biases, model.layers[i].biases, eps) + ArrayUtils.arrayCloseTo_2d(sModel.layers[i].weights, model.layers[i].weights, eps) + } + }); + test.failing("Should fail if model isn't compiled", () => { const model = new SequentialModel() .addLayer(new Dense(2)) @@ -263,4 +279,12 @@ describe("Binary serialization", () => { BinarySerializer.save(model); }) -}) \ No newline at end of file + + test.failing("Should fail if not enough data", () => { + const model = ModelsMock.sequential(); + model.compile(); + + const data = BinarySerializer.save(model); + BinarySerializer.load(data.slice(0, data.byteLength - 1)); + }); +}); \ No newline at end of file