Revert accidental examples changes
This reverts commit e4d4bce.
DrA1ex committed Sep 20, 2023
1 parent 9928f1f commit a27721a
Showing 5 changed files with 130 additions and 214 deletions.
examples/package.json (11 changes: 1 addition & 10 deletions)
@@ -3,19 +3,10 @@
  "version": "0.0.0",
  "type": "module",
  "dependencies": {
-    "@mind-net.js/gpu": "^1.0.5",
    "@tensorflow/tfjs": "^4.11.0",
    "@tensorflow/tfjs-node-gpu": "^4.10.0",
    "brain.js": "^2.0.0-beta.23",
    "get-pixels": "^3.3.3",
    "jimp": "^0.22.10",
    "json-stream-stringify": "^3.1.0",
    "jszip": "^3.10.1",
-    "mind-net.js": "../lib"
-  },
-  "overrides": {
-    "gpu.js": {
-      "gl": "^6.0.2"
-    }
+    "mind-net.js": "latest"
  }
}
examples/src/benchmark.js (36 changes: 18 additions & 18 deletions)
@@ -55,29 +55,29 @@ brModel.train(brSingleData);
const trainOpts = {batchSize: BatchSize, epochs: 1, iterations: 1, progress: false};

for (let i = 0; i < 3; i++) {
-    // await TimeUtils.timeIt(() => pModel.compute(trainData, {batchSize: BatchSize}), `Worker.Compute (Full) #${i}`, ComputeIters / Count);
-    // await TimeUtils.timeIt(() => trainData.map(data => model.compute(data)), `Compute (Full) #${i}`, ComputeIters / Count);
-    // await TimeUtils.timeIt(() => tfModel.predict(tfTrainData), `TF.Compute (Full) #${i}`, ComputeIters / Count);
-    // await TimeUtils.timeIt(() => trainData.map(data => brModel.run(data)), `Brain.Compute (Full) #${i}`, ComputeIters / Count);
-    // console.log();
-
-    // await TimeUtils.timeIt(() => pModel.compute(singleTrainData), `Worker.Compute (Single) #${i}`, ComputeIters);
-    // await TimeUtils.timeIt(() => model.compute(singleTrainData[0]), `Compute (Single) #${i}`, ComputeIters);
-    // await TimeUtils.timeIt(() => tfModel.predict(tfSingleData), `TF.Compute (Single) #${i}`, ComputeIters);
-    // await TimeUtils.timeIt(() => brModel.run(singleTrainData[0]), `Brain.Compute (Single) #${i}`, ComputeIters);
-    // console.log();
+    await TimeUtils.timeIt(() => pModel.compute(trainData, {batchSize: BatchSize}), `Worker.Compute (Full) #${i}`, ComputeIters / Count);
+    await TimeUtils.timeIt(() => trainData.map(data => model.compute(data)), `Compute (Full) #${i}`, ComputeIters / Count);
+    await TimeUtils.timeIt(() => tfModel.predict(tfTrainData), `TF.Compute (Full) #${i}`, ComputeIters / Count);
+    await TimeUtils.timeIt(() => trainData.map(data => brModel.run(data)), `Brain.Compute (Full) #${i}`, ComputeIters / Count);
+    console.log();
+
+    await TimeUtils.timeIt(() => pModel.compute(singleTrainData), `Worker.Compute (Single) #${i}`, ComputeIters);
+    await TimeUtils.timeIt(() => model.compute(singleTrainData[0]), `Compute (Single) #${i}`, ComputeIters);
+    await TimeUtils.timeIt(() => tfModel.predict(tfSingleData), `TF.Compute (Single) #${i}`, ComputeIters);
+    await TimeUtils.timeIt(() => brModel.run(singleTrainData[0]), `Brain.Compute (Single) #${i}`, ComputeIters);
+    console.log();

    await TimeUtils.timeIt(() => pModel.train(trainData, trainData, trainOpts), `Worker.Train (Full) #${i}`, TrainIters);
    await TimeUtils.timeIt(() => model.train(trainData, trainData, trainOpts), `Train (Full) #${i}`, TrainIters);
-    // await TimeUtils.timeIt(() => tfModel.fit(tfTrainData, tfTrainData, trainOpts), `TF.Train (Full) #${i}`, TrainIters);
-    // await TimeUtils.timeIt(() => brModel.train(brTrainData, trainOpts), `Brain.Train (Full) #${i}`, TrainIters);
+    await TimeUtils.timeIt(() => tfModel.fit(tfTrainData, tfTrainData, trainOpts), `TF.Train (Full) #${i}`, TrainIters);
+    await TimeUtils.timeIt(() => brModel.train(brTrainData, trainOpts), `Brain.Train (Full) #${i}`, TrainIters);
    console.log();

-    // await TimeUtils.timeIt(() => pModel.train(singleTrainData, singleTrainData, trainOpts), `Worker.Train (Single) #${i}`, TrainIters * Count);
-    // await TimeUtils.timeIt(() => model.train(singleTrainData, singleTrainData, trainOpts), `Train (Single) #${i}`, TrainIters * Count);
-    // await TimeUtils.timeIt(() => tfModel.fit(tfSingleData, tfSingleData, trainOpts), `TF.Train (Single) #${i}`, TrainIters * Count);
-    // await TimeUtils.timeIt(() => brModel.train(brSingleData, trainOpts), `Brain.Train (Single) #${i}`, TrainIters * Count);
-    // console.log("\n");
+    await TimeUtils.timeIt(() => pModel.train(singleTrainData, singleTrainData, trainOpts), `Worker.Train (Single) #${i}`, TrainIters * Count);
+    await TimeUtils.timeIt(() => model.train(singleTrainData, singleTrainData, trainOpts), `Train (Single) #${i}`, TrainIters * Count);
+    await TimeUtils.timeIt(() => tfModel.fit(tfSingleData, tfSingleData, trainOpts), `TF.Train (Single) #${i}`, TrainIters * Count);
+    await TimeUtils.timeIt(() => brModel.train(brSingleData, trainOpts), `Brain.Train (Single) #${i}`, TrainIters * Count);
+    console.log("\n");
}

await pModel.terminate();
examples/src/cartoon_animation_example.js (6 changes: 3 additions & 3 deletions)
@@ -4,7 +4,7 @@ import {ModelSerialization, GanSerialization, Matrix, ImageUtils, ParallelModelW
import * as ModelUtils from "./utils/model.js";


const name = "2023-09-14T16:12:01.900Z_100";
const name = "2023-08-30T12:11:05.784Z_100";
const path = "./out/models";
const outPath = "./out/animation";

@@ -18,8 +18,8 @@ const ae = ModelSerialization.load(JSON.parse(aeDump.toString()));
const upscaler = ModelSerialization.load(JSON.parse(upscalerDump.toString()));
const gan = GanSerialization.load(JSON.parse(ganDump.toString()));

-const count = 10;
-const framesPerSample = 10;
+const count = 26;
+const framesPerSample = 3;
const scale = 2;

const channels = 3;
examples/src/cartoon_colorful_example.js (48 changes: 17 additions & 31 deletions)
@@ -10,18 +10,15 @@ import {
    ParallelGanWrapper,
    Matrix,
    ProgressUtils,
-    ImageUtils,
-    ColorUtils
+    ImageUtils
} from "mind-net.js";

-import {GpuModelWrapper, GpuGanWrapper} from "@mind-net.js/gpu"
-
import * as DatasetUtils from "./utils/dataset.js";
import * as ModelUtils from "./utils/model.js";


const DatasetUrl = "https://127.0.0.1:8080/datasets/cartoon_avatar_4000_32.zip";
const DatasetBigUrl = "https://127.0.0.1:8080/datasets/cartoon_avatar_4000_64.zip";
const DatasetUrl = "https://github.com/DrA1ex/mind-net.js/files/12407792/cartoon-2500-28.zip";
const DatasetBigUrl = "https://github.com/DrA1ex/mind-net.js/files/12398103/cartoon-2500-64.zip";

console.log("Fetching datasets...");

@@ -38,7 +35,7 @@ const [trainData, bigTrainData] = await Promise.all([
]);

// You can reduce dataset length
-const CNT = 4000;
+const CNT = 2500;
trainData.splice(CNT);
bigTrainData.splice(CNT);

@@ -50,20 +47,15 @@ const gsTrainData = ImageUtils.grayscaleDataset(trainData, imageChannel);
// Creating grayscale Upscaler training data from the big RGB training data
const upscaleTrainData = ImageUtils.grayscaleDataset(bigTrainData);

-for (const tData of trainData) {
-    ColorUtils.transformColorSpace(ColorUtils.tanhToRgb, tData, imageChannel, tData);
-    ColorUtils.transformColorSpace(ColorUtils.rgbToLab, tData, imageChannel, tData);
-    ColorUtils.transformColorSpace(ColorUtils.labToTanh, tData, imageChannel, tData);
-}

// Setting up necessary parameters and dimensions
-const inputDim = 64;
+const inputDim = 32;
const imageDim = trainData[0].length;
const gsImageDim = gsTrainData[0].length;
const upscaleImageDim = upscaleTrainData[0].length;

const epochs = 100;
-const batchSize = 128;
+const batchSize = 64;
const epochSamples = 10;
const finalSamples = 20;
const outPath = "./out";
@@ -77,8 +69,8 @@ const initializer = "xavier";

// Helper functions and models setup
const createOptimizer = (lr) => new AdamOptimizer({lr, decay, beta1: beta, eps: 1e-7});
-const createHiddenLayer = (size, activation = undefined) => new Dense(size, {
-    activation: activation ?? new LeakyReluActivation({alpha: 0.2}),
+const createHiddenLayer = (size) => new Dense(size, {
+    activation: new LeakyReluActivation({alpha: 0.2}),
    weightInitializer: initializer,
    options: {
        dropout,
@@ -88,22 +80,22 @@ const createHiddenLayer = (size, activation = undefined) => new Dense(size, {
});

// Creating the generator model
-const generator = new SequentialModel(createOptimizer(lr * 1.2), loss);
+const generator = new SequentialModel(createOptimizer(lr), loss);
generator.addLayer(new Dense(inputDim));
-generator.addLayer(createHiddenLayer(128, "relu"));
-generator.addLayer(createHiddenLayer(256, "relu"));
+generator.addLayer(createHiddenLayer(64));
+generator.addLayer(createHiddenLayer(128));
generator.addLayer(new Dense(imageDim, {activation: "tanh", weightInitializer: initializer}));

// Creating the discriminator model
const discriminator = new SequentialModel(createOptimizer(lr), loss);
discriminator.addLayer(new Dense(imageDim));
-discriminator.addLayer(createHiddenLayer(256));
discriminator.addLayer(createHiddenLayer(128));
+discriminator.addLayer(createHiddenLayer(64));
discriminator.addLayer(new Dense(1, {activation: "sigmoid", weightInitializer: initializer}));

// Creating the generative adversarial (GAN) model
const ganModel = new GenerativeAdversarialModel(generator, discriminator, createOptimizer(lr), loss);
-const pGan = new GpuGanWrapper(ganModel, {batchSize});
+const pGan = new ParallelGanWrapper(ganModel);

// Creating the variational autoencoder (AE) model
const ae = new SequentialModel(createOptimizer(lr), "mse");
@@ -116,7 +108,7 @@ ae.addLayer(new Dense(256, {activation: "relu", weightInitializer: initializer})
ae.addLayer(new Dense(gsImageDim, {activation: "tanh", weightInitializer: initializer}));
ae.compile();

-const pAe = new GpuModelWrapper(ae, {batchSize});
+const pAe = new ParallelModelWrapper(ae);

// Creating the Upscaler model
const upscaler = new SequentialModel(createOptimizer(lr), "mse");
@@ -126,7 +118,7 @@ upscaler.addLayer(new Dense(512, {activation: "relu", weightInitializer: initial
upscaler.addLayer(new Dense(upscaleImageDim, {activation: "tanh", weightInitializer: initializer}));
upscaler.compile();

-const pUpscaler = new GpuModelWrapper(upscaler, {batchSize});
+const pUpscaler = new ParallelModelWrapper(upscaler);

async function _filterWithAEBatch(inputs) {
    return ImageUtils.processMultiChannelDataParallel(pAe, inputs, imageChannel);
@@ -154,7 +146,7 @@ async function _saveModel() {
let quitRequested = false;
process.on("SIGINT", async () => quitRequested = true);

-//await Promise.all([pAe.init(), pUpscaler.init(), pGan.init()]);
+await Promise.all([pAe.init(), pUpscaler.init(), pGan.init()]);

console.log("Training...");

@@ -175,12 +167,6 @@ for (const _ of ProgressUtils.progress(epochs)) {

    // Saving a snapshot of the generator model's output
    const generatedImages = await pGan.compute(generatorInput);
-    for (const tData of generatedImages) {
-        ColorUtils.transformColorSpace(ColorUtils.tanhToLab, tData, imageChannel, tData);
-        ColorUtils.transformColorSpace(ColorUtils.labToRgb, tData, imageChannel, tData);
-        ColorUtils.transformColorSpace(ColorUtils.rgbToTanh, tData, imageChannel, tData);
-    }
-
    await ModelUtils.saveGeneratedModelsSamples(ganModel.epoch, outPath, generatedImages,
        {channel: imageChannel, count: epochSamples, time: false, prefix: "generated", scale: 4});

@@ -212,4 +198,4 @@
// Save trained models
await _saveModel();

-//await Promise.all([pAe.terminate(), pUpscaler.terminate(), pGan.terminate()]);
+await Promise.all([pAe.terminate(), pUpscaler.terminate(), pGan.terminate()]);
