From cd55c19e5c5820dcf58801b7d23063d2b890bc53 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 7 Nov 2017 11:21:10 -0600 Subject: [PATCH 01/89] WIP on complete rewrite to be stream based and modular --- .gitignore | 3 +- config/default.yml | 11 + lib/base_protocol_transform.js | 52 +++++ lib/cache/cache.js | 38 ++++ lib/cache/cache_debug.js | 61 ++++++ lib/cache/cache_membuf.js | 240 ++++++++++++++++++++++ lib/cache_fs.js | 65 +++--- lib/client/server_response_transform.js | 42 +--- lib/server/client_stream_processor.js | 187 +++++++++++++++++ lib/server/command_processor.js | 254 ++++++++++++++++++++++++ lib/server_v2.js | 73 +++++++ main.js | 29 ++- package.json | 5 +- test/cache_fs.js | 4 + test/server.js | 31 +-- 15 files changed, 1012 insertions(+), 83 deletions(-) create mode 100644 config/default.yml create mode 100644 lib/base_protocol_transform.js create mode 100644 lib/cache/cache.js create mode 100644 lib/cache/cache_debug.js create mode 100644 lib/cache/cache_membuf.js create mode 100644 lib/server/client_stream_processor.js create mode 100644 lib/server/command_processor.js create mode 100644 lib/server_v2.js diff --git a/.gitignore b/.gitignore index ee905dd..cbaa8fa 100644 --- a/.gitignore +++ b/.gitignore @@ -2,4 +2,5 @@ cache/ cache5.0/ coverage/ node_modules/ -.coveralls.yml \ No newline at end of file +.coveralls.yml +!lib/cache \ No newline at end of file diff --git a/config/default.yml b/config/default.yml new file mode 100644 index 0000000..4f22db5 --- /dev/null +++ b/config/default.yml @@ -0,0 +1,11 @@ +Cache: + module: "cache_membuf" + path: "lib/cache" + options: + cache_debug: + minFileSize: 100000 + maxFileSize: 1000000 + cache_membuf: + initialPageSize: 10000000 + growPageSize: 10000000 + minFreeBlockSize: 1024 \ No newline at end of file diff --git a/lib/base_protocol_transform.js b/lib/base_protocol_transform.js new file mode 100644 index 0000000..0a754d3 --- /dev/null +++ b/lib/base_protocol_transform.js @@ -0,0 +1,52 @@ +const assert = require('assert'); +const helpers = require('./helpers'); +const consts = require('./constants').Constants; +const Transform = require('stream').Transform; + +const MAX_HEADER_SIZE = consts.ID_SIZE; + +class BaseProtocolTransform extends Transform { + constructor() { + super(); + + this.headerBuf = Buffer.allocUnsafe(MAX_HEADER_SIZE); + this._init(); + } + + _init() { + this.didReadHeader = false; + this.headerData = {}; + } + + _transform(data, encoding, callback) { + if(this.didReadHeader) { + this._sendData(data, callback); + } + else { + this._emitHeader(data, callback); + } + } + + _emitHeader(data, callback) {} + + _sendData(data, callback) { + var len = Math.min(this.headerData.size - this.blobBytesRead, data.length); + this.blobBytesRead += len; + + if(len >= data.length) { + this.push(data); + callback(); + } + else { + this.push(data.slice(0, len)); + this._emitHeader(data.slice(len), callback); + } + + if(this.blobBytesRead === this.headerData.size) { + this._init(); + this.emit('dataEnd'); + } + } +} + +module.exports.Transform = BaseProtocolTransform; \ No newline at end of file diff --git a/lib/cache/cache.js b/lib/cache/cache.js new file mode 100644 index 0000000..bf2400f --- /dev/null +++ b/lib/cache/cache.js @@ -0,0 +1,38 @@ +'use strict'; + +class Cache { + constructor() {} + + getFileStream(type, guid, hash, callback) { + throw new Error("Not implemented!"); + } + + createPutTransaction(guid, hash, callback) { + throw new Error("Not implemented!"); + } + + endPutTransaction(transaction, callback) { + throw new Error("Not implemented!"); + } + + integrityCheck(doFix, callback) { + throw new Error("Not implemented!"); + } + + registerClusterWorker(worker) { + throw new Error("Not implemented!"); + } +} + +class PutTransaction { + constructor() {} + + getWriteStream(type, size, callback) { + throw new Error("Not implemented!"); + } +} + +module.exports = { + Cache: Cache, + PutTransaction: PutTransaction +}; diff --git a/lib/cache/cache_debug.js b/lib/cache/cache_debug.js new file mode 100644 index 0000000..6e9fccc --- /dev/null +++ b/lib/cache/cache_debug.js @@ -0,0 +1,61 @@ +const { Cache, PutTransaction } = require('./cache'); +const { Readable, Writable } = require('stream'); +const crypto = require('crypto'); +const kBuffer = Symbol("buffer"); +const kOptions = Symbol("options"); + +class CacheDebug extends Cache { + constructor(options) { + super(options); + + this[kOptions] = options; + this[kBuffer] = Buffer.alloc( + options.maxFileSize, + crypto.randomBytes(options.maxFileSize).toString('ascii'), + 'ascii'); + } + + getFileStream(type, guid, hash, callback) { + var size = Math.trunc(Math.random() * this[kOptions].minFileSize + this[kOptions].maxFileSize); + var slice = this[kBuffer].slice(0, size); + + var stream = new Readable({ + read() { + this.push(slice); + this.push(null); + } + }); + + callback(null, {size: slice.length, stream: stream}); + } + + createPutTransaction(guid, hash, callback) { + callback(null, new PutTransactionDebug()); + } + + endPutTransaction(transaction, callback) { + callback(); + } + + integrityCheck(doFix, callback) { + callback(null, 0); + } + + registerClusterWorker(worker) {} +} + +class PutTransactionDebug extends PutTransaction { + constructor() { + super(); + } + + getWriteStream(type, size, callback) { + var stream = new Writable({ + write(chunk, encoding, callback) { callback(); } + }); + + callback(null, stream); + } +} + +module.exports = CacheDebug; diff --git a/lib/cache/cache_membuf.js b/lib/cache/cache_membuf.js new file mode 100644 index 0000000..5a16328 --- /dev/null +++ b/lib/cache/cache_membuf.js @@ -0,0 +1,240 @@ +const cluster = require('cluster'); +const { Cache, PutTransaction } = require('./cache'); +const { Readable, Writable } = require('stream'); +const crypto = require('crypto'); +const helpers = require('../helpers'); +const consts = require('../constants').Constants; +const config = require('config'); + +class CacheMembuf extends Cache { + constructor() { + super(); + + if(!cluster.isMaster) + throw new Error("CacheMembuf module does not support clustering!"); + + CacheMembuf._init(); + } + + static _init() { + if(CacheMembuf._pages.length === 0) { + CacheMembuf._freeBlocks = []; + CacheMembuf._index = {}; + CacheMembuf._allocPage(CacheMembuf._options.initialPageSize); + } + } + + static get _options() { + return config.get("Cache.options.cache_membuf"); + } + + static _allocPage(size) { + CacheMembuf._pages.push(Buffer.alloc(size, 0, 'ascii')); + CacheMembuf._freeBlocks.push({ + pageIndex: CacheMembuf._pages.length - 1, + pageOffset: 0, + size: size + }); + + return CacheMembuf._freeBlocks.length - 1; + } + + static _calcIndexKey(type, guid, hash) { + var h = crypto.createHash('sha256'); + h.update(type); + h.update(guid); + h.update(hash); + return h.digest('hex'); + } + + static _findFreeBlockIndex(size) { + var best = -1; + var min = 0; + var max = CacheMembuf._freeBlocks.length - 1; + var guess; + + while (min <= max) { + guess = (min + max) >> 1; + + if (CacheMembuf._freeBlocks[guess].size < size) { + min = guess + 1; + } else { + best = guess; + max = guess - 1; + } + } + + return best; + } + + static _freeBlock(key) { + if(!CacheMembuf.hasOwnProperty(key)) + return; + + // Duplicate the index data into the free block list + CacheMembuf._freeBlocks.push(Object.assign({}, CacheMembuf._index[key])); + + // Remove the block from the index + delete CacheMembuf._index[key]; + + // Re-sort the free block list + CacheMembuf._freeBlocks.sort(function(a, b) { + return a.size - b.size; + }); + } + + static _reserveBlock(key, size) { + // Free any existing block for this key + CacheMembuf._freeBlock(key); + + // Find the best free block to use + var i = CacheMembuf._findFreeBlockIndex(size); + if(i >= 0) { + var block = CacheMembuf._freeBlocks[i]; + CacheMembuf._index[key] = Object.assign({}, block); + CacheMembuf._index[key].size = size; + + // Update this free block if leftover space is greater than the minimum + if(block.size - size >= CacheMembuf._options.minFreeBlockSize) { + block.pageOffset += size; + block.size -= size; + + // Re-sort the free block list + CacheMembuf._freeBlocks.sort(function(a, b) { + return a.size - b.size; + }); + } + else { + // Otherwise remove it + CacheMembuf._freeBlocks.splice(i, 0); + } + } + else { + // Otherwise add a new page + CacheMembuf._index[key] = { + pageIndex: CacheMembuf._allocPage(CacheMembuf._options.growPageSize), + pageOffset: 0, + size: size + } + } + + return CacheMembuf._index[key]; + } + + static _addFileToCache(type, guid, hash, buffer) { + var key = CacheMembuf._calcIndexKey(type, guid, hash); + var fileSize = buffer.length; + var entry = CacheMembuf._reserveBlock(key, fileSize); + helpers.log(consts.LOG_DBG, "Saving file: pageIndex = " + entry.pageIndex + " pageOffset = " + entry.pageOffset + " size = " + entry.size); + + buffer.copy(CacheMembuf._pages[entry.pageIndex], 0, entry.pageOffset, fileSize); + } + + getFileStream(type, guid, hash, callback) { + var key = CacheMembuf._calcIndexKey(type, guid, hash); + if(CacheMembuf._index.hasOwnProperty(key)) { + var entry = CacheMembuf._index[key]; + var slice = CacheMembuf._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size); + var stream = new Readable({ + read() { + this.push(slice); + this.push(null); + } + }); + + callback(null, {size: entry.size, stream: stream}); + } + else { + callback(null, null); + } + } + + createPutTransaction(guid, hash, callback) { + callback(null, new PutTransactionMembuf(guid, hash)); + } + + endPutTransaction(transaction, callback) { + var files = transaction.getFiles(); + files.forEach(function(file) { + CacheMembuf._addFileToCache.call(this, file.type, transaction.guid, transaction.hash, file.buffer); + }); + + callback(); + } + + + integrityCheck(doFix, callback) { + return super.integrityCheck(doFix, callback); + } + + registerClusterWorker(worker) { + return super.registerClusterWorker(worker); + } +} + +class PutTransactionMembuf extends PutTransaction { + constructor(guid, hash) { + super(); + this._buffers = { + a: null, + i: null, + r: null + }; + + this._finished = []; + + this._guid = guid; + this._hash = hash; + } + + getFiles() { + return this._finished; + } + + get guid() { + return this._guid; + } + + get hash() { + return this._hash; + } + + getWriteStream(type, size, callback) { + var self = this; + + if(type !== 'a' && type !== 'i' && type !== 'r') { + return callback(new Error("Unrecognized type '" + type + "' for transaction.")); + } + + this._buffers[type] = Buffer.alloc(size, 0, 'ascii'); + this._bufferPos = 0; + + var buffer = this._buffers[type]; + + var stream = new Writable({ + write(chunk, encoding, callback) { + if(buffer.length - self._bufferPos >= chunk.length) { + chunk.copy(buffer, self._bufferPos, 0, chunk.length); + self._bufferPos += chunk.length; + + if(self._bufferPos === size) { + self._finished.push({type: type, buffer: self._buffers[type]}); + } + } + else { + helpers.log(consts.LOG_ERR, "Attempt to write over stream buffer allocation!"); + } + + callback(); + } + }); + + callback(null, stream); + } +} + +CacheMembuf._index = {}; +CacheMembuf._pages = []; +CacheMembuf._freeBlocks = []; + +module.exports = CacheMembuf; \ No newline at end of file diff --git a/lib/cache_fs.js b/lib/cache_fs.js index 3daf473..2e7ef71 100644 --- a/lib/cache_fs.js +++ b/lib/cache_fs.js @@ -354,25 +354,6 @@ class CacheFS { } } - _CreateCacheDir(guid) { - // Only the cluster master should manage the cache file system - if(!cluster.isMaster) { - process.send({ - msg: "CacheFS.cmd", - func: "_CreateCacheDir", - args: [guid] - }); - - return; - } - - var dir = this.cacheDir + "/" + guid.substring(0, 2); - if(!fs.existsSync(dir)) { - helpers.log(consts.LOG_DBG, "Create directory " + dir); - fs.mkdirSync(dir, 0o777); - } - } - /** * @return {number} */ @@ -406,13 +387,51 @@ class CacheFS { /** * @return {string} */ - GetCachePath(guid, hash, extension, create) { + GetCachePath(guid, hash, extension, create, callback) { var dir = this.cacheDir + "/" + guid.substring(0, 2); + var path = dir + "/" + guid + "-" + hash + "." + extension; + + if (create) { + fs.mkdir(dir, 0o777, function(err) { + if(!callback) return; - if (create) - this._CreateCacheDir(guid); + if (err && err.code !== 'EEXIST') { + callback(err); + } + else { + callback(null, path); + } + }); + } + else if(callback) { + process.nextTick(callback.bind(null, null, path)); + } + + if(!callback) return path; + } + + GetCacheFileStream(cachePath) { + var file = fs.createReadStream(cachePath); + + file.on('close', function () { + try { + // Touch the file, so that it becomes the newest accessed file for LRU cleanup - utimes expects a Unix timestamp in seconds, Date.now() returns millis + let dateNow = Date.now() / 1000; + helpers.log(consts.LOG_DBG, "Updating mtime of " + cachePath + " to: " + dateNow); + fs.utimesSync(cachePath, dateNow, dateNow); + } + catch (err) { + helpers.log(consts.LOG_ERR, "Failed to update mtime of " + cachePath + ": " + err); + } + }); + + file.on('open', function (fd) { + fs.fstat(fd, function (err, stats) { + file.emit('cache_open', err, stats.size); + }); + }); - return dir + "/" + guid + "-" + hash + "." + extension; + return file; } ReplaceFile(from, to, size) { diff --git a/lib/client/server_response_transform.js b/lib/client/server_response_transform.js index 99ac4be..4894df8 100644 --- a/lib/client/server_response_transform.js +++ b/lib/client/server_response_transform.js @@ -1,34 +1,19 @@ const assert = require('assert'); const helpers = require('./../helpers'); const consts = require('./../constants').Constants; -const Transform = require('stream').Transform; +const BaseTransform = require('../base_protocol_transform').Transform; -const MAX_HEADER_SIZE = consts.ID_SIZE; - -class CacheServerResponseTransform extends Transform { +class CacheServerResponseTransform extends BaseTransform { constructor() { super(); - - this.headerBuf = Buffer.allocUnsafe(MAX_HEADER_SIZE); - this.init(); } - init() { + _init() { + super._init(); this.headerBufPos = 0; this.blobBytesRead = 0; this.doReadSize = false; this.doReadId = false; - this.didReadHeader = false; - this.headerData = {}; - } - - _transform(data, encoding, callback) { - if(this.didReadHeader) { - this._sendData(data, callback); - } - else { - this._emitHeader(data, callback); - } } _emitHeader(data, callback) { @@ -113,25 +98,6 @@ class CacheServerResponseTransform extends Transform { callback(); } } - - _sendData(data, callback) { - var len = Math.min(this.headerData.size - this.blobBytesRead, data.length); - this.blobBytesRead += len; - - if(len >= data.length) { - this.push(data); - callback(); - } - else { - this.push(data.slice(0, len)); - this._emitHeader(data.slice(len), callback); - } - - if(this.blobBytesRead === this.headerData.size) { - this.init(); - this.emit('dataEnd'); - } - } } module.exports = CacheServerResponseTransform; \ No newline at end of file diff --git a/lib/server/client_stream_processor.js b/lib/server/client_stream_processor.js new file mode 100644 index 0000000..0bf451a --- /dev/null +++ b/lib/server/client_stream_processor.js @@ -0,0 +1,187 @@ +const assert = require('assert'); +const helpers = require('./../helpers'); +const consts = require('./../constants').Constants; + +const { Transform } = require('stream'); + +const CMD_QUIT = 'q'.charCodeAt(0); +const MAX_HEADER_SIZE = consts.CMD_SIZE + consts.ID_SIZE; +const kSource = Symbol("source"); + +class ClientStreamProcessor extends Transform { + constructor() { + super(); + + this.headerBuf = Buffer.allocUnsafe(MAX_HEADER_SIZE); + this.didReadVersion = false; + this.errState = null; + this._registerEventListeners(); + this._init(); + } + + _registerEventListeners() { + var self = this; + + this.on('pipe', function(src) { + self[kSource] = src; + }); + + this.on('quit', function() { + self[kSource].destroy(); + }) + } + + _init() { + this.readState = { + didReadCmd: false, + doReadSize: false, + didReadSize: false, + doReadId: false, + didReadId: false, + doReadIntegrityType: false, + didReadIntegrityType: false, + dataPassThrough: false, + dataSize: 0, + headerBufPos: 0, + dataBytesRead: 0 + }; + } + + static errorCodes() { + return { + quitError: { msg: "Client quit" } + } + } + + _transform(data, encoding, callback) { + while(data.length > 0 && this.errState === null) { + if (this.readState.dataPassThrough) + data = this._sendData(data); + else + data = this._sendCommands(data); + + if(this.errState !== null) { + this.emit('error', this.errState); + } + } + + callback(); + } + + _sendData(data) { + var len = Math.min(this.readState.dataSize - this.readState.dataBytesRead, data.length); + this.push(data.slice(0, len)); + this.readState.dataBytesRead += len; + + if(this.readState.dataBytesRead == this.readState.dataSize) { + this._init(); + } + + return len < data.length ? data.slice(len) : Buffer.from([]); + } + + _sendCommands(data) { + var self = this; + var dataPos = 0; + + function fillBufferWithData(size) { + if(dataPos >= data.length) + return false; + + var toCopy = Math.min(size, data.length - dataPos); + data.copy(self.headerBuf, self.readState.headerBufPos, dataPos, dataPos + toCopy); + dataPos += toCopy; + self.readState.headerBufPos += toCopy; + + return toCopy === size; + } + + function isDone() { + return dataPos >= data.length || self.errState !== null; + } + + if(!this.didReadVersion) { + var verSize = Math.max(consts.VERSION_SIZE, Math.min(consts.PROTOCOL_VERSION_MIN_SIZE, data.length)); + dataPos += verSize; + + this.didReadVersion = true; + this.push(data.slice(0, verSize)); + } + + while(!isDone()) { + // Quit? + if (data[dataPos] === CMD_QUIT) { + this.push(CMD_QUIT); + this.errState = this.errorCodes.quitError; + break; + } + + // Read command + if (!this.readState.didReadCmd) { + if(!fillBufferWithData(consts.CMD_SIZE)) + break; + + this.readState.didReadCmd = true; + + var cmd = this.headerBuf.slice(0, consts.CMD_SIZE).toString('ascii'); + + switch (cmd[0]) { + case 'g': // get + this.readState.doReadId = true; + break; + case 'p': // put + this.readState.doReadSize = true; + break; + case 'i': // integrity check + this.readState.doReadIntegrityType = true; + break; + case 't': // transaction + if(cmd[1] == 's') + this.readState.doReadId = true; + + break; + default: + this.errState = new Error("Unrecognized command, aborting!"); + break; + } + } + + // Read size + if (this.readState.doReadSize && !this.readState.didReadSize) { + if(!fillBufferWithData(consts.SIZE_SIZE)) + break; + + this.readState.didReadSize = true; + this.readState.dataSize = helpers.readUInt64(this.headerBuf.slice(consts.CMD_SIZE, consts.CMD_SIZE + consts.SIZE_SIZE).toString('ascii')); + this.readState.dataPassThrough = true; + } + + // Read ID + if (this.readState.doReadId && !this.readState.didReadId) { + if(!fillBufferWithData(consts.ID_SIZE)) + break; + + this.readState.didReadId = true; + } + + // Read extra + if (this.readState.doReadIntegrityType && !this.readState.didReadIntegrityType) { + if(!fillBufferWithData(1)) + break; + + this.readState.didReadIntegrityType = true; + } + + this.push(Buffer.from(this.headerBuf.slice(0, this.readState.headerBufPos))); + + if(!this.readState.dataPassThrough) + this._init(); + else + break; + } + + return dataPos < data.length ? data.slice(dataPos) : Buffer.from([]); + } +} + +module.exports = ClientStreamProcessor; \ No newline at end of file diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js new file mode 100644 index 0000000..823a3a2 --- /dev/null +++ b/lib/server/command_processor.js @@ -0,0 +1,254 @@ +const assert = require('assert'); +const helpers = require('./../helpers'); +const consts = require('./../constants').Constants; +const crypto = require('crypto'); +const async = require('async'); + +const { Transform } = require('stream'); + +const kSource = Symbol("source"); +const kCache = Symbol("cache"); +const kSendFileQueue = Symbol("sendFileQueue"); +const kReadStateVersion = Symbol("readStateVersion"); +const kReadStateCommand = Symbol("readStateCommand"); +const kReadStatePutStream = Symbol("readStatePutStream"); +const kReadStateDone = Symbol("readStateDone"); + +class CommandProcessor extends Transform { + constructor(clientStreamProcessor, cache) { + super(); + this[kSource] = clientStreamProcessor; + this[kCache] = cache; + this[kSendFileQueue] = async.queue(this._sendFile.bind(this), 1); + this._readState = kReadStateVersion; + this._trx = null; + this._putStream = null; + this._putSize = 0; + this._putSent = 0; + } + + _transform(chunk, encoding, callback) { + var handler = null; + var self = this; + + switch(this._readState) { + case kReadStateVersion: + handler = this._handleVersion; + break; + case kReadStateCommand: + handler = this._handleCommand; + break; + case kReadStatePutStream: + handler = this._handleWrite; + break; + case kReadStateDone: + return callback(null); + break; + } + + handler.call(this, chunk, function(err) { + if(err) { + self._quit(err); + } + + callback(); + }); + } + + _quit(err) { + this[kSendFileQueue].kill(); + this[kSource].unpipe(this); + this[kSource].emit('quit'); + this._readState = kReadStateDone; + err && helpers.log(consts.LOG_ERR, err); + } + + _sendFile(task, callback) { + var self = this; + + this[kCache].getFileStream(task.type, task.guid, task.hash, function(err, result) { + if(err || result === null) { + self.push('-i'); + self.push(task.guid); + self.push(task.hash); + } + else { + self.push('+i'); + self.push(helpers.encodeInt64(result.size)); + self.push(task.guid); + self.push(task.hash); + + result.stream + .on('readable', function() { + var chunk; + while((chunk = result.stream.read()) != null) { + self.push(chunk); + } + }) + .on('end', function() { + callback(null); + }) + .on('error', function(err) { + callback(err); + }); + } + }); + } + + _handleVersion(data, callback) { + var version = helpers.readUInt32(data); + this._readState = kReadStateCommand; + this.push(helpers.encodeInt32(version)); + + if(version !== consts.PROTOCOL_VERSION) { + version = 0; + callback(new Error("Bad Client protocol version")); + } + else { + callback(null); + } + } + + _handleWrite(data, callback) { + var self = this; + + this._putStream.write(data, 'ascii', function() { + self._putSent += data.length; + if(self._putSent === self._putSize) { + self._readState = kReadStateCommand; + self._putSent = 0; + self._putSize = 0; + } + + callback(); + }); + } + + _handleCommand(data, callback) { + var cmd, size, type, guid, hash = null; + if(data.length > 1) { + cmd = data.slice(0, 2).toString('ascii'); + type = cmd[1]; + + if (data.length === 2 + consts.ID_SIZE) { + guid = Buffer.from(data.slice(2, 2 + consts.GUID_SIZE)); + hash = Buffer.from(data.slice(2 + consts.HASH_SIZE)); + } + else if (data.length === 2 + consts.SIZE_SIZE) { + size = helpers.readUInt64(data.slice(2)); + } + } + else if(data.length > 0) { + cmd = data.toString('ascii'); + } + else { + return callback(); + } + + switch(cmd) { + case 'q': + this._quit(); + this._readState = kReadStateDone; + break; + case 'ga': + case 'gi': + case 'gr': + this._onGet(type, guid, hash, callback); + break; + case 'ts': + this._onTransactionStart(guid, hash, callback); + break; + case 'te': + this._onTransactionEnd(callback); + break; + case 'pa': + case 'pi': + case 'pr': + this._onPut(type, size, callback); + break; + case 'icf': + this._onIntegrityCheck(true, callback); + break; + case 'icv': + this._onIntegrityCheck(false, callback); + break; + default: + callback(new Error("Unrecognized command '" + cmd + "'")); + } + } + + _onGet(type, guid, hash, callback) { + this[kSendFileQueue].push({ + type: type, + guid: guid, + hash: hash + }); + + callback(null); + } + + _onTransactionStart(guid, hash, callback) { + var self = this; + + if(this._trx !== null) { + return callback(new Error("Already in a transaction")); + } + + this[kCache].createPutTransaction(guid, hash, function(err, trx) { + if(err) { + return callback(err); + } + + self._trx = trx; + callback(null); + }); + } + + _onTransactionEnd(callback) { + var self = this; + + if(!this._trx) { + return callback(new Error("Not in a transaction")); + } + + this[kCache].endPutTransaction(this._trx, function(err) { + self._trx = null; + callback(err); + }); + } + + _onPut(type, size, callback) { + var self = this; + + if(!this._trx) { + return callback(new Error("Not in a transaction")); + } + + this._trx.getWriteStream(type, size, function(err, stream) { + if(err) { + return callback(err); + } + + self._putStream = stream; + self._putSize = size; + self._readState = kReadStatePutStream; + callback(null); + }); + } + + _onIntegrityCheck(doFix, callback) { + var self = this; + + this[kCache].integrityCheck(doFix, function(err, numErrs) { + if(err) { + return callback(err); + } + + self.push('ic'); + self.push(helpers.encodeInt64(numErrs)); + callback(null); + }) + } +} + +module.exports = CommandProcessor; \ No newline at end of file diff --git a/lib/server_v2.js b/lib/server_v2.js new file mode 100644 index 0000000..4f38214 --- /dev/null +++ b/lib/server_v2.js @@ -0,0 +1,73 @@ +/** + * Created by spalmer on 10/16/17. + */ +'use strict'; +const cluster = require('cluster'); +const net = require('net'); +const fs = require('fs'); +const consts = require('./constants').Constants; +const helpers = require('./helpers'); +const ClientStreamProcessor = require('./server/client_stream_processor'); +const CommandProcessor = require('./server/command_processor'); + +class CacheServer { + constructor(cache, port) { + this._cache = cache; + this._port = parseInt(port); + if (!port && port !== 0) + this._port = consts.DEFAULT_PORT; + this._sever = null; + } + + get port() { + return this._server && this._server.listening + ? this._server.address().port + : this._port; + } + + get cache() { + return this._cache; + } + + get server() { + return this._server; + } + + /** + * start the cache server + * + * @param errCallback error callback (optional) + * @param callback + */ + Start(errCallback, callback) { + var self = this; + + this._server = net.createServer(function (socket) { + socket + .on('close', function () { + helpers.log(consts.LOG_ERR, "Socket closed"); + }) + .on('error', function (err) { + helpers.log(consts.LOG_ERR, "Socket error " + err); + }); + + var clientStreamProcessor = new ClientStreamProcessor(); + var commandProcessor = new CommandProcessor(clientStreamProcessor, self.cache); + + socket.pipe(clientStreamProcessor).pipe(commandProcessor).pipe(socket); + }); + + this._server.on('error', function (e) { + if (e.code == 'EADDRINUSE') { + helpers.log(consts.LOG_ERR, 'Port ' + self.port + ' is already in use...'); + if (errCallback && typeof(errCallback === 'function')) { errCallback(e); } + } + }); + + this._server.listen(this._port, function() { + if(callback && typeof(callback) === 'function') { callback(); } + }); + }; +} + +module.exports = CacheServer; \ No newline at end of file diff --git a/main.js b/main.js index aa644c7..44043f9 100644 --- a/main.js +++ b/main.js @@ -3,25 +3,29 @@ const helpers = require('./lib/helpers'); const consts = require('./lib/constants').Constants; const program = require('commander'); const path = require('path'); -const CacheServer = require('./lib/server'); -const CacheFS = require('./lib/cache_fs'); +const CacheServer = require('./lib/server_v2'); +const config = require('config'); function myParseInt(val, def) { val = parseInt(val); return (!val && val !== 0) ? def : val; } +function zeroOrMore(val) { + return Math.max(0, val); +} + function atLeastOne(val) { return Math.max(1, val); } program.description("Unity Cache Server") .version(consts.VERSION) - .option('-s, --size ', 'Specify the maximum allowed size of the LRU cache. Files that have not been used recently will automatically be discarded when the cache size is exceeded. Default is 50Gb', myParseInt, consts.DEFAULT_CACHE_SIZE) + //.option('-s, --size ', 'Specify the maximum allowed size of the LRU cache. Files that have not been used recently will automatically be discarded when the cache size is exceeded. Default is 50Gb', myParseInt, consts.DEFAULT_CACHE_SIZE) .option('-p, --port ', 'Specify the server port, only apply to new cache server, default is 8126', myParseInt, consts.DEFAULT_PORT) - .option('-P, --path [path]', 'Specify the path of the cache directory. Default is ./cache5.0', consts.DEFAULT_CACHE_DIR) + //.option('-P, --path [path]', 'Specify the path of the cache directory. Default is ./cache5.0', consts.DEFAULT_CACHE_DIR) .option('-l, --log-level ', 'Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is 4 (test)', myParseInt, consts.DEFAULT_LOG_LEVEL) - .option('-w, --workers ', 'Number of worker threads to spawn. Default is 1 for every 2 CPUs reported by the OS', atLeastOne, consts.DEFAULT_WORKERS) + .option('-w, --workers ', 'Number of worker threads to spawn. Default is 1 for every 2 CPUs reported by the OS', zeroOrMore, consts.DEFAULT_WORKERS) .option('-v, --verify', 'Verify the Cache Server integrity, without fixing errors') .option('-f, --fix', 'Fix errors found while verifying the Cache Server integrity') .option('-m, --monitor-parent-process ', 'Monitor a parent process and exit if it dies', myParseInt, 0) @@ -33,7 +37,11 @@ helpers.SetLogLevel(program.logLevel); var cache; try { - cache = new CacheFS(program.path, program.size); + var moduleName = config.get("Cache.module"); + var modulePath = path.resolve(config.get("Cache.path"), moduleName); + helpers.log(consts.LOG_INFO, "Loading Cache module at " + modulePath); + const Cache = require(modulePath); + cache = new Cache(); } catch(e) { console.log(e); @@ -79,9 +87,16 @@ var server = new CacheServer(cache, program.port); if(cluster.isMaster) { helpers.log(consts.LOG_INFO, "Cache Server version " + consts.VERSION); + + if(program.workers === 0) { + server.Start(errHandler, function () { + helpers.log(consts.LOG_INFO, `Cache Server ready on port ${server.port}`); + }); + } + for(let i = 0; i < program.workers; i++) { var worker = cluster.fork(); - cache.RegisterClusterWorker(worker); + cache.registerClusterWorker(worker); } } else { diff --git a/package.json b/package.json index c63003d..c5eff0b 100644 --- a/package.json +++ b/package.json @@ -33,6 +33,9 @@ "mocha-lcov-reporter": "^1.3.0" }, "dependencies": { - "commander": "^2.11.0" + "async": "^2.5.0", + "commander": "^2.11.0", + "config": "^1.27.0", + "js-yaml": "^3.10.0" } } diff --git a/test/cache_fs.js b/test/cache_fs.js index 4f431e6..4c10994 100644 --- a/test/cache_fs.js +++ b/test/cache_fs.js @@ -24,4 +24,8 @@ describe("CacheFS", function() { } }); }); + + describe("GetCacheFileStream", function() { + it("should update the timestamp of the retrieved file to support LRU cleanup"); + }) }); \ No newline at end of file diff --git a/test/server.js b/test/server.js index 8b63987..30f4ad5 100644 --- a/test/server.js +++ b/test/server.js @@ -189,11 +189,11 @@ describe("CacheServer protocol", function() { this.slow(1500); var self = this; - this.getCachePath = function(extension) { + this.getCachePath = function(extension, callback) { return cache.GetCachePath( helpers.readHex(self.data.guid.length, self.data.guid), helpers.readHex(self.data.hash.length, self.data.hash), - extension, false); + extension, false, callback); }; before(function() { @@ -256,12 +256,15 @@ describe("CacheServer protocol", function() { tests.forEach(function(test) { it("should store " + test.ext + " data with a (" + test.cmd + ") cmd", function(done) { client.on('close', function() { - fs.open(self.getCachePath(test.ext), 'r', function(err, fd) { - assert(!err, err); - var buf = fs.readFileSync(fd); - assert(buf.compare(self.data.asset) == 0); - done(); - }); + self.getCachePath(test.ext, function(err, cachePath) { + fs.open(cachePath, 'r', function(err, fd) { + assert(!err, err); + var buf = fs.readFileSync(fd); + assert(buf.compare(self.data.asset) == 0); + done(); + }); + }) + }); var buf = Buffer.from( @@ -292,11 +295,13 @@ describe("CacheServer protocol", function() { var asset = Buffer.from(crypto.randomBytes(self.data.asset.length).toString('ascii'), 'ascii'); client.on('close', function() { - fs.open(self.getCachePath('bin'), 'r', function(err, fd) { - assert(!err, err); - var buf = fs.readFileSync(fd); - assert(buf.compare(asset) == 0); - done(); + self.getCachePath('bin', function(err, cachePath) { + fs.open(cachePath, 'r', function(err, fd) { + assert(!err, err); + var buf = fs.readFileSync(fd); + assert(buf.compare(asset) == 0); + done(); + }); }); }); From 2194795d51033cda9ab8de82a999ff4f7f1ab99a Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 8 Nov 2017 12:14:52 -0600 Subject: [PATCH 02/89] Bug fixes, tests almost all green, removed unused/old code. etc. --- .nvmrc | 1 + lib/base_protocol_transform.js | 52 - lib/cache/cache.js | 30 +- lib/cache/cache_debug.js | 20 +- lib/cache/cache_membuf.js | 87 +- lib/cache_fs.js | 483 -------- lib/client/server_response_transform.js | 54 +- lib/helpers.js | 24 +- lib/server.js | 501 +------- lib/server/client_stream_processor.js | 70 +- lib/server/command_processor.js | 62 +- lib/server_v2.js | 73 -- main.js | 16 +- package-lock.json | 1447 +++++++++++++++++++++++ package.json | 2 +- test/cache_fs.js | 31 - test/server.js | 345 ++---- 17 files changed, 1696 insertions(+), 1602 deletions(-) create mode 100644 .nvmrc delete mode 100644 lib/base_protocol_transform.js delete mode 100644 lib/cache_fs.js delete mode 100644 lib/server_v2.js create mode 100644 package-lock.json delete mode 100644 test/cache_fs.js diff --git a/.nvmrc b/.nvmrc new file mode 100644 index 0000000..ad8f873 --- /dev/null +++ b/.nvmrc @@ -0,0 +1 @@ +v8.9.1 diff --git a/lib/base_protocol_transform.js b/lib/base_protocol_transform.js deleted file mode 100644 index 0a754d3..0000000 --- a/lib/base_protocol_transform.js +++ /dev/null @@ -1,52 +0,0 @@ -const assert = require('assert'); -const helpers = require('./helpers'); -const consts = require('./constants').Constants; -const Transform = require('stream').Transform; - -const MAX_HEADER_SIZE = consts.ID_SIZE; - -class BaseProtocolTransform extends Transform { - constructor() { - super(); - - this.headerBuf = Buffer.allocUnsafe(MAX_HEADER_SIZE); - this._init(); - } - - _init() { - this.didReadHeader = false; - this.headerData = {}; - } - - _transform(data, encoding, callback) { - if(this.didReadHeader) { - this._sendData(data, callback); - } - else { - this._emitHeader(data, callback); - } - } - - _emitHeader(data, callback) {} - - _sendData(data, callback) { - var len = Math.min(this.headerData.size - this.blobBytesRead, data.length); - this.blobBytesRead += len; - - if(len >= data.length) { - this.push(data); - callback(); - } - else { - this.push(data.slice(0, len)); - this._emitHeader(data.slice(len), callback); - } - - if(this.blobBytesRead === this.headerData.size) { - this._init(); - this.emit('dataEnd'); - } - } -} - -module.exports.Transform = BaseProtocolTransform; \ No newline at end of file diff --git a/lib/cache/cache.js b/lib/cache/cache.js index bf2400f..26f7110 100644 --- a/lib/cache/cache.js +++ b/lib/cache/cache.js @@ -2,34 +2,16 @@ class Cache { constructor() {} - - getFileStream(type, guid, hash, callback) { - throw new Error("Not implemented!"); - } - - createPutTransaction(guid, hash, callback) { - throw new Error("Not implemented!"); - } - - endPutTransaction(transaction, callback) { - throw new Error("Not implemented!"); - } - - integrityCheck(doFix, callback) { - throw new Error("Not implemented!"); - } - - registerClusterWorker(worker) { - throw new Error("Not implemented!"); - } } class PutTransaction { - constructor() {} - - getWriteStream(type, size, callback) { - throw new Error("Not implemented!"); + constructor(guid, hash) { + this._guid = guid; + this._hash = hash; } + + get guid() { return this._guid; } + get hash() { return this._hash; } } module.exports = { diff --git a/lib/cache/cache_debug.js b/lib/cache/cache_debug.js index 6e9fccc..9950511 100644 --- a/lib/cache/cache_debug.js +++ b/lib/cache/cache_debug.js @@ -16,10 +16,10 @@ class CacheDebug extends Cache { } getFileStream(type, guid, hash, callback) { - var size = Math.trunc(Math.random() * this[kOptions].minFileSize + this[kOptions].maxFileSize); - var slice = this[kBuffer].slice(0, size); + const size = Math.trunc(Math.random() * this[kOptions].minFileSize + this[kOptions].maxFileSize); + const slice = this[kBuffer].slice(0, size); - var stream = new Readable({ + const stream = new Readable({ read() { this.push(slice); this.push(null); @@ -37,21 +37,19 @@ class CacheDebug extends Cache { callback(); } - integrityCheck(doFix, callback) { - callback(null, 0); - } - registerClusterWorker(worker) {} } class PutTransactionDebug extends PutTransaction { - constructor() { - super(); + constructor(guid, hash) { + super(guid, hash); } getWriteStream(type, size, callback) { - var stream = new Writable({ - write(chunk, encoding, callback) { callback(); } + const stream = new Writable({ + write(chunk, encoding, callback) { + callback(); + } }); callback(null, stream); diff --git a/lib/cache/cache_membuf.js b/lib/cache/cache_membuf.js index 5a16328..ba6a623 100644 --- a/lib/cache/cache_membuf.js +++ b/lib/cache/cache_membuf.js @@ -40,7 +40,7 @@ class CacheMembuf extends Cache { } static _calcIndexKey(type, guid, hash) { - var h = crypto.createHash('sha256'); + const h = crypto.createHash('sha256'); h.update(type); h.update(guid); h.update(hash); @@ -48,10 +48,10 @@ class CacheMembuf extends Cache { } static _findFreeBlockIndex(size) { - var best = -1; - var min = 0; - var max = CacheMembuf._freeBlocks.length - 1; - var guess; + let best = -1; + let min = 0; + let max = CacheMembuf._freeBlocks.length - 1; + let guess; while (min <= max) { guess = (min + max) >> 1; @@ -88,9 +88,9 @@ class CacheMembuf extends Cache { CacheMembuf._freeBlock(key); // Find the best free block to use - var i = CacheMembuf._findFreeBlockIndex(size); + const i = CacheMembuf._findFreeBlockIndex(size); if(i >= 0) { - var block = CacheMembuf._freeBlocks[i]; + const block = CacheMembuf._freeBlocks[i]; CacheMembuf._index[key] = Object.assign({}, block); CacheMembuf._index[key].size = size; @@ -122,30 +122,30 @@ class CacheMembuf extends Cache { } static _addFileToCache(type, guid, hash, buffer) { - var key = CacheMembuf._calcIndexKey(type, guid, hash); - var fileSize = buffer.length; - var entry = CacheMembuf._reserveBlock(key, fileSize); + const key = CacheMembuf._calcIndexKey(type, guid, hash); + const entry = CacheMembuf._reserveBlock(key, buffer.length); + helpers.log(consts.LOG_DBG, "Saving file: pageIndex = " + entry.pageIndex + " pageOffset = " + entry.pageOffset + " size = " + entry.size); - buffer.copy(CacheMembuf._pages[entry.pageIndex], 0, entry.pageOffset, fileSize); + buffer.copy(CacheMembuf._pages[entry.pageIndex], entry.pageOffset, 0, buffer.length); } getFileStream(type, guid, hash, callback) { - var key = CacheMembuf._calcIndexKey(type, guid, hash); + const key = CacheMembuf._calcIndexKey(type, guid, hash); if(CacheMembuf._index.hasOwnProperty(key)) { - var entry = CacheMembuf._index[key]; - var slice = CacheMembuf._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size); - var stream = new Readable({ + const entry = CacheMembuf._index[key]; + const slice = CacheMembuf._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size); + const stream = new Readable({ read() { this.push(slice); this.push(null); } }); - + callback(null, {size: entry.size, stream: stream}); } else { - callback(null, null); + callback(new Error("File not found for (" + type + ") " + guid.toString('hex') + "-" + hash.toString('hex'))); } } @@ -154,7 +154,7 @@ class CacheMembuf extends Cache { } endPutTransaction(transaction, callback) { - var files = transaction.getFiles(); + const files = transaction.getFiles(); files.forEach(function(file) { CacheMembuf._addFileToCache.call(this, file.type, transaction.guid, transaction.hash, file.buffer); }); @@ -162,63 +162,42 @@ class CacheMembuf extends Cache { callback(); } - - integrityCheck(doFix, callback) { - return super.integrityCheck(doFix, callback); - } - registerClusterWorker(worker) { - return super.registerClusterWorker(worker); + // Not implemented } } class PutTransactionMembuf extends PutTransaction { constructor(guid, hash) { - super(); - this._buffers = { - a: null, - i: null, - r: null - }; - + super(guid, hash); + this._files = { a: {}, i: {}, r: {} }; this._finished = []; - - this._guid = guid; - this._hash = hash; } getFiles() { return this._finished; } - get guid() { - return this._guid; - } - - get hash() { - return this._hash; - } - getWriteStream(type, size, callback) { - var self = this; - + const self = this; + if(type !== 'a' && type !== 'i' && type !== 'r') { return callback(new Error("Unrecognized type '" + type + "' for transaction.")); } - this._buffers[type] = Buffer.alloc(size, 0, 'ascii'); - this._bufferPos = 0; - - var buffer = this._buffers[type]; + this._files[type].buffer = Buffer.alloc(size, 0, 'ascii'); + this._files[type].pos = 0; - var stream = new Writable({ + const stream = new Writable({ write(chunk, encoding, callback) { - if(buffer.length - self._bufferPos >= chunk.length) { - chunk.copy(buffer, self._bufferPos, 0, chunk.length); - self._bufferPos += chunk.length; + const file = self._files[type]; + + if (file.buffer.length - file.pos >= chunk.length) { + chunk.copy(file.buffer, file.pos, 0, chunk.length); + file.pos += chunk.length; - if(self._bufferPos === size) { - self._finished.push({type: type, buffer: self._buffers[type]}); + if (file.pos === size) { + self._finished.push({type: type, buffer: file.buffer}); } } else { diff --git a/lib/cache_fs.js b/lib/cache_fs.js deleted file mode 100644 index 2e7ef71..0000000 --- a/lib/cache_fs.js +++ /dev/null @@ -1,483 +0,0 @@ -'use strict'; -const cluster = require('cluster'); -const helpers = require('./helpers'); -const consts = require('./constants').Constants; -const fs = require('fs'); - -const freeCacheSizeRatio = 0.9; - -class CacheFS { - constructor(path, maxSize) { - this._cacheDir = path; - this._maxCacheSize = maxSize; - this._totalDataSize = -1; - this._freeingSpaceLock = 0; - this._InitCache(); - } - - get cacheDir() { - return this._cacheDir; - } - - get maxCacheSize() { - return this._maxCacheSize; - } - - set maxCacheSize(size) { - this._maxCacheSize = Math.max(0, parseInt(size)); - } - - get totalDataSize() { - return this._totalDataSize; - } - - /** - * @return {boolean} - */ - static ShouldIgnoreFile(file) { - if (file.length <= 2) return true; // Skip "00" to "ff" directories - if (file.length >= 4 && file.toLowerCase().indexOf("temp") == 0) return true; // Skip Temp directory - if (file.length >= 9 && file.toLowerCase().indexOf(".ds_store") == 0) return true; // Skip .DS_Store file on MacOSX - if (file.length >= 11 && file.toLowerCase().indexOf("desktop.ini") == 0) return true; // Skip Desktop.ini file on Windows - return false; - } - - static CheckCacheDirectory(dir) { - fs.readdirSync(dir).forEach(function (file) { - if (!CacheFS.ShouldIgnoreFile(file)) { - throw new Error("The file " + dir + "/" + file + " does not seem to be a valid cache file. Please delete it or choose another cache directory."); - } - }); - } - - static FixFileIfRequired(path, msg, fix) { - if (fix) { - try { - var stat = fs.statSync(path); - if (stat.isDirectory()) { - fs.rmdirSync(path); - helpers.log(consts.LOG_DBG, msg + " Directory deleted."); - } else { - fs.unlinkSync(path); - helpers.log(consts.LOG_DBG, msg + " File deleted."); - } - } - catch (err) { - helpers.log(consts.LOG_ERR, err); - } - } - else { - helpers.log(consts.LOG_DBG, msg + " Please delete it."); - } - } - - /** - * - * @param dir - * @returns {number} - */ - static GetDirectorySize(dir) { - var size = 0; - fs.readdirSync(dir).forEach(function (file) { - file = dir + "/" + file; - var stats = fs.statSync(file); - if (stats.isFile()) - size += stats.size; - else - size += CacheFS.GetDirectorySize(file); - }); - - return size; - } - - _InitCache() { - if (!fs.existsSync(this.cacheDir)) - fs.mkdirSync(this.cacheDir, 0o777); - var hexDigits = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c", "d", "e", "f"]; - for (var outer = 0; outer < hexDigits.length; outer++) { - for (var inner = 0; inner < hexDigits.length; inner++) { - var cacheSubDir = this.cacheDir + "/" + hexDigits[outer] + hexDigits[inner]; - if (!fs.existsSync(cacheSubDir)) - fs.mkdirSync(cacheSubDir, 0o777); - } - } - - CacheFS.CheckCacheDirectory(this.cacheDir); - this._totalDataSize = CacheFS.GetDirectorySize(this.cacheDir); - - helpers.log(consts.LOG_DBG, "Cache Server directory " + this.cacheDir); - helpers.log(consts.LOG_DBG, "Cache Server size " + this.totalDataSize); - helpers.log(consts.LOG_DBG, "Cache Server max cache size " + this.maxCacheSize); - - if (this.totalDataSize > this.maxCacheSize) - this._FreeSpace(this.GetFreeCacheSize()); - }; - - _WalkDirectory(dir, done) { - var results = []; - var self = this; - fs.readdir(dir, function (err, list) { - if (err) - return done(err); - - var pending = list.length; - if (pending == 0) { - done(null, results); - } - else { - list.forEach(function (file) { - file = dir + '/' + file; - fs.stat(file, function (err, stat) { - if (!err && stat) { - if (stat.isDirectory()) { - self._WalkDirectory(file, function (err, res) { - results = results.concat(res); - if (!--pending) - done(null, results); - }); - } - else { - results.push({name: file, date: stat.mtime, size: stat.size}); - if (!--pending) { - done(null, results); - } - } - } - else { - helpers.log(consts.LOG_DBG, "Freeing space failed to extract stat from file."); - } - }); - }); - } - }); - } - - _FreeSpaceOfFile(removeParam) { - this._LockFreeSpace(); - - var self = this; - fs.unlink(removeParam.name, function (err) { - if (err) { - helpers.log(consts.LOG_DBG, "Freeing cache space file can not be accessed: " + removeParam.name + err); - - // If removing the file fails, then we have to adjust the total data size back - self._totalDataSize += removeParam.size; - } - else { - helpers.log(consts.LOG_TEST, " Did remove: " + removeParam.name + ". (" + removeParam.size + ")"); - } - - self._UnlockFreeSpace(); - }); - } - - _FreeSpace(freeSize) { - if (this._freeingSpaceLock != 0) { - helpers.log(consts.LOG_DBG, "Skip free cache space because it is already in progress: " + this._freeingSpaceLock); - return; - } - - this._LockFreeSpace(); - - helpers.log(consts.LOG_TEST, "Begin freeing cache space. Current size: " + this.totalDataSize); - - var self = this; - this._WalkDirectory(this.cacheDir, function (err, files) { - if (err) - throw err; - - files.sort(function (a, b) { - if (a.date == b.date) - return 0; - else if (a.date < b.date) - return 1; - else - return -1; - }); - - while (self.totalDataSize > freeSize) { - var remove = files.pop(); - if (!remove) - break; - - self._totalDataSize -= remove.size; - self._FreeSpaceOfFile(remove); - } - - self._UnlockFreeSpace(); - }); - } - - _LockFreeSpace() { - this._freeingSpaceLock++; - } - - _UnlockFreeSpace() { - this._freeingSpaceLock--; - if (this._freeingSpaceLock == 0) { - helpers.log(consts.LOG_TEST, "Completed freeing cache space. Current size: " + this.totalDataSize); - } - } - - /** - * - * @param dir - * @param file - * @param fix - * @returns {number} - */ - _ValidateFile(dir, file, fix) { - if (CacheFS.ShouldIgnoreFile(file)) { - return 0; - } - - // Check file name - var pattern = /^([0-9a-f]{2})([0-9a-f]{30})-([0-9a-f]{32})\.(bin|info|resource)$/i; - var matches = file.match(pattern); - if (matches == null) { - let path = dir ? this.cacheDir + "/" + dir + "/" + file : this.cacheDir + "/" + file; - let msg = "File " + path + " doesn't match valid pattern."; - CacheFS.FixFileIfRequired(path, msg, fix); - return 1; - } - - // Check if first 2 characters of file corresponds to dir - if (matches[1].toLowerCase() != dir.toLowerCase()) { - let path = this.cacheDir + "/" + dir + "/" + file; - let msg = "File " + path + " should not be in dir " + dir + "."; - CacheFS.FixFileIfRequired(path, msg, fix); - return 1; - } - - // Check if bin file exists for info or resource file - if (matches[4].toLowerCase() == "info" || matches[4].toLowerCase() == "resource") { - let checkedPath = this.cacheDir + "/" + dir + "/" + matches[1] + matches[2] + "-" + matches[3] + ".bin"; - if(!fs.existsSync(checkedPath)) { - let path = this.cacheDir + "/" + dir + "/" + file; - let msg = "Missing file " + checkedPath + " for " + path + "."; - CacheFS.FixFileIfRequired(path, msg, fix); - return 1; - } - } - - // Check if info file exists for bin or resource file - if (matches[4].toLowerCase() == "bin" || matches[4].toLowerCase() == "resource") { - let checkedPath = this.cacheDir + "/" + dir + "/" + matches[1] + matches[2] + "-" + matches[3] + ".info"; - if(!fs.existsSync(checkedPath)) { - let path = this.cacheDir + "/" + dir + "/" + file; - let msg = "Missing file " + checkedPath + " for " + path + "."; - CacheFS.FixFileIfRequired(path, msg, fix); - return 1; - } - } - - // check if resource file exists for audio - if (matches[4].toLowerCase() == "info") { - try { - var contents = fs.readFileSync(this.cacheDir + "/" + dir + "/" + file, "ascii"); - if (contents.indexOf("assetImporterClassID: 1020") > 0) { - var checkedPath = this.cacheDir + "/" + dir + "/" + matches[1] + matches[2] + "-" + matches[3] + ".resource"; - if(!fs.existsSync(checkedPath)) { - var path = this.cacheDir + "/" + dir + "/" + file; - var msg = "Missing audio file " + checkedPath + " for " + path + "."; - CacheFS.FixFileIfRequired(path, msg, fix); - path = this.cacheDir + "/" + dir + "/" + matches[1] + matches[2] + "-" + matches[3] + ".bin"; - msg = "Missing audio file " + checkedPath + " for " + path + "."; - CacheFS.FixFileIfRequired(path, msg, fix); - return 2; - } - } - } - catch (e) { - } - } - - return 0; - } - - /** - * - * @param parent - * @param dir - * @param fix - * @returns {number} - */ - _VerifyCacheDirectory(parent, dir, fix) { - let errCount = 0; - - var self = this; - fs.readdirSync(dir).forEach(function (file) { - let path = dir + "/" + file; - let stats = fs.statSync(path); - if (stats.isDirectory()) { - if (!CacheFS.ShouldIgnoreFile(file)) { - let msg = "The path " + path + " does not seem to be a valid cache path."; - CacheFS.FixFileIfRequired(path, msg, fix); - errCount++; - } - else { - if (parent == null) - errCount += self._VerifyCacheDirectory(file, path, fix) - } - } - else if (stats.isFile()) { - errCount += self._ValidateFile(parent, file, fix); - } - }); - - return errCount; - } - - _RenameFileSync(from, to, size, oldSize) { - try { - helpers.log(consts.LOG_DBG, "Rename " + from + " to " + to); - - fs.renameSync(from, to); - - // When replace succeeds. We reduce the cache size by previous file size and increase by new file size. - this._AddFileToCache(size - oldSize); - } - catch (err) { - // When the rename fails. We just delete the temp file. The size of the cache has not changed. - helpers.log(consts.LOG_DBG, "Failed to rename file " + from + " to " + to + " (" + err + ")"); - fs.unlinkSync(from); - } - } - - _AddFileToCache(bytes) { - if (bytes != 0) { - this._totalDataSize += bytes; - helpers.log(consts.LOG_DBG, "Total Cache Size " + this.totalDataSize); - - if (this.totalDataSize > this.maxCacheSize) - this._FreeSpace(this.GetFreeCacheSize()); - } - } - - /** - * @return {number} - */ - GetFreeCacheSize() { - return freeCacheSizeRatio * this.maxCacheSize; - } - - /** - * @return {number} - */ - VerifyCache(fix) { - var numErrs = this._VerifyCacheDirectory(null, this.cacheDir, false); - - if(fix) { - if(cluster.isMaster) { - numErrs = this._VerifyCacheDirectory(null, this.cacheDir, true); - } - else { - // Only the cluster master should manage the cache file system - process.send({ - msg: "CacheFS.cmd", - func: "VerifyCache", - args: [true] - }); - } - } - - return numErrs; - } - - /** - * @return {string} - */ - GetCachePath(guid, hash, extension, create, callback) { - var dir = this.cacheDir + "/" + guid.substring(0, 2); - var path = dir + "/" + guid + "-" + hash + "." + extension; - - if (create) { - fs.mkdir(dir, 0o777, function(err) { - if(!callback) return; - - if (err && err.code !== 'EEXIST') { - callback(err); - } - else { - callback(null, path); - } - }); - } - else if(callback) { - process.nextTick(callback.bind(null, null, path)); - } - - if(!callback) return path; - } - - GetCacheFileStream(cachePath) { - var file = fs.createReadStream(cachePath); - - file.on('close', function () { - try { - // Touch the file, so that it becomes the newest accessed file for LRU cleanup - utimes expects a Unix timestamp in seconds, Date.now() returns millis - let dateNow = Date.now() / 1000; - helpers.log(consts.LOG_DBG, "Updating mtime of " + cachePath + " to: " + dateNow); - fs.utimesSync(cachePath, dateNow, dateNow); - } - catch (err) { - helpers.log(consts.LOG_ERR, "Failed to update mtime of " + cachePath + ": " + err); - } - }); - - file.on('open', function (fd) { - fs.fstat(fd, function (err, stats) { - file.emit('cache_open', err, stats.size); - }); - }); - - return file; - } - - ReplaceFile(from, to, size) { - // Only the cluster master should manage the cache file system - if(!cluster.isMaster) { - process.send({ - msg: "CacheFS.cmd", - func: "ReplaceFile", - args: [from, to, size] - }); - - return; - } - - var stats = {}; - try { - stats = fs.statSync(to); - - // We are replacing a file, we need to subtract this from the totalFileSize - var oldSize = stats.size; - - try { - fs.unlinkSync(to); - - // When delete succeeds. We rename the file.. - this._RenameFileSync(from, to, size, oldSize); - } - catch (err) { - // When the delete fails. We just delete the temp file. The size of the cache has not changed. - helpers.log(consts.LOG_DBG, "Failed to delete file " + to + " (" + err + ")"); - fs.unlinkSync(from); - } - } - catch (err) { - this._RenameFileSync(from, to, size, 0); - } - } - - RegisterClusterWorker(worker) { - var self = this; - worker.on('message', function(msg) { - if(msg.msg && msg.msg === 'CacheFS.cmd') { - self[msg.func].apply(self, msg.args); - } - }); - } -} - -module.exports = CacheFS; \ No newline at end of file diff --git a/lib/client/server_response_transform.js b/lib/client/server_response_transform.js index 4894df8..cd3b166 100644 --- a/lib/client/server_response_transform.js +++ b/lib/client/server_response_transform.js @@ -1,33 +1,65 @@ -const assert = require('assert'); const helpers = require('./../helpers'); const consts = require('./../constants').Constants; -const BaseTransform = require('../base_protocol_transform').Transform; +const { Transform } = require('stream'); -class CacheServerResponseTransform extends BaseTransform { +const MAX_HEADER_SIZE = consts.ID_SIZE; + +class CacheServerResponseTransform extends Transform { constructor() { super(); + this.headerBuf = Buffer.allocUnsafe(MAX_HEADER_SIZE); + this._init(); } _init() { - super._init(); + this.didReadHeader = false; + this.headerData = {}; this.headerBufPos = 0; this.blobBytesRead = 0; this.doReadSize = false; this.doReadId = false; } + _transform(data, encoding, callback) { + if(this.didReadHeader) { + this._sendData(data, callback); + } + else { + this._emitHeader(data, callback); + } + } + + _sendData(data, callback) { + const len = Math.min(this.headerData.size - this.blobBytesRead, data.length); + this.blobBytesRead += len; + + if(len >= data.length) { + this.push(data); + callback(); + } + else { + this.push(data.slice(0, len)); + this._emitHeader(data.slice(len), callback); + } + + if(this.blobBytesRead === this.headerData.size) { + this._init(); + this.emit('dataEnd'); + } + } + _emitHeader(data, callback) { - var self = this; - var dataPos = 0; - + const self = this; + let dataPos = 0; + function fillBufferWithData(fillToPos) { - var maxLen = fillToPos - self.headerBufPos; - var toCopy = Math.min(data.length, maxLen); + const maxLen = fillToPos - self.headerBufPos; + const toCopy = Math.min(data.length, maxLen); data.copy(self.headerBuf, self.headerBufPos, dataPos, dataPos + toCopy); dataPos += toCopy; self.headerBufPos += toCopy; - if(fillToPos == self.headerBufPos) { + if(fillToPos === self.headerBufPos) { self.headerBufPos = 0; return true; } @@ -52,7 +84,7 @@ class CacheServerResponseTransform extends BaseTransform { // Read command if(!didRead('cmd') && fillBufferWithData(consts.CMD_SIZE)) { - var cmd = this.headerBuf.slice(0, consts.CMD_SIZE).toString('ascii'); + const cmd = this.headerBuf.slice(0, consts.CMD_SIZE).toString('ascii'); this.headerData.cmd = cmd; switch(cmd[0]) { case '+': // file found diff --git a/lib/helpers.js b/lib/helpers.js index b3918e9..3b107b3 100644 --- a/lib/helpers.js +++ b/lib/helpers.js @@ -3,13 +3,13 @@ const consts = require("./constants").Constants; const crypto = require('crypto'); const os = require('os'); -var logLevel = consts.LOG_TEST; +let logLevel = consts.LOG_TEST; /** * @returns {string} */ function zeroPad(len, str) { - for (var i = len - str.length; i > 0; i--) { + for (let i = len - str.length; i > 0; i--) { str = '0' + str; } @@ -44,29 +44,11 @@ exports.readUInt64 = function(input) { return parseInt(input.toString('ascii', 0, consts.UINT64_SIZE), 16); }; -/** - * @returns {string} - */ -exports.readHex = function(len, data) { - var res = ''; - var tmp; - for (var i = 0; i < len; i++) { - tmp = data[i]; - tmp = ( (tmp & 0x0F) << 4) | ( (tmp >> 4) & 0x0F ); - res += tmp < 0x10 ? '0' + tmp.toString(16) : tmp.toString(16); - } - return res; -}; - -exports.generateTempDir = function() { - return os.tmpdir() + "/" + crypto.randomBytes(32).toString('hex'); -}; - function DefaultLogger(lvl, msg) { if (logLevel < lvl) return; - var prefix = cluster.isMaster ? "[Cluster:M] " : `[Cluster:${cluster.worker.id}] `; + const prefix = cluster.isMaster ? "[Cluster:M] " : `[Cluster:${cluster.worker.id}] `; console.log(prefix + msg); } diff --git a/lib/server.js b/lib/server.js index cde8f6d..9cd593f 100644 --- a/lib/server.js +++ b/lib/server.js @@ -1,30 +1,14 @@ +/** + * Created by spalmer on 10/16/17. + */ 'use strict'; const cluster = require('cluster'); const net = require('net'); const fs = require('fs'); const consts = require('./constants').Constants; const helpers = require('./helpers'); - -const CMD_QUIT = 'q'.charCodeAt(0); - -const CMD_GET = 'g'.charCodeAt(0); -const CMD_PUT = 'p'.charCodeAt(0); -const CMD_GETOK = '+'.charCodeAt(0); -const CMD_GETNOK = '-'.charCodeAt(0); - -const TYPE_ASSET = 'a'.charCodeAt(0); -const TYPE_INFO = 'i'.charCodeAt(0); -const TYPE_RESOURCE = 'r'.charCodeAt(0); - -const CMD_TRX = 't'.charCodeAt(0); -const TRX_START = 's'.charCodeAt(0); -const TRX_END = 'e'.charCodeAt(0); - -const CMD_INTEGRITY = 'i'.charCodeAt(0); -const CMD_CHECK = 'c'.charCodeAt(0); -const OPT_VERIFY = 'v'.charCodeAt(0); -const OPT_FIX = 'f'.charCodeAt(0); - +const ClientStreamProcessor = require('./server/client_stream_processor'); +const CommandProcessor = require('./server/command_processor'); class CacheServer { constructor(cache, port) { @@ -49,481 +33,32 @@ class CacheServer { return this._server; } - static uuid() { - return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, - function (c) { - var r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8); - return v.toString(16); - }); - } - - _HandleData(socket, data) { - var self = this; - - // There is pending data, add it to the data buffer - if (socket.pendingData != null) { - let buf = new Buffer(data.length + socket.pendingData.length); - socket.pendingData.copy(buf, 0, 0); - data.copy(buf, socket.pendingData.length, 0); - data = buf; - socket.pendingData = null; - } - - while (true) { - // Get the version as the first thing - var idx = 0; - if (!socket.protocolVersion) { - if (data.length < consts.PROTOCOL_VERSION_MIN_SIZE) - { - // We need more data - socket.pendingData = data; - return false; - } - - socket.protocolVersion = helpers.readUInt32(data); - let buf = Buffer.allocUnsafe(consts.UINT32_SIZE); - if (socket.protocolVersion == consts.PROTOCOL_VERSION) { - helpers.log(consts.LOG_INFO, "Client protocol version " + socket.protocolVersion); - buf.write(helpers.encodeInt32(socket.protocolVersion)); - if (socket.isActive) - socket.write(buf); - data = data.slice(consts.UINT32_SIZE); - } - else { - helpers.log(consts.LOG_ERR, "Bad Client protocol version"); - buf.write(helpers.encodeInt32(0)); - if (socket.isActive) - socket.write(buf); - socket.end(); - socket.forceQuit = true; - return false; - } - } - - // Write a a file to a temp location and move it in place when it has completed - if (socket.activePutFile != null) { - let size = data.length; - if (size > socket.bytesToBeWritten) { - size = socket.bytesToBeWritten; - } - socket.activePutFile.write(data.slice(0, size), "binary"); - socket.bytesToBeWritten -= size; - - // If we have written all data for this file. We can close the file. - if (socket.bytesToBeWritten <= 0) { - socket.activePutFile.end(function () { - socket.targets.push({ - from: socket.tempPath, - to: socket.activePutTarget, - size: socket.totalFileSize - }); - socket.tempPath = null; - socket.activePutTarget = null; - socket.totalFileSize = 0; - if (socket.isActive) { - socket.resume(); - - // It's possible to have already processed a 'te' (transaction end) event before this callback is called. - // Call _HandleData again to ensure the 'te' event is re-processed now that we finished - // saving this file - if (socket.inTransaction) - self._HandleData(socket, Buffer.from([])); - } - }); - socket.activePutFile = null; - - data = data.slice(size); - continue; - } - - // We need more data to write the file completely - // Return and wait for the next call to _HandleData to receive more data. - return true; - } - - if (data.length == 0) { - // No more data - return false; - } - - if (data[idx] == CMD_QUIT) { - socket.end(); - socket.forceQuit = true; - return false; - } - - if (data[idx] == CMD_GET) { - if (data.length < consts.CMD_SIZE + consts.ID_SIZE) { - socket.pendingData = data; - return true; - } - idx += 1; - - - let reqType = data[idx]; - - idx += 1; - var guid = helpers.readHex(consts.GUID_SIZE, data.slice(idx)); - var hash = helpers.readHex(consts.HASH_SIZE, data.slice(idx + consts.GUID_SIZE)); - - var resbuf = Buffer.allocUnsafe(consts.CMD_SIZE + consts.UINT64_SIZE + consts.ID_SIZE); - data.copy(resbuf, consts.CMD_SIZE + consts.UINT64_SIZE, idx, idx + consts.ID_SIZE); // copy guid + hash - - if (reqType == TYPE_ASSET) { - helpers.log(consts.LOG_TEST, "Get Asset Binary " + guid + "/" + hash); - socket.getFileQueue.unshift({ - buffer: resbuf, - type: TYPE_ASSET, - cacheStream: this.cache.GetCachePath(guid, hash, 'bin', false) - }); - } - else if (reqType == TYPE_INFO) { - helpers.log(consts.LOG_TEST, "Get Asset Info " + guid + "/" + hash); - socket.getFileQueue.unshift({ - buffer: resbuf, - type: TYPE_INFO, - cacheStream: this.cache.GetCachePath(guid, hash, 'info', false) - }); - } - else if (reqType == TYPE_RESOURCE) { - helpers.log(consts.LOG_TEST, "Get Asset Resource " + guid + "/" + hash); - socket.getFileQueue.unshift({ - buffer: resbuf, - type: TYPE_RESOURCE, - cacheStream: this.cache.GetCachePath(guid, hash, 'resource', false) - }); - } - else { - helpers.log(consts.LOG_ERR, "Invalid data receive"); - socket.destroy(); - return false; - } - - if (!socket.activeGetFile) { - self._SendNextGetFile(socket); - } - - data = data.slice(idx + consts.ID_SIZE); - continue; - } - - // handle a transaction - else if (data[idx] == CMD_TRX) { - if (data.length < consts.CMD_SIZE) { - socket.pendingData = data; - return true; - } - idx += 1; - - if (data[idx] == TRX_START) { - if (data.length < consts.CMD_SIZE + consts.ID_SIZE) { - socket.pendingData = data; - return true; - } - - // Error: The previous transaction was not completed - if (socket.inTransaction) { - helpers.log(consts.LOG_DBG, "Cancel previous transaction"); - for (var i = 0; i < socket.targets.length; i++) { - fs.unlinkSync(socket.targets[i].from); - } - } - - idx += 1; - - socket.targets = []; - socket.inTransaction = true; - socket.currentGuid = helpers.readHex(consts.GUID_SIZE, data.slice(idx)); - socket.currentHash = helpers.readHex(consts.HASH_SIZE, data.slice(idx + consts.GUID_SIZE)); - - helpers.log(consts.LOG_DBG, "Start transaction for " + socket.currentGuid + "-" + socket.currentHash); - - data = data.slice(idx + consts.ID_SIZE); - continue; - } - else if (data[idx] == TRX_END) { - if (!socket.inTransaction) { - helpers.log(consts.LOG_ERR, "Invalid transaction isolation"); - socket.destroy(); - return false; - } - - // We have not completed writing the previous file - if (socket.activePutTarget != null) { - // Keep the data in pending for the next _HandleData call - if (socket.isActive) - socket.pause(); - socket.pendingData = data; - return true; - } - - idx += 1; - - helpers.log(consts.LOG_DBG, "End transaction for " + socket.currentGuid + "-" + socket.currentHash); - for (let i = 0; i < socket.targets.length; i++) { - this.cache.ReplaceFile(socket.targets[i].from, socket.targets[i].to, socket.targets[i].size); - } - - socket.targets = []; - socket.inTransaction = false; - socket.currentGuid = null; - socket.currentHash = null; - - data = data.slice(idx); - - continue; - } - else { - helpers.log(consts.LOG_ERR, "Invalid data receive"); - socket.destroy(); - return false; - } - } - // Put a file from the client to the cache server - else if (data[idx] == CMD_PUT) { - if (!socket.inTransaction) { - helpers.log(consts.LOG_ERR, "Not in a transaction"); - socket.destroy(); - return false; - } - - // We have not completed writing the previous file - if (socket.activePutTarget != null) { - // Keep the data in pending for the next _HandleData call - if (socket.isActive) - socket.pause(); - socket.pendingData = data; - return true; - } - - /// * We don't have enough data to start the put request. (wait for more data) - if (data.length < consts.CMD_SIZE + consts.UINT64_SIZE) { - socket.pendingData = data; - return true; - } - - idx += 1; - - var reqType = data[idx]; - - idx += 1; - var size = helpers.readUInt64(data.slice(idx)); - - if (reqType == TYPE_ASSET) { - helpers.log(consts.LOG_TEST, "Put Asset Binary " + socket.currentGuid + "-" + socket.currentHash + " (size " + size + ")"); - socket.activePutTarget = this.cache.GetCachePath(socket.currentGuid, socket.currentHash, 'bin', true); - } - else if (reqType == TYPE_INFO) { - helpers.log(consts.LOG_TEST, "Put Asset Info " + socket.currentGuid + "-" + socket.currentHash + " (size " + size + ")"); - socket.activePutTarget = this.cache.GetCachePath(socket.currentGuid, socket.currentHash, 'info', true); - } - else if (reqType == TYPE_RESOURCE) { - helpers.log(consts.LOG_TEST, "Put Asset Resource " + socket.currentGuid + "-" + socket.currentHash + " (size " + size + ")"); - socket.activePutTarget = this.cache.GetCachePath(socket.currentGuid, socket.currentHash, 'resource', true); - } - else { - helpers.log(consts.LOG_ERR, "Invalid data receive"); - socket.destroy(); - return false; - } - - socket.tempPath = this.cache.cacheDir + "/Temp" + CacheServer.uuid(); - socket.activePutFile = fs.createWriteStream(socket.tempPath); - - socket.activePutFile.on('error', function (err) { - helpers.log(consts.LOG_ERR, "Error writing to file " + err + ". Possibly the disk is full? Please adjust --cacheSize with a more accurate maximum cache size"); - socket.destroy(); - return false; - }); - - socket.bytesToBeWritten = size; - socket.totalFileSize = size; - - data = data.slice(idx + consts.UINT64_SIZE); - continue; - } - - // handle check integrity - else if (data[idx] == CMD_INTEGRITY) { - if (data.length < consts.CMD_SIZE + 1) { - socket.pendingData = data; - return true; - } - idx += 1; - - if (socket.inTransaction) { - helpers.log(consts.LOG_ERR, "In a transaction"); - socket.destroy(); - return false; - } - - if (data[idx] == CMD_CHECK && (data[idx + 1] == OPT_VERIFY || data[idx + 1] == OPT_FIX)) { - var fixIt = (data[idx + 1] == OPT_FIX); - - helpers.log(consts.LOG_DBG, "Cache Server integrity check (" + (fixIt ? "fix it" : "verify only") + ")"); - let verificationNumErrors = this.cache.VerifyCache(fixIt); - if (fixIt) - helpers.log(consts.LOG_DBG, "Cache Server integrity fix " + verificationNumErrors + " issue(s)"); - else - helpers.log(consts.LOG_DBG, "Cache Server integrity found " + verificationNumErrors + " error(s)"); - - var buf = Buffer.allocUnsafe(consts.CMD_SIZE + consts.UINT64_SIZE); - buf[0] = CMD_INTEGRITY; - buf[1] = CMD_CHECK; - - buf.slice(consts.CMD_SIZE).write(helpers.encodeInt64(verificationNumErrors)); - if (socket.isActive) - socket.write(buf); - - idx += 2; - } - else { - helpers.log(consts.LOG_ERR, "Invalid data receive"); - socket.destroy(); - return false; - } - } - - // We need more data to write the file completely - return true; - } - } - - _SendNextGetFile(socket) { - var self = this; - - if (socket.getFileQueue.length == 0) { - socket.activeGetFile = null; - return; - } - - if (socket.isActive) - socket.resume(); - - var next = socket.getFileQueue.pop(); - var resbuf = next.buffer; - var type = next.type; - var file = fs.createReadStream(next.cacheStream); - // make sure no data is read and lost before we have called file.pipe (). - file.pause(); - socket.activeGetFile = file; - var errfunc = function () { - var buf = Buffer.allocUnsafe(consts.CMD_SIZE + consts.ID_SIZE); - buf[0] = CMD_GETNOK; - buf[1] = type; - resbuf.copy(buf, consts.CMD_SIZE, consts.CMD_SIZE + consts.UINT64_SIZE, consts.CMD_SIZE + consts.UINT64_SIZE + consts.ID_SIZE); - try { - socket.write(buf); - } - catch (err) { - helpers.log(consts.LOG_ERR, "Error sending file data to socket " + err); - } - finally { - if (socket.isActive) { - self._SendNextGetFile(socket); - } - else { - helpers.log(consts.LOG_ERR, "Socket closed, close active file"); - file.close(); - } - } - }; - - file.on('close', function () { - socket.activeGetFile = null; - if (socket.isActive) { - self._SendNextGetFile(socket); - } - - try { - // Touch the file, so that it becomes the newest accessed file for LRU cleanup - utimes expects a Unix timestamp in seconds, Date.now() returns millis - let dateNow = Date.now() / 1000; - helpers.log(consts.LOG_DBG, "Updating mtime of " + next.cacheStream + " to: " + dateNow); - fs.utimesSync(next.cacheStream, dateNow, dateNow); - } - catch (err) { - helpers.log(consts.LOG_ERR, "Failed to update mtime of " + next.cacheStream + ": " + err); - } - }); - - file.on('open', function (fd) { - fs.fstat(fd, function (err, stats) { - if (err) - errfunc(err); - else { - resbuf[0] = CMD_GETOK; - resbuf[1] = type; - - helpers.log(consts.LOG_TEST, "Found: " + next.cacheStream + " size:" + stats.size); - resbuf.slice(consts.CMD_SIZE).write(helpers.encodeInt64(stats.size)); - - // The ID is already written - try { - socket.write(resbuf); - file.resume(); - file.pipe(socket, {end: false}); - } - catch (err) { - helpers.log(consts.LOG_ERR, "Error sending file data to socket " + err + ", close active file"); - file.close(); - } - } - }); - }); - - file.on('error', errfunc); - } - /** * start the cache server * * @param errCallback error callback (optional) + * @param callback */ Start(errCallback, callback) { - var self = this; + const self = this; this._server = net.createServer(function (socket) { - socket.getFileQueue = []; - socket.protocolVersion = null; - socket.activePutFile = null; - socket.activeGetFile = null; - socket.activePutTarget = null; - socket.pendingData = null; - socket.bytesToBeWritten = 0; - socket.totalFileSize = 0; - socket.isActive = true; - socket.targets = []; - socket.inTransaction = false; - socket.currentGuid = null; - socket.currentHash = null; - socket.forceQuit = false; - - socket.on('data', function (data) { - socket.isActive = true; - self._HandleData(socket, data); - }); - - socket.on('close', function (had_errors) { - helpers.log(consts.LOG_ERR, "Socket closed"); - socket.isActive = false; - var checkFunc = function () { - var data = new Buffer(0); - if (self._HandleData(socket, data)) { - setTimeout(checkFunc, 1); - } - }; + socket + .on('close', function () { + helpers.log(consts.LOG_ERR, "Socket closed"); + }) + .on('error', function (err) { + helpers.log(consts.LOG_ERR, "Socket error " + err); + }); - if (!had_errors && !socket.forceQuit) - checkFunc(); - }); + const clientStreamProcessor = new ClientStreamProcessor(); + const commandProcessor = new CommandProcessor(clientStreamProcessor, self.cache); - socket.on('error', function (err) { - helpers.log(consts.LOG_ERR, "Socket error " + err); - }); + socket.pipe(clientStreamProcessor).pipe(commandProcessor).pipe(socket); }); this._server.on('error', function (e) { - if (e.code == 'EADDRINUSE') { + if (e.code === 'EADDRINUSE') { helpers.log(consts.LOG_ERR, 'Port ' + self.port + ' is already in use...'); if (errCallback && typeof(errCallback === 'function')) { errCallback(e); } } diff --git a/lib/server/client_stream_processor.js b/lib/server/client_stream_processor.js index 0bf451a..86e83dc 100644 --- a/lib/server/client_stream_processor.js +++ b/lib/server/client_stream_processor.js @@ -20,7 +20,7 @@ class ClientStreamProcessor extends Transform { } _registerEventListeners() { - var self = this; + const self = this; this.on('pipe', function(src) { self[kSource] = src; @@ -38,30 +38,29 @@ class ClientStreamProcessor extends Transform { didReadSize: false, doReadId: false, didReadId: false, - doReadIntegrityType: false, - didReadIntegrityType: false, dataPassThrough: false, dataSize: 0, headerBufPos: 0, + headerSize : consts.CMD_SIZE, dataBytesRead: 0 }; } - static errorCodes() { + static get errorCodes() { return { quitError: { msg: "Client quit" } } } _transform(data, encoding, callback) { - while(data.length > 0 && this.errState === null) { + while(data !== null && data.length > 0 && this.errState === null) { if (this.readState.dataPassThrough) data = this._sendData(data); else data = this._sendCommands(data); if(this.errState !== null) { - this.emit('error', this.errState); + helpers.log(consts.LOG_ERR, this.errState.msg); } } @@ -69,11 +68,11 @@ class ClientStreamProcessor extends Transform { } _sendData(data) { - var len = Math.min(this.readState.dataSize - this.readState.dataBytesRead, data.length); + const len = Math.min(this.readState.dataSize - this.readState.dataBytesRead, data.length); this.push(data.slice(0, len)); this.readState.dataBytesRead += len; - if(this.readState.dataBytesRead == this.readState.dataSize) { + if(this.readState.dataBytesRead === this.readState.dataSize) { this._init(); } @@ -81,19 +80,24 @@ class ClientStreamProcessor extends Transform { } _sendCommands(data) { - var self = this; - var dataPos = 0; + const self = this; + let dataPos = 0; function fillBufferWithData(size) { if(dataPos >= data.length) return false; - var toCopy = Math.min(size, data.length - dataPos); + // Only copy as much as we need for the remaining header size + size = Math.min(self.readState.headerSize - self.readState.headerBufPos); + + // Don't copy past the remaining bytes in the data block + const toCopy = Math.min(size, data.length - dataPos); + data.copy(self.headerBuf, self.readState.headerBufPos, dataPos, dataPos + toCopy); dataPos += toCopy; self.readState.headerBufPos += toCopy; - return toCopy === size; + return self.readState.headerBufPos === self.readState.headerSize; } function isDone() { @@ -101,7 +105,7 @@ class ClientStreamProcessor extends Transform { } if(!this.didReadVersion) { - var verSize = Math.max(consts.VERSION_SIZE, Math.min(consts.PROTOCOL_VERSION_MIN_SIZE, data.length)); + const verSize = Math.max(consts.VERSION_SIZE, Math.min(consts.PROTOCOL_VERSION_MIN_SIZE, data.length)); dataPos += verSize; this.didReadVersion = true; @@ -109,35 +113,37 @@ class ClientStreamProcessor extends Transform { } while(!isDone()) { - // Quit? - if (data[dataPos] === CMD_QUIT) { - this.push(CMD_QUIT); - this.errState = this.errorCodes.quitError; - break; - } - // Read command if (!this.readState.didReadCmd) { - if(!fillBufferWithData(consts.CMD_SIZE)) + if(!fillBufferWithData(consts.CMD_SIZE)) { + + // Quit? + if (data[data.length - 1] === CMD_QUIT) { + this.push('q'); + this.errState = ClientStreamProcessor.errorCodes.quitError; + } + break; + } this.readState.didReadCmd = true; - var cmd = this.headerBuf.slice(0, consts.CMD_SIZE).toString('ascii'); + const cmd = this.headerBuf.slice(0, consts.CMD_SIZE).toString('ascii'); switch (cmd[0]) { case 'g': // get this.readState.doReadId = true; + this.readState.headerSize += consts.ID_SIZE; break; case 'p': // put this.readState.doReadSize = true; - break; - case 'i': // integrity check - this.readState.doReadIntegrityType = true; + this.readState.headerSize += consts.SIZE_SIZE; break; case 't': // transaction - if(cmd[1] == 's') + if(cmd[1] === 's') { this.readState.doReadId = true; + this.readState.headerSize += consts.ID_SIZE; + } break; default: @@ -164,23 +170,15 @@ class ClientStreamProcessor extends Transform { this.readState.didReadId = true; } - // Read extra - if (this.readState.doReadIntegrityType && !this.readState.didReadIntegrityType) { - if(!fillBufferWithData(1)) - break; - - this.readState.didReadIntegrityType = true; - } - this.push(Buffer.from(this.headerBuf.slice(0, this.readState.headerBufPos))); - + if(!this.readState.dataPassThrough) this._init(); else break; } - return dataPos < data.length ? data.slice(dataPos) : Buffer.from([]); + return dataPos < data.length ? data.slice(dataPos) : null; } } diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index 823a3a2..4a95890 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -28,8 +28,8 @@ class CommandProcessor extends Transform { } _transform(chunk, encoding, callback) { - var handler = null; - var self = this; + let handler = null; + const self = this; switch(this._readState) { case kReadStateVersion: @@ -64,7 +64,7 @@ class CommandProcessor extends Transform { } _sendFile(task, callback) { - var self = this; + const self = this; this[kCache].getFileStream(task.type, task.guid, task.hash, function(err, result) { if(err || result === null) { @@ -80,8 +80,8 @@ class CommandProcessor extends Transform { result.stream .on('readable', function() { - var chunk; - while((chunk = result.stream.read()) != null) { + let chunk; + while((chunk = result.stream.read()) !== null) { self.push(chunk); } }) @@ -96,22 +96,21 @@ class CommandProcessor extends Transform { } _handleVersion(data, callback) { - var version = helpers.readUInt32(data); + let version = helpers.readUInt32(data); this._readState = kReadStateCommand; - this.push(helpers.encodeInt32(version)); - + let err = null; if(version !== consts.PROTOCOL_VERSION) { version = 0; - callback(new Error("Bad Client protocol version")); - } - else { - callback(null); + err = new Error("Bad Client protocol version"); } + + this.push(helpers.encodeInt32(version)); + callback(null, err); } _handleWrite(data, callback) { - var self = this; - + const self = this; + this._putStream.write(data, 'ascii', function() { self._putSent += data.length; if(self._putSent === self._putSize) { @@ -125,7 +124,7 @@ class CommandProcessor extends Transform { } _handleCommand(data, callback) { - var cmd, size, type, guid, hash = null; + let cmd, size, type, guid, hash = null; if(data.length > 1) { cmd = data.slice(0, 2).toString('ascii'); type = cmd[1]; @@ -166,12 +165,6 @@ class CommandProcessor extends Transform { case 'pr': this._onPut(type, size, callback); break; - case 'icf': - this._onIntegrityCheck(true, callback); - break; - case 'icv': - this._onIntegrityCheck(false, callback); - break; default: callback(new Error("Unrecognized command '" + cmd + "'")); } @@ -188,10 +181,11 @@ class CommandProcessor extends Transform { } _onTransactionStart(guid, hash, callback) { - var self = this; + const self = this; if(this._trx !== null) { - return callback(new Error("Already in a transaction")); + helpers.log(consts.LOG_DBG, "Cancel previous transaction"); + this._trx = null; } this[kCache].createPutTransaction(guid, hash, function(err, trx) { @@ -199,26 +193,28 @@ class CommandProcessor extends Transform { return callback(err); } + helpers.log(consts.LOG_DBG, "Start transaction for " + guid.toString('hex') + "-" + hash.toString('hex')); self._trx = trx; callback(null); }); } _onTransactionEnd(callback) { - var self = this; + const self = this; if(!this._trx) { - return callback(new Error("Not in a transaction")); + return callback(new Error("Invalid transaction isolation")); } this[kCache].endPutTransaction(this._trx, function(err) { + helpers.log(consts.LOG_DBG, "End transaction for " + self._trx.guid.toString('hex') + "-" + self._trx.hash.toString('hex')); self._trx = null; callback(err); }); } _onPut(type, size, callback) { - var self = this; + const self = this; if(!this._trx) { return callback(new Error("Not in a transaction")); @@ -235,20 +231,6 @@ class CommandProcessor extends Transform { callback(null); }); } - - _onIntegrityCheck(doFix, callback) { - var self = this; - - this[kCache].integrityCheck(doFix, function(err, numErrs) { - if(err) { - return callback(err); - } - - self.push('ic'); - self.push(helpers.encodeInt64(numErrs)); - callback(null); - }) - } } module.exports = CommandProcessor; \ No newline at end of file diff --git a/lib/server_v2.js b/lib/server_v2.js deleted file mode 100644 index 4f38214..0000000 --- a/lib/server_v2.js +++ /dev/null @@ -1,73 +0,0 @@ -/** - * Created by spalmer on 10/16/17. - */ -'use strict'; -const cluster = require('cluster'); -const net = require('net'); -const fs = require('fs'); -const consts = require('./constants').Constants; -const helpers = require('./helpers'); -const ClientStreamProcessor = require('./server/client_stream_processor'); -const CommandProcessor = require('./server/command_processor'); - -class CacheServer { - constructor(cache, port) { - this._cache = cache; - this._port = parseInt(port); - if (!port && port !== 0) - this._port = consts.DEFAULT_PORT; - this._sever = null; - } - - get port() { - return this._server && this._server.listening - ? this._server.address().port - : this._port; - } - - get cache() { - return this._cache; - } - - get server() { - return this._server; - } - - /** - * start the cache server - * - * @param errCallback error callback (optional) - * @param callback - */ - Start(errCallback, callback) { - var self = this; - - this._server = net.createServer(function (socket) { - socket - .on('close', function () { - helpers.log(consts.LOG_ERR, "Socket closed"); - }) - .on('error', function (err) { - helpers.log(consts.LOG_ERR, "Socket error " + err); - }); - - var clientStreamProcessor = new ClientStreamProcessor(); - var commandProcessor = new CommandProcessor(clientStreamProcessor, self.cache); - - socket.pipe(clientStreamProcessor).pipe(commandProcessor).pipe(socket); - }); - - this._server.on('error', function (e) { - if (e.code == 'EADDRINUSE') { - helpers.log(consts.LOG_ERR, 'Port ' + self.port + ' is already in use...'); - if (errCallback && typeof(errCallback === 'function')) { errCallback(e); } - } - }); - - this._server.listen(this._port, function() { - if(callback && typeof(callback) === 'function') { callback(); } - }); - }; -} - -module.exports = CacheServer; \ No newline at end of file diff --git a/main.js b/main.js index 44043f9..9ae4f5a 100644 --- a/main.js +++ b/main.js @@ -3,7 +3,7 @@ const helpers = require('./lib/helpers'); const consts = require('./lib/constants').Constants; const program = require('commander'); const path = require('path'); -const CacheServer = require('./lib/server_v2'); +const CacheServer = require('./lib/server'); const config = require('config'); function myParseInt(val, def) { @@ -34,11 +34,11 @@ program.description("Unity Cache Server") helpers.SetLogLevel(program.logLevel); // Initialize cache -var cache; +let cache; try { - var moduleName = config.get("Cache.module"); - var modulePath = path.resolve(config.get("Cache.path"), moduleName); + const moduleName = config.get("Cache.module"); + const modulePath = path.resolve(config.get("Cache.path"), moduleName); helpers.log(consts.LOG_INFO, "Loading Cache module at " + modulePath); const Cache = require(modulePath); cache = new Cache(); @@ -50,7 +50,7 @@ catch(e) { if (program.verify || program.fix) { console.log("Verifying integrity of Cache Server directory " + program.path); - var numErrors = cache.VerifyCache(program.fix); + const numErrors = cache.VerifyCache(program.fix); console.log("Cache Server directory contains " + numErrors + " integrity issue(s)"); if (program.fix) console.log("Cache Server directory integrity fixed."); @@ -78,12 +78,12 @@ if (program.monitorParentProcess > 0) { monitor(); } -var errHandler = function () { +const errHandler = function () { helpers.log(consts.LOG_ERR, "Unable to start Cache Server"); process.exit(1); }; -var server = new CacheServer(cache, program.port); +const server = new CacheServer(cache, program.port); if(cluster.isMaster) { helpers.log(consts.LOG_INFO, "Cache Server version " + consts.VERSION); @@ -95,7 +95,7 @@ if(cluster.isMaster) { } for(let i = 0; i < program.workers; i++) { - var worker = cluster.fork(); + const worker = cluster.fork(); cache.registerClusterWorker(worker); } } diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000..1367c0d --- /dev/null +++ b/package-lock.json @@ -0,0 +1,1447 @@ +{ + "name": "Fast-CacheServer", + "version": "6.0.0", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "argparse": { + "version": "https://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz", + "integrity": "sha1-c9g7wmP4bpf4zE9rrhsOkKfSLIY=", + "requires": { + "sprintf-js": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz" + } + }, + "async": { + "version": "https://registry.npmjs.org/async/-/async-2.5.0.tgz", + "integrity": "sha1-hDGQ/WtzV6C54clW7d3V7IRitU0=", + "requires": { + "lodash": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz" + } + }, + "balanced-match": { + "version": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "brace-expansion": { + "version": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz", + "integrity": "sha1-wHshHHyVLsH479Uad+8NHTmQopI=", + "dev": true, + "requires": { + "balanced-match": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "concat-map": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + } + }, + "browser-stdout": { + "version": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.0.tgz", + "integrity": "sha1-81HTKWnTL6XXpVZxVCY9korjvR8=", + "dev": true + }, + "commander": { + "version": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", + "integrity": "sha1-FXFS/R56bI2YpbcVzzdt+SgARWM=" + }, + "concat-map": { + "version": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "config": { + "version": "https://registry.npmjs.org/config/-/config-1.27.0.tgz", + "integrity": "sha1-OrMNAID/dvQHwvR6wTJq39kIr18=", + "requires": { + "json5": "https://registry.npmjs.org/json5/-/json5-0.4.0.tgz", + "os-homedir": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz" + } + }, + "coveralls": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/coveralls/-/coveralls-3.0.0.tgz", + "integrity": "sha1-Iu9zAzBTgIDSm4wVHckUav3oipk=", + "dev": true, + "requires": { + "js-yaml": "3.10.0", + "lcov-parse": "0.0.10", + "log-driver": "1.2.5", + "minimist": "1.2.0", + "request": "2.83.0" + }, + "dependencies": { + "js-yaml": { + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.10.0.tgz", + "integrity": "sha1-LnhEFka9RoLpY/IrbpKCPDCcYtw=", + "dev": true, + "requires": { + "argparse": "1.0.9", + "esprima": "4.0.0" + }, + "dependencies": { + "argparse": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz", + "integrity": "sha1-c9g7wmP4bpf4zE9rrhsOkKfSLIY=", + "dev": true, + "requires": { + "sprintf-js": "1.0.3" + }, + "dependencies": { + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + } + } + }, + "esprima": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz", + "integrity": "sha1-RJnt3NERDgshi6zy+n9/WfVcqAQ=", + "dev": true + } + } + }, + "lcov-parse": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/lcov-parse/-/lcov-parse-0.0.10.tgz", + "integrity": "sha1-GwuP+ayceIklBYK3C3ExXZ2m2aM=", + "dev": true + }, + "log-driver": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.5.tgz", + "integrity": "sha1-euTsJXMC/XkNVXyxDJcQDYV7AFY=", + "dev": true + }, + "minimist": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", + "dev": true + }, + "request": { + "version": "2.83.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.83.0.tgz", + "integrity": "sha1-ygtl2gLtYpNYh4COb1EDgQNOM1Y=", + "dev": true, + "requires": { + "aws-sign2": "0.7.0", + "aws4": "1.6.0", + "caseless": "0.12.0", + "combined-stream": "1.0.5", + "extend": "3.0.1", + "forever-agent": "0.6.1", + "form-data": "2.3.1", + "har-validator": "5.0.3", + "hawk": "6.0.2", + "http-signature": "1.2.0", + "is-typedarray": "1.0.0", + "isstream": "0.1.2", + "json-stringify-safe": "5.0.1", + "mime-types": "2.1.17", + "oauth-sign": "0.8.2", + "performance-now": "2.1.0", + "qs": "6.5.1", + "safe-buffer": "5.1.1", + "stringstream": "0.0.5", + "tough-cookie": "2.3.3", + "tunnel-agent": "0.6.0", + "uuid": "3.1.0" + }, + "dependencies": { + "aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", + "dev": true + }, + "aws4": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.6.0.tgz", + "integrity": "sha1-g+9cqGCysy5KDe7e6MdxudtXRx4=", + "dev": true + }, + "caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", + "dev": true + }, + "combined-stream": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", + "integrity": "sha1-k4NwpXtKUd6ix3wV1cX9+JUWQAk=", + "dev": true, + "requires": { + "delayed-stream": "1.0.0" + }, + "dependencies": { + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "dev": true + } + } + }, + "extend": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.1.tgz", + "integrity": "sha1-p1Xqe8Gt/MWjHOfnYtuq3F5jZEQ=", + "dev": true + }, + "forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", + "dev": true + }, + "form-data": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.1.tgz", + "integrity": "sha1-b7lPvXGIUwbXPRXMSX/kzE7NRL8=", + "dev": true, + "requires": { + "asynckit": "0.4.0", + "combined-stream": "1.0.5", + "mime-types": "2.1.17" + }, + "dependencies": { + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", + "dev": true + } + } + }, + "har-validator": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz", + "integrity": "sha1-ukAsJmGU8VlW7xXg/PJCmT9qff0=", + "dev": true, + "requires": { + "ajv": "5.2.3", + "har-schema": "2.0.0" + }, + "dependencies": { + "ajv": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.2.3.tgz", + "integrity": "sha1-wG9Zh3jETGsWGrr+NGa4GtGBTtI=", + "dev": true, + "requires": { + "co": "4.6.0", + "fast-deep-equal": "1.0.0", + "json-schema-traverse": "0.3.1", + "json-stable-stringify": "1.0.1" + }, + "dependencies": { + "co": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=", + "dev": true + }, + "fast-deep-equal": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.0.0.tgz", + "integrity": "sha1-liVqO8l1WV6zbYLpkp0GDYk0Of8=", + "dev": true + }, + "json-schema-traverse": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz", + "integrity": "sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A=", + "dev": true + }, + "json-stable-stringify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz", + "integrity": "sha1-mnWdOcXy/1A/1TAGRu1EX4jE+a8=", + "dev": true, + "requires": { + "jsonify": "0.0.0" + }, + "dependencies": { + "jsonify": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/jsonify/-/jsonify-0.0.0.tgz", + "integrity": "sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM=", + "dev": true + } + } + } + } + }, + "har-schema": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", + "dev": true + } + } + }, + "hawk": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/hawk/-/hawk-6.0.2.tgz", + "integrity": "sha1-r02RTrBl+bXOTZ0RwcshJu7MMDg=", + "dev": true, + "requires": { + "boom": "4.3.1", + "cryptiles": "3.1.2", + "hoek": "4.2.0", + "sntp": "2.0.2" + }, + "dependencies": { + "boom": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/boom/-/boom-4.3.1.tgz", + "integrity": "sha1-T4owBctKfjiJ90kDD9JbluAdLjE=", + "dev": true, + "requires": { + "hoek": "4.2.0" + } + }, + "cryptiles": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-3.1.2.tgz", + "integrity": "sha1-qJ+7Ig9c4l7FboxKqKT9e1sNKf4=", + "dev": true, + "requires": { + "boom": "5.2.0" + }, + "dependencies": { + "boom": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/boom/-/boom-5.2.0.tgz", + "integrity": "sha1-XdnabuOl8wIHdDYpDLcX0/SlTgI=", + "dev": true, + "requires": { + "hoek": "4.2.0" + } + } + } + }, + "hoek": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-4.2.0.tgz", + "integrity": "sha1-ctnQdU9/4lyi0BrY+PmpRJqJUm0=", + "dev": true + }, + "sntp": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/sntp/-/sntp-2.0.2.tgz", + "integrity": "sha1-UGQRDwr4X3z9t9a2ekACjOUrSys=", + "dev": true, + "requires": { + "hoek": "4.2.0" + } + } + } + }, + "http-signature": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "dev": true, + "requires": { + "assert-plus": "1.0.0", + "jsprim": "1.4.1", + "sshpk": "1.13.1" + }, + "dependencies": { + "assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "dev": true + }, + "jsprim": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", + "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "dev": true, + "requires": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.2.3", + "verror": "1.10.0" + }, + "dependencies": { + "extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", + "dev": true + }, + "json-schema": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", + "dev": true + }, + "verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "dev": true, + "requires": { + "assert-plus": "1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "1.3.0" + }, + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", + "dev": true + } + } + } + } + }, + "sshpk": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.13.1.tgz", + "integrity": "sha1-US322mKHFEMW3EwY/hzx2UBzm+M=", + "dev": true, + "requires": { + "asn1": "0.2.3", + "assert-plus": "1.0.0", + "bcrypt-pbkdf": "1.0.1", + "dashdash": "1.14.1", + "ecc-jsbn": "0.1.1", + "getpass": "0.1.7", + "jsbn": "0.1.1", + "tweetnacl": "0.14.5" + }, + "dependencies": { + "asn1": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz", + "integrity": "sha1-2sh4dxPJlmhJ/IGAd36+nB3fO4Y=", + "dev": true + }, + "bcrypt-pbkdf": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz", + "integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=", + "dev": true, + "optional": true, + "requires": { + "tweetnacl": "0.14.5" + } + }, + "dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "dev": true, + "requires": { + "assert-plus": "1.0.0" + } + }, + "ecc-jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", + "integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=", + "dev": true, + "optional": true, + "requires": { + "jsbn": "0.1.1" + } + }, + "getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "dev": true, + "requires": { + "assert-plus": "1.0.0" + } + }, + "jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", + "dev": true, + "optional": true + }, + "tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", + "dev": true, + "optional": true + } + } + } + } + }, + "is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", + "dev": true + }, + "isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", + "dev": true + }, + "json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", + "dev": true + }, + "mime-types": { + "version": "2.1.17", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.17.tgz", + "integrity": "sha1-Cdejk/A+mVp5+K+Fe3Cp4KsWVXo=", + "dev": true, + "requires": { + "mime-db": "1.30.0" + }, + "dependencies": { + "mime-db": { + "version": "1.30.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.30.0.tgz", + "integrity": "sha1-dMZD2i3Z1qRTmZY0ZbJtXKfXHwE=", + "dev": true + } + } + }, + "oauth-sign": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", + "integrity": "sha1-Rqarfwrq2N6unsBWV4C31O/rnUM=", + "dev": true + }, + "performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", + "dev": true + }, + "qs": { + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", + "integrity": "sha1-NJzfbu+J7EXBLX1es/wMhwNDptg=", + "dev": true + }, + "safe-buffer": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", + "integrity": "sha1-iTMSr2myEj3vcfV4iQAWce6yyFM=", + "dev": true + }, + "stringstream": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz", + "integrity": "sha1-TkhM1N5aC7vuGORjB3EKioFiGHg=", + "dev": true + }, + "tough-cookie": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.3.tgz", + "integrity": "sha1-C2GKVWW23qkL80JdBNVe3EdadWE=", + "dev": true, + "requires": { + "punycode": "1.4.1" + }, + "dependencies": { + "punycode": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", + "dev": true + } + } + }, + "tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "dev": true, + "requires": { + "safe-buffer": "5.1.1" + } + }, + "uuid": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.1.0.tgz", + "integrity": "sha1-PdPT55Crwk17DToDT/q6vijrvAQ=", + "dev": true + } + } + } + } + }, + "debug": { + "version": "https://registry.npmjs.org/debug/-/debug-2.6.8.tgz", + "integrity": "sha1-5zFTHKLt4n0YgiJCfaF4IdaP9Pw=", + "dev": true, + "requires": { + "ms": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" + } + }, + "diff": { + "version": "https://registry.npmjs.org/diff/-/diff-3.2.0.tgz", + "integrity": "sha1-yc45Okt8vQsFinJck98pkCeGj/k=", + "dev": true + }, + "escape-string-regexp": { + "version": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true + }, + "esprima": { + "version": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz", + "integrity": "sha1-RJnt3NERDgshi6zy+n9/WfVcqAQ=" + }, + "fs.realpath": { + "version": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "glob": { + "version": "https://registry.npmjs.org/glob/-/glob-7.1.1.tgz", + "integrity": "sha1-gFIR3wT6rxxjo2ADBs31reULLsg=", + "dev": true, + "requires": { + "fs.realpath": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "inflight": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "once": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "path-is-absolute": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + } + }, + "graceful-readlink": { + "version": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz", + "integrity": "sha1-TK+tdrxi8C+gObL5Tpo906ORpyU=", + "dev": true + }, + "growl": { + "version": "https://registry.npmjs.org/growl/-/growl-1.9.2.tgz", + "integrity": "sha1-Dqd0NxXbjY3ixe3hd14bRayFwC8=", + "dev": true + }, + "has-flag": { + "version": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", + "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=", + "dev": true + }, + "he": { + "version": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", + "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=", + "dev": true + }, + "inflight": { + "version": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "wrappy": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + }, + "inherits": { + "version": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", + "dev": true + }, + "istanbul": { + "version": "0.4.5", + "resolved": "https://registry.npmjs.org/istanbul/-/istanbul-0.4.5.tgz", + "integrity": "sha1-ZcfXPUxNqE1POsMQuRj7C4Azczs=", + "dev": true, + "requires": { + "abbrev": "1.0.9", + "async": "1.5.2", + "escodegen": "1.8.1", + "esprima": "2.7.3", + "glob": "5.0.15", + "handlebars": "4.0.10", + "js-yaml": "3.10.0", + "mkdirp": "0.5.1", + "nopt": "3.0.6", + "once": "1.4.0", + "resolve": "1.1.7", + "supports-color": "3.2.3", + "which": "1.3.0", + "wordwrap": "1.0.0" + }, + "dependencies": { + "abbrev": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.9.tgz", + "integrity": "sha1-kbR5JYinc4wl813W9jdSovh3YTU=", + "dev": true + }, + "async": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz", + "integrity": "sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo=", + "dev": true + }, + "escodegen": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.8.1.tgz", + "integrity": "sha1-WltTr0aTEQvrsIZ6o0MN07cKEBg=", + "dev": true, + "requires": { + "esprima": "2.7.3", + "estraverse": "1.9.3", + "esutils": "2.0.2", + "optionator": "0.8.2", + "source-map": "0.2.0" + }, + "dependencies": { + "estraverse": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-1.9.3.tgz", + "integrity": "sha1-r2fy3JIlgkFZUJJgkaQAXSnJu0Q=", + "dev": true + }, + "esutils": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz", + "integrity": "sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs=", + "dev": true + }, + "optionator": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.2.tgz", + "integrity": "sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q=", + "dev": true, + "requires": { + "deep-is": "0.1.3", + "fast-levenshtein": "2.0.6", + "levn": "0.3.0", + "prelude-ls": "1.1.2", + "type-check": "0.3.2", + "wordwrap": "1.0.0" + }, + "dependencies": { + "deep-is": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", + "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=", + "dev": true + }, + "fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", + "dev": true + }, + "levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "dev": true, + "requires": { + "prelude-ls": "1.1.2", + "type-check": "0.3.2" + } + }, + "prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", + "dev": true + }, + "type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "dev": true, + "requires": { + "prelude-ls": "1.1.2" + } + } + } + }, + "source-map": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.2.0.tgz", + "integrity": "sha1-2rc/vPwrqBm03gO9b26qSBZLP50=", + "dev": true, + "optional": true, + "requires": { + "amdefine": "1.0.1" + }, + "dependencies": { + "amdefine": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.1.tgz", + "integrity": "sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU=", + "dev": true, + "optional": true + } + } + } + } + }, + "esprima": { + "version": "2.7.3", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-2.7.3.tgz", + "integrity": "sha1-luO3DVd59q1JzQMmc9HDEnZ7pYE=", + "dev": true + }, + "glob": { + "version": "5.0.15", + "resolved": "https://registry.npmjs.org/glob/-/glob-5.0.15.tgz", + "integrity": "sha1-G8k2ueAvSmA/zCIuz3Yz0wuLk7E=", + "dev": true, + "requires": { + "inflight": "1.0.6", + "inherits": "2.0.3", + "minimatch": "3.0.4", + "once": "1.4.0", + "path-is-absolute": "1.0.1" + }, + "dependencies": { + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "1.4.0", + "wrappy": "1.0.2" + }, + "dependencies": { + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + } + } + }, + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", + "dev": true + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", + "dev": true, + "requires": { + "brace-expansion": "1.1.8" + }, + "dependencies": { + "brace-expansion": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz", + "integrity": "sha1-wHshHHyVLsH479Uad+8NHTmQopI=", + "dev": true, + "requires": { + "balanced-match": "1.0.0", + "concat-map": "0.0.1" + }, + "dependencies": { + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + } + } + } + } + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + } + } + }, + "handlebars": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.0.10.tgz", + "integrity": "sha1-PTDHGLCaPZbyPqTMH0A8TTup/08=", + "dev": true, + "requires": { + "async": "1.5.2", + "optimist": "0.6.1", + "source-map": "0.4.4", + "uglify-js": "2.8.29" + }, + "dependencies": { + "optimist": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", + "dev": true, + "requires": { + "minimist": "0.0.10", + "wordwrap": "0.0.3" + }, + "dependencies": { + "minimist": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", + "integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=", + "dev": true + }, + "wordwrap": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", + "dev": true + } + } + }, + "source-map": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.4.4.tgz", + "integrity": "sha1-66T12pwNyZneaAMti092FzZSA2s=", + "dev": true, + "requires": { + "amdefine": "1.0.1" + }, + "dependencies": { + "amdefine": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.1.tgz", + "integrity": "sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU=", + "dev": true + } + } + }, + "uglify-js": { + "version": "2.8.29", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.8.29.tgz", + "integrity": "sha1-KcVzMUgFe7Th913zW3qcty5qWd0=", + "dev": true, + "optional": true, + "requires": { + "source-map": "0.5.7", + "uglify-to-browserify": "1.0.2", + "yargs": "3.10.0" + }, + "dependencies": { + "source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true, + "optional": true + }, + "uglify-to-browserify": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz", + "integrity": "sha1-bgkk1r2mta/jSeOabWMoUKD4grc=", + "dev": true, + "optional": true + }, + "yargs": { + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-3.10.0.tgz", + "integrity": "sha1-9+572FfdfB0tOMDnTvvWgdFDH9E=", + "dev": true, + "optional": true, + "requires": { + "camelcase": "1.2.1", + "cliui": "2.1.0", + "decamelize": "1.2.0", + "window-size": "0.1.0" + }, + "dependencies": { + "camelcase": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-1.2.1.tgz", + "integrity": "sha1-m7UwTS4LVmmLLHWLCKPqqdqlijk=", + "dev": true, + "optional": true + }, + "cliui": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-2.1.0.tgz", + "integrity": "sha1-S0dXYP+AJkx2LDoXGQMukcf+oNE=", + "dev": true, + "optional": true, + "requires": { + "center-align": "0.1.3", + "right-align": "0.1.3", + "wordwrap": "0.0.2" + }, + "dependencies": { + "center-align": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/center-align/-/center-align-0.1.3.tgz", + "integrity": "sha1-qg0yYptu6XIgBBHL1EYckHvCt60=", + "dev": true, + "optional": true, + "requires": { + "align-text": "0.1.4", + "lazy-cache": "1.0.4" + }, + "dependencies": { + "align-text": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/align-text/-/align-text-0.1.4.tgz", + "integrity": "sha1-DNkKVhCT810KmSVsIrcGlDP60Rc=", + "dev": true, + "optional": true, + "requires": { + "kind-of": "3.2.2", + "longest": "1.0.1", + "repeat-string": "1.6.1" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "dev": true, + "optional": true, + "requires": { + "is-buffer": "1.1.5" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.5.tgz", + "integrity": "sha1-Hzsm72E7IUuIy8ojzGwB2Hlh7sw=", + "dev": true, + "optional": true + } + } + }, + "longest": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz", + "integrity": "sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc=", + "dev": true, + "optional": true + }, + "repeat-string": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", + "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", + "dev": true, + "optional": true + } + } + }, + "lazy-cache": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-1.0.4.tgz", + "integrity": "sha1-odePw6UEdMuAhF07O24dpJpEbo4=", + "dev": true, + "optional": true + } + } + }, + "right-align": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/right-align/-/right-align-0.1.3.tgz", + "integrity": "sha1-YTObci/mo1FWiSENJOFMlhSGE+8=", + "dev": true, + "optional": true, + "requires": { + "align-text": "0.1.4" + }, + "dependencies": { + "align-text": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/align-text/-/align-text-0.1.4.tgz", + "integrity": "sha1-DNkKVhCT810KmSVsIrcGlDP60Rc=", + "dev": true, + "optional": true, + "requires": { + "kind-of": "3.2.2", + "longest": "1.0.1", + "repeat-string": "1.6.1" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "dev": true, + "optional": true, + "requires": { + "is-buffer": "1.1.5" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.5.tgz", + "integrity": "sha1-Hzsm72E7IUuIy8ojzGwB2Hlh7sw=", + "dev": true, + "optional": true + } + } + }, + "longest": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz", + "integrity": "sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc=", + "dev": true, + "optional": true + }, + "repeat-string": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", + "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", + "dev": true, + "optional": true + } + } + } + } + }, + "wordwrap": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz", + "integrity": "sha1-t5Zpu0LstAn4PVg8rVLKF+qhZD8=", + "dev": true, + "optional": true + } + } + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true, + "optional": true + }, + "window-size": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz", + "integrity": "sha1-VDjNLqk7IC76Ohn+iIeu58lPnJ0=", + "dev": true, + "optional": true + } + } + } + } + } + } + }, + "js-yaml": { + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.10.0.tgz", + "integrity": "sha1-LnhEFka9RoLpY/IrbpKCPDCcYtw=", + "dev": true, + "requires": { + "argparse": "1.0.9", + "esprima": "4.0.0" + }, + "dependencies": { + "argparse": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz", + "integrity": "sha1-c9g7wmP4bpf4zE9rrhsOkKfSLIY=", + "dev": true, + "requires": { + "sprintf-js": "1.0.3" + }, + "dependencies": { + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + } + } + }, + "esprima": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz", + "integrity": "sha1-RJnt3NERDgshi6zy+n9/WfVcqAQ=", + "dev": true + } + } + }, + "mkdirp": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "dev": true, + "requires": { + "minimist": "0.0.8" + }, + "dependencies": { + "minimist": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", + "dev": true + } + } + }, + "nopt": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.6.tgz", + "integrity": "sha1-xkZdvwirzU2zWTF/eaxopkayj/k=", + "dev": true, + "requires": { + "abbrev": "1.0.9" + } + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1.0.2" + }, + "dependencies": { + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + } + } + }, + "resolve": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.1.7.tgz", + "integrity": "sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs=", + "dev": true + }, + "supports-color": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", + "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", + "dev": true, + "requires": { + "has-flag": "1.0.0" + }, + "dependencies": { + "has-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", + "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=", + "dev": true + } + } + }, + "which": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.0.tgz", + "integrity": "sha1-/wS9/AEO5UfXgL7DjhrBwnd9JTo=", + "dev": true, + "requires": { + "isexe": "2.0.0" + }, + "dependencies": { + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + } + } + }, + "wordwrap": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=", + "dev": true + } + } + }, + "js-yaml": { + "version": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.10.0.tgz", + "integrity": "sha1-LnhEFka9RoLpY/IrbpKCPDCcYtw=", + "requires": { + "argparse": "https://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz", + "esprima": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz" + } + }, + "json3": { + "version": "https://registry.npmjs.org/json3/-/json3-3.3.2.tgz", + "integrity": "sha1-PAQ0dD35Pi9cQq7nsZvLSDV19OE=", + "dev": true + }, + "json5": { + "version": "https://registry.npmjs.org/json5/-/json5-0.4.0.tgz", + "integrity": "sha1-BUNS5MTIDIbAkjh31EneF2pzLI0=" + }, + "lodash": { + "version": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", + "integrity": "sha1-eCA6TRwyiuHYbcpkYONptX9AVa4=" + }, + "lodash._baseassign": { + "version": "https://registry.npmjs.org/lodash._baseassign/-/lodash._baseassign-3.2.0.tgz", + "integrity": "sha1-jDigmVAPIVrQnlnxci/QxSv+Ck4=", + "dev": true, + "requires": { + "lodash._basecopy": "https://registry.npmjs.org/lodash._basecopy/-/lodash._basecopy-3.0.1.tgz", + "lodash.keys": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-3.1.2.tgz" + } + }, + "lodash._basecopy": { + "version": "https://registry.npmjs.org/lodash._basecopy/-/lodash._basecopy-3.0.1.tgz", + "integrity": "sha1-jaDmqHbPNEwK2KVIghEd08XHyjY=", + "dev": true + }, + "lodash._basecreate": { + "version": "https://registry.npmjs.org/lodash._basecreate/-/lodash._basecreate-3.0.3.tgz", + "integrity": "sha1-G8ZhYU2qf8MRt9A78WgGoCE8+CE=", + "dev": true + }, + "lodash._getnative": { + "version": "https://registry.npmjs.org/lodash._getnative/-/lodash._getnative-3.9.1.tgz", + "integrity": "sha1-VwvH3t5G1hzc3mh9ZdPuy6o6r/U=", + "dev": true + }, + "lodash._isiterateecall": { + "version": "https://registry.npmjs.org/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz", + "integrity": "sha1-UgOte6Ql+uhCRg5pbbnPPmqsBXw=", + "dev": true + }, + "lodash.create": { + "version": "https://registry.npmjs.org/lodash.create/-/lodash.create-3.1.1.tgz", + "integrity": "sha1-1/KEnw29p+BGgruM1yqwIkYd6+c=", + "dev": true, + "requires": { + "lodash._baseassign": "https://registry.npmjs.org/lodash._baseassign/-/lodash._baseassign-3.2.0.tgz", + "lodash._basecreate": "https://registry.npmjs.org/lodash._basecreate/-/lodash._basecreate-3.0.3.tgz", + "lodash._isiterateecall": "https://registry.npmjs.org/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz" + } + }, + "lodash.isarguments": { + "version": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", + "integrity": "sha1-L1c9hcaiQon/AGY7SRwdM4/zRYo=", + "dev": true + }, + "lodash.isarray": { + "version": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-3.0.4.tgz", + "integrity": "sha1-eeTriMNqgSKvhvhEqpvNhRtfu1U=", + "dev": true + }, + "lodash.keys": { + "version": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-3.1.2.tgz", + "integrity": "sha1-TbwEcrFWvlCgsoaFXRvQsMZWCYo=", + "dev": true, + "requires": { + "lodash._getnative": "https://registry.npmjs.org/lodash._getnative/-/lodash._getnative-3.9.1.tgz", + "lodash.isarguments": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", + "lodash.isarray": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-3.0.4.tgz" + } + }, + "minimatch": { + "version": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", + "dev": true, + "requires": { + "brace-expansion": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz" + } + }, + "minimist": { + "version": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", + "dev": true + }, + "mkdirp": { + "version": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "dev": true, + "requires": { + "minimist": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + } + }, + "mocha": { + "version": "https://registry.npmjs.org/mocha/-/mocha-3.5.3.tgz", + "integrity": "sha1-HgSA/jbS2lhY0etqzDhBiybqog0=", + "dev": true, + "requires": { + "browser-stdout": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.0.tgz", + "commander": "2.9.0", + "debug": "https://registry.npmjs.org/debug/-/debug-2.6.8.tgz", + "diff": "https://registry.npmjs.org/diff/-/diff-3.2.0.tgz", + "escape-string-regexp": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "glob": "https://registry.npmjs.org/glob/-/glob-7.1.1.tgz", + "growl": "https://registry.npmjs.org/growl/-/growl-1.9.2.tgz", + "he": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", + "json3": "https://registry.npmjs.org/json3/-/json3-3.3.2.tgz", + "lodash.create": "https://registry.npmjs.org/lodash.create/-/lodash.create-3.1.1.tgz", + "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "supports-color": "https://registry.npmjs.org/supports-color/-/supports-color-3.1.2.tgz" + }, + "dependencies": { + "commander": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.9.0.tgz", + "integrity": "sha1-nJkJQXbhIkDLItbFFGCYQA/g99Q=", + "dev": true, + "requires": { + "graceful-readlink": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz" + } + } + } + }, + "mocha-lcov-reporter": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/mocha-lcov-reporter/-/mocha-lcov-reporter-1.3.0.tgz", + "integrity": "sha1-Rpve9PivyaEWBW8HnfYYLQr7A4Q=", + "dev": true + }, + "ms": { + "version": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + }, + "once": { + "version": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + }, + "os-homedir": { + "version": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=" + }, + "path-is-absolute": { + "version": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "sprintf-js": { + "version": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" + }, + "supports-color": { + "version": "https://registry.npmjs.org/supports-color/-/supports-color-3.1.2.tgz", + "integrity": "sha1-cqJiiU2dQIuVbKBf83su2KbiotU=", + "dev": true, + "requires": { + "has-flag": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz" + } + }, + "wrappy": { + "version": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + } + } +} diff --git a/package.json b/package.json index c5eff0b..a806c45 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "description": "Unity Cache Server", "main": "main.js", "engines": { - "node": ">=6.11.0" + "node": "^8.9.1" }, "directories": { "test": "test" diff --git a/test/cache_fs.js b/test/cache_fs.js deleted file mode 100644 index 4c10994..0000000 --- a/test/cache_fs.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict'; -const assert = require('assert'); -const fs = require('fs'); -const helpers = require('../lib/helpers'); -const CacheFS = require('../lib/cache_fs'); - -describe("CacheFS", function() { - describe("Init", function() { - it("should throw an error if the given cache folder is not recognized as a valid cache", function() { - var p = helpers.generateTempDir(); - fs.mkdirSync(p); - var f = p + "/veryImportantDoc.doc"; - fs.writeFileSync(f); - - var err = null; - try { - new CacheFS(p, 0); - } - catch(e) { - err = e; - } - finally { - assert(err); - } - }); - }); - - describe("GetCacheFileStream", function() { - it("should update the timestamp of the retrieved file to support LRU cleanup"); - }) -}); \ No newline at end of file diff --git a/test/server.js b/test/server.js index 30f4ad5..8e070bc 100644 --- a/test/server.js +++ b/test/server.js @@ -4,8 +4,8 @@ const crypto = require('crypto'); const fs = require('fs'); const helpers = require('../lib/helpers'); const consts = require('../lib/constants').Constants; -const CacheServer = require('../lib/server.js'); -const CacheFS = require("../lib/cache_fs"); +const CacheServer = require('../lib/server'); +const Cache = require("../lib/cache/cache_membuf"); const CmdResponseListener = require('./../lib/client/server_response_transform.js'); @@ -14,11 +14,11 @@ const MIN_BLOB_SIZE = 64; const MAX_BLOB_SIZE = 2048; helpers.SetLogger(()=>{}); -var cache = new CacheFS(helpers.generateTempDir(), CACHE_SIZE); -var server = new CacheServer(cache, 0); -var client; +const cache = new Cache(); +const server = new CacheServer(cache, 0); +let client; -var cmd = { +const cmd = { quit: "q", getAsset: "ga", getInfo: "gi", @@ -41,7 +41,7 @@ function generateCommandData(minSize, maxSize) { return { guid: Buffer.from(crypto.randomBytes(consts.GUID_SIZE).toString('ascii'), 'ascii'), hash: Buffer.from(crypto.randomBytes(consts.HASH_SIZE).toString('ascii'), 'ascii'), - asset: Buffer.from(crypto.randomBytes(getSize()).toString('ascii'), 'ascii'), + bin: Buffer.from(crypto.randomBytes(getSize()).toString('ascii'), 'ascii'), info: Buffer.from(crypto.randomBytes(getSize()).toString('ascii'), 'ascii'), resource: Buffer.from(crypto.randomBytes(getSize()).toString('ascii'), 'ascii') } @@ -65,18 +65,18 @@ function encodeCommand(command, guid, hash, blob) { } function expectLog(client, regex, condition, callback) { - if(callback == null) { + if(typeof(callback) !== 'function' && typeof(condition) === 'function') { callback = condition; condition = true; } - var match; + let match; helpers.SetLogger(function (lvl, msg) { match = match || regex.test(msg); }); client.on('close', function() { - assert(match == condition); + assert(match === condition); callback(); }); } @@ -105,8 +105,8 @@ describe("CacheServer protocol", function() { it("should echo the version if supported", function (done) { client.on('data', function (data) { - var ver = helpers.readUInt32(data); - assert(ver == consts.PROTOCOL_VERSION, "Expected " + consts.PROTOCOL_VERSION + " Received " + ver); + const ver = helpers.readUInt32(data); + assert(ver === consts.PROTOCOL_VERSION, "Expected " + consts.PROTOCOL_VERSION + " Received " + ver); done(); }); @@ -115,24 +115,24 @@ describe("CacheServer protocol", function() { it("should respond with 0 if unsupported", function (done) { client.on('data', function (data) { - var ver = helpers.readUInt32(data); - assert(ver == 0, "Expected 0, Received " + ver); + const ver = helpers.readUInt32(data); + assert(ver === 0, "Expected 0, Received " + ver); done(); }); client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION + 1)); }); - it("should recognize a 2 byte version sent 1 byte at a time", function (done) { + it.skip("should recognize a 2 byte version sent 1 byte at a time", function (done) { this.slow(250); client.on('data', function(data) { - var ver = helpers.readUInt32(data); - assert(ver == consts.PROTOCOL_VERSION, "Expected " + consts.PROTOCOL_VERSION + " Received " + ver); + const ver = helpers.readUInt32(data); + assert(ver === consts.PROTOCOL_VERSION, "Expected " + consts.PROTOCOL_VERSION + " Received " + ver); done(); }); - var ver = "fe"; + let ver = "fe"; client.write(ver[0]); sleep(50).then(() => { client.write(ver[1]); }); }); @@ -140,7 +140,7 @@ describe("CacheServer protocol", function() { describe("Transactions", function () { - var self = this; + const self = this; beforeEach(function (done) { client = net.connect({port: server.port}, function (err) { @@ -158,7 +158,7 @@ describe("CacheServer protocol", function() { it("should cancel a pending transaction if a new (ts) command is received", function (done) { expectLog(client, /Cancel previous transaction/, done); - var d = encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash); + const d = encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash); client.write(d); // first one ... client.end(d); // ... canceled by this one }); @@ -176,11 +176,11 @@ describe("CacheServer protocol", function() { it("should require a transaction start (te) command before a put command", function(done) { expectLog(client, /Not in a transaction/, done); - client.write(encodeCommand(cmd.putAsset, null, null, self.data.asset)); + client.write(encodeCommand(cmd.putAsset, null, null, self.data.bin)); }); it("should close the socket on an invalid transaction command", function(done) { - expectLog(client, /invalid data receive/i, done); + expectLog(client, /Unrecognized command/i, done); client.write('tx', self.data.guid, self.data.hash); }); }); @@ -188,7 +188,7 @@ describe("CacheServer protocol", function() { describe("PUT requests", function () { this.slow(1500); - var self = this; + const self = this; this.getCachePath = function(extension, callback) { return cache.GetCachePath( helpers.readHex(self.data.guid.length, self.data.guid), @@ -213,15 +213,15 @@ describe("CacheServer protocol", function() { }); it("should close the socket on an invalid PUT type", function(done) { - expectLog(client, /invalid data receive/i, done); + expectLog(client, /Unrecognized command/i, done); client.write( encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + - encodeCommand("px", null, null, self.data.asset)); + encodeCommand("px", null, null, self.data.bin)); }); - it("should try to free cache space if the cache size exceeds the max cache size after writing a file", function(done) { - var match1 = false; - var match2 = false; + it.skip("should try to free cache space if the cache size exceeds the max cache size after writing a file", function(done) { + let match1 = false; + let match2 = false; cache.maxCacheSize = 1024; @@ -236,18 +236,19 @@ describe("CacheServer protocol", function() { done(); }); - var data = generateCommandData(400, 400); + const data = generateCommandData(400, 400); client.write( encodeCommand(cmd.transactionStart, data.guid, data.hash) + - encodeCommand(cmd.putAsset, null, null, data.asset) + + encodeCommand(cmd.putAsset, null, null, data.bin) + encodeCommand(cmd.putResource, null, null, data.resource) + - encodeCommand(cmd.putInfo, null, null, data.resource) + + encodeCommand(cmd.putInfo, null, null, data.info) + encodeCommand(cmd.transactionEnd)); sleep(50).then(() => { client.end(); }) }); - var tests = [ + + const tests = [ {ext: 'bin', cmd: cmd.putAsset}, {ext: 'info', cmd: cmd.putInfo}, {ext: 'resource', cmd: cmd.putResource} @@ -256,26 +257,29 @@ describe("CacheServer protocol", function() { tests.forEach(function(test) { it("should store " + test.ext + " data with a (" + test.cmd + ") cmd", function(done) { client.on('close', function() { - self.getCachePath(test.ext, function(err, cachePath) { - fs.open(cachePath, 'r', function(err, fd) { - assert(!err, err); - var buf = fs.readFileSync(fd); - assert(buf.compare(self.data.asset) == 0); + cache.getFileStream(test.cmd[1], self.data.guid, self.data.hash, function(err, result) { + assert(!err, err); + assert(result.size === self.data[test.ext].length); + assert(result.stream !== null); + + result.stream.on("readable", function() { + const chunk = result.stream.read(); // should only be one in this test + assert(self.data[test.ext].compare(chunk) === 0); done(); }); - }) - + }); }); - var buf = Buffer.from( + const buf = Buffer.from( encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + - encodeCommand(test.cmd, null, null, self.data.asset) + + encodeCommand(test.cmd, null, null, self.data[test.ext]) + encodeCommand(cmd.transactionEnd), 'ascii'); - var sentBytes = 0; + let sentBytes = 0; + function sendBytesAsync() { setTimeout(() => { - var packetSize = Math.min(buf.length - sentBytes, Math.ceil(Math.random() * 10)); + const packetSize = Math.min(buf.length - sentBytes, Math.ceil(Math.random() * 10)); client.write(buf.slice(sentBytes, sentBytes + packetSize), function() { sentBytes += packetSize; if(sentBytes < buf.length) @@ -292,14 +296,17 @@ describe("CacheServer protocol", function() { }); it("should replace an existing file with the same guid and hash", function(done) { - var asset = Buffer.from(crypto.randomBytes(self.data.asset.length).toString('ascii'), 'ascii'); + const asset = Buffer.from(crypto.randomBytes(self.data.bin.length).toString('ascii'), 'ascii'); client.on('close', function() { - self.getCachePath('bin', function(err, cachePath) { - fs.open(cachePath, 'r', function(err, fd) { - assert(!err, err); - var buf = fs.readFileSync(fd); - assert(buf.compare(asset) == 0); + cache.getFileStream('a', self.data.guid, self.data.hash, function(err, result) { + assert(!err, err); + assert(result.size === asset.length); + assert(result.stream !== null); + + result.stream.on("readable", function() { + const chunk = result.stream.read(); // should only be one in this test + assert(asset.compare(chunk) === 0); done(); }); }); @@ -317,7 +324,7 @@ describe("CacheServer protocol", function() { describe("GET requests", function() { this.slow(1000); - var self = this; + const self = this; self.data = generateCommandData(); before(function(done) { @@ -325,7 +332,7 @@ describe("CacheServer protocol", function() { assert(!err); client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); client.write(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); - client.write(encodeCommand(cmd.putAsset, null, null, self.data.asset)); + client.write(encodeCommand(cmd.putAsset, null, null, self.data.bin)); client.write(encodeCommand(cmd.putInfo, null, null, self.data.info)); client.write(encodeCommand(cmd.putResource, null, null, self.data.resource)); client.write(cmd.transactionEnd); @@ -346,20 +353,20 @@ describe("CacheServer protocol", function() { }); it("should close the socket on an invalid GET type", function(done) { - expectLog(client, /invalid data receive/i, done); + expectLog(client, /Unrecognized command/i, done); client.write(encodeCommand('gx', self.data.guid, self.data.hash)); }); - var tests = [ - { cmd: cmd.getAsset, blob: self.data.asset, type: 'bin' }, - { cmd: cmd.getInfo, blob: self.data.info, type: 'info' }, - { cmd: cmd.getResource, blob: self.data.resource, type: 'resource' } + const tests = [ + {cmd: cmd.getAsset, blob: self.data.bin, type: 'bin'}, + {cmd: cmd.getInfo, blob: self.data.info, type: 'info'}, + {cmd: cmd.getResource, blob: self.data.resource, type: 'resource'} ]; tests.forEach(function(test) { it("should retrieve stored " + test.type + " data with the (" + test.cmd + ") command", function(done) { - var dataBuf; - var pos = 0; + let dataBuf; + let pos = 0; client.pipe(new CmdResponseListener()) .on('header', function(header) { assert(header.cmd[0] === '+'); @@ -374,12 +381,13 @@ describe("CacheServer protocol", function() { done(); }); - var buf = Buffer.from(encodeCommand(test.cmd, self.data.guid, self.data.hash), 'ascii'); + const buf = Buffer.from(encodeCommand(test.cmd, self.data.guid, self.data.hash), 'ascii'); + + let sentBytes = 0; - var sentBytes = 0; function sendBytesAsync() { setTimeout(() => { - var packetSize = Math.min(buf.length - sentBytes, Math.ceil(Math.random() * 10)); + const packetSize = Math.min(buf.length - sentBytes, Math.ceil(Math.random() * 10)); client.write(buf.slice(sentBytes, sentBytes + packetSize), function() { sentBytes += packetSize; if(sentBytes < buf.length) @@ -399,224 +407,13 @@ describe("CacheServer protocol", function() { done(); }); - var badGuid = Buffer.allocUnsafe(consts.GUID_SIZE).fill(0); - var badHash = Buffer.allocUnsafe(consts.HASH_SIZE).fill(0); + const badGuid = Buffer.allocUnsafe(consts.GUID_SIZE).fill(0); + const badHash = Buffer.allocUnsafe(consts.HASH_SIZE).fill(0); client.write(encodeCommand(test.cmd, badGuid, badHash)); }); }); }); - describe("Integrity check", function() { - - var self = this; - - before(function() { - self.data = generateCommandData(); - }); - - beforeEach(function (done) { - client = net.connect({port: server.port}, function (err) { - assert(!err); - client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); - done(); - }); - }); - - it("should not allow an integrity check while in a transaction", function(done) { - expectLog(client, /In a transaction/, done); - client.write(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); - client.end(cmd.integrityVerify); - }); - - it("should only verify integrity with the integrity check-verify command (icv)", function(done) { - expectLog(client, /fix/, false, done); - client.end(cmd.integrityVerify); - }); - - it("should respond with the number of errors detected with any integrity check command", function(done) { - expectLog(client, /fix \d+ issue/, done); - client.end(cmd.integrityFix); - }); - - it("should close the socket on an invalid integrity command type", function(done) { - expectLog(client, /invalid data receive/i, done); - client.write('icx'); - }); - - describe("Validations", function() { - this.slow(250); - - it("should remove unrecognized files from the cache root dir", function(done) { - var filePath = cache.cacheDir + "/file.rogue"; - fs.writeFileSync(filePath, ""); - - client.on('close', function() { - assert(!fs.existsSync(filePath)); - done(); - }); - - client.write(cmd.integrityFix); - sleep(50).then(() => { client.end(); }); - }); - - it("should remove unrecognized files from cache subdirs", function(done) { - var filePath = cache.cacheDir + "/00/file.rogue"; - fs.writeFileSync(filePath, ""); - - client.on('close', function() { - assert(!fs.existsSync(filePath)); - done(); - }); - - client.write(cmd.integrityFix); - sleep(50).then(() => { client.end(); }); - }); - - it("should remove unrecognized directories from the cache root dir", function(done) { - var dirPath = cache.cacheDir + "/dir.rogue"; - fs.mkdirSync(dirPath); - - client.on('close', function() { - assert(!fs.existsSync(dirPath)); - done(); - }); - - client.write(cmd.integrityFix); - sleep(50).then(() => { client.end(); }); - }); - - it("should remove unrecognized directories from cache subdirs", function(done) { - var dirPath = cache.cacheDir + "/00/dir.rogue"; - fs.mkdirSync(dirPath); - - client.on('close', function() { - assert(!fs.existsSync(dirPath)); - done(); - }); - - client.write(cmd.integrityFix); - sleep(50).then(() => { client.end(); }); - }); - - it("should ensure that cache files match their parent dir namespace", function(done) { - var data = generateCommandData(); - var fileName = data.guid.toString('hex') + "-" + data.hash.toString('hex') + ".bin"; - - // Put a valid cache file into the wrong sub directory - fileName = "ff" + fileName.slice(2); - var filePath = cache.cacheDir + "/00/" + fileName; - - fs.writeFileSync(filePath, ""); - - client.on('close', function() { - assert(!fs.existsSync(filePath)); - done(); - }); - - client.write(cmd.integrityFix); - sleep(50).then(() => { client.end(); }); - }); - - it("should ensure each .resource file has a corresponding .bin file", function(done) { - expectLog(client, /fix 1 issue/, done); - - var data = generateCommandData(); - client.write(encodeCommand(cmd.transactionStart, data.guid, data.hash)); - client.write(encodeCommand(cmd.putResource, null, null, data.resource)); - client.write(encodeCommand(cmd.transactionEnd)); - - sleep(50).then(() => { - client.end(cmd.integrityFix); - }); - }); - - it("should ensure each .info file has a corresponding .bin file", function(done) { - expectLog(client, /fix 1 issue/, done); - - var data = generateCommandData(); - client.write(encodeCommand(cmd.transactionStart, data.guid, data.hash)); - client.write(encodeCommand(cmd.putInfo, null, null, data.info)); - client.write(encodeCommand(cmd.transactionEnd)); - - sleep(50).then(() => { - client.end(cmd.integrityFix); - }); - }); - - it("should ensure each .bin file has a corresponding .info file", function(done) { - expectLog(client, /fix 1 issue/, done); - - var data = generateCommandData(); - client.write(encodeCommand(cmd.transactionStart, data.guid, data.hash)); - client.write(encodeCommand(cmd.putAsset, null, null, data.asset)); - client.write(encodeCommand(cmd.transactionEnd)); - - sleep(50).then(() => { - client.end(cmd.integrityFix); - }); - }); - - it("should ensure each .resource file has a corresponding .info file", function(done) { - expectLog(client, /fix 2 issue/, done); - - var data = generateCommandData(); - client.write(encodeCommand(cmd.transactionStart, data.guid, data.hash)); - client.write(encodeCommand(cmd.putAsset, null, null, data.asset)); - client.write(encodeCommand(cmd.putResource, null, null, data.resource)); - client.write(encodeCommand(cmd.transactionEnd)); - - sleep(50).then(() => { - client.end(cmd.integrityFix); - }); - }); - - var requiredResourceTests = [ - { type: "audio", classId: "1020" } - ]; - - requiredResourceTests.forEach(function(test) { - it("should ensure " + test.type + " files have a corresponding .resource file", function(done) { - expectLog(client, /fix 2 issue/, done); - - var data = generateCommandData(); - data.info = Buffer.from(" assetImporterClassID: " + test.classId, 'ascii'); - client.write(encodeCommand(cmd.transactionStart, data.guid, data.hash)); - client.write(encodeCommand(cmd.putAsset, null, null, data.asset)); - client.write(encodeCommand(cmd.putInfo, null, null, data.info)); - client.write(encodeCommand(cmd.transactionEnd)); - - sleep(50).then(() => { - client.end(cmd.integrityFix); - }); - - }); - }); - - var skipFiles = [ - "desktop.ini", - "temp", - ".ds_store" - ] - - skipFiles.forEach(function(test) { - it("should skip validation for certain system specific files (" + test + ")", function(done) { - var filePath = cache.cacheDir + "/" + test; - fs.writeFileSync(filePath, ""); - - client.on('close', function() { - fs.access(filePath, function(error) { - assert(!error); - done(); - }) - }); - - client.write(cmd.integrityFix); - sleep(50).then(() => { client.end(); }); - }) - }) - }); - }); - describe("Other", function() { it("should force close the socket when a quit (q) command is received", function(done) { client = net.connect({port: server.port}, function (err) { From f0e35636d6cdea2c4e05898f82a5c17de5c86f5e Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 8 Nov 2017 20:41:59 -0600 Subject: [PATCH 03/89] All pending tests to green (or removed). Code cleanup. --- lib/server/client_stream_processor.js | 22 ++++++++++++------- lib/server/command_processor.js | 7 +++--- test/server.js | 31 +-------------------------- 3 files changed, 18 insertions(+), 42 deletions(-) diff --git a/lib/server/client_stream_processor.js b/lib/server/client_stream_processor.js index 86e83dc..3894781 100644 --- a/lib/server/client_stream_processor.js +++ b/lib/server/client_stream_processor.js @@ -14,6 +14,7 @@ class ClientStreamProcessor extends Transform { this.headerBuf = Buffer.allocUnsafe(MAX_HEADER_SIZE); this.didReadVersion = false; + this.version = ''; this.errState = null; this._registerEventListeners(); this._init(); @@ -83,12 +84,12 @@ class ClientStreamProcessor extends Transform { const self = this; let dataPos = 0; - function fillBufferWithData(size) { + function fillBufferWithData() { if(dataPos >= data.length) return false; // Only copy as much as we need for the remaining header size - size = Math.min(self.readState.headerSize - self.readState.headerBufPos); + let size = self.readState.headerSize - self.readState.headerBufPos; // Don't copy past the remaining bytes in the data block const toCopy = Math.min(size, data.length - dataPos); @@ -105,17 +106,22 @@ class ClientStreamProcessor extends Transform { } if(!this.didReadVersion) { - const verSize = Math.max(consts.VERSION_SIZE, Math.min(consts.PROTOCOL_VERSION_MIN_SIZE, data.length)); - dataPos += verSize; + let len = Math.min(consts.VERSION_SIZE - this.version.length, data.length); + this.version += data.slice(0, len).toString('ascii'); + dataPos += len; + if(this.version.length < consts.PROTOCOL_VERSION_MIN_SIZE) { + return null; + } + + this.push(this.version); this.didReadVersion = true; - this.push(data.slice(0, verSize)); } while(!isDone()) { // Read command if (!this.readState.didReadCmd) { - if(!fillBufferWithData(consts.CMD_SIZE)) { + if(!fillBufferWithData()) { // Quit? if (data[data.length - 1] === CMD_QUIT) { @@ -154,7 +160,7 @@ class ClientStreamProcessor extends Transform { // Read size if (this.readState.doReadSize && !this.readState.didReadSize) { - if(!fillBufferWithData(consts.SIZE_SIZE)) + if(!fillBufferWithData()) break; this.readState.didReadSize = true; @@ -164,7 +170,7 @@ class ClientStreamProcessor extends Transform { // Read ID if (this.readState.doReadId && !this.readState.didReadId) { - if(!fillBufferWithData(consts.ID_SIZE)) + if(!fillBufferWithData()) break; this.readState.didReadId = true; diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index 4a95890..cf49854 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -12,7 +12,6 @@ const kSendFileQueue = Symbol("sendFileQueue"); const kReadStateVersion = Symbol("readStateVersion"); const kReadStateCommand = Symbol("readStateCommand"); const kReadStatePutStream = Symbol("readStatePutStream"); -const kReadStateDone = Symbol("readStateDone"); class CommandProcessor extends Transform { constructor(clientStreamProcessor, cache) { @@ -41,7 +40,7 @@ class CommandProcessor extends Transform { case kReadStatePutStream: handler = this._handleWrite; break; - case kReadStateDone: + default: return callback(null); break; } @@ -59,7 +58,7 @@ class CommandProcessor extends Transform { this[kSendFileQueue].kill(); this[kSource].unpipe(this); this[kSource].emit('quit'); - this._readState = kReadStateDone; + this._readState = null; err && helpers.log(consts.LOG_ERR, err); } @@ -147,7 +146,7 @@ class CommandProcessor extends Transform { switch(cmd) { case 'q': this._quit(); - this._readState = kReadStateDone; + this._readState = null; break; case 'ga': case 'gi': diff --git a/test/server.js b/test/server.js index 8e070bc..9a0d8f7 100644 --- a/test/server.js +++ b/test/server.js @@ -123,7 +123,7 @@ describe("CacheServer protocol", function() { client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION + 1)); }); - it.skip("should recognize a 2 byte version sent 1 byte at a time", function (done) { + it("should recognize a 2 byte version sent 1 byte at a time", function (done) { this.slow(250); client.on('data', function(data) { @@ -219,35 +219,6 @@ describe("CacheServer protocol", function() { encodeCommand("px", null, null, self.data.bin)); }); - it.skip("should try to free cache space if the cache size exceeds the max cache size after writing a file", function(done) { - let match1 = false; - let match2 = false; - - cache.maxCacheSize = 1024; - - helpers.SetLogger(function(lvl, msg) { - match1 = match1 || /Begin.*1200/.test(msg); - match2 = match2 || /Completed.*800/.test(msg); - }); - - client.on('close', function() { - assert(match1 && match2); - cache.maxCacheSize = CACHE_SIZE; - done(); - }); - - const data = generateCommandData(400, 400); - client.write( - encodeCommand(cmd.transactionStart, data.guid, data.hash) + - encodeCommand(cmd.putAsset, null, null, data.bin) + - encodeCommand(cmd.putResource, null, null, data.resource) + - encodeCommand(cmd.putInfo, null, null, data.info) + - encodeCommand(cmd.transactionEnd)); - - sleep(50).then(() => { client.end(); }) - }); - - const tests = [ {ext: 'bin', cmd: cmd.putAsset}, {ext: 'info', cmd: cmd.putInfo}, From 8e29b0cdeb669a070d4a8a3148652aeca7ccbc54 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Thu, 9 Nov 2017 21:40:08 -0600 Subject: [PATCH 04/89] Code cleanup, test refactor to easily support more cache modules --- lib/cache/cache_debug.js | 18 +- lib/client/server_response_transform.js | 4 - lib/constants.js | 5 +- lib/server.js | 4 + lib/server/client_stream_processor.js | 24 +- lib/server/command_processor.js | 3 - protocol.md | 4 - test/cache.js | 240 ++++++++++++++++++++ test/server.js | 290 ++---------------------- test/test_utils.js | 74 ++++++ 10 files changed, 361 insertions(+), 305 deletions(-) create mode 100644 test/cache.js create mode 100644 test/test_utils.js diff --git a/lib/cache/cache_debug.js b/lib/cache/cache_debug.js index 9950511..4429cd2 100644 --- a/lib/cache/cache_debug.js +++ b/lib/cache/cache_debug.js @@ -1,20 +1,24 @@ const { Cache, PutTransaction } = require('./cache'); const { Readable, Writable } = require('stream'); const crypto = require('crypto'); +const config = require('config'); + const kBuffer = Symbol("buffer"); const kOptions = Symbol("options"); class CacheDebug extends Cache { - constructor(options) { - super(options); - - this[kOptions] = options; + constructor() { + super(); this[kBuffer] = Buffer.alloc( - options.maxFileSize, - crypto.randomBytes(options.maxFileSize).toString('ascii'), + CacheDebug._options.maxFileSize, + crypto.randomBytes(CacheDebug._options.maxFileSize).toString('ascii'), 'ascii'); } + static get _options() { + return config.get("Cache.options.cache_debug"); + } + getFileStream(type, guid, hash, callback) { const size = Math.trunc(Math.random() * this[kOptions].minFileSize + this[kOptions].maxFileSize); const slice = this[kBuffer].slice(0, size); @@ -30,7 +34,7 @@ class CacheDebug extends Cache { } createPutTransaction(guid, hash, callback) { - callback(null, new PutTransactionDebug()); + callback(null, new PutTransactionDebug(guid, hash)); } endPutTransaction(transaction, callback) { diff --git a/lib/client/server_response_transform.js b/lib/client/server_response_transform.js index cd3b166..9b2a905 100644 --- a/lib/client/server_response_transform.js +++ b/lib/client/server_response_transform.js @@ -95,10 +95,6 @@ class CacheServerResponseTransform extends Transform { this.doReadSize = false; this.doReadId = true; break; - case 'i': // integrity check - this.doReadSize = true; - this.doReadId = false; - break; default: return callback(new Error("Unrecognized command response, aborting!")); } diff --git a/lib/constants.js b/lib/constants.js index f8c7838..a7df31a 100644 --- a/lib/constants.js +++ b/lib/constants.js @@ -1,5 +1,5 @@ const constants = { - VERSION: "5.4.0", + VERSION: "6.0.0", PROTOCOL_VERSION: 254, PROTOCOL_VERSION_MIN_SIZE: 2, UINT32_SIZE: 8, // hex @@ -12,9 +12,6 @@ const constants = { LOG_INFO: 3, LOG_TEST: 4, LOG_DBG: 5, - DEFAULT_CACHE_DIR: require('path').resolve(`${__dirname}/../cache5.0`), - - DEFAULT_CACHE_SIZE: 1024 * 1024 * 1024 * 50, DEFAULT_PORT: 8126, DEFAULT_WORKERS: Math.ceil(require('os').cpus().length / 2) }; diff --git a/lib/server.js b/lib/server.js index 9cd593f..9e9d6a4 100644 --- a/lib/server.js +++ b/lib/server.js @@ -68,6 +68,10 @@ class CacheServer { if(callback && typeof(callback) === 'function') { callback(); } }); }; + + Stop() { + this._server.close(); + } } module.exports = CacheServer; \ No newline at end of file diff --git a/lib/server/client_stream_processor.js b/lib/server/client_stream_processor.js index 3894781..13ab213 100644 --- a/lib/server/client_stream_processor.js +++ b/lib/server/client_stream_processor.js @@ -1,4 +1,3 @@ -const assert = require('assert'); const helpers = require('./../helpers'); const consts = require('./../constants').Constants; @@ -55,13 +54,15 @@ class ClientStreamProcessor extends Transform { _transform(data, encoding, callback) { while(data !== null && data.length > 0 && this.errState === null) { - if (this.readState.dataPassThrough) + if (this.readState.dataPassThrough) { data = this._sendData(data); - else + } else { data = this._sendCommands(data); + } if(this.errState !== null) { helpers.log(consts.LOG_ERR, this.errState.msg); + this.push('q'); // quit } } @@ -85,9 +86,6 @@ class ClientStreamProcessor extends Transform { let dataPos = 0; function fillBufferWithData() { - if(dataPos >= data.length) - return false; - // Only copy as much as we need for the remaining header size let size = self.readState.headerSize - self.readState.headerBufPos; @@ -125,7 +123,6 @@ class ClientStreamProcessor extends Transform { // Quit? if (data[data.length - 1] === CMD_QUIT) { - this.push('q'); this.errState = ClientStreamProcessor.errorCodes.quitError; } @@ -160,8 +157,9 @@ class ClientStreamProcessor extends Transform { // Read size if (this.readState.doReadSize && !this.readState.didReadSize) { - if(!fillBufferWithData()) + if(!fillBufferWithData()) { break; + } this.readState.didReadSize = true; this.readState.dataSize = helpers.readUInt64(this.headerBuf.slice(consts.CMD_SIZE, consts.CMD_SIZE + consts.SIZE_SIZE).toString('ascii')); @@ -170,18 +168,20 @@ class ClientStreamProcessor extends Transform { // Read ID if (this.readState.doReadId && !this.readState.didReadId) { - if(!fillBufferWithData()) + if(!fillBufferWithData()) { break; + } this.readState.didReadId = true; } this.push(Buffer.from(this.headerBuf.slice(0, this.readState.headerBufPos))); - if(!this.readState.dataPassThrough) - this._init(); - else + if(this.readState.dataPassThrough) { break; + } + + this._init(); } return dataPos < data.length ? data.slice(dataPos) : null; diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index cf49854..f29fb95 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -1,7 +1,5 @@ -const assert = require('assert'); const helpers = require('./../helpers'); const consts = require('./../constants').Constants; -const crypto = require('crypto'); const async = require('async'); const { Transform } = require('stream'); @@ -42,7 +40,6 @@ class CommandProcessor extends Transform { break; default: return callback(null); - break; } handler.call(this, chunk, function(err) { diff --git a/protocol.md b/protocol.md index c913d53..7f31ddb 100644 --- a/protocol.md +++ b/protocol.md @@ -28,9 +28,5 @@ client --- 'pr' (size ) + size bytes --> server ## end transaction (ie rename targets to their final names) client --- 'te' --> server -## cache server integrity -client --- 'ic' () --> server -client <-- 'ic' (errors ) --- server - ## quit client --- 'q' --> server \ No newline at end of file diff --git a/test/cache.js b/test/cache.js new file mode 100644 index 0000000..617a129 --- /dev/null +++ b/test/cache.js @@ -0,0 +1,240 @@ +const assert = require('assert'); +const net = require('net'); +const crypto = require('crypto'); +const helpers = require('../lib/helpers'); +const consts = require('../lib/constants').Constants; +const CacheServer = require('../lib/server'); +const CmdResponseListener = require('./../lib/client/server_response_transform.js'); + +const generateCommandData = require('./test_utils').generateCommandData; +const encodeCommand = require('./test_utils').encodeCommand; +const sleep = require('./test_utils').sleep; +const expectLog = require('./test_utils').expectLog; +const cmd = require('./test_utils').cmd; + +helpers.SetLogger(()=>{}); +let cache, server, client; + +let test_modules = [ + { name: "CacheMembuf", path: "../lib/cache/cache_membuf" } +]; + +test_modules.forEach(function(module) { + describe(module.name, function() { + + beforeEach(function() { + helpers.SetLogger(function(lvl, msg) {}); + }); + + before(function (done) { + const Cache = require(module.path); + cache = new Cache(); + server = new CacheServer(cache, 0); + + server.Start(function (err) { + assert(!err, "Cache Server reported error! " + err); + }, done); + }); + + after(function() { + server.Stop(); + }); + + describe("PUT requests", function () { + this.slow(1500); + + const self = this; + + before(function () { + self.data = generateCommandData(); + }); + + beforeEach(function (done) { + client = net.connect({port: server.port}, function (err) { + assert(!err); + + // The Unity client always sends the version once on-connect. i.e., the version should not be pre-pended + // to other request data in the tests below. + client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); + done(); + }); + }); + + it("should close the socket on an invalid PUT type", function (done) { + expectLog(client, /Unrecognized command/i, done); + client.write( + encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + + encodeCommand("px", null, null, self.data.bin)); + }); + + const tests = [ + {ext: 'bin', cmd: cmd.putAsset}, + {ext: 'info', cmd: cmd.putInfo}, + {ext: 'resource', cmd: cmd.putResource} + ]; + + tests.forEach(function (test) { + it("should store " + test.ext + " data with a (" + test.cmd + ") cmd", function (done) { + client.on('close', function () { + cache.getFileStream(test.cmd[1], self.data.guid, self.data.hash, function (err, result) { + assert(!err, err); + assert(result.size === self.data[test.ext].length); + assert(result.stream !== null); + + result.stream.on("readable", function () { + const chunk = result.stream.read(); // should only be one in this test + assert(self.data[test.ext].compare(chunk) === 0); + done(); + }); + }); + }); + + const buf = Buffer.from( + encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + + encodeCommand(test.cmd, null, null, self.data[test.ext]) + + encodeCommand(cmd.transactionEnd), 'ascii'); + + let sentBytes = 0; + + function sendBytesAsync() { + setTimeout(() => { + const packetSize = Math.min(buf.length - sentBytes, Math.ceil(Math.random() * 10)); + client.write(buf.slice(sentBytes, sentBytes + packetSize), function () { + sentBytes += packetSize; + if (sentBytes < buf.length) + return sendBytesAsync(); + else + sleep(50).then(() => { + client.end(); + }); + }); + }, 1); + } + + sendBytesAsync(); + + }); + }); + + it("should replace an existing file with the same guid and hash", function (done) { + const asset = Buffer.from(crypto.randomBytes(self.data.bin.length).toString('ascii'), 'ascii'); + + client.on('close', function () { + cache.getFileStream('a', self.data.guid, self.data.hash, function (err, result) { + assert(!err, err); + assert(result.size === asset.length); + assert(result.stream !== null); + + result.stream.on("readable", function () { + const chunk = result.stream.read(); // should only be one in this test + assert(asset.compare(chunk) === 0); + done(); + }); + }); + }); + + client.write( + encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + + encodeCommand(cmd.putAsset, null, null, asset) + + encodeCommand(cmd.transactionEnd)); + + sleep(50).then(() => { + client.end(); + }); + }); + }); + + describe("GET requests", function () { + this.slow(1000); + + const self = this; + self.data = generateCommandData(); + + before(function (done) { + client = net.connect({port: server.port}, function (err) { + assert(!err); + client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); + client.write(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); + client.write(encodeCommand(cmd.putAsset, null, null, self.data.bin)); + client.write(encodeCommand(cmd.putInfo, null, null, self.data.info)); + client.write(encodeCommand(cmd.putResource, null, null, self.data.resource)); + client.write(cmd.transactionEnd); + + return sleep(25).then(done); + }); + }); + + beforeEach(function (done) { + client = net.connect({port: server.port}, function (err) { + assert(!err); + + // The Unity client always sends the version once on-connect. i.e., the version should not be pre-pended + // to other request data in the tests below. + client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); + done(); + }); + }); + + it("should close the socket on an invalid GET type", function (done) { + expectLog(client, /Unrecognized command/i, done); + client.write(encodeCommand('gx', self.data.guid, self.data.hash)); + }); + + const tests = [ + {cmd: cmd.getAsset, blob: self.data.bin, type: 'bin'}, + {cmd: cmd.getInfo, blob: self.data.info, type: 'info'}, + {cmd: cmd.getResource, blob: self.data.resource, type: 'resource'} + ]; + + tests.forEach(function (test) { + it("should retrieve stored " + test.type + " data with the (" + test.cmd + ") command", function (done) { + let dataBuf; + let pos = 0; + client.pipe(new CmdResponseListener()) + .on('header', function (header) { + assert(header.cmd[0] === '+'); + assert(header.size === test.blob.length, "Expected size " + test.blob.length); + dataBuf = Buffer.allocUnsafe(header.size); + }) + .on('data', function (data) { + pos += data.copy(dataBuf, pos, 0); + }) + .on('dataEnd', function () { + assert(dataBuf.compare(test.blob) === 0); + done(); + }); + + const buf = Buffer.from(encodeCommand(test.cmd, self.data.guid, self.data.hash), 'ascii'); + + let sentBytes = 0; + + function sendBytesAsync() { + setTimeout(() => { + const packetSize = Math.min(buf.length - sentBytes, Math.ceil(Math.random() * 10)); + client.write(buf.slice(sentBytes, sentBytes + packetSize), function () { + sentBytes += packetSize; + if (sentBytes < buf.length) + return sendBytesAsync(); + }); + }, 1); + } + + sendBytesAsync(); + + }); + + it("should respond with not found (-) for missing " + test.type + " data with the (" + test.cmd + ") command", function (done) { + client.pipe(new CmdResponseListener()) + .on('header', function (header) { + assert(header.cmd[0] === '-'); + done(); + }); + + const badGuid = Buffer.allocUnsafe(consts.GUID_SIZE).fill(0); + const badHash = Buffer.allocUnsafe(consts.HASH_SIZE).fill(0); + client.write(encodeCommand(test.cmd, badGuid, badHash)); + }); + }); + }); + }); +}); \ No newline at end of file diff --git a/test/server.js b/test/server.js index 9a0d8f7..f0936cb 100644 --- a/test/server.js +++ b/test/server.js @@ -1,91 +1,22 @@ const assert = require('assert'); const net = require('net'); -const crypto = require('crypto'); -const fs = require('fs'); const helpers = require('../lib/helpers'); const consts = require('../lib/constants').Constants; const CacheServer = require('../lib/server'); -const Cache = require("../lib/cache/cache_membuf"); +const Cache = require("../lib/cache/cache_debug"); -const CmdResponseListener = require('./../lib/client/server_response_transform.js'); - -const CACHE_SIZE = 1024 * 1024; -const MIN_BLOB_SIZE = 64; -const MAX_BLOB_SIZE = 2048; +const generateCommandData = require('./test_utils').generateCommandData; +const encodeCommand = require('./test_utils').encodeCommand; +const sleep = require('./test_utils').sleep; +const expectLog = require('./test_utils').expectLog; +const cmd = require('./test_utils').cmd; helpers.SetLogger(()=>{}); const cache = new Cache(); const server = new CacheServer(cache, 0); let client; -const cmd = { - quit: "q", - getAsset: "ga", - getInfo: "gi", - getResource: "gr", - putAsset: "pa", - putInfo: "pi", - putResource: "pr", - transactionStart: "ts", - transactionEnd: "te", - integrityVerify: "icv", - integrityFix: "icf" -}; - -function generateCommandData(minSize, maxSize) { - minSize = minSize || MIN_BLOB_SIZE; - maxSize = maxSize || MAX_BLOB_SIZE; - - function getSize() { return Math.max(minSize, Math.floor(Math.random() * maxSize)); } - - return { - guid: Buffer.from(crypto.randomBytes(consts.GUID_SIZE).toString('ascii'), 'ascii'), - hash: Buffer.from(crypto.randomBytes(consts.HASH_SIZE).toString('ascii'), 'ascii'), - bin: Buffer.from(crypto.randomBytes(getSize()).toString('ascii'), 'ascii'), - info: Buffer.from(crypto.randomBytes(getSize()).toString('ascii'), 'ascii'), - resource: Buffer.from(crypto.randomBytes(getSize()).toString('ascii'), 'ascii') - } -} - -function encodeCommand(command, guid, hash, blob) { - - if(blob) - command += helpers.encodeInt64(blob.length); - - if(guid) - command += guid; - - if(hash) - command += hash; - - if(blob) - command += blob; - - return command; -} - -function expectLog(client, regex, condition, callback) { - if(typeof(callback) !== 'function' && typeof(condition) === 'function') { - callback = condition; - condition = true; - } - - let match; - helpers.SetLogger(function (lvl, msg) { - match = match || regex.test(msg); - }); - - client.on('close', function() { - assert(match === condition); - callback(); - }); -} - -function sleep(ms) { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -describe("CacheServer protocol", function() { +describe("Server common", function() { beforeEach(function() { helpers.SetLogger(function(lvl, msg) {}); @@ -97,6 +28,10 @@ describe("CacheServer protocol", function() { }, done); }); + after(function() { + server.Stop(); + }); + describe("Version check", function () { beforeEach(function (done) { @@ -185,208 +120,21 @@ describe("CacheServer protocol", function() { }); }); - describe("PUT requests", function () { - this.slow(1500); - - const self = this; - this.getCachePath = function(extension, callback) { - return cache.GetCachePath( - helpers.readHex(self.data.guid.length, self.data.guid), - helpers.readHex(self.data.hash.length, self.data.hash), - extension, false, callback); - }; - - before(function() { - self.data = generateCommandData(); - }); - - beforeEach(function (done) { - client = net.connect({port: server.port}, function(err) { + describe("Other", function() { + it("should force close the socket when a quit (q) command is received", function(done) { + client = net.connect({port: server.port}, function (err) { assert(!err); - // The Unity client always sends the version once on-connect. i.e., the version should not be pre-pended - // to other request data in the tests below. - client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); - done(); - }); - - }); - - it("should close the socket on an invalid PUT type", function(done) { - expectLog(client, /Unrecognized command/i, done); - client.write( - encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + - encodeCommand("px", null, null, self.data.bin)); - }); - - const tests = [ - {ext: 'bin', cmd: cmd.putAsset}, - {ext: 'info', cmd: cmd.putInfo}, - {ext: 'resource', cmd: cmd.putResource} - ]; - - tests.forEach(function(test) { - it("should store " + test.ext + " data with a (" + test.cmd + ") cmd", function(done) { client.on('close', function() { - cache.getFileStream(test.cmd[1], self.data.guid, self.data.hash, function(err, result) { - assert(!err, err); - assert(result.size === self.data[test.ext].length); - assert(result.stream !== null); - - result.stream.on("readable", function() { - const chunk = result.stream.read(); // should only be one in this test - assert(self.data[test.ext].compare(chunk) === 0); - done(); - }); - }); - }); - - const buf = Buffer.from( - encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + - encodeCommand(test.cmd, null, null, self.data[test.ext]) + - encodeCommand(cmd.transactionEnd), 'ascii'); - - let sentBytes = 0; - - function sendBytesAsync() { - setTimeout(() => { - const packetSize = Math.min(buf.length - sentBytes, Math.ceil(Math.random() * 10)); - client.write(buf.slice(sentBytes, sentBytes + packetSize), function() { - sentBytes += packetSize; - if(sentBytes < buf.length) - return sendBytesAsync(); - else - sleep(50).then(() => { client.end(); }); - }); - }, 1); - } - - sendBytesAsync(); - - }); - }); - - it("should replace an existing file with the same guid and hash", function(done) { - const asset = Buffer.from(crypto.randomBytes(self.data.bin.length).toString('ascii'), 'ascii'); - - client.on('close', function() { - cache.getFileStream('a', self.data.guid, self.data.hash, function(err, result) { - assert(!err, err); - assert(result.size === asset.length); - assert(result.stream !== null); - - result.stream.on("readable", function() { - const chunk = result.stream.read(); // should only be one in this test - assert(asset.compare(chunk) === 0); - done(); - }); + done(); }); - }); - client.write( - encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + - encodeCommand(cmd.putAsset, null, null, asset) + - encodeCommand(cmd.transactionEnd)); - - sleep(50).then(() => { client.end(); }); - }); - }); - - describe("GET requests", function() { - this.slow(1000); - - const self = this; - self.data = generateCommandData(); - - before(function(done) { - client = net.connect({port: server.port}, function (err) { - assert(!err); client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); - client.write(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); - client.write(encodeCommand(cmd.putAsset, null, null, self.data.bin)); - client.write(encodeCommand(cmd.putInfo, null, null, self.data.info)); - client.write(encodeCommand(cmd.putResource, null, null, self.data.resource)); - client.write(cmd.transactionEnd); - - return sleep(25).then(done); - }); - }); - - beforeEach(function (done) { - client = net.connect({port: server.port}, function (err) { - assert(!err); - - // The Unity client always sends the version once on-connect. i.e., the version should not be pre-pended - // to other request data in the tests below. - client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); - done(); - }); - }); - - it("should close the socket on an invalid GET type", function(done) { - expectLog(client, /Unrecognized command/i, done); - client.write(encodeCommand('gx', self.data.guid, self.data.hash)); - }); - - const tests = [ - {cmd: cmd.getAsset, blob: self.data.bin, type: 'bin'}, - {cmd: cmd.getInfo, blob: self.data.info, type: 'info'}, - {cmd: cmd.getResource, blob: self.data.resource, type: 'resource'} - ]; - - tests.forEach(function(test) { - it("should retrieve stored " + test.type + " data with the (" + test.cmd + ") command", function(done) { - let dataBuf; - let pos = 0; - client.pipe(new CmdResponseListener()) - .on('header', function(header) { - assert(header.cmd[0] === '+'); - assert(header.size === test.blob.length, "Expected size " + test.blob.length); - dataBuf = Buffer.allocUnsafe(header.size); - }) - .on('data', function(data) { - pos += data.copy(dataBuf, pos, 0); - }) - .on('dataEnd', function() { - assert(dataBuf.compare(test.blob) === 0); - done(); - }); - - const buf = Buffer.from(encodeCommand(test.cmd, self.data.guid, self.data.hash), 'ascii'); - - let sentBytes = 0; - - function sendBytesAsync() { - setTimeout(() => { - const packetSize = Math.min(buf.length - sentBytes, Math.ceil(Math.random() * 10)); - client.write(buf.slice(sentBytes, sentBytes + packetSize), function() { - sentBytes += packetSize; - if(sentBytes < buf.length) - return sendBytesAsync(); - }); - }, 1); - } - - sendBytesAsync(); - - }); - - it("should respond with not found (-) for missing " + test.type + " data with the (" + test.cmd + ") command", function(done) { - client.pipe(new CmdResponseListener()) - .on('header', function(header) { - assert(header.cmd[0] === '-'); - done(); - }); - - const badGuid = Buffer.allocUnsafe(consts.GUID_SIZE).fill(0); - const badHash = Buffer.allocUnsafe(consts.HASH_SIZE).fill(0); - client.write(encodeCommand(test.cmd, badGuid, badHash)); + client.write(cmd.quit); }); }); - }); - describe("Other", function() { - it("should force close the socket when a quit (q) command is received", function(done) { + it("should force close the socket when an unrecognized command is received", function(done) { client = net.connect({port: server.port}, function (err) { assert(!err); @@ -395,8 +143,8 @@ describe("CacheServer protocol", function() { }); client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); - client.write(cmd.quit); + client.write('xx'); }); - }); + }) }) }); \ No newline at end of file diff --git a/test/test_utils.js b/test/test_utils.js new file mode 100644 index 0000000..b55369b --- /dev/null +++ b/test/test_utils.js @@ -0,0 +1,74 @@ +const assert = require('assert'); +const crypto = require('crypto'); +const consts = require('../lib/constants').Constants; +const helpers = require('../lib/helpers'); + +const MIN_BLOB_SIZE = 64; +const MAX_BLOB_SIZE = 2048; + +exports.generateCommandData = function(minSize, maxSize) { + minSize = minSize || MIN_BLOB_SIZE; + maxSize = maxSize || MAX_BLOB_SIZE; + + function getSize() { return Math.max(minSize, Math.floor(Math.random() * maxSize)); } + + return { + guid: Buffer.from(crypto.randomBytes(consts.GUID_SIZE).toString('ascii'), 'ascii'), + hash: Buffer.from(crypto.randomBytes(consts.HASH_SIZE).toString('ascii'), 'ascii'), + bin: Buffer.from(crypto.randomBytes(getSize()).toString('ascii'), 'ascii'), + info: Buffer.from(crypto.randomBytes(getSize()).toString('ascii'), 'ascii'), + resource: Buffer.from(crypto.randomBytes(getSize()).toString('ascii'), 'ascii') + } +}; + +exports.encodeCommand = function(command, guid, hash, blob) { + + if(blob) + command += helpers.encodeInt64(blob.length); + + if(guid) + command += guid; + + if(hash) + command += hash; + + if(blob) + command += blob; + + return command; +}; + +exports.expectLog = function(client, regex, condition, callback) { + if(typeof(callback) !== 'function' && typeof(condition) === 'function') { + callback = condition; + condition = true; + } + + let match; + helpers.SetLogger(function (lvl, msg) { + match = match || regex.test(msg); + }); + + client.on('close', function() { + assert(match === condition); + callback(); + }); +}; + +exports.sleep = function(ms) { + return new Promise((resolve) => setTimeout(resolve, ms)); +}; + +exports.cmd = { + quit: "q", + getAsset: "ga", + getInfo: "gi", + getResource: "gr", + putAsset: "pa", + putInfo: "pi", + putResource: "pr", + transactionStart: "ts", + transactionEnd: "te", + integrityVerify: "icv", + integrityFix: "icf" +}; \ No newline at end of file From 726ef842adf35ffb030c7cd245b203f5d2fac49e Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Sat, 11 Nov 2017 06:27:58 -0600 Subject: [PATCH 05/89] More test organization; removed the debug cache module which is no longer useful --- lib/cache/cache_debug.js | 63 --------------------------- lib/server.js | 5 --- lib/server/client_stream_processor.js | 2 +- test/cache.js | 49 ++++++++++++++++++++- test/server.js | 52 +--------------------- 5 files changed, 50 insertions(+), 121 deletions(-) delete mode 100644 lib/cache/cache_debug.js diff --git a/lib/cache/cache_debug.js b/lib/cache/cache_debug.js deleted file mode 100644 index 4429cd2..0000000 --- a/lib/cache/cache_debug.js +++ /dev/null @@ -1,63 +0,0 @@ -const { Cache, PutTransaction } = require('./cache'); -const { Readable, Writable } = require('stream'); -const crypto = require('crypto'); -const config = require('config'); - -const kBuffer = Symbol("buffer"); -const kOptions = Symbol("options"); - -class CacheDebug extends Cache { - constructor() { - super(); - this[kBuffer] = Buffer.alloc( - CacheDebug._options.maxFileSize, - crypto.randomBytes(CacheDebug._options.maxFileSize).toString('ascii'), - 'ascii'); - } - - static get _options() { - return config.get("Cache.options.cache_debug"); - } - - getFileStream(type, guid, hash, callback) { - const size = Math.trunc(Math.random() * this[kOptions].minFileSize + this[kOptions].maxFileSize); - const slice = this[kBuffer].slice(0, size); - - const stream = new Readable({ - read() { - this.push(slice); - this.push(null); - } - }); - - callback(null, {size: slice.length, stream: stream}); - } - - createPutTransaction(guid, hash, callback) { - callback(null, new PutTransactionDebug(guid, hash)); - } - - endPutTransaction(transaction, callback) { - callback(); - } - - registerClusterWorker(worker) {} -} - -class PutTransactionDebug extends PutTransaction { - constructor(guid, hash) { - super(guid, hash); - } - - getWriteStream(type, size, callback) { - const stream = new Writable({ - write(chunk, encoding, callback) { - callback(); - } - }); - - callback(null, stream); - } -} - -module.exports = CacheDebug; diff --git a/lib/server.js b/lib/server.js index 9e9d6a4..64d6a8a 100644 --- a/lib/server.js +++ b/lib/server.js @@ -1,10 +1,5 @@ -/** - * Created by spalmer on 10/16/17. - */ 'use strict'; -const cluster = require('cluster'); const net = require('net'); -const fs = require('fs'); const consts = require('./constants').Constants; const helpers = require('./helpers'); const ClientStreamProcessor = require('./server/client_stream_processor'); diff --git a/lib/server/client_stream_processor.js b/lib/server/client_stream_processor.js index 13ab213..10c3adb 100644 --- a/lib/server/client_stream_processor.js +++ b/lib/server/client_stream_processor.js @@ -28,7 +28,7 @@ class ClientStreamProcessor extends Transform { this.on('quit', function() { self[kSource].destroy(); - }) + }); } _init() { diff --git a/test/cache.js b/test/cache.js index 617a129..266d3e9 100644 --- a/test/cache.js +++ b/test/cache.js @@ -16,7 +16,7 @@ helpers.SetLogger(()=>{}); let cache, server, client; let test_modules = [ - { name: "CacheMembuf", path: "../lib/cache/cache_membuf" } + { name: "Cache: Membuf", path: "../lib/cache/cache_membuf" } ]; test_modules.forEach(function(module) { @@ -40,6 +40,53 @@ test_modules.forEach(function(module) { server.Stop(); }); + describe("Transactions", function () { + + const self = this; + + beforeEach(function (done) { + client = net.connect({port: server.port}, function (err) { + assert(!err, err); + self.data = generateCommandData(); + client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); + done(err); + }); + }); + + it("should start a transaction with the (ts) command", function (done) { + expectLog(client, /Start transaction/, done); + client.end(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); + }); + + it("should cancel a pending transaction if a new (ts) command is received", function (done) { + expectLog(client, /Cancel previous transaction/, done); + const d = encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash); + client.write(d); // first one ... + client.end(d); // ... canceled by this one + }); + + it("should require a start transaction (ts) cmd before an end transaction (te) cmd", function (done) { + expectLog(client, /Invalid transaction isolation/, done); + client.end(cmd.transactionEnd); + }); + + it("should end a transaction that was started with the (te) command", function (done) { + expectLog(client, /End transaction for/, done); + client.write(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); + client.end(cmd.transactionEnd); + }); + + it("should require a transaction start (te) command before a put command", function(done) { + expectLog(client, /Not in a transaction/, done); + client.write(encodeCommand(cmd.putAsset, null, null, self.data.bin)); + }); + + it("should close the socket on an invalid transaction command", function(done) { + expectLog(client, /Unrecognized command/i, done); + client.write('tx', self.data.guid, self.data.hash); + }); + }); + describe("PUT requests", function () { this.slow(1500); diff --git a/test/server.js b/test/server.js index f0936cb..02cafa4 100644 --- a/test/server.js +++ b/test/server.js @@ -3,12 +3,9 @@ const net = require('net'); const helpers = require('../lib/helpers'); const consts = require('../lib/constants').Constants; const CacheServer = require('../lib/server'); -const Cache = require("../lib/cache/cache_debug"); +const Cache = require("../lib/cache/cache").Cache; -const generateCommandData = require('./test_utils').generateCommandData; -const encodeCommand = require('./test_utils').encodeCommand; const sleep = require('./test_utils').sleep; -const expectLog = require('./test_utils').expectLog; const cmd = require('./test_utils').cmd; helpers.SetLogger(()=>{}); @@ -73,53 +70,6 @@ describe("Server common", function() { }); }); - describe("Transactions", function () { - - const self = this; - - beforeEach(function (done) { - client = net.connect({port: server.port}, function (err) { - assert(!err, err); - self.data = generateCommandData(); - client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); - done(err); - }); - }); - - it("should start a transaction with the (ts) command", function (done) { - expectLog(client, /Start transaction/, done); - client.end(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); - }); - - it("should cancel a pending transaction if a new (ts) command is received", function (done) { - expectLog(client, /Cancel previous transaction/, done); - const d = encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash); - client.write(d); // first one ... - client.end(d); // ... canceled by this one - }); - - it("should require a start transaction (ts) cmd before an end transaction (te) cmd", function (done) { - expectLog(client, /Invalid transaction isolation/, done); - client.end(cmd.transactionEnd); - }); - - it("should end a transaction that was started with the (te) command", function (done) { - expectLog(client, /End transaction for/, done); - client.write(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); - client.end(cmd.transactionEnd); - }); - - it("should require a transaction start (te) command before a put command", function(done) { - expectLog(client, /Not in a transaction/, done); - client.write(encodeCommand(cmd.putAsset, null, null, self.data.bin)); - }); - - it("should close the socket on an invalid transaction command", function(done) { - expectLog(client, /Unrecognized command/i, done); - client.write('tx', self.data.guid, self.data.hash); - }); - }); - describe("Other", function() { it("should force close the socket when a quit (q) command is received", function(done) { client = net.connect({port: server.port}, function (err) { From 630894584f28270dbe46cb9a0b24f6028bc19101 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 5 Dec 2017 13:17:22 -0600 Subject: [PATCH 06/89] Lots of WIP .. bugfixes, added membuf page serialization/deserialization, added a few console commands (needs more love) --- config/default.yml | 10 +- lib/cache/cache_membuf.js | 254 +++++++++++++++++++----- lib/client/server_response_transform.js | 177 +++++++++-------- lib/helpers.js | 2 - lib/server.js | 2 +- lib/server/client_stream_processor.js | 57 +++--- lib/server/command_processor.js | 115 ++++++----- main.js | 124 ++++++++---- package-lock.json | 167 ++++++++++++++-- package.json | 5 +- test/cache.js | 85 +++++--- test/test_utils.js | 2 +- 12 files changed, 686 insertions(+), 314 deletions(-) diff --git a/config/default.yml b/config/default.yml index 4f22db5..a201e15 100644 --- a/config/default.yml +++ b/config/default.yml @@ -2,10 +2,8 @@ Cache: module: "cache_membuf" path: "lib/cache" options: - cache_debug: - minFileSize: 100000 - maxFileSize: 1000000 cache_membuf: - initialPageSize: 10000000 - growPageSize: 10000000 - minFreeBlockSize: 1024 \ No newline at end of file + initialPageSize: 100000000 + growPageSize: 100000000 + minFreeBlockSize: 1024 + serializePath: ".cache_membuf" \ No newline at end of file diff --git a/lib/cache/cache_membuf.js b/lib/cache/cache_membuf.js index ba6a623..10f6cdf 100644 --- a/lib/cache/cache_membuf.js +++ b/lib/cache/cache_membuf.js @@ -1,31 +1,27 @@ -const cluster = require('cluster'); -const { Cache, PutTransaction } = require('./cache'); +const { PutTransaction } = require('./cache'); const { Readable, Writable } = require('stream'); const crypto = require('crypto'); const helpers = require('../helpers'); const consts = require('../constants').Constants; const config = require('config'); +const path = require('path'); +const fs = require('fs'); +const rimraf = require('rimraf'); +const async = require('async'); +const defaults = require('lodash/fp/defaults'); -class CacheMembuf extends Cache { - constructor() { - super(); +class CacheMembuf { - if(!cluster.isMaster) - throw new Error("CacheMembuf module does not support clustering!"); - - CacheMembuf._init(); + static get _options() { + let opts = config.get("Cache.options.cache_membuf"); + return defaults(opts, CacheMembuf._optionOverrides); } - static _init() { - if(CacheMembuf._pages.length === 0) { - CacheMembuf._freeBlocks = []; - CacheMembuf._index = {}; - CacheMembuf._allocPage(CacheMembuf._options.initialPageSize); - } - } + static get _serializePath() { + if(!CacheMembuf._options.hasOwnProperty('serializePath')) + return null; - static get _options() { - return config.get("Cache.options.cache_membuf"); + return path.join(path.dirname(require.main.filename), CacheMembuf._options.serializePath) } static _allocPage(size) { @@ -68,11 +64,14 @@ class CacheMembuf extends Cache { } static _freeBlock(key) { - if(!CacheMembuf.hasOwnProperty(key)) + if(!CacheMembuf._index.hasOwnProperty(key)) return; + let block = Object.assign({}, CacheMembuf._index[key]); + delete block.key; + // Duplicate the index data into the free block list - CacheMembuf._freeBlocks.push(Object.assign({}, CacheMembuf._index[key])); + CacheMembuf._freeBlocks.push(block); // Remove the block from the index delete CacheMembuf._index[key]; @@ -88,34 +87,34 @@ class CacheMembuf extends Cache { CacheMembuf._freeBlock(key); // Find the best free block to use - const i = CacheMembuf._findFreeBlockIndex(size); - if(i >= 0) { - const block = CacheMembuf._freeBlocks[i]; - CacheMembuf._index[key] = Object.assign({}, block); - CacheMembuf._index[key].size = size; - - // Update this free block if leftover space is greater than the minimum - if(block.size - size >= CacheMembuf._options.minFreeBlockSize) { - block.pageOffset += size; - block.size -= size; - - // Re-sort the free block list - CacheMembuf._freeBlocks.sort(function(a, b) { - return a.size - b.size; - }); - } - else { - // Otherwise remove it - CacheMembuf._freeBlocks.splice(i, 0); + let i; + while((i = CacheMembuf._findFreeBlockIndex(size)) < 0) { + let growPageSize = CacheMembuf._options.growPageSize; + let allocSize = Math.max(size, growPageSize); + if(allocSize > growPageSize) { + helpers.log(consts.LOG_WARN, "File allocation size of " + size + " exceeds growPageSize of " + growPageSize); } + + CacheMembuf._allocPage(allocSize); + } + + const block = CacheMembuf._freeBlocks[i]; + CacheMembuf._index[key] = Object.assign({}, block); + CacheMembuf._index[key].size = size; + + // Update this free block if leftover space is greater than the minimum + if(block.size - size >= CacheMembuf._options.minFreeBlockSize) { + block.pageOffset += size; + block.size -= size; + + // Re-sort the free block list + CacheMembuf._freeBlocks.sort(function(a, b) { + return a.size - b.size; + }); } else { - // Otherwise add a new page - CacheMembuf._index[key] = { - pageIndex: CacheMembuf._allocPage(CacheMembuf._options.growPageSize), - pageOffset: 0, - size: size - } + // Otherwise remove it + CacheMembuf._freeBlocks.splice(i, 1); } return CacheMembuf._index[key]; @@ -130,16 +129,164 @@ class CacheMembuf extends Cache { buffer.copy(CacheMembuf._pages[entry.pageIndex], entry.pageOffset, 0, buffer.length); } - getFileStream(type, guid, hash, callback) { + static _serialize(callback) { + + let p = CacheMembuf._serializePath; + if(p === null) + return callback(new Error("Invalid serializedPath")); + + let writeOps = []; + let i = 0; + + CacheMembuf._pages.forEach(function(page) { + writeOps.push({ + path: path.join(p, `page.${i++}`), + data: page + }); + }); + + writeOps.push({ + path: path.join(p, 'index.json'), + data: JSON.stringify(CacheMembuf._index) + }); + + writeOps.push({ + path: path.join(p, 'freeBlocks.json'), + data: JSON.stringify(CacheMembuf._freeBlocks) + }); + + function doWriteOp(op, cb) { + helpers.log(consts.LOG_INFO, `Writing ${op.path}`); + fs.writeFile(op.path, op.data, cb); + } + + async.series([ + async.apply(rimraf, p, {}), + async.apply(fs.mkdir, p, 0o755), + async.apply(async.eachSeries, writeOps, doWriteOp) + ], callback); + } + + static _deserialize(callback) { + const p = CacheMembuf._serializePath; + if(p === null || !fs.existsSync(p)) + return callback(new Error("Invalid serializedPath")); + + const files = fs.readdirSync(p); + + function loadIndexFile(cb) { + let indexFile = files.find(file => file.endsWith('index.json')); + if(!indexFile) { + return callback(new Error("Cannot find index.json")); + } + + indexFile = path.join(p, indexFile); + helpers.log(consts.LOG_DBG, `Loading index file at ${indexFile}`); + + fs.readFile(indexFile, 'utf8', function(err, result) { + if(err) return callback(err); + CacheMembuf._index = JSON.parse(result); + cb(); + }); + } + + function loadFreeBlocksFile(cb) { + let freeBlocksFile = files.find(file => file.endsWith('freeBlocks.json')); + if(!freeBlocksFile) { + return cb(new Error("Cannot find freeBlocks.json")); + } + + freeBlocksFile = path.join(p, freeBlocksFile); + helpers.log(consts.LOG_DBG, `Loading freeBlocksFile file at ${freeBlocksFile}`); + + fs.readFile(freeBlocksFile, 'utf8', function(err, result) { + if(err) return cb(err); + CacheMembuf._freeBlocks = JSON.parse(result); + cb(); + }); + } + + let pageFiles = files.filter(file => /page\.\d+$/.test(file)).sort((a, b) => { + return a.localeCompare(b, undefined, {numeric: true, sensitivity: 'base'}); + }); + + CacheMembuf._pages = new Array(pageFiles.length); + + function loadPageFile(file, index, cb) { + file = path.join(p, file); + helpers.log(consts.LOG_DBG, `Loading page file at ${file}`); + + fs.readFile(file, function(err, result) { + if(err) return cb(err); + CacheMembuf._pages[index] = result; + cb(); + }) + } + + async.series([ + async.apply(loadIndexFile), + async.apply(loadFreeBlocksFile), + async.apply(async.eachOf, pageFiles, loadPageFile) + ], callback); + } + + static _clearCache() { + CacheMembuf._pages = []; + CacheMembuf._freeBlocks = []; + CacheMembuf._index = {}; + CacheMembuf._allocPage(CacheMembuf._options.initialPageSize); + } + + static init(options, callback) { + if(typeof(options) === 'object') + CacheMembuf._optionOverrides = options; + + if(CacheMembuf._pages.length === 0) { + CacheMembuf._deserialize(function(err) { + if(err) { + helpers.log(consts.LOG_ERR, err); + CacheMembuf._clearCache(); + } + + callback(); + }); + } + } + + static reset(callback) { + let p = CacheMembuf._serializePath; + if(p !== null) { + rimraf(p, {}, function() { + CacheMembuf._clearCache(); + callback(); + }); + } + else { + CacheMembuf._clearCache(); + callback(); + } + } + + static save(callback) { + CacheMembuf._serialize(callback); + } + + static shutdown(callback) { + CacheMembuf._serialize(callback); + } + + static getFileStream(type, guid, hash, callback) { const key = CacheMembuf._calcIndexKey(type, guid, hash); if(CacheMembuf._index.hasOwnProperty(key)) { const entry = CacheMembuf._index[key]; - const slice = CacheMembuf._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size); + const file = CacheMembuf._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size); const stream = new Readable({ read() { - this.push(slice); + this.push(file); this.push(null); - } + }, + + highWaterMark: file.length }); callback(null, {size: entry.size, stream: stream}); @@ -149,20 +296,20 @@ class CacheMembuf extends Cache { } } - createPutTransaction(guid, hash, callback) { + static createPutTransaction(guid, hash, callback) { callback(null, new PutTransactionMembuf(guid, hash)); } - endPutTransaction(transaction, callback) { + static endPutTransaction(transaction, callback) { const files = transaction.getFiles(); files.forEach(function(file) { - CacheMembuf._addFileToCache.call(this, file.type, transaction.guid, transaction.hash, file.buffer); + CacheMembuf._addFileToCache(file.type, transaction.guid, transaction.hash, file.buffer); }); callback(); } - registerClusterWorker(worker) { + static registerClusterWorker(worker) { // Not implemented } } @@ -215,5 +362,6 @@ class PutTransactionMembuf extends PutTransaction { CacheMembuf._index = {}; CacheMembuf._pages = []; CacheMembuf._freeBlocks = []; +CacheMembuf._optionOverrides = {}; module.exports = CacheMembuf; \ No newline at end of file diff --git a/lib/client/server_response_transform.js b/lib/client/server_response_transform.js index 9b2a905..a0bfa02 100644 --- a/lib/client/server_response_transform.js +++ b/lib/client/server_response_transform.js @@ -2,129 +2,144 @@ const helpers = require('./../helpers'); const consts = require('./../constants').Constants; const { Transform } = require('stream'); -const MAX_HEADER_SIZE = consts.ID_SIZE; +const MAX_HEADER_SIZE = consts.CMD_SIZE + consts.ID_SIZE; class CacheServerResponseTransform extends Transform { constructor() { super(); this.headerBuf = Buffer.allocUnsafe(MAX_HEADER_SIZE); + this.didReadVersion = false; + this.version = 0; + this.errState = null; this._init(); } _init() { - this.didReadHeader = false; - this.headerData = {}; - this.headerBufPos = 0; - this.blobBytesRead = 0; - this.doReadSize = false; - this.doReadId = false; + this.readState = { + headerData: {}, + dataPassThrough : false, + didReadCmd: false, + doReadSize : false, + doReadId: false, + headerBufPos: 0, + headerSize: consts.CMD_SIZE, + dataBytesRead: 0 + }; + + this.readState.headerData.version = this.version; } _transform(data, encoding, callback) { - if(this.didReadHeader) { - this._sendData(data, callback); - } - else { - this._emitHeader(data, callback); + while(data !== null && data.length > 0) { + if (this.readState.dataPassThrough) { + data = this._sendData(data); + } + else { + data = this._emitHeader(data); + } + + if(this.errState !== null) { + helpers.log(consts.LOG_ERR, this.errState.msg); + } } + + callback(); } - _sendData(data, callback) { - const len = Math.min(this.headerData.size - this.blobBytesRead, data.length); - this.blobBytesRead += len; + _sendData(data) { + const len = Math.min(this.readState.headerData.size - this.readState.dataBytesRead, data.length); + this.push(data.slice(0, len)); + this.readState.dataBytesRead += len; - if(len >= data.length) { - this.push(data); - callback(); - } - else { - this.push(data.slice(0, len)); - this._emitHeader(data.slice(len), callback); - } - - if(this.blobBytesRead === this.headerData.size) { + if(this.readState.dataBytesRead === this.readState.headerData.size) { this._init(); this.emit('dataEnd'); } + + return len < data.length ? data.slice(len) : null; } - _emitHeader(data, callback) { + _emitHeader(data) { const self = this; let dataPos = 0; - function fillBufferWithData(fillToPos) { - const maxLen = fillToPos - self.headerBufPos; - const toCopy = Math.min(data.length, maxLen); - data.copy(self.headerBuf, self.headerBufPos, dataPos, dataPos + toCopy); - dataPos += toCopy; - self.headerBufPos += toCopy; + function fillBufferWithData() { + // Only copy as much as we need for the remaining header size + let size = self.readState.headerSize - self.readState.headerBufPos; - if(fillToPos === self.headerBufPos) { - self.headerBufPos = 0; - return true; - } + // Don't copy past the remaining bytes in the data block + const toCopy = Math.min(size, data.length - dataPos); - return false; - } + data.copy(self.headerBuf, self.readState.headerBufPos, dataPos, dataPos + toCopy); + dataPos += toCopy; + self.readState.headerBufPos += toCopy; - function isDone() { - return dataPos >= data.length || self.didReadHeader; + return self.readState.headerBufPos === self.readState.headerSize; } - function didRead(key) { - return self.headerData.hasOwnProperty(key); + function isDone() { + return dataPos >= data.length || self.errState !== null; } // Read version - if(!didRead('version') && fillBufferWithData(consts.VERSION_SIZE)) { - this.headerData.version = helpers.readUInt32(this.headerBuf.slice(0, consts.VERSION_SIZE)); + if (!this.didReadVersion) { + this.version = helpers.readUInt32(data.slice(0, consts.VERSION_SIZE)); + dataPos += Math.min(data.length, consts.VERSION_SIZE); + this.readState.headerData.version = this.version; + this.didReadVersion = true; } - if(isDone()) { return callback(); } - - // Read command - if(!didRead('cmd') && fillBufferWithData(consts.CMD_SIZE)) { - const cmd = this.headerBuf.slice(0, consts.CMD_SIZE).toString('ascii'); - this.headerData.cmd = cmd; - switch(cmd[0]) { - case '+': // file found - this.doReadSize = true; - this.doReadId = true; - break; - case '-': // file not found - this.doReadSize = false; - this.doReadId = true; - break; - default: - return callback(new Error("Unrecognized command response, aborting!")); + while(!isDone()) { + + if(!fillBufferWithData()) + break; + + // Read command + if (!this.readState.didReadCmd) { + const cmd = this.headerBuf.slice(0, consts.CMD_SIZE).toString('ascii'); + this.readState.headerData.cmd = cmd; + switch (cmd[0]) { + case '+': // file found + this.readState.doReadSize = true; + this.readState.doReadId = true; + this.readState.headerSize += consts.SIZE_SIZE + consts.ID_SIZE; + break; + case '-': // file not found + this.readState.doReadId = true; + this.readState.headerSize += consts.ID_SIZE; + break; + default: + this.errState = new Error("Unrecognized command response, aborting!"); + } } - } - if(isDone()) { return callback(); } + if(!fillBufferWithData()) + break; - // Read size - if(this.doReadSize && !didRead('size') && fillBufferWithData(consts.SIZE_SIZE)) { - this.headerData.size = helpers.readUInt64(this.headerBuf.slice(0, consts.UINT64_SIZE)); - } + let pos = consts.CMD_SIZE; - if(isDone()) { return callback(); } + if (this.readState.doReadSize) { + this.readState.headerData.size = helpers.readUInt64(this.headerBuf.slice(pos, pos + consts.UINT64_SIZE)); + pos += consts.UINT64_SIZE; + this.readState.dataPassThrough = true; + } - // Read ID - if(this.doReadId && !didRead('guid') && fillBufferWithData(consts.ID_SIZE)) { - this.headerData.guid = this.headerBuf.slice(0, consts.GUID_SIZE); - this.headerData.hash = this.headerBuf.slice(consts.GUID_SIZE); - } + if(this.readState.doReadId) { + this.readState.headerData.guid = this.headerBuf.slice(pos, pos + consts.GUID_SIZE); + pos += consts.GUID_SIZE; + this.readState.headerData.hash = this.headerBuf.slice(pos, pos + consts.HASH_SIZE); + } - this.didReadHeader = true; - this.emit('header', Object.assign({}, this.headerData)); + this.emit('header', Object.assign({}, this.readState.headerData)); - // Send any remaining bytes in the buffer as blob data - if(dataPos < data.length) { - this._sendData(data.slice(dataPos), callback); - } - else { - callback(); + if(this.readState.dataPassThrough) { + break; + } + + this._init(); } + + return dataPos < data.length ? data.slice(dataPos) : null; } } diff --git a/lib/helpers.js b/lib/helpers.js index 3b107b3..5ea1d7d 100644 --- a/lib/helpers.js +++ b/lib/helpers.js @@ -1,7 +1,5 @@ const cluster = require('cluster'); const consts = require("./constants").Constants; -const crypto = require('crypto'); -const os = require('os'); let logLevel = consts.LOG_TEST; diff --git a/lib/server.js b/lib/server.js index 64d6a8a..edee341 100644 --- a/lib/server.js +++ b/lib/server.js @@ -43,7 +43,7 @@ class CacheServer { helpers.log(consts.LOG_ERR, "Socket closed"); }) .on('error', function (err) { - helpers.log(consts.LOG_ERR, "Socket error " + err); + helpers.log(consts.LOG_ERR, err); }); const clientStreamProcessor = new ClientStreamProcessor(); diff --git a/lib/server/client_stream_processor.js b/lib/server/client_stream_processor.js index 10c3adb..f122352 100644 --- a/lib/server/client_stream_processor.js +++ b/lib/server/client_stream_processor.js @@ -12,7 +12,7 @@ class ClientStreamProcessor extends Transform { super(); this.headerBuf = Buffer.allocUnsafe(MAX_HEADER_SIZE); - this.didReadVersion = false; + this.didSendVersion = false; this.version = ''; this.errState = null; this._registerEventListeners(); @@ -35,9 +35,7 @@ class ClientStreamProcessor extends Transform { this.readState = { didReadCmd: false, doReadSize: false, - didReadSize: false, doReadId: false, - didReadId: false, dataPassThrough: false, dataSize: 0, headerBufPos: 0, @@ -54,7 +52,10 @@ class ClientStreamProcessor extends Transform { _transform(data, encoding, callback) { while(data !== null && data.length > 0 && this.errState === null) { - if (this.readState.dataPassThrough) { + if(!this.didSendVersion) { + data = this._sendVersion(data); + } + else if (this.readState.dataPassThrough) { data = this._sendData(data); } else { data = this._sendCommands(data); @@ -69,6 +70,20 @@ class ClientStreamProcessor extends Transform { callback(); } + _sendVersion(data) { + let len = Math.min(consts.VERSION_SIZE - this.version.length, data.length); + this.version += data.slice(0, len).toString('ascii'); + + if(this.version.length < consts.PROTOCOL_VERSION_MIN_SIZE) { + return null; + } + + this.push(this.version); + this.didSendVersion = true; + + return len < data.length ? data.slice(len) : null; + } + _sendData(data) { const len = Math.min(this.readState.dataSize - this.readState.dataBytesRead, data.length); this.push(data.slice(0, len)); @@ -78,7 +93,7 @@ class ClientStreamProcessor extends Transform { this._init(); } - return len < data.length ? data.slice(len) : Buffer.from([]); + return len < data.length ? data.slice(len) : null; } _sendCommands(data) { @@ -103,19 +118,6 @@ class ClientStreamProcessor extends Transform { return dataPos >= data.length || self.errState !== null; } - if(!this.didReadVersion) { - let len = Math.min(consts.VERSION_SIZE - this.version.length, data.length); - this.version += data.slice(0, len).toString('ascii'); - dataPos += len; - - if(this.version.length < consts.PROTOCOL_VERSION_MIN_SIZE) { - return null; - } - - this.push(this.version); - this.didReadVersion = true; - } - while(!isDone()) { // Read command if (!this.readState.didReadCmd) { @@ -155,26 +157,15 @@ class ClientStreamProcessor extends Transform { } } - // Read size - if (this.readState.doReadSize && !this.readState.didReadSize) { - if(!fillBufferWithData()) { - break; - } + if(!fillBufferWithData()) { + break; + } - this.readState.didReadSize = true; + if (this.readState.doReadSize) { this.readState.dataSize = helpers.readUInt64(this.headerBuf.slice(consts.CMD_SIZE, consts.CMD_SIZE + consts.SIZE_SIZE).toString('ascii')); this.readState.dataPassThrough = true; } - // Read ID - if (this.readState.doReadId && !this.readState.didReadId) { - if(!fillBufferWithData()) { - break; - } - - this.readState.didReadId = true; - } - this.push(Buffer.from(this.headerBuf.slice(0, this.readState.headerBufPos))); if(this.readState.dataPassThrough) { diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index f29fb95..8c26c22 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -1,8 +1,7 @@ const helpers = require('./../helpers'); const consts = require('./../constants').Constants; -const async = require('async'); -const { Transform } = require('stream'); +const { Duplex } = require('stream'); const kSource = Symbol("source"); const kCache = Symbol("cache"); @@ -11,12 +10,12 @@ const kReadStateVersion = Symbol("readStateVersion"); const kReadStateCommand = Symbol("readStateCommand"); const kReadStatePutStream = Symbol("readStatePutStream"); -class CommandProcessor extends Transform { +class CommandProcessor extends Duplex { constructor(clientStreamProcessor, cache) { super(); this[kSource] = clientStreamProcessor; this[kCache] = cache; - this[kSendFileQueue] = async.queue(this._sendFile.bind(this), 1); + this[kSendFileQueue] = []; this._readState = kReadStateVersion; this._trx = null; this._putStream = null; @@ -24,7 +23,7 @@ class CommandProcessor extends Transform { this._putSent = 0; } - _transform(chunk, encoding, callback) { + _write(chunk, encoding, callback) { let handler = null; const self = this; @@ -51,45 +50,50 @@ class CommandProcessor extends Transform { }); } - _quit(err) { - this[kSendFileQueue].kill(); - this[kSource].unpipe(this); - this[kSource].emit('quit'); - this._readState = null; - err && helpers.log(consts.LOG_ERR, err); - } + _read_internal() { + if(this[kSendFileQueue].length === 0) { + this.push(''); + return; + } - _sendFile(task, callback) { - const self = this; + let go = true; - this[kCache].getFileStream(task.type, task.guid, task.hash, function(err, result) { - if(err || result === null) { - self.push('-i'); - self.push(task.guid); - self.push(task.hash); + while(go && this[kSendFileQueue].length > 0) { + let file = this[kSendFileQueue][0]; + + if (file.header !== null) { + let header = file.header; + file.header = null; + go = this.push(header, 'ascii'); + helpers.log(consts.LOG_DBG, `Sent header, size ${header.length}`); } - else { - self.push('+i'); - self.push(helpers.encodeInt64(result.size)); - self.push(task.guid); - self.push(task.hash); - - result.stream - .on('readable', function() { - let chunk; - while((chunk = result.stream.read()) !== null) { - self.push(chunk); - } - }) - .on('end', function() { - callback(null); - }) - .on('error', function(err) { - callback(err); - }); + + let chunk = null; + + if (file.stream !== null && (chunk = file.stream.read()) !== null) { + go = this.push(chunk, 'ascii'); } + + if (chunk === null) { + helpers.log(consts.LOG_DBG, `Finished send queue item, length is now ${this[kSendFileQueue].length}`); + this[kSendFileQueue].shift(); + } + } + } + + _read() { + let self = this; + Promise.resolve().then(() => { + self._read_internal(); }); } + + _quit(err) { + this[kSource].unpipe(this); + this[kSource].emit('quit'); + this._readState = null; + err && helpers.log(consts.LOG_ERR, err); + } _handleVersion(data, callback) { let version = helpers.readUInt32(data); @@ -101,7 +105,7 @@ class CommandProcessor extends Transform { } this.push(helpers.encodeInt32(version)); - callback(null, err); + callback(err); } _handleWrite(data, callback) { @@ -140,6 +144,8 @@ class CommandProcessor extends Transform { return callback(); } + helpers.log(consts.LOG_DBG, "CP: Parsing command '" + cmd + "'"); + switch(cmd) { case 'q': this._quit(); @@ -167,13 +173,32 @@ class CommandProcessor extends Transform { } _onGet(type, guid, hash, callback) { - this[kSendFileQueue].push({ - type: type, - guid: guid, - hash: hash - }); + let self = this; + this[kCache].getFileStream(type, guid, hash, function(err, result) { - callback(null); + if(err || result === null) { + let resp = Buffer.from('-' + type, 'ascii'); + self[kSendFileQueue].push({ + header: Buffer.concat([resp, guid, hash], 34), + stream: null + }); + } + else { + let resp = Buffer.from('+' + type + helpers.encodeInt64(result.size), 'ascii'); + self[kSendFileQueue].push({ + size: result.size, + header: Buffer.concat([resp, guid, hash], 50), + stream: result.stream + }); + + helpers.log(consts.LOG_DBG, "CP: Adding file to send queue, size " + result.size); + } + + if(self[kSendFileQueue].length === 1) + self._read(self._readState.highWaterMark); + + callback(null); + }); } _onTransactionStart(guid, hash, callback) { diff --git a/main.js b/main.js index 9ae4f5a..f2b8b68 100644 --- a/main.js +++ b/main.js @@ -5,6 +5,7 @@ const program = require('commander'); const path = require('path'); const CacheServer = require('./lib/server'); const config = require('config'); +const prompt = require('prompt'); function myParseInt(val, def) { val = parseInt(val); @@ -26,37 +27,11 @@ program.description("Unity Cache Server") //.option('-P, --path [path]', 'Specify the path of the cache directory. Default is ./cache5.0', consts.DEFAULT_CACHE_DIR) .option('-l, --log-level ', 'Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is 4 (test)', myParseInt, consts.DEFAULT_LOG_LEVEL) .option('-w, --workers ', 'Number of worker threads to spawn. Default is 1 for every 2 CPUs reported by the OS', zeroOrMore, consts.DEFAULT_WORKERS) - .option('-v, --verify', 'Verify the Cache Server integrity, without fixing errors') - .option('-f, --fix', 'Fix errors found while verifying the Cache Server integrity') .option('-m, --monitor-parent-process ', 'Monitor a parent process and exit if it dies', myParseInt, 0) .parse(process.argv); helpers.SetLogLevel(program.logLevel); -// Initialize cache -let cache; - -try { - const moduleName = config.get("Cache.module"); - const modulePath = path.resolve(config.get("Cache.path"), moduleName); - helpers.log(consts.LOG_INFO, "Loading Cache module at " + modulePath); - const Cache = require(modulePath); - cache = new Cache(); -} -catch(e) { - console.log(e); - process.exit(1); -} - -if (program.verify || program.fix) { - console.log("Verifying integrity of Cache Server directory " + program.path); - const numErrors = cache.VerifyCache(program.fix); - console.log("Cache Server directory contains " + numErrors + " integrity issue(s)"); - if (program.fix) - console.log("Cache Server directory integrity fixed."); - process.exit(0); -} - if (program.monitorParentProcess > 0) { function monitor() { function is_running(pid) { @@ -83,24 +58,95 @@ const errHandler = function () { process.exit(1); }; -const server = new CacheServer(cache, program.port); +const moduleName = config.get("Cache.module"); +const modulePath = path.resolve(config.get("Cache.path"), moduleName); +helpers.log(consts.LOG_INFO, "Loading Cache module at " + modulePath); +const Cache = require(modulePath); +let server = null; -if(cluster.isMaster) { - helpers.log(consts.LOG_INFO, "Cache Server version " + consts.VERSION); +Cache.init({}, function() { + server = new CacheServer(Cache, program.port); - if(program.workers === 0) { + if(cluster.isMaster) { + helpers.log(consts.LOG_INFO, "Cache Server version " + consts.VERSION); + + if(program.workers === 0) { + server.Start(errHandler, function () { + helpers.log(consts.LOG_INFO, `Cache Server ready on port ${server.port}`); + startPrompt(); + }); + } + + for(let i = 0; i < program.workers; i++) { + const worker = cluster.fork(); + Cache.registerClusterWorker(worker); + } + } + else { server.Start(errHandler, function () { - helpers.log(consts.LOG_INFO, `Cache Server ready on port ${server.port}`); + helpers.log(consts.LOG_INFO, `Cache Server worker ${cluster.worker.id} ready on port ${server.port}`); }); } +}); - for(let i = 0; i < program.workers; i++) { - const worker = cluster.fork(); - cache.registerClusterWorker(worker); - } -} -else { - server.Start(errHandler, function () { - helpers.log(consts.LOG_INFO, `Cache Server worker ${cluster.worker.id} ready on port ${server.port}`); +function startPrompt() { + prompt.message = ""; + prompt.delimiter = "> "; + prompt.start(); + + prompt.get(['command'], function(err, result) { + if(err) { + if(err.message === 'canceled') { + result = { command: 'q' }; + } + else { + helpers.log(consts.LOG_ERR, err); + server.Stop(); + process.exit(1); + } + } + + if(result) { + switch(result.command) { + case 'q': + helpers.log(consts.LOG_INFO, "Shutting down ..."); + Cache.shutdown(function () { + server.Stop(); + process.exit(0); + }); + break; + + case 's': + helpers.log(consts.LOG_INFO, "Saving cache data ..."); + Cache.save(function(err) { + if(err) { + helpers.log(consts.LOG_ERR, err); + server.Stop(); + process.exit(1); + } + + helpers.log(consts.LOG_INFO, "Save finished."); + }); + + break; + case 'r': + helpers.log(consts.LOG_INFO, "Resetting cache data ..."); + Cache.reset(function(err) { + "use strict"; + if(err) { + helpers.log(consts.LOG_ERR, err); + server.Stop(); + process.exit(1); + } + + helpers.log(consts.LOG_INFO, "Reset finished."); + }); + } + } + + process.nextTick(startPrompt); }); } + + + diff --git a/package-lock.json b/package-lock.json index 1367c0d..3aeb3fb 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15,18 +15,16 @@ "version": "https://registry.npmjs.org/async/-/async-2.5.0.tgz", "integrity": "sha1-hDGQ/WtzV6C54clW7d3V7IRitU0=", "requires": { - "lodash": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz" + "lodash": "4.17.4" } }, "balanced-match": { "version": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" }, "brace-expansion": { "version": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz", "integrity": "sha1-wHshHHyVLsH479Uad+8NHTmQopI=", - "dev": true, "requires": { "balanced-match": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", "concat-map": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" @@ -37,14 +35,18 @@ "integrity": "sha1-81HTKWnTL6XXpVZxVCY9korjvR8=", "dev": true }, + "colors": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.1.2.tgz", + "integrity": "sha1-FopHAXVran9RoSzgyXv6KMCE7WM=" + }, "commander": { "version": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", "integrity": "sha1-FXFS/R56bI2YpbcVzzdt+SgARWM=" }, "concat-map": { "version": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" }, "config": { "version": "https://registry.npmjs.org/config/-/config-1.27.0.tgz", @@ -582,6 +584,11 @@ } } }, + "cycle": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/cycle/-/cycle-1.0.3.tgz", + "integrity": "sha1-IegLK+hYD5i0aPN5QwZisEbDStI=" + }, "debug": { "version": "https://registry.npmjs.org/debug/-/debug-2.6.8.tgz", "integrity": "sha1-5zFTHKLt4n0YgiJCfaF4IdaP9Pw=", @@ -590,6 +597,11 @@ "ms": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" } }, + "deep-equal": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-0.2.2.tgz", + "integrity": "sha1-hLdFiW80xoTpjyzg5Cq69Du6AX0=" + }, "diff": { "version": "https://registry.npmjs.org/diff/-/diff-3.2.0.tgz", "integrity": "sha1-yc45Okt8vQsFinJck98pkCeGj/k=", @@ -604,15 +616,18 @@ "version": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz", "integrity": "sha1-RJnt3NERDgshi6zy+n9/WfVcqAQ=" }, + "eyes": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", + "integrity": "sha1-Ys8SAjTGg3hdkCNIqADvPgzCC8A=" + }, "fs.realpath": { "version": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", - "dev": true + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, "glob": { "version": "https://registry.npmjs.org/glob/-/glob-7.1.1.tgz", "integrity": "sha1-gFIR3wT6rxxjo2ADBs31reULLsg=", - "dev": true, "requires": { "fs.realpath": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "inflight": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", @@ -642,10 +657,14 @@ "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=", "dev": true }, + "i": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/i/-/i-0.3.6.tgz", + "integrity": "sha1-2WyScyB28HJxG2sQ/X1PZa2O4j0=" + }, "inflight": { "version": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "dev": true, "requires": { "once": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "wrappy": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" @@ -653,8 +672,12 @@ }, "inherits": { "version": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", - "dev": true + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + }, + "isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" }, "istanbul": { "version": "0.4.5", @@ -1285,7 +1308,8 @@ "integrity": "sha1-BUNS5MTIDIbAkjh31EneF2pzLI0=" }, "lodash": { - "version": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", + "version": "4.17.4", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", "integrity": "sha1-eCA6TRwyiuHYbcpkYONptX9AVa4=" }, "lodash._baseassign": { @@ -1350,20 +1374,17 @@ "minimatch": { "version": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", - "dev": true, "requires": { "brace-expansion": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz" } }, "minimist": { "version": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", - "dev": true + "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" }, "mkdirp": { "version": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", - "dev": true, "requires": { "minimist": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" } @@ -1409,10 +1430,19 @@ "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", "dev": true }, + "mute-stream": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=" + }, + "ncp": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ncp/-/ncp-1.0.1.tgz", + "integrity": "sha1-0VNn5cuHQyuhF9K/gP30Wuz7QkY=" + }, "once": { "version": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "dev": true, "requires": { "wrappy": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" } @@ -1423,13 +1453,56 @@ }, "path-is-absolute": { "version": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "dev": true + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" + }, + "pkginfo": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.4.1.tgz", + "integrity": "sha1-tUGO8EOd5UJfxJlQQtztFPsqhP8=" + }, + "prompt": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prompt/-/prompt-1.0.0.tgz", + "integrity": "sha1-jlcSPDlquYiJf7Mn/Trtw+c15P4=", + "requires": { + "colors": "1.1.2", + "pkginfo": "0.4.1", + "read": "1.0.7", + "revalidator": "0.1.8", + "utile": "0.3.0", + "winston": "2.1.1" + } + }, + "read": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", + "integrity": "sha1-s9oZvQUkMal2cdRKQmNK33ELQMQ=", + "requires": { + "mute-stream": "0.0.7" + } + }, + "revalidator": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/revalidator/-/revalidator-0.1.8.tgz", + "integrity": "sha1-/s5hv6DBtSoga9axgZgYS91SOjs=" + }, + "rimraf": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.2.tgz", + "integrity": "sha512-lreewLK/BlghmxtfH36YYVg1i8IAce4TI7oao75I1g245+6BctqTVQiBP3YUJ9C6DQOXJmkYR9X9fCLtCOJc5w==", + "requires": { + "glob": "https://registry.npmjs.org/glob/-/glob-7.1.1.tgz" + } }, "sprintf-js": { "version": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" }, + "stack-trace": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", + "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=" + }, "supports-color": { "version": "https://registry.npmjs.org/supports-color/-/supports-color-3.1.2.tgz", "integrity": "sha1-cqJiiU2dQIuVbKBf83su2KbiotU=", @@ -1438,10 +1511,60 @@ "has-flag": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz" } }, + "utile": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/utile/-/utile-0.3.0.tgz", + "integrity": "sha1-E1LDQOuCDk2N26A5pPv6oy7U7zo=", + "requires": { + "async": "0.9.2", + "deep-equal": "0.2.2", + "i": "0.3.6", + "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "ncp": "1.0.1", + "rimraf": "2.6.2" + }, + "dependencies": { + "async": { + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz", + "integrity": "sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0=" + } + } + }, + "winston": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/winston/-/winston-2.1.1.tgz", + "integrity": "sha1-PJNJ0ZYgf9G9/51LxD73JRDjoS4=", + "requires": { + "async": "1.0.0", + "colors": "1.0.3", + "cycle": "1.0.3", + "eyes": "0.1.8", + "isstream": "0.1.2", + "pkginfo": "0.3.1", + "stack-trace": "0.0.10" + }, + "dependencies": { + "async": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/async/-/async-1.0.0.tgz", + "integrity": "sha1-+PwEyjoTeErenhZBr5hXjPvWR6k=" + }, + "colors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.0.3.tgz", + "integrity": "sha1-BDP0TYCWgP3rYO0mDxsMJi6CpAs=" + }, + "pkginfo": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.3.1.tgz", + "integrity": "sha1-Wyn2qB9wcXFC4J52W76rl7T4HiE=" + } + } + }, "wrappy": { "version": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" } } } diff --git a/package.json b/package.json index a806c45..55828f6 100644 --- a/package.json +++ b/package.json @@ -36,6 +36,9 @@ "async": "^2.5.0", "commander": "^2.11.0", "config": "^1.27.0", - "js-yaml": "^3.10.0" + "js-yaml": "^3.10.0", + "lodash": "^4.17.4", + "prompt": "^1.0.0", + "rimraf": "^2.6.2" } } diff --git a/test/cache.js b/test/cache.js index 266d3e9..d4955c3 100644 --- a/test/cache.js +++ b/test/cache.js @@ -5,6 +5,7 @@ const helpers = require('../lib/helpers'); const consts = require('../lib/constants').Constants; const CacheServer = require('../lib/server'); const CmdResponseListener = require('./../lib/client/server_response_transform.js'); +const { Writable } = require('stream'); const generateCommandData = require('./test_utils').generateCommandData; const encodeCommand = require('./test_utils').encodeCommand; @@ -12,28 +13,34 @@ const sleep = require('./test_utils').sleep; const expectLog = require('./test_utils').expectLog; const cmd = require('./test_utils').cmd; -helpers.SetLogger(()=>{}); let cache, server, client; -let test_modules = [ - { name: "Cache: Membuf", path: "../lib/cache/cache_membuf" } -]; +let test_modules = [{ + name: "Cache: Membuf", + path: "../lib/cache/cache_membuf", + options: { + initialPageSize: 10000, + growPageSize: 10000, + minFreeBlockSize: 1024 + } + }]; test_modules.forEach(function(module) { describe(module.name, function() { beforeEach(function() { - helpers.SetLogger(function(lvl, msg) {}); + helpers.SetLogger(function(lvl, msg) { console.log(msg); }); }); before(function (done) { - const Cache = require(module.path); - cache = new Cache(); - server = new CacheServer(cache, 0); + cache = require(module.path); + cache.init(module.options, function() { + server = new CacheServer(cache, 0); - server.Start(function (err) { - assert(!err, "Cache Server reported error! " + err); - }, done); + server.Start(function (err) { + assert(!err, "Cache Server reported error! " + err); + }, done); + }); }); after(function() { @@ -44,10 +51,13 @@ test_modules.forEach(function(module) { const self = this; + before(function() { + self.data = generateCommandData(); + }); + beforeEach(function (done) { client = net.connect({port: server.port}, function (err) { assert(!err, err); - self.data = generateCommandData(); client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); done(err); }); @@ -195,7 +205,7 @@ test_modules.forEach(function(module) { this.slow(1000); const self = this; - self.data = generateCommandData(); + self.data = generateCommandData(5000000, 6000000); before(function (done) { client = net.connect({port: server.port}, function (err) { @@ -206,8 +216,8 @@ test_modules.forEach(function(module) { client.write(encodeCommand(cmd.putInfo, null, null, self.data.info)); client.write(encodeCommand(cmd.putResource, null, null, self.data.resource)); client.write(cmd.transactionEnd); - - return sleep(25).then(done); + client.end(cmd.quit); + client.on('close', done); }); }); @@ -233,13 +243,33 @@ test_modules.forEach(function(module) { {cmd: cmd.getResource, blob: self.data.resource, type: 'resource'} ]; + it("should respond with not found (-) for missing files", function (done) { + let count = 0; + + client.pipe(new CmdResponseListener()) + .on('header', function (header) { + assert(header.cmd === '-' + tests[count].cmd[1]); + count++; + if(count === 3) done(); + }); + + const badGuid = Buffer.allocUnsafe(consts.GUID_SIZE).fill(0); + const badHash = Buffer.allocUnsafe(consts.HASH_SIZE).fill(0); + + tests.forEach(function(test) { + client.write(encodeCommand(test.cmd, badGuid, badHash)); + }); + }); + + tests.forEach(function (test) { it("should retrieve stored " + test.type + " data with the (" + test.cmd + ") command", function (done) { let dataBuf; let pos = 0; - client.pipe(new CmdResponseListener()) - .on('header', function (header) { - assert(header.cmd[0] === '+'); + let resp = new CmdResponseListener(); + + resp.on('header', function (header) { + assert(header.cmd === '+' + test.cmd[1]); assert(header.size === test.blob.length, "Expected size " + test.blob.length); dataBuf = Buffer.allocUnsafe(header.size); }) @@ -251,6 +281,13 @@ test_modules.forEach(function(module) { done(); }); + client.pipe(resp); + + // client.on('data', function(data) { + // "use strict"; + // console.log("Received data " + data.length); + // }); + const buf = Buffer.from(encodeCommand(test.cmd, self.data.guid, self.data.hash), 'ascii'); let sentBytes = 0; @@ -269,18 +306,6 @@ test_modules.forEach(function(module) { sendBytesAsync(); }); - - it("should respond with not found (-) for missing " + test.type + " data with the (" + test.cmd + ") command", function (done) { - client.pipe(new CmdResponseListener()) - .on('header', function (header) { - assert(header.cmd[0] === '-'); - done(); - }); - - const badGuid = Buffer.allocUnsafe(consts.GUID_SIZE).fill(0); - const badHash = Buffer.allocUnsafe(consts.HASH_SIZE).fill(0); - client.write(encodeCommand(test.cmd, badGuid, badHash)); - }); }); }); }); diff --git a/test/test_utils.js b/test/test_utils.js index b55369b..84f0996 100644 --- a/test/test_utils.js +++ b/test/test_utils.js @@ -10,7 +10,7 @@ exports.generateCommandData = function(minSize, maxSize) { minSize = minSize || MIN_BLOB_SIZE; maxSize = maxSize || MAX_BLOB_SIZE; - function getSize() { return Math.max(minSize, Math.floor(Math.random() * maxSize)); } + function getSize() { return minSize + Math.floor(Math.random() * (maxSize - minSize)); } return { guid: Buffer.from(crypto.randomBytes(consts.GUID_SIZE).toString('ascii'), 'ascii'), From d0d26d3ac28470519bf0ea756e37d3071f43507a Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 5 Dec 2017 15:56:55 -0600 Subject: [PATCH 07/89] Added client download throughput stat reporting; simplified calculation of page index key string --- lib/cache/cache_membuf.js | 9 ++------- lib/server/command_processor.js | 19 ++++++++++++++++++- package-lock.json | 5 +++++ package.json | 1 + 4 files changed, 26 insertions(+), 8 deletions(-) diff --git a/lib/cache/cache_membuf.js b/lib/cache/cache_membuf.js index 10f6cdf..7d8db1a 100644 --- a/lib/cache/cache_membuf.js +++ b/lib/cache/cache_membuf.js @@ -1,6 +1,5 @@ const { PutTransaction } = require('./cache'); const { Readable, Writable } = require('stream'); -const crypto = require('crypto'); const helpers = require('../helpers'); const consts = require('../constants').Constants; const config = require('config'); @@ -36,11 +35,7 @@ class CacheMembuf { } static _calcIndexKey(type, guid, hash) { - const h = crypto.createHash('sha256'); - h.update(type); - h.update(guid); - h.update(hash); - return h.digest('hex'); + return `${guid.toString('hex')}-${hash.toString('hex')}-${type}`; } static _findFreeBlockIndex(size) { @@ -170,7 +165,7 @@ class CacheMembuf { static _deserialize(callback) { const p = CacheMembuf._serializePath; if(p === null || !fs.existsSync(p)) - return callback(new Error("Invalid serializedPath")); + return callback(new Error("Invalid serializePath")); const files = fs.readdirSync(p); diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index 8c26c22..511c831 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -1,4 +1,5 @@ const helpers = require('./../helpers'); +const filesize = require('filesize'); const consts = require('./../constants').Constants; const { Duplex } = require('stream'); @@ -21,6 +22,9 @@ class CommandProcessor extends Duplex { this._putStream = null; this._putSize = 0; this._putSent = 0; + this._sendFileQueueReadStartTime = Date.now(); + this._sendFileQueueReadBytes = 0; + this._sendFileQueueCount = 0; } _write(chunk, encoding, callback) { @@ -52,6 +56,14 @@ class CommandProcessor extends Duplex { _read_internal() { if(this[kSendFileQueue].length === 0) { + if(this._sendFileQueueReadBytes > 0) { + let totalTime = (Date.now() - this._sendFileQueueReadStartTime) / 1000; + let throughput = (this._sendFileQueueReadBytes / totalTime).toFixed(2); + helpers.log(consts.LOG_TEST, `Sent ${this._sendFileQueueCount} files totaling ${filesize(this._sendFileQueueReadBytes)} in ${totalTime} seconds (${filesize(throughput)}/sec)`); + this._sendFileQueueReadBytes = 0; + this._sendFileQueueCount = 0; + } + this.push(''); return; } @@ -72,6 +84,7 @@ class CommandProcessor extends Duplex { if (file.stream !== null && (chunk = file.stream.read()) !== null) { go = this.push(chunk, 'ascii'); + this._sendFileQueueReadBytes += chunk.length; } if (chunk === null) { @@ -191,11 +204,15 @@ class CommandProcessor extends Duplex { stream: result.stream }); + self._sendFileQueueCount++; helpers.log(consts.LOG_DBG, "CP: Adding file to send queue, size " + result.size); } - if(self[kSendFileQueue].length === 1) + if(self[kSendFileQueue].length === 1) { + self._sendFileQueueReadStartTime = Date.now(); + self._sendFileQueueReadBytes = 0; self._read(self._readState.highWaterMark); + } callback(null); }); diff --git a/package-lock.json b/package-lock.json index 3aeb3fb..6e4600c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -621,6 +621,11 @@ "resolved": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", "integrity": "sha1-Ys8SAjTGg3hdkCNIqADvPgzCC8A=" }, + "filesize": { + "version": "3.5.11", + "resolved": "https://registry.npmjs.org/filesize/-/filesize-3.5.11.tgz", + "integrity": "sha512-ZH7loueKBoDb7yG9esn1U+fgq7BzlzW6NRi5/rMdxIZ05dj7GFD/Xc5rq2CDt5Yq86CyfSYVyx4242QQNZbx1g==" + }, "fs.realpath": { "version": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" diff --git a/package.json b/package.json index 55828f6..9b8fa5d 100644 --- a/package.json +++ b/package.json @@ -36,6 +36,7 @@ "async": "^2.5.0", "commander": "^2.11.0", "config": "^1.27.0", + "filesize": "^3.5.11", "js-yaml": "^3.10.0", "lodash": "^4.17.4", "prompt": "^1.0.0", From 05fa598bde4628a6580a002200e47f6f0061b261 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Mon, 11 Dec 2017 09:56:29 -0600 Subject: [PATCH 08/89] Refactor to use lokifs in-memory database for membuf cache meta data. Auto-save page files and meta data (if dirty) at configurable intervals. --- config/default.yml | 6 +- lib/cache/cache_membuf.js | 309 +++++++++++++++----------------- lib/server/command_processor.js | 4 +- package-lock.json | 10 ++ package.json | 3 +- test/cache.js | 9 +- 6 files changed, 165 insertions(+), 176 deletions(-) diff --git a/config/default.yml b/config/default.yml index a201e15..2e6e830 100644 --- a/config/default.yml +++ b/config/default.yml @@ -6,4 +6,8 @@ Cache: initialPageSize: 100000000 growPageSize: 100000000 minFreeBlockSize: 1024 - serializePath: ".cache_membuf" \ No newline at end of file + serializePath: ".cache_membuf" + persistenceOptions: + autosave: true + autosaveInterval: 10000 + throttledSaves: false \ No newline at end of file diff --git a/lib/cache/cache_membuf.js b/lib/cache/cache_membuf.js index 7d8db1a..4c218f6 100644 --- a/lib/cache/cache_membuf.js +++ b/lib/cache/cache_membuf.js @@ -5,15 +5,21 @@ const consts = require('../constants').Constants; const config = require('config'); const path = require('path'); const fs = require('fs'); -const rimraf = require('rimraf'); const async = require('async'); -const defaults = require('lodash/fp/defaults'); +const _ = require('lodash'); +const loki = require('lokijs'); +const uuid = require('uuid/v4'); + +const kOptionsPath = 'Cache.options.cache_membuf'; +const kDbName = 'cache_membuf.db'; +const kIndex = 'index'; +const kPageMeta = 'pages'; class CacheMembuf { static get _options() { - let opts = config.get("Cache.options.cache_membuf"); - return defaults(opts, CacheMembuf._optionOverrides); + let opts = config.get(kOptionsPath); + return _.defaultsDeep(opts, CacheMembuf._optionOverrides); } static get _serializePath() { @@ -23,67 +29,48 @@ class CacheMembuf { return path.join(path.dirname(require.main.filename), CacheMembuf._options.serializePath) } + static get _dbPath() { + return path.join(CacheMembuf._serializePath, kDbName); + } + static _allocPage(size) { - CacheMembuf._pages.push(Buffer.alloc(size, 0, 'ascii')); - CacheMembuf._freeBlocks.push({ - pageIndex: CacheMembuf._pages.length - 1, + let pageIndex = uuid(); + CacheMembuf._pages[pageIndex] = Buffer.alloc(size, 0, 'ascii'); + + CacheMembuf._index.insert({ + pageIndex: pageIndex, pageOffset: 0, size: size }); - - return CacheMembuf._freeBlocks.length - 1; + + return CacheMembuf._pageMeta.insert({ + index: pageIndex, + size: size, + dirty: true + }); } static _calcIndexKey(type, guid, hash) { return `${guid.toString('hex')}-${hash.toString('hex')}-${type}`; } - static _findFreeBlockIndex(size) { - let best = -1; - let min = 0; - let max = CacheMembuf._freeBlocks.length - 1; - let guess; - - while (min <= max) { - guess = (min + max) >> 1; - - if (CacheMembuf._freeBlocks[guess].size < size) { - min = guess + 1; - } else { - best = guess; - max = guess - 1; - } - } + static _findFreeBlock(size) { + let result = CacheMembuf._index.chain() + .find({ 'fileId' : undefined, 'size' : { '$gte' : size }}) + .simplesort('size') + .limit(1) + .data(); - return best; - } - - static _freeBlock(key) { - if(!CacheMembuf._index.hasOwnProperty(key)) - return; - - let block = Object.assign({}, CacheMembuf._index[key]); - delete block.key; - - // Duplicate the index data into the free block list - CacheMembuf._freeBlocks.push(block); - - // Remove the block from the index - delete CacheMembuf._index[key]; - - // Re-sort the free block list - CacheMembuf._freeBlocks.sort(function(a, b) { - return a.size - b.size; - }); + return result.length > 0 ? result[0] : null; } static _reserveBlock(key, size) { // Free any existing block for this key - CacheMembuf._freeBlock(key); + CacheMembuf._index.findAndUpdate({'fileId' : key}, doc => doc['fileId'] = undefined); // Find the best free block to use - let i; - while((i = CacheMembuf._findFreeBlockIndex(size)) < 0) { + let freeBlock; + while((freeBlock = CacheMembuf._findFreeBlock(size)) === null) { let growPageSize = CacheMembuf._options.growPageSize; let allocSize = Math.max(size, growPageSize); if(allocSize > growPageSize) { @@ -93,35 +80,36 @@ class CacheMembuf { CacheMembuf._allocPage(allocSize); } - const block = CacheMembuf._freeBlocks[i]; - CacheMembuf._index[key] = Object.assign({}, block); - CacheMembuf._index[key].size = size; + // Clone the free block, then set it's file id and size + let block = _.omit(freeBlock, ['$loki', 'meta']); + block['fileId'] = key; + block['size'] = size; + CacheMembuf._index.insert(block); // Update this free block if leftover space is greater than the minimum - if(block.size - size >= CacheMembuf._options.minFreeBlockSize) { - block.pageOffset += size; - block.size -= size; - - // Re-sort the free block list - CacheMembuf._freeBlocks.sort(function(a, b) { - return a.size - b.size; - }); + if(freeBlock.size - size >= CacheMembuf._options.minFreeBlockSize) { + freeBlock.pageOffset += size; + freeBlock.size -= size; + CacheMembuf._index.update(freeBlock); } else { - // Otherwise remove it - CacheMembuf._freeBlocks.splice(i, 1); + CacheMembuf._index.remove(freeBlock); } - return CacheMembuf._index[key]; + return block; } static _addFileToCache(type, guid, hash, buffer) { const key = CacheMembuf._calcIndexKey(type, guid, hash); const entry = CacheMembuf._reserveBlock(key, buffer.length); - helpers.log(consts.LOG_DBG, "Saving file: pageIndex = " + entry.pageIndex + " pageOffset = " + entry.pageOffset + " size = " + entry.size); + helpers.log(consts.LOG_TEST, `Saving file type: ${type} guid: ${guid.toString('hex')} hash: ${hash.toString('hex')} pageIndex: ${entry.pageIndex} pageOffset: ${entry.pageOffset} size: ${entry.size}`); buffer.copy(CacheMembuf._pages[entry.pageIndex], entry.pageOffset, 0, buffer.length); + + let pageMeta = CacheMembuf._pageMeta.by('index', entry.pageIndex); + pageMeta.dirty = true; + CacheMembuf._pageMeta.update(pageMeta); } static _serialize(callback) { @@ -130,36 +118,26 @@ class CacheMembuf { if(p === null) return callback(new Error("Invalid serializedPath")); - let writeOps = []; - let i = 0; - - CacheMembuf._pages.forEach(function(page) { - writeOps.push({ - path: path.join(p, `page.${i++}`), - data: page - }); - }); - - writeOps.push({ - path: path.join(p, 'index.json'), - data: JSON.stringify(CacheMembuf._index) - }); - - writeOps.push({ - path: path.join(p, 'freeBlocks.json'), - data: JSON.stringify(CacheMembuf._freeBlocks) + let pages = CacheMembuf._pageMeta.chain().find({'dirty' : true}).data(); + let writeOps = pages.map(function(page) { + return { + index: page.index, + path: path.join(p, page.index), + data: CacheMembuf._pages[page.index] + } }); function doWriteOp(op, cb) { helpers.log(consts.LOG_INFO, `Writing ${op.path}`); - fs.writeFile(op.path, op.data, cb); + fs.writeFile(op.path, op.data, function(err) { + if(err) return cb(err); + let doc = CacheMembuf._pageMeta.by('index', op.index); + doc.dirty = false; + CacheMembuf._pageMeta.update(doc); + }); } - async.series([ - async.apply(rimraf, p, {}), - async.apply(fs.mkdir, p, 0o755), - async.apply(async.eachSeries, writeOps, doWriteOp) - ], callback); + async.eachSeries(writeOps, doWriteOp, callback); } static _deserialize(callback) { @@ -167,113 +145,95 @@ class CacheMembuf { if(p === null || !fs.existsSync(p)) return callback(new Error("Invalid serializePath")); - const files = fs.readdirSync(p); + let pages = CacheMembuf._pageMeta.chain().find({}).data(); - function loadIndexFile(cb) { - let indexFile = files.find(file => file.endsWith('index.json')); - if(!indexFile) { - return callback(new Error("Cannot find index.json")); - } - - indexFile = path.join(p, indexFile); - helpers.log(consts.LOG_DBG, `Loading index file at ${indexFile}`); - - fs.readFile(indexFile, 'utf8', function(err, result) { - if(err) return callback(err); - CacheMembuf._index = JSON.parse(result); - cb(); + function loadPageFile(page, cb) { + let file = path.join(p, page.index); + helpers.log(consts.LOG_DBG, `Loading page file at ${file}`); + fs.stat(file, function(err, stats) { + if(err) + return cb(err); + + if(stats.size !== page.size) + return cb(new Error(`Unrecognized/invalid page file '${file}'`)); + + fs.readFile(file, function(err, result) { + if(err) return cb(err); + CacheMembuf._pages[page.index] = result; + cb(); + }); }); } - function loadFreeBlocksFile(cb) { - let freeBlocksFile = files.find(file => file.endsWith('freeBlocks.json')); - if(!freeBlocksFile) { - return cb(new Error("Cannot find freeBlocks.json")); - } - - freeBlocksFile = path.join(p, freeBlocksFile); - helpers.log(consts.LOG_DBG, `Loading freeBlocksFile file at ${freeBlocksFile}`); - - fs.readFile(freeBlocksFile, 'utf8', function(err, result) { - if(err) return cb(err); - CacheMembuf._freeBlocks = JSON.parse(result); - cb(); - }); - } + async.each(pages, loadPageFile, callback); + } - let pageFiles = files.filter(file => /page\.\d+$/.test(file)).sort((a, b) => { - return a.localeCompare(b, undefined, {numeric: true, sensitivity: 'base'}); - }); + static _clearCache() { + CacheMembuf._index.clear(); + CacheMembuf._pageMeta.clear(); + CacheMembuf._pages = {}; + CacheMembuf._allocPage(CacheMembuf._options.initialPageSize); + } - CacheMembuf._pages = new Array(pageFiles.length); + static _initDb(options, callback) { + let db = new loki(CacheMembuf._dbPath, options); + CacheMembuf._db = db; - function loadPageFile(file, index, cb) { - file = path.join(p, file); - helpers.log(consts.LOG_DBG, `Loading page file at ${file}`); + db.loadDatabase({}, function() { + CacheMembuf._index = db.getCollection(kIndex); + CacheMembuf._pageMeta = db.getCollection(kPageMeta); - fs.readFile(file, function(err, result) { - if(err) return cb(err); - CacheMembuf._pages[index] = result; - cb(); - }) - } + if(CacheMembuf._pageMeta === null) { + CacheMembuf._pageMeta = db.addCollection(kPageMeta, { + unique: ["index"] + }); + } - async.series([ - async.apply(loadIndexFile), - async.apply(loadFreeBlocksFile), - async.apply(async.eachOf, pageFiles, loadPageFile) - ], callback); - } + if(CacheMembuf._index === null) { + CacheMembuf._index = db.addCollection(kIndex, { + unique: ["fileId"], + indices: ["size"] + }); - static _clearCache() { - CacheMembuf._pages = []; - CacheMembuf._freeBlocks = []; - CacheMembuf._index = {}; - CacheMembuf._allocPage(CacheMembuf._options.initialPageSize); + CacheMembuf._clearCache(); + callback(); + } + else { + CacheMembuf._deserialize(callback); + } + }); } static init(options, callback) { if(typeof(options) === 'object') CacheMembuf._optionOverrides = options; - if(CacheMembuf._pages.length === 0) { - CacheMembuf._deserialize(function(err) { - if(err) { - helpers.log(consts.LOG_ERR, err); - CacheMembuf._clearCache(); - } - - callback(); - }); + let dbOpts = CacheMembuf._options.get('persistenceOptions') || {}; + if(!dbOpts.hasOwnProperty('adapter')) { + dbOpts.adapter = new PersistenceAdapter(); } + + CacheMembuf._initDb(dbOpts, callback); } static reset(callback) { - let p = CacheMembuf._serializePath; - if(p !== null) { - rimraf(p, {}, function() { - CacheMembuf._clearCache(); - callback(); - }); - } - else { - CacheMembuf._clearCache(); - callback(); - } + CacheMembuf._clearCache(); + callback(); } static save(callback) { - CacheMembuf._serialize(callback); + CacheMembuf._db.saveDatabase(callback); } static shutdown(callback) { - CacheMembuf._serialize(callback); + CacheMembuf._db.close(callback); } static getFileStream(type, guid, hash, callback) { - const key = CacheMembuf._calcIndexKey(type, guid, hash); - if(CacheMembuf._index.hasOwnProperty(key)) { - const entry = CacheMembuf._index[key]; + const entry = CacheMembuf._index.by('fileId', CacheMembuf._calcIndexKey(type, guid, hash)); + + // noinspection EqualityComparisonWithCoercionJS (checking for null or undefined) + if(entry != null) { const file = CacheMembuf._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size); const stream = new Readable({ read() { @@ -354,9 +314,20 @@ class PutTransactionMembuf extends PutTransaction { } } -CacheMembuf._index = {}; -CacheMembuf._pages = []; -CacheMembuf._freeBlocks = []; +CacheMembuf._db = null; +CacheMembuf._pages = {}; CacheMembuf._optionOverrides = {}; -module.exports = CacheMembuf; \ No newline at end of file +module.exports = CacheMembuf; + +class PersistenceAdapter extends loki.LokiFsAdapter { + constructor() { + super(); + } + + saveDatabase(dbname, dbstring, callback) { + super.saveDatabase(dbname, dbstring, function() { + CacheMembuf._serialize(callback); + }); + } +} \ No newline at end of file diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index 511c831..1830e88 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -56,6 +56,8 @@ class CommandProcessor extends Duplex { _read_internal() { if(this[kSendFileQueue].length === 0) { + + // print some stats if(this._sendFileQueueReadBytes > 0) { let totalTime = (Date.now() - this._sendFileQueueReadStartTime) / 1000; let throughput = (this._sendFileQueueReadBytes / totalTime).toFixed(2); @@ -77,7 +79,6 @@ class CommandProcessor extends Duplex { let header = file.header; file.header = null; go = this.push(header, 'ascii'); - helpers.log(consts.LOG_DBG, `Sent header, size ${header.length}`); } let chunk = null; @@ -88,7 +89,6 @@ class CommandProcessor extends Duplex { } if (chunk === null) { - helpers.log(consts.LOG_DBG, `Finished send queue item, length is now ${this[kSendFileQueue].length}`); this[kSendFileQueue].shift(); } } diff --git a/package-lock.json b/package-lock.json index 6e4600c..3a94de4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1376,6 +1376,11 @@ "lodash.isarray": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-3.0.4.tgz" } }, + "lokijs": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/lokijs/-/lokijs-1.5.1.tgz", + "integrity": "sha512-Pj67gdP6CxUPV7AXM/VAnUZNyKR6mx4JxNmZfVG7XeebBZyrd8iLcKxKutc6Z5akJlMb0EeCxPW8/YkCPiMQbw==" + }, "minimatch": { "version": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", @@ -1536,6 +1541,11 @@ } } }, + "uuid": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.1.0.tgz", + "integrity": "sha512-DIWtzUkw04M4k3bf1IcpS2tngXEL26YUD2M0tMDUpnUrz2hgzUBlD55a4FjdLGPvfHxS6uluGWvaVEqgBcVa+g==" + }, "winston": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/winston/-/winston-2.1.1.tgz", diff --git a/package.json b/package.json index 9b8fa5d..ba87dee 100644 --- a/package.json +++ b/package.json @@ -39,7 +39,8 @@ "filesize": "^3.5.11", "js-yaml": "^3.10.0", "lodash": "^4.17.4", + "lokijs": "^1.5.1", "prompt": "^1.0.0", - "rimraf": "^2.6.2" + "uuid": "^3.1.0" } } diff --git a/test/cache.js b/test/cache.js index d4955c3..1bc01e2 100644 --- a/test/cache.js +++ b/test/cache.js @@ -5,7 +5,7 @@ const helpers = require('../lib/helpers'); const consts = require('../lib/constants').Constants; const CacheServer = require('../lib/server'); const CmdResponseListener = require('./../lib/client/server_response_transform.js'); -const { Writable } = require('stream'); +const loki = require('lokijs'); const generateCommandData = require('./test_utils').generateCommandData; const encodeCommand = require('./test_utils').encodeCommand; @@ -21,7 +21,10 @@ let test_modules = [{ options: { initialPageSize: 10000, growPageSize: 10000, - minFreeBlockSize: 1024 + minFreeBlockSize: 1024, + persistenceOptions: { + adapter: new loki.LokiMemoryAdapter() + } } }]; @@ -29,7 +32,7 @@ test_modules.forEach(function(module) { describe(module.name, function() { beforeEach(function() { - helpers.SetLogger(function(lvl, msg) { console.log(msg); }); + helpers.SetLogger(function() {}); }); before(function (done) { From 35d06e55637a6bc1cc9bf5180806f0ec8043d952 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Mon, 11 Dec 2017 10:07:55 -0600 Subject: [PATCH 09/89] Fixing bug - missing callback. Modified high water mark of command buffer --- lib/cache/cache_membuf.js | 1 + lib/server/command_processor.js | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/cache/cache_membuf.js b/lib/cache/cache_membuf.js index 4c218f6..7ceaecc 100644 --- a/lib/cache/cache_membuf.js +++ b/lib/cache/cache_membuf.js @@ -134,6 +134,7 @@ class CacheMembuf { let doc = CacheMembuf._pageMeta.by('index', op.index); doc.dirty = false; CacheMembuf._pageMeta.update(doc); + cb(); }); } diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index 1830e88..2dff1d1 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -13,7 +13,7 @@ const kReadStatePutStream = Symbol("readStatePutStream"); class CommandProcessor extends Duplex { constructor(clientStreamProcessor, cache) { - super(); + super({highWaterMark: 16 * 1024 * 4}); this[kSource] = clientStreamProcessor; this[kCache] = cache; this[kSendFileQueue] = []; From 359ba7f990acb4360befd29feead70ec555482e7 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Mon, 11 Dec 2017 21:08:13 -0600 Subject: [PATCH 10/89] Ensure database and page files are not modified during a serialization event; remove highWatermark option from CommandProcessor, as it wasn't evident it was making any difference. --- lib/cache/cache_membuf.js | 34 +++++++++++++++++++++++++-------- lib/server/command_processor.js | 2 +- 2 files changed, 27 insertions(+), 9 deletions(-) diff --git a/lib/cache/cache_membuf.js b/lib/cache/cache_membuf.js index 7ceaecc..f23b3e7 100644 --- a/lib/cache/cache_membuf.js +++ b/lib/cache/cache_membuf.js @@ -99,11 +99,21 @@ class CacheMembuf { return block; } + static _waitForSerialize() { + return new Promise((resolve) => { + (function waitForSave() { + if(CacheMembuf._searializeInProgess === false) return resolve(); + helpers.log(consts.LOG_TEST, "_waitForSerialize..."); + setTimeout(waitForSave, 100); + })(); + }); + } + static _addFileToCache(type, guid, hash, buffer) { const key = CacheMembuf._calcIndexKey(type, guid, hash); const entry = CacheMembuf._reserveBlock(key, buffer.length); - helpers.log(consts.LOG_TEST, `Saving file type: ${type} guid: ${guid.toString('hex')} hash: ${hash.toString('hex')} pageIndex: ${entry.pageIndex} pageOffset: ${entry.pageOffset} size: ${entry.size}`); + helpers.log(consts.LOG_TEST, `Saving file key: ${key} pageIndex: ${entry.pageIndex} pageOffset: ${entry.pageOffset} size: ${entry.size}`); buffer.copy(CacheMembuf._pages[entry.pageIndex], entry.pageOffset, 0, buffer.length); @@ -186,7 +196,8 @@ class CacheMembuf { if(CacheMembuf._pageMeta === null) { CacheMembuf._pageMeta = db.addCollection(kPageMeta, { - unique: ["index"] + unique: ["index"], + indices: ["dirty"] }); } @@ -257,12 +268,14 @@ class CacheMembuf { } static endPutTransaction(transaction, callback) { - const files = transaction.getFiles(); - files.forEach(function(file) { - CacheMembuf._addFileToCache(file.type, transaction.guid, transaction.hash, file.buffer); - }); + CacheMembuf._waitForSerialize().then(() => { + const files = transaction.getFiles(); + files.forEach(function (file) { + CacheMembuf._addFileToCache(file.type, transaction.guid, transaction.hash, file.buffer); + }); - callback(); + callback(); + }); } static registerClusterWorker(worker) { @@ -318,6 +331,7 @@ class PutTransactionMembuf extends PutTransaction { CacheMembuf._db = null; CacheMembuf._pages = {}; CacheMembuf._optionOverrides = {}; +CacheMembuf._searializeInProgess = false; module.exports = CacheMembuf; @@ -327,8 +341,12 @@ class PersistenceAdapter extends loki.LokiFsAdapter { } saveDatabase(dbname, dbstring, callback) { + CacheMembuf._searializeInProgess = true; super.saveDatabase(dbname, dbstring, function() { - CacheMembuf._serialize(callback); + CacheMembuf._serialize(function() { + CacheMembuf._searializeInProgess = false; + callback(); + }); }); } } \ No newline at end of file diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index 2dff1d1..1830e88 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -13,7 +13,7 @@ const kReadStatePutStream = Symbol("readStatePutStream"); class CommandProcessor extends Duplex { constructor(clientStreamProcessor, cache) { - super({highWaterMark: 16 * 1024 * 4}); + super(); this[kSource] = clientStreamProcessor; this[kCache] = cache; this[kSendFileQueue] = []; From bfe3dfde5377f2a8351c6af9a43e10323a58c553 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Mon, 11 Dec 2017 21:09:17 -0600 Subject: [PATCH 11/89] Adding membuf serialization folder to gitignore --- .gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index cbaa8fa..922633c 100644 --- a/.gitignore +++ b/.gitignore @@ -3,4 +3,5 @@ cache5.0/ coverage/ node_modules/ .coveralls.yml -!lib/cache \ No newline at end of file +!lib/cache +.cache_membuf/ \ No newline at end of file From c20502ca10448322ed90e5d47de028953c0636d3 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 15 Dec 2017 22:17:30 -0600 Subject: [PATCH 12/89] Moved back to instance based cache modules, to allow for cleaner inheritance from the base cache class. Added back ability to specify a custom cache save folder --- config/default.yml | 3 +- lib/cache/cache.js | 41 +++++- lib/cache/cache_membuf.js | 258 ++++++++++++++++++++------------------ main.js | 40 +++--- package-lock.json | 28 +++++ package.json | 1 + test/cache.js | 4 +- 7 files changed, 236 insertions(+), 139 deletions(-) diff --git a/config/default.yml b/config/default.yml index 2e6e830..4e50379 100644 --- a/config/default.yml +++ b/config/default.yml @@ -5,8 +5,9 @@ Cache: cache_membuf: initialPageSize: 100000000 growPageSize: 100000000 + maxPageCount: 10 minFreeBlockSize: 1024 - serializePath: ".cache_membuf" + cachePath: ".cache_membuf" persistenceOptions: autosave: true autosaveInterval: 10000 diff --git a/lib/cache/cache.js b/lib/cache/cache.js index 26f7110..60eac2e 100644 --- a/lib/cache/cache.js +++ b/lib/cache/cache.js @@ -1,7 +1,46 @@ 'use strict'; +const config = require('config'); +const path = require('path'); +const fs = require('fs-extra'); +const _ = require('lodash'); class Cache { - constructor() {} + static get properties() { + return {}; + } + + get _optionsPath() { + return 'Cache.options'; + } + + get _options() { + let opts = config.get(this._optionsPath); + return _.defaultsDeep(this._optionOverrides, opts); + } + + get _cachePath() { + if(!this._options.hasOwnProperty('cachePath')) + return null; + + return path.join(path.dirname(require.main.filename), this._options.cachePath) + } + + init(options, callback) { + if(typeof(options) === 'object') + this._optionOverrides = options; + + const p = this._cachePath; + + if(typeof(callback) !== 'function') { + return fs.mkdirs(p); + } + + fs.mkdirs(p, callback); + } + + registerClusterWorker(worker) { + // Not implemented + } } class PutTransaction { diff --git a/lib/cache/cache_membuf.js b/lib/cache/cache_membuf.js index f23b3e7..fb74b52 100644 --- a/lib/cache/cache_membuf.js +++ b/lib/cache/cache_membuf.js @@ -1,61 +1,65 @@ -const { PutTransaction } = require('./cache'); +'use strict' +const { Cache, PutTransaction } = require('./cache'); const { Readable, Writable } = require('stream'); const helpers = require('../helpers'); const consts = require('../constants').Constants; -const config = require('config'); const path = require('path'); -const fs = require('fs'); +const fs = require('fs-extra'); const async = require('async'); const _ = require('lodash'); const loki = require('lokijs'); const uuid = require('uuid/v4'); -const kOptionsPath = 'Cache.options.cache_membuf'; const kDbName = 'cache_membuf.db'; const kIndex = 'index'; const kPageMeta = 'pages'; -class CacheMembuf { +class CacheMembuf extends Cache { + constructor() { + super(); + this._db = null; + this._pages = {}; + this._serializeInProgress = false; + } - static get _options() { - let opts = config.get(kOptionsPath); - return _.defaultsDeep(opts, CacheMembuf._optionOverrides); + static get properties() { + return { + clustering: false + } } - static get _serializePath() { - if(!CacheMembuf._options.hasOwnProperty('serializePath')) - return null; + static _calcIndexKey(type, guid, hash) { + return `${guid.toString('hex')}-${hash.toString('hex')}-${type}`; + } - return path.join(path.dirname(require.main.filename), CacheMembuf._options.serializePath) + get _optionsPath() { + return super._optionsPath + ".cache_membuf"; } - static get _dbPath() { - return path.join(CacheMembuf._serializePath, kDbName); + get _dbPath() { + return path.join(this._cachePath, kDbName); } - static _allocPage(size) { + _allocPage(size) { let pageIndex = uuid(); - CacheMembuf._pages[pageIndex] = Buffer.alloc(size, 0, 'ascii'); + this._pages[pageIndex] = Buffer.alloc(size, 0, 'ascii'); - CacheMembuf._index.insert({ + this._index.insert({ pageIndex: pageIndex, pageOffset: 0, - size: size + size: size, + timestamp: Date.now() }); - return CacheMembuf._pageMeta.insert({ + return this._pageMeta.insert({ index: pageIndex, size: size, dirty: true }); } - static _calcIndexKey(type, guid, hash) { - return `${guid.toString('hex')}-${hash.toString('hex')}-${type}`; - } - - static _findFreeBlock(size) { - let result = CacheMembuf._index.chain() + _findFreeBlock(size) { + let result = this._index.chain() .find({ 'fileId' : undefined, 'size' : { '$gte' : size }}) .simplesort('size') .limit(1) @@ -64,189 +68,205 @@ class CacheMembuf { return result.length > 0 ? result[0] : null; } - static _reserveBlock(key, size) { + _reserveBlock(key, size) { // Free any existing block for this key - CacheMembuf._index.findAndUpdate({'fileId' : key}, doc => doc['fileId'] = undefined); + this._index.findAndUpdate({'fileId' : key}, doc => doc['fileId'] = undefined); // Find the best free block to use let freeBlock; - while((freeBlock = CacheMembuf._findFreeBlock(size)) === null) { - let growPageSize = CacheMembuf._options.growPageSize; + while((freeBlock = this._findFreeBlock(size)) === null) { + let growPageSize = this._options.growPageSize; let allocSize = Math.max(size, growPageSize); if(allocSize > growPageSize) { helpers.log(consts.LOG_WARN, "File allocation size of " + size + " exceeds growPageSize of " + growPageSize); } - CacheMembuf._allocPage(allocSize); + this._allocPage(allocSize); } // Clone the free block, then set it's file id and size let block = _.omit(freeBlock, ['$loki', 'meta']); block['fileId'] = key; block['size'] = size; - CacheMembuf._index.insert(block); + block['timestamp'] = Date.now(); + this._index.insert(block); // Update this free block if leftover space is greater than the minimum - if(freeBlock.size - size >= CacheMembuf._options.minFreeBlockSize) { + if(freeBlock.size - size >= this._options.minFreeBlockSize) { freeBlock.pageOffset += size; freeBlock.size -= size; - CacheMembuf._index.update(freeBlock); + this._index.update(freeBlock); } else { - CacheMembuf._index.remove(freeBlock); + this._index.remove(freeBlock); } return block; } - static _waitForSerialize() { + _waitForSerialize() { + const self = this; + return new Promise((resolve) => { (function waitForSave() { - if(CacheMembuf._searializeInProgess === false) return resolve(); + if(self._serializeInProgress === false) return resolve(); helpers.log(consts.LOG_TEST, "_waitForSerialize..."); setTimeout(waitForSave, 100); })(); }); } - static _addFileToCache(type, guid, hash, buffer) { + _addFileToCache(type, guid, hash, buffer) { const key = CacheMembuf._calcIndexKey(type, guid, hash); - const entry = CacheMembuf._reserveBlock(key, buffer.length); + const entry = this._reserveBlock(key, buffer.length); helpers.log(consts.LOG_TEST, `Saving file key: ${key} pageIndex: ${entry.pageIndex} pageOffset: ${entry.pageOffset} size: ${entry.size}`); - buffer.copy(CacheMembuf._pages[entry.pageIndex], entry.pageOffset, 0, buffer.length); + buffer.copy(this._pages[entry.pageIndex], entry.pageOffset, 0, buffer.length); - let pageMeta = CacheMembuf._pageMeta.by('index', entry.pageIndex); + let pageMeta = this._pageMeta.by('index', entry.pageIndex); pageMeta.dirty = true; - CacheMembuf._pageMeta.update(pageMeta); + this._pageMeta.update(pageMeta); } - static _serialize(callback) { + _serialize(callback) { + const self = this; - let p = CacheMembuf._serializePath; + let p = self._cachePath; if(p === null) - return callback(new Error("Invalid serializedPath")); + return callback(new Error("Invalid cachePath")); - let pages = CacheMembuf._pageMeta.chain().find({'dirty' : true}).data(); + let pages = self._pageMeta.chain().find({'dirty' : true}).data(); let writeOps = pages.map(function(page) { return { index: page.index, path: path.join(p, page.index), - data: CacheMembuf._pages[page.index] + data: self._pages[page.index] } }); function doWriteOp(op, cb) { helpers.log(consts.LOG_INFO, `Writing ${op.path}`); - fs.writeFile(op.path, op.data, function(err) { - if(err) return cb(err); - let doc = CacheMembuf._pageMeta.by('index', op.index); - doc.dirty = false; - CacheMembuf._pageMeta.update(doc); - cb(); - }); + fs.writeFile(op.path, op.data) + .then(() => { + let doc = self._pageMeta.by('index', op.index); + doc.dirty = false; + self._pageMeta.update(doc); + cb(); + }) + .catch(err => { + cb(err); + }); } async.eachSeries(writeOps, doWriteOp, callback); } - static _deserialize(callback) { - const p = CacheMembuf._serializePath; - if(p === null || !fs.existsSync(p)) - return callback(new Error("Invalid serializePath")); - - let pages = CacheMembuf._pageMeta.chain().find({}).data(); + _deserialize(callback) { + const self = this; + + const p = self._cachePath; + let pages = self._pageMeta.chain().find({}).data(); function loadPageFile(page, cb) { let file = path.join(p, page.index); helpers.log(consts.LOG_DBG, `Loading page file at ${file}`); - fs.stat(file, function(err, stats) { - if(err) - return cb(err); - - if(stats.size !== page.size) - return cb(new Error(`Unrecognized/invalid page file '${file}'`)); - - fs.readFile(file, function(err, result) { - if(err) return cb(err); - CacheMembuf._pages[page.index] = result; + fs.stat(file) + .then(stats => { + if(stats.size !== page.size) + return cb(new Error(`Unrecognized/invalid page file '${file}'`)); + + return fs.readFile(file); + }) + .then(result => { + self._pages[page.index] = result; cb(); + }) + .catch(err => { + cb(err); }); - }); } async.each(pages, loadPageFile, callback); } - static _clearCache() { - CacheMembuf._index.clear(); - CacheMembuf._pageMeta.clear(); - CacheMembuf._pages = {}; - CacheMembuf._allocPage(CacheMembuf._options.initialPageSize); + _clearCache() { + this._index.clear(); + this._pageMeta.clear(); + this._pages = {}; + this._allocPage(this._options.initialPageSize); } - static _initDb(options, callback) { - let db = new loki(CacheMembuf._dbPath, options); - CacheMembuf._db = db; + _initDb(options, callback) { + const self = this; + + let db = new loki(self._dbPath, options); + this._db = db; db.loadDatabase({}, function() { - CacheMembuf._index = db.getCollection(kIndex); - CacheMembuf._pageMeta = db.getCollection(kPageMeta); + self._index = db.getCollection(kIndex); + self._pageMeta = db.getCollection(kPageMeta); - if(CacheMembuf._pageMeta === null) { - CacheMembuf._pageMeta = db.addCollection(kPageMeta, { + if(self._pageMeta === null) { + self._pageMeta = db.addCollection(kPageMeta, { unique: ["index"], indices: ["dirty"] }); } - if(CacheMembuf._index === null) { - CacheMembuf._index = db.addCollection(kIndex, { + if(self._index === null) { + self._index = db.addCollection(kIndex, { unique: ["fileId"], indices: ["size"] }); - CacheMembuf._clearCache(); + self._clearCache(); callback(); } else { - CacheMembuf._deserialize(callback); + self._deserialize(callback); } }); } - static init(options, callback) { - if(typeof(options) === 'object') - CacheMembuf._optionOverrides = options; - - let dbOpts = CacheMembuf._options.get('persistenceOptions') || {}; - if(!dbOpts.hasOwnProperty('adapter')) { - dbOpts.adapter = new PersistenceAdapter(); - } + init(options, callback) { + const self = this; + + super.init(options) + .then(() => { + let dbOpts = self._options.persistenceOptions || {}; + if(!dbOpts.hasOwnProperty('adapter')) { + dbOpts.adapter = new PersistenceAdapter(self); + } - CacheMembuf._initDb(dbOpts, callback); + self._initDb(dbOpts, callback); + }) + .catch(err => { + callback(err); + }); } - static reset(callback) { - CacheMembuf._clearCache(); + reset(callback) { + this._clearCache(); callback(); } - static save(callback) { - CacheMembuf._db.saveDatabase(callback); + save(callback) { + this._db.saveDatabase(callback); } - static shutdown(callback) { - CacheMembuf._db.close(callback); + shutdown(callback) { + this._db.close(callback); } - static getFileStream(type, guid, hash, callback) { - const entry = CacheMembuf._index.by('fileId', CacheMembuf._calcIndexKey(type, guid, hash)); + getFileStream(type, guid, hash, callback) { + const self = this; + + const entry = this._index.by('fileId', CacheMembuf._calcIndexKey(type, guid, hash)); // noinspection EqualityComparisonWithCoercionJS (checking for null or undefined) if(entry != null) { - const file = CacheMembuf._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size); + const file = self._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size); const stream = new Readable({ read() { this.push(file); @@ -263,24 +283,22 @@ class CacheMembuf { } } - static createPutTransaction(guid, hash, callback) { + createPutTransaction(guid, hash, callback) { callback(null, new PutTransactionMembuf(guid, hash)); } - static endPutTransaction(transaction, callback) { - CacheMembuf._waitForSerialize().then(() => { + endPutTransaction(transaction, callback) { + const self = this; + + this._waitForSerialize().then(() => { const files = transaction.getFiles(); files.forEach(function (file) { - CacheMembuf._addFileToCache(file.type, transaction.guid, transaction.hash, file.buffer); + self._addFileToCache(file.type, transaction.guid, transaction.hash, file.buffer); }); callback(); }); } - - static registerClusterWorker(worker) { - // Not implemented - } } class PutTransactionMembuf extends PutTransaction { @@ -328,23 +346,21 @@ class PutTransactionMembuf extends PutTransaction { } } -CacheMembuf._db = null; -CacheMembuf._pages = {}; -CacheMembuf._optionOverrides = {}; -CacheMembuf._searializeInProgess = false; - module.exports = CacheMembuf; class PersistenceAdapter extends loki.LokiFsAdapter { - constructor() { + constructor(cache) { super(); + this._cache = cache; } saveDatabase(dbname, dbstring, callback) { - CacheMembuf._searializeInProgess = true; + const self = this; + + self._cache._serializeInProgress = true; super.saveDatabase(dbname, dbstring, function() { - CacheMembuf._serialize(function() { - CacheMembuf._searializeInProgess = false; + self._cache._serialize(function() { + self._cache._serializeInProgress = false; callback(); }); }); diff --git a/main.js b/main.js index f2b8b68..d3b74c0 100644 --- a/main.js +++ b/main.js @@ -16,19 +16,19 @@ function zeroOrMore(val) { return Math.max(0, val); } -function atLeastOne(val) { - return Math.max(1, val); -} +const moduleName = config.get("Cache.module"); +const CacheModule = require(path.resolve(config.get("Cache.path"), moduleName)); +const Cache = new CacheModule(); program.description("Unity Cache Server") .version(consts.VERSION) - //.option('-s, --size ', 'Specify the maximum allowed size of the LRU cache. Files that have not been used recently will automatically be discarded when the cache size is exceeded. Default is 50Gb', myParseInt, consts.DEFAULT_CACHE_SIZE) - .option('-p, --port ', 'Specify the server port, only apply to new cache server, default is 8126', myParseInt, consts.DEFAULT_PORT) - //.option('-P, --path [path]', 'Specify the path of the cache directory. Default is ./cache5.0', consts.DEFAULT_CACHE_DIR) - .option('-l, --log-level ', 'Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is 4 (test)', myParseInt, consts.DEFAULT_LOG_LEVEL) - .option('-w, --workers ', 'Number of worker threads to spawn. Default is 1 for every 2 CPUs reported by the OS', zeroOrMore, consts.DEFAULT_WORKERS) - .option('-m, --monitor-parent-process ', 'Monitor a parent process and exit if it dies', myParseInt, 0) - .parse(process.argv); + .option('-p, --port ', `Specify the server port, only apply to new cache server, default is ${consts.DEFAULT_PORT}`, myParseInt, consts.DEFAULT_PORT) + .option('-P, --cachePath [path]', `Specify the path of the cache directory. Default is .${moduleName}`, `.${moduleName}`) + .option('-l, --log-level ', `Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is ${consts.DEFAULT_LOG_LEVEL}`, myParseInt, consts.DEFAULT_LOG_LEVEL) + .option('-w, --workers ', `Number of worker threads to spawn. Default is ${consts.DEFAULT_WORKERS}`, zeroOrMore, consts.DEFAULT_WORKERS) + .option('-m, --monitor-parent-process ', 'Monitor a parent process and exit if it dies', myParseInt, 0); + +program.parse(process.argv); helpers.SetLogLevel(program.logLevel); @@ -58,13 +58,23 @@ const errHandler = function () { process.exit(1); }; -const moduleName = config.get("Cache.module"); -const modulePath = path.resolve(config.get("Cache.path"), moduleName); -helpers.log(consts.LOG_INFO, "Loading Cache module at " + modulePath); -const Cache = require(modulePath); +if(!CacheModule.properties.clustering) { + program.workers = 0; + helpers.log(consts.LOG_INFO, `Clustering disabled, ${moduleName} module does not support it.`); +} + let server = null; -Cache.init({}, function() { +let cacheOpts = { + cachePath: program.cachePath +}; + +Cache.init(cacheOpts, function(error) { + if(error) { + helpers.log(consts.LOG_ERR, error); + process.exit(1); + } + server = new CacheServer(Cache, program.port); if(cluster.isMaster) { diff --git a/package-lock.json b/package-lock.json index 3a94de4..11891fa 100644 --- a/package-lock.json +++ b/package-lock.json @@ -626,6 +626,16 @@ "resolved": "https://registry.npmjs.org/filesize/-/filesize-3.5.11.tgz", "integrity": "sha512-ZH7loueKBoDb7yG9esn1U+fgq7BzlzW6NRi5/rMdxIZ05dj7GFD/Xc5rq2CDt5Yq86CyfSYVyx4242QQNZbx1g==" }, + "fs-extra": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-5.0.0.tgz", + "integrity": "sha512-66Pm4RYbjzdyeuqudYqhFiNBbCIuI9kgRqLPSHIlXHidW8NIQtVdkM1yeZ4lXwuhbTETv3EUGMNHAAw6hiundQ==", + "requires": { + "graceful-fs": "4.1.11", + "jsonfile": "4.0.0", + "universalify": "0.1.1" + } + }, "fs.realpath": { "version": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" @@ -642,6 +652,11 @@ "path-is-absolute": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" } }, + "graceful-fs": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz", + "integrity": "sha1-Dovf5NHduIVNZOBOp8AOKgJuVlg=" + }, "graceful-readlink": { "version": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz", "integrity": "sha1-TK+tdrxi8C+gObL5Tpo906ORpyU=", @@ -1312,6 +1327,14 @@ "version": "https://registry.npmjs.org/json5/-/json5-0.4.0.tgz", "integrity": "sha1-BUNS5MTIDIbAkjh31EneF2pzLI0=" }, + "jsonfile": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", + "integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=", + "requires": { + "graceful-fs": "4.1.11" + } + }, "lodash": { "version": "4.17.4", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", @@ -1521,6 +1544,11 @@ "has-flag": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz" } }, + "universalify": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.1.tgz", + "integrity": "sha1-+nG63UQ3r0wUiEHjs7Fl+enlkLc=" + }, "utile": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/utile/-/utile-0.3.0.tgz", diff --git a/package.json b/package.json index ba87dee..cea95fd 100644 --- a/package.json +++ b/package.json @@ -37,6 +37,7 @@ "commander": "^2.11.0", "config": "^1.27.0", "filesize": "^3.5.11", + "fs-extra": "^5.0.0", "js-yaml": "^3.10.0", "lodash": "^4.17.4", "lokijs": "^1.5.1", diff --git a/test/cache.js b/test/cache.js index 1bc01e2..59b71f2 100644 --- a/test/cache.js +++ b/test/cache.js @@ -36,7 +36,9 @@ test_modules.forEach(function(module) { }); before(function (done) { - cache = require(module.path); + let CacheModule = require(module.path); + cache = new CacheModule(); + cache.init(module.options, function() { server = new CacheServer(cache, 0); From 6fecc51b8e40ff0cd3588db5fa18812e87e67675 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 20 Dec 2017 10:32:36 -0600 Subject: [PATCH 13/89] Added file system cache module, and refactored as necessary to support both the fs and mem cache modules in the same framework. --- config/default.yml | 4 +- lib/cache/cache.js | 37 ++++- lib/cache/cache_fs.js | 182 ++++++++++++++++++++++++ lib/cache/cache_membuf.js | 24 +++- lib/client/server_response_transform.js | 2 +- lib/server/command_processor.js | 117 +++++++++------ package-lock.json | 15 ++ package.json | 3 +- test/cache.js | 67 ++++++--- 9 files changed, 373 insertions(+), 78 deletions(-) create mode 100644 lib/cache/cache_fs.js diff --git a/config/default.yml b/config/default.yml index 4e50379..a7cf43e 100644 --- a/config/default.yml +++ b/config/default.yml @@ -11,4 +11,6 @@ Cache: persistenceOptions: autosave: true autosaveInterval: 10000 - throttledSaves: false \ No newline at end of file + throttledSaves: false + cache_fs: + cachePath: ".cache_fs" \ No newline at end of file diff --git a/lib/cache/cache.js b/lib/cache/cache.js index 60eac2e..2f28722 100644 --- a/lib/cache/cache.js +++ b/lib/cache/cache.js @@ -22,7 +22,8 @@ class Cache { if(!this._options.hasOwnProperty('cachePath')) return null; - return path.join(path.dirname(require.main.filename), this._options.cachePath) + let cachePath = this._options.cachePath; + return path.isAbsolute(cachePath) ? cachePath : path.join(path.dirname(require.main.filename), cachePath); } init(options, callback) { @@ -38,8 +39,36 @@ class Cache { fs.mkdirs(p, callback); } + reset(callback) { + throw new Error("Not implemented"); + } + + save(callback) { + throw new Error("Not implemented"); + } + + shutdown(callback) { + throw new Error("Not implemented"); + } + + hasFile(type, guid, hash, callback) { + throw new Error("Not implemented"); + } + + getFileStream(type, guid, hash, callback) { + throw new Error("Not implemented"); + } + + createPutTransaction(guid, hash, callback) { + throw new Error("Not implemented"); + } + + endPutTransaction(transaction, callback) { + throw new Error("Not implemented"); + } + registerClusterWorker(worker) { - // Not implemented + throw new Error("Not implemented"); } } @@ -51,6 +80,10 @@ class PutTransaction { get guid() { return this._guid; } get hash() { return this._hash; } + + getWriteStream(type, size, callback) { + throw new Error("Not implemented"); + } } module.exports = { diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js new file mode 100644 index 0000000..372cb66 --- /dev/null +++ b/lib/cache/cache_fs.js @@ -0,0 +1,182 @@ +'use strict'; +const { Cache, PutTransaction } = require('./cache'); +const helpers = require('../helpers'); +const consts = require('../constants').Constants; +const path = require('path'); +const fs = require('fs-extra'); +const uuid = require('uuid'); +const _ = require('lodash'); + +class CacheFS extends Cache { + constructor() { + super(); + } + + static get properties() { + return { + clustering: true + } + } + + static _calcFilename(type, guid, hash) { + return `${guid.toString('hex')}-${hash.toString('hex')}.${type}`; + } + + _calcFilepath(type, guid, hash) { + return path.join(this._cachePath, CacheFS._calcFilename(type, guid, hash)); + } + + get _optionsPath() { + return super._optionsPath + ".cache_fs"; + } + + init(options, callback) { + // Nothing to do yet + callback(); + } + + reset(callback) { + return super.reset(callback); + } + + save(callback) { + callback(); // No op + } + + shutdown(callback) { + callback(); // No op + } + + getFileInfo(type, guid, hash, callback) { + fs.stat(this._calcFilepath(type, guid, hash)) + .then(stats => { + callback(null, {size: stats.size}); + }) + .catch(err => { + callback(err); + }) + } + + getFileStream(type, guid, hash, callback) { + let stream = fs.createReadStream(this._calcFilepath(type, guid, hash)); + stream.on('open', () => { + callback(null, stream); + }).on('error', err => { + callback(err); + }); + } + + createPutTransaction(guid, hash, callback) { + callback(null, new PutTransactionFS(guid, hash, this._cachePath)); + } + + endPutTransaction(transaction, callback) { + let self = this; + + function moveFile(file) { + let filePath = self._calcFilepath(file.type, transaction.guid, transaction.hash); + return fs.move(file.file, filePath, { overwrite: true }); + } + + transaction.getFiles() + .then((files) => { + return Promise.all(files.map(moveFile)); + }) + .then(() => { + callback(); + }) + .catch(err => { + callback(err); + }); + } + + registerClusterWorker(worker) { + + } +} + +class PutTransactionFS extends PutTransaction { + constructor(guid, hash, cachePath) { + super(guid, hash); + this._cachePath = cachePath; + this._writeOptions = { + flags: 'w', + encoding: 'ascii', + fd: null, + mode: 0o666, + autoClose: true + }; + + this._files = {}; + } + + _closeAllStreams() { + return new Promise((resolve) => { + let self = this; + let files = _.values(this._files); + + if(files.length === 0) + return resolve(); + + let closed = 0; + let toClose = files.length; + + function processClosedFile(file) { + closed++; + + if(file.stream.bytesWritten !== file.size) { + _.unset(self._files, file.type); + } + + if(closed === toClose) { + resolve(); + } + } + + files.forEach(file => { + if(file.stream.closed) return processClosedFile(file); + + file.stream.on('close', () => { + processClosedFile(file); + }).on('error', err => { + helpers.log(consts.LOG_ERR, err); + _.unset(self._files, file.type); + }); + }) + }); + } + + getFiles() { + return new Promise((resolve) => { + this._closeAllStreams() + .then(() => { + resolve(_.values(this._files)); + }) + }); + } + + getWriteStream(type, size, callback) { + let self = this; + let file = path.join(this._cachePath, uuid()); + + fs.ensureFile(file) + .then(() => { + let stream = fs.createWriteStream(file, this._writeOptions); + stream.on('open', () => { + callback(null, stream); + }); + + self._files[type] = { + file: file, + type: type, + size: size, + stream: stream + }; + }) + .catch(err => { + callback(err); + }); + } +} + +module.exports = CacheFS; \ No newline at end of file diff --git a/lib/cache/cache_membuf.js b/lib/cache/cache_membuf.js index fb74b52..dda2e99 100644 --- a/lib/cache/cache_membuf.js +++ b/lib/cache/cache_membuf.js @@ -1,4 +1,4 @@ -'use strict' +'use strict'; const { Cache, PutTransaction } = require('./cache'); const { Readable, Writable } = require('stream'); const helpers = require('../helpers'); @@ -78,7 +78,7 @@ class CacheMembuf extends Cache { let growPageSize = this._options.growPageSize; let allocSize = Math.max(size, growPageSize); if(allocSize > growPageSize) { - helpers.log(consts.LOG_WARN, "File allocation size of " + size + " exceeds growPageSize of " + growPageSize); + helpers.log(consts.LOG_WARN, `File allocation size of ${size} exceeds growPageSize of ${growPageSize}`); } this._allocPage(allocSize); @@ -259,6 +259,16 @@ class CacheMembuf extends Cache { this._db.close(callback); } + getFileInfo(type, guid, hash, callback) { + const entry = this._index.by('fileId', CacheMembuf._calcIndexKey(type, guid, hash)); + if(entry != null) { + callback(null, { size: entry.size }); + } + else { + callback(new Error(`File not found for (${type}) ${guid.toString('hex')}-${hash.toString('hex')}`)) + } + } + getFileStream(type, guid, hash, callback) { const self = this; @@ -269,17 +279,19 @@ class CacheMembuf extends Cache { const file = self._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size); const stream = new Readable({ read() { + if(this.didPush) + return this.push(null); this.push(file); - this.push(null); + this.didPush = true; }, highWaterMark: file.length }); - callback(null, {size: entry.size, stream: stream}); + callback(null, stream); } else { - callback(new Error("File not found for (" + type + ") " + guid.toString('hex') + "-" + hash.toString('hex'))); + callback(new Error(`File not found for (${type}) ${guid.toString('hex')}-${hash.toString('hex')}`)); } } @@ -316,7 +328,7 @@ class PutTransactionMembuf extends PutTransaction { const self = this; if(type !== 'a' && type !== 'i' && type !== 'r') { - return callback(new Error("Unrecognized type '" + type + "' for transaction.")); + return callback(new Error(`Unrecognized type '${type}' for transaction.`)); } this._files[type].buffer = Buffer.alloc(size, 0, 'ascii'); diff --git a/lib/client/server_response_transform.js b/lib/client/server_response_transform.js index a0bfa02..41455f1 100644 --- a/lib/client/server_response_transform.js +++ b/lib/client/server_response_transform.js @@ -2,7 +2,7 @@ const helpers = require('./../helpers'); const consts = require('./../constants').Constants; const { Transform } = require('stream'); -const MAX_HEADER_SIZE = consts.CMD_SIZE + consts.ID_SIZE; +const MAX_HEADER_SIZE = consts.CMD_SIZE + consts.SIZE_SIZE + consts.ID_SIZE; class CacheServerResponseTransform extends Transform { constructor() { diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index 1830e88..1d3b7cf 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -22,9 +22,16 @@ class CommandProcessor extends Duplex { this._putStream = null; this._putSize = 0; this._putSent = 0; - this._sendFileQueueReadStartTime = Date.now(); + this._sendFileQueueChunkReads = 0; + this._sendFileQueueReadDuration = 0; this._sendFileQueueReadBytes = 0; this._sendFileQueueCount = 0; + this._isReading = false; + this._readReady = true; + + this.once('finish', function() { + this._printReadStats(); + }); } _write(chunk, encoding, callback) { @@ -55,46 +62,67 @@ class CommandProcessor extends Duplex { } _read_internal() { - if(this[kSendFileQueue].length === 0) { - - // print some stats - if(this._sendFileQueueReadBytes > 0) { - let totalTime = (Date.now() - this._sendFileQueueReadStartTime) / 1000; - let throughput = (this._sendFileQueueReadBytes / totalTime).toFixed(2); - helpers.log(consts.LOG_TEST, `Sent ${this._sendFileQueueCount} files totaling ${filesize(this._sendFileQueueReadBytes)} in ${totalTime} seconds (${filesize(throughput)}/sec)`); - this._sendFileQueueReadBytes = 0; - this._sendFileQueueCount = 0; - } - - this.push(''); + if(this._isReading || this[kSendFileQueue].length === 0) return; - } - - let go = true; - while(go && this[kSendFileQueue].length > 0) { - let file = this[kSendFileQueue][0]; + let self = this; + let file = self[kSendFileQueue][0]; - if (file.header !== null) { - let header = file.header; - file.header = null; - go = this.push(header, 'ascii'); - } + if (file.header !== null) { + let header = file.header; + file.header = null; + self._readReady = self.push(header, 'ascii'); + } - let chunk = null; + if(!file.exists) { + self[kSendFileQueue].shift(); + return; + } - if (file.stream !== null && (chunk = file.stream.read()) !== null) { - go = this.push(chunk, 'ascii'); - this._sendFileQueueReadBytes += chunk.length; + self._isReading = true; + self._readStartTime = Date.now(); + this[kCache].getFileStream(file.type, file.guid, file.hash, function(err, stream) { + if(err) { + helpers.log(consts.LOG_ERR, err); + self._isReading = false; + return; } - if (chunk === null) { - this[kSendFileQueue].shift(); + function readChunk() { + if(!self._readReady) { + return setImmediate(readChunk); + } + + let chunk = stream.read(); + if(chunk !== null) { + self._readReady = self.push(chunk, 'ascii'); + self._sendFileQueueChunkReads++; + self._sendFileQueueReadBytes += chunk.length; + } + else { + self[kSendFileQueue].shift(); + self._isReading = false; + self._sendFileQueueReadDuration += Date.now() - self._readStartTime; + self._read(); + } } + + stream.on('readable', function() { + readChunk(); + }); + }); + } + + _printReadStats() { + if(this._sendFileQueueReadDuration > 0) { + let totalTime = this._sendFileQueueReadDuration / 1000; + let throughput = (this._sendFileQueueReadBytes / totalTime).toFixed(2); + helpers.log(consts.LOG_INFO, `Sent ${this._sendFileQueueCount} files (${this._sendFileQueueChunkReads} chunks) totaling ${filesize(this._sendFileQueueReadBytes)} in ${totalTime} seconds (${filesize(throughput)}/sec)`); } } _read() { + this._readReady = true; let self = this; Promise.resolve().then(() => { self._read_internal(); @@ -127,12 +155,14 @@ class CommandProcessor extends Duplex { this._putStream.write(data, 'ascii', function() { self._putSent += data.length; if(self._putSent === self._putSize) { + self._putStream.end(callback); self._readState = kReadStateCommand; self._putSent = 0; self._putSize = 0; } - - callback(); + else { + callback(); + } }); } @@ -157,8 +187,6 @@ class CommandProcessor extends Duplex { return callback(); } - helpers.log(consts.LOG_DBG, "CP: Parsing command '" + cmd + "'"); - switch(cmd) { case 'q': this._quit(); @@ -181,36 +209,37 @@ class CommandProcessor extends Duplex { this._onPut(type, size, callback); break; default: - callback(new Error("Unrecognized command '" + cmd + "'")); + callback(new Error(`Unrecognized command '${cmd}`)); } } _onGet(type, guid, hash, callback) { let self = this; - this[kCache].getFileStream(type, guid, hash, function(err, result) { + this[kCache].getFileInfo(type, guid, hash, function(err, result) { if(err || result === null) { let resp = Buffer.from('-' + type, 'ascii'); self[kSendFileQueue].push({ - header: Buffer.concat([resp, guid, hash], 34), - stream: null + exists: false, + header: Buffer.concat([resp, guid, hash], 34) }); } else { let resp = Buffer.from('+' + type + helpers.encodeInt64(result.size), 'ascii'); self[kSendFileQueue].push({ - size: result.size, + exists: true, header: Buffer.concat([resp, guid, hash], 50), - stream: result.stream + size: result.size, + type: type, + guid: guid, + hash: hash }); self._sendFileQueueCount++; - helpers.log(consts.LOG_DBG, "CP: Adding file to send queue, size " + result.size); + helpers.log(consts.LOG_DBG, `Adding file to send queue, size ${result.size}`); } if(self[kSendFileQueue].length === 1) { - self._sendFileQueueReadStartTime = Date.now(); - self._sendFileQueueReadBytes = 0; self._read(self._readState.highWaterMark); } @@ -231,7 +260,7 @@ class CommandProcessor extends Duplex { return callback(err); } - helpers.log(consts.LOG_DBG, "Start transaction for " + guid.toString('hex') + "-" + hash.toString('hex')); + helpers.log(consts.LOG_DBG, `Start transaction for ${guid.toString('hex')}-${hash.toString('hex')}`); self._trx = trx; callback(null); }); @@ -245,7 +274,7 @@ class CommandProcessor extends Duplex { } this[kCache].endPutTransaction(this._trx, function(err) { - helpers.log(consts.LOG_DBG, "End transaction for " + self._trx.guid.toString('hex') + "-" + self._trx.hash.toString('hex')); + helpers.log(consts.LOG_DBG, `End transaction for ${self._trx.guid.toString('hex')}-${self._trx.hash.toString('hex')}`); self._trx = null; callback(err); }); diff --git a/package-lock.json b/package-lock.json index 11891fa..5ef0a29 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1484,6 +1484,12 @@ "version": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=" }, + "os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", + "dev": true + }, "path-is-absolute": { "version": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" @@ -1544,6 +1550,15 @@ "has-flag": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz" } }, + "tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dev": true, + "requires": { + "os-tmpdir": "1.0.2" + } + }, "universalify": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.1.tgz", diff --git a/package.json b/package.json index cea95fd..b95b540 100644 --- a/package.json +++ b/package.json @@ -30,7 +30,8 @@ "coveralls": "^3.0.0", "istanbul": "^0.4.5", "mocha": "^3.5.3", - "mocha-lcov-reporter": "^1.3.0" + "mocha-lcov-reporter": "^1.3.0", + "tmp": "0.0.33" }, "dependencies": { "async": "^2.5.0", diff --git a/test/cache.js b/test/cache.js index 59b71f2..373efed 100644 --- a/test/cache.js +++ b/test/cache.js @@ -6,6 +6,7 @@ const consts = require('../lib/constants').Constants; const CacheServer = require('../lib/server'); const CmdResponseListener = require('./../lib/client/server_response_transform.js'); const loki = require('lokijs'); +const tmp = require('tmp'); const generateCommandData = require('./test_utils').generateCommandData; const encodeCommand = require('./test_utils').encodeCommand; @@ -15,7 +16,9 @@ const cmd = require('./test_utils').cmd; let cache, server, client; -let test_modules = [{ +let test_modules = [ + { + tmpDir: tmp.dirSync({unsafeCleanup: true}), name: "Cache: Membuf", path: "../lib/cache/cache_membuf", options: { @@ -26,7 +29,14 @@ let test_modules = [{ adapter: new loki.LokiMemoryAdapter() } } - }]; + }, + { + tmpDir: tmp.dirSync({unsafeCleanup: true}), + name: "Cache: FS", + path: "../lib/cache/cache_fs", + options: {} + } + ]; test_modules.forEach(function(module) { describe(module.name, function() { @@ -39,6 +49,8 @@ test_modules.forEach(function(module) { let CacheModule = require(module.path); cache = new CacheModule(); + module.options.cachePath = module.tmpDir.name; + cache.init(module.options, function() { server = new CacheServer(cache, 0); @@ -50,6 +62,7 @@ test_modules.forEach(function(module) { after(function() { server.Stop(); + module.tmpDir.removeCallback(); }); describe("Transactions", function () { @@ -138,17 +151,22 @@ test_modules.forEach(function(module) { tests.forEach(function (test) { it("should store " + test.ext + " data with a (" + test.cmd + ") cmd", function (done) { client.on('close', function () { - cache.getFileStream(test.cmd[1], self.data.guid, self.data.hash, function (err, result) { + cache.getFileInfo(test.cmd[1], self.data.guid, self.data.hash, function(err, info) { assert(!err, err); - assert(result.size === self.data[test.ext].length); - assert(result.stream !== null); - - result.stream.on("readable", function () { - const chunk = result.stream.read(); // should only be one in this test - assert(self.data[test.ext].compare(chunk) === 0); - done(); + assert(info.size === self.data[test.ext].length); + cache.getFileStream(test.cmd[1], self.data.guid, self.data.hash, function (err, stream) { + assert(!err, err); + assert(stream !== null); + + stream.on("readable", function () { + const chunk = stream.read(); // should only be one in this test + assert(self.data[test.ext].compare(chunk) === 0); + done(); + }); }); }); + + }); const buf = Buffer.from( @@ -182,15 +200,19 @@ test_modules.forEach(function(module) { const asset = Buffer.from(crypto.randomBytes(self.data.bin.length).toString('ascii'), 'ascii'); client.on('close', function () { - cache.getFileStream('a', self.data.guid, self.data.hash, function (err, result) { + cache.getFileInfo('a', self.data.guid, self.data.hash, function(err, info) { assert(!err, err); - assert(result.size === asset.length); - assert(result.stream !== null); + assert(info.size === asset.length); - result.stream.on("readable", function () { - const chunk = result.stream.read(); // should only be one in this test - assert(asset.compare(chunk) === 0); - done(); + cache.getFileStream('a', self.data.guid, self.data.hash, function (err, stream) { + assert(!err, err); + assert(stream !== null); + + stream.on("readable", function () { + const chunk = stream.read(); // should only be one in this test + assert(asset.compare(chunk) === 0); + done(); + }); }); }); }); @@ -275,11 +297,15 @@ test_modules.forEach(function(module) { resp.on('header', function (header) { assert(header.cmd === '+' + test.cmd[1]); + assert(header.guid.compare(self.data.guid) === 0, "GUID does not match"); + assert(header.hash.compare(self.data.hash) === 0, "HASH does not match"); assert(header.size === test.blob.length, "Expected size " + test.blob.length); dataBuf = Buffer.allocUnsafe(header.size); }) .on('data', function (data) { - pos += data.copy(dataBuf, pos, 0); + let prev = pos; + pos += data.copy(dataBuf, pos); + assert(data.compare(test.blob.slice(prev, pos)) === 0, `Blobs don't match at pos ${pos}`); }) .on('dataEnd', function () { assert(dataBuf.compare(test.blob) === 0); @@ -288,11 +314,6 @@ test_modules.forEach(function(module) { client.pipe(resp); - // client.on('data', function(data) { - // "use strict"; - // console.log("Received data " + data.length); - // }); - const buf = Buffer.from(encodeCommand(test.cmd, self.data.guid, self.data.hash), 'ascii'); let sentBytes = 0; From 532f8ea6e450ca356d550377ba50eaa3a50f45dd Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 20 Dec 2017 11:11:48 -0600 Subject: [PATCH 14/89] Added CLI option to specify cache module; log some more info --- config/default.yml | 3 +-- lib/cache/cache.js | 3 +++ lib/cache/cache_fs.js | 7 +++---- lib/helpers.js | 2 +- main.js | 22 ++++++++++++---------- 5 files changed, 20 insertions(+), 17 deletions(-) diff --git a/config/default.yml b/config/default.yml index a7cf43e..d3abedf 100644 --- a/config/default.yml +++ b/config/default.yml @@ -1,6 +1,5 @@ Cache: - module: "cache_membuf" - path: "lib/cache" + module: "lib/cache/cache_fs" options: cache_membuf: initialPageSize: 100000000 diff --git a/lib/cache/cache.js b/lib/cache/cache.js index 2f28722..c841881 100644 --- a/lib/cache/cache.js +++ b/lib/cache/cache.js @@ -1,4 +1,6 @@ 'use strict'; +const consts = require('../constants').Constants; +const helpers = require('../helpers'); const config = require('config'); const path = require('path'); const fs = require('fs-extra'); @@ -31,6 +33,7 @@ class Cache { this._optionOverrides = options; const p = this._cachePath; + helpers.log(consts.LOG_INFO, `Cache path is ${p}`); if(typeof(callback) !== 'function') { return fs.mkdirs(p); diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 372cb66..230e718 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -31,8 +31,7 @@ class CacheFS extends Cache { } init(options, callback) { - // Nothing to do yet - callback(); + return super.init(options, callback); } reset(callback) { @@ -142,7 +141,7 @@ class PutTransactionFS extends PutTransaction { helpers.log(consts.LOG_ERR, err); _.unset(self._files, file.type); }); - }) + }); }); } @@ -151,7 +150,7 @@ class PutTransactionFS extends PutTransaction { this._closeAllStreams() .then(() => { resolve(_.values(this._files)); - }) + }); }); } diff --git a/lib/helpers.js b/lib/helpers.js index 5ea1d7d..b7e94c7 100644 --- a/lib/helpers.js +++ b/lib/helpers.js @@ -47,7 +47,7 @@ function DefaultLogger(lvl, msg) { return; const prefix = cluster.isMaster ? "[Cluster:M] " : `[Cluster:${cluster.worker.id}] `; - console.log(prefix + msg); + console.log(`${prefix}${msg}`); } exports.log = DefaultLogger; diff --git a/main.js b/main.js index d3b74c0..6d0c45e 100644 --- a/main.js +++ b/main.js @@ -16,14 +16,13 @@ function zeroOrMore(val) { return Math.max(0, val); } -const moduleName = config.get("Cache.module"); -const CacheModule = require(path.resolve(config.get("Cache.path"), moduleName)); -const Cache = new CacheModule(); +const defaultCacheModule = config.get("Cache.module"); program.description("Unity Cache Server") .version(consts.VERSION) .option('-p, --port ', `Specify the server port, only apply to new cache server, default is ${consts.DEFAULT_PORT}`, myParseInt, consts.DEFAULT_PORT) - .option('-P, --cachePath [path]', `Specify the path of the cache directory. Default is .${moduleName}`, `.${moduleName}`) + .option('-c --cacheModule [path]', `Use cache module at specified path. Default is '${defaultCacheModule}'`, defaultCacheModule) + .option('-P, --cachePath [path]', `Specify the path of the cache directory.`) .option('-l, --log-level ', `Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is ${consts.DEFAULT_LOG_LEVEL}`, myParseInt, consts.DEFAULT_LOG_LEVEL) .option('-w, --workers ', `Number of worker threads to spawn. Default is ${consts.DEFAULT_WORKERS}`, zeroOrMore, consts.DEFAULT_WORKERS) .option('-m, --monitor-parent-process ', 'Monitor a parent process and exit if it dies', myParseInt, 0); @@ -58,16 +57,19 @@ const errHandler = function () { process.exit(1); }; -if(!CacheModule.properties.clustering) { +const CacheModule = require(path.resolve(program.cacheModule)); +const Cache = new CacheModule(); + +if(program.workers > 0 && !CacheModule.properties.clustering) { program.workers = 0; - helpers.log(consts.LOG_INFO, `Clustering disabled, ${moduleName} module does not support it.`); + helpers.log(consts.LOG_INFO, `Clustering disabled, ${program.cacheModule} module does not support it.`); } let server = null; -let cacheOpts = { - cachePath: program.cachePath -}; +let cacheOpts = {}; +if(program.cachePath !== null) + cacheOpts.cachePath = program.cachePath; Cache.init(cacheOpts, function(error) { if(error) { @@ -78,7 +80,7 @@ Cache.init(cacheOpts, function(error) { server = new CacheServer(Cache, program.port); if(cluster.isMaster) { - helpers.log(consts.LOG_INFO, "Cache Server version " + consts.VERSION); + helpers.log(consts.LOG_INFO, `Cache Server version ${consts.VERSION}; Cache module ${program.cacheModule}`); if(program.workers === 0) { server.Start(errHandler, function () { From fc66f99434d45cf869ea58fcad0b5042558dad2e Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 20 Dec 2017 11:19:53 -0600 Subject: [PATCH 15/89] Only create cache dir from the master process --- lib/cache/cache.js | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/lib/cache/cache.js b/lib/cache/cache.js index c841881..6ad263b 100644 --- a/lib/cache/cache.js +++ b/lib/cache/cache.js @@ -1,4 +1,5 @@ 'use strict'; +const cluster = require('cluster'); const consts = require('../constants').Constants; const helpers = require('../helpers'); const config = require('config'); @@ -32,14 +33,24 @@ class Cache { if(typeof(options) === 'object') this._optionOverrides = options; - const p = this._cachePath; - helpers.log(consts.LOG_INFO, `Cache path is ${p}`); + if(cluster.isMaster) { + const p = this._cachePath; + helpers.log(consts.LOG_INFO, `Cache path is ${p}`); - if(typeof(callback) !== 'function') { - return fs.mkdirs(p); - } + if (typeof(callback) !== 'function') { + return fs.mkdirs(p); + } - fs.mkdirs(p, callback); + fs.mkdirs(p, callback); + } + else { + if (typeof(callback) !== 'function') { + return new Promise(resolve => { resolve(); }); + } + else { + callback(null); + } + } } reset(callback) { From 0cafd160541496924ca4e7faafb8eacf140c474f Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Sat, 23 Dec 2017 14:19:41 -0600 Subject: [PATCH 16/89] Lots of cleanup and annotations to make IntelliJ code inspection happy --- .gitignore | 1 - README.md | 2 +- lib/cache/cache.js | 105 ++++++++++++++++--- lib/cache/cache_fs.js | 102 +++++++++++++------ lib/cache/cache_membuf.js | 66 ++++++------ lib/client/server_response_transform.js | 5 +- lib/constants.js | 2 +- lib/helpers.js | 9 +- lib/server.js | 56 +++++++---- lib/server/client_stream_processor.js | 9 +- lib/server/command_processor.js | 128 +++++++++++++++++++----- main.js | 28 +----- test/cache_api.js | 0 test/cache_membuf.js | 0 test/{cache.js => protocol.js} | 4 +- 15 files changed, 358 insertions(+), 159 deletions(-) create mode 100644 test/cache_api.js create mode 100644 test/cache_membuf.js rename test/{cache.js => protocol.js} (99%) diff --git a/.gitignore b/.gitignore index 922633c..760398f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,5 @@ cache/ cache5.0/ -coverage/ node_modules/ .coveralls.yml !lib/cache diff --git a/README.md b/README.md index 207be8d..49fb86b 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ This is the officially maintained open-source implementation of the Unity Cache At present time this open-source repository is maintained separately from the Cache Server available on the Unity website, as well as the version packaged with the Unity installer. It is possible that compatibility with specific versions of Unity will diverge between these separate implementations. Check the release notes for specific compatibility information prior to usage. ## Server Setup -Download and install the latest LTS version of node from the [Node.JS website](https://nodejs.org/en/download/). +Download and install the latest LTS version of node from the [Node.JS website](`https://nodejs.org/en/download/`). ```bash git clone git@github.com:Unity-Technologies/unity-cache-server.git diff --git a/lib/cache/cache.js b/lib/cache/cache.js index 6ad263b..dd9bfdf 100644 --- a/lib/cache/cache.js +++ b/lib/cache/cache.js @@ -1,17 +1,20 @@ 'use strict'; const cluster = require('cluster'); -const consts = require('../constants').Constants; +const consts = require('../constants'); const helpers = require('../helpers'); const config = require('config'); const path = require('path'); const fs = require('fs-extra'); const _ = require('lodash'); -class Cache { +class CacheBase { + constructor() {} + static get properties() { return {}; } + // noinspection JSMethodCanBeStatic get _optionsPath() { return 'Cache.options'; } @@ -29,6 +32,12 @@ class Cache { return path.isAbsolute(cachePath) ? cachePath : path.join(path.dirname(require.main.filename), cachePath); } + /** + * + * @param {Object} options + * @param {Function?} callback + * @returns {*} + */ init(options, callback) { if(typeof(options) === 'object') this._optionOverrides = options; @@ -53,54 +62,124 @@ class Cache { } } - reset(callback) { - throw new Error("Not implemented"); - } - - save(callback) { - throw new Error("Not implemented"); - } - + // noinspection JSMethodCanBeStatic + /** + * + * @param {Function} callback + */ shutdown(callback) { throw new Error("Not implemented"); } - hasFile(type, guid, hash, callback) { + // noinspection JSMethodCanBeStatic + /** + * + * @param {String} type + * @param {Buffer} guid + * @param {Buffer} hash + * @param {Function} callback + */ + getFileInfo(type, guid, hash, callback) { throw new Error("Not implemented"); } + // noinspection JSMethodCanBeStatic + /** + * + * @param {String} type + * @param {Buffer} guid + * @param {Buffer} hash + * @param {Function} callback + */ getFileStream(type, guid, hash, callback) { throw new Error("Not implemented"); } + // noinspection JSMethodCanBeStatic + /** + * + * @param {Buffer} guid + * @param {Buffer} hash + * @param {Function} callback + */ createPutTransaction(guid, hash, callback) { throw new Error("Not implemented"); } + // noinspection JSMethodCanBeStatic + /** + * + * @param {PutTransaction} transaction + * @param {Function} callback + */ endPutTransaction(transaction, callback) { throw new Error("Not implemented"); } + // noinspection JSMethodCanBeStatic + /** + * + * @param {EventEmitter} worker + */ registerClusterWorker(worker) { throw new Error("Not implemented"); } } class PutTransaction { + + /** + * + * @param {Buffer} guid + * @param {Buffer} hash + */ constructor(guid, hash) { this._guid = guid; this._hash = hash; } - + + /** + * + * @returns {Buffer} + */ get guid() { return this._guid; } + + /** + * + * @returns {Buffer} + */ get hash() { return this._hash; } + /** + * + * @returns {Array} + */ + get files() { return []; } + /** + * + * @param {Function?} callback + * @returns {Promise} + */ + finalize(callback) { + if(typeof(callback) !== 'function') { + return new Promise((resolve) => { resolve(); }); + } + + setImmediate(callback); + } + + /** + * + * @param {String} type + * @param {Number} size + * @param {Function} callback + */ getWriteStream(type, size, callback) { throw new Error("Not implemented"); } } module.exports = { - Cache: Cache, + CacheBase: CacheBase, PutTransaction: PutTransaction }; diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 230e718..47c1493 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -1,13 +1,13 @@ 'use strict'; -const { Cache, PutTransaction } = require('./cache'); +const { CacheBase, PutTransaction } = require('./cache'); const helpers = require('../helpers'); -const consts = require('../constants').Constants; +const consts = require('../constants'); const path = require('path'); const fs = require('fs-extra'); const uuid = require('uuid'); const _ = require('lodash'); -class CacheFS extends Cache { +class CacheFS extends CacheBase { constructor() { super(); } @@ -18,10 +18,26 @@ class CacheFS extends Cache { } } + /** + * + * @param {String} type + * @param {Buffer} guid + * @param {Buffer} hash + * @returns {string} + * @private + */ static _calcFilename(type, guid, hash) { return `${guid.toString('hex')}-${hash.toString('hex')}.${type}`; } + /** + * + * @param {String} type + * @param {Buffer} guid + * @param {Buffer} hash + * @returns {String} + * @private + */ _calcFilepath(type, guid, hash) { return path.join(this._cachePath, CacheFS._calcFilename(type, guid, hash)); } @@ -34,14 +50,6 @@ class CacheFS extends Cache { return super.init(options, callback); } - reset(callback) { - return super.reset(callback); - } - - save(callback) { - callback(); // No op - } - shutdown(callback) { callback(); // No op } @@ -77,9 +85,9 @@ class CacheFS extends Cache { return fs.move(file.file, filePath, { overwrite: true }); } - transaction.getFiles() - .then((files) => { - return Promise.all(files.map(moveFile)); + transaction.finalize() + .then(() => { + return Promise.all(transaction.files.map(moveFile)); }) .then(() => { callback(); @@ -90,14 +98,25 @@ class CacheFS extends Cache { } registerClusterWorker(worker) { - + worker.on('message', () => {}); } } class PutTransactionFS extends PutTransaction { + /** + * + * @param {Buffer} guid + * @param {Buffer} hash + * @param {String} cachePath + */ constructor(guid, hash, cachePath) { super(guid, hash); + /** + * @type {String} + * @private + */ this._cachePath = cachePath; + this._writeOptions = { flags: 'w', encoding: 'ascii', @@ -106,52 +125,69 @@ class PutTransactionFS extends PutTransaction { autoClose: true }; - this._files = {}; + this._streams = {}; + this._files = []; } _closeAllStreams() { - return new Promise((resolve) => { + return new Promise((resolve, reject) => { let self = this; - let files = _.values(this._files); + let files = _.values(this._streams); if(files.length === 0) return resolve(); let closed = 0; let toClose = files.length; + let success = true; - function processClosedFile(file) { + function processClosedStream(stream) { closed++; - if(file.stream.bytesWritten !== file.size) { - _.unset(self._files, file.type); + if(stream.stream.bytesWritten === stream.size) { + self._files.push({ + file: stream.file, + type: stream.type + }); + } + else { + success = false; } if(closed === toClose) { - resolve(); + success ? resolve() : reject(new Error("Transaction failed; file size mismatch")); } } files.forEach(file => { - if(file.stream.closed) return processClosedFile(file); + if(file.stream.closed) return processClosedStream(file); file.stream.on('close', () => { - processClosedFile(file); + processClosedStream(file); }).on('error', err => { helpers.log(consts.LOG_ERR, err); - _.unset(self._files, file.type); + _.unset(self._streams, file.type); }); }); }); } - getFiles() { - return new Promise((resolve) => { - this._closeAllStreams() - .then(() => { - resolve(_.values(this._files)); - }); - }); + get files() { + return this._files; + } + + finalize(callback) { + if(typeof(callback) !== 'function') { + return this._closeAllStreams(); + } + + this._closeAllStreams() + .then(() => { + callback(); + }) + .catch(err => { + callback(err); + }); } getWriteStream(type, size, callback) { @@ -165,7 +201,7 @@ class PutTransactionFS extends PutTransaction { callback(null, stream); }); - self._files[type] = { + self._streams[type] = { file: file, type: type, size: size, diff --git a/lib/cache/cache_membuf.js b/lib/cache/cache_membuf.js index dda2e99..01fcb9e 100644 --- a/lib/cache/cache_membuf.js +++ b/lib/cache/cache_membuf.js @@ -1,8 +1,8 @@ 'use strict'; -const { Cache, PutTransaction } = require('./cache'); +const { CacheBase, PutTransaction } = require('./cache'); const { Readable, Writable } = require('stream'); const helpers = require('../helpers'); -const consts = require('../constants').Constants; +const consts = require('../constants'); const path = require('path'); const fs = require('fs-extra'); const async = require('async'); @@ -14,7 +14,7 @@ const kDbName = 'cache_membuf.db'; const kIndex = 'index'; const kPageMeta = 'pages'; -class CacheMembuf extends Cache { +class CacheMembuf extends CacheBase { constructor() { super(); this._db = null; @@ -28,6 +28,14 @@ class CacheMembuf extends Cache { } } + /** + * + * @param {String} type + * @param {Buffer} guid + * @param {Buffer} hash + * @returns {string} + * @private + */ static _calcIndexKey(type, guid, hash) { return `${guid.toString('hex')}-${hash.toString('hex')}-${type}`; } @@ -137,7 +145,7 @@ class CacheMembuf extends Cache { return callback(new Error("Invalid cachePath")); let pages = self._pageMeta.chain().find({'dirty' : true}).data(); - let writeOps = pages.map(function(page) { + let writeOps = pages.map(page => { return { index: page.index, path: path.join(p, page.index), @@ -203,7 +211,7 @@ class CacheMembuf extends Cache { let db = new loki(self._dbPath, options); this._db = db; - db.loadDatabase({}, function() { + db.loadDatabase({}, () => { self._index = db.getCollection(kIndex); self._pageMeta = db.getCollection(kPageMeta); @@ -246,15 +254,6 @@ class CacheMembuf extends Cache { }); } - reset(callback) { - this._clearCache(); - callback(); - } - - save(callback) { - this._db.saveDatabase(callback); - } - shutdown(callback) { this._db.close(callback); } @@ -302,28 +301,38 @@ class CacheMembuf extends Cache { endPutTransaction(transaction, callback) { const self = this; - this._waitForSerialize().then(() => { - const files = transaction.getFiles(); - files.forEach(function (file) { - self._addFileToCache(file.type, transaction.guid, transaction.hash, file.buffer); - }); + this._waitForSerialize() + .then(() => { + return transaction.finalize(); + }) + .then(() => { + transaction.files.forEach(file => { + self._addFileToCache(file.type, transaction.guid, transaction.hash, file.buffer); + }); - callback(); - }); + callback(); + }) + .catch(err => { + callback(err); + }); } } class PutTransactionMembuf extends PutTransaction { constructor(guid, hash) { super(guid, hash); - this._files = { a: {}, i: {}, r: {} }; + this._streams = { a: {}, i: {}, r: {} }; this._finished = []; } - getFiles() { + get files() { return this._finished; } + finalize(callback) { + return super.finalize(callback); + } + getWriteStream(type, size, callback) { const self = this; @@ -331,12 +340,12 @@ class PutTransactionMembuf extends PutTransaction { return callback(new Error(`Unrecognized type '${type}' for transaction.`)); } - this._files[type].buffer = Buffer.alloc(size, 0, 'ascii'); - this._files[type].pos = 0; + this._streams[type].buffer = Buffer.alloc(size, 0, 'ascii'); + this._streams[type].pos = 0; const stream = new Writable({ write(chunk, encoding, callback) { - const file = self._files[type]; + const file = self._streams[type]; if (file.buffer.length - file.pos >= chunk.length) { chunk.copy(file.buffer, file.pos, 0, chunk.length); @@ -366,11 +375,12 @@ class PersistenceAdapter extends loki.LokiFsAdapter { this._cache = cache; } - saveDatabase(dbname, dbstring, callback) { + // noinspection JSUnusedGlobalSymbols + saveDatabase(dbName, dbString, callback) { const self = this; self._cache._serializeInProgress = true; - super.saveDatabase(dbname, dbstring, function() { + super.saveDatabase(dbName, dbString, function() { self._cache._serialize(function() { self._cache._serializeInProgress = false; callback(); diff --git a/lib/client/server_response_transform.js b/lib/client/server_response_transform.js index 41455f1..01ae251 100644 --- a/lib/client/server_response_transform.js +++ b/lib/client/server_response_transform.js @@ -1,6 +1,6 @@ const helpers = require('./../helpers'); -const consts = require('./../constants').Constants; -const { Transform } = require('stream'); +const consts = require('./../constants'); +const Transform = require('stream').Transform; const MAX_HEADER_SIZE = consts.CMD_SIZE + consts.SIZE_SIZE + consts.ID_SIZE; @@ -29,6 +29,7 @@ class CacheServerResponseTransform extends Transform { this.readState.headerData.version = this.version; } + // noinspection JSUnusedGlobalSymbols _transform(data, encoding, callback) { while(data !== null && data.length > 0) { if (this.readState.dataPassThrough) { diff --git a/lib/constants.js b/lib/constants.js index a7df31a..e184fdc 100644 --- a/lib/constants.js +++ b/lib/constants.js @@ -22,4 +22,4 @@ constants.SIZE_SIZE = constants.UINT64_SIZE; constants.DEFAULT_LOG_LEVEL = constants.LOG_INFO; Object.freeze(constants); -module.exports.Constants = constants; +module.exports = constants; diff --git a/lib/helpers.js b/lib/helpers.js index b7e94c7..4f4a77e 100644 --- a/lib/helpers.js +++ b/lib/helpers.js @@ -1,5 +1,5 @@ const cluster = require('cluster'); -const consts = require("./constants").Constants; +const consts = require("./constants"); let logLevel = consts.LOG_TEST; @@ -15,6 +15,7 @@ function zeroPad(len, str) { } /** + * @param {Number} input * @return {string} */ exports.encodeInt32 = function(input) { @@ -22,6 +23,7 @@ exports.encodeInt32 = function(input) { }; /** + * @param {Number} input * @return {string} */ exports.encodeInt64 = function(input) { @@ -29,13 +31,16 @@ exports.encodeInt64 = function(input) { }; /** - * @return {number} + * + * @param {Buffer} input + * @returns {number} */ exports.readUInt32 = function(input) { return parseInt(input.toString('ascii', 0, consts.UINT32_SIZE), 16); }; /** + * @param {Buffer} input * @return {number} */ exports.readUInt64 = function(input) { diff --git a/lib/server.js b/lib/server.js index edee341..d39af87 100644 --- a/lib/server.js +++ b/lib/server.js @@ -1,33 +1,43 @@ 'use strict'; const net = require('net'); -const consts = require('./constants').Constants; +const consts = require('./constants'); const helpers = require('./helpers'); const ClientStreamProcessor = require('./server/client_stream_processor'); const CommandProcessor = require('./server/command_processor'); class CacheServer { + /** + * + * @param {CacheBase} cache + * @param {Number} port + */ constructor(cache, port) { this._cache = cache; - this._port = parseInt(port); + this._port = port; if (!port && port !== 0) this._port = consts.DEFAULT_PORT; - this._sever = null; + + this._server = null; } + /** + * + * @returns {*} + */ get port() { - return this._server && this._server.listening + return (this._server && this._server.listening) ? this._server.address().port : this._port; } + /** + * + * @returns {CacheBase|*} + */ get cache() { return this._cache; } - get server() { - return this._server; - } - /** * start the cache server * @@ -37,31 +47,35 @@ class CacheServer { Start(errCallback, callback) { const self = this; - this._server = net.createServer(function (socket) { + let server = net.createServer(socket => { + helpers.log(consts.LOG_TEST, `${socket.remoteAddress}:${socket.remotePort} connected.`); + socket - .on('close', function () { - helpers.log(consts.LOG_ERR, "Socket closed"); + .on('close', () => { + helpers.log(consts.LOG_TEST, `${socket.remoteAddress}:${socket.remotePort} closed connection.`); }) - .on('error', function (err) { + .on('error', err => { helpers.log(consts.LOG_ERR, err); }); - const clientStreamProcessor = new ClientStreamProcessor(); - const commandProcessor = new CommandProcessor(clientStreamProcessor, self.cache); - - socket.pipe(clientStreamProcessor).pipe(commandProcessor).pipe(socket); + socket + .pipe(new ClientStreamProcessor()) // Transform the incoming byte stream into commands and file data + .pipe(new CommandProcessor(self.cache)) // Execute commands and interface with the cache module + .pipe(socket); // Connect back to socket to send files }); - this._server.on('error', function (e) { - if (e.code === 'EADDRINUSE') { - helpers.log(consts.LOG_ERR, 'Port ' + self.port + ' is already in use...'); - if (errCallback && typeof(errCallback === 'function')) { errCallback(e); } + server.on('error', err => { + if (err.code === 'EADDRINUSE') { + helpers.log(consts.LOG_ERR, `Port ${self.port} is already in use...`); + if (errCallback && typeof(errCallback === 'function')) { errCallback(err); } } }); - this._server.listen(this._port, function() { + server.listen(this.port, () => { if(callback && typeof(callback) === 'function') { callback(); } }); + + this._server = server; }; Stop() { diff --git a/lib/server/client_stream_processor.js b/lib/server/client_stream_processor.js index f122352..d143acd 100644 --- a/lib/server/client_stream_processor.js +++ b/lib/server/client_stream_processor.js @@ -1,7 +1,6 @@ const helpers = require('./../helpers'); -const consts = require('./../constants').Constants; - -const { Transform } = require('stream'); +const consts = require('./../constants'); +const Transform = require('stream').Transform; const CMD_QUIT = 'q'.charCodeAt(0); const MAX_HEADER_SIZE = consts.CMD_SIZE + consts.ID_SIZE; @@ -50,6 +49,7 @@ class ClientStreamProcessor extends Transform { } } + // noinspection JSUnusedGlobalSymbols _transform(data, encoding, callback) { while(data !== null && data.length > 0 && this.errState === null) { if(!this.didSendVersion) { @@ -162,10 +162,11 @@ class ClientStreamProcessor extends Transform { } if (this.readState.doReadSize) { - this.readState.dataSize = helpers.readUInt64(this.headerBuf.slice(consts.CMD_SIZE, consts.CMD_SIZE + consts.SIZE_SIZE).toString('ascii')); + this.readState.dataSize = helpers.readUInt64(this.headerBuf.slice(consts.CMD_SIZE, consts.CMD_SIZE + consts.SIZE_SIZE)); this.readState.dataPassThrough = true; } + // noinspection JSCheckFunctionSignatures this.push(Buffer.from(this.headerBuf.slice(0, this.readState.headerBufPos))); if(this.readState.dataPassThrough) { diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index 1d3b7cf..ff4db07 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -1,8 +1,7 @@ const helpers = require('./../helpers'); const filesize = require('filesize'); -const consts = require('./../constants').Constants; - -const { Duplex } = require('stream'); +const consts = require('./../constants'); +const Duplex = require('stream').Duplex; const kSource = Symbol("source"); const kCache = Symbol("cache"); @@ -12,13 +11,24 @@ const kReadStateCommand = Symbol("readStateCommand"); const kReadStatePutStream = Symbol("readStatePutStream"); class CommandProcessor extends Duplex { - constructor(clientStreamProcessor, cache) { + + /** + * + * @param {CacheBase} cache + */ + constructor(cache) { super(); - this[kSource] = clientStreamProcessor; this[kCache] = cache; this[kSendFileQueue] = []; this._readState = kReadStateVersion; + + /** + * + * @type {PutTransaction} + * @private + */ this._trx = null; + this._putStream = null; this._putSize = 0; this._putSent = 0; @@ -28,12 +38,25 @@ class CommandProcessor extends Duplex { this._sendFileQueueCount = 0; this._isReading = false; this._readReady = true; + this._registerEventListeners(); + } - this.once('finish', function() { - this._printReadStats(); + _registerEventListeners() { + const self = this; + this.once('finish', this._printReadStats); + this.on('pipe', src => { + self[kSource] = src; }); } + // noinspection JSUnusedGlobalSymbols + /** + * + * @param {Buffer} chunk + * @param {String} encoding + * @param {Function} callback + * @private + */ _write(chunk, encoding, callback) { let handler = null; const self = this; @@ -61,6 +84,20 @@ class CommandProcessor extends Duplex { }); } + /** + * @private + */ + _read() { + this._readReady = true; + let self = this; + Promise.resolve().then(() => { + self._read_internal(); + }); + } + + /** + * @private + */ _read_internal() { if(this._isReading || this[kSendFileQueue].length === 0) return; @@ -107,35 +144,39 @@ class CommandProcessor extends Duplex { } } - stream.on('readable', function() { - readChunk(); - }); + stream.on('readable', readChunk); }); - } + } - _printReadStats() { + /** + * @private + */ + _printReadStats() { if(this._sendFileQueueReadDuration > 0) { let totalTime = this._sendFileQueueReadDuration / 1000; let throughput = (this._sendFileQueueReadBytes / totalTime).toFixed(2); helpers.log(consts.LOG_INFO, `Sent ${this._sendFileQueueCount} files (${this._sendFileQueueChunkReads} chunks) totaling ${filesize(this._sendFileQueueReadBytes)} in ${totalTime} seconds (${filesize(throughput)}/sec)`); } - } - - _read() { - this._readReady = true; - let self = this; - Promise.resolve().then(() => { - self._read_internal(); - }); } - _quit(err) { + /** + * + * @param {Error?} err + * @private + */ + _quit(err) { this[kSource].unpipe(this); this[kSource].emit('quit'); this._readState = null; err && helpers.log(consts.LOG_ERR, err); } - + + /** + * + * @param {Buffer} data + * @param {Function} callback + * @private + */ _handleVersion(data, callback) { let version = helpers.readUInt32(data); this._readState = kReadStateCommand; @@ -149,6 +190,12 @@ class CommandProcessor extends Duplex { callback(err); } + /** + * + * @param {Buffer} data + * @param {Function} callback + * @private + */ _handleWrite(data, callback) { const self = this; @@ -166,6 +213,12 @@ class CommandProcessor extends Duplex { }); } + /** + * + * @param {Buffer} data + * @param {Function} callback + * @private + */ _handleCommand(data, callback) { let cmd, size, type, guid, hash = null; if(data.length > 1) { @@ -213,19 +266,27 @@ class CommandProcessor extends Duplex { } } + /** + * + * @param {String} type + * @param {Buffer} guid + * @param {Buffer} hash + * @param {Function} callback + * @private + */ _onGet(type, guid, hash, callback) { let self = this; this[kCache].getFileInfo(type, guid, hash, function(err, result) { if(err || result === null) { - let resp = Buffer.from('-' + type, 'ascii'); + let resp = Buffer.from(`-${type}`, 'ascii'); self[kSendFileQueue].push({ exists: false, header: Buffer.concat([resp, guid, hash], 34) }); } else { - let resp = Buffer.from('+' + type + helpers.encodeInt64(result.size), 'ascii'); + let resp = Buffer.from(`+${type}${helpers.encodeInt64(result.size)}`, 'ascii'); self[kSendFileQueue].push({ exists: true, header: Buffer.concat([resp, guid, hash], 50), @@ -247,6 +308,13 @@ class CommandProcessor extends Duplex { }); } + /** + * + * @param {Buffer} guid + * @param {Buffer} hash + * @param {Function} callback + * @private + */ _onTransactionStart(guid, hash, callback) { const self = this; @@ -266,6 +334,11 @@ class CommandProcessor extends Duplex { }); } + /** + * + * @param {Function} callback + * @private + */ _onTransactionEnd(callback) { const self = this; @@ -280,6 +353,13 @@ class CommandProcessor extends Duplex { }); } + /** + * + * @param {String} type + * @param {Number} size + * @param {Function} callback + * @private + */ _onPut(type, size, callback) { const self = this; diff --git a/main.js b/main.js index 6d0c45e..2cd3669 100644 --- a/main.js +++ b/main.js @@ -1,6 +1,6 @@ const cluster = require('cluster'); const helpers = require('./lib/helpers'); -const consts = require('./lib/constants').Constants; +const consts = require('./lib/constants'); const program = require('commander'); const path = require('path'); const CacheServer = require('./lib/server'); @@ -127,32 +127,6 @@ function startPrompt() { process.exit(0); }); break; - - case 's': - helpers.log(consts.LOG_INFO, "Saving cache data ..."); - Cache.save(function(err) { - if(err) { - helpers.log(consts.LOG_ERR, err); - server.Stop(); - process.exit(1); - } - - helpers.log(consts.LOG_INFO, "Save finished."); - }); - - break; - case 'r': - helpers.log(consts.LOG_INFO, "Resetting cache data ..."); - Cache.reset(function(err) { - "use strict"; - if(err) { - helpers.log(consts.LOG_ERR, err); - server.Stop(); - process.exit(1); - } - - helpers.log(consts.LOG_INFO, "Reset finished."); - }); } } diff --git a/test/cache_api.js b/test/cache_api.js new file mode 100644 index 0000000..e69de29 diff --git a/test/cache_membuf.js b/test/cache_membuf.js new file mode 100644 index 0000000..e69de29 diff --git a/test/cache.js b/test/protocol.js similarity index 99% rename from test/cache.js rename to test/protocol.js index 373efed..c4bb60f 100644 --- a/test/cache.js +++ b/test/protocol.js @@ -19,7 +19,7 @@ let cache, server, client; let test_modules = [ { tmpDir: tmp.dirSync({unsafeCleanup: true}), - name: "Cache: Membuf", + name: "Transaction tests (cache_membuf)", path: "../lib/cache/cache_membuf", options: { initialPageSize: 10000, @@ -32,7 +32,7 @@ let test_modules = [ }, { tmpDir: tmp.dirSync({unsafeCleanup: true}), - name: "Cache: FS", + name: "Transaction tests (cache_fs)", path: "../lib/cache/cache_fs", options: {} } From 7fd2ea632ba2c5fed2453feff3fc473933cd2a9a Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Sat, 23 Dec 2017 16:13:01 -0600 Subject: [PATCH 17/89] stubbed out cache module API unit tests --- test/cache_api.js | 83 ++++++++ test/cache_membuf.js | 41 ++++ test/protocol.js | 468 ++++++++++++++++++++++--------------------- test/server.js | 7 +- test/test_utils.js | 2 +- 5 files changed, 367 insertions(+), 234 deletions(-) diff --git a/test/cache_api.js b/test/cache_api.js index e69de29..65f6bcd 100644 --- a/test/cache_api.js +++ b/test/cache_api.js @@ -0,0 +1,83 @@ +const assert = require('assert'); +const helpers = require('../lib/helpers'); +const consts = require('../lib/constants'); +const tmp = require('tmp'); +const loki = require('lokijs'); + +let test_modules = [ + { + tmpDir: tmp.dirSync({unsafeCleanup: true}), + name: "cache_membuf", + path: "../lib/cache/cache_membuf", + options: { + initialPageSize: 10000, + growPageSize: 10000, + minFreeBlockSize: 1024, + persistenceOptions: { + adapter: new loki.LokiMemoryAdapter() + } + } + }, + { + tmpDir: tmp.dirSync({unsafeCleanup: true}), + name: "cache_membuf", + path: "../lib/cache/cache_fs", + options: {} + } +]; + +describe("Cache API", function() { + test_modules.forEach(function (module) { + describe(module.name, function () { + describe("init", function() { + it("should create the cache working directory if it doesn't exist"); + }); + + describe("getFileInfo", function() { + it("should report the file size for a file that exists in the cache"); + it("should return an error for a file that does not exist in the cache"); + }); + + describe("getFileStream", function() { + it("should return a readable stream for a file that exists in the cache"); + it("should return an error for a file that does not exist in the cache"); + }); + + describe("createPutTransaction", function() { + it("should return a PutTransaction object for the given file hash & guid"); + }); + + describe("endPutTransaction", function() { + it("should call finalize on the transaction"); + it("should add info, asset, and resource files to the cache that were written to the transaction"); + it("should return an error if any files were partially written to the transaction"); + }); + }); + }); +}); + +describe("PutTransaction API", function() { + test_modules.forEach(function (module) { + describe(module.name, function () { + describe("guid", function() { + it("should return the file guid for the transaction"); + }); + + describe("hash", function() { + it("should return the file hash for the transaction"); + }); + + describe("finalize", function() { + it("should return an error if any file was not fully written"); + it("should return with no error and no value if the transaction was successfully finalized"); + it("should return a promise if no callback is supplied"); + }); + + describe("getWriteStream", function() { + it("should return a WritableStream for the given file type"); + it("should only accept types of 'i', 'a', or 'r"); + it("should only accept size > 0"); + }); + }); + }); +}); \ No newline at end of file diff --git a/test/cache_membuf.js b/test/cache_membuf.js index e69de29..c3d95b3 100644 --- a/test/cache_membuf.js +++ b/test/cache_membuf.js @@ -0,0 +1,41 @@ +const assert = require('assert'); +const helpers = require('../lib/helpers'); +const consts = require('../lib/constants'); + +describe("Cache: Membuf", function() { + describe("_allocPage", function() { + + }); + + describe("_findFreeBlock", function() { + + }); + + describe("_reserveBlock", function() { + + }); + + describe("_waitForSerialize", function() { + + }); + + describe("_addFileToCache", function() { + + }); + + describe("_serialize", function() { + + }); + + describe("_deserialize", function() { + + }); + + describe("_clearCache", function() { + + }); + + describe("_initDb", function() { + + }); +}); \ No newline at end of file diff --git a/test/protocol.js b/test/protocol.js index c4bb60f..3689b43 100644 --- a/test/protocol.js +++ b/test/protocol.js @@ -2,11 +2,12 @@ const assert = require('assert'); const net = require('net'); const crypto = require('crypto'); const helpers = require('../lib/helpers'); -const consts = require('../lib/constants').Constants; +const consts = require('../lib/constants'); const CacheServer = require('../lib/server'); -const CmdResponseListener = require('./../lib/client/server_response_transform.js'); +const CacheServerResponseTransform = require('./../lib/client/server_response_transform.js'); const loki = require('lokijs'); const tmp = require('tmp'); +const { before, beforeEach, after } = require('mocha'); const generateCommandData = require('./test_utils').generateCommandData; const encodeCommand = require('./test_utils').encodeCommand; @@ -19,7 +20,7 @@ let cache, server, client; let test_modules = [ { tmpDir: tmp.dirSync({unsafeCleanup: true}), - name: "Transaction tests (cache_membuf)", + name: "cache_membuf", path: "../lib/cache/cache_membuf", options: { initialPageSize: 10000, @@ -32,307 +33,314 @@ let test_modules = [ }, { tmpDir: tmp.dirSync({unsafeCleanup: true}), - name: "Transaction tests (cache_fs)", + name: "cache_fs", path: "../lib/cache/cache_fs", options: {} } ]; -test_modules.forEach(function(module) { - describe(module.name, function() { +describe("Protocol", function() { + test_modules.forEach(function(module) { + describe(module.name, function() { + + beforeEach(function() { + helpers.SetLogger(function() {}); + }); + + before(function (done) { - beforeEach(function() { - helpers.SetLogger(function() {}); - }); - before(function (done) { - let CacheModule = require(module.path); - cache = new CacheModule(); + /** @type {CacheBase} **/ + let CacheModule = require(module.path); + cache = new CacheModule(); - module.options.cachePath = module.tmpDir.name; + module.options.cachePath = module.tmpDir.name; - cache.init(module.options, function() { - server = new CacheServer(cache, 0); + cache.init(module.options, function() { + server = new CacheServer(cache, 0); - server.Start(function (err) { - assert(!err, "Cache Server reported error! " + err); - }, done); + server.Start(function (err) { + assert(!err, "Cache Server reported error! " + err); + }, done); + }); }); - }); - after(function() { - server.Stop(); - module.tmpDir.removeCallback(); - }); + after(function() { + server.Stop(); + module.tmpDir.removeCallback(); + }); - describe("Transactions", function () { + describe("Transactions", function () { - const self = this; + const self = this; - before(function() { - self.data = generateCommandData(); - }); + before(function() { + self.data = generateCommandData(); + }); - beforeEach(function (done) { - client = net.connect({port: server.port}, function (err) { - assert(!err, err); - client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); - done(err); + beforeEach(function (done) { + client = net.connect({port: server.port}, function (err) { + assert(!err, err); + client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); + done(err); + }); }); - }); - it("should start a transaction with the (ts) command", function (done) { - expectLog(client, /Start transaction/, done); - client.end(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); - }); + it("should start a transaction with the (ts) command", function (done) { + expectLog(client, /Start transaction/, done); + client.end(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); + }); - it("should cancel a pending transaction if a new (ts) command is received", function (done) { - expectLog(client, /Cancel previous transaction/, done); - const d = encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash); - client.write(d); // first one ... - client.end(d); // ... canceled by this one - }); + it("should cancel a pending transaction if a new (ts) command is received", function (done) { + expectLog(client, /Cancel previous transaction/, done); + const d = encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash); + client.write(d); // first one ... + client.end(d); // ... canceled by this one + }); - it("should require a start transaction (ts) cmd before an end transaction (te) cmd", function (done) { - expectLog(client, /Invalid transaction isolation/, done); - client.end(cmd.transactionEnd); - }); + it("should require a start transaction (ts) cmd before an end transaction (te) cmd", function (done) { + expectLog(client, /Invalid transaction isolation/, done); + client.end(cmd.transactionEnd); + }); - it("should end a transaction that was started with the (te) command", function (done) { - expectLog(client, /End transaction for/, done); - client.write(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); - client.end(cmd.transactionEnd); - }); + it("should end a transaction that was started with the (te) command", function (done) { + expectLog(client, /End transaction for/, done); + client.write(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); + client.end(cmd.transactionEnd); + }); - it("should require a transaction start (te) command before a put command", function(done) { - expectLog(client, /Not in a transaction/, done); - client.write(encodeCommand(cmd.putAsset, null, null, self.data.bin)); - }); + it("should require a transaction start (te) command before a put command", function(done) { + expectLog(client, /Not in a transaction/, done); + client.write(encodeCommand(cmd.putAsset, null, null, self.data.bin)); + }); - it("should close the socket on an invalid transaction command", function(done) { - expectLog(client, /Unrecognized command/i, done); - client.write('tx', self.data.guid, self.data.hash); + it("should close the socket on an invalid transaction command", function(done) { + expectLog(client, /Unrecognized command/i, done); + client.write('tx', self.data.guid, self.data.hash); + }); }); - }); - describe("PUT requests", function () { - this.slow(1500); + describe("PUT requests", function () { + this.slow(1500); - const self = this; + const self = this; - before(function () { - self.data = generateCommandData(); - }); + before(function () { + self.data = generateCommandData(); + }); - beforeEach(function (done) { - client = net.connect({port: server.port}, function (err) { - assert(!err); + beforeEach(function (done) { + client = net.connect({port: server.port}, function (err) { + assert(!err); - // The Unity client always sends the version once on-connect. i.e., the version should not be pre-pended - // to other request data in the tests below. - client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); - done(); + // The Unity client always sends the version once on-connect. i.e., the version should not be pre-pended + // to other request data in the tests below. + client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); + done(); + }); }); - }); - it("should close the socket on an invalid PUT type", function (done) { - expectLog(client, /Unrecognized command/i, done); - client.write( - encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + - encodeCommand("px", null, null, self.data.bin)); - }); + it("should close the socket on an invalid PUT type", function (done) { + expectLog(client, /Unrecognized command/i, done); + client.write( + encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + + encodeCommand("px", null, null, self.data.bin)); + }); - const tests = [ - {ext: 'bin', cmd: cmd.putAsset}, - {ext: 'info', cmd: cmd.putInfo}, - {ext: 'resource', cmd: cmd.putResource} - ]; + const tests = [ + {ext: 'bin', cmd: cmd.putAsset}, + {ext: 'info', cmd: cmd.putInfo}, + {ext: 'resource', cmd: cmd.putResource} + ]; - tests.forEach(function (test) { - it("should store " + test.ext + " data with a (" + test.cmd + ") cmd", function (done) { - client.on('close', function () { - cache.getFileInfo(test.cmd[1], self.data.guid, self.data.hash, function(err, info) { - assert(!err, err); - assert(info.size === self.data[test.ext].length); - cache.getFileStream(test.cmd[1], self.data.guid, self.data.hash, function (err, stream) { + tests.forEach(function (test) { + it("should store " + test.ext + " data with a (" + test.cmd + ") cmd", function (done) { + client.on('close', function () { + cache.getFileInfo(test.cmd[1], self.data.guid, self.data.hash, function(err, info) { assert(!err, err); - assert(stream !== null); - - stream.on("readable", function () { - const chunk = stream.read(); // should only be one in this test - assert(self.data[test.ext].compare(chunk) === 0); - done(); + assert(info.size === self.data[test.ext].length); + cache.getFileStream(test.cmd[1], self.data.guid, self.data.hash, function (err, stream) { + assert(!err, err); + assert(stream !== null); + + stream.on("readable", function () { + const chunk = stream.read(); // should only be one in this test + assert(self.data[test.ext].compare(chunk) === 0); + done(); + }); }); }); - }); - }); + }); - const buf = Buffer.from( - encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + - encodeCommand(test.cmd, null, null, self.data[test.ext]) + - encodeCommand(cmd.transactionEnd), 'ascii'); - - let sentBytes = 0; - - function sendBytesAsync() { - setTimeout(() => { - const packetSize = Math.min(buf.length - sentBytes, Math.ceil(Math.random() * 10)); - client.write(buf.slice(sentBytes, sentBytes + packetSize), function () { - sentBytes += packetSize; - if (sentBytes < buf.length) - return sendBytesAsync(); - else - sleep(50).then(() => { - client.end(); - }); - }); - }, 1); - } + const buf = Buffer.from( + encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + + encodeCommand(test.cmd, null, null, self.data[test.ext]) + + encodeCommand(cmd.transactionEnd), 'ascii'); + + let sentBytes = 0; + + function sendBytesAsync() { + setTimeout(() => { + const packetSize = Math.min(buf.length - sentBytes, Math.ceil(Math.random() * 10)); + client.write(buf.slice(sentBytes, sentBytes + packetSize), function () { + sentBytes += packetSize; + if (sentBytes < buf.length) + return sendBytesAsync(); + else + sleep(50).then(() => { + client.end(); + }); + }); + }, 1); + } - sendBytesAsync(); + sendBytesAsync(); + }); }); - }); - it("should replace an existing file with the same guid and hash", function (done) { - const asset = Buffer.from(crypto.randomBytes(self.data.bin.length).toString('ascii'), 'ascii'); + it("should replace an existing file with the same guid and hash", function (done) { + const asset = Buffer.from(crypto.randomBytes(self.data.bin.length).toString('ascii'), 'ascii'); - client.on('close', function () { - cache.getFileInfo('a', self.data.guid, self.data.hash, function(err, info) { - assert(!err, err); - assert(info.size === asset.length); - - cache.getFileStream('a', self.data.guid, self.data.hash, function (err, stream) { + client.on('close', function () { + cache.getFileInfo('a', self.data.guid, self.data.hash, function(err, info) { assert(!err, err); - assert(stream !== null); + assert(info.size === asset.length); - stream.on("readable", function () { - const chunk = stream.read(); // should only be one in this test - assert(asset.compare(chunk) === 0); - done(); + cache.getFileStream('a', self.data.guid, self.data.hash, function (err, stream) { + assert(!err, err); + assert(stream !== null); + + stream.on("readable", function () { + const chunk = stream.read(); // should only be one in this test + assert(asset.compare(chunk) === 0); + done(); + }); }); }); }); - }); - client.write( - encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + - encodeCommand(cmd.putAsset, null, null, asset) + - encodeCommand(cmd.transactionEnd)); + client.write( + encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + + encodeCommand(cmd.putAsset, null, null, asset) + + encodeCommand(cmd.transactionEnd)); - sleep(50).then(() => { - client.end(); + sleep(50).then(() => { + client.end(); + }); }); }); - }); - - describe("GET requests", function () { - this.slow(1000); - - const self = this; - self.data = generateCommandData(5000000, 6000000); - before(function (done) { - client = net.connect({port: server.port}, function (err) { - assert(!err); - client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); - client.write(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); - client.write(encodeCommand(cmd.putAsset, null, null, self.data.bin)); - client.write(encodeCommand(cmd.putInfo, null, null, self.data.info)); - client.write(encodeCommand(cmd.putResource, null, null, self.data.resource)); - client.write(cmd.transactionEnd); - client.end(cmd.quit); - client.on('close', done); + describe("GET requests", function () { + this.slow(1000); + + const self = this; + self.data = generateCommandData(5000000, 6000000); + + before(function (done) { + client = net.connect({port: server.port}, function (err) { + assert(!err); + client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); + client.write(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); + client.write(encodeCommand(cmd.putAsset, null, null, self.data.bin)); + client.write(encodeCommand(cmd.putInfo, null, null, self.data.info)); + client.write(encodeCommand(cmd.putResource, null, null, self.data.resource)); + client.write(cmd.transactionEnd); + client.end(cmd.quit); + client.on('close', done); + }); }); - }); - beforeEach(function (done) { - client = net.connect({port: server.port}, function (err) { - assert(!err); + beforeEach(function (done) { + client = net.connect({port: server.port}, function (err) { + assert(!err); - // The Unity client always sends the version once on-connect. i.e., the version should not be pre-pended - // to other request data in the tests below. - client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); - done(); + // The Unity client always sends the version once on-connect. i.e., the version should not be pre-pended + // to other request data in the tests below. + client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); + done(); + }); }); - }); - it("should close the socket on an invalid GET type", function (done) { - expectLog(client, /Unrecognized command/i, done); - client.write(encodeCommand('gx', self.data.guid, self.data.hash)); - }); + it("should close the socket on an invalid GET type", function (done) { + expectLog(client, /Unrecognized command/i, done); + client.write(encodeCommand('gx', self.data.guid, self.data.hash)); + }); - const tests = [ - {cmd: cmd.getAsset, blob: self.data.bin, type: 'bin'}, - {cmd: cmd.getInfo, blob: self.data.info, type: 'info'}, - {cmd: cmd.getResource, blob: self.data.resource, type: 'resource'} - ]; + const tests = [ + {cmd: cmd.getAsset, blob: self.data.bin, type: 'bin'}, + {cmd: cmd.getInfo, blob: self.data.info, type: 'info'}, + {cmd: cmd.getResource, blob: self.data.resource, type: 'resource'} + ]; - it("should respond with not found (-) for missing files", function (done) { - let count = 0; + it("should respond with not found (-) for missing files", function (done) { + let count = 0; - client.pipe(new CmdResponseListener()) - .on('header', function (header) { - assert(header.cmd === '-' + tests[count].cmd[1]); - count++; - if(count === 3) done(); - }); + client.pipe(new CacheServerResponseTransform()) + .on('header', function (header) { + assert(header.cmd === '-' + tests[count].cmd[1]); + count++; + if(count === 3) done(); + }); - const badGuid = Buffer.allocUnsafe(consts.GUID_SIZE).fill(0); - const badHash = Buffer.allocUnsafe(consts.HASH_SIZE).fill(0); + const badGuid = Buffer.allocUnsafe(consts.GUID_SIZE).fill(0); + const badHash = Buffer.allocUnsafe(consts.HASH_SIZE).fill(0); - tests.forEach(function(test) { - client.write(encodeCommand(test.cmd, badGuid, badHash)); + tests.forEach(function(test) { + client.write(encodeCommand(test.cmd, badGuid, badHash)); + }); }); - }); - tests.forEach(function (test) { - it("should retrieve stored " + test.type + " data with the (" + test.cmd + ") command", function (done) { - let dataBuf; - let pos = 0; - let resp = new CmdResponseListener(); - - resp.on('header', function (header) { - assert(header.cmd === '+' + test.cmd[1]); - assert(header.guid.compare(self.data.guid) === 0, "GUID does not match"); - assert(header.hash.compare(self.data.hash) === 0, "HASH does not match"); - assert(header.size === test.blob.length, "Expected size " + test.blob.length); - dataBuf = Buffer.allocUnsafe(header.size); - }) - .on('data', function (data) { - let prev = pos; - pos += data.copy(dataBuf, pos); - assert(data.compare(test.blob.slice(prev, pos)) === 0, `Blobs don't match at pos ${pos}`); - }) - .on('dataEnd', function () { - assert(dataBuf.compare(test.blob) === 0); - done(); - }); + tests.forEach(function (test) { + it("should retrieve stored " + test.type + " data with the (" + test.cmd + ") command", function (done) { + let dataBuf; + let pos = 0; + + let resp = new CacheServerResponseTransform(); + + resp + .on('header', function (header) { + assert(header.cmd === '+' + test.cmd[1]); + assert(header.guid.compare(self.data.guid) === 0, "GUID does not match"); + assert(header.hash.compare(self.data.hash) === 0, "HASH does not match"); + assert(header.size === test.blob.length, "Expected size " + test.blob.length); + dataBuf = Buffer.allocUnsafe(header.size); + }) + .on('data', function (data) { + let prev = pos; + pos += data.copy(dataBuf, pos); + assert(data.compare(test.blob.slice(prev, pos)) === 0, `Blobs don't match at pos ${pos}`); + }) + .on('dataEnd', function () { + assert(dataBuf.compare(test.blob) === 0); + done(); + }); - client.pipe(resp); + client.pipe(resp); - const buf = Buffer.from(encodeCommand(test.cmd, self.data.guid, self.data.hash), 'ascii'); + const buf = Buffer.from(encodeCommand(test.cmd, self.data.guid, self.data.hash), 'ascii'); - let sentBytes = 0; + let sentBytes = 0; - function sendBytesAsync() { - setTimeout(() => { - const packetSize = Math.min(buf.length - sentBytes, Math.ceil(Math.random() * 10)); - client.write(buf.slice(sentBytes, sentBytes + packetSize), function () { - sentBytes += packetSize; - if (sentBytes < buf.length) - return sendBytesAsync(); - }); - }, 1); - } + function sendBytesAsync() { + setTimeout(() => { + const packetSize = Math.min(buf.length - sentBytes, Math.ceil(Math.random() * 10)); + client.write(buf.slice(sentBytes, sentBytes + packetSize), function () { + sentBytes += packetSize; + if (sentBytes < buf.length) + return sendBytesAsync(); + }); + }, 1); + } - sendBytesAsync(); + sendBytesAsync(); + }); }); }); }); }); -}); \ No newline at end of file +}); diff --git a/test/server.js b/test/server.js index 02cafa4..e991963 100644 --- a/test/server.js +++ b/test/server.js @@ -1,9 +1,10 @@ const assert = require('assert'); const net = require('net'); const helpers = require('../lib/helpers'); -const consts = require('../lib/constants').Constants; +const consts = require('../lib/constants'); const CacheServer = require('../lib/server'); -const Cache = require("../lib/cache/cache").Cache; +const Cache = require('../lib/cache/cache').CacheBase; +const { before, beforeEach, after } = require('mocha'); const sleep = require('./test_utils').sleep; const cmd = require('./test_utils').cmd; @@ -16,7 +17,7 @@ let client; describe("Server common", function() { beforeEach(function() { - helpers.SetLogger(function(lvl, msg) {}); + helpers.SetLogger(() => {}); }); before(function (done) { diff --git a/test/test_utils.js b/test/test_utils.js index 84f0996..7f92ebb 100644 --- a/test/test_utils.js +++ b/test/test_utils.js @@ -1,6 +1,6 @@ const assert = require('assert'); const crypto = require('crypto'); -const consts = require('../lib/constants').Constants; +const consts = require('../lib/constants'); const helpers = require('../lib/helpers'); const MIN_BLOB_SIZE = 64; From 8690cc53571cf8dd64f62f5c9bd954bb42031396 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Sat, 23 Dec 2017 16:44:22 -0600 Subject: [PATCH 18/89] Bumping node version in travis config --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index a584bcf..919e583 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,5 @@ language: node_js node_js: - - "6" + - "8" after_success: - npm run coveralls \ No newline at end of file From 2bcf3ed3dc99461f3bdb98ec7681ce576e2ed7d1 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Thu, 28 Dec 2017 12:18:35 -0600 Subject: [PATCH 19/89] Brought test coverage up over 95%, with some minor refactors to support testing --- lib/cache/{cache.js => cache_base.js} | 57 +++-- lib/cache/cache_fs.js | 114 +++++----- lib/cache/cache_membuf.js | 286 +++++++++++++++----------- lib/helpers.js | 17 ++ test/cache_api.js | 235 ++++++++++++++++++--- test/cache_base.js | 170 +++++++++++++++ test/cache_membuf.js | 205 ++++++++++++++++-- test/protocol.js | 6 +- test/server.js | 4 +- test/test_utils.js | 17 +- 10 files changed, 842 insertions(+), 269 deletions(-) rename lib/cache/{cache.js => cache_base.js} (73%) create mode 100644 test/cache_base.js diff --git a/lib/cache/cache.js b/lib/cache/cache_base.js similarity index 73% rename from lib/cache/cache.js rename to lib/cache/cache_base.js index dd9bfdf..9f1e77d 100644 --- a/lib/cache/cache.js +++ b/lib/cache/cache_base.js @@ -1,4 +1,5 @@ 'use strict'; +const EventEmitter = require('events'); const cluster = require('cluster'); const consts = require('../constants'); const helpers = require('../helpers'); @@ -7,8 +8,11 @@ const path = require('path'); const fs = require('fs-extra'); const _ = require('lodash'); -class CacheBase { - constructor() {} +class CacheBase extends EventEmitter { + constructor() { + super(); + this._optionOverrides = {}; + } static get properties() { return {}; @@ -36,7 +40,7 @@ class CacheBase { * * @param {Object} options * @param {Function?} callback - * @returns {*} + * @returns {Promise} */ init(options, callback) { if(typeof(options) === 'object') @@ -45,30 +49,20 @@ class CacheBase { if(cluster.isMaster) { const p = this._cachePath; helpers.log(consts.LOG_INFO, `Cache path is ${p}`); - - if (typeof(callback) !== 'function') { - return fs.mkdirs(p); - } - - fs.mkdirs(p, callback); + return helpers.returnPromise(fs.mkdirs(p), callback); } else { - if (typeof(callback) !== 'function') { - return new Promise(resolve => { resolve(); }); - } - else { - callback(null); - } + return helpers.returnPromise(Promise.resolve(), callback); } } // noinspection JSMethodCanBeStatic /** * - * @param {Function} callback + * @param {Function?} callback */ shutdown(callback) { - throw new Error("Not implemented"); + return Promise.reject(new Error("Not implemented")); } // noinspection JSMethodCanBeStatic @@ -77,10 +71,11 @@ class CacheBase { * @param {String} type * @param {Buffer} guid * @param {Buffer} hash - * @param {Function} callback + * @param {Function?} callback + * @returns {Promise} */ getFileInfo(type, guid, hash, callback) { - throw new Error("Not implemented"); + return Promise.reject(new Error("Not implemented")); } // noinspection JSMethodCanBeStatic @@ -89,10 +84,11 @@ class CacheBase { * @param {String} type * @param {Buffer} guid * @param {Buffer} hash - * @param {Function} callback + * @param {Function?} callback + * @returns {Promise} */ getFileStream(type, guid, hash, callback) { - throw new Error("Not implemented"); + return Promise.reject(new Error("Not implemented")); } // noinspection JSMethodCanBeStatic @@ -100,20 +96,22 @@ class CacheBase { * * @param {Buffer} guid * @param {Buffer} hash - * @param {Function} callback + * @param {Function?} callback + * @returns {Promise} */ createPutTransaction(guid, hash, callback) { - throw new Error("Not implemented"); + return Promise.reject(new Error("Not implemented")); } // noinspection JSMethodCanBeStatic /** * * @param {PutTransaction} transaction - * @param {Function} callback + * @param {Function?} callback + * @returns {Promise} */ endPutTransaction(transaction, callback) { - throw new Error("Not implemented"); + return Promise.reject(new Error("Not implemented")); } // noinspection JSMethodCanBeStatic @@ -161,11 +159,7 @@ class PutTransaction { * @returns {Promise} */ finalize(callback) { - if(typeof(callback) !== 'function') { - return new Promise((resolve) => { resolve(); }); - } - - setImmediate(callback); + return Promise.reject(new Error("Not implemented")); } /** @@ -173,9 +167,10 @@ class PutTransaction { * @param {String} type * @param {Number} size * @param {Function} callback + * @returns {Promise} */ getWriteStream(type, size, callback) { - throw new Error("Not implemented"); + return Promise.reject(new Error("Not implemented")); } } diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 47c1493..2f1ac67 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -1,7 +1,6 @@ 'use strict'; -const { CacheBase, PutTransaction } = require('./cache'); +const { CacheBase, PutTransaction } = require('./cache_base'); const helpers = require('../helpers'); -const consts = require('../constants'); const path = require('path'); const fs = require('fs-extra'); const uuid = require('uuid'); @@ -47,34 +46,45 @@ class CacheFS extends CacheBase { } init(options, callback) { - return super.init(options, callback); + let p = super.init(options); + return helpers.returnPromise(p, callback); } shutdown(callback) { - callback(); // No op + return helpers.returnPromise(Promise.resolve(), callback); } getFileInfo(type, guid, hash, callback) { - fs.stat(this._calcFilepath(type, guid, hash)) - .then(stats => { - callback(null, {size: stats.size}); - }) - .catch(err => { - callback(err); - }) + let p = new Promise((resolve, reject) => { + fs.stat(this._calcFilepath(type, guid, hash)) + .then(stats => { + resolve({size: stats.size}); + }) + .catch(err => { + reject(err); + }); + }); + + return helpers.returnPromise(p, callback); } getFileStream(type, guid, hash, callback) { let stream = fs.createReadStream(this._calcFilepath(type, guid, hash)); - stream.on('open', () => { - callback(null, stream); + + let p = new Promise((resolve, reject) => { + stream.on('open', () => { + resolve(stream); }).on('error', err => { - callback(err); + reject(stream); + }); }); + + return helpers.returnPromise(p, callback); } createPutTransaction(guid, hash, callback) { - callback(null, new PutTransactionFS(guid, hash, this._cachePath)); + let p = Promise.resolve(new PutTransactionFS(guid, hash, this._cachePath)); + return helpers.returnPromise(p, callback); } endPutTransaction(transaction, callback) { @@ -85,21 +95,14 @@ class CacheFS extends CacheBase { return fs.move(file.file, filePath, { overwrite: true }); } - transaction.finalize() - .then(() => { + let p = transaction.finalize().then(() => { return Promise.all(transaction.files.map(moveFile)); - }) - .then(() => { - callback(); - }) - .catch(err => { - callback(err); }); - } - registerClusterWorker(worker) { - worker.on('message', () => {}); + return helpers.returnPromise(p, callback); } + + registerClusterWorker(worker) {} } class PutTransactionFS extends PutTransaction { @@ -161,12 +164,8 @@ class PutTransactionFS extends PutTransaction { files.forEach(file => { if(file.stream.closed) return processClosedStream(file); - file.stream.on('close', () => { processClosedStream(file); - }).on('error', err => { - helpers.log(consts.LOG_ERR, err); - _.unset(self._streams, file.type); }); }); }); @@ -177,40 +176,43 @@ class PutTransactionFS extends PutTransaction { } finalize(callback) { - if(typeof(callback) !== 'function') { - return this._closeAllStreams(); - } - - this._closeAllStreams() - .then(() => { - callback(); - }) - .catch(err => { - callback(err); - }); + let p = this._closeAllStreams(); + return helpers.returnPromise(p, callback); } getWriteStream(type, size, callback) { let self = this; let file = path.join(this._cachePath, uuid()); - fs.ensureFile(file) - .then(() => { - let stream = fs.createWriteStream(file, this._writeOptions); - stream.on('open', () => { - callback(null, stream); + let p = new Promise((resolve, reject) => { + if(typeof(size) !== 'number' || size <= 0) { + return reject(new Error("Invalid size for write stream")); + } + + if(type !== 'a' && type !== 'i' && type !== 'r') { + return reject(new Error(`Unrecognized type '${type}' for transaction.`)); + } + + fs.ensureFile(file) + .then(() => { + let stream = fs.createWriteStream(file, this._writeOptions); + stream.on('open', () => { + resolve(stream); + }); + + self._streams[type] = { + file: file, + type: type, + size: size, + stream: stream + }; + }) + .catch(err => { + reject(err); }); + }); - self._streams[type] = { - file: file, - type: type, - size: size, - stream: stream - }; - }) - .catch(err => { - callback(err); - }); + return helpers.returnPromise(p, callback); } } diff --git a/lib/cache/cache_membuf.js b/lib/cache/cache_membuf.js index 01fcb9e..7a31f33 100644 --- a/lib/cache/cache_membuf.js +++ b/lib/cache/cache_membuf.js @@ -1,11 +1,11 @@ 'use strict'; -const { CacheBase, PutTransaction } = require('./cache'); +const { CacheBase, PutTransaction } = require('./cache_base'); const { Readable, Writable } = require('stream'); +const { promisify } = require('util'); const helpers = require('../helpers'); const consts = require('../constants'); const path = require('path'); const fs = require('fs-extra'); -const async = require('async'); const _ = require('lodash'); const loki = require('lokijs'); const uuid = require('uuid/v4'); @@ -112,18 +112,31 @@ class CacheMembuf extends CacheBase { return block; } + /** + * + * @returns {Promise} + * @private + */ _waitForSerialize() { const self = this; return new Promise((resolve) => { (function waitForSave() { if(self._serializeInProgress === false) return resolve(); - helpers.log(consts.LOG_TEST, "_waitForSerialize..."); + self.emit('waitForSerialize'); setTimeout(waitForSave, 100); })(); }); } + /** + * + * @param {String} type + * @param {Buffer} guid + * @param {Buffer} hash + * @param {Buffer} buffer + * @private + */ _addFileToCache(type, guid, hash, buffer) { const key = CacheMembuf._calcIndexKey(type, guid, hash); const entry = this._reserveBlock(key, buffer.length); @@ -137,67 +150,65 @@ class CacheMembuf extends CacheBase { this._pageMeta.update(pageMeta); } - _serialize(callback) { + /** + * + * @returns {Promise<[any]>} + * @private + */ + _serialize() { const self = this; - let p = self._cachePath; - if(p === null) - return callback(new Error("Invalid cachePath")); - let pages = self._pageMeta.chain().find({'dirty' : true}).data(); - let writeOps = pages.map(page => { - return { - index: page.index, - path: path.join(p, page.index), - data: self._pages[page.index] - } - }); - function doWriteOp(op, cb) { - helpers.log(consts.LOG_INFO, `Writing ${op.path}`); - fs.writeFile(op.path, op.data) + let promises = pages.map(page => { + let pagePath = path.join(self._cachePath, page.index); + helpers.log(consts.LOG_INFO, `Writing ${pagePath}`); + return fs.writeFile(pagePath, self._pages[page.index]) .then(() => { - let doc = self._pageMeta.by('index', op.index); + let doc = self._pageMeta.by('index', page.index); doc.dirty = false; self._pageMeta.update(doc); - cb(); - }) - .catch(err => { - cb(err); }); - } + }); - async.eachSeries(writeOps, doWriteOp, callback); + return Promise.all(promises); } - _deserialize(callback) { + /** + * + * @returns {Promise<[any]>} + * @private + */ + _deserialize() { const self = this; - const p = self._cachePath; + const cachePath = self._cachePath; let pages = self._pageMeta.chain().find({}).data(); - function loadPageFile(page, cb) { - let file = path.join(p, page.index); + let promises = pages.map(page => { + let file = path.join(cachePath, page.index); helpers.log(consts.LOG_DBG, `Loading page file at ${file}`); - fs.stat(file) + + return fs.stat(file) .then(stats => { - if(stats.size !== page.size) - return cb(new Error(`Unrecognized/invalid page file '${file}'`)); + if(stats.size === page.size) { + return fs.readFile(file); + } - return fs.readFile(file); + throw new Error(`Unrecognized/invalid page file '${file}'`); }) .then(result => { self._pages[page.index] = result; - cb(); - }) - .catch(err => { - cb(err); }); - } + }); - async.each(pages, loadPageFile, callback); + return Promise.all(promises); } + /** + * + * @private + */ _clearCache() { this._index.clear(); this._pageMeta.clear(); @@ -205,123 +216,138 @@ class CacheMembuf extends CacheBase { this._allocPage(this._options.initialPageSize); } - _initDb(options, callback) { + /** + * + * @param options + * @returns {Promise} + * @private + */ + _initDb(options) { const self = this; let db = new loki(self._dbPath, options); + let loadDb = promisify(db.loadDatabase).bind(db); this._db = db; - db.loadDatabase({}, () => { - self._index = db.getCollection(kIndex); - self._pageMeta = db.getCollection(kPageMeta); + return loadDb({}) + .then(() => { + self._index = db.getCollection(kIndex); + self._pageMeta = db.getCollection(kPageMeta); + + if(self._index !== null && self._pageMeta !== null) { + return self._deserialize(); + } - if(self._pageMeta === null) { self._pageMeta = db.addCollection(kPageMeta, { unique: ["index"], indices: ["dirty"] }); - } - if(self._index === null) { self._index = db.addCollection(kIndex, { unique: ["fileId"], indices: ["size"] }); self._clearCache(); - callback(); - } - else { - self._deserialize(callback); - } - }); + }); + } + + /** + * + * @private + */ + _saveDb() { + let save = promisify(this._db.saveDatabase).bind(this._db); + return save(); } init(options, callback) { const self = this; - - super.init(options) + + let p = super.init(options) .then(() => { let dbOpts = self._options.persistenceOptions || {}; - if(!dbOpts.hasOwnProperty('adapter')) { + if(!dbOpts.hasOwnProperty('adapter') || dbOpts.adapter === null) { dbOpts.adapter = new PersistenceAdapter(self); } - self._initDb(dbOpts, callback); - }) - .catch(err => { - callback(err); + return self._initDb(dbOpts); }); + + return helpers.returnPromise(p, callback); } shutdown(callback) { - this._db.close(callback); + let close = promisify(this._db.close).bind(this._db); + let p = this._saveDb().then(() => close()); + return helpers.returnPromise(p, callback); } getFileInfo(type, guid, hash, callback) { const entry = this._index.by('fileId', CacheMembuf._calcIndexKey(type, guid, hash)); - if(entry != null) { - callback(null, { size: entry.size }); - } - else { - callback(new Error(`File not found for (${type}) ${guid.toString('hex')}-${hash.toString('hex')}`)) - } + + let p = (entry != null) + ? Promise.resolve({ size: entry.size }) + : Promise.reject(new Error(`File not found for (${type}) ${guid.toString('hex')}-${hash.toString('hex')}`)); + + return helpers.returnPromise(p, callback); } getFileStream(type, guid, hash, callback) { const self = this; - const entry = this._index.by('fileId', CacheMembuf._calcIndexKey(type, guid, hash)); - // noinspection EqualityComparisonWithCoercionJS (checking for null or undefined) - if(entry != null) { - const file = self._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size); - const stream = new Readable({ - read() { - if(this.didPush) - return this.push(null); - this.push(file); - this.didPush = true; - }, - - highWaterMark: file.length - }); + let p = new Promise((resolve, reject) => { + // noinspection EqualityComparisonWithCoercionJS (checking for null or undefined) + if(entry != null) { + const file = self._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size); + const stream = new Readable({ + read() { + if(this.didPush) + return this.push(null); + this.push(file); + this.didPush = true; + }, + + highWaterMark: file.length + }); - callback(null, stream); - } - else { - callback(new Error(`File not found for (${type}) ${guid.toString('hex')}-${hash.toString('hex')}`)); - } + resolve(stream); + } + else { + reject(new Error(`File not found for (${type}) ${guid.toString('hex')}-${hash.toString('hex')}`)); + } + }); + + return helpers.returnPromise(p, callback); } createPutTransaction(guid, hash, callback) { - callback(null, new PutTransactionMembuf(guid, hash)); + let p = Promise.resolve(new PutTransactionMembuf(guid, hash)); + return helpers.returnPromise(p, callback); } endPutTransaction(transaction, callback) { const self = this; - - this._waitForSerialize() - .then(() => { - return transaction.finalize(); - }) + + let p = this._waitForSerialize() + .then(() => transaction.finalize()) .then(() => { transaction.files.forEach(file => { self._addFileToCache(file.type, transaction.guid, transaction.hash, file.buffer); }); - - callback(); - }) - .catch(err => { - callback(err); }); + + return helpers.returnPromise(p, callback); } + + registerClusterWorker(worker) {} } class PutTransactionMembuf extends PutTransaction { constructor(guid, hash) { super(guid, hash); - this._streams = { a: {}, i: {}, r: {} }; + this._streams = {}; this._finished = []; } @@ -330,40 +356,57 @@ class PutTransactionMembuf extends PutTransaction { } finalize(callback) { - return super.finalize(callback); + let self = this; + let p = new Promise((resolve, reject) => { + self._finished = _.values(self._streams); + let ok = self._finished.every(file => { + return file.pos === file.buffer.length; + }); + + ok ? resolve() : reject(new Error("Transaction failed; file size mismatch")); + }); + + return helpers.returnPromise(p, callback); } getWriteStream(type, size, callback) { const self = this; - if(type !== 'a' && type !== 'i' && type !== 'r') { - return callback(new Error(`Unrecognized type '${type}' for transaction.`)); - } + let p = new Promise((resolve, reject) => { + if(typeof(size) !== 'number' || size <= 0) { + return reject(new Error("Invalid size for write stream")); + } - this._streams[type].buffer = Buffer.alloc(size, 0, 'ascii'); - this._streams[type].pos = 0; + if(type !== 'a' && type !== 'i' && type !== 'r') { + return reject(new Error(`Unrecognized type '${type}' for transaction.`)); + } - const stream = new Writable({ - write(chunk, encoding, callback) { - const file = self._streams[type]; + self._streams[type] = { + type: type, + buffer: Buffer.alloc(size, 0, 'ascii'), + pos: 0 + }; - if (file.buffer.length - file.pos >= chunk.length) { - chunk.copy(file.buffer, file.pos, 0, chunk.length); - file.pos += chunk.length; + const stream = new Writable({ + write(chunk, encoding, cb) { + const file = self._streams[type]; - if (file.pos === size) { - self._finished.push({type: type, buffer: file.buffer}); + if (file.buffer.length - file.pos >= chunk.length) { + chunk.copy(file.buffer, file.pos, 0, chunk.length); + file.pos += chunk.length; } + else { + helpers.log(consts.LOG_ERR, "Attempt to write over stream buffer allocation!"); + } + + cb(); } - else { - helpers.log(consts.LOG_ERR, "Attempt to write over stream buffer allocation!"); - } + }); - callback(); - } + resolve(stream); }); - callback(null, stream); + return helpers.returnPromise(p, callback); } } @@ -381,10 +424,11 @@ class PersistenceAdapter extends loki.LokiFsAdapter { self._cache._serializeInProgress = true; super.saveDatabase(dbName, dbString, function() { - self._cache._serialize(function() { - self._cache._serializeInProgress = false; - callback(); - }); + self._cache._serialize() + .then(() => { + self._cache._serializeInProgress = false; + callback(); + }); }); } } \ No newline at end of file diff --git a/lib/helpers.js b/lib/helpers.js index 4f4a77e..299a0ef 100644 --- a/lib/helpers.js +++ b/lib/helpers.js @@ -3,6 +3,23 @@ const consts = require("./constants"); let logLevel = consts.LOG_TEST; +/** + * + * @param {Promise} p + * @param {Function} cb + */ +exports.returnPromise = function(p, cb) { + if(typeof(cb) !== 'function') { + return p; + } + + p.then(result => { + result !== undefined ? cb(null, result) : cb(); + }).catch(err => { + cb(err); + }); +}; + /** * @returns {string} */ diff --git a/test/cache_api.js b/test/cache_api.js index 65f6bcd..d499df2 100644 --- a/test/cache_api.js +++ b/test/cache_api.js @@ -1,15 +1,17 @@ const assert = require('assert'); -const helpers = require('../lib/helpers'); -const consts = require('../lib/constants'); const tmp = require('tmp'); const loki = require('lokijs'); +const fs = require('fs-extra'); +const sleep = require('./test_utils').sleep; +const generateCommandData = require('./test_utils').generateCommandData; +const EventEmitter = require('events'); let test_modules = [ { - tmpDir: tmp.dirSync({unsafeCleanup: true}), name: "cache_membuf", path: "../lib/cache/cache_membuf", options: { + cachePath: tmp.tmpNameSync({}), initialPageSize: 10000, growPageSize: 10000, minFreeBlockSize: 1024, @@ -19,38 +21,142 @@ let test_modules = [ } }, { - tmpDir: tmp.dirSync({unsafeCleanup: true}), - name: "cache_membuf", + name: "cache_fs", path: "../lib/cache/cache_fs", - options: {} + options: { + cachePath: tmp.tmpNameSync({}) + } } ]; -describe("Cache API", function() { - test_modules.forEach(function (module) { - describe(module.name, function () { - describe("init", function() { - it("should create the cache working directory if it doesn't exist"); +describe("Cache API", () => { + test_modules.forEach(module => { + describe(module.name, () => { + let CacheModule, cache; + + before(() => { + /** @type {CacheBase} **/ + CacheModule = require(module.path); + cache = new CacheModule(); }); - describe("getFileInfo", function() { - it("should report the file size for a file that exists in the cache"); - it("should return an error for a file that does not exist in the cache"); + after(() => { + return fs.remove(module.options.cachePath); }); - describe("getFileStream", function() { - it("should return a readable stream for a file that exists in the cache"); - it("should return an error for a file that does not exist in the cache"); + describe("static get properties", () => { + it("should return an object with common property values", () => { + let props = CacheModule.properties; + assert(props.hasOwnProperty('clustering') && typeof(props['clustering']) === 'boolean'); + }); + }); + + describe("init", () => { + it("should create the cache working directory if it doesn't exist", () => { + return cache.init(module.options) + .then(() => fs.access(module.options.cachePath)); + }); + }); + + describe("registerClusterWorker", () => { + it("should return with no error", done => { + cache.registerClusterWorker(new EventEmitter()); + done(); + }); }); - describe("createPutTransaction", function() { - it("should return a PutTransaction object for the given file hash & guid"); + describe("shutdown", () => { + it("should return with no error", () => { + return cache.shutdown(); + }); + }); + + describe("createPutTransaction", () => { + let fileData; + + before(() => { + fileData = generateCommandData(1024, 1024); + }); + + it("should return a PutTransaction object for the given file hash & guid", () => { + return cache.createPutTransaction(fileData.guid, fileData.hash) + .then(trx => { + assert(trx.guid.compare(fileData.guid) === 0); + assert(trx.hash.compare(fileData.hash) === 0); + }); + }); + }); + + describe("endPutTransaction & getFileInfo", () => { + let fileData, trx; + + beforeEach(() => { + fileData = generateCommandData(1024, 1024); + return cache.createPutTransaction(fileData.guid, fileData.hash) + .then(result => { + trx = result; + }); + }); + + it("should call finalize on the transaction", () => { + let called = false; + trx.finalize = () => { + called = true; + return Promise.resolve(); + }; + + cache.endPutTransaction(trx).then(() => assert(called)); + }); + + it("should add info, asset, and resource files to the cache that were written to the transaction", () => { + return trx.getWriteStream('i', fileData.info.length) + .then(stream => stream.end(fileData.info)) + .then(() => cache.endPutTransaction(trx)) + .then(() => cache.getFileInfo('i', fileData.guid, fileData.hash)) + .then(info => assert(info.size === fileData.info.length)); + }); + + it("should return an error if any files were partially written to the transaction", () => { + return trx.getWriteStream('i', fileData.info.length) + .then(stream => stream.end(fileData.info.slice(0, 1))) + .then(() => cache.endPutTransaction(trx)) + .then(() => { throw new Error("Expected error!"); }, err => assert(err)); + }); + + it("should not add files to the cache that were partially written to the transaction", () => { + return trx.getWriteStream('i', fileData.info.length) + .then(stream => stream.end(fileData.info.slice(0, 1))) + .then(() => cache.endPutTransaction(trx)) + .then(() => {}, err => assert(err)) + .then(() => cache.getFileInfo('i', fileData.guid, fileData.hash)) + .then(() => { throw new Error("Expected error!"); }, err => assert(err)); + }); }); - describe("endPutTransaction", function() { - it("should call finalize on the transaction"); - it("should add info, asset, and resource files to the cache that were written to the transaction"); - it("should return an error if any files were partially written to the transaction"); + describe("getFileStream", function() { + + let fileData; + + beforeEach(() => { + fileData = generateCommandData(1024, 1024); + let trx; + return cache.createPutTransaction(fileData.guid, fileData.hash) + .then(result => { trx = result; }) + .then(() => trx.getWriteStream('i', fileData.info.length)) + .then(stream => stream.end(fileData.info)) + .then(() => cache.endPutTransaction(trx)) + .then(() => sleep(50)); + }); + + it("should return a readable stream for a file that exists in the cache", () => { + return cache.getFileStream('i', fileData.guid, fileData.hash) + .then(stream => assert(stream instanceof require('stream').Readable)); + }); + + it("should return an error for a file that does not exist in the cache", () => { + return cache.getFileStream('a', fileData.guid, fileData.hash) + .then(() => { throw new Error("Expected error!"); }, err => assert(err)); + }); }); }); }); @@ -59,24 +165,87 @@ describe("Cache API", function() { describe("PutTransaction API", function() { test_modules.forEach(function (module) { describe(module.name, function () { - describe("guid", function() { - it("should return the file guid for the transaction"); + let cache, fileData, trx; + + before(() => { + /** @type {CacheBase} **/ + let CacheModule = require(module.path); + cache = new CacheModule(); + fileData = generateCommandData(1024, 1024); }); - describe("hash", function() { - it("should return the file hash for the transaction"); + after(() => { + return fs.remove(module.options.cachePath); + }); + + beforeEach(() => { + return cache.createPutTransaction(fileData.guid, fileData.hash) + .then(result => { trx = result; }); + }); + + describe("get guid", function() { + it("should return the file guid for the transaction", () => { + assert(trx.guid === fileData.guid); + }); + }); + + describe("get hash", function() { + it("should return the file hash for the transaction", () => { + assert(trx.hash === fileData.hash); + }); + }); + + describe("get files", function() { + it("should return an empty array before finalize() is called", () => { + assert(trx.files.length === 0); + }); + + it("should return a list of objects that represent completed files for the transaction", () => { + return trx.getWriteStream('i', fileData.info.length) + .then(stream => stream.end(fileData.info)) + .then(() => trx.finalize()) + .then(() => assert(trx.files.length === 1)); + }); }); describe("finalize", function() { - it("should return an error if any file was not fully written"); - it("should return with no error and no value if the transaction was successfully finalized"); - it("should return a promise if no callback is supplied"); + it("should return an error if any file was not fully written", () => { + return trx.getWriteStream('i', fileData.info.length) + .then(stream => stream.end(fileData.info.slice(0, 1))) + .then(() => trx.finalize()) + .then(() => { throw new Error("Expected error!"); }, err => assert(err)); + }); + + it("should return with no error and no value if the transaction was successfully finalized", () => { + return trx.getWriteStream('i', fileData.info.length) + .then(stream => stream.end(fileData.info)) + .then(() => trx.finalize()) + }); }); describe("getWriteStream", function() { - it("should return a WritableStream for the given file type"); - it("should only accept types of 'i', 'a', or 'r"); - it("should only accept size > 0"); + it("should return a WritableStream for the given file type", () => { + return trx.getWriteStream('i', 1) + .then(stream => assert(stream instanceof require('stream').Writable)); + }); + + it("should only accept types of 'i', 'a', or 'r", () => { + return trx.getWriteStream('i', 1) + .then(() => trx.getWriteStream('a', 1)) + .then(() => trx.getWriteStream('r', 1)) + .then(() => trx.getWriteStream('x', 1)) + .then(() => { throw new Error("Expected error!"); }, err => assert(err)); + }); + + it("should return an error for size equal to 0", () => { + return trx.getWriteStream('i', 0) + .then(() => { throw new Error("Expected error!"); }, err => assert(err)) + }); + + it("should return an error for size less than 0", () => { + return trx.getWriteStream('i', -1) + .then(() => { throw new Error("Expected error!"); }, err => assert(err)) + }); }); }); }); diff --git a/test/cache_base.js b/test/cache_base.js new file mode 100644 index 0000000..23fdd8e --- /dev/null +++ b/test/cache_base.js @@ -0,0 +1,170 @@ +const tmp = require('tmp'); +const fs = require('fs-extra'); +const { CacheBase, PutTransaction } = require('../lib/cache/cache_base'); +const assert = require('assert'); +const _ = require('lodash'); +const path = require('path'); +const randomBuffer = require('./test_utils').randomBuffer; +const consts = require('../lib/constants'); + +describe("Cache: Base Class", () => { + let cache; + + let opts = { + cachePath: tmp.tmpNameSync({}).toString(), + }; + + beforeEach(() => { + cache = new CacheBase(); + }); + + describe("static get properties", () => { + it("should return an empty object", () => { + assert(_.isEmpty(CacheBase.properties)); + }); + }); + + describe("get _optionsPath", () => { + it("should return 'Cache.options'", () => { + assert(cache._optionsPath === 'Cache.options'); + }); + }); + + describe("get _options", () => { + it("should return an object with options for all built-in cache modules", () => { + let cacheOptions = cache._options; + assert(typeof(cacheOptions) === 'object'); + assert(cacheOptions.hasOwnProperty('cache_fs')); + assert(cacheOptions.hasOwnProperty('cache_membuf')); + }); + + it("should apply option overrides", () => { + cache._optionOverrides = { + $testVal: { nested: { option: true } } + }; + + let cacheOptions = cache._options; + assert(cacheOptions.hasOwnProperty('$testVal')); + assert(cacheOptions.$testVal.nested.option === true); + }); + }); + + describe("get _cachePath", () => { + it("should return null if there is no cachePath option set", () => { + assert(cache._cachePath === null); + }); + + it("should return the exact value of cachePath if cachePath is an absolute path", () => { + cache._optionOverrides = opts; + assert(cache._cachePath === opts.cachePath); + }); + + it("should return a subdirectory path relative to the app root if cachePath is not an abosolute path", () => { + cache._optionOverrides = { + cachePath: "abc123" + }; + + assert(cache._cachePath === path.join(path.dirname(require.main.filename), "abc123")); + }); + }); + + describe("init", () => { + + after(() => { + return fs.remove(opts.cachePath); + }); + + it("should create the cache working directory if it doesn't exist", () => { + return cache.init(opts) + .then(() => fs.access(opts.cachePath)); + }); + }); + + describe("shutdown", () => { + it("should require override implementation in subclasses by returning an error", () => { + return cache.shutdown() + .then(() => { throw new Error("Expected error!"); }, () => {}); + }); + }); + + describe("getFileInfo", () => { + it("should require override implementation in subclasses by returning an error", () => { + return cache.getFileInfo() + .then(() => { throw new Error("Expected error!"); }, () => {}); + }); + }); + + describe("getFileStream", () => { + it("should require override implementation in subclasses by returning an error", () => { + return cache.getFileStream() + .then(() => { throw new Error("Expected error!"); }, () => {}); + }); + }); + + describe("createPutTransaction", () => { + it("should require override implementation in subclasses by returning an error", () => { + return cache.createPutTransaction() + .then(() => { throw new Error("Expected error!"); }, () => {}); + }); + }); + + describe("endPutTransaction", () => { + it("should require override implementation in subclasses by returning an error", () => { + return cache.endPutTransaction() + .then(() => { throw new Error("Expected error!"); }, () => {}); + }); + }); + + describe("registerClusterWorker", () => { + it("should require override implementation in subclasses by returning an error", () => { + let error; + try { + cache.registerClusterWorker(); + } + catch(err) { + error = err; + } + finally { + assert(error); + } + }); + }); +}); + +describe("PutTransaction: Base Class", () => { + let guid = randomBuffer(consts.GUID_SIZE); + let hash = randomBuffer(consts.HASH_SIZE); + let trx = new PutTransaction(guid, hash); + + describe("get guid", () => { + it("should return the guid passed to the constructor", () => { + assert(guid.compare(trx.guid) === 0); + }); + }); + + describe("get hash", () => { + it("should return the hash passed to the constructor", () => { + assert(hash.compare(trx.hash) === 0); + }); + }); + + describe("get files", () => { + it("should return an empty array", () => { + assert(trx.files.length === 0); + }); + }); + + describe("finalize", () => { + it("should require override implementation in subclasses by returning an error", () => { + return trx.finalize() + .then(() => { throw new Error("Expected error!"); }, () => {}); + }); + }); + + describe("getWriteStream", () => { + it("should require override implementation in subclasses by returning an error", () => { + return trx.getWriteStream() + .then(() => { throw new Error("Expected error!"); }, () => {}); + }); + }); +}); \ No newline at end of file diff --git a/test/cache_membuf.js b/test/cache_membuf.js index c3d95b3..77c942e 100644 --- a/test/cache_membuf.js +++ b/test/cache_membuf.js @@ -1,41 +1,216 @@ +const tmp = require('tmp'); +const fs = require('fs-extra'); +const Cache = require('../lib/cache/cache_membuf'); +const randomBuffer = require('./test_utils').randomBuffer; +const generateCommandData = require('./test_utils').generateCommandData; +const path = require('path'); const assert = require('assert'); -const helpers = require('../lib/helpers'); -const consts = require('../lib/constants'); - -describe("Cache: Membuf", function() { - describe("_allocPage", function() { +describe("Cache: Membuf", () => { + + function dirtyPages() { + return cache._pageMeta.chain() + .find({'dirty' : true}).data() + .map(page => page.index); + } + + function writeFileDataToCache(fileData) { + cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); + cache._addFileToCache('a', fileData.guid, fileData.hash, fileData.bin); + cache._addFileToCache('r', fileData.guid, fileData.hash, fileData.resource); + } + + let opts = { + cachePath: tmp.tmpNameSync({}).toString(), + initialPageSize: 1024 * 10, + growPageSize: 1024 * 10, + minFreeBlockSize: 1024, + persistenceOptions: { + autosave: false + } + }; + + let cache; + let fileData = generateCommandData(1024 * 5); + + describe("Public API", () => { + + beforeEach(() => { + cache = new Cache(); + }); + + afterEach(() => { + return fs.remove(opts.cachePath); + }); + + describe("init", () => { + it("should initialize the _db object", () => { + return cache.init(opts).then(() => assert(cache._db !== null)); + }); + + it("should initialize an empty cache if no database was loaded from disk", () => { + return cache.init(opts) + .then(() => { + assert(cache._pageMeta.count() === 1); + let index = cache._index.findOne({}); + assert(index !== null); + assert(index.size === opts.initialPageSize); + assert(index.pageOffset === 0); + }); + }); + + it("should populate the _index and _pageMeta when a saved database is loaded from disk", () => { + return cache.init(opts) + .then(() => { cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info);} ) + .then(() => cache.shutdown()) + .then(() => cache.init(opts)) + .then(() => { + assert(cache._pageMeta.count() === 1); + assert(cache._index.count() === 2); + }); + }); + }); + + describe("endPutTransaction", () => { + it("it should wait for a database save in-progress to complete before ending the transaction", () => { + let trx; + + let ok = false; + cache.on('waitForSerialize', () => { + ok = true; + cache._serializeInProgress = false; + }); + + cache._serializeInProgress = true; + return cache.init(opts) + .then(() => cache.createPutTransaction(fileData.guid, fileData.hash)) + .then(result => { trx = result; }) + .then(() => trx.getWriteStream('i', fileData.info.length)) + .then(stream => stream.end(fileData.info)) + .then(() => cache.endPutTransaction(trx)) + .then(() => assert(ok)); + }); + }); + + describe("shutdown", () => { + it("should serialize the database and page files to disk before returning", () => { + let pages; + return cache.init(opts) + .then(() => { cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); }) + .then(() => { + pages = dirtyPages(); + assert(pages.length === 1); + }) + .then(() => cache.shutdown()) + .then(() => fs.access(cache._dbPath)) + .then(() => fs.readdir(opts.cachePath)) + .then(dir => assert(dir.includes(pages[0]))); + }); + }); }); - describe("_findFreeBlock", function() { - + describe("_serialize", () => { + + beforeEach(() => { + cache = new Cache(); + return cache.init(opts).then(() => writeFileDataToCache(fileData)); + }); + + afterEach(() => { + cache._clearCache(); + return fs.remove(opts.cachePath); + }); + + it("should write only dirty page files to disk", () => { + let testDir = (dir, dirty) => { + assert(dirty.every(entry => dir.includes(entry))); + assert(dir.every(entry => dirty.includes(entry))); + }; + + let dirty = dirtyPages(); + return Promise.resolve() + // Serialize the cache + .then(() => cache._serialize()) + // Read the cache dir and compare file list to expected dirty pages + .then(() => fs.readdir(opts.cachePath)) + .then(dir => testDir(dir, dirty)) + // Remove all files from the cache dir + .then(() => fs.emptyDir(opts.cachePath)) + // Replace a single file + .then(() => cache._addFileToCache('i', fileData.guid, fileData.hash, randomBuffer(fileData.info.length))) + // Store the dirty page list again + .then(() => { dirty = dirtyPages(); }) + // Serialize the cache again + .then(() => cache._serialize()) + // Re-compare cache dir contents to expected dirty pages + .then(() => fs.readdir(opts.cachePath)) + .then(dir => testDir(dir, dirty)); + }); }); - describe("_reserveBlock", function() { - + describe("_deserialize", () => { + + beforeEach(() => { + cache = new Cache(); + return cache.init(opts) + .then(() => writeFileDataToCache(fileData)) + .then(() => cache._serialize()); + }); + + afterEach(() => { + cache._clearCache(); + return fs.remove(opts.cachePath); + }); + + it("should load all page files from the cachePath", () => { + let pageMeta = cache._pageMeta.chain().find({}).data(); + let pageData = cache._pages; + + // artificially clear out the page array before de-serializing + cache._pages = []; + + return cache._deserialize() + .then(() => { + let ok = pageMeta.every(page => { + return Buffer.compare(cache._pages[page.index], pageData[page.index]) === 0; + }); + + assert(ok); + }); + }); + + it("should throw an error if the page file size doesn't match the expected size", () => { + return fs.readdir(opts.cachePath) + .then(dir => { + assert(dir.length > 0); + return fs.truncate(path.join(opts.cachePath, dir[0])) + }) + .then(() => cache._deserialize()) + .then(() => { throw new Error("Expected error!"); }, err => assert(err)); + }); }); - describe("_waitForSerialize", function() { + describe("_allocPage", () => { }); - describe("_addFileToCache", function() { + describe("_findFreeBlock", () => { }); - describe("_serialize", function() { + describe("_reserveBlock", () => { }); - describe("_deserialize", function() { + describe("_waitForSerialize", () => { }); - describe("_clearCache", function() { + describe("_addFileToCache", () => { }); - describe("_initDb", function() { + describe("_clearCache", () => { }); }); \ No newline at end of file diff --git a/test/protocol.js b/test/protocol.js index 3689b43..78d1389 100644 --- a/test/protocol.js +++ b/test/protocol.js @@ -7,8 +7,6 @@ const CacheServer = require('../lib/server'); const CacheServerResponseTransform = require('./../lib/client/server_response_transform.js'); const loki = require('lokijs'); const tmp = require('tmp'); -const { before, beforeEach, after } = require('mocha'); - const generateCommandData = require('./test_utils').generateCommandData; const encodeCommand = require('./test_utils').encodeCommand; const sleep = require('./test_utils').sleep; @@ -48,8 +46,6 @@ describe("Protocol", function() { }); before(function (done) { - - /** @type {CacheBase} **/ let CacheModule = require(module.path); cache = new CacheModule(); @@ -237,7 +233,7 @@ describe("Protocol", function() { this.slow(1000); const self = this; - self.data = generateCommandData(5000000, 6000000); + self.data = generateCommandData(); before(function (done) { client = net.connect({port: server.port}, function (err) { diff --git a/test/server.js b/test/server.js index e991963..0301e92 100644 --- a/test/server.js +++ b/test/server.js @@ -3,9 +3,7 @@ const net = require('net'); const helpers = require('../lib/helpers'); const consts = require('../lib/constants'); const CacheServer = require('../lib/server'); -const Cache = require('../lib/cache/cache').CacheBase; -const { before, beforeEach, after } = require('mocha'); - +const Cache = require('../lib/cache/cache_base').CacheBase; const sleep = require('./test_utils').sleep; const cmd = require('./test_utils').cmd; diff --git a/test/test_utils.js b/test/test_utils.js index 7f92ebb..532e371 100644 --- a/test/test_utils.js +++ b/test/test_utils.js @@ -6,6 +6,13 @@ const helpers = require('../lib/helpers'); const MIN_BLOB_SIZE = 64; const MAX_BLOB_SIZE = 2048; +function randomBuffer(size) { + return Buffer.from(crypto.randomBytes(size).toString('ascii'), 'ascii') +} + +exports.randomBuffer = randomBuffer; + + exports.generateCommandData = function(minSize, maxSize) { minSize = minSize || MIN_BLOB_SIZE; maxSize = maxSize || MAX_BLOB_SIZE; @@ -13,11 +20,11 @@ exports.generateCommandData = function(minSize, maxSize) { function getSize() { return minSize + Math.floor(Math.random() * (maxSize - minSize)); } return { - guid: Buffer.from(crypto.randomBytes(consts.GUID_SIZE).toString('ascii'), 'ascii'), - hash: Buffer.from(crypto.randomBytes(consts.HASH_SIZE).toString('ascii'), 'ascii'), - bin: Buffer.from(crypto.randomBytes(getSize()).toString('ascii'), 'ascii'), - info: Buffer.from(crypto.randomBytes(getSize()).toString('ascii'), 'ascii'), - resource: Buffer.from(crypto.randomBytes(getSize()).toString('ascii'), 'ascii') + guid: randomBuffer(consts.GUID_SIZE), + hash: randomBuffer(consts.HASH_SIZE), + bin: randomBuffer(getSize()), + info: randomBuffer(getSize()), + resource: randomBuffer(getSize()) } }; From 7f85798450e756f680cac36960a533242139eb99 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Thu, 28 Dec 2017 17:23:14 -0600 Subject: [PATCH 20/89] Removed callback support and converted to pure promises in the cache API --- lib/cache/cache_base.js | 34 +++---- lib/cache/cache_fs.js | 45 ++++----- lib/cache/cache_membuf.js | 46 +++------ lib/helpers.js | 17 ---- lib/server/command_processor.js | 169 +++++++++++++++----------------- main.js | 52 +++++----- test/cache_base.js | 4 +- test/cache_membuf.js | 2 +- test/protocol.js | 55 +++++------ 9 files changed, 178 insertions(+), 246 deletions(-) diff --git a/lib/cache/cache_base.js b/lib/cache/cache_base.js index 9f1e77d..d1a3e9f 100644 --- a/lib/cache/cache_base.js +++ b/lib/cache/cache_base.js @@ -39,78 +39,68 @@ class CacheBase extends EventEmitter { /** * * @param {Object} options - * @param {Function?} callback * @returns {Promise} */ - init(options, callback) { + init(options) { if(typeof(options) === 'object') this._optionOverrides = options; if(cluster.isMaster) { const p = this._cachePath; helpers.log(consts.LOG_INFO, `Cache path is ${p}`); - return helpers.returnPromise(fs.mkdirs(p), callback); + return fs.mkdirs(p); } else { - return helpers.returnPromise(Promise.resolve(), callback); + return Promise.resolve(); } } - // noinspection JSMethodCanBeStatic /** * - * @param {Function?} callback + * @returns {Promise} */ - shutdown(callback) { + shutdown() { return Promise.reject(new Error("Not implemented")); } - // noinspection JSMethodCanBeStatic /** * * @param {String} type * @param {Buffer} guid * @param {Buffer} hash - * @param {Function?} callback * @returns {Promise} */ - getFileInfo(type, guid, hash, callback) { + getFileInfo(type, guid, hash) { return Promise.reject(new Error("Not implemented")); } - // noinspection JSMethodCanBeStatic /** * * @param {String} type * @param {Buffer} guid * @param {Buffer} hash - * @param {Function?} callback * @returns {Promise} */ - getFileStream(type, guid, hash, callback) { + getFileStream(type, guid, hash) { return Promise.reject(new Error("Not implemented")); } - // noinspection JSMethodCanBeStatic /** * * @param {Buffer} guid * @param {Buffer} hash - * @param {Function?} callback * @returns {Promise} */ - createPutTransaction(guid, hash, callback) { + createPutTransaction(guid, hash) { return Promise.reject(new Error("Not implemented")); } - // noinspection JSMethodCanBeStatic /** * * @param {PutTransaction} transaction - * @param {Function?} callback * @returns {Promise} */ - endPutTransaction(transaction, callback) { + endPutTransaction(transaction) { return Promise.reject(new Error("Not implemented")); } @@ -155,10 +145,9 @@ class PutTransaction { get files() { return []; } /** * - * @param {Function?} callback * @returns {Promise} */ - finalize(callback) { + finalize() { return Promise.reject(new Error("Not implemented")); } @@ -166,10 +155,9 @@ class PutTransaction { * * @param {String} type * @param {Number} size - * @param {Function} callback * @returns {Promise} */ - getWriteStream(type, size, callback) { + getWriteStream(type, size) { return Promise.reject(new Error("Not implemented")); } } diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 2f1ac67..2592109 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -5,6 +5,7 @@ const path = require('path'); const fs = require('fs-extra'); const uuid = require('uuid'); const _ = require('lodash'); +const consts = require('../constants'); class CacheFS extends CacheBase { constructor() { @@ -45,17 +46,16 @@ class CacheFS extends CacheBase { return super._optionsPath + ".cache_fs"; } - init(options, callback) { - let p = super.init(options); - return helpers.returnPromise(p, callback); + init(options) { + return super.init(options); } - shutdown(callback) { - return helpers.returnPromise(Promise.resolve(), callback); + shutdown() { + return Promise.resolve(); } - getFileInfo(type, guid, hash, callback) { - let p = new Promise((resolve, reject) => { + getFileInfo(type, guid, hash) { + return new Promise((resolve, reject) => { fs.stat(this._calcFilepath(type, guid, hash)) .then(stats => { resolve({size: stats.size}); @@ -64,30 +64,26 @@ class CacheFS extends CacheBase { reject(err); }); }); - - return helpers.returnPromise(p, callback); } - getFileStream(type, guid, hash, callback) { + getFileStream(type, guid, hash) { let stream = fs.createReadStream(this._calcFilepath(type, guid, hash)); - let p = new Promise((resolve, reject) => { + return new Promise((resolve, reject) => { stream.on('open', () => { resolve(stream); }).on('error', err => { + helpers.log(consts.LOG_ERR, err); reject(stream); }); }); - - return helpers.returnPromise(p, callback); } - createPutTransaction(guid, hash, callback) { - let p = Promise.resolve(new PutTransactionFS(guid, hash, this._cachePath)); - return helpers.returnPromise(p, callback); + createPutTransaction(guid, hash) { + return Promise.resolve(new PutTransactionFS(guid, hash, this._cachePath)); } - endPutTransaction(transaction, callback) { + endPutTransaction(transaction) { let self = this; function moveFile(file) { @@ -95,11 +91,9 @@ class CacheFS extends CacheBase { return fs.move(file.file, filePath, { overwrite: true }); } - let p = transaction.finalize().then(() => { + return transaction.finalize().then(() => { return Promise.all(transaction.files.map(moveFile)); }); - - return helpers.returnPromise(p, callback); } registerClusterWorker(worker) {} @@ -175,16 +169,15 @@ class PutTransactionFS extends PutTransaction { return this._files; } - finalize(callback) { - let p = this._closeAllStreams(); - return helpers.returnPromise(p, callback); + finalize() { + return this._closeAllStreams(); } - getWriteStream(type, size, callback) { + getWriteStream(type, size) { let self = this; let file = path.join(this._cachePath, uuid()); - let p = new Promise((resolve, reject) => { + return new Promise((resolve, reject) => { if(typeof(size) !== 'number' || size <= 0) { return reject(new Error("Invalid size for write stream")); } @@ -211,8 +204,6 @@ class PutTransactionFS extends PutTransaction { reject(err); }); }); - - return helpers.returnPromise(p, callback); } } diff --git a/lib/cache/cache_membuf.js b/lib/cache/cache_membuf.js index 7a31f33..1a3b744 100644 --- a/lib/cache/cache_membuf.js +++ b/lib/cache/cache_membuf.js @@ -261,10 +261,10 @@ class CacheMembuf extends CacheBase { return save(); } - init(options, callback) { + init(options) { const self = this; - let p = super.init(options) + return super.init(options) .then(() => { let dbOpts = self._options.persistenceOptions || {}; if(!dbOpts.hasOwnProperty('adapter') || dbOpts.adapter === null) { @@ -273,31 +273,26 @@ class CacheMembuf extends CacheBase { return self._initDb(dbOpts); }); - - return helpers.returnPromise(p, callback); } - shutdown(callback) { + shutdown() { let close = promisify(this._db.close).bind(this._db); - let p = this._saveDb().then(() => close()); - return helpers.returnPromise(p, callback); + return this._saveDb().then(() => close()); } - getFileInfo(type, guid, hash, callback) { + getFileInfo(type, guid, hash) { const entry = this._index.by('fileId', CacheMembuf._calcIndexKey(type, guid, hash)); - let p = (entry != null) + return (entry != null) ? Promise.resolve({ size: entry.size }) : Promise.reject(new Error(`File not found for (${type}) ${guid.toString('hex')}-${hash.toString('hex')}`)); - - return helpers.returnPromise(p, callback); } - getFileStream(type, guid, hash, callback) { + getFileStream(type, guid, hash) { const self = this; const entry = this._index.by('fileId', CacheMembuf._calcIndexKey(type, guid, hash)); - let p = new Promise((resolve, reject) => { + return new Promise((resolve, reject) => { // noinspection EqualityComparisonWithCoercionJS (checking for null or undefined) if(entry != null) { const file = self._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size); @@ -318,27 +313,22 @@ class CacheMembuf extends CacheBase { reject(new Error(`File not found for (${type}) ${guid.toString('hex')}-${hash.toString('hex')}`)); } }); - - return helpers.returnPromise(p, callback); } - createPutTransaction(guid, hash, callback) { - let p = Promise.resolve(new PutTransactionMembuf(guid, hash)); - return helpers.returnPromise(p, callback); + createPutTransaction(guid, hash) { + return Promise.resolve(new PutTransactionMembuf(guid, hash)); } - endPutTransaction(transaction, callback) { + endPutTransaction(transaction) { const self = this; - let p = this._waitForSerialize() + return this._waitForSerialize() .then(() => transaction.finalize()) .then(() => { transaction.files.forEach(file => { self._addFileToCache(file.type, transaction.guid, transaction.hash, file.buffer); }); }); - - return helpers.returnPromise(p, callback); } registerClusterWorker(worker) {} @@ -355,9 +345,9 @@ class PutTransactionMembuf extends PutTransaction { return this._finished; } - finalize(callback) { + finalize() { let self = this; - let p = new Promise((resolve, reject) => { + return new Promise((resolve, reject) => { self._finished = _.values(self._streams); let ok = self._finished.every(file => { return file.pos === file.buffer.length; @@ -365,14 +355,12 @@ class PutTransactionMembuf extends PutTransaction { ok ? resolve() : reject(new Error("Transaction failed; file size mismatch")); }); - - return helpers.returnPromise(p, callback); } - getWriteStream(type, size, callback) { + getWriteStream(type, size) { const self = this; - let p = new Promise((resolve, reject) => { + return new Promise((resolve, reject) => { if(typeof(size) !== 'number' || size <= 0) { return reject(new Error("Invalid size for write stream")); } @@ -405,8 +393,6 @@ class PutTransactionMembuf extends PutTransaction { resolve(stream); }); - - return helpers.returnPromise(p, callback); } } diff --git a/lib/helpers.js b/lib/helpers.js index 299a0ef..4f4a77e 100644 --- a/lib/helpers.js +++ b/lib/helpers.js @@ -3,23 +3,6 @@ const consts = require("./constants"); let logLevel = consts.LOG_TEST; -/** - * - * @param {Promise} p - * @param {Function} cb - */ -exports.returnPromise = function(p, cb) { - if(typeof(cb) !== 'function') { - return p; - } - - p.then(result => { - result !== undefined ? cb(null, result) : cb(); - }).catch(err => { - cb(err); - }); -}; - /** * @returns {string} */ diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index ff4db07..a954acc 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -89,10 +89,7 @@ class CommandProcessor extends Duplex { */ _read() { this._readReady = true; - let self = this; - Promise.resolve().then(() => { - self._read_internal(); - }); + Promise.resolve().then(() => this._read_internal()); } /** @@ -118,34 +115,33 @@ class CommandProcessor extends Duplex { self._isReading = true; self._readStartTime = Date.now(); - this[kCache].getFileStream(file.type, file.guid, file.hash, function(err, stream) { - if(err) { - helpers.log(consts.LOG_ERR, err); - self._isReading = false; - return; - } - - function readChunk() { - if(!self._readReady) { - return setImmediate(readChunk); + this[kCache].getFileStream(file.type, file.guid, file.hash) + .then(stream => { + function readChunk() { + if(!self._readReady) { + return setImmediate(readChunk); + } + + let chunk = stream.read(); + if(chunk !== null) { + self._readReady = self.push(chunk, 'ascii'); + self._sendFileQueueChunkReads++; + self._sendFileQueueReadBytes += chunk.length; + } + else { + self[kSendFileQueue].shift(); + self._isReading = false; + self._sendFileQueueReadDuration += Date.now() - self._readStartTime; + self._read(); + } } - let chunk = stream.read(); - if(chunk !== null) { - self._readReady = self.push(chunk, 'ascii'); - self._sendFileQueueChunkReads++; - self._sendFileQueueReadBytes += chunk.length; - } - else { - self[kSendFileQueue].shift(); - self._isReading = false; - self._sendFileQueueReadDuration += Date.now() - self._readStartTime; - self._read(); - } - } - - stream.on('readable', readChunk); - }); + stream.on('readable', readChunk); + }) + .catch(err => { + helpers.log(consts.LOG_ERR, err); + self._isReading = false; + }); } /** @@ -220,7 +216,7 @@ class CommandProcessor extends Duplex { * @private */ _handleCommand(data, callback) { - let cmd, size, type, guid, hash = null; + let p, cmd, size, type, guid, hash = null; if(data.length > 1) { cmd = data.slice(0, 2).toString('ascii'); type = cmd[1]; @@ -233,37 +229,37 @@ class CommandProcessor extends Duplex { size = helpers.readUInt64(data.slice(2)); } } - else if(data.length > 0) { - cmd = data.toString('ascii'); - } else { - return callback(); + cmd = data.toString('ascii'); } switch(cmd) { case 'q': this._quit(); this._readState = null; + p = Promise.resolve(); break; case 'ga': case 'gi': case 'gr': - this._onGet(type, guid, hash, callback); + p = this._onGet(type, guid, hash); break; case 'ts': - this._onTransactionStart(guid, hash, callback); + p = this._onTransactionStart(guid, hash); break; case 'te': - this._onTransactionEnd(callback); + p = this._onTransactionEnd(); break; case 'pa': case 'pi': case 'pr': - this._onPut(type, size, callback); + p = this._onPut(type, size); break; default: - callback(new Error(`Unrecognized command '${cmd}`)); + p = Promise.reject(new Error(`Unrecognized command '${cmd}`)); } + + p.then(() => callback(), err => callback(err)); } /** @@ -271,21 +267,13 @@ class CommandProcessor extends Duplex { * @param {String} type * @param {Buffer} guid * @param {Buffer} hash - * @param {Function} callback + * @returns {Promise} * @private */ - _onGet(type, guid, hash, callback) { + _onGet(type, guid, hash) { let self = this; - this[kCache].getFileInfo(type, guid, hash, function(err, result) { - - if(err || result === null) { - let resp = Buffer.from(`-${type}`, 'ascii'); - self[kSendFileQueue].push({ - exists: false, - header: Buffer.concat([resp, guid, hash], 34) - }); - } - else { + return this[kCache].getFileInfo(type, guid, hash) + .then(result => { let resp = Buffer.from(`+${type}${helpers.encodeInt64(result.size)}`, 'ascii'); self[kSendFileQueue].push({ exists: true, @@ -298,24 +286,29 @@ class CommandProcessor extends Duplex { self._sendFileQueueCount++; helpers.log(consts.LOG_DBG, `Adding file to send queue, size ${result.size}`); - } - - if(self[kSendFileQueue].length === 1) { - self._read(self._readState.highWaterMark); - } - - callback(null); - }); + }) + .catch(() => { + let resp = Buffer.from(`-${type}`, 'ascii'); + self[kSendFileQueue].push({ + exists: false, + header: Buffer.concat([resp, guid, hash], 34) + }); + }) + .then(() => { + if(self[kSendFileQueue].length === 1) { + self._read(self._readState.highWaterMark); + } + }); } /** * * @param {Buffer} guid * @param {Buffer} hash - * @param {Function} callback + * @returns {Promise} * @private */ - _onTransactionStart(guid, hash, callback) { + _onTransactionStart(guid, hash) { const self = this; if(this._trx !== null) { @@ -323,60 +316,52 @@ class CommandProcessor extends Duplex { this._trx = null; } - this[kCache].createPutTransaction(guid, hash, function(err, trx) { - if(err) { - return callback(err); - } - - helpers.log(consts.LOG_DBG, `Start transaction for ${guid.toString('hex')}-${hash.toString('hex')}`); - self._trx = trx; - callback(null); - }); + return this[kCache].createPutTransaction(guid, hash) + .then(trx => { + helpers.log(consts.LOG_DBG, `Start transaction for ${guid.toString('hex')}-${hash.toString('hex')}`); + self._trx = trx; + }); } /** * - * @param {Function} callback + * @returns {Promise} * @private */ - _onTransactionEnd(callback) { + _onTransactionEnd() { const self = this; if(!this._trx) { - return callback(new Error("Invalid transaction isolation")); + return Promise.reject(new Error("Invalid transaction isolation")); } - this[kCache].endPutTransaction(this._trx, function(err) { - helpers.log(consts.LOG_DBG, `End transaction for ${self._trx.guid.toString('hex')}-${self._trx.hash.toString('hex')}`); - self._trx = null; - callback(err); - }); + return this[kCache].endPutTransaction(this._trx) + .then(() => { + helpers.log(consts.LOG_DBG, `End transaction for ${self._trx.guid.toString('hex')}-${self._trx.hash.toString('hex')}`); + self._trx = null; + }); } /** * * @param {String} type * @param {Number} size - * @param {Function} callback + * @returns {Promise} * @private */ - _onPut(type, size, callback) { + _onPut(type, size) { const self = this; if(!this._trx) { - return callback(new Error("Not in a transaction")); + return Promise.reject(new Error("Not in a transaction")); } - this._trx.getWriteStream(type, size, function(err, stream) { - if(err) { - return callback(err); - } - - self._putStream = stream; - self._putSize = size; - self._readState = kReadStatePutStream; - callback(null); - }); + return this._trx.getWriteStream(type, size) + .then(stream => { + self._putStream = stream; + self._putSize = size; + self._readState = kReadStatePutStream; + }); } } diff --git a/main.js b/main.js index 2cd3669..d68e25d 100644 --- a/main.js +++ b/main.js @@ -71,35 +71,35 @@ let cacheOpts = {}; if(program.cachePath !== null) cacheOpts.cachePath = program.cachePath; -Cache.init(cacheOpts, function(error) { - if(error) { - helpers.log(consts.LOG_ERR, error); - process.exit(1); - } - - server = new CacheServer(Cache, program.port); - - if(cluster.isMaster) { - helpers.log(consts.LOG_INFO, `Cache Server version ${consts.VERSION}; Cache module ${program.cacheModule}`); +Cache.init(cacheOpts) + .then(() => { + server = new CacheServer(Cache, program.port); + + if(cluster.isMaster) { + helpers.log(consts.LOG_INFO, `Cache Server version ${consts.VERSION}; Cache module ${program.cacheModule}`); + + if(program.workers === 0) { + server.Start(errHandler, function () { + helpers.log(consts.LOG_INFO, `Cache Server ready on port ${server.port}`); + startPrompt(); + }); + } - if(program.workers === 0) { + for(let i = 0; i < program.workers; i++) { + const worker = cluster.fork(); + Cache.registerClusterWorker(worker); + } + } + else { server.Start(errHandler, function () { - helpers.log(consts.LOG_INFO, `Cache Server ready on port ${server.port}`); - startPrompt(); + helpers.log(consts.LOG_INFO, `Cache Server worker ${cluster.worker.id} ready on port ${server.port}`); }); } - - for(let i = 0; i < program.workers; i++) { - const worker = cluster.fork(); - Cache.registerClusterWorker(worker); - } - } - else { - server.Start(errHandler, function () { - helpers.log(consts.LOG_INFO, `Cache Server worker ${cluster.worker.id} ready on port ${server.port}`); - }); - } -}); + }) + .catch(err => { + helpers.log(consts.LOG_ERR, err); + process.exit(1); + }); function startPrompt() { prompt.message = ""; @@ -122,7 +122,7 @@ function startPrompt() { switch(result.command) { case 'q': helpers.log(consts.LOG_INFO, "Shutting down ..."); - Cache.shutdown(function () { + Cache.shutdown().then(() => { server.Stop(); process.exit(0); }); diff --git a/test/cache_base.js b/test/cache_base.js index 23fdd8e..166ddba 100644 --- a/test/cache_base.js +++ b/test/cache_base.js @@ -11,7 +11,7 @@ describe("Cache: Base Class", () => { let cache; let opts = { - cachePath: tmp.tmpNameSync({}).toString(), + cachePath: tmp.tmpNameSync({}), }; beforeEach(() => { @@ -163,7 +163,7 @@ describe("PutTransaction: Base Class", () => { describe("getWriteStream", () => { it("should require override implementation in subclasses by returning an error", () => { - return trx.getWriteStream() + return trx.getWriteStream('i', 0) .then(() => { throw new Error("Expected error!"); }, () => {}); }); }); diff --git a/test/cache_membuf.js b/test/cache_membuf.js index 77c942e..619923c 100644 --- a/test/cache_membuf.js +++ b/test/cache_membuf.js @@ -21,7 +21,7 @@ describe("Cache: Membuf", () => { } let opts = { - cachePath: tmp.tmpNameSync({}).toString(), + cachePath: tmp.tmpNameSync({}), initialPageSize: 1024 * 10, growPageSize: 1024 * 10, minFreeBlockSize: 1024, diff --git a/test/protocol.js b/test/protocol.js index 78d1389..a117e36 100644 --- a/test/protocol.js +++ b/test/protocol.js @@ -52,13 +52,13 @@ describe("Protocol", function() { module.options.cachePath = module.tmpDir.name; - cache.init(module.options, function() { - server = new CacheServer(cache, 0); - - server.Start(function (err) { - assert(!err, "Cache Server reported error! " + err); - }, done); - }); + cache.init(module.options) + .then(() => { + server = new CacheServer(cache, 0); + server.Start(err => { + assert(!err, "Cache Server reported error! " + err); + }, done); + }); }); after(function() { @@ -151,23 +151,22 @@ describe("Protocol", function() { tests.forEach(function (test) { it("should store " + test.ext + " data with a (" + test.cmd + ") cmd", function (done) { - client.on('close', function () { - cache.getFileInfo(test.cmd[1], self.data.guid, self.data.hash, function(err, info) { - assert(!err, err); - assert(info.size === self.data[test.ext].length); - cache.getFileStream(test.cmd[1], self.data.guid, self.data.hash, function (err, stream) { - assert(!err, err); - assert(stream !== null); - + client.on('close', () => { + cache.getFileInfo(test.cmd[1], self.data.guid, self.data.hash) + .then(info => { + assert(info.size === self.data[test.ext].length); + return cache.getFileStream(test.cmd[1], self.data.guid, self.data.hash); + }) + .then(stream => { stream.on("readable", function () { const chunk = stream.read(); // should only be one in this test assert(self.data[test.ext].compare(chunk) === 0); done(); }); + }) + .catch(err => { + done(err); }); - }); - - }); const buf = Buffer.from( @@ -200,22 +199,22 @@ describe("Protocol", function() { it("should replace an existing file with the same guid and hash", function (done) { const asset = Buffer.from(crypto.randomBytes(self.data.bin.length).toString('ascii'), 'ascii'); - client.on('close', function () { - cache.getFileInfo('a', self.data.guid, self.data.hash, function(err, info) { - assert(!err, err); - assert(info.size === asset.length); - - cache.getFileStream('a', self.data.guid, self.data.hash, function (err, stream) { - assert(!err, err); - assert(stream !== null); - + client.on('close', () => { + cache.getFileInfo('a', self.data.guid, self.data.hash) + .then(info => { + assert(info.size === asset.length); + return cache.getFileStream('a', self.data.guid, self.data.hash); + }) + .then(stream => { stream.on("readable", function () { const chunk = stream.read(); // should only be one in this test assert(asset.compare(chunk) === 0); done(); }); + }) + .catch(err => { + done(err); }); - }); }); client.write( From a5c7beda58ee6daf081b9f0c8115f689ca4180e4 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Sat, 30 Dec 2017 10:02:07 -0600 Subject: [PATCH 21/89] Refactored protocol tests to work with much larger generated binary files; Added tests to cover writing both small and large packets to the server for GET and PUT operations --- test/cache_api.js | 4 +- test/cache_membuf.js | 9 +- test/protocol.js | 241 +++++++++++++++++-------------------------- test/test_utils.js | 82 +++++++++++++++ 4 files changed, 184 insertions(+), 152 deletions(-) diff --git a/test/cache_api.js b/test/cache_api.js index d499df2..2f29f8a 100644 --- a/test/cache_api.js +++ b/test/cache_api.js @@ -93,9 +93,7 @@ describe("Cache API", () => { beforeEach(() => { fileData = generateCommandData(1024, 1024); return cache.createPutTransaction(fileData.guid, fileData.hash) - .then(result => { - trx = result; - }); + .then(result => { trx = result; }); }); it("should call finalize on the transaction", () => { diff --git a/test/cache_membuf.js b/test/cache_membuf.js index 619923c..e287dc9 100644 --- a/test/cache_membuf.js +++ b/test/cache_membuf.js @@ -6,6 +6,9 @@ const generateCommandData = require('./test_utils').generateCommandData; const path = require('path'); const assert = require('assert'); +const MIN_FILE_SIZE = 1024 * 5; +const MAX_FILE_SIZE = MIN_FILE_SIZE; + describe("Cache: Membuf", () => { function dirtyPages() { @@ -22,8 +25,8 @@ describe("Cache: Membuf", () => { let opts = { cachePath: tmp.tmpNameSync({}), - initialPageSize: 1024 * 10, - growPageSize: 1024 * 10, + initialPageSize: MIN_FILE_SIZE * 2, + growPageSize: MIN_FILE_SIZE * 2, minFreeBlockSize: 1024, persistenceOptions: { autosave: false @@ -31,7 +34,7 @@ describe("Cache: Membuf", () => { }; let cache; - let fileData = generateCommandData(1024 * 5); + let fileData = generateCommandData(MIN_FILE_SIZE, MAX_FILE_SIZE); describe("Public API", () => { diff --git a/test/protocol.js b/test/protocol.js index a117e36..a2b0777 100644 --- a/test/protocol.js +++ b/test/protocol.js @@ -9,9 +9,16 @@ const loki = require('lokijs'); const tmp = require('tmp'); const generateCommandData = require('./test_utils').generateCommandData; const encodeCommand = require('./test_utils').encodeCommand; -const sleep = require('./test_utils').sleep; const expectLog = require('./test_utils').expectLog; const cmd = require('./test_utils').cmd; +const clientWrite = require('./test_utils').clientWrite; +const readStream = require('./test_utils').readStream; +const getClientPromise = require('./test_utils').getClientPromise; + +const MIN_FILE_SIZE = 1024; +const MAX_FILE_SIZE = 1024 * 1024 * 10; +const SMALL_PACKET_SIZE = 256; +const LARGE_PACKET_SIZE = 1024 * 16; let cache, server, client; @@ -21,8 +28,8 @@ let test_modules = [ name: "cache_membuf", path: "../lib/cache/cache_membuf", options: { - initialPageSize: 10000, - growPageSize: 10000, + initialPageSize: MAX_FILE_SIZE * 2, + growPageSize: MAX_FILE_SIZE, minFreeBlockSize: 1024, persistenceOptions: { adapter: new loki.LokiMemoryAdapter() @@ -71,15 +78,15 @@ describe("Protocol", function() { const self = this; before(function() { - self.data = generateCommandData(); + self.data = generateCommandData(MIN_FILE_SIZE, MAX_FILE_SIZE); }); - beforeEach(function (done) { - client = net.connect({port: server.port}, function (err) { - assert(!err, err); - client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); - done(err); - }); + beforeEach(() => { + return getClientPromise(server.port) + .then(c => { + client = c; + return clientWrite(c, helpers.encodeInt32(consts.PROTOCOL_VERSION)); + }); }); it("should start a transaction with the (ts) command", function (done) { @@ -107,7 +114,7 @@ describe("Protocol", function() { it("should require a transaction start (te) command before a put command", function(done) { expectLog(client, /Not in a transaction/, done); - client.write(encodeCommand(cmd.putAsset, null, null, self.data.bin)); + client.write(encodeCommand(cmd.putAsset, null, null, 'abc')); }); it("should close the socket on an invalid transaction command", function(done) { @@ -122,109 +129,64 @@ describe("Protocol", function() { const self = this; before(function () { - self.data = generateCommandData(); + self.data = generateCommandData(MIN_FILE_SIZE, MAX_FILE_SIZE); }); - beforeEach(function (done) { - client = net.connect({port: server.port}, function (err) { - assert(!err); + beforeEach(() => { + return getClientPromise(server.port) + .then(c => { + client = c; - // The Unity client always sends the version once on-connect. i.e., the version should not be pre-pended - // to other request data in the tests below. - client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); - done(); - }); + // The Unity client always sends the version once on-connect. i.e., the version should not be pre-pended + // to other request data in the tests below. + return clientWrite(c, helpers.encodeInt32(consts.PROTOCOL_VERSION)); + }); }); it("should close the socket on an invalid PUT type", function (done) { expectLog(client, /Unrecognized command/i, done); - client.write( + let buf = Buffer.from( encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + - encodeCommand("px", null, null, self.data.bin)); + encodeCommand("px", null, null, 'abc'), 'ascii'); + + client.write(buf); }); const tests = [ - {ext: 'bin', cmd: cmd.putAsset}, - {ext: 'info', cmd: cmd.putInfo}, - {ext: 'resource', cmd: cmd.putResource} + {ext: 'bin', cmd: cmd.putAsset, packetSize: SMALL_PACKET_SIZE}, + {ext: 'info', cmd: cmd.putInfo, packetSize: SMALL_PACKET_SIZE}, + {ext: 'resource', cmd: cmd.putResource, packetSize: SMALL_PACKET_SIZE}, + {ext: 'bin', cmd: cmd.putAsset, packetSize: LARGE_PACKET_SIZE}, + {ext: 'info', cmd: cmd.putInfo, packetSize: LARGE_PACKET_SIZE}, + {ext: 'resource', cmd: cmd.putResource, packetSize: LARGE_PACKET_SIZE} ]; tests.forEach(function (test) { - it("should store " + test.ext + " data with a (" + test.cmd + ") cmd", function (done) { - client.on('close', () => { - cache.getFileInfo(test.cmd[1], self.data.guid, self.data.hash) - .then(info => { - assert(info.size === self.data[test.ext].length); - return cache.getFileStream(test.cmd[1], self.data.guid, self.data.hash); - }) - .then(stream => { - stream.on("readable", function () { - const chunk = stream.read(); // should only be one in this test - assert(self.data[test.ext].compare(chunk) === 0); - done(); - }); - }) - .catch(err => { - done(err); - }); - }); - + it(`should store ${test.ext} data with a (${test.cmd}) command (client write packet size = ${test.packetSize})`, () => { const buf = Buffer.from( encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + encodeCommand(test.cmd, null, null, self.data[test.ext]) + encodeCommand(cmd.transactionEnd), 'ascii'); - let sentBytes = 0; - - function sendBytesAsync() { - setTimeout(() => { - const packetSize = Math.min(buf.length - sentBytes, Math.ceil(Math.random() * 10)); - client.write(buf.slice(sentBytes, sentBytes + packetSize), function () { - sentBytes += packetSize; - if (sentBytes < buf.length) - return sendBytesAsync(); - else - sleep(50).then(() => { - client.end(); - }); - }); - }, 1); - } - - sendBytesAsync(); - + return clientWrite(client, buf, test.packetSize) + .then(() => cache.getFileStream(test.cmd[1], self.data.guid, self.data.hash)) + .then(stream => readStream(stream, self.data[test.ext].length)) + .then(data => assert(self.data[test.ext].compare(data) === 0)); }); }); - it("should replace an existing file with the same guid and hash", function (done) { + it("should replace an existing file with the same guid and hash ", () => { const asset = Buffer.from(crypto.randomBytes(self.data.bin.length).toString('ascii'), 'ascii'); - client.on('close', () => { - cache.getFileInfo('a', self.data.guid, self.data.hash) - .then(info => { - assert(info.size === asset.length); - return cache.getFileStream('a', self.data.guid, self.data.hash); - }) - .then(stream => { - stream.on("readable", function () { - const chunk = stream.read(); // should only be one in this test - assert(asset.compare(chunk) === 0); - done(); - }); - }) - .catch(err => { - done(err); - }); - }); - - client.write( + const buf = Buffer.from( encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + encodeCommand(cmd.putAsset, null, null, asset) + - encodeCommand(cmd.transactionEnd)); + encodeCommand(cmd.transactionEnd), 'ascii'); - sleep(50).then(() => { - client.end(); - }); + return clientWrite(client, buf) + .then(() => cache.getFileStream('a', self.data.guid, self.data.hash)) + .then(stream => readStream(stream, asset.length)) + .then(buffer => assert(asset.compare(buffer) === 0)); }); }); @@ -232,72 +194,73 @@ describe("Protocol", function() { this.slow(1000); const self = this; - self.data = generateCommandData(); - - before(function (done) { - client = net.connect({port: server.port}, function (err) { - assert(!err); - client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); - client.write(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); - client.write(encodeCommand(cmd.putAsset, null, null, self.data.bin)); - client.write(encodeCommand(cmd.putInfo, null, null, self.data.info)); - client.write(encodeCommand(cmd.putResource, null, null, self.data.resource)); - client.write(cmd.transactionEnd); - client.end(cmd.quit); - client.on('close', done); - }); + self.data = generateCommandData(MIN_FILE_SIZE, MAX_FILE_SIZE); + + before(() => { + const buf = Buffer.from( + helpers.encodeInt32(consts.PROTOCOL_VERSION) + + encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + + encodeCommand(cmd.putAsset, null, null, self.data.bin) + + encodeCommand(cmd.putInfo, null, null, self.data.info) + + encodeCommand(cmd.putResource, null, null, self.data.resource) + + encodeCommand(cmd.transactionEnd) + + encodeCommand(cmd.quit), 'ascii'); + + return getClientPromise(server.port) + .then(c => { + client = c; + return clientWrite(c, buf); + }); }); - beforeEach(function (done) { - client = net.connect({port: server.port}, function (err) { - assert(!err); + beforeEach(() => { + return getClientPromise(server.port) + .then(c => { + client = c; - // The Unity client always sends the version once on-connect. i.e., the version should not be pre-pended - // to other request data in the tests below. - client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); - done(); - }); + // The Unity client always sends the version once on-connect. i.e., the version should not be pre-pended + // to other request data in the tests below. + return clientWrite(c, helpers.encodeInt32(consts.PROTOCOL_VERSION)); + }); }); it("should close the socket on an invalid GET type", function (done) { expectLog(client, /Unrecognized command/i, done); - client.write(encodeCommand('gx', self.data.guid, self.data.hash)); + clientWrite(client, encodeCommand('gx', self.data.guid, self.data.hash)).catch(err => done(err)); }); const tests = [ - {cmd: cmd.getAsset, blob: self.data.bin, type: 'bin'}, - {cmd: cmd.getInfo, blob: self.data.info, type: 'info'}, - {cmd: cmd.getResource, blob: self.data.resource, type: 'resource'} + {cmd: cmd.getAsset, blob: self.data.bin, type: 'bin', packetSize: 1}, + {cmd: cmd.getInfo, blob: self.data.info, type: 'info', packetSize: 1}, + {cmd: cmd.getResource, blob: self.data.resource, type: 'resource', packetSize: 1}, + {cmd: cmd.getAsset, blob: self.data.bin, type: 'bin', packetSize: LARGE_PACKET_SIZE}, + {cmd: cmd.getInfo, blob: self.data.info, type: 'info', packetSize: LARGE_PACKET_SIZE}, + {cmd: cmd.getResource, blob: self.data.resource, type: 'resource', packetSize: LARGE_PACKET_SIZE} ]; - it("should respond with not found (-) for missing files", function (done) { - let count = 0; + tests.forEach(function (test) { - client.pipe(new CacheServerResponseTransform()) - .on('header', function (header) { - assert(header.cmd === '-' + tests[count].cmd[1]); - count++; - if(count === 3) done(); - }); + it(`should respond with not found (-) for missing ${test.type} files (client write packet size = ${test.packetSize})`, function (done) { + client.pipe(new CacheServerResponseTransform()) + .on('header', function (header) { + assert(header.cmd === '-' + test.cmd[1]); + done(); + }); - const badGuid = Buffer.allocUnsafe(consts.GUID_SIZE).fill(0); - const badHash = Buffer.allocUnsafe(consts.HASH_SIZE).fill(0); + const badGuid = Buffer.allocUnsafe(consts.GUID_SIZE).fill(0); + const badHash = Buffer.allocUnsafe(consts.HASH_SIZE).fill(0); - tests.forEach(function(test) { - client.write(encodeCommand(test.cmd, badGuid, badHash)); + clientWrite(client, encodeCommand(test.cmd, badGuid, badHash), test.packetSize) + .catch(err => done(err)); }); - }); - - tests.forEach(function (test) { - it("should retrieve stored " + test.type + " data with the (" + test.cmd + ") command", function (done) { + it(`should retrieve stored ${test.type} data with the (${test.cmd}) command (write packet size = ${test.packetSize})`, function (done) { let dataBuf; let pos = 0; let resp = new CacheServerResponseTransform(); - resp - .on('header', function (header) { + resp.on('header', function (header) { assert(header.cmd === '+' + test.cmd[1]); assert(header.guid.compare(self.data.guid) === 0, "GUID does not match"); assert(header.hash.compare(self.data.hash) === 0, "HASH does not match"); @@ -318,21 +281,7 @@ describe("Protocol", function() { const buf = Buffer.from(encodeCommand(test.cmd, self.data.guid, self.data.hash), 'ascii'); - let sentBytes = 0; - - function sendBytesAsync() { - setTimeout(() => { - const packetSize = Math.min(buf.length - sentBytes, Math.ceil(Math.random() * 10)); - client.write(buf.slice(sentBytes, sentBytes + packetSize), function () { - sentBytes += packetSize; - if (sentBytes < buf.length) - return sendBytesAsync(); - }); - }, 1); - } - - sendBytesAsync(); - + clientWrite(client, buf, test.packetSize).catch(err => done(err)); }); }); }); diff --git a/test/test_utils.js b/test/test_utils.js index 532e371..b42d860 100644 --- a/test/test_utils.js +++ b/test/test_utils.js @@ -2,9 +2,12 @@ const assert = require('assert'); const crypto = require('crypto'); const consts = require('../lib/constants'); const helpers = require('../lib/helpers'); +const net = require('net'); const MIN_BLOB_SIZE = 64; const MAX_BLOB_SIZE = 2048; +const MIN_PACKET_SIZE = 1024 * 16; +const WRITE_RESOLVE_DELAY = 100; function randomBuffer(size) { return Buffer.from(crypto.randomBytes(size).toString('ascii'), 'ascii') @@ -66,6 +69,85 @@ exports.sleep = function(ms) { return new Promise((resolve) => setTimeout(resolve, ms)); }; +exports.clientWrite = function(client, data, minPacketSize, maxPacketSize) { + return new Promise((resolve, reject) => { + let sentBytes = 0; + + client.once('close', () => { + if(sentBytes < data.length) + reject(new Error("Client closed before write finished")); + }); + + if(typeof(minPacketSize) !== 'number') { + minPacketSize = MIN_PACKET_SIZE; + } + + if(typeof(maxPacketSize) !== 'number' || maxPacketSize < minPacketSize) { + maxPacketSize = minPacketSize; + } + + function packetSize() { + return Math.ceil(minPacketSize + (Math.random() * maxPacketSize - minPacketSize)); + } + + function write() { + let ok = true; + while(ok && sentBytes < data.length) { + let len = Math.min(data.length - sentBytes, packetSize()); + ok = client.write(data.slice(sentBytes, sentBytes + len)); + sentBytes += len; + } + + if (sentBytes === data.length) { + client.removeListener('drain', write); + setTimeout(resolve, WRITE_RESOLVE_DELAY); + } + } + + client.on('drain', write); + write(); + }); +}; + +/** + * + * @param stream + * @param size + * @returns {Promise} + */ +exports.readStream = function(stream, size) { + return new Promise((resolve, reject) => { + let pos = 0; + let buffer = Buffer.alloc(size, 0, 'ascii'); + stream.on('data', data => { + if(pos + data.length <= size) { + data.copy(buffer, pos); + pos += data.length; + } + else { + reject(new Error("Stream size exceeds buffer size allocation")); + } + }); + + stream.on('end', () => { + resolve(buffer); + }); + }); +}; + +exports.getClientPromise = function(port) { + return new Promise((resolve, reject) => { + let client = net.createConnection(port); + client.once('connect', () => { + resolve(client); + }); + + client.once('error', err => { + reject(err); + }); + }); +}; + exports.cmd = { quit: "q", getAsset: "ga", From 9495f5c7e8247d74669db38f665f07e3d89c539b Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Thu, 11 Jan 2018 11:50:50 -0600 Subject: [PATCH 22/89] =?UTF-8?q?implement=20the=20=E2=80=9CUnity=20way?= =?UTF-8?q?=E2=80=9D=20of=20serializing=20a=20GUID=20string=20to=20its=20h?= =?UTF-8?q?ex=20representation,=20which=20involves=20swapping=20the=20hex?= =?UTF-8?q?=20pairs=20for=20each=20byte.=20Other=20minor=20fixes.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 3 +- lib/cache/cache_fs.js | 6 ++- lib/cache/cache_membuf.js | 12 +++-- ...ransform.js => server_stream_processor.js} | 4 +- lib/helpers.js | 26 +++++++++ lib/server/client_stream_processor.js | 54 +++++++++---------- lib/server/command_processor.js | 24 ++++++--- test/helpers.js | 31 +++++++++++ test/protocol.js | 8 +-- 9 files changed, 118 insertions(+), 50 deletions(-) rename lib/client/{server_response_transform.js => server_stream_processor.js} (97%) create mode 100644 test/helpers.js diff --git a/.gitignore b/.gitignore index 760398f..801e058 100644 --- a/.gitignore +++ b/.gitignore @@ -3,4 +3,5 @@ cache5.0/ node_modules/ .coveralls.yml !lib/cache -.cache_membuf/ \ No newline at end of file +.cache_membuf/ +.cache_fs/ diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 2592109..b3cd17f 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -27,7 +27,8 @@ class CacheFS extends CacheBase { * @private */ static _calcFilename(type, guid, hash) { - return `${guid.toString('hex')}-${hash.toString('hex')}.${type}`; + const ext = { 'i': 'info', 'a': 'bin', 'r': 'resource' }[type]; + return `${helpers.GUIDBufferToString(guid)}-${hash.toString('hex')}.${ext}`; } /** @@ -39,7 +40,8 @@ class CacheFS extends CacheBase { * @private */ _calcFilepath(type, guid, hash) { - return path.join(this._cachePath, CacheFS._calcFilename(type, guid, hash)); + let fileName = CacheFS._calcFilename(type, guid, hash); + return path.join(this._cachePath, fileName.substr(0, 2), fileName); } get _optionsPath() { diff --git a/lib/cache/cache_membuf.js b/lib/cache/cache_membuf.js index 1a3b744..517ad0a 100644 --- a/lib/cache/cache_membuf.js +++ b/lib/cache/cache_membuf.js @@ -37,7 +37,7 @@ class CacheMembuf extends CacheBase { * @private */ static _calcIndexKey(type, guid, hash) { - return `${guid.toString('hex')}-${hash.toString('hex')}-${type}`; + return `${helpers.GUIDBufferToString(guid)}-${hash.toString('hex')}-${type}`; } get _optionsPath() { @@ -281,16 +281,18 @@ class CacheMembuf extends CacheBase { } getFileInfo(type, guid, hash) { - const entry = this._index.by('fileId', CacheMembuf._calcIndexKey(type, guid, hash)); + const key = CacheMembuf._calcIndexKey(type, guid, hash); + const entry = this._index.by('fileId', key); return (entry != null) ? Promise.resolve({ size: entry.size }) - : Promise.reject(new Error(`File not found for (${type}) ${guid.toString('hex')}-${hash.toString('hex')}`)); + : Promise.reject(new Error(`File not found for ${key}`)); } getFileStream(type, guid, hash) { const self = this; - const entry = this._index.by('fileId', CacheMembuf._calcIndexKey(type, guid, hash)); + const key = CacheMembuf._calcIndexKey(type, guid, hash); + const entry = this._index.by('fileId', key); return new Promise((resolve, reject) => { // noinspection EqualityComparisonWithCoercionJS (checking for null or undefined) @@ -310,7 +312,7 @@ class CacheMembuf extends CacheBase { resolve(stream); } else { - reject(new Error(`File not found for (${type}) ${guid.toString('hex')}-${hash.toString('hex')}`)); + reject(new Error(`File not found for ${key}`)); } }); } diff --git a/lib/client/server_response_transform.js b/lib/client/server_stream_processor.js similarity index 97% rename from lib/client/server_response_transform.js rename to lib/client/server_stream_processor.js index 01ae251..e045166 100644 --- a/lib/client/server_response_transform.js +++ b/lib/client/server_stream_processor.js @@ -4,7 +4,7 @@ const Transform = require('stream').Transform; const MAX_HEADER_SIZE = consts.CMD_SIZE + consts.SIZE_SIZE + consts.ID_SIZE; -class CacheServerResponseTransform extends Transform { +class ServerStreamProcessor extends Transform { constructor() { super(); this.headerBuf = Buffer.allocUnsafe(MAX_HEADER_SIZE); @@ -144,4 +144,4 @@ class CacheServerResponseTransform extends Transform { } } -module.exports = CacheServerResponseTransform; \ No newline at end of file +module.exports = ServerStreamProcessor; \ No newline at end of file diff --git a/lib/helpers.js b/lib/helpers.js index 4f4a77e..5551373 100644 --- a/lib/helpers.js +++ b/lib/helpers.js @@ -3,6 +3,32 @@ const consts = require("./constants"); let logLevel = consts.LOG_TEST; +reverseByte = (b) => ((b & 0x0F) << 4) | ((b >> 4) & 0x0F); + +/** + * Reverses the hex digits for each byte in a GUID before converting to a string, the same way Unity serializes GUIDs to strings. + * For example Buffer[10ab7cac5ef26c6e7ec6060be64419fc] => "01bac7cae52fc6e6e76c60b06e4491cf" + * @param {Buffer} guidBuffer + * @returns {String} + */ +exports.GUIDBufferToString = function(guidBuffer) { + if(!guidBuffer || guidBuffer.length !== 16) throw new Error("Invalid GUID input"); + return guidBuffer.reduce((result, curVal) => result + reverseByte(curVal).toString(16).padStart(2, '0'), ''); +}; + +/** + * + * @param {String} guidString + * @returns {Buffer} + * @constructor + */ +exports.GUIDStringToBuffer = function(guidString) { + if(typeof(guidString) !== 'string' || guidString.length !== 32) throw new Error("Invalid GUID String input"); + let buf = Buffer.from(guidString, 'hex'); + buf.forEach((val, i) => buf[i] = reverseByte(buf[i])); + return buf; +}; + /** * @returns {string} */ diff --git a/lib/server/client_stream_processor.js b/lib/server/client_stream_processor.js index d143acd..63374a3 100644 --- a/lib/server/client_stream_processor.js +++ b/lib/server/client_stream_processor.js @@ -32,7 +32,6 @@ class ClientStreamProcessor extends Transform { _init() { this.readState = { - didReadCmd: false, doReadSize: false, doReadId: false, dataPassThrough: false, @@ -120,43 +119,42 @@ class ClientStreamProcessor extends Transform { while(!isDone()) { // Read command - if (!this.readState.didReadCmd) { - if(!fillBufferWithData()) { - // Quit? - if (data[data.length - 1] === CMD_QUIT) { - this.errState = ClientStreamProcessor.errorCodes.quitError; - } + if(!fillBufferWithData()) { - break; + // Quit? + if (data[data.length - 1] === CMD_QUIT) { + this.errState = ClientStreamProcessor.errorCodes.quitError; } - this.readState.didReadCmd = true; + break; + } + - const cmd = this.headerBuf.slice(0, consts.CMD_SIZE).toString('ascii'); + const cmd = this.headerBuf.slice(0, consts.CMD_SIZE).toString('ascii'); - switch (cmd[0]) { - case 'g': // get + switch (cmd[0]) { + case 'g': // get + this.readState.doReadId = true; + this.readState.headerSize += consts.ID_SIZE; + break; + case 'p': // put + this.readState.doReadSize = true; + this.readState.headerSize += consts.SIZE_SIZE; + break; + case 't': // transaction + if(cmd[1] === 's') { this.readState.doReadId = true; this.readState.headerSize += consts.ID_SIZE; - break; - case 'p': // put - this.readState.doReadSize = true; - this.readState.headerSize += consts.SIZE_SIZE; - break; - case 't': // transaction - if(cmd[1] === 's') { - this.readState.doReadId = true; - this.readState.headerSize += consts.ID_SIZE; - } - - break; - default: - this.errState = new Error("Unrecognized command, aborting!"); - break; - } + } + + break; + default: + this.errState = new Error("Unrecognized command, aborting!"); + break; } + if(!fillBufferWithData()) { break; } diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index a954acc..fa78ded 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -36,6 +36,7 @@ class CommandProcessor extends Duplex { this._sendFileQueueReadDuration = 0; this._sendFileQueueReadBytes = 0; this._sendFileQueueCount = 0; + this._sendFileQueueSentCount = 0; this._isReading = false; this._readReady = true; this._registerEventListeners(); @@ -102,11 +103,7 @@ class CommandProcessor extends Duplex { let self = this; let file = self[kSendFileQueue][0]; - if (file.header !== null) { - let header = file.header; - file.header = null; - self._readReady = self.push(header, 'ascii'); - } + self._readReady = self.push(file.header, 'ascii'); if(!file.exists) { self[kSendFileQueue].shift(); @@ -117,7 +114,14 @@ class CommandProcessor extends Duplex { self._readStartTime = Date.now(); this[kCache].getFileStream(file.type, file.guid, file.hash) .then(stream => { + function readTimeout() { + helpers.log(consts.LOG_ERR, "Timeout waiting for readable! Trying to read again"); + setImmediate(readChunk); + } + function readChunk() { + self.readFileTimeout && clearTimeout(self.readFileTimeout); + if(!self._readReady) { return setImmediate(readChunk); } @@ -130,10 +134,14 @@ class CommandProcessor extends Duplex { } else { self[kSendFileQueue].shift(); + self._sendFileQueueSentCount++; self._isReading = false; self._sendFileQueueReadDuration += Date.now() - self._readStartTime; self._read(); } + + if(self[kSendFileQueue].length > 0) + self.readFileTimeout = setTimeout(readTimeout, 5000); } stream.on('readable', readChunk); @@ -151,7 +159,7 @@ class CommandProcessor extends Duplex { if(this._sendFileQueueReadDuration > 0) { let totalTime = this._sendFileQueueReadDuration / 1000; let throughput = (this._sendFileQueueReadBytes / totalTime).toFixed(2); - helpers.log(consts.LOG_INFO, `Sent ${this._sendFileQueueCount} files (${this._sendFileQueueChunkReads} chunks) totaling ${filesize(this._sendFileQueueReadBytes)} in ${totalTime} seconds (${filesize(throughput)}/sec)`); + helpers.log(consts.LOG_INFO, `Sent ${this._sendFileQueueSentCount} of ${this._sendFileQueueCount} requested files (${this._sendFileQueueChunkReads} chunks) totaling ${filesize(this._sendFileQueueReadBytes)} in ${totalTime} seconds (${filesize(throughput)}/sec)`); } } @@ -318,7 +326,7 @@ class CommandProcessor extends Duplex { return this[kCache].createPutTransaction(guid, hash) .then(trx => { - helpers.log(consts.LOG_DBG, `Start transaction for ${guid.toString('hex')}-${hash.toString('hex')}`); + helpers.log(consts.LOG_DBG, `Start transaction for GUID: ${helpers.GUIDBufferToString(guid)} Hash: ${hash.toString('hex')}`); self._trx = trx; }); } @@ -337,7 +345,7 @@ class CommandProcessor extends Duplex { return this[kCache].endPutTransaction(this._trx) .then(() => { - helpers.log(consts.LOG_DBG, `End transaction for ${self._trx.guid.toString('hex')}-${self._trx.hash.toString('hex')}`); + helpers.log(consts.LOG_DBG, `End transaction for GUID: ${helpers.GUIDBufferToString(self._trx.guid)} Hash: ${self._trx.hash.toString('hex')}`); self._trx = null; }); } diff --git a/test/helpers.js b/test/helpers.js new file mode 100644 index 0000000..eafd7c6 --- /dev/null +++ b/test/helpers.js @@ -0,0 +1,31 @@ +const assert = require('assert'); +const helpers = require('../lib/helpers'); + +describe("Helper functions", () => { + const guid = Buffer.from([80,127,95,145,103,153,135,123,185,19,13,54,122,207,246,26]); + const guidStr = "05f7f519769978b79b31d063a7fc6fa1"; + + describe("GUIDBufferToString", () => { + it("should convert a 16 byte buffer to a hex representation that matches Unity's string formatter for GUIDs", () => { + assert(helpers.GUIDBufferToString(guid) === guidStr); + }); + + it("should throw an error if the input is not a buffer or the wrong length", () => { + assert.throws(helpers.GUIDBufferToString.bind(null, null), Error); + assert.throws(helpers.GUIDBufferToString.bind(null, Buffer.from([])), Error); + assert.throws(helpers.GUIDBufferToString.bind(null, Buffer.alloc(17, 0)), Error); + }); + }); + + describe("GUIDStringToBuffer", () => { + it("should convert a 32 character hex string that represents a Unity GUID to an equivalent byte buffer", () => { + assert(guid.compare(helpers.GUIDStringToBuffer(guidStr)) === 0); + + }); + it("should throw an error if the input value is not a string or is the wrong length", () => { + assert.throws(helpers.GUIDStringToBuffer.bind(null, null)); + assert.throws(helpers.GUIDStringToBuffer.bind(null, '')); + assert.throws(helpers.GUIDStringToBuffer.bind(null, guidStr + 'x')); + }); + }); +}); \ No newline at end of file diff --git a/test/protocol.js b/test/protocol.js index a2b0777..469722d 100644 --- a/test/protocol.js +++ b/test/protocol.js @@ -4,7 +4,7 @@ const crypto = require('crypto'); const helpers = require('../lib/helpers'); const consts = require('../lib/constants'); const CacheServer = require('../lib/server'); -const CacheServerResponseTransform = require('./../lib/client/server_response_transform.js'); +const CacheServerResponseTransform = require('../lib/client/server_stream_processor.js'); const loki = require('lokijs'); const tmp = require('tmp'); const generateCommandData = require('./test_utils').generateCommandData; @@ -16,8 +16,8 @@ const readStream = require('./test_utils').readStream; const getClientPromise = require('./test_utils').getClientPromise; const MIN_FILE_SIZE = 1024; -const MAX_FILE_SIZE = 1024 * 1024 * 10; -const SMALL_PACKET_SIZE = 256; +const MAX_FILE_SIZE = 1024 * 1024; +const SMALL_PACKET_SIZE = 16; const LARGE_PACKET_SIZE = 1024 * 16; let cache, server, client; @@ -124,7 +124,7 @@ describe("Protocol", function() { }); describe("PUT requests", function () { - this.slow(1500); + this.slow(5000); const self = this; From f28223ca83a611d778a7f94dacb71ffa07239137 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Thu, 11 Jan 2018 13:14:01 -0600 Subject: [PATCH 23/89] Correct handling of readable event when streaming file from cache, and remove temporary workaround for stalled readable events --- lib/server/command_processor.js | 34 +++++++++++++++------------------ 1 file changed, 15 insertions(+), 19 deletions(-) diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index fa78ded..0e479da 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -114,37 +114,33 @@ class CommandProcessor extends Duplex { self._readStartTime = Date.now(); this[kCache].getFileStream(file.type, file.guid, file.hash) .then(stream => { - function readTimeout() { - helpers.log(consts.LOG_ERR, "Timeout waiting for readable! Trying to read again"); - setImmediate(readChunk); - } - function readChunk() { - self.readFileTimeout && clearTimeout(self.readFileTimeout); - if(!self._readReady) { return setImmediate(readChunk); } - let chunk = stream.read(); - if(chunk !== null) { + let chunk; + while(chunk = stream.read()) { self._readReady = self.push(chunk, 'ascii'); self._sendFileQueueChunkReads++; self._sendFileQueueReadBytes += chunk.length; - } - else { - self[kSendFileQueue].shift(); - self._sendFileQueueSentCount++; - self._isReading = false; - self._sendFileQueueReadDuration += Date.now() - self._readStartTime; - self._read(); - } - if(self[kSendFileQueue].length > 0) - self.readFileTimeout = setTimeout(readTimeout, 5000); + if(!self._readReady) { + setImmediate(readChunk); + break; + } + } } stream.on('readable', readChunk); + + stream.on('end', () => { + self[kSendFileQueue].shift(); + self._sendFileQueueSentCount++; + self._isReading = false; + self._sendFileQueueReadDuration += Date.now() - self._readStartTime; + self._read(); + }) }) .catch(err => { helpers.log(consts.LOG_ERR, err); From e8a811f13b670cfe73cd76d702938b25ecbb989d Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 16 Jan 2018 09:27:10 -0600 Subject: [PATCH 24/89] Made PutTransaction extend EventEmitter, and added a 'finalize' event when finalize is called --- lib/cache/cache_base.js | 6 ++++-- lib/cache/cache_fs.js | 2 +- lib/cache/cache_membuf.js | 2 +- test/cache_api.js | 5 +++++ test/cache_base.js | 7 ++++--- 5 files changed, 15 insertions(+), 7 deletions(-) diff --git a/lib/cache/cache_base.js b/lib/cache/cache_base.js index d1a3e9f..8265d0a 100644 --- a/lib/cache/cache_base.js +++ b/lib/cache/cache_base.js @@ -114,7 +114,7 @@ class CacheBase extends EventEmitter { } } -class PutTransaction { +class PutTransaction extends EventEmitter { /** * @@ -122,6 +122,7 @@ class PutTransaction { * @param {Buffer} hash */ constructor(guid, hash) { + super(); this._guid = guid; this._hash = hash; } @@ -143,12 +144,13 @@ class PutTransaction { * @returns {Array} */ get files() { return []; } + /** * * @returns {Promise} */ finalize() { - return Promise.reject(new Error("Not implemented")); + return Promise.resolve().then(() => this.emit('finalize')); } /** diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index b3cd17f..2a48540 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -172,7 +172,7 @@ class PutTransactionFS extends PutTransaction { } finalize() { - return this._closeAllStreams(); + return this._closeAllStreams().then(() => super.finalize()); } getWriteStream(type, size) { diff --git a/lib/cache/cache_membuf.js b/lib/cache/cache_membuf.js index 517ad0a..b3682b0 100644 --- a/lib/cache/cache_membuf.js +++ b/lib/cache/cache_membuf.js @@ -356,7 +356,7 @@ class PutTransactionMembuf extends PutTransaction { }); ok ? resolve() : reject(new Error("Transaction failed; file size mismatch")); - }); + }).then(() => super.finalize()); } getWriteStream(type, size) { diff --git a/test/cache_api.js b/test/cache_api.js index 2f29f8a..66da9eb 100644 --- a/test/cache_api.js +++ b/test/cache_api.js @@ -219,6 +219,11 @@ describe("PutTransaction API", function() { .then(stream => stream.end(fileData.info)) .then(() => trx.finalize()) }); + + it("should emit a 'finalize' event", (done) => { + trx.once('finalize', () => done()); + trx.finalize(); + }); }); describe("getWriteStream", function() { diff --git a/test/cache_base.js b/test/cache_base.js index 166ddba..e51a2e1 100644 --- a/test/cache_base.js +++ b/test/cache_base.js @@ -155,9 +155,10 @@ describe("PutTransaction: Base Class", () => { }); describe("finalize", () => { - it("should require override implementation in subclasses by returning an error", () => { - return trx.finalize() - .then(() => { throw new Error("Expected error!"); }, () => {}); + it("should return a promise and emit a 'finalize' event", (done) => { + trx.once('finalize', () => done()); + let p = trx.finalize(); + assert(typeof(p.then) === 'function'); }); }); From 058aa3bfcb4507be36240cd3d4467aa8687beb61 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 16 Jan 2018 10:20:46 -0600 Subject: [PATCH 25/89] Added PutTransaction.manifest method to return a list of file types (i, a, r) written to the transaction --- lib/cache/cache_base.js | 6 ++++++ lib/cache/cache_fs.js | 4 ++++ lib/cache/cache_membuf.js | 4 ++++ test/cache_api.js | 16 ++++++++++++++++ 4 files changed, 30 insertions(+) diff --git a/lib/cache/cache_base.js b/lib/cache/cache_base.js index 8265d0a..bee23ef 100644 --- a/lib/cache/cache_base.js +++ b/lib/cache/cache_base.js @@ -139,6 +139,12 @@ class PutTransaction extends EventEmitter { */ get hash() { return this._hash; } + /** + * + * @returns {Array} + */ + get manifest() { return []; } + /** * * @returns {Array} diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 2a48540..93daf88 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -167,6 +167,10 @@ class PutTransactionFS extends PutTransaction { }); } + get manifest() { + return this.files.map((file) => file.type); + } + get files() { return this._files; } diff --git a/lib/cache/cache_membuf.js b/lib/cache/cache_membuf.js index b3682b0..e8ee230 100644 --- a/lib/cache/cache_membuf.js +++ b/lib/cache/cache_membuf.js @@ -343,6 +343,10 @@ class PutTransactionMembuf extends PutTransaction { this._finished = []; } + get manifest() { + return this.files.map((file) => file.type); + } + get files() { return this._finished; } diff --git a/test/cache_api.js b/test/cache_api.js index 66da9eb..b94f28d 100644 --- a/test/cache_api.js +++ b/test/cache_api.js @@ -193,6 +193,22 @@ describe("PutTransaction API", function() { }); }); + describe("get manifest", function() { + it("should return an array of file types that were successfully written to the transaction", () => { + return trx.getWriteStream('i', fileData.info.length) + .then(stream => stream.end(fileData.info)) + .then(() => trx.getWriteStream('r', fileData.resource.length)) + .then(stream => stream.end(fileData.resource)) + .then(() => trx.getWriteStream('a', fileData.bin.length)) + .then(stream => stream.end(fileData.bin)) + .then(() => trx.finalize()) + .then(() => { + let m = trx.manifest; + ['i', 'a', 'r'].forEach((t) => assert(m.indexOf(t) >= 0)); + }); + }); + }); + describe("get files", function() { it("should return an empty array before finalize() is called", () => { assert(trx.files.length === 0); From 8e981f34ac6b1a79c15df628b67f59483ce292b3 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 17 Jan 2018 15:54:13 -0600 Subject: [PATCH 26/89] =?UTF-8?q?Added=20ability=20to=20mirror=20transacti?= =?UTF-8?q?ons=20to=20one=20or=20more=20remote=20Cache=20Servers.=20Use=20?= =?UTF-8?q?command=20line=20option=20=E2=80=94mirror=20[host:port]=20one?= =?UTF-8?q?=20or=20more=20times=20to=20enable.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- config/default.yml | 6 +- lib/cache/cache_fs.js | 4 +- lib/server.js | 35 ++++++--- lib/server/command_processor.js | 4 +- lib/server/transaction_mirror.js | 120 +++++++++++++++++++++++++++++++ main.js | 24 ++++++- test/protocol.js | 2 +- test/server.js | 2 +- 8 files changed, 179 insertions(+), 18 deletions(-) create mode 100644 lib/server/transaction_mirror.js diff --git a/config/default.yml b/config/default.yml index d3abedf..6baba9d 100644 --- a/config/default.yml +++ b/config/default.yml @@ -12,4 +12,8 @@ Cache: autosaveInterval: 10000 throttledSaves: false cache_fs: - cachePath: ".cache_fs" \ No newline at end of file + cachePath: ".cache_fs" +Mirror: + options: + queueProcessDelay: 2000 + connectionIdleTimeout: 10000 \ No newline at end of file diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 93daf88..ef2a216 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -90,6 +90,7 @@ class CacheFS extends CacheBase { function moveFile(file) { let filePath = self._calcFilepath(file.type, transaction.guid, transaction.hash); + helpers.log(consts.LOG_INFO, `Adding file to cache: ${file.size} ${filePath}`); return fs.move(file.file, filePath, { overwrite: true }); } @@ -146,7 +147,8 @@ class PutTransactionFS extends PutTransaction { if(stream.stream.bytesWritten === stream.size) { self._files.push({ file: stream.file, - type: stream.type + type: stream.type, + size: stream.size }); } else { diff --git a/lib/server.js b/lib/server.js index d39af87..0da3daa 100644 --- a/lib/server.js +++ b/lib/server.js @@ -4,6 +4,7 @@ const consts = require('./constants'); const helpers = require('./helpers'); const ClientStreamProcessor = require('./server/client_stream_processor'); const CommandProcessor = require('./server/command_processor'); +const TransactionMirror = require('./server/transaction_mirror'); class CacheServer { /** @@ -11,13 +12,20 @@ class CacheServer { * @param {CacheBase} cache * @param {Number} port */ - constructor(cache, port) { + constructor(cache, options) { this._cache = cache; - this._port = port; - if (!port && port !== 0) + + this._port = options.port; + if (!options.port && options.port !== 0) this._port = consts.DEFAULT_PORT; this._server = null; + this._mirrors = []; + + if(options.mirror) { + options.mirror = [].concat(options.mirror); + this._mirrors = options.mirror.map(m => new TransactionMirror(m, cache)); + } } /** @@ -50,18 +58,23 @@ class CacheServer { let server = net.createServer(socket => { helpers.log(consts.LOG_TEST, `${socket.remoteAddress}:${socket.remotePort} connected.`); - socket - .on('close', () => { + let cmdProc = new CommandProcessor(self.cache); + + if(self._mirrors.length > 0) { + cmdProc.on('onTransactionEnd', (trx) => { + self._mirrors.forEach(m => m.queueTransaction(trx)); + }); + } + + socket.on('close', () => { helpers.log(consts.LOG_TEST, `${socket.remoteAddress}:${socket.remotePort} closed connection.`); - }) - .on('error', err => { + }).on('error', err => { helpers.log(consts.LOG_ERR, err); }); - socket - .pipe(new ClientStreamProcessor()) // Transform the incoming byte stream into commands and file data - .pipe(new CommandProcessor(self.cache)) // Execute commands and interface with the cache module - .pipe(socket); // Connect back to socket to send files + socket.pipe(new ClientStreamProcessor()) // Transform the incoming byte stream into commands and file data + .pipe(cmdProc) // Execute commands and interface with the cache module + .pipe(socket); // Connect back to socket to send files }); server.on('error', err => { diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index 0e479da..e7fcb91 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -5,6 +5,7 @@ const Duplex = require('stream').Duplex; const kSource = Symbol("source"); const kCache = Symbol("cache"); +const kMirror = Symbol("mirror"); const kSendFileQueue = Symbol("sendFileQueue"); const kReadStateVersion = Symbol("readStateVersion"); const kReadStateCommand = Symbol("readStateCommand"); @@ -260,7 +261,7 @@ class CommandProcessor extends Duplex { p = this._onPut(type, size); break; default: - p = Promise.reject(new Error(`Unrecognized command '${cmd}`)); + p = Promise.reject(new Error(`Unrecognized command '${cmd}'`)); } p.then(() => callback(), err => callback(err)); @@ -341,6 +342,7 @@ class CommandProcessor extends Duplex { return this[kCache].endPutTransaction(this._trx) .then(() => { + self.emit('onTransactionEnd', self._trx); helpers.log(consts.LOG_DBG, `End transaction for GUID: ${helpers.GUIDBufferToString(self._trx.guid)} Hash: ${self._trx.hash.toString('hex')}`); self._trx = null; }); diff --git a/lib/server/transaction_mirror.js b/lib/server/transaction_mirror.js new file mode 100644 index 0000000..a49d346 --- /dev/null +++ b/lib/server/transaction_mirror.js @@ -0,0 +1,120 @@ +'use strict'; +const helpers = require('../helpers'); +const consts = require('../constants'); +const net = require('net'); +const config = require('config'); + +const OPTIONS_PATH = "Mirror.options"; + +const PROCESS_DELAY_MS = 2000; +const CONNECT_IDLE_TIMEOUT_MS = 10000; + +let cWrite = (client, buf) => { + return new Promise(resolve => { + client.write(buf, () => resolve()); + }); +}; + +let cPipe = (client, stream) => { + return new Promise((resolve, reject) => { + stream.on('end', () => resolve()); + stream.on('error', err => reject(err)); + stream.pipe(client, {end: false}); + }); +}; + +class TransactionMirror { + + /** + * + * @param {Object} connectOptions + * @param {CacheBase} cache + */ + constructor(connectOptions, cache) { + this._connectOptions = connectOptions; + this._cache = cache; + this._queue = []; + this._processing = false; + this._client = null; + } + + static get options() { + return config.get(OPTIONS_PATH); + } + + _connect() { + const self = this; + return new Promise(resolve => { + if(self._client !== null && !self._client.destroyed) + return resolve(self._client); + + let client = net.connect(this._connectOptions); + + const idleTimeout = TransactionMirror.options.connectionIdleTimeout || CONNECT_IDLE_TIMEOUT_MS; + client.setTimeout(idleTimeout, () => { + client.end('q'); + self._client = null; + }); + + client.on('connect', () => { + client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); + self._client = client; + resolve(self._client); + }); + + client.on('error', err => helpers.log(consts.LOG_ERR, err)); + }); + } + + async _processQueue() { + let self = this; + let client = await self._connect(); + + let send = async (item) => { + await cWrite(client, Buffer.concat([Buffer.from('ts'), item.guid, item.hash], 34)); + + for (let i = 0; i < item.types.length; i++) { + let type = item.types[i]; + let info = await self._cache.getFileInfo(type, item.guid, item.hash); + let stream = await self._cache.getFileStream(type, item.guid, item.hash); + await cWrite(client, `p${type}${helpers.encodeInt64(info.size)}`); + await cPipe(client, stream); + } + + await cWrite(client, 'te'); + }; + + while (self._queue.length > 0) { + try { + await send(self._queue.shift()); + } + catch (err) { + helpers.log(consts.LOG_ERR, err); + } + } + + self._processing = false; + } + + /** + * + * @param {PutTransaction} trx + */ + queueTransaction(trx) { + if(trx.manifest.length === 0) return; + + this._queue.push({ + guid: trx.guid, + hash: trx.hash, + types: trx.manifest + }); + + if(!this._processing) { + this._processing = true; + let delay = TransactionMirror.options.queueProcessDelay || PROCESS_DELAY_MS; + setTimeout(this._processQueue.bind(this), delay); + } + } +} + +module.exports = TransactionMirror; \ No newline at end of file diff --git a/main.js b/main.js index d68e25d..cc186b9 100644 --- a/main.js +++ b/main.js @@ -16,6 +16,11 @@ function zeroOrMore(val) { return Math.max(0, val); } +function collect(val, memo) { + memo.push(val); + return memo; +} + const defaultCacheModule = config.get("Cache.module"); program.description("Unity Cache Server") @@ -25,6 +30,7 @@ program.description("Unity Cache Server") .option('-P, --cachePath [path]', `Specify the path of the cache directory.`) .option('-l, --log-level ', `Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is ${consts.DEFAULT_LOG_LEVEL}`, myParseInt, consts.DEFAULT_LOG_LEVEL) .option('-w, --workers ', `Number of worker threads to spawn. Default is ${consts.DEFAULT_WORKERS}`, zeroOrMore, consts.DEFAULT_WORKERS) + .option('-m --mirror [host:port]', `Mirror transactions to another cache server. Can be repeated for multiple mirrors.`, collect, []) .option('-m, --monitor-parent-process ', 'Monitor a parent process and exit if it dies', myParseInt, 0); program.parse(process.argv); @@ -68,12 +74,26 @@ if(program.workers > 0 && !CacheModule.properties.clustering) { let server = null; let cacheOpts = {}; -if(program.cachePath !== null) +if(program.cachePath !== null) { cacheOpts.cachePath = program.cachePath; +} + +let mirrors = program.mirror.map(m => { + let [host, port] = m.split(':'); + if(!port) port = consts.DEFAULT_PORT; + + helpers.log(consts.LOG_INFO, `Cache Server mirroring to ${host}:${port}`); + return { host: host, port: port }; +}); Cache.init(cacheOpts) .then(() => { - server = new CacheServer(Cache, program.port); + let opts = { + port: program.port, + mirror: mirrors + }; + + server = new CacheServer(Cache, opts); if(cluster.isMaster) { helpers.log(consts.LOG_INFO, `Cache Server version ${consts.VERSION}; Cache module ${program.cacheModule}`); diff --git a/test/protocol.js b/test/protocol.js index 469722d..bb404ea 100644 --- a/test/protocol.js +++ b/test/protocol.js @@ -61,7 +61,7 @@ describe("Protocol", function() { cache.init(module.options) .then(() => { - server = new CacheServer(cache, 0); + server = new CacheServer(cache, {port: 0}); server.Start(err => { assert(!err, "Cache Server reported error! " + err); }, done); diff --git a/test/server.js b/test/server.js index 0301e92..975187a 100644 --- a/test/server.js +++ b/test/server.js @@ -9,7 +9,7 @@ const cmd = require('./test_utils').cmd; helpers.SetLogger(()=>{}); const cache = new Cache(); -const server = new CacheServer(cache, 0); +const server = new CacheServer(cache, {port: 0}); let client; describe("Server common", function() { From 369d6c83845e33a6e272f5039b94e47349fe21af Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 19 Jan 2018 08:09:55 -0600 Subject: [PATCH 27/89] Move server.js to server lib/server --- lib/server/command_processor.js | 1 - lib/{ => server}/server.js | 22 +++++++++++++--------- main.js | 2 +- test/protocol.js | 2 +- test/server.js | 2 +- 5 files changed, 16 insertions(+), 13 deletions(-) rename lib/{ => server}/server.js (77%) diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index e7fcb91..6642951 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -5,7 +5,6 @@ const Duplex = require('stream').Duplex; const kSource = Symbol("source"); const kCache = Symbol("cache"); -const kMirror = Symbol("mirror"); const kSendFileQueue = Symbol("sendFileQueue"); const kReadStateVersion = Symbol("readStateVersion"); const kReadStateCommand = Symbol("readStateCommand"); diff --git a/lib/server.js b/lib/server/server.js similarity index 77% rename from lib/server.js rename to lib/server/server.js index 0da3daa..b6efefd 100644 --- a/lib/server.js +++ b/lib/server/server.js @@ -1,16 +1,17 @@ 'use strict'; const net = require('net'); -const consts = require('./constants'); -const helpers = require('./helpers'); -const ClientStreamProcessor = require('./server/client_stream_processor'); -const CommandProcessor = require('./server/command_processor'); -const TransactionMirror = require('./server/transaction_mirror'); +const consts = require('../constants'); +const helpers = require('../helpers'); +const ClientStreamProcessor = require('./client_stream_processor'); +const CommandProcessor = require('./command_processor'); +const TransactionMirror = require('./transaction_mirror'); +const ip = require('ip'); class CacheServer { /** * * @param {CacheBase} cache - * @param {Number} port + * @param {Object} options */ constructor(cache, options) { this._cache = cache; @@ -58,11 +59,14 @@ class CacheServer { let server = net.createServer(socket => { helpers.log(consts.LOG_TEST, `${socket.remoteAddress}:${socket.remotePort} connected.`); - let cmdProc = new CommandProcessor(self.cache); + const cmdProc = new CommandProcessor(self.cache); - if(self._mirrors.length > 0) { + // Prune mirror list to exclude the incoming socket address, to prevent looping transactions + const mirrors = self._mirrors.filter(m => !ip.isEqual(socket.address(), m.address())); + + if(mirrors.length > 0) { cmdProc.on('onTransactionEnd', (trx) => { - self._mirrors.forEach(m => m.queueTransaction(trx)); + mirrors.forEach(m => m.queueTransaction(trx)); }); } diff --git a/main.js b/main.js index cc186b9..85b0ac2 100644 --- a/main.js +++ b/main.js @@ -3,7 +3,7 @@ const helpers = require('./lib/helpers'); const consts = require('./lib/constants'); const program = require('commander'); const path = require('path'); -const CacheServer = require('./lib/server'); +const CacheServer = require('./lib/server/server'); const config = require('config'); const prompt = require('prompt'); diff --git a/test/protocol.js b/test/protocol.js index bb404ea..3ee2b35 100644 --- a/test/protocol.js +++ b/test/protocol.js @@ -3,7 +3,7 @@ const net = require('net'); const crypto = require('crypto'); const helpers = require('../lib/helpers'); const consts = require('../lib/constants'); -const CacheServer = require('../lib/server'); +const CacheServer = require('../lib/server/server'); const CacheServerResponseTransform = require('../lib/client/server_stream_processor.js'); const loki = require('lokijs'); const tmp = require('tmp'); diff --git a/test/server.js b/test/server.js index 975187a..3bd8a28 100644 --- a/test/server.js +++ b/test/server.js @@ -2,7 +2,7 @@ const assert = require('assert'); const net = require('net'); const helpers = require('../lib/helpers'); const consts = require('../lib/constants'); -const CacheServer = require('../lib/server'); +const CacheServer = require('../lib/server/server'); const Cache = require('../lib/cache/cache_base').CacheBase; const sleep = require('./test_utils').sleep; const cmd = require('./test_utils').cmd; From 294456f583b1988f45905411a3689a73067cdd3b Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 19 Jan 2018 08:11:36 -0600 Subject: [PATCH 28/89] =?UTF-8?q?Resolve=20mirror=20addresses=20and=20make?= =?UTF-8?q?=20sure=20we=20don=E2=80=99t=20attempt=20to=20self-mirror?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- lib/server/transaction_mirror.js | 4 ++++ main.js | 34 ++++++++++++++++++++++++++------ 2 files changed, 32 insertions(+), 6 deletions(-) diff --git a/lib/server/transaction_mirror.js b/lib/server/transaction_mirror.js index a49d346..639b985 100644 --- a/lib/server/transaction_mirror.js +++ b/lib/server/transaction_mirror.js @@ -42,6 +42,10 @@ class TransactionMirror { return config.get(OPTIONS_PATH); } + get address() { + return this._connectOptions.host; + } + _connect() { const self = this; return new Promise(resolve => { diff --git a/main.js b/main.js index 85b0ac2..9ad3c97 100644 --- a/main.js +++ b/main.js @@ -6,6 +6,8 @@ const path = require('path'); const CacheServer = require('./lib/server/server'); const config = require('config'); const prompt = require('prompt'); +const dns = require('dns'); +const ip = require('ip'); function myParseInt(val, def) { val = parseInt(val); @@ -78,16 +80,36 @@ if(program.cachePath !== null) { cacheOpts.cachePath = program.cachePath; } -let mirrors = program.mirror.map(m => { - let [host, port] = m.split(':'); - if(!port) port = consts.DEFAULT_PORT; +let getMirrors = () => new Promise((resolve, reject) => { + let mirrors = program.mirror.map(m => { + let [host, port] = m.split(':'); + port = parseInt(port); - helpers.log(consts.LOG_INFO, `Cache Server mirroring to ${host}:${port}`); - return { host: host, port: port }; + if(!port) port = consts.DEFAULT_PORT; + const myIp = ip.address(); + + return new Promise((resolve, reject) => { + dns.lookup(host, {family: 4, hints: dns.ADDRCONFIG}, (err, address) => { + if(err) return reject(err); + + if((ip.isEqual(myIp, address) || ip.isEqual("127.0.0.1", address)) && program.port === port) { + return reject(new Error(`Cannot mirror to self!`)); + } + + helpers.log(consts.LOG_INFO, `Cache Server mirroring to ${address}:${port}`); + resolve({ host: address, port: port }); + }); + }) + }); + + Promise.all(mirrors) + .then(m => resolve(m)) + .catch(err => reject(err)); }); Cache.init(cacheOpts) - .then(() => { + .then(() => getMirrors()) + .then(mirrors => { let opts = { port: program.port, mirror: mirrors From 91cc60ef2723b15eb89ab98e7cee47ca83d2f911 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 23 Jan 2018 10:45:39 -0600 Subject: [PATCH 29/89] - Fixed a major bug in the client stream parser when the data chunk is parsed incorrectly if it lands on a certain boundary between commands. The test suite was not picking this up because it was sending data too fast to the client, so that was fixed as well to properly repro the bug. - Renamed CacheMembuf to CacheRAM for clarity - Cleaned up the transaction mirror implementation, which creates the start of a client javascript module - Promisified most of the command processor --- config/default.yml | 6 +- lib/cache/cache_base.js | 2 +- lib/cache/cache_fs.js | 6 +- lib/cache/{cache_membuf.js => cache_ram.js} | 18 +- lib/client/client.js | 144 ++++++++++++++ lib/constants.js | 3 +- lib/helpers.js | 9 + lib/index.js | 4 + lib/server/client_stream_processor.js | 51 ++--- lib/server/command_processor.js | 205 +++++++++----------- lib/server/server.js | 4 +- lib/server/transaction_mirror.js | 68 ++----- main.js | 11 +- package.json | 3 +- test/cache_api.js | 4 +- test/cache_base.js | 2 +- test/{cache_membuf.js => cache_ram.js} | 6 +- test/helpers.js | 9 + test/protocol.js | 20 +- test/test_utils.js | 19 +- 20 files changed, 355 insertions(+), 239 deletions(-) rename lib/cache/{cache_membuf.js => cache_ram.js} (96%) create mode 100644 lib/client/client.js create mode 100644 lib/index.js rename test/{cache_membuf.js => cache_ram.js} (98%) diff --git a/config/default.yml b/config/default.yml index 6baba9d..0c87899 100644 --- a/config/default.yml +++ b/config/default.yml @@ -1,12 +1,12 @@ Cache: - module: "lib/cache/cache_fs" + defaultModule: "lib/cache/cache_fs" options: - cache_membuf: + cache_ram: initialPageSize: 100000000 growPageSize: 100000000 maxPageCount: 10 minFreeBlockSize: 1024 - cachePath: ".cache_membuf" + cachePath: ".cache_ram" persistenceOptions: autosave: true autosaveInterval: 10000 diff --git a/lib/cache/cache_base.js b/lib/cache/cache_base.js index bee23ef..2605139 100644 --- a/lib/cache/cache_base.js +++ b/lib/cache/cache_base.js @@ -79,7 +79,7 @@ class CacheBase extends EventEmitter { * @param {String} type * @param {Buffer} guid * @param {Buffer} hash - * @returns {Promise} + * @returns {Promise} */ getFileStream(type, guid, hash) { return Promise.reject(new Error("Not implemented")); diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index ef2a216..3adbffa 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -7,7 +7,7 @@ const uuid = require('uuid'); const _ = require('lodash'); const consts = require('../constants'); -class CacheFS extends CacheBase { +class CacheRAM extends CacheBase { constructor() { super(); } @@ -40,7 +40,7 @@ class CacheFS extends CacheBase { * @private */ _calcFilepath(type, guid, hash) { - let fileName = CacheFS._calcFilename(type, guid, hash); + let fileName = CacheRAM._calcFilename(type, guid, hash); return path.join(this._cachePath, fileName.substr(0, 2), fileName); } @@ -215,4 +215,4 @@ class PutTransactionFS extends PutTransaction { } } -module.exports = CacheFS; \ No newline at end of file +module.exports = CacheRAM; \ No newline at end of file diff --git a/lib/cache/cache_membuf.js b/lib/cache/cache_ram.js similarity index 96% rename from lib/cache/cache_membuf.js rename to lib/cache/cache_ram.js index e8ee230..de4fe33 100644 --- a/lib/cache/cache_membuf.js +++ b/lib/cache/cache_ram.js @@ -10,11 +10,11 @@ const _ = require('lodash'); const loki = require('lokijs'); const uuid = require('uuid/v4'); -const kDbName = 'cache_membuf.db'; +const kDbName = 'cache.db'; const kIndex = 'index'; const kPageMeta = 'pages'; -class CacheMembuf extends CacheBase { +class CacheRAM extends CacheBase { constructor() { super(); this._db = null; @@ -41,7 +41,7 @@ class CacheMembuf extends CacheBase { } get _optionsPath() { - return super._optionsPath + ".cache_membuf"; + return super._optionsPath + ".cache_ram"; } get _dbPath() { @@ -138,7 +138,7 @@ class CacheMembuf extends CacheBase { * @private */ _addFileToCache(type, guid, hash, buffer) { - const key = CacheMembuf._calcIndexKey(type, guid, hash); + const key = CacheRAM._calcIndexKey(type, guid, hash); const entry = this._reserveBlock(key, buffer.length); helpers.log(consts.LOG_TEST, `Saving file key: ${key} pageIndex: ${entry.pageIndex} pageOffset: ${entry.pageOffset} size: ${entry.size}`); @@ -281,7 +281,7 @@ class CacheMembuf extends CacheBase { } getFileInfo(type, guid, hash) { - const key = CacheMembuf._calcIndexKey(type, guid, hash); + const key = CacheRAM._calcIndexKey(type, guid, hash); const entry = this._index.by('fileId', key); return (entry != null) @@ -291,7 +291,7 @@ class CacheMembuf extends CacheBase { getFileStream(type, guid, hash) { const self = this; - const key = CacheMembuf._calcIndexKey(type, guid, hash); + const key = CacheRAM._calcIndexKey(type, guid, hash); const entry = this._index.by('fileId', key); return new Promise((resolve, reject) => { @@ -318,7 +318,7 @@ class CacheMembuf extends CacheBase { } createPutTransaction(guid, hash) { - return Promise.resolve(new PutTransactionMembuf(guid, hash)); + return Promise.resolve(new PutTransactionRAM(guid, hash)); } endPutTransaction(transaction) { @@ -336,7 +336,7 @@ class CacheMembuf extends CacheBase { registerClusterWorker(worker) {} } -class PutTransactionMembuf extends PutTransaction { +class PutTransactionRAM extends PutTransaction { constructor(guid, hash) { super(guid, hash); this._streams = {}; @@ -402,7 +402,7 @@ class PutTransactionMembuf extends PutTransaction { } } -module.exports = CacheMembuf; +module.exports = CacheRAM; class PersistenceAdapter extends loki.LokiFsAdapter { constructor(cache) { diff --git a/lib/client/client.js b/lib/client/client.js new file mode 100644 index 0000000..7c622ea --- /dev/null +++ b/lib/client/client.js @@ -0,0 +1,144 @@ +'use strict'; +const net = require('net'); +const consts = require('../constants'); +const helpers = require('../helpers'); + +const cmd = { + quit: "q", + transactionStart: "ts", + transactionEnd: "te" +}; + +class CacheClient { + constructor(address, port, options) { + this._address = address; + this._port = port; + this._client = null; + this._options = { + idleTimeout: options.idleTimeout || 0 + } + } + + /** + * + * @param {Object|String} data + * @returns {Promise} + * @private + */ + _clientWrite(data) { + const self = this; + return new Promise((resolve, reject) => { + if(!self._client) reject(new Error("Not connected, call connect() first")); + self._client.write(data, () => resolve()); + }); + } + + /** + * + * @param {Object} stream + * @private + */ + _clientPipe(stream) { + const self = this; + return new Promise((resolve, reject) => { + if(!self._client) reject(new Error("Not connected, call connect() first")); + stream.on('end', () => resolve()); + stream.on('error', err => reject(err)); + stream.pipe(self._client, {end: false}); + }); + } + + static get fileTypes() { + return { + info: 'i', + resource: 'r', + asset: 'a' + } + } + + /** + * + * @returns {Promise} + */ + connect() { + const self = this; + return new Promise((resolve, reject) => { + if(self._client !== null && !self._client.destroyed) + return resolve(self); + + let client = net.connect({host: self._address, port: self._port}); + + if(self._options.idleTimeout > 0) { + client.setTimeout(self._options.idleTimeout, () => self.quit()); + } + + client.on('connect', () => { + client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); + self._client = client; + resolve(self); + }); + + client.on('close', () => self._client = null); + client.on('error', err => reject(err)); + }); + } + + /** + * + * @returns {Promise} + */ + quit() { + return Promise.resolve(this._client.end(cmd.quit)); + } + + /** + * + * @param {Buffer} guid + * @param {Buffer} hash + * @returns {Promise} + */ + beginTransaction(guid, hash) { + return this._clientWrite(Buffer.concat([Buffer.from(cmd.transactionStart), guid, hash], 34)); + } + + /** + * + * @returns {Promise} + */ + endTransaction() { + return this._clientWrite(Buffer.from(cmd.transactionEnd)); + } + + /** + * + * @param {String} type + * @param {Buffer} guid + * @param {Buffer} hash + * @param {Buffer|Readable} data + * @param {Number} size + * @returns {Promise} + */ + async putFile(type, guid, hash, data, size) { + const types = Object.values(CacheClient.fileTypes); + + if(types.indexOf(type) < 0) + throw new Error("Unrecognized file type"); + + if(!helpers.isBuffer(guid) || guid.length !== 16) + throw new Error("guid is not a buffer or the wrong length (16)"); + + if(!helpers.isBuffer(hash) || hash.length !== 16) + throw new Error("hash is not a buffer or the wrong length (16)"); + + await this._clientWrite(`p${type}${helpers.encodeInt64(size)}`); + + if(helpers.isBuffer(data)) { + await this._clientWrite(data); + } + else { + await this._clientPipe(data); + } + } +} + +module.exports = CacheClient; \ No newline at end of file diff --git a/lib/constants.js b/lib/constants.js index e184fdc..7d7c88d 100644 --- a/lib/constants.js +++ b/lib/constants.js @@ -1,5 +1,4 @@ const constants = { - VERSION: "6.0.0", PROTOCOL_VERSION: 254, PROTOCOL_VERSION_MIN_SIZE: 2, UINT32_SIZE: 8, // hex @@ -13,7 +12,7 @@ const constants = { LOG_TEST: 4, LOG_DBG: 5, DEFAULT_PORT: 8126, - DEFAULT_WORKERS: Math.ceil(require('os').cpus().length / 2) + DEFAULT_WORKERS: 0 }; constants.ID_SIZE = constants.GUID_SIZE + constants.HASH_SIZE; diff --git a/lib/helpers.js b/lib/helpers.js index 5551373..ede08af 100644 --- a/lib/helpers.js +++ b/lib/helpers.js @@ -73,6 +73,15 @@ exports.readUInt64 = function(input) { return parseInt(input.toString('ascii', 0, consts.UINT64_SIZE), 16); }; +/** + * + * @param obj + * @returns {boolean} + */ +exports.isBuffer = function(obj) { + return !(obj === null) && !!obj.constructor && typeof obj.constructor.isBuffer === 'function' && obj.constructor.isBuffer(obj) +}; + function DefaultLogger(lvl, msg) { if (logLevel < lvl) return; diff --git a/lib/index.js b/lib/index.js new file mode 100644 index 0000000..cf1565f --- /dev/null +++ b/lib/index.js @@ -0,0 +1,4 @@ +exports.Server = require('./server/server'); +exports.Client = require('./client/client'); +exports.CacheFS = require('./cache/cache_fs'); +exports.CacheRAM = require('./cache/cache_ram'); \ No newline at end of file diff --git a/lib/server/client_stream_processor.js b/lib/server/client_stream_processor.js index 63374a3..12d7acc 100644 --- a/lib/server/client_stream_processor.js +++ b/lib/server/client_stream_processor.js @@ -1,6 +1,7 @@ const helpers = require('./../helpers'); const consts = require('./../constants'); const Transform = require('stream').Transform; +const assert = require('assert'); const CMD_QUIT = 'q'.charCodeAt(0); const MAX_HEADER_SIZE = consts.CMD_SIZE + consts.ID_SIZE; @@ -35,6 +36,7 @@ class ClientStreamProcessor extends Transform { doReadSize: false, doReadId: false, dataPassThrough: false, + didParseCmd: false, dataSize: 0, headerBufPos: 0, headerSize : consts.CMD_SIZE, @@ -44,7 +46,7 @@ class ClientStreamProcessor extends Transform { static get errorCodes() { return { - quitError: { msg: "Client quit" } + quitError: { message: "Client quit" } } } @@ -61,7 +63,7 @@ class ClientStreamProcessor extends Transform { } if(this.errState !== null) { - helpers.log(consts.LOG_ERR, this.errState.msg); + helpers.log(consts.LOG_ERR, this.errState); this.push('q'); // quit } } @@ -109,6 +111,7 @@ class ClientStreamProcessor extends Transform { data.copy(self.headerBuf, self.readState.headerBufPos, dataPos, dataPos + toCopy); dataPos += toCopy; self.readState.headerBufPos += toCopy; + assert(self.readState.headerBufPos <= self.headerBuf.length); return self.readState.headerBufPos === self.readState.headerSize; } @@ -130,33 +133,35 @@ class ClientStreamProcessor extends Transform { break; } + if(!this.readState.didParseCmd) { + this.readState.didParseCmd = true; - const cmd = this.headerBuf.slice(0, consts.CMD_SIZE).toString('ascii'); + const cmd = this.headerBuf.slice(0, consts.CMD_SIZE).toString('ascii'); - switch (cmd[0]) { - case 'g': // get - this.readState.doReadId = true; - this.readState.headerSize += consts.ID_SIZE; - break; - case 'p': // put - this.readState.doReadSize = true; - this.readState.headerSize += consts.SIZE_SIZE; - break; - case 't': // transaction - if(cmd[1] === 's') { + switch (cmd[0]) { + case 'g': // get this.readState.doReadId = true; this.readState.headerSize += consts.ID_SIZE; - } + break; + case 'p': // put + this.readState.doReadSize = true; + this.readState.headerSize += consts.SIZE_SIZE; + break; + case 't': // transaction + if (cmd[1] === 's') { + this.readState.doReadId = true; + this.readState.headerSize += consts.ID_SIZE; + } + + break; + default: + this.errState = new Error("Unrecognized command, aborting!"); + break; + } + if (!fillBufferWithData()) { break; - default: - this.errState = new Error("Unrecognized command, aborting!"); - break; - } - - - if(!fillBufferWithData()) { - break; + } } if (this.readState.doReadSize) { diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index 6642951..b1fde21 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -60,29 +60,22 @@ class CommandProcessor extends Duplex { */ _write(chunk, encoding, callback) { let handler = null; - const self = this; switch(this._readState) { case kReadStateVersion: - handler = this._handleVersion; + handler = this._handleVersion(chunk); break; case kReadStateCommand: - handler = this._handleCommand; + handler = this._handleCommand(chunk); break; case kReadStatePutStream: - handler = this._handleWrite; + handler = this._handleWrite(chunk); break; default: return callback(null); } - handler.call(this, chunk, function(err) { - if(err) { - self._quit(err); - } - - callback(); - }); + handler.then(() => callback(), err => this._quit(err)); } /** @@ -165,62 +158,68 @@ class CommandProcessor extends Duplex { * @private */ _quit(err) { - this[kSource].unpipe(this); - this[kSource].emit('quit'); - this._readState = null; - err && helpers.log(consts.LOG_ERR, err); + return new Promise(resolve => { + this[kSource].unpipe(this); + this[kSource].emit('quit'); + this._readState = null; + if(err) { + helpers.log(consts.LOG_ERR, err); + } + + resolve(); + }); } /** * * @param {Buffer} data - * @param {Function} callback * @private */ - _handleVersion(data, callback) { - let version = helpers.readUInt32(data); - this._readState = kReadStateCommand; - let err = null; - if(version !== consts.PROTOCOL_VERSION) { - version = 0; - err = new Error("Bad Client protocol version"); - } + _handleVersion(data) { + const self = this; + return new Promise((resolve, reject) => { + let version = helpers.readUInt32(data); + self._readState = kReadStateCommand; + let err = null; + if(version !== consts.PROTOCOL_VERSION) { + version = 0; + err = new Error("Bad Client protocol version"); + } - this.push(helpers.encodeInt32(version)); - callback(err); + self.push(helpers.encodeInt32(version)); + err ? reject(err) : resolve(); + }); } /** * * @param {Buffer} data - * @param {Function} callback * @private */ - _handleWrite(data, callback) { + _handleWrite(data) { const self = this; + return new Promise(resolve => { + this._putStream.write(data, 'ascii', () => { + self._putSent += data.length; + if(self._putSent === self._putSize) { + self._putStream.end(); + self._readState = kReadStateCommand; + self._putSent = 0; + self._putSize = 0; + } - this._putStream.write(data, 'ascii', function() { - self._putSent += data.length; - if(self._putSent === self._putSize) { - self._putStream.end(callback); - self._readState = kReadStateCommand; - self._putSent = 0; - self._putSize = 0; - } - else { - callback(); - } + resolve(); + }); }); } /** * * @param {Buffer} data - * @param {Function} callback * @private */ - _handleCommand(data, callback) { - let p, cmd, size, type, guid, hash = null; + async _handleCommand(data) { + let cmd, size, type, guid, hash = null; if(data.length > 1) { cmd = data.slice(0, 2).toString('ascii'); type = cmd[1]; @@ -239,31 +238,27 @@ class CommandProcessor extends Duplex { switch(cmd) { case 'q': - this._quit(); - this._readState = null; - p = Promise.resolve(); + await this._quit(); break; case 'ga': case 'gi': case 'gr': - p = this._onGet(type, guid, hash); + await this._onGet(type, guid, hash); break; case 'ts': - p = this._onTransactionStart(guid, hash); + await this._onTransactionStart(guid, hash); break; case 'te': - p = this._onTransactionEnd(); + await this._onTransactionEnd(); break; case 'pa': case 'pi': case 'pr': - p = this._onPut(type, size); + await this._onPut(type, size); break; default: - p = Promise.reject(new Error(`Unrecognized command '${cmd}'`)); + throw new Error(`Unrecognized command '${cmd}'`); } - - p.then(() => callback(), err => callback(err)); } /** @@ -271,102 +266,88 @@ class CommandProcessor extends Duplex { * @param {String} type * @param {Buffer} guid * @param {Buffer} hash - * @returns {Promise} + * @returns {Promise} * @private */ - _onGet(type, guid, hash) { - let self = this; - return this[kCache].getFileInfo(type, guid, hash) - .then(result => { - let resp = Buffer.from(`+${type}${helpers.encodeInt64(result.size)}`, 'ascii'); - self[kSendFileQueue].push({ - exists: true, - header: Buffer.concat([resp, guid, hash], 50), - size: result.size, - type: type, - guid: guid, - hash: hash - }); - - self._sendFileQueueCount++; - helpers.log(consts.LOG_DBG, `Adding file to send queue, size ${result.size}`); - }) - .catch(() => { - let resp = Buffer.from(`-${type}`, 'ascii'); - self[kSendFileQueue].push({ - exists: false, - header: Buffer.concat([resp, guid, hash], 34) - }); - }) - .then(() => { - if(self[kSendFileQueue].length === 1) { - self._read(self._readState.highWaterMark); - } + async _onGet(type, guid, hash) { + + try { + const info = await this[kCache].getFileInfo(type, guid, hash); + const resp = Buffer.from(`+${type}${helpers.encodeInt64(info.size)}`, 'ascii'); + this[kSendFileQueue].push({ + exists: true, + header: Buffer.concat([resp, guid, hash], 50), + size: info.size, + type: type, + guid: guid, + hash: hash }); + + this._sendFileQueueCount++; + helpers.log(consts.LOG_DBG, `Adding file to send queue, size ${info.size}`); + } + catch(err) { + const resp = Buffer.from(`-${type}`, 'ascii'); + this[kSendFileQueue].push({ + exists: false, + header: Buffer.concat([resp, guid, hash], 34) + }); + } + finally { + if(this[kSendFileQueue].length === 1) { + this._read(this._readState.highWaterMark); + } + } } /** * * @param {Buffer} guid * @param {Buffer} hash - * @returns {Promise} + * @returns {Promise} * @private */ - _onTransactionStart(guid, hash) { - const self = this; - + async _onTransactionStart(guid, hash) { if(this._trx !== null) { helpers.log(consts.LOG_DBG, "Cancel previous transaction"); this._trx = null; } - return this[kCache].createPutTransaction(guid, hash) - .then(trx => { - helpers.log(consts.LOG_DBG, `Start transaction for GUID: ${helpers.GUIDBufferToString(guid)} Hash: ${hash.toString('hex')}`); - self._trx = trx; - }); + this._trx = await this[kCache].createPutTransaction(guid, hash); + helpers.log(consts.LOG_DBG, `Start transaction for GUID: ${helpers.GUIDBufferToString(guid)} Hash: ${hash.toString('hex')}`); } /** * - * @returns {Promise} + * @returns {Promise} * @private */ - _onTransactionEnd() { - const self = this; - + async _onTransactionEnd() { if(!this._trx) { - return Promise.reject(new Error("Invalid transaction isolation")); + throw new Error("Invalid transaction isolation"); } - return this[kCache].endPutTransaction(this._trx) - .then(() => { - self.emit('onTransactionEnd', self._trx); - helpers.log(consts.LOG_DBG, `End transaction for GUID: ${helpers.GUIDBufferToString(self._trx.guid)} Hash: ${self._trx.hash.toString('hex')}`); - self._trx = null; - }); + await this[kCache].endPutTransaction(this._trx); + this.emit('onTransactionEnd', this._trx); + helpers.log(consts.LOG_DBG, `End transaction for GUID: ${helpers.GUIDBufferToString(this._trx.guid)} Hash: ${this._trx.hash.toString('hex')}`); + this._trx = null; } /** * * @param {String} type * @param {Number} size - * @returns {Promise} + * @returns {Promise} * @private */ - _onPut(type, size) { - const self = this; - + async _onPut(type, size) { if(!this._trx) { - return Promise.reject(new Error("Not in a transaction")); + throw new Error("Not in a transaction"); } - return this._trx.getWriteStream(type, size) - .then(stream => { - self._putStream = stream; - self._putSize = size; - self._readState = kReadStatePutStream; - }); + this._putStream = await this._trx.getWriteStream(type, size); + this._putSize = size; + this._readState = kReadStatePutStream; } } diff --git a/lib/server/server.js b/lib/server/server.js index b6efefd..caa6227 100644 --- a/lib/server/server.js +++ b/lib/server/server.js @@ -61,8 +61,8 @@ class CacheServer { const cmdProc = new CommandProcessor(self.cache); - // Prune mirror list to exclude the incoming socket address, to prevent looping transactions - const mirrors = self._mirrors.filter(m => !ip.isEqual(socket.address(), m.address())); + // TODO: Prune mirror list to exclude the incoming socket address, to prevent looping transactions + const mirrors = self._mirrors; if(mirrors.length > 0) { cmdProc.on('onTransactionEnd', (trx) => { diff --git a/lib/server/transaction_mirror.js b/lib/server/transaction_mirror.js index 639b985..7a61d91 100644 --- a/lib/server/transaction_mirror.js +++ b/lib/server/transaction_mirror.js @@ -1,28 +1,14 @@ 'use strict'; const helpers = require('../helpers'); const consts = require('../constants'); -const net = require('net'); const config = require('config'); +const Client = require('../client/client'); const OPTIONS_PATH = "Mirror.options"; const PROCESS_DELAY_MS = 2000; const CONNECT_IDLE_TIMEOUT_MS = 10000; -let cWrite = (client, buf) => { - return new Promise(resolve => { - client.write(buf, () => resolve()); - }); -}; - -let cPipe = (client, stream) => { - return new Promise((resolve, reject) => { - stream.on('end', () => resolve()); - stream.on('error', err => reject(err)); - stream.pipe(client, {end: false}); - }); -}; - class TransactionMirror { /** @@ -35,7 +21,11 @@ class TransactionMirror { this._cache = cache; this._queue = []; this._processing = false; - this._client = null; + + const address = connectOptions.address; + const port = connectOptions.port; + const idleTimeout = TransactionMirror.options.idleTimeout || CONNECT_IDLE_TIMEOUT_MS; + this._client = new Client(address, port, {idleTimeout: idleTimeout}); } static get options() { @@ -47,54 +37,34 @@ class TransactionMirror { } _connect() { - const self = this; - return new Promise(resolve => { - if(self._client !== null && !self._client.destroyed) - return resolve(self._client); - - let client = net.connect(this._connectOptions); - - const idleTimeout = TransactionMirror.options.connectionIdleTimeout || CONNECT_IDLE_TIMEOUT_MS; - client.setTimeout(idleTimeout, () => { - client.end('q'); - self._client = null; - }); - - client.on('connect', () => { - client.write(helpers.encodeInt32(consts.PROTOCOL_VERSION)); - self._client = client; - resolve(self._client); - }); - - client.on('error', err => helpers.log(consts.LOG_ERR, err)); - }); + return this._client.connect(); } async _processQueue() { let self = this; - let client = await self._connect(); + let client; let send = async (item) => { - await cWrite(client, Buffer.concat([Buffer.from('ts'), item.guid, item.hash], 34)); + await client.beginTransaction(item.guid, item.hash); - for (let i = 0; i < item.types.length; i++) { - let type = item.types[i]; + for (let type of item.types) { let info = await self._cache.getFileInfo(type, item.guid, item.hash); let stream = await self._cache.getFileStream(type, item.guid, item.hash); - await cWrite(client, `p${type}${helpers.encodeInt64(info.size)}`); - await cPipe(client, stream); + await client.putFile(type, item.guid, item.hash, stream, info.size); } - await cWrite(client, 'te'); + await client.endTransaction(); }; - while (self._queue.length > 0) { - try { + try { + client = await self._connect(); + + while (self._queue.length > 0) { await send(self._queue.shift()); } - catch (err) { - helpers.log(consts.LOG_ERR, err); - } + } + catch (err) { + helpers.log(consts.LOG_ERR, `[TransactionMirror] ${err}`); } self._processing = false; diff --git a/main.js b/main.js index 9ad3c97..9b4c6be 100644 --- a/main.js +++ b/main.js @@ -3,11 +3,12 @@ const helpers = require('./lib/helpers'); const consts = require('./lib/constants'); const program = require('commander'); const path = require('path'); -const CacheServer = require('./lib/server/server'); +const CacheServer = require('./lib').Server; const config = require('config'); const prompt = require('prompt'); const dns = require('dns'); const ip = require('ip'); +const VERSION = require('./package.json').version; function myParseInt(val, def) { val = parseInt(val); @@ -23,10 +24,10 @@ function collect(val, memo) { return memo; } -const defaultCacheModule = config.get("Cache.module"); +const defaultCacheModule = config.get("Cache.defaultModule"); program.description("Unity Cache Server") - .version(consts.VERSION) + .version(VERSION) .option('-p, --port ', `Specify the server port, only apply to new cache server, default is ${consts.DEFAULT_PORT}`, myParseInt, consts.DEFAULT_PORT) .option('-c --cacheModule [path]', `Use cache module at specified path. Default is '${defaultCacheModule}'`, defaultCacheModule) .option('-P, --cachePath [path]', `Specify the path of the cache directory.`) @@ -85,7 +86,7 @@ let getMirrors = () => new Promise((resolve, reject) => { let [host, port] = m.split(':'); port = parseInt(port); - if(!port) port = consts.DEFAULT_PORT; + if(!port) port = config.get("Defaults.serverPort"); const myIp = ip.address(); return new Promise((resolve, reject) => { @@ -118,7 +119,7 @@ Cache.init(cacheOpts) server = new CacheServer(Cache, opts); if(cluster.isMaster) { - helpers.log(consts.LOG_INFO, `Cache Server version ${consts.VERSION}; Cache module ${program.cacheModule}`); + helpers.log(consts.LOG_INFO, `Cache Server version ${VERSION}; Cache module ${program.cacheModule}`); if(program.workers === 0) { server.Start(errHandler, function () { diff --git a/package.json b/package.json index b95b540..1b87f52 100644 --- a/package.json +++ b/package.json @@ -1,5 +1,5 @@ { - "name": "Fast-CacheServer", + "name": "unity-cache-server", "version": "6.0.0", "description": "Unity Cache Server", "main": "main.js", @@ -39,6 +39,7 @@ "config": "^1.27.0", "filesize": "^3.5.11", "fs-extra": "^5.0.0", + "ip": "^1.1.5", "js-yaml": "^3.10.0", "lodash": "^4.17.4", "lokijs": "^1.5.1", diff --git a/test/cache_api.js b/test/cache_api.js index b94f28d..6b310fa 100644 --- a/test/cache_api.js +++ b/test/cache_api.js @@ -8,8 +8,8 @@ const EventEmitter = require('events'); let test_modules = [ { - name: "cache_membuf", - path: "../lib/cache/cache_membuf", + name: "cache_ram", + path: "../lib/cache/cache_ram", options: { cachePath: tmp.tmpNameSync({}), initialPageSize: 10000, diff --git a/test/cache_base.js b/test/cache_base.js index e51a2e1..4c4a2b2 100644 --- a/test/cache_base.js +++ b/test/cache_base.js @@ -35,7 +35,7 @@ describe("Cache: Base Class", () => { let cacheOptions = cache._options; assert(typeof(cacheOptions) === 'object'); assert(cacheOptions.hasOwnProperty('cache_fs')); - assert(cacheOptions.hasOwnProperty('cache_membuf')); + assert(cacheOptions.hasOwnProperty('cache_ram')); }); it("should apply option overrides", () => { diff --git a/test/cache_membuf.js b/test/cache_ram.js similarity index 98% rename from test/cache_membuf.js rename to test/cache_ram.js index e287dc9..cbf16c1 100644 --- a/test/cache_membuf.js +++ b/test/cache_ram.js @@ -1,6 +1,6 @@ const tmp = require('tmp'); const fs = require('fs-extra'); -const Cache = require('../lib/cache/cache_membuf'); +const Cache = require('../lib/cache/cache_ram'); const randomBuffer = require('./test_utils').randomBuffer; const generateCommandData = require('./test_utils').generateCommandData; const path = require('path'); @@ -9,7 +9,7 @@ const assert = require('assert'); const MIN_FILE_SIZE = 1024 * 5; const MAX_FILE_SIZE = MIN_FILE_SIZE; -describe("Cache: Membuf", () => { +describe("Cache: RAM", () => { function dirtyPages() { return cache._pageMeta.chain() @@ -24,7 +24,7 @@ describe("Cache: Membuf", () => { } let opts = { - cachePath: tmp.tmpNameSync({}), + cachePath: tmp.tmpNameSync({}).toString(), initialPageSize: MIN_FILE_SIZE * 2, growPageSize: MIN_FILE_SIZE * 2, minFreeBlockSize: 1024, diff --git a/test/helpers.js b/test/helpers.js index eafd7c6..76bbdde 100644 --- a/test/helpers.js +++ b/test/helpers.js @@ -22,10 +22,19 @@ describe("Helper functions", () => { assert(guid.compare(helpers.GUIDStringToBuffer(guidStr)) === 0); }); + it("should throw an error if the input value is not a string or is the wrong length", () => { assert.throws(helpers.GUIDStringToBuffer.bind(null, null)); assert.throws(helpers.GUIDStringToBuffer.bind(null, '')); assert.throws(helpers.GUIDStringToBuffer.bind(null, guidStr + 'x')); }); }); + + describe("isBuffer", () => { + it("should correctly identify whether or not passed value is a type of Buffer", () => { + assert(helpers.isBuffer(Buffer.from([]))); + assert(!helpers.isBuffer({})); + assert(!helpers.isBuffer(null)); + }) + }); }); \ No newline at end of file diff --git a/test/protocol.js b/test/protocol.js index 3ee2b35..8e6d30c 100644 --- a/test/protocol.js +++ b/test/protocol.js @@ -1,5 +1,4 @@ const assert = require('assert'); -const net = require('net'); const crypto = require('crypto'); const helpers = require('../lib/helpers'); const consts = require('../lib/constants'); @@ -17,7 +16,8 @@ const getClientPromise = require('./test_utils').getClientPromise; const MIN_FILE_SIZE = 1024; const MAX_FILE_SIZE = 1024 * 1024; -const SMALL_PACKET_SIZE = 16; +const SMALL_PACKET_SIZE = 64; +const MED_PACKET_SIZE = 1024; const LARGE_PACKET_SIZE = 1024 * 16; let cache, server, client; @@ -25,8 +25,8 @@ let cache, server, client; let test_modules = [ { tmpDir: tmp.dirSync({unsafeCleanup: true}), - name: "cache_membuf", - path: "../lib/cache/cache_membuf", + name: "cache_ram", + path: "../lib/cache/cache_ram", options: { initialPageSize: MAX_FILE_SIZE * 2, growPageSize: MAX_FILE_SIZE, @@ -154,10 +154,7 @@ describe("Protocol", function() { const tests = [ {ext: 'bin', cmd: cmd.putAsset, packetSize: SMALL_PACKET_SIZE}, - {ext: 'info', cmd: cmd.putInfo, packetSize: SMALL_PACKET_SIZE}, - {ext: 'resource', cmd: cmd.putResource, packetSize: SMALL_PACKET_SIZE}, - {ext: 'bin', cmd: cmd.putAsset, packetSize: LARGE_PACKET_SIZE}, - {ext: 'info', cmd: cmd.putInfo, packetSize: LARGE_PACKET_SIZE}, + {ext: 'info', cmd: cmd.putInfo, packetSize: MED_PACKET_SIZE}, {ext: 'resource', cmd: cmd.putResource, packetSize: LARGE_PACKET_SIZE} ]; @@ -230,11 +227,8 @@ describe("Protocol", function() { }); const tests = [ - {cmd: cmd.getAsset, blob: self.data.bin, type: 'bin', packetSize: 1}, - {cmd: cmd.getInfo, blob: self.data.info, type: 'info', packetSize: 1}, - {cmd: cmd.getResource, blob: self.data.resource, type: 'resource', packetSize: 1}, - {cmd: cmd.getAsset, blob: self.data.bin, type: 'bin', packetSize: LARGE_PACKET_SIZE}, - {cmd: cmd.getInfo, blob: self.data.info, type: 'info', packetSize: LARGE_PACKET_SIZE}, + {cmd: cmd.getAsset, blob: self.data.bin, type: 'bin', packetSize: SMALL_PACKET_SIZE}, + {cmd: cmd.getInfo, blob: self.data.info, type: 'info', packetSize: MED_PACKET_SIZE}, {cmd: cmd.getResource, blob: self.data.resource, type: 'resource', packetSize: LARGE_PACKET_SIZE} ]; diff --git a/test/test_utils.js b/test/test_utils.js index b42d860..57c5566 100644 --- a/test/test_utils.js +++ b/test/test_utils.js @@ -91,20 +91,19 @@ exports.clientWrite = function(client, data, minPacketSize, maxPacketSize) { } function write() { - let ok = true; - while(ok && sentBytes < data.length) { - let len = Math.min(data.length - sentBytes, packetSize()); - ok = client.write(data.slice(sentBytes, sentBytes + len)); + let len = Math.min(data.length - sentBytes, packetSize()); + client.write(data.slice(sentBytes, sentBytes + len), () => { sentBytes += len; - } - if (sentBytes === data.length) { - client.removeListener('drain', write); - setTimeout(resolve, WRITE_RESOLVE_DELAY); - } + if (sentBytes === data.length) { + setTimeout(resolve, WRITE_RESOLVE_DELAY); + } + else { + setImmediate(write); + } + }); } - client.on('drain', write); write(); }); }; From c4b1218ed5625fec622bd2ae5e63b1c6239a07b3 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 23 Jan 2018 12:52:10 -0600 Subject: [PATCH 30/89] =?UTF-8?q?Switch=20to=20nyc=20for=20test=20coverage?= =?UTF-8?q?=20(istanbul=20was=20deprecated=20and=20doesn=E2=80=99t=20suppo?= =?UTF-8?q?rt=20async/await=20syntax)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- package-lock.json | 2350 ++++++++++++++++++++++++++++++++------------- package.json | 9 +- test/protocol.js | 2 +- 3 files changed, 1679 insertions(+), 682 deletions(-) diff --git a/package-lock.json b/package-lock.json index 5ef0a29..52b8eac 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,5 +1,5 @@ { - "name": "Fast-CacheServer", + "name": "unity-cache-server", "version": "6.0.0", "lockfileVersion": 1, "requires": true, @@ -694,785 +694,1781 @@ "version": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" }, + "ip": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.5.tgz", + "integrity": "sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=" + }, "isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" }, - "istanbul": { - "version": "0.4.5", - "resolved": "https://registry.npmjs.org/istanbul/-/istanbul-0.4.5.tgz", - "integrity": "sha1-ZcfXPUxNqE1POsMQuRj7C4Azczs=", + "js-yaml": { + "version": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.10.0.tgz", + "integrity": "sha1-LnhEFka9RoLpY/IrbpKCPDCcYtw=", + "requires": { + "argparse": "https://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz", + "esprima": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz" + } + }, + "json3": { + "version": "https://registry.npmjs.org/json3/-/json3-3.3.2.tgz", + "integrity": "sha1-PAQ0dD35Pi9cQq7nsZvLSDV19OE=", + "dev": true + }, + "json5": { + "version": "https://registry.npmjs.org/json5/-/json5-0.4.0.tgz", + "integrity": "sha1-BUNS5MTIDIbAkjh31EneF2pzLI0=" + }, + "jsonfile": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", + "integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=", + "requires": { + "graceful-fs": "4.1.11" + } + }, + "klaw": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/klaw/-/klaw-2.1.1.tgz", + "integrity": "sha1-QrdolHARacyRD9DRnOZ3tfs3ivE=", + "requires": { + "graceful-fs": "4.1.11" + } + }, + "lodash": { + "version": "4.17.4", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", + "integrity": "sha1-eCA6TRwyiuHYbcpkYONptX9AVa4=" + }, + "lodash._baseassign": { + "version": "https://registry.npmjs.org/lodash._baseassign/-/lodash._baseassign-3.2.0.tgz", + "integrity": "sha1-jDigmVAPIVrQnlnxci/QxSv+Ck4=", "dev": true, "requires": { - "abbrev": "1.0.9", - "async": "1.5.2", - "escodegen": "1.8.1", - "esprima": "2.7.3", - "glob": "5.0.15", - "handlebars": "4.0.10", - "js-yaml": "3.10.0", + "lodash._basecopy": "https://registry.npmjs.org/lodash._basecopy/-/lodash._basecopy-3.0.1.tgz", + "lodash.keys": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-3.1.2.tgz" + } + }, + "lodash._basecopy": { + "version": "https://registry.npmjs.org/lodash._basecopy/-/lodash._basecopy-3.0.1.tgz", + "integrity": "sha1-jaDmqHbPNEwK2KVIghEd08XHyjY=", + "dev": true + }, + "lodash._basecreate": { + "version": "https://registry.npmjs.org/lodash._basecreate/-/lodash._basecreate-3.0.3.tgz", + "integrity": "sha1-G8ZhYU2qf8MRt9A78WgGoCE8+CE=", + "dev": true + }, + "lodash._getnative": { + "version": "https://registry.npmjs.org/lodash._getnative/-/lodash._getnative-3.9.1.tgz", + "integrity": "sha1-VwvH3t5G1hzc3mh9ZdPuy6o6r/U=", + "dev": true + }, + "lodash._isiterateecall": { + "version": "https://registry.npmjs.org/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz", + "integrity": "sha1-UgOte6Ql+uhCRg5pbbnPPmqsBXw=", + "dev": true + }, + "lodash.create": { + "version": "https://registry.npmjs.org/lodash.create/-/lodash.create-3.1.1.tgz", + "integrity": "sha1-1/KEnw29p+BGgruM1yqwIkYd6+c=", + "dev": true, + "requires": { + "lodash._baseassign": "https://registry.npmjs.org/lodash._baseassign/-/lodash._baseassign-3.2.0.tgz", + "lodash._basecreate": "https://registry.npmjs.org/lodash._basecreate/-/lodash._basecreate-3.0.3.tgz", + "lodash._isiterateecall": "https://registry.npmjs.org/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz" + } + }, + "lodash.isarguments": { + "version": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", + "integrity": "sha1-L1c9hcaiQon/AGY7SRwdM4/zRYo=", + "dev": true + }, + "lodash.isarray": { + "version": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-3.0.4.tgz", + "integrity": "sha1-eeTriMNqgSKvhvhEqpvNhRtfu1U=", + "dev": true + }, + "lodash.keys": { + "version": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-3.1.2.tgz", + "integrity": "sha1-TbwEcrFWvlCgsoaFXRvQsMZWCYo=", + "dev": true, + "requires": { + "lodash._getnative": "https://registry.npmjs.org/lodash._getnative/-/lodash._getnative-3.9.1.tgz", + "lodash.isarguments": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", + "lodash.isarray": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-3.0.4.tgz" + } + }, + "lokijs": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/lokijs/-/lokijs-1.5.1.tgz", + "integrity": "sha512-Pj67gdP6CxUPV7AXM/VAnUZNyKR6mx4JxNmZfVG7XeebBZyrd8iLcKxKutc6Z5akJlMb0EeCxPW8/YkCPiMQbw==" + }, + "minimatch": { + "version": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", + "requires": { + "brace-expansion": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz" + } + }, + "minimist": { + "version": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" + }, + "mkdirp": { + "version": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "requires": { + "minimist": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + } + }, + "mocha": { + "version": "https://registry.npmjs.org/mocha/-/mocha-3.5.3.tgz", + "integrity": "sha1-HgSA/jbS2lhY0etqzDhBiybqog0=", + "dev": true, + "requires": { + "browser-stdout": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.0.tgz", + "commander": "2.9.0", + "debug": "https://registry.npmjs.org/debug/-/debug-2.6.8.tgz", + "diff": "https://registry.npmjs.org/diff/-/diff-3.2.0.tgz", + "escape-string-regexp": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "glob": "https://registry.npmjs.org/glob/-/glob-7.1.1.tgz", + "growl": "https://registry.npmjs.org/growl/-/growl-1.9.2.tgz", + "he": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", + "json3": "https://registry.npmjs.org/json3/-/json3-3.3.2.tgz", + "lodash.create": "https://registry.npmjs.org/lodash.create/-/lodash.create-3.1.1.tgz", + "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "supports-color": "https://registry.npmjs.org/supports-color/-/supports-color-3.1.2.tgz" + }, + "dependencies": { + "commander": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.9.0.tgz", + "integrity": "sha1-nJkJQXbhIkDLItbFFGCYQA/g99Q=", + "dev": true, + "requires": { + "graceful-readlink": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz" + } + } + } + }, + "mocha-lcov-reporter": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/mocha-lcov-reporter/-/mocha-lcov-reporter-1.3.0.tgz", + "integrity": "sha1-Rpve9PivyaEWBW8HnfYYLQr7A4Q=", + "dev": true + }, + "ms": { + "version": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + }, + "mute-stream": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=" + }, + "ncp": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ncp/-/ncp-1.0.1.tgz", + "integrity": "sha1-0VNn5cuHQyuhF9K/gP30Wuz7QkY=" + }, + "nyc": { + "version": "11.4.1", + "resolved": "https://registry.npmjs.org/nyc/-/nyc-11.4.1.tgz", + "integrity": "sha512-5eCZpvaksFVjP2rt1r60cfXmt3MUtsQDw8bAzNqNEr4WLvUMLgiVENMf/B9bE9YAX0mGVvaGA3v9IS9ekNqB1Q==", + "dev": true, + "requires": { + "archy": "1.0.0", + "arrify": "1.0.1", + "caching-transform": "1.0.1", + "convert-source-map": "1.5.1", + "debug-log": "1.0.1", + "default-require-extensions": "1.0.0", + "find-cache-dir": "0.1.1", + "find-up": "2.1.0", + "foreground-child": "1.5.6", + "glob": "7.1.2", + "istanbul-lib-coverage": "1.1.1", + "istanbul-lib-hook": "1.1.0", + "istanbul-lib-instrument": "1.9.1", + "istanbul-lib-report": "1.1.2", + "istanbul-lib-source-maps": "1.2.2", + "istanbul-reports": "1.1.3", + "md5-hex": "1.3.0", + "merge-source-map": "1.0.4", + "micromatch": "2.3.11", "mkdirp": "0.5.1", - "nopt": "3.0.6", - "once": "1.4.0", - "resolve": "1.1.7", - "supports-color": "3.2.3", - "which": "1.3.0", - "wordwrap": "1.0.0" + "resolve-from": "2.0.0", + "rimraf": "2.6.2", + "signal-exit": "3.0.2", + "spawn-wrap": "1.4.2", + "test-exclude": "4.1.1", + "yargs": "10.0.3", + "yargs-parser": "8.0.0" }, "dependencies": { - "abbrev": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.9.tgz", - "integrity": "sha1-kbR5JYinc4wl813W9jdSovh3YTU=", + "align-text": { + "version": "0.1.4", + "bundled": true, + "dev": true, + "requires": { + "kind-of": "3.2.2", + "longest": "1.0.1", + "repeat-string": "1.6.1" + } + }, + "amdefine": { + "version": "1.0.1", + "bundled": true, + "dev": true + }, + "ansi-regex": { + "version": "2.1.1", + "bundled": true, + "dev": true + }, + "ansi-styles": { + "version": "2.2.1", + "bundled": true, + "dev": true + }, + "append-transform": { + "version": "0.4.0", + "bundled": true, + "dev": true, + "requires": { + "default-require-extensions": "1.0.0" + } + }, + "archy": { + "version": "1.0.0", + "bundled": true, + "dev": true + }, + "arr-diff": { + "version": "2.0.0", + "bundled": true, + "dev": true, + "requires": { + "arr-flatten": "1.1.0" + } + }, + "arr-flatten": { + "version": "1.1.0", + "bundled": true, + "dev": true + }, + "array-unique": { + "version": "0.2.1", + "bundled": true, + "dev": true + }, + "arrify": { + "version": "1.0.1", + "bundled": true, + "dev": true + }, + "async": { + "version": "1.5.2", + "bundled": true, + "dev": true + }, + "babel-code-frame": { + "version": "6.26.0", + "bundled": true, + "dev": true, + "requires": { + "chalk": "1.1.3", + "esutils": "2.0.2", + "js-tokens": "3.0.2" + } + }, + "babel-generator": { + "version": "6.26.0", + "bundled": true, + "dev": true, + "requires": { + "babel-messages": "6.23.0", + "babel-runtime": "6.26.0", + "babel-types": "6.26.0", + "detect-indent": "4.0.0", + "jsesc": "1.3.0", + "lodash": "4.17.4", + "source-map": "0.5.7", + "trim-right": "1.0.1" + } + }, + "babel-messages": { + "version": "6.23.0", + "bundled": true, + "dev": true, + "requires": { + "babel-runtime": "6.26.0" + } + }, + "babel-runtime": { + "version": "6.26.0", + "bundled": true, + "dev": true, + "requires": { + "core-js": "2.5.3", + "regenerator-runtime": "0.11.1" + } + }, + "babel-template": { + "version": "6.26.0", + "bundled": true, + "dev": true, + "requires": { + "babel-runtime": "6.26.0", + "babel-traverse": "6.26.0", + "babel-types": "6.26.0", + "babylon": "6.18.0", + "lodash": "4.17.4" + } + }, + "babel-traverse": { + "version": "6.26.0", + "bundled": true, + "dev": true, + "requires": { + "babel-code-frame": "6.26.0", + "babel-messages": "6.23.0", + "babel-runtime": "6.26.0", + "babel-types": "6.26.0", + "babylon": "6.18.0", + "debug": "2.6.9", + "globals": "9.18.0", + "invariant": "2.2.2", + "lodash": "4.17.4" + } + }, + "babel-types": { + "version": "6.26.0", + "bundled": true, + "dev": true, + "requires": { + "babel-runtime": "6.26.0", + "esutils": "2.0.2", + "lodash": "4.17.4", + "to-fast-properties": "1.0.3" + } + }, + "babylon": { + "version": "6.18.0", + "bundled": true, + "dev": true + }, + "balanced-match": { + "version": "1.0.0", + "bundled": true, + "dev": true + }, + "brace-expansion": { + "version": "1.1.8", + "bundled": true, + "dev": true, + "requires": { + "balanced-match": "1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "1.8.5", + "bundled": true, + "dev": true, + "requires": { + "expand-range": "1.8.2", + "preserve": "0.2.0", + "repeat-element": "1.1.2" + } + }, + "builtin-modules": { + "version": "1.1.1", + "bundled": true, + "dev": true + }, + "caching-transform": { + "version": "1.0.1", + "bundled": true, + "dev": true, + "requires": { + "md5-hex": "1.3.0", + "mkdirp": "0.5.1", + "write-file-atomic": "1.3.4" + } + }, + "camelcase": { + "version": "1.2.1", + "bundled": true, + "dev": true, + "optional": true + }, + "center-align": { + "version": "0.1.3", + "bundled": true, + "dev": true, + "optional": true, + "requires": { + "align-text": "0.1.4", + "lazy-cache": "1.0.4" + } + }, + "chalk": { + "version": "1.1.3", + "bundled": true, + "dev": true, + "requires": { + "ansi-styles": "2.2.1", + "escape-string-regexp": "1.0.5", + "has-ansi": "2.0.0", + "strip-ansi": "3.0.1", + "supports-color": "2.0.0" + } + }, + "cliui": { + "version": "2.1.0", + "bundled": true, + "dev": true, + "optional": true, + "requires": { + "center-align": "0.1.3", + "right-align": "0.1.3", + "wordwrap": "0.0.2" + }, + "dependencies": { + "wordwrap": { + "version": "0.0.2", + "bundled": true, + "dev": true, + "optional": true + } + } + }, + "code-point-at": { + "version": "1.1.0", + "bundled": true, + "dev": true + }, + "commondir": { + "version": "1.0.1", + "bundled": true, + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "bundled": true, + "dev": true + }, + "convert-source-map": { + "version": "1.5.1", + "bundled": true, + "dev": true + }, + "core-js": { + "version": "2.5.3", + "bundled": true, + "dev": true + }, + "cross-spawn": { + "version": "4.0.2", + "bundled": true, + "dev": true, + "requires": { + "lru-cache": "4.1.1", + "which": "1.3.0" + } + }, + "debug": { + "version": "2.6.9", + "bundled": true, + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "debug-log": { + "version": "1.0.1", + "bundled": true, + "dev": true + }, + "decamelize": { + "version": "1.2.0", + "bundled": true, + "dev": true + }, + "default-require-extensions": { + "version": "1.0.0", + "bundled": true, + "dev": true, + "requires": { + "strip-bom": "2.0.0" + } + }, + "detect-indent": { + "version": "4.0.0", + "bundled": true, + "dev": true, + "requires": { + "repeating": "2.0.1" + } + }, + "error-ex": { + "version": "1.3.1", + "bundled": true, + "dev": true, + "requires": { + "is-arrayish": "0.2.1" + } + }, + "escape-string-regexp": { + "version": "1.0.5", + "bundled": true, + "dev": true + }, + "esutils": { + "version": "2.0.2", + "bundled": true, + "dev": true + }, + "execa": { + "version": "0.7.0", + "bundled": true, + "dev": true, + "requires": { + "cross-spawn": "5.1.0", + "get-stream": "3.0.0", + "is-stream": "1.1.0", + "npm-run-path": "2.0.2", + "p-finally": "1.0.0", + "signal-exit": "3.0.2", + "strip-eof": "1.0.0" + }, + "dependencies": { + "cross-spawn": { + "version": "5.1.0", + "bundled": true, + "dev": true, + "requires": { + "lru-cache": "4.1.1", + "shebang-command": "1.2.0", + "which": "1.3.0" + } + } + } + }, + "expand-brackets": { + "version": "0.1.5", + "bundled": true, + "dev": true, + "requires": { + "is-posix-bracket": "0.1.1" + } + }, + "expand-range": { + "version": "1.8.2", + "bundled": true, + "dev": true, + "requires": { + "fill-range": "2.2.3" + } + }, + "extglob": { + "version": "0.3.2", + "bundled": true, + "dev": true, + "requires": { + "is-extglob": "1.0.0" + } + }, + "filename-regex": { + "version": "2.0.1", + "bundled": true, + "dev": true + }, + "fill-range": { + "version": "2.2.3", + "bundled": true, + "dev": true, + "requires": { + "is-number": "2.1.0", + "isobject": "2.1.0", + "randomatic": "1.1.7", + "repeat-element": "1.1.2", + "repeat-string": "1.6.1" + } + }, + "find-cache-dir": { + "version": "0.1.1", + "bundled": true, + "dev": true, + "requires": { + "commondir": "1.0.1", + "mkdirp": "0.5.1", + "pkg-dir": "1.0.0" + } + }, + "find-up": { + "version": "2.1.0", + "bundled": true, + "dev": true, + "requires": { + "locate-path": "2.0.0" + } + }, + "for-in": { + "version": "1.0.2", + "bundled": true, + "dev": true + }, + "for-own": { + "version": "0.1.5", + "bundled": true, + "dev": true, + "requires": { + "for-in": "1.0.2" + } + }, + "foreground-child": { + "version": "1.5.6", + "bundled": true, + "dev": true, + "requires": { + "cross-spawn": "4.0.2", + "signal-exit": "3.0.2" + } + }, + "fs.realpath": { + "version": "1.0.0", + "bundled": true, + "dev": true + }, + "get-caller-file": { + "version": "1.0.2", + "bundled": true, + "dev": true + }, + "get-stream": { + "version": "3.0.0", + "bundled": true, + "dev": true + }, + "glob": { + "version": "7.1.2", + "bundled": true, + "dev": true, + "requires": { + "fs.realpath": "1.0.0", + "inflight": "1.0.6", + "inherits": "2.0.3", + "minimatch": "3.0.4", + "once": "1.4.0", + "path-is-absolute": "1.0.1" + } + }, + "glob-base": { + "version": "0.3.0", + "bundled": true, + "dev": true, + "requires": { + "glob-parent": "2.0.0", + "is-glob": "2.0.1" + } + }, + "glob-parent": { + "version": "2.0.0", + "bundled": true, + "dev": true, + "requires": { + "is-glob": "2.0.1" + } + }, + "globals": { + "version": "9.18.0", + "bundled": true, + "dev": true + }, + "graceful-fs": { + "version": "4.1.11", + "bundled": true, + "dev": true + }, + "handlebars": { + "version": "4.0.11", + "bundled": true, + "dev": true, + "requires": { + "async": "1.5.2", + "optimist": "0.6.1", + "source-map": "0.4.4", + "uglify-js": "2.8.29" + }, + "dependencies": { + "source-map": { + "version": "0.4.4", + "bundled": true, + "dev": true, + "requires": { + "amdefine": "1.0.1" + } + } + } + }, + "has-ansi": { + "version": "2.0.0", + "bundled": true, + "dev": true, + "requires": { + "ansi-regex": "2.1.1" + } + }, + "has-flag": { + "version": "1.0.0", + "bundled": true, + "dev": true + }, + "hosted-git-info": { + "version": "2.5.0", + "bundled": true, + "dev": true + }, + "imurmurhash": { + "version": "0.1.4", + "bundled": true, + "dev": true + }, + "inflight": { + "version": "1.0.6", + "bundled": true, + "dev": true, + "requires": { + "once": "1.4.0", + "wrappy": "1.0.2" + } + }, + "inherits": { + "version": "2.0.3", + "bundled": true, + "dev": true + }, + "invariant": { + "version": "2.2.2", + "bundled": true, + "dev": true, + "requires": { + "loose-envify": "1.3.1" + } + }, + "invert-kv": { + "version": "1.0.0", + "bundled": true, + "dev": true + }, + "is-arrayish": { + "version": "0.2.1", + "bundled": true, + "dev": true + }, + "is-buffer": { + "version": "1.1.6", + "bundled": true, + "dev": true + }, + "is-builtin-module": { + "version": "1.0.0", + "bundled": true, + "dev": true, + "requires": { + "builtin-modules": "1.1.1" + } + }, + "is-dotfile": { + "version": "1.0.3", + "bundled": true, + "dev": true + }, + "is-equal-shallow": { + "version": "0.1.3", + "bundled": true, + "dev": true, + "requires": { + "is-primitive": "2.0.0" + } + }, + "is-extendable": { + "version": "0.1.1", + "bundled": true, + "dev": true + }, + "is-extglob": { + "version": "1.0.0", + "bundled": true, + "dev": true + }, + "is-finite": { + "version": "1.0.2", + "bundled": true, + "dev": true, + "requires": { + "number-is-nan": "1.0.1" + } + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "bundled": true, + "dev": true, + "requires": { + "number-is-nan": "1.0.1" + } + }, + "is-glob": { + "version": "2.0.1", + "bundled": true, + "dev": true, + "requires": { + "is-extglob": "1.0.0" + } + }, + "is-number": { + "version": "2.1.0", + "bundled": true, + "dev": true, + "requires": { + "kind-of": "3.2.2" + } + }, + "is-posix-bracket": { + "version": "0.1.1", + "bundled": true, + "dev": true + }, + "is-primitive": { + "version": "2.0.0", + "bundled": true, + "dev": true + }, + "is-stream": { + "version": "1.1.0", + "bundled": true, + "dev": true + }, + "is-utf8": { + "version": "0.2.1", + "bundled": true, + "dev": true + }, + "isarray": { + "version": "1.0.0", + "bundled": true, + "dev": true + }, + "isexe": { + "version": "2.0.0", + "bundled": true, + "dev": true + }, + "isobject": { + "version": "2.1.0", + "bundled": true, + "dev": true, + "requires": { + "isarray": "1.0.0" + } + }, + "istanbul-lib-coverage": { + "version": "1.1.1", + "bundled": true, + "dev": true + }, + "istanbul-lib-hook": { + "version": "1.1.0", + "bundled": true, + "dev": true, + "requires": { + "append-transform": "0.4.0" + } + }, + "istanbul-lib-instrument": { + "version": "1.9.1", + "bundled": true, + "dev": true, + "requires": { + "babel-generator": "6.26.0", + "babel-template": "6.26.0", + "babel-traverse": "6.26.0", + "babel-types": "6.26.0", + "babylon": "6.18.0", + "istanbul-lib-coverage": "1.1.1", + "semver": "5.4.1" + } + }, + "istanbul-lib-report": { + "version": "1.1.2", + "bundled": true, + "dev": true, + "requires": { + "istanbul-lib-coverage": "1.1.1", + "mkdirp": "0.5.1", + "path-parse": "1.0.5", + "supports-color": "3.2.3" + }, + "dependencies": { + "supports-color": { + "version": "3.2.3", + "bundled": true, + "dev": true, + "requires": { + "has-flag": "1.0.0" + } + } + } + }, + "istanbul-lib-source-maps": { + "version": "1.2.2", + "bundled": true, + "dev": true, + "requires": { + "debug": "3.1.0", + "istanbul-lib-coverage": "1.1.1", + "mkdirp": "0.5.1", + "rimraf": "2.6.2", + "source-map": "0.5.7" + }, + "dependencies": { + "debug": { + "version": "3.1.0", + "bundled": true, + "dev": true, + "requires": { + "ms": "2.0.0" + } + } + } + }, + "istanbul-reports": { + "version": "1.1.3", + "bundled": true, + "dev": true, + "requires": { + "handlebars": "4.0.11" + } + }, + "js-tokens": { + "version": "3.0.2", + "bundled": true, + "dev": true + }, + "jsesc": { + "version": "1.3.0", + "bundled": true, + "dev": true + }, + "kind-of": { + "version": "3.2.2", + "bundled": true, + "dev": true, + "requires": { + "is-buffer": "1.1.6" + } + }, + "lazy-cache": { + "version": "1.0.4", + "bundled": true, + "dev": true, + "optional": true + }, + "lcid": { + "version": "1.0.0", + "bundled": true, + "dev": true, + "requires": { + "invert-kv": "1.0.0" + } + }, + "load-json-file": { + "version": "1.1.0", + "bundled": true, + "dev": true, + "requires": { + "graceful-fs": "4.1.11", + "parse-json": "2.2.0", + "pify": "2.3.0", + "pinkie-promise": "2.0.1", + "strip-bom": "2.0.0" + } + }, + "locate-path": { + "version": "2.0.0", + "bundled": true, + "dev": true, + "requires": { + "p-locate": "2.0.0", + "path-exists": "3.0.0" + }, + "dependencies": { + "path-exists": { + "version": "3.0.0", + "bundled": true, + "dev": true + } + } + }, + "lodash": { + "version": "4.17.4", + "bundled": true, + "dev": true + }, + "longest": { + "version": "1.0.1", + "bundled": true, + "dev": true + }, + "loose-envify": { + "version": "1.3.1", + "bundled": true, + "dev": true, + "requires": { + "js-tokens": "3.0.2" + } + }, + "lru-cache": { + "version": "4.1.1", + "bundled": true, + "dev": true, + "requires": { + "pseudomap": "1.0.2", + "yallist": "2.1.2" + } + }, + "md5-hex": { + "version": "1.3.0", + "bundled": true, + "dev": true, + "requires": { + "md5-o-matic": "0.1.1" + } + }, + "md5-o-matic": { + "version": "0.1.1", + "bundled": true, + "dev": true + }, + "mem": { + "version": "1.1.0", + "bundled": true, + "dev": true, + "requires": { + "mimic-fn": "1.1.0" + } + }, + "merge-source-map": { + "version": "1.0.4", + "bundled": true, + "dev": true, + "requires": { + "source-map": "0.5.7" + } + }, + "micromatch": { + "version": "2.3.11", + "bundled": true, + "dev": true, + "requires": { + "arr-diff": "2.0.0", + "array-unique": "0.2.1", + "braces": "1.8.5", + "expand-brackets": "0.1.5", + "extglob": "0.3.2", + "filename-regex": "2.0.1", + "is-extglob": "1.0.0", + "is-glob": "2.0.1", + "kind-of": "3.2.2", + "normalize-path": "2.1.1", + "object.omit": "2.0.1", + "parse-glob": "3.0.4", + "regex-cache": "0.4.4" + } + }, + "mimic-fn": { + "version": "1.1.0", + "bundled": true, + "dev": true + }, + "minimatch": { + "version": "3.0.4", + "bundled": true, + "dev": true, + "requires": { + "brace-expansion": "1.1.8" + } + }, + "minimist": { + "version": "0.0.8", + "bundled": true, + "dev": true + }, + "mkdirp": { + "version": "0.5.1", + "bundled": true, + "dev": true, + "requires": { + "minimist": "0.0.8" + } + }, + "ms": { + "version": "2.0.0", + "bundled": true, + "dev": true + }, + "normalize-package-data": { + "version": "2.4.0", + "bundled": true, + "dev": true, + "requires": { + "hosted-git-info": "2.5.0", + "is-builtin-module": "1.0.0", + "semver": "5.4.1", + "validate-npm-package-license": "3.0.1" + } + }, + "normalize-path": { + "version": "2.1.1", + "bundled": true, + "dev": true, + "requires": { + "remove-trailing-separator": "1.1.0" + } + }, + "npm-run-path": { + "version": "2.0.2", + "bundled": true, + "dev": true, + "requires": { + "path-key": "2.0.1" + } + }, + "number-is-nan": { + "version": "1.0.1", + "bundled": true, + "dev": true + }, + "object-assign": { + "version": "4.1.1", + "bundled": true, + "dev": true + }, + "object.omit": { + "version": "2.0.1", + "bundled": true, + "dev": true, + "requires": { + "for-own": "0.1.5", + "is-extendable": "0.1.1" + } + }, + "once": { + "version": "1.4.0", + "bundled": true, + "dev": true, + "requires": { + "wrappy": "1.0.2" + } + }, + "optimist": { + "version": "0.6.1", + "bundled": true, + "dev": true, + "requires": { + "minimist": "0.0.8", + "wordwrap": "0.0.3" + } + }, + "os-homedir": { + "version": "1.0.2", + "bundled": true, "dev": true }, - "async": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz", - "integrity": "sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo=", + "os-locale": { + "version": "2.1.0", + "bundled": true, + "dev": true, + "requires": { + "execa": "0.7.0", + "lcid": "1.0.0", + "mem": "1.1.0" + } + }, + "p-finally": { + "version": "1.0.0", + "bundled": true, + "dev": true + }, + "p-limit": { + "version": "1.1.0", + "bundled": true, "dev": true }, - "escodegen": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.8.1.tgz", - "integrity": "sha1-WltTr0aTEQvrsIZ6o0MN07cKEBg=", + "p-locate": { + "version": "2.0.0", + "bundled": true, "dev": true, "requires": { - "esprima": "2.7.3", - "estraverse": "1.9.3", - "esutils": "2.0.2", - "optionator": "0.8.2", - "source-map": "0.2.0" + "p-limit": "1.1.0" + } + }, + "parse-glob": { + "version": "3.0.4", + "bundled": true, + "dev": true, + "requires": { + "glob-base": "0.3.0", + "is-dotfile": "1.0.3", + "is-extglob": "1.0.0", + "is-glob": "2.0.1" + } + }, + "parse-json": { + "version": "2.2.0", + "bundled": true, + "dev": true, + "requires": { + "error-ex": "1.3.1" + } + }, + "path-exists": { + "version": "2.1.0", + "bundled": true, + "dev": true, + "requires": { + "pinkie-promise": "2.0.1" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "bundled": true, + "dev": true + }, + "path-key": { + "version": "2.0.1", + "bundled": true, + "dev": true + }, + "path-parse": { + "version": "1.0.5", + "bundled": true, + "dev": true + }, + "path-type": { + "version": "1.1.0", + "bundled": true, + "dev": true, + "requires": { + "graceful-fs": "4.1.11", + "pify": "2.3.0", + "pinkie-promise": "2.0.1" + } + }, + "pify": { + "version": "2.3.0", + "bundled": true, + "dev": true + }, + "pinkie": { + "version": "2.0.4", + "bundled": true, + "dev": true + }, + "pinkie-promise": { + "version": "2.0.1", + "bundled": true, + "dev": true, + "requires": { + "pinkie": "2.0.4" + } + }, + "pkg-dir": { + "version": "1.0.0", + "bundled": true, + "dev": true, + "requires": { + "find-up": "1.1.2" }, "dependencies": { - "estraverse": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-1.9.3.tgz", - "integrity": "sha1-r2fy3JIlgkFZUJJgkaQAXSnJu0Q=", - "dev": true - }, - "esutils": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz", - "integrity": "sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs=", - "dev": true - }, - "optionator": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.2.tgz", - "integrity": "sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q=", - "dev": true, - "requires": { - "deep-is": "0.1.3", - "fast-levenshtein": "2.0.6", - "levn": "0.3.0", - "prelude-ls": "1.1.2", - "type-check": "0.3.2", - "wordwrap": "1.0.0" - }, - "dependencies": { - "deep-is": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", - "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=", - "dev": true - }, - "fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", - "dev": true - }, - "levn": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", - "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", - "dev": true, - "requires": { - "prelude-ls": "1.1.2", - "type-check": "0.3.2" - } - }, - "prelude-ls": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", - "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", - "dev": true - }, - "type-check": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", - "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", - "dev": true, - "requires": { - "prelude-ls": "1.1.2" - } - } - } - }, - "source-map": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.2.0.tgz", - "integrity": "sha1-2rc/vPwrqBm03gO9b26qSBZLP50=", + "find-up": { + "version": "1.1.2", + "bundled": true, "dev": true, - "optional": true, "requires": { - "amdefine": "1.0.1" - }, - "dependencies": { - "amdefine": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.1.tgz", - "integrity": "sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU=", - "dev": true, - "optional": true - } + "path-exists": "2.1.0", + "pinkie-promise": "2.0.1" } } } }, - "esprima": { - "version": "2.7.3", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-2.7.3.tgz", - "integrity": "sha1-luO3DVd59q1JzQMmc9HDEnZ7pYE=", + "preserve": { + "version": "0.2.0", + "bundled": true, "dev": true }, - "glob": { - "version": "5.0.15", - "resolved": "https://registry.npmjs.org/glob/-/glob-5.0.15.tgz", - "integrity": "sha1-G8k2ueAvSmA/zCIuz3Yz0wuLk7E=", + "pseudomap": { + "version": "1.0.2", + "bundled": true, + "dev": true + }, + "randomatic": { + "version": "1.1.7", + "bundled": true, "dev": true, "requires": { - "inflight": "1.0.6", - "inherits": "2.0.3", - "minimatch": "3.0.4", - "once": "1.4.0", - "path-is-absolute": "1.0.1" + "is-number": "3.0.0", + "kind-of": "4.0.0" }, "dependencies": { - "inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "dev": true, - "requires": { - "once": "1.4.0", - "wrappy": "1.0.2" - }, - "dependencies": { - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true - } - } - }, - "inherits": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", - "dev": true - }, - "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", + "is-number": { + "version": "3.0.0", + "bundled": true, "dev": true, "requires": { - "brace-expansion": "1.1.8" + "kind-of": "3.2.2" }, "dependencies": { - "brace-expansion": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz", - "integrity": "sha1-wHshHHyVLsH479Uad+8NHTmQopI=", + "kind-of": { + "version": "3.2.2", + "bundled": true, "dev": true, "requires": { - "balanced-match": "1.0.0", - "concat-map": "0.0.1" - }, - "dependencies": { - "balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true - }, - "concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true - } + "is-buffer": "1.1.6" } } } }, - "path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "dev": true + "kind-of": { + "version": "4.0.0", + "bundled": true, + "dev": true, + "requires": { + "is-buffer": "1.1.6" + } } } }, - "handlebars": { - "version": "4.0.10", - "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.0.10.tgz", - "integrity": "sha1-PTDHGLCaPZbyPqTMH0A8TTup/08=", + "read-pkg": { + "version": "1.1.0", + "bundled": true, "dev": true, "requires": { - "async": "1.5.2", - "optimist": "0.6.1", - "source-map": "0.4.4", - "uglify-js": "2.8.29" + "load-json-file": "1.1.0", + "normalize-package-data": "2.4.0", + "path-type": "1.1.0" + } + }, + "read-pkg-up": { + "version": "1.0.1", + "bundled": true, + "dev": true, + "requires": { + "find-up": "1.1.2", + "read-pkg": "1.1.0" }, "dependencies": { - "optimist": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", - "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", + "find-up": { + "version": "1.1.2", + "bundled": true, "dev": true, "requires": { - "minimist": "0.0.10", - "wordwrap": "0.0.3" - }, - "dependencies": { - "minimist": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", - "integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=", - "dev": true - }, - "wordwrap": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", - "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", - "dev": true - } + "path-exists": "2.1.0", + "pinkie-promise": "2.0.1" } + } + } + }, + "regenerator-runtime": { + "version": "0.11.1", + "bundled": true, + "dev": true + }, + "regex-cache": { + "version": "0.4.4", + "bundled": true, + "dev": true, + "requires": { + "is-equal-shallow": "0.1.3" + } + }, + "remove-trailing-separator": { + "version": "1.1.0", + "bundled": true, + "dev": true + }, + "repeat-element": { + "version": "1.1.2", + "bundled": true, + "dev": true + }, + "repeat-string": { + "version": "1.6.1", + "bundled": true, + "dev": true + }, + "repeating": { + "version": "2.0.1", + "bundled": true, + "dev": true, + "requires": { + "is-finite": "1.0.2" + } + }, + "require-directory": { + "version": "2.1.1", + "bundled": true, + "dev": true + }, + "require-main-filename": { + "version": "1.0.1", + "bundled": true, + "dev": true + }, + "resolve-from": { + "version": "2.0.0", + "bundled": true, + "dev": true + }, + "right-align": { + "version": "0.1.3", + "bundled": true, + "dev": true, + "optional": true, + "requires": { + "align-text": "0.1.4" + } + }, + "rimraf": { + "version": "2.6.2", + "bundled": true, + "dev": true, + "requires": { + "glob": "7.1.2" + } + }, + "semver": { + "version": "5.4.1", + "bundled": true, + "dev": true + }, + "set-blocking": { + "version": "2.0.0", + "bundled": true, + "dev": true + }, + "shebang-command": { + "version": "1.2.0", + "bundled": true, + "dev": true, + "requires": { + "shebang-regex": "1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "bundled": true, + "dev": true + }, + "signal-exit": { + "version": "3.0.2", + "bundled": true, + "dev": true + }, + "slide": { + "version": "1.1.6", + "bundled": true, + "dev": true + }, + "source-map": { + "version": "0.5.7", + "bundled": true, + "dev": true + }, + "spawn-wrap": { + "version": "1.4.2", + "bundled": true, + "dev": true, + "requires": { + "foreground-child": "1.5.6", + "mkdirp": "0.5.1", + "os-homedir": "1.0.2", + "rimraf": "2.6.2", + "signal-exit": "3.0.2", + "which": "1.3.0" + } + }, + "spdx-correct": { + "version": "1.0.2", + "bundled": true, + "dev": true, + "requires": { + "spdx-license-ids": "1.2.2" + } + }, + "spdx-expression-parse": { + "version": "1.0.4", + "bundled": true, + "dev": true + }, + "spdx-license-ids": { + "version": "1.2.2", + "bundled": true, + "dev": true + }, + "string-width": { + "version": "2.1.1", + "bundled": true, + "dev": true, + "requires": { + "is-fullwidth-code-point": "2.0.0", + "strip-ansi": "4.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "bundled": true, + "dev": true }, - "source-map": { - "version": "0.4.4", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.4.4.tgz", - "integrity": "sha1-66T12pwNyZneaAMti092FzZSA2s=", - "dev": true, - "requires": { - "amdefine": "1.0.1" - }, - "dependencies": { - "amdefine": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.1.tgz", - "integrity": "sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU=", - "dev": true - } - } + "is-fullwidth-code-point": { + "version": "2.0.0", + "bundled": true, + "dev": true }, - "uglify-js": { - "version": "2.8.29", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.8.29.tgz", - "integrity": "sha1-KcVzMUgFe7Th913zW3qcty5qWd0=", + "strip-ansi": { + "version": "4.0.0", + "bundled": true, "dev": true, - "optional": true, "requires": { - "source-map": "0.5.7", - "uglify-to-browserify": "1.0.2", - "yargs": "3.10.0" - }, - "dependencies": { - "source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true, - "optional": true - }, - "uglify-to-browserify": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz", - "integrity": "sha1-bgkk1r2mta/jSeOabWMoUKD4grc=", - "dev": true, - "optional": true - }, - "yargs": { - "version": "3.10.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-3.10.0.tgz", - "integrity": "sha1-9+572FfdfB0tOMDnTvvWgdFDH9E=", - "dev": true, - "optional": true, - "requires": { - "camelcase": "1.2.1", - "cliui": "2.1.0", - "decamelize": "1.2.0", - "window-size": "0.1.0" - }, - "dependencies": { - "camelcase": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-1.2.1.tgz", - "integrity": "sha1-m7UwTS4LVmmLLHWLCKPqqdqlijk=", - "dev": true, - "optional": true - }, - "cliui": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-2.1.0.tgz", - "integrity": "sha1-S0dXYP+AJkx2LDoXGQMukcf+oNE=", - "dev": true, - "optional": true, - "requires": { - "center-align": "0.1.3", - "right-align": "0.1.3", - "wordwrap": "0.0.2" - }, - "dependencies": { - "center-align": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/center-align/-/center-align-0.1.3.tgz", - "integrity": "sha1-qg0yYptu6XIgBBHL1EYckHvCt60=", - "dev": true, - "optional": true, - "requires": { - "align-text": "0.1.4", - "lazy-cache": "1.0.4" - }, - "dependencies": { - "align-text": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/align-text/-/align-text-0.1.4.tgz", - "integrity": "sha1-DNkKVhCT810KmSVsIrcGlDP60Rc=", - "dev": true, - "optional": true, - "requires": { - "kind-of": "3.2.2", - "longest": "1.0.1", - "repeat-string": "1.6.1" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "optional": true, - "requires": { - "is-buffer": "1.1.5" - }, - "dependencies": { - "is-buffer": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.5.tgz", - "integrity": "sha1-Hzsm72E7IUuIy8ojzGwB2Hlh7sw=", - "dev": true, - "optional": true - } - } - }, - "longest": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz", - "integrity": "sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc=", - "dev": true, - "optional": true - }, - "repeat-string": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", - "dev": true, - "optional": true - } - } - }, - "lazy-cache": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-1.0.4.tgz", - "integrity": "sha1-odePw6UEdMuAhF07O24dpJpEbo4=", - "dev": true, - "optional": true - } - } - }, - "right-align": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/right-align/-/right-align-0.1.3.tgz", - "integrity": "sha1-YTObci/mo1FWiSENJOFMlhSGE+8=", - "dev": true, - "optional": true, - "requires": { - "align-text": "0.1.4" - }, - "dependencies": { - "align-text": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/align-text/-/align-text-0.1.4.tgz", - "integrity": "sha1-DNkKVhCT810KmSVsIrcGlDP60Rc=", - "dev": true, - "optional": true, - "requires": { - "kind-of": "3.2.2", - "longest": "1.0.1", - "repeat-string": "1.6.1" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "optional": true, - "requires": { - "is-buffer": "1.1.5" - }, - "dependencies": { - "is-buffer": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.5.tgz", - "integrity": "sha1-Hzsm72E7IUuIy8ojzGwB2Hlh7sw=", - "dev": true, - "optional": true - } - } - }, - "longest": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz", - "integrity": "sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc=", - "dev": true, - "optional": true - }, - "repeat-string": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", - "dev": true, - "optional": true - } - } - } - } - }, - "wordwrap": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz", - "integrity": "sha1-t5Zpu0LstAn4PVg8rVLKF+qhZD8=", - "dev": true, - "optional": true - } - } - }, - "decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", - "dev": true, - "optional": true - }, - "window-size": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz", - "integrity": "sha1-VDjNLqk7IC76Ohn+iIeu58lPnJ0=", - "dev": true, - "optional": true - } - } - } + "ansi-regex": "3.0.0" } } } }, - "js-yaml": { - "version": "3.10.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.10.0.tgz", - "integrity": "sha1-LnhEFka9RoLpY/IrbpKCPDCcYtw=", + "strip-ansi": { + "version": "3.0.1", + "bundled": true, + "dev": true, + "requires": { + "ansi-regex": "2.1.1" + } + }, + "strip-bom": { + "version": "2.0.0", + "bundled": true, + "dev": true, + "requires": { + "is-utf8": "0.2.1" + } + }, + "strip-eof": { + "version": "1.0.0", + "bundled": true, + "dev": true + }, + "supports-color": { + "version": "2.0.0", + "bundled": true, + "dev": true + }, + "test-exclude": { + "version": "4.1.1", + "bundled": true, + "dev": true, + "requires": { + "arrify": "1.0.1", + "micromatch": "2.3.11", + "object-assign": "4.1.1", + "read-pkg-up": "1.0.1", + "require-main-filename": "1.0.1" + } + }, + "to-fast-properties": { + "version": "1.0.3", + "bundled": true, + "dev": true + }, + "trim-right": { + "version": "1.0.1", + "bundled": true, + "dev": true + }, + "uglify-js": { + "version": "2.8.29", + "bundled": true, "dev": true, + "optional": true, "requires": { - "argparse": "1.0.9", - "esprima": "4.0.0" + "source-map": "0.5.7", + "uglify-to-browserify": "1.0.2", + "yargs": "3.10.0" }, "dependencies": { - "argparse": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz", - "integrity": "sha1-c9g7wmP4bpf4zE9rrhsOkKfSLIY=", + "yargs": { + "version": "3.10.0", + "bundled": true, "dev": true, + "optional": true, "requires": { - "sprintf-js": "1.0.3" - }, - "dependencies": { - "sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", - "dev": true - } + "camelcase": "1.2.1", + "cliui": "2.1.0", + "decamelize": "1.2.0", + "window-size": "0.1.0" } - }, - "esprima": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz", - "integrity": "sha1-RJnt3NERDgshi6zy+n9/WfVcqAQ=", - "dev": true } } }, - "mkdirp": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "uglify-to-browserify": { + "version": "1.0.2", + "bundled": true, + "dev": true, + "optional": true + }, + "validate-npm-package-license": { + "version": "3.0.1", + "bundled": true, "dev": true, "requires": { - "minimist": "0.0.8" - }, - "dependencies": { - "minimist": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", - "dev": true - } + "spdx-correct": "1.0.2", + "spdx-expression-parse": "1.0.4" } }, - "nopt": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.6.tgz", - "integrity": "sha1-xkZdvwirzU2zWTF/eaxopkayj/k=", + "which": { + "version": "1.3.0", + "bundled": true, "dev": true, "requires": { - "abbrev": "1.0.9" + "isexe": "2.0.0" } }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "which-module": { + "version": "2.0.0", + "bundled": true, + "dev": true + }, + "window-size": { + "version": "0.1.0", + "bundled": true, + "dev": true, + "optional": true + }, + "wordwrap": { + "version": "0.0.3", + "bundled": true, + "dev": true + }, + "wrap-ansi": { + "version": "2.1.0", + "bundled": true, "dev": true, "requires": { - "wrappy": "1.0.2" + "string-width": "1.0.2", + "strip-ansi": "3.0.1" }, "dependencies": { - "wrappy": { + "string-width": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true + "bundled": true, + "dev": true, + "requires": { + "code-point-at": "1.1.0", + "is-fullwidth-code-point": "1.0.0", + "strip-ansi": "3.0.1" + } } } }, - "resolve": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.1.7.tgz", - "integrity": "sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs=", + "wrappy": { + "version": "1.0.2", + "bundled": true, "dev": true }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", + "write-file-atomic": { + "version": "1.3.4", + "bundled": true, "dev": true, "requires": { - "has-flag": "1.0.0" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=", - "dev": true - } + "graceful-fs": "4.1.11", + "imurmurhash": "0.1.4", + "slide": "1.1.6" } }, - "which": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.0.tgz", - "integrity": "sha1-/wS9/AEO5UfXgL7DjhrBwnd9JTo=", + "y18n": { + "version": "3.2.1", + "bundled": true, + "dev": true + }, + "yallist": { + "version": "2.1.2", + "bundled": true, + "dev": true + }, + "yargs": { + "version": "10.0.3", + "bundled": true, "dev": true, "requires": { - "isexe": "2.0.0" + "cliui": "3.2.0", + "decamelize": "1.2.0", + "find-up": "2.1.0", + "get-caller-file": "1.0.2", + "os-locale": "2.1.0", + "require-directory": "2.1.1", + "require-main-filename": "1.0.1", + "set-blocking": "2.0.0", + "string-width": "2.1.1", + "which-module": "2.0.0", + "y18n": "3.2.1", + "yargs-parser": "8.0.0" }, "dependencies": { - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true + "cliui": { + "version": "3.2.0", + "bundled": true, + "dev": true, + "requires": { + "string-width": "1.0.2", + "strip-ansi": "3.0.1", + "wrap-ansi": "2.1.0" + }, + "dependencies": { + "string-width": { + "version": "1.0.2", + "bundled": true, + "dev": true, + "requires": { + "code-point-at": "1.1.0", + "is-fullwidth-code-point": "1.0.0", + "strip-ansi": "3.0.1" + } + } + } } } }, - "wordwrap": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", - "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=", - "dev": true - } - } - }, - "js-yaml": { - "version": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.10.0.tgz", - "integrity": "sha1-LnhEFka9RoLpY/IrbpKCPDCcYtw=", - "requires": { - "argparse": "https://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz", - "esprima": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz" - } - }, - "json3": { - "version": "https://registry.npmjs.org/json3/-/json3-3.3.2.tgz", - "integrity": "sha1-PAQ0dD35Pi9cQq7nsZvLSDV19OE=", - "dev": true - }, - "json5": { - "version": "https://registry.npmjs.org/json5/-/json5-0.4.0.tgz", - "integrity": "sha1-BUNS5MTIDIbAkjh31EneF2pzLI0=" - }, - "jsonfile": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", - "integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=", - "requires": { - "graceful-fs": "4.1.11" - } - }, - "lodash": { - "version": "4.17.4", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", - "integrity": "sha1-eCA6TRwyiuHYbcpkYONptX9AVa4=" - }, - "lodash._baseassign": { - "version": "https://registry.npmjs.org/lodash._baseassign/-/lodash._baseassign-3.2.0.tgz", - "integrity": "sha1-jDigmVAPIVrQnlnxci/QxSv+Ck4=", - "dev": true, - "requires": { - "lodash._basecopy": "https://registry.npmjs.org/lodash._basecopy/-/lodash._basecopy-3.0.1.tgz", - "lodash.keys": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-3.1.2.tgz" - } - }, - "lodash._basecopy": { - "version": "https://registry.npmjs.org/lodash._basecopy/-/lodash._basecopy-3.0.1.tgz", - "integrity": "sha1-jaDmqHbPNEwK2KVIghEd08XHyjY=", - "dev": true - }, - "lodash._basecreate": { - "version": "https://registry.npmjs.org/lodash._basecreate/-/lodash._basecreate-3.0.3.tgz", - "integrity": "sha1-G8ZhYU2qf8MRt9A78WgGoCE8+CE=", - "dev": true - }, - "lodash._getnative": { - "version": "https://registry.npmjs.org/lodash._getnative/-/lodash._getnative-3.9.1.tgz", - "integrity": "sha1-VwvH3t5G1hzc3mh9ZdPuy6o6r/U=", - "dev": true - }, - "lodash._isiterateecall": { - "version": "https://registry.npmjs.org/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz", - "integrity": "sha1-UgOte6Ql+uhCRg5pbbnPPmqsBXw=", - "dev": true - }, - "lodash.create": { - "version": "https://registry.npmjs.org/lodash.create/-/lodash.create-3.1.1.tgz", - "integrity": "sha1-1/KEnw29p+BGgruM1yqwIkYd6+c=", - "dev": true, - "requires": { - "lodash._baseassign": "https://registry.npmjs.org/lodash._baseassign/-/lodash._baseassign-3.2.0.tgz", - "lodash._basecreate": "https://registry.npmjs.org/lodash._basecreate/-/lodash._basecreate-3.0.3.tgz", - "lodash._isiterateecall": "https://registry.npmjs.org/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz" - } - }, - "lodash.isarguments": { - "version": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", - "integrity": "sha1-L1c9hcaiQon/AGY7SRwdM4/zRYo=", - "dev": true - }, - "lodash.isarray": { - "version": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-3.0.4.tgz", - "integrity": "sha1-eeTriMNqgSKvhvhEqpvNhRtfu1U=", - "dev": true - }, - "lodash.keys": { - "version": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-3.1.2.tgz", - "integrity": "sha1-TbwEcrFWvlCgsoaFXRvQsMZWCYo=", - "dev": true, - "requires": { - "lodash._getnative": "https://registry.npmjs.org/lodash._getnative/-/lodash._getnative-3.9.1.tgz", - "lodash.isarguments": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", - "lodash.isarray": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-3.0.4.tgz" - } - }, - "lokijs": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/lokijs/-/lokijs-1.5.1.tgz", - "integrity": "sha512-Pj67gdP6CxUPV7AXM/VAnUZNyKR6mx4JxNmZfVG7XeebBZyrd8iLcKxKutc6Z5akJlMb0EeCxPW8/YkCPiMQbw==" - }, - "minimatch": { - "version": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", - "requires": { - "brace-expansion": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz" - } - }, - "minimist": { - "version": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" - }, - "mkdirp": { - "version": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", - "requires": { - "minimist": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" - } - }, - "mocha": { - "version": "https://registry.npmjs.org/mocha/-/mocha-3.5.3.tgz", - "integrity": "sha1-HgSA/jbS2lhY0etqzDhBiybqog0=", - "dev": true, - "requires": { - "browser-stdout": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.0.tgz", - "commander": "2.9.0", - "debug": "https://registry.npmjs.org/debug/-/debug-2.6.8.tgz", - "diff": "https://registry.npmjs.org/diff/-/diff-3.2.0.tgz", - "escape-string-regexp": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "glob": "https://registry.npmjs.org/glob/-/glob-7.1.1.tgz", - "growl": "https://registry.npmjs.org/growl/-/growl-1.9.2.tgz", - "he": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", - "json3": "https://registry.npmjs.org/json3/-/json3-3.3.2.tgz", - "lodash.create": "https://registry.npmjs.org/lodash.create/-/lodash.create-3.1.1.tgz", - "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "supports-color": "https://registry.npmjs.org/supports-color/-/supports-color-3.1.2.tgz" - }, - "dependencies": { - "commander": { - "version": "2.9.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.9.0.tgz", - "integrity": "sha1-nJkJQXbhIkDLItbFFGCYQA/g99Q=", + "yargs-parser": { + "version": "8.0.0", + "bundled": true, "dev": true, "requires": { - "graceful-readlink": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz" + "camelcase": "4.1.0" + }, + "dependencies": { + "camelcase": { + "version": "4.1.0", + "bundled": true, + "dev": true + } } } } }, - "mocha-lcov-reporter": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/mocha-lcov-reporter/-/mocha-lcov-reporter-1.3.0.tgz", - "integrity": "sha1-Rpve9PivyaEWBW8HnfYYLQr7A4Q=", - "dev": true - }, - "ms": { - "version": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true - }, - "mute-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", - "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=" - }, - "ncp": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/ncp/-/ncp-1.0.1.tgz", - "integrity": "sha1-0VNn5cuHQyuhF9K/gP30Wuz7QkY=" - }, "once": { "version": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", diff --git a/package.json b/package.json index 1b87f52..22cfeec 100644 --- a/package.json +++ b/package.json @@ -10,10 +10,10 @@ "test": "test" }, "scripts": { - "test": "mocha test", - "coverage": "istanbul cover ./node_modules/mocha/bin/_mocha; open coverage/lcov-report/index.html", + "test": "nyc mocha", + "coverage": "nyc npm test && nyc report --reporter=lcov; open coverage/lcov-report/index.html", "start": "node main.js", - "coveralls": "istanbul cover ./node_modules/mocha/bin/_mocha --report lcovonly -- -R spec && cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js && rm -rf ./coverage" + "coveralls": "nyc report --reporter=lcovonly && cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js && rm -rf ./coverage" }, "repository": { "type": "git", @@ -28,9 +28,9 @@ "homepage": "https://github.com/Unity-Technologies/unity-cache-server#readme", "devDependencies": { "coveralls": "^3.0.0", - "istanbul": "^0.4.5", "mocha": "^3.5.3", "mocha-lcov-reporter": "^1.3.0", + "nyc": "^11.4.1", "tmp": "0.0.33" }, "dependencies": { @@ -41,6 +41,7 @@ "fs-extra": "^5.0.0", "ip": "^1.1.5", "js-yaml": "^3.10.0", + "klaw": "^2.1.1", "lodash": "^4.17.4", "lokijs": "^1.5.1", "prompt": "^1.0.0", diff --git a/test/protocol.js b/test/protocol.js index 8e6d30c..8a7ea17 100644 --- a/test/protocol.js +++ b/test/protocol.js @@ -172,7 +172,7 @@ describe("Protocol", function() { }); }); - it("should replace an existing file with the same guid and hash ", () => { + it("should replace an existing file with the same guid and hash", () => { const asset = Buffer.from(crypto.randomBytes(self.data.bin.length).toString('ascii'), 'ascii'); const buf = Buffer.from( From 4893250aa65ea7f55f6c2806399850492120c5fa Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 23 Jan 2018 12:52:34 -0600 Subject: [PATCH 31/89] publish lib/index.js as the main include --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 22cfeec..e89b28b 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "unity-cache-server", "version": "6.0.0", "description": "Unity Cache Server", - "main": "main.js", + "main": "lib/index.js", "engines": { "node": "^8.9.1" }, From 00f5b9790d4ad379f747f2490d691b5c4c26699b Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 23 Jan 2018 13:00:23 -0600 Subject: [PATCH 32/89] Increase timeout for PUT tests --- test/protocol.js | 1 + 1 file changed, 1 insertion(+) diff --git a/test/protocol.js b/test/protocol.js index 8a7ea17..6bb260c 100644 --- a/test/protocol.js +++ b/test/protocol.js @@ -125,6 +125,7 @@ describe("Protocol", function() { describe("PUT requests", function () { this.slow(5000); + this.timeout(5000); const self = this; From 8244727f349ff7063397a8a327009181f1b9e9f0 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 23 Jan 2018 14:51:47 -0600 Subject: [PATCH 33/89] basic test coverage for the transaction_mirror module --- lib/server/transaction_mirror.js | 4 +- test/transaction_mirror.js | 76 ++++++++++++++++++++++++++++++++ 2 files changed, 78 insertions(+), 2 deletions(-) create mode 100644 test/transaction_mirror.js diff --git a/lib/server/transaction_mirror.js b/lib/server/transaction_mirror.js index 7a61d91..ed1296f 100644 --- a/lib/server/transaction_mirror.js +++ b/lib/server/transaction_mirror.js @@ -21,6 +21,7 @@ class TransactionMirror { this._cache = cache; this._queue = []; this._processing = false; + this._queueProcessDelay = TransactionMirror.options.queueProcessDelay || PROCESS_DELAY_MS; const address = connectOptions.address; const port = connectOptions.port; @@ -85,8 +86,7 @@ class TransactionMirror { if(!this._processing) { this._processing = true; - let delay = TransactionMirror.options.queueProcessDelay || PROCESS_DELAY_MS; - setTimeout(this._processQueue.bind(this), delay); + setTimeout(this._processQueue.bind(this), this._queueProcessDelay); } } } diff --git a/test/transaction_mirror.js b/test/transaction_mirror.js new file mode 100644 index 0000000..711f715 --- /dev/null +++ b/test/transaction_mirror.js @@ -0,0 +1,76 @@ +const { Server, CacheRAM } = require('../lib'); +const TransactionMirror = require('../lib/server/transaction_mirror'); +const tmp = require('tmp'); +const { generateCommandData, sleep } = require('./test_utils'); +const assert = require('assert'); + +let cacheOpts = { + cachePath: tmp.tmpNameSync({}).toString(), + initialPageSize: 10 * 1024, + growPageSize: 10 * 1024, + minFreeBlockSize: 1024, + persistenceOptions: { + autosave: false + } +}; + +describe("TransactionMirror", () => { + + before(async () => { + this.fileData = generateCommandData(1024, 1024); + + this.sourceCache = new CacheRAM(); + this.targetCache = new CacheRAM(); + await this.sourceCache.init(cacheOpts); + await this.targetCache.init(cacheOpts); + + this.targetServer = new Server(this.targetCache, {port: 0}); + + let self = this; + return new Promise((resolve, reject) => { + self.targetServer.Start(err => reject(err), () => { + let opts = { host: 'localhost', port: self.targetServer.port }; + self.mirror = new TransactionMirror(opts, self.sourceCache); + self.mirror._queueProcessDelay = 0; + resolve(); + }); + }); + }); + + it("should mirror all queued transactions to the target Cache Server", async () => { + this.sourceCache._addFileToCache('i', this.fileData.guid, this.fileData.hash, this.fileData.info); + this.sourceCache._addFileToCache('a', this.fileData.guid, this.fileData.hash, this.fileData.bin); + this.sourceCache._addFileToCache('r', this.fileData.guid, this.fileData.hash, this.fileData.resource); + + const trxMock = { + guid: this.fileData.guid, + hash: this.fileData.hash, + manifest: ['i', 'a', 'r'] + }; + + this.mirror.queueTransaction(trxMock); + await sleep(50); + + let info = await this.targetCache.getFileInfo('i', this.fileData.guid, this.fileData.hash); + assert(info && info.size === this.fileData.info.length); + + info = await this.targetCache.getFileInfo('r', this.fileData.guid, this.fileData.hash); + assert(info && info.size === this.fileData.resource.length); + + info = await this.targetCache.getFileInfo('a', this.fileData.guid, this.fileData.hash); + assert(info && info.size === this.fileData.bin.length); + }); + + describe("queueTransaction", () => { + it("should not queue an empty transaction for mirroring", () => { + this.mirror.queueTransaction({manifest: []}); + assert(this.mirror._queue.length === 0); + }); + }); + + describe("get address", () => { + it("should return the address of the mirror host", () => { + assert(this.mirror.address === "localhost"); + }); + }); +}); \ No newline at end of file From 9cca86d6aade4eb3e5eb297047292299c19bbb0f Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 23 Jan 2018 15:12:23 -0600 Subject: [PATCH 34/89] Improve transaction mirror test by queuing more than one transaction back to back --- test/cache_ram.js | 7 +------ test/test_utils.js | 6 ++++++ test/transaction_mirror.js | 38 +++++++++++++++++++------------------- 3 files changed, 26 insertions(+), 25 deletions(-) diff --git a/test/cache_ram.js b/test/cache_ram.js index cbf16c1..3fcdcd5 100644 --- a/test/cache_ram.js +++ b/test/cache_ram.js @@ -3,6 +3,7 @@ const fs = require('fs-extra'); const Cache = require('../lib/cache/cache_ram'); const randomBuffer = require('./test_utils').randomBuffer; const generateCommandData = require('./test_utils').generateCommandData; +const writeFileDataToCache = require('./test_utils').writeFileDataToCache; const path = require('path'); const assert = require('assert'); @@ -17,12 +18,6 @@ describe("Cache: RAM", () => { .map(page => page.index); } - function writeFileDataToCache(fileData) { - cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); - cache._addFileToCache('a', fileData.guid, fileData.hash, fileData.bin); - cache._addFileToCache('r', fileData.guid, fileData.hash, fileData.resource); - } - let opts = { cachePath: tmp.tmpNameSync({}).toString(), initialPageSize: MIN_FILE_SIZE * 2, diff --git a/test/test_utils.js b/test/test_utils.js index 57c5566..1f12a24 100644 --- a/test/test_utils.js +++ b/test/test_utils.js @@ -31,6 +31,12 @@ exports.generateCommandData = function(minSize, maxSize) { } }; +exports.writeFileDataToCache = function(cache, fileData) { + cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); + cache._addFileToCache('a', fileData.guid, fileData.hash, fileData.bin); + cache._addFileToCache('r', fileData.guid, fileData.hash, fileData.resource); +}; + exports.encodeCommand = function(command, guid, hash, blob) { if(blob) diff --git a/test/transaction_mirror.js b/test/transaction_mirror.js index 711f715..20bcf88 100644 --- a/test/transaction_mirror.js +++ b/test/transaction_mirror.js @@ -1,7 +1,7 @@ const { Server, CacheRAM } = require('../lib'); const TransactionMirror = require('../lib/server/transaction_mirror'); const tmp = require('tmp'); -const { generateCommandData, sleep } = require('./test_utils'); +const { generateCommandData, sleep, writeFileDataToCache } = require('./test_utils'); const assert = require('assert'); let cacheOpts = { @@ -17,8 +17,6 @@ let cacheOpts = { describe("TransactionMirror", () => { before(async () => { - this.fileData = generateCommandData(1024, 1024); - this.sourceCache = new CacheRAM(); this.targetCache = new CacheRAM(); await this.sourceCache.init(cacheOpts); @@ -31,34 +29,36 @@ describe("TransactionMirror", () => { self.targetServer.Start(err => reject(err), () => { let opts = { host: 'localhost', port: self.targetServer.port }; self.mirror = new TransactionMirror(opts, self.sourceCache); - self.mirror._queueProcessDelay = 0; + self.mirror._queueProcessDelay = 1; resolve(); }); }); }); it("should mirror all queued transactions to the target Cache Server", async () => { - this.sourceCache._addFileToCache('i', this.fileData.guid, this.fileData.hash, this.fileData.info); - this.sourceCache._addFileToCache('a', this.fileData.guid, this.fileData.hash, this.fileData.bin); - this.sourceCache._addFileToCache('r', this.fileData.guid, this.fileData.hash, this.fileData.resource); + let fileData = [ + generateCommandData(1024, 1024), + generateCommandData(1024, 1024) + ]; - const trxMock = { - guid: this.fileData.guid, - hash: this.fileData.hash, - manifest: ['i', 'a', 'r'] - }; + fileData.forEach(d => { + writeFileDataToCache(this.sourceCache, d); + const trxMock = { guid: d.guid, hash: d.hash, manifest: ['i', 'a', 'r'] }; + this.mirror.queueTransaction(trxMock); + }); - this.mirror.queueTransaction(trxMock); await sleep(50); - let info = await this.targetCache.getFileInfo('i', this.fileData.guid, this.fileData.hash); - assert(info && info.size === this.fileData.info.length); + fileData.forEach(async d => { + let info = await this.targetCache.getFileInfo('i', d.guid, d.hash); + assert(info && info.size === d.info.length); - info = await this.targetCache.getFileInfo('r', this.fileData.guid, this.fileData.hash); - assert(info && info.size === this.fileData.resource.length); + info = await this.targetCache.getFileInfo('r', d.guid, d.hash); + assert(info && info.size === d.resource.length); - info = await this.targetCache.getFileInfo('a', this.fileData.guid, this.fileData.hash); - assert(info && info.size === this.fileData.bin.length); + info = await this.targetCache.getFileInfo('a', d.guid, d.hash); + assert(info && info.size === d.bin.length); + }); }); describe("queueTransaction", () => { From f4da9d49ddbe2d488c59fbb924291c04946c5bbb Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 23 Jan 2018 15:16:01 -0600 Subject: [PATCH 35/89] Fix broken tests --- test/cache_ram.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/cache_ram.js b/test/cache_ram.js index 3fcdcd5..d0da7ee 100644 --- a/test/cache_ram.js +++ b/test/cache_ram.js @@ -111,7 +111,7 @@ describe("Cache: RAM", () => { beforeEach(() => { cache = new Cache(); - return cache.init(opts).then(() => writeFileDataToCache(fileData)); + return cache.init(opts).then(() => writeFileDataToCache(cache, fileData)); }); afterEach(() => { @@ -151,7 +151,7 @@ describe("Cache: RAM", () => { beforeEach(() => { cache = new Cache(); return cache.init(opts) - .then(() => writeFileDataToCache(fileData)) + .then(() => writeFileDataToCache(cache, fileData)) .then(() => cache._serialize()); }); From dedc8617a2f2e3b7cd38eb41e82678784ef62e07 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 23 Jan 2018 17:55:12 -0600 Subject: [PATCH 36/89] Refactored server to use promises instead of callbacks, for consistency with the rest of the API --- lib/server/server.js | 21 ++++++++++++--------- main.js | 8 ++++---- test/protocol.js | 14 ++++++-------- test/server.js | 8 +++----- test/transaction_mirror.js | 14 ++++---------- 5 files changed, 29 insertions(+), 36 deletions(-) diff --git a/lib/server/server.js b/lib/server/server.js index caa6227..192aaa2 100644 --- a/lib/server/server.js +++ b/lib/server/server.js @@ -53,10 +53,10 @@ class CacheServer { * @param errCallback error callback (optional) * @param callback */ - Start(errCallback, callback) { + start(errCallback) { const self = this; - let server = net.createServer(socket => { + this._server = net.createServer(socket => { helpers.log(consts.LOG_TEST, `${socket.remoteAddress}:${socket.remotePort} connected.`); const cmdProc = new CommandProcessor(self.cache); @@ -81,22 +81,25 @@ class CacheServer { .pipe(socket); // Connect back to socket to send files }); - server.on('error', err => { + this._server.on('error', err => { if (err.code === 'EADDRINUSE') { helpers.log(consts.LOG_ERR, `Port ${self.port} is already in use...`); if (errCallback && typeof(errCallback === 'function')) { errCallback(err); } } }); - server.listen(this.port, () => { - if(callback && typeof(callback) === 'function') { callback(); } + return new Promise(resolve => { + this._server.listen(this.port, () => resolve()); }); - - this._server = server; }; - Stop() { - this._server.close(); + stop() { + return new Promise((resolve, reject) => { + this._server.close(err => { + if(err) reject(err); + else resolve(); + }); + }); } } diff --git a/main.js b/main.js index 9b4c6be..caa30ca 100644 --- a/main.js +++ b/main.js @@ -122,7 +122,7 @@ Cache.init(cacheOpts) helpers.log(consts.LOG_INFO, `Cache Server version ${VERSION}; Cache module ${program.cacheModule}`); if(program.workers === 0) { - server.Start(errHandler, function () { + server.start(errHandler).then(() => { helpers.log(consts.LOG_INFO, `Cache Server ready on port ${server.port}`); startPrompt(); }); @@ -134,7 +134,7 @@ Cache.init(cacheOpts) } } else { - server.Start(errHandler, function () { + server.start(errHandler).then(() => { helpers.log(consts.LOG_INFO, `Cache Server worker ${cluster.worker.id} ready on port ${server.port}`); }); } @@ -156,7 +156,7 @@ function startPrompt() { } else { helpers.log(consts.LOG_ERR, err); - server.Stop(); + server.stop(); process.exit(1); } } @@ -166,7 +166,7 @@ function startPrompt() { case 'q': helpers.log(consts.LOG_INFO, "Shutting down ..."); Cache.shutdown().then(() => { - server.Stop(); + server.stop(); process.exit(0); }); break; diff --git a/test/protocol.js b/test/protocol.js index 6bb260c..e2d26b3 100644 --- a/test/protocol.js +++ b/test/protocol.js @@ -52,24 +52,22 @@ describe("Protocol", function() { helpers.SetLogger(function() {}); }); - before(function (done) { + before(function () { /** @type {CacheBase} **/ let CacheModule = require(module.path); cache = new CacheModule(); module.options.cachePath = module.tmpDir.name; - cache.init(module.options) - .then(() => { + return cache.init(module.options) + .then(() => { server = new CacheServer(cache, {port: 0}); - server.Start(err => { - assert(!err, "Cache Server reported error! " + err); - }, done); - }); + }) + .then(() => server.start(err => assert(!err, `Cache Server reported error! ${err}`))); }); after(function() { - server.Stop(); + server.stop(); module.tmpDir.removeCallback(); }); diff --git a/test/server.js b/test/server.js index 3bd8a28..753341e 100644 --- a/test/server.js +++ b/test/server.js @@ -18,14 +18,12 @@ describe("Server common", function() { helpers.SetLogger(() => {}); }); - before(function (done) { - server.Start(function (err) { - assert(!err, "Cache Server reported error! " + err); - }, done); + before(function () { + return server.start(err => assert(!err, `Cache Server reported error! ${err}`)); }); after(function() { - server.Stop(); + server.stop(); }); describe("Version check", function () { diff --git a/test/transaction_mirror.js b/test/transaction_mirror.js index 20bcf88..6eb61da 100644 --- a/test/transaction_mirror.js +++ b/test/transaction_mirror.js @@ -23,16 +23,10 @@ describe("TransactionMirror", () => { await this.targetCache.init(cacheOpts); this.targetServer = new Server(this.targetCache, {port: 0}); - - let self = this; - return new Promise((resolve, reject) => { - self.targetServer.Start(err => reject(err), () => { - let opts = { host: 'localhost', port: self.targetServer.port }; - self.mirror = new TransactionMirror(opts, self.sourceCache); - self.mirror._queueProcessDelay = 1; - resolve(); - }); - }); + await this.targetServer.start(err => assert(!err, `Server reported error! ${err}`)); + let opts = { host: 'localhost', port: this.targetServer.port }; + this.mirror = new TransactionMirror(opts, this.sourceCache); + this.mirror._queueProcessDelay = 1; }); it("should mirror all queued transactions to the target Cache Server", async () => { From de750f6c293bd43712881cadf624fc21f5f923b5 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Thu, 25 Jan 2018 13:49:18 -0600 Subject: [PATCH 37/89] - Simplify CachRAM to use a single pageSize config var instead of a split between initial and grow page sizes - Implemented maxPageCount in CacheRAM to limit allocations of new pages - Implemented LRU strategy for replacing an in-use cache block when maxPageCount limit is reached - Simplified some tests to use aysnc/await instead of long Promise chains - Bump node version to latest LTS release (8.9.4) --- config/default.yml | 3 +- lib/cache/cache_ram.js | 172 +++++++++++--------- package.json | 2 +- test/cache_api.js | 3 +- test/cache_ram.js | 316 ++++++++++++++++++++++--------------- test/protocol.js | 3 +- test/test_utils.js | 6 - test/transaction_mirror.js | 11 +- 8 files changed, 297 insertions(+), 219 deletions(-) diff --git a/config/default.yml b/config/default.yml index 0c87899..20602bc 100644 --- a/config/default.yml +++ b/config/default.yml @@ -2,8 +2,7 @@ Cache: defaultModule: "lib/cache/cache_fs" options: cache_ram: - initialPageSize: 100000000 - growPageSize: 100000000 + pageSize: 100000000 maxPageCount: 10 minFreeBlockSize: 1024 cachePath: ".cache_ram" diff --git a/lib/cache/cache_ram.js b/lib/cache/cache_ram.js index de4fe33..1b7543c 100644 --- a/lib/cache/cache_ram.js +++ b/lib/cache/cache_ram.js @@ -48,15 +48,26 @@ class CacheRAM extends CacheBase { return path.join(this._cachePath, kDbName); } - _allocPage(size) { - let pageIndex = uuid(); + _allocPage(minSize) { + const maxPageCount = this._options.maxPageCount; + if(this._pageMeta.count() === maxPageCount) { + throw new Error(`reached maxPageCount (${maxPageCount}), cannot allocate new memory page`); + } + + const pageSize = this._options.pageSize; + const size = Math.max(minSize, pageSize); + if(size > pageSize) { + helpers.log(consts.LOG_WARN, `File allocation size of ${size} exceeds pageSize of ${pageSize}`); + } + + const pageIndex = uuid(); this._pages[pageIndex] = Buffer.alloc(size, 0, 'ascii'); this._index.insert({ pageIndex: pageIndex, pageOffset: 0, size: size, - timestamp: Date.now() + lastAccessTime: Date.now() }); return this._pageMeta.insert({ @@ -73,6 +84,15 @@ class CacheRAM extends CacheBase { .limit(1) .data(); + // find LRU block to recycle + if(result.length === 0 && this._pageMeta.count() === this._options.maxPageCount) { + result = this._index.chain() + .find({ 'size' : { '$gte' : size }}) + .simplesort('lastAccessTime') + .limit(1) + .data(); + } + return result.length > 0 ? result[0] : null; } @@ -83,13 +103,15 @@ class CacheRAM extends CacheBase { // Find the best free block to use let freeBlock; while((freeBlock = this._findFreeBlock(size)) === null) { - let growPageSize = this._options.growPageSize; - let allocSize = Math.max(size, growPageSize); - if(allocSize > growPageSize) { - helpers.log(consts.LOG_WARN, `File allocation size of ${size} exceeds growPageSize of ${growPageSize}`); - } + this._allocPage(size); + } - this._allocPage(allocSize); + if(freeBlock.fileId) { + delete freeBlock.fileId; + helpers.log(consts.LOG_DBG, `Allocated existing block of size ${freeBlock.size} for ${key}, last accessed ${freeBlock.timestamp}`); + } + else { + helpers.log(consts.LOG_DBG, `Allocated free block of size ${freeBlock.size} for key ${key}`); } // Clone the free block, then set it's file id and size @@ -139,6 +161,7 @@ class CacheRAM extends CacheBase { */ _addFileToCache(type, guid, hash, buffer) { const key = CacheRAM._calcIndexKey(type, guid, hash); + const entry = this._reserveBlock(key, buffer.length); helpers.log(consts.LOG_TEST, `Saving file key: ${key} pageIndex: ${entry.pageIndex} pageOffset: ${entry.pageOffset} size: ${entry.size}`); @@ -213,7 +236,7 @@ class CacheRAM extends CacheBase { this._index.clear(); this._pageMeta.clear(); this._pages = {}; - this._allocPage(this._options.initialPageSize); + this._allocPage(this._options.pageSize); } /** @@ -222,115 +245,110 @@ class CacheRAM extends CacheBase { * @returns {Promise} * @private */ - _initDb(options) { + async _initDb(options) { const self = this; let db = new loki(self._dbPath, options); let loadDb = promisify(db.loadDatabase).bind(db); this._db = db; - return loadDb({}) - .then(() => { - self._index = db.getCollection(kIndex); - self._pageMeta = db.getCollection(kPageMeta); + await loadDb({}); - if(self._index !== null && self._pageMeta !== null) { - return self._deserialize(); - } + self._index = db.getCollection(kIndex); + self._pageMeta = db.getCollection(kPageMeta); - self._pageMeta = db.addCollection(kPageMeta, { - unique: ["index"], - indices: ["dirty"] - }); + if(self._index !== null && self._pageMeta !== null) { + return self._deserialize(); + } - self._index = db.addCollection(kIndex, { - unique: ["fileId"], - indices: ["size"] - }); + self._pageMeta = db.addCollection(kPageMeta, { + unique: ["index"], + indices: ["dirty"] + }); - self._clearCache(); - }); + self._index = db.addCollection(kIndex, { + unique: ["fileId"], + indices: ["size"] + }); + + self._clearCache(); } /** * * @private */ - _saveDb() { + async _saveDb() { let save = promisify(this._db.saveDatabase).bind(this._db); - return save(); + await save(); } - init(options) { + async init(options) { const self = this; - return super.init(options) - .then(() => { - let dbOpts = self._options.persistenceOptions || {}; - if(!dbOpts.hasOwnProperty('adapter') || dbOpts.adapter === null) { - dbOpts.adapter = new PersistenceAdapter(self); - } + await super.init(options); - return self._initDb(dbOpts); - }); + let dbOpts = self._options.persistenceOptions || {}; + if(!dbOpts.hasOwnProperty('adapter') || dbOpts.adapter === null) { + dbOpts.adapter = new PersistenceAdapter(self); + } + + return self._initDb(dbOpts); } - shutdown() { - let close = promisify(this._db.close).bind(this._db); - return this._saveDb().then(() => close()); + async shutdown() { + await this._saveDb(); + await promisify(this._db.close).bind(this._db)(); } - getFileInfo(type, guid, hash) { + async getFileInfo(type, guid, hash) { const key = CacheRAM._calcIndexKey(type, guid, hash); const entry = this._index.by('fileId', key); - - return (entry != null) - ? Promise.resolve({ size: entry.size }) - : Promise.reject(new Error(`File not found for ${key}`)); + if(entry == null) throw new Error(`File not found for ${key}`); + return { size: entry.size, lastAccessTime: entry.lastAccessTime }; } - getFileStream(type, guid, hash) { - const self = this; + async getFileStream(type, guid, hash) { const key = CacheRAM._calcIndexKey(type, guid, hash); const entry = this._index.by('fileId', key); + if(entry == null) throw new Error(`File not found for ${key}`); - return new Promise((resolve, reject) => { - // noinspection EqualityComparisonWithCoercionJS (checking for null or undefined) - if(entry != null) { - const file = self._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size); - const stream = new Readable({ - read() { - if(this.didPush) - return this.push(null); - this.push(file); - this.didPush = true; - }, - - highWaterMark: file.length - }); + // Update lastAccessTime of entry + entry.lastAccessTime = Date.now(); + this._index.update(entry); - resolve(stream); - } - else { - reject(new Error(`File not found for ${key}`)); - } + const file = this._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size); + + return new Readable({ + read() { + if(this.didPush) + return this.push(null); + this.push(file); + this.didPush = true; + }, + + highWaterMark: file.length }); } - createPutTransaction(guid, hash) { - return Promise.resolve(new PutTransactionRAM(guid, hash)); + async createPutTransaction(guid, hash) { + return new PutTransactionRAM(guid, hash); } - endPutTransaction(transaction) { + async endPutTransaction(transaction) { const self = this; - return this._waitForSerialize() - .then(() => transaction.finalize()) - .then(() => { - transaction.files.forEach(file => { - self._addFileToCache(file.type, transaction.guid, transaction.hash, file.buffer); - }); + await this._waitForSerialize(); + await transaction.finalize(); + + try { + transaction.files.forEach(file => { + self._addFileToCache(file.type, transaction.guid, transaction.hash, file.buffer); }); + } + catch(err) { + helpers.log(consts.LOG_ERR, err); + } } registerClusterWorker(worker) {} diff --git a/package.json b/package.json index e89b28b..fe6164f 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "description": "Unity Cache Server", "main": "lib/index.js", "engines": { - "node": "^8.9.1" + "node": "^8.9.4" }, "directories": { "test": "test" diff --git a/test/cache_api.js b/test/cache_api.js index 6b310fa..acd3b18 100644 --- a/test/cache_api.js +++ b/test/cache_api.js @@ -12,8 +12,7 @@ let test_modules = [ path: "../lib/cache/cache_ram", options: { cachePath: tmp.tmpNameSync({}), - initialPageSize: 10000, - growPageSize: 10000, + pageSize: 10000, minFreeBlockSize: 1024, persistenceOptions: { adapter: new loki.LokiMemoryAdapter() diff --git a/test/cache_ram.js b/test/cache_ram.js index d0da7ee..e872932 100644 --- a/test/cache_ram.js +++ b/test/cache_ram.js @@ -3,7 +3,7 @@ const fs = require('fs-extra'); const Cache = require('../lib/cache/cache_ram'); const randomBuffer = require('./test_utils').randomBuffer; const generateCommandData = require('./test_utils').generateCommandData; -const writeFileDataToCache = require('./test_utils').writeFileDataToCache; +const sleep = require('./test_utils').sleep; const path = require('path'); const assert = require('assert'); @@ -12,16 +12,14 @@ const MAX_FILE_SIZE = MIN_FILE_SIZE; describe("Cache: RAM", () => { - function dirtyPages() { - return cache._pageMeta.chain() - .find({'dirty' : true}).data() - .map(page => page.index); - } + let dirtyPages = () => cache._pageMeta.chain() + .find({'dirty' : true}).data() + .map(page => page.index); let opts = { cachePath: tmp.tmpNameSync({}).toString(), - initialPageSize: MIN_FILE_SIZE * 2, - growPageSize: MIN_FILE_SIZE * 2, + pageSize: MIN_FILE_SIZE * 2, + maxPageCount: 2, minFreeBlockSize: 1024, persistenceOptions: { autosave: false @@ -31,48 +29,62 @@ describe("Cache: RAM", () => { let cache; let fileData = generateCommandData(MIN_FILE_SIZE, MAX_FILE_SIZE); - describe("Public API", () => { + let writeFileDataToCache = (fileData) => { + cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); + cache._addFileToCache('a', fileData.guid, fileData.hash, fileData.bin); + cache._addFileToCache('r', fileData.guid, fileData.hash, fileData.resource); + }; + describe("Public API", () => { beforeEach(() => { cache = new Cache(); }); - afterEach(() => { - return fs.remove(opts.cachePath); - }); + afterEach(() => fs.remove(opts.cachePath)); describe("init", () => { - it("should initialize the _db object", () => { - return cache.init(opts).then(() => assert(cache._db !== null)); - }); - - it("should initialize an empty cache if no database was loaded from disk", () => { - return cache.init(opts) - .then(() => { - assert(cache._pageMeta.count() === 1); - let index = cache._index.findOne({}); - assert(index !== null); - assert(index.size === opts.initialPageSize); - assert(index.pageOffset === 0); - }); - }); - - it("should populate the _index and _pageMeta when a saved database is loaded from disk", () => { - return cache.init(opts) - .then(() => { cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info);} ) - .then(() => cache.shutdown()) - .then(() => cache.init(opts)) - .then(() => { - assert(cache._pageMeta.count() === 1); - assert(cache._index.count() === 2); - }); + it("should initialize the _db object", async () => { + await cache.init(opts); + assert(cache._db !== null); + }); + + it("should initialize an empty cache if no database was loaded from disk", async () => { + await cache.init(opts); + assert(cache._pageMeta.count() === 1); + let index = cache._index.findOne({}); + + assert(index !== null); + assert(index.size === opts.pageSize); + assert(index.pageOffset === 0); + }); + + it("should populate the _index and _pageMeta when a saved database is loaded from disk", async () => { + await cache.init(opts); + await cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); + await cache.shutdown(); + await cache.init(opts); + + assert(cache._pageMeta.count() === 1); + assert(cache._index.count() === 2); }); }); - describe("endPutTransaction", () => { - it("it should wait for a database save in-progress to complete before ending the transaction", () => { - let trx; + describe("getFileStream", () => { + it("should update the lastAccessTime of the requested file entry", async () => { + let prevTime; + await cache.init(opts); + await cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); + let info = await cache.getFileInfo('i', fileData.guid, fileData.hash); + prevTime = info.lastAccessTime; + await sleep(100); + await cache.getFileStream('i', fileData.guid, fileData.hash); + info = await cache.getFileInfo('i', fileData.guid, fileData.hash); + assert(info.lastAccessTime > prevTime); + }); + }); + describe("endPutTransaction", () => { + it("it should wait for a database save in-progress to complete before ending the transaction", async () => { let ok = false; cache.on('waitForSerialize', () => { ok = true; @@ -80,135 +92,187 @@ describe("Cache: RAM", () => { }); cache._serializeInProgress = true; - return cache.init(opts) - .then(() => cache.createPutTransaction(fileData.guid, fileData.hash)) - .then(result => { trx = result; }) - .then(() => trx.getWriteStream('i', fileData.info.length)) - .then(stream => stream.end(fileData.info)) - .then(() => cache.endPutTransaction(trx)) - .then(() => assert(ok)); + await cache.init(opts); + let trx = await cache.createPutTransaction(fileData.guid, fileData.hash); + let stream = await trx.getWriteStream('i', fileData.info.length); + stream.end(fileData.info); + await cache.endPutTransaction(trx); + assert(ok); }); }); describe("shutdown", () => { - it("should serialize the database and page files to disk before returning", () => { + it("should serialize the database and page files to disk before returning", async () => { let pages; - return cache.init(opts) - .then(() => { cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); }) - .then(() => { - pages = dirtyPages(); - assert(pages.length === 1); - }) - .then(() => cache.shutdown()) - .then(() => fs.access(cache._dbPath)) - .then(() => fs.readdir(opts.cachePath)) - .then(dir => assert(dir.includes(pages[0]))); + await cache.init(opts); + await cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); + + pages = dirtyPages(); + assert(pages.length === 1); + + await cache.shutdown(); + await fs.access(cache._dbPath); + let dir = await fs.readdir(opts.cachePath); + assert(dir.includes(pages[0])); }); }); }); - describe("_serialize", () => { + describe("Internal", () => { - beforeEach(() => { + beforeEach(async () => { cache = new Cache(); - return cache.init(opts).then(() => writeFileDataToCache(cache, fileData)); + await cache.init(opts); + }); - afterEach(() => { + afterEach(async () => { cache._clearCache(); - return fs.remove(opts.cachePath); + await fs.remove(opts.cachePath); }); - it("should write only dirty page files to disk", () => { - let testDir = (dir, dirty) => { - assert(dirty.every(entry => dir.includes(entry))); - assert(dir.every(entry => dirty.includes(entry))); - }; + describe("_serialize", () => { + + beforeEach(() => writeFileDataToCache(fileData)); + + it("should write only dirty page files to disk", async () => { + let testDir = (dir, dirty) => { + assert(dirty.every(entry => dir.includes(entry))); + assert(dir.every(entry => dirty.includes(entry))); + }; + + let dirty = dirtyPages(); - let dirty = dirtyPages(); - return Promise.resolve() // Serialize the cache - .then(() => cache._serialize()) + await cache._serialize(); // Read the cache dir and compare file list to expected dirty pages - .then(() => fs.readdir(opts.cachePath)) - .then(dir => testDir(dir, dirty)) + let dir = await fs.readdir(opts.cachePath); + testDir(dir, dirty); // Remove all files from the cache dir - .then(() => fs.emptyDir(opts.cachePath)) + await fs.emptyDir(opts.cachePath); // Replace a single file - .then(() => cache._addFileToCache('i', fileData.guid, fileData.hash, randomBuffer(fileData.info.length))) + cache._addFileToCache('i', fileData.guid, fileData.hash, randomBuffer(fileData.info.length)); // Store the dirty page list again - .then(() => { dirty = dirtyPages(); }) + dirty = dirtyPages(); // Serialize the cache again - .then(() => cache._serialize()) + await cache._serialize(); // Re-compare cache dir contents to expected dirty pages - .then(() => fs.readdir(opts.cachePath)) - .then(dir => testDir(dir, dirty)); - }); - }); - - describe("_deserialize", () => { - - beforeEach(() => { - cache = new Cache(); - return cache.init(opts) - .then(() => writeFileDataToCache(cache, fileData)) - .then(() => cache._serialize()); - }); - - afterEach(() => { - cache._clearCache(); - return fs.remove(opts.cachePath); + dir = await fs.readdir(opts.cachePath); + testDir(dir, dirty); + }); }); - it("should load all page files from the cachePath", () => { - let pageMeta = cache._pageMeta.chain().find({}).data(); - let pageData = cache._pages; + describe("_deserialize", () => { - // artificially clear out the page array before de-serializing - cache._pages = []; + beforeEach(async () => { + writeFileDataToCache(fileData); + await cache._serialize(); + }); - return cache._deserialize() - .then(() => { - let ok = pageMeta.every(page => { - return Buffer.compare(cache._pages[page.index], pageData[page.index]) === 0; - }); + it("should load all page files from the cachePath", async () => { + let pageMeta = cache._pageMeta.chain().find({}).data(); + let pageData = cache._pages; - assert(ok); + // artificially clear out the page array before de-serializing + cache._pages = []; + await cache._deserialize(); + pageMeta.forEach(page => { + assert.equal(Buffer.compare(cache._pages[page.index], pageData[page.index]), 0); }); - }); + }); - it("should throw an error if the page file size doesn't match the expected size", () => { - return fs.readdir(opts.cachePath) - .then(dir => { - assert(dir.length > 0); - return fs.truncate(path.join(opts.cachePath, dir[0])) - }) - .then(() => cache._deserialize()) - .then(() => { throw new Error("Expected error!"); }, err => assert(err)); + it("should throw an error if the page file size doesn't match the expected size", async () => { + let dir = await fs.readdir(opts.cachePath); + + assert(dir.length > 0); + await fs.truncate(path.join(opts.cachePath, dir[0])); + + let didThrow = false; + try { + await cache._deserialize(); + } + catch(err) { + didThrow = true; + } + finally { + assert(didThrow); + } + }); }); - }); - - describe("_allocPage", () => { - }); + describe("_allocPage", () => { + it("should allocate a new page with size equal to the configured page size", () => { + assert.equal(cache._pageMeta.count(), 1); + const page = cache._allocPage(0); + assert.equal(cache._pageMeta.count(), 2); + assert.equal(cache._pages[page.index].length, opts.pageSize); + }); - describe("_findFreeBlock", () => { + it("should allocate a new page with size equal to the given minSize when greater than the configured page size", () => { + assert.equal(cache._pageMeta.count(), 1); + const page = cache._allocPage(opts.pageSize * 2); + assert.equal(cache._pageMeta.count(), 2); + assert.equal(cache._pages[page.index].length, opts.pageSize * 2); + }); - }); + it("should throw an error if page count would exceed maxPageCount", () => { + for(let x = 0; x < opts.maxPageCount - 1; x++) + cache._allocPage(0); - describe("_reserveBlock", () => { + assert.throws(() => cache._allocPage(0)); + }); + }); - }); + describe("_reserveBlock", () => { + it("should allocate an existing free block in an existing page when available", () => { + const key = Cache._calcIndexKey('a', randomBuffer(16), randomBuffer(16)); + const block = cache._reserveBlock(key, MIN_FILE_SIZE); + assert.equal(cache._pageMeta.count(), 1); + assert.equal(block.size, MIN_FILE_SIZE); + }); - describe("_waitForSerialize", () => { + it("should allocate a new free block to a new page when no free blocks are found in existing pages", () => { + const key1 = Cache._calcIndexKey('a', randomBuffer(16), randomBuffer(16)); + const key2 = Cache._calcIndexKey('a', randomBuffer(16), randomBuffer(16)); + cache._reserveBlock(key1, opts.pageSize); // Fill up the first free block + const block = cache._reserveBlock(key2, MIN_FILE_SIZE); // Should now allocate another + assert.equal(cache._pageMeta.count(), 2); + assert.equal(block.size, MIN_FILE_SIZE); + }); - }); + it("should re-allocate a LRU block when no free blocks are available and maxPageCount has been reached", async () => { + let firstBlock; + for(let x = 0; x < opts.maxPageCount; x++) { + let key = Cache._calcIndexKey('a', randomBuffer(16), randomBuffer(16)); + let block = cache._reserveBlock(key, opts.pageSize); + if(!firstBlock) + firstBlock = block; + await sleep(50); + } + + let key = Cache._calcIndexKey('a', randomBuffer(16), randomBuffer(16)); + let block = cache._reserveBlock(key, MIN_FILE_SIZE); + assert.equal(firstBlock.pageIndex, block.pageIndex); + }); - describe("_addFileToCache", () => { + it("should throw an exception if no free block or no LRU block of a suitable size can be found when maxPageCount has been reached", () => { + for(let x = 0; x < opts.maxPageCount; x++) { + let key = Cache._calcIndexKey('a', randomBuffer(16), randomBuffer(16)); + cache._reserveBlock(key, opts.pageSize); + } - }); + assert.throws(() => cache._reserveBlock(key, opts.pageSize * 2)); + }); + }); - describe("_clearCache", () => { + describe("_addFileToCache", () => { + it("should throw an error if the cache cannot grow to accommodate the new file", () => { + for(let x = 0; x < opts.maxPageCount; x++) { + cache._addFileToCache('a', randomBuffer(16), randomBuffer(16), randomBuffer(opts.pageSize)); + } + assert.throws(() => cache._addFileToCache('a', randomBuffer(16), randomBuffer(16), randomBuffer(opts.pageSize * 2))); + }); + }); }); }); \ No newline at end of file diff --git a/test/protocol.js b/test/protocol.js index e2d26b3..e86a5f7 100644 --- a/test/protocol.js +++ b/test/protocol.js @@ -28,8 +28,7 @@ let test_modules = [ name: "cache_ram", path: "../lib/cache/cache_ram", options: { - initialPageSize: MAX_FILE_SIZE * 2, - growPageSize: MAX_FILE_SIZE, + pageSize: MAX_FILE_SIZE, minFreeBlockSize: 1024, persistenceOptions: { adapter: new loki.LokiMemoryAdapter() diff --git a/test/test_utils.js b/test/test_utils.js index 1f12a24..57c5566 100644 --- a/test/test_utils.js +++ b/test/test_utils.js @@ -31,12 +31,6 @@ exports.generateCommandData = function(minSize, maxSize) { } }; -exports.writeFileDataToCache = function(cache, fileData) { - cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); - cache._addFileToCache('a', fileData.guid, fileData.hash, fileData.bin); - cache._addFileToCache('r', fileData.guid, fileData.hash, fileData.resource); -}; - exports.encodeCommand = function(command, guid, hash, blob) { if(blob) diff --git a/test/transaction_mirror.js b/test/transaction_mirror.js index 6eb61da..b165a2f 100644 --- a/test/transaction_mirror.js +++ b/test/transaction_mirror.js @@ -1,19 +1,24 @@ const { Server, CacheRAM } = require('../lib'); const TransactionMirror = require('../lib/server/transaction_mirror'); const tmp = require('tmp'); -const { generateCommandData, sleep, writeFileDataToCache } = require('./test_utils'); +const { generateCommandData, sleep } = require('./test_utils'); const assert = require('assert'); let cacheOpts = { cachePath: tmp.tmpNameSync({}).toString(), - initialPageSize: 10 * 1024, - growPageSize: 10 * 1024, + pageSize: 10 * 1024, minFreeBlockSize: 1024, persistenceOptions: { autosave: false } }; +let writeFileDataToCache = (cache, fileData) => { + cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); + cache._addFileToCache('a', fileData.guid, fileData.hash, fileData.bin); + cache._addFileToCache('r', fileData.guid, fileData.hash, fileData.resource); +}; + describe("TransactionMirror", () => { before(async () => { From c118fbe0e9bbbc2ce55221868de95fa7e87fc1be Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 26 Jan 2018 12:34:09 -0600 Subject: [PATCH 38/89] Move logic for parsing address strings to helper file --- lib/helpers.js | 14 ++++++++++++++ main.js | 25 ++++++++----------------- 2 files changed, 22 insertions(+), 17 deletions(-) diff --git a/lib/helpers.js b/lib/helpers.js index ede08af..a978222 100644 --- a/lib/helpers.js +++ b/lib/helpers.js @@ -1,5 +1,6 @@ const cluster = require('cluster'); const consts = require("./constants"); +const dns = require('dns'); let logLevel = consts.LOG_TEST; @@ -90,6 +91,19 @@ function DefaultLogger(lvl, msg) { console.log(`${prefix}${msg}`); } +exports.parseAndValidateAddressString = function(address, defaultPort) { + let [host, port] = address.split(':'); + port = parseInt(port); + if(!port) port = defaultPort; + + return new Promise((resolve, reject) => { + dns.lookup(host, {family: 4, hints: dns.ADDRCONFIG}, (err, address) => { + if(err) return reject(err); + resolve({ host: address, port: port }); + }); + }) +}; + exports.log = DefaultLogger; exports.SetLogger = function(logger) { diff --git a/main.js b/main.js index caa30ca..b17467b 100644 --- a/main.js +++ b/main.js @@ -82,25 +82,16 @@ if(program.cachePath !== null) { } let getMirrors = () => new Promise((resolve, reject) => { - let mirrors = program.mirror.map(m => { - let [host, port] = m.split(':'); - port = parseInt(port); + const defaultPort = consts.DEFAULT_PORT; + const myIp = ip.address(); - if(!port) port = config.get("Defaults.serverPort"); - const myIp = ip.address(); - - return new Promise((resolve, reject) => { - dns.lookup(host, {family: 4, hints: dns.ADDRCONFIG}, (err, address) => { - if(err) return reject(err); - - if((ip.isEqual(myIp, address) || ip.isEqual("127.0.0.1", address)) && program.port === port) { - return reject(new Error(`Cannot mirror to self!`)); - } + let mirrors = program.mirror.map(async m => { + let result = await helpers.parseAndValidateAddressString(m, defaultPort); + if((ip.isEqual(myIp, result.host) || ip.isEqual("127.0.0.1", result.host)) && program.port === port) { + throw new Error(`Cannot mirror to self!`); + } - helpers.log(consts.LOG_INFO, `Cache Server mirroring to ${address}:${port}`); - resolve({ host: address, port: port }); - }); - }) + return result; }); Promise.all(mirrors) From aa3a11c4f915a546ef027da0299da66d21587348 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 26 Jan 2018 12:34:33 -0600 Subject: [PATCH 39/89] =?UTF-8?q?Fix=20a=20log=20message=20that=20was=20lo?= =?UTF-8?q?gging=20=E2=80=9C[object=20Object]=E2=80=9D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- lib/server/client_stream_processor.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/server/client_stream_processor.js b/lib/server/client_stream_processor.js index 12d7acc..7497c7d 100644 --- a/lib/server/client_stream_processor.js +++ b/lib/server/client_stream_processor.js @@ -46,7 +46,7 @@ class ClientStreamProcessor extends Transform { static get errorCodes() { return { - quitError: { message: "Client quit" } + quitError: new Error("Client quit") } } From 12fa96503177b95c3f3d810b340530d9d1874b61 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 26 Jan 2018 12:34:49 -0600 Subject: [PATCH 40/89] Update .gitignore --- .gitignore | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index 801e058..5042bca 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,8 @@ cache/ -cache5.0/ node_modules/ .coveralls.yml !lib/cache -.cache_membuf/ +.cache_ram/ .cache_fs/ +.nyc_output/ +coverage/ \ No newline at end of file From 30bfd17395667e599366ee45111bf9d56993ae63 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 26 Jan 2018 12:36:31 -0600 Subject: [PATCH 41/89] Add utility to seed a cache server from a Unity project Library folder. Use the C# script to export transaction data from the project, and then the import.js script to parse exported file and send to remote server (or localhost) --- Unity/CacheServerTransactionExporter.cs | 139 ++++++++++++++++++++++++ import.js | 85 +++++++++++++++ 2 files changed, 224 insertions(+) create mode 100644 Unity/CacheServerTransactionExporter.cs create mode 100644 import.js diff --git a/Unity/CacheServerTransactionExporter.cs b/Unity/CacheServerTransactionExporter.cs new file mode 100644 index 0000000..744810f --- /dev/null +++ b/Unity/CacheServerTransactionExporter.cs @@ -0,0 +1,139 @@ +using UnityEngine; +using UnityEditor; +using System; +using System.IO; +using System.Linq; +using System.Collections.Generic; + +public class CacheServerTransactionExporter : MonoBehaviour +{ + private const string TYPE_ASSET = "a"; + private const string TYPE_INFO = "i"; + private const string TYPE_RESOURCE = "r"; + + [Serializable] + public class CacheServerTransactionData : ISerializationCallbackReceiver + { + public string projectRoot; + public Transaction[] transactions; + + private readonly List m_transactionList; + + public CacheServerTransactionData(int size) + { + projectRoot = Directory.GetParent(Application.dataPath).FullName; + m_transactionList = new List(size); + } + + public void AddItem(string assetPath) + { + if (Directory.Exists(assetPath)) return; + if (!File.Exists(assetPath)) return; + + var guid = AssetDatabase.AssetPathToGUID(assetPath); + var hash = AssetDatabase.GetAssetDependencyHash(assetPath); + + var libPath = + new[] { projectRoot, "Library", "metadata", guid.Substring(0, 2), guid } + .Aggregate(string.Empty, Path.Combine); + + if (!File.Exists(libPath)) + { + Debug.Log("Cannot find Library representation for GUID " + guid); + return; + } + + var files = new List + { + new Transaction.FileInfo(TYPE_ASSET, libPath, ToUnixTime(File.GetLastWriteTime(libPath))) + }; + + var infoLibPath = libPath + ".info"; + if (File.Exists(infoLibPath)) + { + files.Add(new Transaction.FileInfo(TYPE_INFO, infoLibPath, ToUnixTime(File.GetLastWriteTime(infoLibPath)))); + } + + var resLibPath = libPath + ".resource"; + if (File.Exists(resLibPath)) + { + files.Add(new Transaction.FileInfo(TYPE_RESOURCE, resLibPath, ToUnixTime(File.GetLastWriteTime(resLibPath)))); + } + + m_transactionList.Add(new Transaction(assetPath, guid, hash, files.ToArray())); + } + + public void OnBeforeSerialize() + { + transactions = m_transactionList.ToArray(); + } + + public void OnAfterDeserialize() + { + // No op + } + } + + [Serializable] + public struct Transaction + { + + [Serializable] + public struct FileInfo + { + public string type; + public string path; + public long ts; + + public FileInfo(string type, string path, long ts) + { + this.type = type; + this.path = path; + this.ts = ts; + } + } + + public string assetPath; + public string guid; + public string hash; + public FileInfo[] files; + + public Transaction(string assetPath, string guid, Hash128 hash, FileInfo[] files) + { + this.assetPath = assetPath; + this.guid = guid; + this.hash = hash.ToString(); + this.files = files; + } + } + + public static void ExportTransactions(string exportPath) + { + var assetPaths = AssetDatabase.GetAllAssetPaths(); + var data = new CacheServerTransactionData(assetPaths.Length); + + foreach (var path in assetPaths) + data.AddItem(path); + + using (var stream = File.CreateText(exportPath)) + stream.Write(EditorJsonUtility.ToJson(data, true)); + } + + [MenuItem("Cache Server Utilities/Export Transactions")] + public static void ExportTransactionsMenuItem() + { + var path = EditorUtility.SaveFilePanel( + "Save Import Data", + Directory.GetCurrentDirectory(), + "CacheServerTransactions_" + EditorUserBuildSettings.activeBuildTarget, "json"); + + if (path.Length != 0) + ExportTransactions(path); + } + + public static long ToUnixTime(DateTime date) + { + var epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc); + return Convert.ToInt64((date.ToUniversalTime() - epoch).TotalSeconds); + } +} diff --git a/import.js b/import.js new file mode 100644 index 0000000..bc630da --- /dev/null +++ b/import.js @@ -0,0 +1,85 @@ +const helpers = require('./lib/helpers'); +const program = require('commander'); +const consts = require('./lib/constants'); +const fs = require('fs-extra'); +const filesize = require('filesize'); +const Client = require('./lib/client/client'); + +function myParseInt(val, def) { + val = parseInt(val); + return (!val && val !== 0) ? def : val; +} + +program.description("Unity Cache Server - Project Import") + .version(require('./package').version) + .description('Imports Unity project Library data into a local or remote Cache Server.') + .arguments(' [ServerAddress]') + .option('-l, --log-level ', `Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is ${consts.DEFAULT_LOG_LEVEL}`, myParseInt, consts.DEFAULT_LOG_LEVEL) + .action((projectRoot, serverAddress) => { + importTransactionFile(projectRoot, serverAddress, consts.DEFAULT_PORT) + .catch(err => { + console.log(err); + process.exit(1); + }); + }); + +program.parse(process.argv); + +async function importTransactionFile(filePath, addressString, defaultPort) { + + let address = await helpers.parseAndValidateAddressString(addressString, defaultPort); + + if(!await fs.pathExists(filePath)) throw new Error(`Cannot find ${filePath}`); + let data = await fs.readJson(filePath); + if(!data.hasOwnProperty('transactions')) throw new Error(`Invalid transaction data!`); + + let client = new Client(address.host, address.port, {}); + await client.connect(); + + const trxCount = data.transactions.length; + const startTime = Date.now(); + let sentBytes = 0; + let sentAssetCount = 0; + let sentFileCount = 0; + + for(let i = 0; i < trxCount; i++) { + const trx = data.transactions[i]; + const guid = helpers.GUIDStringToBuffer(trx.guid); + const hash = Buffer.from(trx.hash, 'hex'); + + helpers.log(consts.LOG_INFO, `(${i + 1}/${trxCount}) ${trx.assetPath}`); + + await client.beginTransaction(guid, hash); + + for(let file of trx.files) { + let stats; + + try { + stats = await fs.stat(file.path); + } + catch(err) { + helpers.log(consts.LOG_ERR, err); + continue; + } + + if(stats.mtimeMs !== file.ts * 1000) { + helpers.log(consts.LOG_WARN, `${file.path} has been modified, skipping`); + continue; + } + + sentBytes += stats.size; + const stream = fs.createReadStream(file.path); + await client.putFile(file.type, guid, hash, stream, stats.size); + sentFileCount ++; + } + + await client.endTransaction(); + sentAssetCount++; + } + + let totalTime = (Date.now() - startTime) / 1000; + let throughput = (sentBytes / totalTime).toFixed(2); + helpers.log(consts.LOG_INFO, `Sent ${sentFileCount} files for ${sentAssetCount} assets (${filesize(sentBytes)}) in ${totalTime} seconds (${filesize(throughput)}/sec)`); + + return client.quit(); +} From 269c6b0903f1da83cda128cefdd82d26704fb3b8 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 26 Jan 2018 13:21:45 -0600 Subject: [PATCH 42/89] Removing unused package dependency --- package-lock.json | 8 -------- package.json | 1 - 2 files changed, 9 deletions(-) diff --git a/package-lock.json b/package-lock.json index 52b8eac..535b71b 100644 --- a/package-lock.json +++ b/package-lock.json @@ -729,14 +729,6 @@ "graceful-fs": "4.1.11" } }, - "klaw": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/klaw/-/klaw-2.1.1.tgz", - "integrity": "sha1-QrdolHARacyRD9DRnOZ3tfs3ivE=", - "requires": { - "graceful-fs": "4.1.11" - } - }, "lodash": { "version": "4.17.4", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", diff --git a/package.json b/package.json index fe6164f..d7a7304 100644 --- a/package.json +++ b/package.json @@ -41,7 +41,6 @@ "fs-extra": "^5.0.0", "ip": "^1.1.5", "js-yaml": "^3.10.0", - "klaw": "^2.1.1", "lodash": "^4.17.4", "lokijs": "^1.5.1", "prompt": "^1.0.0", From dd8bdb7e013d3d90f2cd1835eff196eb756958e0 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 26 Jan 2018 13:32:09 -0600 Subject: [PATCH 43/89] conform new CLI option naming pattern to existing options --- main.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/main.js b/main.js index 0131a79..637234f 100755 --- a/main.js +++ b/main.js @@ -30,8 +30,8 @@ const defaultCacheModule = config.get("Cache.defaultModule"); program.description("Unity Cache Server") .version(VERSION) .option('-p, --port ', `Specify the server port, only apply to new cache server, default is ${consts.DEFAULT_PORT}`, myParseInt, consts.DEFAULT_PORT) - .option('-c --cacheModule [path]', `Use cache module at specified path. Default is '${defaultCacheModule}'`, defaultCacheModule) - .option('-P, --cachePath [path]', `Specify the path of the cache directory.`) + .option('-c --cache-module [path]', `Use cache module at specified path. Default is '${defaultCacheModule}'`, defaultCacheModule) + .option('-P, --cache-path [path]', `Specify the path of the cache directory.`) .option('-l, --log-level ', `Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is ${consts.DEFAULT_LOG_LEVEL}`, myParseInt, consts.DEFAULT_LOG_LEVEL) .option('-w, --workers ', `Number of worker threads to spawn. Default is ${consts.DEFAULT_WORKERS}`, zeroOrMore, consts.DEFAULT_WORKERS) .option('-m --mirror [host:port]', `Mirror transactions to another cache server. Can be repeated for multiple mirrors.`, collect, []) From d22a1beabf01f92b7d74b99f2a9ebc97dafcbdd5 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 26 Jan 2018 15:04:32 -0600 Subject: [PATCH 44/89] Documentation! --- README.md | 67 +++++++++++++++++++++++++++++++++++++++++++++++++------ main.js | 4 ++-- 2 files changed, 62 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index b22f159..f43ad9a 100644 --- a/README.md +++ b/README.md @@ -26,22 +26,75 @@ unity-cache-server [arguments] ## Options ``` -V, --version output the version number - -s, --size Specify the maximum allowed size of the LRU cache. Files that have not been used recently will automatically be discarded when the cache size is exceeded. Default is 50Gb -p, --port Specify the server port, only apply to new cache server, default is 8126 - -P, --path [path] Specify the path of the cache directory. Default is ./cache5.0 - -l, --log-level Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is 4 (test) - -w, --workers Number of worker threads to spawn. Default is 1 for every 2 CPUs reported by the OS - -v, --verify Verify the Cache Server integrity, without fixing errors - -f, --fix Fix errors found while verifying the Cache Server integrity + -c --cache-module [path] Use cache module at specified path. Default is 'lib/cache/cache_fs' + -P, --cache-path [path] Specify the path of the cache directory. + -l, --log-level Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is 3 + -w, --workers Number of worker threads to spawn. Default is 0 + -m --mirror [host:port] Mirror transactions to another cache server. Can be repeated for multiple mirrors. -m, --monitor-parent-process Monitor a parent process and exit if it dies -h, --help output usage information ``` +## Configuration file +`config/default.yml` contains various configuration values for the cache modules (see below) and other features. The config system is based on the [node-config](`https://github.com/lorenwest/node-config/wiki/Configuration-Files`) module. Refer to the documentation in that package for tips on how to manage environment specific config files. ## Client Configuration The [Cache Server](https://docs.unity3d.com/Manual/CacheServer.html) section of the Unity Manual contains detailed information on connecting clients to remote Cache Servers. -## Contributors +## Cache Modules +Two distinct caching mechanisms are provided: a simple file system based cache, and a fully memory (RAM) backed cache. The file system module is the default and suitable for most applications. The RAM cache module provides optimal performance but requires a sufficient amount of physical RAM in the server system. + +Configuration options for all modules are set in the `config/default.yml` file. +### cache_fs (default) +A simple, efficient file system backed cache. +#### Usage +`--cache-module lib/cache/cache_fs`. +#### Options +option | default | description +--------- | ----------- | ----------- +cachePath | `.cache_fs` | Path to cache directory +#### Notes +* This module is backwards compatible with v5.x Cache Server directories +* For performance and simplicity reasons, unlike prior versions, it does NOT operate as an LRU cache and does not enforce overall cache size restrictions. If disk space is a concern, external shell scripts can be executed periodically to clean up files that have not been accessed recently. +* Supports worker threads (`--workers` option) +### cache_ram +A high performance, fully in-memory LRU cache. +#### Usage +`--cache-module lib/cache/cache_ram` +#### Options +option | default | description +--------- | ----------- | ----------- +pageSize | 100000000 | Smallest memory allocation to make, in bytes. i.e. the cache will grow in increments of pageSize. +maxPageCount | 10 | Maximum number of pages allowed in the cache. This combined with `pageSize` effectively limits the overall memory footprint of the cache. When this threshold is reached, an LRU mechanism will kick in to find room for new files. +minFreeBlockSize | 1024 | Smallest allocation unit within a page. Can be lowered for smaller projects. +cachePath | `.cache_ram` | Path to cache directory. Dirty memory pages are saved to disk periodically in this directory, and loaded at startup. +persistenceOptions.autosave | true | `true` to enable saving memory pages; `false` to disable. +persistenceOptions.autosaveInterval | 10000 | Minimum interval in milliseconds to save dirty pages. +#### Notes +* Does not support worker threads + +## Mirroring +#### Usage +Use the `--mirror [host:port]` option to relay all upload transactions to one or more Cache Server hosts (repeat the option for each host). There are checks in place to prevent self-mirroring, but beyond that it would be easy to create infinite transaction loops so use with care. +#### Options +option | default | description +--------- | ----------- | ----------- +queueProcessDelay | 2000 | Each transaction from a client is queued after completion. The `queueProcessDelay` (ms) will delay the start of processing the queue, from when the first transaction is added to an empty queue. It's a good idea to keep this value at or above the default value to avoid possible I/O race conditions with recently completed transactions. +connectionIdleTimeout | 10000 | Keep connections to remote mirror hosts alive for this length in ms, after processing a queue of transactions. Queue processing is 'bursty' so this should be calibrated to minimize the overhead of connection setup & tear-down. +## Unity project Library Importer +Tools are provided to quickly seed a Cache Server from a fully imported Unity project (a project with a Library folder). +#### Steps to Import +1) Add the [CacheServerTransactionImporter.cs](./Unity/CacheServerTransactionExporter.cs) script to the Unity project you wish to export. +2) Select the Menu item _Cache Server Utilities -> Export Transactions_ to save an export data file in .json format. Alternatively, with the script added to your project, you can run Unity in batchmode and [execute the static method](https://docs.unity3d.com/Manual/CommandLineArguments.html) `CacheServerTransactionExporter.ExportTransactions([path])` where `path` is the full path and filename to export. +3) Run the import utility to begin the import process: `node import.js [server:port]` +#### Notes +* On very large projects, Unity may appear to freeze while generating the exported JSON data. +* The default `server:port` is `localhost:8126` +* The import process connects and uploads to the target host like any other Unity client, so it should be safe in a production environment. +* Files will be skipped if any changes were detected between when the JSON data was exported and when the importer tool is executed. + +## Contributors Contributions are welcome! Before submitting pull requests please note the Submission of Contributions section of the Apache 2.0 license. The server protocol is described in [protocol.md](./protocol.md) diff --git a/main.js b/main.js index 637234f..e3725a6 100755 --- a/main.js +++ b/main.js @@ -31,10 +31,10 @@ program.description("Unity Cache Server") .version(VERSION) .option('-p, --port ', `Specify the server port, only apply to new cache server, default is ${consts.DEFAULT_PORT}`, myParseInt, consts.DEFAULT_PORT) .option('-c --cache-module [path]', `Use cache module at specified path. Default is '${defaultCacheModule}'`, defaultCacheModule) - .option('-P, --cache-path [path]', `Specify the path of the cache directory.`) + .option('-P, --cache-path [path]', `Specify the path of the cache directory`) .option('-l, --log-level ', `Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is ${consts.DEFAULT_LOG_LEVEL}`, myParseInt, consts.DEFAULT_LOG_LEVEL) .option('-w, --workers ', `Number of worker threads to spawn. Default is ${consts.DEFAULT_WORKERS}`, zeroOrMore, consts.DEFAULT_WORKERS) - .option('-m --mirror [host:port]', `Mirror transactions to another cache server. Can be repeated for multiple mirrors.`, collect, []) + .option('-m --mirror [host:port]', `Mirror transactions to another cache server. Can be repeated for multiple mirrors`, collect, []) .option('-m, --monitor-parent-process ', 'Monitor a parent process and exit if it dies', myParseInt, 0); program.parse(process.argv); From c81b0322fb1ff844b5e124904cbd5778812566ed Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 26 Jan 2018 15:19:24 -0600 Subject: [PATCH 45/89] Added ToC --- README.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/README.md b/README.md index f43ad9a..dc448ba 100644 --- a/README.md +++ b/README.md @@ -6,6 +6,22 @@ This is the officially maintained open-source implementation of the Unity Cache At present time this open-source repository is maintained separately from the Cache Server available on the Unity website, as well as the version packaged with the Unity installer. It is possible that compatibility with specific versions of Unity will diverge between these separate implementations. Check the release notes for specific compatibility information prior to usage. +#### Table of Contents +* [Server Setup](#server-setup) + * [Install from npm registry](#install-from-npm-registry) + * [Install from GitHub source](#install-from-github-source) +* [Usage](#usage) +* [Options](#options) +* [Configuration file](#configuration-file) +* [Client Configuration](#client-configuration) +* [Cache Modules](#cache-modules) + * [cache\_fs (default)](#cache_fs-default) + * [cache\_ram](#cache_ram) +* [Mirroring](#mirroring) +* [Unity project Library Importer](#unity-project-library-importer) +* [Contributors](#contributors) +* [License](#license) + ## Server Setup Download and install the latest LTS version of node from the [Node.JS website](`https://nodejs.org/en/download/`). From 8ce2ac57c04ecfa0cfc4f7f11ff2aa1dcd33abd9 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Sat, 27 Jan 2018 12:42:04 -0600 Subject: [PATCH 46/89] =?UTF-8?q?-=20Added=20a=20=E2=80=98persistence?= =?UTF-8?q?=E2=80=99=20(true/false)=20option=20for=20the=20RAM=20cache,=20?= =?UTF-8?q?to=20completely=20disable=20save/load=20of=20page=20files.=20-?= =?UTF-8?q?=20Converted=20the=20last=20bits=20of=20cache=5Fram=20to=20use?= =?UTF-8?q?=20async/await=20-=20Minor=20test=20file=20cleanup?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 3 +- config/default.yml | 3 +- lib/cache/cache_ram.js | 149 +++++++++++++++++------------------------ test/cache_ram.js | 29 +++++--- 4 files changed, 86 insertions(+), 98 deletions(-) diff --git a/README.md b/README.md index dc448ba..058b493 100644 --- a/README.md +++ b/README.md @@ -84,7 +84,8 @@ pageSize | 100000000 | Smallest memory allocation to make, in bytes. i.e. the ca maxPageCount | 10 | Maximum number of pages allowed in the cache. This combined with `pageSize` effectively limits the overall memory footprint of the cache. When this threshold is reached, an LRU mechanism will kick in to find room for new files. minFreeBlockSize | 1024 | Smallest allocation unit within a page. Can be lowered for smaller projects. cachePath | `.cache_ram` | Path to cache directory. Dirty memory pages are saved to disk periodically in this directory, and loaded at startup. -persistenceOptions.autosave | true | `true` to enable saving memory pages; `false` to disable. +persistence | true | Enable saving and loading of page files to disk. If `false`, the cache will be empty at every restart. +persistenceOptions.autosave | true | `true` to periodically save dirty memory pages automatically; `false` to disable. If `false`, pages will only be saved when the cache server is stopped with the `q` console command or with SIGTERM. persistenceOptions.autosaveInterval | 10000 | Minimum interval in milliseconds to save dirty pages. #### Notes * Does not support worker threads diff --git a/config/default.yml b/config/default.yml index 20602bc..4e532da 100644 --- a/config/default.yml +++ b/config/default.yml @@ -2,10 +2,11 @@ Cache: defaultModule: "lib/cache/cache_fs" options: cache_ram: + cachePath: ".cache_ram" pageSize: 100000000 maxPageCount: 10 minFreeBlockSize: 1024 - cachePath: ".cache_ram" + persistence: true persistenceOptions: autosave: true autosaveInterval: 10000 diff --git a/lib/cache/cache_ram.js b/lib/cache/cache_ram.js index 1b7543c..5a9a547 100644 --- a/lib/cache/cache_ram.js +++ b/lib/cache/cache_ram.js @@ -178,20 +178,15 @@ class CacheRAM extends CacheBase { * @returns {Promise<[any]>} * @private */ - _serialize() { - const self = this; - - let pages = self._pageMeta.chain().find({'dirty' : true}).data(); + async _serialize() { + let pages = this._pageMeta.chain().find({'dirty' : true}).data(); - let promises = pages.map(page => { - let pagePath = path.join(self._cachePath, page.index); + let promises = pages.map(async page => { + let pagePath = path.join(this._cachePath, page.index); helpers.log(consts.LOG_INFO, `Writing ${pagePath}`); - return fs.writeFile(pagePath, self._pages[page.index]) - .then(() => { - let doc = self._pageMeta.by('index', page.index); - doc.dirty = false; - self._pageMeta.update(doc); - }); + + await fs.writeFile(pagePath, this._pages[page.index]); + this._pageMeta.findAndUpdate({'index': page.index}, doc => doc.dirty = false); }); return Promise.all(promises); @@ -202,27 +197,17 @@ class CacheRAM extends CacheBase { * @returns {Promise<[any]>} * @private */ - _deserialize() { - const self = this; - - const cachePath = self._cachePath; - let pages = self._pageMeta.chain().find({}).data(); + async _deserialize() { + const cachePath = this._cachePath; + let pages = this._pageMeta.chain().find({}).data(); - let promises = pages.map(page => { - let file = path.join(cachePath, page.index); + let promises = pages.map(async page => { + const file = path.join(cachePath, page.index); helpers.log(consts.LOG_DBG, `Loading page file at ${file}`); - return fs.stat(file) - .then(stats => { - if(stats.size === page.size) { - return fs.readFile(file); - } - - throw new Error(`Unrecognized/invalid page file '${file}'`); - }) - .then(result => { - self._pages[page.index] = result; - }); + const stats = await fs.stat(file); + if(stats.size !== page.size) throw new Error(`Unrecognized/invalid page file '${file}'`); + this._pages[page.index] = await fs.readFile(file); }); return Promise.all(promises); @@ -246,32 +231,30 @@ class CacheRAM extends CacheBase { * @private */ async _initDb(options) { - const self = this; - - let db = new loki(self._dbPath, options); + let db = new loki(this._dbPath, options); let loadDb = promisify(db.loadDatabase).bind(db); this._db = db; await loadDb({}); - self._index = db.getCollection(kIndex); - self._pageMeta = db.getCollection(kPageMeta); + this._index = db.getCollection(kIndex); + this._pageMeta = db.getCollection(kPageMeta); - if(self._index !== null && self._pageMeta !== null) { - return self._deserialize(); + if(this._options.persistence === true && this._index !== null && this._pageMeta !== null) { + return this._deserialize(); } - self._pageMeta = db.addCollection(kPageMeta, { + this._pageMeta = db.addCollection(kPageMeta, { unique: ["index"], indices: ["dirty"] }); - self._index = db.addCollection(kIndex, { + this._index = db.addCollection(kIndex, { unique: ["fileId"], indices: ["size"] }); - self._clearCache(); + this._clearCache(); } /** @@ -284,16 +267,15 @@ class CacheRAM extends CacheBase { } async init(options) { - const self = this; - await super.init(options); - let dbOpts = self._options.persistenceOptions || {}; - if(!dbOpts.hasOwnProperty('adapter') || dbOpts.adapter === null) { - dbOpts.adapter = new PersistenceAdapter(self); + let dbOpts = {}; + if(this._options.persistence === true && this._options.persistenceOptions) { + dbOpts = this._options.persistenceOptions; + dbOpts.adapter = new PersistenceAdapter(this); } - return self._initDb(dbOpts); + return this._initDb(dbOpts); } async shutdown() { @@ -336,14 +318,12 @@ class CacheRAM extends CacheBase { } async endPutTransaction(transaction) { - const self = this; - await this._waitForSerialize(); await transaction.finalize(); try { transaction.files.forEach(file => { - self._addFileToCache(file.type, transaction.guid, transaction.hash, file.buffer); + this._addFileToCache(file.type, transaction.guid, transaction.hash, file.buffer); }); } catch(err) { @@ -369,53 +349,46 @@ class PutTransactionRAM extends PutTransaction { return this._finished; } - finalize() { - let self = this; - return new Promise((resolve, reject) => { - self._finished = _.values(self._streams); - let ok = self._finished.every(file => { - return file.pos === file.buffer.length; - }); + async finalize() { + this._finished = _.values(this._streams); + let ok = this._finished.every(file => { + return file.pos === file.buffer.length; + }); - ok ? resolve() : reject(new Error("Transaction failed; file size mismatch")); - }).then(() => super.finalize()); + if(!ok) throw new Error("Transaction failed; file size mismatch"); + return super.finalize(); } - getWriteStream(type, size) { - const self = this; + async getWriteStream(type, size) { + if(typeof(size) !== 'number' || size <= 0) { + throw new Error("Invalid size for write stream"); + } - return new Promise((resolve, reject) => { - if(typeof(size) !== 'number' || size <= 0) { - return reject(new Error("Invalid size for write stream")); - } + if(type !== 'a' && type !== 'i' && type !== 'r') { + throw new Error(`Unrecognized type '${type}' for transaction.`); + } - if(type !== 'a' && type !== 'i' && type !== 'r') { - return reject(new Error(`Unrecognized type '${type}' for transaction.`)); - } + this._streams[type] = { + type: type, + buffer: Buffer.alloc(size, 0, 'ascii'), + pos: 0 + }; + + let self = this; + return new Writable({ + write(chunk, encoding, cb) { + const file = self._streams[type]; - self._streams[type] = { - type: type, - buffer: Buffer.alloc(size, 0, 'ascii'), - pos: 0 - }; - - const stream = new Writable({ - write(chunk, encoding, cb) { - const file = self._streams[type]; - - if (file.buffer.length - file.pos >= chunk.length) { - chunk.copy(file.buffer, file.pos, 0, chunk.length); - file.pos += chunk.length; - } - else { - helpers.log(consts.LOG_ERR, "Attempt to write over stream buffer allocation!"); - } - - cb(); + if (file.buffer.length - file.pos >= chunk.length) { + chunk.copy(file.buffer, file.pos, 0, chunk.length); + file.pos += chunk.length; + } + else { + helpers.log(consts.LOG_ERR, "Attempt to write over stream buffer allocation!"); } - }); - resolve(stream); + cb(); + } }); } } diff --git a/test/cache_ram.js b/test/cache_ram.js index e872932..f6c8902 100644 --- a/test/cache_ram.js +++ b/test/cache_ram.js @@ -45,17 +45,17 @@ describe("Cache: RAM", () => { describe("init", () => { it("should initialize the _db object", async () => { await cache.init(opts); - assert(cache._db !== null); + assert.notEqual(cache._db, null); }); it("should initialize an empty cache if no database was loaded from disk", async () => { await cache.init(opts); - assert(cache._pageMeta.count() === 1); + assert.equal(cache._pageMeta.count(), 1); let index = cache._index.findOne({}); - assert(index !== null); - assert(index.size === opts.pageSize); - assert(index.pageOffset === 0); + assert.notStrictEqual(index, null); + assert.equal(index.size, opts.pageSize); + assert.equal(index.pageOffset, 0); }); it("should populate the _index and _pageMeta when a saved database is loaded from disk", async () => { @@ -64,8 +64,21 @@ describe("Cache: RAM", () => { await cache.shutdown(); await cache.init(opts); - assert(cache._pageMeta.count() === 1); - assert(cache._index.count() === 2); + assert.equal(cache._pageMeta.count(), 1); + assert.equal(cache._index.count(), 2); + }); + + it("should not save or load any database when opts.persistence is false", async () => { + let myOpts = Object.assign({}, opts); + myOpts.persistence = false; + + await cache.init(myOpts); + await cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); + await cache.shutdown(); + await cache.init(myOpts); + + assert.equal(cache._pageMeta.count(), 1); + assert.equal(cache._index.count(), 1); }); }); @@ -108,7 +121,7 @@ describe("Cache: RAM", () => { await cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); pages = dirtyPages(); - assert(pages.length === 1); + assert.equal(pages.length, 1); await cache.shutdown(); await fs.access(cache._dbPath); From c86d92c351fe7b0937b3b0a0a693f54fe258924e Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Sat, 27 Jan 2018 18:20:57 -0600 Subject: [PATCH 47/89] - Brought test coverage to 100% for helper script - Conformed some function names to convention --- lib/constants.js | 1 + lib/helpers.js | 59 +++++++++++++++++------ main.js | 3 +- package-lock.json | 113 +++++++++++++++++++++++++++++++++++++++++++++ package.json | 1 + test/cache_base.js | 22 +++++---- test/helpers.js | 107 ++++++++++++++++++++++++++++++++++++++++++ test/protocol.js | 2 +- test/server.js | 4 +- test/test_utils.js | 4 +- 10 files changed, 287 insertions(+), 29 deletions(-) diff --git a/lib/constants.js b/lib/constants.js index 7d7c88d..53ea933 100644 --- a/lib/constants.js +++ b/lib/constants.js @@ -6,6 +6,7 @@ const constants = { HASH_SIZE: 16, // bin GUID_SIZE: 16, // bin CMD_SIZE: 2, // bin + LOG_NONE: 0, LOG_ERR: 1, LOG_WARN: 2, LOG_INFO: 3, diff --git a/lib/helpers.js b/lib/helpers.js index a978222..a731261 100644 --- a/lib/helpers.js +++ b/lib/helpers.js @@ -83,14 +83,12 @@ exports.isBuffer = function(obj) { return !(obj === null) && !!obj.constructor && typeof obj.constructor.isBuffer === 'function' && obj.constructor.isBuffer(obj) }; -function DefaultLogger(lvl, msg) { - if (logLevel < lvl) - return; - - const prefix = cluster.isMaster ? "[Cluster:M] " : `[Cluster:${cluster.worker.id}] `; - console.log(`${prefix}${msg}`); -} - +/** + * + * @param address + * @param defaultPort + * @returns {Promise} + */ exports.parseAndValidateAddressString = function(address, defaultPort) { let [host, port] = address.split(':'); port = parseInt(port); @@ -101,15 +99,46 @@ exports.parseAndValidateAddressString = function(address, defaultPort) { if(err) return reject(err); resolve({ host: address, port: port }); }); - }) + }); +}; + +/** + * + * @param {Number} lvl + * @param {String} msg + */ +exports.log = exports.defaultLogger = (lvl, msg) => { + if (lvl <= logLevel) + console.log(`${msg}`); +}; + +/** + * + * @param {Number} lvl + * @param {String} msg + */ +exports.defaultClusterLogger = (lvl, msg) => { + if (lvl <= logLevel) { + const prefix = cluster.isMaster ? "[Cluster:M] " : `[Cluster:${cluster.worker.id}] `; + console.log(`${prefix}${msg}`); + } }; -exports.log = DefaultLogger; +/** + * + * @param {Function} logger + */ +exports.setLogger = function(logger) { + if(logger) + exports.log = logger; +}; -exports.SetLogger = function(logger) { - exports.log = logger || DefaultLogger; +/** + * + * @param {Number} lvl + */ +exports.setLogLevel = function(lvl) { + logLevel = Math.min(consts.LOG_DBG, Math.max(consts.LOG_NONE, lvl)); }; -exports.SetLogLevel = function(lvl) { - logLevel = Math.min(consts.LOG_DBG, Math.max(0, lvl)); -}; \ No newline at end of file +exports.getLogLevel = () => logLevel; \ No newline at end of file diff --git a/main.js b/main.js index e3725a6..1fc1b58 100755 --- a/main.js +++ b/main.js @@ -39,7 +39,8 @@ program.description("Unity Cache Server") program.parse(process.argv); -helpers.SetLogLevel(program.logLevel); +helpers.setLogLevel(program.logLevel); +helpers.setLogger(program.workers > 0 ? helpers.defaultClusterLogger : helpers.defaultLogger); if (program.monitorParentProcess > 0) { function monitor() { diff --git a/package-lock.json b/package-lock.json index 535b71b..261234d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -626,6 +626,15 @@ "resolved": "https://registry.npmjs.org/filesize/-/filesize-3.5.11.tgz", "integrity": "sha512-ZH7loueKBoDb7yG9esn1U+fgq7BzlzW6NRi5/rMdxIZ05dj7GFD/Xc5rq2CDt5Yq86CyfSYVyx4242QQNZbx1g==" }, + "formatio": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/formatio/-/formatio-1.2.0.tgz", + "integrity": "sha1-87IWfZBoxGmKjVH092CjmlTYGOs=", + "dev": true, + "requires": { + "samsam": "1.3.0" + } + }, "fs-extra": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-5.0.0.tgz", @@ -699,6 +708,12 @@ "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.5.tgz", "integrity": "sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=" }, + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=", + "dev": true + }, "isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", @@ -729,6 +744,12 @@ "graceful-fs": "4.1.11" } }, + "just-extend": { + "version": "1.1.27", + "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-1.1.27.tgz", + "integrity": "sha512-mJVp13Ix6gFo3SBAy9U/kL+oeZqzlYYYLQBwXVBlVzIsZwBqGREnOro24oC/8s8aox+rJhtZ2DiQof++IrkA+g==", + "dev": true + }, "lodash": { "version": "4.17.4", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", @@ -773,6 +794,12 @@ "lodash._isiterateecall": "https://registry.npmjs.org/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz" } }, + "lodash.get": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", + "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=", + "dev": true + }, "lodash.isarguments": { "version": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", "integrity": "sha1-L1c9hcaiQon/AGY7SRwdM4/zRYo=", @@ -798,6 +825,12 @@ "resolved": "https://registry.npmjs.org/lokijs/-/lokijs-1.5.1.tgz", "integrity": "sha512-Pj67gdP6CxUPV7AXM/VAnUZNyKR6mx4JxNmZfVG7XeebBZyrd8iLcKxKutc6Z5akJlMb0EeCxPW8/YkCPiMQbw==" }, + "lolex": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-2.3.1.tgz", + "integrity": "sha512-mQuW55GhduF3ppo+ZRUTz1PRjEh1hS5BbqU7d8D0ez2OKxHDod7StPPeAVKisZR5aLkHZjdGWSL42LSONUJsZw==", + "dev": true + }, "minimatch": { "version": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", @@ -867,6 +900,27 @@ "resolved": "https://registry.npmjs.org/ncp/-/ncp-1.0.1.tgz", "integrity": "sha1-0VNn5cuHQyuhF9K/gP30Wuz7QkY=" }, + "nise": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/nise/-/nise-1.2.0.tgz", + "integrity": "sha512-q9jXh3UNsMV28KeqI43ILz5+c3l+RiNW8mhurEwCKckuHQbL+hTJIKKTiUlCPKlgQ/OukFvSnKB/Jk3+sFbkGA==", + "dev": true, + "requires": { + "formatio": "1.2.0", + "just-extend": "1.1.27", + "lolex": "1.6.0", + "path-to-regexp": "1.7.0", + "text-encoding": "0.6.4" + }, + "dependencies": { + "lolex": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-1.6.0.tgz", + "integrity": "sha1-OpoCg0UqR9dDnnJzG54H1zhuSfY=", + "dev": true + } + } + }, "nyc": { "version": "11.4.1", "resolved": "https://registry.npmjs.org/nyc/-/nyc-11.4.1.tgz", @@ -2482,6 +2536,15 @@ "version": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" }, + "path-to-regexp": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz", + "integrity": "sha1-Wf3g9DW62suhA6hOnTvGTpa5k30=", + "dev": true, + "requires": { + "isarray": "0.0.1" + } + }, "pkginfo": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.4.1.tgz", @@ -2521,6 +2584,44 @@ "glob": "https://registry.npmjs.org/glob/-/glob-7.1.1.tgz" } }, + "samsam": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/samsam/-/samsam-1.3.0.tgz", + "integrity": "sha512-1HwIYD/8UlOtFS3QO3w7ey+SdSDFE4HRNLZoZRYVQefrOY3l17epswImeB1ijgJFQJodIaHcwkp3r/myBjFVbg==", + "dev": true + }, + "sinon": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-4.2.2.tgz", + "integrity": "sha512-BEa593xl+IkIc94nKo0O0LauQC/gQy8Gyv4DkzPwF/9DweC5phr1y+42zibCpn9abfkdHxt9r8AhD0R6u9DE/Q==", + "dev": true, + "requires": { + "diff": "https://registry.npmjs.org/diff/-/diff-3.2.0.tgz", + "formatio": "1.2.0", + "lodash.get": "4.4.2", + "lolex": "2.3.1", + "nise": "1.2.0", + "supports-color": "5.1.0", + "type-detect": "4.0.7" + }, + "dependencies": { + "has-flag": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz", + "integrity": "sha1-6CB68cx7MNRGzHC3NLXovhj4jVE=", + "dev": true + }, + "supports-color": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.1.0.tgz", + "integrity": "sha512-Ry0AwkoKjDpVKK4sV4h6o3UJmNRbjYm2uXhwfj3J56lMVdvnUNqzQVRztOOMGQ++w1K/TjNDFvpJk0F/LoeBCQ==", + "dev": true, + "requires": { + "has-flag": "2.0.0" + } + } + } + }, "sprintf-js": { "version": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" @@ -2538,6 +2639,12 @@ "has-flag": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz" } }, + "text-encoding": { + "version": "0.6.4", + "resolved": "https://registry.npmjs.org/text-encoding/-/text-encoding-0.6.4.tgz", + "integrity": "sha1-45mpgiV6J22uQou5KEXLcb3CbRk=", + "dev": true + }, "tmp": { "version": "0.0.33", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", @@ -2547,6 +2654,12 @@ "os-tmpdir": "1.0.2" } }, + "type-detect": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.7.tgz", + "integrity": "sha512-4Rh17pAMVdMWzktddFhISRnUnFIStObtUMNGzDwlA6w/77bmGv3aBbRdCmQR6IjzfkTo9otnW+2K/cDRhKSxDA==", + "dev": true + }, "universalify": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.1.tgz", diff --git a/package.json b/package.json index f6bcd60..79cc86e 100644 --- a/package.json +++ b/package.json @@ -34,6 +34,7 @@ "mocha": "^3.5.3", "mocha-lcov-reporter": "^1.3.0", "nyc": "^11.4.1", + "sinon": "^4.2.2", "tmp": "0.0.33" }, "dependencies": { diff --git a/test/cache_base.js b/test/cache_base.js index 4c4a2b2..5c43b71 100644 --- a/test/cache_base.js +++ b/test/cache_base.js @@ -26,14 +26,14 @@ describe("Cache: Base Class", () => { describe("get _optionsPath", () => { it("should return 'Cache.options'", () => { - assert(cache._optionsPath === 'Cache.options'); + assert.strictEqual(cache._optionsPath, 'Cache.options'); }); }); describe("get _options", () => { it("should return an object with options for all built-in cache modules", () => { let cacheOptions = cache._options; - assert(typeof(cacheOptions) === 'object'); + assert.strictEqual(typeof(cacheOptions), 'object'); assert(cacheOptions.hasOwnProperty('cache_fs')); assert(cacheOptions.hasOwnProperty('cache_ram')); }); @@ -51,12 +51,12 @@ describe("Cache: Base Class", () => { describe("get _cachePath", () => { it("should return null if there is no cachePath option set", () => { - assert(cache._cachePath === null); + assert.equal(cache._cachePath, null); }); it("should return the exact value of cachePath if cachePath is an absolute path", () => { cache._optionOverrides = opts; - assert(cache._cachePath === opts.cachePath); + assert.strictEqual(cache._cachePath, opts.cachePath); }); it("should return a subdirectory path relative to the app root if cachePath is not an abosolute path", () => { @@ -64,7 +64,7 @@ describe("Cache: Base Class", () => { cachePath: "abc123" }; - assert(cache._cachePath === path.join(path.dirname(require.main.filename), "abc123")); + assert.strictEqual(cache._cachePath, path.join(path.dirname(require.main.filename), "abc123")); }); }); @@ -138,19 +138,25 @@ describe("PutTransaction: Base Class", () => { describe("get guid", () => { it("should return the guid passed to the constructor", () => { - assert(guid.compare(trx.guid) === 0); + assert.equal(guid.compare(trx.guid), 0); }); }); describe("get hash", () => { it("should return the hash passed to the constructor", () => { - assert(hash.compare(trx.hash) === 0); + assert.equal(hash.compare(trx.hash), 0); + }); + }); + + describe("get manifest", () => { + it("should return an empty array", () => { + assert.equal(trx.manifest.length, 0); }); }); describe("get files", () => { it("should return an empty array", () => { - assert(trx.files.length === 0); + assert.equal(trx.files.length, 0); }); }); diff --git a/test/helpers.js b/test/helpers.js index 76bbdde..c8a535e 100644 --- a/test/helpers.js +++ b/test/helpers.js @@ -1,5 +1,7 @@ const assert = require('assert'); const helpers = require('../lib/helpers'); +const consts = require('../lib/constants'); +const sinon = require('sinon'); describe("Helper functions", () => { const guid = Buffer.from([80,127,95,145,103,153,135,123,185,19,13,54,122,207,246,26]); @@ -37,4 +39,109 @@ describe("Helper functions", () => { assert(!helpers.isBuffer(null)); }) }); + + describe("parseAndValidateAddressString", () => { + it("should resolve a valid address to an IP and return an object with host and port properties", async () => { + const result = await helpers.parseAndValidateAddressString("localhost", 0); + assert.equal(result.host, "127.0.0.1"); + assert.strictEqual(result.port, 0); + }); + + it("should parse an address:port string", async () => { + const result = await helpers.parseAndValidateAddressString("localhost:1234", 0); + assert.equal(result.host, "127.0.0.1"); + assert.strictEqual(result.port, 1234); + }); + + it("should throw an error if the address can't be resolved", () => { + return helpers.parseAndValidateAddressString("blah", 0) + .then(() => { throw new Error("Expected error"); }, err => {}); + }); + }); + + describe("Logging functions", () => { + before(() => { + this.oldLevel = helpers.getLogLevel(); + + }); + + after(() => { + helpers.setLogLevel(this.oldLevel); + }); + + describe("defaultLogger", () => { + it("should log a console message if the desired log level is >= the minimum level", () => { + const spy = sinon.spy(console, 'log'); + const str = "Hello World"; + helpers.setLogLevel(consts.LOG_TEST); + + helpers.defaultLogger(consts.LOG_WARN, str); + assert(spy.calledOnce); + spy.resetHistory(); + + helpers.defaultLogger(consts.LOG_TEST, str); + assert(spy.calledOnce); + spy.resetHistory(); + + helpers.defaultLogger(consts.LOG_DBG, str); + assert(spy.notCalled); + spy.restore(); + }); + }); + + describe("defaultClusterLogger", () => { + it("should log a console message if the desired log level is >= the minimum level", () => { + const spy = sinon.spy(console, 'log'); + const str = "Hello World"; + helpers.setLogLevel(consts.LOG_TEST); + + helpers.defaultClusterLogger(consts.LOG_WARN, str); + assert(spy.calledOnce); + spy.resetHistory(); + + helpers.defaultClusterLogger(consts.LOG_TEST, str); + assert(spy.calledOnce); + spy.resetHistory(); + + helpers.defaultClusterLogger(consts.LOG_DBG, str); + assert(spy.notCalled); + spy.restore(); + }); + }); + + describe("setLogger", () => { + it("should do nothing if the passeed in logger is null", () => { + let prev = helpers.log; + helpers.setLogger(null); + assert.strictEqual(prev, helpers.log); + }); + + it("should change the logging function to the passed in function", () => { + let myLogger = (lvl, msg) => {}; + helpers.setLogger(myLogger); + assert.strictEqual(myLogger, helpers.log); + }); + }); + + describe("setLogLevel", () => { + it("should change the logging level to the specified level", () => { + helpers.setLogLevel(consts.LOG_INFO); + assert.equal(helpers.getLogLevel(), consts.LOG_INFO); + helpers.setLogLevel(consts.LOG_DBG); + assert.equal(helpers.getLogLevel(), consts.LOG_DBG); + }); + + it("should not allow a value out of range", () => { + helpers.setLogLevel(consts.LOG_DBG); + assert.equal(helpers.getLogLevel(), consts.LOG_DBG); + helpers.setLogLevel(consts.LOG_DBG + 1); + assert.equal(helpers.getLogLevel(), consts.LOG_DBG); + + helpers.setLogLevel(consts.LOG_NONE); + assert.equal(helpers.getLogLevel(), consts.LOG_NONE); + helpers.setLogLevel(consts.LOG_NONE - 1); + assert.equal(helpers.getLogLevel(), consts.LOG_NONE); + }); + }); + }); }); \ No newline at end of file diff --git a/test/protocol.js b/test/protocol.js index e86a5f7..01fe36d 100644 --- a/test/protocol.js +++ b/test/protocol.js @@ -48,7 +48,7 @@ describe("Protocol", function() { describe(module.name, function() { beforeEach(function() { - helpers.SetLogger(function() {}); + helpers.setLogger(function() {}); }); before(function () { diff --git a/test/server.js b/test/server.js index 753341e..2e50cd0 100644 --- a/test/server.js +++ b/test/server.js @@ -7,7 +7,7 @@ const Cache = require('../lib/cache/cache_base').CacheBase; const sleep = require('./test_utils').sleep; const cmd = require('./test_utils').cmd; -helpers.SetLogger(()=>{}); +helpers.setLogger(()=>{}); const cache = new Cache(); const server = new CacheServer(cache, {port: 0}); let client; @@ -15,7 +15,7 @@ let client; describe("Server common", function() { beforeEach(function() { - helpers.SetLogger(() => {}); + helpers.setLogger(() => {}); }); before(function () { diff --git a/test/test_utils.js b/test/test_utils.js index 57c5566..6e3a7f0 100644 --- a/test/test_utils.js +++ b/test/test_utils.js @@ -55,12 +55,12 @@ exports.expectLog = function(client, regex, condition, callback) { } let match; - helpers.SetLogger(function (lvl, msg) { + helpers.setLogger(function (lvl, msg) { match = match || regex.test(msg); }); client.on('close', function() { - assert(match === condition); + assert.strictEqual(match, condition); callback(); }); }; From 811005d0a1103fd9a976897ed2734a75fc90d95d Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Sat, 27 Jan 2018 19:01:13 -0600 Subject: [PATCH 48/89] more optimizations and conversion to async/await instead of creating new Promises --- lib/server/command_processor.js | 192 +++++++++++++++----------------- 1 file changed, 92 insertions(+), 100 deletions(-) diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index b1fde21..c62e899 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -2,6 +2,7 @@ const helpers = require('./../helpers'); const filesize = require('filesize'); const consts = require('./../constants'); const Duplex = require('stream').Duplex; +const { promisify } = require('util'); const kSource = Symbol("source"); const kCache = Symbol("cache"); @@ -9,6 +10,7 @@ const kSendFileQueue = Symbol("sendFileQueue"); const kReadStateVersion = Symbol("readStateVersion"); const kReadStateCommand = Symbol("readStateCommand"); const kReadStatePutStream = Symbol("readStatePutStream"); +const kReadStateNone = Symbol("readStateNone"); class CommandProcessor extends Duplex { @@ -20,7 +22,7 @@ class CommandProcessor extends Duplex { super(); this[kCache] = cache; this[kSendFileQueue] = []; - this._readState = kReadStateVersion; + this._setWriteHandler(kReadStateVersion); /** * @@ -42,6 +44,27 @@ class CommandProcessor extends Duplex { this._registerEventListeners(); } + /** + * + * @param {symbol} readState + * @private + */ + _setWriteHandler(readState) { + switch(readState) { + case kReadStateVersion: + this._writeHandler = this._handleVersion; + break; + case kReadStateCommand: + this._writeHandler = this._handleCommand; + break; + case kReadStatePutStream: + this._writeHandler = this._handleWrite; + break; + default: + this._writeHandler = () => Promise.resolve(); + } + } + _registerEventListeners() { const self = this; this.once('finish', this._printReadStats); @@ -59,29 +82,14 @@ class CommandProcessor extends Duplex { * @private */ _write(chunk, encoding, callback) { - let handler = null; - - switch(this._readState) { - case kReadStateVersion: - handler = this._handleVersion(chunk); - break; - case kReadStateCommand: - handler = this._handleCommand(chunk); - break; - case kReadStatePutStream: - handler = this._handleWrite(chunk); - break; - default: - return callback(null); - } - - handler.then(() => callback(), err => this._quit(err)); + this._writeHandler.call(this, chunk) + .then(() => callback(), err => this._quit(err)); } /** * @private */ - _read() { + _read() { this._readReady = true; Promise.resolve().then(() => this._read_internal()); } @@ -89,56 +97,51 @@ class CommandProcessor extends Duplex { /** * @private */ - _read_internal() { + async _read_internal() { if(this._isReading || this[kSendFileQueue].length === 0) return; + + let file = this[kSendFileQueue][0]; - let self = this; - let file = self[kSendFileQueue][0]; - - self._readReady = self.push(file.header, 'ascii'); + this._readReady = this.push(file.header, 'ascii'); if(!file.exists) { - self[kSendFileQueue].shift(); + this[kSendFileQueue].shift(); return; } - self._isReading = true; - self._readStartTime = Date.now(); - this[kCache].getFileStream(file.type, file.guid, file.hash) - .then(stream => { - function readChunk() { - if(!self._readReady) { - return setImmediate(readChunk); - } - - let chunk; - while(chunk = stream.read()) { - self._readReady = self.push(chunk, 'ascii'); - self._sendFileQueueChunkReads++; - self._sendFileQueueReadBytes += chunk.length; - - if(!self._readReady) { - setImmediate(readChunk); - break; - } - } + this._isReading = true; + this._readStartTime = Date.now(); + const stream = await this[kCache].getFileStream(file.type, file.guid, file.hash); + const self = this; + + function readChunk() { + if(!self._readReady) { + return setImmediate(readChunk); + } + + let chunk; + while(chunk = stream.read()) { + self._readReady = self.push(chunk, 'ascii'); + self._sendFileQueueChunkReads++; + self._sendFileQueueReadBytes += chunk.length; + + if(!self._readReady) { + setImmediate(readChunk); + break; } + } + } - stream.on('readable', readChunk); - - stream.on('end', () => { - self[kSendFileQueue].shift(); - self._sendFileQueueSentCount++; - self._isReading = false; - self._sendFileQueueReadDuration += Date.now() - self._readStartTime; - self._read(); - }) - }) - .catch(err => { - helpers.log(consts.LOG_ERR, err); - self._isReading = false; - }); + stream.on('readable', readChunk); + + stream.on('end', () => { + self[kSendFileQueue].shift(); + self._sendFileQueueSentCount++; + self._isReading = false; + self._sendFileQueueReadDuration += Date.now() - self._readStartTime; + self._read(); + }); } /** @@ -157,17 +160,13 @@ class CommandProcessor extends Duplex { * @param {Error?} err * @private */ - _quit(err) { - return new Promise(resolve => { - this[kSource].unpipe(this); - this[kSource].emit('quit'); - this._readState = null; - if(err) { - helpers.log(consts.LOG_ERR, err); - } - - resolve(); - }); + async _quit(err) { + this[kSource].unpipe(this); + this[kSource].emit('quit'); + this._setWriteHandler(kReadStateNone); + if(err) { + helpers.log(consts.LOG_ERR, err); + } } /** @@ -175,20 +174,18 @@ class CommandProcessor extends Duplex { * @param {Buffer} data * @private */ - _handleVersion(data) { - const self = this; - return new Promise((resolve, reject) => { - let version = helpers.readUInt32(data); - self._readState = kReadStateCommand; - let err = null; - if(version !== consts.PROTOCOL_VERSION) { - version = 0; - err = new Error("Bad Client protocol version"); - } + async _handleVersion(data) { + let version = helpers.readUInt32(data); + this._setWriteHandler(kReadStateCommand); + + let err = null; + if(version !== consts.PROTOCOL_VERSION) { + version = 0; + err = new Error("Bad Client protocol version"); + } - self.push(helpers.encodeInt32(version)); - err ? reject(err) : resolve(); - }); + this.push(helpers.encodeInt32(version)); + if(err) throw err; } /** @@ -196,21 +193,15 @@ class CommandProcessor extends Duplex { * @param {Buffer} data * @private */ - _handleWrite(data) { - const self = this; - return new Promise(resolve => { - this._putStream.write(data, 'ascii', () => { - self._putSent += data.length; - if(self._putSent === self._putSize) { - self._putStream.end(); - self._readState = kReadStateCommand; - self._putSent = 0; - self._putSize = 0; - } - - resolve(); - }); - }); + async _handleWrite(data) { + await this._putStream.promiseWrite(data, 'ascii'); + this._putSent += data.length; + if(this._putSent === this._putSize) { + this._putStream.end(); + this._setWriteHandler(kReadStateCommand); + this._putSent = 0; + this._putSize = 0; + } } /** @@ -295,7 +286,7 @@ class CommandProcessor extends Duplex { } finally { if(this[kSendFileQueue].length === 1) { - this._read(this._readState.highWaterMark); + this._read(); } } } @@ -346,8 +337,9 @@ class CommandProcessor extends Duplex { } this._putStream = await this._trx.getWriteStream(type, size); + this._putStream.promiseWrite = promisify(this._putStream.write).bind(this._putStream); this._putSize = size; - this._readState = kReadStatePutStream; + this._setWriteHandler(kReadStatePutStream); } } From 6ac6dc9efce1503b380a8e2552472c2621a5a895 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Sat, 27 Jan 2018 20:57:40 -0600 Subject: [PATCH 49/89] - more async/await cleanup - conform to using assert.equal or assert.strictEqual in tests --- lib/cache/cache_fs.js | 151 ++++++++++++++------------------ lib/server/command_processor.js | 14 ++- test/cache_api.js | 14 +-- test/cache_base.js | 4 +- test/helpers.js | 4 +- test/protocol.js | 109 ++++++++++------------- test/server.js | 6 +- test/transaction_mirror.js | 10 +-- 8 files changed, 144 insertions(+), 168 deletions(-) diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 3adbffa..40c681b 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -56,47 +56,38 @@ class CacheRAM extends CacheBase { return Promise.resolve(); } - getFileInfo(type, guid, hash) { - return new Promise((resolve, reject) => { - fs.stat(this._calcFilepath(type, guid, hash)) - .then(stats => { - resolve({size: stats.size}); - }) - .catch(err => { - reject(err); - }); - }); + async getFileInfo(type, guid, hash) { + const stats = await fs.stat(this._calcFilepath(type, guid, hash)); + return {size: stats.size}; } getFileStream(type, guid, hash) { let stream = fs.createReadStream(this._calcFilepath(type, guid, hash)); return new Promise((resolve, reject) => { - stream.on('open', () => { - resolve(stream); - }).on('error', err => { + stream.on('open', () => resolve(stream)) + .on('error', err => { helpers.log(consts.LOG_ERR, err); - reject(stream); + reject(err); }); }); } - createPutTransaction(guid, hash) { - return Promise.resolve(new PutTransactionFS(guid, hash, this._cachePath)); + async createPutTransaction(guid, hash) { + return new PutTransactionFS(guid, hash, this._cachePath); } - endPutTransaction(transaction) { + async endPutTransaction(transaction) { let self = this; - function moveFile(file) { + let moveFile = async (file) => { let filePath = self._calcFilepath(file.type, transaction.guid, transaction.hash); helpers.log(consts.LOG_INFO, `Adding file to cache: ${file.size} ${filePath}`); - return fs.move(file.file, filePath, { overwrite: true }); - } + await fs.move(file.file, filePath, { overwrite: true }); + }; - return transaction.finalize().then(() => { - return Promise.all(transaction.files.map(moveFile)); - }); + await transaction.finalize(); + await Promise.all(transaction.files.map(moveFile)); } registerClusterWorker(worker) {} @@ -129,44 +120,42 @@ class PutTransactionFS extends PutTransaction { this._files = []; } - _closeAllStreams() { - return new Promise((resolve, reject) => { - let self = this; - let files = _.values(this._streams); - - if(files.length === 0) - return resolve(); - - let closed = 0; - let toClose = files.length; - let success = true; - - function processClosedStream(stream) { - closed++; - - if(stream.stream.bytesWritten === stream.size) { - self._files.push({ - file: stream.file, - type: stream.type, - size: stream.size - }); - } - else { - success = false; - } - - if(closed === toClose) { - success ? resolve() : reject(new Error("Transaction failed; file size mismatch")); - } + async _closeAllStreams() { + let self = this; + let files = _.values(this._streams); + if(files.length === 0) return; + + function processClosedStream(stream) { + if(stream.stream.bytesWritten === stream.size) { + self._files.push({ + file: stream.file, + type: stream.type, + size: stream.size + }); + } + else { + throw new Error("Transaction failed; file size mismatch"); + } + } + + for(let file of files) { + if(file.stream.closed) { + processClosedStream(file); + continue; } - files.forEach(file => { - if(file.stream.closed) return processClosedStream(file); + await new Promise((resolve, reject) => { file.stream.on('close', () => { - processClosedStream(file); + try { + processClosedStream(file); + resolve(); + } + catch(err) { + reject(err); + } }); }); - }); + } } get manifest() { @@ -177,41 +166,33 @@ class PutTransactionFS extends PutTransaction { return this._files; } - finalize() { - return this._closeAllStreams().then(() => super.finalize()); + async finalize() { + await this._closeAllStreams(); + await super.finalize(); } - getWriteStream(type, size) { - let self = this; + async getWriteStream(type, size) { let file = path.join(this._cachePath, uuid()); - return new Promise((resolve, reject) => { - if(typeof(size) !== 'number' || size <= 0) { - return reject(new Error("Invalid size for write stream")); - } + if(typeof(size) !== 'number' || size <= 0) { + throw new Error("Invalid size for write stream"); + } - if(type !== 'a' && type !== 'i' && type !== 'r') { - return reject(new Error(`Unrecognized type '${type}' for transaction.`)); - } + if(type !== 'a' && type !== 'i' && type !== 'r') { + throw new Error(`Unrecognized type '${type}' for transaction.`); + } - fs.ensureFile(file) - .then(() => { - let stream = fs.createWriteStream(file, this._writeOptions); - stream.on('open', () => { - resolve(stream); - }); - - self._streams[type] = { - file: file, - type: type, - size: size, - stream: stream - }; - }) - .catch(err => { - reject(err); - }); - }); + await fs.ensureFile(file); + const stream = fs.createWriteStream(file, this._writeOptions); + this._streams[type] = { + file: file, + type: type, + size: size, + stream: stream + }; + + await new Promise(resolve => stream.on('open', () => resolve())); + return stream; } } diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index c62e899..cb67e44 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -89,7 +89,7 @@ class CommandProcessor extends Duplex { /** * @private */ - _read() { + _read() { this._readReady = true; Promise.resolve().then(() => this._read_internal()); } @@ -112,7 +112,17 @@ class CommandProcessor extends Duplex { this._isReading = true; this._readStartTime = Date.now(); - const stream = await this[kCache].getFileStream(file.type, file.guid, file.hash); + let stream; + + try { + stream = await this[kCache].getFileStream(file.type, file.guid, file.hash); + } + catch(err) { + helpers.log(consts.LOG_ERR, err); + this._isReading = false; + return; + } + const self = this; function readChunk() { diff --git a/test/cache_api.js b/test/cache_api.js index acd3b18..dd2c6fe 100644 --- a/test/cache_api.js +++ b/test/cache_api.js @@ -80,8 +80,8 @@ describe("Cache API", () => { it("should return a PutTransaction object for the given file hash & guid", () => { return cache.createPutTransaction(fileData.guid, fileData.hash) .then(trx => { - assert(trx.guid.compare(fileData.guid) === 0); - assert(trx.hash.compare(fileData.hash) === 0); + assert.strictEqual(trx.guid.compare(fileData.guid), 0); + assert.strictEqual(trx.hash.compare(fileData.hash), 0); }); }); }); @@ -110,7 +110,7 @@ describe("Cache API", () => { .then(stream => stream.end(fileData.info)) .then(() => cache.endPutTransaction(trx)) .then(() => cache.getFileInfo('i', fileData.guid, fileData.hash)) - .then(info => assert(info.size === fileData.info.length)); + .then(info => assert.equal(info.size, fileData.info.length)); }); it("should return an error if any files were partially written to the transaction", () => { @@ -182,13 +182,13 @@ describe("PutTransaction API", function() { describe("get guid", function() { it("should return the file guid for the transaction", () => { - assert(trx.guid === fileData.guid); + assert.strictEqual(trx.guid, fileData.guid); }); }); describe("get hash", function() { it("should return the file hash for the transaction", () => { - assert(trx.hash === fileData.hash); + assert.strictEqual(trx.hash, fileData.hash); }); }); @@ -210,14 +210,14 @@ describe("PutTransaction API", function() { describe("get files", function() { it("should return an empty array before finalize() is called", () => { - assert(trx.files.length === 0); + assert.strictEqual(trx.files.length, 0); }); it("should return a list of objects that represent completed files for the transaction", () => { return trx.getWriteStream('i', fileData.info.length) .then(stream => stream.end(fileData.info)) .then(() => trx.finalize()) - .then(() => assert(trx.files.length === 1)); + .then(() => assert.equal(trx.files.length, 1)); }); }); diff --git a/test/cache_base.js b/test/cache_base.js index 5c43b71..4c698db 100644 --- a/test/cache_base.js +++ b/test/cache_base.js @@ -45,7 +45,7 @@ describe("Cache: Base Class", () => { let cacheOptions = cache._options; assert(cacheOptions.hasOwnProperty('$testVal')); - assert(cacheOptions.$testVal.nested.option === true); + assert.strictEqual(cacheOptions.$testVal.nested.option, true); }); }); @@ -164,7 +164,7 @@ describe("PutTransaction: Base Class", () => { it("should return a promise and emit a 'finalize' event", (done) => { trx.once('finalize', () => done()); let p = trx.finalize(); - assert(typeof(p.then) === 'function'); + assert.equal(typeof(p.then), 'function'); }); }); diff --git a/test/helpers.js b/test/helpers.js index c8a535e..d43a766 100644 --- a/test/helpers.js +++ b/test/helpers.js @@ -9,7 +9,7 @@ describe("Helper functions", () => { describe("GUIDBufferToString", () => { it("should convert a 16 byte buffer to a hex representation that matches Unity's string formatter for GUIDs", () => { - assert(helpers.GUIDBufferToString(guid) === guidStr); + assert.strictEqual(helpers.GUIDBufferToString(guid), guidStr); }); it("should throw an error if the input is not a buffer or the wrong length", () => { @@ -21,7 +21,7 @@ describe("Helper functions", () => { describe("GUIDStringToBuffer", () => { it("should convert a 32 character hex string that represents a Unity GUID to an equivalent byte buffer", () => { - assert(guid.compare(helpers.GUIDStringToBuffer(guidStr)) === 0); + assert.strictEqual(guid.compare(helpers.GUIDStringToBuffer(guidStr)), 0); }); diff --git a/test/protocol.js b/test/protocol.js index 01fe36d..1570d92 100644 --- a/test/protocol.js +++ b/test/protocol.js @@ -43,78 +43,73 @@ let test_modules = [ } ]; -describe("Protocol", function() { - test_modules.forEach(function(module) { +describe("Protocol", () => { + test_modules.forEach(module => { describe(module.name, function() { - beforeEach(function() { - helpers.setLogger(function() {}); + beforeEach(() => { + helpers.setLogger(() => {}); }); - before(function () { + before(async () => { /** @type {CacheBase} **/ let CacheModule = require(module.path); cache = new CacheModule(); module.options.cachePath = module.tmpDir.name; - return cache.init(module.options) - .then(() => { - server = new CacheServer(cache, {port: 0}); - }) - .then(() => server.start(err => assert(!err, `Cache Server reported error! ${err}`))); + await cache.init(module.options); + server = new CacheServer(cache, {port: 0}); + await server.start(err => assert(!err, `Cache Server reported error! ${err}`)); }); - after(function() { + after(() => { server.stop(); module.tmpDir.removeCallback(); }); - describe("Transactions", function () { + describe("Transactions", () => { const self = this; - before(function() { + before(() => { self.data = generateCommandData(MIN_FILE_SIZE, MAX_FILE_SIZE); }); - beforeEach(() => { - return getClientPromise(server.port) - .then(c => { - client = c; - return clientWrite(c, helpers.encodeInt32(consts.PROTOCOL_VERSION)); - }); + beforeEach(async () => { + client = await getClientPromise(server.port); + await clientWrite(client, helpers.encodeInt32(consts.PROTOCOL_VERSION)); }); - it("should start a transaction with the (ts) command", function (done) { + it("should start a transaction with the (ts) command", (done) => { expectLog(client, /Start transaction/, done); client.end(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); }); - it("should cancel a pending transaction if a new (ts) command is received", function (done) { + it("should cancel a pending transaction if a new (ts) command is received", (done) => { expectLog(client, /Cancel previous transaction/, done); const d = encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash); client.write(d); // first one ... client.end(d); // ... canceled by this one }); - it("should require a start transaction (ts) cmd before an end transaction (te) cmd", function (done) { + it("should require a start transaction (ts) cmd before an end transaction (te) cmd", (done) => { expectLog(client, /Invalid transaction isolation/, done); client.end(cmd.transactionEnd); }); - it("should end a transaction that was started with the (te) command", function (done) { + it("should end a transaction that was started with the (te) command", (done) => { expectLog(client, /End transaction for/, done); client.write(encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash)); client.end(cmd.transactionEnd); }); - it("should require a transaction start (te) command before a put command", function(done) { + it("should require a transaction start (te) command before a put command", (done) => { expectLog(client, /Not in a transaction/, done); client.write(encodeCommand(cmd.putAsset, null, null, 'abc')); }); - it("should close the socket on an invalid transaction command", function(done) { + it("should close the socket on an invalid transaction command", (done) => { expectLog(client, /Unrecognized command/i, done); client.write('tx', self.data.guid, self.data.hash); }); @@ -126,22 +121,19 @@ describe("Protocol", function() { const self = this; - before(function () { + before(() => { self.data = generateCommandData(MIN_FILE_SIZE, MAX_FILE_SIZE); }); - beforeEach(() => { - return getClientPromise(server.port) - .then(c => { - client = c; + beforeEach(async () => { + client = await getClientPromise(server.port); - // The Unity client always sends the version once on-connect. i.e., the version should not be pre-pended - // to other request data in the tests below. - return clientWrite(c, helpers.encodeInt32(consts.PROTOCOL_VERSION)); - }); + // The Unity client always sends the version once on-connect. i.e., the version should not be pre-pended + // to other request data in the tests below. + await clientWrite(client, helpers.encodeInt32(consts.PROTOCOL_VERSION)); }); - it("should close the socket on an invalid PUT type", function (done) { + it("should close the socket on an invalid PUT type", (done) => { expectLog(client, /Unrecognized command/i, done); let buf = Buffer.from( encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + @@ -166,7 +158,7 @@ describe("Protocol", function() { return clientWrite(client, buf, test.packetSize) .then(() => cache.getFileStream(test.cmd[1], self.data.guid, self.data.hash)) .then(stream => readStream(stream, self.data[test.ext].length)) - .then(data => assert(self.data[test.ext].compare(data) === 0)); + .then(data => assert.strictEqual(self.data[test.ext].compare(data), 0)); }); }); @@ -181,7 +173,7 @@ describe("Protocol", function() { return clientWrite(client, buf) .then(() => cache.getFileStream('a', self.data.guid, self.data.hash)) .then(stream => readStream(stream, asset.length)) - .then(buffer => assert(asset.compare(buffer) === 0)); + .then(buffer => assert.strictEqual(asset.compare(buffer), 0)); }); }); @@ -191,7 +183,7 @@ describe("Protocol", function() { const self = this; self.data = generateCommandData(MIN_FILE_SIZE, MAX_FILE_SIZE); - before(() => { + before(async () => { const buf = Buffer.from( helpers.encodeInt32(consts.PROTOCOL_VERSION) + encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + @@ -201,25 +193,19 @@ describe("Protocol", function() { encodeCommand(cmd.transactionEnd) + encodeCommand(cmd.quit), 'ascii'); - return getClientPromise(server.port) - .then(c => { - client = c; - return clientWrite(c, buf); - }); + client = await getClientPromise(server.port); + await clientWrite(client, buf); }); - beforeEach(() => { - return getClientPromise(server.port) - .then(c => { - client = c; + beforeEach(async () => { + client = await getClientPromise(server.port); - // The Unity client always sends the version once on-connect. i.e., the version should not be pre-pended - // to other request data in the tests below. - return clientWrite(c, helpers.encodeInt32(consts.PROTOCOL_VERSION)); - }); + // The Unity client always sends the version once on-connect. i.e., the version should not be pre-pended + // to other request data in the tests below. + await clientWrite(client, helpers.encodeInt32(consts.PROTOCOL_VERSION)); }); - it("should close the socket on an invalid GET type", function (done) { + it("should close the socket on an invalid GET type", (done) => { expectLog(client, /Unrecognized command/i, done); clientWrite(client, encodeCommand('gx', self.data.guid, self.data.hash)).catch(err => done(err)); }); @@ -232,10 +218,10 @@ describe("Protocol", function() { tests.forEach(function (test) { - it(`should respond with not found (-) for missing ${test.type} files (client write packet size = ${test.packetSize})`, function (done) { + it(`should respond with not found (-) for missing ${test.type} files (client write packet size = ${test.packetSize})`, (done) => { client.pipe(new CacheServerResponseTransform()) .on('header', function (header) { - assert(header.cmd === '-' + test.cmd[1]); + assert.strictEqual(header.cmd, '-' + test.cmd[1]); done(); }); @@ -246,33 +232,32 @@ describe("Protocol", function() { .catch(err => done(err)); }); - it(`should retrieve stored ${test.type} data with the (${test.cmd}) command (write packet size = ${test.packetSize})`, function (done) { + it(`should retrieve stored ${test.type} data with the (${test.cmd}) command (write packet size = ${test.packetSize})`, (done) => { let dataBuf; let pos = 0; let resp = new CacheServerResponseTransform(); resp.on('header', function (header) { - assert(header.cmd === '+' + test.cmd[1]); - assert(header.guid.compare(self.data.guid) === 0, "GUID does not match"); - assert(header.hash.compare(self.data.hash) === 0, "HASH does not match"); - assert(header.size === test.blob.length, "Expected size " + test.blob.length); + assert.strictEqual(header.cmd, '+' + test.cmd[1]); + assert.strictEqual(header.guid.compare(self.data.guid), 0, "GUID does not match"); + assert.strictEqual(header.hash.compare(self.data.hash), 0, "HASH does not match"); + assert.strictEqual(header.size, test.blob.length, "Expected size " + test.blob.length); dataBuf = Buffer.allocUnsafe(header.size); }) .on('data', function (data) { let prev = pos; pos += data.copy(dataBuf, pos); - assert(data.compare(test.blob.slice(prev, pos)) === 0, `Blobs don't match at pos ${pos}`); + assert.strictEqual(data.compare(test.blob.slice(prev, pos)), 0, `Blobs don't match at pos ${pos}`); }) .on('dataEnd', function () { - assert(dataBuf.compare(test.blob) === 0); + assert.strictEqual(dataBuf.compare(test.blob), 0); done(); }); client.pipe(resp); const buf = Buffer.from(encodeCommand(test.cmd, self.data.guid, self.data.hash), 'ascii'); - clientWrite(client, buf, test.packetSize).catch(err => done(err)); }); }); diff --git a/test/server.js b/test/server.js index 2e50cd0..994de62 100644 --- a/test/server.js +++ b/test/server.js @@ -35,7 +35,7 @@ describe("Server common", function() { it("should echo the version if supported", function (done) { client.on('data', function (data) { const ver = helpers.readUInt32(data); - assert(ver === consts.PROTOCOL_VERSION, "Expected " + consts.PROTOCOL_VERSION + " Received " + ver); + assert.strictEqual(ver, consts.PROTOCOL_VERSION, "Expected " + consts.PROTOCOL_VERSION + " Received " + ver); done(); }); @@ -45,7 +45,7 @@ describe("Server common", function() { it("should respond with 0 if unsupported", function (done) { client.on('data', function (data) { const ver = helpers.readUInt32(data); - assert(ver === 0, "Expected 0, Received " + ver); + assert.strictEqual(ver, 0, "Expected 0, Received " + ver); done(); }); @@ -57,7 +57,7 @@ describe("Server common", function() { client.on('data', function(data) { const ver = helpers.readUInt32(data); - assert(ver === consts.PROTOCOL_VERSION, "Expected " + consts.PROTOCOL_VERSION + " Received " + ver); + assert.strictEqual(ver, consts.PROTOCOL_VERSION, "Expected " + consts.PROTOCOL_VERSION + " Received " + ver); done(); }); diff --git a/test/transaction_mirror.js b/test/transaction_mirror.js index b165a2f..a2ae93e 100644 --- a/test/transaction_mirror.js +++ b/test/transaction_mirror.js @@ -50,26 +50,26 @@ describe("TransactionMirror", () => { fileData.forEach(async d => { let info = await this.targetCache.getFileInfo('i', d.guid, d.hash); - assert(info && info.size === d.info.length); + assert.strictEqual(info.size, d.info.length); info = await this.targetCache.getFileInfo('r', d.guid, d.hash); - assert(info && info.size === d.resource.length); + assert.strictEqual(info.size, d.resource.length); info = await this.targetCache.getFileInfo('a', d.guid, d.hash); - assert(info && info.size === d.bin.length); + assert.strictEqual(info.size, d.bin.length); }); }); describe("queueTransaction", () => { it("should not queue an empty transaction for mirroring", () => { this.mirror.queueTransaction({manifest: []}); - assert(this.mirror._queue.length === 0); + assert.strictEqual(this.mirror._queue.length, 0); }); }); describe("get address", () => { it("should return the address of the mirror host", () => { - assert(this.mirror.address === "localhost"); + assert.strictEqual(this.mirror.address, "localhost"); }); }); }); \ No newline at end of file From cf08c1c2eb6ff57975e565738e7e4d362a390297 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Sat, 27 Jan 2018 21:41:28 -0600 Subject: [PATCH 50/89] =?UTF-8?q?Slight=20fix=20to=20how=20a=20loki=20doc?= =?UTF-8?q?=20is=20modified=20and=20updated=20-=20you=20have=20to=20update?= =?UTF-8?q?=20imediately=20after=20modification=20or=20subsequent=20attemp?= =?UTF-8?q?ts=20to=20update=20after=20finding=20the=20same=20indexed=20doc?= =?UTF-8?q?=20won=E2=80=99t=20be=20allowed?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- lib/cache/cache_ram.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/cache/cache_ram.js b/lib/cache/cache_ram.js index 5a9a547..3947395 100644 --- a/lib/cache/cache_ram.js +++ b/lib/cache/cache_ram.js @@ -107,7 +107,8 @@ class CacheRAM extends CacheBase { } if(freeBlock.fileId) { - delete freeBlock.fileId; + freeBlock.fileId = undefined; + this._index.update(freeBlock); helpers.log(consts.LOG_DBG, `Allocated existing block of size ${freeBlock.size} for ${key}, last accessed ${freeBlock.timestamp}`); } else { From ebbd5e1a93c845968c50e13702bd2566564240c5 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Sat, 27 Jan 2018 22:08:07 -0600 Subject: [PATCH 51/89] Fixed a bug with LRU cache, where an incorrect field was being referenced --- lib/cache/cache_ram.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/cache/cache_ram.js b/lib/cache/cache_ram.js index 3947395..d6f6b40 100644 --- a/lib/cache/cache_ram.js +++ b/lib/cache/cache_ram.js @@ -109,7 +109,7 @@ class CacheRAM extends CacheBase { if(freeBlock.fileId) { freeBlock.fileId = undefined; this._index.update(freeBlock); - helpers.log(consts.LOG_DBG, `Allocated existing block of size ${freeBlock.size} for ${key}, last accessed ${freeBlock.timestamp}`); + helpers.log(consts.LOG_DBG, `Allocated existing block of size ${freeBlock.size} for ${key}, last accessed ${freeBlock.lastAccessTime}`); } else { helpers.log(consts.LOG_DBG, `Allocated free block of size ${freeBlock.size} for key ${key}`); @@ -119,7 +119,7 @@ class CacheRAM extends CacheBase { let block = _.omit(freeBlock, ['$loki', 'meta']); block['fileId'] = key; block['size'] = size; - block['timestamp'] = Date.now(); + block['lastAccessTime'] = Date.now(); this._index.insert(block); // Update this free block if leftover space is greater than the minimum From e4b93c201719e4bd0faa1788625eff9d980e9e67 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Sun, 28 Jan 2018 08:39:06 -0600 Subject: [PATCH 52/89] Cleanup and minimize external package dependencies --- lib/cache/cache_base.js | 4 ++-- lib/cache/cache_fs.js | 3 +-- lib/cache/cache_ram.js | 10 +++++----- package-lock.json | 29 +++++++++++++---------------- package.json | 1 - test/cache_base.js | 5 +++-- 6 files changed, 24 insertions(+), 28 deletions(-) diff --git a/lib/cache/cache_base.js b/lib/cache/cache_base.js index 2605139..1d1ca69 100644 --- a/lib/cache/cache_base.js +++ b/lib/cache/cache_base.js @@ -6,7 +6,7 @@ const helpers = require('../helpers'); const config = require('config'); const path = require('path'); const fs = require('fs-extra'); -const _ = require('lodash'); +const defaultsDeep = require('lodash').defaultsDeep; class CacheBase extends EventEmitter { constructor() { @@ -25,7 +25,7 @@ class CacheBase extends EventEmitter { get _options() { let opts = config.get(this._optionsPath); - return _.defaultsDeep(this._optionOverrides, opts); + return defaultsDeep(this._optionOverrides, opts); } get _cachePath() { diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 40c681b..1fc5cd6 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -4,7 +4,6 @@ const helpers = require('../helpers'); const path = require('path'); const fs = require('fs-extra'); const uuid = require('uuid'); -const _ = require('lodash'); const consts = require('../constants'); class CacheRAM extends CacheBase { @@ -122,7 +121,7 @@ class PutTransactionFS extends PutTransaction { async _closeAllStreams() { let self = this; - let files = _.values(this._streams); + let files = Object.values(this._streams); if(files.length === 0) return; function processClosedStream(stream) { diff --git a/lib/cache/cache_ram.js b/lib/cache/cache_ram.js index d6f6b40..3e76408 100644 --- a/lib/cache/cache_ram.js +++ b/lib/cache/cache_ram.js @@ -6,7 +6,6 @@ const helpers = require('../helpers'); const consts = require('../constants'); const path = require('path'); const fs = require('fs-extra'); -const _ = require('lodash'); const loki = require('lokijs'); const uuid = require('uuid/v4'); @@ -107,8 +106,6 @@ class CacheRAM extends CacheBase { } if(freeBlock.fileId) { - freeBlock.fileId = undefined; - this._index.update(freeBlock); helpers.log(consts.LOG_DBG, `Allocated existing block of size ${freeBlock.size} for ${key}, last accessed ${freeBlock.lastAccessTime}`); } else { @@ -116,7 +113,9 @@ class CacheRAM extends CacheBase { } // Clone the free block, then set it's file id and size - let block = _.omit(freeBlock, ['$loki', 'meta']); + let block = Object.assign({}, freeBlock); + delete block['$loki']; + delete block['meta']; block['fileId'] = key; block['size'] = size; block['lastAccessTime'] = Date.now(); @@ -124,6 +123,7 @@ class CacheRAM extends CacheBase { // Update this free block if leftover space is greater than the minimum if(freeBlock.size - size >= this._options.minFreeBlockSize) { + freeBlock.fileId = undefined; freeBlock.pageOffset += size; freeBlock.size -= size; this._index.update(freeBlock); @@ -351,7 +351,7 @@ class PutTransactionRAM extends PutTransaction { } async finalize() { - this._finished = _.values(this._streams); + this._finished = Object.values(this._streams); let ok = this._finished.every(file => { return file.pos === file.buffer.length; }); diff --git a/package-lock.json b/package-lock.json index 261234d..2f9ef99 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5,17 +5,11 @@ "requires": true, "dependencies": { "argparse": { - "version": "https://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz", + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz", "integrity": "sha1-c9g7wmP4bpf4zE9rrhsOkKfSLIY=", "requires": { - "sprintf-js": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz" - } - }, - "async": { - "version": "https://registry.npmjs.org/async/-/async-2.5.0.tgz", - "integrity": "sha1-hDGQ/WtzV6C54clW7d3V7IRitU0=", - "requires": { - "lodash": "4.17.4" + "sprintf-js": "1.0.3" } }, "balanced-match": { @@ -613,8 +607,9 @@ "dev": true }, "esprima": { - "version": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz", - "integrity": "sha1-RJnt3NERDgshi6zy+n9/WfVcqAQ=" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz", + "integrity": "sha512-oftTcaMu/EGrEIu904mWteKIv8vMuOgGYo7EhVJJN00R/EED9DCua/xxHRdYnKtcECzVg7xOWhflvJMnqcFZjw==" }, "eyes": { "version": "0.1.8", @@ -720,11 +715,12 @@ "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" }, "js-yaml": { - "version": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.10.0.tgz", - "integrity": "sha1-LnhEFka9RoLpY/IrbpKCPDCcYtw=", + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.10.0.tgz", + "integrity": "sha512-O2v52ffjLa9VeM43J4XocZE//WT9N0IiwDa3KSHH7Tu8CtH+1qM8SIZvnsTh6v+4yFy5KUY3BHUVwjpfAWsjIA==", "requires": { - "argparse": "https://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz", - "esprima": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz" + "argparse": "1.0.9", + "esprima": "4.0.0" } }, "json3": { @@ -2623,7 +2619,8 @@ } }, "sprintf-js": { - "version": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" }, "stack-trace": { diff --git a/package.json b/package.json index 79cc86e..77f5820 100644 --- a/package.json +++ b/package.json @@ -38,7 +38,6 @@ "tmp": "0.0.33" }, "dependencies": { - "async": "^2.5.0", "commander": "^2.11.0", "config": "^1.27.0", "filesize": "^3.5.11", diff --git a/test/cache_base.js b/test/cache_base.js index 4c698db..f758876 100644 --- a/test/cache_base.js +++ b/test/cache_base.js @@ -2,7 +2,6 @@ const tmp = require('tmp'); const fs = require('fs-extra'); const { CacheBase, PutTransaction } = require('../lib/cache/cache_base'); const assert = require('assert'); -const _ = require('lodash'); const path = require('path'); const randomBuffer = require('./test_utils').randomBuffer; const consts = require('../lib/constants'); @@ -20,7 +19,9 @@ describe("Cache: Base Class", () => { describe("static get properties", () => { it("should return an empty object", () => { - assert(_.isEmpty(CacheBase.properties)); + const p = CacheBase.properties; + assert.strictEqual(typeof(p), 'object'); + assert.strictEqual(Object.keys(p).length, 0); }); }); From ed50fab2557b9716bc871f6b6c9afd75e0f1bbba Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Sun, 28 Jan 2018 08:52:40 -0600 Subject: [PATCH 53/89] Use more efficient lookup by index --- lib/cache/cache_ram.js | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/lib/cache/cache_ram.js b/lib/cache/cache_ram.js index 3e76408..2dd0d3a 100644 --- a/lib/cache/cache_ram.js +++ b/lib/cache/cache_ram.js @@ -97,7 +97,11 @@ class CacheRAM extends CacheBase { _reserveBlock(key, size) { // Free any existing block for this key - this._index.findAndUpdate({'fileId' : key}, doc => doc['fileId'] = undefined); + let doc = this._index.by('fileId', key); + if(doc) { + doc.fileId = undefined; + this._index.update(doc); + } // Find the best free block to use let freeBlock; @@ -187,7 +191,10 @@ class CacheRAM extends CacheBase { helpers.log(consts.LOG_INFO, `Writing ${pagePath}`); await fs.writeFile(pagePath, this._pages[page.index]); - this._pageMeta.findAndUpdate({'index': page.index}, doc => doc.dirty = false); + + let doc = this._pageMeta.by('index', page.index); + doc.dirty = false; + this._pageMeta.update(doc); }); return Promise.all(promises); From 5872057020abaeee9d803aec7ed40c0b802cec55 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Sun, 28 Jan 2018 10:44:09 -0600 Subject: [PATCH 54/89] remove extra param from jsdoc comment --- lib/server/server.js | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/server/server.js b/lib/server/server.js index 192aaa2..092a571 100644 --- a/lib/server/server.js +++ b/lib/server/server.js @@ -51,7 +51,6 @@ class CacheServer { * start the cache server * * @param errCallback error callback (optional) - * @param callback */ start(errCallback) { const self = this; From eff3c5ce3f6ce80928b711d08193f8a238f13bee Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Sun, 28 Jan 2018 13:56:25 -0600 Subject: [PATCH 55/89] Bump node version in .nvmrc --- .nvmrc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.nvmrc b/.nvmrc index ad8f873..641c7df 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -v8.9.1 +v8.9.4 From aa5376034a707511078cd0a65dcdf12758ab5639 Mon Sep 17 00:00:00 2001 From: Morten Siebuhr Date: Mon, 29 Jan 2018 13:13:18 +0100 Subject: [PATCH 56/89] Remove extranous defaults in help output --- main.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/main.js b/main.js index 1fc1b58..45e0b67 100755 --- a/main.js +++ b/main.js @@ -29,12 +29,12 @@ const defaultCacheModule = config.get("Cache.defaultModule"); program.description("Unity Cache Server") .version(VERSION) - .option('-p, --port ', `Specify the server port, only apply to new cache server, default is ${consts.DEFAULT_PORT}`, myParseInt, consts.DEFAULT_PORT) - .option('-c --cache-module [path]', `Use cache module at specified path. Default is '${defaultCacheModule}'`, defaultCacheModule) - .option('-P, --cache-path [path]', `Specify the path of the cache directory`) - .option('-l, --log-level ', `Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is ${consts.DEFAULT_LOG_LEVEL}`, myParseInt, consts.DEFAULT_LOG_LEVEL) - .option('-w, --workers ', `Number of worker threads to spawn. Default is ${consts.DEFAULT_WORKERS}`, zeroOrMore, consts.DEFAULT_WORKERS) - .option('-m --mirror [host:port]', `Mirror transactions to another cache server. Can be repeated for multiple mirrors`, collect, []) + .option('-p, --port ', 'Specify the server port, only apply to new cache server', myParseInt, consts.DEFAULT_PORT) + .option('-c --cache-module [path]', 'Use cache module at specified path', defaultCacheModule) + .option('-P, --cache-path [path]', 'Specify the path of the cache directory') + .option('-l, --log-level ', 'Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug)', myParseInt, consts.DEFAULT_LOG_LEVEL) + .option('-w, --workers ', 'Number of worker threads to spawn', zeroOrMore, consts.DEFAULT_WORKERS) + .option('-m --mirror [host:port]', 'Mirror transactions to another cache server. Can be repeated for multiple mirrors', collect, []) .option('-m, --monitor-parent-process ', 'Monitor a parent process and exit if it dies', myParseInt, 0); program.parse(process.argv); From a802fab57ded96724c8dc04037d56c7fce2d676f Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Mon, 29 Jan 2018 11:07:34 -0600 Subject: [PATCH 57/89] Fix a couple of minor bugs in the client - Make sure options is set before trying to read it - Make quit() listen for the client close event before resolving --- lib/client/client.js | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/lib/client/client.js b/lib/client/client.js index 7c622ea..3433ef5 100644 --- a/lib/client/client.js +++ b/lib/client/client.js @@ -11,6 +11,7 @@ const cmd = { class CacheClient { constructor(address, port, options) { + options = options || {}; this._address = address; this._port = port; this._client = null; @@ -88,7 +89,10 @@ class CacheClient { * @returns {Promise} */ quit() { - return Promise.resolve(this._client.end(cmd.quit)); + return new Promise(resolve => { + this._client.once('close', () => resolve()); + this._client.end(cmd.quit); + }); } /** From 309b4f0c555c8722da3848f368a72e8cfd32013d Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Mon, 29 Jan 2018 11:08:17 -0600 Subject: [PATCH 58/89] change log level of a message --- lib/cache/cache_fs.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 1fc5cd6..45e7f48 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -81,7 +81,7 @@ class CacheRAM extends CacheBase { let moveFile = async (file) => { let filePath = self._calcFilepath(file.type, transaction.guid, transaction.hash); - helpers.log(consts.LOG_INFO, `Adding file to cache: ${file.size} ${filePath}`); + helpers.log(consts.LOG_TEST, `Adding file to cache: ${file.size} ${filePath}`); await fs.move(file.file, filePath, { overwrite: true }); }; From 61491d4c386608a632155e174a7fc0ced9114df4 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Mon, 29 Jan 2018 11:09:04 -0600 Subject: [PATCH 59/89] export the ClientStreamProcessor and CommandProcessor in the module index --- lib/index.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/index.js b/lib/index.js index cf1565f..d39f92a 100644 --- a/lib/index.js +++ b/lib/index.js @@ -1,4 +1,6 @@ exports.Server = require('./server/server'); +exports.ClientStreamProcessor = require('./server/client_stream_processor'); +exports.CommandProcessor = require('./server/command_processor'); exports.Client = require('./client/client'); exports.CacheFS = require('./cache/cache_fs'); exports.CacheRAM = require('./cache/cache_ram'); \ No newline at end of file From 84d36c5b227d54e3060d5210434229aa91d93910 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 31 Jan 2018 07:32:14 -0500 Subject: [PATCH 60/89] Fix class name --- lib/cache/cache_fs.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 45e7f48..4467a25 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -6,7 +6,7 @@ const fs = require('fs-extra'); const uuid = require('uuid'); const consts = require('../constants'); -class CacheRAM extends CacheBase { +class CacheFS extends CacheBase { constructor() { super(); } @@ -39,7 +39,7 @@ class CacheRAM extends CacheBase { * @private */ _calcFilepath(type, guid, hash) { - let fileName = CacheRAM._calcFilename(type, guid, hash); + let fileName = CacheFS._calcFilename(type, guid, hash); return path.join(this._cachePath, fileName.substr(0, 2), fileName); } @@ -195,4 +195,4 @@ class PutTransactionFS extends PutTransaction { } } -module.exports = CacheRAM; \ No newline at end of file +module.exports = CacheFS; \ No newline at end of file From cc1d38ac579ea2402430501a10bc93e7560d1ee3 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 31 Jan 2018 07:32:29 -0500 Subject: [PATCH 61/89] export CacheBase in library index --- lib/index.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/index.js b/lib/index.js index d39f92a..6e4f9a6 100644 --- a/lib/index.js +++ b/lib/index.js @@ -2,5 +2,6 @@ exports.Server = require('./server/server'); exports.ClientStreamProcessor = require('./server/client_stream_processor'); exports.CommandProcessor = require('./server/command_processor'); exports.Client = require('./client/client'); +exports.CacheBase = require('./cache/cache_base'); exports.CacheFS = require('./cache/cache_fs'); exports.CacheRAM = require('./cache/cache_ram'); \ No newline at end of file From bbd5d1eedc4a1a26588726ebc8cb50e8c6df7d59 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 31 Jan 2018 07:35:15 -0500 Subject: [PATCH 62/89] Fix export of CacheBase and PutTransactionBase --- lib/index.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/index.js b/lib/index.js index 6e4f9a6..7609996 100644 --- a/lib/index.js +++ b/lib/index.js @@ -2,6 +2,7 @@ exports.Server = require('./server/server'); exports.ClientStreamProcessor = require('./server/client_stream_processor'); exports.CommandProcessor = require('./server/command_processor'); exports.Client = require('./client/client'); -exports.CacheBase = require('./cache/cache_base'); +exports.CacheBase = require('./cache/cache_base').CacheBase; +exports.PutTransactionBase = require('./cache/cache_base').PutTransaction; exports.CacheFS = require('./cache/cache_fs'); exports.CacheRAM = require('./cache/cache_ram'); \ No newline at end of file From 754ab074fef394c66b4d862b2d19257810d9cbd6 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 31 Jan 2018 07:39:26 -0500 Subject: [PATCH 63/89] fix PutTransaction export --- lib/index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/index.js b/lib/index.js index 7609996..72b0610 100644 --- a/lib/index.js +++ b/lib/index.js @@ -3,6 +3,6 @@ exports.ClientStreamProcessor = require('./server/client_stream_processor'); exports.CommandProcessor = require('./server/command_processor'); exports.Client = require('./client/client'); exports.CacheBase = require('./cache/cache_base').CacheBase; -exports.PutTransactionBase = require('./cache/cache_base').PutTransaction; +exports.PutTransaction = require('./cache/cache_base').PutTransaction; exports.CacheFS = require('./cache/cache_fs'); exports.CacheRAM = require('./cache/cache_ram'); \ No newline at end of file From 9e7792e2fdf7ab625015cb3a37f259329d12a3bc Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 6 Feb 2018 07:27:34 -0600 Subject: [PATCH 64/89] rename private vars for clarity --- lib/client/server_stream_processor.js | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/lib/client/server_stream_processor.js b/lib/client/server_stream_processor.js index e045166..4eb8baf 100644 --- a/lib/client/server_stream_processor.js +++ b/lib/client/server_stream_processor.js @@ -7,10 +7,10 @@ const MAX_HEADER_SIZE = consts.CMD_SIZE + consts.SIZE_SIZE + consts.ID_SIZE; class ServerStreamProcessor extends Transform { constructor() { super(); - this.headerBuf = Buffer.allocUnsafe(MAX_HEADER_SIZE); + this._headerBuf = Buffer.allocUnsafe(MAX_HEADER_SIZE); this.didReadVersion = false; this.version = 0; - this.errState = null; + this._errState = null; this._init(); } @@ -39,8 +39,8 @@ class ServerStreamProcessor extends Transform { data = this._emitHeader(data); } - if(this.errState !== null) { - helpers.log(consts.LOG_ERR, this.errState.msg); + if(this._errState !== null) { + helpers.log(consts.LOG_ERR, this._errState.msg); } } @@ -71,7 +71,7 @@ class ServerStreamProcessor extends Transform { // Don't copy past the remaining bytes in the data block const toCopy = Math.min(size, data.length - dataPos); - data.copy(self.headerBuf, self.readState.headerBufPos, dataPos, dataPos + toCopy); + data.copy(self._headerBuf, self.readState.headerBufPos, dataPos, dataPos + toCopy); dataPos += toCopy; self.readState.headerBufPos += toCopy; @@ -79,7 +79,7 @@ class ServerStreamProcessor extends Transform { } function isDone() { - return dataPos >= data.length || self.errState !== null; + return dataPos >= data.length || self._errState !== null; } // Read version @@ -97,7 +97,7 @@ class ServerStreamProcessor extends Transform { // Read command if (!this.readState.didReadCmd) { - const cmd = this.headerBuf.slice(0, consts.CMD_SIZE).toString('ascii'); + const cmd = this._headerBuf.slice(0, consts.CMD_SIZE).toString('ascii'); this.readState.headerData.cmd = cmd; switch (cmd[0]) { case '+': // file found @@ -110,7 +110,7 @@ class ServerStreamProcessor extends Transform { this.readState.headerSize += consts.ID_SIZE; break; default: - this.errState = new Error("Unrecognized command response, aborting!"); + this._errState = new Error("Unrecognized command response, aborting!"); } } @@ -120,15 +120,15 @@ class ServerStreamProcessor extends Transform { let pos = consts.CMD_SIZE; if (this.readState.doReadSize) { - this.readState.headerData.size = helpers.readUInt64(this.headerBuf.slice(pos, pos + consts.UINT64_SIZE)); + this.readState.headerData.size = helpers.readUInt64(this._headerBuf.slice(pos, pos + consts.UINT64_SIZE)); pos += consts.UINT64_SIZE; this.readState.dataPassThrough = true; } if(this.readState.doReadId) { - this.readState.headerData.guid = this.headerBuf.slice(pos, pos + consts.GUID_SIZE); + this.readState.headerData.guid = this._headerBuf.slice(pos, pos + consts.GUID_SIZE); pos += consts.GUID_SIZE; - this.readState.headerData.hash = this.headerBuf.slice(pos, pos + consts.HASH_SIZE); + this.readState.headerData.hash = this._headerBuf.slice(pos, pos + consts.HASH_SIZE); } this.emit('header', Object.assign({}, this.readState.headerData)); From 87fa0970efb186fa91181f7a620dd288e26fea8a Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 6 Feb 2018 07:31:17 -0600 Subject: [PATCH 65/89] =?UTF-8?q?-=20minimize=20member=20variable=20access?= =?UTF-8?q?=20(this)=20-=20pre-bind=20transform=20handler=20methods,=20and?= =?UTF-8?q?=20set=20current=20transform=20handler=20explicitly=20instead?= =?UTF-8?q?=20of=20the=20last-second=20decision=20making=20within=20=5Ftra?= =?UTF-8?q?nsform=20-=20don=E2=80=99t=20print=20=E2=80=9CError:=20Client?= =?UTF-8?q?=20quit=E2=80=9D=20when=20a=20client=20disconnects=20(not=20rea?= =?UTF-8?q?lly=20an=20error)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- lib/server/client_stream_processor.js | 132 ++++++++++++-------------- 1 file changed, 61 insertions(+), 71 deletions(-) diff --git a/lib/server/client_stream_processor.js b/lib/server/client_stream_processor.js index 7497c7d..2a91c1a 100644 --- a/lib/server/client_stream_processor.js +++ b/lib/server/client_stream_processor.js @@ -5,16 +5,24 @@ const assert = require('assert'); const CMD_QUIT = 'q'.charCodeAt(0); const MAX_HEADER_SIZE = consts.CMD_SIZE + consts.ID_SIZE; -const kSource = Symbol("source"); +const kSource = Symbol(); class ClientStreamProcessor extends Transform { constructor() { super(); - this.headerBuf = Buffer.allocUnsafe(MAX_HEADER_SIZE); - this.didSendVersion = false; - this.version = ''; - this.errState = null; + this._headerBuf = Buffer.allocUnsafe(MAX_HEADER_SIZE); + this._errState = null; + this._readState = {}; + + this._transformHandlers = { + command: this._sendCommands.bind(this), + data: this._sendData.bind(this), + version: this._sendVersion.bind(this) + }; + + this._transformHandler = this._transformHandlers.version; + this._registerEventListeners(); this._init(); } @@ -32,92 +40,78 @@ class ClientStreamProcessor extends Transform { } _init() { - this.readState = { - doReadSize: false, - doReadId: false, - dataPassThrough: false, - didParseCmd: false, - dataSize: 0, - headerBufPos: 0, - headerSize : consts.CMD_SIZE, - dataBytesRead: 0 - }; - } - - static get errorCodes() { - return { - quitError: new Error("Client quit") - } + const readState = this._readState; + readState.version = ''; + readState.doReadSize = false; + readState.doReadId = false; + readState.didParseCmd = false; + readState.dataSize = 0; + readState.headerBufPos = 0; + readState.headerSize = consts.CMD_SIZE; + readState.dataBytesRead = 0; } // noinspection JSUnusedGlobalSymbols _transform(data, encoding, callback) { - while(data !== null && data.length > 0 && this.errState === null) { - if(!this.didSendVersion) { - data = this._sendVersion(data); - } - else if (this.readState.dataPassThrough) { - data = this._sendData(data); - } else { - data = this._sendCommands(data); - } + while(data = this._transformHandler(data, this._readState)) {} - if(this.errState !== null) { - helpers.log(consts.LOG_ERR, this.errState); - this.push('q'); // quit - } + if(this._errState !== null) { + helpers.log(consts.LOG_ERR, this._errState); + this.push('q'); } callback(); } - _sendVersion(data) { - let len = Math.min(consts.VERSION_SIZE - this.version.length, data.length); - this.version += data.slice(0, len).toString('ascii'); + _sendVersion(data, readState) { + const len = Math.min(consts.VERSION_SIZE - readState.version.length, data.length); + readState.version += data.slice(0, len).toString('ascii'); - if(this.version.length < consts.PROTOCOL_VERSION_MIN_SIZE) { + if(readState.version.length < consts.PROTOCOL_VERSION_MIN_SIZE) { return null; } - this.push(this.version); - this.didSendVersion = true; + this.push(readState.version); + this._transformHandler = this._transformHandlers.command; return len < data.length ? data.slice(len) : null; } - _sendData(data) { - const len = Math.min(this.readState.dataSize - this.readState.dataBytesRead, data.length); + _sendData(data, readState) { + const len = Math.min(readState.dataSize - readState.dataBytesRead, data.length); this.push(data.slice(0, len)); - this.readState.dataBytesRead += len; + readState.dataBytesRead += len; - if(this.readState.dataBytesRead === this.readState.dataSize) { + if(readState.dataBytesRead === readState.dataSize) { this._init(); + this._transformHandler = this._transformHandlers.command; } return len < data.length ? data.slice(len) : null; } - _sendCommands(data) { + _sendCommands(data, readState) { const self = this; + const headerBuf = this._headerBuf; let dataPos = 0; function fillBufferWithData() { // Only copy as much as we need for the remaining header size - let size = self.readState.headerSize - self.readState.headerBufPos; + let size = readState.headerSize - readState.headerBufPos; // Don't copy past the remaining bytes in the data block const toCopy = Math.min(size, data.length - dataPos); - data.copy(self.headerBuf, self.readState.headerBufPos, dataPos, dataPos + toCopy); + data.copy(headerBuf, readState.headerBufPos, dataPos, dataPos + toCopy); dataPos += toCopy; - self.readState.headerBufPos += toCopy; - assert(self.readState.headerBufPos <= self.headerBuf.length); + readState.headerBufPos += toCopy; + assert(readState.headerBufPos <= headerBuf.length); - return self.readState.headerBufPos === self.readState.headerSize; + return readState.headerBufPos === readState.headerSize; } function isDone() { - return dataPos >= data.length || self.errState !== null; + return dataPos >= data.length || self._errState !== null; } while(!isDone()) { @@ -127,35 +121,34 @@ class ClientStreamProcessor extends Transform { // Quit? if (data[data.length - 1] === CMD_QUIT) { - this.errState = ClientStreamProcessor.errorCodes.quitError; + this.push('q'); + break; } - - break; } - if(!this.readState.didParseCmd) { - this.readState.didParseCmd = true; + if(!readState.didParseCmd) { + readState.didParseCmd = true; - const cmd = this.headerBuf.slice(0, consts.CMD_SIZE).toString('ascii'); + const cmd = headerBuf.slice(0, consts.CMD_SIZE).toString('ascii'); switch (cmd[0]) { case 'g': // get - this.readState.doReadId = true; - this.readState.headerSize += consts.ID_SIZE; + readState.doReadId = true; + readState.headerSize += consts.ID_SIZE; break; case 'p': // put - this.readState.doReadSize = true; - this.readState.headerSize += consts.SIZE_SIZE; + readState.doReadSize = true; + readState.headerSize += consts.SIZE_SIZE; break; case 't': // transaction if (cmd[1] === 's') { - this.readState.doReadId = true; - this.readState.headerSize += consts.ID_SIZE; + readState.doReadId = true; + readState.headerSize += consts.ID_SIZE; } break; default: - this.errState = new Error("Unrecognized command, aborting!"); + this._errState = new Error("Unrecognized command, aborting!"); break; } @@ -164,15 +157,12 @@ class ClientStreamProcessor extends Transform { } } - if (this.readState.doReadSize) { - this.readState.dataSize = helpers.readUInt64(this.headerBuf.slice(consts.CMD_SIZE, consts.CMD_SIZE + consts.SIZE_SIZE)); - this.readState.dataPassThrough = true; - } - // noinspection JSCheckFunctionSignatures - this.push(Buffer.from(this.headerBuf.slice(0, this.readState.headerBufPos))); + this.push(Buffer.from(headerBuf.slice(0, readState.headerBufPos))); - if(this.readState.dataPassThrough) { + if (readState.doReadSize) { + readState.dataSize = helpers.readUInt64(headerBuf.slice(consts.CMD_SIZE, consts.CMD_SIZE + consts.SIZE_SIZE)); + this._transformHandler = this._transformHandlers.data; break; } From d715097935dd08fc87cec029b59060f92d05050b Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 6 Feb 2018 07:32:20 -0600 Subject: [PATCH 66/89] conform write handler pattern to match client_stream_processor (pre-binding the handlers and setting them explicitly when necessary) --- lib/server/command_processor.js | 45 ++++++++++----------------------- 1 file changed, 14 insertions(+), 31 deletions(-) diff --git a/lib/server/command_processor.js b/lib/server/command_processor.js index cb67e44..ff689c4 100644 --- a/lib/server/command_processor.js +++ b/lib/server/command_processor.js @@ -7,10 +7,6 @@ const { promisify } = require('util'); const kSource = Symbol("source"); const kCache = Symbol("cache"); const kSendFileQueue = Symbol("sendFileQueue"); -const kReadStateVersion = Symbol("readStateVersion"); -const kReadStateCommand = Symbol("readStateCommand"); -const kReadStatePutStream = Symbol("readStatePutStream"); -const kReadStateNone = Symbol("readStateNone"); class CommandProcessor extends Duplex { @@ -22,7 +18,15 @@ class CommandProcessor extends Duplex { super(); this[kCache] = cache; this[kSendFileQueue] = []; - this._setWriteHandler(kReadStateVersion); + + this._writeHandlers = { + putStream: this._handleWrite.bind(this), + command: this._handleCommand.bind(this), + version: this._handleVersion.bind(this), + none: () => Promise.resolve() + }; + + this._writeHandler = this._writeHandlers.version; /** * @@ -44,27 +48,6 @@ class CommandProcessor extends Duplex { this._registerEventListeners(); } - /** - * - * @param {symbol} readState - * @private - */ - _setWriteHandler(readState) { - switch(readState) { - case kReadStateVersion: - this._writeHandler = this._handleVersion; - break; - case kReadStateCommand: - this._writeHandler = this._handleCommand; - break; - case kReadStatePutStream: - this._writeHandler = this._handleWrite; - break; - default: - this._writeHandler = () => Promise.resolve(); - } - } - _registerEventListeners() { const self = this; this.once('finish', this._printReadStats); @@ -82,7 +65,7 @@ class CommandProcessor extends Duplex { * @private */ _write(chunk, encoding, callback) { - this._writeHandler.call(this, chunk) + this._writeHandler(chunk) .then(() => callback(), err => this._quit(err)); } @@ -173,7 +156,7 @@ class CommandProcessor extends Duplex { async _quit(err) { this[kSource].unpipe(this); this[kSource].emit('quit'); - this._setWriteHandler(kReadStateNone); + this._writeHandler = this._writeHandlers.none; if(err) { helpers.log(consts.LOG_ERR, err); } @@ -186,7 +169,7 @@ class CommandProcessor extends Duplex { */ async _handleVersion(data) { let version = helpers.readUInt32(data); - this._setWriteHandler(kReadStateCommand); + this._writeHandler = this._writeHandlers.command; let err = null; if(version !== consts.PROTOCOL_VERSION) { @@ -208,7 +191,7 @@ class CommandProcessor extends Duplex { this._putSent += data.length; if(this._putSent === this._putSize) { this._putStream.end(); - this._setWriteHandler(kReadStateCommand); + this._writeHandler = this._writeHandlers.command; this._putSent = 0; this._putSize = 0; } @@ -349,7 +332,7 @@ class CommandProcessor extends Duplex { this._putStream = await this._trx.getWriteStream(type, size); this._putStream.promiseWrite = promisify(this._putStream.write).bind(this._putStream); this._putSize = size; - this._setWriteHandler(kReadStatePutStream); + this._writeHandler = this._writeHandlers.putStream; } } From 7bf6e1aa10a42930025ced02eea06f135edb6cce Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 6 Feb 2018 09:01:48 -0600 Subject: [PATCH 67/89] change package version for beta release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 77f5820..d6361d5 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "unity-cache-server", - "version": "6.0.0", + "version": "6.0.0-beta.0", "description": "Unity Cache Server", "main": "lib/index.js", "engines": { From 69305628ce32c6930ef9e11cceb449f484c3303a Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 7 Feb 2018 10:54:16 -0600 Subject: [PATCH 68/89] =?UTF-8?q?-=20Add=20=E2=80=9C=E2=80=94no-timstamp-c?= =?UTF-8?q?heck=E2=80=9D=20option=20to=20skip=20the=20timestamp=20check=20?= =?UTF-8?q?that=20prevents=20importing=20files=20that=20have=20changed=20s?= =?UTF-8?q?ince=20the=20last=20export=20-=20Add=20=E2=80=9C=E2=80=94skip?= =?UTF-8?q?=20=E2=80=9D=20option=20to=20skip=20directly=20to=20the=20Nt?= =?UTF-8?q?h=20transaction=20in=20the=20input=20file=20-=20Set=20the=20log?= =?UTF-8?q?=20level=20based=20on=20the=20log-level=20option?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- import.js | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/import.js b/import.js index bc630da..fc9b118 100644 --- a/import.js +++ b/import.js @@ -10,12 +10,18 @@ function myParseInt(val, def) { return (!val && val !== 0) ? def : val; } +const DEFAULT_SERVER_ADDRESS = 'localhost:8126'; + program.description("Unity Cache Server - Project Import") .version(require('./package').version) .description('Imports Unity project Library data into a local or remote Cache Server.') .arguments(' [ServerAddress]') - .option('-l, --log-level ', `Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is ${consts.DEFAULT_LOG_LEVEL}`, myParseInt, consts.DEFAULT_LOG_LEVEL) + .option('-l, --log-level ', 'Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug).', myParseInt, consts.DEFAULT_LOG_LEVEL) + .option('--no-timestamp-check', 'Do not use timestamp check to protect against importing files from a project that has changed since last exported.', true) + .option('--skip ', 'Skip to transaction # in the import file at startup.', myParseInt, 0) .action((projectRoot, serverAddress) => { + helpers.setLogLevel(program.logLevel); + serverAddress = serverAddress || DEFAULT_SERVER_ADDRESS; importTransactionFile(projectRoot, serverAddress, consts.DEFAULT_PORT) .catch(err => { console.log(err); @@ -37,12 +43,13 @@ async function importTransactionFile(filePath, addressString, defaultPort) { await client.connect(); const trxCount = data.transactions.length; + const trxStart = Math.min(trxCount - 1, Math.max(0, program.skip - 1)); const startTime = Date.now(); let sentBytes = 0; let sentAssetCount = 0; let sentFileCount = 0; - for(let i = 0; i < trxCount; i++) { + for(let i = trxStart; i < trxCount; i++) { const trx = data.transactions[i]; const guid = helpers.GUIDStringToBuffer(trx.guid); const hash = Buffer.from(trx.hash, 'hex'); @@ -62,7 +69,7 @@ async function importTransactionFile(filePath, addressString, defaultPort) { continue; } - if(stats.mtimeMs !== file.ts * 1000) { + if(program.timestampCheck && stats.mtimeMs !== file.ts * 1000) { helpers.log(consts.LOG_WARN, `${file.path} has been modified, skipping`); continue; } From 147361b5b12492b9935244a02501893bde86eac4 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Thu, 8 Feb 2018 15:25:27 -0600 Subject: [PATCH 69/89] - Fixed a bug that would cause the server to hang, when a specific byte boundary was encountered while receiving data from the client (during PUT transactions) - Added upload tests that upload small files 1 byte at a time --- .gitignore | 4 ++-- lib/server/client_stream_processor.js | 15 +++++++++------ package-lock.json | 2 +- package.json | 2 +- test/protocol.js | 27 +++++++++++++++------------ 5 files changed, 28 insertions(+), 22 deletions(-) diff --git a/.gitignore b/.gitignore index 5042bca..50147ee 100644 --- a/.gitignore +++ b/.gitignore @@ -2,7 +2,7 @@ cache/ node_modules/ .coveralls.yml !lib/cache -.cache_ram/ -.cache_fs/ +.cache_ram*/ +.cache_fs*/ .nyc_output/ coverage/ \ No newline at end of file diff --git a/lib/server/client_stream_processor.js b/lib/server/client_stream_processor.js index 2a91c1a..fce1667 100644 --- a/lib/server/client_stream_processor.js +++ b/lib/server/client_stream_processor.js @@ -53,11 +53,13 @@ class ClientStreamProcessor extends Transform { // noinspection JSUnusedGlobalSymbols _transform(data, encoding, callback) { - while(data = this._transformHandler(data, this._readState)) {} - - if(this._errState !== null) { - helpers.log(consts.LOG_ERR, this._errState); - this.push('q'); + while(data !== null) { + data = this._transformHandler(data, this._readState); + if(this._errState !== null) { + helpers.log(consts.LOG_ERR, this._errState); + this.push('q'); + break; + } } callback(); @@ -122,8 +124,9 @@ class ClientStreamProcessor extends Transform { // Quit? if (data[data.length - 1] === CMD_QUIT) { this.push('q'); - break; } + + break; } if(!readState.didParseCmd) { diff --git a/package-lock.json b/package-lock.json index 2f9ef99..80dd14a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "unity-cache-server", - "version": "6.0.0", + "version": "6.0.0-beta.0", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index d6361d5..6883885 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "unity-cache-server", - "version": "6.0.0-beta.0", + "version": "6.0.0-beta.1", "description": "Unity Cache Server", "main": "lib/index.js", "engines": { diff --git a/test/protocol.js b/test/protocol.js index 1570d92..3cba64f 100644 --- a/test/protocol.js +++ b/test/protocol.js @@ -14,6 +14,8 @@ const clientWrite = require('./test_utils').clientWrite; const readStream = require('./test_utils').readStream; const getClientPromise = require('./test_utils').getClientPromise; +const SMALL_MIN_FILE_SIZE = 64; +const SMALL_MAX_FILE_SIZE = 128; const MIN_FILE_SIZE = 1024; const MAX_FILE_SIZE = 1024 * 1024; const SMALL_PACKET_SIZE = 64; @@ -120,10 +122,8 @@ describe("Protocol", () => { this.timeout(5000); const self = this; - - before(() => { - self.data = generateCommandData(MIN_FILE_SIZE, MAX_FILE_SIZE); - }); + self.data = generateCommandData(MIN_FILE_SIZE, MAX_FILE_SIZE); + self.smallData = generateCommandData(SMALL_MIN_FILE_SIZE, SMALL_MAX_FILE_SIZE); beforeEach(async () => { client = await getClientPromise(server.port); @@ -143,22 +143,25 @@ describe("Protocol", () => { }); const tests = [ - {ext: 'bin', cmd: cmd.putAsset, packetSize: SMALL_PACKET_SIZE}, - {ext: 'info', cmd: cmd.putInfo, packetSize: MED_PACKET_SIZE}, - {ext: 'resource', cmd: cmd.putResource, packetSize: LARGE_PACKET_SIZE} + {ext: 'bin', data: self.smallData, cmd: cmd.putAsset, packetSize: 1}, + {ext: 'info', data: self.smallData, cmd: cmd.putInfo, packetSize: 1}, + {ext: 'resource', data: self.smallData, cmd: cmd.putResource, packetSize: 1}, + {ext: 'bin', data: self.data, cmd: cmd.putAsset, packetSize: SMALL_PACKET_SIZE}, + {ext: 'info', data: self.data, cmd: cmd.putInfo, packetSize: MED_PACKET_SIZE}, + {ext: 'resource', data: self.data, cmd: cmd.putResource, packetSize: LARGE_PACKET_SIZE} ]; tests.forEach(function (test) { it(`should store ${test.ext} data with a (${test.cmd}) command (client write packet size = ${test.packetSize})`, () => { const buf = Buffer.from( - encodeCommand(cmd.transactionStart, self.data.guid, self.data.hash) + - encodeCommand(test.cmd, null, null, self.data[test.ext]) + + encodeCommand(cmd.transactionStart, test.data.guid, test.data.hash) + + encodeCommand(test.cmd, null, null, test.data[test.ext]) + encodeCommand(cmd.transactionEnd), 'ascii'); return clientWrite(client, buf, test.packetSize) - .then(() => cache.getFileStream(test.cmd[1], self.data.guid, self.data.hash)) - .then(stream => readStream(stream, self.data[test.ext].length)) - .then(data => assert.strictEqual(self.data[test.ext].compare(data), 0)); + .then(() => cache.getFileStream(test.cmd[1], test.data.guid, test.data.hash)) + .then(stream => readStream(stream, test.data[test.ext].length)) + .then(data => assert.strictEqual(test.data[test.ext].compare(data), 0)); }); }); From 97a9d491cf1701023520b8dc861628b5f98a52a4 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Thu, 8 Feb 2018 15:26:40 -0600 Subject: [PATCH 70/89] Better error handling plus debug logging for the import tool --- import.js | 41 ++++++++++++++++++++++++++++++++--------- 1 file changed, 32 insertions(+), 9 deletions(-) diff --git a/import.js b/import.js index fc9b118..ec05c98 100644 --- a/import.js +++ b/import.js @@ -56,10 +56,18 @@ async function importTransactionFile(filePath, addressString, defaultPort) { helpers.log(consts.LOG_INFO, `(${i + 1}/${trxCount}) ${trx.assetPath}`); - await client.beginTransaction(guid, hash); + try { + helpers.log(consts.LOG_DBG, `Begin transaction for ${helpers.GUIDBufferToString(guid)}-${hash.toString('hex')}`); + await client.beginTransaction(guid, hash); + } + catch (err) { + helpers.log(consts.LOG_ERR, err); + process.exit(1); + } + + let stats; - for(let file of trx.files) { - let stats; + for (let file of trx.files) { try { stats = await fs.stat(file.path); @@ -69,19 +77,34 @@ async function importTransactionFile(filePath, addressString, defaultPort) { continue; } - if(program.timestampCheck && stats.mtimeMs !== file.ts * 1000) { + if (program.timestampCheck && stats.mtimeMs !== file.ts * 1000) { helpers.log(consts.LOG_WARN, `${file.path} has been modified, skipping`); continue; } + try { + const stream = fs.createReadStream(file.path); + helpers.log(consts.LOG_DBG, `Putting file of type: ${file.type} size: ${stats.size}`); + await client.putFile(file.type, guid, hash, stream, stats.size); + } + catch(err) { + helpers.log(consts.LOG_ERR, err); + process.exit(1); + } + sentBytes += stats.size; - const stream = fs.createReadStream(file.path); - await client.putFile(file.type, guid, hash, stream, stats.size); - sentFileCount ++; + sentFileCount++; } - await client.endTransaction(); - sentAssetCount++; + try { + helpers.log(consts.LOG_DBG, `End transaction for ${helpers.GUIDBufferToString(guid)}-${hash.toString('hex')}`); + await client.endTransaction(); + sentAssetCount++; + } + catch (err) { + helpers.log(consts.LOG_ERR, err); + process.exit(1); + } } let totalTime = (Date.now() - startTime) / 1000; From 8fa3841c22232cbfc84faa5c7599ab57e800e66f Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 9 Feb 2018 07:25:09 -0600 Subject: [PATCH 71/89] Implement a fancy progress bar for the import script --- import.js | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/import.js b/import.js index ec05c98..afa0d6c 100644 --- a/import.js +++ b/import.js @@ -4,6 +4,7 @@ const consts = require('./lib/constants'); const fs = require('fs-extra'); const filesize = require('filesize'); const Client = require('./lib/client/client'); +const ProgressBar = require('progress'); function myParseInt(val, def) { val = parseInt(val); @@ -49,12 +50,26 @@ async function importTransactionFile(filePath, addressString, defaultPort) { let sentAssetCount = 0; let sentFileCount = 0; + const pbar = new ProgressBar('Uploading :current/:total :bar :percent :etas ETA', { + complete: String.fromCharCode(9619), + incomplete: String.fromCharCode(9617), + width: 20, + total: (trxCount - trxStart) + }); + + const warns = []; + const logLevel = helpers.getLogLevel(); + for(let i = trxStart; i < trxCount; i++) { const trx = data.transactions[i]; const guid = helpers.GUIDStringToBuffer(trx.guid); const hash = Buffer.from(trx.hash, 'hex'); - helpers.log(consts.LOG_INFO, `(${i + 1}/${trxCount}) ${trx.assetPath}`); + if(logLevel === consts.LOG_DBG) { + helpers.log(consts.LOG_INFO, `(${i + 1}/${trxCount}) ${trx.assetPath}`); + } else { + pbar.tick(); + } try { helpers.log(consts.LOG_DBG, `Begin transaction for ${helpers.GUIDBufferToString(guid)}-${hash.toString('hex')}`); @@ -73,12 +88,12 @@ async function importTransactionFile(filePath, addressString, defaultPort) { stats = await fs.stat(file.path); } catch(err) { - helpers.log(consts.LOG_ERR, err); + warns.push(err.toString()); continue; } if (program.timestampCheck && stats.mtimeMs !== file.ts * 1000) { - helpers.log(consts.LOG_WARN, `${file.path} has been modified, skipping`); + warns.push(`${file.path} has been modified, skipping`); continue; } @@ -107,6 +122,11 @@ async function importTransactionFile(filePath, addressString, defaultPort) { } } + if(warns.length > 0) { + helpers.log(consts.LOG_WARN, `Generated ${warns.length} warnings:`); + helpers.log(consts.LOG_WARN, warns.join('\n')); + } + let totalTime = (Date.now() - startTime) / 1000; let throughput = (sentBytes / totalTime).toFixed(2); helpers.log(consts.LOG_INFO, `Sent ${sentFileCount} files for ${sentAssetCount} assets (${filesize(sentBytes)}) in ${totalTime} seconds (${filesize(throughput)}/sec)`); From 62420cb68313eb4c42c5e12756dfb7803e5fb55b Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 9 Feb 2018 13:35:16 -0600 Subject: [PATCH 72/89] - Minor fixes to import script help output --- import.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/import.js b/import.js index afa0d6c..4d436fb 100644 --- a/import.js +++ b/import.js @@ -13,9 +13,8 @@ function myParseInt(val, def) { const DEFAULT_SERVER_ADDRESS = 'localhost:8126'; -program.description("Unity Cache Server - Project Import") +program.description("Unity Cache Server - Project Import\n\n Imports Unity project Library data into a local or remote Cache Server.") .version(require('./package').version) - .description('Imports Unity project Library data into a local or remote Cache Server.') .arguments(' [ServerAddress]') .option('-l, --log-level ', 'Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug).', myParseInt, consts.DEFAULT_LOG_LEVEL) .option('--no-timestamp-check', 'Do not use timestamp check to protect against importing files from a project that has changed since last exported.', true) From 472d22ad87e2135dde65f505cd31728521211ac2 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 9 Feb 2018 13:36:17 -0600 Subject: [PATCH 73/89] - Minor changes for consistently returning promises from async functions --- lib/cache/cache_fs.js | 7 +++---- lib/cache/cache_ram.js | 7 ++++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 4467a25..cdb1c43 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -86,7 +86,7 @@ class CacheFS extends CacheBase { }; await transaction.finalize(); - await Promise.all(transaction.files.map(moveFile)); + return Promise.all(transaction.files.map(moveFile)); } registerClusterWorker(worker) {} @@ -167,7 +167,7 @@ class PutTransactionFS extends PutTransaction { async finalize() { await this._closeAllStreams(); - await super.finalize(); + return super.finalize(); } async getWriteStream(type, size) { @@ -190,8 +190,7 @@ class PutTransactionFS extends PutTransaction { stream: stream }; - await new Promise(resolve => stream.on('open', () => resolve())); - return stream; + return new Promise(resolve => stream.on('open', () => resolve(stream))); } } diff --git a/lib/cache/cache_ram.js b/lib/cache/cache_ram.js index 2dd0d3a..002efe7 100644 --- a/lib/cache/cache_ram.js +++ b/lib/cache/cache_ram.js @@ -270,8 +270,8 @@ class CacheRAM extends CacheBase { * @private */ async _saveDb() { - let save = promisify(this._db.saveDatabase).bind(this._db); - await save(); + const save = promisify(this._db.saveDatabase).bind(this._db); + return save(); } async init(options) { @@ -288,7 +288,8 @@ class CacheRAM extends CacheBase { async shutdown() { await this._saveDb(); - await promisify(this._db.close).bind(this._db)(); + const close = promisify(this._db.close).bind(this._db); + return close(); } async getFileInfo(type, guid, hash) { From 1c25ada39136b9557a3ffb720368a4ecaf45955a Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 9 Feb 2018 13:37:27 -0600 Subject: [PATCH 74/89] - Added script to cleanup expired cache files (cleanup.js) --- cleanup.js | 106 +++++++++++++++++++++++++++++++++ lib/cache/cache_base.js | 8 ++- lib/cache/cache_fs.js | 3 +- lib/cache/cache_ram.js | 3 +- package-lock.json | 128 +++++++++++++++++++++++++++++++++++++++- package.json | 4 ++ 6 files changed, 245 insertions(+), 7 deletions(-) create mode 100644 cleanup.js diff --git a/cleanup.js b/cleanup.js new file mode 100644 index 0000000..d8da2c8 --- /dev/null +++ b/cleanup.js @@ -0,0 +1,106 @@ +#!/usr/bin/env node +const helpers = require('./lib/helpers'); +const consts = require('./lib/constants'); +const program = require('commander'); +const path = require('path'); +const moment = require('moment'); +const klaw = require('klaw'); +const filesize =require('filesize'); +const fs = require('fs-extra'); +const ora = require('ora'); + +const { Transform } = require('stream'); + +const config = require('config'); +const VERSION = require('./package.json').version; + +function myParseInt(val, def) { + val = parseInt(val); + return (!val && val !== 0) ? def : val; +} + +const defaultCacheModule = config.get("Cache.defaultModule"); + +program.description("Unity Cache Server - Cache Cleanup\n\n Remove files from cache that have not been accessed within the given .\n\n Both ASP.NET style time spans (days.minutes:hours:seconds, e.g. '15.23:59:59') and ISO 8601 time spans (e.g. 'P15DT23H59M59S') are supported.") + .version(VERSION) + .arguments('') + .option('-c --cache-module [path]', 'Use cache module at specified path', defaultCacheModule) + .option('-P, --cache-path [path]', 'Specify the path of the cache directory') + .option('-l, --log-level ', 'Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug)', myParseInt, consts.DEFAULT_LOG_LEVEL) + .option('-d, --delete', 'Delete cached files that that have not been accessed within the timeSpan provided.') + .action(timeSpan => doCleanup(timeSpan)); + +program.parse(process.argv); + +if (!process.argv.slice(2).length) { + program.outputHelp(); +} + +function doCleanup(timeSpan) { + helpers.setLogLevel(program.logLevel); + + const CacheModule = require(path.resolve(program.cacheModule)); + + if(!CacheModule.properties.cleanup) { + helpers.log(consts.LOG_ERR, "Configured cache module does not support cleanup script."); + process.exit(1); + } + + const cache = new CacheModule(); + + let cacheOpts = {}; + if(program.cachePath !== null) { + cacheOpts.cachePath = program.cachePath; + } + + cache._options = cacheOpts; + helpers.log(consts.LOG_INFO, `Cache path is ${cache._cachePath}`); + + const duration = moment.duration(timeSpan); + if(!duration.isValid()) { + helpers.log(consts.LOG_ERR, `Invalid timeSpan specified.`); + } + + + const msg = `Gathering cache files that have not been accessed within ${duration.humanize()}`; + const spinner = ora({color:'white'}).start(`${msg} (found 0)`); + + const minFileAccessTime = moment().subtract(duration).toDate(); + let items = []; + let totalSize = 0; + let freedSize = 0; + let filterTransform = new Transform({ + objectMode: true, + transform(item, enc, next) { + if(item.stats.isDirectory()) return next(); + + totalSize += item.stats.size; + if(item.stats.atime < minFileAccessTime) { + spinner.text = `${msg} (found ${items.length}, ${filesize(freedSize)})`; + freedSize += item.stats.size; + this.push(item); + } + + next(); + } + }); + + klaw(cache._cachePath).pipe(filterTransform) + .on('data', item => items.push(item.path)) + .on('end', async () => { + spinner.stop(); + + if(program.delete) { + for(let item of items) { + helpers.log(consts.LOG_INFO, `Deleting ${item}`); + await fs.unlink(item); + } + } + + let pct = totalSize > 0 ? (freedSize/totalSize).toPrecision(2) * 100 : 0; + helpers.log(consts.LOG_INFO, `Found ${items.length} expired files: ${filesize(freedSize)} of ${filesize(totalSize)} total cache size (${pct}%).`); + if(!program.delete) { + helpers.log(consts.LOG_INFO, "Nothing deleted; run with --delete to remove expired files from the cache."); + } + }) +} \ No newline at end of file diff --git a/lib/cache/cache_base.js b/lib/cache/cache_base.js index 1d1ca69..d76c2bb 100644 --- a/lib/cache/cache_base.js +++ b/lib/cache/cache_base.js @@ -28,6 +28,11 @@ class CacheBase extends EventEmitter { return defaultsDeep(this._optionOverrides, opts); } + set _options(val) { + if(typeof(val) === 'object') + this._optionOverrides = val; + } + get _cachePath() { if(!this._options.hasOwnProperty('cachePath')) return null; @@ -42,8 +47,7 @@ class CacheBase extends EventEmitter { * @returns {Promise} */ init(options) { - if(typeof(options) === 'object') - this._optionOverrides = options; + this._options = options; if(cluster.isMaster) { const p = this._cachePath; diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index cdb1c43..d500b98 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -13,7 +13,8 @@ class CacheFS extends CacheBase { static get properties() { return { - clustering: true + clustering: true, + cleanup: true } } diff --git a/lib/cache/cache_ram.js b/lib/cache/cache_ram.js index 002efe7..1261963 100644 --- a/lib/cache/cache_ram.js +++ b/lib/cache/cache_ram.js @@ -23,7 +23,8 @@ class CacheRAM extends CacheBase { static get properties() { return { - clustering: false + clustering: false, + cleanup: false } } diff --git a/package-lock.json b/package-lock.json index 80dd14a..d609bc8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,9 +1,17 @@ { "name": "unity-cache-server", - "version": "6.0.0-beta.0", + "version": "6.0.0-beta.1", "lockfileVersion": 1, "requires": true, "dependencies": { + "ansi-styles": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.0.tgz", + "integrity": "sha512-NnSOmMEYtVR2JVMIGTzynRkkaxtiq1xnFBcdQD/DnNCYPoEPsVJhM98BDyaoNOQIi7p4okdi3E27eN7GQbsUug==", + "requires": { + "color-convert": "1.9.1" + } + }, "argparse": { "version": "1.0.9", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz", @@ -29,6 +37,57 @@ "integrity": "sha1-81HTKWnTL6XXpVZxVCY9korjvR8=", "dev": true }, + "chalk": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.3.0.tgz", + "integrity": "sha512-Az5zJR2CBujap2rqXGaJKaPHyJ0IrUimvYNX+ncCy8PJP4ltOGTrHUIo097ZaL2zMeKYpiCdqDvS6zdrTFok3Q==", + "requires": { + "ansi-styles": "3.2.0", + "escape-string-regexp": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "supports-color": "4.5.0" + }, + "dependencies": { + "has-flag": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz", + "integrity": "sha1-6CB68cx7MNRGzHC3NLXovhj4jVE=" + }, + "supports-color": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz", + "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=", + "requires": { + "has-flag": "2.0.0" + } + } + } + }, + "cli-cursor": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", + "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "requires": { + "restore-cursor": "2.0.0" + } + }, + "cli-spinners": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-1.1.0.tgz", + "integrity": "sha1-8YR7FohE2RemceudFH499JfJDQY=" + }, + "color-convert": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.1.tgz", + "integrity": "sha512-mjGanIiwQJskCC18rPR6OmrZ6fm2Lc7PeGFYwCmy5J34wC6F1PzdGL6xeMfmgicfYcNLGuVFA3WzXtIDCQSZxQ==", + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" + }, "colors": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/colors/-/colors-1.1.2.tgz", @@ -603,8 +662,7 @@ }, "escape-string-regexp": { "version": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", - "dev": true + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" }, "esprima": { "version": "4.0.0", @@ -746,6 +804,14 @@ "integrity": "sha512-mJVp13Ix6gFo3SBAy9U/kL+oeZqzlYYYLQBwXVBlVzIsZwBqGREnOro24oC/8s8aox+rJhtZ2DiQof++IrkA+g==", "dev": true }, + "klaw": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/klaw/-/klaw-2.1.1.tgz", + "integrity": "sha1-QrdolHARacyRD9DRnOZ3tfs3ivE=", + "requires": { + "graceful-fs": "4.1.11" + } + }, "lodash": { "version": "4.17.4", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", @@ -816,6 +882,14 @@ "lodash.isarray": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-3.0.4.tgz" } }, + "log-symbols": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-2.2.0.tgz", + "integrity": "sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==", + "requires": { + "chalk": "2.3.0" + } + }, "lokijs": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/lokijs/-/lokijs-1.5.1.tgz", @@ -827,6 +901,11 @@ "integrity": "sha512-mQuW55GhduF3ppo+ZRUTz1PRjEh1hS5BbqU7d8D0ez2OKxHDod7StPPeAVKisZR5aLkHZjdGWSL42LSONUJsZw==", "dev": true }, + "mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==" + }, "minimatch": { "version": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", @@ -881,6 +960,11 @@ "integrity": "sha1-Rpve9PivyaEWBW8HnfYYLQr7A4Q=", "dev": true }, + "moment": { + "version": "2.20.1", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.20.1.tgz", + "integrity": "sha512-Yh9y73JRljxW5QxN08Fner68eFLxM5ynNOAw2LbIB1YAGeQzZT8QFSUvkAz609Zf+IHhhaUxqZK8dG3W/+HEvg==" + }, "ms": { "version": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", @@ -2518,6 +2602,25 @@ "wrappy": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" } }, + "onetime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", + "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "requires": { + "mimic-fn": "1.2.0" + } + }, + "ora": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/ora/-/ora-1.4.0.tgz", + "integrity": "sha512-iMK1DOQxzzh2MBlVsU42G80mnrvUhqsMh74phHtDlrcTZPK0pH6o7l7DRshK+0YsxDyEuaOkziVdvM3T0QTzpw==", + "requires": { + "chalk": "2.3.0", + "cli-cursor": "2.1.0", + "cli-spinners": "1.1.0", + "log-symbols": "2.2.0" + } + }, "os-homedir": { "version": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=" @@ -2546,6 +2649,11 @@ "resolved": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.4.1.tgz", "integrity": "sha1-tUGO8EOd5UJfxJlQQtztFPsqhP8=" }, + "progress": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.0.tgz", + "integrity": "sha1-ihvjZr+Pwj2yvSPxDG/pILQ4nR8=" + }, "prompt": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/prompt/-/prompt-1.0.0.tgz", @@ -2567,6 +2675,15 @@ "mute-stream": "0.0.7" } }, + "restore-cursor": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", + "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "requires": { + "onetime": "2.0.1", + "signal-exit": "3.0.2" + } + }, "revalidator": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/revalidator/-/revalidator-0.1.8.tgz", @@ -2586,6 +2703,11 @@ "integrity": "sha512-1HwIYD/8UlOtFS3QO3w7ey+SdSDFE4HRNLZoZRYVQefrOY3l17epswImeB1ijgJFQJodIaHcwkp3r/myBjFVbg==", "dev": true }, + "signal-exit": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", + "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" + }, "sinon": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/sinon/-/sinon-4.2.2.tgz", diff --git a/package.json b/package.json index 6883885..0cca65a 100644 --- a/package.json +++ b/package.json @@ -44,8 +44,12 @@ "fs-extra": "^5.0.0", "ip": "^1.1.5", "js-yaml": "^3.10.0", + "klaw": "^2.1.1", "lodash": "^4.17.4", "lokijs": "^1.5.1", + "moment": "^2.20.1", + "ora": "^1.4.0", + "progress": "^2.0.0", "prompt": "^1.0.0", "uuid": "^3.1.0" } From 924e5ced8246117e7795d9deec03a52c321b0217 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 13 Feb 2018 14:47:20 -0600 Subject: [PATCH 75/89] - Second pass at cleanup script implementation. Added tests and documentation. --- README.md | 27 ++++++- cleanup.js | 150 +++++++++++++++++++----------------- config/default.yml | 3 + lib/cache/cache_base.js | 4 + lib/cache/cache_fs.js | 83 +++++++++++++++++++- lib/cache/cache_ram.js | 5 ++ package.json | 3 +- test/cache_base.js | 7 ++ test/cache_fs.js | 167 ++++++++++++++++++++++++++++++++++++++++ test/cache_ram.js | 6 ++ 10 files changed, 378 insertions(+), 77 deletions(-) create mode 100644 test/cache_fs.js diff --git a/README.md b/README.md index 058b493..ad343cd 100644 --- a/README.md +++ b/README.md @@ -17,6 +17,7 @@ At present time this open-source repository is maintained separately from the Ca * [Cache Modules](#cache-modules) * [cache\_fs (default)](#cache_fs-default) * [cache\_ram](#cache_ram) +* [Cache Cleanup](#cache-cleanup) * [Mirroring](#mirroring) * [Unity project Library Importer](#unity-project-library-importer) * [Contributors](#contributors) @@ -34,7 +35,7 @@ npm install unity-cache-server -g npm install github:Unity-Technologies/unity-cache-server -g ``` ## Usage ->Default options are suitable for quickly starting a cache server, with a default cache location of `./cache5.0` +>Default options are suitable for quickly starting a cache server, with a default cache location of `.cache_fs` ```bash unity-cache-server [arguments] ``` @@ -69,15 +70,15 @@ A simple, efficient file system backed cache. option | default | description --------- | ----------- | ----------- cachePath | `.cache_fs` | Path to cache directory +cleanupOptions.expireTimeSpan | `P30D` | [ASP.NET](https://msdn.microsoft.com/en-us/library/se73z7b9(v=vs.110).aspx) or [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601#Time_intervals) style timespan. Cache files that have not been accessed within this timespan will be eligible for cleanup/removal. The [moment](https://momentjs.com/docs/#/durations/) library is used to parse durations - more information on duration syntax can be found in the library documentation. +cleanupOptions.maxCacheSize | 0 | Size in bytes to limit overall cache disk utilization. The cleanup script will consider files for removal in least-recently-used order to bring the total disk utilization under this threshold. 0 disables this cleanup feature. #### Notes * This module is backwards compatible with v5.x Cache Server directories -* For performance and simplicity reasons, unlike prior versions, it does NOT operate as an LRU cache and does not enforce overall cache size restrictions. If disk space is a concern, external shell scripts can be executed periodically to clean up files that have not been accessed recently. * Supports worker threads (`--workers` option) ### cache_ram A high performance, fully in-memory LRU cache. #### Usage `--cache-module lib/cache/cache_ram` -#### Options option | default | description --------- | ----------- | ----------- pageSize | 100000000 | Smallest memory allocation to make, in bytes. i.e. the cache will grow in increments of pageSize. @@ -89,7 +90,25 @@ persistenceOptions.autosave | true | `true` to periodically save dirty memory pa persistenceOptions.autosaveInterval | 10000 | Minimum interval in milliseconds to save dirty pages. #### Notes * Does not support worker threads - +## Cache Cleanup +For performance and simplicity reasons, unlike prior versions, the cache_fs module does NOT operate as an LRU cache and does not enforce overall cache size restrictions. To manage disk usage, a separate cleanup script is provided that can either be run periodically or in "daemon" mode to automatically run at a given time interval. +### Usage +`unity-cache-server-cleanup [option]` +or +`node cleanup.js [options]` +#### Options +``` -V, --version output the version number + -c --cache-module [path] Use cache module at specified path + -P, --cache-path [path] Specify the path of the cache directory + -l, --log-level Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug) + -e, --expire-time-span Override the configured file expiration timespan. Both ASP.NET style time spans (days.minutes:hours:seconds, e.g. '15.23:59:59') and ISO 8601 time spans (e.g. 'P15DT23H59M59S') are supported. + -s, --max-cache-size Override the configured maximum cache size. Files will be removed from the cache until the max cache size is satisfied, using a Least Recently Used search. A value of 0 disables this check. + -d, --delete Delete cached files that match the configured criteria. Without this, the default behavior is to dry-run which will print diagnostic information only. + -D, --daemon Daemon mode: execute the cleanup script at the given interval in seconds as a foreground process. + -h, --help output usage information +``` +#### Notes +* Only the cache_fs module supports cache cleanup (cache_ram does not) ## Mirroring #### Usage Use the `--mirror [host:port]` option to relay all upload transactions to one or more Cache Server hosts (repeat the option for each host). There are checks in place to prevent self-mirroring, but beyond that it would be easy to create infinite transaction loops so use with care. diff --git a/cleanup.js b/cleanup.js index d8da2c8..6765b7e 100644 --- a/cleanup.js +++ b/cleanup.js @@ -4,13 +4,9 @@ const consts = require('./lib/constants'); const program = require('commander'); const path = require('path'); const moment = require('moment'); -const klaw = require('klaw'); const filesize =require('filesize'); -const fs = require('fs-extra'); const ora = require('ora'); -const { Transform } = require('stream'); - const config = require('config'); const VERSION = require('./package.json').version; @@ -19,88 +15,104 @@ function myParseInt(val, def) { return (!val && val !== 0) ? def : val; } +function parseTimeSpan(val) { + if(!moment.duration(val).isValid()) + { + helpers.log(consts.LOG_ERR, "Invalid timespan format"); + process.exit(1); + } + + return val; +} + const defaultCacheModule = config.get("Cache.defaultModule"); -program.description("Unity Cache Server - Cache Cleanup\n\n Remove files from cache that have not been accessed within the given .\n\n Both ASP.NET style time spans (days.minutes:hours:seconds, e.g. '15.23:59:59') and ISO 8601 time spans (e.g. 'P15DT23H59M59S') are supported.") +program.description("Unity Cache Server - Cache Cleanup\n\n Removes old files from supported cache modules.\n\n ") .version(VERSION) - .arguments('') .option('-c --cache-module [path]', 'Use cache module at specified path', defaultCacheModule) .option('-P, --cache-path [path]', 'Specify the path of the cache directory') .option('-l, --log-level ', 'Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug)', myParseInt, consts.DEFAULT_LOG_LEVEL) - .option('-d, --delete', 'Delete cached files that that have not been accessed within the timeSpan provided.') - .action(timeSpan => doCleanup(timeSpan)); + .option('-e, --expire-time-span ', 'Override the configured file expiration timespan. Both ASP.NET style time spans (days.minutes:hours:seconds, e.g. \'15.23:59:59\') and ISO 8601 time spans (e.g. \'P15DT23H59M59S\') are supported.', parseTimeSpan) + .option('-s, --max-cache-size ', 'Override the configured maximum cache size. Files will be removed from the cache until the max cache size is satisfied, using a Least Recently Used search. A value of 0 disables this check.', myParseInt) + .option('-d, --delete', 'Delete cached files that match the configured criteria. Without this, the default behavior is to dry-run which will print diagnostic information only.') + .option('-D, --daemon ', 'Daemon mode: execute the cleanup script at the given interval in seconds as a foreground process.', myParseInt); + +if (!process.argv.slice(2).length) { + return program.outputHelp(); +} program.parse(process.argv); -if (!process.argv.slice(2).length) { - program.outputHelp(); +helpers.setLogLevel(program.logLevel); + +const CacheModule = require(path.resolve(program.cacheModule)); + +if(!CacheModule.properties.cleanup) { + helpers.log(consts.LOG_ERR, "Configured cache module does not support cleanup script."); + process.exit(1); } -function doCleanup(timeSpan) { - helpers.setLogLevel(program.logLevel); +const cache = new CacheModule(); - const CacheModule = require(path.resolve(program.cacheModule)); +let cacheOpts = { cleanupOptions: {} }; - if(!CacheModule.properties.cleanup) { - helpers.log(consts.LOG_ERR, "Configured cache module does not support cleanup script."); - process.exit(1); - } +if(program.cachePath !== null) { + cacheOpts.cachePath = program.cachePath; +} - const cache = new CacheModule(); +if(program.hasOwnProperty('expireTimeSpan')) { + cacheOpts.cleanupOptions.expireTimeSpan = program.expireTimeSpan; +} - let cacheOpts = {}; - if(program.cachePath !== null) { - cacheOpts.cachePath = program.cachePath; - } +if(program.hasOwnProperty('maxCacheSize')) { + cacheOpts.cleanupOptions.maxCacheSize = program.maxCacheSize; +} - cache._options = cacheOpts; - helpers.log(consts.LOG_INFO, `Cache path is ${cache._cachePath}`); +const dryRun = !program.delete; +cache._options = cacheOpts; +helpers.log(consts.LOG_INFO, `Cache path is ${cache._cachePath}`); - const duration = moment.duration(timeSpan); - if(!duration.isValid()) { - helpers.log(consts.LOG_ERR, `Invalid timeSpan specified.`); - } +const msg = `Gathering cache files for expiration`; +let spinner = null; + +if(helpers.getLogLevel() < consts.LOG_DBG && helpers.getLogLevel() > consts.LOG_NONE) { + spinner = ora({color: 'white'}); +} +cache.on('cleanup_search_progress', data => { + let txt = `${msg} (${data.deleteCount} of ${data.cacheCount} files, ${filesize(data.deleteSize)})`; + spinner ? spinner.text = txt : helpers.log(consts.LOG_DBG, txt); +}); - const msg = `Gathering cache files that have not been accessed within ${duration.humanize()}`; - const spinner = ora({color:'white'}).start(`${msg} (found 0)`); - - const minFileAccessTime = moment().subtract(duration).toDate(); - let items = []; - let totalSize = 0; - let freedSize = 0; - let filterTransform = new Transform({ - objectMode: true, - transform(item, enc, next) { - if(item.stats.isDirectory()) return next(); - - totalSize += item.stats.size; - if(item.stats.atime < minFileAccessTime) { - spinner.text = `${msg} (found ${items.length}, ${filesize(freedSize)})`; - freedSize += item.stats.size; - this.push(item); - } - - next(); - } - }); - - klaw(cache._cachePath).pipe(filterTransform) - .on('data', item => items.push(item.path)) - .on('end', async () => { - spinner.stop(); - - if(program.delete) { - for(let item of items) { - helpers.log(consts.LOG_INFO, `Deleting ${item}`); - await fs.unlink(item); - } - } - - let pct = totalSize > 0 ? (freedSize/totalSize).toPrecision(2) * 100 : 0; - helpers.log(consts.LOG_INFO, `Found ${items.length} expired files: ${filesize(freedSize)} of ${filesize(totalSize)} total cache size (${pct}%).`); - if(!program.delete) { - helpers.log(consts.LOG_INFO, "Nothing deleted; run with --delete to remove expired files from the cache."); - } - }) +cache.on('cleanup_search_finish', () => { + if(spinner) spinner.stop(); +}); + +cache.on('cleanup_delete_item', item => { + helpers.log(consts.LOG_INFO, `Deleted ${item}`); +}); + +cache.on('cleanup_delete_finish', data => { + let pct = data.cacheSize > 0 ? (data.deleteSize/data.cacheSize).toPrecision(2) * 100 : 0; + helpers.log(consts.LOG_INFO, `Found ${data.deleteCount} expired files of ${data.cacheCount}. ${filesize(data.deleteSize)} of ${filesize(data.cacheSize)} (${pct}%).`); + if(dryRun) { + helpers.log(consts.LOG_INFO, "Nothing deleted; run with --delete to remove expired files from the cache."); + } +}); + +function doCleanup() { + if (spinner) spinner.start(msg); + cache.cleanup(dryRun) + .catch(err => { + if (spinner) spinner.stop(); + helpers.log(consts.LOG_ERR, err); + process.exit(1); + }); +} + +if(program.hasOwnProperty('daemon') && program.daemon > 0) { + setInterval(doCleanup, program.daemon * 1000); +} +else { + doCleanup(); } \ No newline at end of file diff --git a/config/default.yml b/config/default.yml index 4e532da..2c45147 100644 --- a/config/default.yml +++ b/config/default.yml @@ -13,6 +13,9 @@ Cache: throttledSaves: false cache_fs: cachePath: ".cache_fs" + cleanupOptions: + expireTimeSpan: "P30D" + maxCacheSize: 0 Mirror: options: queueProcessDelay: 2000 diff --git a/lib/cache/cache_base.js b/lib/cache/cache_base.js index d76c2bb..e888c0c 100644 --- a/lib/cache/cache_base.js +++ b/lib/cache/cache_base.js @@ -116,6 +116,10 @@ class CacheBase extends EventEmitter { registerClusterWorker(worker) { throw new Error("Not implemented"); } + + cleanup(dryRun = true) { + return Promise.reject(new Error("Not implemented")); + } } class PutTransaction extends EventEmitter { diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index d500b98..6545ec7 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -5,6 +5,9 @@ const path = require('path'); const fs = require('fs-extra'); const uuid = require('uuid'); const consts = require('../constants'); +const klaw = require('klaw'); +const moment = require('moment'); +const { Transform } = require('stream'); class CacheFS extends CacheBase { constructor() { @@ -56,6 +59,12 @@ class CacheFS extends CacheBase { return Promise.resolve(); } + async _addFileToCache(type, guid, hash, sourcePath) { + let filePath = this._calcFilepath(type, guid, hash); + await fs.move(sourcePath, filePath, { overwrite: true }); + return filePath; + } + async getFileInfo(type, guid, hash) { const stats = await fs.stat(this._calcFilepath(type, guid, hash)); return {size: stats.size}; @@ -81,9 +90,8 @@ class CacheFS extends CacheBase { let self = this; let moveFile = async (file) => { - let filePath = self._calcFilepath(file.type, transaction.guid, transaction.hash); - helpers.log(consts.LOG_TEST, `Adding file to cache: ${file.size} ${filePath}`); - await fs.move(file.file, filePath, { overwrite: true }); + let filePath = await self._addFileToCache(file.type, transaction.guid, transaction.hash, file.file); + helpers.log(consts.LOG_TEST, `Added file to cache: ${file.size} ${filePath}`); }; await transaction.finalize(); @@ -91,6 +99,75 @@ class CacheFS extends CacheBase { } registerClusterWorker(worker) {} + + cleanup(dryRun = true) { + const self = this; + + const expireDuration = moment.duration(this._options.cleanupOptions.expireTimeSpan); + if(!expireDuration.isValid() || expireDuration.asMilliseconds() === 0) { + return Promise.reject(new Error("Invalid expireTimeSpan option")); + } + + const minFileAccessTime = moment().subtract(expireDuration).toDate(); + const maxCacheSize = this._options.cleanupOptions.maxCacheSize; + + let allItems = []; + let deleteItems = []; + let cacheSize = 0; + let deleteSize = 0; + + let progressData = () => { + return { cacheCount: allItems.length, cacheSize: cacheSize, deleteCount: deleteItems.length, deleteSize: deleteSize }; + }; + + let filterTransform = new Transform({ + objectMode: true, + transform(item, enc, next) { + if(item.stats.isDirectory()) return next(); + allItems.push(item); + cacheSize += item.stats.size; + if(item.stats.atime < minFileAccessTime) { + deleteSize += item.stats.size; + this.push(item); + } + + self.emit('cleanup_search_progress', progressData()); + + next(); + } + }); + + let finalize = async () => { + if(maxCacheSize > 0 && cacheSize - deleteSize > maxCacheSize) { + allItems.sort((a, b) => { return a.stats.atime > b.stats.atime }); + for(let item of allItems) { + self.emit('cleanup_search_progress', progressData()); + deleteSize += item.stats.size; + deleteItems.push(item.path); + if(cacheSize - deleteSize <= maxCacheSize) break; + } + } + + self.emit('cleanup_search_finish', progressData()); + + if(!dryRun) { + for(let item of deleteItems) { + self.emit('cleanup_delete_item', item); + await fs.unlink(item); + } + } + + self.emit('cleanup_delete_finish', progressData()); + }; + + return new Promise((resolve, reject) => { + klaw(self._cachePath) + .on('error', err => reject(err)) + .pipe(filterTransform) + .on('data', item => deleteItems.push(item.path)) + .on('end', () => finalize().catch(reject).then(resolve)); + }); + } } class PutTransactionFS extends PutTransaction { diff --git a/lib/cache/cache_ram.js b/lib/cache/cache_ram.js index 1261963..a5967b5 100644 --- a/lib/cache/cache_ram.js +++ b/lib/cache/cache_ram.js @@ -342,6 +342,11 @@ class CacheRAM extends CacheBase { } registerClusterWorker(worker) {} + + cleanup(dryRun = true) { + // Not supported + return Promise.resolve(); + } } class PutTransactionRAM extends PutTransaction { diff --git a/package.json b/package.json index 0cca65a..3aed020 100644 --- a/package.json +++ b/package.json @@ -10,7 +10,8 @@ "test": "test" }, "bin": { - "unity-cache-server": "./main.js" + "unity-cache-server": "./main.js", + "unity-cache-server-cleanup": "./cleanup.js" }, "scripts": { "test": "nyc mocha", diff --git a/test/cache_base.js b/test/cache_base.js index f758876..b0b4cb6 100644 --- a/test/cache_base.js +++ b/test/cache_base.js @@ -130,6 +130,13 @@ describe("Cache: Base Class", () => { } }); }); + + describe("cleanup", () => { + it("should require override implementation in subclasses by returning an error", () => { + return cache.endPutTransaction() + .then(() => { throw new Error("Expected error!"); }, () => {}); + }); + }); }); describe("PutTransaction: Base Class", () => { diff --git a/test/cache_fs.js b/test/cache_fs.js new file mode 100644 index 0000000..dd561e1 --- /dev/null +++ b/test/cache_fs.js @@ -0,0 +1,167 @@ +const tmp = require('tmp'); +const fs = require('fs-extra'); +const Cache = require('../lib/cache/cache_fs'); +const randomBuffer = require('./test_utils').randomBuffer; +const generateCommandData = require('./test_utils').generateCommandData; +const sleep = require('./test_utils').sleep; +const path = require('path'); +const assert = require('assert'); +const moment = require('moment'); + +const MIN_FILE_SIZE = 1024 * 5; +const MAX_FILE_SIZE = MIN_FILE_SIZE; + +let cacheOpts = { + cachePath: tmp.tmpNameSync({}).toString() +}; + +let cache; + +let addFileToCache = async (atime) => { + const data = generateCommandData(MIN_FILE_SIZE, MAX_FILE_SIZE); + let tmpPath = tmp.tmpNameSync({dir: cacheOpts.cachePath}); + await fs.writeFile(tmpPath, data.bin); + let cacheFile = await cache._addFileToCache('a', data.guid, data.hash, tmpPath); + await fs.utimes(cacheFile, atime, atime); + + let stats = await fs.stat(cacheFile); + assert(moment(stats.atime).isSame(atime, 'second'), `${stats.atime} != ${atime}`); + return cacheFile; +}; + +describe("Cache: FS", () => { + describe("Public API", () => { + beforeEach(() => { + cache = new Cache(); + }); + + afterEach(() => fs.remove(cacheOpts.cachePath)); + + describe("cleanup", () => { + it("should remove files that have not been accessed within a given timespan (ASP.NET style)", async () => { + let opts = Object.assign({}, cacheOpts); + opts.cleanupOptions = { + expireTimeSpan: "P1D", + maxCacheSize: 0 + }; + + await cache.init(opts); + let file1 = await addFileToCache(moment().subtract(2, 'days').toDate()); + let file2 = await addFileToCache(moment().subtract(2, 'days').toDate()); + let file3 = await addFileToCache(moment().toDate()); + + await cache.cleanup(false); + + assert(!await fs.pathExists(file1)); + assert(!await fs.pathExists(file2)); + assert(await fs.pathExists(file3)); + }); + + it("should remove files that have not been accessed within a given timespan (ISO 8601 style)", async () => { + let opts = Object.assign({}, cacheOpts); + opts.cleanupOptions = { + expireTimeSpan: "1.00:00:00", + maxCacheSize: 0 + }; + + await cache.init(opts); + let file1 = await addFileToCache(moment().subtract(2, 'days').toDate()); + let file2 = await addFileToCache(moment().subtract(2, 'days').toDate()); + let file3 = await addFileToCache(moment().toDate()); + + assert(await fs.pathExists(file1)); + assert(await fs.pathExists(file2)); + assert(await fs.pathExists(file3)); + + await cache.cleanup(false); + + assert(!await fs.pathExists(file1)); + assert(!await fs.pathExists(file2)); + assert(await fs.pathExists(file3)); + }); + + it("should reject a promise with an invalid timespan", () => { + let opts = Object.assign({}, cacheOpts); + opts.cleanupOptions = { + expireTimeSpan: "ABCDEF", + maxCacheSize: 0 + }; + + return cache.init(opts) + .then(() => cache.cleanup()) + .then(() => { throw new Error("Promise resolved, but expected rejection!"); }, err => assert(err)); + }); + + it("should remove files in least-recently-used order until the overall cache size is lower than a given threshold", async () => { + let opts = Object.assign({}, cacheOpts); + opts.cleanupOptions = { + expireTimeSpan: "P30D", + maxCacheSize: MIN_FILE_SIZE * 2 + 1 + }; + + await cache.init(opts); + let file1 = await addFileToCache(moment().toDate()); + let file2 = await addFileToCache(moment().subtract(1, 'days').toDate()); + let file3 = await addFileToCache(moment().subtract(5, 'days').toDate()); + + assert(await fs.pathExists(file1)); + assert(await fs.pathExists(file2)); + assert(await fs.pathExists(file3)); + + await cache.cleanup(false); + + assert(await fs.pathExists(file1)); + assert(await fs.pathExists(file2)); + assert(!await fs.pathExists(file3)); + + opts.cleanupOptions.maxCacheSize = MIN_FILE_SIZE + 1; + cache._options = opts; + + await cache.cleanup(false); + assert(await fs.pathExists(file1)); + assert(!await fs.pathExists(file2)); + }); + + it("should emit events while processing files", async () => { + let opts = Object.assign({}, cacheOpts); + opts.cleanupOptions = { + expireTimeSpan: "P30D", + maxCacheSize: 1 + }; + + await cache.init(opts); + await addFileToCache(moment().toDate()); + + let cleanup_search_progress = false; + let cleanup_search_finish = false; + let cleanup_delete_item = false; + let cleanup_delete_finish = false; + + cache.on('cleanup_search_progress', () => cleanup_search_progress = true) + .on('cleanup_search_finish', () => cleanup_search_finish = true) + .on('cleanup_delete_item', () => cleanup_delete_item = true) + .on('cleanup_delete_finish', () => cleanup_delete_finish = true); + + return cache.cleanup(false).then(() => { + assert(cleanup_search_progress); + assert(cleanup_search_finish); + assert(cleanup_delete_item); + assert(cleanup_delete_finish); + }); + }); + + it("should not delete any files if the dryRun option is true", async () => { + let opts = Object.assign({}, cacheOpts); + opts.cleanupOptions = { + expireTimeSpan: "P30D", + maxCacheSize: 1 + }; + + await cache.init(opts); + let file = await addFileToCache(moment().toDate()); + cache.cleanup(true); + assert(await fs.pathExists(file)); + }); + }); + }); +}); \ No newline at end of file diff --git a/test/cache_ram.js b/test/cache_ram.js index f6c8902..2424a4c 100644 --- a/test/cache_ram.js +++ b/test/cache_ram.js @@ -129,6 +129,12 @@ describe("Cache: RAM", () => { assert(dir.includes(pages[0])); }); }); + + describe("cleanup", () => { + it("should return a resolved promise", () => { + return cache.cleanup(); + }); + }) }); describe("Internal", () => { From 0f9ac18b97b0d0c9d073475ce833f0c32b0830ab Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Tue, 13 Feb 2018 14:48:10 -0600 Subject: [PATCH 76/89] Update .gitignore --- .gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 50147ee..bb3e6ec 100644 --- a/.gitignore +++ b/.gitignore @@ -5,4 +5,5 @@ node_modules/ .cache_ram*/ .cache_fs*/ .nyc_output/ -coverage/ \ No newline at end of file +coverage/ +local-production.yml From 077ab2306340be4973743b9ac61717b189b58461 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 14 Feb 2018 13:03:28 -0600 Subject: [PATCH 77/89] removed unused require --- main.js | 1 - 1 file changed, 1 deletion(-) diff --git a/main.js b/main.js index 45e0b67..c261166 100755 --- a/main.js +++ b/main.js @@ -7,7 +7,6 @@ const path = require('path'); const CacheServer = require('./lib').Server; const config = require('config'); const prompt = require('prompt'); -const dns = require('dns'); const ip = require('ip'); const VERSION = require('./package.json').version; From d2f5eff7cc75b030fd8395a6a268e42d412a3dab Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 14 Feb 2018 13:04:05 -0600 Subject: [PATCH 78/89] Documentation formatting fix --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index ad343cd..7327040 100644 --- a/README.md +++ b/README.md @@ -65,7 +65,7 @@ Configuration options for all modules are set in the `config/default.yml` file. ### cache_fs (default) A simple, efficient file system backed cache. #### Usage -`--cache-module lib/cache/cache_fs`. +`--cache-module lib/cache/cache_fs` #### Options option | default | description --------- | ----------- | ----------- @@ -79,6 +79,7 @@ cleanupOptions.maxCacheSize | 0 | Size in bytes to limit overall cache disk util A high performance, fully in-memory LRU cache. #### Usage `--cache-module lib/cache/cache_ram` +#### Options option | default | description --------- | ----------- | ----------- pageSize | 100000000 | Smallest memory allocation to make, in bytes. i.e. the cache will grow in increments of pageSize. From 9349d916f3b97d9cd3a9174892f032d13c92faea Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 14 Feb 2018 13:06:37 -0600 Subject: [PATCH 79/89] Add unity-cache-server-import bin link --- package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index 3aed020..ffb0df9 100644 --- a/package.json +++ b/package.json @@ -11,7 +11,8 @@ }, "bin": { "unity-cache-server": "./main.js", - "unity-cache-server-cleanup": "./cleanup.js" + "unity-cache-server-cleanup": "./cleanup.js", + "unity-cache-server-import": "./import.js" }, "scripts": { "test": "nyc mocha", From 10eef6b0ba14c0058e675b5e5f43c2be16562502 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 16 Feb 2018 08:10:55 -0600 Subject: [PATCH 80/89] Beta version bump --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index ffb0df9..e18a11e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "unity-cache-server", - "version": "6.0.0-beta.1", + "version": "6.0.0-beta.2", "description": "Unity Cache Server", "main": "lib/index.js", "engines": { From 0b6b78b74033c151bc574cf2e32f3fb04b882b07 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 16 Feb 2018 16:37:51 -0600 Subject: [PATCH 81/89] =?UTF-8?q?-=20add=20a=20few=20command=20line=20opti?= =?UTF-8?q?ons=20for=20managing=20config=20files=20-=20fixes=20to=20ensure?= =?UTF-8?q?=20cache=20modules=20and=20config=20files=20are=20correctly=20l?= =?UTF-8?q?ocated=20when=20running=20the=20cache=20server=20via=20npm=20gl?= =?UTF-8?q?obal=20install=20-=20simplify=20the=20path=20search=20for=20cac?= =?UTF-8?q?he=20modules=20so=20the=20default=20modules=20don=E2=80=99t=20r?= =?UTF-8?q?equire=20sub=20path=20(e..g,=20lib/cache/cache=5Fram=20can=20no?= =?UTF-8?q?w=20be=20loaded=20as=20just=20cache=5Fram)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 25 ++++++++++++++++++++----- cleanup.js | 5 ++--- config/default.yml | 2 +- lib/helpers.js | 32 +++++++++++++++++++++++++++++++- lib/index.js | 4 ++++ main.js | 39 ++++++++++++++++++++++++++++++++++----- package.json | 2 +- 7 files changed, 93 insertions(+), 16 deletions(-) diff --git a/README.md b/README.md index 7327040..a6d24f0 100644 --- a/README.md +++ b/README.md @@ -44,16 +44,31 @@ unity-cache-server [arguments] ``` -V, --version output the version number -p, --port Specify the server port, only apply to new cache server, default is 8126 - -c --cache-module [path] Use cache module at specified path. Default is 'lib/cache/cache_fs' + -c --cache-module [path] Use cache module at specified path. Default is 'cache_fs' -P, --cache-path [path] Specify the path of the cache directory. -l, --log-level Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is 3 -w, --workers Number of worker threads to spawn. Default is 0 -m --mirror [host:port] Mirror transactions to another cache server. Can be repeated for multiple mirrors. -m, --monitor-parent-process Monitor a parent process and exit if it dies + --dump-config Write the active configuration to the console + --save-config [path] Write the active configuration to the specified file and exit. Defaults to ./default.yml + --NODE_CONFIG_DIR=[path] Specify the directory to search for config files. This is equivalent to setting the NODE_CONFIG_DIR environment variable. Without this option, the built-in configuration is used. With this option the default is to look in the current directory for config files. -h, --help output usage information ``` -## Configuration file +## Configuration files `config/default.yml` contains various configuration values for the cache modules (see below) and other features. The config system is based on the [node-config](`https://github.com/lorenwest/node-config/wiki/Configuration-Files`) module. Refer to the documentation in that package for tips on how to manage environment specific config files. +By default, running `unity-cache-server` will use the built-in configuration file. To start using a custom config file, save the current config to a new file and then use the `--NODE_CONFIG_DIR` option to override the location where the cache server will look for your config file(s). +#### Examples (Mac/Linux) +1) `mkdir config` +2) `unity-cache-server --save-config config/default.yml` +3) `unity-cache-server --NODE_CONFIG_DIR=config` + +You can also have multiple configuration files based on environment: +1) `export NODE_ENV=development` +2) `unity-cache-server --save-config config/local-development.yml` + +To dump the current config to the console +`unity-cache-server --dump-config` ## Client Configuration The [Cache Server](https://docs.unity3d.com/Manual/CacheServer.html) section of the Unity Manual contains detailed information on connecting clients to remote Cache Servers. @@ -65,7 +80,7 @@ Configuration options for all modules are set in the `config/default.yml` file. ### cache_fs (default) A simple, efficient file system backed cache. #### Usage -`--cache-module lib/cache/cache_fs` +`--cache-module cache_fs` #### Options option | default | description --------- | ----------- | ----------- @@ -78,7 +93,7 @@ cleanupOptions.maxCacheSize | 0 | Size in bytes to limit overall cache disk util ### cache_ram A high performance, fully in-memory LRU cache. #### Usage -`--cache-module lib/cache/cache_ram` +`--cache-module cache_ram` #### Options option | default | description --------- | ----------- | ----------- @@ -124,7 +139,7 @@ Tools are provided to quickly seed a Cache Server from a fully imported Unity pr #### Steps to Import 1) Add the [CacheServerTransactionImporter.cs](./Unity/CacheServerTransactionExporter.cs) script to the Unity project you wish to export. 2) Select the Menu item _Cache Server Utilities -> Export Transactions_ to save an export data file in .json format. Alternatively, with the script added to your project, you can run Unity in batchmode and [execute the static method](https://docs.unity3d.com/Manual/CommandLineArguments.html) `CacheServerTransactionExporter.ExportTransactions([path])` where `path` is the full path and filename to export. -3) Run the import utility to begin the import process: `node import.js [server:port]` +3) Run the import utility to begin the import process: `unity-cache-server-import [server:port]` #### Notes * On very large projects, Unity may appear to freeze while generating the exported JSON data. * The default `server:port` is `localhost:8126` diff --git a/cleanup.js b/cleanup.js index 6765b7e..5631425 100644 --- a/cleanup.js +++ b/cleanup.js @@ -2,7 +2,6 @@ const helpers = require('./lib/helpers'); const consts = require('./lib/constants'); const program = require('commander'); -const path = require('path'); const moment = require('moment'); const filesize =require('filesize'); const ora = require('ora'); @@ -27,7 +26,7 @@ function parseTimeSpan(val) { const defaultCacheModule = config.get("Cache.defaultModule"); -program.description("Unity Cache Server - Cache Cleanup\n\n Removes old files from supported cache modules.\n\n ") +program.description("Unity Cache Server - Cache Cleanup\n\n Removes old files from supported cache modules.") .version(VERSION) .option('-c --cache-module [path]', 'Use cache module at specified path', defaultCacheModule) .option('-P, --cache-path [path]', 'Specify the path of the cache directory') @@ -45,7 +44,7 @@ program.parse(process.argv); helpers.setLogLevel(program.logLevel); -const CacheModule = require(path.resolve(program.cacheModule)); +const CacheModule = helpers.resolveCacheModule(program.cacheModule, __dirname); if(!CacheModule.properties.cleanup) { helpers.log(consts.LOG_ERR, "Configured cache module does not support cleanup script."); diff --git a/config/default.yml b/config/default.yml index 2c45147..e38c655 100644 --- a/config/default.yml +++ b/config/default.yml @@ -1,5 +1,5 @@ Cache: - defaultModule: "lib/cache/cache_fs" + defaultModule: "cache_fs" options: cache_ram: cachePath: ".cache_ram" diff --git a/lib/helpers.js b/lib/helpers.js index a731261..515c57c 100644 --- a/lib/helpers.js +++ b/lib/helpers.js @@ -1,6 +1,7 @@ const cluster = require('cluster'); const consts = require("./constants"); const dns = require('dns'); +const path =require('path'); let logLevel = consts.LOG_TEST; @@ -141,4 +142,33 @@ exports.setLogLevel = function(lvl) { logLevel = Math.min(consts.LOG_DBG, Math.max(consts.LOG_NONE, lvl)); }; -exports.getLogLevel = () => logLevel; \ No newline at end of file +exports.getLogLevel = () => logLevel; + +exports.initConfigDir = (rootDir) => { + if(!process.env.hasOwnProperty('NODE_CONFIG_DIR')) { + process.env['NODE_CONFIG_DIR'] = path.resolve(rootDir, "config/"); + } +}; + +exports.resolveCacheModule = (module, rootPath) => { + + // Try absolute path first + let modulePath = path.resolve(module); + + try { + return require(modulePath); + } + catch(err) {} + + // Try relative to module root + modulePath = path.resolve(rootPath, module); + + try { + return require(modulePath); + } + catch(err) {} + + // Finally, try inside of the module root lib/cache folder + modulePath = path.resolve(rootPath, 'lib/cache', module); + return require(modulePath); +}; diff --git a/lib/index.js b/lib/index.js index 72b0610..9ac9a22 100644 --- a/lib/index.js +++ b/lib/index.js @@ -1,3 +1,7 @@ +const path = require('path'); +const helpers = require('./helpers'); +helpers.initConfigDir(path.dirname(__dirname)); + exports.Server = require('./server/server'); exports.ClientStreamProcessor = require('./server/client_stream_processor'); exports.CommandProcessor = require('./server/command_processor'); diff --git a/main.js b/main.js index c261166..ef79885 100755 --- a/main.js +++ b/main.js @@ -3,12 +3,13 @@ const cluster = require('cluster'); const helpers = require('./lib/helpers'); const consts = require('./lib/constants'); const program = require('commander'); -const path = require('path'); const CacheServer = require('./lib').Server; -const config = require('config'); const prompt = require('prompt'); const ip = require('ip'); const VERSION = require('./package.json').version; +const config = require('config'); +const fs = require('fs-extra'); +const path = require('path'); function myParseInt(val, def) { val = parseInt(val); @@ -34,10 +35,38 @@ program.description("Unity Cache Server") .option('-l, --log-level ', 'Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug)', myParseInt, consts.DEFAULT_LOG_LEVEL) .option('-w, --workers ', 'Number of worker threads to spawn', zeroOrMore, consts.DEFAULT_WORKERS) .option('-m --mirror [host:port]', 'Mirror transactions to another cache server. Can be repeated for multiple mirrors', collect, []) - .option('-m, --monitor-parent-process ', 'Monitor a parent process and exit if it dies', myParseInt, 0); + .option('-m, --monitor-parent-process ', 'Monitor a parent process and exit if it dies', myParseInt, 0) + .option('--dump-config', 'Write the active configuration to the console') + .option('--save-config [path]', 'Write the active configuration to the specified file and exit. Defaults to ./default.yml') + .option('--NODE_CONFIG_DIR [path]', 'Specify the directory to search for config files. This is equivalent to setting the NODE_CONFIG_DIR environment variable. Without this option, the built-in configuration is used. With this option the default is to look in the current directory for config files.'); program.parse(process.argv); +if(program.saveConfig || program.dumpConfig) { + const configs = config.util.getConfigSources(); + const configData = configs.length > 0 ? configs[configs.length - 1].original : ''; + + if(program.dumpConfig) { + console.log(configData); + } + + if(program.saveConfig) { + let configFile = (typeof(program.saveConfig) === 'boolean') ? 'default.yml' : program.saveConfig; + configFile = path.resolve(configFile); + + if (fs.pathExistsSync(configFile)) { + helpers.log(consts.LOG_ERR, `${configFile} already exists - will not overwrite.`); + process.exit(1); + } + + fs.ensureDirSync(path.dirname(configFile)); + fs.writeFileSync(configFile, configData); + helpers.log(consts.LOG_INFO, `config saved to ${configFile}`); + } + + process.exit(0); +} + helpers.setLogLevel(program.logLevel); helpers.setLogger(program.workers > 0 ? helpers.defaultClusterLogger : helpers.defaultLogger); @@ -67,7 +96,7 @@ const errHandler = function () { process.exit(1); }; -const CacheModule = require(path.resolve(program.cacheModule)); +const CacheModule = helpers.resolveCacheModule(program.cacheModule, __dirname); const Cache = new CacheModule(); if(program.workers > 0 && !CacheModule.properties.clustering) { @@ -111,7 +140,7 @@ Cache.init(cacheOpts) server = new CacheServer(Cache, opts); if(cluster.isMaster) { - helpers.log(consts.LOG_INFO, `Cache Server version ${VERSION}; Cache module ${program.cacheModule}`); + helpers.log(consts.LOG_INFO, `Cache Server version ${VERSION}; Cache module is ${program.cacheModule}`); if(program.workers === 0) { server.start(errHandler).then(() => { diff --git a/package.json b/package.json index e18a11e..4668d71 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "unity-cache-server", - "version": "6.0.0-beta.2", + "version": "6.0.0-beta.5", "description": "Unity Cache Server", "main": "lib/index.js", "engines": { From eacc867017b1e15762a2cfef803a77efd835070f Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 21 Feb 2018 20:00:34 -0600 Subject: [PATCH 82/89] - cache_ram: fixed possible race condition when replacing a file while it is open for download - cache_fs: improve upload performance by not waiting for temp file to be moved into place when finalizing a transaction --- lib/cache/cache_fs.js | 5 +++-- lib/cache/cache_ram.js | 35 +++++++++++++++++++++-------- test/cache_api.js | 46 +++++++++++++++++++++++++++++++++++++- test/cache_fs.js | 3 --- test/cache_ram.js | 19 ++++++++-------- test/transaction_mirror.js | 14 ++++++------ 6 files changed, 91 insertions(+), 31 deletions(-) diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 6545ec7..6621c9f 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -90,8 +90,9 @@ class CacheFS extends CacheBase { let self = this; let moveFile = async (file) => { - let filePath = await self._addFileToCache(file.type, transaction.guid, transaction.hash, file.file); - helpers.log(consts.LOG_TEST, `Added file to cache: ${file.size} ${filePath}`); + self._addFileToCache(file.type, transaction.guid, transaction.hash, file.file) + .then(filePath => helpers.log(consts.LOG_TEST, `Added file to cache: ${file.size} ${filePath}`), + err => helpers.log(consts.LOG_ERR, err)); }; await transaction.finalize(); diff --git a/lib/cache/cache_ram.js b/lib/cache/cache_ram.js index a5967b5..a198b6d 100644 --- a/lib/cache/cache_ram.js +++ b/lib/cache/cache_ram.js @@ -19,6 +19,7 @@ class CacheRAM extends CacheBase { this._db = null; this._pages = {}; this._serializeInProgress = false; + this._guidRefs = {}; } static get properties() { @@ -165,9 +166,13 @@ class CacheRAM extends CacheBase { * @param {Buffer} buffer * @private */ - _addFileToCache(type, guid, hash, buffer) { + async _addFileToCache(type, guid, hash, buffer) { const key = CacheRAM._calcIndexKey(type, guid, hash); + if(this._guidRefs.hasOwnProperty(key) && this._guidRefs[key] > 0) { + await new Promise(resolve => this.once(`guidRefRelease-${key}`, resolve)); + } + const entry = this._reserveBlock(key, buffer.length); helpers.log(consts.LOG_TEST, `Saving file key: ${key} pageIndex: ${entry.pageIndex} pageOffset: ${entry.pageOffset} size: ${entry.size}`); @@ -311,7 +316,7 @@ class CacheRAM extends CacheBase { const file = this._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size); - return new Readable({ + const stream = new Readable({ read() { if(this.didPush) return this.push(null); @@ -321,6 +326,22 @@ class CacheRAM extends CacheBase { highWaterMark: file.length }); + + if(this._guidRefs.hasOwnProperty(key)) { + this._guidRefs[key]++; + } + else { + this._guidRefs[key] = 1; + } + + stream.on('end', () => { + this._guidRefs[key]--; + if(this._guidRefs[key] === 0) { + this.emit(`guidRefRelease-${key}`); + } + }); + + return stream; } async createPutTransaction(guid, hash) { @@ -331,13 +352,9 @@ class CacheRAM extends CacheBase { await this._waitForSerialize(); await transaction.finalize(); - try { - transaction.files.forEach(file => { - this._addFileToCache(file.type, transaction.guid, transaction.hash, file.buffer); - }); - } - catch(err) { - helpers.log(consts.LOG_ERR, err); + for(let file of transaction.files) { + this._addFileToCache(file.type, transaction.guid, transaction.hash, file.buffer) + .catch(err => helpers.log(consts.LOG_ERR, err)); } } diff --git a/test/cache_api.js b/test/cache_api.js index dd2c6fe..e989977 100644 --- a/test/cache_api.js +++ b/test/cache_api.js @@ -4,6 +4,7 @@ const loki = require('lokijs'); const fs = require('fs-extra'); const sleep = require('./test_utils').sleep; const generateCommandData = require('./test_utils').generateCommandData; +const readStream = require('./test_utils').readStream; const EventEmitter = require('events'); let test_modules = [ @@ -12,7 +13,7 @@ let test_modules = [ path: "../lib/cache/cache_ram", options: { cachePath: tmp.tmpNameSync({}), - pageSize: 10000, + pageSize: 1024 * 1024, minFreeBlockSize: 1024, persistenceOptions: { adapter: new loki.LokiMemoryAdapter() @@ -109,6 +110,7 @@ describe("Cache API", () => { return trx.getWriteStream('i', fileData.info.length) .then(stream => stream.end(fileData.info)) .then(() => cache.endPutTransaction(trx)) + .then(() => sleep(50)) .then(() => cache.getFileInfo('i', fileData.guid, fileData.hash)) .then(info => assert.equal(info.size, fileData.info.length)); }); @@ -154,6 +156,48 @@ describe("Cache API", () => { return cache.getFileStream('a', fileData.guid, fileData.hash) .then(() => { throw new Error("Expected error!"); }, err => assert(err)); }); + + + it("should handle files being replaced while read streams to the same file are already open", async () => { + const TEST_FILE_SIZE = 1024 * 64 * 2; + const FILE_TYPE = 'i'; + + let fData = generateCommandData(TEST_FILE_SIZE, TEST_FILE_SIZE); + + // Add a file to the cache (use the info data) + let trx = await cache.createPutTransaction(fData.guid, fData.hash); + let wStream = await trx.getWriteStream('i', fData.info.length); + await new Promise(resolve => wStream.end(fData.info, resolve)); + await cache.endPutTransaction(trx); + await sleep(50); + + // Get a read stream + let rStream = await cache.getFileStream(FILE_TYPE, fData.guid, fData.hash); + + // Read a block + let buf = Buffer.allocUnsafe(fData.info.length); + let bytes = await new Promise(resolve => rStream.once('readable', () => resolve(rStream.read(1024 * 64)))); + bytes.copy(buf, 0, 0); + + // Replace the file (use the resource data) + trx = await cache.createPutTransaction(fData.guid, fData.hash); + wStream = await trx.getWriteStream(FILE_TYPE, fData.resource.length); + await new Promise(resolve => wStream.end(fData.resource, resolve)); + await cache.endPutTransaction(trx); + await sleep(50); + + // Read the rest of the file - compare it to the info data + bytes = await readStream(rStream, fData.info.length - bytes.length); + bytes.copy(buf, fData.info.length - bytes.length, 0); + assert.equal(buf.compare(fData.info), 0); + + // Get another new read stream to the same guid + rStream = await cache.getFileStream(FILE_TYPE, fData.guid, fData.hash); + + // Read the file and compare it to the resource data + buf = await readStream(rStream, fData.resource.length); + assert.equal(buf.compare(fData.resource), 0); + }); }); }); }); diff --git a/test/cache_fs.js b/test/cache_fs.js index dd561e1..9178a49 100644 --- a/test/cache_fs.js +++ b/test/cache_fs.js @@ -1,10 +1,7 @@ const tmp = require('tmp'); const fs = require('fs-extra'); const Cache = require('../lib/cache/cache_fs'); -const randomBuffer = require('./test_utils').randomBuffer; const generateCommandData = require('./test_utils').generateCommandData; -const sleep = require('./test_utils').sleep; -const path = require('path'); const assert = require('assert'); const moment = require('moment'); diff --git a/test/cache_ram.js b/test/cache_ram.js index 2424a4c..b26439d 100644 --- a/test/cache_ram.js +++ b/test/cache_ram.js @@ -29,10 +29,10 @@ describe("Cache: RAM", () => { let cache; let fileData = generateCommandData(MIN_FILE_SIZE, MAX_FILE_SIZE); - let writeFileDataToCache = (fileData) => { - cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); - cache._addFileToCache('a', fileData.guid, fileData.hash, fileData.bin); - cache._addFileToCache('r', fileData.guid, fileData.hash, fileData.resource); + let writeFileDataToCache = async (fileData) => { + await cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); + await cache._addFileToCache('a', fileData.guid, fileData.hash, fileData.bin); + await cache._addFileToCache('r', fileData.guid, fileData.hash, fileData.resource); }; describe("Public API", () => { @@ -170,7 +170,7 @@ describe("Cache: RAM", () => { // Remove all files from the cache dir await fs.emptyDir(opts.cachePath); // Replace a single file - cache._addFileToCache('i', fileData.guid, fileData.hash, randomBuffer(fileData.info.length)); + await cache._addFileToCache('i', fileData.guid, fileData.hash, randomBuffer(fileData.info.length)); // Store the dirty page list again dirty = dirtyPages(); // Serialize the cache again @@ -184,7 +184,7 @@ describe("Cache: RAM", () => { describe("_deserialize", () => { beforeEach(async () => { - writeFileDataToCache(fileData); + await writeFileDataToCache(fileData); await cache._serialize(); }); @@ -285,12 +285,13 @@ describe("Cache: RAM", () => { }); describe("_addFileToCache", () => { - it("should throw an error if the cache cannot grow to accommodate the new file", () => { + it("should throw an error if the cache cannot grow to accommodate the new file", async () => { for(let x = 0; x < opts.maxPageCount; x++) { - cache._addFileToCache('a', randomBuffer(16), randomBuffer(16), randomBuffer(opts.pageSize)); + await cache._addFileToCache('a', randomBuffer(16), randomBuffer(16), randomBuffer(opts.pageSize)); } - assert.throws(() => cache._addFileToCache('a', randomBuffer(16), randomBuffer(16), randomBuffer(opts.pageSize * 2))); + cache._addFileToCache('a', randomBuffer(16), randomBuffer(16), randomBuffer(opts.pageSize * 2)) + .then(() => { throw new Error("Expected exception!") }, () => {}); }); }); }); diff --git a/test/transaction_mirror.js b/test/transaction_mirror.js index a2ae93e..a96331f 100644 --- a/test/transaction_mirror.js +++ b/test/transaction_mirror.js @@ -13,10 +13,10 @@ let cacheOpts = { } }; -let writeFileDataToCache = (cache, fileData) => { - cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); - cache._addFileToCache('a', fileData.guid, fileData.hash, fileData.bin); - cache._addFileToCache('r', fileData.guid, fileData.hash, fileData.resource); +let writeFileDataToCache = async (cache, fileData) => { + await cache._addFileToCache('i', fileData.guid, fileData.hash, fileData.info); + await cache._addFileToCache('a', fileData.guid, fileData.hash, fileData.bin); + await cache._addFileToCache('r', fileData.guid, fileData.hash, fileData.resource); }; describe("TransactionMirror", () => { @@ -40,11 +40,11 @@ describe("TransactionMirror", () => { generateCommandData(1024, 1024) ]; - fileData.forEach(d => { - writeFileDataToCache(this.sourceCache, d); + for(let d of fileData) { + await writeFileDataToCache(this.sourceCache, d); const trxMock = { guid: d.guid, hash: d.hash, manifest: ['i', 'a', 'r'] }; this.mirror.queueTransaction(trxMock); - }); + } await sleep(50); From 866a4a173d529b231b3a48b15ee75241c7391d26 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 21 Feb 2018 20:59:48 -0600 Subject: [PATCH 83/89] - Various fixes to config file handling that only manifested when installing via nom globally, and using the bin links (like unity-cache-server) --- README.md | 2 +- cleanup.js | 9 ++++++--- lib/helpers.js | 3 ++- main.js | 13 ++++++++----- package.json | 2 +- 5 files changed, 18 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index a6d24f0..f3024d6 100644 --- a/README.md +++ b/README.md @@ -52,7 +52,7 @@ unity-cache-server [arguments] -m, --monitor-parent-process Monitor a parent process and exit if it dies --dump-config Write the active configuration to the console --save-config [path] Write the active configuration to the specified file and exit. Defaults to ./default.yml - --NODE_CONFIG_DIR=[path] Specify the directory to search for config files. This is equivalent to setting the NODE_CONFIG_DIR environment variable. Without this option, the built-in configuration is used. With this option the default is to look in the current directory for config files. + --NODE_CONFIG_DIR= Specify the directory to search for config files. This is equivalent to setting the NODE_CONFIG_DIR environment variable. Without this option, the built-in configuration is used. -h, --help output usage information ``` ## Configuration files diff --git a/cleanup.js b/cleanup.js index 5631425..558fc16 100644 --- a/cleanup.js +++ b/cleanup.js @@ -1,12 +1,13 @@ #!/usr/bin/env node const helpers = require('./lib/helpers'); +helpers.initConfigDir(__dirname); +const config = require('config'); + const consts = require('./lib/constants'); const program = require('commander'); const moment = require('moment'); const filesize =require('filesize'); const ora = require('ora'); - -const config = require('config'); const VERSION = require('./package.json').version; function myParseInt(val, def) { @@ -28,13 +29,15 @@ const defaultCacheModule = config.get("Cache.defaultModule"); program.description("Unity Cache Server - Cache Cleanup\n\n Removes old files from supported cache modules.") .version(VERSION) + .allowUnknownOption(true) .option('-c --cache-module [path]', 'Use cache module at specified path', defaultCacheModule) .option('-P, --cache-path [path]', 'Specify the path of the cache directory') .option('-l, --log-level ', 'Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug)', myParseInt, consts.DEFAULT_LOG_LEVEL) .option('-e, --expire-time-span ', 'Override the configured file expiration timespan. Both ASP.NET style time spans (days.minutes:hours:seconds, e.g. \'15.23:59:59\') and ISO 8601 time spans (e.g. \'P15DT23H59M59S\') are supported.', parseTimeSpan) .option('-s, --max-cache-size ', 'Override the configured maximum cache size. Files will be removed from the cache until the max cache size is satisfied, using a Least Recently Used search. A value of 0 disables this check.', myParseInt) .option('-d, --delete', 'Delete cached files that match the configured criteria. Without this, the default behavior is to dry-run which will print diagnostic information only.') - .option('-D, --daemon ', 'Daemon mode: execute the cleanup script at the given interval in seconds as a foreground process.', myParseInt); + .option('-D, --daemon ', 'Daemon mode: execute the cleanup script at the given interval in seconds as a foreground process.', myParseInt) + .option('--NODE_CONFIG_DIR=', 'Specify the directory to search for config files. This is equivalent to setting the NODE_CONFIG_DIR environment variable. Without this option, the built-in configuration is used.'); if (!process.argv.slice(2).length) { return program.outputHelp(); diff --git a/lib/helpers.js b/lib/helpers.js index 515c57c..a7d5c9f 100644 --- a/lib/helpers.js +++ b/lib/helpers.js @@ -145,7 +145,8 @@ exports.setLogLevel = function(lvl) { exports.getLogLevel = () => logLevel; exports.initConfigDir = (rootDir) => { - if(!process.env.hasOwnProperty('NODE_CONFIG_DIR')) { + const configDir = process.env['NODE_CONFIG_DIR']; + if(!configDir) { process.env['NODE_CONFIG_DIR'] = path.resolve(rootDir, "config/"); } }; diff --git a/main.js b/main.js index ef79885..c4fadf7 100755 --- a/main.js +++ b/main.js @@ -1,13 +1,15 @@ #!/usr/bin/env node -const cluster = require('cluster'); const helpers = require('./lib/helpers'); +helpers.initConfigDir(__dirname); +const config = require('config'); + +const { Server } = require('./lib'); +const cluster = require('cluster'); const consts = require('./lib/constants'); const program = require('commander'); -const CacheServer = require('./lib').Server; const prompt = require('prompt'); const ip = require('ip'); const VERSION = require('./package.json').version; -const config = require('config'); const fs = require('fs-extra'); const path = require('path'); @@ -29,6 +31,7 @@ const defaultCacheModule = config.get("Cache.defaultModule"); program.description("Unity Cache Server") .version(VERSION) + .allowUnknownOption(true) .option('-p, --port ', 'Specify the server port, only apply to new cache server', myParseInt, consts.DEFAULT_PORT) .option('-c --cache-module [path]', 'Use cache module at specified path', defaultCacheModule) .option('-P, --cache-path [path]', 'Specify the path of the cache directory') @@ -38,7 +41,7 @@ program.description("Unity Cache Server") .option('-m, --monitor-parent-process ', 'Monitor a parent process and exit if it dies', myParseInt, 0) .option('--dump-config', 'Write the active configuration to the console') .option('--save-config [path]', 'Write the active configuration to the specified file and exit. Defaults to ./default.yml') - .option('--NODE_CONFIG_DIR [path]', 'Specify the directory to search for config files. This is equivalent to setting the NODE_CONFIG_DIR environment variable. Without this option, the built-in configuration is used. With this option the default is to look in the current directory for config files.'); + .option('--NODE_CONFIG_DIR=', 'Specify the directory to search for config files. This is equivalent to setting the NODE_CONFIG_DIR environment variable. Without this option, the built-in configuration is used.'); program.parse(process.argv); @@ -137,7 +140,7 @@ Cache.init(cacheOpts) mirror: mirrors }; - server = new CacheServer(Cache, opts); + server = new Server(Cache, opts); if(cluster.isMaster) { helpers.log(consts.LOG_INFO, `Cache Server version ${VERSION}; Cache module is ${program.cacheModule}`); diff --git a/package.json b/package.json index 4668d71..cde67cf 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "unity-cache-server", - "version": "6.0.0-beta.5", + "version": "6.0.0-beta.7", "description": "Unity Cache Server", "main": "lib/index.js", "engines": { From 43a9640727f2af7a6f00c377353a24e214d8b5a5 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 28 Feb 2018 15:54:20 -0600 Subject: [PATCH 84/89] - version bump - minor docs cleanup --- README.md | 2 +- import.js | 3 +++ package.json | 2 +- protocol.md | 29 +++++++++++++++++++---------- 4 files changed, 24 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index f3024d6..9eafb5f 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ > The Unity Cache Server, optimized for networked team environments. ## Overview -This is the officially maintained open-source implementation of the Unity Cache Server, specifically optimized for LAN connected teams. The Unity Cache Server speeds up initial import of project data, as well as platform switching within a project. +This is the officially maintained open-source implementation of the Cache Server, specifically optimized for LAN connected teams. The Cache Server speeds up initial import of project data, as well as platform switching within a project. At present time this open-source repository is maintained separately from the Cache Server available on the Unity website, as well as the version packaged with the Unity installer. It is possible that compatibility with specific versions of Unity will diverge between these separate implementations. Check the release notes for specific compatibility information prior to usage. diff --git a/import.js b/import.js index 4d436fb..590f89b 100644 --- a/import.js +++ b/import.js @@ -1,4 +1,7 @@ +#!/usr/bin/env node const helpers = require('./lib/helpers'); +helpers.initConfigDir(__dirname); + const program = require('commander'); const consts = require('./lib/constants'); const fs = require('fs-extra'); diff --git a/package.json b/package.json index cde67cf..80596e8 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "unity-cache-server", - "version": "6.0.0-beta.7", + "version": "6.0.0-beta.8", "description": "Unity Cache Server", "main": "lib/index.js", "engines": { diff --git a/protocol.md b/protocol.md index 7f31ddb..d00f583 100644 --- a/protocol.md +++ b/protocol.md @@ -1,10 +1,12 @@ # Server Protocol -## version check +## Version check +``` client --- (version ) --> server (using version) client <-- (version ) --- server (echo version if supported or 0) - -## request cached item +``` +## Request cached item +``` client --- 'ga' (id <128bit GUID><128bit HASH>) --> server client <-- '+a' (size ) (id <128bit GUID><128bit HASH>) + size bytes --- server (found in cache) client <-- '-a' (id <128bit GUID><128bit HASH>) --- server (not found in cache) @@ -16,17 +18,24 @@ client <-- '-i' (id <128bit GUID><128bit HASH>) --- server (not found in cache) client --- 'gr' (id <128bit GUID><128bit HASH>) --> server client <-- '+r' (size ) (id <128bit GUID><128bit HASH>) + size bytes --- server (found in cache) client <-- '-r' (id <128bit GUID><128bit HASH>) --- server (not found in cache) - -## start transaction +``` +## Start transaction +``` client --- 'ts' (id <128bit GUID><128bit HASH>) --> server +``` -## put cached item +## Put cached item +``` client --- 'pa' (size ) + size bytes --> server client --- 'pi' (size ) + size bytes --> server client --- 'pr' (size ) + size bytes --> server +``` -## end transaction (ie rename targets to their final names) +## End transaction (i.e. rename targets to their final names) +``` client --- 'te' --> server - -## quit -client --- 'q' --> server \ No newline at end of file +``` +## Quit +``` +client --- 'q' --> server +``` \ No newline at end of file From 16097644d28f9852f0bc7dd4feeda7295887cad3 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Wed, 7 Mar 2018 13:07:13 -0600 Subject: [PATCH 85/89] Clean up tables in the README, both for plain text reading and rendered markdown --- README.md | 92 +++++++++++++++++++++++++++++-------------------------- 1 file changed, 48 insertions(+), 44 deletions(-) diff --git a/README.md b/README.md index 9eafb5f..1f608ef 100644 --- a/README.md +++ b/README.md @@ -40,21 +40,21 @@ npm install github:Unity-Technologies/unity-cache-server -g unity-cache-server [arguments] ``` -## Options -``` - -V, --version output the version number - -p, --port Specify the server port, only apply to new cache server, default is 8126 - -c --cache-module [path] Use cache module at specified path. Default is 'cache_fs' - -P, --cache-path [path] Specify the path of the cache directory. - -l, --log-level Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is 3 - -w, --workers Number of worker threads to spawn. Default is 0 - -m --mirror [host:port] Mirror transactions to another cache server. Can be repeated for multiple mirrors. - -m, --monitor-parent-process Monitor a parent process and exit if it dies - --dump-config Write the active configuration to the console - --save-config [path] Write the active configuration to the specified file and exit. Defaults to ./default.yml - --NODE_CONFIG_DIR= Specify the directory to search for config files. This is equivalent to setting the NODE_CONFIG_DIR environment variable. Without this option, the built-in configuration is used. - -h, --help output usage information -``` +Command | Description +-------------------------------- | ----------- +-V, --version | output the version number +-p, --port | Specify the server port, only apply to new cache server, default is 8126 +-c --cache-module [path] | Use cache module at specified path. Default is 'cache_fs' +-P, --cache-path [path] | Specify the path of the cache directory. +-l, --log-level | Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is 3 +-w, --workers | Number of worker threads to spawn. Default is 0 +-m --mirror [host:port] | Mirror transactions to another cache server. Can be repeated for multiple mirrors. +-m, --monitor-parent-process | Monitor a parent process and exit if it dies +--dump-config | Write the active configuration to the console +--save-config [path] | Write the active configuration to the specified file and exit. Defaults to ./default.yml +--NODE_CONFIG_DIR= | Specify the directory to search for config files. This is equivalent to setting the NODE_CONFIG_DIR environment variable. Without this option, the built-in configuration is used. +-h, --help | output usage information + ## Configuration files `config/default.yml` contains various configuration values for the cache modules (see below) and other features. The config system is based on the [node-config](`https://github.com/lorenwest/node-config/wiki/Configuration-Files`) module. Refer to the documentation in that package for tips on how to manage environment specific config files. By default, running `unity-cache-server` will use the built-in configuration file. To start using a custom config file, save the current config to a new file and then use the `--NODE_CONFIG_DIR` option to override the location where the cache server will look for your config file(s). @@ -82,11 +82,12 @@ A simple, efficient file system backed cache. #### Usage `--cache-module cache_fs` #### Options -option | default | description ---------- | ----------- | ----------- -cachePath | `.cache_fs` | Path to cache directory -cleanupOptions.expireTimeSpan | `P30D` | [ASP.NET](https://msdn.microsoft.com/en-us/library/se73z7b9(v=vs.110).aspx) or [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601#Time_intervals) style timespan. Cache files that have not been accessed within this timespan will be eligible for cleanup/removal. The [moment](https://momentjs.com/docs/#/durations/) library is used to parse durations - more information on duration syntax can be found in the library documentation. -cleanupOptions.maxCacheSize | 0 | Size in bytes to limit overall cache disk utilization. The cleanup script will consider files for removal in least-recently-used order to bring the total disk utilization under this threshold. 0 disables this cleanup feature. +Option | Default | Description +------------------------------- | ----------- | ----------- +cachePath | `.cache_fs` | Path to cache directory +cleanupOptions.expireTimeSpan | `P30D` | [ASP.NET](https://msdn.microsoft.com/en-us/library/se73z7b9(v=vs.110).aspx) or [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601#Time_intervals) style timespan. Cache files that have not been accessed within this timespan will be eligible for cleanup/removal. The [moment](https://momentjs.com/docs/#/durations/) library is used to parse durations - more information on duration syntax can be found in the library documentation. +cleanupOptions.maxCacheSize | 0 | Size in bytes to limit overall cache disk utilization. The cleanup script will consider files for removal in least-recently-used order to bring the total disk utilization under this threshold. 0 disables this cleanup feature. + #### Notes * This module is backwards compatible with v5.x Cache Server directories * Supports worker threads (`--workers` option) @@ -95,15 +96,16 @@ A high performance, fully in-memory LRU cache. #### Usage `--cache-module cache_ram` #### Options -option | default | description ---------- | ----------- | ----------- -pageSize | 100000000 | Smallest memory allocation to make, in bytes. i.e. the cache will grow in increments of pageSize. -maxPageCount | 10 | Maximum number of pages allowed in the cache. This combined with `pageSize` effectively limits the overall memory footprint of the cache. When this threshold is reached, an LRU mechanism will kick in to find room for new files. -minFreeBlockSize | 1024 | Smallest allocation unit within a page. Can be lowered for smaller projects. -cachePath | `.cache_ram` | Path to cache directory. Dirty memory pages are saved to disk periodically in this directory, and loaded at startup. -persistence | true | Enable saving and loading of page files to disk. If `false`, the cache will be empty at every restart. -persistenceOptions.autosave | true | `true` to periodically save dirty memory pages automatically; `false` to disable. If `false`, pages will only be saved when the cache server is stopped with the `q` console command or with SIGTERM. -persistenceOptions.autosaveInterval | 10000 | Minimum interval in milliseconds to save dirty pages. +Option | Default | Description +----------------------------------- | ------------ | ----------- +pageSize | 100000000 | Smallest memory allocation to make, in bytes. i.e. the cache will grow in increments of pageSize. +maxPageCount | 10 | Maximum number of pages allowed in the cache. This combined with `pageSize` effectively limits the overall memory footprint of the cache. When this threshold is reached, an LRU mechanism will kick in to find room for new files. +minFreeBlockSize | 1024 | Smallest allocation unit within a page. Can be lowered for smaller projects. +cachePath | `.cache_ram` | Path to cache directory. Dirty memory pages are saved to disk periodically in this directory, and loaded at startup. +persistence | true | Enable saving and loading of page files to disk. If `false`, the cache will be empty at every restart. +persistenceOptions.autosave | true | `true` to periodically save dirty memory pages automatically; `false` to disable. If `false`, pages will only be saved when the cache server is stopped with the `q` console command or with SIGTERM. +persistenceOptions.autosaveInterval | 10000 | Minimum interval in milliseconds to save dirty pages. + #### Notes * Does not support worker threads ## Cache Cleanup @@ -112,27 +114,29 @@ For performance and simplicity reasons, unlike prior versions, the cache_fs modu `unity-cache-server-cleanup [option]` or `node cleanup.js [options]` -#### Options -``` -V, --version output the version number - -c --cache-module [path] Use cache module at specified path - -P, --cache-path [path] Specify the path of the cache directory - -l, --log-level Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug) - -e, --expire-time-span Override the configured file expiration timespan. Both ASP.NET style time spans (days.minutes:hours:seconds, e.g. '15.23:59:59') and ISO 8601 time spans (e.g. 'P15DT23H59M59S') are supported. - -s, --max-cache-size Override the configured maximum cache size. Files will be removed from the cache until the max cache size is satisfied, using a Least Recently Used search. A value of 0 disables this check. - -d, --delete Delete cached files that match the configured criteria. Without this, the default behavior is to dry-run which will print diagnostic information only. - -D, --daemon Daemon mode: execute the cleanup script at the given interval in seconds as a foreground process. - -h, --help output usage information -``` + +Command | Description +-------------------------------- | ----------- +-V, --version | output the version number +-c --cache-module [path] | Use cache module at specified path +-P, --cache-path [path] | Specify the path of the cache directory +-l, --log-level | Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug) +-e, --expire-time-span | Override the configured file expiration timespan. Both ASP.NET style time spans (days.minutes:hours:seconds, e.g. '15.23:59:59') and ISO 8601 time spans (e.g. 'P15DT23H59M59S') are supported. +-s, --max-cache-size | Override the configured maximum cache size. Files will be removed from the cache until the max cache size is satisfied, using a Least Recently Used search. A value of 0 disables this check. +-d, --delete | Delete cached files that match the configured criteria. Without this, the default behavior is to dry-run which will print diagnostic information only. +-D, --daemon | Daemon mode: execute the cleanup script at the given interval in seconds as a foreground process. +-h, --help | output usage information + #### Notes * Only the cache_fs module supports cache cleanup (cache_ram does not) ## Mirroring #### Usage Use the `--mirror [host:port]` option to relay all upload transactions to one or more Cache Server hosts (repeat the option for each host). There are checks in place to prevent self-mirroring, but beyond that it would be easy to create infinite transaction loops so use with care. #### Options -option | default | description ---------- | ----------- | ----------- -queueProcessDelay | 2000 | Each transaction from a client is queued after completion. The `queueProcessDelay` (ms) will delay the start of processing the queue, from when the first transaction is added to an empty queue. It's a good idea to keep this value at or above the default value to avoid possible I/O race conditions with recently completed transactions. -connectionIdleTimeout | 10000 | Keep connections to remote mirror hosts alive for this length in ms, after processing a queue of transactions. Queue processing is 'bursty' so this should be calibrated to minimize the overhead of connection setup & tear-down. +Option | Default | Description +--------------------- | ----------- | ----------- +queueProcessDelay | 2000 | Each transaction from a client is queued after completion. The `queueProcessDelay` (ms) will delay the start of processing the queue, from when the first transaction is added to an empty queue. It's a good idea to keep this value at or above the default value to avoid possible I/O race conditions with recently completed transactions. +connectionIdleTimeout | 10000 | Keep connections to remote mirror hosts alive for this length in ms, after processing a queue of transactions. Queue processing is 'bursty' so this should be calibrated to minimize the overhead of connection setup & tear-down. ## Unity project Library Importer Tools are provided to quickly seed a Cache Server from a fully imported Unity project (a project with a Library folder). From a83e675497d554cd6df6e270da2cc052e89f450a Mon Sep 17 00:00:00 2001 From: DonGloverUnity <32549001+DonGloverUnity@users.noreply.github.com> Date: Tue, 13 Mar 2018 13:44:52 -0700 Subject: [PATCH 86/89] Clean up of readme doc --- README.md | 181 +++++++++++++++++++++++++++++++++++------------------- 1 file changed, 117 insertions(+), 64 deletions(-) diff --git a/README.md b/README.md index 1f608ef..589763f 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,11 @@ -# unity-cache-server [![Build Status](https://travis-ci.org/Unity-Technologies/unity-cache-server.svg?branch=master)](https://travis-ci.org/Unity-Technologies/unity-cache-server) [![Coverage Status](https://coveralls.io/repos/github/Unity-Technologies/unity-cache-server/badge.svg)](https://coveralls.io/github/Unity-Technologies/unity-cache-server) -> The Unity Cache Server, optimized for networked team environments. +# Cache Server v6.0 [![Build Status](https://travis-ci.org/Unity-Technologies/unity-cache-server.svg?branch=master)](https://travis-ci.org/Unity-Technologies/unity-cache-server) [![Coverage Status](https://coveralls.io/repos/github/Unity-Technologies/unity-cache-server/badge.svg)](https://coveralls.io/github/Unity-Technologies/unity-cache-server) +> The Unity Cache Server, optimized for locally networked team environments. ## Overview -This is the officially maintained open-source implementation of the Cache Server, specifically optimized for LAN connected teams. The Cache Server speeds up initial import of project data, as well as platform switching within a project. -At present time this open-source repository is maintained separately from the Cache Server available on the Unity website, as well as the version packaged with the Unity installer. It is possible that compatibility with specific versions of Unity will diverge between these separate implementations. Check the release notes for specific compatibility information prior to usage. +This repository contains an open-source implementation of the Cache Server. This stand-alone version of Cache Server is specifically optimized for LAN connected teams. The Cache Server speeds up initial import of project data, as well as platform switching within a project. + +This open-source repository is maintained separately from the Cache Server available on the Unity website and the implementation of the Cache Server that is packaged with the Unity installer. #### Table of Contents * [Server Setup](#server-setup) @@ -24,131 +25,183 @@ At present time this open-source repository is maintained separately from the Ca * [License](#license) ## Server Setup -Download and install the latest LTS version of node from the [Node.JS website](`https://nodejs.org/en/download/`). + +Download and install the latest LTS version of Node.js from the [Node.JS website](https://nodejs.org/en/download/). #### Install from npm registry + ```bash npm install unity-cache-server -g ``` + #### Install from GitHub source + ```bash npm install github:Unity-Technologies/unity-cache-server -g ``` + ## Usage + >Default options are suitable for quickly starting a cache server, with a default cache location of `.cache_fs` + ```bash unity-cache-server [arguments] ``` Command | Description -------------------------------- | ----------- --V, --version | output the version number --p, --port | Specify the server port, only apply to new cache server, default is 8126 --c --cache-module [path] | Use cache module at specified path. Default is 'cache_fs' --P, --cache-path [path] | Specify the path of the cache directory. --l, --log-level | Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug). Default is 3 --w, --workers | Number of worker threads to spawn. Default is 0 --m --mirror [host:port] | Mirror transactions to another cache server. Can be repeated for multiple mirrors. --m, --monitor-parent-process | Monitor a parent process and exit if it dies ---dump-config | Write the active configuration to the console ---save-config [path] | Write the active configuration to the specified file and exit. Defaults to ./default.yml ---NODE_CONFIG_DIR= | Specify the directory to search for config files. This is equivalent to setting the NODE_CONFIG_DIR environment variable. Without this option, the built-in configuration is used. --h, --help | output usage information +`-V`, `--version` | Show the version number of the Cache Server. +`-p`, `--port ` | The port on which the Cache Server listens. The default value is 8126. +`-c`, `--cache-module [path]` | The path to cache module. The Default path is 'cache_fs'. +`-P`, `--cache-path [path]` | The path of the cache directory. +`-l`, `--log-level ` | The level of log verbosity. Valid values are 0 (silent) through 5 (debug). The default is 3. +`-w`, `--workers ` | The number of worker threads to spawn. The default is 0. +`-m`, `--mirror [host:port]` | Mirror transactions to another cache server. Repeat this option ofr multiple mirrors. +`-m`, `--monitor-parent-process ` | Monitor a parent process and exit if it dies. +`--dump-config` | Write the active configuration to the console. +`--save-config [path]` | Write the active configuration to the specified file and exit. Defaults to `./default.yml`. +`--NODE_CONFIG_DIR=` | The directory to search for config files. This is equivalent to setting the `NODE_CONFIG_DIR` environment variable. If not specified, the built-in configuration is used. +`-h`, `--help` | Show usage information. ## Configuration files -`config/default.yml` contains various configuration values for the cache modules (see below) and other features. The config system is based on the [node-config](`https://github.com/lorenwest/node-config/wiki/Configuration-Files`) module. Refer to the documentation in that package for tips on how to manage environment specific config files. -By default, running `unity-cache-server` will use the built-in configuration file. To start using a custom config file, save the current config to a new file and then use the `--NODE_CONFIG_DIR` option to override the location where the cache server will look for your config file(s). + +The `config/default.yml` file contains configuration values for the cache modules (see below) and other features. The config system is based on the node-config module. For additional information on how to manage environment specific config files, see the [Configuration Files](https://github.com/lorenwest/node-config/wiki/Configuration-Files) documentation on the node-config GitHub repository. + +By default, running `unity-cache-server` uses the built-in configuration file. To start Cache Server using a custom config file, save the current config to a new file and then use the `--NODE_CONFIG_DIR` option to override the location where the cache server will look for your config file(s). + #### Examples (Mac/Linux) + 1) `mkdir config` -2) `unity-cache-server --save-config config/default.yml` +1) `unity-cache-server --save-config config/default.yml` 3) `unity-cache-server --NODE_CONFIG_DIR=config` -You can also have multiple configuration files based on environment: +You can have multiple configuration files based on environment: + 1) `export NODE_ENV=development` 2) `unity-cache-server --save-config config/local-development.yml` -To dump the current config to the console +To dump the current config to the console, run the following command: + `unity-cache-server --dump-config` ## Client Configuration + The [Cache Server](https://docs.unity3d.com/Manual/CacheServer.html) section of the Unity Manual contains detailed information on connecting clients to remote Cache Servers. ## Cache Modules -Two distinct caching mechanisms are provided: a simple file system based cache, and a fully memory (RAM) backed cache. The file system module is the default and suitable for most applications. The RAM cache module provides optimal performance but requires a sufficient amount of physical RAM in the server system. + +Cache Server supports two caching mechanisms: + +* A file system based cache. +* A fully memory (RAM) backed cache. + +The file system module is the default and suitable for most applications. The RAM cache module provides optimal performance but requires a sufficient amount of physical RAM in the server system. Typically, this is two to three times size of your Project's Library folder on disk. Configuration options for all modules are set in the `config/default.yml` file. + ### cache_fs (default) + A simple, efficient file system backed cache. + #### Usage + `--cache-module cache_fs` + #### Options + Option | Default | Description ------------------------------- | ----------- | ----------- -cachePath | `.cache_fs` | Path to cache directory -cleanupOptions.expireTimeSpan | `P30D` | [ASP.NET](https://msdn.microsoft.com/en-us/library/se73z7b9(v=vs.110).aspx) or [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601#Time_intervals) style timespan. Cache files that have not been accessed within this timespan will be eligible for cleanup/removal. The [moment](https://momentjs.com/docs/#/durations/) library is used to parse durations - more information on duration syntax can be found in the library documentation. -cleanupOptions.maxCacheSize | 0 | Size in bytes to limit overall cache disk utilization. The cleanup script will consider files for removal in least-recently-used order to bring the total disk utilization under this threshold. 0 disables this cleanup feature. - -#### Notes -* This module is backwards compatible with v5.x Cache Server directories -* Supports worker threads (`--workers` option) -### cache_ram +cachePath | `.cache_fs` | The path to the cache directory. +cleanupOptions.expireTimeSpan | `P30D` | [ASP.NET](https://msdn.microsoft.com/en-us/library/se73z7b9(v=vs.110).aspx) or [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601#Time_intervals) style timespan. Cache files that have not been accessed within this timespan are eligible for cleanup. For more information on duration syntax, see the [moment](https://momentjs.com/docs/#/durations/) library documentation. +cleanupOptions.maxCacheSize | 0 | The maximum size, in bytes, of the cache on disk. To bring the total disk utilization under this threshold, the cleanup script considers files for removal in least recently used order. Set the value to zero (0) to disable the cleanup feature. + +### Notes + +* cache_fs is backwards compatible with v5.x Cache Server directories. +* Supports worker threads using the `--workers` option. +* When you run the cleanup script, the value of the `expireTimeSpan` option is used to determine which file to delete files. If `maxCacheSize` is specified the script checks whether the cache exceeds the value of `maxCacheSize`. If it does, the script deletes files in least-recently-used order until the cache no longer exceed maxCacheSize. + +## cache_ram + A high performance, fully in-memory LRU cache. -#### Usage + +### Usage + `--cache-module cache_ram` -#### Options + +### Options + Option | Default | Description ----------------------------------- | ------------ | ----------- -pageSize | 100000000 | Smallest memory allocation to make, in bytes. i.e. the cache will grow in increments of pageSize. -maxPageCount | 10 | Maximum number of pages allowed in the cache. This combined with `pageSize` effectively limits the overall memory footprint of the cache. When this threshold is reached, an LRU mechanism will kick in to find room for new files. -minFreeBlockSize | 1024 | Smallest allocation unit within a page. Can be lowered for smaller projects. -cachePath | `.cache_ram` | Path to cache directory. Dirty memory pages are saved to disk periodically in this directory, and loaded at startup. -persistence | true | Enable saving and loading of page files to disk. If `false`, the cache will be empty at every restart. -persistenceOptions.autosave | true | `true` to periodically save dirty memory pages automatically; `false` to disable. If `false`, pages will only be saved when the cache server is stopped with the `q` console command or with SIGTERM. -persistenceOptions.autosaveInterval | 10000 | Minimum interval in milliseconds to save dirty pages. - -#### Notes -* Does not support worker threads +pageSize | 100000000 | The page size, in bytes, used to grow the cache. +maxPageCount | 10 | The maximum number of pages to allocate in the cache. The combination of `pageSize` and `maxPageCount` limits the overall memory footprint of the cache. When this threshold is reached, memory recovered using a Least Recently Used (LRU) algorithm. +minFreeBlockSize | 1024 | The size of the minimum allocation unit, in bytes, within a page. You can specify a lower value for smaller projects. +cachePath | `.cache_ram` | The path to the cache directory. Dirty memory pages are saved to disk periodically in this directory, and loaded at startup. +persistence | true | Enable saving and loading of page files to disk. If `false`, the cache is emptied during restart. +persistenceOptions.autosave | true | When set to `true`, automatically save changed memory pages; set to `false` to disable. If `false`, pages are only saved when the cache server is stopped with the `q` console command or with upon a SIGTERM. +persistenceOptions.autosaveInterval | 10000 | The frequency, in milliseconds, to save page that have changed. + +### Notes + +* Does not support worker threads. + ## Cache Cleanup -For performance and simplicity reasons, unlike prior versions, the cache_fs module does NOT operate as an LRU cache and does not enforce overall cache size restrictions. To manage disk usage, a separate cleanup script is provided that can either be run periodically or in "daemon" mode to automatically run at a given time interval. + +Due to performance considerations, the `cache_fs` module shipped with Cache Server v6.0 does NOT operate as an LRU cache and does not enforce overall cache size restrictions. This is a change from previous versions of Cache Server. To manage disk usage, a separate cleanup script is provided that can either be run periodically or in "daemon" mode to automatically run at a given time interval. + ### Usage + `unity-cache-server-cleanup [option]` or `node cleanup.js [options]` Command | Description -------------------------------- | ----------- --V, --version | output the version number --c --cache-module [path] | Use cache module at specified path --P, --cache-path [path] | Specify the path of the cache directory --l, --log-level | Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug) --e, --expire-time-span | Override the configured file expiration timespan. Both ASP.NET style time spans (days.minutes:hours:seconds, e.g. '15.23:59:59') and ISO 8601 time spans (e.g. 'P15DT23H59M59S') are supported. +-V, --version | Show the version number of cleanup script. +-c --cache-module [path] | The path to the cache module. +-P, --cache-path [path] | The path of the cache directory. +-l, --log-level | The level of log verbosity. Valid values are 0 (silent) through 5 (debug) +-e, --expire-time-span | Override the configured file expiration timespan. Both ASP.NET style time spans (days.minutes:hours:seconds, for example '15.23:59:59') and ISO 8601 time spans (For example, 'P15DT23H59M59S') are supported. -s, --max-cache-size | Override the configured maximum cache size. Files will be removed from the cache until the max cache size is satisfied, using a Least Recently Used search. A value of 0 disables this check. -d, --delete | Delete cached files that match the configured criteria. Without this, the default behavior is to dry-run which will print diagnostic information only. --D, --daemon | Daemon mode: execute the cleanup script at the given interval in seconds as a foreground process. --h, --help | output usage information +-D, --daemon | Daemon mode. Execute the cleanup script at the given interval in seconds as a foreground process. +-h, --help | Show usage information. + +### Notes + +* Only the cache_fs module supports cache cleanup (cache_ram does not). -#### Notes -* Only the cache_fs module supports cache cleanup (cache_ram does not) ## Mirroring -#### Usage -Use the `--mirror [host:port]` option to relay all upload transactions to one or more Cache Server hosts (repeat the option for each host). There are checks in place to prevent self-mirroring, but beyond that it would be easy to create infinite transaction loops so use with care. -#### Options + +### Usage + +Use the `--mirror [host:port]` option to relay all upload transactions to one or more Cache Server hosts (repeat the option for each host). + +__Important__: Use the `--mirror` option cautiously. There are checks in place to prevent self-mirroring, but it is still possible to create infinite transaction loops. + +### Options + Option | Default | Description --------------------- | ----------- | ----------- -queueProcessDelay | 2000 | Each transaction from a client is queued after completion. The `queueProcessDelay` (ms) will delay the start of processing the queue, from when the first transaction is added to an empty queue. It's a good idea to keep this value at or above the default value to avoid possible I/O race conditions with recently completed transactions. -connectionIdleTimeout | 10000 | Keep connections to remote mirror hosts alive for this length in ms, after processing a queue of transactions. Queue processing is 'bursty' so this should be calibrated to minimize the overhead of connection setup & tear-down. +queueProcessDelay | 2000 | The period, in milliseconds, to delay the start of processing the queue, from when the first transaction is added to an empty queue. Each transaction from a client is queued after completion. It's a good idea to keep this value at or above the default value to avoid possible I/O race conditions with recently completed transactions. +connectionIdleTimeout | 10000 | The period, in milliseconds, to keep connections to remote mirror hosts alive, after processing a queue of transactions. Queue processing is bursty. To minimize the overhead of connection setup and teardown, calibrate this value for your environment. ## Unity project Library Importer + Tools are provided to quickly seed a Cache Server from a fully imported Unity project (a project with a Library folder). -#### Steps to Import -1) Add the [CacheServerTransactionImporter.cs](./Unity/CacheServerTransactionExporter.cs) script to the Unity project you wish to export. -2) Select the Menu item _Cache Server Utilities -> Export Transactions_ to save an export data file in .json format. Alternatively, with the script added to your project, you can run Unity in batchmode and [execute the static method](https://docs.unity3d.com/Manual/CommandLineArguments.html) `CacheServerTransactionExporter.ExportTransactions([path])` where `path` is the full path and filename to export. + +### Steps to Import a Project + +1) Add the [CacheServerTransactionImporter.cs](./Unity/CacheServerTransactionExporter.cs) script to the Unity project to export. +2) To save an export data file in .json format, in the Unity Editor, select __Cache Server Utilities__ > __Export Transactions__ . Alternatively, with the script added to your project, you can run Unity in batch mode using the `-executeMethod` option. For the ``, use `CacheServerTransactionExporter.ExportTransactions([path])` where `path` is the full path and filename to export. For more information on running the Editor in silent mode and the `-executeMethod` option, see [Command line arguments](https://docs.unity3d.com/Manual/CommandLineArguments.html). 3) Run the import utility to begin the import process: `unity-cache-server-import [server:port]` -#### Notes -* On very large projects, Unity may appear to freeze while generating the exported JSON data. + +### Notes + +* On very large projects, Unity might appear to freeze while generating the exported JSON data. * The default `server:port` is `localhost:8126` * The import process connects and uploads to the target host like any other Unity client, so it should be safe in a production environment. -* Files will be skipped if any changes were detected between when the JSON data was exported and when the importer tool is executed. +* Files are skipped if any changes were detected between when the JSON data was exported and when the importer tool is executed. ## Contributors Contributions are welcome! Before submitting pull requests please note the Submission of Contributions section of the Apache 2.0 license. From 12c0a7a3998db00befc4a0b042a6275cb2d36fc1 Mon Sep 17 00:00:00 2001 From: DonGloverUnity <32549001+DonGloverUnity@users.noreply.github.com> Date: Wed, 14 Mar 2018 09:09:03 -0700 Subject: [PATCH 87/89] Updates based on review --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 589763f..39294ba 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,7 @@ This open-source repository is maintained separately from the Cache Server avail ## Server Setup -Download and install the latest LTS version of Node.js from the [Node.JS website](https://nodejs.org/en/download/). +Download and install LTS version 8.10.0 of Node.js from the [Node.JS website](https://nodejs.org/en/download/). #### Install from npm registry @@ -119,7 +119,7 @@ cleanupOptions.maxCacheSize | 0 | The maximum size, in bytes, of t * cache_fs is backwards compatible with v5.x Cache Server directories. * Supports worker threads using the `--workers` option. -* When you run the cleanup script, the value of the `expireTimeSpan` option is used to determine which file to delete files. If `maxCacheSize` is specified the script checks whether the cache exceeds the value of `maxCacheSize`. If it does, the script deletes files in least-recently-used order until the cache no longer exceed maxCacheSize. +* When you run the cleanup script, the value of the `expireTimeSpan` option is used to determine which files to delete. If `maxCacheSize` is specified the script checks whether the cache exceeds the value of `maxCacheSize`. If it does, the script deletes files in least-recently-used order until the cache no longer exceed maxCacheSize. ## cache_ram @@ -134,12 +134,12 @@ A high performance, fully in-memory LRU cache. Option | Default | Description ----------------------------------- | ------------ | ----------- pageSize | 100000000 | The page size, in bytes, used to grow the cache. -maxPageCount | 10 | The maximum number of pages to allocate in the cache. The combination of `pageSize` and `maxPageCount` limits the overall memory footprint of the cache. When this threshold is reached, memory recovered using a Least Recently Used (LRU) algorithm. +maxPageCount | 10 | The maximum number of pages to allocate in the cache. The combination of `pageSize` and `maxPageCount` limits the overall memory footprint of the cache. When this threshold is reached, memory is recovered using a Least Recently Used (LRU) algorithm. minFreeBlockSize | 1024 | The size of the minimum allocation unit, in bytes, within a page. You can specify a lower value for smaller projects. cachePath | `.cache_ram` | The path to the cache directory. Dirty memory pages are saved to disk periodically in this directory, and loaded at startup. persistence | true | Enable saving and loading of page files to disk. If `false`, the cache is emptied during restart. persistenceOptions.autosave | true | When set to `true`, automatically save changed memory pages; set to `false` to disable. If `false`, pages are only saved when the cache server is stopped with the `q` console command or with upon a SIGTERM. -persistenceOptions.autosaveInterval | 10000 | The frequency, in milliseconds, to save page that have changed. +persistenceOptions.autosaveInterval | 10000 | The frequency, in milliseconds, to save pages that have changed. ### Notes From 997337ddc1f957c46d9ca1c9f4af93bbcc1b1eb3 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 16 Mar 2018 13:42:07 -0500 Subject: [PATCH 88/89] Update to Node v8.10.0 (latest LTS). Added a unit test for cache paths with/without a trailing slash --- .nvmrc | 2 +- main.js | 22 ---------------------- package-lock.json | 2 +- package.json | 2 +- test/cache_base.js | 16 +++++++++++++++- 5 files changed, 18 insertions(+), 26 deletions(-) diff --git a/.nvmrc b/.nvmrc index 641c7df..6fe1005 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -v8.9.4 +v8.10.0 diff --git a/main.js b/main.js index c4fadf7..71c2eac 100755 --- a/main.js +++ b/main.js @@ -38,7 +38,6 @@ program.description("Unity Cache Server") .option('-l, --log-level ', 'Specify the level of log verbosity. Valid values are 0 (silent) through 5 (debug)', myParseInt, consts.DEFAULT_LOG_LEVEL) .option('-w, --workers ', 'Number of worker threads to spawn', zeroOrMore, consts.DEFAULT_WORKERS) .option('-m --mirror [host:port]', 'Mirror transactions to another cache server. Can be repeated for multiple mirrors', collect, []) - .option('-m, --monitor-parent-process ', 'Monitor a parent process and exit if it dies', myParseInt, 0) .option('--dump-config', 'Write the active configuration to the console') .option('--save-config [path]', 'Write the active configuration to the specified file and exit. Defaults to ./default.yml') .option('--NODE_CONFIG_DIR=', 'Specify the directory to search for config files. This is equivalent to setting the NODE_CONFIG_DIR environment variable. Without this option, the built-in configuration is used.'); @@ -73,27 +72,6 @@ if(program.saveConfig || program.dumpConfig) { helpers.setLogLevel(program.logLevel); helpers.setLogger(program.workers > 0 ? helpers.defaultClusterLogger : helpers.defaultLogger); -if (program.monitorParentProcess > 0) { - function monitor() { - function is_running(pid) { - try { - return process.kill(pid, 0) - } - catch (e) { - return e.code === 'EPERM' - } - } - - if (!is_running(program.monitorParentProcess)) { - helpers.log(consts.LOG_INFO, "monitored parent process has died"); - process.exit(1); - } - setTimeout(monitor, 1000); - } - - monitor(); -} - const errHandler = function () { helpers.log(consts.LOG_ERR, "Unable to start Cache Server"); process.exit(1); diff --git a/package-lock.json b/package-lock.json index d609bc8..96c1098 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "unity-cache-server", - "version": "6.0.0-beta.1", + "version": "6.0.0-beta.8", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index 80596e8..1a9080a 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "description": "Unity Cache Server", "main": "lib/index.js", "engines": { - "node": "^8.9.4" + "node": "^8.10.0" }, "directories": { "test": "test" diff --git a/test/cache_base.js b/test/cache_base.js index b0b4cb6..fadf300 100644 --- a/test/cache_base.js +++ b/test/cache_base.js @@ -60,13 +60,27 @@ describe("Cache: Base Class", () => { assert.strictEqual(cache._cachePath, opts.cachePath); }); - it("should return a subdirectory path relative to the app root if cachePath is not an abosolute path", () => { + it("should return a subdirectory path relative to the app root if cachePath is not an absolute path", () => { cache._optionOverrides = { cachePath: "abc123" }; assert.strictEqual(cache._cachePath, path.join(path.dirname(require.main.filename), "abc123")); }); + + it("should handle a trailing slash in the cache path", () => { + let noTrailingSlash = "/dir/without/trailing/slash"; + let withTrailingSlash = "/dir/without/trailing/slash/"; + + cache._optionOverrides = { + cachePath: noTrailingSlash + }; + + assert.strictEqual(cache._cachePath, noTrailingSlash); + + cache._optionOverrides.cachePath = withTrailingSlash; + assert.strictEqual(cache._cachePath, withTrailingSlash); + }); }); describe("init", () => { From b79e5f514be03c7f42551fa27185224ee05c1375 Mon Sep 17 00:00:00 2001 From: Stephen Palmer Date: Fri, 16 Mar 2018 13:42:25 -0500 Subject: [PATCH 89/89] Version bump to 6.0.0 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 1a9080a..e3f4e6e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "unity-cache-server", - "version": "6.0.0-beta.8", + "version": "6.0.0", "description": "Unity Cache Server", "main": "lib/index.js", "engines": {