This repository was archived by the owner on Jan 9, 2023. It is now read-only.

Commit 731d478: Merge pull request #25 from Unity-Technologies/dev/v6.0.1 (Dev/v6.0.1)

stephen-palmer authored Apr 13, 2018
2 parents 284a1cc + 7c809a4

Showing 25 changed files with 276 additions and 204 deletions.
3 changes: 1 addition & 2 deletions .gitignore
@@ -2,8 +2,7 @@ cache/
 node_modules/
 .coveralls.yml
 !lib/cache
-.cache_ram*/
-.cache_fs*/
 .nyc_output/
 coverage/
 local-production.yml
+.cache*/
2 changes: 1 addition & 1 deletion .nvmrc
@@ -1 +1 @@
-v8.10.0
+v8.11.1
51 changes: 26 additions & 25 deletions cleanup.js
@@ -39,10 +39,6 @@ program.description("Unity Cache Server - Cache Cleanup\n\n Removes old files f
     .option('-D, --daemon <interval>', 'Daemon mode: execute the cleanup script at the given interval in seconds as a foreground process.', myParseInt)
     .option('--NODE_CONFIG_DIR=<path>', 'Specify the directory to search for config files. This is equivalent to setting the NODE_CONFIG_DIR environment variable. Without this option, the built-in configuration is used.');
 
-if (!process.argv.slice(2).length) {
-    return program.outputHelp();
-}
-
 program.parse(process.argv);
 
 helpers.setLogLevel(program.logLevel);
@@ -56,7 +52,7 @@ if(!CacheModule.properties.cleanup) {
 
 const cache = new CacheModule();
 
-let cacheOpts = { cleanupOptions: {} };
+const cacheOpts = { cleanupOptions: {} };
 
 if(program.cachePath !== null) {
     cacheOpts.cachePath = program.cachePath;
@@ -71,37 +67,42 @@ if(program.hasOwnProperty('maxCacheSize')) {
 }
 
 const dryRun = !program.delete;
+const logLevel = helpers.getLogLevel();
 
 cache._options = cacheOpts;
 helpers.log(consts.LOG_INFO, `Cache path is ${cache._cachePath}`);
 
-const msg = `Gathering cache files for expiration`;
-let spinner = null;
-
-if(helpers.getLogLevel() < consts.LOG_DBG && helpers.getLogLevel() > consts.LOG_NONE) {
-    spinner = ora({color: 'white'});
-}
-
-cache.on('cleanup_search_progress', data => {
-    let txt = `${msg} (${data.deleteCount} of ${data.cacheCount} files, ${filesize(data.deleteSize)})`;
-    spinner ? spinner.text = txt : helpers.log(consts.LOG_DBG, txt);
-});
-
-cache.on('cleanup_search_finish', () => {
-    if(spinner) spinner.stop();
-});
-
-cache.on('cleanup_delete_item', item => {
-    helpers.log(consts.LOG_INFO, `Deleted ${item}`);
-});
+cache.on('cleanup_delete_item', item => helpers.log(consts.LOG_DBG, item));
 
 cache.on('cleanup_delete_finish', data => {
-    let pct = data.cacheSize > 0 ? (data.deleteSize/data.cacheSize).toPrecision(2) * 100 : 0;
+    const pct = data.cacheSize > 0 ? (data.deleteSize/data.cacheSize).toPrecision(2) * 100 : 0;
     helpers.log(consts.LOG_INFO, `Found ${data.deleteCount} expired files of ${data.cacheCount}. ${filesize(data.deleteSize)} of ${filesize(data.cacheSize)} (${pct}%).`);
     if(dryRun) {
         helpers.log(consts.LOG_INFO, "Nothing deleted; run with --delete to remove expired files from the cache.");
     }
 });
 
+const msg = 'Gathering cache files for expiration';
+let spinner = null;
+
+if(logLevel < consts.LOG_DBG && logLevel >= consts.LOG_INFO) {
+    spinner = ora({color: 'white'});
+
+    cache.on('cleanup_search_progress', data => {
+        spinner.text = `${msg} (${data.deleteCount} of ${data.cacheCount} files, ${filesize(data.deleteSize)})`;
+    });
+
+    cache.on('cleanup_search_finish', () => {
+        spinner.stop();
+    });
+
+} else if(logLevel === consts.LOG_DBG) {
+    cache.on('cleanup_search_progress', data => {
+        const txt = `${msg} (${data.deleteCount} of ${data.cacheCount} files, ${filesize(data.deleteSize)})`;
+        helpers.log(consts.LOG_DBG, txt);
+    });
+}
+
 function doCleanup() {
     if (spinner) spinner.start(msg);
     cache.cleanup(dryRun)
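
Note: the cleanup.js change above replaces a single progress handler that branched on spinner presence with handlers registered once for the active log level. A minimal sketch of that pattern, using a plain EventEmitter and ad-hoc log-level constants as stand-ins for the project's cache module and consts (the stand-ins are assumptions, not the project's actual API):

    const ora = require('ora');
    const EventEmitter = require('events');

    // Stand-ins for the real cache module and log-level constants.
    const LOG_DBG = 4, LOG_INFO = 3;
    const logLevel = LOG_INFO;
    const cache = new EventEmitter();

    let spinner = null;
    if (logLevel < LOG_DBG && logLevel >= LOG_INFO) {
        // Interactive levels: the spinner owns the progress line.
        spinner = ora({color: 'white'});
        cache.on('progress', txt => { spinner.text = txt; });
        cache.on('finish', () => spinner.stop());
    } else if (logLevel === LOG_DBG) {
        // Debug level: emit plain log lines instead of a spinner.
        cache.on('progress', txt => console.log(txt));
    }

    if (spinner) spinner.start('Gathering cache files');
    cache.emit('progress', 'Gathering cache files (1 of 2 files)');
    cache.emit('finish');
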
12 changes: 6 additions & 6 deletions import.js
@@ -36,13 +36,13 @@ program.parse(process.argv);
 
 async function importTransactionFile(filePath, addressString, defaultPort) {
 
-    let address = await helpers.parseAndValidateAddressString(addressString, defaultPort);
+    const address = await helpers.parseAndValidateAddressString(addressString, defaultPort);
 
     if(!await fs.pathExists(filePath)) throw new Error(`Cannot find ${filePath}`);
-    let data = await fs.readJson(filePath);
+    const data = await fs.readJson(filePath);
     if(!data.hasOwnProperty('transactions')) throw new Error(`Invalid transaction data!`);
 
-    let client = new Client(address.host, address.port, {});
+    const client = new Client(address.host, address.port, {});
     await client.connect();
 
     const trxCount = data.transactions.length;
@@ -84,7 +84,7 @@ async function importTransactionFile(filePath, addressString, defaultPort) {
 
         let stats;
 
-        for (let file of trx.files) {
+        for (const file of trx.files) {
 
             try {
                 stats = await fs.stat(file.path);
@@ -129,8 +129,8 @@ async function importTransactionFile(filePath, addressString, defaultPort) {
         helpers.log(consts.LOG_WARN, warns.join('\n'));
     }
 
-    let totalTime = (Date.now() - startTime) / 1000;
-    let throughput = (sentBytes / totalTime).toFixed(2);
+    const totalTime = (Date.now() - startTime) / 1000;
+    const throughput = (sentBytes / totalTime).toFixed(2);
     helpers.log(consts.LOG_INFO, `Sent ${sentFileCount} files for ${sentAssetCount} assets (${filesize(sentBytes)}) in ${totalTime} seconds (${filesize(throughput)}/sec)`);
 
     return client.quit();
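
Note: the transaction file consumed by import.js is only partially visible in this diff; the code requires a top-level transactions array (data.transactions), and each transaction's files entries are read via file.path. A hypothetical minimal input illustrating just those fields (any other fields the importer uses are omitted here, and this shape is inferred, not authoritative):

    {
        "transactions": [
            {
                "files": [
                    { "path": "/path/to/exported/asset.bin" }
                ]
            }
        ]
    }
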
4 changes: 2 additions & 2 deletions lib/cache/cache_base.js
@@ -24,7 +24,7 @@ class CacheBase extends EventEmitter {
     }
 
     get _options() {
-        let opts = config.get(this._optionsPath);
+        const opts = config.get(this._optionsPath);
         return defaultsDeep(this._optionOverrides, opts);
     }
 
@@ -37,7 +37,7 @@ class CacheBase extends EventEmitter {
         if(!this._options.hasOwnProperty('cachePath'))
             return null;
 
-        let cachePath = this._options.cachePath;
+        const cachePath = this._options.cachePath;
         return path.isAbsolute(cachePath) ? cachePath : path.join(path.dirname(require.main.filename), cachePath);
     }
 
81 changes: 36 additions & 45 deletions lib/cache/cache_fs.js
@@ -5,9 +5,7 @@ const path = require('path');
 const fs = require('fs-extra');
 const uuid = require('uuid');
 const consts = require('../constants');
-const klaw = require('klaw');
 const moment = require('moment');
-const { Transform } = require('stream');
 
 class CacheFS extends CacheBase {
     constructor() {
@@ -43,7 +41,7 @@ class CacheFS extends CacheBase {
      * @private
      */
     _calcFilepath(type, guid, hash) {
-        let fileName = CacheFS._calcFilename(type, guid, hash);
+        const fileName = CacheFS._calcFilename(type, guid, hash);
         return path.join(this._cachePath, fileName.substr(0, 2), fileName);
     }
 
@@ -60,7 +58,7 @@ class CacheFS extends CacheBase {
     }
 
     async _addFileToCache(type, guid, hash, sourcePath) {
-        let filePath = this._calcFilepath(type, guid, hash);
+        const filePath = this._calcFilepath(type, guid, hash);
         await fs.move(sourcePath, filePath, { overwrite: true });
         return filePath;
     }
@@ -71,7 +69,7 @@ class CacheFS extends CacheBase {
     }
 
     getFileStream(type, guid, hash) {
-        let stream = fs.createReadStream(this._calcFilepath(type, guid, hash));
+        const stream = fs.createReadStream(this._calcFilepath(type, guid, hash));
 
         return new Promise((resolve, reject) => {
             stream.on('open', () => resolve(stream))
@@ -87,9 +85,9 @@ class CacheFS extends CacheBase {
     }
 
     async endPutTransaction(transaction) {
-        let self = this;
+        const self = this;
 
-        let moveFile = async (file) => {
+        const moveFile = async (file) => {
             self._addFileToCache(file.type, transaction.guid, transaction.hash, file.file)
                 .then(filePath => helpers.log(consts.LOG_TEST, `Added file to cache: ${file.size} ${filePath}`),
                     err => helpers.log(consts.LOG_ERR, err));
@@ -112,61 +110,54 @@ class CacheFS extends CacheBase {
         const minFileAccessTime = moment().subtract(expireDuration).toDate();
         const maxCacheSize = this._options.cleanupOptions.maxCacheSize;
 
-        let allItems = [];
-        let deleteItems = [];
+        const allItems = [];
+        const deleteItems = [];
         let cacheSize = 0;
         let deleteSize = 0;
 
-        let progressData = () => {
-            return { cacheCount: allItems.length, cacheSize: cacheSize, deleteCount: deleteItems.length, deleteSize: deleteSize };
+        const progressData = () => {
+            return {
+                cacheCount: allItems.length,
+                cacheSize: cacheSize,
+                deleteCount: deleteItems.length,
+                deleteSize: deleteSize
+            };
         };
 
-        let filterTransform = new Transform({
-            objectMode: true,
-            transform(item, enc, next) {
-                if(item.stats.isDirectory()) return next();
-                allItems.push(item);
-                cacheSize += item.stats.size;
-                if(item.stats.atime < minFileAccessTime) {
-                    deleteSize += item.stats.size;
-                    this.push(item);
-                }
-
-                self.emit('cleanup_search_progress', progressData());
-
-                next();
-            }
-        });
-
-        let finalize = async () => {
+        const progressEvent = () => self.emit('cleanup_search_progress', progressData());
+
+        progressEvent();
+        const progressTimer = setInterval(progressEvent, 250);
+
+        return helpers.readDir(self._cachePath, (item) => {
+            if(item.stats.isDirectory()) return next();
+            allItems.push(item);
+            cacheSize += item.stats.size;
+            if(item.stats.atime < minFileAccessTime) {
+                deleteSize += item.stats.size;
+                deleteItems.push(item);
+            }
+        }).then(async () => {
             if(maxCacheSize > 0 && cacheSize - deleteSize > maxCacheSize) {
                 allItems.sort((a, b) => { return a.stats.atime > b.stats.atime });
-                for(let item of allItems) {
-                    self.emit('cleanup_search_progress', progressData());
+                for(const item of allItems) {
                     deleteSize += item.stats.size;
-                    deleteItems.push(item.path);
+                    deleteItems.push(item);
                     if(cacheSize - deleteSize <= maxCacheSize) break;
                 }
             }
 
+            clearTimeout(progressTimer);
             self.emit('cleanup_search_finish', progressData());
 
-            if(!dryRun) {
-                for(let item of deleteItems) {
-                    self.emit('cleanup_delete_item', item);
-                    await fs.unlink(item);
+            for(const d of deleteItems) {
+                self.emit('cleanup_delete_item', d.path);
+                if(!dryRun) {
+                    await fs.unlink(d.path);
                 }
             }
 
             self.emit('cleanup_delete_finish', progressData());
-        };
-
-        return new Promise((resolve, reject) => {
-            klaw(self._cachePath)
-                .on('error', err => reject(err))
-                .pipe(filterTransform)
-                .on('data', item => deleteItems.push(item.path))
-                .on('end', () => finalize().catch(reject).then(resolve));
         });
     }
 }
@@ -199,8 +190,8 @@ class PutTransactionFS extends PutTransaction {
     }
 
     async _closeAllStreams() {
-        let self = this;
-        let files = Object.values(this._streams);
+        const self = this;
+        const files = Object.values(this._streams);
         if(files.length === 0) return;
 
         function processClosedStream(stream) {
@@ -216,7 +207,7 @@ class PutTransactionFS extends PutTransaction {
             }
         }
 
-        for(let file of files) {
+        for(const file of files) {
             if(file.stream.closed) {
                 processClosedStream(file);
                 continue;
@@ -250,7 +241,7 @@ class PutTransactionFS extends PutTransaction {
     }
 
     async getWriteStream(type, size) {
-        let file = path.join(this._cachePath, uuid());
+        const file = path.join(this._cachePath, uuid());
 
         if(typeof(size) !== 'number' || size <= 0) {
             throw new Error("Invalid size for write stream");
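
Note: the rewritten cleanup() above drops the klaw/Transform stream pipeline in favor of a helpers.readDir walk plus a 250 ms setInterval progress timer, but helpers.readDir itself is not shown in this commit view. Judging only from the call site (a root path plus a callback receiving { path, stats } items, with the returned promise chained via .then), a plausible sketch might look like the following; this is an inference, not the repository's actual implementation:

    const fs = require('fs-extra');
    const path = require('path');

    // Inferred sketch: recursively walk rootDir, invoking itemCallback with
    // { path, stats } for each entry; resolves once the walk completes.
    async function readDir(rootDir, itemCallback) {
        const names = await fs.readdir(rootDir);
        for (const name of names) {
            const itemPath = path.join(rootDir, name);
            const stats = await fs.stat(itemPath);
            // cleanup() filters directories in its callback, so surface every
            // entry and recurse into subdirectories.
            itemCallback({ path: itemPath, stats: stats });
            if (stats.isDirectory()) await readDir(itemPath, itemCallback);
        }
    }
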