chore: benchmarks for lru-cache family
mrflip committed Aug 9, 2022
1 parent ff991d6 commit 22cea6b
Showing 5 changed files with 535 additions and 0 deletions.
157 changes: 157 additions & 0 deletions benchmark/lru-cache/helpers/cache-exercisers.js
@@ -0,0 +1,157 @@
var random = require('pandemonium/random');
var Benchmark = require('benchmark')
var Keymaster = require('./key-distributions.js');

var TEST_CAP = 30000

function makeStandardKeys() {
var StrKeys = {}
var NumKeys = {}
//
// 400k entries with approx 42k distinct values btwn 0 and 60k, distributed 300k/65k/23k/10k/5k/3k (~97% in the top 30k)
NumKeys.gen97 = Keymaster.longTailIntGen(60000, -0.4);
NumKeys.arr97 = Keymaster.longTailArr(400000, 60000, -0.4);
StrKeys.arr97 = Keymaster.stringifyArr(NumKeys.arr97);
StrKeys.gen97 = Keymaster.longTailStrGen(60000, -0.4);
NumKeys.arr97.note = 'Long-tail pool of ~42,000 distinct numeric values, 97% in the top 30k, 75% in the top 10k';
StrKeys.arr97.note = NumKeys.arr97.note.replace(/numeric/, 'string');
//
// 400k entries with approx 50k distinct values btwn 0 and 60k, distributed 230k/80k/40k/22k/15k/10k (~88% in the top 30k)
// var NumKeys.arr88 = Keymaster.longTailArr(400000, 60000, -0.7)
//
// 400k entries with approx 60k distinct values btwn 0 and 60k, distributed 135k/85k/61k/48k/39k/33k (~70% in the top 30k)
NumKeys.gen70 = Keymaster.longTailIntGen(60000, -10);
NumKeys.arr70 = Keymaster.longTailArr(400000, 60000, -10);
StrKeys.arr70 = Keymaster.stringifyArr(NumKeys.arr70);
StrKeys.gen70 = Keymaster.longTailStrGen(60000, -10);
NumKeys.arr70.note = 'Long-tail pool of ~60,000 distinct numeric values, 70% in the top 30k, 33% in the top 10k';
StrKeys.arr70.note = NumKeys.arr70.note.replace(/numeric/, 'string');
//
// 120k entries with approx 52k distinct values btwn 0 and 60k, distributed evenly
NumKeys.arrFlat = Keymaster.flatDistArr(120000, 60000);
StrKeys.arrFlat = Keymaster.stringifyArr(NumKeys.arrFlat);
//
// 31k entries running 0-31k in order
NumKeys.arrOrd = Keymaster.ascendingArr(31000, 31000);
StrKeys.arrOrd = Keymaster.stringifyArr(NumKeys.arrOrd);
//
return { StrKeys, NumKeys }
}

function read1(cache, arrA) {
var count = arrA.length;
for (var ii = 0; ii < count; ii++) {
cache.get(arrA[ii % arrA.length])
}
}

function readFetch(cache, arrA, rng) {
var count = arrA.length;
for (var ii = 0; ii < count; ii++) {
var key = arrA[ii % arrA.length]
var result = cache.get(key)
if (! result) {
// on a miss, "fetch" a fresh value and store it under the missed key
cache.set(key, rng());
}
}
}

function write1(cache, arrA, count) {
if (! count) { count = arrA.length; }
for (var ii = 0; ii < count; ii++) {
var storeme = arrA[ii % arrA.length]
cache.set(storeme, storeme)
}
}

function write1Read1(cache, [arrA, arrB], count) {
var blen = arrB.length;
if (! count) { count = arrA.length; }
for (var ii = 0; ii < count; ii++) {
var storeme = arrA[ii % arrA.length]
cache.set(storeme, storeme)
cache.get(arrB[ii % blen])
}
}

function write1Read4(cache, [arrA, arrB], count) {
var blen = arrB.length;
var boff0 = 0, boff1 = blen * 0.25, boff2 = blen * 0.50, boff3 = blen * 0.75;
if (! count) { count = arrA.length; }
for (var ii = 0; ii < count; ii++) {
var storeme = arrA[ii % arrA.length]
cache.set(storeme, storeme)
cache.get(arrB[(ii + boff0) % blen])
cache.get(arrB[(ii + boff1) % blen])
cache.get(arrB[(ii + boff2) % blen])
cache.get(arrB[(ii + boff3) % blen])
}
}

function writeSome(cache, arrA, frac = 0.2) {
var count = arrA.length;
for (var ii = 0; ii < count; ii++) {
if (Math.random() > frac) { continue; }
var storeme = arrA[ii % arrA.length];
cache.set(storeme, storeme);
}
}

function delete1(cache, [arrA], count) {
if (! count) { count = arrA.length; }
for (var ii = 0; ii < count; ii++) {
var delme = arrA[ii % arrA.length]
cache.delete(delme)
}
}

function makeLoadedCaches(CacheFactories, arrA, count, capacity = TEST_CAP, options) {
var caches = CacheFactories.map((CacheFactory) => makeLoadedCache(CacheFactory, arrA, count, capacity, options));
caches.note = `${capacity}-capacity caches${arrA.note ? ' preloaded with ' + arrA.note : ''}`
return caches
}

function makeCaches(CacheFactories, capacity = TEST_CAP, options = {}) {
var caches = CacheFactories.map((CacheFactory) => {
var cache = new CacheFactory(null, null, capacity, options);
cache.name = CacheFactory.name;
return cache;
})
caches.note = `${capacity}-capacity caches`
return caches
}

function makeLoadedCache(CacheFactory, arrA, count, capacity = TEST_CAP, options) {
if (! count) { count = arrA.length; }
var cache = new CacheFactory(null, null, capacity, options);
cache.name = CacheFactory.name;
write1(cache, arrA, count);
var capK = Math.round(capacity / 1000);
cache.note = `Pre-loaded ${cache.name}@${capK}k`;
return cache;
}

function times(count, func, ...args) {
for (var ii = 0; ii < count; ii++) {
func(ii, count, ...args);
}
}

async function promisedTimes(count, func, ...args) {
var results = [];
for (var ii = 0; ii < count; ii++) {
var result = await func(ii, count, ...args);
results.push(result);
}
return Promise.all(results);
}

function round(val, decimals) {
var chunk = Math.pow(10, decimals);
return Math.round(val * chunk) / chunk;
}

function sleep(millis) { return new Promise((yay) => setTimeout(yay, millis)); }

module.exports = {
read1, readFetch, write1, write1Read1, write1Read4, delete1, writeSome,
makeStandardKeys, makeLoadedCaches, makeLoadedCache, makeCaches,
times, promisedTimes, round, sleep,
}
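
// Illustrative wiring of these helpers (a sketch only, mirroring how
// performance.js uses them; the cache constructors are not required in this
// file and must be supplied by the caller):
//
//   var { StrKeys } = makeStandardKeys();
//   var caches = makeLoadedCaches([LRUCache, LRUMap], StrKeys.arrOrd);
//   caches.forEach(function (cache) {
//     write1Read4(cache, [StrKeys.arrFlat, StrKeys.arr70], StrKeys.arr70.length);
//   });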
100 changes: 100 additions & 0 deletions benchmark/lru-cache/helpers/key-distributions.js
@@ -0,0 +1,100 @@
var randomString = require('pandemonium/random-string');
var random = require('pandemonium/random');
var typed = require('../../../utils/typed-arrays.js');
var {snipToLast} = require('../../../utils/snip.js');

module.exports.random = random;
module.exports.randomString = randomString;

function randArr(size, range, rng) {
var ValArrayFactory = typed.getPointerArray(range);
var arr = new ValArrayFactory(size)
for (var ii = 0; ii < size; ii++) {
arr[ii] = rng(ii);
}
return arr
}
module.exports.randArr = randArr;

function longTailArr(size, range, power) {
var intgen = longTailIntGen(range, power)
return randArr(size, range, intgen);
}
module.exports.longTailArr = longTailArr;

function flatDistArr(size, range, offset = 0) {
var intgen = () => random(offset, range + offset);
return randArr(size, range, intgen);
}
module.exports.flatDistArr = flatDistArr;

function ascendingArr(size, range) {
var intgen = (ii) => (ii);
return randArr(size, range, intgen);
}
module.exports.ascendingArr = ascendingArr;

function longTailIntGen(range, power = -0.8) {
return function intgen() {
var rand = Math.random()
var yy = (1 - rand)**(power) - 1
var result = Math.floor(0.25 * range * yy)
if (result < range) { return result }
return intgen()
}
}
module.exports.longTailIntGen = longTailIntGen;
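
// How longTailIntGen shapes its draws (explanatory note; the constants are the
// ones used above): with u uniform on [0, 1) and power < 0, yy = (1 - u)^power - 1
// is 0 at u = 0 and grows without bound as u approaches 1, so small keys
// dominate and large keys form a long tail; the 0.25 * range scale and the
// re-roll of out-of-range draws keep every result inside [0, range).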

function longTailStrGen(range, power = -0.8, tag = '') {
var intgen = longTailIntGen(range, power);
return function strgen() {
return String(intgen()) + tag;
}
}
module.exports.longTailStrGen = longTailStrGen;

function stringifyArr(arr, tag = '') {
var stringArr = [];
for (var ii = 0; ii < arr.length; ii++) {
stringArr.push(String(arr[ii]) + tag);
}
return stringArr;
}
module.exports.stringifyArr = stringifyArr;

function comparePairTails([kk1, vv1], [kk2, vv2]) {
if (vv2 > vv1) { return 1 }
if (vv2 < vv1) { return -1 }
if (kk2 > kk1) { return -1 }
if (kk2 < kk1) { return 1 }
return 0
}

function showDistribution(arr, chunk = 1) {
var counts = new Map();
for (var item of arr) {
const bin = chunk * Math.floor(item / chunk)
if (! counts.has(bin)) { counts.set(bin, 0); }
counts.set(bin, 1 + counts.get(bin));
}
var entries = [...counts].sort(comparePairTails)
var histo = new Map(entries)
histo.last = entries[entries.length - 1]
return histo
}
module.exports.showDistribution = showDistribution;

function examineDist(keys, chunks = 10_000) {
var histA = showDistribution(keys, 1000)
var histB = showDistribution(keys, chunks)
console.log(
keys.length,
histA.size,
snipToLast(histA.entries(), new Map(), {maxToDump: 25, last: histA.last, size: histA.size}),
histB,
)
}
module.exports.examineDist = examineDist;

// var HewJass = longTailArr(2_000_000, 1e6, -0.5);
// examineDist(HewJass, 100_000);
94 changes: 94 additions & 0 deletions benchmark/lru-cache/performance.js
@@ -0,0 +1,94 @@
var random = require('pandemonium/random');
var Benchmark = require('benchmark')
var Keymaster = require('./helpers/key-distributions.js');
var Exerciser = require('./helpers/cache-exercisers.js');
var LRUCache = require('../../lru-cache.js'),
LRUMap = require('../../lru-map.js'),
LRUCacheWithDelete = require('../../lru-cache-with-delete.js'),
LRUMapWithDelete = require('../../lru-map-with-delete.js'),
LRUCacheWithExpiry = require('../../lru-cache-with-expiry.js');

// Benchmark.options.minSamples = 3;

var CACHES = [LRUCacheWithExpiry, LRUCache] //, LRUMap, LRUMapWithDelete, LRUMap, LRUCacheWithDelete, LRUCache];

var {
makeStandardKeys, write1Read1, write1Read4, write1, read1,
} = Exerciser;
var { StrKeys, NumKeys } = makeStandardKeys()

function runEmptyCacheBenches(Keyset, benchOptions = {}) {
const { gen70, gen97, arr70, arr97, arrFlat, arrOrd } = Keyset

var emptyCaches = Exerciser.makeCaches(CACHES);
scenario('Empty caches, repeated reads', emptyCaches, arr97.note, (cache) => (function() {
read1(cache, arr97);
}));
}

function runLoadedCacheBenches(Keyset, benchOptions = {}) {
const { gen70, gen97, arr70, arr97, arrFlat, arrOrd } = Keyset

var fullCaches = Exerciser.makeLoadedCaches(CACHES, arrOrd);

if (benchOptions.do_expires) {
fullCaches.forEach((cache) => { if (cache.monitor) { cache.monitor(200, null, {logging: true}); } });
}

scenario('1x flat writes, 4x gentle spread read', fullCaches, arr70.note, (cache) => (function() {
write1Read4(cache, [arrFlat, arr70], arr70.length);
}));

scenario('Individual get then set operations', fullCaches, '97% short tail keys', (cache) => (function() {
cache.get(gen97());
cache.set(gen97(), 'hi');
}));

scenario('Individual get then set', fullCaches, 'flat distribution 33% larger than the cache', (cache) => (function() {
cache.get(String(random(0, 40000)));
cache.set(String(random(0, 40000)), 'hi');
}));

scenario('Read-only sharp spread', fullCaches, arr97.note, (cache) => (function() {
read1(cache, arr97);
}));

scenario('Read-only gentle spread', fullCaches, arr70.note, (cache) => (function() {
read1(cache, arr70);
}));

}

function scenario(act, caches, dataNote, actionsFactory, info) {
var suite = decoratedSuite(act, caches.note, dataNote);
caches.forEach((cache) => {
var actions = actionsFactory(cache, info);
suite.add(`${padEnd(act, 40)} -- ${padEnd(cache.name, 18)} --`, actions);
// console.log(actions())
})
suite.run({ minSamples: 36 });
}

const SPACES = ' ';
function padEnd(str, len) {
var bite = str.length > len ? 0 : len - str.length;
return `${str}${SPACES.slice(0, bite)}`;
}

function decoratedSuite(act, subjectNote, dataNote) {
return new Benchmark.Suite('Testing caches')
.on('start', (event) => {
console.log('\n ', act);
console.log(' using', subjectNote);
console.log(' with', String(dataNote) + "\n Results:");
})
.on('error', (event) => { console.error("error in benchmark", event.target.name, event.target.error) })
.on('cycle', (event) => {
const benchmark = event.target;
console.log(" => ", benchmark.toString());
})
}

console.log('Running with String Keys');
runLoadedCacheBenches(StrKeys);
runEmptyCacheBenches(StrKeys);
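
// Assumed invocation (plain Node script, run from the repository root):
//   node benchmark/lru-cache/performance.js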
