diff --git a/benchmark/lru-cache/helpers/cache-exercisers.js b/benchmark/lru-cache/helpers/cache-exercisers.js new file mode 100644 index 00000000..f2a16588 --- /dev/null +++ b/benchmark/lru-cache/helpers/cache-exercisers.js @@ -0,0 +1,157 @@ +var random = require('pandemonium/random'); +var Benchmark = require('benchmark') +var Keymaster = require('./key-distributions.js'); + +var TEST_CAP = 30000 + +function makeStandardKeys() { + var StrKeys = {} + var NumKeys = {} + // + // 400k entries with approx 42k distinct values btwn 0 and 60k, distributed 300k/65k/23k/10k/5k/3k (~97% in the top 30k) + NumKeys.gen97 = Keymaster.longTailIntGen(60000, -0.4); + NumKeys.arr97 = Keymaster.longTailArr(400000, 60000, -0.4); + StrKeys.arr97 = Keymaster.stringifyArr(NumKeys.arr97); + StrKeys.gen97 = Keymaster.longTailStrGen(60000, -0.4); + NumKeys.arr97.note = 'Long-tail pool of 42,000 distinct numeric values, 97% in the top 30k, 75% in the top 10k'; StrKeys.arr97.note = NumKeys.arr97.note.replace(/numeric/, 'string'); + // + // 400k entries with approx 50k distinct values btwn 0 and 60k, distributed 230k/80k/40k/22k/15k/10k (~88% in the top 30k) + // var NumKeys.arr88 = Keymaster.longTailArr(400000, 60000, -0.7) + // + // 400k entries with approx 60k distinct values btwn 0 and 60k, distributed 135k/85k/61k/48k/39k/33k (~70% in the top 30k) + NumKeys.gen70 = Keymaster.longTailIntGen(60000, -10); + NumKeys.arr70 = Keymaster.longTailArr(400000, 60000, -10); + StrKeys.arr70 = Keymaster.stringifyArr(NumKeys.arr70); + StrKeys.gen70 = Keymaster.longTailStrGen(60000, -10); + NumKeys.arr70.note = 'Long-tail pool of ~60,000 distinct numeric values, 70% in the top 30k, 33% in the top 10k'; StrKeys.arr70.note = NumKeys.arr70.note.replace(/numeric/, 'string'); + // + // 120k entries with approx 52k distinct values btwn 0 and 60k, distributed evenly + NumKeys.arrFlat = Keymaster.flatDistArr(120000, 60000); + StrKeys.arrFlat = Keymaster.stringifyArr(NumKeys.arrFlat); + // + // 31k entries 
running 0-31k in order + NumKeys.arrOrd = Keymaster.ascendingArr(31000, 31000); + StrKeys.arrOrd = Keymaster.stringifyArr(NumKeys.arrOrd); + // + return { StrKeys, NumKeys } +} + +function read1(cache, arrA) { + var count = arrA.length; + for (var ii = 0; ii < count; ii++) { + cache.get(arrA[ii % arrA.length]) + } +} + +function readFetch(cache, arrA, rng) { + var count = arrA.length; + for (var ii = 0; ii < count; ii++) { + var result = cache.get(arrA[ii % arrA.length]) + if (! result) { + cache.set(rng()); + } + } +} + +function write1(cache, arrA) { + var count = arrA.length; + for (var ii = 0; ii < count; ii++) { + var storeme = arrA[ii % arrA.length] + cache.set(storeme, storeme) + } +} + +function write1Read1(cache, [arrA, arrB], count) { + var blen = arrB.length; + if (! count) { count = arrA.length; } + for (var ii = 0; ii < count; ii++) { + var storeme = arrA[ii % arrA.length] + cache.set(storeme, storeme) + cache.get(arrB[ii % blen]) + } +} + +function write1Read4(cache, [arrA, arrB], count) { + var blen = arrB.length; + var boff0 = 0, boff1 = blen * 0.25, boff2 = blen * 0.50, boff3 = blen * 0.75; + if (! count) { count = arrA.length; } + for (var ii = 0; ii < count; ii++) { + var storeme = arrA[ii % arrA.length] + cache.set(storeme, storeme) + cache.get(arrB[(ii + boff0) % blen]) + cache.get(arrB[(ii + boff1) % blen]) + cache.get(arrB[(ii + boff2) % blen]) + cache.get(arrB[(ii + boff3) % blen]) + } +} + +function writeSome(cache, arrA, frac = 0.2) { + var count = arrA.length; + for (var ii = 0; ii < count; ii++) { + if (Math.random() > frac) { continue; } + var storeme = arrA[ii % arrA.length]; + cache.set(storeme, storeme); + } +} + +function delete1(cache, [arrA], count) { + if (! 
count) { count = arrA.length; } + for (var ii = 0; ii < count; ii++) { + var delme = arrA[ii % arrA.length] + cache.delete(delme, delme) + } +} + +function makeLoadedCaches(CacheFactories, arrA, count, capacity = TEST_CAP, options) { + var caches = CacheFactories.map((CacheFactory) => makeLoadedCache(CacheFactory, arrA, count, capacity, options)); + caches.note = `${capacity}-capacity caches${arrA.note ? ' preloaded with ' + arrA.note : ''}` + return caches +} + +function makeCaches(CacheFactories, capacity = TEST_CAP, options = {}) { + var caches = CacheFactories.map((CacheFactory) => { + var cache = new CacheFactory(null, null, capacity, options); + cache.name = CacheFactory.name; + return cache; + }) + caches.note = `${capacity}-capacity caches` + return caches +} + +function makeLoadedCache(CacheFactory, arrA, count, capacity = TEST_CAP, options) { + if (! count) { count = arrA.length; } + var cache = new CacheFactory(null, null, capacity, options); + cache.name = CacheFactory.name; + write1(cache, arrA, count); + var capK = Math.round(capacity / 1000); + cache.note = `Pre-loaded ${cache.name}@${capK}k`; + return cache; +} + +function times(count, func, ...args) { + for (var ii = 0; ii < count; ii++) { + func(ii, count, ...args); + } +} + +async function promisedTimes(count, func, ...args) { + var results = []; + for (var ii = 0; ii < count; ii++) { + var result = await func(ii, count, ...args); + results.push(result); + } + return Promise.all(results); +} + +function round(val, decimals) { + chunk = Math.round(Math.pow(10, decimals)); + return Math.round(val * chunk) / chunk; +} + +function sleep(millis) { return new Promise((yay) => setTimeout(yay, millis)); } + +module.exports = { + read1, readFetch, write1, write1Read1, write1Read4, delete1, writeSome, + makeStandardKeys, makeLoadedCaches, makeLoadedCache, makeCaches, + times, promisedTimes, round, sleep, +} diff --git a/benchmark/lru-cache/helpers/key-distributions.js 
// benchmark/lru-cache/helpers/key-distributions.js
//
// Generators and pre-built arrays of benchmark keys with controlled
// statistical distributions (long-tail, flat, ascending), plus small
// tools for inspecting a distribution.

var randomString = require('pandemonium/random-string');
var random = require('pandemonium/random');
var typed = require('../../../utils/typed-arrays.js');
var {snipToLast} = require('../../../utils/snip.js');

// Re-exported so benchmark scripts can take one import for randomness.
module.exports.random = random;
module.exports.randomString = randomString;

/**
 * Build a typed array of `size` keys by calling rng(ii) for each slot.
 * The element type is the smallest pointer array able to hold `range`.
 */
function randArr(size, range, rng) {
  var ValArrayFactory = typed.getPointerArray(range);
  var arr = new ValArrayFactory(size);
  for (var ii = 0; ii < size; ii++) {
    arr[ii] = rng(ii);
  }
  return arr;
}
module.exports.randArr = randArr;

/** Long-tail-distributed keys in [0, range); see longTailIntGen. */
function longTailArr(size, range, power) {
  var intgen = longTailIntGen(range, power);
  return randArr(size, range, intgen);
}
module.exports.longTailArr = longTailArr;

/**
 * Evenly distributed keys.
 * NOTE(review): random(offset, range + offset) can draw range + offset
 * itself, which may exceed the element type randArr picks for `range` —
 * confirm pandemonium's bounds before relying on offset > 0.
 */
function flatDistArr(size, range, offset = 0) {
  var intgen = () => random(offset, range + offset);
  return randArr(size, range, intgen);
}
module.exports.flatDistArr = flatDistArr;

/** Keys 0, 1, 2, ... in order (`range` only sizes the element type). */
function ascendingArr(size, range) {
  var intgen = (ii) => (ii);
  return randArr(size, range, intgen);
}
module.exports.ascendingArr = ascendingArr;

/**
 * Generator of long-tail-distributed ints in [0, range). `power` shapes
 * the tail (values near 0, e.g. -0.4, are sharp; large negative values,
 * e.g. -10, flatten it). Out-of-range draws are retried recursively.
 */
function longTailIntGen(range, power = -0.8) {
  return function intgen() {
    var rand = Math.random();
    var yy = (1 - rand)**(power) - 1;
    var result = Math.floor(0.25 * range * yy);
    if (result < range) { return result; }
    return intgen();
  };
}
module.exports.longTailIntGen = longTailIntGen;

/** String version of longTailIntGen, with an optional suffix tag. */
function longTailStrGen(range, power = -0.8, tag = '') {
  var intgen = longTailIntGen(range, power);
  return function strgen() {
    return String(intgen()) + tag;
  };
}
module.exports.longTailStrGen = longTailStrGen;

/** Stringify every element of arr, appending an optional suffix tag. */
function stringifyArr(arr, tag = '') {
  var stringArr = [];
  for (var ii = 0; ii < arr.length; ii++) {
    stringArr.push(String(arr[ii]) + tag);
  }
  return stringArr;
}
module.exports.stringifyArr = stringifyArr;

/**
 * Comparator for [key, value] pairs: descending by value, then ascending
 * by key.
 * FIX(review): equal pairs now return 0 — the original returned 1, which
 * violates the comparator contract (both cmp(a,b) and cmp(b,a) positive)
 * and made sort results order-dependent.
 */
function comparePairTails([kk1, vv1], [kk2, vv2]) {
  if (vv2 > vv1) { return 1; }
  if (vv2 < vv1) { return -1; }
  if (kk2 > kk1) { return -1; }
  if (kk2 < kk1) { return 1; }
  return 0;
}

/**
 * Histogram of arr bucketed to multiples of `chunk`, as a Map ordered by
 * descending count; `histo.last` holds the least-frequent entry.
 */
function showDistribution(arr, chunk = 1) {
  var counts = new Map();
  for (var item of arr) {
    const bin = chunk * Math.floor(item / chunk);
    if (! counts.has(bin)) { counts.set(bin, 0); }
    counts.set(bin, 1 + counts.get(bin));
  }
  var entries = [...counts].sort(comparePairTails);
  var histo = new Map(entries);
  histo.last = entries[entries.length - 1];
  return histo;
}
module.exports.showDistribution = showDistribution;

/** Dump coarse and fine histograms of a key pool to the console. */
function examineDist(keys, chunks = 10_000) {
  var histA = showDistribution(keys, 1000);
  var histB = showDistribution(keys, chunks);
  console.log(
    keys.length,
    histA.size,
    snipToLast(histA.entries(), new Map(), {maxToDump: 25, last: histA.last, size: histA.size}),
    histB,
  );
}
module.exports.examineDist = examineDist;

// var HewJass = longTailArr(2_000_000, 1e6, -0.5);
// examineDist(HewJass, 100_000);

// ---------------------------------------------------------------------------
// benchmark/lru-cache/performance.js
// Benchmark.js suites comparing the LRU cache variants on standard key pools.

var random = require('pandemonium/random');
var Benchmark = require('benchmark');
var Keymaster = require('./helpers/key-distributions.js');
var Exerciser = require('./helpers/cache-exercisers.js');
var LRUCache = require('../../lru-cache.js'),
    LRUMap = require('../../lru-map.js'),
    LRUCacheWithDelete = require('../../lru-cache-with-delete.js'),
    LRUMapWithDelete = require('../../lru-map-with-delete.js'),
    LRUCacheWithExpiry = require('../../lru-cache-with-expiry.js');

// Benchmark.options.minSamples = 3;

// The cache classes under test; trim or extend this list to focus a run.
var CACHES = [LRUCacheWithExpiry, LRUCache]; //, LRUMap, LRUMapWithDelete, LRUMap, LRUCacheWithDelete, LRUCache];

var {
  makeStandardKeys, write1Read1, write1Read4, write1, read1,
} = Exerciser;
var { StrKeys, NumKeys } = makeStandardKeys();
/**
 * Benchmark read patterns against freshly-created (empty) caches.
 * `Keyset` is one of the pools returned by makeStandardKeys().
 */
function runEmptyCacheBenches(Keyset, benchOptions = {}) {
  const { arr97 } = Keyset;

  var emptyCaches = Exerciser.makeCaches(CACHES);
  scenario('Empty caches, repeated reads', emptyCaches, arr97.note, (cache) => (function() {
    read1(cache, arr97);
  }));
}

/**
 * Benchmark mixed read/write patterns against caches pre-loaded to
 * capacity with the in-order key pool.
 */
function runLoadedCacheBenches(Keyset, benchOptions = {}) {
  const { gen97, arr70, arr97, arrFlat, arrOrd } = Keyset;

  var fullCaches = Exerciser.makeLoadedCaches(CACHES, arrOrd);

  // Optionally start each cache's expiry monitor so its overhead shows up
  // in the timings.
  if (benchOptions.do_expires) {
    fullCaches.forEach((cache) => { if (cache.monitor) { cache.monitor(200, null, {logging: true}); } });
  }

  scenario('1x flat writes, 4x gentle spread read', fullCaches, arr70.note, (cache) => (function() {
    write1Read4(cache, [arrFlat, arr70], arr70.length);
  }));

  scenario('Individual get then set operations', fullCaches, '97% short tail keys', (cache) => (function() {
    cache.get(gen97());
    cache.set(gen97(), 'hi');
  }));

  scenario('Individual get then set', fullCaches, 'flat distribution 33% larger than the cache', (cache) => (function() {
    cache.get(String(random(0, 40000)));
    cache.set(String(random(0, 40000)), 'hi');
  }));

  scenario('Read-only sharp spread', fullCaches, arr97.note, (cache) => (function() {
    read1(cache, arr97);
  }));

  scenario('Read-only gentle spread', fullCaches, arr70.note, (cache) => (function() {
    read1(cache, arr70);
  }));
}

/**
 * Run one named benchmark scenario across every cache in `caches`.
 * actionsFactory(cache, info) returns the zero-arg closure Benchmark times.
 */
function scenario(act, caches, dataNote, actionsFactory, info) {
  var suite = decoratedSuite(act, caches.note, dataNote);
  caches.forEach((cache) => {
    var actions = actionsFactory(cache, info);
    suite.add(`${padEnd(act, 40)} -- ${padEnd(cache.name, 18)} --`, actions);
  });
  // NOTE(review): minSamples is a Benchmark option, not a documented
  // Suite#run option — confirm it takes effect here.
  suite.run({ minSamples: 36 });
}

/**
 * Right-pad `str` with spaces to width `len` (unchanged if already wider).
 * FIX(review): delegates to native String#padEnd instead of slicing a
 * fixed 30-space constant, which silently under-padded wider columns.
 */
function padEnd(str, len) {
  return String(str).padEnd(len);
}

/** Build a Benchmark suite that logs scenario context and per-cycle results. */
function decoratedSuite(act, subjectNote, dataNote) {
  return new Benchmark.Suite('Testing caches')
    .on('start', (event) => {
      console.log('\n ', act);
      console.log(' using', subjectNote);
      console.log(' with', String(dataNote) + "\n Results:");
    })
    .on('error', (event) => { console.error("error in benchmark", event.target.name, event.target.error); })
    .on('cycle', (event) => {
      const benchmark = event.target;
      console.log(" => ", benchmark.toString());
    });
}

console.log('Running with String Keys');
runLoadedCacheBenches(StrKeys);
runEmptyCacheBenches(StrKeys);

// ---------------------------------------------------------------------------
// benchmark/lru-cache/workout-with-expire.js
// Long-running stress workout for the expiring cache: read/fetch loops plus
// a background batch writer, with the expiry monitor running throughout.
// (Shebang in the original file: #!/usr/bin/env node --max-old-space-size=8000)

var random = require('pandemonium/random');
var Benchmark = require('benchmark');
var Keymaster = require('./helpers/key-distributions.js');
var Exerciser = require('./helpers/cache-exercisers.js');
var LRUCache = require('../../lru-cache.js'),
    LRUMap = require('../../lru-map.js'),
    LRUCacheWithDelete = require('../../lru-cache-with-delete.js'),
    LRUMapWithDelete = require('../../lru-map-with-delete.js'),
    LRUCacheWithExpiry = require('../../lru-cache-with-expiry.js');
// var {setInterval, setTimeout} = require('timers/promises');

// Run
//     runbench.sh lru-cache/workout-with-expire.js
//
// ## OR ##
//
//     TEST_REPS=1000 node --inspect-brk ./benchmark/lru-cache/workout.js
//
// then open up chrome://inspect/#devices, and look for your process;
// kick it in gear until it gets past the setup, then switch to
// profiling and capture your trace.
var CACHES = { LRUMap, LRUCache, LRUMapWithDelete, LRUCacheWithDelete, LRUCacheWithExpiry };

var TEST_REPS = Number(process.env.TEST_REPS) || 30;

// Choose the cache implementation via env var; default to the expiring cache.
var CacheFactory = CACHES[process.env.TEST_CACHE || "LRUCacheWithExpiry"];
if (! CacheFactory) {
  console.error("Please specify env var TEST_CACHE with one of", Object.keys(CACHES));
  process.exit(-9);
}

var { write1Read1, write1Read4, write1, read1, delete1, writeSome, readFetch, times, sleep } = Exerciser;
var { longTailStrGen } = Keymaster;

const CACHE_CAPACITY = 1e6;       // entries
const CACHE_TTK = 10 * 1000;      // time-to-keep horizon, ms
const CACHE_EXPIRE_MS = 2 * 1000; // expiry-monitor period, ms
const BATCH_WRITE_MS = 3 * 1000;  // batch-writer period, ms
const DISTINCT_KEYS = 2e6;

// 3 million strings with about 500k distinct values, 37% of which are 0-100k and 99% 0-1.5M
var Sharp = Keymaster.stringifyArr(Keymaster.longTailArr(3e6, DISTINCT_KEYS, -0.4), 'A');
//
// 3 million strings with about 1.4M distinct values, 12% of which are 0-100k and 87% 0-1.5M
var Broad = Keymaster.stringifyArr(Keymaster.longTailArr(3e6, DISTINCT_KEYS, -10), 'B');
//
var FlatA = Keymaster.stringifyArr(Keymaster.flatDistArr(0.2 * DISTINCT_KEYS, 0.2 * DISTINCT_KEYS), 'A');
var FlatB = Keymaster.stringifyArr(Keymaster.flatDistArr(0.2 * DISTINCT_KEYS, 0.2 * DISTINCT_KEYS), 'B');
// var FlatNums = Keymaster.flatDistArr(0.2 * DISTINCT_KEYS, 0.2 * DISTINCT_KEYS); Keymaster.examineDist(FlatNums, 100_000);
//
// process used to penalize a cache "miss". power of eg -10 takes much longer than -0.4
var strgen = Keymaster.longTailStrGen(1e6, -0.4, 'A');

// Create the cache with a ttk horizon of CACHE_TTK ms.
var cache = new CacheFactory(null, null, CACHE_CAPACITY, {ttk: CACHE_TTK});

var writeTimer;

// Start the expiration watcher every CACHE_EXPIRE_MS. We are doing this to
// be punishing; a reasonably fast laptop gives ~80-1000ms per expire, so
// this will be a visible fraction of all processing time.
if (cache.monitor) { cache.monitor(CACHE_EXPIRE_MS); }

// We have a 1m element cache for two pools of keys being read and written:
//
// * a long-tail distrib of 6m keys with ~2m distinct values. On every read
//   miss it has to spend time to "load" and store a value, perhaps
//   evicting another item.
// * a flat distrib of 800k keys covering the most frequent values,
//   representing a batch load process that runs on a timer.

/**
 * Run the workout: TEST_REPS read/fetch passes over the Broad and Sharp
 * pools while a background interval performs flat batch writes, then shut
 * down the expiry monitor and batch writer — on the error path too.
 */
async function workout() {
  try {
    var initT = Date.now();
    var nWriteBursts = 0;
    //
    // Log: phase, iteration, total elapsed s, s per iteration, s spent in
    // this phase, wall time, and the cache's expiry bookkeeping.
    function logit(act, actT, ii) {
      var currDT = new Date();
      var currT = currDT.valueOf();
      console.log(
        act, "\t", ii, "\t",
        Exerciser.round((currT - initT) / 1000, 1), "\t",
        Exerciser.round((currT - initT) / ((ii + 1) * 1000), 2), "\t",
        Exerciser.round((currT - actT) / 1000, 1), "\t",
        currDT, cache.sinceExpiry, "\t", `/ ${cache.ttk} ms ${cache.capacity}: ${cache.size}\t`, cache.deletedSize,
      );
    }
    //
    // Once every BATCH_WRITE_MS, wake up and write flatly-distributed
    // values covering the short-middle tails.
    writeTimer = setInterval(() => {
      // FIX(review): writeT was an implicit global, and the log was handed
      // the never-updated `lastWriteT`, so per-burst timings grew forever.
      var writeT = Date.now();
      writeSome(cache, FlatA, 0.10); // 10% of 400k
      writeSome(cache, FlatB, 0.10); // 10% of 400k
      logit('write', writeT, nWriteBursts++);
    }, BATCH_WRITE_MS);
    //
    await Exerciser.promisedTimes(TEST_REPS, async function (ii) {
      var loopT = Date.now(); // FIX(review): was an implicit global
      readFetch(cache, Broad, strgen);
      readFetch(cache, Sharp, strgen);
      console.log(cache.deletedSize, cache.size);
      logit('loop1', loopT, ii);
      await sleep(10); // yield the thread so the timers can run
    });
  } catch (err) {
    console.error(err);
    throw err;
  } finally {
    // FIX(review): cleanup now runs on the error path as well — the
    // original rethrew before clearInterval, leaving the process alive.
    if (cache.stopMonitor) { cache.stopMonitor(); }
    if (writeTimer) { clearInterval(writeTimer); }
  }
}

workout();
// ---------------------------------------------------------------------------
// benchmark/lru-cache/workout.js
// Profiling workout: repeated write/read/delete passes over one cache
// implementation, chosen with the TEST_CACHE env var.

var random = require('pandemonium/random');
var Benchmark = require('benchmark');
var Keymaster = require('./helpers/key-distributions.js');
var Exerciser = require('./helpers/cache-exercisers.js');
var LRUCache = require('../../lru-cache.js'),
    LRUMap = require('../../lru-map.js'),
    LRUCacheWithDelete = require('../../lru-cache-with-delete.js'),
    LRUMapWithDelete = require('../../lru-map-with-delete.js'),
    LRUCacheWithExpiry = require('../../lru-cache-with-expiry.js');

// TEST_CACHE=LRUMapWithDelete node --prof ./benchmark/lru-cache/workout.js ;
// PFILE=`ls -1t isolate-* | head -n1`; node --prof-process $PFILE > $(basename $PFILE .log).prof.txt ; echo $PFILE;
// mv $PFILE /tmp/prof/
// ## OR ##
// TEST_CACHE=LRUMapWithDelete TEST_REPS=1000 node --inspect-brk ./benchmark/lru-cache/workout.js
// then open up chrome://inspect/#devices, look for this process;
// kick it in gear until it gets past the setup, then switch to profiling and capture your trace.

Benchmark.options.minSamples = 30;

var CACHES = { LRUMap, LRUCache, LRUMapWithDelete, LRUCacheWithDelete, LRUCacheWithExpiry };

var TEST_REPS = Number(process.env.TEST_REPS) || 100;

var CacheFactory = CACHES[process.env.TEST_CACHE];
if (! CacheFactory) {
  console.error("Please specify env var TEST_CACHE with one of", Object.keys(CACHES));
  process.exit(-9);
}

var {
  makeStandardKeys,
  write1Read1, write1Read4, write1, read1, delete1,
} = Exerciser;

var { NumKeys: Keys } = makeStandardKeys();
var cache = Exerciser.makeLoadedCache(CacheFactory, Keys.arrOrd);

var initT = Date.now();
var lastT = initT;

// Log overall s/rep, s since last checkpoint, wall time, and cache state.
// FIX(review): divides by (ii + 1) — the original divided by ii, printing
// Infinity/NaN on the first iteration. The two identical in-loop log
// stanzas were also deduplicated into this helper.
function logProgress(ii) {
  var currDT = new Date();
  var currT = currDT.valueOf();
  console.log(
    ii, "\t",
    Exerciser.round((currT - initT) / ((ii + 1) * 1000), 3), "\t",
    Exerciser.round((currT - lastT) / 1000, 3), "\t",
    currDT, cache.size, cache.capacity, cache.inspect({maxToDump: 5}),
  );
  lastT = currT;
}

Exerciser.times(TEST_REPS, function (ii) {
  logProgress(ii);
  write1Read4(cache, [Keys.arrFlat, Keys.arr97], Keys.arr97.length);
  write1Read4(cache, [Keys.arrFlat, Keys.arr97], Keys.arr97.length);
  logProgress(ii);
  // Drain the cache by deleting every key currently stored.
  // NOTE(review): cache.K is the implementation's internal key store —
  // confirm it remains part of the de-facto API of these cache classes.
  delete1(cache, [cache.K], cache.capacity);
});