-
Notifications
You must be signed in to change notification settings - Fork 0
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
feat: Add LRU cache alongside redis #6
Changes from 2 commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -15,7 +15,7 @@ | |
"rimraf": "^5.0.5", | ||
"tsconfig": "workspace:*", | ||
"tsup": "^7.2.0", | ||
"turbo": "latest" | ||
"turbo": "^1.13.4" | ||
}, | ||
"packageManager": "[email protected]", | ||
"name": "explorer-kit" | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,58 +1,100 @@ | ||
import { LRUCache } from "lru-cache"; | ||
import { Gauge } from "prom-client"; | ||
import { createClient, RedisClientType } from "redis"; | ||
|
||
import { register } from "@/components/metrics"; | ||
import { config } from "@/core/config"; | ||
import { onTeardown } from "@/utils/teardown"; | ||
|
||
export async function createCache(): Promise<RedisClientType> { | ||
const client = createClient({ | ||
url: config.REDIS_URL, | ||
}); | ||
const LRU_CACHE_MAX_ITEMS_COUNT = 100; | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. We could probably bump this to 1000 without any problems right? There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Size of IDL is quite big, but I think node would be fine even with that. There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
Ahh nice! 👍 I'm fine with either 100 or 1000 then |
||
|
||
await client.connect(); | ||
type CacheMetricGauges = { | ||
redisHits: Gauge<string>; | ||
lruHits: Gauge<string>; | ||
misses: Gauge<string>; | ||
}; | ||
|
||
onTeardown(async () => { | ||
await client.disconnect(); | ||
}); | ||
class MultiCache { | ||
constructor( | ||
private redis: RedisClientType, | ||
private lruCache: LRUCache<string, string>, | ||
private guages: CacheMetricGauges | ||
) {} | ||
|
||
return instrumentClient(client as RedisClientType); | ||
} | ||
async multiGet(keys: string[]): Promise<(string | null)[]> { | ||
const items: Record<string, string | null> = {}; | ||
const missingLruKeys: string[] = []; | ||
|
||
const instrumentClient = (client: RedisClientType): RedisClientType => { | ||
const hitsGauge = new Gauge({ | ||
name: "cache_hits_total", | ||
help: "Total number of cache hits", | ||
registers: [register], | ||
}); | ||
for (const key of keys) { | ||
const value = this.lruCache.get(key); | ||
|
||
const missesGauge = new Gauge({ | ||
name: "cache_misses_total", | ||
help: "Total number of cache misses", | ||
registers: [register], | ||
}); | ||
|
||
return new Proxy(client, { | ||
get(target, prop, receiver) { | ||
if (prop === "get") { | ||
return async (key: string) => { | ||
const value = await target.get(key); | ||
if (value) { | ||
items[key] = value; | ||
this.guages.lruHits.inc(); | ||
} else { | ||
missingLruKeys.push(key); | ||
} | ||
} | ||
|
||
if (value) { | ||
hitsGauge.inc(); | ||
} else { | ||
missesGauge.inc(); | ||
} | ||
if (missingLruKeys.length > 0) { | ||
const redisItems = await this.redis.mGet(missingLruKeys); | ||
|
||
return value; | ||
}; | ||
for (const [i, maybeIdl] of redisItems.entries()) { | ||
const key = missingLruKeys[i]!; | ||
items[key] = maybeIdl; | ||
if (maybeIdl) { | ||
this.guages.redisHits.inc(); | ||
this.lruCache.set(key, maybeIdl); | ||
} else { | ||
this.guages.misses.inc(); | ||
} | ||
} | ||
} | ||
|
||
const value = Reflect.get(target, prop, receiver); | ||
return keys.map((key) => items[key] ?? null); | ||
} | ||
|
||
if (typeof value === "function") { | ||
return value.bind(target); | ||
} | ||
}, | ||
async set(key: string, value: string, options: { EX: number }): Promise<void> { | ||
this.lruCache.set(key, value); | ||
await this.redis.set(key, value, options); | ||
} | ||
|
||
async teardown() { | ||
await this.redis.disconnect(); | ||
} | ||
} | ||
|
||
export async function createCache(): Promise<MultiCache> { | ||
const redisClient = createClient({ | ||
url: config.REDIS_URL, | ||
}); | ||
}; | ||
await redisClient.connect(); | ||
|
||
const lruCache = new LRUCache<string, string>({ | ||
max: LRU_CACHE_MAX_ITEMS_COUNT, | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. We should use the same TTL for the in-memory cache as Redis |
||
}); | ||
|
||
const multiCache = new MultiCache(redisClient as RedisClientType, lruCache, { | ||
redisHits: new Gauge({ | ||
name: "redis_cache_hits_total", | ||
help: "Total number of redis cache hits", | ||
registers: [register], | ||
}), | ||
lruHits: new Gauge({ | ||
name: "lru_cache_hits_total", | ||
help: "Total number of lru-cache hits", | ||
registers: [register], | ||
}), | ||
misses: new Gauge({ | ||
name: "cache_misses_total", | ||
help: "Total number of cache misses", | ||
registers: [register], | ||
}), | ||
}); | ||
|
||
onTeardown(async () => { | ||
await multiCache.teardown(); | ||
}); | ||
|
||
return multiCache; | ||
} |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Why do we run
allSettled
on an awaited Promise<(string | null)[]>?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
It is mostly because missing IDLs require an RPC call to solana-fm to retrieve them and store them in the cache. See the line below: