feat: Add LRU cache alongside redis #6

Merged · 4 commits · Jun 25, 2024
Changes from 2 commits
2 changes: 1 addition & 1 deletion package.json
@@ -15,7 +15,7 @@
"rimraf": "^5.0.5",
"tsconfig": "workspace:*",
"tsup": "^7.2.0",
"turbo": "latest"
"turbo": "^1.13.4"
},
"packageManager": "[email protected]",
"name": "explorer-kit"
1 change: 1 addition & 0 deletions packages/explorerkit-server/package.json
@@ -50,6 +50,7 @@
"bs58": "^5.0.0",
"dotenv": "^16.4.5",
"express": "^4.18.2",
"lru-cache": "^10.2.2",
"prom-client": "^15.1.0",
"redis": "^4.6.14",
"vite": "^5.2.13",
@@ -4,8 +4,24 @@ import { decodeProgramError } from "@/components/decoders/errors";
import { loadAllIdls } from "@/components/idls";

vi.mock("@/core/shared-dependencies", (loadActual) => {
+ class MultiCacheMock {
+ private data: Record<string, string> = {};
+
+ async get(key: string) {
+ return this.data[key] || null;
+ }
+
+ async multiGet(keys: string[]) {
+ return keys.map((key) => this.data[key] || null);
+ }
+
+ async set(key: string, value: string) {
+ this.data[key] = value;
+ }
+ }
+
const deps = {
- cache: new Map(),
+ cache: new MultiCacheMock(),
};

return {
4 changes: 2 additions & 2 deletions packages/explorerkit-server/src/components/decoders/errors.ts
@@ -1,4 +1,4 @@
- import { ParserType } from "@solanafm/explorer-kit";
+ import { checkIfErrorsParser, ParserType } from "@solanafm/explorer-kit";

import { IdlsMap } from "@/components/idls";
import { ProgramError } from "@/types";
@@ -18,7 +18,7 @@ export function decodeProgramError(idls: IdlsMap, programError: ProgramError): P
const hexErrorCode = `0x${programError.errorCode.toString(16)}`;
const errorParser = parser.createParser(ParserType.ERROR);

- if (!errorParser || !("parseError" in errorParser)) {
+ if (!errorParser || !checkIfErrorsParser(errorParser)) {
return programError;
}

@@ -4,8 +4,24 @@ import { decodeInstruction } from "@/components/decoders/instructions";
import { loadAllIdls } from "@/components/idls";

vi.mock("@/core/shared-dependencies", (loadActual) => {
+ class MultiCacheMock {
+ private data: Record<string, string> = {};
+
+ async get(key: string) {
+ return this.data[key] || null;
+ }
+
+ async multiGet(keys: string[]) {
+ return keys.map((key) => this.data[key] || null);
+ }
+
+ async set(key: string, value: string) {
+ this.data[key] = value;
+ }
+ }
+
const deps = {
- cache: new Map(),
+ cache: new MultiCacheMock(),
};

return {
@@ -1,10 +1,10 @@
- import { ParserType } from "@solanafm/explorer-kit";
+ import { checkIfInstructionParser, ParserType } from "@solanafm/explorer-kit";
import { Buffer } from "buffer";

import { IdlsMap } from "@/components/idls";
import { Instruction, TopLevelInstruction } from "@/types";

- export async function decodeInstruction(idls: IdlsMap, instruction: Instruction): Promise<Instruction> {
+ export function decodeInstruction(idls: IdlsMap, instruction: Instruction): Instruction {
const programId = instruction.programId.toString();
let parsedInstruction = {
programId: programId.toString(),
@@ -20,7 +20,7 @@ export async function decodeInstruction(idls: IdlsMap, instruction: Instruction)
}
let instructionParser = parser.createParser(ParserType.INSTRUCTION);

- if (!instructionParser || !("parseInstructions" in instructionParser)) {
+ if (!instructionParser || !checkIfInstructionParser(instructionParser)) {
return parsedInstruction; // Short-circuit without decodedData parser can't be created
}

11 changes: 8 additions & 3 deletions packages/explorerkit-server/src/components/idls.ts
@@ -9,15 +9,20 @@ const IDL_CACHE_TTL = 3600; // one hour

export async function loadAllIdls(programIds: string[]): Promise<IdlsMap> {
const idls: IdlsMap = new Map();

+ if (programIds.length === 0) {
+ return idls;
+ }

const cache = getSharedDep("cache");
- const cachedIdls = await Promise.allSettled(programIds.map((id) => cache.get(id)));
+ const cachedIdls = await cache.multiGet(programIds);

await Promise.allSettled(
cachedIdls.map(async (res, i) => {
const programId = programIds[i]!;
Comment on lines 20 to 22

Member: Why do we run allSettled on an awaited Promise<(string | null)[]>?

Author: It is mostly because missing idls need to make an RPC call to solana-fm to retrieve and store that in the cache. See line below:
const idl = await getProgramIdl(programId);

- if (res.status === "fulfilled" && res.value) {
- const idl = deserializeIdl(res.value);
+ if (res) {
+ const idl = deserializeIdl(res);
idls.set(programId, idl && new SolanaFMParser(idl, programId));
return;
}
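The thread above explains that a cache miss still triggers an async RPC fetch, which is why the mapping stays wrapped in Promise.allSettled. A minimal sketch of how that miss branch could continue past the truncated lines, reusing getProgramIdl from the author's comment and IDL_CACHE_TTL from the hunk header, and assuming a hypothetical serializeIdl counterpart to deserializeIdl:

await Promise.allSettled(
  cachedIdls.map(async (cachedIdl, i) => {
    const programId = programIds[i]!;

    if (cachedIdl) {
      // Cache hit (LRU or redis): reuse the serialized IDL, as in the diff above.
      const idl = deserializeIdl(cachedIdl);
      idls.set(programId, idl && new SolanaFMParser(idl, programId));
      return;
    }

    // Cache miss: fetch the IDL from solana-fm over RPC, then backfill the cache.
    const idl = await getProgramIdl(programId);
    if (idl) {
      // serializeIdl is assumed here; the real serialization step is not shown in this PR.
      await cache.set(programId, serializeIdl(idl), { EX: IDL_CACHE_TTL });
    }
    idls.set(programId, idl && new SolanaFMParser(idl, programId));
  })
);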
122 changes: 82 additions & 40 deletions packages/explorerkit-server/src/core/cache.ts
@@ -1,58 +1,100 @@
+ import { LRUCache } from "lru-cache";
import { Gauge } from "prom-client";
import { createClient, RedisClientType } from "redis";

import { register } from "@/components/metrics";
import { config } from "@/core/config";
import { onTeardown } from "@/utils/teardown";

- export async function createCache(): Promise<RedisClientType> {
- const client = createClient({
- url: config.REDIS_URL,
- });
+ const LRU_CACHE_MAX_ITEMS_COUNT = 100;
Member: We could probably bump this to 1000 without any problems right?

Author: Size of IDL is quite big, but I think node would be fine even with that. btw I've tested that even a LRU cache of 100 items is going to decrease redis call rate by >90%

Member (@kimpers, Jun 25, 2024): Ahh nice! 👍 I'm fine with either 100 or 1000 then

- await client.connect();
+ type CacheMetricGauges = {
+ redisHits: Gauge<string>;
+ lruHits: Gauge<string>;
+ misses: Gauge<string>;
+ };

- onTeardown(async () => {
- await client.disconnect();
- });
+ class MultiCache {
+ constructor(
+ private redis: RedisClientType,
+ private lruCache: LRUCache<string, string>,
+ private guages: CacheMetricGauges
+ ) {}

- return instrumentClient(client as RedisClientType);
- }
+ async multiGet(keys: string[]): Promise<(string | null)[]> {
+ const items: Record<string, string | null> = {};
+ const missingLruKeys: string[] = [];

- const instrumentClient = (client: RedisClientType): RedisClientType => {
- const hitsGauge = new Gauge({
- name: "cache_hits_total",
- help: "Total number of cache hits",
- registers: [register],
- });
+ for (const key of keys) {
+ const value = this.lruCache.get(key);

- const missesGauge = new Gauge({
- name: "cache_misses_total",
- help: "Total number of cache misses",
- registers: [register],
- });

- return new Proxy(client, {
- get(target, prop, receiver) {
- if (prop === "get") {
- return async (key: string) => {
- const value = await target.get(key);
+ if (value) {
+ items[key] = value;
+ this.guages.lruHits.inc();
+ } else {
+ missingLruKeys.push(key);
+ }
+ }

- if (value) {
- hitsGauge.inc();
- } else {
- missesGauge.inc();
- }
+ if (missingLruKeys.length > 0) {
+ const redisItems = await this.redis.mGet(missingLruKeys);

- return value;
- };
+ for (const [i, maybeIdl] of redisItems.entries()) {
+ const key = missingLruKeys[i]!;
+ items[key] = maybeIdl;
+ if (maybeIdl) {
+ this.guages.redisHits.inc();
+ this.lruCache.set(key, maybeIdl);
+ } else {
+ this.guages.misses.inc();
+ }
+ }
+ }

- const value = Reflect.get(target, prop, receiver);
+ return keys.map((key) => items[key] ?? null);
}

- if (typeof value === "function") {
- return value.bind(target);
- }
- },
+ async set(key: string, value: string, options: { EX: number }): Promise<void> {
+ this.lruCache.set(key, value);
+ await this.redis.set(key, value, options);
+ }

+ async teardown() {
+ await this.redis.disconnect();
+ }
+ }

+ export async function createCache(): Promise<MultiCache> {
+ const redisClient = createClient({
+ url: config.REDIS_URL,
+ });
- };
+ await redisClient.connect();

+ const lruCache = new LRUCache<string, string>({
+ max: LRU_CACHE_MAX_ITEMS_COUNT,
Member: We should use the same ttl for the in-memory cache as redis

+ });

+ const multiCache = new MultiCache(redisClient as RedisClientType, lruCache, {
+ redisHits: new Gauge({
+ name: "redis_cache_hits_total",
+ help: "Total number of redis cache hits",
+ registers: [register],
+ }),
+ lruHits: new Gauge({
+ name: "lru_cache_hits_total",
+ help: "Total number of lru-cache hits",
+ registers: [register],
+ }),
+ misses: new Gauge({
+ name: "cache_misses_total",
+ help: "Total number of cache misses",
+ registers: [register],
+ }),
+ });

+ onTeardown(async () => {
+ await multiCache.teardown();
+ });

+ return multiCache;
+ }
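The unresolved review note about the in-memory TTL maps onto lru-cache's per-cache ttl option, which is expressed in milliseconds. A minimal sketch of that follow-up, assuming the one-hour IDL_CACHE_TTL (in seconds, currently defined in idls.ts) were shared with this module:

const lruCache = new LRUCache<string, string>({
  max: LRU_CACHE_MAX_ITEMS_COUNT,
  // Expire in-memory entries on the same schedule as the redis EX ttl.
  ttl: IDL_CACHE_TTL * 1000,
});

With a ttl configured, lru-cache treats expired entries as misses on get, so a stale IDL falls back to redis (and from there to the RPC fetch) the same way an evicted one does.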
18 changes: 17 additions & 1 deletion packages/explorerkit-server/src/server.test.ts
@@ -4,8 +4,24 @@ import { describe, expect, it, vi } from "vitest";
import { app } from "@/server";

vi.mock("@/core/shared-dependencies", (loadActual) => {
+ class MultiCacheMock {
+ private data: Record<string, string> = {};
+
+ async get(key: string) {
+ return this.data[key] || null;
+ }
+
+ async multiGet(keys: string[]) {
+ return keys.map((key) => this.data[key] || null);
+ }
+
+ async set(key: string, value: string) {
+ this.data[key] = value;
+ }
+ }
+
const deps = {
- cache: new Map(),
+ cache: new MultiCacheMock(),
};

return {
28 changes: 20 additions & 8 deletions packages/explorerkit-server/src/server.ts
@@ -1,4 +1,4 @@
- import { AccountParserInterface, ParserType } from "@solanafm/explorer-kit";
+ import { checkIfAccountParser, ParserType } from "@solanafm/explorer-kit";
import bodyParser from "body-parser";
import express, { Express, Request, Response } from "express";
import { collectDefaultMetrics } from "prom-client";
@@ -76,7 +76,13 @@ app.post("/decode/accounts", responseDurationMiddleware, async (req: Request, re
}

// Parse the account
- let accountParser = parser.createParser(ParserType.ACCOUNT) as AccountParserInterface;
+ const accountParser = parser.createParser(ParserType.ACCOUNT);

+ if (!accountParser || !checkIfAccountParser(accountParser)) {
+ decodedAccounts.push({ decodedData: null });
+ continue;
+ }

const decodedData = accountParser.parseAccount(account.data);
decodedAccounts.push({
decodedData: decodedData
@@ -96,7 +102,9 @@ const decodeErrorsSchema = z.object({
errors: z.array(
z
.object({
- programId: z.string(),
+ programId: z.string().refine(isValidBase58, {
+ message: "error.programId is not a valid base58 string",
+ }),
errorCode: z.coerce.number().nullable().optional(),
})
.nullable()
@@ -132,13 +140,17 @@ const decodeInstructionsSchema = z.object({
.array(
z.object({
topLevelInstruction: z.object({
- programId: z.string(),
+ programId: z.string().refine(isValidBase58, {
+ message: "topLevelInstruction.programId is not a valid base58 string",
+ }),
encodedData: z.string(),
accountKeys: z.array(z.string()),
}),
flattenedInnerInstructions: z.array(
z.object({
- programId: z.string(),
+ programId: z.string().refine(isValidBase58, {
+ message: "flattenedInnerInstructions.programId is not a valid base58 string",
+ }),
encodedData: z.string(),
accountKeys: z.array(z.string()),
})
@@ -174,11 +186,11 @@ app.post("/decode/instructions", responseDurationMiddleware, async (req: Request

for (const instruction of transactionInstructions) {
// First decode top level ix, then all nested ixs
- const decodedTopLevelInstruction = await decodeInstruction(idls, instruction.topLevelInstruction);
+ const decodedTopLevelInstruction = decodeInstruction(idls, instruction.topLevelInstruction);
const decodedInnerInstruction = [];

for (const innerInstruction of instruction.flattenedInnerInstructions) {
- decodedInnerInstruction.push(await decodeInstruction(idls, innerInstruction));
+ decodedInnerInstruction.push(decodeInstruction(idls, innerInstruction));
}

decodedTransaction.push({
Expand All @@ -191,7 +203,7 @@ app.post("/decode/instructions", responseDurationMiddleware, async (req: Request

return res.status(200).json({ decodedTransactions });
} catch (e: any) {
console.error("failed to decode instructions", e);
console.error("failed to decode instructions", e, JSON.stringify(data));
return res.status(500).json({ error: e.message });
}
});
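The new refine calls rely on an isValidBase58 helper whose definition is outside this diff. A minimal sketch of what such a check could look like, assuming it leans on the bs58 dependency already listed in the server's package.json:

import bs58 from "bs58";

function isValidBase58(value: string): boolean {
  try {
    // bs58.decode throws on characters outside the base58 alphabet.
    return bs58.decode(value).length > 0;
  } catch {
    return false;
  }
}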
@@ -87,7 +87,6 @@ export const createShankTokenAccount: (idlItem: IdlItem) => AccountParserInterfa

return null;
} catch (error) {
- console.error(error);
return null;
}
};