diff --git a/package.json b/package.json
index 4bf4832..5568a05 100644
--- a/package.json
+++ b/package.json
@@ -3,7 +3,7 @@
   "repository": {
     "url": "git+https://github.com/Digital-Alchemy-TS/core"
   },
-  "version": "24.7.1",
+  "version": "24.7.2",
   "author": {
     "url": "https://github.com/zoe-codez",
     "name": "Zoe Codez"
diff --git a/src/extensions/cache.extension.ts b/src/extensions/cache.extension.ts
index eb69ca9..58610bf 100644
--- a/src/extensions/cache.extension.ts
+++ b/src/extensions/cache.extension.ts
@@ -1,8 +1,4 @@
 import {
-  CACHE_DELETE_OPERATIONS_TOTAL,
-  CACHE_DRIVER_ERROR_COUNT,
-  CACHE_GET_OPERATIONS_TOTAL,
-  CACHE_SET_OPERATIONS_TOTAL,
   createMemoryDriver,
   createRedisDriver,
   ICacheDriver,
@@ -36,10 +32,10 @@ export function Cache({
       `init cache`,
     );
     if (config.boilerplate.CACHE_PROVIDER === "redis") {
-      client = await createRedisDriver({ config, lifecycle, logger });
+      client = await createRedisDriver({ config, internal, lifecycle, logger });
       return;
     }
-    client = await createMemoryDriver({ config, lifecycle, logger });
+    client = await createMemoryDriver({ config, internal, lifecycle, logger });
   });
 
   // #MARK: Return object
@@ -49,12 +45,14 @@ export function Cache({
       try {
         const fullKey = fullKeyName(key);
         await client.del(fullKey);
-        CACHE_DELETE_OPERATIONS_TOTAL.inc({
+        internal.boilerplate.metrics.CACHE_DELETE_OPERATIONS_TOTAL.inc({
           key: fullKey,
           prefix: prefix(),
         });
       } catch (error) {
-        CACHE_DRIVER_ERROR_COUNT.labels("del").inc();
+        internal.boilerplate.metrics.CACHE_DRIVER_ERROR_COUNT.labels(
+          "del",
+        ).inc();
         logger.error({ error, name: "del" }, `cache error`);
       }
     },
@@ -62,7 +60,7 @@ export function Cache({
       try {
         const fullKey = fullKeyName(key);
         const result = await client.get(fullKey);
-        CACHE_GET_OPERATIONS_TOTAL.inc({
+        internal.boilerplate.metrics.CACHE_GET_OPERATIONS_TOTAL.inc({
           hit_miss: is.undefined(result) ? "miss" : "hit",
           key: fullKey,
           prefix: prefix(),
@@ -70,7 +68,9 @@ export function Cache({
         return is.undefined(result) ? defaultValue : (result as T);
       } catch (error) {
         logger.warn({ defaultValue, error, key, name: "get" }, `cache error`);
-        CACHE_DRIVER_ERROR_COUNT.labels("get").inc();
+        internal.boilerplate.metrics.CACHE_DRIVER_ERROR_COUNT.labels(
+          "get",
+        ).inc();
         return defaultValue;
       }
     },
@@ -80,7 +80,9 @@ export function Cache({
         const keys = await client.keys(fullPattern);
         return keys.map((key) => key.slice(Math.max(NONE, prefix().length)));
       } catch (error) {
-        CACHE_DRIVER_ERROR_COUNT.labels("keys").inc();
+        internal.boilerplate.metrics.CACHE_DRIVER_ERROR_COUNT.labels(
+          "keys",
+        ).inc();
         logger.warn({ error, name: "keys" }, `cache error`);
         return [];
       }
@@ -93,12 +95,14 @@ export function Cache({
       try {
         const fullKey = fullKeyName(key);
         await client.set(fullKey, value, ttl);
-        CACHE_SET_OPERATIONS_TOTAL.inc({
+        internal.boilerplate.metrics.CACHE_SET_OPERATIONS_TOTAL.inc({
           key: fullKey,
           prefix: config.boilerplate.CACHE_PREFIX,
         });
       } catch (error) {
-        CACHE_DRIVER_ERROR_COUNT.labels("set").inc();
+        internal.boilerplate.metrics.CACHE_DRIVER_ERROR_COUNT.labels(
+          "set",
+        ).inc();
         logger.error({ error, name: "set" }, `cache error`);
       }
     },
diff --git a/src/extensions/fetch.extension.ts b/src/extensions/fetch.extension.ts
index fcef005..d68eea9 100644
--- a/src/extensions/fetch.extension.ts
+++ b/src/extensions/fetch.extension.ts
@@ -5,9 +5,6 @@ import { promisify } from "util";
 import {
   buildFilterString,
   DownloadOptions,
-  FETCH_DOWNLOAD_REQUESTS_SUCCESSFUL,
-  FETCH_REQUESTS_FAILED,
-  FETCH_REQUESTS_SUCCESSFUL,
   FetchArguments,
   FetcherOptions,
   FetchProcessTypes,
@@ -23,7 +20,11 @@ import { is } from ".";
 
 const streamPipeline = promisify(pipeline);
 
-export function Fetch({ logger, context: parentContext }: TServiceParams) {
+export function Fetch({
+  logger,
+  context: parentContext,
+  internal,
+}: TServiceParams) {
   return ({
     headers: base_headers,
     baseUrl: base_url,
@@ -111,13 +112,19 @@ export function Fetch({ logger, context: parentContext }: TServiceParams) {
       try {
         const out = await exec();
         if (!is.empty(label)) {
-          FETCH_REQUESTS_SUCCESSFUL.labels(context, label).inc();
+          internal.boilerplate.metrics.FETCH_REQUESTS_SUCCESSFUL.labels(
+            context,
+            label,
+          ).inc();
         }
         return out;
       } catch (error) {
         logger.error({ error, name: logContext }, `request failed`);
         if (!is.empty(label)) {
-          FETCH_REQUESTS_FAILED.labels(context, label).inc();
+          internal.boilerplate.metrics.FETCH_REQUESTS_FAILED.labels(
+            context,
+            label,
+          ).inc();
         }
         throw error;
       }
@@ -148,7 +155,10 @@ export function Fetch({ logger, context: parentContext }: TServiceParams) {
       });
       return await fetchHandleResponse(process, result);
     });
-    FETCH_REQUESTS_SUCCESSFUL.labels(context, label).inc();
+    internal.boilerplate.metrics.FETCH_REQUESTS_SUCCESSFUL.labels(
+      context,
+      label,
+    ).inc();
     return out;
   }
 
@@ -171,7 +181,10 @@ export function Fetch({ logger, context: parentContext }: TServiceParams) {
     const stream = createWriteStream(destination);
     await streamPipeline(response.body, stream);
     if (!is.empty(label)) {
-      FETCH_DOWNLOAD_REQUESTS_SUCCESSFUL.labels(context, label).inc();
+      internal.boilerplate.metrics.FETCH_DOWNLOAD_REQUESTS_SUCCESSFUL.labels(
+        context,
+        label,
+      ).inc();
     }
   }
 
diff --git a/src/extensions/index.ts b/src/extensions/index.ts
index ad1fa76..e03d3b3 100644
--- a/src/extensions/index.ts
+++ b/src/extensions/index.ts
@@ -4,5 +4,6 @@ export * from "./fetch.extension";
 export * from "./internal.extension";
 export * from "./is.extension";
 export * from "./logger.extension";
+export * from "./metrics.extension";
 export * from "./scheduler.extension";
 export * from "./wiring.extension";
diff --git a/src/extensions/internal.extension.ts b/src/extensions/internal.extension.ts
index 98b2198..9e2f4f3 100644
--- a/src/extensions/internal.extension.ts
+++ b/src/extensions/internal.extension.ts
@@ -221,7 +221,7 @@ export class InternalDefinition {
    */
   public boilerplate: Pick<
     GetApis,
-    "configuration" | "fetch" | "logger"
+    "configuration" | "fetch" | "logger" | "metrics"
   >;
   public boot: {
     /**
diff --git a/src/extensions/metrics.extension.ts b/src/extensions/metrics.extension.ts
new file mode 100644
index 0000000..288a060
--- /dev/null
+++ b/src/extensions/metrics.extension.ts
@@ -0,0 +1,170 @@
+/* eslint-disable @typescript-eslint/no-magic-numbers */
+import { Counter, Gauge, Histogram, Summary } from "prom-client";
+
+const build = () => {
+  /**
+   * Cache delete operations counter
+   */
+  const CACHE_DELETE_OPERATIONS_TOTAL = new Counter({
+    help: "Total number of cache delete operations",
+    labelNames: ["prefix", "key"] as const,
+    name: "digital_alchemy_boilerplate_cache_delete_operations_total",
+  });
+
+  /**
+   * Cache get operations counter
+   */
+  const CACHE_GET_OPERATIONS_TOTAL = new Counter({
+    help: "Total number of cache get operations",
+    labelNames: ["prefix", "key", "hit_miss"] as const,
+    name: "digital_alchemy_boilerplate_cache_get_operations_total",
+  });
+
+  /**
+   * Tracks the number of times a scheduled task has been executed.
+   * Labels:
+   * - context: The broader category or module the schedule belongs to.
+   * - label: A user-defined label to identify the specific schedule.
+   */
+  const SCHEDULE_EXECUTION_COUNT = new Counter({
+    help: "Counts the number of times a scheduled task has been executed",
+    labelNames: ["context", "label"] as const,
+    name: "digital_alchemy_boilerplate_schedule_execution_count",
+  });
+
+  /**
+   * Counts the number of errors occurred during scheduled task executions.
+   * Labels:
+   * - context: The broader category or module the schedule belongs to.
+   * - label: A user-defined label to identify the specific schedule where the error occurred.
+   */
+  const SCHEDULE_ERRORS = new Counter({
+    help: "Counts the number of errors during scheduled task executions",
+    labelNames: ["context", "label"] as const,
+    name: "digital_alchemy_boilerplate_schedule_errors",
+  });
+
+  /**
+   * Summary for Execution Time
+   */
+  const SCHEDULE_EXECUTION_TIME = new Summary({
+    help: "Measures the duration of each cron job or interval execution",
+    labelNames: ["context", "label"] as const,
+    name: "digital_alchemy_boilerplate_schedule_execution_time",
+    // These percentiles are just examples; adjust them based on what's relevant for your analysis
+    percentiles: [0.5, 0.9, 0.99],
+  });
+
+  /**
+   * Metric to count errors in cache driver
+   */
+  const CACHE_DRIVER_ERROR_COUNT = new Counter({
+    help: "Counts the number of errors caught in the cache driver",
+    labelNames: ["methodName"] as const,
+    name: "digital_alchemy_boilerplate_cache_driver_error_count",
+  });
+
+  /**
+   * Cache set operations counter
+   */
+  const CACHE_SET_OPERATIONS_TOTAL = new Counter({
+    help: "Total number of cache set operations",
+    labelNames: ["prefix", "key"] as const,
+    name: "digital_alchemy_boilerplate_cache_set_operations_total",
+  });
+
+  /**
+   * Counts the total number of initiated fetch requests.
+   */
+  const FETCH_REQUESTS_INITIATED = new Counter({
+    help: "Total number of fetch requests that have been initiated",
+    name: "digital_alchemy_boilerplate_fetch_requests_initiated_total",
+  });
+
+  /**
+   * Counts the total number of successfully completed fetch requests.
+   */
+  const FETCH_REQUESTS_SUCCESSFUL = new Counter({
+    help: "Total number of fetch requests that have been successfully completed",
+    labelNames: ["context", "label"] as const,
+    name: "digital_alchemy_boilerplate_fetch_requests_successful_total",
+  });
+
+  /**
+   * Counts the total number of successfully completed fetch requests.
+   */
+  const FETCH_DOWNLOAD_REQUESTS_SUCCESSFUL = new Counter({
+    help: "Total number of fetch download requests that have been successfully completed",
+    labelNames: ["context", "label"] as const,
+    name: "digital_alchemy_boilerplate_fetch_download_requests_successful_total",
+  });
+
+  /**
+   * Counts the total number of failed fetch requests.
+   */
+  const FETCH_REQUESTS_FAILED = new Counter({
+    help: "Total number of fetch requests that have failed",
+    labelNames: ["context", "label"] as const,
+    name: "digital_alchemy_boilerplate_fetch_requests_failed_total",
+  });
+
+  /**
+   * Gauge to count the number of errors encountered in Redis operations.
+   */
+  const REDIS_ERROR_COUNT = new Gauge({
+    help: "Counts the number of errors encountered in Redis operations",
+    name: "digital_alchemy_boilerplate_redis_error_count",
+  });
+
+  /**
+   * Histogram to track the latency of Redis operations in milliseconds.
+   * Buckets range from 0.1 ms to 1000 ms (1 second) for granular latency measurement.
+   */
+  const REDIS_OPERATION_LATENCY_MS = new Histogram({
+    buckets: [0.1, 0.5, 1, 5, 10, 20, 50, 100, 200, 500, 1000],
+    help: "Histogram for tracking the latency of Redis operations in milliseconds",
+    name: "digital_alchemy_boilerplate_redis_operation_latency_ms",
+  });
+
+  /**
+   * Counter to track the number of errors encountered in memory cache operations.
+   */
+  const MEMORY_CACHE_ERROR_COUNT = new Counter({
+    help: "Counts the number of errors encountered in memory cache operations",
+    name: "digital_alchemy_boilerplate_memory_cache_error_count",
+  });
+
+  /**
+   * A Prometheus gauge metric that tracks the number of unique context entries in the logger's context cache.
+   * This helps in monitoring and managing the memory usage associated with the caching of logger contexts.
+   */
+  const LOGGER_CONTEXT_ENTRIES_COUNT = new Gauge({
+    help: "Number of unique context entries in the logger context cache",
+    name: "digital_alchemy_boilerplate_logger_context_entries_count",
+  });
+
+  return {
+    CACHE_DELETE_OPERATIONS_TOTAL,
+    CACHE_DRIVER_ERROR_COUNT,
+    CACHE_GET_OPERATIONS_TOTAL,
+    CACHE_SET_OPERATIONS_TOTAL,
+    FETCH_DOWNLOAD_REQUESTS_SUCCESSFUL,
+    FETCH_REQUESTS_FAILED,
+    FETCH_REQUESTS_INITIATED,
+    FETCH_REQUESTS_SUCCESSFUL,
+    LOGGER_CONTEXT_ENTRIES_COUNT,
+    MEMORY_CACHE_ERROR_COUNT,
+    REDIS_ERROR_COUNT,
+    REDIS_OPERATION_LATENCY_MS,
+    SCHEDULE_ERRORS,
+    SCHEDULE_EXECUTION_COUNT,
+    SCHEDULE_EXECUTION_TIME,
+  };
+};
+
+let metrics: ReturnType<typeof build>;
+
+export function Metrics() {
+  metrics ??= build();
+  return metrics;
+}
diff --git a/src/extensions/scheduler.extension.ts b/src/extensions/scheduler.extension.ts
index 1001f58..89f4980 100644
--- a/src/extensions/scheduler.extension.ts
+++ b/src/extensions/scheduler.extension.ts
@@ -6,9 +6,6 @@ import { is, TBlackHole, TContext } from "..";
 import {
   BootstrapException,
   Schedule,
-  SCHEDULE_ERRORS,
-  SCHEDULE_EXECUTION_COUNT,
-  SCHEDULE_EXECUTION_TIME,
   SchedulerOptions,
   TServiceParams,
 } from "../helpers";
@@ -49,10 +46,10 @@ export function Scheduler({ logger, lifecycle, internal }: TServiceParams) {
         cronSchedule,
         async () =>
           await internal.safeExec({
-            duration: SCHEDULE_EXECUTION_TIME,
-            errors: SCHEDULE_ERRORS,
+            duration: internal.boilerplate.metrics.SCHEDULE_EXECUTION_TIME,
+            errors: internal.boilerplate.metrics.SCHEDULE_ERRORS,
             exec,
-            executions: SCHEDULE_EXECUTION_COUNT,
+            executions: internal.boilerplate.metrics.SCHEDULE_EXECUTION_COUNT,
             labels: { context, label },
           }),
       );
@@ -93,10 +90,10 @@ export function Scheduler({ logger, lifecycle, internal }: TServiceParams) {
       runningInterval = setInterval(
         async () =>
           await internal.safeExec({
-            duration: SCHEDULE_EXECUTION_TIME,
-            errors: SCHEDULE_ERRORS,
+            duration: internal.boilerplate.metrics.SCHEDULE_EXECUTION_TIME,
+            errors: internal.boilerplate.metrics.SCHEDULE_ERRORS,
             exec,
-            executions: SCHEDULE_EXECUTION_COUNT,
+            executions: internal.boilerplate.metrics.SCHEDULE_EXECUTION_COUNT,
             labels: { context, label },
           }),
         interval,
@@ -167,10 +164,11 @@ export function Scheduler({ logger, lifecycle, internal }: TServiceParams) {
       timeout = setTimeout(
         async () => {
           await internal.safeExec({
-              duration: SCHEDULE_EXECUTION_TIME,
-              errors: SCHEDULE_ERRORS,
+              duration: internal.boilerplate.metrics.SCHEDULE_EXECUTION_TIME,
+              errors: internal.boilerplate.metrics.SCHEDULE_ERRORS,
              exec,
-              executions: SCHEDULE_EXECUTION_COUNT,
+              executions:
+                internal.boilerplate.metrics.SCHEDULE_EXECUTION_COUNT,
              labels: { context, label },
            });
         },
diff --git a/src/extensions/wiring.extension.ts b/src/extensions/wiring.extension.ts
index ca61b71..d914aef 100644
--- a/src/extensions/wiring.extension.ts
+++ b/src/extensions/wiring.extension.ts
@@ -27,7 +27,7 @@ import {
   WireOrder,
   WIRING_CONTEXT,
 } from "../helpers";
-import { InternalDefinition, is } from ".";
+import { InternalDefinition, is, Metrics } from ".";
 import { Cache } from "./cache.extension";
 import {
   Configuration,
@@ -89,12 +89,13 @@ function CreateBoilerplate() {
     name: "boilerplate",
     // > 🐔 🥚 dependencies
     // config system internally resolves this via lifecycle events
-    priorityInit: ["configuration", "logger"],
+    priorityInit: ["metrics", "configuration", "logger"],
     services: {
       cache: Cache,
       configuration: Configuration,
       fetch: Fetch,
       logger: Logger,
+      metrics: Metrics,
       scheduler: Scheduler,
     },
   });
diff --git a/src/helpers/cache-memory.helper.ts b/src/helpers/cache-memory.helper.ts
index 178a96c..b387a82 100644
--- a/src/helpers/cache-memory.helper.ts
+++ b/src/helpers/cache-memory.helper.ts
@@ -2,13 +2,12 @@
 import NodeCache from "node-cache";
 
 import { CacheDriverOptions, ICacheDriver, is } from "..";
-import { MEMORY_CACHE_ERROR_COUNT } from "./metrics.helper";
 
 /**
  * url & name properties automatically generated from config
 */
 export function createMemoryDriver(
-  { logger, config, lifecycle }: CacheDriverOptions,
+  { logger, config, lifecycle, internal }: CacheDriverOptions,
   options?: NodeCache.Options,
 ): ICacheDriver {
   let client = new NodeCache({
@@ -27,7 +26,7 @@ export function createMemoryDriver(
       client.del(key);
     } catch (error) {
       logger.error({ err: error, name: "del" }, "memory cache error");
-      MEMORY_CACHE_ERROR_COUNT.inc();
+      internal.boilerplate.metrics.MEMORY_CACHE_ERROR_COUNT.inc();
     }
   },
   async get<T>(key: string, defaultValue?: T): Promise<T> {
@@ -39,7 +38,7 @@ export function createMemoryDriver(
       return defaultValue;
     } catch (error) {
       logger.error({ err: error, name: "get" }, "memory cache error");
-      MEMORY_CACHE_ERROR_COUNT.inc();
+      internal.boilerplate.metrics.MEMORY_CACHE_ERROR_COUNT.inc();
       return defaultValue;
     }
   },
@@ -51,7 +50,7 @@ export function createMemoryDriver(
         : allKeys;
     } catch (error) {
       logger.error({ err: error, name: "keys" }, "memory cache error");
-      MEMORY_CACHE_ERROR_COUNT.inc();
+      internal.boilerplate.metrics.MEMORY_CACHE_ERROR_COUNT.inc();
       return [];
     }
   },
@@ -60,7 +59,7 @@ export function createMemoryDriver(
       client.set(key, JSON.stringify(value), ttl);
     } catch (error) {
       logger.error({ err: error, name: "set" }, "memory cache error");
-      MEMORY_CACHE_ERROR_COUNT.inc();
+      internal.boilerplate.metrics.MEMORY_CACHE_ERROR_COUNT.inc();
     }
   },
 };
diff --git a/src/helpers/cache-redis.helper.ts b/src/helpers/cache-redis.helper.ts
index 5b9d4bd..86a0e3e 100644
--- a/src/helpers/cache-redis.helper.ts
+++ b/src/helpers/cache-redis.helper.ts
@@ -3,15 +3,11 @@ import { createClient } from "redis";
 
 import { CacheDriverOptions, ICacheDriver, is, SECOND } from "..";
-import {
-  REDIS_ERROR_COUNT,
-  REDIS_OPERATION_LATENCY_MS,
-} from "./metrics.helper";
 
 /**
  * url & name properties automatically generated from config
 */
 export async function createRedisDriver(
-  { logger, config, lifecycle }: CacheDriverOptions,
+  { logger, config, lifecycle, internal }: CacheDriverOptions,
   options?: Parameters<typeof createClient>[0],
 ): Promise<ICacheDriver> {
   let client = createClient({
@@ -32,7 +28,7 @@ export async function createRedisDriver(
       await client.del(key);
     } catch (error) {
       logger.error({ err: error, name: "del" }, "redis cache error");
-      REDIS_ERROR_COUNT.inc();
+      internal.boilerplate.metrics.REDIS_ERROR_COUNT.inc();
     }
   },
   async get<T>(key: string, defaultValue?: T): Promise<T> {
@@ -41,14 +37,16 @@ export async function createRedisDriver(
       const out = await client.get(key);
       const diff = process.hrtime(start);
       const durationInMilliseconds = diff[0] * SECOND + diff[1] / 1e6;
-      REDIS_OPERATION_LATENCY_MS.observe(durationInMilliseconds);
+      internal.boilerplate.metrics.REDIS_OPERATION_LATENCY_MS.observe(
+        durationInMilliseconds,
+      );
       if (out !== null && is.string(out)) {
         return JSON.parse(out) as T;
       }
       return defaultValue;
     } catch (error) {
       logger.error({ err: error, name: "get" }, "redis cache error");
-      REDIS_ERROR_COUNT.inc();
+      internal.boilerplate.metrics.REDIS_ERROR_COUNT.inc();
       return defaultValue;
     }
   },
@@ -57,7 +55,7 @@ export async function createRedisDriver(
       return await client.keys(pattern || "*");
     } catch (error) {
       logger.error({ err: error, name: "keys" }, "redis cache error");
-      REDIS_ERROR_COUNT.inc();
+      internal.boilerplate.metrics.REDIS_ERROR_COUNT.inc();
       return [];
     }
   },
@@ -68,7 +66,7 @@ export async function createRedisDriver(
       });
     } catch (error) {
       logger.error({ err: error, name: "set" }, "redis cache error");
-      REDIS_ERROR_COUNT.inc();
+      internal.boilerplate.metrics.REDIS_ERROR_COUNT.inc();
     }
   },
 };
diff --git a/src/helpers/cache.helper.ts b/src/helpers/cache.helper.ts
index ab5f8ed..ed9a89f 100644
--- a/src/helpers/cache.helper.ts
+++ b/src/helpers/cache.helper.ts
@@ -2,7 +2,7 @@ import { TServiceParams } from ".";
 
 export type CacheDriverOptions = Pick<
   TServiceParams,
-  "logger" | "config" | "lifecycle"
+  "logger" | "config" | "lifecycle" | "internal"
 >;
 
 export interface ICacheDriver {
diff --git a/src/helpers/index.ts b/src/helpers/index.ts
index 78115b6..3669e2b 100644
--- a/src/helpers/index.ts
+++ b/src/helpers/index.ts
@@ -12,6 +12,5 @@ export * from "./events.helper";
 export * from "./extend.helper";
 export * from "./fetch.helper";
 export * from "./lifecycle.helper";
-export * from "./metrics.helper";
 export * from "./utilities.helper";
 export * from "./wiring.helper";
diff --git a/src/helpers/metrics.helper.ts b/src/helpers/metrics.helper.ts
deleted file mode 100644
index 5ec5903..0000000
--- a/src/helpers/metrics.helper.ts
+++ /dev/null
@@ -1,143 +0,0 @@
-/* eslint-disable @typescript-eslint/no-magic-numbers */
-import { Counter, Gauge, Histogram, Summary } from "prom-client";
-
-/**
- * Cache delete operations counter
- */
-export const CACHE_DELETE_OPERATIONS_TOTAL = new Counter({
-  help: "Total number of cache delete operations",
-  labelNames: ["prefix", "key"] as const,
-  name: "digital_alchemy_boilerplate_cache_delete_operations_total",
-});
-
-/**
- * Cache get operations counter
- */
-export const CACHE_GET_OPERATIONS_TOTAL = new Counter({
-  help: "Total number of cache get operations",
-  labelNames: ["prefix", "key", "hit_miss"] as const,
-  name: "digital_alchemy_boilerplate_cache_get_operations_total",
-});
-
-/**
- * Tracks the number of times a scheduled task has been executed.
- * Labels:
- * - context: The broader category or module the schedule belongs to.
- * - label: A user-defined label to identify the specific schedule.
- */
-export const SCHEDULE_EXECUTION_COUNT = new Counter({
-  help: "Counts the number of times a scheduled task has been executed",
-  labelNames: ["context", "label"] as const,
-  name: "digital_alchemy_boilerplate_schedule_execution_count",
-});
-
-/**
- * Counts the number of errors occurred during scheduled task executions.
- * Labels:
- * - context: The broader category or module the schedule belongs to.
- * - label: A user-defined label to identify the specific schedule where the error occurred.
- */
-export const SCHEDULE_ERRORS = new Counter({
-  help: "Counts the number of errors during scheduled task executions",
-  labelNames: ["context", "label"] as const,
-  name: "digital_alchemy_boilerplate_schedule_errors",
-});
-
-/**
- * Summary for Execution Time
- */
-export const SCHEDULE_EXECUTION_TIME = new Summary({
-  help: "Measures the duration of each cron job or interval execution",
-  labelNames: ["context", "label"] as const,
-  name: "digital_alchemy_boilerplate_schedule_execution_time",
-  // These percentiles are just examples; adjust them based on what's relevant for your analysis
-  percentiles: [0.5, 0.9, 0.99],
-});
-
-/**
- * Metric to count errors in cache driver
- */
-export const CACHE_DRIVER_ERROR_COUNT = new Counter({
-  help: "Counts the number of errors caught in the cache driver",
-  labelNames: ["methodName"] as const,
-  name: "digital_alchemy_boilerplate_cache_driver_error_count",
-});
-
-/**
- * Cache set operations counter
- */
-export const CACHE_SET_OPERATIONS_TOTAL = new Counter({
-  help: "Total number of cache set operations",
-  labelNames: ["prefix", "key"] as const,
-  name: "digital_alchemy_boilerplate_cache_set_operations_total",
-});
-
-/**
- * Counts the total number of initiated fetch requests.
- */
-export const FETCH_REQUESTS_INITIATED = new Counter({
-  help: "Total number of fetch requests that have been initiated",
-  name: "digital_alchemy_boilerplate_fetch_requests_initiated_total",
-});
-
-/**
- * Counts the total number of successfully completed fetch requests.
- */
-export const FETCH_REQUESTS_SUCCESSFUL = new Counter({
-  help: "Total number of fetch requests that have been successfully completed",
-  labelNames: ["context", "label"] as const,
-  name: "digital_alchemy_boilerplate_fetch_requests_successful_total",
-});
-
-/**
- * Counts the total number of successfully completed fetch requests.
- */
-export const FETCH_DOWNLOAD_REQUESTS_SUCCESSFUL = new Counter({
-  help: "Total number of fetch download requests that have been successfully completed",
-  labelNames: ["context", "label"] as const,
-  name: "digital_alchemy_boilerplate_fetch_download_requests_successful_total",
-});
-
-/**
- * Counts the total number of failed fetch requests.
- */
-export const FETCH_REQUESTS_FAILED = new Counter({
-  help: "Total number of fetch requests that have failed",
-  labelNames: ["context", "label"] as const,
-  name: "digital_alchemy_boilerplate_fetch_requests_failed_total",
-});
-
-/**
- * Gauge to count the number of errors encountered in Redis operations.
- */
-export const REDIS_ERROR_COUNT = new Gauge({
-  help: "Counts the number of errors encountered in Redis operations",
-  name: "digital_alchemy_boilerplate_redis_error_count",
-});
-
-/**
- * Histogram to track the latency of Redis operations in milliseconds.
- * Buckets range from 0.1 ms to 1000 ms (1 second) for granular latency measurement.
- */
-export const REDIS_OPERATION_LATENCY_MS = new Histogram({
-  buckets: [0.1, 0.5, 1, 5, 10, 20, 50, 100, 200, 500, 1000],
-  help: "Histogram for tracking the latency of Redis operations in milliseconds",
-  name: "digital_alchemy_boilerplate_redis_operation_latency_ms",
-});
-
-/**
- * Counter to track the number of errors encountered in memory cache operations.
- */
-export const MEMORY_CACHE_ERROR_COUNT = new Counter({
-  help: "Counts the number of errors encountered in memory cache operations",
-  name: "digital_alchemy_boilerplate_memory_cache_error_count",
-});
-
-/**
- * A Prometheus gauge metric that tracks the number of unique context entries in the logger's context cache.
- * This helps in monitoring and managing the memory usage associated with the caching of logger contexts.
- */
-export const LOGGER_CONTEXT_ENTRIES_COUNT = new Gauge({
-  help: "Number of unique context entries in the logger context cache",
-  name: "digital_alchemy_boilerplate_logger_context_entries_count",
-});
diff --git a/src/testing/cache.spec.ts b/src/testing/cache.spec.ts
index dbe4945..c6cfae6 100644
--- a/src/testing/cache.spec.ts
+++ b/src/testing/cache.spec.ts
@@ -1,9 +1,4 @@
-import {
-  CACHE_DELETE_OPERATIONS_TOTAL,
-  CACHE_GET_OPERATIONS_TOTAL,
-  CACHE_SET_OPERATIONS_TOTAL,
-  CreateApplication,
-} from "..";
+import { CreateApplication } from "..";
 import { BASIC_BOOT, ServiceTest } from "./testing.helper";
 
 describe("Cache Extension", () => {
@@ -306,20 +301,14 @@ describe("Cache Extension", () => {
   });
 
   describe("Cache Operation Metrics", () => {
-    beforeEach(async () => {
-      // Resetting metrics before each test
-      await CACHE_DELETE_OPERATIONS_TOTAL.reset();
-      await CACHE_GET_OPERATIONS_TOTAL.reset();
-      await CACHE_SET_OPERATIONS_TOTAL.reset();
-    });
-
     it("should increment CACHE_SET_OPERATIONS_TOTAL on set operations", async () => {
       expect.assertions(1);
-      await ServiceTest(({ cache, lifecycle }) => {
+      await ServiceTest(({ cache, lifecycle, internal }) => {
         lifecycle.onReady(async () => {
           await cache.set("testKey", "testValue");
-          const newCount = (await CACHE_SET_OPERATIONS_TOTAL.get()).values[0]
-            .value;
+          const newCount = (
+            await internal.boilerplate.metrics.CACHE_SET_OPERATIONS_TOTAL.get()
+          ).values[0].value;
           expect(newCount).toBe(1);
         });
       });
@@ -327,11 +316,12 @@ describe("Cache Extension", () => {
 
     it("should increment CACHE_GET_OPERATIONS_TOTAL on get operations", async () => {
       expect.assertions(1);
-      await ServiceTest(({ cache, lifecycle }) => {
+      await ServiceTest(({ cache, lifecycle, internal }) => {
         lifecycle.onReady(async () => {
           await cache.get("testKey");
-          const newCount = (await CACHE_GET_OPERATIONS_TOTAL.get()).values[0]
-            .value;
+          const newCount = (
+            await internal.boilerplate.metrics.CACHE_GET_OPERATIONS_TOTAL.get()
+          ).values[0].value;
           expect(newCount).toBe(1);
         });
       });
@@ -339,11 +329,12 @@ describe("Cache Extension", () => {
 
     it("should increment CACHE_DELETE_OPERATIONS_TOTAL on delete operations", async () => {
       expect.assertions(1);
-      await ServiceTest(({ cache, lifecycle }) => {
+      await ServiceTest(({ cache, lifecycle, internal }) => {
         lifecycle.onReady(async () => {
           await cache.del("testKey");
-          const newCount = (await CACHE_DELETE_OPERATIONS_TOTAL.get()).values[0]
-            .value;
+          const newCount = (
+            await internal.boilerplate.metrics.CACHE_DELETE_OPERATIONS_TOTAL.get()
+          ).values[0].value;
           expect(newCount).toBe(1);
         });
       });
diff --git a/src/testing/testing.helper.ts b/src/testing/testing.helper.ts
index edec9ce..d7fc841 100644
--- a/src/testing/testing.helper.ts
+++ b/src/testing/testing.helper.ts
@@ -16,6 +16,9 @@ export async function ServiceTest(
     services: {
       async Testing(params: TServiceParams) {
         await callback(params);
+        const { metrics } = params.internal.boilerplate;
+        const keys = Object.keys(metrics) as (keyof typeof metrics)[];
+        keys.forEach((key) => metrics[key].reset());
       },
     },
   });
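
Usage sketch (editor's illustration, not part of the patch): with this change the prom-client instruments are no longer importable from the helpers barrel; they are built once inside the Metrics service and reached through internal.boilerplate.metrics, which is how every hunk above consumes them. The snippet below shows a downstream service following the same pattern. It is a minimal sketch only — the package specifier "@digital-alchemy/core" and the service name ExampleService are assumptions, not taken from this diff.

import { TServiceParams } from "@digital-alchemy/core"; // assumed package name

// Illustrative consumer; wiring it into an application is assumed to follow
// the same CreateLibrary / CreateApplication service pattern seen above.
export function ExampleService({ context, internal, logger }: TServiceParams) {
  // all instruments live on the shared object returned by Metrics()
  const metrics = internal.boilerplate.metrics;

  return async function doWork(label: string) {
    try {
      // ... perform some request ...
      metrics.FETCH_REQUESTS_SUCCESSFUL.labels(context, label).inc();
    } catch (error) {
      logger.error({ error }, "work failed");
      metrics.FETCH_REQUESTS_FAILED.labels(context, label).inc();
      throw error;
    }
  };
}

Because build() registers each Counter, Gauge, Histogram, and Summary with prom-client's default registry, Metrics() memoizes the result (metrics ??= build()); constructing the instruments a second time would attempt to re-register the same metric names, which prom-client rejects. This also explains the testing.helper.ts hunk: ServiceTest now resets every instrument on the shared object after each run, replacing the per-test reset() calls removed from cache.spec.ts.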