diff --git a/Cargo.lock b/Cargo.lock index e8315e8049a6..c8d5fc4cecc4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1429,34 +1429,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "axum" -version = "0.6.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" -dependencies = [ - "async-trait", - "axum-core 0.3.4", - "bitflags 1.3.2", - "bytes 1.7.1", - "futures-util", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.30", - "itoa", - "matchit", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite", - "rustversion", - "serde", - "sync_wrapper 0.1.2", - "tower", - "tower-layer", - "tower-service", -] - [[package]] name = "axum" version = "0.7.5" @@ -1464,7 +1436,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" dependencies = [ "async-trait", - "axum-core 0.4.3", + "axum-core", "bytes 1.7.1", "futures-util", "http 1.1.0", @@ -1484,23 +1456,6 @@ dependencies = [ "tower-service", ] -[[package]] -name = "axum-core" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" -dependencies = [ - "async-trait", - "bytes 1.7.1", - "futures-util", - "http 0.2.12", - "http-body 0.4.6", - "mime", - "rustversion", - "tower-layer", - "tower-service", -] - [[package]] name = "axum-core" version = "0.4.3" @@ -3164,12 +3119,12 @@ dependencies = [ "lettre", "masking", "once_cell", - "prost 0.13.2", + "prost", "router_env", "serde", "thiserror", "tokio 1.40.0", - "tonic 0.12.2", + "tonic", "tonic-build", "tonic-reflection", "tonic-types", @@ -3953,18 +3908,6 @@ dependencies = [ "tokio-rustls 0.24.1", ] -[[package]] -name = "hyper-timeout" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" -dependencies = [ - "hyper 0.14.30", - "pin-project-lite", - "tokio 1.40.0", - "tokio-io-timeout", -] - [[package]] name = "hyper-timeout" version = "0.5.1" @@ -5052,9 +4995,9 @@ checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" [[package]] name = "mutually_exclusive_features" -version = "0.0.3" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d02c0b00610773bb7fc61d85e13d86c7858cbdf00e1a120bfc41bc055dbaa0e" +checksum = "e94e1e6445d314f972ff7395df2de295fe51b71821694f0b0e1e79c4f12c8577" [[package]] name = "nanoid" @@ -5417,76 +5360,72 @@ dependencies = [ [[package]] name = "opentelemetry" -version = "0.19.0" +version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4b8347cc26099d3aeee044065ecc3ae11469796b4d65d065a23a584ed92a6f" +checksum = "ab70038c28ed37b97d8ed414b6429d343a8bbf44c9f79ec854f3a643029ba6d7" dependencies = [ - "opentelemetry_api", + "futures-core", + "futures-sink", + "js-sys", + "pin-project-lite", + "thiserror", + "tracing", +] + +[[package]] +name = "opentelemetry-aws" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eacb6bb0b662955ba69d788c979462b079e70903e29867c2303cc1305ec8de" +dependencies = [ + "once_cell", + "opentelemetry", "opentelemetry_sdk", + "tracing", ] [[package]] name = "opentelemetry-otlp" -version = "0.12.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8af72d59a4484654ea8eb183fea5ae4eb6a41d7ac3e3bae5f4d2a282a3a7d3ca" +checksum = "91cf61a1868dacc576bf2b2a1c3e9ab150af7272909e80085c3173384fe11f76" dependencies = [ "async-trait", - "futures 0.3.30", - "futures-util", - "http 0.2.12", + "futures-core", + "http 1.1.0", "opentelemetry", "opentelemetry-proto", - "prost 0.11.9", + "opentelemetry_sdk", + "prost", "thiserror", "tokio 1.40.0", - "tonic 0.8.3", + "tonic", ] [[package]] name = "opentelemetry-proto" -version = "0.2.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "045f8eea8c0fa19f7d48e7bc3128a39c2e5c533d5c61298c548dfefc1064474c" +checksum = "a6e05acbfada5ec79023c85368af14abd0b307c015e9064d249b2a950ef459a6" dependencies = [ - "futures 0.3.30", - "futures-util", "opentelemetry", - "prost 0.11.9", - "tonic 0.8.3", -] - -[[package]] -name = "opentelemetry_api" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed41783a5bf567688eb38372f2b7a8530f5a607a4b49d38dd7573236c23ca7e2" -dependencies = [ - "fnv", - "futures-channel", - "futures-util", - "indexmap 1.9.3", - "once_cell", - "pin-project-lite", - "thiserror", - "urlencoding", + "opentelemetry_sdk", + "prost", + "tonic", ] [[package]] name = "opentelemetry_sdk" -version = "0.19.0" +version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b3a2a91fdbfdd4d212c0dcc2ab540de2c2bcbbd90be17de7a7daf8822d010c1" +checksum = "231e9d6ceef9b0b2546ddf52335785ce41252bc7474ee8ba05bfad277be13ab8" dependencies = [ "async-trait", - "crossbeam-channel", - "dashmap", - "fnv", "futures-channel", "futures-executor", "futures-util", - "once_cell", - "opentelemetry_api", + "glob", + "opentelemetry", "percent-encoding", "rand", "thiserror", @@ -6015,16 +5954,6 @@ dependencies = [ "unarray", ] -[[package]] -name = "prost" -version = "0.11.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" -dependencies = [ - "bytes 1.7.1", - "prost-derive 0.11.9", -] - [[package]] name = "prost" version = "0.13.2" @@ -6032,7 +5961,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b2ecbe40f08db5c006b5764a2645f7f3f141ce756412ac9e1dd6087e6d32995" dependencies = [ "bytes 1.7.1", - "prost-derive 0.13.2", + "prost-derive", ] [[package]] @@ -6049,26 +5978,13 @@ dependencies = [ "once_cell", "petgraph", "prettyplease", - "prost 0.13.2", + "prost", "prost-types", "regex", "syn 2.0.77", "tempfile", ] -[[package]] -name = "prost-derive" -version = "0.11.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" -dependencies = [ - "anyhow", - "itertools 0.10.5", - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "prost-derive" version = "0.13.2" @@ -6088,7 +6004,7 @@ version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60caa6738c7369b940c3d49246a8d1749323674c65cb13010134f5c9bad5b519" dependencies = [ - "prost 0.13.2", + "prost", ] [[package]] @@ -6739,7 +6655,9 @@ dependencies = [ "gethostname", "once_cell", "opentelemetry", + "opentelemetry-aws", "opentelemetry-otlp", + "opentelemetry_sdk", "rustc-hash", "serde", "serde_json", @@ -8460,16 +8378,6 @@ dependencies = [ "log", ] -[[package]] -name = "tokio-io-timeout" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" -dependencies = [ - "pin-project-lite", - "tokio 1.40.0", -] - [[package]] name = "tokio-macros" version = "2.4.0" @@ -8544,9 +8452,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.15" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ "futures-core", "pin-project-lite", @@ -8724,45 +8632,13 @@ dependencies = [ [[package]] name = "tonic" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f219fad3b929bef19b1f86fbc0358d35daed8f2cac972037ac0dc10bbb8d5fb" -dependencies = [ - "async-stream", - "async-trait", - "axum 0.6.20", - "base64 0.13.1", - "bytes 1.7.1", - "futures-core", - "futures-util", - "h2 0.3.26", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.30", - "hyper-timeout 0.4.1", - "percent-encoding", - "pin-project", - "prost 0.11.9", - "prost-derive 0.11.9", - "tokio 1.40.0", - "tokio-stream", - "tokio-util", - "tower", - "tower-layer", - "tower-service", - "tracing", - "tracing-futures", -] - -[[package]] -name = "tonic" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6f6ba989e4b2c58ae83d862d3a3e27690b6e3ae630d0deb59f3697f32aa88ad" +checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" dependencies = [ "async-stream", "async-trait", - "axum 0.7.5", + "axum", "base64 0.22.1", "bytes 1.7.1", "h2 0.4.6", @@ -8770,11 +8646,11 @@ dependencies = [ "http-body 1.0.1", "http-body-util", "hyper 1.4.1", - "hyper-timeout 0.5.1", + "hyper-timeout", "hyper-util", "percent-encoding", "pin-project", - "prost 0.13.2", + "prost", "socket2", "tokio 1.40.0", "tokio-stream", @@ -8803,11 +8679,11 @@ version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b56b874eedb04f89907573b408eab1e87c1c1dce43aac6ad63742f57faa99ff" dependencies = [ - "prost 0.13.2", + "prost", "prost-types", "tokio 1.40.0", "tokio-stream", - "tonic 0.12.2", + "tonic", ] [[package]] @@ -8816,9 +8692,9 @@ version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d967793411bc1a5392accf4731114295f0fd122865d22cde46a8584b03402b2" dependencies = [ - "prost 0.13.2", + "prost", "prost-types", - "tonic 0.12.2", + "tonic", ] [[package]] @@ -8883,9 +8759,9 @@ dependencies = [ [[package]] name = "tracing-actix-web" -version = "0.7.11" +version = "0.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee9e39a66d9b615644893ffc1704d2a89b5b315b7fd0228ad3182ca9a306b19" +checksum = "54a9f5c1aca50ebebf074ee665b9f99f2e84906dcf6b993a0d0090edb835166d" dependencies = [ "actix-web", "mutually_exclusive_features", @@ -8940,17 +8816,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "tracing-log" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - [[package]] name = "tracing-log" version = "0.2.0" @@ -8964,16 +8829,20 @@ dependencies = [ [[package]] name = "tracing-opentelemetry" -version = "0.19.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"00a39dcf9bfc1742fa4d6215253b33a6e474be78275884c216fc2a06267b3600" +checksum = "97a971f6058498b5c0f1affa23e7ea202057a7301dbff68e968b2d578bcbd053" dependencies = [ + "js-sys", "once_cell", "opentelemetry", + "opentelemetry_sdk", + "smallvec 1.13.2", "tracing", "tracing-core", - "tracing-log 0.1.4", + "tracing-log", "tracing-subscriber", + "web-time", ] [[package]] @@ -9003,7 +8872,7 @@ dependencies = [ "thread_local", "tracing", "tracing-core", - "tracing-log 0.2.0", + "tracing-log", "tracing-serde", ] @@ -9453,6 +9322,16 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "webdriver" version = "0.46.0" diff --git a/config/loki.yaml b/config/loki.yaml index da84e3c3e22d..51b4fa3e40da 100644 --- a/config/loki.yaml +++ b/config/loki.yaml @@ -23,9 +23,9 @@ ingester: schema_config: configs: - from: 2020-10-24 - store: boltdb-shipper + store: tsdb object_store: filesystem - schema: v11 + schema: v13 index: prefix: index_ period: 24h diff --git a/config/otel-collector.yaml b/config/otel-collector.yaml index d7d571c7c87a..9e64dbc4e229 100644 --- a/config/otel-collector.yaml +++ b/config/otel-collector.yaml @@ -2,6 +2,7 @@ receivers: otlp: protocols: grpc: + endpoint: 0.0.0.0:4317 exporters: otlp: @@ -9,8 +10,8 @@ exporters: tls: insecure: true - logging: - loglevel: debug + debug: + verbosity: detailed prometheus: endpoint: 0.0.0.0:8889 @@ -21,7 +22,7 @@ exporters: service: telemetry: logs: - level: debug + level: DEBUG metrics: level: detailed address: 0.0.0.0:8888 diff --git a/crates/analytics/src/api_event/core.rs b/crates/analytics/src/api_event/core.rs index 425d1476a476..27cc2a4d6975 100644 --- a/crates/analytics/src/api_event/core.rs +++ b/crates/analytics/src/api_event/core.rs @@ -12,7 +12,6 @@ use common_utils::errors::ReportSwitchExt; use error_stack::ResultExt; use router_env::{ instrument, logger, - metrics::add_attributes, tracing::{self, Instrument}, }; @@ -136,14 +135,14 @@ pub async fn get_api_event_metrics( .change_context(AnalyticsError::UnknownError)? { let data = data?; - let attributes = &add_attributes([ + let attributes = router_env::metric_attributes!( ("metric_type", metric.to_string()), ("source", pool.to_string()), - ]); + ); let value = u64::try_from(data.len()); if let Ok(val) = value { - metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes); + metrics::BUCKETS_FETCHED.record(val, attributes); logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); } for (id, value) in data { diff --git a/crates/analytics/src/disputes/core.rs b/crates/analytics/src/disputes/core.rs index f4235518b2d2..540a14104c1f 100644 --- a/crates/analytics/src/disputes/core.rs +++ b/crates/analytics/src/disputes/core.rs @@ -11,7 +11,6 @@ use api_models::analytics::{ use error_stack::ResultExt; use router_env::{ logger, - metrics::add_attributes, tracing::{self, Instrument}, }; @@ -72,14 +71,14 @@ pub async fn get_metrics( .change_context(AnalyticsError::UnknownError)? 
{ let data = data?; - let attributes = &add_attributes([ + let attributes = router_env::metric_attributes!( ("metric_type", metric.to_string()), ("source", pool.to_string()), - ]); + ); let value = u64::try_from(data.len()); if let Ok(val) = value { - metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes); + metrics::BUCKETS_FETCHED.record(val, attributes); logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); } diff --git a/crates/analytics/src/frm/core.rs b/crates/analytics/src/frm/core.rs index 6f120913a7a4..195266b4191a 100644 --- a/crates/analytics/src/frm/core.rs +++ b/crates/analytics/src/frm/core.rs @@ -9,7 +9,6 @@ use api_models::analytics::{ use error_stack::ResultExt; use router_env::{ logger, - metrics::add_attributes, tracing::{self, Instrument}, }; @@ -66,13 +65,13 @@ pub async fn get_metrics( { let data = data?; - let attributes = &add_attributes([ + let attributes = router_env::metric_attributes!( ("metric_type", metric.to_string()), ("source", pool.to_string()), - ]); + ); let value = u64::try_from(data.len()); if let Ok(val) = value { - metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes); + metrics::BUCKETS_FETCHED.record(val, attributes); logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); } diff --git a/crates/analytics/src/metrics.rs b/crates/analytics/src/metrics.rs index 6222315a8c06..03eab289333d 100644 --- a/crates/analytics/src/metrics.rs +++ b/crates/analytics/src/metrics.rs @@ -1,9 +1,8 @@ -use router_env::{global_meter, histogram_metric, histogram_metric_u64, metrics_context}; +use router_env::{global_meter, histogram_metric_f64, histogram_metric_u64}; -metrics_context!(CONTEXT); global_meter!(GLOBAL_METER, "ROUTER_API"); -histogram_metric!(METRIC_FETCH_TIME, GLOBAL_METER); +histogram_metric_f64!(METRIC_FETCH_TIME, GLOBAL_METER); histogram_metric_u64!(BUCKETS_FETCHED, GLOBAL_METER); pub mod request; diff --git a/crates/analytics/src/metrics/request.rs b/crates/analytics/src/metrics/request.rs index 39375d391a3e..c30e34da8ee2 100644 --- a/crates/analytics/src/metrics/request.rs +++ b/crates/analytics/src/metrics/request.rs @@ -1,7 +1,5 @@ use std::time; -use router_env::metrics::add_attributes; - #[inline] pub async fn record_operation_time( future: F, @@ -14,12 +12,12 @@ where T: ToString, { let (result, time) = time_future(future).await; - let attributes = &add_attributes([ + let attributes = router_env::metric_attributes!( ("metric_name", metric_name.to_string()), ("source", source.to_string()), - ]); + ); let value = time.as_secs_f64(); - metric.record(&super::CONTEXT, value, attributes); + metric.record(value, attributes); router_env::logger::debug!("Attributes: {:?}, Time: {}", attributes, value); result diff --git a/crates/analytics/src/payment_intents/core.rs b/crates/analytics/src/payment_intents/core.rs index 80cfc5630710..00a59d82870c 100644 --- a/crates/analytics/src/payment_intents/core.rs +++ b/crates/analytics/src/payment_intents/core.rs @@ -16,7 +16,6 @@ use currency_conversion::{conversion::convert, types::ExchangeRates}; use error_stack::ResultExt; use router_env::{ instrument, logger, - metrics::add_attributes, tracing::{self, Instrument}, }; @@ -118,14 +117,14 @@ pub async fn get_metrics( match task_type { TaskType::MetricTask(metric, data) => { let data = data?; - let attributes = &add_attributes([ + let attributes = router_env::metric_attributes!( ("metric_type", metric.to_string()), ("source", pool.to_string()), - ]); + ); let value = u64::try_from(data.len()); if let 
Ok(val) = value { - metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes); + metrics::BUCKETS_FETCHED.record(val, attributes); logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); } diff --git a/crates/analytics/src/payments/core.rs b/crates/analytics/src/payments/core.rs index c6f0276c47f4..86192265d072 100644 --- a/crates/analytics/src/payments/core.rs +++ b/crates/analytics/src/payments/core.rs @@ -16,7 +16,6 @@ use currency_conversion::{conversion::convert, types::ExchangeRates}; use error_stack::ResultExt; use router_env::{ instrument, logger, - metrics::add_attributes, tracing::{self, Instrument}, }; @@ -126,14 +125,14 @@ pub async fn get_metrics( match task_type { TaskType::MetricTask(metric, data) => { let data = data?; - let attributes = &add_attributes([ + let attributes = router_env::metric_attributes!( ("metric_type", metric.to_string()), ("source", pool.to_string()), - ]); + ); let value = u64::try_from(data.len()); if let Ok(val) = value { - metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes); + metrics::BUCKETS_FETCHED.record(val, attributes); logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); } @@ -193,14 +192,14 @@ pub async fn get_metrics( } TaskType::DistributionTask(distribution, data) => { let data = data?; - let attributes = &add_attributes([ + let attributes = router_env::metric_attributes!( ("distribution_type", distribution.to_string()), ("source", pool.to_string()), - ]); + ); let value = u64::try_from(data.len()); if let Ok(val) = value { - metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes); + metrics::BUCKETS_FETCHED.record(val, attributes); logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); } diff --git a/crates/analytics/src/refunds/core.rs b/crates/analytics/src/refunds/core.rs index ca72c9003a6e..04badd06bd2d 100644 --- a/crates/analytics/src/refunds/core.rs +++ b/crates/analytics/src/refunds/core.rs @@ -16,7 +16,6 @@ use currency_conversion::{conversion::convert, types::ExchangeRates}; use error_stack::ResultExt; use router_env::{ logger, - metrics::add_attributes, tracing::{self, Instrument}, }; @@ -121,14 +120,14 @@ pub async fn get_metrics( match task_type { TaskType::MetricTask(metric, data) => { let data = data?; - let attributes = &add_attributes([ + let attributes = router_env::metric_attributes!( ("metric_type", metric.to_string()), ("source", pool.to_string()), - ]); + ); let value = u64::try_from(data.len()); if let Ok(val) = value { - metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes); + metrics::BUCKETS_FETCHED.record(val, attributes); logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); } @@ -168,13 +167,13 @@ pub async fn get_metrics( } TaskType::DistributionTask(distribution, data) => { let data = data?; - let attributes = &add_attributes([ + let attributes = router_env::metric_attributes!( ("distribution_type", distribution.to_string()), ("source", pool.to_string()), - ]); + ); let value = u64::try_from(data.len()); if let Ok(val) = value { - metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes); + metrics::BUCKETS_FETCHED.record(val, attributes); logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); } diff --git a/crates/common_utils/src/id_type/merchant.rs b/crates/common_utils/src/id_type/merchant.rs index 08b80249ae34..e12f71e917f9 100644 --- a/crates/common_utils/src/id_type/merchant.rs +++ b/crates/common_utils/src/id_type/merchant.rs @@ -30,12 +30,11 
@@ crate::impl_serializable_secret_id_type!(MerchantId); crate::impl_queryable_id_type!(MerchantId); crate::impl_to_sql_from_sql_id_type!(MerchantId); +// This is implemented so that we can use merchant id directly as attribute in metrics #[cfg(feature = "metrics")] -/// This is implemented so that we can use merchant id directly as attribute in metrics impl From for router_env::opentelemetry::Value { fn from(val: MerchantId) -> Self { - let string_value = val.0 .0 .0; - Self::String(router_env::opentelemetry::StringValue::from(string_value)) + Self::from(val.0 .0 .0) } } diff --git a/crates/common_utils/src/id_type/payment.rs b/crates/common_utils/src/id_type/payment.rs index 60bc9968f312..33bf9d241707 100644 --- a/crates/common_utils/src/id_type/payment.rs +++ b/crates/common_utils/src/id_type/payment.rs @@ -79,11 +79,10 @@ crate::impl_try_from_cow_str_id_type!(PaymentReferenceId, "payment_reference_id" crate::impl_queryable_id_type!(PaymentReferenceId); crate::impl_to_sql_from_sql_id_type!(PaymentReferenceId); +// This is implemented so that we can use payment id directly as attribute in metrics #[cfg(feature = "metrics")] -/// This is implemented so that we can use payment id directly as attribute in metrics impl From for router_env::opentelemetry::Value { fn from(val: PaymentId) -> Self { - let string_value = val.0 .0 .0; - Self::String(router_env::opentelemetry::StringValue::from(string_value)) + Self::from(val.0 .0 .0) } } diff --git a/crates/common_utils/src/id_type/profile.rs b/crates/common_utils/src/id_type/profile.rs index a73cdfdd5392..9e1733a84143 100644 --- a/crates/common_utils/src/id_type/profile.rs +++ b/crates/common_utils/src/id_type/profile.rs @@ -31,3 +31,11 @@ impl FromStr for ProfileId { Self::try_from(cow_string) } } + +// This is implemented so that we can use profile id directly as attribute in metrics +#[cfg(feature = "metrics")] +impl From for router_env::opentelemetry::Value { + fn from(val: ProfileId) -> Self { + Self::from(val.0 .0 .0) + } +} diff --git a/crates/common_utils/src/metrics/utils.rs b/crates/common_utils/src/metrics/utils.rs index 71244ecc4fe4..c1f2ef8f985b 100644 --- a/crates/common_utils/src/metrics/utils.rs +++ b/crates/common_utils/src/metrics/utils.rs @@ -21,13 +21,12 @@ where pub async fn record_operation_time( future: F, metric: &opentelemetry::metrics::Histogram, - metric_context: &opentelemetry::Context, key_value: &[opentelemetry::KeyValue], ) -> R where F: futures::Future, { let (result, time) = time_future(future).await; - metric.record(metric_context, time.as_secs_f64(), key_value); + metric.record(time.as_secs_f64(), key_value); result } diff --git a/crates/diesel_models/src/lib.rs b/crates/diesel_models/src/lib.rs index d07f84aa65e2..cc3dc1361545 100644 --- a/crates/diesel_models/src/lib.rs +++ b/crates/diesel_models/src/lib.rs @@ -128,11 +128,10 @@ pub(crate) mod diesel_impl { } pub(crate) mod metrics { - use router_env::{counter_metric, global_meter, histogram_metric, metrics_context, once_cell}; + use router_env::{counter_metric, global_meter, histogram_metric_f64, once_cell}; - metrics_context!(CONTEXT); global_meter!(GLOBAL_METER, "ROUTER_API"); counter_metric!(DATABASE_CALLS_COUNT, GLOBAL_METER); - histogram_metric!(DATABASE_CALL_TIME, GLOBAL_METER); + histogram_metric_f64!(DATABASE_CALL_TIME, GLOBAL_METER); } diff --git a/crates/diesel_models/src/query/generics.rs b/crates/diesel_models/src/query/generics.rs index 682766679fd7..bf3238ab4fea 100644 --- a/crates/diesel_models/src/query/generics.rs +++ 
b/crates/diesel_models/src/query/generics.rs @@ -25,8 +25,6 @@ use router_env::logger; use crate::{errors, PgPooledConn, StorageResult}; pub mod db_metrics { - use router_env::opentelemetry::KeyValue; - #[derive(Debug)] pub enum DatabaseOperation { FindOne, @@ -51,18 +49,14 @@ pub mod db_metrics { let table_name = std::any::type_name::().rsplit("::").nth(1); - let attributes = [ - KeyValue::new("table", table_name.unwrap_or("undefined")), - KeyValue::new("operation", format!("{:?}", operation)), - ]; - - crate::metrics::DATABASE_CALLS_COUNT.add(&crate::metrics::CONTEXT, 1, &attributes); - crate::metrics::DATABASE_CALL_TIME.record( - &crate::metrics::CONTEXT, - time_elapsed.as_secs_f64(), - &attributes, + let attributes = router_env::metric_attributes!( + ("table", table_name.unwrap_or("undefined")), + ("operation", format!("{:?}", operation)) ); + crate::metrics::DATABASE_CALLS_COUNT.add(1, attributes); + crate::metrics::DATABASE_CALL_TIME.record(time_elapsed.as_secs_f64(), attributes); + output } } diff --git a/crates/drainer/src/handler.rs b/crates/drainer/src/handler.rs index d0c26195453b..74984b03fbd2 100644 --- a/crates/drainer/src/handler.rs +++ b/crates/drainer/src/handler.rs @@ -74,7 +74,7 @@ impl Handler { let jobs_picked = Arc::new(atomic::AtomicU8::new(0)); while self.running.load(atomic::Ordering::SeqCst) { - metrics::DRAINER_HEALTH.add(&metrics::CONTEXT, 1, &[]); + metrics::DRAINER_HEALTH.add(1, &[]); for store in self.stores.values() { if store.is_stream_available(stream_index).await { let _task_handle = tokio::spawn( @@ -103,7 +103,7 @@ impl Handler { pub(crate) async fn shutdown_listener(&self, mut rx: mpsc::Receiver<()>) { while let Some(_c) = rx.recv().await { logger::info!("Awaiting shutdown!"); - metrics::SHUTDOWN_SIGNAL_RECEIVED.add(&metrics::CONTEXT, 1, &[]); + metrics::SHUTDOWN_SIGNAL_RECEIVED.add(1, &[]); let shutdown_started = time::Instant::now(); rx.close(); @@ -112,9 +112,9 @@ impl Handler { time::sleep(self.shutdown_interval).await; } logger::info!("Terminating drainer"); - metrics::SUCCESSFUL_SHUTDOWN.add(&metrics::CONTEXT, 1, &[]); + metrics::SUCCESSFUL_SHUTDOWN.add(1, &[]); let shutdown_ended = shutdown_started.elapsed().as_secs_f64() * 1000f64; - metrics::CLEANUP_TIME.record(&metrics::CONTEXT, shutdown_ended, &[]); + metrics::CLEANUP_TIME.record(shutdown_ended, &[]); self.close(); } logger::info!( @@ -217,7 +217,7 @@ async fn drainer( if let redis_interface::errors::RedisError::StreamEmptyOrNotAvailable = redis_err.current_context() { - metrics::STREAM_EMPTY.add(&metrics::CONTEXT, 1, &[]); + metrics::STREAM_EMPTY.add(1, &[]); return Ok(()); } else { return Err(error); @@ -236,12 +236,8 @@ async fn drainer( let read_count = entries.len(); metrics::JOBS_PICKED_PER_STREAM.add( - &metrics::CONTEXT, u64::try_from(read_count).unwrap_or(u64::MIN), - &[metrics::KeyValue { - key: "stream".into(), - value: stream_name.to_string().into(), - }], + router_env::metric_attributes!(("stream", stream_name.to_owned())), ); let session_id = common_utils::generate_id_with_default_len("drainer_session"); @@ -254,12 +250,8 @@ async fn drainer( Err(err) => { logger::error!(operation = "deserialization", err=?err); metrics::STREAM_PARSE_FAIL.add( - &metrics::CONTEXT, 1, - &[metrics::KeyValue { - key: "operation".into(), - value: "deserialization".into(), - }], + router_env::metric_attributes!(("operation", "deserialization")), ); // break from the loop in case of a deser error diff --git a/crates/drainer/src/logger.rs b/crates/drainer/src/logger.rs index b23b0ab2675d..8044b0462d94 
100644 --- a/crates/drainer/src/logger.rs +++ b/crates/drainer/src/logger.rs @@ -1,2 +1,2 @@ #[doc(inline)] -pub use router_env::*; +pub use router_env::{debug, error, info, warn}; diff --git a/crates/drainer/src/main.rs b/crates/drainer/src/main.rs index c5a4a39f95d4..91ec191bf9f2 100644 --- a/crates/drainer/src/main.rs +++ b/crates/drainer/src/main.rs @@ -1,8 +1,6 @@ use std::collections::HashMap; -use drainer::{ - errors::DrainerResult, logger::logger, services, settings, start_drainer, start_web_server, -}; +use drainer::{errors::DrainerResult, logger, services, settings, start_drainer, start_web_server}; use router_env::tracing::Instrument; #[tokio::main] diff --git a/crates/drainer/src/metrics.rs b/crates/drainer/src/metrics.rs index 750f23bc73b5..13fb31f7c500 100644 --- a/crates/drainer/src/metrics.rs +++ b/crates/drainer/src/metrics.rs @@ -1,9 +1,5 @@ -pub use router_env::opentelemetry::KeyValue; -use router_env::{ - counter_metric, global_meter, histogram_metric, histogram_metric_i64, metrics_context, -}; +use router_env::{counter_metric, global_meter, histogram_metric_f64, histogram_metric_u64}; -metrics_context!(CONTEXT); global_meter!(DRAINER_METER, "DRAINER"); counter_metric!(JOBS_PICKED_PER_STREAM, DRAINER_METER); @@ -17,8 +13,8 @@ counter_metric!(STREAM_EMPTY, DRAINER_METER); counter_metric!(STREAM_PARSE_FAIL, DRAINER_METER); counter_metric!(DRAINER_HEALTH, DRAINER_METER); -histogram_metric!(QUERY_EXECUTION_TIME, DRAINER_METER); // Time in (ms) milliseconds -histogram_metric!(REDIS_STREAM_READ_TIME, DRAINER_METER); // Time in (ms) milliseconds -histogram_metric!(REDIS_STREAM_TRIM_TIME, DRAINER_METER); // Time in (ms) milliseconds -histogram_metric!(CLEANUP_TIME, DRAINER_METER); // Time in (ms) milliseconds -histogram_metric_i64!(DRAINER_DELAY_SECONDS, DRAINER_METER); // Time in (s) seconds +histogram_metric_f64!(QUERY_EXECUTION_TIME, DRAINER_METER); // Time in (ms) milliseconds +histogram_metric_f64!(REDIS_STREAM_READ_TIME, DRAINER_METER); // Time in (ms) milliseconds +histogram_metric_f64!(REDIS_STREAM_TRIM_TIME, DRAINER_METER); // Time in (ms) milliseconds +histogram_metric_f64!(CLEANUP_TIME, DRAINER_METER); // Time in (ms) milliseconds +histogram_metric_u64!(DRAINER_DELAY_SECONDS, DRAINER_METER); // Time in (s) seconds diff --git a/crates/drainer/src/query.rs b/crates/drainer/src/query.rs index a1e04fb6d0f1..ec6b271aa9da 100644 --- a/crates/drainer/src/query.rs +++ b/crates/drainer/src/query.rs @@ -25,32 +25,23 @@ impl ExecuteQuery for kv::DBOperation { let operation = self.operation(); let table = self.table(); - let tags: &[metrics::KeyValue] = &[ - metrics::KeyValue { - key: "operation".into(), - value: operation.into(), - }, - metrics::KeyValue { - key: "table".into(), - value: table.into(), - }, - ]; + let tags = router_env::metric_attributes!(("operation", operation), ("table", table)); let (result, execution_time) = Box::pin(common_utils::date_time::time_it(|| self.execute(&conn))).await; push_drainer_delay(pushed_at, operation, table, tags); - metrics::QUERY_EXECUTION_TIME.record(&metrics::CONTEXT, execution_time, tags); + metrics::QUERY_EXECUTION_TIME.record(execution_time, tags); match result { Ok(result) => { logger::info!(operation = operation, table = table, ?result); - metrics::SUCCESSFUL_QUERY_EXECUTION.add(&metrics::CONTEXT, 1, tags); + metrics::SUCCESSFUL_QUERY_EXECUTION.add(1, tags); Ok(()) } Err(err) => { logger::error!(operation = operation, table = table, ?err); - metrics::ERRORS_WHILE_QUERY_EXECUTION.add(&metrics::CONTEXT, 1, tags); + 
metrics::ERRORS_WHILE_QUERY_EXECUTION.add(1, tags); Err(err) } } @@ -58,15 +49,25 @@ impl ExecuteQuery for kv::DBOperation { } #[inline(always)] -fn push_drainer_delay(pushed_at: i64, operation: &str, table: &str, tags: &[metrics::KeyValue]) { +fn push_drainer_delay( + pushed_at: i64, + operation: &str, + table: &str, + tags: &[router_env::opentelemetry::KeyValue], +) { let drained_at = common_utils::date_time::now_unix_timestamp(); let delay = drained_at - pushed_at; - logger::debug!( - operation = operation, - table = table, - delay = format!("{delay} secs") - ); + logger::debug!(operation, table, delay = format!("{delay} secs")); - metrics::DRAINER_DELAY_SECONDS.record(&metrics::CONTEXT, delay, tags); + match u64::try_from(delay) { + Ok(delay) => metrics::DRAINER_DELAY_SECONDS.record(delay, tags), + Err(error) => logger::error!( + pushed_at, + drained_at, + delay, + ?error, + "Invalid drainer delay" + ), + } } diff --git a/crates/drainer/src/stream.rs b/crates/drainer/src/stream.rs index 319fc2b0e1d3..f5b41c536727 100644 --- a/crates/drainer/src/stream.rs +++ b/crates/drainer/src/stream.rs @@ -69,9 +69,8 @@ impl Store { .await; metrics::REDIS_STREAM_READ_TIME.record( - &metrics::CONTEXT, execution_time, - &[metrics::KeyValue::new("stream", stream_name.to_owned())], + router_env::metric_attributes!(("stream", stream_name.to_owned())), ); Ok(output?) @@ -104,9 +103,8 @@ impl Store { .await; metrics::REDIS_STREAM_TRIM_TIME.record( - &metrics::CONTEXT, execution_time, - &[metrics::KeyValue::new("stream", stream_name.to_owned())], + router_env::metric_attributes!(("stream", stream_name.to_owned())), ); // adding 1 because we are deleting the given id too diff --git a/crates/drainer/src/utils.rs b/crates/drainer/src/utils.rs index c8c6e312f14b..72f12c60492a 100644 --- a/crates/drainer/src/utils.rs +++ b/crates/drainer/src/utils.rs @@ -63,8 +63,8 @@ pub async fn increment_stream_index( ) -> u8 { if index == total_streams - 1 { match jobs_picked.load(atomic::Ordering::SeqCst) { - 0 => metrics::CYCLES_COMPLETED_UNSUCCESSFULLY.add(&metrics::CONTEXT, 1, &[]), - _ => metrics::CYCLES_COMPLETED_SUCCESSFULLY.add(&metrics::CONTEXT, 1, &[]), + 0 => metrics::CYCLES_COMPLETED_UNSUCCESSFULLY.add(1, &[]), + _ => metrics::CYCLES_COMPLETED_SUCCESSFULLY.add(1, &[]), } jobs_picked.store(0, atomic::Ordering::SeqCst); 0 diff --git a/crates/external_services/src/aws_kms/core.rs b/crates/external_services/src/aws_kms/core.rs index 8ffd631b819c..537e63655b5b 100644 --- a/crates/external_services/src/aws_kms/core.rs +++ b/crates/external_services/src/aws_kms/core.rs @@ -63,7 +63,7 @@ impl AwsKmsClient { // Logging using `Debug` representation of the error as the `Display` // representation does not hold sufficient information. logger::error!(aws_kms_sdk_error=?error, "Failed to AWS KMS decrypt data"); - metrics::AWS_KMS_DECRYPTION_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::AWS_KMS_DECRYPTION_FAILURES.add(1, &[]); }) .change_context(AwsKmsError::DecryptionFailed)?; @@ -75,7 +75,7 @@ impl AwsKmsClient { })?; let time_taken = start.elapsed(); - metrics::AWS_KMS_DECRYPT_TIME.record(&metrics::CONTEXT, time_taken.as_secs_f64(), &[]); + metrics::AWS_KMS_DECRYPT_TIME.record(time_taken.as_secs_f64(), &[]); Ok(output) } @@ -99,7 +99,7 @@ impl AwsKmsClient { // Logging using `Debug` representation of the error as the `Display` // representation does not hold sufficient information. 
logger::error!(aws_kms_sdk_error=?error, "Failed to AWS KMS encrypt data"); - metrics::AWS_KMS_ENCRYPTION_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::AWS_KMS_ENCRYPTION_FAILURES.add(1, &[]); }) .change_context(AwsKmsError::EncryptionFailed)?; @@ -108,7 +108,7 @@ impl AwsKmsClient { .ok_or(AwsKmsError::MissingCiphertextEncryptionOutput) .map(|blob| consts::BASE64_ENGINE.encode(blob.into_inner()))?; let time_taken = start.elapsed(); - metrics::AWS_KMS_ENCRYPT_TIME.record(&metrics::CONTEXT, time_taken.as_secs_f64(), &[]); + metrics::AWS_KMS_ENCRYPT_TIME.record(time_taken.as_secs_f64(), &[]); Ok(output) } diff --git a/crates/external_services/src/lib.rs b/crates/external_services/src/lib.rs index 4570a5e59605..304ce248317a 100644 --- a/crates/external_services/src/lib.rs +++ b/crates/external_services/src/lib.rs @@ -30,9 +30,8 @@ pub mod consts { /// Metrics for interactions with external systems. #[cfg(feature = "aws_kms")] pub mod metrics { - use router_env::{counter_metric, global_meter, histogram_metric, metrics_context}; + use router_env::{counter_metric, global_meter, histogram_metric_f64}; - metrics_context!(CONTEXT); global_meter!(GLOBAL_METER, "EXTERNAL_SERVICES"); #[cfg(feature = "aws_kms")] @@ -41,7 +40,7 @@ pub mod metrics { counter_metric!(AWS_KMS_ENCRYPTION_FAILURES, GLOBAL_METER); // No. of AWS KMS Encryption failures #[cfg(feature = "aws_kms")] - histogram_metric!(AWS_KMS_DECRYPT_TIME, GLOBAL_METER); // Histogram for AWS KMS decryption time (in sec) + histogram_metric_f64!(AWS_KMS_DECRYPT_TIME, GLOBAL_METER); // Histogram for AWS KMS decryption time (in sec) #[cfg(feature = "aws_kms")] - histogram_metric!(AWS_KMS_ENCRYPT_TIME, GLOBAL_METER); // Histogram for AWS KMS encryption time (in sec) + histogram_metric_f64!(AWS_KMS_ENCRYPT_TIME, GLOBAL_METER); // Histogram for AWS KMS encryption time (in sec) } diff --git a/crates/hyperswitch_connectors/src/connectors/boku.rs b/crates/hyperswitch_connectors/src/connectors/boku.rs index e6d47c05a91e..73b17ca802ec 100644 --- a/crates/hyperswitch_connectors/src/connectors/boku.rs +++ b/crates/hyperswitch_connectors/src/connectors/boku.rs @@ -38,7 +38,7 @@ use hyperswitch_interfaces::{ }; use masking::{ExposeInterface, Mask, PeekInterface, Secret, WithType}; use ring::hmac; -use router_env::{logger, metrics::add_attributes}; +use router_env::logger; use time::OffsetDateTime; use transformers as boku; @@ -678,11 +678,8 @@ fn get_xml_deserialized( res: Response, event_builder: Option<&mut ConnectorEvent>, ) -> CustomResult { - metrics::CONNECTOR_RESPONSE_DESERIALIZATION_FAILURE.add( - &metrics::CONTEXT, - 1, - &add_attributes([("connector", "boku")]), - ); + metrics::CONNECTOR_RESPONSE_DESERIALIZATION_FAILURE + .add(1, router_env::metric_attributes!(("connector", "boku"))); let response_data = String::from_utf8(res.response.to_vec()) .change_context(errors::ConnectorError::ResponseDeserializationFailed)?; diff --git a/crates/hyperswitch_connectors/src/metrics.rs b/crates/hyperswitch_connectors/src/metrics.rs index 0ce7942663b1..b4e8a2dbd25a 100644 --- a/crates/hyperswitch_connectors/src/metrics.rs +++ b/crates/hyperswitch_connectors/src/metrics.rs @@ -1,8 +1,7 @@ //! 
Metrics interface -use router_env::{counter_metric, global_meter, metrics_context}; +use router_env::{counter_metric, global_meter}; -metrics_context!(CONTEXT); global_meter!(GLOBAL_METER, "ROUTER_API"); counter_metric!(CONNECTOR_RESPONSE_DESERIALIZATION_FAILURE, GLOBAL_METER); diff --git a/crates/hyperswitch_connectors/src/utils.rs b/crates/hyperswitch_connectors/src/utils.rs index 408a0282543d..27e80fafa92e 100644 --- a/crates/hyperswitch_connectors/src/utils.rs +++ b/crates/hyperswitch_connectors/src/utils.rs @@ -34,7 +34,7 @@ use image::Luma; use masking::{ExposeInterface, PeekInterface, Secret}; use once_cell::sync::Lazy; use regex::Regex; -use router_env::{logger, metrics::add_attributes}; +use router_env::logger; use serde::Serializer; use serde_json::Value; @@ -133,11 +133,8 @@ pub(crate) fn handle_json_response_deserialization_failure( res: Response, connector: &'static str, ) -> CustomResult { - crate::metrics::CONNECTOR_RESPONSE_DESERIALIZATION_FAILURE.add( - &crate::metrics::CONTEXT, - 1, - &add_attributes([("connector", connector)]), - ); + crate::metrics::CONNECTOR_RESPONSE_DESERIALIZATION_FAILURE + .add(1, router_env::metric_attributes!(("connector", connector))); let response_data = String::from_utf8(res.response.to_vec()) .change_context(errors::ConnectorError::ResponseDeserializationFailed)?; diff --git a/crates/hyperswitch_domain_models/src/type_encryption.rs b/crates/hyperswitch_domain_models/src/type_encryption.rs index 641e295155fc..07ff1caf7afa 100644 --- a/crates/hyperswitch_domain_models/src/type_encryption.rs +++ b/crates/hyperswitch_domain_models/src/type_encryption.rs @@ -143,7 +143,7 @@ mod encrypt { Ok(response) => Ok(ForeignFrom::foreign_from((masked_data.clone(), response))), Err(err) => { logger::error!("Encryption error {:?}", err); - metrics::ENCRYPTION_API_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::ENCRYPTION_API_FAILURES.add(1, &[]); logger::info!("Fall back to Application Encryption"); Self::encrypt(masked_data, key, crypt_algo).await } @@ -187,7 +187,7 @@ mod encrypt { match decrypted { Ok(de) => Ok(de), Err(_) => { - metrics::DECRYPTION_API_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::DECRYPTION_API_FAILURES.add(1, &[]); logger::info!("Fall back to Application Decryption"); Self::decrypt(encrypted_data, key, crypt_algo).await } @@ -202,7 +202,7 @@ mod encrypt { key: &[u8], crypt_algo: V, ) -> CustomResult { - metrics::APPLICATION_ENCRYPTION_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::APPLICATION_ENCRYPTION_COUNT.add(1, &[]); let encrypted_data = crypt_algo.encode_message(key, masked_data.peek().as_bytes())?; Ok(Self::new(masked_data, encrypted_data.into())) } @@ -214,7 +214,7 @@ mod encrypt { key: &[u8], crypt_algo: V, ) -> CustomResult { - metrics::APPLICATION_DECRYPTION_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::APPLICATION_DECRYPTION_COUNT.add(1, &[]); let encrypted = encrypted_data.into_inner(); let data = crypt_algo.decode_message(key, encrypted.clone())?; @@ -251,7 +251,7 @@ mod encrypt { match result { Ok(response) => Ok(ForeignFrom::foreign_from((masked_data, response))), Err(err) => { - metrics::ENCRYPTION_API_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::ENCRYPTION_API_FAILURES.add(1, &[]); logger::error!("Encryption error {:?}", err); logger::info!("Fall back to Application Encryption"); Self::batch_encrypt(masked_data, key, crypt_algo).await @@ -295,7 +295,7 @@ mod encrypt { match decrypted { Ok(de) => Ok(de), Err(_) => { - metrics::DECRYPTION_API_FAILURES.add(&metrics::CONTEXT, 1, &[]); + 
metrics::DECRYPTION_API_FAILURES.add(1, &[]); logger::info!("Fall back to Application Decryption"); Self::batch_decrypt(encrypted_data, key, crypt_algo).await } @@ -310,7 +310,7 @@ mod encrypt { key: &[u8], crypt_algo: V, ) -> CustomResult, errors::CryptoError> { - metrics::APPLICATION_ENCRYPTION_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::APPLICATION_ENCRYPTION_COUNT.add(1, &[]); masked_data .into_iter() .map(|(k, v)| { @@ -332,7 +332,7 @@ mod encrypt { key: &[u8], crypt_algo: V, ) -> CustomResult, errors::CryptoError> { - metrics::APPLICATION_DECRYPTION_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::APPLICATION_DECRYPTION_COUNT.add(1, &[]); encrypted_data .into_iter() .map(|(k, v)| { @@ -380,7 +380,7 @@ mod encrypt { Ok(response) => Ok(ForeignFrom::foreign_from((masked_data.clone(), response))), Err(err) => { logger::error!("Encryption error {:?}", err); - metrics::ENCRYPTION_API_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::ENCRYPTION_API_FAILURES.add(1, &[]); logger::info!("Fall back to Application Encryption"); Self::encrypt(masked_data, key, crypt_algo).await } @@ -423,7 +423,7 @@ mod encrypt { match decrypted { Ok(de) => Ok(de), Err(_) => { - metrics::DECRYPTION_API_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::DECRYPTION_API_FAILURES.add(1, &[]); logger::info!("Fall back to Application Decryption"); Self::decrypt(encrypted_data, key, crypt_algo).await } @@ -438,7 +438,7 @@ mod encrypt { key: &[u8], crypt_algo: V, ) -> CustomResult { - metrics::APPLICATION_ENCRYPTION_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::APPLICATION_ENCRYPTION_COUNT.add(1, &[]); let data = serde_json::to_vec(&masked_data.peek()) .change_context(errors::CryptoError::DecodingFailed)?; let encrypted_data = crypt_algo.encode_message(key, &data)?; @@ -452,7 +452,7 @@ mod encrypt { key: &[u8], crypt_algo: V, ) -> CustomResult { - metrics::APPLICATION_DECRYPTION_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::APPLICATION_DECRYPTION_COUNT.add(1, &[]); let encrypted = encrypted_data.into_inner(); let data = crypt_algo.decode_message(key, encrypted.clone())?; @@ -487,7 +487,7 @@ mod encrypt { match result { Ok(response) => Ok(ForeignFrom::foreign_from((masked_data, response))), Err(err) => { - metrics::ENCRYPTION_API_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::ENCRYPTION_API_FAILURES.add(1, &[]); logger::error!("Encryption error {:?}", err); logger::info!("Fall back to Application Encryption"); Self::batch_encrypt(masked_data, key, crypt_algo).await @@ -531,7 +531,7 @@ mod encrypt { match decrypted { Ok(de) => Ok(de), Err(_) => { - metrics::DECRYPTION_API_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::DECRYPTION_API_FAILURES.add(1, &[]); logger::info!("Fall back to Application Decryption"); Self::batch_decrypt(encrypted_data, key, crypt_algo).await } @@ -546,7 +546,7 @@ mod encrypt { key: &[u8], crypt_algo: V, ) -> CustomResult, errors::CryptoError> { - metrics::APPLICATION_ENCRYPTION_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::APPLICATION_ENCRYPTION_COUNT.add(1, &[]); masked_data .into_iter() .map(|(k, v)| { @@ -567,7 +567,7 @@ mod encrypt { key: &[u8], crypt_algo: V, ) -> CustomResult, errors::CryptoError> { - metrics::APPLICATION_DECRYPTION_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::APPLICATION_DECRYPTION_COUNT.add(1, &[]); encrypted_data .into_iter() .map(|(k, v)| { @@ -851,7 +851,7 @@ mod encrypt { Ok(response) => Ok(ForeignFrom::foreign_from((masked_data.clone(), response))), Err(err) => { logger::error!("Encryption error {:?}", err); - 
metrics::ENCRYPTION_API_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::ENCRYPTION_API_FAILURES.add(1, &[]); logger::info!("Fall back to Application Encryption"); Self::encrypt(masked_data, key, crypt_algo).await } @@ -894,7 +894,7 @@ mod encrypt { match decrypted { Ok(de) => Ok(de), Err(_) => { - metrics::DECRYPTION_API_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::DECRYPTION_API_FAILURES.add(1, &[]); logger::info!("Fall back to Application Decryption"); Self::decrypt(encrypted_data, key, crypt_algo).await } @@ -909,7 +909,7 @@ mod encrypt { key: &[u8], crypt_algo: V, ) -> CustomResult { - metrics::APPLICATION_ENCRYPTION_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::APPLICATION_ENCRYPTION_COUNT.add(1, &[]); let encrypted_data = crypt_algo.encode_message(key, masked_data.peek())?; Ok(Self::new(masked_data, encrypted_data.into())) } @@ -921,7 +921,7 @@ mod encrypt { key: &[u8], crypt_algo: V, ) -> CustomResult { - metrics::APPLICATION_DECRYPTION_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::APPLICATION_DECRYPTION_COUNT.add(1, &[]); let encrypted = encrypted_data.into_inner(); let data = crypt_algo.decode_message(key, encrypted.clone())?; Ok(Self::new(data.into(), encrypted)) @@ -953,7 +953,7 @@ mod encrypt { match result { Ok(response) => Ok(ForeignFrom::foreign_from((masked_data, response))), Err(err) => { - metrics::ENCRYPTION_API_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::ENCRYPTION_API_FAILURES.add(1, &[]); logger::error!("Encryption error {:?}", err); logger::info!("Fall back to Application Encryption"); Self::batch_encrypt(masked_data, key, crypt_algo).await @@ -997,7 +997,7 @@ mod encrypt { match decrypted { Ok(de) => Ok(de), Err(_) => { - metrics::DECRYPTION_API_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::DECRYPTION_API_FAILURES.add(1, &[]); logger::info!("Fall back to Application Decryption"); Self::batch_decrypt(encrypted_data, key, crypt_algo).await } @@ -1012,7 +1012,7 @@ mod encrypt { key: &[u8], crypt_algo: V, ) -> CustomResult, errors::CryptoError> { - metrics::APPLICATION_ENCRYPTION_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::APPLICATION_ENCRYPTION_COUNT.add(1, &[]); masked_data .into_iter() .map(|(k, v)| { @@ -1031,7 +1031,7 @@ mod encrypt { key: &[u8], crypt_algo: V, ) -> CustomResult, errors::CryptoError> { - metrics::APPLICATION_DECRYPTION_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::APPLICATION_DECRYPTION_COUNT.add(1, &[]); encrypted_data .into_iter() .map(|(k, v)| { @@ -1140,7 +1140,6 @@ where record_operation_time( crypto::Encryptable::encrypt_via_api(state, inner, identifier, key, crypto::GcmAes256), &metrics::ENCRYPTION_TIME, - &metrics::CONTEXT, &[], ) .await @@ -1167,7 +1166,6 @@ where crypto::GcmAes256, ), &metrics::ENCRYPTION_TIME, - &metrics::CONTEXT, &[], ) .await @@ -1223,7 +1221,6 @@ where record_operation_time( crypto::Encryptable::decrypt_via_api(state, inner, identifier, key, crypto::GcmAes256), &metrics::DECRYPTION_TIME, - &metrics::CONTEXT, &[], ) .await @@ -1250,7 +1247,6 @@ where crypto::GcmAes256, ), &metrics::ENCRYPTION_TIME, - &metrics::CONTEXT, &[], ) .await @@ -1320,14 +1316,13 @@ where } pub(crate) mod metrics { - use router_env::{counter_metric, global_meter, histogram_metric, metrics_context, once_cell}; + use router_env::{counter_metric, global_meter, histogram_metric_f64, once_cell}; - metrics_context!(CONTEXT); global_meter!(GLOBAL_METER, "ROUTER_API"); // Encryption and Decryption metrics - histogram_metric!(ENCRYPTION_TIME, GLOBAL_METER); - histogram_metric!(DECRYPTION_TIME, GLOBAL_METER); + 
histogram_metric_f64!(ENCRYPTION_TIME, GLOBAL_METER); + histogram_metric_f64!(DECRYPTION_TIME, GLOBAL_METER); counter_metric!(ENCRYPTION_API_FAILURES, GLOBAL_METER); counter_metric!(DECRYPTION_API_FAILURES, GLOBAL_METER); counter_metric!(APPLICATION_ENCRYPTION_COUNT, GLOBAL_METER); diff --git a/crates/hyperswitch_interfaces/src/api.rs b/crates/hyperswitch_interfaces/src/api.rs index 1326444bb475..617832f191df 100644 --- a/crates/hyperswitch_interfaces/src/api.rs +++ b/crates/hyperswitch_interfaces/src/api.rs @@ -35,7 +35,6 @@ use hyperswitch_domain_models::{ router_response_types::{MandateRevokeResponseData, VerifyWebhookSourceResponseData}, }; use masking::Maskable; -use router_env::metrics::add_attributes; use serde_json::json; #[cfg(feature = "payouts")] @@ -121,9 +120,8 @@ pub trait ConnectorIntegration: _connectors: &Connectors, ) -> CustomResult, errors::ConnectorError> { metrics::UNIMPLEMENTED_FLOW.add( - &metrics::CONTEXT, 1, - &add_attributes([("connector", req.connector.clone())]), + router_env::metric_attributes!(("connector", req.connector.clone())), ); Ok(None) } diff --git a/crates/hyperswitch_interfaces/src/connector_integration_v2.rs b/crates/hyperswitch_interfaces/src/connector_integration_v2.rs index 9384ed0a0cda..bc18f5c66394 100644 --- a/crates/hyperswitch_interfaces/src/connector_integration_v2.rs +++ b/crates/hyperswitch_interfaces/src/connector_integration_v2.rs @@ -5,7 +5,6 @@ use common_utils::{ }; use hyperswitch_domain_models::{router_data::ErrorResponse, router_data_v2::RouterDataV2}; use masking::Maskable; -use router_env::metrics::add_attributes; use serde_json::json; use crate::{ @@ -65,11 +64,8 @@ pub trait ConnectorIntegrationV2: &self, _req: &RouterDataV2, ) -> CustomResult { - metrics::UNIMPLEMENTED_FLOW.add( - &metrics::CONTEXT, - 1, - &add_attributes([("connector", self.id())]), - ); + metrics::UNIMPLEMENTED_FLOW + .add(1, router_env::metric_attributes!(("connector", self.id()))); Ok(String::new()) } diff --git a/crates/hyperswitch_interfaces/src/metrics.rs b/crates/hyperswitch_interfaces/src/metrics.rs index fc374eba8e24..84aa6d10be09 100644 --- a/crates/hyperswitch_interfaces/src/metrics.rs +++ b/crates/hyperswitch_interfaces/src/metrics.rs @@ -1,8 +1,7 @@ //! 
Metrics interface -use router_env::{counter_metric, global_meter, metrics_context}; +use router_env::{counter_metric, global_meter}; -metrics_context!(CONTEXT); global_meter!(GLOBAL_METER, "ROUTER_API"); counter_metric!(UNIMPLEMENTED_FLOW, GLOBAL_METER); diff --git a/crates/router/src/core/admin.rs b/crates/router/src/core/admin.rs index 40ed4e755210..8e2145071146 100644 --- a/crates/router/src/core/admin.rs +++ b/crates/router/src/core/admin.rs @@ -20,7 +20,6 @@ use hyperswitch_domain_models::merchant_connector_account::{ use masking::{ExposeInterface, PeekInterface, Secret}; use pm_auth::{connector::plaid::transformers::PlaidAuthType, types as pm_auth_types}; use regex::Regex; -use router_env::metrics::add_attributes; use uuid::Uuid; #[cfg(any(feature = "v1", feature = "v2"))] @@ -2927,12 +2926,11 @@ pub async fn create_connector( .await?; metrics::MCA_CREATE.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ("connector", req.connector_name.to_string()), - ("merchant", merchant_id.get_string_repr().to_owned()), - ]), + ("merchant", merchant_id.clone()), + ), ); let mca_response = mca.foreign_try_into()?; diff --git a/crates/router/src/core/api_keys.rs b/crates/router/src/core/api_keys.rs index de4a8931c785..4d3f3df9a59c 100644 --- a/crates/router/src/core/api_keys.rs +++ b/crates/router/src/core/api_keys.rs @@ -3,7 +3,7 @@ use common_utils::date_time; use diesel_models::{api_keys::ApiKey, enums as storage_enums}; use error_stack::{report, ResultExt}; use masking::{PeekInterface, StrongSecret}; -use router_env::{instrument, metrics::add_attributes, tracing}; +use router_env::{instrument, tracing}; use crate::{ configs::settings, @@ -160,9 +160,8 @@ pub async fn create_api_key( ); metrics::API_KEY_CREATED.add( - &metrics::CONTEXT, 1, - &add_attributes([("merchant", merchant_id.get_string_repr().to_owned())]), + router_env::metric_attributes!(("merchant", merchant_id.clone())), ); // Add process to process_tracker for email reminder, only if expiry is set to future date @@ -244,11 +243,7 @@ pub async fn add_api_key_expiry_task( api_key.key_id ) })?; - metrics::TASKS_ADDED_COUNT.add( - &metrics::CONTEXT, - 1, - &add_attributes([("flow", "ApiKeyExpiry")]), - ); + metrics::TASKS_ADDED_COUNT.add(1, router_env::metric_attributes!(("flow", "ApiKeyExpiry"))); Ok(()) } @@ -456,7 +451,7 @@ pub async fn revoke_api_key( ); } - metrics::API_KEY_REVOKED.add(&metrics::CONTEXT, 1, &[]); + metrics::API_KEY_REVOKED.add(1, &[]); #[cfg(feature = "email")] { diff --git a/crates/router/src/core/customers.rs b/crates/router/src/core/customers.rs index 266873b1a111..3081617f3ea5 100644 --- a/crates/router/src/core/customers.rs +++ b/crates/router/src/core/customers.rs @@ -705,7 +705,7 @@ impl CustomerDeleteBridge for customers::GlobalId { payment_methods_deleted: true, id: self.id.clone(), }; - metrics::CUSTOMER_REDACTED.add(&metrics::CONTEXT, 1, &[]); + metrics::CUSTOMER_REDACTED.add(1, &[]); Ok(services::ApplicationResponse::Json(response)) } } @@ -943,7 +943,7 @@ impl CustomerDeleteBridge for customers::CustomerId { address_deleted: true, payment_methods_deleted: true, }; - metrics::CUSTOMER_REDACTED.add(&metrics::CONTEXT, 1, &[]); + metrics::CUSTOMER_REDACTED.add(1, &[]); Ok(services::ApplicationResponse::Json(response)) } } diff --git a/crates/router/src/core/disputes.rs b/crates/router/src/core/disputes.rs index 8f083c3103a3..15bdbafb135b 100644 --- a/crates/router/src/core/disputes.rs +++ b/crates/router/src/core/disputes.rs @@ -155,7 +155,7 @@ pub async fn accept_dispute( 
!(dispute.dispute_stage == storage_enums::DisputeStage::Dispute && dispute.dispute_status == storage_enums::DisputeStatus::DisputeOpened), || { - metrics::ACCEPT_DISPUTE_STATUS_VALIDATION_FAILURE_METRIC.add(&metrics::CONTEXT, 1, &[]); + metrics::ACCEPT_DISPUTE_STATUS_VALIDATION_FAILURE_METRIC.add(1, &[]); Err(errors::ApiErrorResponse::DisputeStatusValidationFailed { reason: format!( "This dispute cannot be accepted because the dispute is in {} stage and has {} status", @@ -274,11 +274,7 @@ pub async fn submit_evidence( !(dispute.dispute_stage == storage_enums::DisputeStage::Dispute && dispute.dispute_status == storage_enums::DisputeStatus::DisputeOpened), || { - metrics::EVIDENCE_SUBMISSION_DISPUTE_STATUS_VALIDATION_FAILURE_METRIC.add( - &metrics::CONTEXT, - 1, - &[], - ); + metrics::EVIDENCE_SUBMISSION_DISPUTE_STATUS_VALIDATION_FAILURE_METRIC.add(1, &[]); Err(errors::ApiErrorResponse::DisputeStatusValidationFailed { reason: format!( "Evidence cannot be submitted because the dispute is in {} stage and has {} status", @@ -446,11 +442,7 @@ pub async fn attach_evidence( !(dispute.dispute_stage == storage_enums::DisputeStage::Dispute && dispute.dispute_status == storage_enums::DisputeStatus::DisputeOpened), || { - metrics::ATTACH_EVIDENCE_DISPUTE_STATUS_VALIDATION_FAILURE_METRIC.add( - &metrics::CONTEXT, - 1, - &[], - ); + metrics::ATTACH_EVIDENCE_DISPUTE_STATUS_VALIDATION_FAILURE_METRIC.add(1, &[]); Err(errors::ApiErrorResponse::DisputeStatusValidationFailed { reason: format!( "Evidence cannot be attached because the dispute is in {} stage and has {} status", diff --git a/crates/router/src/core/mandate.rs b/crates/router/src/core/mandate.rs index 5ed3e9105632..da8015662a7e 100644 --- a/crates/router/src/core/mandate.rs +++ b/crates/router/src/core/mandate.rs @@ -5,7 +5,7 @@ use common_utils::{ext_traits::Encode, id_type}; use diesel_models::enums as storage_enums; use error_stack::{report, ResultExt}; use futures::future; -use router_env::{instrument, logger, metrics::add_attributes, tracing}; +use router_env::{instrument, logger, tracing}; use super::payments::helpers as payment_helper; use crate::{ @@ -341,9 +341,8 @@ where .change_context(errors::ApiErrorResponse::MandateUpdateFailed), }?; metrics::SUBSEQUENT_MANDATE_PAYMENT.add( - &metrics::CONTEXT, 1, - &add_attributes([("connector", mandate.connector)]), + router_env::metric_attributes!(("connector", mandate.connector)), ); Ok(Some(mandate_id.clone())) } @@ -396,11 +395,7 @@ where .insert_mandate(new_mandate_data, storage_scheme) .await .to_duplicate_response(errors::ApiErrorResponse::DuplicateMandate)?; - metrics::MANDATE_COUNT.add( - &metrics::CONTEXT, - 1, - &add_attributes([("connector", connector)]), - ); + metrics::MANDATE_COUNT.add(1, router_env::metric_attributes!(("connector", connector))); Ok(Some(res_mandate_id)) } } diff --git a/crates/router/src/core/metrics.rs b/crates/router/src/core/metrics.rs index 8956c38e7edd..a98a4ffb259f 100644 --- a/crates/router/src/core/metrics.rs +++ b/crates/router/src/core/metrics.rs @@ -1,7 +1,5 @@ -pub use router_env::opentelemetry::KeyValue; -use router_env::{counter_metric, global_meter, metrics_context}; +use router_env::{counter_metric, global_meter}; -metrics_context!(CONTEXT); global_meter!(GLOBAL_METER, "ROUTER_API"); counter_metric!(INCOMING_DISPUTE_WEBHOOK_METRIC, GLOBAL_METER); // No. 
of incoming dispute webhooks diff --git a/crates/router/src/core/payment_methods/cards.rs b/crates/router/src/core/payment_methods/cards.rs index aaeb1aff16ac..259f77c81a30 100644 --- a/crates/router/src/core/payment_methods/cards.rs +++ b/crates/router/src/core/payment_methods/cards.rs @@ -54,7 +54,7 @@ use hyperswitch_domain_models::customer::CustomerUpdate; ))] use kgraph_utils::transformers::IntoDirValue; use masking::Secret; -use router_env::{instrument, metrics::add_attributes, tracing}; +use router_env::{instrument, tracing}; use serde_json::json; use strum::IntoEnumIterator; @@ -2438,7 +2438,7 @@ pub async fn add_card_to_locker( ), errors::VaultError, > { - metrics::STORED_TO_LOCKER.add(&metrics::CONTEXT, 1, &[]); + metrics::STORED_TO_LOCKER.add(1, &[]); let add_card_to_hs_resp = Box::pin(common_utils::metrics::utils::record_operation_time( async { add_card_hs( @@ -2453,18 +2453,13 @@ pub async fn add_card_to_locker( .await .inspect_err(|_| { metrics::CARD_LOCKER_FAILURES.add( - &metrics::CONTEXT, 1, - &[ - router_env::opentelemetry::KeyValue::new("locker", "rust"), - router_env::opentelemetry::KeyValue::new("operation", "add"), - ], + router_env::metric_attributes!(("locker", "rust"), ("operation", "add")), ); }) }, &metrics::CARD_ADD_TIME, - &metrics::CONTEXT, - &[router_env::opentelemetry::KeyValue::new("locker", "rust")], + router_env::metric_attributes!(("locker", "rust")), )) .await?; @@ -2478,7 +2473,7 @@ pub async fn get_card_from_locker( merchant_id: &id_type::MerchantId, card_reference: &str, ) -> errors::RouterResult { - metrics::GET_FROM_LOCKER.add(&metrics::CONTEXT, 1, &[]); + metrics::GET_FROM_LOCKER.add(1, &[]); let get_card_from_rs_locker_resp = common_utils::metrics::utils::record_operation_time( async { @@ -2494,18 +2489,13 @@ pub async fn get_card_from_locker( .attach_printable("Failed while getting card from hyperswitch card vault") .inspect_err(|_| { metrics::CARD_LOCKER_FAILURES.add( - &metrics::CONTEXT, 1, - &[ - router_env::opentelemetry::KeyValue::new("locker", "rust"), - router_env::opentelemetry::KeyValue::new("operation", "get"), - ], + router_env::metric_attributes!(("locker", "rust"), ("operation", "get")), ); }) }, &metrics::CARD_GET_TIME, - &metrics::CONTEXT, - &[router_env::opentelemetry::KeyValue::new("locker", "rust")], + router_env::metric_attributes!(("locker", "rust")), ) .await?; @@ -2519,7 +2509,7 @@ pub async fn delete_card_from_locker( merchant_id: &id_type::MerchantId, card_reference: &str, ) -> errors::RouterResult { - metrics::DELETE_FROM_LOCKER.add(&metrics::CONTEXT, 1, &[]); + metrics::DELETE_FROM_LOCKER.add(1, &[]); common_utils::metrics::utils::record_operation_time( async move { @@ -2527,17 +2517,12 @@ pub async fn delete_card_from_locker( .await .inspect_err(|_| { metrics::CARD_LOCKER_FAILURES.add( - &metrics::CONTEXT, 1, - &[ - router_env::opentelemetry::KeyValue::new("locker", "rust"), - router_env::opentelemetry::KeyValue::new("operation", "delete"), - ], + router_env::metric_attributes!(("locker", "rust"), ("operation", "delete")), ); }) }, &metrics::CARD_DELETE_TIME, - &metrics::CONTEXT, &[], ) .await @@ -5778,11 +5763,10 @@ impl TempLockerCardSupport { enums::PaymentMethod::Card, ) .await?; - metrics::TOKENIZED_DATA_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::TOKENIZED_DATA_COUNT.add(1, &[]); metrics::TASKS_ADDED_COUNT.add( - &metrics::CONTEXT, 1, - &add_attributes([("flow", "DeleteTokenizeData")]), + router_env::metric_attributes!(("flow", "DeleteTokenizeData")), ); Ok(card) } diff --git 
a/crates/router/src/core/payment_methods/network_tokenization.rs b/crates/router/src/core/payment_methods/network_tokenization.rs index f02bfaa42617..de4524104788 100644 --- a/crates/router/src/core/payment_methods/network_tokenization.rs +++ b/crates/router/src/core/payment_methods/network_tokenization.rs @@ -296,8 +296,7 @@ pub async fn make_card_network_tokenization_request( ) }, &metrics::GENERATE_NETWORK_TOKEN_TIME, - &metrics::CONTEXT, - &[router_env::opentelemetry::KeyValue::new("locker", "rust")], + router_env::metric_attributes!(("locker", "rust")), ) .await } else { @@ -399,7 +398,6 @@ pub async fn get_token_from_tokenization_service( .attach_printable("Fetch network token failed") }, &metrics::FETCH_NETWORK_TOKEN_TIME, - &metrics::CONTEXT, &[], ) .await @@ -483,7 +481,7 @@ pub async fn do_status_check_for_network_token( ) }, &metrics::CHECK_NETWORK_TOKEN_STATUS_TIME, - &metrics::CONTEXT, + &[], ) .await?; @@ -608,7 +606,6 @@ pub async fn delete_network_token_from_locker_and_token_service( .await }, &metrics::DELETE_NETWORK_TOKEN_TIME, - &metrics::CONTEXT, &[], ) .await; diff --git a/crates/router/src/core/payment_methods/vault.rs b/crates/router/src/core/payment_methods/vault.rs index e4f72c717f6c..32a42d301659 100644 --- a/crates/router/src/core/payment_methods/vault.rs +++ b/crates/router/src/core/payment_methods/vault.rs @@ -9,7 +9,7 @@ use common_utils::{ }; use error_stack::{report, ResultExt}; use masking::PeekInterface; -use router_env::{instrument, metrics::add_attributes, tracing}; +use router_env::{instrument, tracing}; use scheduler::{types::process_data, utils as process_tracker_utils}; #[cfg(feature = "payouts")] @@ -927,7 +927,7 @@ impl Vault { ) .await?; add_delete_tokenized_data_task(&*state.store, &lookup_key, pm).await?; - metrics::TOKENIZED_DATA_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::TOKENIZED_DATA_COUNT.add(1, &[]); Ok(lookup_key) } @@ -979,7 +979,7 @@ impl Vault { ) .await?; // add_delete_tokenized_data_task(&*state.store, &lookup_key, pm).await?; - // scheduler_metrics::TOKENIZED_DATA_COUNT.add(&metrics::CONTEXT, 1, &[]); + // scheduler_metrics::TOKENIZED_DATA_COUNT.add(1, &[]); Ok(lookup_key) } @@ -1015,7 +1015,7 @@ pub async fn create_tokenize( ) -> RouterResult { let redis_key = get_redis_locker_key(lookup_key.as_str()); let func = || async { - metrics::CREATED_TOKENIZED_CARD.add(&metrics::CONTEXT, 1, &[]); + metrics::CREATED_TOKENIZED_CARD.add(1, &[]); let payload_to_be_encrypted = api::TokenizePayloadRequest { value1: value1.clone(), @@ -1048,7 +1048,7 @@ pub async fn create_tokenize( .await .map(|_| lookup_key.clone()) .inspect_err(|error| { - metrics::TEMP_LOCKER_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::TEMP_LOCKER_FAILURES.add(1, &[]); logger::error!(?error, "Failed to store tokenized data in Redis"); }) .change_context(errors::ApiErrorResponse::InternalServerError) @@ -1079,7 +1079,7 @@ pub async fn get_tokenized_data( ) -> RouterResult { let redis_key = get_redis_locker_key(lookup_key); let func = || async { - metrics::GET_TOKENIZED_CARD.add(&metrics::CONTEXT, 1, &[]); + metrics::GET_TOKENIZED_CARD.add(1, &[]); let redis_conn = state .store @@ -1110,7 +1110,7 @@ pub async fn get_tokenized_data( Ok(get_response) } Err(err) => { - metrics::TEMP_LOCKER_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::TEMP_LOCKER_FAILURES.add(1, &[]); Err(err).change_context(errors::ApiErrorResponse::UnprocessableEntity { message: "Token is invalid or expired".into(), }) @@ -1140,7 +1140,7 @@ pub async fn delete_tokenized_data( ) -> 
RouterResult<()> { let redis_key = get_redis_locker_key(lookup_key); let func = || async { - metrics::DELETED_TOKENIZED_CARD.add(&metrics::CONTEXT, 1, &[]); + metrics::DELETED_TOKENIZED_CARD.add(1, &[]); let redis_conn = state .store @@ -1157,7 +1157,7 @@ pub async fn delete_tokenized_data( .attach_printable("Token invalid or expired") } Err(err) => { - metrics::TEMP_LOCKER_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::TEMP_LOCKER_FAILURES.add(1, &[]); Err(errors::ApiErrorResponse::InternalServerError).attach_printable_lazy(|| { format!("Failed to delete from redis locker: {err:?}") }) @@ -1435,7 +1435,7 @@ pub async fn start_tokenize_data_workflow( Err(err) => { logger::error!("Err: Deleting Card From Locker : {:?}", err); retry_delete_tokenize(db, delete_tokenize_data.pm, tokenize_tracker.to_owned()).await?; - metrics::RETRIED_DELETE_DATA_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::RETRIED_DELETE_DATA_COUNT.add(1, &[]); } } Ok(()) @@ -1479,9 +1479,8 @@ pub async fn retry_delete_tokenize( .await .map_err(Into::into); metrics::TASKS_RESET_COUNT.add( - &metrics::CONTEXT, 1, - &add_attributes([("flow", "DeleteTokenizeData")]), + router_env::metric_attributes!(("flow", "DeleteTokenizeData")), ); retry_schedule } diff --git a/crates/router/src/core/payments.rs b/crates/router/src/core/payments.rs index dea0ca351231..dcbafa5aa9d3 100644 --- a/crates/router/src/core/payments.rs +++ b/crates/router/src/core/payments.rs @@ -54,7 +54,7 @@ use masking::{ExposeInterface, PeekInterface, Secret}; #[cfg(feature = "v2")] use operations::ValidateStatusForOperation; use redis_interface::errors::RedisError; -use router_env::{instrument, metrics::add_attributes, tracing}; +use router_env::{instrument, tracing}; #[cfg(feature = "olap")] use router_types::transformers::ForeignFrom; use scheduler::utils as pt_utils; @@ -1650,18 +1650,14 @@ pub trait PaymentRedirectFlow: Sync { req: PaymentsRedirectResponseData, ) -> RouterResponse { metrics::REDIRECTION_TRIGGERED.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ( "connector", req.connector.to_owned().unwrap_or("null".to_string()), ), - ( - "merchant_id", - merchant_account.get_id().get_string_repr().to_owned(), - ), - ]), + ("merchant_id", merchant_account.get_id().clone()), + ), ); let connector = req.connector.clone().get_required_value("connector")?; @@ -1723,12 +1719,8 @@ pub trait PaymentRedirectFlow: Sync { request: PaymentsRedirectResponseData, ) -> RouterResponse { metrics::REDIRECTION_TRIGGERED.add( - &metrics::CONTEXT, 1, - &add_attributes([( - "merchant_id", - merchant_account.get_id().get_string_repr().to_owned(), - )]), + router_env::metric_attributes!(("merchant_id", merchant_account.get_id().clone())), ); let payment_flow_response = self @@ -4576,11 +4568,7 @@ pub async fn apply_filters_on_payments( )) }, &metrics::PAYMENT_LIST_LATENCY, - &metrics::CONTEXT, - &[router_env::opentelemetry::KeyValue::new( - "merchant_id", - merchant.get_id().clone(), - )], + router_env::metric_attributes!(("merchant_id", merchant.get_id().clone())), ) .await } diff --git a/crates/router/src/core/payments/access_token.rs b/crates/router/src/core/payments/access_token.rs index 42d0823a256c..7d56c2ccb910 100644 --- a/crates/router/src/core/payments/access_token.rs +++ b/crates/router/src/core/payments/access_token.rs @@ -2,7 +2,6 @@ use std::fmt::Debug; use common_utils::ext_traits::AsyncExt; use error_stack::ResultExt; -use router_env::metrics::add_attributes; use crate::{ consts, @@ -96,17 +95,21 @@ pub async fn 
add_access_token< access_token.expires ); metrics::ACCESS_TOKEN_CACHE_HIT.add( - &metrics::CONTEXT, 1, - &add_attributes([("connector", connector.connector_name.to_string())]), + router_env::metric_attributes!(( + "connector", + connector.connector_name.to_string() + )), ); Ok(Some(access_token)) } None => { metrics::ACCESS_TOKEN_CACHE_MISS.add( - &metrics::CONTEXT, 1, - &add_attributes([("connector", connector.connector_name.to_string())]), + router_env::metric_attributes!(( + "connector", + connector.connector_name.to_string() + )), ); let cloned_router_data = router_data.clone(); @@ -242,9 +245,8 @@ pub async fn refresh_connector_auth( }?; metrics::ACCESS_TOKEN_CREATION.add( - &metrics::CONTEXT, 1, - &add_attributes([("connector", connector.connector_name.to_string())]), + router_env::metric_attributes!(("connector", connector.connector_name.to_string())), ); Ok(access_token_router_data) } diff --git a/crates/router/src/core/payments/customers.rs b/crates/router/src/core/payments/customers.rs index 4b71c43aca66..b11216c423dd 100644 --- a/crates/router/src/core/payments/customers.rs +++ b/crates/router/src/core/payments/customers.rs @@ -1,6 +1,6 @@ use common_utils::pii; use masking::{ExposeOptionInterface, PeekInterface}; -use router_env::{instrument, metrics::add_attributes, tracing}; +use router_env::{instrument, tracing}; use crate::{ core::{ @@ -53,9 +53,8 @@ pub async fn create_connector_customer( .to_payment_failed_response()?; metrics::CONNECTOR_CUSTOMER_CREATE.add( - &metrics::CONTEXT, 1, - &add_attributes([("connector", connector.connector_name.to_string())]), + router_env::metric_attributes!(("connector", connector.connector_name.to_string())), ); let connector_customer_id = match resp.response { diff --git a/crates/router/src/core/payments/flows/authorize_flow.rs b/crates/router/src/core/payments/flows/authorize_flow.rs index 7ee407fa9e02..75eda0ed9138 100644 --- a/crates/router/src/core/payments/flows/authorize_flow.rs +++ b/crates/router/src/core/payments/flows/authorize_flow.rs @@ -4,7 +4,6 @@ use hyperswitch_domain_models::errors::api_error_response::ApiErrorResponse; #[cfg(feature = "v2")] use hyperswitch_domain_models::payments::PaymentConfirmData; use masking::ExposeInterface; -use router_env::metrics::add_attributes; // use router_env::tracing::Instrument; use super::{ConstructFlowSpecificData, Feature}; @@ -205,7 +204,7 @@ impl Feature for types::PaymentsAu &auth_router_data.response, ); auth_router_data.integrity_check = integrity_result; - metrics::PAYMENT_COUNT.add(&metrics::CONTEXT, 1, &[]); // Move outside of the if block + metrics::PAYMENT_COUNT.add(1, &[]); // Move outside of the if block match auth_router_data.response.clone() { Err(_) => Ok(auth_router_data), @@ -363,12 +362,11 @@ impl Feature for types::PaymentsAu > = connector.connector.get_connector_integration(); metrics::EXECUTE_PRETASK_COUNT.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ("connector", connector.connector_name.to_string()), ("flow", format!("{:?}", api::Authorize)), - ]), + ), ); logger::debug!(completed_pre_tasks=?true); @@ -497,9 +495,8 @@ pub async fn authorize_preprocessing_steps( .to_payment_failed_response()?; metrics::PREPROCESSING_STEPS_COUNT.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ("connector", connector.connector_name.to_string()), ("payment_method", router_data.payment_method.to_string()), ( @@ -507,11 +504,10 @@ pub async fn authorize_preprocessing_steps( router_data .request .payment_method_type 
- .as_ref() .map(|inner| inner.to_string()) .unwrap_or("null".to_string()), ), - ]), + ), ); let mut authorize_router_data = helpers::router_data_type_conversion::<_, F, _, _, _, _>( resp.clone(), diff --git a/crates/router/src/core/payments/flows/cancel_flow.rs b/crates/router/src/core/payments/flows/cancel_flow.rs index e9c3f63c7920..4d2d6294197f 100644 --- a/crates/router/src/core/payments/flows/cancel_flow.rs +++ b/crates/router/src/core/payments/flows/cancel_flow.rs @@ -1,5 +1,4 @@ use async_trait::async_trait; -use router_env::metrics::add_attributes; use super::{ConstructFlowSpecificData, Feature}; use crate::{ @@ -86,9 +85,8 @@ impl Feature _header_payload: hyperswitch_domain_models::payments::HeaderPayload, ) -> RouterResult { metrics::PAYMENT_CANCEL_COUNT.add( - &metrics::CONTEXT, 1, - &add_attributes([("connector", connector.connector_name.to_string())]), + router_env::metric_attributes!(("connector", connector.connector_name.to_string())), ); let connector_integration: services::BoxedPaymentConnectorIntegrationInterface< diff --git a/crates/router/src/core/payments/flows/complete_authorize_flow.rs b/crates/router/src/core/payments/flows/complete_authorize_flow.rs index e347cd3c3d52..44bb9f2703af 100644 --- a/crates/router/src/core/payments/flows/complete_authorize_flow.rs +++ b/crates/router/src/core/payments/flows/complete_authorize_flow.rs @@ -1,6 +1,5 @@ use async_trait::async_trait; use masking::ExposeInterface; -use router_env::metrics::add_attributes; use super::{ConstructFlowSpecificData, Feature}; use crate::{ @@ -256,12 +255,11 @@ pub async fn complete_authorize_preprocessing_steps( .to_payment_failed_response()?; metrics::PREPROCESSING_STEPS_COUNT.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ("connector", connector.connector_name.to_string()), ("payment_method", router_data.payment_method.to_string()), - ]), + ), ); let mut router_data_request = router_data.request.to_owned(); diff --git a/crates/router/src/core/payments/flows/session_flow.rs b/crates/router/src/core/payments/flows/session_flow.rs index 71049a56241d..464781f3d7cc 100644 --- a/crates/router/src/core/payments/flows/session_flow.rs +++ b/crates/router/src/core/payments/flows/session_flow.rs @@ -9,7 +9,6 @@ use error_stack::{Report, ResultExt}; #[cfg(feature = "v2")] use hyperswitch_domain_models::payments::PaymentIntentData; use masking::ExposeInterface; -use router_env::metrics::add_attributes; use super::{ConstructFlowSpecificData, Feature}; use crate::{ @@ -129,9 +128,8 @@ impl Feature for types::PaymentsSessio header_payload: hyperswitch_domain_models::payments::HeaderPayload, ) -> RouterResult { metrics::SESSION_TOKEN_CREATED.add( - &metrics::CONTEXT, 1, - &add_attributes([("connector", connector.connector_name.to_string())]), + router_env::metric_attributes!(("connector", connector.connector_name.to_string())), ); self.decide_flow( state, diff --git a/crates/router/src/core/payments/helpers.rs b/crates/router/src/core/payments/helpers.rs index 6d40593060dd..01bac9a21242 100644 --- a/crates/router/src/core/payments/helpers.rs +++ b/crates/router/src/core/payments/helpers.rs @@ -42,7 +42,7 @@ use openssl::{ pkey::PKey, symm::{decrypt_aead, Cipher}, }; -use router_env::{instrument, logger, metrics::add_attributes, tracing}; +use router_env::{instrument, logger, tracing}; use uuid::Uuid; use x509_parser::parse_x509_certificate; @@ -1398,9 +1398,8 @@ where if !requeue { // Here, increment the count of added tasks every time a payment has been confirmed or PSync has 
been called metrics::TASKS_ADDED_COUNT.add( - &metrics::CONTEXT, 1, - &add_attributes([("flow", format!("{:#?}", operation))]), + router_env::metric_attributes!(("flow", format!("{:#?}", operation))), ); super::add_process_sync_task(&*state.store, payment_attempt, stime) .await @@ -1409,9 +1408,8 @@ where } else { // When the requeue is true, we reset the tasks count as we reset the task every time it is requeued metrics::TASKS_RESET_COUNT.add( - &metrics::CONTEXT, 1, - &add_attributes([("flow", format!("{:#?}", operation))]), + router_env::metric_attributes!(("flow", format!("{:#?}", operation))), ); super::reset_process_sync_task(&*state.store, payment_attempt, stime) .await @@ -1723,7 +1721,7 @@ pub async fn create_customer_if_not_exist<'a, F: Clone, R, D>( updated_by: None, version: hyperswitch_domain_models::consts::API_VERSION, }; - metrics::CUSTOMER_CREATED.add(&metrics::CONTEXT, 1, &[]); + metrics::CUSTOMER_CREATED.add(1, &[]); db.insert_customer(new_customer, key_manager_state, key_store, storage_scheme) .await } @@ -3959,12 +3957,11 @@ pub fn get_attempt_type( Some(api_models::enums::RetryAction::ManualRetry) ) { metrics::MANUAL_RETRY_REQUEST_COUNT.add( - &metrics::CONTEXT, 1, - &add_attributes([( + router_env::metric_attributes!(( "merchant_id", - payment_attempt.merchant_id.get_string_repr().to_owned(), - )]), + payment_attempt.merchant_id.clone(), + )), ); match payment_attempt.status { enums::AttemptStatus::Started @@ -3986,12 +3983,11 @@ pub fn get_attempt_type( | enums::AttemptStatus::PaymentMethodAwaited | enums::AttemptStatus::DeviceDataCollectionPending => { metrics::MANUAL_RETRY_VALIDATION_FAILED.add( - &metrics::CONTEXT, 1, - &add_attributes([( + router_env::metric_attributes!(( "merchant_id", - payment_attempt.merchant_id.get_string_repr().to_owned(), - )]), + payment_attempt.merchant_id.clone(), + )), ); Err(errors::ApiErrorResponse::InternalServerError) .attach_printable("Payment Attempt unexpected state") @@ -4001,12 +3997,11 @@ pub fn get_attempt_type( | storage_enums::AttemptStatus::RouterDeclined | storage_enums::AttemptStatus::CaptureFailed => { metrics::MANUAL_RETRY_VALIDATION_FAILED.add( - &metrics::CONTEXT, 1, - &add_attributes([( + router_env::metric_attributes!(( "merchant_id", - payment_attempt.merchant_id.get_string_repr().to_owned(), - )]), + payment_attempt.merchant_id.clone(), + )), ); Err(report!(errors::ApiErrorResponse::PreconditionFailed { message: @@ -4019,12 +4014,11 @@ pub fn get_attempt_type( | storage_enums::AttemptStatus::AuthorizationFailed | storage_enums::AttemptStatus::Failure => { metrics::MANUAL_RETRY_COUNT.add( - &metrics::CONTEXT, 1, - &add_attributes([( + router_env::metric_attributes!(( "merchant_id", - payment_attempt.merchant_id.get_string_repr().to_owned(), - )]), + payment_attempt.merchant_id.clone(), + )), ); Ok(AttemptType::New) } @@ -5465,9 +5459,9 @@ pub async fn get_gsm_record( error_code, error_message ); - metrics::AUTO_RETRY_GSM_MISS_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_RETRY_GSM_MISS_COUNT.add( 1, &[]); } else { - metrics::AUTO_RETRY_GSM_FETCH_FAILURE_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_RETRY_GSM_FETCH_FAILURE_COUNT.add( 1, &[]); }; err.change_context(errors::ApiErrorResponse::InternalServerError) .attach_printable("failed to fetch decision from gsm") diff --git a/crates/router/src/core/payments/operations/payment_response.rs b/crates/router/src/core/payments/operations/payment_response.rs index d77236c8a2bc..01a4e0860918 100644 --- 
a/crates/router/src/core/payments/operations/payment_response.rs +++ b/crates/router/src/core/payments/operations/payment_response.rs @@ -17,7 +17,7 @@ use hyperswitch_domain_models::payments::{ PaymentConfirmData, PaymentIntentData, PaymentStatusData, }; use router_derive; -use router_env::{instrument, logger, metrics::add_attributes, tracing}; +use router_env::{instrument, logger, tracing}; use storage_impl::DataModelExt; use tracing_futures::Instrument; @@ -1655,7 +1655,7 @@ async fn payment_response_update_tracker( } } - metrics::SUCCESSFUL_PAYMENT.add(&metrics::CONTEXT, 1, &[]); + metrics::SUCCESSFUL_PAYMENT.add(1, &[]); } let payment_method_id = @@ -2034,9 +2034,8 @@ async fn payment_response_update_tracker( Ok(()) => Ok(payment_data), Err(err) => { metrics::INTEGRITY_CHECK_FAILED.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ( "connector", payment_data @@ -2047,13 +2046,9 @@ async fn payment_response_update_tracker( ), ( "merchant_id", - payment_data - .payment_attempt - .merchant_id - .get_string_repr() - .to_owned(), - ), - ]), + payment_data.payment_attempt.merchant_id.clone(), + ) + ), ); Err(error_stack::Report::new( errors::ApiErrorResponse::IntegrityCheckFailed { diff --git a/crates/router/src/core/payments/retry.rs b/crates/router/src/core/payments/retry.rs index fed28b680110..7ae536a1f193 100644 --- a/crates/router/src/core/payments/retry.rs +++ b/crates/router/src/core/payments/retry.rs @@ -61,7 +61,7 @@ where { let mut retries = None; - metrics::AUTO_RETRY_ELIGIBLE_REQUEST_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_RETRY_ELIGIBLE_REQUEST_COUNT.add(1, &[]); let mut initial_gsm = get_gsm(state, &router_data).await?; @@ -129,14 +129,14 @@ where .await; if retries.is_none() || retries == Some(0) { - metrics::AUTO_RETRY_EXHAUSTED_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_RETRY_EXHAUSTED_COUNT.add(1, &[]); logger::info!("retries exhausted for auto_retry payment"); break; } if connectors.len() == 0 { logger::info!("connectors exhausted for auto_retry payment"); - metrics::AUTO_RETRY_EXHAUSTED_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_RETRY_EXHAUSTED_COUNT.add(1, &[]); break; } @@ -275,7 +275,7 @@ pub fn get_gsm_decision( }); if option_gsm_decision.is_some() { - metrics::AUTO_RETRY_GSM_MATCH_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_RETRY_GSM_MATCH_COUNT.add(1, &[]); } option_gsm_decision.unwrap_or_default() } @@ -323,7 +323,7 @@ where types::RouterData: Feature, dyn api::Connector: services::api::ConnectorIntegration, { - metrics::AUTO_RETRY_PAYMENT_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_RETRY_PAYMENT_COUNT.add(1, &[]); modify_trackers( state, diff --git a/crates/router/src/core/payments/tokenization.rs b/crates/router/src/core/payments/tokenization.rs index efee99bb7a0e..89fb7752d1b0 100644 --- a/crates/router/src/core/payments/tokenization.rs +++ b/crates/router/src/core/payments/tokenization.rs @@ -16,7 +16,7 @@ use common_utils::{ }; use error_stack::{report, ResultExt}; use masking::{ExposeInterface, Secret}; -use router_env::{instrument, metrics::add_attributes, tracing}; +use router_env::{instrument, tracing}; use super::helpers; use crate::{ @@ -1090,12 +1090,11 @@ pub async fn add_payment_method_token( .to_payment_failed_response()?; metrics::CONNECTOR_PAYMENT_METHOD_TOKENIZATION.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ("connector", connector.connector_name.to_string()), ("payment_method", router_data.payment_method.to_string()), 
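Editorial note: the recurring change across these hunks is the metric call-site migration. A minimal before/after sketch of that pattern, using names taken from this patch; the assumption (not shown in the diff itself) is that `router_env::metric_attributes!` expands to an attribute slice accepted by the upgraded OpenTelemetry counters, whose `add` no longer takes an explicit context:

// Before: explicit metrics context plus a helper that built the attribute list.
metrics::MANUAL_RETRY_COUNT.add(
    &metrics::CONTEXT,
    1,
    &add_attributes([("merchant_id", payment_attempt.merchant_id.get_string_repr().to_owned())]),
);

// After, as applied throughout this patch: the context argument is dropped and
// attributes are built by the macro; id-like values are passed by clone instead
// of being stringified first.
metrics::MANUAL_RETRY_COUNT.add(
    1,
    router_env::metric_attributes!(("merchant_id", payment_attempt.merchant_id.clone())),
);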
- ]), + ), ); let payment_token_resp = resp.response.map(|res| { diff --git a/crates/router/src/core/payments/transformers.rs b/crates/router/src/core/payments/transformers.rs index 6e277ed460f0..0f248d6cc36c 100644 --- a/crates/router/src/core/payments/transformers.rs +++ b/crates/router/src/core/payments/transformers.rs @@ -21,7 +21,7 @@ use hyperswitch_domain_models::payments::{PaymentConfirmData, PaymentIntentData} use hyperswitch_domain_models::ApiModelToDieselModelConvertor; use hyperswitch_domain_models::{payments::payment_intent::CustomerData, router_request_types}; use masking::{ExposeInterface, Maskable, PeekInterface, Secret}; -use router_env::{instrument, metrics::add_attributes, tracing}; +use router_env::{instrument, tracing}; use super::{flows::Feature, types::AuthenticationData, OperationSessionGetters, PaymentData}; use crate::{ @@ -2084,14 +2084,13 @@ where }; metrics::PAYMENT_OPS_COUNT.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ("operation", format!("{:?}", operation)), - ("merchant", merchant_id.get_string_repr().to_owned()), + ("merchant", merchant_id.clone()), ("payment_method_type", payment_method_type), ("payment_method", payment_method), - ]), + ), ); Ok(output) diff --git a/crates/router/src/core/payouts/access_token.rs b/crates/router/src/core/payouts/access_token.rs index b7f52b259c04..e5e9b47e4ffa 100644 --- a/crates/router/src/core/payouts/access_token.rs +++ b/crates/router/src/core/payouts/access_token.rs @@ -1,6 +1,5 @@ use common_utils::ext_traits::AsyncExt; use error_stack::ResultExt; -use router_env::metrics::add_attributes; use crate::{ consts, @@ -185,9 +184,8 @@ pub async fn refresh_connector_auth( }?; metrics::ACCESS_TOKEN_CREATION.add( - &metrics::CONTEXT, 1, - &add_attributes([("connector", connector.connector_name.to_string())]), + router_env::metric_attributes!(("connector", connector.connector_name.to_string())), ); Ok(access_token_router_data) } diff --git a/crates/router/src/core/payouts/helpers.rs b/crates/router/src/core/payouts/helpers.rs index 5becf01597cb..de287defa868 100644 --- a/crates/router/src/core/payouts/helpers.rs +++ b/crates/router/src/core/payouts/helpers.rs @@ -981,9 +981,9 @@ pub async fn get_gsm_record( error_code, error_message ); - metrics::AUTO_PAYOUT_RETRY_GSM_MISS_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_PAYOUT_RETRY_GSM_MISS_COUNT.add( 1, &[]); } else { - metrics::AUTO_PAYOUT_RETRY_GSM_FETCH_FAILURE_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_PAYOUT_RETRY_GSM_FETCH_FAILURE_COUNT.add( 1, &[]); }; err.change_context(errors::ApiErrorResponse::InternalServerError) .attach_printable("failed to fetch decision from gsm") diff --git a/crates/router/src/core/payouts/retry.rs b/crates/router/src/core/payouts/retry.rs index ed7e4a9b4b88..662bdb334a80 100644 --- a/crates/router/src/core/payouts/retry.rs +++ b/crates/router/src/core/payouts/retry.rs @@ -31,7 +31,7 @@ pub async fn do_gsm_multiple_connector_actions( ) -> RouterResult<()> { let mut retries = None; - metrics::AUTO_PAYOUT_RETRY_ELIGIBLE_REQUEST_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_PAYOUT_RETRY_ELIGIBLE_REQUEST_COUNT.add(1, &[]); let mut connector = original_connector_data; @@ -49,14 +49,14 @@ pub async fn do_gsm_multiple_connector_actions( .await; if retries.is_none() || retries == Some(0) { - metrics::AUTO_PAYOUT_RETRY_EXHAUSTED_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_PAYOUT_RETRY_EXHAUSTED_COUNT.add(1, &[]); logger::info!("retries exhausted for auto_retry payout"); break; } 
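Editorial note: counters recorded without attributes follow the same two-argument form with an empty slice; a short sketch with names from this patch (a sketch only, not verified against the new router_env API surface):

// No attributes: an empty slice stands in for the attribute list.
metrics::AUTO_PAYOUT_RETRY_ELIGIBLE_REQUEST_COUNT.add(1, &[]);

// With attributes, the macro takes the place of the empty slice.
metrics::TASKS_ADDED_COUNT.add(1, router_env::metric_attributes!(("flow", "Refund")));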
if connectors.len() == 0 { logger::info!("connectors exhausted for auto_retry payout"); - metrics::AUTO_PAYOUT_RETRY_EXHAUSTED_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_PAYOUT_RETRY_EXHAUSTED_COUNT.add(1, &[]); break; } @@ -97,7 +97,7 @@ pub async fn do_gsm_single_connector_actions( ) -> RouterResult<()> { let mut retries = None; - metrics::AUTO_PAYOUT_RETRY_ELIGIBLE_REQUEST_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_PAYOUT_RETRY_ELIGIBLE_REQUEST_COUNT.add(1, &[]); let mut previous_gsm = None; // to compare previous status @@ -121,7 +121,7 @@ pub async fn do_gsm_single_connector_actions( .await; if retries.is_none() || retries == Some(0) { - metrics::AUTO_PAYOUT_RETRY_EXHAUSTED_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_PAYOUT_RETRY_EXHAUSTED_COUNT.add(1, &[]); logger::info!("retries exhausted for auto_retry payment"); break; } @@ -218,7 +218,7 @@ pub fn get_gsm_decision( }); if option_gsm_decision.is_some() { - metrics::AUTO_PAYOUT_RETRY_GSM_MATCH_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_PAYOUT_RETRY_GSM_MATCH_COUNT.add(1, &[]); } option_gsm_decision.unwrap_or_default() } @@ -232,7 +232,7 @@ pub async fn do_retry( key_store: &domain::MerchantKeyStore, payout_data: &mut PayoutData, ) -> RouterResult<()> { - metrics::AUTO_RETRY_PAYOUT_COUNT.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_RETRY_PAYOUT_COUNT.add(1, &[]); modify_trackers(state, &connector, merchant_account, payout_data).await?; diff --git a/crates/router/src/core/refunds.rs b/crates/router/src/core/refunds.rs index ebc4c926599a..b13fb930cd74 100644 --- a/crates/router/src/core/refunds.rs +++ b/crates/router/src/core/refunds.rs @@ -15,7 +15,7 @@ use error_stack::{report, ResultExt}; use hyperswitch_domain_models::router_data::ErrorResponse; use hyperswitch_interfaces::integrity::{CheckIntegrity, FlowIntegrity, GetIntegrityObject}; use masking::PeekInterface; -use router_env::{instrument, metrics::add_attributes, tracing}; +use router_env::{instrument, tracing}; use scheduler::{consumer::types::process_data, utils as process_tracker_utils}; #[cfg(feature = "olap")] use strum::IntoEnumIterator; @@ -153,9 +153,8 @@ pub async fn trigger_refund_to_gateway( let storage_scheme = merchant_account.storage_scheme; metrics::REFUND_COUNT.add( - &metrics::CONTEXT, 1, - &add_attributes([("connector", routed_through.clone())]), + router_env::metric_attributes!(("connector", routed_through.clone())), ); let connector: api::ConnectorData = api::ConnectorData::get_connector_by_name( @@ -302,15 +301,11 @@ pub async fn trigger_refund_to_gateway( (Some(refund_id), refund_data) }); metrics::INTEGRITY_CHECK_FAILED.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ("connector", connector.connector_name.to_string()), - ( - "merchant_id", - merchant_account.get_id().get_string_repr().to_owned(), - ), - ]), + ("merchant_id", merchant_account.get_id().clone()), + ), ); storage::RefundUpdate::ErrorUpdate { refund_status: Some(enums::RefundStatus::ManualReview), @@ -327,9 +322,11 @@ pub async fn trigger_refund_to_gateway( Ok(()) => { if response.refund_status == diesel_models::enums::RefundStatus::Success { metrics::SUCCESSFUL_REFUND.add( - &metrics::CONTEXT, 1, - &add_attributes([("connector", connector.connector_name.to_string())]), + router_env::metric_attributes!(( + "connector", + connector.connector_name.to_string(), + )), ) } let (connector_refund_id, connector_refund_data) = @@ -619,15 +616,11 @@ pub async fn sync_refund_with_gateway( Ok(response) => match 
router_data_res.integrity_check.clone() { Err(err) => { metrics::INTEGRITY_CHECK_FAILED.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ("connector", connector.connector_name.to_string()), - ( - "merchant_id", - merchant_account.get_id().get_string_repr().to_owned(), - ), - ]), + ("merchant_id", merchant_account.get_id().clone()), + ), ); let (refund_connector_transaction_id, connector_refund_data) = err .connector_transaction_id @@ -1590,7 +1583,7 @@ pub async fn add_refund_sync_task( refund.refund_id ) })?; - metrics::TASKS_ADDED_COUNT.add(&metrics::CONTEXT, 1, &add_attributes([("flow", "Refund")])); + metrics::TASKS_ADDED_COUNT.add(1, router_env::metric_attributes!(("flow", "Refund"))); Ok(response) } diff --git a/crates/router/src/core/routing.rs b/crates/router/src/core/routing.rs index cdd8b518f35a..cf04bec75a00 100644 --- a/crates/router/src/core/routing.rs +++ b/crates/router/src/core/routing.rs @@ -16,8 +16,6 @@ use external_services::grpc_client::dynamic_routing::SuccessBasedDynamicRouting; use hyperswitch_domain_models::{mandates, payment_address}; #[cfg(all(feature = "v1", feature = "dynamic_routing"))] use router_env::logger; -#[cfg(feature = "v1")] -use router_env::metrics::add_attributes; use rustc_hash::FxHashSet; #[cfg(all(feature = "v1", feature = "dynamic_routing"))] use storage_impl::redis::cache; @@ -137,7 +135,7 @@ pub async fn retrieve_merchant_routing_dictionary( query_params: RoutingRetrieveQuery, transaction_type: &enums::TransactionType, ) -> RouterResponse { - metrics::ROUTING_MERCHANT_DICTIONARY_RETRIEVE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_MERCHANT_DICTIONARY_RETRIEVE.add(1, &[]); let routing_metadata: Vec = state .store @@ -157,7 +155,7 @@ pub async fn retrieve_merchant_routing_dictionary( .map(ForeignInto::foreign_into) .collect::>(); - metrics::ROUTING_MERCHANT_DICTIONARY_RETRIEVE_SUCCESS_RESPONSE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_MERCHANT_DICTIONARY_RETRIEVE_SUCCESS_RESPONSE.add(1, &[]); Ok(service_api::ApplicationResponse::Json( routing_types::RoutingKind::RoutingAlgorithm(result), )) @@ -172,7 +170,7 @@ pub async fn create_routing_algorithm_under_profile( request: routing_types::RoutingConfigRequest, transaction_type: enums::TransactionType, ) -> RouterResponse { - metrics::ROUTING_CREATE_REQUEST_RECEIVED.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_CREATE_REQUEST_RECEIVED.add(1, &[]); let db = &*state.store; let key_manager_state = &(&state).into(); @@ -229,7 +227,7 @@ pub async fn create_routing_algorithm_under_profile( let new_record = record.foreign_into(); - metrics::ROUTING_CREATE_SUCCESS_RESPONSE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_CREATE_SUCCESS_RESPONSE.add(1, &[]); Ok(service_api::ApplicationResponse::Json(new_record)) } @@ -242,7 +240,7 @@ pub async fn create_routing_algorithm_under_profile( request: routing_types::RoutingConfigRequest, transaction_type: enums::TransactionType, ) -> RouterResponse { - metrics::ROUTING_CREATE_REQUEST_RECEIVED.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_CREATE_REQUEST_RECEIVED.add(1, &[]); let db = state.store.as_ref(); let key_manager_state = &(&state).into(); @@ -319,7 +317,7 @@ pub async fn create_routing_algorithm_under_profile( let new_record = record.foreign_into(); - metrics::ROUTING_CREATE_SUCCESS_RESPONSE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_CREATE_SUCCESS_RESPONSE.add(1, &[]); Ok(service_api::ApplicationResponse::Json(new_record)) } @@ -332,7 +330,7 @@ pub async fn 
link_routing_config_under_profile( algorithm_id: common_utils::id_type::RoutingId, transaction_type: &enums::TransactionType, ) -> RouterResponse { - metrics::ROUTING_LINK_CONFIG.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_LINK_CONFIG.add(1, &[]); let db = state.store.as_ref(); let key_manager_state = &(&state).into(); @@ -387,7 +385,7 @@ pub async fn link_routing_config_under_profile( ) .await?; - metrics::ROUTING_LINK_CONFIG_SUCCESS_RESPONSE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_LINK_CONFIG_SUCCESS_RESPONSE.add(1, &[]); Ok(service_api::ApplicationResponse::Json( routing_algorithm.0.foreign_into(), )) @@ -402,7 +400,7 @@ pub async fn link_routing_config( algorithm_id: common_utils::id_type::RoutingId, transaction_type: &enums::TransactionType, ) -> RouterResponse { - metrics::ROUTING_LINK_CONFIG.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_LINK_CONFIG.add(1, &[]); let db = state.store.as_ref(); let key_manager_state = &(&state).into(); @@ -539,7 +537,7 @@ pub async fn link_routing_config( } }; - metrics::ROUTING_LINK_CONFIG_SUCCESS_RESPONSE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_LINK_CONFIG_SUCCESS_RESPONSE.add(1, &[]); Ok(service_api::ApplicationResponse::Json( routing_algorithm.foreign_into(), )) @@ -553,7 +551,7 @@ pub async fn retrieve_routing_algorithm_from_algorithm_id( authentication_profile_id: Option, algorithm_id: common_utils::id_type::RoutingId, ) -> RouterResponse { - metrics::ROUTING_RETRIEVE_CONFIG.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_RETRIEVE_CONFIG.add(1, &[]); let db = state.store.as_ref(); let key_manager_state = &(&state).into(); @@ -577,7 +575,7 @@ pub async fn retrieve_routing_algorithm_from_algorithm_id( .change_context(errors::ApiErrorResponse::InternalServerError) .attach_printable("unable to parse routing algorithm")?; - metrics::ROUTING_RETRIEVE_CONFIG_SUCCESS_RESPONSE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_RETRIEVE_CONFIG_SUCCESS_RESPONSE.add(1, &[]); Ok(service_api::ApplicationResponse::Json(response)) } @@ -589,7 +587,7 @@ pub async fn retrieve_routing_algorithm_from_algorithm_id( authentication_profile_id: Option, algorithm_id: common_utils::id_type::RoutingId, ) -> RouterResponse { - metrics::ROUTING_RETRIEVE_CONFIG.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_RETRIEVE_CONFIG.add(1, &[]); let db = state.store.as_ref(); let key_manager_state = &(&state).into(); @@ -618,7 +616,7 @@ pub async fn retrieve_routing_algorithm_from_algorithm_id( .change_context(errors::ApiErrorResponse::InternalServerError) .attach_printable("unable to parse routing algorithm")?; - metrics::ROUTING_RETRIEVE_CONFIG_SUCCESS_RESPONSE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_RETRIEVE_CONFIG_SUCCESS_RESPONSE.add(1, &[]); Ok(service_api::ApplicationResponse::Json(response)) } @@ -630,7 +628,7 @@ pub async fn unlink_routing_config_under_profile( profile_id: common_utils::id_type::ProfileId, transaction_type: &enums::TransactionType, ) -> RouterResponse { - metrics::ROUTING_UNLINK_CONFIG.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_UNLINK_CONFIG.add(1, &[]); let db = state.store.as_ref(); let key_manager_state = &(&state).into(); @@ -667,7 +665,7 @@ pub async fn unlink_routing_config_under_profile( transaction_type, ) .await?; - metrics::ROUTING_UNLINK_CONFIG_SUCCESS_RESPONSE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_UNLINK_CONFIG_SUCCESS_RESPONSE.add(1, &[]); Ok(service_api::ApplicationResponse::Json(response)) } else { Err(errors::ApiErrorResponse::PreconditionFailed { @@ -685,7 +683,7 @@ pub 
async fn unlink_routing_config( authentication_profile_id: Option, transaction_type: &enums::TransactionType, ) -> RouterResponse { - metrics::ROUTING_UNLINK_CONFIG.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_UNLINK_CONFIG.add(1, &[]); let db = state.store.as_ref(); let key_manager_state = &(&state).into(); @@ -754,7 +752,7 @@ pub async fn unlink_routing_config( ) .await?; - metrics::ROUTING_UNLINK_CONFIG_SUCCESS_RESPONSE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_UNLINK_CONFIG_SUCCESS_RESPONSE.add(1, &[]); Ok(service_api::ApplicationResponse::Json(response)) } None => Err(errors::ApiErrorResponse::PreconditionFailed { @@ -777,7 +775,7 @@ pub async fn update_default_fallback_routing( profile_id: common_utils::id_type::ProfileId, updated_list_of_connectors: Vec, ) -> RouterResponse> { - metrics::ROUTING_UPDATE_CONFIG.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_UPDATE_CONFIG.add(1, &[]); let db = state.store.as_ref(); let key_manager_state = &(&state).into(); let profile = core_utils::validate_and_get_business_profile( @@ -839,7 +837,7 @@ pub async fn update_default_fallback_routing( ) .await?; - metrics::ROUTING_UPDATE_CONFIG_SUCCESS_RESPONSE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_UPDATE_CONFIG_SUCCESS_RESPONSE.add(1, &[]); Ok(service_api::ApplicationResponse::Json( updated_list_of_connectors, )) @@ -852,7 +850,7 @@ pub async fn update_default_routing_config( updated_config: Vec, transaction_type: &enums::TransactionType, ) -> RouterResponse> { - metrics::ROUTING_UPDATE_CONFIG.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_UPDATE_CONFIG.add(1, &[]); let db = state.store.as_ref(); let default_config = helpers::get_merchant_default_config( db, @@ -894,7 +892,7 @@ pub async fn update_default_routing_config( ) .await?; - metrics::ROUTING_UPDATE_CONFIG_SUCCESS_RESPONSE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_UPDATE_CONFIG_SUCCESS_RESPONSE.add(1, &[]); Ok(service_api::ApplicationResponse::Json(updated_config)) } @@ -905,7 +903,7 @@ pub async fn retrieve_default_fallback_algorithm_for_profile( key_store: domain::MerchantKeyStore, profile_id: common_utils::id_type::ProfileId, ) -> RouterResponse> { - metrics::ROUTING_RETRIEVE_DEFAULT_CONFIG.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_RETRIEVE_DEFAULT_CONFIG.add(1, &[]); let db = state.store.as_ref(); let key_manager_state = &(&state).into(); let profile = core_utils::validate_and_get_business_profile( @@ -921,7 +919,7 @@ pub async fn retrieve_default_fallback_algorithm_for_profile( let connectors_choice = admin::ProfileWrapper::new(profile) .get_default_fallback_list_of_connector_under_profile()?; - metrics::ROUTING_RETRIEVE_DEFAULT_CONFIG_SUCCESS_RESPONSE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_RETRIEVE_DEFAULT_CONFIG_SUCCESS_RESPONSE.add(1, &[]); Ok(service_api::ApplicationResponse::Json(connectors_choice)) } @@ -932,7 +930,7 @@ pub async fn retrieve_default_routing_config( merchant_account: domain::MerchantAccount, transaction_type: &enums::TransactionType, ) -> RouterResponse> { - metrics::ROUTING_RETRIEVE_DEFAULT_CONFIG.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_RETRIEVE_DEFAULT_CONFIG.add(1, &[]); let db = state.store.as_ref(); let id = profile_id .map(|profile_id| profile_id.get_string_repr().to_owned()) @@ -941,11 +939,7 @@ pub async fn retrieve_default_routing_config( helpers::get_merchant_default_config(db, &id, transaction_type) .await .map(|conn_choice| { - metrics::ROUTING_RETRIEVE_DEFAULT_CONFIG_SUCCESS_RESPONSE.add( - &metrics::CONTEXT, - 1, - &[], - ); + 
metrics::ROUTING_RETRIEVE_DEFAULT_CONFIG_SUCCESS_RESPONSE.add(1, &[]); service_api::ApplicationResponse::Json(conn_choice) }) } @@ -959,7 +953,7 @@ pub async fn retrieve_routing_config_under_profile( profile_id: common_utils::id_type::ProfileId, transaction_type: &enums::TransactionType, ) -> RouterResponse { - metrics::ROUTING_RETRIEVE_LINK_CONFIG.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_RETRIEVE_LINK_CONFIG.add(1, &[]); let db = state.store.as_ref(); let key_manager_state = &(&state).into(); @@ -988,7 +982,7 @@ pub async fn retrieve_routing_config_under_profile( .map(|routing_algo| routing_algo.foreign_into()) .collect::>(); - metrics::ROUTING_RETRIEVE_LINK_CONFIG_SUCCESS_RESPONSE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_RETRIEVE_LINK_CONFIG_SUCCESS_RESPONSE.add(1, &[]); Ok(service_api::ApplicationResponse::Json( routing_types::LinkedRoutingConfigRetrieveResponse::ProfileBased(active_algorithms), )) @@ -1003,7 +997,7 @@ pub async fn retrieve_linked_routing_config( query_params: routing_types::RoutingRetrieveLinkQuery, transaction_type: &enums::TransactionType, ) -> RouterResponse { - metrics::ROUTING_RETRIEVE_LINK_CONFIG.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_RETRIEVE_LINK_CONFIG.add(1, &[]); let db = state.store.as_ref(); let key_manager_state = &(&state).into(); @@ -1062,7 +1056,7 @@ pub async fn retrieve_linked_routing_config( } } - metrics::ROUTING_RETRIEVE_LINK_CONFIG_SUCCESS_RESPONSE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_RETRIEVE_LINK_CONFIG_SUCCESS_RESPONSE.add(1, &[]); Ok(service_api::ApplicationResponse::Json( routing_types::LinkedRoutingConfigRetrieveResponse::ProfileBased(active_algorithms), )) @@ -1074,7 +1068,7 @@ pub async fn retrieve_default_routing_config_for_profiles( key_store: domain::MerchantKeyStore, transaction_type: &enums::TransactionType, ) -> RouterResponse> { - metrics::ROUTING_RETRIEVE_CONFIG_FOR_PROFILE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_RETRIEVE_CONFIG_FOR_PROFILE.add(1, &[]); let db = state.store.as_ref(); let key_manager_state = &(&state).into(); @@ -1111,7 +1105,7 @@ pub async fn retrieve_default_routing_config_for_profiles( ) .collect::>(); - metrics::ROUTING_RETRIEVE_CONFIG_FOR_PROFILE_SUCCESS_RESPONSE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_RETRIEVE_CONFIG_FOR_PROFILE_SUCCESS_RESPONSE.add(1, &[]); Ok(service_api::ApplicationResponse::Json(default_configs)) } @@ -1123,7 +1117,7 @@ pub async fn update_default_routing_config_for_profile( profile_id: common_utils::id_type::ProfileId, transaction_type: &enums::TransactionType, ) -> RouterResponse { - metrics::ROUTING_UPDATE_CONFIG_FOR_PROFILE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_UPDATE_CONFIG_FOR_PROFILE.add(1, &[]); let db = state.store.as_ref(); let key_manager_state = &(&state).into(); @@ -1190,7 +1184,7 @@ pub async fn update_default_routing_config_for_profile( ) .await?; - metrics::ROUTING_UPDATE_CONFIG_FOR_PROFILE_SUCCESS_RESPONSE.add(&metrics::CONTEXT, 1, &[]); + metrics::ROUTING_UPDATE_CONFIG_FOR_PROFILE_SUCCESS_RESPONSE.add(1, &[]); Ok(service_api::ApplicationResponse::Json( routing_types::ProfileDefaultRoutingConfig { profile_id: business_profile.get_id().to_owned(), @@ -1211,9 +1205,8 @@ pub async fn toggle_specific_dynamic_routing( dynamic_routing_type: routing::DynamicRoutingType, ) -> RouterResponse { metrics::ROUTING_CREATE_REQUEST_RECEIVED.add( - &metrics::CONTEXT, 1, - &add_attributes([("profile_id", profile_id.get_string_repr().to_owned())]), + router_env::metric_attributes!(("profile_id", 
profile_id.clone())), ); let db = state.store.as_ref(); let key_manager_state = &(&state).into(); @@ -1282,9 +1275,8 @@ pub async fn configure_dynamic_routing_volume_split( routing_info: routing::RoutingVolumeSplit, ) -> RouterResponse<()> { metrics::ROUTING_CREATE_REQUEST_RECEIVED.add( - &metrics::CONTEXT, 1, - &add_attributes([("profile_id", profile_id.get_string_repr().to_owned())]), + router_env::metric_attributes!(("profile_id", profile_id.clone())), ); let db = state.store.as_ref(); let key_manager_state = &(&state).into(); @@ -1344,9 +1336,8 @@ pub async fn success_based_routing_update_configs( profile_id: common_utils::id_type::ProfileId, ) -> RouterResponse { metrics::ROUTING_UPDATE_CONFIG_FOR_PROFILE.add( - &metrics::CONTEXT, 1, - &add_attributes([("profile_id", profile_id.get_string_repr().to_owned())]), + router_env::metric_attributes!(("profile_id", profile_id.clone())), ); let db = state.store.as_ref(); @@ -1402,9 +1393,8 @@ pub async fn success_based_routing_update_configs( let new_record = record.foreign_into(); metrics::ROUTING_UPDATE_CONFIG_FOR_PROFILE_SUCCESS_RESPONSE.add( - &metrics::CONTEXT, 1, - &add_attributes([("profile_id", profile_id.get_string_repr().to_owned())]), + router_env::metric_attributes!(("profile_id", profile_id.clone())), ); let prefix_of_dynamic_routing_keys = helpers::generate_tenant_business_profile_id( diff --git a/crates/router/src/core/routing/helpers.rs b/crates/router/src/core/routing/helpers.rs index 112c014202e6..690293c7f6ba 100644 --- a/crates/router/src/core/routing/helpers.rs +++ b/crates/router/src/core/routing/helpers.rs @@ -24,7 +24,7 @@ use hyperswitch_domain_models::api::ApplicationResponse; #[cfg(all(feature = "dynamic_routing", feature = "v1"))] use router_env::logger; #[cfg(any(feature = "dynamic_routing", feature = "v1"))] -use router_env::{instrument, metrics::add_attributes, tracing}; +use router_env::{instrument, tracing}; use rustc_hash::FxHashSet; use storage_impl::redis::cache; @@ -40,7 +40,7 @@ use crate::{ utils::StringExt, }; #[cfg(feature = "v1")] -use crate::{core::metrics as core_metrics, routes::metrics, types::transformers::ForeignInto}; +use crate::{core::metrics as core_metrics, types::transformers::ForeignInto}; pub const SUCCESS_BASED_DYNAMIC_ROUTING_ALGORITHM: &str = "Success rate based dynamic routing algorithm"; pub const ELIMINATION_BASED_DYNAMIC_ROUTING_ALGORITHM: &str = @@ -771,9 +771,8 @@ pub async fn push_metrics_with_update_window_for_success_based_routing( }; core_metrics::DYNAMIC_SUCCESS_BASED_ROUTING.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ( "tenant", state.tenant.tenant_id.get_string_repr().to_owned(), @@ -827,7 +826,7 @@ pub async fn push_metrics_with_update_window_for_success_based_routing( ), ("payment_status", payment_attempt.status.to_string()), ("conclusive_classification", outcome.to_string()), - ]), + ), ); logger::debug!("successfully pushed success_based_routing metrics"); @@ -953,11 +952,7 @@ pub async fn disable_dynamic_routing_algorithm( let db = state.store.as_ref(); let key_manager_state = &state.into(); let timestamp = common_utils::date_time::now_unix_timestamp(); - let profile_id = business_profile - .get_id() - .clone() - .get_string_repr() - .to_owned(); + let profile_id = business_profile.get_id().clone(); let (algorithm_id, dynamic_routing_algorithm, cache_entries_to_redact) = match dynamic_routing_type { routing_types::DynamicRoutingType::SuccessRateBasedRouting => { @@ -1071,9 +1066,8 @@ pub async fn 
disable_dynamic_routing_algorithm( .await?; core_metrics::ROUTING_UNLINK_CONFIG_SUCCESS_RESPONSE.add( - &metrics::CONTEXT, 1, - &add_attributes([("profile_id", profile_id)]), + router_env::metric_attributes!(("profile_id", profile_id)), ); Ok(ApplicationResponse::Json(response)) @@ -1187,9 +1181,8 @@ where .to_not_found_response(errors::ApiErrorResponse::ResourceIdNotFound)?; let updated_routing_record = routing_algorithm.foreign_into(); core_metrics::ROUTING_CREATE_SUCCESS_RESPONSE.add( - &metrics::CONTEXT, 1, - &add_attributes([("profile_id", profile_id.get_string_repr().to_owned())]), + router_env::metric_attributes!(("profile_id", profile_id.clone())), ); Ok(ApplicationResponse::Json(updated_routing_record)) } @@ -1268,9 +1261,8 @@ pub async fn default_specific_dynamic_routing_setup( let new_record = record.foreign_into(); core_metrics::ROUTING_CREATE_SUCCESS_RESPONSE.add( - &metrics::CONTEXT, 1, - &add_attributes([("profile_id", profile_id.get_string_repr().to_string())]), + router_env::metric_attributes!(("profile_id", profile_id.clone())), ); Ok(ApplicationResponse::Json(new_record)) } diff --git a/crates/router/src/core/utils.rs b/crates/router/src/core/utils.rs index 62f1425d4d3c..56e5bc2b0852 100644 --- a/crates/router/src/core/utils.rs +++ b/crates/router/src/core/utils.rs @@ -605,11 +605,7 @@ pub fn validate_dispute_stage_and_dispute_status( common_utils::fp_utils::when( !(dispute_stage_validation && dispute_status_validation), || { - super::metrics::INCOMING_DISPUTE_WEBHOOK_VALIDATION_FAILURE_METRIC.add( - &super::metrics::CONTEXT, - 1, - &[], - ); + super::metrics::INCOMING_DISPUTE_WEBHOOK_VALIDATION_FAILURE_METRIC.add(1, &[]); Err(errors::WebhooksFlowError::DisputeWebhookValidationFailed)? }, ) diff --git a/crates/router/src/core/webhooks/incoming.rs b/crates/router/src/core/webhooks/incoming.rs index fe844ef6d731..4b9141c649e9 100644 --- a/crates/router/src/core/webhooks/incoming.rs +++ b/crates/router/src/core/webhooks/incoming.rs @@ -14,7 +14,7 @@ use hyperswitch_domain_models::{ }; use hyperswitch_interfaces::webhooks::{IncomingWebhookFlowError, IncomingWebhookRequestDetails}; use masking::{ExposeInterface, PeekInterface}; -use router_env::{instrument, metrics::add_attributes, tracing, tracing_actix_web::RequestId}; +use router_env::{instrument, tracing, tracing_actix_web::RequestId}; use super::{types, utils, MERCHANT_ID}; use crate::{ @@ -134,12 +134,8 @@ async fn incoming_webhooks_core( let key_manager_state = &(&state).into(); metrics::WEBHOOK_INCOMING_COUNT.add( - &metrics::CONTEXT, 1, - &[metrics::KeyValue::new( - MERCHANT_ID, - merchant_account.get_id().get_string_repr().to_owned(), - )], + router_env::metric_attributes!((MERCHANT_ID, merchant_account.get_id().clone())), ); let mut request_details = IncomingWebhookRequestDetails { method: req.method().clone(), @@ -200,12 +196,11 @@ async fn incoming_webhooks_core( ); metrics::WEBHOOK_EVENT_TYPE_IDENTIFICATION_FAILURE_COUNT.add( - &metrics::CONTEXT, 1, - &[ - metrics::KeyValue::new(MERCHANT_ID, merchant_account.get_id().clone()), - metrics::KeyValue::new("connector", connector_name.to_string()), - ], + router_env::metric_attributes!( + (MERCHANT_ID, merchant_account.get_id().clone()), + ("connector", connector_name) + ), ); let response = connector @@ -328,12 +323,8 @@ async fn incoming_webhooks_core( if source_verified { metrics::WEBHOOK_SOURCE_VERIFIED_COUNT.add( - &metrics::CONTEXT, 1, - &[metrics::KeyValue::new( - MERCHANT_ID, - merchant_account.get_id().clone(), - )], + 
router_env::metric_attributes!((MERCHANT_ID, merchant_account.get_id().clone())), ); } else if connector.is_webhook_source_verification_mandatory() { // if webhook consumption is mandatory for connector, fail webhook @@ -498,12 +489,8 @@ async fn incoming_webhooks_core( } } else { metrics::WEBHOOK_INCOMING_FILTERED_COUNT.add( - &metrics::CONTEXT, 1, - &[metrics::KeyValue::new( - MERCHANT_ID, - merchant_account.get_id().get_string_repr().to_owned(), - )], + router_env::metric_attributes!((MERCHANT_ID, merchant_account.get_id().clone())), ); WebhookResponseTracker::NoEffect }; @@ -661,9 +648,11 @@ async fn payments_incoming_webhook_flow( .unwrap_or(true) => { metrics::WEBHOOK_PAYMENT_NOT_FOUND.add( - &metrics::CONTEXT, 1, - &add_attributes([("merchant_id", merchant_account.get_id().clone())]), + router_env::metric_attributes!(( + "merchant_id", + merchant_account.get_id().clone() + )), ); return Ok(WebhookResponseTracker::NoEffect); } @@ -722,7 +711,7 @@ async fn payouts_incoming_webhook_flow( event_type: webhooks::IncomingWebhookEvent, source_verified: bool, ) -> CustomResult { - metrics::INCOMING_PAYOUT_WEBHOOK_METRIC.add(&metrics::CONTEXT, 1, &[]); + metrics::INCOMING_PAYOUT_WEBHOOK_METRIC.add(1, &[]); if source_verified { let db = &*state.store; //find payout_attempt by object_reference_id @@ -838,7 +827,7 @@ async fn payouts_incoming_webhook_flow( status: updated_payout_attempt.status, }) } else { - metrics::INCOMING_PAYOUT_WEBHOOK_SIGNATURE_FAILURE_METRIC.add(&metrics::CONTEXT, 1, &[]); + metrics::INCOMING_PAYOUT_WEBHOOK_SIGNATURE_FAILURE_METRIC.add(1, &[]); Err(report!( errors::ApiErrorResponse::WebhookAuthenticationFailed )) @@ -998,7 +987,7 @@ async fn get_or_update_dispute_object( let db = &*state.store; match option_dispute { None => { - metrics::INCOMING_DISPUTE_WEBHOOK_NEW_RECORD_METRIC.add(&metrics::CONTEXT, 1, &[]); + metrics::INCOMING_DISPUTE_WEBHOOK_NEW_RECORD_METRIC.add(1, &[]); let dispute_id = generate_id(consts::ID_LENGTH, "dp"); let new_dispute = diesel_models::dispute::DisputeNew { dispute_id, @@ -1034,7 +1023,7 @@ async fn get_or_update_dispute_object( } Some(dispute) => { logger::info!("Dispute Already exists, Updating the dispute details"); - metrics::INCOMING_DISPUTE_WEBHOOK_UPDATE_RECORD_METRIC.add(&metrics::CONTEXT, 1, &[]); + metrics::INCOMING_DISPUTE_WEBHOOK_UPDATE_RECORD_METRIC.add(1, &[]); let dispute_status = diesel_models::enums::DisputeStatus::foreign_try_from(event_type) .change_context(errors::ApiErrorResponse::WebhookProcessingFailure) .attach_printable("event type to dispute state conversion failure")?; @@ -1449,7 +1438,7 @@ async fn disputes_incoming_webhook_flow( request_details: &IncomingWebhookRequestDetails<'_>, event_type: webhooks::IncomingWebhookEvent, ) -> CustomResult { - metrics::INCOMING_DISPUTE_WEBHOOK_METRIC.add(&metrics::CONTEXT, 1, &[]); + metrics::INCOMING_DISPUTE_WEBHOOK_METRIC.add(1, &[]); if source_verified { let db = &*state.store; let dispute_details = connector.get_dispute_details(request_details).switch()?; @@ -1495,14 +1484,14 @@ async fn disputes_incoming_webhook_flow( Some(dispute_object.created_at), )) .await?; - metrics::INCOMING_DISPUTE_WEBHOOK_MERCHANT_NOTIFIED_METRIC.add(&metrics::CONTEXT, 1, &[]); + metrics::INCOMING_DISPUTE_WEBHOOK_MERCHANT_NOTIFIED_METRIC.add(1, &[]); Ok(WebhookResponseTracker::Dispute { dispute_id: dispute_object.dispute_id, payment_id: dispute_object.payment_id, status: dispute_object.dispute_status, }) } else { - metrics::INCOMING_DISPUTE_WEBHOOK_SIGNATURE_FAILURE_METRIC.add(&metrics::CONTEXT, 1, 
&[]); + metrics::INCOMING_DISPUTE_WEBHOOK_SIGNATURE_FAILURE_METRIC.add(1, &[]); Err(report!( errors::ApiErrorResponse::WebhookAuthenticationFailed )) diff --git a/crates/router/src/core/webhooks/incoming_v2.rs b/crates/router/src/core/webhooks/incoming_v2.rs index b91a6f0e9a01..d331e4d67f3d 100644 --- a/crates/router/src/core/webhooks/incoming_v2.rs +++ b/crates/router/src/core/webhooks/incoming_v2.rs @@ -12,7 +12,7 @@ use hyperswitch_domain_models::{ router_response_types::{VerifyWebhookSourceResponseData, VerifyWebhookStatus}, }; use hyperswitch_interfaces::webhooks::IncomingWebhookRequestDetails; -use router_env::{instrument, metrics::add_attributes, tracing, tracing_actix_web::RequestId}; +use router_env::{instrument, tracing, tracing_actix_web::RequestId}; use super::{types, utils, MERCHANT_ID}; use crate::{ @@ -126,12 +126,8 @@ async fn incoming_webhooks_core( serde_json::Value, )> { metrics::WEBHOOK_INCOMING_COUNT.add( - &metrics::CONTEXT, 1, - &[metrics::KeyValue::new( - MERCHANT_ID, - merchant_account.get_id().get_string_repr().to_owned(), - )], + router_env::metric_attributes!((MERCHANT_ID, merchant_account.get_id().clone())), ); let mut request_details = IncomingWebhookRequestDetails { method: req.method().clone(), @@ -183,12 +179,11 @@ async fn incoming_webhooks_core( ); metrics::WEBHOOK_EVENT_TYPE_IDENTIFICATION_FAILURE_COUNT.add( - &metrics::CONTEXT, 1, - &[ - metrics::KeyValue::new(MERCHANT_ID, merchant_account.get_id().clone()), - metrics::KeyValue::new("connector", connector_name.to_string()), - ], + router_env::metric_attributes!( + (MERCHANT_ID, merchant_account.get_id().clone()), + ("connector", connector_name) + ), ); let response = connector @@ -288,12 +283,8 @@ async fn incoming_webhooks_core( if source_verified { metrics::WEBHOOK_SOURCE_VERIFIED_COUNT.add( - &metrics::CONTEXT, 1, - &[metrics::KeyValue::new( - MERCHANT_ID, - merchant_account.get_id().clone(), - )], + router_env::metric_attributes!((MERCHANT_ID, merchant_account.get_id().clone())), ); } @@ -356,12 +347,8 @@ async fn incoming_webhooks_core( } } else { metrics::WEBHOOK_INCOMING_FILTERED_COUNT.add( - &metrics::CONTEXT, 1, - &[metrics::KeyValue::new( - MERCHANT_ID, - merchant_account.get_id().get_string_repr().to_owned(), - )], + router_env::metric_attributes!((MERCHANT_ID, merchant_account.get_id().clone())), ); WebhookResponseTracker::NoEffect }; @@ -476,9 +463,11 @@ async fn payments_incoming_webhook_flow( .unwrap_or(true) => { metrics::WEBHOOK_PAYMENT_NOT_FOUND.add( - &metrics::CONTEXT, 1, - &add_attributes([("merchant_id", merchant_account.get_id().clone())]), + router_env::metric_attributes!(( + "merchant_id", + merchant_account.get_id().clone() + )), ); return Ok(WebhookResponseTracker::NoEffect); } diff --git a/crates/router/src/core/webhooks/outgoing.rs b/crates/router/src/core/webhooks/outgoing.rs index 0a9c17ed3a07..16d927be2682 100644 --- a/crates/router/src/core/webhooks/outgoing.rs +++ b/crates/router/src/core/webhooks/outgoing.rs @@ -17,7 +17,6 @@ use hyperswitch_interfaces::consts; use masking::{ExposeInterface, Mask, PeekInterface, Secret}; use router_env::{ instrument, - metrics::add_attributes, tracing::{self, Instrument}, }; @@ -292,12 +291,8 @@ async fn trigger_webhook_to_merchant( .await; metrics::WEBHOOK_OUTGOING_COUNT.add( - &metrics::CONTEXT, 1, - &[metrics::KeyValue::new( - MERCHANT_ID, - business_profile.merchant_id.get_string_repr().to_owned(), - )], + router_env::metric_attributes!((MERCHANT_ID, business_profile.merchant_id.clone())), ); 
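Editorial note: the webhook hunks pass a constant key (MERCHANT_ID) and an owned id value straight to the attribute macro, and the outgoing-webhook retry code just below binds the attribute set once and reuses it in both match arms. A hedged sketch of that shape, reusing names from this patch and assuming the macro's output can be handed to more than one metric call site:

// Constant key plus an owned (non-String) value, as in the webhook counters.
metrics::WEBHOOK_INCOMING_COUNT.add(
    1,
    router_env::metric_attributes!((MERCHANT_ID, merchant_account.get_id().clone())),
);

// Binding the attribute set once so both outcomes of the insert share it.
let attributes = router_env::metric_attributes!(("flow", "OutgoingWebhookRetry"));
match db.insert_process(process_tracker_entry).await {
    Ok(process_tracker) => {
        metrics::TASKS_ADDED_COUNT.add(1, attributes);
        Ok(process_tracker)
    }
    Err(error) => {
        metrics::TASK_ADDITION_FAILURES_COUNT.add(1, attributes);
        Err(error)
    }
}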
logger::debug!(outgoing_webhook_response=?response); @@ -561,21 +556,14 @@ pub(crate) async fn add_outgoing_webhook_retry_task_to_process_tracker( ) .map_err(errors::StorageError::from)?; + let attributes = router_env::metric_attributes!(("flow", "OutgoingWebhookRetry")); match db.insert_process(process_tracker_entry).await { Ok(process_tracker) => { - crate::routes::metrics::TASKS_ADDED_COUNT.add( - &metrics::CONTEXT, - 1, - &add_attributes([("flow", "OutgoingWebhookRetry")]), - ); + crate::routes::metrics::TASKS_ADDED_COUNT.add(1, attributes); Ok(process_tracker) } Err(error) => { - crate::routes::metrics::TASK_ADDITION_FAILURES_COUNT.add( - &metrics::CONTEXT, - 1, - &add_attributes([("flow", "OutgoingWebhookRetry")]), - ); + crate::routes::metrics::TASK_ADDITION_FAILURES_COUNT.add(1, attributes); Err(error) } } @@ -848,12 +836,8 @@ async fn update_event_in_storage( fn increment_webhook_outgoing_received_count(merchant_id: &common_utils::id_type::MerchantId) { metrics::WEBHOOK_OUTGOING_RECEIVED_COUNT.add( - &metrics::CONTEXT, 1, - &[metrics::KeyValue::new( - MERCHANT_ID, - merchant_id.get_string_repr().to_owned(), - )], + router_env::metric_attributes!((MERCHANT_ID, merchant_id.clone())), ) } @@ -887,12 +871,8 @@ async fn error_response_handler( schedule_webhook_retry: ScheduleWebhookRetry, ) -> CustomResult<(), errors::WebhooksFlowError> { metrics::WEBHOOK_OUTGOING_NOT_RECEIVED_COUNT.add( - &metrics::CONTEXT, 1, - &[metrics::KeyValue::new( - MERCHANT_ID, - merchant_id.get_string_repr().to_owned(), - )], + router_env::metric_attributes!((MERCHANT_ID, merchant_id.clone())), ); let error = report!(errors::WebhooksFlowError::NotReceivedByMerchant); diff --git a/crates/router/src/routes/health.rs b/crates/router/src/routes/health.rs index 0f2e63336470..7e9de917be38 100644 --- a/crates/router/src/routes/health.rs +++ b/crates/router/src/routes/health.rs @@ -14,7 +14,7 @@ use crate::{ #[instrument(skip_all, fields(flow = ?Flow::HealthCheck))] // #[actix_web::get("/health")] pub async fn health() -> impl actix_web::Responder { - metrics::HEALTH_METRIC.add(&metrics::CONTEXT, 1, &[]); + metrics::HEALTH_METRIC.add(1, &[]); logger::info!("Health was called"); actix_web::HttpResponse::Ok().body("health is good") @@ -25,7 +25,7 @@ pub async fn deep_health_check( state: web::Data, request: HttpRequest, ) -> impl actix_web::Responder { - metrics::HEALTH_METRIC.add(&metrics::CONTEXT, 1, &[]); + metrics::HEALTH_METRIC.add(1, &[]); let flow = Flow::DeepHealthCheck; diff --git a/crates/router/src/routes/metrics.rs b/crates/router/src/routes/metrics.rs index 1b55c838c45e..5920c001d3f4 100644 --- a/crates/router/src/routes/metrics.rs +++ b/crates/router/src/routes/metrics.rs @@ -2,9 +2,8 @@ pub mod bg_metrics_collector; pub mod request; pub mod utils; -use router_env::{counter_metric, global_meter, histogram_metric, metrics_context}; +use router_env::{counter_metric, global_meter, histogram_metric_f64}; -metrics_context!(CONTEXT); global_meter!(GLOBAL_METER, "ROUTER_API"); counter_metric!(HEALTH_METRIC, GLOBAL_METER); // No. of health API hits @@ -13,8 +12,8 @@ counter_metric!(KV_MISS, GLOBAL_METER); // No. 
of KV misses // API Level Metrics counter_metric!(REQUESTS_RECEIVED, GLOBAL_METER); counter_metric!(REQUEST_STATUS, GLOBAL_METER); -histogram_metric!(REQUEST_TIME, GLOBAL_METER); -histogram_metric!(EXTERNAL_REQUEST_TIME, GLOBAL_METER); +histogram_metric_f64!(REQUEST_TIME, GLOBAL_METER); +histogram_metric_f64!(EXTERNAL_REQUEST_TIME, GLOBAL_METER); // Operation Level Metrics counter_metric!(PAYMENT_OPS_COUNT, GLOBAL_METER); @@ -22,7 +21,7 @@ counter_metric!(PAYMENT_OPS_COUNT, GLOBAL_METER); counter_metric!(PAYMENT_COUNT, GLOBAL_METER); counter_metric!(SUCCESSFUL_PAYMENT, GLOBAL_METER); //TODO: This can be removed, added for payment list debugging -histogram_metric!(PAYMENT_LIST_LATENCY, GLOBAL_METER); +histogram_metric_f64!(PAYMENT_LIST_LATENCY, GLOBAL_METER); counter_metric!(REFUND_COUNT, GLOBAL_METER); counter_metric!(SUCCESSFUL_REFUND, GLOBAL_METER); @@ -58,7 +57,7 @@ counter_metric!(MCA_CREATE, GLOBAL_METER); // Flow Specific Metrics -histogram_metric!(CONNECTOR_REQUEST_TIME, GLOBAL_METER); +histogram_metric_f64!(CONNECTOR_REQUEST_TIME, GLOBAL_METER); counter_metric!(SESSION_TOKEN_CREATED, GLOBAL_METER); counter_metric!(CONNECTOR_CALL_COUNT, GLOBAL_METER); // Attributes needed @@ -89,9 +88,9 @@ counter_metric!(CONNECTOR_HTTP_STATUS_CODE_5XX_COUNT, GLOBAL_METER); counter_metric!(CARD_LOCKER_FAILURES, GLOBAL_METER); counter_metric!(CARD_LOCKER_SUCCESSFUL_RESPONSE, GLOBAL_METER); counter_metric!(TEMP_LOCKER_FAILURES, GLOBAL_METER); -histogram_metric!(CARD_ADD_TIME, GLOBAL_METER); -histogram_metric!(CARD_GET_TIME, GLOBAL_METER); -histogram_metric!(CARD_DELETE_TIME, GLOBAL_METER); +histogram_metric_f64!(CARD_ADD_TIME, GLOBAL_METER); +histogram_metric_f64!(CARD_GET_TIME, GLOBAL_METER); +histogram_metric_f64!(CARD_DELETE_TIME, GLOBAL_METER); // Apple Pay Flow Metrics counter_metric!(APPLE_PAY_MANUAL_FLOW, GLOBAL_METER); @@ -138,7 +137,7 @@ counter_metric!(ACCESS_TOKEN_CACHE_MISS, GLOBAL_METER); counter_metric!(INTEGRITY_CHECK_FAILED, GLOBAL_METER); // Network Tokenization metrics -histogram_metric!(GENERATE_NETWORK_TOKEN_TIME, GLOBAL_METER); -histogram_metric!(FETCH_NETWORK_TOKEN_TIME, GLOBAL_METER); -histogram_metric!(DELETE_NETWORK_TOKEN_TIME, GLOBAL_METER); -histogram_metric!(CHECK_NETWORK_TOKEN_STATUS_TIME, GLOBAL_METER); +histogram_metric_f64!(GENERATE_NETWORK_TOKEN_TIME, GLOBAL_METER); +histogram_metric_f64!(FETCH_NETWORK_TOKEN_TIME, GLOBAL_METER); +histogram_metric_f64!(DELETE_NETWORK_TOKEN_TIME, GLOBAL_METER); +histogram_metric_f64!(CHECK_NETWORK_TOKEN_STATUS_TIME, GLOBAL_METER); diff --git a/crates/router/src/routes/metrics/request.rs b/crates/router/src/routes/metrics/request.rs index 572f3dc93302..c2d49cd7ae44 100644 --- a/crates/router/src/routes/metrics/request.rs +++ b/crates/router/src/routes/metrics/request.rs @@ -1,5 +1,3 @@ -use router_env::metrics::add_attributes; - use super::utils as metric_utils; use crate::services::ApplicationResponse; @@ -11,16 +9,11 @@ where F: futures::Future, { let key = "request_type"; - super::REQUESTS_RECEIVED.add( - &super::CONTEXT, - 1, - &add_attributes([(key, flow.to_string())]), - ); + super::REQUESTS_RECEIVED.add(1, router_env::metric_attributes!((key, flow.to_string()))); let (result, time) = metric_utils::time_future(future).await; super::REQUEST_TIME.record( - &super::CONTEXT, time.as_secs_f64(), - &add_attributes([(key, flow.to_string())]), + router_env::metric_attributes!((key, flow.to_string())), ); result } @@ -31,13 +24,12 @@ pub fn status_code_metrics( merchant_id: common_utils::id_type::MerchantId, ) { super::REQUEST_STATUS.add( - 
&super::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ("status_code", status_code), ("flow", flow), - ("merchant_id", merchant_id.get_string_repr().to_owned()), - ]), + ("merchant_id", merchant_id.clone()), + ), ) } diff --git a/crates/router/src/services/api.rs b/crates/router/src/services/api.rs index 9416ff175a85..a4c1cac0ac43 100644 --- a/crates/router/src/services/api.rs +++ b/crates/router/src/services/api.rs @@ -46,7 +46,7 @@ pub use hyperswitch_interfaces::{ }, }; use masking::{Maskable, PeekInterface}; -use router_env::{instrument, metrics::add_attributes, tracing, tracing_actix_web::RequestId, Tag}; +use router_env::{instrument, tracing, tracing_actix_web::RequestId, Tag}; use serde::Serialize; use serde_json::json; use tera::{Context, Error as TeraError, Tera}; @@ -164,9 +164,8 @@ where } payments::CallConnectorAction::Trigger => { metrics::CONNECTOR_CALL_COUNT.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ("connector", req.connector.to_string()), ( "flow", @@ -174,9 +173,8 @@ where .split("::") .last() .unwrap_or_default() - .to_string(), ), - ]), + ), ); let connector_request = match connector_request { @@ -190,9 +188,11 @@ where | &errors::ConnectorError::RequestEncodingFailedWithReason(_) ) { metrics::REQUEST_BUILD_FAILURE.add( - &metrics::CONTEXT, 1, - &add_attributes([("connector", req.connector.to_string())]), + router_env::metric_attributes!(( + "connector", + req.connector.clone() + )), ) } })?, @@ -254,12 +254,12 @@ where == &errors::ConnectorError::ResponseDeserializationFailed { metrics::RESPONSE_DESERIALIZATION_FAILURE.add( - &metrics::CONTEXT, + 1, - &add_attributes([( + router_env::metric_attributes!(( "connector", - req.connector.to_string(), - )]), + req.connector.clone(), + )), ) } }); @@ -294,9 +294,11 @@ where .map_or(external_latency, |val| val + external_latency), ); metrics::CONNECTOR_ERROR_RESPONSE_COUNT.add( - &metrics::CONTEXT, 1, - &add_attributes([("connector", req.connector.clone())]), + router_env::metric_attributes!(( + "connector", + req.connector.clone(), + )), ); let error = match body.status_code { @@ -436,10 +438,10 @@ pub async fn send_request( )?; let headers = request.headers.construct_header_map()?; - let metrics_tag = router_env::opentelemetry::KeyValue { - key: consts::METRICS_HOST_TAG_NAME.into(), - value: url.host_str().unwrap_or_default().to_string().into(), - }; + let metrics_tag = router_env::metric_attributes!(( + consts::METRICS_HOST_TAG_NAME, + url.host_str().unwrap_or_default().to_owned() + )); let request = { match request.method { Method::Get => client.get(url), @@ -503,11 +505,11 @@ pub async fn send_request( .await .map_err(|error| match error { error if error.is_timeout() => { - metrics::REQUEST_BUILD_FAILURE.add(&metrics::CONTEXT, 1, &[]); + metrics::REQUEST_BUILD_FAILURE.add(1, &[]); errors::ApiClientError::RequestTimeoutReceived } error if is_connection_closed_before_message_could_complete(&error) => { - metrics::REQUEST_BUILD_FAILURE.add(&metrics::CONTEXT, 1, &[]); + metrics::REQUEST_BUILD_FAILURE.add(1, &[]); errors::ApiClientError::ConnectionClosedIncompleteMessage } _ => errors::ApiClientError::RequestNotSent(error.to_string()), @@ -521,11 +523,11 @@ pub async fn send_request( .await .map_err(|error| match error { error if error.is_timeout() => { - metrics::REQUEST_BUILD_FAILURE.add(&metrics::CONTEXT, 1, &[]); + metrics::REQUEST_BUILD_FAILURE.add(1, &[]); errors::ApiClientError::RequestTimeoutReceived } error if 
is_connection_closed_before_message_could_complete(&error) => { - metrics::REQUEST_BUILD_FAILURE.add(&metrics::CONTEXT, 1, &[]); + metrics::REQUEST_BUILD_FAILURE.add(1, &[]); errors::ApiClientError::ConnectionClosedIncompleteMessage } _ => errors::ApiClientError::RequestNotSent(error.to_string()), @@ -536,8 +538,7 @@ pub async fn send_request( let response = common_utils::metrics::utils::record_operation_time( send_request, &metrics::EXTERNAL_REQUEST_TIME, - &metrics::CONTEXT, - &[metrics_tag.clone()], + metrics_tag, ) .await; // Retry once if the response is connection closed. @@ -555,7 +556,7 @@ pub async fn send_request( if error.current_context() == &errors::ApiClientError::ConnectionClosedIncompleteMessage => { - metrics::AUTO_RETRY_CONNECTION_CLOSED.add(&metrics::CONTEXT, 1, &[]); + metrics::AUTO_RETRY_CONNECTION_CLOSED.add(1, &[]); match cloned_send_request { Some(cloned_request) => { logger::info!( @@ -564,8 +565,7 @@ pub async fn send_request( common_utils::metrics::utils::record_operation_time( cloned_request, &metrics::EXTERNAL_REQUEST_TIME, - &metrics::CONTEXT, - &[metrics_tag], + metrics_tag, ) .await } diff --git a/crates/router/src/services/authentication.rs b/crates/router/src/services/authentication.rs index f465719949aa..e8435243ff4d 100644 --- a/crates/router/src/services/authentication.rs +++ b/crates/router/src/services/authentication.rs @@ -600,7 +600,7 @@ where } let report_failure = || { - metrics::PARTIAL_AUTH_FAILURE.add(&metrics::CONTEXT, 1, &[]); + metrics::PARTIAL_AUTH_FAILURE.add(1, &[]); }; let payload = ExtractedPayload::from_headers(request_headers) diff --git a/crates/router/src/services/authentication/decision.rs b/crates/router/src/services/authentication/decision.rs index 4ff9f8401baa..a1796f97c3b2 100644 --- a/crates/router/src/services/authentication/decision.rs +++ b/crates/router/src/services/authentication/decision.rs @@ -1,6 +1,5 @@ use common_utils::{errors::CustomResult, request::RequestContent}; use masking::{ErasedMaskSerialize, Secret}; -use router_env::opentelemetry::KeyValue; use serde::Serialize; use storage_impl::errors::ApiClientError; @@ -197,19 +196,13 @@ where E: std::fmt::Debug, F: futures::Future> + Send + 'static, { - metrics::API_KEY_REQUEST_INITIATED.add( - &metrics::CONTEXT, - 1, - &[KeyValue::new("type", request_type)], - ); + metrics::API_KEY_REQUEST_INITIATED + .add(1, router_env::metric_attributes!(("type", request_type))); tokio::spawn(async move { match future.await { Ok(_) => { - metrics::API_KEY_REQUEST_COMPLETED.add( - &metrics::CONTEXT, - 1, - &[KeyValue::new("type", request_type)], - ); + metrics::API_KEY_REQUEST_COMPLETED + .add(1, router_env::metric_attributes!(("type", request_type))); } Err(e) => { router_env::error!("Error in tracked job: {:?}", e); diff --git a/crates/router/src/utils.rs b/crates/router/src/utils.rs index 9dca6cf34778..e4046f95a153 100644 --- a/crates/router/src/utils.rs +++ b/crates/router/src/utils.rs @@ -38,7 +38,6 @@ use hyperswitch_domain_models::payments::PaymentIntent; use hyperswitch_domain_models::type_encryption::{crypto_operation, CryptoOperation}; use masking::{ExposeInterface, SwitchStrategy}; use nanoid::nanoid; -use router_env::metrics::add_attributes; use serde::de::DeserializeOwned; use serde_json::Value; use tracing_futures::Instrument; @@ -678,11 +677,8 @@ pub fn handle_json_response_deserialization_failure( res: types::Response, connector: &'static str, ) -> CustomResult { - metrics::RESPONSE_DESERIALIZATION_FAILURE.add( - &metrics::CONTEXT, - 1, - 
&add_attributes([("connector", connector)]), - ); + metrics::RESPONSE_DESERIALIZATION_FAILURE + .add(1, router_env::metric_attributes!(("connector", connector))); let response_data = String::from_utf8(res.response.to_vec()) .change_context(errors::ConnectorError::ResponseDeserializationFailed)?; @@ -726,21 +722,11 @@ pub fn add_connector_http_status_code_metrics(option_status_code: Option) { if let Some(status_code) = option_status_code { let status_code_type = get_http_status_code_type(status_code).ok(); match status_code_type.as_deref() { - Some("1xx") => { - metrics::CONNECTOR_HTTP_STATUS_CODE_1XX_COUNT.add(&metrics::CONTEXT, 1, &[]) - } - Some("2xx") => { - metrics::CONNECTOR_HTTP_STATUS_CODE_2XX_COUNT.add(&metrics::CONTEXT, 1, &[]) - } - Some("3xx") => { - metrics::CONNECTOR_HTTP_STATUS_CODE_3XX_COUNT.add(&metrics::CONTEXT, 1, &[]) - } - Some("4xx") => { - metrics::CONNECTOR_HTTP_STATUS_CODE_4XX_COUNT.add(&metrics::CONTEXT, 1, &[]) - } - Some("5xx") => { - metrics::CONNECTOR_HTTP_STATUS_CODE_5XX_COUNT.add(&metrics::CONTEXT, 1, &[]) - } + Some("1xx") => metrics::CONNECTOR_HTTP_STATUS_CODE_1XX_COUNT.add(1, &[]), + Some("2xx") => metrics::CONNECTOR_HTTP_STATUS_CODE_2XX_COUNT.add(1, &[]), + Some("3xx") => metrics::CONNECTOR_HTTP_STATUS_CODE_3XX_COUNT.add(1, &[]), + Some("4xx") => metrics::CONNECTOR_HTTP_STATUS_CODE_4XX_COUNT.add(1, &[]), + Some("5xx") => metrics::CONNECTOR_HTTP_STATUS_CODE_5XX_COUNT.add(1, &[]), _ => logger::info!("Skip metrics as invalid http status code received from connector"), }; } else { @@ -1049,26 +1035,24 @@ pub fn add_apple_pay_flow_metrics( if let Some(flow) = apple_pay_flow { match flow { domain::ApplePayFlow::Simplified(_) => metrics::APPLE_PAY_SIMPLIFIED_FLOW.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ( "connector", connector.to_owned().unwrap_or("null".to_string()), ), - ("merchant_id", merchant_id.get_string_repr().to_owned()), - ]), + ("merchant_id", merchant_id.clone()), + ), ), domain::ApplePayFlow::Manual => metrics::APPLE_PAY_MANUAL_FLOW.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ( "connector", connector.to_owned().unwrap_or("null".to_string()), ), - ("merchant_id", merchant_id.get_string_repr().to_owned()), - ]), + ("merchant_id", merchant_id.clone()), + ), ), } } @@ -1085,28 +1069,26 @@ pub fn add_apple_pay_payment_status_metrics( match flow { domain::ApplePayFlow::Simplified(_) => { metrics::APPLE_PAY_SIMPLIFIED_FLOW_SUCCESSFUL_PAYMENT.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ( "connector", connector.to_owned().unwrap_or("null".to_string()), ), - ("merchant_id", merchant_id.get_string_repr().to_owned()), - ]), + ("merchant_id", merchant_id.clone()), + ), ) } domain::ApplePayFlow::Manual => metrics::APPLE_PAY_MANUAL_FLOW_SUCCESSFUL_PAYMENT .add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ( "connector", connector.to_owned().unwrap_or("null".to_string()), ), - ("merchant_id", merchant_id.get_string_repr().to_owned()), - ]), + ("merchant_id", merchant_id.clone()), + ), ), } } @@ -1115,27 +1097,25 @@ pub fn add_apple_pay_payment_status_metrics( match flow { domain::ApplePayFlow::Simplified(_) => { metrics::APPLE_PAY_SIMPLIFIED_FLOW_FAILED_PAYMENT.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ( "connector", connector.to_owned().unwrap_or("null".to_string()), ), - ("merchant_id", merchant_id.get_string_repr().to_owned()), - ]), + ("merchant_id", 
merchant_id.clone()), + ), ) } domain::ApplePayFlow::Manual => metrics::APPLE_PAY_MANUAL_FLOW_FAILED_PAYMENT.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ( "connector", connector.to_owned().unwrap_or("null".to_string()), ), - ("merchant_id", merchant_id.get_string_repr().to_owned()), - ]), + ("merchant_id", merchant_id.clone()), + ), ), } } diff --git a/crates/router/src/utils/db_utils.rs b/crates/router/src/utils/db_utils.rs index 67641a3fe7b1..07ec03f4de46 100644 --- a/crates/router/src/utils/db_utils.rs +++ b/crates/router/src/utils/db_utils.rs @@ -28,7 +28,7 @@ where Ok(output) => Ok(output), Err(redis_error) => match redis_error.current_context() { redis_interface::errors::RedisError::NotFound => { - metrics::KV_MISS.add(&metrics::CONTEXT, 1, &[]); + metrics::KV_MISS.add(1, &[]); database_call_closure().await } // Keeping the key empty here since the error would never go here. @@ -74,7 +74,7 @@ where }), (Err(redis_error), _) => match redis_error.current_context() { redis_interface::errors::RedisError::NotFound => { - metrics::KV_MISS.add(&metrics::CONTEXT, 1, &[]); + metrics::KV_MISS.add(1, &[]); database_call().await } // Keeping the key empty here since the error would never go here. diff --git a/crates/router/src/workflows/api_key_expiry.rs b/crates/router/src/workflows/api_key_expiry.rs index c04e4c5d576c..bf6100179bf2 100644 --- a/crates/router/src/workflows/api_key_expiry.rs +++ b/crates/router/src/workflows/api_key_expiry.rs @@ -2,7 +2,7 @@ use common_utils::{errors::ValidationError, ext_traits::ValueExt}; use diesel_models::{ enums as storage_enums, process_tracker::business_status, ApiKeyExpiryTrackingData, }; -use router_env::{logger, metrics::add_attributes}; +use router_env::logger; use scheduler::{workflows::ProcessTrackerWorkflow, SchedulerSessionState}; use crate::{ @@ -134,11 +134,8 @@ impl ProcessTrackerWorkflow for ApiKeyExpiryWorkflow { db.process_tracker_update_process_status_by_ids(task_ids, updated_process_tracker_data) .await?; // Remaining tasks are re-scheduled, so will be resetting the added count - metrics::TASKS_RESET_COUNT.add( - &metrics::CONTEXT, - 1, - &add_attributes([("flow", "ApiKeyExpiry")]), - ); + metrics::TASKS_RESET_COUNT + .add(1, router_env::metric_attributes!(("flow", "ApiKeyExpiry"))); } Ok(()) diff --git a/crates/router_env/Cargo.toml b/crates/router_env/Cargo.toml index 27fe451b753f..8a3482a0ac53 100644 --- a/crates/router_env/Cargo.toml +++ b/crates/router_env/Cargo.toml @@ -13,8 +13,10 @@ config = { version = "0.14.0", features = ["toml"] } error-stack = "0.4.1" gethostname = "0.4.3" once_cell = "1.19.0" -opentelemetry = { version = "0.19.0", features = ["rt-tokio-current-thread", "metrics"] } -opentelemetry-otlp = { version = "0.12.0", features = ["metrics"] } +opentelemetry = { version = "0.27.1", default-features = false, features = ["internal-logs", "metrics", "trace"] } +opentelemetry-aws = { version = "0.15.0", default-features = false, features = ["internal-logs", "trace"] } +opentelemetry-otlp = { version = "0.27.0", default-features = false, features = ["grpc-tonic", "metrics", "trace"] } +opentelemetry_sdk = { version = "0.27.1", default-features = false, features = ["rt-tokio-current-thread", "metrics", "trace"] } rustc-hash = "1.1" serde = { version = "1.0.197", features = ["derive"] } serde_json = "1.0.115" @@ -23,10 +25,10 @@ strum = { version = "0.26.2", features = ["derive"] } time = { version = "0.3.35", default-features = false, features = ["formatting"] } tokio = { version = 
"1.37.0" } tracing = { workspace = true } -tracing-actix-web = { version = "0.7.10", features = ["opentelemetry_0_19", "uuid_v7"], optional = true } +tracing-actix-web = { version = "0.7.15", features = ["opentelemetry_0_27", "uuid_v7"], optional = true } tracing-appender = { version = "0.2.3" } tracing-attributes = "0.1.27" -tracing-opentelemetry = { version = "0.19.0" } +tracing-opentelemetry = { version = "0.28.0", default-features = false } tracing-subscriber = { version = "0.3.18", default-features = true, features = ["env-filter", "json", "registry"] } vergen = { version = "8.3.1", optional = true, features = ["cargo", "git", "git2", "rustc"] } diff --git a/crates/router_env/src/logger/setup.rs b/crates/router_env/src/logger/setup.rs index 6433172edf4d..7447c8787e06 100644 --- a/crates/router_env/src/logger/setup.rs +++ b/crates/router_env/src/logger/setup.rs @@ -3,20 +3,6 @@ use std::time::Duration; use ::config::ConfigError; -use opentelemetry::{ - global, runtime, - sdk::{ - export::metrics::aggregation::cumulative_temporality_selector, - metrics::{controllers::BasicController, selectors::simple}, - propagation::TraceContextPropagator, - trace, - trace::BatchConfig, - Resource, - }, - trace::{TraceContextExt, TraceState}, - KeyValue, -}; -use opentelemetry_otlp::{TonicExporterBuilder, WithExportConfig}; use serde_json::ser::{CompactFormatter, PrettyFormatter}; use tracing_appender::non_blocking::WorkerGuard; use tracing_subscriber::{fmt, prelude::*, util::SubscriberInitExt, EnvFilter, Layer}; @@ -27,7 +13,6 @@ use crate::{config, FormattingLayer, StorageSubscription}; #[derive(Debug)] pub struct TelemetryGuard { _log_guards: Vec, - _metrics_controller: Option, } /// Setup logging sub-system specifying the logging configuration, service (binary) name, and a @@ -47,10 +32,9 @@ pub fn setup( } else { None }; - let _metrics_controller = if config.telemetry.metrics_enabled { + + if config.telemetry.metrics_enabled { setup_metrics_pipeline(&config.telemetry) - } else { - None }; // Setup file logging @@ -132,21 +116,23 @@ pub fn setup( // dropped Ok(TelemetryGuard { _log_guards: guards, - _metrics_controller, }) } -fn get_opentelemetry_exporter(config: &config::LogTelemetry) -> TonicExporterBuilder { - let mut exporter_builder = opentelemetry_otlp::new_exporter().tonic(); +fn get_opentelemetry_exporter_config( + config: &config::LogTelemetry, +) -> opentelemetry_otlp::ExportConfig { + let mut exporter_config = opentelemetry_otlp::ExportConfig { + protocol: opentelemetry_otlp::Protocol::Grpc, + endpoint: config.otel_exporter_otlp_endpoint.clone(), + ..Default::default() + }; - if let Some(ref endpoint) = config.otel_exporter_otlp_endpoint { - exporter_builder = exporter_builder.with_endpoint(endpoint); - } if let Some(timeout) = config.otel_exporter_otlp_timeout { - exporter_builder = exporter_builder.with_timeout(Duration::from_millis(timeout)); + exporter_config.timeout = Duration::from_millis(timeout); } - exporter_builder + exporter_config } #[derive(Debug, Clone)] @@ -192,39 +178,41 @@ impl TraceAssertion { /// Conditional Sampler for providing control on url based tracing #[derive(Clone, Debug)] -struct ConditionalSampler(TraceAssertion, T); +struct ConditionalSampler( + TraceAssertion, + T, +); -impl trace::ShouldSample for ConditionalSampler { +impl + opentelemetry_sdk::trace::ShouldSample for ConditionalSampler +{ fn should_sample( &self, parent_context: Option<&opentelemetry::Context>, trace_id: opentelemetry::trace::TraceId, name: &str, span_kind: 
&opentelemetry::trace::SpanKind, - attributes: &opentelemetry::trace::OrderMap, + attributes: &[opentelemetry::KeyValue], links: &[opentelemetry::trace::Link], - instrumentation_library: &opentelemetry::InstrumentationLibrary, ) -> opentelemetry::trace::SamplingResult { + use opentelemetry::trace::TraceContextExt; + match attributes - .get(&opentelemetry::Key::new("http.route")) + .iter() + .find(|&kv| kv.key == opentelemetry::Key::new("http.route")) .map_or(self.0.default, |inner| { - self.0.should_trace_url(&inner.as_str()) + self.0.should_trace_url(&inner.value.as_str()) }) { - true => self.1.should_sample( - parent_context, - trace_id, - name, - span_kind, - attributes, - links, - instrumentation_library, - ), + true => { + self.1 + .should_sample(parent_context, trace_id, name, span_kind, attributes, links) + } false => opentelemetry::trace::SamplingResult { decision: opentelemetry::trace::SamplingDecision::Drop, attributes: Vec::new(), trace_state: match parent_context { Some(ctx) => ctx.span().span_context().trace_state().clone(), - None => TraceState::default(), + None => opentelemetry::trace::TraceState::default(), }, }, } @@ -234,95 +222,117 @@ impl trace::ShouldSample for Condition fn setup_tracing_pipeline( config: &config::LogTelemetry, service_name: &str, -) -> Option> -{ - global::set_text_map_propagator(TraceContextPropagator::new()); +) -> Option< + tracing_opentelemetry::OpenTelemetryLayer< + tracing_subscriber::Registry, + opentelemetry_sdk::trace::Tracer, + >, +> { + use opentelemetry::trace::TracerProvider; + use opentelemetry_otlp::WithExportConfig; + use opentelemetry_sdk::trace; + + opentelemetry::global::set_text_map_propagator( + opentelemetry_sdk::propagation::TraceContextPropagator::new(), + ); + + // Set the export interval to 1 second + let batch_config = trace::BatchConfigBuilder::default() + .with_scheduled_delay(Duration::from_millis(1000)) + .build(); + + let exporter_result = opentelemetry_otlp::SpanExporter::builder() + .with_tonic() + .with_export_config(get_opentelemetry_exporter_config(config)) + .build(); + + let exporter = if config.ignore_errors { + #[allow(clippy::print_stderr)] // The logger hasn't been initialized yet + exporter_result + .inspect_err(|error| eprintln!("Failed to build traces exporter: {error:?}")) + .ok()? + } else { + // Safety: This is conditional, there is an option to avoid this behavior at runtime. 
+ #[allow(clippy::expect_used)] + exporter_result.expect("Failed to build traces exporter") + }; - let mut trace_config = trace::config() + let mut provider_builder = trace::TracerProvider::builder() + .with_span_processor( + trace::BatchSpanProcessor::builder( + exporter, + // The runtime would have to be updated if a different web framework is used + opentelemetry_sdk::runtime::TokioCurrentThread, + ) + .with_batch_config(batch_config) + .build(), + ) .with_sampler(trace::Sampler::ParentBased(Box::new(ConditionalSampler( TraceAssertion { clauses: config .route_to_trace .clone() - .map(|inner| inner.into_iter().map(Into::into).collect()), + .map(|inner| inner.into_iter().map(TraceUrlAssert::from).collect()), default: false, }, trace::Sampler::TraceIdRatioBased(config.sampling_rate.unwrap_or(1.0)), )))) - .with_resource(Resource::new(vec![KeyValue::new( - "service.name", - service_name.to_owned(), - )])); + .with_resource(opentelemetry_sdk::Resource::new(vec![ + opentelemetry::KeyValue::new("service.name", service_name.to_owned()), + ])); + if config.use_xray_generator { - trace_config = trace_config.with_id_generator(trace::XrayIdGenerator::default()); + provider_builder = provider_builder + .with_id_generator(opentelemetry_aws::trace::XrayIdGenerator::default()); } - // Change the default export interval from 5 seconds to 1 second - let batch_config = BatchConfig::default().with_scheduled_delay(Duration::from_millis(1000)); - - let traces_layer_result = opentelemetry_otlp::new_pipeline() - .tracing() - .with_exporter(get_opentelemetry_exporter(config)) - .with_batch_config(batch_config) - .with_trace_config(trace_config) - .install_batch(runtime::TokioCurrentThread) - .map(|tracer| tracing_opentelemetry::layer().with_tracer(tracer)); - - #[allow(clippy::print_stderr)] // The logger hasn't been initialized yet - if config.ignore_errors { - traces_layer_result - .map_err(|error| { - eprintln!("Failed to create an `opentelemetry_otlp` tracer: {error:?}") - }) - .ok() - } else { - // Safety: This is conditional, there is an option to avoid this behavior at runtime. 
- #[allow(clippy::expect_used)] - Some(traces_layer_result.expect("Failed to create an `opentelemetry_otlp` tracer")) - } + Some( + tracing_opentelemetry::layer() + .with_tracer(provider_builder.build().tracer(service_name.to_owned())), + ) } -fn setup_metrics_pipeline(config: &config::LogTelemetry) -> Option { - let histogram_buckets = { - let mut init = 0.01; - let mut buckets: [f64; 15] = [0.0; 15]; - - for bucket in &mut buckets { - init *= 2.0; - *bucket = init; - } - buckets - }; +fn setup_metrics_pipeline(config: &config::LogTelemetry) { + use opentelemetry_otlp::WithExportConfig; - let metrics_controller_result = opentelemetry_otlp::new_pipeline() - .metrics( - simple::histogram(histogram_buckets), - cumulative_temporality_selector(), - // This would have to be updated if a different web framework is used - runtime::TokioCurrentThread, - ) - .with_exporter(get_opentelemetry_exporter(config)) - .with_period(Duration::from_secs(3)) - .with_timeout(Duration::from_secs(10)) - .with_resource(Resource::new(vec![KeyValue::new( - "pod", - std::env::var("POD_NAME").map_or( - "hyperswitch-server-default".into(), - Into::::into, - ), - )])) + let exporter_result = opentelemetry_otlp::MetricExporter::builder() + .with_tonic() + .with_temporality(opentelemetry_sdk::metrics::Temporality::Cumulative) + .with_export_config(get_opentelemetry_exporter_config(config)) .build(); - #[allow(clippy::print_stderr)] // The logger hasn't been initialized yet - if config.ignore_errors { - metrics_controller_result - .map_err(|error| eprintln!("Failed to setup metrics pipeline: {error:?}")) - .ok() + let exporter = if config.ignore_errors { + #[allow(clippy::print_stderr)] // The logger hasn't been initialized yet + exporter_result + .inspect_err(|error| eprintln!("Failed to build metrics exporter: {error:?}")) + .ok(); + return; } else { // Safety: This is conditional, there is an option to avoid this behavior at runtime. #[allow(clippy::expect_used)] - Some(metrics_controller_result.expect("Failed to setup metrics pipeline")) - } + exporter_result.expect("Failed to build metrics exporter") + }; + + let reader = opentelemetry_sdk::metrics::PeriodicReader::builder( + exporter, + // The runtime would have to be updated if a different web framework is used + opentelemetry_sdk::runtime::TokioCurrentThread, + ) + .with_interval(Duration::from_secs(3)) + .with_timeout(Duration::from_secs(10)) + .build(); + + let provider = opentelemetry_sdk::metrics::SdkMeterProvider::builder() + .with_reader(reader) + .with_resource(opentelemetry_sdk::Resource::new([ + opentelemetry::KeyValue::new( + "pod", + std::env::var("POD_NAME").unwrap_or(String::from("hyperswitch-server-default")), + ), + ])) + .build(); + + opentelemetry::global::set_meter_provider(provider); } fn get_envfilter( diff --git a/crates/router_env/src/metrics.rs b/crates/router_env/src/metrics.rs index 780c010579f7..9a7efff04948 100644 --- a/crates/router_env/src/metrics.rs +++ b/crates/router_env/src/metrics.rs @@ -1,16 +1,5 @@ //! Utilities to easily create opentelemetry contexts, meters and metrics. -/// Create a metrics [`Context`][Context] with the specified name. -/// -/// [Context]: opentelemetry::Context -#[macro_export] -macro_rules! metrics_context { - ($name:ident) => { - pub(crate) static $name: once_cell::sync::Lazy<$crate::opentelemetry::Context> = - once_cell::sync::Lazy::new($crate::opentelemetry::Context::current); - }; -} - /// Create a global [`Meter`][Meter] with the specified name and an optional description. 
/// /// [Meter]: opentelemetry::metrics::Meter @@ -20,14 +9,14 @@ macro_rules! global_meter { static $name: once_cell::sync::Lazy<$crate::opentelemetry::metrics::Meter> = once_cell::sync::Lazy::new(|| $crate::opentelemetry::global::meter(stringify!($name))); }; - ($name:ident, $description:literal) => { - static $name: once_cell::sync::Lazy<$crate::opentelemetry::metrics::Meter> = - once_cell::sync::Lazy::new(|| $crate::opentelemetry::global::meter($description)); + ($meter:ident, $name:literal) => { + static $meter: once_cell::sync::Lazy<$crate::opentelemetry::metrics::Meter> = + once_cell::sync::Lazy::new(|| $crate::opentelemetry::global::meter(stringify!($name))); }; } /// Create a [`Counter`][Counter] metric with the specified name and an optional description, -/// associated with the specified meter. Note that the meter must be to a valid [`Meter`][Meter]. +/// associated with the specified meter. Note that the meter must be a valid [`Meter`][Meter]. /// /// [Counter]: opentelemetry::metrics::Counter /// [Meter]: opentelemetry::metrics::Meter @@ -36,36 +25,54 @@ macro_rules! counter_metric { ($name:ident, $meter:ident) => { pub(crate) static $name: once_cell::sync::Lazy< $crate::opentelemetry::metrics::Counter, - > = once_cell::sync::Lazy::new(|| $meter.u64_counter(stringify!($name)).init()); + > = once_cell::sync::Lazy::new(|| $meter.u64_counter(stringify!($name)).build()); }; ($name:ident, $meter:ident, description:literal) => { + #[doc = $description] pub(crate) static $name: once_cell::sync::Lazy< $crate::opentelemetry::metrics::Counter, - > = once_cell::sync::Lazy::new(|| $meter.u64_counter($description).init()); + > = once_cell::sync::Lazy::new(|| { + $meter + .u64_counter(stringify!($name)) + .with_description($description) + .build() + }); }; } -/// Create a [`Histogram`][Histogram] metric with the specified name and an optional description, -/// associated with the specified meter. Note that the meter must be to a valid [`Meter`][Meter]. +/// Create a [`Histogram`][Histogram] f64 metric with the specified name and an optional description, +/// associated with the specified meter. Note that the meter must be a valid [`Meter`][Meter]. /// /// [Histogram]: opentelemetry::metrics::Histogram /// [Meter]: opentelemetry::metrics::Meter #[macro_export] -macro_rules! histogram_metric { +macro_rules! histogram_metric_f64 { ($name:ident, $meter:ident) => { pub(crate) static $name: once_cell::sync::Lazy< $crate::opentelemetry::metrics::Histogram, - > = once_cell::sync::Lazy::new(|| $meter.f64_histogram(stringify!($name)).init()); + > = once_cell::sync::Lazy::new(|| { + $meter + .f64_histogram(stringify!($name)) + .with_boundaries($crate::metrics::f64_histogram_buckets()) + .build() + }); }; ($name:ident, $meter:ident, $description:literal) => { + #[doc = $description] pub(crate) static $name: once_cell::sync::Lazy< $crate::opentelemetry::metrics::Histogram, - > = once_cell::sync::Lazy::new(|| $meter.f64_histogram($description).init()); + > = once_cell::sync::Lazy::new(|| { + $meter + .f64_histogram(stringify!($name)) + .with_description($description) + .with_boundaries($crate::metrics::f64_histogram_buckets()) + .build() + }); }; } /// Create a [`Histogram`][Histogram] u64 metric with the specified name and an optional description, -/// associated with the specified meter. Note that the meter must be to a valid [`Meter`][Meter]. +/// associated with the specified meter. Note that the meter must be a valid [`Meter`][Meter]. 
/// /// [Histogram]: opentelemetry::metrics::Histogram /// [Meter]: opentelemetry::metrics::Meter @@ -74,64 +81,72 @@ macro_rules! histogram_metric_u64 { ($name:ident, $meter:ident) => { pub(crate) static $name: once_cell::sync::Lazy< $crate::opentelemetry::metrics::Histogram, - > = once_cell::sync::Lazy::new(|| $meter.u64_histogram(stringify!($name)).init()); + > = once_cell::sync::Lazy::new(|| { + $meter + .u64_histogram(stringify!($name)) + .with_boundaries($crate::metrics::f64_histogram_buckets()) + .build() + }); }; ($name:ident, $meter:ident, $description:literal) => { + #[doc = $description] pub(crate) static $name: once_cell::sync::Lazy< $crate::opentelemetry::metrics::Histogram, - > = once_cell::sync::Lazy::new(|| $meter.u64_histogram($description).init()); + > = once_cell::sync::Lazy::new(|| { + $meter + .u64_histogram(stringify!($name)) + .with_description($description) + .with_boundaries($crate::metrics::f64_histogram_buckets()) + .build() + }); }; } -/// Create a [`Histogram`][Histogram] i64 metric with the specified name and an optional description, -/// associated with the specified meter. Note that the meter must be to a valid [`Meter`][Meter]. +/// Create a [`Gauge`][Gauge] metric with the specified name and an optional description, +/// associated with the specified meter. Note that the meter must be a valid [`Meter`][Meter]. /// -/// [Histogram]: opentelemetry::metrics::Histogram +/// [Gauge]: opentelemetry::metrics::Gauge /// [Meter]: opentelemetry::metrics::Meter #[macro_export] -macro_rules! histogram_metric_i64 { +macro_rules! gauge_metric { ($name:ident, $meter:ident) => { - pub(crate) static $name: once_cell::sync::Lazy< - $crate::opentelemetry::metrics::Histogram, - > = once_cell::sync::Lazy::new(|| $meter.i64_histogram(stringify!($name)).init()); + pub(crate) static $name: once_cell::sync::Lazy<$crate::opentelemetry::metrics::Gauge> = + once_cell::sync::Lazy::new(|| $meter.u64_gauge(stringify!($name)).build()); }; - ($name:ident, $meter:ident, $description:literal) => { - pub(crate) static $name: once_cell::sync::Lazy< - $crate::opentelemetry::metrics::Histogram, - > = once_cell::sync::Lazy::new(|| $meter.i64_histogram($description).init()); + ($name:ident, $meter:ident, description:literal) => { + #[doc = $description] + pub(crate) static $name: once_cell::sync::Lazy<$crate::opentelemetry::metrics::Gauge> = + once_cell::sync::Lazy::new(|| { + $meter + .u64_gauge(stringify!($name)) + .with_description($description) + .build() + }); }; } -/// Create a [`ObservableGauge`][ObservableGauge] metric with the specified name and an optional description, -/// associated with the specified meter. Note that the meter must be to a valid [`Meter`][Meter]. -/// -/// [ObservableGauge]: opentelemetry::metrics::ObservableGauge -/// [Meter]: opentelemetry::metrics::Meter +/// Create attributes to associate with a metric from key-value pairs. #[macro_export] -macro_rules! gauge_metric { - ($name:ident, $meter:ident) => { - pub(crate) static $name: once_cell::sync::Lazy< - $crate::opentelemetry::metrics::ObservableGauge, - > = once_cell::sync::Lazy::new(|| $meter.u64_observable_gauge(stringify!($name)).init()); - }; - ($name:ident, $meter:ident, description:literal) => { - pub(crate) static $name: once_cell::sync::Lazy< - $crate::opentelemetry::metrics::ObservableGauge, - > = once_cell::sync::Lazy::new(|| $meter.u64_observable_gauge($description).init()); +macro_rules! metric_attributes { + ($(($key:expr, $value:expr $(,)?)),+ $(,)?) 
=> { + &[$($crate::opentelemetry::KeyValue::new($key, $value)),+] }; } -pub use helpers::add_attributes; +pub use helpers::f64_histogram_buckets; mod helpers { - pub fn add_attributes(attributes: U) -> Vec - where - T: Into, - U: IntoIterator, - { - attributes - .into_iter() - .map(|(key, value)| opentelemetry::KeyValue::new(key, value)) - .collect::>() + /// Returns the buckets to be used for a f64 histogram + #[inline(always)] + pub fn f64_histogram_buckets() -> Vec { + let mut init = 0.01; + let mut buckets: [f64; 15] = [0.0; 15]; + + for bucket in &mut buckets { + init *= 2.0; + *bucket = init; + } + + Vec::from(buckets) } } diff --git a/crates/scheduler/src/consumer.rs b/crates/scheduler/src/consumer.rs index 846f1137b129..99b9df524ae0 100644 --- a/crates/scheduler/src/consumer.rs +++ b/crates/scheduler/src/consumer.rs @@ -165,7 +165,7 @@ pub async fn consumer_operations( pt_utils::add_histogram_metrics(&pickup_time, task, &stream_name); - metrics::TASK_CONSUMED.add(&metrics::CONTEXT, 1, &[]); + metrics::TASK_CONSUMED.add(1, &[]); handler.push(tokio::task::spawn(start_workflow( state.clone(), @@ -244,7 +244,7 @@ where .inspect_err(|error| { logger::error!(?error, "Failed to trigger workflow"); }); - metrics::TASK_PROCESSED.add(&metrics::CONTEXT, 1, &[]); + metrics::TASK_PROCESSED.add(1, &[]); res } diff --git a/crates/scheduler/src/db/process_tracker.rs b/crates/scheduler/src/db/process_tracker.rs index c73b53b608c2..1b23ff1b5579 100644 --- a/crates/scheduler/src/db/process_tracker.rs +++ b/crates/scheduler/src/db/process_tracker.rs @@ -149,7 +149,7 @@ impl ProcessTrackerInterface for Store { this: storage::ProcessTracker, schedule_time: PrimitiveDateTime, ) -> CustomResult<(), errors::StorageError> { - metrics::TASK_RETRIED.add(&metrics::CONTEXT, 1, &[]); + metrics::TASK_RETRIED.add(1, &[]); let retry_count = this.retry_count + 1; self.update_process( this, @@ -177,7 +177,7 @@ impl ProcessTrackerInterface for Store { ) .await .attach_printable("Failed to update business status of process")?; - metrics::TASK_FINISHED.add(&metrics::CONTEXT, 1, &[]); + metrics::TASK_FINISHED.add(1, &[]); Ok(()) } diff --git a/crates/scheduler/src/metrics.rs b/crates/scheduler/src/metrics.rs index ca4fb9ec2424..27ac860d0794 100644 --- a/crates/scheduler/src/metrics.rs +++ b/crates/scheduler/src/metrics.rs @@ -1,9 +1,8 @@ -use router_env::{counter_metric, global_meter, histogram_metric, metrics_context}; +use router_env::{counter_metric, global_meter, histogram_metric_f64}; -metrics_context!(CONTEXT); global_meter!(PT_METER, "PROCESS_TRACKER"); -histogram_metric!(CONSUMER_STATS, PT_METER, "CONSUMER_OPS"); +histogram_metric_f64!(CONSUMER_OPS, PT_METER); counter_metric!(PAYMENT_COUNT, PT_METER); // No. 
of payments created counter_metric!(TASKS_PICKED_COUNT, PT_METER); // Tasks picked by diff --git a/crates/scheduler/src/producer.rs b/crates/scheduler/src/producer.rs index b91434fcbb04..3d28ec91fab8 100644 --- a/crates/scheduler/src/producer.rs +++ b/crates/scheduler/src/producer.rs @@ -175,6 +175,6 @@ pub async fn fetch_producer_tasks( // Safety: Assuming we won't deal with more than `u64::MAX` tasks at once #[allow(clippy::as_conversions)] - metrics::TASKS_PICKED_COUNT.add(&metrics::CONTEXT, new_tasks.len() as u64, &[]); + metrics::TASKS_PICKED_COUNT.add(new_tasks.len() as u64, &[]); Ok(new_tasks) } diff --git a/crates/scheduler/src/utils.rs b/crates/scheduler/src/utils.rs index e4b636ac5f11..89328479537e 100644 --- a/crates/scheduler/src/utils.rs +++ b/crates/scheduler/src/utils.rs @@ -5,7 +5,7 @@ use diesel_models::enums::{self, ProcessTrackerStatus}; pub use diesel_models::process_tracker as storage; use error_stack::{report, ResultExt}; use redis_interface::{RedisConnectionPool, RedisEntryId}; -use router_env::{instrument, opentelemetry, tracing}; +use router_env::{instrument, tracing}; use uuid::Uuid; use super::{ @@ -29,7 +29,7 @@ where let batches = divide(tasks, settings); // Safety: Assuming we won't deal with more than `u64::MAX` batches at once #[allow(clippy::as_conversions)] - metrics::BATCHES_CREATED.add(&metrics::CONTEXT, batches.len() as u64, &[]); // Metrics + metrics::BATCHES_CREATED.add(batches.len() as u64, &[]); // Metrics for batch in batches { let result = update_status_and_append(state, flow, batch).await; match result { @@ -209,7 +209,7 @@ pub async fn get_batches( } }; - metrics::BATCHES_CONSUMED.add(&metrics::CONTEXT, 1, &[]); + metrics::BATCHES_CONSUMED.add(1, &[]); let (batches, entry_ids): (Vec>, Vec>) = response.into_values().map(|entries| { entries.into_iter().try_fold( @@ -290,13 +290,9 @@ pub fn add_histogram_metrics( let pickup_schedule_delta = (*pickup_time - *schedule_time).as_seconds_f64(); logger::error!("Time delta for scheduled tasks: {pickup_schedule_delta} seconds"); let runner_name = runner.clone(); - metrics::CONSUMER_STATS.record( - &metrics::CONTEXT, + metrics::CONSUMER_OPS.record( pickup_schedule_delta, - &[opentelemetry::KeyValue::new( - stream_name.to_owned(), - runner_name, - )], + router_env::metric_attributes!((stream_name.to_owned(), runner_name)), ); }; } diff --git a/crates/storage_impl/src/lib.rs b/crates/storage_impl/src/lib.rs index 9b306ec8b077..1d48606bf006 100644 --- a/crates/storage_impl/src/lib.rs +++ b/crates/storage_impl/src/lib.rs @@ -262,9 +262,9 @@ impl KVRouterStore { .change_context(RedisError::JsonSerializationFailed)?, ) .await - .map(|_| metrics::KV_PUSHED_TO_DRAINER.add(&metrics::CONTEXT, 1, &[])) + .map(|_| metrics::KV_PUSHED_TO_DRAINER.add(1, &[])) .inspect_err(|error| { - metrics::KV_FAILED_TO_PUSH_TO_DRAINER.add(&metrics::CONTEXT, 1, &[]); + metrics::KV_FAILED_TO_PUSH_TO_DRAINER.add(1, &[]); logger::error!(?error, "Failed to add entry in drainer stream"); }) .change_context(RedisError::StreamAppendFailed) diff --git a/crates/storage_impl/src/metrics.rs b/crates/storage_impl/src/metrics.rs index cb7a6b216e47..b0c0c70af0a5 100644 --- a/crates/storage_impl/src/metrics.rs +++ b/crates/storage_impl/src/metrics.rs @@ -1,6 +1,5 @@ -use router_env::{counter_metric, gauge_metric, global_meter, metrics_context}; +use router_env::{counter_metric, gauge_metric, global_meter}; -metrics_context!(CONTEXT); global_meter!(GLOBAL_METER, "ROUTER_API"); counter_metric!(KV_MISS, GLOBAL_METER); // No. 
of KV misses diff --git a/crates/storage_impl/src/redis/cache.rs b/crates/storage_impl/src/redis/cache.rs index fff5435fc74d..93255fac9144 100644 --- a/crates/storage_impl/src/redis/cache.rs +++ b/crates/storage_impl/src/redis/cache.rs @@ -9,10 +9,7 @@ use error_stack::{Report, ResultExt}; use moka::future::Cache as MokaCache; use once_cell::sync::Lazy; use redis_interface::{errors::RedisError, RedisConnectionPool, RedisValue}; -use router_env::{ - metrics::add_attributes, - tracing::{self, instrument}, -}; +use router_env::tracing::{self, instrument}; use crate::{ errors::StorageError, @@ -193,12 +190,11 @@ impl Cache { // Record the metrics of manual invalidation of cache entry by the application let eviction_listener = move |_, _, cause| { metrics::IN_MEMORY_CACHE_EVICTION_COUNT.add( - &metrics::CONTEXT, 1, - &add_attributes([ + router_env::metric_attributes!( ("cache_type", name.to_owned()), ("removal_cause", format!("{:?}", cause)), - ]), + ), ); }; let mut cache_builder = MokaCache::builder() @@ -225,17 +221,11 @@ impl Cache { // Add cache hit and cache miss metrics if val.is_some() { - metrics::IN_MEMORY_CACHE_HIT.add( - &metrics::CONTEXT, - 1, - &add_attributes([("cache_type", self.name)]), - ); + metrics::IN_MEMORY_CACHE_HIT + .add(1, router_env::metric_attributes!(("cache_type", self.name))); } else { - metrics::IN_MEMORY_CACHE_MISS.add( - &metrics::CONTEXT, - 1, - &add_attributes([("cache_type", self.name)]), - ); + metrics::IN_MEMORY_CACHE_MISS + .add(1, router_env::metric_attributes!(("cache_type", self.name))); } let val = (*val?).as_any().downcast_ref::().cloned(); @@ -269,10 +259,9 @@ impl Cache { pub async fn record_entry_count_metric(&self) { self.run_pending_tasks().await; - metrics::IN_MEMORY_CACHE_ENTRY_COUNT.observe( - &metrics::CONTEXT, + metrics::IN_MEMORY_CACHE_ENTRY_COUNT.record( self.get_entry_count(), - &add_attributes([("cache_type", self.name)]), + router_env::metric_attributes!(("cache_type", self.name)), ); } } diff --git a/crates/storage_impl/src/redis/kv_store.rs b/crates/storage_impl/src/redis/kv_store.rs index 83d8de9c30ac..9203a14ae21c 100644 --- a/crates/storage_impl/src/redis/kv_store.rs +++ b/crates/storage_impl/src/redis/kv_store.rs @@ -257,19 +257,16 @@ where } }; + let attributes = router_env::metric_attributes!(("operation", operation.clone())); result .await .inspect(|_| { logger::debug!(kv_operation= %operation, status="success"); - let keyvalue = router_env::opentelemetry::KeyValue::new("operation", operation.clone()); - - metrics::KV_OPERATION_SUCCESSFUL.add(&metrics::CONTEXT, 1, &[keyvalue]); + metrics::KV_OPERATION_SUCCESSFUL.add(1, attributes); }) .inspect_err(|err| { logger::error!(kv_operation = %operation, status="error", error = ?err); - let keyvalue = router_env::opentelemetry::KeyValue::new("operation", operation); - - metrics::KV_OPERATION_FAILED.add(&metrics::CONTEXT, 1, &[keyvalue]); + metrics::KV_OPERATION_FAILED.add(1, attributes); }) } @@ -320,7 +317,7 @@ where .await { Ok(_) => { - metrics::KV_SOFT_KILL_ACTIVE_UPDATE.add(&metrics::CONTEXT, 1, &[]); + metrics::KV_SOFT_KILL_ACTIVE_UPDATE.add(1, &[]); MerchantStorageScheme::RedisKv } Err(_) => MerchantStorageScheme::PostgresOnly, diff --git a/crates/storage_impl/src/utils.rs b/crates/storage_impl/src/utils.rs index b634f41a98f1..01e0e8cbc141 100644 --- a/crates/storage_impl/src/utils.rs +++ b/crates/storage_impl/src/utils.rs @@ -59,7 +59,7 @@ where Ok(output) => Ok(output), Err(redis_error) => match redis_error.current_context() { redis_interface::errors::RedisError::NotFound => 
{ - metrics::KV_MISS.add(&metrics::CONTEXT, 1, &[]); + metrics::KV_MISS.add(1, &[]); database_call_closure().await } // Keeping the key empty here since the error would never go here.
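Taken together, the reworked `router_env` macros keep call sites terse while targeting the OpenTelemetry 0.27 instrument API. The sketch below is a hypothetical module, not code from this patch: the meter, metric, and function names are illustrative, and it assumes the enclosing crate depends on `router_env` and on `once_cell`, which the macro expansions reference.

use router_env::{counter_metric, global_meter, histogram_metric_f64};

// Lazily initialised meter and instruments, mirroring the pattern used in the
// patched crates/router/src/routes/metrics.rs (all names here are illustrative).
global_meter!(EXAMPLE_METER, "EXAMPLE_API");
counter_metric!(EXAMPLE_REQUESTS_RECEIVED, EXAMPLE_METER);
histogram_metric_f64!(EXAMPLE_REQUEST_TIME, EXAMPLE_METER);

pub fn record_request(flow: String, elapsed_secs: f64) {
    // `metric_attributes!` builds the `&[KeyValue]` slice expected by the 0.27
    // `add`/`record` methods, replacing the removed `add_attributes` helper and
    // the dropped `&metrics::CONTEXT` argument.
    let attributes = router_env::metric_attributes!(("flow", flow));
    EXAMPLE_REQUESTS_RECEIVED.add(1, attributes);
    EXAMPLE_REQUEST_TIME.record(elapsed_secs, attributes);
}

Until `setup_metrics_pipeline` has installed the global meter provider, these instruments resolve against OpenTelemetry's no-op provider, so calling `record_request` before telemetry setup simply records nothing.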