diff --git a/node/metrics.go b/node/metrics.go
index 940b0a1300..1b72e14069 100644
--- a/node/metrics.go
+++ b/node/metrics.go
@@ -45,6 +45,8 @@ type Metrics struct {
 	EigenMetrics eigenmetrics.Metrics
 	// Reachability gauge to monitoring the reachability of the node's retrieval/dispersal sockets
 	ReachabilityGauge *prometheus.GaugeVec
+	// The throughput (bytes per second) at which the data is written to database.
+	DBWriteThroughput prometheus.Gauge
 
 	registry *prometheus.Registry
 	// socketAddr is the address at which the metrics server will be listening.
@@ -139,6 +141,14 @@ func NewMetrics(eigenMetrics eigenmetrics.Metrics, reg *prometheus.Registry, log
 			},
 			[]string{"service"},
 		),
+		DBWriteThroughput: promauto.With(reg).NewGauge(
+			prometheus.GaugeOpts{
+				Namespace: Namespace,
+				Name:      "db_write_throughput_bytes_per_second",
+				Help:      "the throughput (bytes per second) at which the data is written to database",
+			},
+		),
+
 		EigenMetrics: eigenMetrics,
 		logger:       logger.With("component", "NodeMetrics"),
 		registry:     reg,
diff --git a/node/store.go b/node/store.go
index 06849a7848..0e38be43d1 100644
--- a/node/store.go
+++ b/node/store.go
@@ -311,7 +311,9 @@ func (s *Store) StoreBatch(ctx context.Context, header *core.BatchHeader, blobs
 		log.Error("Failed to write the batch into local database:", "err", err)
 		return nil, err
 	}
-	log.Debug("StoreBatch succeeded", "chunk serialization duration", serializationDuration, "bytes encoding duration", encodingDuration, "write batch duration", time.Since(start), "total store batch duration", time.Since(storeBatchStart), "total bytes", size)
+	throughput := float64(size) / time.Since(start).Seconds()
+	s.metrics.DBWriteThroughput.Set(throughput)
+	log.Debug("StoreBatch succeeded", "chunk serialization duration", serializationDuration, "bytes encoding duration", encodingDuration, "num blobs", len(blobs), "num of key-value pair entries", len(keys), "write batch duration", time.Since(start), "write throughput (MB/s)", throughput/1000_000, "total store batch duration", time.Since(storeBatchStart), "total bytes", size)
 
 	return &keys, nil
 }
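
For reference, below is a minimal, self-contained sketch of the pattern this diff introduces: registering a Prometheus gauge via promauto and setting it to a bytes-per-second throughput, then reading it back. The standalone registry, the "eigenda_node" namespace string, and the simulated batch size/duration are placeholders for illustration only, not the node's actual Namespace constant or runtime values.

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
	"github.com/prometheus/client_golang/prometheus/testutil"
)

func main() {
	// Standalone registry so the example does not depend on the node's Metrics struct.
	reg := prometheus.NewRegistry()

	// Gauge registered the same way as in NewMetrics above; "eigenda_node" is a
	// placeholder namespace, not necessarily the node's actual Namespace value.
	dbWriteThroughput := promauto.With(reg).NewGauge(prometheus.GaugeOpts{
		Namespace: "eigenda_node",
		Name:      "db_write_throughput_bytes_per_second",
		Help:      "the throughput (bytes per second) at which the data is written to database",
	})

	// Simulate a batch write: total bytes written and when the write started.
	size := int64(64 * 1024 * 1024)              // 64 MiB batch (made-up value)
	start := time.Now().Add(-2 * time.Second)    // pretend the write took ~2s

	// Same computation as StoreBatch: bytes divided by elapsed seconds.
	throughput := float64(size) / time.Since(start).Seconds()
	dbWriteThroughput.Set(throughput)

	// testutil.ToFloat64 reads the current gauge value back, e.g. for a unit test.
	fmt.Printf("throughput: %.2f MB/s\n", testutil.ToFloat64(dbWriteThroughput)/1_000_000)
}

A gauge fits this use because each StoreBatch call overwrites the value with the most recent write's throughput; if a per-batch distribution were needed instead, a histogram would be the alternative.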