Skip to content

Commit

Permalink
Issue 6614 - CLI - Error when trying to display global DB stats with …
Browse files Browse the repository at this point in the history
…LMDB

Bug description:
Displaying global monitor stats fails with a KeyError. This is caused by BDB
backend keys being used when MDB is the configured DB implementation.

Fix description:
Ensure backend and monitor keys match the configured DB implementation.

Fixes: 389ds#6614

Reviewed by:
  • Loading branch information
jchapma committed Feb 17, 2025
1 parent 885cdf4 commit a3a46e1
Show file tree
Hide file tree
Showing 4 changed files with 129 additions and 118 deletions.
3 changes: 1 addition & 2 deletions dirsrvtests/tests/suites/monitor/monitor_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,13 +101,12 @@ def test_monitor_ldbm(topo):

# Check that known attributes exist (only NDN cache stats)
assert 'normalizeddncachehits' in monitor

# Check for library specific attributes
if db_lib == 'bdb':
assert 'dbcachehits' in monitor
assert 'nsslapd-db-configured-locks' in monitor
elif db_lib == 'mdb':
pass
assert not 'dbcachehits' in monitor
else:
# Unknown - the server would probably fail to start but check it anyway
log.fatal(f'Unknown backend library: {db_lib}')
Expand Down
4 changes: 4 additions & 0 deletions src/lib389/lib389/_constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -383,3 +383,7 @@ class AccessLog(IntEnum):
'BUNDLED', # lib389 bundled rpm is installed
'READ_ONLY', # Read-only version is available
'NONE' ]) # bdb is not usable

# DB implementation
DB_IMPL_BDB = "bdb"
DB_IMPL_MDB = "mdb"
127 changes: 69 additions & 58 deletions src/lib389/lib389/cli_conf/monitor.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import datetime
import json
import os
from lib389._constants import (DB_IMPL_BDB, DB_IMPL_MDB)
from lib389.monitor import (Monitor, MonitorLDBM, MonitorSNMP, MonitorDiskSpace)
from lib389.chaining import (ChainingLinks)
from lib389.backend import Backends
Expand Down Expand Up @@ -125,27 +126,30 @@ def db_monitor(inst, basedn, log, args):
# Gather the global DB stats
report_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
ldbm_mon = ldbm_monitor.get_status()
dbcachesize = int(ldbm_mon['nsslapd-db-cache-size-bytes'][0])
# Warning: there are two different page sizes associated with bdb:
# - nsslapd-db-mp-pagesize the db mempool (i.e the db cache) page size which is usually 4K
# - nsslapd-db-pagesize the db instances (i.e id2entry, indexes, changelog) page size which
# is usually 8K
# To compute the db cache statistics we must use the nsslapd-db-mp-pagesize
if 'nsslapd-db-mp-pagesize' in ldbm_mon:
pagesize = int(ldbm_mon['nsslapd-db-mp-pagesize'][0])
else:
# targeting a remote instance that does not have the github issue 5550 fix.
# So let's use the usual default file system preferred block size.
# db cache free statistics may be wrong, but we have no way to
# compute them correctly.
pagesize = 4096
dbhitratio = ldbm_mon['dbcachehitratio'][0]
dbcachepagein = ldbm_mon['dbcachepagein'][0]
dbcachepageout = ldbm_mon['dbcachepageout'][0]
dbroevict = ldbm_mon['nsslapd-db-page-ro-evict-rate'][0]
dbpages = int(ldbm_mon['nsslapd-db-pages-in-use'][0])
dbcachefree = max(int(dbcachesize - (pagesize * dbpages)), 0)
dbcachefreeratio = dbcachefree/dbcachesize
if ldbm_monitor.inst_db_impl == DB_IMPL_BDB :
dbcachesize = int(ldbm_mon['nsslapd-db-cache-size-bytes'][0])
# Warning: there are two different page sizes associated with bdb:
# - nsslapd-db-mp-pagesize the db mempool (i.e the db cache) page size which is usually 4K
# - nsslapd-db-pagesize the db instances (i.e id2entry, indexes, changelog) page size which
# is usually 8K
# To compute the db cache statistics we must use the nsslapd-db-mp-pagesize
if 'nsslapd-db-mp-pagesize' in ldbm_mon:
pagesize = int(ldbm_mon['nsslapd-db-mp-pagesize'][0])
else:
# targeting a remote instance that does not have the github issue 5550 fix.
# So let's use the usual default file system preferred block size.
# db cache free statistics may be wrong, but we have no way to
# compute them correctly.
pagesize = 4096

dbhitratio = ldbm_mon['dbcachehitratio'][0]
dbcachepagein = ldbm_mon['dbcachepagein'][0]
dbcachepageout = ldbm_mon['dbcachepageout'][0]
dbroevict = ldbm_mon['nsslapd-db-page-ro-evict-rate'][0]
dbpages = int(ldbm_mon['nsslapd-db-pages-in-use'][0])
dbcachefree = max(int(dbcachesize - (pagesize * dbpages)), 0)
dbcachefreeratio = dbcachefree/dbcachesize

ndnratio = ldbm_mon['normalizeddncachehitratio'][0]
ndncursize = int(ldbm_mon['currentnormalizeddncachesize'][0])
ndnmaxsize = int(ldbm_mon['maxnormalizeddncachesize'][0])
Expand All @@ -161,14 +165,6 @@ def db_monitor(inst, basedn, log, args):
# Build global cache stats
result = {
'date': report_time,
'dbcache': {
'hit_ratio': dbhitratio,
'free': convert_bytes(str(dbcachefree)),
'free_percentage': "{:.1f}".format(dbcachefreeratio * 100),
'roevicts': dbroevict,
'pagein': dbcachepagein,
'pageout': dbcachepageout
},
'ndncache': {
'hit_ratio': ndnratio,
'free': convert_bytes(str(ndnfree)),
Expand All @@ -179,6 +175,16 @@ def db_monitor(inst, basedn, log, args):
'backends': {},
}

if ldbm_monitor.inst_db_impl == DB_IMPL_BDB:
result['dbcache'] = {
'hit_ratio': dbhitratio,
'free': convert_bytes(str(dbcachefree)),
'free_percentage': "{:.1f}".format(dbcachefreeratio * 100),
'roevicts': dbroevict,
'pagein': dbcachepagein,
'pageout': dbcachepageout
}

# Build the backend results
for be in backend_objs:
be_name = be.rdn
Expand All @@ -198,17 +204,18 @@ def db_monitor(inst, basedn, log, args):
else:
entsize = int(entcur / entcnt)

# Process DN cache stats
dncur = int(all_attrs['currentdncachesize'][0])
dnmax = int(all_attrs['maxdncachesize'][0])
dncnt = int(all_attrs['currentdncachecount'][0])
dnratio = all_attrs['dncachehitratio'][0]
dnfree = dnmax - dncur
dnfreep = "{:.1f}".format(dnfree / dnmax * 100)
if dncnt == 0:
dnsize = 0
else:
dnsize = int(dncur / dncnt)
if ldbm_monitor.inst_db_impl == DB_IMPL_BDB :
# Process DN cache stats
dncur = int(all_attrs['currentdncachesize'][0])
dnmax = int(all_attrs['maxdncachesize'][0])
dncnt = int(all_attrs['currentdncachecount'][0])
dnratio = all_attrs['dncachehitratio'][0]
dnfree = dnmax - dncur
dnfreep = "{:.1f}".format(dnfree / dnmax * 100)
if dncnt == 0:
dnsize = 0
else:
dnsize = int(dncur / dncnt)

# Build the backend result
result['backends'][be_name] = {
Expand All @@ -218,13 +225,15 @@ def db_monitor(inst, basedn, log, args):
'entry_cache_free_percentage': entfreep,
'entry_cache_size': convert_bytes(str(entsize)),
'entry_cache_hit_ratio': entratio,
'dn_cache_count': all_attrs['currentdncachecount'][0],
'dn_cache_free': convert_bytes(str(dnfree)),
'dn_cache_free_percentage': dnfreep,
'dn_cache_size': convert_bytes(str(dnsize)),
'dn_cache_hit_ratio': dnratio,
'indexes': []
}
if ldbm_monitor.inst_db_impl == DB_IMPL_BDB :
backend = result['backends'][be_name]
backend['dn_cache_count'] = all_attrs['currentdncachecount'][0]
backend['dn_cache_free'] = convert_bytes(str(dnfree))
backend['dn_cache_free_percentage'] = dnfreep
backend['dn_cache_size'] = convert_bytes(str(dnsize))
backend['dn_cache_hit_ratio'] = dnratio

# Process indexes if requested
if args.indexes:
Expand Down Expand Up @@ -256,14 +265,15 @@ def db_monitor(inst, basedn, log, args):
else:
log.info("DB Monitor Report: " + result['date'])
log.info("--------------------------------------------------------")
log.info("Database Cache:")
log.info(" - Cache Hit Ratio: {}%".format(result['dbcache']['hit_ratio']))
log.info(" - Free Space: {}".format(result['dbcache']['free']))
log.info(" - Free Percentage: {}%".format(result['dbcache']['free_percentage']))
log.info(" - RO Page Drops: {}".format(result['dbcache']['roevicts']))
log.info(" - Pages In: {}".format(result['dbcache']['pagein']))
log.info(" - Pages Out: {}".format(result['dbcache']['pageout']))
log.info("")
if ldbm_monitor.inst_db_impl == DB_IMPL_BDB :
log.info("Database Cache:")
log.info(" - Cache Hit Ratio: {}%".format(result['dbcache']['hit_ratio']))
log.info(" - Free Space: {}".format(result['dbcache']['free']))
log.info(" - Free Percentage: {}%".format(result['dbcache']['free_percentage']))
log.info(" - RO Page Drops: {}".format(result['dbcache']['roevicts']))
log.info(" - Pages In: {}".format(result['dbcache']['pagein']))
log.info(" - Pages Out: {}".format(result['dbcache']['pageout']))
log.info("")
log.info("Normalized DN Cache:")
log.info(" - Cache Hit Ratio: {}%".format(result['ndncache']['hit_ratio']))
log.info(" - Free Space: {}".format(result['ndncache']['free']))
Expand All @@ -279,11 +289,12 @@ def db_monitor(inst, basedn, log, args):
log.info(" - Entry Cache Free Space: {}".format(attr_dict['entry_cache_free']))
log.info(" - Entry Cache Free Percentage: {}%".format(attr_dict['entry_cache_free_percentage']))
log.info(" - Entry Cache Average Size: {}".format(attr_dict['entry_cache_size']))
log.info(" - DN Cache Hit Ratio: {}%".format(attr_dict['dn_cache_hit_ratio']))
log.info(" - DN Cache Count: {}".format(attr_dict['dn_cache_count']))
log.info(" - DN Cache Free Space: {}".format(attr_dict['dn_cache_free']))
log.info(" - DN Cache Free Percentage: {}%".format(attr_dict['dn_cache_free_percentage']))
log.info(" - DN Cache Average Size: {}".format(attr_dict['dn_cache_size']))
if ldbm_monitor.inst_db_impl == DB_IMPL_BDB :
log.info(" - DN Cache Hit Ratio: {}%".format(attr_dict['dn_cache_hit_ratio']))
log.info(" - DN Cache Count: {}".format(attr_dict['dn_cache_count']))
log.info(" - DN Cache Free Space: {}".format(attr_dict['dn_cache_free']))
log.info(" - DN Cache Free Percentage: {}%".format(attr_dict['dn_cache_free_percentage']))
log.info(" - DN Cache Average Size: {}".format(attr_dict['dn_cache_size']))
if len(result['backends'][be_name]['indexes']) > 0:
log.info(" - Indexes:")
for index in result['backends'][be_name]['indexes']:
Expand Down
113 changes: 55 additions & 58 deletions src/lib389/lib389/monitor.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,66 +117,63 @@ def __init__(self, instance, dn=None):
super(MonitorLDBM, self).__init__(instance=instance)
self._dn = DN_MONITOR_LDBM
self._db_mon = MonitorDatabase(instance)
self._backend_keys = [
'dbcachehits',
'dbcachetries',
'dbcachehitratio',
'dbcachepagein',
'dbcachepageout',
'dbcacheroevict',
'dbcacherwevict',
]
self._db_mon_keys = [
'nsslapd-db-abort-rate',
'nsslapd-db-active-txns',
'nsslapd-db-cache-hit',
'nsslapd-db-cache-try',
'nsslapd-db-cache-region-wait-rate',
'nsslapd-db-cache-size-bytes',
'nsslapd-db-clean-pages',
'nsslapd-db-commit-rate',
'nsslapd-db-deadlock-rate',
'nsslapd-db-dirty-pages',
'nsslapd-db-hash-buckets',
'nsslapd-db-hash-elements-examine-rate',
'nsslapd-db-hash-search-rate',
'nsslapd-db-lock-conflicts',
'nsslapd-db-lock-region-wait-rate',
'nsslapd-db-lock-request-rate',
'nsslapd-db-lockers',
'nsslapd-db-configured-locks',
'nsslapd-db-current-locks',
'nsslapd-db-max-locks',
'nsslapd-db-current-lock-objects',
'nsslapd-db-max-lock-objects',
'nsslapd-db-log-bytes-since-checkpoint',
'nsslapd-db-log-region-wait-rate',
'nsslapd-db-log-write-rate',
'nsslapd-db-longest-chain-length',
'nsslapd-db-page-create-rate',
'nsslapd-db-page-read-rate',
'nsslapd-db-page-ro-evict-rate',
'nsslapd-db-page-rw-evict-rate',
'nsslapd-db-page-trickle-rate',
'nsslapd-db-page-write-rate',
'nsslapd-db-pages-in-use',
'nsslapd-db-txn-region-wait-rate',
'nsslapd-db-mp-pagesize',
]
if not ds_is_older("1.4.0", instance=instance):
self._backend_keys.extend([
'normalizeddncachetries',
'normalizeddncachehits',
'normalizeddncachemisses',
'normalizeddncachehitratio',
'normalizeddncacheevictions',
'currentnormalizeddncachesize',
'maxnormalizeddncachesize',
'currentnormalizeddncachecount',
'normalizeddncachethreadsize',
'normalizeddncachethreadslots'
self.inst_db_impl = self._instance.get_db_lib()
db_keys = {
DB_IMPL_BDB: [
'dbcachehits', 'dbcachetries', 'dbcachehitratio',
'dbcachepagein', 'dbcachepageout', 'dbcacheroevict',
'dbcacherwevict'
],
DB_IMPL_MDB: [
'normalizeddncachetries', 'normalizeddncachehits',
'normalizeddncachemisses', 'normalizeddncachehitratio',
'normalizeddncacheevictions', 'currentnormalizeddncachesize',
'maxnormalizeddncachesize', 'currentnormalizeddncachecount',
'normalizeddncachethreadsize', 'normalizeddncachethreadslots'
]
}

if self.inst_db_impl == DB_IMPL_BDB and not ds_is_older("1.4.0", instance=instance):
db_keys[DB_IMPL_BDB].extend([
'normalizeddncachetries', 'normalizeddncachehits',
'normalizeddncachemisses', 'normalizeddncachehitratio',
'normalizeddncacheevictions', 'currentnormalizeddncachesize',
'maxnormalizeddncachesize', 'currentnormalizeddncachecount',
'normalizeddncachethreadsize', 'normalizeddncachethreadslots'
])

db_monitor_keys = {
DB_IMPL_BDB: [
'nsslapd-db-abort-rate', 'nsslapd-db-active-txns', 'nsslapd-db-cache-hit',
'nsslapd-db-cache-try', 'nsslapd-db-cache-region-wait-rate',
'nsslapd-db-cache-size-bytes', 'nsslapd-db-clean-pages', 'nsslapd-db-commit-rate',
'nsslapd-db-deadlock-rate', 'nsslapd-db-dirty-pages', 'nsslapd-db-hash-buckets',
'nsslapd-db-hash-elements-examine-rate', 'nsslapd-db-hash-search-rate',
'nsslapd-db-lock-conflicts', 'nsslapd-db-lock-region-wait-rate',
'nsslapd-db-lock-request-rate', 'nsslapd-db-lockers', 'nsslapd-db-configured-locks',
'nsslapd-db-current-locks', 'nsslapd-db-max-locks', 'nsslapd-db-current-lock-objects',
'nsslapd-db-max-lock-objects', 'nsslapd-db-log-bytes-since-checkpoint',
'nsslapd-db-log-region-wait-rate', 'nsslapd-db-log-write-rate',
'nsslapd-db-longest-chain-length', 'nsslapd-db-page-create-rate',
'nsslapd-db-page-read-rate', 'nsslapd-db-page-ro-evict-rate',
'nsslapd-db-page-rw-evict-rate', 'nsslapd-db-page-trickle-rate',
'nsslapd-db-page-write-rate', 'nsslapd-db-pages-in-use',
'nsslapd-db-txn-region-wait-rate', 'nsslapd-db-mp-pagesize'
],
DB_IMPL_MDB: [
'dbenvmapmaxsize', 'dbenvmapsize', 'dbenvlastpageno',
'dbenvlasttxnid', 'dbenvmaxreaders', 'dbenvnumreaders',
'dbenvnumdbis', 'waitingrwtxn', 'activerwtxn',
'abortrwtxn', 'commitrwtxn', 'granttimerwtxn',
'lifetimerwtxn', 'waitingrotxn', 'activerotxn',
'abortrotxn', 'commitrotxn', 'granttimerotxn',
'lifetimerotxn'
]
}

self._backend_keys = db_keys.get(self.inst_db_impl, [])
self._db_mon_keys = db_monitor_keys.get(self.inst_db_impl, [])

def get_status(self, use_json=False):
ldbm_dict = self.get_attrs_vals_utf8(self._backend_keys)
db_dict = self._db_mon.get_attrs_vals_utf8(self._db_mon_keys)
Expand Down

0 comments on commit a3a46e1

Please sign in to comment.