log: cut logs omnibus #1942

Merged 2 commits on May 31, 2024

1 change: 1 addition & 0 deletions changes/2024-05-30T145052-0400.txt
@@ -0,0 +1 @@
Cut pipeline logging is more descriptive
4 changes: 4 additions & 0 deletions src/Chainweb/BlockHash.hs
@@ -39,6 +39,7 @@ module Chainweb.BlockHash
, decodeBlockHash
, nullBlockHash
, blockHashToText
, blockHashToTextShort
, blockHashFromText

-- * Block Hash Record
@@ -154,6 +155,9 @@ blockHashToText :: BlockHash_ a -> T.Text
blockHashToText = encodeB64UrlNoPaddingText . runPutS . encodeBlockHash
{-# INLINE blockHashToText #-}

blockHashToTextShort :: BlockHash_ a -> T.Text
blockHashToTextShort = T.take 6 . blockHashToText

blockHashFromText
:: MerkleHashAlgorithm a
=> MonadThrow m
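The new helper simply keeps the first six characters of the base64url-encoded hash. A minimal standalone sketch of the same truncation, using a made-up encoded hash string rather than a real BlockHash, looks like this:

import qualified Data.Text as T

-- Hypothetical full base64url encoding of a block hash (not a real hash).
fullHashText :: T.Text
fullHashText = T.pack "bSQgL5rEfN3v0kq9YwXhZ2mJ7cTdUo1A"

-- Mirrors blockHashToTextShort, which is T.take 6 . blockHashToText.
shortHashText :: T.Text
shortHashText = T.take 6 fullHashText   -- == T.pack "bSQgL5"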
13 changes: 13 additions & 0 deletions src/Chainweb/BlockHeader.hs
@@ -89,6 +89,7 @@ module Chainweb.BlockHeader
, decodeBlockHeaderWithoutHash
, decodeBlockHeaderChecked
, decodeBlockHeaderCheckedChainId
, blockHeaderShortDescription
, ObjectEncoded(..)

, timeBetween
@@ -386,6 +387,18 @@ instance IsCasValue BlockHeader where

type BlockHeaderCas tbl = Cas tbl BlockHeader

-- | Used for quickly identifying "which block" this is.
-- Example output:
-- "0 @ bSQgL5 (height 4810062)"
blockHeaderShortDescription :: BlockHeader -> T.Text
blockHeaderShortDescription bh =
T.unwords
[ toText (_chainId bh)
, "@"
, blockHashToTextShort (_blockHash bh)
, "(height " <> sshow (getBlockHeight $ _blockHeight bh) <> ")"
]

makeLenses ''BlockHeader

-- | During the first epoch after genesis there are 10 extra difficulty
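blockHeaderShortDescription only glues together the chain id, the shortened hash, and the height with T.unwords. A self-contained sketch with hypothetical values (not tied to a real BlockHeader) reproduces the documented example output:

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Text as T

-- Hypothetical inputs; in the real function they come from _chainId,
-- _blockHash (shortened) and _blockHeight.
shortDescription :: T.Text -> T.Text -> Integer -> T.Text
shortDescription cid shortHash height = T.unwords
    [ cid
    , "@"
    , shortHash
    , "(height " <> T.pack (show height) <> ")"
    ]

-- shortDescription "0" "bSQgL5" 4810062 == "0 @ bSQgL5 (height 4810062)"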
23 changes: 23 additions & 0 deletions src/Chainweb/Cut.hs
@@ -38,6 +38,8 @@
--
module Chainweb.Cut
( Cut
, cutToTextShort
, cutDiffToTextShort
, _cutMap
, cutMap
, _cutHeight
@@ -106,9 +108,11 @@ import Data.Functor.Of
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import qualified Data.Heap as H
import qualified Data.List as List
import Data.Maybe (catMaybes, fromMaybe)
import Data.Monoid
import Data.Ord
import Data.Text (Text)
import qualified Data.Text as T
import Data.These

@@ -898,3 +902,22 @@ forkDepth wdb a b = do
maxDepth l u = maximum
$ (\(_, x, y) -> _blockHeight y - _blockHeight x)
<$> zipCuts l u

cutToTextShort :: Cut -> [Text]
cutToTextShort c =
[ blockHeaderShortDescription bh
| (_, bh) <- List.sortOn fst $ HM.toList (_cutHeaders c)
]

cutDiffToTextShort :: Cut -> Cut -> [Text]
cutDiffToTextShort c c' =
[ T.unwords
[ maybe "No block" blockHeaderShortDescription bh
, "->"
, maybe "No block" blockHeaderShortDescription bh'
]
| cid <- List.sort $ HM.keys $ HM.union (_cutHeaders c) (_cutHeaders c')
, let bh = HM.lookup cid (_cutHeaders c)
, let bh' = HM.lookup cid (_cutHeaders c')
, bh /= bh'
]
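
cutDiffToTextShort emits one "old -> new" line per chain whose head changed, falling back to "No block" when a chain is absent from one of the cuts. A reduced sketch of that per-chain rendering, with hypothetical description strings standing in for real headers:

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Text as T

-- One diff line; Nothing stands in for a chain that is missing from a cut.
diffLine :: Maybe T.Text -> Maybe T.Text -> T.Text
diffLine old new = T.unwords
    [ maybe "No block" id old
    , "->"
    , maybe "No block" id new
    ]

-- diffLine (Just "0 @ bSQgL5 (height 4810062)") (Just "0 @ kT9xQa (height 4810063)")
--   == "0 @ bSQgL5 (height 4810062) -> 0 @ kT9xQa (height 4810063)"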
61 changes: 38 additions & 23 deletions src/Chainweb/CutDB.hs
@@ -17,6 +17,7 @@
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE TupleSections #-}

-- |
-- Module: Chainweb.CutDB
@@ -433,7 +434,8 @@ startCutDb config logfun headerStore payloadStore cutHashesStore = mask_ $ do
(Just $ over _1 succ $ casKey $ cutToCutHashes Nothing initialCut, Nothing)
cutVar <- newTVarIO initialCut
c <- readTVarIO cutVar
logg Info $ "got initial cut: " <> sshow c
logg Info $ T.unlines $
"got initial cut:" : [" " <> block | block <- cutToTextShort c]
queue <- newEmptyPQueue
cutAsync <- asyncWithUnmask $ \u -> u $ processor queue cutVar
logg Info "CutDB started"
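
With the change above, the initial-cut log line expands into a header followed by one indented line per chain. A small sketch of the resulting message shape, with made-up per-chain descriptions standing in for cutToTextShort c:

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Text as T

-- Hypothetical per-chain descriptions for a three-chain cut.
initialCutMsg :: T.Text
initialCutMsg = T.unlines $
    "got initial cut:" :
    [ "    " <> block
    | block <-
        [ "0 @ bSQgL5 (height 4810062)"
        , "1 @ kT9xQa (height 4810061)"
        , "2 @ mN2pVw (height 4810062)"
        ]
    ]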
@@ -554,19 +556,16 @@ processCuts
processCuts conf logFun headerStore payloadStore cutHashesStore queue cutVar = do
rng <- Prob.createSystemRandom
queueToStream
& S.chain (\c -> loggc Debug c "start processing")
& S.chain (\c -> loggCutId logFun Debug c "start processing")
& S.filterM (fmap not . isVeryOld)
& S.filterM (fmap not . farAhead)
& S.filterM (fmap not . isOld)
& S.filterM (fmap not . isCurrent)
& S.chain (\c -> loggc Debug c "fetch all prerequesites")

& S.chain (\c -> loggCutId logFun Info c "fetching all prerequisites")
& S.mapM (cutHashesToBlockHeaderMap conf logFun headerStore payloadStore)
& S.chain (either
(\(T2 hsid c) -> loggc Warn hsid $ "failed to get prerequesites for some blocks. Missing: " <> encodeToText c)
(\c -> loggc Info c "got all prerequesites")
)
& S.concat
-- ignore left values for now
& S.catMaybes
-- ignore unsuccessful values for now

-- using S.scanM would be slightly more efficient (one pointer dereference)
-- by keeping the value of cutVar in memory. We use the S.mapM variant with
@@ -577,14 +576,22 @@
$ joinIntoHeavier_ hdrStore (_cutMap curCut) newCut
unless (_cutDbParamsReadOnly conf) $ do
maybePrune rng (cutAvgBlockHeight v curCut)
loggc Info newCut "writing cut"
loggCutId logFun Debug newCut "writing cut"
casInsert cutHashesStore (cutToCutHashes Nothing resultCut)
atomically $ writeTVar cutVar resultCut
loggc Info resultCut "published cut"
let cutDiff = cutDiffToTextShort curCut resultCut
let currentCutIdMsg = T.unwords
[ "current cut is now"
, cutIdToTextShort (_cutId resultCut) <> ","
, "diff:"
]
let catOverflowing x xs =
if length xs == 1
then T.unwords (x : xs)
else T.intercalate "\n" (x : (map (" " <>) xs))
logFun @T.Text Info $ catOverflowing currentCutIdMsg cutDiff
)
where
loggc :: HasCutId c => LogLevel -> c -> T.Text -> IO ()
loggc l c msg = logFun @T.Text l $ "cut " <> cutIdToTextShort (_cutId c) <> ": " <> msg

v = _chainwebVersion headerStore

@@ -615,7 +622,7 @@ processCuts conf logFun headerStore payloadStore cutHashesStore queue cutVar = d
curMax <- maxChainHeight <$> readTVarIO cutVar
let newMax = _cutHashesMaxHeight x
let r = newMax >= curMax + farAheadThreshold
when r $ loggc Debug x
when r $ loggCutId logFun Debug x
$ "skip far ahead cut. Current maximum block height: " <> sshow curMax
<> ", got: " <> sshow newMax
-- log at debug level because this is a common case during catchup
@@ -635,7 +642,7 @@ processCuts conf logFun headerStore payloadStore cutHashesStore queue cutVar = d
let diam = diameter $ chainGraphAt headerStore curMin
newMin = _cutHashesMinHeight x
let r = newMin + 2 * (1 + int diam) <= curMin
when r $ loggc Debug x "skip very old cut"
when r $ loggCutId logFun Debug x "skip very old cut"
-- log at debug level because this is a common case during catchup
return r

@@ -644,13 +651,13 @@ processCuts conf logFun headerStore payloadStore cutHashesStore queue cutVar = d
isOld x = do
curHashes <- cutToCutHashes Nothing <$> readTVarIO cutVar
let r = all (>= (0 :: Int)) $ (HM.unionWith (-) `on` (fmap (int . _bhwhHeight) . _cutHashes)) curHashes x
when r $ loggc Debug x "skip old cut"
when r $ loggCutId logFun Debug x "skip old cut"
return r

isCurrent x = do
curHashes <- cutToCutHashes Nothing <$> readTVarIO cutVar
let r = _cutHashes curHashes == _cutHashes x
when r $ loggc Debug x "skip current cut"
when r $ loggCutId logFun Debug x "skip current cut"
return r

-- | Stream of most recent cuts. This stream does not generally include the full
Expand Down Expand Up @@ -754,7 +761,7 @@ cutHashesToBlockHeaderMap
-> WebBlockHeaderStore
-> WebBlockPayloadStore tbl
-> CutHashes
-> IO (Either (T2 CutId (HM.HashMap ChainId BlockHash)) (HM.HashMap ChainId BlockHeader))
-> IO (Maybe (HM.HashMap ChainId BlockHeader))
-- ^ The 'Left' value holds missing hashes, the 'Right' value holds
-- a 'Cut'.
cutHashesToBlockHeaderMap conf logfun headerStore payloadStore hs =
@@ -768,9 +775,13 @@ cutHashesToBlockHeaderMap conf logfun headerStore payloadStore hs =
$ "Timeout while processing cut "
<> cutIdToTextShort hsid
<> " at height " <> sshow (_cutHashesHeight hs)
<> cutOriginText
return $! Left $! T2 hsid mempty
Just x -> return $! x
<> " from origin " <> cutOriginText
return Nothing
Just (Left missing) -> do
loggCutId logfun Warn hs $ "Failed to get prerequisites for some blocks. Missing: " <> encodeToText missing
return Nothing
Just (Right headers) -> do
return (Just headers)
where
hsid = _cutId hs
go =
@@ -792,12 +803,12 @@ cutHashesToBlockHeaderMap conf logfun headerStore payloadStore hs =
& S.fold_ (\x (cid, h) -> HM.insert cid h x) mempty id
& S.fold (\x (cid, h) -> HM.insert cid h x) mempty id
if null missing
then return (Right headers)
then return $! Right headers
else do
when (isJust $ _cutHashesLocalPayload hs) $
logfun @Text Error
"error validating locally mined cut; the mining loop will stall until unstuck by another mining node"
return $! Left $! T2 hsid missing
return $! Left missing

origin = _cutOrigin hs
priority = Priority (- int (_cutHashesHeight hs))
@@ -883,3 +894,7 @@ getQueueStats db = QueueStats
<*> (int <$> TM.size (_webBlockHeaderStoreMemo $ view cutDbWebBlockHeaderStore db))
<*> pQueueSize (_webBlockPayloadStoreQueue $ view cutDbPayloadStore db)
<*> (int <$> TM.size (_webBlockPayloadStoreMemo $ view cutDbPayloadStore db))

-- Logging
loggCutId :: HasCutId c => LogFunction -> LogLevel -> c -> T.Text -> IO ()
loggCutId logFun l c msg = logFun @T.Text l $ "cut " <> cutIdToTextShort (_cutId c) <> ": " <> msg
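
For the "current cut is now …, diff:" message, the new catOverflowing helper keeps a single-chain diff on one line and indents a multi-chain diff under the header. A standalone sketch of that layout (the cut id and diff line below are hypothetical):

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Text as T

catOverflowing :: T.Text -> [T.Text] -> T.Text
catOverflowing x xs =
    if length xs == 1
        then T.unwords (x : xs)                          -- one changed chain: single log line
        else T.intercalate "\n" (x : map ("    " <>) xs) -- several: header plus indented lines

-- catOverflowing "current cut is now aBcDeF, diff:"
--     ["0 @ bSQgL5 (height 4810062) -> 0 @ kT9xQa (height 4810063)"]
--   == "current cut is now aBcDeF, diff: 0 @ bSQgL5 (height 4810062) -> 0 @ kT9xQa (height 4810063)"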