
Commit 2905679

fix: cleanup logs, remove unnecessary wait
JamesPiechota committed Jan 23, 2025
1 parent ad1f645 commit 2905679
Showing 3 changed files with 5 additions and 17 deletions.
7 changes: 4 additions & 3 deletions apps/arweave/src/ar_chunk_storage.erl
@@ -597,14 +597,15 @@ do_prepare_replica_2_9(State) ->
             %%
             %% In practice we only expect pending writes to be a problem in tests. It can
             %% hypothetically happen in production but is unlikely.
+            ?LOG_DEBUG([{event, prepare_replica_2_9_slice_changed}, {store_id, StoreID},
+                {bucket_end_offset, BucketEndOffset},
+                {previous_slice_index, PreviousSliceIndex},
+                {slice_index, SliceIndex}]),
             ar_entropy_storage:is_ready(StoreID);
         _ ->
             ok
     end,
 
-    %% Only block here when we start a new slice index
-    true = ar_entropy_storage:is_ready(StoreID),
-
     CheckRangeEnd =
         case BucketEndOffset > PaddedRangeEnd of
             true ->
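The net effect of this hunk: do_prepare_replica_2_9/1 no longer blocks on the unconditional true = ar_entropy_storage:is_ready(StoreID) once per bucket; the readiness check now runs only in the branch where the slice index has changed, which is the only point where pending entropy writes matter. A readiness call of this shape is typically a synchronous gen_server call: because a gen_server drains its mailbox in order, the call is answered only after every previously enqueued write cast has been processed. A minimal sketch, assuming the entropy store is a gen_server registered per StoreID (name_for/1 and its naming scheme are hypothetical, not ar_entropy_storage's actual code):

    %% Hypothetical sketch of a blocking readiness check. The synchronous
    %% call is only answered after all earlier casts (pending writes) have
    %% been handled, which is the "wait for pending writes" behavior the
    %% comment in the hunk describes.
    is_ready(StoreID) ->
        gen_server:call(name_for(StoreID), is_ready, infinity).

    %% Hypothetical registered-name lookup, assuming StoreID is a string.
    name_for(StoreID) ->
        list_to_atom("ar_entropy_storage_" ++ StoreID).

    %% In the server callback module:
    handle_call(is_ready, _From, State) ->
        {reply, true, State}.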
1 change: 1 addition & 0 deletions apps/arweave/src/ar_device_lock.erl
@@ -294,6 +294,7 @@ log_device_locks(State) ->
             sync -> sync;
             {prepare, StoreID} -> prepare;
             {repack, StoreID} -> repack;
+            {prepare_and_repack, StoreID} -> prepare_and_repack;
             _ -> paused
         end,
     ?LOG_INFO([{event, device_lock_status}, {device, Device}, {store_id, StoreID}, {status, Status}])
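Before this one-line addition, a device holding a combined prepare_and_repack lock fell through to the catch-all clause and was logged with status paused even though it was actively working; the new clause reports it accurately. Note that StoreID is already bound when the case runs, so each tuple clause only matches a lock owned by that same store, and a lock held for a different store still reports paused. A standalone sketch of the mapping (lock_status/2 is a hypothetical helper; the clauses are the ones from the diff):

    %% Hypothetical standalone version of the status mapping in
    %% log_device_locks/1. With StoreID bound, {prepare, StoreID} only
    %% matches when the lock belongs to this store; any other store's
    %% lock falls through to paused.
    lock_status(DeviceLock, StoreID) ->
        case DeviceLock of
            sync -> sync;
            {prepare, StoreID} -> prepare;
            {repack, StoreID} -> repack;
            {prepare_and_repack, StoreID} -> prepare_and_repack;
            _ -> paused
        end.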
14 changes: 0 additions & 14 deletions apps/arweave/src/ar_repack.erl
@@ -216,15 +216,6 @@ send_chunk_for_repacking(AbsoluteOffset, ChunkMeta, Args) ->
     PaddedOffset = ar_block:get_chunk_padded_offset(AbsoluteOffset),
     {ChunkDataKey, TXRoot, DataRoot, TXPath,
         RelativeOffset, ChunkSize} = ChunkMeta,
-    ?LOG_DEBUG([{event, send_chunk_for_repacking},
-        {tags, [repack_in_place]},
-        {pid, self()},
-        {storage_module, StoreID},
-        {end_offset, AbsoluteOffset},
-        {padded_end_offset, PaddedOffset},
-        {bucket_start_offset, ar_chunk_storage:get_chunk_bucket_start(AbsoluteOffset)},
-        {chunk_size, ChunkSize},
-        {required_packing, ar_serialize:encode_packing(RequiredPacking, true)}]),
     case ar_sync_record:is_recorded(PaddedOffset, ar_data_sync, StoreID) of
         {true, unpacked_padded} ->
             %% unpacked_padded is a special internal packing used
@@ -341,11 +332,6 @@ chunk_repacked(ChunkArgs, Args, StoreID, FileIndex, IsPrepared, RewardAddr) ->
         StoreID, FileIndex, IsPrepared, RewardAddr),
     case StoreResults of
         {ok, FileIndex2, NewPacking} ->
-            ?LOG_DEBUG([{event, ar_chunk_storage_packed}, {e, PaddedEndOffset},
-                {s, StartOffset}, {store_id, StoreID},
-                {chunk_size, ChunkSize},
-                {requested_packing, ar_serialize:encode_packing(Packing, true)},
-                {stored_packing, ar_serialize:encode_packing(NewPacking, true)}]),
             ar_sync_record:add_async(repacked_chunk,
                 PaddedEndOffset, StartOffset,
                 NewPacking, ar_data_sync, StoreID),
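Both deletions remove per-chunk debug logging from the repack hot path; the control flow of send_chunk_for_repacking/3 and chunk_repacked/6 is unchanged. What remains in the success branch is the asynchronous sync-record update: ar_sync_record:add_async/6 returns without waiting, so the repack worker is never blocked on bookkeeping. A minimal sketch of what an add_async-style API can look like (hypothetical implementation, assuming a per-store gen_server; this is not ar_sync_record's actual code):

    %% Hypothetical sketch of an asynchronous record update. The cast
    %% returns immediately; the owning server applies the interval update
    %% in the background.
    add_async(Event, EndOffset, StartOffset, Packing, ID, StoreID) ->
        gen_server:cast(name_for(StoreID),
            {Event, EndOffset, StartOffset, Packing, ID}).

    %% Hypothetical registered-name lookup, assuming StoreID is a string.
    name_for(StoreID) ->
        list_to_atom("ar_sync_record_" ++ StoreID).

    %% In the server callback module; the interval bookkeeping itself is
    %% elided.
    handle_cast({repacked_chunk, _EndOffset, _StartOffset, _Packing, _ID}, State) ->
        {noreply, State}.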
