Skip to content

Commit

Permalink
Removed some back and forth casting
Browse files Browse the repository at this point in the history
  • Loading branch information
LucaCappelletti94 committed Sep 10, 2024
1 parent 1e0ab87 commit 7080d2a
Show file tree
Hide file tree
Showing 3 changed files with 24 additions and 24 deletions.
26 changes: 13 additions & 13 deletions src/composite_hash/gaps.rs
Original file line number Diff line number Diff line change
Expand Up @@ -149,14 +149,14 @@ impl<P: Precision, B: Bits> GapHash<P, B> {

#[inline]
/// Returns the number of bits necessary to encode the rank index.
const fn rank_index_bits() -> u8 {
P::EXPONENT
const fn rank_index_bits() -> usize {
P::EXPONENT as usize
+ match B::NUMBER_OF_BITS {
4 => 2,
5 => 3,
6 => 3,
_ => unreachable!(),
}
} as usize
}

#[inline]
Expand Down Expand Up @@ -210,7 +210,7 @@ impl<P: Precision, B: Bits> GapHash<P, B> {

for _ in 1..Self::rank_index_capacity() {
writer.write_bits(Self::rank_index_mask(), Self::rank_index_bits());
writer.write_bits(0, hash_bits);
writer.write_bits(0, usize::from(hash_bits));

debug_assert!(
writer.tell() <= Self::rank_index_total_size(usize::from(hash_bits)),
Expand Down Expand Up @@ -277,7 +277,7 @@ impl<P: Precision, B: Bits> GapHash<P, B> {
return;
}

let bucket_size = usize::from(hash_bits + Self::rank_index_bits());
let bucket_size = usize::from(hash_bits) + Self::rank_index_bits();
let hash_bucket_position = hash_bucket * bucket_size;

let hashes64 = unsafe {
Expand Down Expand Up @@ -444,7 +444,7 @@ impl<P: Precision, B: Bits> GapHash<P, B> {
let mut writer = BitWriter::new(hashes64);
writer.seek(hash_bucket_position);
writer.write_bits(bit_index as u64, Self::rank_index_bits());
writer.write_bits(hash, hash_bits);
writer.write_bits(hash, usize::from(hash_bits));

debug_assert!(
writer.tell() <= Self::rank_index_total_size(usize::from(hash_bits)),
Expand Down Expand Up @@ -589,8 +589,8 @@ impl<P: Precision, B: Bits> GapHash<P, B> {
}

#[inline]
fn uniform_coefficient(hash_bits: u8) -> u8 {
Self::b(hash_bits).1
fn uniform_coefficient(hash_bits: u8) -> usize {
usize::from(Self::b(hash_bits).1)
}

#[inline]
Expand Down Expand Up @@ -1040,7 +1040,7 @@ impl<P: Precision, B: Bits> CompositeHash for GapHash<P, B> {
"The writer tell must be 0 or rank index size if there is no previous value"
);

writer.write_bits(encoded_hash, hash_bits);
writer.write_bits(encoded_hash, usize::from(hash_bits));
}

// We check that practice matches theory:
Expand Down Expand Up @@ -1191,7 +1191,7 @@ impl<P: Precision, B: Bits> CompositeHash for GapHash<P, B> {
// We write the first hash explicitly, as otherwise it would be
// written in a very inefficient way.
let mut previous_hash = iter.next().unwrap();
writer.write_bits(previous_hash, target_hash_bits);
writer.write_bits(previous_hash, usize::from(target_hash_bits));

if Self::has_rank_index() {
Self::update_rank_index(hashes_8, target_hash_bits, 0, previous_hash);
Expand Down Expand Up @@ -1368,15 +1368,15 @@ struct BypassIter<'a> {
}

impl Iterator for BypassIter<'_> {
type Item = (u64, u8);
type Item = (u64, usize);

#[inline]
fn next(&mut self) -> Option<Self::Item> {
if self.bitstream.last_read_bit_position() >= self.bit_index {
return None;
}
let n_bits = core::cmp::min(64, self.bit_index - self.bitstream.last_read_bit_position());
Some((self.bitstream.read_bits(n_bits), n_bits as u8))
Some((self.bitstream.read_bits(n_bits), n_bits))
}
}

Expand Down Expand Up @@ -1455,7 +1455,7 @@ pub struct PrefixCodeIter<'a, P: Precision, B: Bits> {
previous_index: u64,
previous_hash_remainder: u64,
previous_uniform: u64,
uniform_coefficient: u8,
uniform_coefficient: usize,
#[cfg(test)]
iteration: usize,
_phantom: PhantomData<GapHash<P, B>>,
Expand Down
8 changes: 4 additions & 4 deletions src/composite_hash/gaps/bitreader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -104,8 +104,8 @@ impl<'a> BitReader<'a> {
}

#[inline]
pub fn read_rice(&mut self, b: u8) -> u64 {
(self.read_unary() << b) + self.read_bits(usize::from(b))
pub fn read_rice(&mut self, b: usize) -> u64 {
(self.read_unary() << b) + self.read_bits(b)
}
}

Expand All @@ -116,8 +116,8 @@ impl<'a> BitReader<'a> {
/// * `uniform_delta` - The uniform delta value to encode.
/// * `b1` - The rice coefficient to use for the uniform value.
/// * `geometric_minus_one` - The geometric value, minus one, to encode.
pub fn len_rice(uniform_delta: u64, b1: u8, geometric_minus_one: u64) -> usize {
usize::try_from((uniform_delta >> b1) + geometric_minus_one).unwrap() + 2 + usize::from(b1)
pub fn len_rice(uniform_delta: u64, b1: usize, geometric_minus_one: u64) -> usize {
usize::try_from((uniform_delta >> b1) + geometric_minus_one).unwrap() + 2 + b1
}

#[cfg(test)]
Expand Down
14 changes: 7 additions & 7 deletions src/composite_hash/gaps/bitwriter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -65,25 +65,25 @@ impl<'a> BitWriter<'a> {
}

#[inline]
pub(super) fn write_bits(&mut self, value: u64, n_bits: u8) -> usize {
pub(super) fn write_bits(&mut self, value: u64, n_bits: usize) -> usize {
debug_assert!(n_bits <= 64);
debug_assert!(self.space_left_in_buffer > 0);

if usize::from(n_bits) < self.space_left_in_buffer {
if n_bits < self.space_left_in_buffer {
self.buffer <<= n_bits;
self.buffer |= value & !(u64::MAX << n_bits as u32);
self.space_left_in_buffer -= usize::from(n_bits);
return usize::from(n_bits);
self.space_left_in_buffer -= n_bits;
return n_bits;
}

self.buffer = self.buffer << (self.space_left_in_buffer - 1) << 1;
self.buffer |= value << (64 - n_bits) >> (64 - self.space_left_in_buffer);
self.data[self.word_idx] = self.buffer.to_be();
self.word_idx += 1;

self.space_left_in_buffer += 64 - usize::from(n_bits);
self.space_left_in_buffer += 64 - n_bits;
self.buffer = value;
usize::from(n_bits)
n_bits
}

#[inline]
Expand Down Expand Up @@ -135,7 +135,7 @@ impl<'a> BitWriter<'a> {
&mut self,
uniform_delta: u64,
geometric_minus_one: u64,
b1: u8,
b1: usize,
) -> usize {
self.write_unary(uniform_delta >> b1)
+ usize::from(self.write_bits(uniform_delta, b1))
Expand Down

0 comments on commit 7080d2a

Please sign in to comment.