finish half completed interner documentation (#13)
* finish half completed interner documentation

* more fly-by and very minor improvements to the documentation.

* Final fix to new_for_token_source documentation

---------

Co-authored-by: Sebastian Thiel <[email protected]>
loftyinclination and Byron authored Dec 9, 2024
1 parent 5c6534c commit fd5913f
Showing 2 changed files with 9 additions and 9 deletions.
14 changes: 7 additions & 7 deletions src/intern.rs
@@ -94,7 +94,7 @@ impl<T: Eq + Hash> InternedInput<T> {
}
}

-/// A hastable based interner that allows
+/// An interner that allows for fast access of tokens produced by a [`TokenSource`].
#[derive(Default)]
pub struct Interner<T: Hash + Eq> {
tokens: Vec<T>,
@@ -103,13 +103,13 @@ pub struct Interner<T: Hash + Eq> {
}

impl<T: Hash + Eq> Interner<T> {
-/// Create an Interner with an intial capacity calculated by calling
-/// [`estimate_tokens`](crate::intern::TokenSource::estimate_tokens) methods of `before` and `after`
+/// Create an Interner with an initial capacity calculated by summing the results of calling
+/// [`estimate_tokens`](crate::intern::TokenSource::estimate_tokens) methods of `before` and `after`.
pub fn new_for_token_source<S: TokenSource<Token = T>>(before: &S, after: &S) -> Self {
Self::new(before.estimate_tokens() as usize + after.estimate_tokens() as usize)
}

-/// Create an Interner with inital capacity `capacity`.
+/// Create an Interner with initial capacity `capacity`.
pub fn new(capacity: usize) -> Interner<T> {
Interner {
tokens: Vec::with_capacity(capacity),
@@ -118,13 +118,13 @@ impl<T: Hash + Eq> Interner<T> {
}
}

-/// Remove all interned tokens
+/// Remove all interned tokens.
pub fn clear(&mut self) {
self.table.clear();
self.tokens.clear();
}

-/// Intern `token` and return a the interned integer
+/// Intern `token` and return a the interned integer.
pub fn intern(&mut self, token: T) -> Token {
let hash = self.hasher.hash_one(&token);
match self.table.entry(
@@ -147,7 +147,7 @@ impl<T: Hash + Eq> Interner<T> {
self.tokens.len() as u32
}

-/// Erases `first_erased_token` and any tokens interned afterwards from the interner.
+/// Erases `first_erased_token` and any tokens interned afterward from the interner.
pub fn erase_tokens_after(&mut self, first_erased_token: Token) {
assert!(first_erased_token.0 <= self.tokens.len() as u32);
let retained = first_erased_token.0 as usize;
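The hunks above only change doc comments, but as a quick orientation, here is a minimal usage sketch of the `Interner` API they describe. It is not part of this commit: the crate path `imara_diff::intern` and the assumption that `Token` can be compared with `==` are mine.

```rust
// Hedged sketch, not part of this commit. Assumes the items documented above
// are reachable as `imara_diff::intern::{Interner, Token}` and that `&str`
// acts as a line-based `TokenSource`, as the sources.rs diff below suggests.
use imara_diff::intern::{Interner, Token};

fn main() {
    let before = "a\nb\nc\n";
    let after = "a\nc\nd\n";

    // Capacity is the sum of `estimate_tokens()` for both sources, as the
    // `new_for_token_source` doc comment now spells out.
    let mut interner: Interner<&str> = Interner::new_for_token_source(&before, &after);

    // Interning the same token twice yields the same interned integer.
    let first: Token = interner.intern("a\n");
    let again: Token = interner.intern("a\n");
    assert!(first == again); // assumes `Token` implements `PartialEq`

    // `erase_tokens_after` drops `first` and everything interned after it;
    // `clear` then removes whatever is left.
    interner.erase_tokens_after(first);
    interner.clear();
}
```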
4 changes: 2 additions & 2 deletions src/sources.rs
@@ -40,7 +40,7 @@ pub fn byte_lines(data: &[u8]) -> ByteLines<'_, false> {
ByteLines(data)
}

-/// By default a line diff is produced for a string
+/// By default, a line diff is produced for a string
impl<'a> TokenSource for &'a str {
type Token = &'a str;

@@ -55,7 +55,7 @@ impl<'a> TokenSource for &'a str {
}
}

-/// By default a line diff is produced for a bytes
+/// By default, a line diff is produced for a bytes
impl<'a> TokenSource for &'a [u8] {
type Token = Self;
type Tokenizer = ByteLines<'a, false>;
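These two impls are why plain `&str` and `&[u8]` inputs tokenize into lines with no extra setup. A hedged sketch follows; `InternedInput::new` and its public `before`/`interner` fields are not shown in this diff and are assumed from the rest of the crate.

```rust
// Hedged sketch, not part of this commit. `byte_lines` comes from the hunk
// header above; `InternedInput::new` and the `before` field are assumptions
// about the surrounding crate, not confirmed by this diff.
use imara_diff::intern::InternedInput;
use imara_diff::sources::byte_lines;

fn main() {
    // `&str` is a line-based TokenSource by default, so every line of the
    // two inputs becomes one interned token.
    let input = InternedInput::new("fn main() {}\n", "fn main() {\n    println!();\n}\n");
    println!("{} tokens on the `before` side", input.before.len());

    // For raw bytes, `byte_lines` builds the equivalent line tokenizer.
    let _lines = byte_lines(b"line one\nline two\n");
}
```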
