From 68f10ed355748d0d9557ca6f18b95b57f584ea05 Mon Sep 17 00:00:00 2001
From: Bnchi
Date: Sun, 20 Oct 2024 10:03:42 +0300
Subject: [PATCH] Fix clippy issues

---
 src/construct/content.rs  |  2 +-
 src/construct/document.rs | 65 ++++++++++++++++++---------------
 src/parser.rs             |  2 +-
 src/subtokenize.rs        |  6 ++--
 4 files changed, 34 insertions(+), 41 deletions(-)

diff --git a/src/construct/content.rs b/src/construct/content.rs
index 2c5d6e54..5ee03541 100644
--- a/src/construct/content.rs
+++ b/src/construct/content.rs
@@ -182,7 +182,7 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, message::
     let result = subtokenize(
         &mut tokenizer.events,
         tokenizer.parse_state,
-        &Some(Content::Content),
+        Some(&Content::Content),
     )?;
 
     Ok(Some(result))
diff --git a/src/construct/document.rs b/src/construct/document.rs
index c8e351a2..f27294e7 100644
--- a/src/construct/document.rs
+++ b/src/construct/document.rs
@@ -304,26 +304,24 @@ pub fn containers_after(tokenizer: &mut Tokenizer) -> State {
         != tokenizer.tokenize_state.document_container_stack.len();
 
     child.define_skip(tokenizer.point.clone());
-    match tokenizer.current {
-        // Note: EOL is part of data.
-        None => State::Retry(StateName::DocumentFlowEnd),
-        Some(_) => {
-            let current = tokenizer.events.len();
-            let previous = tokenizer.tokenize_state.document_data_index;
-            if let Some(previous) = previous {
-                tokenizer.events[previous].link.as_mut().unwrap().next = Some(current);
-            }
-            tokenizer.tokenize_state.document_data_index = Some(current);
-            tokenizer.enter_link(
-                Name::Data,
-                Link {
-                    previous,
-                    next: None,
-                    content: Content::Flow,
-                },
-            );
-            State::Retry(StateName::DocumentFlowInside)
+    if tokenizer.current.is_some() {
+        let current = tokenizer.events.len();
+        let previous = tokenizer.tokenize_state.document_data_index;
+        if let Some(previous) = previous {
+            tokenizer.events[previous].link.as_mut().unwrap().next = Some(current);
         }
+        tokenizer.tokenize_state.document_data_index = Some(current);
+        tokenizer.enter_link(
+            Name::Data,
+            Link {
+                previous,
+                next: None,
+                content: Content::Flow,
+            },
+        );
+        State::Retry(StateName::DocumentFlowInside)
+    } else {
+        State::Retry(StateName::DocumentFlowEnd)
     }
 }
 
@@ -450,23 +448,20 @@ pub fn flow_end(tokenizer: &mut Tokenizer) -> State {
         debug_assert!(result.is_ok(), "did not expect error when exiting");
     }
 
-    match tokenizer.current {
-        None => {
-            tokenizer.tokenize_state.document_continued = 0;
-            if let Err(message) = exit_containers(tokenizer, &Phase::Eof) {
-                return State::Error(message);
-            }
-            resolve(tokenizer);
-            State::Ok
-        }
-        Some(_) => {
-            tokenizer.tokenize_state.document_continued = 0;
-            tokenizer.tokenize_state.document_lazy_accepting_before =
-                document_lazy_continuation_current;
-            // Containers would only be interrupting if we’ve continued.
-            tokenizer.interrupt = false;
-            State::Retry(StateName::DocumentContainerExistingBefore)
+    if tokenizer.current.is_some() {
+        tokenizer.tokenize_state.document_continued = 0;
+        tokenizer.tokenize_state.document_lazy_accepting_before =
+            document_lazy_continuation_current;
+        // Containers would only be interrupting if we’ve continued.
+        tokenizer.interrupt = false;
+        State::Retry(StateName::DocumentContainerExistingBefore)
+    } else {
+        tokenizer.tokenize_state.document_continued = 0;
+        if let Err(message) = exit_containers(tokenizer, &Phase::Eof) {
+            return State::Error(message);
         }
+        resolve(tokenizer);
+        State::Ok
     }
 }
 
diff --git a/src/parser.rs b/src/parser.rs
index 9bcfa0ac..67f8f1f7 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -74,6 +74,6 @@ pub fn parse<'a>(
             return Ok((events, parse_state));
         }
 
-        result = subtokenize(&mut events, &parse_state, &None)?;
+        result = subtokenize(&mut events, &parse_state, None)?;
     }
 }
diff --git a/src/subtokenize.rs b/src/subtokenize.rs
index 467ca463..2c8d0e24 100644
--- a/src/subtokenize.rs
+++ b/src/subtokenize.rs
@@ -78,7 +78,7 @@ pub fn link_to(events: &mut [Event], previous: usize, next: usize) {
 pub fn subtokenize(
     events: &mut Vec<Event>,
     parse_state: &ParseState,
-    filter: &Option<Content>,
+    filter: Option<&Content>,
 ) -> Result<Subresult, message::Message> {
     let mut map = EditMap::new();
     let mut index = 0;
@@ -97,9 +97,7 @@ pub fn subtokenize(
             debug_assert_eq!(event.kind, Kind::Enter);
 
             // No need to enter linked events again.
-            if link.previous.is_none()
-                && (filter.is_none() || &link.content == filter.as_ref().unwrap())
-            {
+            if link.previous.is_none() && (filter.is_none() || link.content == *filter.unwrap()) {
                 // Index into `events` pointing to a chunk.
                 let mut link_index = Some(index);
                 // Subtokenizer.
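Reviewer note (not part of the patch): the signature change threaded through `content.rs`, `parser.rs`, and `subtokenize.rs` swaps `&Option<Content>` for `Option<&Content>`, the shape clippy suggests for reference-to-`Option` parameters (presumably its `ref_option` lint; the commit does not name it). `&Option<T>` forces every caller to materialize an owned `Option` just to lend it out, while `Option<&T>` lends the inner value directly. A minimal sketch of the before/after calling convention, loosely modeled on the filter check in `subtokenize.rs`; the `Content` enum and the `matches_old`/`matches_new` helpers are stand-ins, not the crate's real types:

```rust
// Illustration only: a stand-in for the crate's `Content` enum.
#[derive(Debug, PartialEq)]
enum Content {
    Flow,
    Content,
}

// Before: `&Option<Content>` makes call sites build an owned `Option`
// (`&Some(...)`, `&None`) just so the callee can borrow into it.
fn matches_old(filter: &Option<Content>, content: &Content) -> bool {
    filter.is_none() || filter.as_ref().unwrap() == content
}

// After: `Option<&Content>` borrows the inner value directly; call sites
// become `Some(&...)` or a plain `None`.
fn matches_new(filter: Option<&Content>, content: &Content) -> bool {
    filter.is_none() || filter.unwrap() == content
}

fn main() {
    let flow = Content::Flow;
    // Old style: a temporary `Option<Content>` exists at each call.
    assert!(!matches_old(&Some(Content::Content), &flow));
    assert!(matches_old(&None, &flow));
    // New style mirrors the patched call sites: `Some(&Content::Content)`
    // in content.rs, bare `None` in parser.rs.
    assert!(!matches_new(Some(&Content::Content), &flow));
    assert!(matches_new(None, &flow));
}
```

The practical win shows up in `parser.rs`: the unfiltered call becomes a bare `None` instead of `&None`, and no temporary `Option<Content>` has to be constructed at all.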
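The two `document.rs` hunks are behavior-preserving control-flow rewrites: a `match` on `tokenizer.current` whose arms are `None` and `Some(_)` becomes `if tokenizer.current.is_some() { … } else { … }` with the arms swapped. A tiny equivalence sketch, using hypothetical `old_style`/`new_style` functions rather than the crate's code:

```rust
// `match` with `None`/`Some(_)` arms versus `is_some()` with swapped
// branches: both functions return the same value for every input.
fn old_style(current: Option<u8>) -> &'static str {
    match current {
        None => "flow-end",
        Some(_) => "container-before",
    }
}

fn new_style(current: Option<u8>) -> &'static str {
    if current.is_some() {
        "container-before"
    } else {
        "flow-end"
    }
}

fn main() {
    for current in [None, Some(1)] {
        assert_eq!(old_style(current), new_style(current));
    }
}
```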