Skip to content

Commit

Permalink
feat(tests): add parser/tokenizer tests to increase code coverage
Browse files Browse the repository at this point in the history
  • Loading branch information
AurumTheEnd committed Mar 8, 2024
1 parent a639ff1 commit 1308044
Show file tree
Hide file tree
Showing 3 changed files with 153 additions and 1 deletion.
30 changes: 29 additions & 1 deletion src/parser/parse.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ fn priority_2_terminal(data: &[FinalToken]) -> Result<Expression<String>, ParseT

#[cfg(test)]
mod tests {
use crate::expressions::Expression::Literal;
use crate::expressions::Expression::{Constant, Literal};
use crate::parser::{tokenize, ParseError};
use crate::traits::SemanticEq;

Expand Down Expand Up @@ -152,6 +152,23 @@ mod tests {
Ok(())
}

#[test]
fn test_parentheses_naryor_naryand_constants_ok() -> Result<(), ParseError> {
    // "F", "0" and "False" all tokenize to the false constant, while the
    // parenthesized group of true-constant spellings parses as a nested n-ary AND.
    let tokens = tokenize("F | 0 | False | (T & 1 & True)")?;
    let parsed = parse_tokens(&tokens)?;

    let inner_and = Expression::n_ary_and(vec![Constant(true), Constant(true), Constant(true)]);
    let expected = Expression::n_ary_or(vec![
        Constant(false),
        Constant(false),
        Constant(false),
        inner_and,
    ]);

    assert!(parsed.semantic_eq(&expected));
    assert_eq!(parsed, expected);

    Ok(())
}

#[test]
fn test_priorities_naryor_naryand_ok() -> Result<(), ParseError> {
let input = tokenize("a | b | a & b & !c")?;
Expand All @@ -172,6 +189,17 @@ mod tests {
Ok(())
}

#[test]
fn test_terminal_and_emptyside_nok() -> Result<(), ParseError> {
    // A trailing binary operator leaves its right-hand side empty,
    // which the parser must reject.
    let tokens = tokenize("a & ")?;
    let error = parse_tokens(&tokens).expect_err("parsing should fail");

    assert_eq!(error, ParseTokensError::EmptySideOfOperator);

    Ok(())
}

#[test]
fn test_terminal_and_nok() -> Result<(), ParseError> {
let input = tokenize("a & b c")?;
Expand Down
11 changes: 11 additions & 0 deletions src/parser/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -220,4 +220,15 @@ mod tests {

assert!(!pattern.is_match(&builder))
}

#[test]
#[should_panic]
fn test_from_panics() {
    // An input that matches no known token pattern must make
    // `IntermediateToken::from` panic.
    let input = "abcdefgh";

    // Test sanity: the input must not be a recognized token pattern.
    // Deliberately an early `return` rather than `assert!`: with a bare
    // `#[should_panic]`, ANY panic satisfies the attribute, so a failing
    // sanity assert would itself make the test pass for the wrong reason.
    // Returning instead causes a proper test failure (no panic occurred).
    if IntermediateToken::all_token_patterns().contains(&input) {
        return;
    }

    let _ = IntermediateToken::from(input);
}
}
113 changes: 113 additions & 0 deletions src/parser/tokenize.rs
Original file line number Diff line number Diff line change
Expand Up @@ -182,6 +182,9 @@ fn peek_until_n(n: usize, input: &mut MultiPeek<Chars>, buffer: &mut String) ->
mod tests {
use super::FinalToken::*;
use super::*;
use crate::parser::error::TokenizeError::{
MissingClosingCurlyBrace, UnexpectedClosingCurlyBrace, UnexpectedClosingParenthesis,
};

#[test]
fn test_peek_n() {
Expand Down Expand Up @@ -585,6 +588,116 @@ mod tests {
Ok(())
}

#[test]
fn test_nospace_parenthesesnotclosed_minimal_nok() -> Result<(), TokenizeError> {
    // A lone opening parenthesis can never be balanced.
    let error = tokenize("(").expect_err("tokenization should fail");

    assert_eq!(error, MissingClosingParenthesis);

    Ok(())
}

#[test]
fn test_nospace_parenthesesnotclosed_nested_nok() -> Result<(), TokenizeError> {
    // Five opening vs. four closing parentheses: one group stays unclosed.
    let error = tokenize("(((()()))").expect_err("tokenization should fail");

    assert_eq!(error, MissingClosingParenthesis);

    Ok(())
}

#[test]
fn test_singlespace_parenthesesnotclosed_minimal_nok() -> Result<(), TokenizeError> {
    // Whitespace around the unmatched "(" must not mask the error.
    let error = tokenize(" ( ").expect_err("tokenization should fail");

    assert_eq!(error, MissingClosingParenthesis);

    Ok(())
}

#[test]
fn test_singlespace_parenthesesnotclosed_nested_nok() -> Result<(), TokenizeError> {
    // Space-separated nesting with one more "(" than ")" is still unbalanced.
    let error = tokenize(" ( ( ( ( ) ( ) ) )").expect_err("tokenization should fail");

    assert_eq!(error, MissingClosingParenthesis);

    Ok(())
}

#[test]
fn test_nospace_parenthesesnotopened_minimal_nok() -> Result<(), TokenizeError> {
    // A closing parenthesis with no matching opener is rejected immediately.
    let error = tokenize(")").expect_err("tokenization should fail");

    assert_eq!(error, UnexpectedClosingParenthesis);

    Ok(())
}

#[test]
fn test_nospace_parenthesesnotopened_nested_nok() -> Result<(), TokenizeError> {
    // Four opening vs. five closing parentheses: the last ")" has no opener.
    let error = tokenize("(((()))))").expect_err("tokenization should fail");

    assert_eq!(error, UnexpectedClosingParenthesis);

    Ok(())
}

#[test]
fn test_singlespace_parenthesesnotopened_minimal_nok() -> Result<(), TokenizeError> {
    // Whitespace around the stray ")" must not mask the error.
    let error = tokenize(" ) ").expect_err("tokenization should fail");

    assert_eq!(error, UnexpectedClosingParenthesis);

    Ok(())
}

#[test]
fn test_singlespace_parenthesesnotopened_nested_nok() -> Result<(), TokenizeError> {
    // Space-separated nesting with one extra ")" at the end is unbalanced.
    let error = tokenize(" ( ( ( ( ) ) ) ) )").expect_err("tokenization should fail");

    assert_eq!(error, UnexpectedClosingParenthesis);

    Ok(())
}

#[test]
fn test_bracenotopened_nok() -> Result<(), TokenizeError> {
    // A closing curly brace with no preceding "{" is rejected.
    let error = tokenize("}").expect_err("tokenization should fail");

    assert_eq!(error, UnexpectedClosingCurlyBrace);

    Ok(())
}

#[test]
fn test_bracenotclosed_empty_nok() -> Result<(), TokenizeError> {
    // An opening curly brace with no closing "}" is rejected even when
    // nothing follows it.
    let error = tokenize("{").expect_err("tokenization should fail");

    assert_eq!(error, MissingClosingCurlyBrace);

    Ok(())
}

#[test]
fn test_bracenotclosed_nonempty_nok() -> Result<(), TokenizeError> {
    // The unclosed "{" must be detected even with content after it.
    let error = tokenize("{abc&&").expect_err("tokenization should fail");

    assert_eq!(error, MissingClosingCurlyBrace);

    Ok(())
}

#[test]
fn test_operator_boundary_andword_space_charvar_ok() -> Result<(), TokenizeError> {
let actual = tokenize("a and b")?;
Expand Down

0 comments on commit 1308044

Please sign in to comment.