From e49acf27e3d0d1639346769790d3cbe5dbe8d075 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ond=C5=99ej=20Huvar?=
Date: Thu, 7 Nov 2024 18:47:38 +0100
Subject: [PATCH] Extending HCTL syntax (#19)

* Add option to use hybrid operator names in formulas.

* Update CI.

* Update syntax details in readme.
---
 .github/workflows/build.yml    |  84 ++++-----
 Cargo.toml                     |   6 +-
 README.md                      |   4 +
 src/preprocessing/tokenizer.rs | 306 +++++++++++++++++++--------------
 4 files changed, 222 insertions(+), 178 deletions(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 676f572..c2791f0 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -1,7 +1,8 @@
 name: build
+
 # This should ensure that the workflow won't run on `dev-*` branches, but will
-# otherwise execute on any other branch and any pull request (including dev
-# branches).
+# otherwise execute on any other branch and any pull request (including PRs
+# from dev branches).
 on:
   push:
     branches-ignore:
@@ -16,21 +17,21 @@ env:
   # Make sure to update this from time to time.
   RUST_VERSION: "1.77.0"
 jobs:
-  # Check formatting
+  # Checks syntax formatting.
   fmt:
     name: Rustfmt
     runs-on: ubuntu-latest
     env:
       RUSTFLAGS: "-D warnings"
     steps:
-      - name: Checkout.
-        uses: actions/checkout@v3
-      - name: Install Rust toolchain.
-        run: curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain ${{ env.RUST_VERSION }}
-      - name: Rust format check.
-        run: cargo fmt --all -- --check
+      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@stable
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+          components: rustfmt
+      - run: cargo fmt --all -- --check

-  # Run basic code validity check
+  # Run basic code validity check.
   check:
     needs: fmt
     name: Check
@@ -38,30 +39,28 @@ jobs:
     env:
       RUSTFLAGS: "-D warnings"
     steps:
-      - name: Checkout.
-        uses: actions/checkout@v3
-      - name: Install Rust toolchain.
-        run: curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain ${{ env.RUST_VERSION }}
-      - name: Rust code validity check.
-        run: cargo check
+      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@stable
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - run: cargo check --all-features

-  # Run all tests
+  # Run tests.
   test:
     needs: check
-    name: Test Suite
+    name: Test Suite (linux)
     runs-on: ubuntu-latest
     env:
       RUSTFLAGS: "-D warnings"
     steps:
-      - uses: actions/checkout@v3
-      - name: Checkout.
-        uses: actions/checkout@v3
-      - name: Install Rust toolchain.
-        run: curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain ${{ env.RUST_VERSION }}
-      - name: Run tests.
-        run: cargo test --all-features
+      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@stable
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+          components: rustfmt
+      - run: cargo test --all-features

-  # Check code style
+  # Checks code style.
   clippy:
     needs: check
     name: Clippy
@@ -69,12 +68,12 @@ jobs:
     env:
       RUSTFLAGS: "-D warnings"
     steps:
-      - name: Checkout.
-        uses: actions/checkout@v3
-      - name: Install Rust toolchain.
-        run: curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain ${{ env.RUST_VERSION }}
-      - name: Run clippy.
-        run: cargo clippy
+      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@stable
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+          components: clippy
+      - run: cargo clippy --all-features

   # Compute code coverage
   codecov:
@@ -82,20 +81,21 @@ jobs:
     name: Code coverage
     runs-on: ubuntu-latest
     steps:
-      - name: Checkout.
-        uses: actions/checkout@v3
-      - name: Install Rust toolchain.
-        run: curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain ${{ env.RUST_VERSION }}
-      - name: Setup cargo-tarpaulin.
-        run: cargo install cargo-tarpaulin
-      - name: Run tarpaulin to compute coverage.
-        run: cargo tarpaulin --verbose --lib --examples --all-features --out xml
+      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@stable
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      # Install action using cargo-binstall, which is faster because we don't have to compile tarpaulin every time.
+      - uses: taiki-e/install-action@v2
+        with:
+          tool: cargo-tarpaulin
+      - run: cargo tarpaulin --verbose --lib --examples --all-features --out xml
       - name: Upload to codecov.io
-        uses: codecov/codecov-action@v1.0.2
+        uses: codecov/codecov-action@v4
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
       - name: Archive code coverage results
-        uses: actions/upload-artifact@v1
+        uses: actions/upload-artifact@v4
         with:
           name: code-coverage-report
           path: cobertura.xml
diff --git a/Cargo.toml b/Cargo.toml
index d0957ad..aa5aa9e 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "biodivine-hctl-model-checker"
-version = "0.3.0"
+version = "0.3.1"
 authors = ["Ondřej Huvar ", "Samuel Pastva "]
 edition = "2021"
 description = "Library for symbolic HCTL model checking on partially defined Boolean networks."
@@ -25,8 +25,8 @@ name = "convert-aeon-to-bnet"
 path = "src/bin/convert_aeon_to_bnet.rs"

 [dependencies]
-biodivine-lib-bdd = ">=0.5.7, <1.0.0"
-biodivine-lib-param-bn = ">=0.5.1, <1.0.0"
+biodivine-lib-bdd = ">=0.5.22, <1.0.0"
+biodivine-lib-param-bn = ">=0.5.13, <1.0.0"
 clap = { version = "4.1.4", features = ["derive"] }
 rand = "0.8.5"
 termcolor = "1.1.2"
diff --git a/README.md b/README.md
index 48d259e..36ffecd 100644
--- a/README.md
+++ b/README.md
@@ -83,6 +83,10 @@ We use the following syntax:
 * forall x: `V{x}:`
 * parentheses: `(`, `)`

+We also allow to specify the hybrid operators using their names (prefixed by backslash): `\bind`, `\jump`, `\exists`, `\forall`.
+You can use this syntax to write a formula like `\bind {x}: AG EF {x}`.
+Note that the default for serialization is the short format above.
+
 The operator precedence is following (the lower, the stronger):
 * unary operators (negation + temporal): 1
 * binary temporal operators: 2
diff --git a/src/preprocessing/tokenizer.rs b/src/preprocessing/tokenizer.rs
index 800a486..29d29f6 100644
--- a/src/preprocessing/tokenizer.rs
+++ b/src/preprocessing/tokenizer.rs
@@ -128,33 +128,60 @@ fn try_tokenize_recursive(
             }
         }
         '!' => {
-            // we will collect the variable name via inside helper function
+            // collect the variable name via inside helper function
             let (name, domain) =
                 collect_var_and_dom_from_operator(input_chars, '!', parse_wild_cards)?;
             output.push(HctlToken::Hybrid(HybridOp::Bind, name, domain));
         }
         // "3" can be either exist quantifier or part of some proposition
         '3' if !is_valid_in_name_optional(input_chars.peek()) => {
-            // we will collect the variable name via inside helper function
+            // collect the variable name via inside helper function
             let (name, domain) =
                 collect_var_and_dom_from_operator(input_chars, '3', parse_wild_cards)?;
             output.push(HctlToken::Hybrid(HybridOp::Exists, name, domain));
         }
         // "V" can be either forall quantifier or part of some proposition
         'V' if !is_valid_in_name_optional(input_chars.peek()) => {
-            // we will collect the variable name via inside helper function
+            // collect the variable name via inside helper function
            let (name, domain) =
                collect_var_and_dom_from_operator(input_chars, 'V', parse_wild_cards)?;
            output.push(HctlToken::Hybrid(HybridOp::Forall, name, domain));
         }
         '@' => {
-            // we will collect the variable name via inside helper function
+            // collect the variable name via inside helper function
             let (name, domain) = collect_var_and_dom_from_operator(input_chars, '@', false)?;
             if domain.is_some() {
                 return Err("Cannot specify domain after '@'.".to_string());
             }
             output.push(HctlToken::Hybrid(HybridOp::Jump, name, None));
         }
+        // long name for hybrid operators (\bind, \exists, \forall, \jump)
+        '\\' => {
+            // collect the name of the operator, and its variable/domain
+            let operator_name = collect_name(input_chars)?;
+            if &operator_name == "exists" {
+                let (name, domain) =
+                    collect_var_and_dom_from_operator(input_chars, '3', parse_wild_cards)?;
+                output.push(HctlToken::Hybrid(HybridOp::Exists, name, domain));
+            } else if operator_name == "forall" {
+                let (name, domain) =
+                    collect_var_and_dom_from_operator(input_chars, 'V', parse_wild_cards)?;
+                output.push(HctlToken::Hybrid(HybridOp::Forall, name, domain));
+            } else if operator_name == "bind" {
+                let (name, domain) =
+                    collect_var_and_dom_from_operator(input_chars, '!', parse_wild_cards)?;
+                output.push(HctlToken::Hybrid(HybridOp::Bind, name, domain));
+            } else if operator_name == "jump" {
+                let (name, domain) =
+                    collect_var_and_dom_from_operator(input_chars, '@', false)?;
+                if domain.is_some() {
+                    return Err("Cannot specify domain after '@' operator.".to_string());
+                }
+                output.push(HctlToken::Hybrid(HybridOp::Jump, name, None));
+            } else {
+                return Err(format!("Invalid hybrid operator `\\{operator_name}`."));
+            }
+        }
         ')' => {
             return if !top_level {
                 Ok(output)
@@ -368,97 +395,103 @@ mod tests {
     /// Test both some important and meaningful formulae and formulae that include wide
     /// range of operators.
     fn tokenize_valid_formulae() {
-        // formula for attractors
-        let valid1 = "!{x}: AG EF {x}".to_string();
-        let tokens1 = try_tokenize_formula(valid1).unwrap();
-        assert_eq!(
-            tokens1,
-            vec![
-                HctlToken::Hybrid(HybridOp::Bind, "x".to_string(), None),
-                HctlToken::Unary(UnaryOp::AG),
-                HctlToken::Unary(UnaryOp::EF),
-                HctlToken::Atom(Atomic::Var("x".to_string())),
-            ]
-        );
-
-        // formula for cyclic attractors
-        let valid2 = "!{x}: (AX (~{x} & AF {x}))".to_string();
-        let tokens2 = try_tokenize_formula(valid2).unwrap();
-        assert_eq!(
-            tokens2,
-            vec![
-                HctlToken::Hybrid(HybridOp::Bind, "x".to_string(), None),
-                HctlToken::Tokens(vec![
-                    HctlToken::Unary(UnaryOp::AX),
-                    HctlToken::Tokens(vec![
-                        HctlToken::Unary(UnaryOp::Not),
-                        HctlToken::Atom(Atomic::Var("x".to_string())),
-                        HctlToken::Binary(BinaryOp::And),
-                        HctlToken::Unary(UnaryOp::AF),
-                        HctlToken::Atom(Atomic::Var("x".to_string())),
-                    ]),
-                ]),
-            ]
-        );
-
-        // formula for bi-stability
-        let valid3 = "!{x}: 3{y}: (@{x}: ~{y} & AX {x}) & (@{y}: AX {y})".to_string();
-        let tokens3 = try_tokenize_formula(valid3).unwrap();
-        assert_eq!(
-            tokens3,
-            vec![
-                HctlToken::Hybrid(HybridOp::Bind, "x".to_string(), None),
-                HctlToken::Hybrid(HybridOp::Exists, "y".to_string(), None),
-                HctlToken::Tokens(vec![
-                    HctlToken::Hybrid(HybridOp::Jump, "x".to_string(), None),
-                    HctlToken::Unary(UnaryOp::Not),
-                    HctlToken::Atom(Atomic::Var("y".to_string())),
-                    HctlToken::Binary(BinaryOp::And),
-                    HctlToken::Unary(UnaryOp::AX),
-                    HctlToken::Atom(Atomic::Var("x".to_string())),
-                ]),
-                HctlToken::Binary(BinaryOp::And),
-                HctlToken::Tokens(vec![
-                    HctlToken::Hybrid(HybridOp::Jump, "y".to_string(), None),
-                    HctlToken::Unary(UnaryOp::AX),
-                    HctlToken::Atom(Atomic::Var("y".to_string())),
-                ]),
-            ]
-        );
+        // formula for attractors (both variants of hybrid operator syntax)
+        let formula = "!{x}: AG EF {x}".to_string();
+        let tokens = try_tokenize_formula(formula).unwrap();
+        let formula_v2 = "\\bind {x}: AG EF {x}".to_string();
+        let tokens_v2 = try_tokenize_formula(formula_v2).unwrap();
+        let expected_tokens = vec![
+            HctlToken::Hybrid(HybridOp::Bind, "x".to_string(), None),
+            HctlToken::Unary(UnaryOp::AG),
+            HctlToken::Unary(UnaryOp::EF),
+            HctlToken::Atom(Atomic::Var("x".to_string())),
+        ];
+        assert_eq!(tokens, expected_tokens);
+        assert_eq!(tokens_v2, expected_tokens);
+
+        // formula for cyclic attractors (both variants of hybrid operator syntax)
+        let formula = "!{x}: (AX (~{x} & AF {x}))".to_string();
+        let tokens = try_tokenize_formula(formula).unwrap();
+        let formula_v2 = "\\bind {x}: (AX (~{x} & AF {x}))".to_string();
+        let tokens_v2 = try_tokenize_formula(formula_v2).unwrap();
+        let expected_tokens = vec![
+            HctlToken::Hybrid(HybridOp::Bind, "x".to_string(), None),
+            HctlToken::Tokens(vec![
+                HctlToken::Unary(UnaryOp::AX),
+                HctlToken::Tokens(vec![
+                    HctlToken::Unary(UnaryOp::Not),
+                    HctlToken::Atom(Atomic::Var("x".to_string())),
+                    HctlToken::Binary(BinaryOp::And),
+                    HctlToken::Unary(UnaryOp::AF),
+                    HctlToken::Atom(Atomic::Var("x".to_string())),
+                ]),
+            ]),
+        ];
+        assert_eq!(tokens, expected_tokens);
+        assert_eq!(tokens_v2, expected_tokens);
+
+        // formula for bi-stability (both variants of hybrid operator syntax)
+        let formula = "!{x}: 3{y}: (@{x}: ~{y} & AX {x}) & (@{y}: AX {y})".to_string();
+        let tokens = try_tokenize_formula(formula).unwrap();
+        let formula_v2 =
+            "\\bind {x}: \\exists {y}: (\\jump {x}: ~{y} & AX {x}) & (\\jump {y}: AX {y})"
+                .to_string();
+        let tokens_v2 = try_tokenize_formula(formula_v2).unwrap();
+        let expected_tokens = vec![
+            HctlToken::Hybrid(HybridOp::Bind, "x".to_string(), None),
+            HctlToken::Hybrid(HybridOp::Exists, "y".to_string(), None),
+            HctlToken::Tokens(vec![
+                HctlToken::Hybrid(HybridOp::Jump, "x".to_string(), None),
+                HctlToken::Unary(UnaryOp::Not),
+                HctlToken::Atom(Atomic::Var("y".to_string())),
+                HctlToken::Binary(BinaryOp::And),
+                HctlToken::Unary(UnaryOp::AX),
+                HctlToken::Atom(Atomic::Var("x".to_string())),
+            ]),
+            HctlToken::Binary(BinaryOp::And),
+            HctlToken::Tokens(vec![
+                HctlToken::Hybrid(HybridOp::Jump, "y".to_string(), None),
+                HctlToken::Unary(UnaryOp::AX),
+                HctlToken::Atom(Atomic::Var("y".to_string())),
+            ]),
+        ];
+        assert_eq!(tokens, expected_tokens);
+        assert_eq!(tokens_v2, expected_tokens);

         // Formula with all kinds of binary operators, and terminals, including propositions
         // starting on A/E. Note that constants are treated as propositions at this point.
-        let valid4 = "(prop1 <=> PROP2 | false => 1) EU (0 AW A_prop ^ E_prop)".to_string();
-        let tokens4 = try_tokenize_formula(valid4).unwrap();
-        assert_eq!(
-            tokens4,
-            vec![
-                HctlToken::Tokens(vec![
-                    HctlToken::Atom(Atomic::Prop("prop1".to_string())),
-                    HctlToken::Binary(BinaryOp::Iff),
-                    HctlToken::Atom(Atomic::Prop("PROP2".to_string())),
-                    HctlToken::Binary(BinaryOp::Or),
-                    HctlToken::Atom(Atomic::Prop("false".to_string())),
-                    HctlToken::Binary(BinaryOp::Imp),
-                    HctlToken::Atom(Atomic::Prop("1".to_string())),
-                ]),
-                HctlToken::Binary(BinaryOp::EU),
-                HctlToken::Tokens(vec![
-                    HctlToken::Atom(Atomic::Prop("0".to_string())),
-                    HctlToken::Binary(BinaryOp::AW),
-                    HctlToken::Atom(Atomic::Prop("A_prop".to_string())),
-                    HctlToken::Binary(BinaryOp::Xor),
-                    HctlToken::Atom(Atomic::Prop("E_prop".to_string())),
-                ]),
-            ]
-        );
+        let formula = "(prop1 <=> PROP2 | false => 1) EU (0 AW A_prop ^ E_prop)".to_string();
+        let tokens = try_tokenize_formula(formula).unwrap();
+        let expected_tokens = vec![
+            HctlToken::Tokens(vec![
+                HctlToken::Atom(Atomic::Prop("prop1".to_string())),
+                HctlToken::Binary(BinaryOp::Iff),
+                HctlToken::Atom(Atomic::Prop("PROP2".to_string())),
+                HctlToken::Binary(BinaryOp::Or),
+                HctlToken::Atom(Atomic::Prop("false".to_string())),
+                HctlToken::Binary(BinaryOp::Imp),
+                HctlToken::Atom(Atomic::Prop("1".to_string())),
+            ]),
+            HctlToken::Binary(BinaryOp::EU),
+            HctlToken::Tokens(vec![
+                HctlToken::Atom(Atomic::Prop("0".to_string())),
+                HctlToken::Binary(BinaryOp::AW),
+                HctlToken::Atom(Atomic::Prop("A_prop".to_string())),
+                HctlToken::Binary(BinaryOp::Xor),
+                HctlToken::Atom(Atomic::Prop("E_prop".to_string())),
+            ]),
+        ];
+        assert_eq!(tokens, expected_tokens);
     }

     #[test]
     /// Test tokenization process on HCTL formula with several whitespaces.
     fn tokenize_with_whitespaces() {
-        let valid_formula = " ! {x} : AG EF {x} ";
-        assert!(try_tokenize_formula(valid_formula.to_string()).is_ok())
+        let valid_formula = " 3 {x} : @ {x} : AG EF {x} ";
+        assert!(try_tokenize_formula(valid_formula.to_string()).is_ok());
+
+        let valid_formula = " \\exists {x} : \\jump {x} : AG EF {x} ";
+        assert!(try_tokenize_formula(valid_formula.to_string()).is_ok());
     }

     #[test]
@@ -467,6 +500,7 @@ mod tests {
     fn tokenize_invalid_formulae() {
         let invalid_formulae = vec![
             "!{x}: AG EF {x<&}",
+            "\\bind {x}: A* EF {x}",
             "!{x}: A* EF {x}",
             "!{x}: AG E* {x}",
             "!{x}: AG EF {}",
@@ -475,6 +509,7 @@ mod tests {
             "!EF p1",
             "{x}: AG EF {x}",
             "V{x} AG EF {x}",
+            "\\forall {x} AG EF {x}",
             "!{x}: AG EX {x} $",
             "!{x}: # AG EF {x}",
             "!{x}: AG* EF {x}",
@@ -495,60 +530,61 @@ mod tests {
     /// Test tokenization process on several extended HCTL formulae containing
     /// `wild-card propositions` or `variable domains`.
     fn tokenize_extended_formulae() {
-        let formula1 = "p & %p%";
+        let formula = "p & %p%";
         // tokenizer for standard HCTL should fail, for extended succeed
-        assert!(try_tokenize_formula(formula1.to_string()).is_err());
-        assert!(try_tokenize_extended_formula(formula1.to_string()).is_ok());
-        let tokens = try_tokenize_extended_formula(formula1.to_string()).unwrap();
-        assert_eq!(
-            tokens,
-            vec![
-                HctlToken::Atom(Atomic::Prop("p".to_string())),
-                HctlToken::Binary(BinaryOp::And),
-                HctlToken::Atom(Atomic::WildCardProp("p".to_string())),
-            ]
-        );
-
-        let formula2 = "!{x}: (@{x}: {x} & %s%)";
-        assert!(try_tokenize_formula(formula2.to_string()).is_err());
-        assert!(try_tokenize_extended_formula(formula2.to_string()).is_ok());
-        let tokens = try_tokenize_extended_formula(formula2.to_string()).unwrap();
-        assert_eq!(
-            tokens,
-            vec![
-                HctlToken::Hybrid(HybridOp::Bind, "x".to_string(), None),
-                HctlToken::Tokens(vec![
-                    HctlToken::Hybrid(HybridOp::Jump, "x".to_string(), None),
-                    HctlToken::Atom(Atomic::Var("x".to_string())),
-                    HctlToken::Binary(BinaryOp::And),
-                    HctlToken::Atom(Atomic::WildCardProp("s".to_string())),
-                ]),
-            ]
-        );
-
-        let formula3 = "!{x} in %dom_x%: {x}";
-        assert!(try_tokenize_formula(formula3.to_string()).is_err());
-        assert!(try_tokenize_extended_formula(formula3.to_string()).is_ok());
-        let tokens = try_tokenize_extended_formula(formula3.to_string()).unwrap();
-        assert_eq!(
-            tokens,
-            vec![
-                HctlToken::Hybrid(HybridOp::Bind, "x".to_string(), Some("dom_x".to_string())),
-                HctlToken::Atom(Atomic::Var("x".to_string())),
-            ]
-        );
-
-        let formula4 = "!{x} in %dom_x%: %wild_card%";
-        assert!(try_tokenize_formula(formula4.to_string()).is_err());
-        assert!(try_tokenize_extended_formula(formula4.to_string()).is_ok());
-        let tokens = try_tokenize_extended_formula(formula4.to_string()).unwrap();
-        assert_eq!(
-            tokens,
-            vec![
-                HctlToken::Hybrid(HybridOp::Bind, "x".to_string(), Some("dom_x".to_string())),
-                HctlToken::Atom(Atomic::WildCardProp("wild_card".to_string())),
-            ]
-        );
+        assert!(try_tokenize_formula(formula.to_string()).is_err());
+        assert!(try_tokenize_extended_formula(formula.to_string()).is_ok());
+        let tokens = try_tokenize_extended_formula(formula.to_string()).unwrap();
+        let expected_tokens = vec![
+            HctlToken::Atom(Atomic::Prop("p".to_string())),
+            HctlToken::Binary(BinaryOp::And),
+            HctlToken::Atom(Atomic::WildCardProp("p".to_string())),
+        ];
+        assert_eq!(tokens, expected_tokens);
+
+        let formula = "V{x}: (@{x}: {x} & %s%)";
+        let formula_v2 = "\\forall {x}: (\\jump{x}: {x} & %s%)";
+        assert!(try_tokenize_formula(formula.to_string()).is_err());
+        assert!(try_tokenize_extended_formula(formula.to_string()).is_ok());
+        let tokens = try_tokenize_extended_formula(formula.to_string()).unwrap();
+        let tokens_v2 = try_tokenize_extended_formula(formula_v2.to_string()).unwrap();
+        let expected_tokens = vec![
+            HctlToken::Hybrid(HybridOp::Forall, "x".to_string(), None),
+            HctlToken::Tokens(vec![
+                HctlToken::Hybrid(HybridOp::Jump, "x".to_string(), None),
+                HctlToken::Atom(Atomic::Var("x".to_string())),
+                HctlToken::Binary(BinaryOp::And),
+                HctlToken::Atom(Atomic::WildCardProp("s".to_string())),
+            ]),
+        ];
+        assert_eq!(tokens, expected_tokens);
+        assert_eq!(tokens_v2, expected_tokens);
+
+        let formula = "3{x} in %dom_x%: {x}";
+        let formula_v2 = "\\exists {x} in %dom_x%: {x}";
+        assert!(try_tokenize_formula(formula.to_string()).is_err());
+        assert!(try_tokenize_extended_formula(formula.to_string()).is_ok());
+        let tokens = try_tokenize_extended_formula(formula.to_string()).unwrap();
+        let tokens_v2 = try_tokenize_extended_formula(formula_v2.to_string()).unwrap();
+        let expected_tokens = vec![
+            HctlToken::Hybrid(HybridOp::Exists, "x".to_string(), Some("dom_x".to_string())),
+            HctlToken::Atom(Atomic::Var("x".to_string())),
+        ];
+        assert_eq!(tokens, expected_tokens);
+        assert_eq!(tokens_v2, expected_tokens);
+
+        let formula = "!{x} in %dom_x%: %wild_card%";
+        let formula_v2 = "\\bind {x} in %dom_x%: %wild_card%";
+        assert!(try_tokenize_formula(formula.to_string()).is_err());
+        assert!(try_tokenize_extended_formula(formula.to_string()).is_ok());
+        let tokens = try_tokenize_extended_formula(formula.to_string()).unwrap();
+        let tokens_v2 = try_tokenize_extended_formula(formula_v2.to_string()).unwrap();
+        let expected_tokens = vec![
+            HctlToken::Hybrid(HybridOp::Bind, "x".to_string(), Some("dom_x".to_string())),
+            HctlToken::Atom(Atomic::WildCardProp("wild_card".to_string())),
+        ];
+        assert_eq!(tokens, expected_tokens);
+        assert_eq!(tokens_v2, expected_tokens);
     }

     #[test]
@@ -564,12 +600,16 @@ mod tests {
     fn tokenize_invalid_extended() {
         let invalid_formulae = vec![
             "!{x} in: AG EF {x}",
+            "\\bind {x} in: AG EF {x}",
             "!{x} i %d%: AG EF {x}",
+            "\\bind {x} i %d%: AG EF {x}",
             "!{x} in %%: AG EF {x}",
+            "\\bind {x} in %%: AG EF {x}",
             "!{x} %d%: AG EF {x}",
             "!{x} in abc: AG EF {x}",
             "!{} in %d%: AG EF {x}",
             "3{x}: @{x} in %s%: AG EF {x}",
+            "\\exists {x}: \\jump {x} in %s%: AG EF {x}",
             "%%",
             "%ddd %",
             "%ddd*%",