diff --git a/.aztec-sync-commit b/.aztec-sync-commit index 6fcc33fc95b..58a3e5704fb 100644 --- a/.aztec-sync-commit +++ b/.aztec-sync-commit @@ -1 +1 @@ -a7f8d9670902dfa4856b8514ce5eb4ad031a44fc +b60a39d989b77702a89ebb24047e5b2419915dc3 diff --git a/.github/actions/download-nargo/action.yml b/.github/actions/download-nargo/action.yml index 49c8b1d0bd8..b727520978a 100644 --- a/.github/actions/download-nargo/action.yml +++ b/.github/actions/download-nargo/action.yml @@ -4,15 +4,15 @@ description: Downloads the nargo binary from an artifact and adds it to the path runs: using: composite steps: - - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo - - name: Set nargo on PATH - shell: bash - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + - name: Set nargo on PATH + shell: bash + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH diff --git a/.github/scripts/merge-bench-reports.sh b/.github/scripts/merge-bench-reports.sh deleted file mode 100755 index 23a62874148..00000000000 --- a/.github/scripts/merge-bench-reports.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -set -eu - -echo "Merging reports" - -REPORT_NAME=$1 -NAME_PLURAL=""$REPORT_NAME"s" - -combined_reports="[]" - -# Iterate over each report and merge them -for report in ./reports/*; do - # The report is saved under ./$REPORT_NAME_{ matrix_report }/$REPORT_NAME_{ matrix_report }.json - FILE_PATH=$(echo $(ls $report)) - - # Extract the $NAME_PLURAL array from each report and merge it - combined_reports=$(jq '[."'"$NAME_PLURAL"'"[]] + '"$combined_reports" <<< "$(cat "$report/$FILE_PATH")") -done - -combined_reports=$(jq '[."'$NAME_PLURAL'"[]] + '"$combined_reports" <<< "$(cat ./$REPORT_NAME.json)") - -# Wrap the merged memory reports into a new object as to keep the $NAME_PLURAL key -final_report="{\"$NAME_PLURAL\": $combined_reports}" - -echo "$final_report" > $REPORT_NAME.json - -cat $REPORT_NAME.json \ No newline at end of file diff --git a/.github/scripts/playwright-install.sh b/.github/scripts/playwright-install.sh index 3e65219346d..d22b4c3d1a6 100755 --- a/.github/scripts/playwright-install.sh +++ b/.github/scripts/playwright-install.sh @@ -1,4 +1,4 @@ #!/bin/bash set -eu -npx -y playwright@1.50 install --with-deps +npx -y playwright@1.49 install --with-deps diff --git a/.github/workflows/reports.yml b/.github/workflows/reports.yml index 1ac775591a6..10169f4249c 100644 --- a/.github/workflows/reports.yml +++ b/.github/workflows/reports.yml @@ -292,7 +292,7 @@ jobs: sparse-checkout-cone-mode: false - name: Download nargo binary - uses: scripts/.github/actions/download-nargo + uses: ./scripts/.github/actions/download-nargo - name: Checkout uses: actions/checkout@v4 @@ -318,6 +318,18 @@ jobs: ./compilation_report.sh 1 ${{ matrix.project.num_runs }} env: FLAGS: ${{ matrix.project.flags }} + + - name: Check compilation time limit + run: | + TIME=$(jq '.[0].value' ./test-repo/${{ matrix.project.path }}/compilation_report.json) + TIME_LIMIT=80 + if awk 'BEGIN{exit !(ARGV[1]>ARGV[2])}' "$TIME" "$TIME_LIMIT"; then + # Don't bump this timeout without understanding why this has happened and confirming that you're not the cause. + echo "Failing due to compilation exceeding timeout..." 
+ echo "Timeout: "$TIME_LIMIT"s" + echo "Compilation took: "$TIME"s". + exit 1 + fi - name: Generate execution report working-directory: ./test-repo/${{ matrix.project.path }} @@ -326,6 +338,19 @@ jobs: mv /home/runner/work/noir/noir/scripts/test_programs/execution_report.sh ./execution_report.sh mv /home/runner/work/noir/noir/scripts/test_programs/parse_time.sh ./parse_time.sh ./execution_report.sh 1 ${{ matrix.project.num_runs }} + + - name: Check execution time limit + if: ${{ !matrix.project.cannot_execute }} + run: | + TIME=$(jq '.[0].value' ./test-repo/${{ matrix.project.path }}/execution_report.json) + TIME_LIMIT=60 + if awk 'BEGIN{exit !(ARGV[1]>ARGV[2])}' "$TIME" "$TIME_LIMIT"; then + # Don't bump this timeout without understanding why this has happened and confirming that you're not the cause. + echo "Failing due to execution exceeding timeout..." + echo "Timeout: "$TIME_LIMIT"s" + echo "Execution took: "$TIME"s". + exit 1 + fi - name: Move compilation report id: compilation_report @@ -390,7 +415,7 @@ jobs: with: path: scripts sparse-checkout: | - ./.github/actions/download-nargo/action.yml + .github/actions/download-nargo/action.yml test_programs/memory_report.sh test_programs/parse_memory.sh sparse-checkout-cone-mode: false @@ -416,12 +441,37 @@ jobs: env: FLAGS: ${{ matrix.project.flags }} + - name: Check compilation memory limit + run: | + MEMORY=$(jq '.[0].value' ./test-repo/${{ matrix.project.path }}/compilation_memory_report.json) + MEMORY_LIMIT=6000 + if awk 'BEGIN{exit !(ARGV[1]>ARGV[2])}' "$MEMORY" "$MEMORY_LIMIT"; then + # Don't bump this limit without understanding why this has happened and confirming that you're not the cause. + echo "Failing due to compilation exceeding memory limit..." + echo "Limit: "$MEMORY_LIMIT"MB" + echo "Compilation took: "$MEMORY"MB". + exit 1 + fi + - name: Generate execution memory report working-directory: ./test-repo/${{ matrix.project.path }} if: ${{ !matrix.project.cannot_execute }} run: | ./memory_report.sh 1 1 + - name: Check execution memory limit + if: ${{ !matrix.project.cannot_execute }} + run: | + MEMORY=$(jq '.[0].value' ./test-repo/${{ matrix.project.path }}/memory_report.json) + MEMORY_LIMIT=1300 + if awk 'BEGIN{exit !(ARGV[1]>ARGV[2])}' "$MEMORY" "$MEMORY_LIMIT"; then + # Don't bump this limit without understanding why this has happened and confirming that you're not the cause. + echo "Failing due to execution exceeding memory limit..." + echo "Limit: "$MEMORY_LIMIT"MB" + echo "Execution took: "$MEMORY"MB". + exit 1 + fi + - name: Move compilation report id: compilation_mem_report shell: bash @@ -442,7 +492,6 @@ jobs: - name: Move execution report id: execution_mem_report if: ${{ !matrix.project.cannot_execute }} - shell: bash run: | PACKAGE_NAME=${{ matrix.project.path }} PACKAGE_NAME=$(basename $PACKAGE_NAME) @@ -477,21 +526,22 @@ jobs: uses: actions/download-artifact@v4 with: name: in_progress_compilation_report + path: ./reports - name: Download matrix compilation reports uses: actions/download-artifact@v4 with: pattern: compilation_report_* path: ./reports + merge-multiple: true - name: Merge compilation reports using jq run: | - mv ./.github/scripts/merge-bench-reports.sh merge-bench-reports.sh - ./merge-bench-reports.sh compilation_report - jq ".compilation_reports | map({name: .artifact_name, value: (.time[:-1] | tonumber), unit: \"s\"}) " ./compilation_report.json > time_bench.json + # Github actions seems to not expand "**" in globs by default. + shopt -s globstar + jq --slurp '. 
| flatten' ./reports/* | tee time_bench.json - name: Store benchmark result - continue-on-error: true uses: benchmark-action/github-action-benchmark@4de1bed97a47495fc4c5404952da0499e31f5c29 with: name: "Compilation Time" @@ -532,15 +582,15 @@ jobs: with: pattern: compilation_mem_report_* path: ./reports + merge-multiple: true - name: Merge memory reports using jq run: | - mv ./.github/scripts/merge-bench-reports.sh merge-bench-reports.sh - ./merge-bench-reports.sh memory_report - jq ".memory_reports | map({name: .artifact_name, value: (.peak_memory | tonumber), unit: \"MB\"}) " ./memory_report.json > memory_bench.json + # Github actions seems to not expand "**" in globs by default. + shopt -s globstar + jq --slurp '. | flatten' ./reports/* | tee memory_bench.json - name: Store benchmark result - continue-on-error: true uses: benchmark-action/github-action-benchmark@4de1bed97a47495fc4c5404952da0499e31f5c29 with: name: "Compilation Memory" @@ -581,17 +631,15 @@ jobs: with: pattern: execution_mem_report_* path: ./reports + merge-multiple: true - name: Merge memory reports using jq run: | - mv ./.github/scripts/merge-bench-reports.sh merge-bench-reports.sh - ./merge-bench-reports.sh memory_report - # Rename the memory report as to not clash with the compilation memory report file name - cp memory_report.json execution_memory_report.json - jq ".memory_reports | map({name: .artifact_name, value: (.peak_memory | tonumber), unit: \"MB\"}) " ./execution_memory_report.json > memory_bench.json + # Github actions seems to not expand "**" in globs by default. + shopt -s globstar + jq --slurp '. | flatten' ./reports/* | tee memory_bench.json - name: Store benchmark result - continue-on-error: true uses: benchmark-action/github-action-benchmark@4de1bed97a47495fc4c5404952da0499e31f5c29 with: name: "Execution Memory" @@ -633,15 +681,15 @@ jobs: with: pattern: execution_report_* path: ./reports + merge-multiple: true - name: Merge execution reports using jq run: | - mv ./.github/scripts/merge-bench-reports.sh merge-bench-reports.sh - ./merge-bench-reports.sh execution_report - jq ".execution_reports | map({name: .artifact_name, value: (.time[:-1] | tonumber), unit: \"s\"}) " ./execution_report.json > time_bench.json + # Github actions seems to not expand "**" in globs by default. + shopt -s globstar + jq --slurp '. | flatten' ./reports/* | tee time_bench.json - name: Store benchmark result - continue-on-error: true uses: benchmark-action/github-action-benchmark@4de1bed97a47495fc4c5404952da0499e31f5c29 with: name: "Execution Time" @@ -655,3 +703,28 @@ jobs: fail-on-alert: false alert-comment-cc-users: "@TomAFrench" max-items-in-chart: 50 + + # This is a job which depends on all test jobs and reports the overall status. + # This allows us to add/remove test jobs without having to update the required workflows. + reports-end: + name: End + runs-on: ubuntu-22.04 + # We want this job to always run (even if the dependant jobs fail) as we want this job to fail rather than skipping. + if: ${{ always() }} + needs: + - upload_compilation_report + - upload_compilation_memory_report + - upload_execution_report + - upload_execution_memory_report + + steps: + - name: Report overall success + run: | + if [[ $FAIL == true ]]; then + exit 1 + else + exit 0 + fi + env: + # We treat any skipped or failing jobs as a failure for the workflow as a whole. 
+ FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} diff --git a/.github/workflows/test-js-packages.yml b/.github/workflows/test-js-packages.yml index b92b2ae2030..3b0c1a79d92 100644 --- a/.github/workflows/test-js-packages.yml +++ b/.github/workflows/test-js-packages.yml @@ -527,19 +527,13 @@ jobs: nargo test --silence-warnings --skip-brillig-constraints-check --format json ${{ matrix.project.nargo_args }} | tee $output_file TIME=$(($SECONDS-$BEFORE)) - if [ "TIME" -gt "${{ matrix.project.timeout }}" ]; then - # Library testing time has regressed past the set timeout. - # Don't bump this timeout without understanding why this has happened and confirming that you're not the cause. - exit 1 - fi - NAME=${{ matrix.project.repo }}/${{ matrix.project.path }} # Replace any slashes with underscores NAME=${NAME//\//_} TEST_REPORT_NAME=test_report_$NAME echo "test_report_name=$TEST_REPORT_NAME" >> $GITHUB_OUTPUT - jq --null-input "{ test_reports: [{ name: \"$NAME\", value: (\"$TIME\" | tonumber), unit: \"s\" }]}" > $TEST_REPORT_NAME.json + jq --null-input "[{ name: \"$NAME\", value: (\"$TIME\" | tonumber), unit: \"s\" }]" > $TEST_REPORT_NAME.json if [ ! -s $output_file ]; then # The file is empty so we delete it to signal that `nargo test` failed before it could run any tests @@ -547,6 +541,17 @@ jobs: fi env: NARGO_IGNORE_TEST_FAILURES_FROM_FOREIGN_CALLS: true + + - name: Check test time limit + run: | + TIME=$(jq '.[0].value' ./test-repo/${{ matrix.project.path }}/${{ steps.test_report.outputs.test_report_name }}.json) + if awk 'BEGIN{exit !(ARGV[1]>ARGV[2])}' "$TIME" "${{ matrix.project.timeout }}"; then + # Don't bump this timeout without understanding why this has happened and confirming that you're not the cause. + echo "Failing due to test suite exceeding timeout..." + echo "Timeout: ${{ matrix.project.timeout }}" + echo "Test suite took: $TIME". + exit 1 + fi - name: Compare test results working-directory: ./noir-repo @@ -613,16 +618,15 @@ jobs: with: pattern: test_report_* path: ./reports + merge-multiple: true - name: Merge test reports using jq run: | - jq --null-input "{ test_reports: [] }" > test_report.json - mv ./.github/scripts/merge-bench-reports.sh merge-bench-reports.sh - ./merge-bench-reports.sh test_report - jq ".test_reports" < ./test_report.json > test_bench.json + # Github actions seems to not expand "**" in globs by default. + shopt -s globstar + jq --slurp '. 
| flatten' ./reports/* | tee test_bench.json - name: Store benchmark result - continue-on-error: true uses: benchmark-action/github-action-benchmark@4de1bed97a47495fc4c5404952da0499e31f5c29 with: name: "Test Suite Duration" diff --git a/EXTERNAL_NOIR_LIBRARIES.yml b/EXTERNAL_NOIR_LIBRARIES.yml index 5e95d435f3e..ca811941b1e 100644 --- a/EXTERNAL_NOIR_LIBRARIES.yml +++ b/EXTERNAL_NOIR_LIBRARIES.yml @@ -23,8 +23,10 @@ libraries: timeout: 2 noir-bignum: repo: noir-lang/noir-bignum + timeout: 360 noir_bigcurve: repo: noir-lang/noir_bigcurve + timeout: 290 noir_base64: repo: noir-lang/noir_base64 timeout: 2 @@ -39,7 +41,7 @@ libraries: timeout: 2 noir_json_parser: repo: noir-lang/noir_json_parser - timeout: 10 + timeout: 12 sha256: repo: noir-lang/sha256 timeout: 3 @@ -50,7 +52,7 @@ libraries: noir_contracts: repo: AztecProtocol/aztec-packages path: noir-projects/noir-contracts - timeout: 60 + timeout: 80 blob: repo: AztecProtocol/aztec-packages path: noir-projects/noir-protocol-circuits/crates/blob diff --git a/compiler/integration-tests/web-test-runner.config.mjs b/compiler/integration-tests/web-test-runner.config.mjs index 1f4d5d7a9a5..6d7198212fb 100644 --- a/compiler/integration-tests/web-test-runner.config.mjs +++ b/compiler/integration-tests/web-test-runner.config.mjs @@ -26,10 +26,9 @@ export default { // playwrightLauncher({ product: "webkit" }), // playwrightLauncher({ product: "firefox" }), ], - middleware: [async function setGzHeader(ctx, next) { - if (ctx.url.endsWith('.gz')) { - ctx.set('Content-Encoding', 'gzip'); - ctx.res.removeHeader('Content-Length'); + middleware: [async (ctx, next) => { + if (ctx.url.endsWith('.wasm.gz')) { + ctx.url = ctx.url.replace('/', "/node_modules/@aztec/bb.js/dest/browser/"); } await next(); }], diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_globals.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_globals.rs index 6f5645485a2..5e7f250a6b0 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_globals.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_globals.rs @@ -92,6 +92,13 @@ impl BrilligGlobals { ); } + // NB: Temporary fix to override entry point analysis + let merged_set = + used_globals.values().flat_map(|set| set.iter().copied()).collect::>(); + for set in used_globals.values_mut() { + *set = merged_set.clone(); + } + Self { used_globals, brillig_entry_points, ..Default::default() } } @@ -303,10 +310,11 @@ mod tests { if func_id.to_u32() == 1 { assert_eq!( artifact.byte_code.len(), - 1, + 2, "Expected just a `Return`, but got more than a single opcode" ); - assert!(matches!(&artifact.byte_code[0], Opcode::Return)); + // TODO: Bring this back (https://github.com/noir-lang/noir/issues/7306) + // assert!(matches!(&artifact.byte_code[0], Opcode::Return)); } else if func_id.to_u32() == 2 { assert_eq!( artifact.byte_code.len(), @@ -420,17 +428,17 @@ mod tests { if func_id.to_u32() == 1 { assert_eq!( artifact.byte_code.len(), - 2, + 30, "Expected enough opcodes to initialize the globals" ); - let Opcode::Const { destination, bit_size, value } = &artifact.byte_code[0] else { - panic!("First opcode is expected to be `Const`"); - }; - assert_eq!(destination.unwrap_direct(), GlobalSpace::start()); - assert!(matches!(bit_size, BitSize::Field)); - assert_eq!(*value, FieldElement::from(1u128)); - - assert!(matches!(&artifact.byte_code[1], Opcode::Return)); + // TODO: Bring this back (https://github.com/noir-lang/noir/issues/7306) + // let Opcode::Const { destination, bit_size, value } = 
&artifact.byte_code[0] else { + // panic!("First opcode is expected to be `Const`"); + // }; + // assert_eq!(destination.unwrap_direct(), GlobalSpace::start()); + // assert!(matches!(bit_size, BitSize::Field)); + // assert_eq!(*value, FieldElement::from(1u128)); + // assert!(matches!(&artifact.byte_code[1], Opcode::Return)); } else if func_id.to_u32() == 2 || func_id.to_u32() == 3 { // We want the entry point which uses globals (f2) and the entry point which calls f2 function internally (f3 through f4) // to have the same globals initialized. diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs index 37a63466119..97b200d657a 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs @@ -277,6 +277,10 @@ impl VariableLiveness { fn compute_loop_body(&self, edge: BackEdge) -> HashSet { let mut loop_blocks = HashSet::default(); + if edge.header == edge.start { + loop_blocks.insert(edge.header); + return loop_blocks; + } loop_blocks.insert(edge.header); loop_blocks.insert(edge.start); diff --git a/compiler/noirc_evaluator/src/ssa.rs b/compiler/noirc_evaluator/src/ssa.rs index c17fc2d0b7a..d916cd534a7 100644 --- a/compiler/noirc_evaluator/src/ssa.rs +++ b/compiler/noirc_evaluator/src/ssa.rs @@ -494,6 +494,11 @@ impl SsaBuilder { } fn print(mut self, msg: &str) -> Self { + // Always normalize if we are going to print at least one of the passes + if !matches!(self.ssa_logging, SsaLogging::None) { + self.ssa.normalize_ids(); + } + let print_ssa_pass = match &self.ssa_logging { SsaLogging::None => false, SsaLogging::All => true, @@ -505,7 +510,6 @@ impl SsaBuilder { } }; if print_ssa_pass { - self.ssa.normalize_ids(); println!("After {msg}:\n{}", self.ssa); } self diff --git a/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs b/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs index 24ad67c3b69..4dbdf18ac72 100644 --- a/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs +++ b/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs @@ -663,10 +663,11 @@ impl Context { &mut self, function: &Function, ) -> BTreeSet { + let returns = function.returns(); let variable_parameters_and_return_values = function .parameters() .iter() - .chain(function.returns()) + .chain(returns) .filter(|id| function.dfg.get_numeric_constant(**id).is_none()) .map(|value_id| function.dfg.resolve(*value_id)); diff --git a/compiler/noirc_evaluator/src/ssa/ir/function.rs b/compiler/noirc_evaluator/src/ssa/ir/function.rs index e7748b5f13f..7db8e317c46 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/function.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/function.rs @@ -165,17 +165,13 @@ impl Function { /// Returns the return types of this function. pub(crate) fn returns(&self) -> &[ValueId] { - let blocks = self.reachable_blocks(); - let mut function_return_values = None; - for block in blocks { + for block in self.reachable_blocks() { let terminator = self.dfg[block].terminator(); if let Some(TerminatorInstruction::Return { return_values, .. }) = terminator { - function_return_values = Some(return_values); - break; + return return_values; } } - function_return_values - .expect("Expected a return instruction, as function construction is finished") + &[] } /// Collects all the reachable blocks of this function. 
diff --git a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index 0b5e37e817e..e5753aeba4e 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -470,14 +470,14 @@ impl<'function> PerFunctionContext<'function> { &mut self, mut returns: Vec<(BasicBlockId, Vec)>, ) -> Vec { - // Clippy complains if this were written as an if statement match returns.len() { + 0 => Vec::new(), 1 => { let (return_block, return_values) = returns.remove(0); self.context.builder.switch_to_block(return_block); return_values } - n if n > 1 => { + _ => { // If there is more than 1 return instruction we'll need to create a single block we // can return to and continue inserting in afterwards. let return_block = self.context.builder.insert_block(); @@ -490,7 +490,6 @@ impl<'function> PerFunctionContext<'function> { self.context.builder.switch_to_block(return_block); self.context.builder.block_parameters(return_block).to_vec() } - _ => unreachable!("Inlined function had no return values"), } } @@ -682,7 +681,7 @@ impl<'function> PerFunctionContext<'function> { block_id: BasicBlockId, block_queue: &mut VecDeque, ) -> Option<(BasicBlockId, Vec)> { - match self.source_function.dfg[block_id].unwrap_terminator() { + match &self.source_function.dfg[block_id].unwrap_terminator() { TerminatorInstruction::Jmp { destination, arguments, call_stack } => { let destination = self.translate_block(*destination, block_queue); let arguments = vecmap(arguments, |arg| self.translate_value(*arg)); diff --git a/compiler/noirc_frontend/src/ast/expression.rs b/compiler/noirc_frontend/src/ast/expression.rs index 9c9c0ded867..9c18cc0dd34 100644 --- a/compiler/noirc_frontend/src/ast/expression.rs +++ b/compiler/noirc_frontend/src/ast/expression.rs @@ -4,8 +4,8 @@ use std::fmt::Display; use thiserror::Error; use crate::ast::{ - Ident, ItemVisibility, Path, Pattern, Recoverable, Statement, StatementKind, - UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, Visibility, + Ident, ItemVisibility, Path, Pattern, Statement, StatementKind, UnresolvedTraitConstraint, + UnresolvedType, UnresolvedTypeData, Visibility, }; use crate::node_interner::{ ExprId, InternedExpressionKind, InternedStatementKind, QuotedTypeId, TypeId, @@ -26,6 +26,7 @@ pub enum ExpressionKind { Index(Box), Call(Box), MethodCall(Box), + Constrain(ConstrainExpression), Constructor(Box), MemberAccess(Box), Cast(Box), @@ -226,24 +227,6 @@ impl ExpressionKind { } } -impl Recoverable for ExpressionKind { - fn error(_: Span) -> Self { - ExpressionKind::Error - } -} - -impl Recoverable for Expression { - fn error(span: Span) -> Self { - Expression::new(ExpressionKind::Error, span) - } -} - -impl Recoverable for Option { - fn error(span: Span) -> Self { - Some(Expression::new(ExpressionKind::Error, span)) - } -} - #[derive(Debug, Eq, Clone)] pub struct Expression { pub kind: ExpressionKind, @@ -600,6 +583,55 @@ impl BlockExpression { } } +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct ConstrainExpression { + pub kind: ConstrainKind, + pub arguments: Vec, + pub span: Span, +} + +impl Display for ConstrainExpression { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self.kind { + ConstrainKind::Assert | ConstrainKind::AssertEq => write!( + f, + "{}({})", + self.kind, + vecmap(&self.arguments, |arg| arg.to_string()).join(", ") + ), + ConstrainKind::Constrain => { + write!(f, "constrain {}", &self.arguments[0]) + } + } + } +} + 
+#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum ConstrainKind { + Assert, + AssertEq, + Constrain, +} + +impl ConstrainKind { + pub fn required_arguments_count(&self) -> usize { + match self { + ConstrainKind::Assert | ConstrainKind::Constrain => 1, + ConstrainKind::AssertEq => 2, + } + } +} + +impl Display for ConstrainKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ConstrainKind::Assert => write!(f, "assert"), + ConstrainKind::AssertEq => write!(f, "assert_eq"), + ConstrainKind::Constrain => write!(f, "constrain"), + } + } +} + impl Display for Expression { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { self.kind.fmt(f) @@ -616,6 +648,7 @@ impl Display for ExpressionKind { Index(index) => index.fmt(f), Call(call) => call.fmt(f), MethodCall(call) => call.fmt(f), + Constrain(constrain) => constrain.fmt(f), Cast(cast) => cast.fmt(f), Infix(infix) => infix.fmt(f), If(if_expr) => if_expr.fmt(f), diff --git a/compiler/noirc_frontend/src/ast/mod.rs b/compiler/noirc_frontend/src/ast/mod.rs index 33f504437c0..b6282da01d6 100644 --- a/compiler/noirc_frontend/src/ast/mod.rs +++ b/compiler/noirc_frontend/src/ast/mod.rs @@ -253,12 +253,6 @@ pub enum UnresolvedTypeExpression { AsTraitPath(Box), } -impl Recoverable for UnresolvedType { - fn error(span: Span) -> Self { - UnresolvedType { typ: UnresolvedTypeData::Error, span } - } -} - impl std::fmt::Display for GenericTypeArg { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { diff --git a/compiler/noirc_frontend/src/ast/statement.rs b/compiler/noirc_frontend/src/ast/statement.rs index 02715e8c2d3..88d1e97a96f 100644 --- a/compiler/noirc_frontend/src/ast/statement.rs +++ b/compiler/noirc_frontend/src/ast/statement.rs @@ -42,7 +42,6 @@ pub struct Statement { #[derive(Debug, PartialEq, Eq, Clone)] pub enum StatementKind { Let(LetStatement), - Constrain(ConstrainStatement), Expression(Expression), Assign(AssignStatement), For(ForLoopStatement), @@ -88,7 +87,6 @@ impl StatementKind { match self { StatementKind::Let(_) - | StatementKind::Constrain(_) | StatementKind::Assign(_) | StatementKind::Semi(_) | StatementKind::Break @@ -140,12 +138,6 @@ impl StatementKind { } } -impl Recoverable for StatementKind { - fn error(_: Span) -> Self { - StatementKind::Error - } -} - impl StatementKind { pub fn new_let( pattern: Pattern, @@ -284,25 +276,6 @@ impl Ident { } } -impl Recoverable for Ident { - fn error(span: Span) -> Self { - Ident(Spanned::from(span, ERROR_IDENT.to_owned())) - } -} - -impl Recoverable for Vec { - fn error(_: Span) -> Self { - vec![] - } -} - -/// Trait for recoverable nodes during parsing. -/// This is similar to Default but is expected -/// to return an Error node of the appropriate type. -pub trait Recoverable { - fn error(span: Span) -> Self; -} - #[derive(Debug, PartialEq, Eq, Clone)] pub struct ModuleDeclaration { pub visibility: ItemVisibility, @@ -420,9 +393,6 @@ pub struct TypePath { pub turbofish: Option, } -// Note: Path deliberately doesn't implement Recoverable. 
-// No matter which default value we could give in Recoverable::error, -// it would most likely cause further errors during name resolution #[derive(Debug, PartialEq, Eq, Clone, Hash)] pub struct Path { pub segments: Vec, @@ -593,55 +563,6 @@ pub enum LValue { Interned(InternedExpressionKind, Span), } -#[derive(Debug, PartialEq, Eq, Clone)] -pub struct ConstrainStatement { - pub kind: ConstrainKind, - pub arguments: Vec, - pub span: Span, -} - -impl Display for ConstrainStatement { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self.kind { - ConstrainKind::Assert | ConstrainKind::AssertEq => write!( - f, - "{}({})", - self.kind, - vecmap(&self.arguments, |arg| arg.to_string()).join(", ") - ), - ConstrainKind::Constrain => { - write!(f, "constrain {}", &self.arguments[0]) - } - } - } -} - -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum ConstrainKind { - Assert, - AssertEq, - Constrain, -} - -impl ConstrainKind { - pub fn required_arguments_count(&self) -> usize { - match self { - ConstrainKind::Assert | ConstrainKind::Constrain => 1, - ConstrainKind::AssertEq => 2, - } - } -} - -impl Display for ConstrainKind { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - ConstrainKind::Assert => write!(f, "assert"), - ConstrainKind::AssertEq => write!(f, "assert_eq"), - ConstrainKind::Constrain => write!(f, "constrain"), - } - } -} - #[derive(Debug, PartialEq, Eq, Clone)] pub enum Pattern { Identifier(Ident), @@ -707,12 +628,6 @@ impl Pattern { } } -impl Recoverable for Pattern { - fn error(span: Span) -> Self { - Pattern::Identifier(Ident::error(span)) - } -} - impl LValue { pub fn as_expression(&self) -> Expression { let kind = match self { @@ -969,7 +884,6 @@ impl Display for StatementKind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { StatementKind::Let(let_statement) => let_statement.fmt(f), - StatementKind::Constrain(constrain) => constrain.fmt(f), StatementKind::Expression(expression) => expression.fmt(f), StatementKind::Assign(assign) => assign.fmt(f), StatementKind::For(for_loop) => for_loop.fmt(f), diff --git a/compiler/noirc_frontend/src/ast/visitor.rs b/compiler/noirc_frontend/src/ast/visitor.rs index a43bd0a5d3d..e40c534c3b9 100644 --- a/compiler/noirc_frontend/src/ast/visitor.rs +++ b/compiler/noirc_frontend/src/ast/visitor.rs @@ -4,7 +4,7 @@ use noirc_errors::Span; use crate::{ ast::{ ArrayLiteral, AsTraitPath, AssignStatement, BlockExpression, CallExpression, - CastExpression, ConstrainStatement, ConstructorExpression, Expression, ExpressionKind, + CastExpression, ConstrainExpression, ConstructorExpression, Expression, ExpressionKind, ForLoopStatement, ForRange, Ident, IfExpression, IndexExpression, InfixExpression, LValue, Lambda, LetStatement, Literal, MemberAccessExpression, MethodCallExpression, ModuleDeclaration, NoirFunction, NoirStruct, NoirTrait, NoirTraitImpl, NoirTypeAlias, Path, @@ -294,7 +294,7 @@ pub trait Visitor { true } - fn visit_constrain_statement(&mut self, _: &ConstrainStatement) -> bool { + fn visit_constrain_statement(&mut self, _: &ConstrainExpression) -> bool { true } @@ -855,6 +855,9 @@ impl Expression { ExpressionKind::MethodCall(method_call_expression) => { method_call_expression.accept(self.span, visitor); } + ExpressionKind::Constrain(constrain) => { + constrain.accept(visitor); + } ExpressionKind::Constructor(constructor_expression) => { constructor_expression.accept(self.span, visitor); } @@ -1148,9 +1151,6 @@ impl Statement { 
StatementKind::Let(let_statement) => { let_statement.accept(visitor); } - StatementKind::Constrain(constrain_statement) => { - constrain_statement.accept(visitor); - } StatementKind::Expression(expression) => { expression.accept(visitor); } @@ -1199,7 +1199,7 @@ impl LetStatement { } } -impl ConstrainStatement { +impl ConstrainExpression { pub fn accept(&self, visitor: &mut impl Visitor) { if visitor.visit_constrain_statement(self) { self.accept_children(visitor); diff --git a/compiler/noirc_frontend/src/elaborator/expressions.rs b/compiler/noirc_frontend/src/elaborator/expressions.rs index 4c3fb5a7616..8bee7241d43 100644 --- a/compiler/noirc_frontend/src/elaborator/expressions.rs +++ b/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -1,15 +1,15 @@ use acvm::{AcirField, FieldElement}; use iter_extended::vecmap; -use noirc_errors::{Location, Span}; +use noirc_errors::{Location, Span, Spanned}; use rustc_hash::FxHashSet as HashSet; use crate::{ ast::{ - ArrayLiteral, BlockExpression, CallExpression, CastExpression, ConstructorExpression, - Expression, ExpressionKind, Ident, IfExpression, IndexExpression, InfixExpression, - ItemVisibility, Lambda, Literal, MatchExpression, MemberAccessExpression, - MethodCallExpression, Path, PathSegment, PrefixExpression, StatementKind, UnaryOp, - UnresolvedTypeData, UnresolvedTypeExpression, + ArrayLiteral, BinaryOpKind, BlockExpression, CallExpression, CastExpression, + ConstrainExpression, ConstrainKind, ConstructorExpression, Expression, ExpressionKind, + Ident, IfExpression, IndexExpression, InfixExpression, ItemVisibility, Lambda, Literal, + MatchExpression, MemberAccessExpression, MethodCallExpression, Path, PathSegment, + PrefixExpression, StatementKind, UnaryOp, UnresolvedTypeData, UnresolvedTypeExpression, }, hir::{ comptime::{self, InterpreterError}, @@ -21,9 +21,9 @@ use crate::{ hir_def::{ expr::{ HirArrayLiteral, HirBinaryOp, HirBlockExpression, HirCallExpression, HirCastExpression, - HirConstructorExpression, HirExpression, HirIdent, HirIfExpression, HirIndexExpression, - HirInfixExpression, HirLambda, HirLiteral, HirMemberAccess, HirMethodCallExpression, - HirPrefixExpression, + HirConstrainExpression, HirConstructorExpression, HirExpression, HirIdent, + HirIfExpression, HirIndexExpression, HirInfixExpression, HirLambda, HirLiteral, + HirMemberAccess, HirMethodCallExpression, HirPrefixExpression, }, stmt::HirStatement, traits::{ResolvedTraitBound, TraitConstraint}, @@ -52,6 +52,7 @@ impl<'context> Elaborator<'context> { ExpressionKind::Index(index) => self.elaborate_index(*index), ExpressionKind::Call(call) => self.elaborate_call(*call, expr.span), ExpressionKind::MethodCall(call) => self.elaborate_method_call(*call, expr.span), + ExpressionKind::Constrain(constrain) => self.elaborate_constrain(constrain), ExpressionKind::Constructor(constructor) => self.elaborate_constructor(*constructor), ExpressionKind::MemberAccess(access) => { return self.elaborate_member_access(*access, expr.span) @@ -583,6 +584,61 @@ impl<'context> Elaborator<'context> { } } + pub(super) fn elaborate_constrain( + &mut self, + mut expr: ConstrainExpression, + ) -> (HirExpression, Type) { + let span = expr.span; + let min_args_count = expr.kind.required_arguments_count(); + let max_args_count = min_args_count + 1; + let actual_args_count = expr.arguments.len(); + + let (message, expr) = if !(min_args_count..=max_args_count).contains(&actual_args_count) { + self.push_err(TypeCheckError::AssertionParameterCountMismatch { + kind: expr.kind, + found: 
actual_args_count, + span, + }); + + // Given that we already produced an error, let's make this an `assert(true)` so + // we don't get further errors. + let message = None; + let kind = ExpressionKind::Literal(crate::ast::Literal::Bool(true)); + let expr = Expression { kind, span }; + (message, expr) + } else { + let message = + (actual_args_count != min_args_count).then(|| expr.arguments.pop().unwrap()); + let expr = match expr.kind { + ConstrainKind::Assert | ConstrainKind::Constrain => expr.arguments.pop().unwrap(), + ConstrainKind::AssertEq => { + let rhs = expr.arguments.pop().unwrap(); + let lhs = expr.arguments.pop().unwrap(); + let span = Span::from(lhs.span.start()..rhs.span.end()); + let operator = Spanned::from(span, BinaryOpKind::Equal); + let kind = + ExpressionKind::Infix(Box::new(InfixExpression { lhs, operator, rhs })); + Expression { kind, span } + } + }; + (message, expr) + }; + + let expr_span = expr.span; + let (expr_id, expr_type) = self.elaborate_expression(expr); + + // Must type check the assertion message expression so that we instantiate bindings + let msg = message.map(|assert_msg_expr| self.elaborate_expression(assert_msg_expr).0); + + self.unify(&expr_type, &Type::Bool, || TypeCheckError::TypeMismatch { + expr_typ: expr_type.to_string(), + expected_typ: Type::Bool.to_string(), + expr_span, + }); + + (HirExpression::Constrain(HirConstrainExpression(expr_id, self.file, msg)), Type::Unit) + } + /// Elaborates an expression knowing that it has to match a given type. fn elaborate_expression_with_type( &mut self, @@ -914,6 +970,7 @@ impl<'context> Elaborator<'context> { target_type: Option<&Type>, ) -> (HirExpression, Type) { let expr_span = if_expr.condition.span; + let consequence_span = if_expr.consequence.span; let (condition, cond_type) = self.elaborate_expression(if_expr.condition); let (consequence, mut ret_type) = self.elaborate_expression_with_target_type(if_expr.consequence, target_type); @@ -924,29 +981,30 @@ impl<'context> Elaborator<'context> { expr_span, }); - let alternative = if_expr.alternative.map(|alternative| { - let expr_span = alternative.span; + let (alternative, else_type, error_span) = if let Some(alternative) = if_expr.alternative { let (else_, else_type) = self.elaborate_expression_with_target_type(alternative, target_type); + (Some(else_), else_type, expr_span) + } else { + (None, Type::Unit, consequence_span) + }; - self.unify(&ret_type, &else_type, || { - let err = TypeCheckError::TypeMismatch { - expected_typ: ret_type.to_string(), - expr_typ: else_type.to_string(), - expr_span, - }; + self.unify(&ret_type, &else_type, || { + let err = TypeCheckError::TypeMismatch { + expected_typ: ret_type.to_string(), + expr_typ: else_type.to_string(), + expr_span: error_span, + }; - let context = if ret_type == Type::Unit { - "Are you missing a semicolon at the end of your 'else' branch?" - } else if else_type == Type::Unit { - "Are you missing a semicolon at the end of the first block of this 'if'?" - } else { - "Expected the types of both if branches to be equal" - }; + let context = if ret_type == Type::Unit { + "Are you missing a semicolon at the end of your 'else' branch?" + } else if else_type == Type::Unit { + "Are you missing a semicolon at the end of the first block of this 'if'?" 
+ } else { + "Expected the types of both if branches to be equal" + }; - err.add_context(context) - }); - else_ + err.add_context(context) }); if alternative.is_none() { diff --git a/compiler/noirc_frontend/src/elaborator/lints.rs b/compiler/noirc_frontend/src/elaborator/lints.rs index af80dfaa823..7910d8cebdb 100644 --- a/compiler/noirc_frontend/src/elaborator/lints.rs +++ b/compiler/noirc_frontend/src/elaborator/lints.rs @@ -283,8 +283,7 @@ fn can_return_without_recursing(interner: &NodeInterner, func_id: FuncId, expr_i // Rust doesn't seem to check the for loop body (it's bounds might mean it's never called). HirStatement::For(e) => check(e.start_range) && check(e.end_range), HirStatement::Loop(e) => check(e), - HirStatement::Constrain(_) - | HirStatement::Comptime(_) + HirStatement::Comptime(_) | HirStatement::Break | HirStatement::Continue | HirStatement::Error => true, @@ -310,6 +309,7 @@ fn can_return_without_recursing(interner: &NodeInterner, func_id: FuncId, expr_i HirExpression::MemberAccess(e) => check(e.lhs), HirExpression::Call(e) => check(e.func) && e.arguments.iter().cloned().all(check), HirExpression::MethodCall(e) => check(e.object) && e.arguments.iter().cloned().all(check), + HirExpression::Constrain(e) => check(e.0) && e.2.map(check).unwrap_or(true), HirExpression::Cast(e) => check(e.lhs), HirExpression::If(e) => { check(e.condition) && (check(e.consequence) || e.alternative.map(check).unwrap_or(true)) diff --git a/compiler/noirc_frontend/src/elaborator/statements.rs b/compiler/noirc_frontend/src/elaborator/statements.rs index b17052d01ef..c401646332f 100644 --- a/compiler/noirc_frontend/src/elaborator/statements.rs +++ b/compiler/noirc_frontend/src/elaborator/statements.rs @@ -1,9 +1,8 @@ -use noirc_errors::{Location, Span, Spanned}; +use noirc_errors::{Location, Span}; use crate::{ ast::{ - AssignStatement, BinaryOpKind, ConstrainKind, ConstrainStatement, Expression, - ExpressionKind, ForLoopStatement, ForRange, Ident, InfixExpression, ItemVisibility, LValue, + AssignStatement, Expression, ForLoopStatement, ForRange, Ident, ItemVisibility, LValue, LetStatement, Path, Statement, StatementKind, }, hir::{ @@ -15,10 +14,7 @@ use crate::{ }, hir_def::{ expr::HirIdent, - stmt::{ - HirAssignStatement, HirConstrainStatement, HirForStatement, HirLValue, HirLetStatement, - HirStatement, - }, + stmt::{HirAssignStatement, HirForStatement, HirLValue, HirLetStatement, HirStatement}, }, node_interner::{DefinitionId, DefinitionKind, GlobalId, StmtId}, DataType, Type, @@ -38,7 +34,6 @@ impl<'context> Elaborator<'context> { ) -> (HirStatement, Type) { match statement.kind { StatementKind::Let(let_stmt) => self.elaborate_local_let(let_stmt), - StatementKind::Constrain(constrain) => self.elaborate_constrain(constrain), StatementKind::Assign(assign) => self.elaborate_assign(assign), StatementKind::For(for_stmt) => self.elaborate_for(for_stmt), StatementKind::Loop(block, span) => self.elaborate_loop(block, span), @@ -149,61 +144,6 @@ impl<'context> Elaborator<'context> { (HirStatement::Let(let_), Type::Unit) } - pub(super) fn elaborate_constrain( - &mut self, - mut stmt: ConstrainStatement, - ) -> (HirStatement, Type) { - let span = stmt.span; - let min_args_count = stmt.kind.required_arguments_count(); - let max_args_count = min_args_count + 1; - let actual_args_count = stmt.arguments.len(); - - let (message, expr) = if !(min_args_count..=max_args_count).contains(&actual_args_count) { - self.push_err(TypeCheckError::AssertionParameterCountMismatch { - kind: stmt.kind, - found: 
actual_args_count, - span, - }); - - // Given that we already produced an error, let's make this an `assert(true)` so - // we don't get further errors. - let message = None; - let kind = ExpressionKind::Literal(crate::ast::Literal::Bool(true)); - let expr = Expression { kind, span }; - (message, expr) - } else { - let message = - (actual_args_count != min_args_count).then(|| stmt.arguments.pop().unwrap()); - let expr = match stmt.kind { - ConstrainKind::Assert | ConstrainKind::Constrain => stmt.arguments.pop().unwrap(), - ConstrainKind::AssertEq => { - let rhs = stmt.arguments.pop().unwrap(); - let lhs = stmt.arguments.pop().unwrap(); - let span = Span::from(lhs.span.start()..rhs.span.end()); - let operator = Spanned::from(span, BinaryOpKind::Equal); - let kind = - ExpressionKind::Infix(Box::new(InfixExpression { lhs, operator, rhs })); - Expression { kind, span } - } - }; - (message, expr) - }; - - let expr_span = expr.span; - let (expr_id, expr_type) = self.elaborate_expression(expr); - - // Must type check the assertion message expression so that we instantiate bindings - let msg = message.map(|assert_msg_expr| self.elaborate_expression(assert_msg_expr).0); - - self.unify(&expr_type, &Type::Bool, || TypeCheckError::TypeMismatch { - expr_typ: expr_type.to_string(), - expected_typ: Type::Bool.to_string(), - expr_span, - }); - - (HirStatement::Constrain(HirConstrainStatement(expr_id, self.file, msg)), Type::Unit) - } - pub(super) fn elaborate_assign(&mut self, assign: AssignStatement) -> (HirStatement, Type) { let expr_span = assign.expression.span; let (expression, expr_type) = self.elaborate_expression(assign.expression); diff --git a/compiler/noirc_frontend/src/hir/comptime/display.rs b/compiler/noirc_frontend/src/hir/comptime/display.rs index 1be4bbe61ab..a6927ab3fe8 100644 --- a/compiler/noirc_frontend/src/hir/comptime/display.rs +++ b/compiler/noirc_frontend/src/hir/comptime/display.rs @@ -6,7 +6,7 @@ use noirc_errors::Span; use crate::{ ast::{ ArrayLiteral, AsTraitPath, AssignStatement, BlockExpression, CallExpression, - CastExpression, ConstrainStatement, ConstructorExpression, Expression, ExpressionKind, + CastExpression, ConstrainExpression, ConstructorExpression, Expression, ExpressionKind, ForBounds, ForLoopStatement, ForRange, GenericTypeArgs, IfExpression, IndexExpression, InfixExpression, LValue, Lambda, LetStatement, Literal, MatchExpression, MemberAccessExpression, MethodCallExpression, Pattern, PrefixExpression, Statement, @@ -573,6 +573,12 @@ fn remove_interned_in_expression_kind( ..*call })) } + ExpressionKind::Constrain(constrain) => ExpressionKind::Constrain(ConstrainExpression { + arguments: vecmap(constrain.arguments, |expr| { + remove_interned_in_expression(interner, expr) + }), + ..constrain + }), ExpressionKind::Constructor(constructor) => { ExpressionKind::Constructor(Box::new(ConstructorExpression { fields: vecmap(constructor.fields, |(name, expr)| { @@ -728,12 +734,6 @@ fn remove_interned_in_statement_kind( r#type: remove_interned_in_unresolved_type(interner, let_statement.r#type), ..let_statement }), - StatementKind::Constrain(constrain) => StatementKind::Constrain(ConstrainStatement { - arguments: vecmap(constrain.arguments, |expr| { - remove_interned_in_expression(interner, expr) - }), - ..constrain - }), StatementKind::Expression(expr) => { StatementKind::Expression(remove_interned_in_expression(interner, expr)) } diff --git a/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs b/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs 
index d46484d05fa..3ba7ae42950 100644 --- a/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs +++ b/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs @@ -8,7 +8,7 @@ use crate::ast::{ MemberAccessExpression, MethodCallExpression, Path, PathKind, PathSegment, Pattern, PrefixExpression, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, }; -use crate::ast::{ConstrainStatement, Expression, Statement, StatementKind}; +use crate::ast::{ConstrainExpression, Expression, Statement, StatementKind}; use crate::hir_def::expr::{ HirArrayLiteral, HirBlockExpression, HirExpression, HirIdent, HirLiteral, }; @@ -32,20 +32,6 @@ impl HirStatement { let expression = let_stmt.expression.to_display_ast(interner); StatementKind::new_let(pattern, r#type, expression, let_stmt.attributes.clone()) } - HirStatement::Constrain(constrain) => { - let expr = constrain.0.to_display_ast(interner); - let mut arguments = vec![expr]; - if let Some(message) = constrain.2 { - arguments.push(message.to_display_ast(interner)); - } - - // TODO: Find difference in usage between Assert & AssertEq - StatementKind::Constrain(ConstrainStatement { - kind: ConstrainKind::Assert, - arguments, - span, - }) - } HirStatement::Assign(assign) => StatementKind::Assign(AssignStatement { lvalue: assign.lvalue.to_display_ast(interner), expression: assign.expression.to_display_ast(interner), @@ -180,6 +166,20 @@ impl HirExpression { is_macro_call: false, })) } + HirExpression::Constrain(constrain) => { + let expr = constrain.0.to_display_ast(interner); + let mut arguments = vec![expr]; + if let Some(message) = constrain.2 { + arguments.push(message.to_display_ast(interner)); + } + + // TODO: Find difference in usage between Assert & AssertEq + ExpressionKind::Constrain(ConstrainExpression { + kind: ConstrainKind::Assert, + arguments, + span, + }) + } HirExpression::Cast(cast) => { let lhs = cast.lhs.to_display_ast(interner); let r#type = cast.r#type.to_display_ast(); diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index 33f8e43863e..5f001192dac 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -14,7 +14,7 @@ use crate::elaborator::Elaborator; use crate::graph::CrateId; use crate::hir::def_map::ModuleId; use crate::hir::type_check::TypeCheckError; -use crate::hir_def::expr::{HirEnumConstructorExpression, ImplKind}; +use crate::hir_def::expr::{HirConstrainExpression, HirEnumConstructorExpression, ImplKind}; use crate::hir_def::function::FunctionBody; use crate::monomorphization::{ perform_impl_bindings, perform_instantiation_bindings, resolve_trait_method, @@ -32,8 +32,8 @@ use crate::{ HirPrefixExpression, }, stmt::{ - HirAssignStatement, HirConstrainStatement, HirForStatement, HirLValue, HirLetStatement, - HirPattern, HirStatement, + HirAssignStatement, HirForStatement, HirLValue, HirLetStatement, HirPattern, + HirStatement, }, types::Kind, }, @@ -532,6 +532,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { HirExpression::MemberAccess(access) => self.evaluate_access(access, id), HirExpression::Call(call) => self.evaluate_call(call, id), HirExpression::MethodCall(call) => self.evaluate_method_call(call, id), + HirExpression::Constrain(constrain) => self.evaluate_constrain(constrain), HirExpression::Cast(cast) => self.evaluate_cast(&cast, id), HirExpression::If(if_) => self.evaluate_if(if_, id), HirExpression::Tuple(tuple) => 
self.evaluate_tuple(tuple), @@ -1560,7 +1561,6 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { pub fn evaluate_statement(&mut self, statement: StmtId) -> IResult { match self.elaborator.interner.statement(&statement) { HirStatement::Let(let_) => self.evaluate_let(let_), - HirStatement::Constrain(constrain) => self.evaluate_constrain(constrain), HirStatement::Assign(assign) => self.evaluate_assign(assign), HirStatement::For(for_) => self.evaluate_for(for_), HirStatement::Loop(expression) => self.evaluate_loop(expression), @@ -1586,7 +1586,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { Ok(Value::Unit) } - fn evaluate_constrain(&mut self, constrain: HirConstrainStatement) -> IResult { + fn evaluate_constrain(&mut self, constrain: HirConstrainExpression) -> IResult { match self.evaluate(constrain.0)? { Value::Bool(true) => Ok(Value::Unit), Value::Bool(false) => { diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index 9abb1b190d5..6655c8977e2 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -1540,7 +1540,7 @@ fn expr_as_assert( location: Location, ) -> IResult { expr_as(interner, arguments, return_type.clone(), location, |expr| { - if let ExprValue::Statement(StatementKind::Constrain(mut constrain)) = expr { + if let ExprValue::Expression(ExpressionKind::Constrain(mut constrain)) = expr { if constrain.kind == ConstrainKind::Assert && !constrain.arguments.is_empty() && constrain.arguments.len() <= 2 @@ -1580,7 +1580,7 @@ fn expr_as_assert_eq( location: Location, ) -> IResult { expr_as(interner, arguments, return_type.clone(), location, |expr| { - if let ExprValue::Statement(StatementKind::Constrain(mut constrain)) = expr { + if let ExprValue::Expression(ExpressionKind::Constrain(mut constrain)) = expr { if constrain.kind == ConstrainKind::AssertEq && constrain.arguments.len() >= 2 && constrain.arguments.len() <= 3 diff --git a/compiler/noirc_frontend/src/hir_def/expr.rs b/compiler/noirc_frontend/src/hir_def/expr.rs index 00b94411fcd..543c13fac9c 100644 --- a/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/compiler/noirc_frontend/src/hir_def/expr.rs @@ -34,6 +34,7 @@ pub enum HirExpression { MemberAccess(HirMemberAccess), Call(HirCallExpression), MethodCall(HirMethodCallExpression), + Constrain(HirConstrainExpression), Cast(HirCastExpression), If(HirIfExpression), Tuple(Vec), @@ -200,6 +201,13 @@ pub struct HirMethodCallExpression { pub location: Location, } +/// Corresponds to `assert` and `assert_eq` in the source code. +/// This node also contains the FileId of the file the constrain +/// originates from. This is used later in the SSA pass to issue +/// an error if a constrain is found to be always false. 
+#[derive(Debug, Clone)] +pub struct HirConstrainExpression(pub ExprId, pub FileId, pub Option); + #[derive(Debug, Clone)] pub enum HirMethodReference { /// A method can be defined in a regular `impl` block, in which case diff --git a/compiler/noirc_frontend/src/hir_def/stmt.rs b/compiler/noirc_frontend/src/hir_def/stmt.rs index 8a580e735b1..96ef7161341 100644 --- a/compiler/noirc_frontend/src/hir_def/stmt.rs +++ b/compiler/noirc_frontend/src/hir_def/stmt.rs @@ -3,7 +3,6 @@ use crate::ast::Ident; use crate::node_interner::{ExprId, StmtId}; use crate::token::SecondaryAttribute; use crate::Type; -use fm::FileId; use noirc_errors::{Location, Span}; /// A HirStatement is the result of performing name resolution on @@ -13,7 +12,6 @@ use noirc_errors::{Location, Span}; #[derive(Debug, Clone)] pub enum HirStatement { Let(HirLetStatement), - Constrain(HirConstrainStatement), Assign(HirAssignStatement), For(HirForStatement), Loop(ExprId), @@ -74,13 +72,6 @@ pub struct HirAssignStatement { pub expression: ExprId, } -/// Corresponds to `constrain expr;` in the source code. -/// This node also contains the FileId of the file the constrain -/// originates from. This is used later in the SSA pass to issue -/// an error if a constrain is found to be always false. -#[derive(Debug, Clone)] -pub struct HirConstrainStatement(pub ExprId, pub FileId, pub Option); - #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub enum HirPattern { Identifier(HirIdent), diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index 7ad703523d4..5d81913f4ec 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -557,6 +557,22 @@ impl<'interner> Monomorphizer<'interner> { HirExpression::Call(call) => self.function_call(call, expr)?, + HirExpression::Constrain(constrain) => { + let expr = self.expr(constrain.0)?; + let location = self.interner.expr_location(&constrain.0); + let assert_message = constrain + .2 + .map(|assert_msg_expr| { + self.expr(assert_msg_expr).map(|expr| { + (expr, self.interner.id_type(assert_msg_expr).follow_bindings()) + }) + }) + .transpose()? + .map(Box::new); + + ast::Expression::Constrain(Box::new(expr), location, assert_message) + } + HirExpression::Cast(cast) => { let location = self.interner.expr_location(&expr); let typ = Self::convert_type(&cast.r#type, location)?; @@ -658,21 +674,6 @@ impl<'interner> Monomorphizer<'interner> { fn statement(&mut self, id: StmtId) -> Result { match self.interner.statement(&id) { HirStatement::Let(let_statement) => self.let_statement(let_statement), - HirStatement::Constrain(constrain) => { - let expr = self.expr(constrain.0)?; - let location = self.interner.expr_location(&constrain.0); - let assert_message = constrain - .2 - .map(|assert_msg_expr| { - self.expr(assert_msg_expr).map(|expr| { - (expr, self.interner.id_type(assert_msg_expr).follow_bindings()) - }) - }) - .transpose()? 
- .map(Box::new); - - Ok(ast::Expression::Constrain(Box::new(expr), location, assert_message)) - } HirStatement::Assign(assign) => self.assign(assign), HirStatement::For(for_loop) => { self.is_range_loop = true; diff --git a/compiler/noirc_frontend/src/parser/parser/expression.rs b/compiler/noirc_frontend/src/parser/parser/expression.rs index eff309154e3..319eefc190a 100644 --- a/compiler/noirc_frontend/src/parser/parser/expression.rs +++ b/compiler/noirc_frontend/src/parser/parser/expression.rs @@ -3,9 +3,10 @@ use noirc_errors::Span; use crate::{ ast::{ - ArrayLiteral, BlockExpression, CallExpression, CastExpression, ConstructorExpression, - Expression, ExpressionKind, Ident, IfExpression, IndexExpression, Literal, MatchExpression, - MemberAccessExpression, MethodCallExpression, Statement, TypePath, UnaryOp, UnresolvedType, + ArrayLiteral, BlockExpression, CallExpression, CastExpression, ConstrainExpression, + ConstrainKind, ConstructorExpression, Expression, ExpressionKind, Ident, IfExpression, + IndexExpression, Literal, MatchExpression, MemberAccessExpression, MethodCallExpression, + Statement, TypePath, UnaryOp, UnresolvedType, }, parser::{labels::ParsingRuleLabel, parser::parse_many::separated_by_comma, ParserErrorReason}, token::{Keyword, Token, TokenKind}, @@ -653,6 +654,7 @@ impl<'a> Parser<'a> { /// | ArrayExpression /// | SliceExpression /// | BlockExpression + /// | ConstrainExpression /// /// QuoteExpression = 'quote' '{' token* '}' /// @@ -696,6 +698,10 @@ impl<'a> Parser<'a> { return Some(ExpressionKind::Block(kind)); } + if let Some(constrain) = self.parse_constrain_expression() { + return Some(ExpressionKind::Constrain(constrain)); + } + None } @@ -800,6 +806,49 @@ impl<'a> Parser<'a> { } } + /// ConstrainExpression + /// = 'constrain' Expression + /// | 'assert' Arguments + /// | 'assert_eq' Arguments + pub(super) fn parse_constrain_expression(&mut self) -> Option { + let start_span = self.current_token_span; + let kind = self.parse_constrain_kind()?; + + Some(match kind { + ConstrainKind::Assert | ConstrainKind::AssertEq => { + let arguments = self.parse_arguments(); + if arguments.is_none() { + self.expected_token(Token::LeftParen); + } + let arguments = arguments.unwrap_or_default(); + + ConstrainExpression { kind, arguments, span: self.span_since(start_span) } + } + ConstrainKind::Constrain => { + self.push_error(ParserErrorReason::ConstrainDeprecated, self.previous_token_span); + + let expression = self.parse_expression_or_error(); + ConstrainExpression { + kind, + arguments: vec![expression], + span: self.span_since(start_span), + } + } + }) + } + + fn parse_constrain_kind(&mut self) -> Option { + if self.eat_keyword(Keyword::Assert) { + Some(ConstrainKind::Assert) + } else if self.eat_keyword(Keyword::AssertEq) { + Some(ConstrainKind::AssertEq) + } else if self.eat_keyword(Keyword::Constrain) { + Some(ConstrainKind::Constrain) + } else { + None + } + } + /// Block = '{' Statement* '}' pub(super) fn parse_block(&mut self) -> Option { if !self.eat_left_brace() { @@ -849,8 +898,8 @@ mod tests { use crate::{ ast::{ - ArrayLiteral, BinaryOpKind, Expression, ExpressionKind, Literal, StatementKind, - UnaryOp, UnresolvedTypeData, + ArrayLiteral, BinaryOpKind, ConstrainKind, Expression, ExpressionKind, Literal, + StatementKind, UnaryOp, UnresolvedTypeData, }, parser::{ parser::tests::{ @@ -1749,4 +1798,45 @@ mod tests { }; assert_eq!(expr.kind.to_string(), "((1 + 2))"); } + + #[test] + fn parses_assert() { + let src = "assert(true, \"good\")"; + let expression = 
parse_expression_no_errors(src); + let ExpressionKind::Constrain(constrain) = expression.kind else { + panic!("Expected constrain expression"); + }; + assert_eq!(constrain.kind, ConstrainKind::Assert); + assert_eq!(constrain.arguments.len(), 2); + } + + #[test] + fn parses_assert_eq() { + let src = "assert_eq(1, 2, \"bad\")"; + let expression = parse_expression_no_errors(src); + let ExpressionKind::Constrain(constrain) = expression.kind else { + panic!("Expected constrain expression"); + }; + assert_eq!(constrain.kind, ConstrainKind::AssertEq); + assert_eq!(constrain.arguments.len(), 3); + } + + #[test] + fn parses_constrain() { + let src = " + constrain 1 + ^^^^^^^^^ + "; + let (src, span) = get_source_with_error_span(src); + let mut parser = Parser::for_str(&src); + let expression = parser.parse_expression_or_error(); + let ExpressionKind::Constrain(constrain) = expression.kind else { + panic!("Expected constrain expression"); + }; + assert_eq!(constrain.kind, ConstrainKind::Constrain); + assert_eq!(constrain.arguments.len(), 1); + + let reason = get_single_error_reason(&parser.errors, span); + assert!(matches!(reason, ParserErrorReason::ConstrainDeprecated)); + } } diff --git a/compiler/noirc_frontend/src/parser/parser/item.rs b/compiler/noirc_frontend/src/parser/parser/item.rs index d928d8e82d3..43641935565 100644 --- a/compiler/noirc_frontend/src/parser/parser/item.rs +++ b/compiler/noirc_frontend/src/parser/parser/item.rs @@ -1,7 +1,7 @@ use iter_extended::vecmap; use crate::{ - parser::{labels::ParsingRuleLabel, Item, ItemKind}, + parser::{labels::ParsingRuleLabel, Item, ItemKind, ParserErrorReason}, token::{Keyword, Token}, }; @@ -94,6 +94,10 @@ impl<'a> Parser<'a> { let kinds = self.parse_item_kind(); let span = self.span_since(start_span); + if kinds.is_empty() && !doc_comments.is_empty() { + self.push_error(ParserErrorReason::DocCommentDoesNotDocumentAnything, start_span); + } + vecmap(kinds, |kind| Item { kind, span, doc_comments: doc_comments.clone() }) } @@ -260,4 +264,18 @@ mod tests { let error = get_single_error(&errors, span); assert_eq!(error.to_string(), "Expected a '}' but found end of input"); } + + #[test] + fn errors_on_trailing_doc_comment() { + let src = " + fn foo() {} + /// doc comment + ^^^^^^^^^^^^^^^ + "; + let (src, span) = get_source_with_error_span(src); + let (module, errors) = parse_program(&src); + assert_eq!(module.items.len(), 1); + let error = get_single_error(&errors, span); + assert!(error.to_string().contains("Documentation comment does not document anything")); + } } diff --git a/compiler/noirc_frontend/src/parser/parser/statement.rs b/compiler/noirc_frontend/src/parser/parser/statement.rs index 37013e91528..f9cc63a364e 100644 --- a/compiler/noirc_frontend/src/parser/parser/statement.rs +++ b/compiler/noirc_frontend/src/parser/parser/statement.rs @@ -2,9 +2,9 @@ use noirc_errors::{Span, Spanned}; use crate::{ ast::{ - AssignStatement, BinaryOp, BinaryOpKind, ConstrainKind, ConstrainStatement, Expression, - ExpressionKind, ForBounds, ForLoopStatement, ForRange, Ident, InfixExpression, LValue, - LetStatement, Statement, StatementKind, + AssignStatement, BinaryOp, BinaryOpKind, Expression, ExpressionKind, ForBounds, + ForLoopStatement, ForRange, Ident, InfixExpression, LValue, LetStatement, Statement, + StatementKind, }, parser::{labels::ParsingRuleLabel, ParserErrorReason}, token::{Attribute, Keyword, Token, TokenKind}, @@ -89,7 +89,6 @@ impl<'a> Parser<'a> { /// | ContinueStatement /// | ReturnStatement /// | LetStatement - /// | 
ConstrainStatement /// | ComptimeStatement /// | ForStatement /// | LoopStatement @@ -145,10 +144,6 @@ impl<'a> Parser<'a> { return Some(StatementKind::Let(let_statement)); } - if let Some(constrain) = self.parse_constrain_statement() { - return Some(StatementKind::Constrain(constrain)); - } - if self.at_keyword(Keyword::Comptime) { return self.parse_comptime_statement(attributes); } @@ -432,58 +427,12 @@ impl<'a> Parser<'a> { is_global_let: false, }) } - - /// ConstrainStatement - /// = 'constrain' Expression - /// | 'assert' Arguments - /// | 'assert_eq' Arguments - fn parse_constrain_statement(&mut self) -> Option { - let start_span = self.current_token_span; - let kind = self.parse_constrain_kind()?; - - Some(match kind { - ConstrainKind::Assert | ConstrainKind::AssertEq => { - let arguments = self.parse_arguments(); - if arguments.is_none() { - self.expected_token(Token::LeftParen); - } - let arguments = arguments.unwrap_or_default(); - - ConstrainStatement { kind, arguments, span: self.span_since(start_span) } - } - ConstrainKind::Constrain => { - self.push_error(ParserErrorReason::ConstrainDeprecated, self.previous_token_span); - - let expression = self.parse_expression_or_error(); - ConstrainStatement { - kind, - arguments: vec![expression], - span: self.span_since(start_span), - } - } - }) - } - - fn parse_constrain_kind(&mut self) -> Option { - if self.eat_keyword(Keyword::Assert) { - Some(ConstrainKind::Assert) - } else if self.eat_keyword(Keyword::AssertEq) { - Some(ConstrainKind::AssertEq) - } else if self.eat_keyword(Keyword::Constrain) { - Some(ConstrainKind::Constrain) - } else { - None - } - } } #[cfg(test)] mod tests { use crate::{ - ast::{ - ConstrainKind, ExpressionKind, ForRange, LValue, Statement, StatementKind, - UnresolvedTypeData, - }, + ast::{ExpressionKind, ForRange, LValue, Statement, StatementKind, UnresolvedTypeData}, parser::{ parser::tests::{ expect_no_errors, get_single_error, get_single_error_reason, @@ -551,47 +500,6 @@ mod tests { assert_eq!(let_statement.pattern.to_string(), "x"); } - #[test] - fn parses_assert() { - let src = "assert(true, \"good\")"; - let statement = parse_statement_no_errors(src); - let StatementKind::Constrain(constrain) = statement.kind else { - panic!("Expected constrain statement"); - }; - assert_eq!(constrain.kind, ConstrainKind::Assert); - assert_eq!(constrain.arguments.len(), 2); - } - - #[test] - fn parses_assert_eq() { - let src = "assert_eq(1, 2, \"bad\")"; - let statement = parse_statement_no_errors(src); - let StatementKind::Constrain(constrain) = statement.kind else { - panic!("Expected constrain statement"); - }; - assert_eq!(constrain.kind, ConstrainKind::AssertEq); - assert_eq!(constrain.arguments.len(), 3); - } - - #[test] - fn parses_constrain() { - let src = " - constrain 1 - ^^^^^^^^^ - "; - let (src, span) = get_source_with_error_span(src); - let mut parser = Parser::for_str(&src); - let statement = parser.parse_statement_or_error(); - let StatementKind::Constrain(constrain) = statement.kind else { - panic!("Expected constrain statement"); - }; - assert_eq!(constrain.kind, ConstrainKind::Constrain); - assert_eq!(constrain.arguments.len(), 1); - - let reason = get_single_error_reason(&parser.errors, span); - assert!(matches!(reason, ParserErrorReason::ConstrainDeprecated)); - } - #[test] fn parses_comptime_block() { let src = "comptime { 1 }"; @@ -851,4 +759,15 @@ mod tests { }; assert_eq!(block.statements.len(), 2); } + + #[test] + fn parses_let_with_assert() { + let src = "let _ = assert(true);"; + let mut 
parser = Parser::for_str(src); + let statement = parser.parse_statement_or_error(); + let StatementKind::Let(let_statement) = statement.kind else { + panic!("Expected let"); + }; + assert!(matches!(let_statement.expression.kind, ExpressionKind::Constrain(..))); + } } diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index ed6321dbe50..cda6c267ec7 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -900,7 +900,6 @@ fn find_lambda_captures(stmts: &[StmtId], interner: &NodeInterner, result: &mut HirStatement::Expression(expr_id) => expr_id, HirStatement::Let(let_stmt) => let_stmt.expression, HirStatement::Assign(assign_stmt) => assign_stmt.expression, - HirStatement::Constrain(constr_stmt) => constr_stmt.0, HirStatement::Semi(semi_expr) => semi_expr, HirStatement::For(for_loop) => for_loop.block, HirStatement::Loop(block) => block, @@ -4347,3 +4346,21 @@ fn call_function_alias_type() { "#; assert_no_errors(src); } + +#[test] +fn errors_on_if_without_else_type_mismatch() { + let src = r#" + fn main() { + if true { + 1 + } + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::Context { err, .. }) = &errors[0].0 else { + panic!("Expected a Context error"); + }; + assert!(matches!(**err, TypeCheckError::TypeMismatch { .. })); +} diff --git a/noir_stdlib/src/cmp.nr b/noir_stdlib/src/cmp.nr index 7f19594c30e..16bcd4390f7 100644 --- a/noir_stdlib/src/cmp.nr +++ b/noir_stdlib/src/cmp.nr @@ -360,13 +360,7 @@ where let mut result = Ordering::equal(); for i in 0..self.len() { if result == Ordering::equal() { - let result_i = self[i].cmp(other[i]); - - if result_i == Ordering::less() { - result = result_i; - } else if result_i == Ordering::greater() { - result = result_i; - } + result = self[i].cmp(other[i]); } } result @@ -383,13 +377,7 @@ where let mut result = self.len().cmp(other.len()); for i in 0..self.len() { if result == Ordering::equal() { - let result_i = self[i].cmp(other[i]); - - if result_i == Ordering::less() { - result = result_i; - } else if result_i == Ordering::greater() { - result = result_i; - } + result = self[i].cmp(other[i]); } } result diff --git a/test_programs/compilation_report.sh b/test_programs/compilation_report.sh index 66f1a53626e..6f7ef254477 100755 --- a/test_programs/compilation_report.sh +++ b/test_programs/compilation_report.sh @@ -8,7 +8,7 @@ base_path="$current_dir/execution_success" # Tests to be profiled for compilation report tests_to_profile=("sha256_regression" "regression_4709" "ram_blowup_regression" "global_var_regression_entry_points") -echo "{\"compilation_reports\": [ " > $current_dir/compilation_report.json +echo "[ " > $current_dir/compilation_report.json # If there is an argument that means we want to generate a report for only the current directory if [ "$1" == "1" ]; then @@ -62,7 +62,7 @@ for dir in ${tests_to_profile[@]}; do printf "%.3f\n", 0 }' <<<"${TIMES[@]}") - jq -rc "{artifact_name: \"$PACKAGE_NAME\", time: \""$AVG_TIME"s\"}" --null-input >> $current_dir/compilation_report.json + jq -rc "{name: \"$PACKAGE_NAME\", value: \""$AVG_TIME"\" | tonumber, unit: \"s\"}" --null-input >> $current_dir/compilation_report.json if (($ITER != $NUM_ARTIFACTS)); then echo "," >> $current_dir/compilation_report.json @@ -73,4 +73,4 @@ for dir in ${tests_to_profile[@]}; do ITER=$(( $ITER + 1 )) done -echo "]}" >> $current_dir/compilation_report.json +echo "]" >> $current_dir/compilation_report.json diff 
--git a/test_programs/compile_success_with_bug/regression_7103/Nargo.toml b/test_programs/compile_success_with_bug/regression_7103/Nargo.toml new file mode 100644 index 00000000000..fcffef80530 --- /dev/null +++ b/test_programs/compile_success_with_bug/regression_7103/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_7103" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] diff --git a/test_programs/compile_success_with_bug/regression_7103/src/main.nr b/test_programs/compile_success_with_bug/regression_7103/src/main.nr new file mode 100644 index 00000000000..7ce01c2b079 --- /dev/null +++ b/test_programs/compile_success_with_bug/regression_7103/src/main.nr @@ -0,0 +1,16 @@ +fn main() { + /// Safety: n/a + unsafe { loophole() }; +} + + +unconstrained fn loophole() { + let mut i = 0; + loop { + println(i); + i += 1; + if false { + break; + } + } +} \ No newline at end of file diff --git a/test_programs/execution_report.sh b/test_programs/execution_report.sh index dcdc1bb8879..5c916ef6bd7 100755 --- a/test_programs/execution_report.sh +++ b/test_programs/execution_report.sh @@ -8,7 +8,7 @@ base_path="$current_dir/execution_success" # Tests to be profiled for execution report tests_to_profile=("sha256_regression" "regression_4709" "ram_blowup_regression" "global_var_regression_entry_points") -echo "{\"execution_reports\": [ " > $current_dir/execution_report.json +echo "[" > $current_dir/execution_report.json # If there is an argument that means we want to generate a report for only the current directory if [ "$1" == "1" ]; then @@ -70,7 +70,7 @@ for dir in ${tests_to_profile[@]}; do printf "%.3f\n", 0 }' <<<"${TIMES[@]}") - jq -rc "{artifact_name: \"$PACKAGE_NAME\", time: \""$AVG_TIME"s\"}" --null-input >> $current_dir/execution_report.json + jq -rc "{name: \"$PACKAGE_NAME\", value: \""$AVG_TIME"\" | tonumber, unit: \"s\"}" --null-input >> $current_dir/execution_report.json if (($ITER != $NUM_ARTIFACTS)); then echo "," >> $current_dir/execution_report.json @@ -81,4 +81,4 @@ for dir in ${tests_to_profile[@]}; do ITER=$(( $ITER + 1 )) done -echo "]}" >> $current_dir/execution_report.json +echo "]" >> $current_dir/execution_report.json diff --git a/test_programs/execution_success/signed_cmp/Prover.toml b/test_programs/execution_success/signed_cmp/Prover.toml index 4b719f83c16..2761799a9cc 100644 --- a/test_programs/execution_success/signed_cmp/Prover.toml +++ b/test_programs/execution_success/signed_cmp/Prover.toml @@ -1 +1 @@ -minus_one = 255 +minus_one = -1 diff --git a/test_programs/execution_success/signed_div/Prover.toml b/test_programs/execution_success/signed_div/Prover.toml index be93fec5cc3..ac0e08269cf 100644 --- a/test_programs/execution_success/signed_div/Prover.toml +++ b/test_programs/execution_success/signed_div/Prover.toml @@ -1,51 +1,51 @@ [[ops]] lhs = 4 -rhs = 255 # -1 -result = 252 # -4 +rhs = -1 +result = -4 [[ops]] lhs = 4 -rhs = 254 # -2 -result = 254 # -2 +rhs = -2 +result = -2 [[ops]] lhs = 4 -rhs = 253 # -3 -result = 255 # -1 +rhs = -3 +result = -1 [[ops]] lhs = 4 -rhs = 252 # -4 -result = 255 # -1 +rhs = -4 +result = -1 [[ops]] lhs = 4 -rhs = 251 # -5 +rhs = -5 result = 0 [[ops]] -lhs = 252 # -4 -rhs = 255 # -1 +lhs = -4 +rhs = -1 result = 4 [[ops]] -lhs = 252 # -4 -rhs = 254 # -2 +lhs = -4 +rhs = -2 result = 2 [[ops]] -lhs = 252 # -4 -rhs = 253 # -3 +lhs = -4 +rhs = -3 result = 1 [[ops]] -lhs = 252 # -4 -rhs = 252 # -4 +lhs = -4 +rhs = -4 result = 1 [[ops]] -lhs = 252 # -4 -rhs = 251 # -5 +lhs = -4 +rhs = -5 result = 0 diff --git 
a/test_programs/memory_report.sh b/test_programs/memory_report.sh index 00065fbfb36..eb83004affd 100755 --- a/test_programs/memory_report.sh +++ b/test_programs/memory_report.sh @@ -22,7 +22,7 @@ fi FIRST="1" FLAGS=${FLAGS:- ""} -echo "{\"memory_reports\": [ " > memory_report.json +echo "[" > memory_report.json for test_name in ${tests_to_profile[@]}; do cd $base_path/$test_name @@ -57,8 +57,9 @@ for test_name in ${tests_to_profile[@]}; do peak=${consumption:30:len} rm $current_dir/$test_name"_heap_analysis.txt" peak_memory=$($PARSE_MEMORY $peak) - echo -e " {\n \"artifact_name\":\"$test_name\",\n \"peak_memory\":\"$peak_memory\"\n }" >> $current_dir"/memory_report.json" + jq -rc "{name: \"$test_name\", value: \"$peak_memory\" | tonumber, unit: \"MB\"}" --null-input >> $current_dir/memory_report.json + done -echo "]}" >> $current_dir"/memory_report.json" +echo "]" >> $current_dir"/memory_report.json" diff --git a/tooling/lsp/src/requests/inlay_hint.rs b/tooling/lsp/src/requests/inlay_hint.rs index 8e091d1eb04..b9673755da6 100644 --- a/tooling/lsp/src/requests/inlay_hint.rs +++ b/tooling/lsp/src/requests/inlay_hint.rs @@ -575,6 +575,7 @@ fn get_expression_name(expression: &Expression) -> Option { ExpressionKind::Parenthesized(expr) => get_expression_name(expr), ExpressionKind::AsTraitPath(path) => Some(path.impl_item.to_string()), ExpressionKind::TypePath(path) => Some(path.item.to_string()), + ExpressionKind::Constrain(constrain) => Some(constrain.kind.to_string()), ExpressionKind::Constructor(..) | ExpressionKind::Infix(..) | ExpressionKind::Index(..) diff --git a/tooling/lsp/src/requests/signature_help.rs b/tooling/lsp/src/requests/signature_help.rs index 99bd463f44a..4a2609d7ae3 100644 --- a/tooling/lsp/src/requests/signature_help.rs +++ b/tooling/lsp/src/requests/signature_help.rs @@ -8,7 +8,7 @@ use lsp_types::{ use noirc_errors::{Location, Span}; use noirc_frontend::{ ast::{ - CallExpression, ConstrainKind, ConstrainStatement, Expression, FunctionReturnType, + CallExpression, ConstrainExpression, ConstrainKind, Expression, FunctionReturnType, MethodCallExpression, Statement, Visitor, }, hir_def::{function::FuncMeta, stmt::HirPattern}, @@ -383,7 +383,7 @@ impl<'a> Visitor for SignatureFinder<'a> { false } - fn visit_constrain_statement(&mut self, constrain_statement: &ConstrainStatement) -> bool { + fn visit_constrain_statement(&mut self, constrain_statement: &ConstrainExpression) -> bool { constrain_statement.accept_children(self); if self.signature_help.is_some() { diff --git a/tooling/nargo_fmt/src/formatter/expression.rs b/tooling/nargo_fmt/src/formatter/expression.rs index 98eabe10e7e..54d9d2e41f5 100644 --- a/tooling/nargo_fmt/src/formatter/expression.rs +++ b/tooling/nargo_fmt/src/formatter/expression.rs @@ -1,9 +1,10 @@ use noirc_frontend::{ ast::{ ArrayLiteral, BinaryOpKind, BlockExpression, CallExpression, CastExpression, - ConstructorExpression, Expression, ExpressionKind, IfExpression, IndexExpression, - InfixExpression, Lambda, Literal, MatchExpression, MemberAccessExpression, - MethodCallExpression, PrefixExpression, TypePath, UnaryOp, UnresolvedTypeData, + ConstrainExpression, ConstrainKind, ConstructorExpression, Expression, ExpressionKind, + IfExpression, IndexExpression, InfixExpression, Lambda, Literal, MatchExpression, + MemberAccessExpression, MethodCallExpression, PrefixExpression, TypePath, UnaryOp, + UnresolvedTypeData, }, token::{Keyword, Token}, }; @@ -39,6 +40,9 @@ impl<'a, 'b> ChunkFormatter<'a, 'b> { ExpressionKind::MethodCall(method_call) => { 
group.group(self.format_method_call(*method_call)); } + ExpressionKind::Constrain(constrain) => { + group.group(self.format_constrain(constrain)); + } ExpressionKind::Constructor(constructor) => { group.group(self.format_constructor(*constructor)); } @@ -1145,6 +1149,40 @@ impl<'a, 'b> ChunkFormatter<'a, 'b> { group } + fn format_constrain(&mut self, constrain_statement: ConstrainExpression) -> ChunkGroup { + let mut group = ChunkGroup::new(); + + let keyword = match constrain_statement.kind { + ConstrainKind::Assert => Keyword::Assert, + ConstrainKind::AssertEq => Keyword::AssertEq, + ConstrainKind::Constrain => { + unreachable!("constrain always produces an error, and the formatter doesn't run when there are errors") + } + }; + + group.text(self.chunk(|formatter| { + formatter.write_keyword(keyword); + formatter.write_left_paren(); + })); + + group.kind = GroupKind::ExpressionList { + prefix_width: group.width(), + expressions_count: constrain_statement.arguments.len(), + }; + + self.format_expressions_separated_by_comma( + constrain_statement.arguments, + false, // force trailing comma + &mut group, + ); + + group.text(self.chunk(|formatter| { + formatter.write_right_paren(); + })); + + group + } + pub(super) fn format_block_expression( &mut self, block: BlockExpression, diff --git a/tooling/nargo_fmt/src/formatter/statement.rs b/tooling/nargo_fmt/src/formatter/statement.rs index 751bc419d4a..ae4177c224b 100644 --- a/tooling/nargo_fmt/src/formatter/statement.rs +++ b/tooling/nargo_fmt/src/formatter/statement.rs @@ -1,8 +1,7 @@ use noirc_frontend::{ ast::{ - AssignStatement, ConstrainKind, ConstrainStatement, Expression, ExpressionKind, - ForLoopStatement, ForRange, LetStatement, Pattern, Statement, StatementKind, - UnresolvedType, UnresolvedTypeData, + AssignStatement, Expression, ExpressionKind, ForLoopStatement, ForRange, LetStatement, + Pattern, Statement, StatementKind, UnresolvedType, UnresolvedTypeData, }, token::{Keyword, SecondaryAttribute, Token, TokenKind}, }; @@ -49,9 +48,6 @@ impl<'a, 'b> ChunkFormatter<'a, 'b> { StatementKind::Let(let_statement) => { group.group(self.format_let_statement(let_statement)); } - StatementKind::Constrain(constrain_statement) => { - group.group(self.format_constrain_statement(constrain_statement)); - } StatementKind::Expression(expression) => match expression.kind { ExpressionKind::Block(block) => group.group(self.format_block_expression( block, true, // force multiple lines @@ -153,44 +149,6 @@ impl<'a, 'b> ChunkFormatter<'a, 'b> { group } - fn format_constrain_statement( - &mut self, - constrain_statement: ConstrainStatement, - ) -> ChunkGroup { - let mut group = ChunkGroup::new(); - - let keyword = match constrain_statement.kind { - ConstrainKind::Assert => Keyword::Assert, - ConstrainKind::AssertEq => Keyword::AssertEq, - ConstrainKind::Constrain => { - unreachable!("constrain always produces an error, and the formatter doesn't run when there are errors") - } - }; - - group.text(self.chunk(|formatter| { - formatter.write_keyword(keyword); - formatter.write_left_paren(); - })); - - group.kind = GroupKind::ExpressionList { - prefix_width: group.width(), - expressions_count: constrain_statement.arguments.len(), - }; - - self.format_expressions_separated_by_comma( - constrain_statement.arguments, - false, // force trailing comma - &mut group, - ); - - group.text(self.chunk(|formatter| { - formatter.write_right_paren(); - formatter.write_semicolon(); - })); - - group - } - fn format_assign(&mut self, assign_statement: AssignStatement) -> 
ChunkGroup { let mut group = ChunkGroup::new(); let mut is_op_assign = false; diff --git a/tooling/noir_js/test/node/cjs.test.cjs b/tooling/noir_js/test/node/cjs.test.cjs index 8698f9dfe2b..bbedd748203 100644 --- a/tooling/noir_js/test/node/cjs.test.cjs +++ b/tooling/noir_js/test/node/cjs.test.cjs @@ -59,7 +59,7 @@ it('0x prefixed string input for inputs will throw', async () => { }); describe('input validation', () => { - it('x should be a uint64 not a string', async () => { + it('x should be a int64 not a string', async () => { const inputs = { x: 'foo', y: '3', @@ -67,13 +67,13 @@ describe('input validation', () => { try { await new Noir(assert_lt_json).execute(inputs); - chai.expect.fail('Expected generatedWitness to throw, due to x not being convertible to a uint64'); + chai.expect.fail('Expected generatedWitness to throw, due to x not being convertible to a int64'); } catch (error) { const knownError = error; chai .expect(knownError.message) .to.equal( - 'Expected witness values to be integers, provided value causes `invalid digit found in string` error', + 'The value passed for parameter `x` is invalid:\nExpected witness values to be integers, but `foo` failed with `invalid digit found in string`', ); } }); diff --git a/tooling/noir_js/test/node/smoke.test.ts b/tooling/noir_js/test/node/smoke.test.ts index 6993a44f66e..dc04388e7ca 100644 --- a/tooling/noir_js/test/node/smoke.test.ts +++ b/tooling/noir_js/test/node/smoke.test.ts @@ -58,7 +58,7 @@ it('0x prefixed string input for inputs will throw', async () => { }); describe('input validation', () => { - it('x should be a uint64 not a string', async () => { + it('x should be a int64 not a string', async () => { const inputs = { x: 'foo', y: '3', @@ -66,11 +66,11 @@ describe('input validation', () => { try { await new Noir(assert_lt_program).execute(inputs); - expect.fail('Expected generatedWitness to throw, due to x not being convertible to a uint64'); + expect.fail('Expected generatedWitness to throw, due to x not being convertible to a int64'); } catch (error) { const knownError = error as Error; expect(knownError.message).to.equal( - 'Expected witness values to be integers, provided value causes `invalid digit found in string` error', + 'The value passed for parameter `x` is invalid:\nExpected witness values to be integers, but `foo` failed with `invalid digit found in string`', ); } }); diff --git a/tooling/noirc_abi/src/errors.rs b/tooling/noirc_abi/src/errors.rs index 4209a9e218b..c46945d8ff2 100644 --- a/tooling/noirc_abi/src/errors.rs +++ b/tooling/noirc_abi/src/errors.rs @@ -2,17 +2,26 @@ use crate::{ input_parser::{InputTypecheckingError, InputValue}, AbiType, }; -use acvm::acir::native_types::Witness; +use acvm::{acir::native_types::Witness, AcirField, FieldElement}; use thiserror::Error; #[derive(Debug, Error)] pub enum InputParserError { #[error("input file is badly formed, could not parse, {0}")] ParseInputMap(String), - #[error("Expected witness values to be integers, provided value causes `{0}` error")] - ParseStr(String), - #[error("Could not parse hex value {0}")] - ParseHexStr(String), + #[error( + "The value passed for parameter `{arg_name}` is invalid:\nExpected witness values to be integers, but `{value}` failed with `{error}`" + )] + ParseStr { arg_name: String, value: String, error: String }, + #[error("The value passed for parameter `{arg_name}` is invalid:\nValue {value} is less than minimum allowed value of {min}")] + InputUnderflowsMinimum { arg_name: String, value: String, min: String }, + #[error("The 
value passed for parameter `{arg_name}` is invalid:\nValue {value} exceeds maximum allowed value of {max}")] + InputOverflowsMaximum { arg_name: String, value: String, max: String }, + #[error( + "The value passed for parameter `{arg_name}` is invalid:\nValue {value} exceeds field modulus. Values must fall within [0, {})", + FieldElement::modulus() + )] + InputExceedsFieldModulus { arg_name: String, value: String }, #[error("cannot parse value into {0:?}")] AbiTypeMismatch(AbiType), #[error("Expected argument `{0}`, but none was found")] diff --git a/tooling/noirc_abi/src/input_parser/json.rs b/tooling/noirc_abi/src/input_parser/json.rs index 760c06c3867..82986d05f59 100644 --- a/tooling/noirc_abi/src/input_parser/json.rs +++ b/tooling/noirc_abi/src/input_parser/json.rs @@ -1,4 +1,7 @@ -use super::{field_to_signed_hex, parse_str_to_field, parse_str_to_signed, InputValue}; +use super::{ + field_to_signed_hex, parse_integer_to_signed, parse_str_to_field, parse_str_to_signed, + InputValue, +}; use crate::{errors::InputParserError, Abi, AbiType, MAIN_RETURN_NAME}; use acvm::{AcirField, FieldElement}; use iter_extended::{try_btree_map, try_vecmap}; @@ -69,9 +72,9 @@ pub enum JsonTypes { // Just a regular integer, that can fit in 64 bits. // // The JSON spec does not specify any limit on the size of integer number types, - // however we restrict the allowable size. Values which do not fit in a u64 should be passed + // however we restrict the allowable size. Values which do not fit in a i64 should be passed // as a string. - Integer(u64), + Integer(i64), // Simple boolean flag Bool(bool), // Array of JsonTypes @@ -147,12 +150,20 @@ impl InputValue { let input_value = match (value, param_type) { (JsonTypes::String(string), AbiType::String { .. }) => InputValue::String(string), (JsonTypes::String(string), AbiType::Integer { sign: crate::Sign::Signed, width }) => { - InputValue::Field(parse_str_to_signed(&string, *width)?) + InputValue::Field(parse_str_to_signed(&string, *width, arg_name)?) } ( JsonTypes::String(string), AbiType::Field | AbiType::Integer { .. } | AbiType::Boolean, - ) => InputValue::Field(parse_str_to_field(&string)?), + ) => InputValue::Field(parse_str_to_field(&string, arg_name)?), + + ( + JsonTypes::Integer(integer), + AbiType::Integer { sign: crate::Sign::Signed, width }, + ) => { + let new_value = parse_integer_to_signed(integer as i128, *width, arg_name)?; + InputValue::Field(new_value) + } ( JsonTypes::Integer(integer), @@ -166,8 +177,13 @@ impl InputValue { (JsonTypes::Bool(boolean), AbiType::Boolean) => InputValue::Field(boolean.into()), (JsonTypes::Array(array), AbiType::Array { typ, .. 
}) => { - let array_elements = - try_vecmap(array, |value| InputValue::try_from_json(value, typ, arg_name))?; + let mut index = 0; + let array_elements = try_vecmap(array, |value| { + let sub_name = format!("{arg_name}[{index}]"); + let value = InputValue::try_from_json(value, typ, &sub_name); + index += 1; + value + })?; InputValue::Vec(array_elements) } @@ -186,8 +202,12 @@ impl InputValue { } (JsonTypes::Array(array), AbiType::Tuple { fields }) => { + let mut index = 0; let tuple_fields = try_vecmap(array.into_iter().zip(fields), |(value, typ)| { - InputValue::try_from_json(value, typ, arg_name) + let sub_name = format!("{arg_name}[{index}]"); + let value = InputValue::try_from_json(value, typ, &sub_name); + index += 1; + value })?; InputValue::Vec(tuple_fields) } @@ -201,11 +221,13 @@ impl InputValue { #[cfg(test)] mod test { + use acvm::FieldElement; use proptest::prelude::*; use crate::{ arbitrary::arb_abi_and_input_map, input_parser::{arbitrary::arb_signed_integer_type_and_value, json::JsonTypes, InputValue}, + AbiType, }; use super::{parse_json, serialize_to_json}; @@ -234,4 +256,26 @@ mod test { prop_assert_eq!(output_number, value); } } + + #[test] + fn errors_on_integer_to_signed_integer_overflow() { + let typ = AbiType::Integer { sign: crate::Sign::Signed, width: 8 }; + let input = JsonTypes::Integer(128); + assert!(InputValue::try_from_json(input, &typ, "foo").is_err()); + + let typ = AbiType::Integer { sign: crate::Sign::Signed, width: 16 }; + let input = JsonTypes::Integer(32768); + assert!(InputValue::try_from_json(input, &typ, "foo").is_err()); + } + + #[test] + fn try_from_json_negative_integer() { + let typ = AbiType::Integer { sign: crate::Sign::Signed, width: 8 }; + let input = JsonTypes::Integer(-1); + let InputValue::Field(field) = InputValue::try_from_json(input, &typ, "foo").unwrap() + else { + panic!("Expected field"); + }; + assert_eq!(field, FieldElement::from(255_u128)); + } } diff --git a/tooling/noirc_abi/src/input_parser/mod.rs b/tooling/noirc_abi/src/input_parser/mod.rs index b7732235eb2..565870b0835 100644 --- a/tooling/noirc_abi/src/input_parser/mod.rs +++ b/tooling/noirc_abi/src/input_parser/mod.rs @@ -304,25 +304,35 @@ mod serialization_tests { } } -fn parse_str_to_field(value: &str) -> Result { +fn parse_str_to_field(value: &str, arg_name: &str) -> Result { let big_num = if let Some(hex) = value.strip_prefix("0x") { BigUint::from_str_radix(hex, 16) } else { BigUint::from_str_radix(value, 10) }; - big_num.map_err(|err_msg| InputParserError::ParseStr(err_msg.to_string())).and_then(|bigint| { - if bigint < FieldElement::modulus() { - Ok(field_from_big_uint(bigint)) - } else { - Err(InputParserError::ParseStr(format!( - "Input exceeds field modulus. 
Values must fall within [0, {})", - FieldElement::modulus(), - ))) - } - }) + big_num + .map_err(|err_msg| InputParserError::ParseStr { + arg_name: arg_name.into(), + value: value.into(), + error: err_msg.to_string(), + }) + .and_then(|bigint| { + if bigint < FieldElement::modulus() { + Ok(field_from_big_uint(bigint)) + } else { + Err(InputParserError::InputExceedsFieldModulus { + arg_name: arg_name.into(), + value: value.to_string(), + }) + } + }) } -fn parse_str_to_signed(value: &str, width: u32) -> Result { +fn parse_str_to_signed( + value: &str, + width: u32, + arg_name: &str, +) -> Result { let big_num = if let Some(hex) = value.strip_prefix("-0x") { BigInt::from_str_radix(hex, 16).map(|value| -value) } else if let Some(hex) = value.strip_prefix("0x") { @@ -331,22 +341,75 @@ fn parse_str_to_signed(value: &str, width: u32) -> Result max { + return Err(InputParserError::InputOverflowsMaximum { + arg_name: arg_name.into(), + value: bigint.to_string(), + max: max.to_string(), + }); + } + + let modulus: BigInt = FieldElement::modulus().into(); + let bigint = if bigint.sign() == num_bigint::Sign::Minus { + BigInt::from(2).pow(width) + bigint + } else { + bigint + }; + if bigint.is_zero() || (bigint.sign() == num_bigint::Sign::Plus && bigint < modulus) { + Ok(field_from_big_int(bigint)) + } else { + Err(InputParserError::InputExceedsFieldModulus { + arg_name: arg_name.into(), + value: value.to_string(), + }) + } + }) +} + +fn parse_integer_to_signed( + integer: i128, + width: u32, + arg_name: &str, +) -> Result { + let min = -(1 << (width - 1)); + let max = (1 << (width - 1)) - 1; + + if integer < min { + return Err(InputParserError::InputUnderflowsMinimum { + arg_name: arg_name.into(), + value: integer.to_string(), + min: min.to_string(), + }); + } + + if integer > max { + return Err(InputParserError::InputOverflowsMaximum { + arg_name: arg_name.into(), + value: integer.to_string(), + max: max.to_string(), + }); + } + + let integer = if integer < 0 { (1 << width) + integer } else { integer }; + Ok(FieldElement::from(integer as u128)) } fn field_from_big_uint(bigint: BigUint) -> FieldElement { @@ -390,7 +453,7 @@ mod test { #[test] fn parse_empty_str_fails() { // Check that this fails appropriately rather than being treated as 0, etc. 
- assert!(parse_str_to_field("").is_err()); + assert!(parse_str_to_field("", "arg_name").is_err()); } #[test] @@ -405,11 +468,11 @@ mod test { for field in fields { let hex_field = format!("0x{}", field.to_hex()); - let field_from_hex = parse_str_to_field(&hex_field).unwrap(); + let field_from_hex = parse_str_to_field(&hex_field, "arg_name").unwrap(); assert_eq!(field_from_hex, field); let dec_field = big_uint_from_field(field).to_string(); - let field_from_dec = parse_str_to_field(&dec_field).unwrap(); + let field_from_dec = parse_str_to_field(&dec_field, "arg_name").unwrap(); assert_eq!(field_from_dec, field); } } @@ -417,19 +480,46 @@ mod test { #[test] fn rejects_noncanonical_fields() { let noncanonical_field = FieldElement::modulus().to_string(); - assert!(parse_str_to_field(&noncanonical_field).is_err()); + assert!(parse_str_to_field(&noncanonical_field, "arg_name").is_err()); } #[test] fn test_parse_str_to_signed() { - let value = parse_str_to_signed("1", 8).unwrap(); + let value = parse_str_to_signed("1", 8, "arg_name").unwrap(); assert_eq!(value, FieldElement::from(1_u128)); - let value = parse_str_to_signed("-1", 8).unwrap(); + let value = parse_str_to_signed("-1", 8, "arg_name").unwrap(); assert_eq!(value, FieldElement::from(255_u128)); - let value = parse_str_to_signed("-1", 16).unwrap(); + let value = parse_str_to_signed("-1", 16, "arg_name").unwrap(); assert_eq!(value, FieldElement::from(65535_u128)); + + assert_eq!( + parse_str_to_signed("127", 8, "arg_name").unwrap(), + FieldElement::from(127_i128) + ); + assert!(parse_str_to_signed("128", 8, "arg_name").is_err()); + assert_eq!( + parse_str_to_signed("-128", 8, "arg_name").unwrap(), + FieldElement::from(128_i128) + ); + assert_eq!(parse_str_to_signed("-1", 8, "arg_name").unwrap(), FieldElement::from(255_i128)); + assert!(parse_str_to_signed("-129", 8, "arg_name").is_err()); + + assert_eq!( + parse_str_to_signed("32767", 16, "arg_name").unwrap(), + FieldElement::from(32767_i128) + ); + assert!(parse_str_to_signed("32768", 16, "arg_name").is_err()); + assert_eq!( + parse_str_to_signed("-32768", 16, "arg_name").unwrap(), + FieldElement::from(32768_i128) + ); + assert_eq!( + parse_str_to_signed("-1", 16, "arg_name").unwrap(), + FieldElement::from(65535_i128) + ); + assert!(parse_str_to_signed("-32769", 16, "arg_name").is_err()); } } diff --git a/tooling/noirc_abi/src/input_parser/toml.rs b/tooling/noirc_abi/src/input_parser/toml.rs index 6f2be68a0c4..aaa3358f4fc 100644 --- a/tooling/noirc_abi/src/input_parser/toml.rs +++ b/tooling/noirc_abi/src/input_parser/toml.rs @@ -1,4 +1,7 @@ -use super::{field_to_signed_hex, parse_str_to_field, parse_str_to_signed, InputValue}; +use super::{ + field_to_signed_hex, parse_integer_to_signed, parse_str_to_field, parse_str_to_signed, + InputValue, +}; use crate::{errors::InputParserError, Abi, AbiType, MAIN_RETURN_NAME}; use acvm::{AcirField, FieldElement}; use iter_extended::{try_btree_map, try_vecmap}; @@ -68,7 +71,7 @@ enum TomlTypes { String(String), // Just a regular integer, that can fit in 64 bits // Note that the toml spec specifies that all numbers are represented as `i64`s. - Integer(u64), + Integer(i64), // Simple boolean flag Bool(bool), // Array of TomlTypes @@ -135,15 +138,23 @@ impl InputValue { AbiType::Field | AbiType::Integer { sign: crate::Sign::Unsigned, .. 
} | AbiType::Boolean, - ) => InputValue::Field(parse_str_to_field(&string)?), + ) => InputValue::Field(parse_str_to_field(&string, arg_name)?), (TomlTypes::String(string), AbiType::Integer { sign: crate::Sign::Signed, width }) => { - InputValue::Field(parse_str_to_signed(&string, *width)?) + InputValue::Field(parse_str_to_signed(&string, *width, arg_name)?) } + ( + TomlTypes::Integer(integer), + AbiType::Integer { sign: crate::Sign::Signed, width }, + ) => { + let new_value = parse_integer_to_signed(integer as i128, *width, arg_name)?; + InputValue::Field(new_value) + } + ( TomlTypes::Integer(integer), AbiType::Field | AbiType::Integer { .. } | AbiType::Boolean, ) => { - let new_value = FieldElement::from(u128::from(integer)); + let new_value = FieldElement::from(i128::from(integer)); InputValue::Field(new_value) } @@ -151,8 +162,13 @@ impl InputValue { (TomlTypes::Bool(boolean), AbiType::Boolean) => InputValue::Field(boolean.into()), (TomlTypes::Array(array), AbiType::Array { typ, .. }) => { - let array_elements = - try_vecmap(array, |value| InputValue::try_from_toml(value, typ, arg_name))?; + let mut index = 0; + let array_elements = try_vecmap(array, |value| { + let sub_name = format!("{arg_name}[{index}]"); + let value = InputValue::try_from_toml(value, typ, &sub_name); + index += 1; + value + })?; InputValue::Vec(array_elements) } @@ -171,8 +187,12 @@ impl InputValue { } (TomlTypes::Array(array), AbiType::Tuple { fields }) => { + let mut index = 0; let tuple_fields = try_vecmap(array.into_iter().zip(fields), |(value, typ)| { - InputValue::try_from_toml(value, typ, arg_name) + let sub_name = format!("{arg_name}[{index}]"); + let value = InputValue::try_from_toml(value, typ, &sub_name); + index += 1; + value })?; InputValue::Vec(tuple_fields) } @@ -186,11 +206,13 @@ impl InputValue { #[cfg(test)] mod test { + use acvm::FieldElement; use proptest::prelude::*; use crate::{ arbitrary::arb_abi_and_input_map, input_parser::{arbitrary::arb_signed_integer_type_and_value, toml::TomlTypes, InputValue}, + AbiType, }; use super::{parse_toml, serialize_to_toml}; @@ -219,4 +241,26 @@ mod test { prop_assert_eq!(output_number, value); } } + + #[test] + fn errors_on_integer_to_signed_integer_overflow() { + let typ = AbiType::Integer { sign: crate::Sign::Signed, width: 8 }; + let input = TomlTypes::Integer(128); + assert!(InputValue::try_from_toml(input, &typ, "foo").is_err()); + + let typ = AbiType::Integer { sign: crate::Sign::Signed, width: 16 }; + let input = TomlTypes::Integer(32768); + assert!(InputValue::try_from_toml(input, &typ, "foo").is_err()); + } + + #[test] + fn try_from_toml_negative_integer() { + let typ = AbiType::Integer { sign: crate::Sign::Signed, width: 8 }; + let input = TomlTypes::Integer(-1); + let InputValue::Field(field) = InputValue::try_from_toml(input, &typ, "foo").unwrap() + else { + panic!("Expected field"); + }; + assert_eq!(field, FieldElement::from(255_u128)); + } } diff --git a/yarn.lock b/yarn.lock index fa687aa7ced..26298a6e6b4 100644 --- a/yarn.lock +++ b/yarn.lock @@ -14925,7 +14925,7 @@ __metadata: languageName: node linkType: hard -"fsevents@patch:fsevents@2.3.2#~builtin": +"fsevents@patch:fsevents@npm%3A2.3.2#~builtin": version: 2.3.2 resolution: "fsevents@patch:fsevents@npm%3A2.3.2#~builtin::version=2.3.2&hash=df0bf1" dependencies: @@ -20322,27 +20322,27 @@ __metadata: languageName: node linkType: hard -"playwright-core@npm:1.50.0": - version: 1.50.0 - resolution: "playwright-core@npm:1.50.0" +"playwright-core@npm:1.49.0": + version: 1.49.0 + resolution: 
"playwright-core@npm:1.49.0" bin: playwright-core: cli.js - checksum: aca5222d7859039bc579b4b860db57c8adc1cc94c3de990ed08cec911bf888e2decb331560bd456991c98222a55c58526187a2a070e6f101fbef43a8e07e1dea + checksum: d8423ad0cab2e672856529bf6b98b406e7e605da098b847b9b54ee8ebd8d716ed8880a9afff4b38f0a2e3f59b95661c74589116ce3ff2b5e0ae3561507086c94 languageName: node linkType: hard "playwright@npm:^1.22.2": - version: 1.50.0 - resolution: "playwright@npm:1.50.0" + version: 1.49.0 + resolution: "playwright@npm:1.49.0" dependencies: - fsevents: 2.3.2 - playwright-core: 1.50.0 + fsevents: "npm:2.3.2" + playwright-core: "npm:1.49.0" dependenciesMeta: fsevents: optional: true bin: playwright: cli.js - checksum: 44004e3082433f6024665fcf04bd37cda2b284bd5262682a40a60c66943ccf66f68fbc9ca859908dfd0d117235424580a55e9ccd07e2ad9c30df363b6445448b + checksum: f1bfb2fff65cad2ce996edab74ec231dfd21aeb5961554b765ce1eaec27efb87eaba37b00e91ecd27727b82861e5d8c230abe4960e93f6ada8be5ad1020df306 languageName: node linkType: hard