From 673b2a5bda9a6b132095b6a664606a7876146261 Mon Sep 17 00:00:00 2001
From: brockelmore <31553173+brockelmore@users.noreply.github.com>
Date: Fri, 21 Jun 2024 09:26:30 -0700
Subject: [PATCH] Restructure entire repo (#71)

* init restructure
* more restructuring
* range improvements
* dynamic -> reference refactor
* more restructuring
* fix up some Cargo.toml's and delete old stuff
* graph improvements
* docs
* resolve cyclic package deps
* progress on moving structure over
* bring range into graph, get graph and analyzers compiling
* fix up solc-expressions
* pyrometer lib compiling
* it's alive
* move tests
* lint
* Change storagelocation to include block and msg + fix benches
* lint
* fix mermaid graph
* lint
* fix issue 69: cleans up wrapping subtraction; when an assign occurs, the assignee inherits tmp_of from the assignor
* lint
* fix issue 66: when referencing a variable in yul, if the variable is memory based, convert it into the memory offset
* improve mistyped variable name error
* lint
* fixes #51: we were creating a recursive range; this fixes that by removing calls to `update_deps`, which were pointless anyway
* lint
* fix #50
* lint
* performance improvements
* lint
* cleanup
* lint
* remove profile.json
* member access restructure
* lint
* rearchitect intrinsic calls
* lint
* improve error messages
* mostly documentation and some reorg
* split statement and expression parsers
* lint
* fully refactor dynamic types: adds range ops for dynamic types
* works but needs perf improvement
* lint
* recursive caching
* refactor function call
* fixes
* higher support, still slow - need to convert ranges to arena style
* small fix
* add non-useful arena
* init arena
* arena works
* more arena work
* fix binop
* fix cast
* remove debug print
* lint
* simple pure func joining
* lint
* fix returns in pure func joins
* lint
* perf improvements
* dunno
* perf
* lint
* dl fix
* lint
* dl fix2
* lint
* fix(ranges): Clean up range executions (#76)
* idk
* lint
* begin range refactor
* lint
* add range exec tests
* lint
* more refactor
* lint
* refactor range execution
* lint
* further refactor of range exec
* more range work
* lint
* more tests
* lint
* remove commented code
* MOAR: mother of all refactors
* lint
* more improvements
* lint
---
 .gitignore | 3 +-
 Cargo.lock | 1014 ++++----
 Cargo.toml | 96 +-
 LICENSE | 674 -----
 LICENSE-APACHE | 187 ++
 LICENSE-MIT | 20 +
 TODO.md | 2 +-
 cli/Cargo.lock | 1616 ------------
 cli/Cargo.toml | 23 -
 crates/analyzers/Cargo.toml | 18 +
 .../analyzers/src}/bounds.rs | 97 +-
 .../analyzers/src}/func_analyzer/mod.rs | 87 +-
 .../src}/func_analyzer/report_display.rs | 57 +-
 .../mod.rs => crates/analyzers/src/lib.rs | 38 +-
 .../analyzers/src}/var_analyzer/mod.rs | 100 +-
 .../src}/var_analyzer/report_display.rs | 61 +-
 crates/cli/Cargo.toml | 37 +
 {cli => crates/cli}/src/main.rs | 292 ++-
 crates/graph/Cargo.toml | 27 +
 crates/graph/src/graph_elements.rs | 423 +++
 crates/graph/src/lib.rs | 10 +
 {shared => crates/graph}/src/nodes/block.rs | 23 +-
 crates/graph/src/nodes/builtin.rs | 531 ++++
 .../graph}/src/nodes/concrete.rs | 336 ++-
 crates/graph/src/nodes/context/context_tys.rs | 88 +
 .../graph/src/nodes}/context/expr_ret.rs | 79 +-
 crates/graph/src/nodes/context/mod.rs | 18 +
 crates/graph/src/nodes/context/node.rs | 141 +
 crates/graph/src/nodes/context/querying.rs | 254 ++
 crates/graph/src/nodes/context/solving.rs | 160 ++
 crates/graph/src/nodes/context/typing.rs | 88 +
 crates/graph/src/nodes/context/underlying.rs | 331 +++
 crates/graph/src/nodes/context/var/mod.rs | 8 +
crates/graph/src/nodes/context/var/node.rs | 422 +++ crates/graph/src/nodes/context/var/ranging.rs | 450 ++++ crates/graph/src/nodes/context/var/typing.rs | 614 +++++ .../graph/src/nodes/context/var/underlying.rs | 720 ++++++ .../graph/src/nodes/context/var/versioning.rs | 237 ++ crates/graph/src/nodes/context/variables.rs | 323 +++ crates/graph/src/nodes/context/versioning.rs | 533 ++++ .../graph}/src/nodes/contract_ty.rs | 172 +- {shared => crates/graph}/src/nodes/enum_ty.rs | 33 +- {shared => crates/graph}/src/nodes/err_ty.rs | 26 +- {shared => crates/graph}/src/nodes/func_ty.rs | 329 ++- crates/graph/src/nodes/mod.rs | 41 + {shared => crates/graph}/src/nodes/msg.rs | 35 +- crates/graph/src/nodes/source_unit.rs | 146 ++ crates/graph/src/nodes/source_unit_part.rs | 151 ++ .../graph}/src/nodes/struct_ty.rs | 65 +- {shared => crates/graph}/src/nodes/ty_ty.rs | 34 +- {shared => crates/graph}/src/nodes/var_ty.rs | 83 +- crates/graph/src/range/elem/concrete.rs | 284 +++ .../graph/src/range/elem/elem_enum/arena.rs | 216 ++ .../graph/src/range/elem/elem_enum/impls.rs | 686 +++++ crates/graph/src/range/elem/elem_enum/mod.rs | 28 + crates/graph/src/range/elem/elem_enum/ops.rs | 93 + .../src/range/elem/elem_enum/range_elem.rs | 617 +++++ .../graph/src/range/elem/elem_enum/traits.rs | 114 + crates/graph/src/range/elem/elem_trait.rs | 126 + crates/graph/src/range/elem/expr/collapse.rs | 633 +++++ crates/graph/src/range/elem/expr/mod.rs | 728 ++++++ .../graph/src/range/elem/expr/simplify/add.rs | 162 ++ .../graph/src/range/elem/expr/simplify/mod.rs | 55 + .../src/range/elem/expr/simplify/ords.rs | 122 + .../graph/src/range/elem/expr/simplify/sub.rs | 177 ++ crates/graph/src/range/elem/map_or_array.rs | 668 +++++ .../graph/src/range/elem/mod.rs | 174 +- crates/graph/src/range/elem/reference.rs | 468 ++++ crates/graph/src/range/exec/bitwise.rs | 816 ++++++ crates/graph/src/range/exec/cast.rs | 262 ++ crates/graph/src/range/exec/exec_op.rs | 454 ++++ crates/graph/src/range/exec/math_ops/add.rs | 569 +++++ crates/graph/src/range/exec/math_ops/div.rs | 494 ++++ crates/graph/src/range/exec/math_ops/exp.rs | 201 ++ crates/graph/src/range/exec/math_ops/mod.rs | 12 + .../graph/src/range/exec/math_ops/modulo.rs | 329 +++ crates/graph/src/range/exec/math_ops/mul.rs | 507 ++++ crates/graph/src/range/exec/math_ops/sub.rs | 567 +++++ crates/graph/src/range/exec/max.rs | 82 + crates/graph/src/range/exec/mem_ops/concat.rs | 318 +++ .../graph/src/range/exec/mem_ops/mem_get.rs | 409 +++ .../graph/src/range/exec/mem_ops/mem_set.rs | 427 ++++ .../graph/src/range/exec/mem_ops/memcopy.rs | 25 + crates/graph/src/range/exec/mem_ops/mod.rs | 9 + crates/graph/src/range/exec/min.rs | 90 + crates/graph/src/range/exec/mod.rs | 29 + crates/graph/src/range/exec/shift.rs | 213 ++ .../src/range/exec/truthy_ops/logical.rs | 145 ++ crates/graph/src/range/exec/truthy_ops/mod.rs | 5 + crates/graph/src/range/exec/truthy_ops/ord.rs | 333 +++ crates/graph/src/range/exec_traits.rs | 183 ++ crates/graph/src/range/mod.rs | 20 + .../graph}/src/range/range_string.rs | 203 +- crates/graph/src/range/range_trait.rs | 134 + crates/graph/src/range/solc_range.rs | 887 +++++++ crates/graph/src/solvers/atoms.rs | 430 ++++ crates/graph/src/solvers/brute.rs | 941 +++++++ crates/graph/src/solvers/dl.rs | 1215 +++++++++ crates/graph/src/solvers/mod.rs | 6 + crates/graph/src/var_type.rs | 821 ++++++ crates/pyrometer/Cargo.toml | 36 + .../pyrometer/benches}/README.md | 0 .../pyrometer/benches}/flamegraphs/parse.svg | 0 .../pyrometer/benches}/parse.rs | 43 +- 
crates/pyrometer/src/analyzer.rs | 1452 +++++++++++ crates/pyrometer/src/analyzer_backend.rs | 279 ++ {src => crates/pyrometer/src}/builtin_fns.rs | 17 +- crates/pyrometer/src/graph_backend.rs | 698 +++++ crates/pyrometer/src/lib.rs | 6 + .../tests}/benches/flat_comptroller.sol | 0 .../pyrometer/tests}/benches/flat_ctoken.sol | 0 .../pyrometer/tests}/challenges/apron.sol | 10 +- .../tests}/challenges/func_stress.sol | 0 .../challenges/reverse_bound_propogation.sol | 0 crates/pyrometer/tests/helpers.rs | 112 + .../pyrometer/tests}/no_killed_ctxs.rs | 15 +- .../pyrometer/tests}/test_data/abstract.sol | 0 .../pyrometer/tests}/test_data/assembly.sol | 0 .../pyrometer/tests}/test_data/bitwise.sol | 2 +- .../pyrometer/tests}/test_data/cast.sol | 24 +- .../pyrometer/tests}/test_data/const_var.sol | 2 +- .../tests}/test_data/constructor.sol | 4 +- .../pyrometer/tests/test_data/dyn_types.sol | 81 + .../pyrometer/tests}/test_data/env.sol | 0 .../tests}/test_data/func_override.sol | 0 .../tests}/test_data/function_calls.sol | 0 .../pyrometer/tests}/test_data/interface.sol | 0 .../pyrometer/tests}/test_data/intrinsics.sol | 3 +- crates/pyrometer/tests/test_data/join.sol | 35 + .../pyrometer/tests}/test_data/logical.sol | 0 crates/pyrometer/tests/test_data/loops.sol | 59 + .../pyrometer/tests}/test_data/math.sol | 142 +- .../pyrometer/tests}/test_data/modifier.sol | 13 +- .../tests}/test_data/named_func_call.sol | 0 .../pyrometer/tests}/test_data/precedence.sol | 0 .../relative_imports/relative_import.sol | 0 .../tests}/test_data/remapping_import.sol | 0 .../pyrometer/tests}/test_data/remappings.txt | 0 .../tests/test_data/repros/issue50.sol | 12 + .../tests/test_data/repros/issue66.sol | 14 + .../tests/test_data/repros/issue69.sol | 19 + .../tests/test_data/repros/overflow.sol | 37 + .../tests/test_data/repros/overflow2.sol | 25 + .../pyrometer/tests}/test_data/require.sol | 4 - .../pyrometer/tests}/test_data/storage.sol | 2 +- .../pyrometer/tests}/test_data/using.sol | 0 .../pyrometer/tests/test_data/viz/basic.sol | 12 + .../tests/test_data/viz/func_call.sol | 32 + crates/queries/Cargo.toml | 22 + crates/queries/src/lib.rs | 1 + crates/shared/Cargo.toml | 20 + crates/shared/src/analyzer_like.rs | 186 ++ crates/shared/src/gas.rs | 12 + crates/shared/src/graph_like.rs | 188 ++ crates/shared/src/lib.rs | 39 + crates/shared/src/mermaidConfig.json | 3 + crates/shared/src/search.rs | 605 +++++ crates/solc-expressions/Cargo.toml | 25 + crates/solc-expressions/src/array.rs | 485 ++++ crates/solc-expressions/src/assign.rs | 269 ++ crates/solc-expressions/src/bin_op.rs | 747 ++++++ .../solc-expressions/src}/cmp.rs | 283 ++- .../solc-expressions/src}/cond_op.rs | 138 +- .../src/context_builder/expr.rs | 436 ++++ .../src/context_builder/fn_calls.rs | 322 +++ .../src/context_builder/mod.rs | 206 ++ .../src/context_builder/stmt.rs | 572 +++++ .../solc-expressions/src}/env.rs | 60 +- .../src/func_call/func_caller.rs | 608 +++++ .../solc-expressions/src/func_call/helper.rs | 630 +++++ .../src/func_call/internal_call.rs | 335 +++ .../src/func_call/intrinsic_call/abi.rs | 135 + .../src/func_call/intrinsic_call/address.rs | 80 + .../src/func_call/intrinsic_call/array.rs | 357 +++ .../src/func_call/intrinsic_call/block.rs | 61 + .../func_call/intrinsic_call/constructors.rs | 225 ++ .../func_call/intrinsic_call/dyn_builtin.rs | 265 ++ .../intrinsic_call/intrinsic_caller.rs | 320 +++ .../src/func_call/intrinsic_call/mod.rs | 24 + .../src/func_call/intrinsic_call/msg.rs | 45 + .../func_call/intrinsic_call/precompile.rs | 
206 ++ .../src/func_call/intrinsic_call/solidity.rs | 130 + .../src/func_call/intrinsic_call/types.rs | 228 ++ crates/solc-expressions/src/func_call/join.rs | 661 +++++ crates/solc-expressions/src/func_call/mod.rs | 7 + .../src/func_call/modifier.rs | 283 +++ .../src/func_call/namespaced_call.rs | 555 ++++ .../solc-expressions/src/lib.rs | 96 +- .../solc-expressions/src}/list.rs | 89 +- .../solc-expressions/src}/literal.rs | 66 +- .../solc-expressions/src}/loops.rs | 66 +- .../src/member_access/builtin_access.rs | 298 +++ .../src/member_access/contract_access.rs | 164 ++ .../src/member_access/enum_access.rs | 57 + .../src/member_access/func_access.rs | 0 .../src/member_access/library_access.rs | 66 + .../src/member_access/list_access.rs | 299 +++ .../src/member_access/member_trait.rs | 330 +++ .../solc-expressions/src/member_access/mod.rs | 18 + .../src/member_access/struct_access.rs | 74 + .../src/pre_post_in_decrement.rs | 226 ++ .../solc-expressions/src}/require.rs | 1047 +++++--- crates/solc-expressions/src/variable.rs | 525 ++++ crates/solc-expressions/src/yul/mod.rs | 8 + .../solc-expressions/src/yul/yul_builder.rs | 470 ++++ .../solc-expressions/src}/yul/yul_cond_op.rs | 207 +- .../solc-expressions/src}/yul/yul_funcs.rs | 451 ++-- shared/Cargo.lock | 1427 ----------- shared/Cargo.toml | 15 - shared/src/analyzer.rs | 810 ------ shared/src/context/mod.rs | 1663 ------------ shared/src/context/var.rs | 1506 ----------- shared/src/lib.rs | 136 - shared/src/nodes/mod.rs | 1189 --------- shared/src/range/elem_ty.rs | 2258 ----------------- shared/src/range/mod.rs | 749 ------ shared/src/range/range_ops.rs | 1792 ------------- src/context/exprs/array.rs | 183 -- src/context/exprs/bin_op.rs | 797 ------ src/context/exprs/member_access.rs | 1092 -------- src/context/exprs/variable.rs | 135 - src/context/func_call/internal_call.rs | 287 --- src/context/func_call/intrinsic_call.rs | 1060 -------- src/context/func_call/mod.rs | 1174 --------- src/context/func_call/modifier.rs | 34 - src/context/func_call/namespaced_call.rs | 372 --- src/context/mod.rs | 1537 ----------- src/context/queries/mod.rs | 2 - src/context/queries/storage_write/access.rs | 93 - src/context/queries/storage_write/mod.rs | 5 - src/context/queries/storage_write/target.rs | 156 -- src/context/queries/taint.rs | 68 - src/context/yul/mod.rs | 343 --- src/lib.rs | 1134 --------- tests/helpers.rs | 84 - tests/test_data/dyn_types.sol | 41 - tests/test_data/loops.sol | 31 - 237 files changed, 40951 insertions(+), 24583 deletions(-) delete mode 100644 LICENSE create mode 100644 LICENSE-APACHE create mode 100644 LICENSE-MIT delete mode 100644 cli/Cargo.lock delete mode 100644 cli/Cargo.toml create mode 100644 crates/analyzers/Cargo.toml rename {src/context/analyzers => crates/analyzers/src}/bounds.rs (77%) rename {src/context/analyzers => crates/analyzers/src}/func_analyzer/mod.rs (87%) rename {src/context/analyzers => crates/analyzers/src}/func_analyzer/report_display.rs (59%) rename src/context/analyzers/mod.rs => crates/analyzers/src/lib.rs (79%) rename {src/context/analyzers => crates/analyzers/src}/var_analyzer/mod.rs (73%) rename {src/context/analyzers => crates/analyzers/src}/var_analyzer/report_display.rs (59%) create mode 100644 crates/cli/Cargo.toml rename {cli => crates/cli}/src/main.rs (54%) create mode 100644 crates/graph/Cargo.toml create mode 100644 crates/graph/src/graph_elements.rs create mode 100644 crates/graph/src/lib.rs rename {shared => crates/graph}/src/nodes/block.rs (73%) create mode 100644 
crates/graph/src/nodes/builtin.rs rename {shared => crates/graph}/src/nodes/concrete.rs (71%) create mode 100644 crates/graph/src/nodes/context/context_tys.rs rename {shared/src => crates/graph/src/nodes}/context/expr_ret.rs (71%) create mode 100644 crates/graph/src/nodes/context/mod.rs create mode 100644 crates/graph/src/nodes/context/node.rs create mode 100644 crates/graph/src/nodes/context/querying.rs create mode 100644 crates/graph/src/nodes/context/solving.rs create mode 100644 crates/graph/src/nodes/context/typing.rs create mode 100644 crates/graph/src/nodes/context/underlying.rs create mode 100644 crates/graph/src/nodes/context/var/mod.rs create mode 100644 crates/graph/src/nodes/context/var/node.rs create mode 100644 crates/graph/src/nodes/context/var/ranging.rs create mode 100644 crates/graph/src/nodes/context/var/typing.rs create mode 100644 crates/graph/src/nodes/context/var/underlying.rs create mode 100644 crates/graph/src/nodes/context/var/versioning.rs create mode 100644 crates/graph/src/nodes/context/variables.rs create mode 100644 crates/graph/src/nodes/context/versioning.rs rename {shared => crates/graph}/src/nodes/contract_ty.rs (58%) rename {shared => crates/graph}/src/nodes/enum_ty.rs (78%) rename {shared => crates/graph}/src/nodes/err_ty.rs (74%) rename {shared => crates/graph}/src/nodes/func_ty.rs (70%) create mode 100644 crates/graph/src/nodes/mod.rs rename {shared => crates/graph}/src/nodes/msg.rs (89%) create mode 100644 crates/graph/src/nodes/source_unit.rs create mode 100644 crates/graph/src/nodes/source_unit_part.rs rename {shared => crates/graph}/src/nodes/struct_ty.rs (71%) rename {shared => crates/graph}/src/nodes/ty_ty.rs (61%) rename {shared => crates/graph}/src/nodes/var_ty.rs (72%) create mode 100644 crates/graph/src/range/elem/concrete.rs create mode 100644 crates/graph/src/range/elem/elem_enum/arena.rs create mode 100644 crates/graph/src/range/elem/elem_enum/impls.rs create mode 100644 crates/graph/src/range/elem/elem_enum/mod.rs create mode 100644 crates/graph/src/range/elem/elem_enum/ops.rs create mode 100644 crates/graph/src/range/elem/elem_enum/range_elem.rs create mode 100644 crates/graph/src/range/elem/elem_enum/traits.rs create mode 100644 crates/graph/src/range/elem/elem_trait.rs create mode 100644 crates/graph/src/range/elem/expr/collapse.rs create mode 100644 crates/graph/src/range/elem/expr/mod.rs create mode 100644 crates/graph/src/range/elem/expr/simplify/add.rs create mode 100644 crates/graph/src/range/elem/expr/simplify/mod.rs create mode 100644 crates/graph/src/range/elem/expr/simplify/ords.rs create mode 100644 crates/graph/src/range/elem/expr/simplify/sub.rs create mode 100644 crates/graph/src/range/elem/map_or_array.rs rename shared/src/range/elem.rs => crates/graph/src/range/elem/mod.rs (50%) create mode 100644 crates/graph/src/range/elem/reference.rs create mode 100644 crates/graph/src/range/exec/bitwise.rs create mode 100644 crates/graph/src/range/exec/cast.rs create mode 100644 crates/graph/src/range/exec/exec_op.rs create mode 100644 crates/graph/src/range/exec/math_ops/add.rs create mode 100644 crates/graph/src/range/exec/math_ops/div.rs create mode 100644 crates/graph/src/range/exec/math_ops/exp.rs create mode 100644 crates/graph/src/range/exec/math_ops/mod.rs create mode 100644 crates/graph/src/range/exec/math_ops/modulo.rs create mode 100644 crates/graph/src/range/exec/math_ops/mul.rs create mode 100644 crates/graph/src/range/exec/math_ops/sub.rs create mode 100644 crates/graph/src/range/exec/max.rs create mode 100644 
crates/graph/src/range/exec/mem_ops/concat.rs create mode 100644 crates/graph/src/range/exec/mem_ops/mem_get.rs create mode 100644 crates/graph/src/range/exec/mem_ops/mem_set.rs create mode 100644 crates/graph/src/range/exec/mem_ops/memcopy.rs create mode 100644 crates/graph/src/range/exec/mem_ops/mod.rs create mode 100644 crates/graph/src/range/exec/min.rs create mode 100644 crates/graph/src/range/exec/mod.rs create mode 100644 crates/graph/src/range/exec/shift.rs create mode 100644 crates/graph/src/range/exec/truthy_ops/logical.rs create mode 100644 crates/graph/src/range/exec/truthy_ops/mod.rs create mode 100644 crates/graph/src/range/exec/truthy_ops/ord.rs create mode 100644 crates/graph/src/range/exec_traits.rs create mode 100644 crates/graph/src/range/mod.rs rename {shared => crates/graph}/src/range/range_string.rs (53%) create mode 100644 crates/graph/src/range/range_trait.rs create mode 100644 crates/graph/src/range/solc_range.rs create mode 100644 crates/graph/src/solvers/atoms.rs create mode 100644 crates/graph/src/solvers/brute.rs create mode 100644 crates/graph/src/solvers/dl.rs create mode 100644 crates/graph/src/solvers/mod.rs create mode 100644 crates/graph/src/var_type.rs create mode 100644 crates/pyrometer/Cargo.toml rename {benches => crates/pyrometer/benches}/README.md (100%) rename {benches => crates/pyrometer/benches}/flamegraphs/parse.svg (100%) rename {benches => crates/pyrometer/benches}/parse.rs (72%) create mode 100644 crates/pyrometer/src/analyzer.rs create mode 100644 crates/pyrometer/src/analyzer_backend.rs rename {src => crates/pyrometer/src}/builtin_fns.rs (98%) create mode 100644 crates/pyrometer/src/graph_backend.rs create mode 100644 crates/pyrometer/src/lib.rs rename {tests => crates/pyrometer/tests}/benches/flat_comptroller.sol (100%) rename {tests => crates/pyrometer/tests}/benches/flat_ctoken.sol (100%) rename {tests => crates/pyrometer/tests}/challenges/apron.sol (77%) rename {tests => crates/pyrometer/tests}/challenges/func_stress.sol (100%) rename {tests => crates/pyrometer/tests}/challenges/reverse_bound_propogation.sol (100%) create mode 100644 crates/pyrometer/tests/helpers.rs rename {tests => crates/pyrometer/tests}/no_killed_ctxs.rs (92%) rename {tests => crates/pyrometer/tests}/test_data/abstract.sol (100%) rename {tests => crates/pyrometer/tests}/test_data/assembly.sol (100%) rename {tests => crates/pyrometer/tests}/test_data/bitwise.sol (99%) rename {tests => crates/pyrometer/tests}/test_data/cast.sol (94%) rename {tests => crates/pyrometer/tests}/test_data/const_var.sol (99%) rename {tests => crates/pyrometer/tests}/test_data/constructor.sol (96%) create mode 100644 crates/pyrometer/tests/test_data/dyn_types.sol rename {tests => crates/pyrometer/tests}/test_data/env.sol (100%) rename {tests => crates/pyrometer/tests}/test_data/func_override.sol (100%) rename {tests => crates/pyrometer/tests}/test_data/function_calls.sol (100%) rename {tests => crates/pyrometer/tests}/test_data/interface.sol (100%) rename {tests => crates/pyrometer/tests}/test_data/intrinsics.sol (98%) create mode 100644 crates/pyrometer/tests/test_data/join.sol rename {tests => crates/pyrometer/tests}/test_data/logical.sol (100%) create mode 100644 crates/pyrometer/tests/test_data/loops.sol rename {tests => crates/pyrometer/tests}/test_data/math.sol (83%) rename {tests => crates/pyrometer/tests}/test_data/modifier.sol (89%) rename {tests => crates/pyrometer/tests}/test_data/named_func_call.sol (100%) rename {tests => crates/pyrometer/tests}/test_data/precedence.sol (100%) 
rename {tests => crates/pyrometer/tests}/test_data/relative_imports/relative_import.sol (100%) rename {tests => crates/pyrometer/tests}/test_data/remapping_import.sol (100%) rename {tests => crates/pyrometer/tests}/test_data/remappings.txt (100%) create mode 100644 crates/pyrometer/tests/test_data/repros/issue50.sol create mode 100644 crates/pyrometer/tests/test_data/repros/issue66.sol create mode 100644 crates/pyrometer/tests/test_data/repros/issue69.sol create mode 100644 crates/pyrometer/tests/test_data/repros/overflow.sol create mode 100644 crates/pyrometer/tests/test_data/repros/overflow2.sol rename {tests => crates/pyrometer/tests}/test_data/require.sol (99%) rename {tests => crates/pyrometer/tests}/test_data/storage.sol (99%) rename {tests => crates/pyrometer/tests}/test_data/using.sol (100%) create mode 100644 crates/pyrometer/tests/test_data/viz/basic.sol create mode 100644 crates/pyrometer/tests/test_data/viz/func_call.sol create mode 100644 crates/queries/Cargo.toml create mode 100644 crates/queries/src/lib.rs create mode 100644 crates/shared/Cargo.toml create mode 100644 crates/shared/src/analyzer_like.rs create mode 100644 crates/shared/src/gas.rs create mode 100644 crates/shared/src/graph_like.rs create mode 100644 crates/shared/src/lib.rs create mode 100644 crates/shared/src/mermaidConfig.json create mode 100644 crates/shared/src/search.rs create mode 100644 crates/solc-expressions/Cargo.toml create mode 100644 crates/solc-expressions/src/array.rs create mode 100644 crates/solc-expressions/src/assign.rs create mode 100644 crates/solc-expressions/src/bin_op.rs rename {src/context/exprs => crates/solc-expressions/src}/cmp.rs (70%) rename {src/context/exprs => crates/solc-expressions/src}/cond_op.rs (58%) create mode 100644 crates/solc-expressions/src/context_builder/expr.rs create mode 100644 crates/solc-expressions/src/context_builder/fn_calls.rs create mode 100644 crates/solc-expressions/src/context_builder/mod.rs create mode 100644 crates/solc-expressions/src/context_builder/stmt.rs rename {src/context/exprs => crates/solc-expressions/src}/env.rs (90%) create mode 100644 crates/solc-expressions/src/func_call/func_caller.rs create mode 100644 crates/solc-expressions/src/func_call/helper.rs create mode 100644 crates/solc-expressions/src/func_call/internal_call.rs create mode 100644 crates/solc-expressions/src/func_call/intrinsic_call/abi.rs create mode 100644 crates/solc-expressions/src/func_call/intrinsic_call/address.rs create mode 100644 crates/solc-expressions/src/func_call/intrinsic_call/array.rs create mode 100644 crates/solc-expressions/src/func_call/intrinsic_call/block.rs create mode 100644 crates/solc-expressions/src/func_call/intrinsic_call/constructors.rs create mode 100644 crates/solc-expressions/src/func_call/intrinsic_call/dyn_builtin.rs create mode 100644 crates/solc-expressions/src/func_call/intrinsic_call/intrinsic_caller.rs create mode 100644 crates/solc-expressions/src/func_call/intrinsic_call/mod.rs create mode 100644 crates/solc-expressions/src/func_call/intrinsic_call/msg.rs create mode 100644 crates/solc-expressions/src/func_call/intrinsic_call/precompile.rs create mode 100644 crates/solc-expressions/src/func_call/intrinsic_call/solidity.rs create mode 100644 crates/solc-expressions/src/func_call/intrinsic_call/types.rs create mode 100644 crates/solc-expressions/src/func_call/join.rs create mode 100644 crates/solc-expressions/src/func_call/mod.rs create mode 100644 crates/solc-expressions/src/func_call/modifier.rs create mode 100644 
crates/solc-expressions/src/func_call/namespaced_call.rs rename src/context/exprs/mod.rs => crates/solc-expressions/src/lib.rs (56%) rename {src/context/exprs => crates/solc-expressions/src}/list.rs (57%) rename {src/context/exprs => crates/solc-expressions/src}/literal.rs (83%) rename {src/context => crates/solc-expressions/src}/loops.rs (56%) create mode 100644 crates/solc-expressions/src/member_access/builtin_access.rs create mode 100644 crates/solc-expressions/src/member_access/contract_access.rs create mode 100644 crates/solc-expressions/src/member_access/enum_access.rs create mode 100644 crates/solc-expressions/src/member_access/func_access.rs create mode 100644 crates/solc-expressions/src/member_access/library_access.rs create mode 100644 crates/solc-expressions/src/member_access/list_access.rs create mode 100644 crates/solc-expressions/src/member_access/member_trait.rs create mode 100644 crates/solc-expressions/src/member_access/mod.rs create mode 100644 crates/solc-expressions/src/member_access/struct_access.rs create mode 100644 crates/solc-expressions/src/pre_post_in_decrement.rs rename {src/context/exprs => crates/solc-expressions/src}/require.rs (59%) create mode 100644 crates/solc-expressions/src/variable.rs create mode 100644 crates/solc-expressions/src/yul/mod.rs create mode 100644 crates/solc-expressions/src/yul/yul_builder.rs rename {src/context => crates/solc-expressions/src}/yul/yul_cond_op.rs (60%) rename {src/context => crates/solc-expressions/src}/yul/yul_funcs.rs (59%) delete mode 100644 shared/Cargo.lock delete mode 100644 shared/Cargo.toml delete mode 100644 shared/src/analyzer.rs delete mode 100644 shared/src/context/mod.rs delete mode 100644 shared/src/context/var.rs delete mode 100644 shared/src/lib.rs delete mode 100644 shared/src/nodes/mod.rs delete mode 100644 shared/src/range/elem_ty.rs delete mode 100644 shared/src/range/mod.rs delete mode 100644 shared/src/range/range_ops.rs delete mode 100644 src/context/exprs/array.rs delete mode 100644 src/context/exprs/bin_op.rs delete mode 100644 src/context/exprs/member_access.rs delete mode 100644 src/context/exprs/variable.rs delete mode 100644 src/context/func_call/internal_call.rs delete mode 100644 src/context/func_call/intrinsic_call.rs delete mode 100644 src/context/func_call/mod.rs delete mode 100644 src/context/func_call/modifier.rs delete mode 100644 src/context/func_call/namespaced_call.rs delete mode 100644 src/context/mod.rs delete mode 100644 src/context/queries/mod.rs delete mode 100644 src/context/queries/storage_write/access.rs delete mode 100644 src/context/queries/storage_write/mod.rs delete mode 100644 src/context/queries/storage_write/target.rs delete mode 100644 src/context/queries/taint.rs delete mode 100644 src/context/yul/mod.rs delete mode 100644 src/lib.rs delete mode 100644 tests/helpers.rs delete mode 100644 tests/test_data/dyn_types.sol delete mode 100644 tests/test_data/loops.sol diff --git a/.gitignore b/.gitignore index 9ba2a3b7..113d56e2 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +**/profile.json **/target **/.DS_Store **/out @@ -6,4 +7,4 @@ **/dot.svg **/dot.dot **/flamegraph.svg -**/.swp \ No newline at end of file +**/.swp diff --git a/Cargo.lock b/Cargo.lock index c89c6964..bd01078d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,15 +2,38 @@ # It is not intended for manual editing. 
version = 3 +[[package]] +name = "ahash" +version = "0.8.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b79b82693f705137f8fb9b37871d99e4f9a7df12b917eed79c3d3954830a60b" +dependencies = [ + "cfg-if", + "getrandom", + "once_cell", + "version_check", + "zerocopy", +] + [[package]] name = "aho-corasick" -version = "0.7.20" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" dependencies = [ "memchr", ] +[[package]] +name = "analyzers" +version = "0.2.0" +dependencies = [ + "ariadne", + "graph", + "shared", + "solang-parser", +] + [[package]] name = "anes" version = "0.1.6" @@ -19,42 +42,50 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anstream" -version = "0.2.6" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "342258dd14006105c2b75ab1bd7543a03bdf0cfc94383303ac212a04939dff6f" +checksum = "2ab91ebe16eb252986481c5b62f6098f3b698a45e34b5b98200cf20dd2484a44" dependencies = [ "anstyle", "anstyle-parse", + "anstyle-query", "anstyle-wincon", - "concolor-override", - "concolor-query", - "is-terminal", + "colorchoice", "utf8parse", ] [[package]] name = "anstyle" -version = "0.3.5" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23ea9e81bd02e310c216d080f6223c179012256e5151c41db88d12c88a1684d2" +checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" [[package]] name = "anstyle-parse" -version = "0.1.1" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7d1bb534e9efed14f3e5f44e7dd1a4f709384023a4165199a4241e18dff0116" +checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" dependencies = [ "utf8parse", ] +[[package]] +name = "anstyle-query" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3a318f1f38d2418400f8209655bfd825785afd25aa30bb7ba6cc792e4596748" +dependencies = [ + "windows-sys 0.52.0", +] + [[package]] name = "anstyle-wincon" -version = "0.2.0" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3127af6145b149f3287bb9a0d10ad9c5692dba8c53ad48285e5bec4063834fa" +checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" dependencies = [ "anstyle", - "windows-sys 0.45.0", + "windows-sys 0.52.0", ] [[package]] @@ -68,9 +99,9 @@ dependencies = [ [[package]] name = "arrayvec" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" [[package]] name = "ascii-canvas" @@ -94,9 +125,9 @@ dependencies = [ [[package]] name = "auto_impl" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a8c1df849285fbacd587de7818cc7d13be6cd2cbcd47a04fb1801b0e2706e33" +checksum = "fee3da8ef1276b0bee5dd1c7258010d8fffd31801447323115a25560e1327b89" dependencies = [ "proc-macro-error", "proc-macro2", @@ -143,6 +174,12 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] 
+name = "bitflags" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" + [[package]] name = "bitvec" version = "1.0.1" @@ -166,9 +203,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.12.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" [[package]] name = "byte-slice-cast" @@ -178,15 +215,15 @@ checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" [[package]] name = "byteorder" -version = "1.4.3" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" dependencies = [ "serde", ] @@ -197,12 +234,6 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" -[[package]] -name = "cc" -version = "1.0.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" - [[package]] name = "cfg-if" version = "1.0.0" @@ -211,11 +242,10 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.24" +version = "0.4.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e3c5919066adf22df73762e50cffcde3a758f2a848b113b586d1f86728b673b" +checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" dependencies = [ - "num-integer", "num-traits", ] @@ -252,46 +282,44 @@ version = "3.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" dependencies = [ - "bitflags", + "bitflags 1.3.2", "clap_lex 0.2.4", - "indexmap", + "indexmap 1.9.3", "textwrap", ] [[package]] name = "clap" -version = "4.2.1" +version = "4.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "046ae530c528f252094e4a77886ee1374437744b2bff1497aa898bbddbbb29b3" +checksum = "bfaff671f6b22ca62406885ece523383b9b64022e341e53e009a62ebc47a45f2" dependencies = [ "clap_builder", "clap_derive", - "once_cell", ] [[package]] name = "clap_builder" -version = "4.2.1" +version = "4.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "223163f58c9a40c3b0a43e1c4b50a9ce09f007ea2cb1ec258a687945b4b7929f" +checksum = "a216b506622bb1d316cd51328dce24e07bdff4a6128a47c7e7fad11878d5adbb" dependencies = [ "anstream", "anstyle", - "bitflags", - "clap_lex 0.4.1", + "clap_lex 0.6.0", "strsim", ] [[package]] name = "clap_derive" -version = "4.2.0" +version = "4.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9644cd56d6b87dbe899ef8b053e331c0637664e9e21a33dfcdc36093f5c5c4" +checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" dependencies = [ "heck", 
"proc-macro2", "quote", - "syn 2.0.13", + "syn 2.0.39", ] [[package]] @@ -305,49 +333,57 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.4.1" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a2dd5a6fe8c6e3502f568a6353e5273bbb15193ad9a89e457b9970798efbea1" +checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" [[package]] name = "cli" -version = "0.1.0" +version = "0.2.0" dependencies = [ + "analyzers", "ariadne", - "clap 4.2.1", + "clap 4.4.11", + "ethers-core", + "graph", "petgraph", "pyrometer", "shared", "tracing", "tracing-subscriber", + "tracing-tree", ] [[package]] -name = "concolor-override" +name = "colorchoice" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a855d4a1978dc52fb0536a04d384c2c0c1aa273597f08b77c8c4d3b2eec6037f" +checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" [[package]] -name = "concolor-query" -version = "0.3.3" +name = "const-hex" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88d11d52c3d7ca2e6d0040212be9e4dbbcd78b6447f535b6b561f449427944cf" +checksum = "a5104de16b218eddf8e34ffe2f86f74bfa4e61e95a1b89732fccf6325efd0557" dependencies = [ - "windows-sys 0.45.0", + "cfg-if", + "cpufeatures", + "hex", + "proptest", + "serde", ] [[package]] name = "const-oid" -version = "0.9.2" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520fbf3c07483f94e3e3ca9d0cfd913d7718ef2483d2cfd91c0d9e91474ab913" +checksum = "28c122c3980598d243d63d9a704629a2d748d101f278052ff068be5a4423ab6f" [[package]] name = "cpufeatures" -version = "0.2.6" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "280a9f2d8b3a38871a3c8a46fb80db65e5e5ed97da80c4d08bf27fb63e35e181" +checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" dependencies = [ "libc", ] @@ -388,16 +424,6 @@ dependencies = [ "itertools", ] -[[package]] -name = "crossbeam-channel" -version = "0.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" -dependencies = [ - "cfg-if", - "crossbeam-utils", -] - [[package]] name = "crossbeam-deque" version = "0.8.3" @@ -411,9 +437,9 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.14" +version = "0.9.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695" +checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" dependencies = [ "autocfg", "cfg-if", @@ -424,9 +450,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.15" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" dependencies = [ "cfg-if", ] @@ -439,9 +465,9 @@ checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" [[package]] name = "crypto-bigint" -version = "0.5.1" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c2538c4e68e52548bacb3e83ac549f903d44f011ac9d5abb5e132e67d0808f7" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array", "rand_core", @@ 
-461,9 +487,9 @@ dependencies = [ [[package]] name = "der" -version = "0.7.3" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82b10af9f9f9f2134a42d3f8aa74658660f2e0234b0eb81bd171df8aa32779ed" +checksum = "fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c" dependencies = [ "const-oid", "zeroize", @@ -488,9 +514,9 @@ checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" [[package]] name = "digest" -version = "0.10.6" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "const-oid", @@ -521,28 +547,29 @@ dependencies = [ [[package]] name = "ecdsa" -version = "0.16.4" +version = "0.16.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "106401dadc137d05cb0d4ab4d42be089746aefdfe8992df4d0edcf351c16ddca" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" dependencies = [ "der", "digest", "elliptic-curve", "rfc6979", "signature", + "spki", ] [[package]] name = "either" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "elliptic-curve" -version = "0.13.3" +version = "0.13.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cdacd4d6ed3f9b98680b679c0e52a823b8a2c7a97358d508fe247f2180c282" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" dependencies = [ "base16ct", "crypto-bigint", @@ -567,24 +594,19 @@ dependencies = [ ] [[package]] -name = "errno" -version = "0.3.0" +name = "equivalent" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d6a0976c999d473fe89ad888d5a284e55366d9dc9038b1ba2aa15128c4afa0" -dependencies = [ - "errno-dragonfly", - "libc", - "windows-sys 0.45.0", -] +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] -name = "errno-dragonfly" -version = "0.1.2" +name = "errno" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ - "cc", "libc", + "windows-sys 0.52.0", ] [[package]] @@ -637,18 +659,17 @@ dependencies = [ [[package]] name = "ethers-core" -version = "2.0.2" +version = "2.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40bf114f1017ace0f622f1652f59c2c5e1abfe7d88891cca0c43da979b351de0" +checksum = "2f03e0bdc216eeb9e355b90cf610ef6c5bb8aca631f97b5ae9980ce34ea7878d" dependencies = [ "arrayvec", "bytes", "chrono", + "const-hex", "elliptic-curve", "ethabi", "generic-array", - "getrandom", - "hex", "k256", "num_enum", "open-fastrlp", @@ -665,12 +686,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "1.9.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" -dependencies = [ - "instant", -] +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" [[package]] name = "ff" @@ 
-719,15 +737,29 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.9" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4" +checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" dependencies = [ "cfg-if", - "js-sys", "libc", "wasi", - "wasm-bindgen", +] + +[[package]] +name = "graph" +version = "0.2.0" +dependencies = [ + "ethers-core", + "hex", + "itertools", + "lazy_static", + "petgraph", + "pretty_assertions", + "shared", + "solang-parser", + "tracing", + "tracing-subscriber", ] [[package]] @@ -753,6 +785,12 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +[[package]] +name = "hashbrown" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" + [[package]] name = "heck" version = "0.4.1" @@ -770,18 +808,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" -dependencies = [ - "libc", -] - -[[package]] -name = "hermit-abi" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" +checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" [[package]] name = "hex" @@ -843,37 +872,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", - "hashbrown", -] - -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if", + "hashbrown 0.12.3", ] [[package]] -name = "io-lifetimes" -version = "1.0.10" +name = "indexmap" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c66c74d2ae7e79a5a8f7ac924adbe38ee42a859c6539ad869eb51f0b52dc220" +checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" dependencies = [ - "hermit-abi 0.3.1", - "libc", - "windows-sys 0.48.0", + "equivalent", + "hashbrown 0.14.3", ] [[package]] name = "is-terminal" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f" +checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ - "hermit-abi 0.3.1", - "io-lifetimes", + "hermit-abi 0.3.3", "rustix", "windows-sys 0.48.0", ] @@ -889,24 +907,24 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.6" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "js-sys" -version = "0.3.63" +version = "0.3.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f37a4a5928311ac501dee68b3c7613a1037d0edb30c8e5427bd832d55d1b790" +checksum = 
"cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca" dependencies = [ "wasm-bindgen", ] [[package]] name = "k256" -version = "0.13.0" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955890845095ccf31ef83ad41a05aabb4d8cc23dc3cac5a9f5c89cf26dd0da75" +checksum = "3f01b677d82ef7a676aa37e099defd83a28e15687112cafdd112d60236b6115b" dependencies = [ "cfg-if", "ecdsa", @@ -917,18 +935,28 @@ dependencies = [ [[package]] name = "keccak" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3afef3b6eff9ce9d8ff9b3601125eec7f0c8cbac7abd14f355d053fa56c98768" +checksum = "8f6d5ed8676d904364de097082f4e7d240b571b67989ced0240f08b7f966f940" dependencies = [ "cpufeatures", ] +[[package]] +name = "keccak-hash" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b286e6b663fb926e1eeb68528e69cb70ed46c6d65871a21b2215ae8154c6d3c" +dependencies = [ + "primitive-types", + "tiny-keccak", +] + [[package]] name = "lalrpop" -version = "0.19.9" +version = "0.19.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f34313ec00c2eb5c3c87ca6732ea02dcf3af99c3ff7a8fb622ffb99c9d860a87" +checksum = "0a1cbf952127589f2851ab2046af368fd20645491bb4b376f04b7f94d7a9837b" dependencies = [ "ascii-canvas", "bit-set", @@ -938,9 +966,8 @@ dependencies = [ "itertools", "lalrpop-util", "petgraph", - "pico-args", "regex", - "regex-syntax", + "regex-syntax 0.6.29", "string_cache", "term", "tiny-keccak", @@ -949,9 +976,9 @@ dependencies = [ [[package]] name = "lalrpop-util" -version = "0.19.9" +version = "0.19.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5c1f7869c94d214466c5fd432dfed12c379fd87786768d36455892d46b18edd" +checksum = "d3c48237b9604c5a4702de6b824e02006c3214327564636aef27c1028a8fa0ed" dependencies = [ "regex", ] @@ -964,21 +991,38 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.141" +version = "0.2.150" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3304a64d199bb964be99741b7a14d26972741915b3649639149b2479bb46f4b5" +checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" + +[[package]] +name = "libm" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" + +[[package]] +name = "libredox" +version = "0.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" +dependencies = [ + "bitflags 2.4.1", + "libc", + "redox_syscall", +] [[package]] name = "linux-raw-sys" -version = "0.3.1" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d59d8c75012853d2e872fb56bc8a2e53718e2cafe1a4c823143141c6d90c322f" +checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" [[package]] name = "lock_api" -version = "0.4.9" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" dependencies = [ "autocfg", "scopeguard", @@ -986,12 +1030,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.17" +version = "0.4.20" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" -dependencies = [ - "cfg-if", -] +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" [[package]] name = "matchers" @@ -999,20 +1040,20 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" dependencies = [ - "regex-automata", + "regex-automata 0.1.10", ] [[package]] name = "memchr" -version = "2.5.0" +version = "2.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" [[package]] name = "memoffset" -version = "0.8.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" dependencies = [ "autocfg", ] @@ -1034,60 +1075,50 @@ dependencies = [ ] [[package]] -name = "num-integer" -version = "0.1.45" +name = "nu-ansi-term" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +checksum = "c073d3c1930d0751774acf49e66653acecb416c3a54c6ec095a9b11caddb5a68" dependencies = [ - "autocfg", - "num-traits", + "windows-sys 0.48.0", ] [[package]] name = "num-traits" -version = "0.2.15" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" dependencies = [ "autocfg", -] - -[[package]] -name = "num_cpus" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" -dependencies = [ - "hermit-abi 0.2.6", - "libc", + "libm", ] [[package]] name = "num_enum" -version = "0.5.11" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9" +checksum = "683751d591e6d81200c39fb0d1032608b77724f34114db54f571ff1317b337c0" dependencies = [ "num_enum_derive", ] [[package]] name = "num_enum_derive" -version = "0.5.11" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" +checksum = "6c11e44798ad209ccdd91fc192f0526a369a01234f7373e1b141c96d7cee4f0e" dependencies = [ - "proc-macro-crate", + "proc-macro-crate 2.0.1", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.39", ] [[package]] name = "once_cell" -version = "1.17.1" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "oorandom" @@ -1122,9 +1153,9 @@ dependencies = [ [[package]] name = "os_str_bytes" -version = "6.5.0" +version = "6.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ceedf44fb00f2d1984b0bc98102627ce622e083e49a5bacdb3e514fa4238e267" +checksum = 
"e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" [[package]] name = "overload" @@ -1134,9 +1165,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "parity-scale-codec" -version = "3.4.0" +version = "3.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "637935964ff85a605d114591d4d2c13c5d1ba2806dae97cea6bf180238a749ac" +checksum = "881331e34fa842a2fb61cc2db9643a8fedc615e47cfcc52597d1af0db9a7e8fe" dependencies = [ "arrayvec", "bitvec", @@ -1148,11 +1179,11 @@ dependencies = [ [[package]] name = "parity-scale-codec-derive" -version = "3.1.4" +version = "3.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b26a931f824dd4eca30b3e43bb4f31cd5f0d3a403c5f5ff27106b805bfde7b" +checksum = "be30eaf4b0a9fba5336683b38de57bb86d179a35862ba6bfcf57625d006bde5b" dependencies = [ - "proc-macro-crate", + "proc-macro-crate 2.0.1", "proc-macro2", "quote", "syn 1.0.109", @@ -1170,58 +1201,58 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.7" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.2.16", + "redox_syscall", "smallvec", - "windows-sys 0.45.0", + "windows-targets 0.48.5", ] [[package]] name = "petgraph" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" +checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap", + "indexmap 2.1.0", ] [[package]] name = "phf" -version = "0.11.1" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "928c6535de93548188ef63bb7c4036bd415cd8f36ad25af44b9789b2ee72a48c" +checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" dependencies = [ "phf_macros", - "phf_shared 0.11.1", + "phf_shared 0.11.2", ] [[package]] name = "phf_generator" -version = "0.11.1" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1181c94580fa345f50f19d738aaa39c0ed30a600d95cb2d3e23f94266f14fbf" +checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" dependencies = [ - "phf_shared 0.11.1", + "phf_shared 0.11.2", "rand", ] [[package]] name = "phf_macros" -version = "0.11.1" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92aacdc5f16768709a569e913f7451034034178b05bdc8acda226659a3dccc66" +checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b" dependencies = [ "phf_generator", - "phf_shared 0.11.1", + "phf_shared 0.11.2", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.39", ] [[package]] @@ -1235,24 +1266,18 @@ dependencies = [ [[package]] name = "phf_shared" -version = "0.11.1" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1fb5f6f826b772a8d4c0394209441e7d37cbbb967ae9c7e0e8134365c9ee676" +checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" dependencies = [ "siphasher", ] -[[package]] -name = "pico-args" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"db8bcd96cb740d03149cbad5518db9fd87126a10ab519c011893b1754134c468" - [[package]] name = "pin-project-lite" -version = "0.2.9" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" [[package]] name = "pkcs8" @@ -1266,9 +1291,9 @@ dependencies = [ [[package]] name = "plotters" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97" +checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" dependencies = [ "num-traits", "plotters-backend", @@ -1279,15 +1304,15 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142" +checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" [[package]] name = "plotters-svg" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f" +checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" dependencies = [ "plotters-backend", ] @@ -1304,11 +1329,21 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" +[[package]] +name = "pretty_assertions" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" +dependencies = [ + "diff", + "yansi", +] + [[package]] name = "primitive-types" -version = "0.12.1" +version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f3486ccba82358b11a77516035647c34ba167dfa53312630de83b12bd4f3d66" +checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" dependencies = [ "fixed-hash", "impl-codec", @@ -1325,7 +1360,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" dependencies = [ "once_cell", - "toml_edit", + "toml_edit 0.19.15", +] + +[[package]] +name = "proc-macro-crate" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97dc5fea232fc28d2f597b37c4876b348a40e33f3b02cc975c8d006d78d94b1a" +dependencies = [ + "toml_datetime", + "toml_edit 0.20.2", ] [[package]] @@ -1354,33 +1399,67 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.56" +version = "1.0.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435" +checksum = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b" dependencies = [ "unicode-ident", ] +[[package]] +name = "proptest" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf" +dependencies = [ + "bitflags 2.4.1", + "lazy_static", + "num-traits", + "rand", + "rand_chacha", + "rand_xorshift", + "regex-syntax 0.8.2", + "unarray", +] + [[package]] name = "pyrometer" -version = "0.1.0" +version = "0.2.0" 
dependencies = [ + "ahash", + "analyzers", "ariadne", "criterion", "ethers-core", - "hex", + "graph", "petgraph", + "serde_json", "shared", "solang-parser", + "solc-expressions", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "queries" +version = "0.2.0" +dependencies = [ + "analyzers", + "ariadne", + "ethers-core", + "graph", + "solang-parser", + "solc-expressions", "tracing", "tracing-subscriber", ] [[package]] name = "quote" -version = "1.0.26" +version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" dependencies = [ "proc-macro2", ] @@ -1421,11 +1500,20 @@ dependencies = [ "getrandom", ] +[[package]] +name = "rand_xorshift" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" +dependencies = [ + "rand_core", +] + [[package]] name = "rayon" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b" +checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" dependencies = [ "either", "rayon-core", @@ -1433,54 +1521,44 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" +checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" dependencies = [ - "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", - "num_cpus", -] - -[[package]] -name = "redox_syscall" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" -dependencies = [ - "bitflags", ] [[package]] name = "redox_syscall" -version = "0.3.5" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] name = "redox_users" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" +checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" dependencies = [ "getrandom", - "redox_syscall 0.2.16", + "libredox", "thiserror", ] [[package]] name = "regex" -version = "1.7.3" +version = "1.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d" +checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" dependencies = [ "aho-corasick", "memchr", - "regex-syntax", + "regex-automata 0.4.3", + "regex-syntax 0.8.2", ] [[package]] @@ -1489,7 +1567,18 @@ version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" dependencies = [ - "regex-syntax", + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.8.2", ] [[package]] @@ -1498,6 +1587,12 @@ version = "0.6.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" +[[package]] +name = "regex-syntax" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" + [[package]] name = "rfc6979" version = "0.4.0" @@ -1538,29 +1633,28 @@ checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" [[package]] name = "rustix" -version = "0.37.8" +version = "0.38.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aef160324be24d31a62147fae491c14d2204a3865c7ca8c3b0d7f7bcb3ea635" +checksum = "9470c4bf8246c8daf25f9598dca807fb6510347b1e1cfa55749113850c79d88a" dependencies = [ - "bitflags", + "bitflags 2.4.1", "errno", - "io-lifetimes", "libc", "linux-raw-sys", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "rustversion" -version = "1.0.12" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f3208ce4d8448b3f3e7d168a73f5e0c43a61e32930de3bceeccedb388b6bf06" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" [[package]] name = "ryu" -version = "1.0.13" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" [[package]] name = "same-file" @@ -1573,9 +1667,9 @@ dependencies = [ [[package]] name = "scale-info" -version = "2.5.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cfdffd972d76b22f3d7f81c8be34b2296afd3a25e0a547bd9abe340a4dbbe97" +checksum = "7f7d66a1128282b7ef025a8ead62a4a9fcf017382ec53b8ffbf4d7bf77bd3c60" dependencies = [ "cfg-if", "derive_more", @@ -1585,11 +1679,11 @@ dependencies = [ [[package]] name = "scale-info-derive" -version = "2.5.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61fa974aea2d63dd18a4ec3a49d59af9f34178c73a4f56d2f18205628d00681e" +checksum = "abf2c68b89cafb3b8d918dd07b42be0da66ff202cf1155c5739a4e0c1ea0dc19" dependencies = [ - "proc-macro-crate", + "proc-macro-crate 1.3.1", "proc-macro2", "quote", "syn 1.0.109", @@ -1597,15 +1691,15 @@ dependencies = [ [[package]] name = "scopeguard" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sec1" -version = "0.7.1" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48518a2b5775ba8ca5b46596aae011caa431e6ce7e4a67ead66d92f08884220e" +checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" dependencies = [ "base16ct", "der", @@ -1617,29 +1711,29 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.159" +version = "1.0.193" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c04e8343c3daeec41f58990b9d77068df31209f2af111e059e9fe9646693065" +checksum = 
"25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.159" +version = "1.0.193" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c614d17805b093df4b147b51339e7e44bf05ef59fba1e45d83500bcfb4d8585" +checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.13", + "syn 2.0.39", ] [[package]] name = "serde_json" -version = "1.0.95" +version = "1.0.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d721eca97ac802aa7777b701877c8004d950fc142651367300d21c1cc0194744" +checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" dependencies = [ "itoa", "ryu", @@ -1648,9 +1742,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.6" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", @@ -1659,9 +1753,9 @@ dependencies = [ [[package]] name = "sha3" -version = "0.10.6" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdf0c33fae925bdc080598b84bc15c55e7b9a4a43b3c704da051f977469691c9" +checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" dependencies = [ "digest", "keccak", @@ -1669,20 +1763,20 @@ dependencies = [ [[package]] name = "sharded-slab" -version = "0.1.4" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" dependencies = [ "lazy_static", ] [[package]] name = "shared" -version = "0.1.0" +version = "0.2.0" dependencies = [ + "ahash", "ethers-core", "hex", - "lazy_static", "petgraph", "solang-parser", "tracing", @@ -1691,9 +1785,9 @@ dependencies = [ [[package]] name = "signature" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest", "rand_core", @@ -1701,15 +1795,15 @@ dependencies = [ [[package]] name = "siphasher" -version = "0.3.10" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" [[package]] name = "smallvec" -version = "1.10.0" +version = "1.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" [[package]] name = "solang-parser" @@ -1726,11 +1820,27 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "solc-expressions" +version = "0.2.0" +dependencies = [ + "analyzers", + "ethers-core", + "graph", + "hex", + "keccak-hash", + "petgraph", + "shared", + "solang-parser", + "tracing", + "tracing-subscriber", +] + [[package]] name = "spki" -version = "0.7.1" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"37a5be806ab6f127c3da44b7378837ebf01dadca8510a0e572460216b228bd0e" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", "der", @@ -1763,31 +1873,31 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "strum" -version = "0.24.1" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" dependencies = [ "strum_macros", ] [[package]] name = "strum_macros" -version = "0.24.3" +version = "0.25.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0" dependencies = [ "heck", "proc-macro2", "quote", "rustversion", - "syn 1.0.109", + "syn 2.0.39", ] [[package]] name = "subtle" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "syn" @@ -1802,9 +1912,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.13" +version = "2.0.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c9da457c5285ac1f936ebd076af6dac17a61cfe7826f2076b4d015cf47bc8ec" +checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a" dependencies = [ "proc-macro2", "quote", @@ -1819,15 +1929,15 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.5.0" +version = "3.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998" +checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" dependencies = [ "cfg-if", "fastrand", - "redox_syscall 0.3.5", + "redox_syscall", "rustix", - "windows-sys 0.45.0", + "windows-sys 0.48.0", ] [[package]] @@ -1849,22 +1959,22 @@ checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" [[package]] name = "thiserror" -version = "1.0.40" +version = "1.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" +checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.40" +version = "1.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" +checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.13", + "syn 2.0.39", ] [[package]] @@ -1898,28 +2008,38 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.1" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ab8ed2edee10b50132aed5f331333428b011c99402b5a534154ed15746f9622" +checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" + +[[package]] +name = "toml_edit" +version = "0.19.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" +dependencies = [ + "indexmap 2.1.0", + "toml_datetime", + "winnow", +] [[package]] name = "toml_edit" -version = "0.19.8" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "239410c8609e8125456927e6707163a3b1fdb40561e4b803bc041f466ccfdc13" +checksum = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338" dependencies = [ - "indexmap", + "indexmap 2.1.0", "toml_datetime", "winnow", ] [[package]] name = "tracing" -version = "0.1.37" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ - "cfg-if", "pin-project-lite", "tracing-attributes", "tracing-core", @@ -1927,20 +2047,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.23" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.39", ] [[package]] name = "tracing-core" -version = "0.1.30" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", "valuable", @@ -1948,23 +2068,23 @@ dependencies = [ [[package]] name = "tracing-log" -version = "0.1.3" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" dependencies = [ - "lazy_static", "log", + "once_cell", "tracing-core", ] [[package]] name = "tracing-subscriber" -version = "0.3.16" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ "matchers", - "nu-ansi-term", + "nu-ansi-term 0.46.0", "once_cell", "regex", "sharded-slab", @@ -1975,11 +2095,23 @@ dependencies = [ "tracing-log", ] +[[package]] +name = "tracing-tree" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65139ecd2c3f6484c3b99bc01c77afe21e95473630747c7aca525e78b0666675" +dependencies = [ + "nu-ansi-term 0.49.0", + "tracing-core", + "tracing-log", + "tracing-subscriber", +] + [[package]] name = "typenum" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "uint" @@ -1993,17 +2125,23 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + [[package]] name = "unicode-ident" -version = "1.0.8" +version = "1.0.12" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-width" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" [[package]] name = "unicode-xid" @@ -2031,9 +2169,9 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "walkdir" -version = "2.3.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" +checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" dependencies = [ "same-file", "winapi-util", @@ -2047,9 +2185,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.86" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bba0e8cb82ba49ff4e229459ff22a191bbe9a1cb3a341610c9c33efc27ddf73" +checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -2057,24 +2195,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.86" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b04bc93f9d6bdee709f6bd2118f57dd6679cf1176a1af464fca3ab0d66d8fb" +checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.13", + "syn 2.0.39", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.86" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14d6b024f1a526bb0234f52840389927257beb670610081360e5a03c5df9c258" +checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -2082,28 +2220,28 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.86" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e128beba882dd1eb6200e1dc92ae6c5dbaa4311aa7bb211ca035779e5efc39f8" +checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283" dependencies = [ "proc-macro2", "quote", - "syn 2.0.13", + "syn 2.0.39", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.86" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed9d5b4305409d1fc9482fee2d7f9bcbf24b3972bf59817ef757e23982242a93" +checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f" [[package]] name = "web-sys" -version = "0.3.63" +version = "0.3.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bdd9ef4e984da1187bf8110c5cf5b845fbc87a23602cdf912386a76fcd3a7c2" +checksum = "50c24a44ec86bb68fbecd1b3efed7e85ea5621b39b35ef2766b66cd984f8010f" dependencies = [ "js-sys", "wasm-bindgen", @@ -2127,9 +2265,9 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.5" +version = "0.1.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" dependencies = [ "winapi", ] @@ -2142,141 +2280,141 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-sys" -version = "0.45.0" +version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets 0.42.2", + "windows-targets 0.48.5", ] [[package]] name = "windows-sys" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.48.0", + "windows-targets 0.52.0", ] [[package]] name = "windows-targets" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", ] [[package]] name = "windows-targets" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" dependencies = [ - "windows_aarch64_gnullvm 0.48.0", - "windows_aarch64_msvc 0.48.0", - "windows_i686_gnu 0.48.0", - "windows_i686_msvc 0.48.0", - "windows_x86_64_gnu 0.48.0", - "windows_x86_64_gnullvm 0.48.0", - "windows_x86_64_msvc 0.48.0", + "windows_aarch64_gnullvm 0.52.0", + "windows_aarch64_msvc 0.52.0", + "windows_i686_gnu 0.52.0", + "windows_i686_msvc 0.52.0", + "windows_x86_64_gnu 0.52.0", + "windows_x86_64_gnullvm 0.52.0", + "windows_x86_64_msvc 0.52.0", ] [[package]] name = "windows_aarch64_gnullvm" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" [[package]] name = "windows_aarch64_msvc" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" 
[[package]] name = "windows_aarch64_msvc" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" [[package]] name = "windows_i686_gnu" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" [[package]] name = "windows_i686_msvc" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" [[package]] name = "windows_x86_64_gnu" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" [[package]] name = "windows_x86_64_gnullvm" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" [[package]] name = "windows_x86_64_msvc" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" [[package]] name = "winnow" -version = "0.4.1" +version = "0.5.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae8970b36c66498d8ff1d66685dc86b91b29db0c7739899012f63a63814b4b28" 
+checksum = "b67b5f0a4e7a27a64c651977932b9dc5667ca7fc31ac44b03ed37a0cf42fdfff" dependencies = [ "memchr", ] @@ -2296,8 +2434,28 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" +[[package]] +name = "zerocopy" +version = "0.7.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] + [[package]] name = "zeroize" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" +checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" diff --git a/Cargo.toml b/Cargo.toml index 09ad9588..5bc8a535 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,50 +1,74 @@ -[package] -name = "pyrometer" -version = "0.1.0" -edition = "2021" -autobenches = false # turns off autodiscovery of benchmarks in ./benches -exclude = ["benches"] # exclude the benches directory from the build - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -petgraph = "0.6.2" -solang-parser = { version = "0.2.4", features = ["pt-serde"] } -ethers-core = "*" -ariadne = "0.2.0" -shared = { path = "./shared" } -hex = "0.4.3" -tracing = { version = "0.1", features = ["attributes"] } -tracing-subscriber = "0.3" - -[dev-dependencies] -criterion = { version = "0.4"} # benching - [workspace] -members = ["cli", "shared"] +members = [ + "crates/analyzers", + "crates/cli", + "crates/graph", + "crates/pyrometer", + "crates/queries", + "crates/shared", + "crates/solc-expressions", +] +resolver = "2" -# we patch ariadne to allow for counting by bytes because solang uses byte locations not char locations -[patch.crates-io] -ariadne = {git = "https://github.com/brockelmore/ariadne"} +[workspace.package] +version = "0.2.0" +edition = "2021" +authors = ["Brock Elmore"] +license = "MIT OR Apache-2.0" +homepage = "https://github.com/nascentxyz/pyrometer" +repository = "https://github.com/nascentxyz/pyrometer" +exclude = [ + "benches/", + "tests/", + "examples/", +] # exclude the benches directory from the build +rust-version = "1.74" [profile.release] debug = true [profile.dev] -opt-level = 1 # Enable some optimizations like tail call +# opt-level = 1 # Enable some optimizations like tail call inline = true [profile.bench] debug = true +[workspace.dependencies] +analyzers = { path = "crates/analyzers" } +graph = { path = "crates/graph" } +pyrometer = { path = "crates/pyrometer" } +queries = { path = "crates/queries" } +shared = { path = "crates/shared" } +solc-expressions = { path = "crates/solc-expressions" } + +solang-parser = { version = "0.2.4", features = ["pt-serde"] } +tracing = { version = "0.1", features = ["attributes"] } +tracing-subscriber = { version = "0.3", features = [ + "registry", + "env-filter", + "fmt", +] } +tracing-tree = "0.3.0" +ethers-core = "*" +hex = "0.4.3" +ariadne = "0.2.0" +petgraph = "0.6.2" +ahash = "0.8.10" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +# 
[workspace] +# members = ["cli", "shared"] + +# we patch ariadne to allow for counting by bytes because solang uses byte locations not char locations +[patch.crates-io] +ariadne = { git = "https://github.com/brockelmore/ariadne" } -###################################### -# Benchmarks -###################################### -[[example]] -name = "parse" -path = "examples/parse.rs" +# ###################################### +# # Benchmarks +# ###################################### -[[bench]] -name = "parse" -harness = false \ No newline at end of file +# [[bench]] +# name = "parse" +# harness = false diff --git a/LICENSE b/LICENSE deleted file mode 100644 index f6f0bf49..00000000 --- a/LICENSE +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. 
Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. 
- - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. 
- - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. 
- - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. 
- - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. 
If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). 
To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. 
- - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. 
- - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - Pyrometer - Copyright (C) 2023 Nascent - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see <https://www.gnu.org/licenses/>. - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - Pyrometer Copyright (C) 2023 Nascent - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -<https://www.gnu.org/licenses/>. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -<https://www.gnu.org/licenses/why-not-lgpl.html>. \ No newline at end of file diff --git a/LICENSE-APACHE b/LICENSE-APACHE new file mode 100644 index 00000000..a7b8b5e6 --- /dev/null +++ b/LICENSE-APACHE @@ -0,0 +1,187 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
\ No newline at end of file diff --git a/LICENSE-MIT b/LICENSE-MIT new file mode 100644 index 00000000..dc2b8972 --- /dev/null +++ b/LICENSE-MIT @@ -0,0 +1,20 @@ +Copyright (c) 2022-2023 Nascent + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/TODO.md b/TODO.md index 450b4281..c63b9fb8 100644 --- a/TODO.md +++ b/TODO.md @@ -3,7 +3,7 @@ - [x] Cleanup repo (remove unnecessary files, automate testing, etc.) - [x] Graceful error handling - [ ] `join` operations between contexts - - [ ] Trait/Lang separation cleanup + - [x] Trait/Lang separation cleanup - [ ] Propogate requirements across variables (i.e. `y = x;` & `x != 0;` therefore `y != 0;`) - [ ] Language - [x] Better import handling (`foundry.toml` reading?) diff --git a/cli/Cargo.lock b/cli/Cargo.lock deleted file mode 100644 index 16ae4bf4..00000000 --- a/cli/Cargo.lock +++ /dev/null @@ -1,1616 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 3 - -[[package]] -name = "aho-corasick" -version = "0.7.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" -dependencies = [ - "memchr", -] - -[[package]] -name = "ariadne" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "367fd0ad87307588d087544707bc5fbf4805ded96c7db922b70d368fa1cb5702" -dependencies = [ - "unicode-width", - "yansi", -] - -[[package]] -name = "arrayvec" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" - -[[package]] -name = "ascii-canvas" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6" -dependencies = [ - "term", -] - -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi 0.1.19", - "libc", - "winapi", -] - -[[package]] -name = "auto_impl" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a8c1df849285fbacd587de7818cc7d13be6cd2cbcd47a04fb1801b0e2706e33" -dependencies = [ - "proc-macro-error", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "autocfg" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" - -[[package]] -name = "base16ct" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" - -[[package]] -name = "base64ct" -version = "1.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b645a089122eccb6111b4f81cbc1a49f5900ac4666bb93ac027feaecf15607bf" - -[[package]] -name = "bit-set" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" -dependencies = [ - "bit-vec", -] - -[[package]] -name = "bit-vec" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bitvec" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" -dependencies = [ - "funty", - "radium", - "tap", - "wyz", -] - -[[package]] -name = "block-buffer" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" -dependencies = [ - "generic-array", -] - -[[package]] -name = "byte-slice-cast" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" - -[[package]] -name = "byteorder" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" - -[[package]] -name = "bytes" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfb24e866b15a1af2a1b663f10c6b6b8f397a84aadb828f12e5b289ec23a3a3c" -dependencies = [ - "serde", -] - -[[package]] -name = "cc" -version = "1.0.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "chrono" -version = "0.4.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f" -dependencies = [ - "num-integer", - "num-traits", -] - -[[package]] -name = "clap" -version = "4.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f13b9c79b5d1dd500d20ef541215a6423c75829ef43117e1b4d17fd8af0b5d76" -dependencies = [ - "bitflags", - "clap_derive", - "clap_lex", - "is-terminal", - "once_cell", - "strsim", - "termcolor", -] - -[[package]] -name = "clap_derive" -version = "4.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "684a277d672e91966334af371f1a7b5833f9aa00b07c84e92fbce95e00208ce8" -dependencies = [ - "heck", - "proc-macro-error", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "clap_lex" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "783fe232adfca04f90f56201b26d79682d4cd2625e0bc7290b95123afe558ade" -dependencies = [ - "os_str_bytes", -] - -[[package]] -name = "cli" -version = "0.1.0" -dependencies = [ - "ariadne", - "clap", - "pyrometer", - "shared", -] - -[[package]] -name = "const-oid" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cec318a675afcb6a1ea1d4340e2d377e56e47c266f28043ceccbf4412ddfdd3b" - -[[package]] -name = "cpufeatures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" -dependencies = [ - "libc", -] - -[[package]] -name = "crunchy" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" - -[[package]] -name = "crypto-bigint" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" -dependencies = [ - "generic-array", - "rand_core", - "subtle", - "zeroize", -] - -[[package]] -name = "crypto-common" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "der" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" -dependencies = [ - "const-oid", - "zeroize", -] - -[[package]] -name = "derive_more" -version = "0.99.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" -dependencies = [ - "proc-macro2", - 
"quote", - "syn", -] - -[[package]] -name = "diff" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" - -[[package]] -name = "digest" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" -dependencies = [ - "block-buffer", - "crypto-common", - "subtle", -] - -[[package]] -name = "dirs-next" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" -dependencies = [ - "cfg-if", - "dirs-sys-next", -] - -[[package]] -name = "dirs-sys-next" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" -dependencies = [ - "libc", - "redox_users", - "winapi", -] - -[[package]] -name = "ecdsa" -version = "0.14.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" -dependencies = [ - "der", - "elliptic-curve", - "rfc6979", - "signature", -] - -[[package]] -name = "either" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" - -[[package]] -name = "elliptic-curve" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" -dependencies = [ - "base16ct", - "crypto-bigint", - "der", - "digest", - "ff", - "generic-array", - "group", - "rand_core", - "sec1", - "subtle", - "zeroize", -] - -[[package]] -name = "ena" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7402b94a93c24e742487327a7cd839dc9d36fec9de9fb25b09f2dae459f36c3" -dependencies = [ - "log", -] - -[[package]] -name = "errno" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" -dependencies = [ - "errno-dragonfly", - "libc", - "winapi", -] - -[[package]] -name = "errno-dragonfly" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" -dependencies = [ - "cc", - "libc", -] - -[[package]] -name = "ethabi" -version = "18.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7413c5f74cc903ea37386a8965a936cbeb334bd270862fdece542c1b2dcbc898" -dependencies = [ - "ethereum-types", - "hex", - "once_cell", - "regex", - "serde", - "serde_json", - "sha3", - "thiserror", - "uint", -] - -[[package]] -name = "ethbloom" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c22d4b5885b6aa2fe5e8b9329fb8d232bf739e434e6b87347c63bdd00c120f60" -dependencies = [ - "crunchy", - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "scale-info", - "tiny-keccak", -] - -[[package]] -name = "ethereum-types" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02d215cbf040552efcbe99a38372fe80ab9d00268e20012b79fcd0f073edd8ee" -dependencies = [ - "ethbloom", - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "primitive-types", - 
"scale-info", - "uint", -] - -[[package]] -name = "ethers-core" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade3e9c97727343984e1ceada4fdab11142d2ee3472d2c67027d56b1251d4f15" -dependencies = [ - "arrayvec", - "bytes", - "chrono", - "elliptic-curve", - "ethabi", - "generic-array", - "hex", - "k256", - "open-fastrlp", - "rand", - "rlp", - "rlp-derive", - "serde", - "serde_json", - "strum", - "thiserror", - "tiny-keccak", - "unicode-xid", -] - -[[package]] -name = "ff" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" -dependencies = [ - "rand_core", - "subtle", -] - -[[package]] -name = "fixed-hash" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" -dependencies = [ - "byteorder", - "rand", - "rustc-hex", - "static_assertions", -] - -[[package]] -name = "fixedbitset" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" - -[[package]] -name = "funty" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" - -[[package]] -name = "generic-array" -version = "0.14.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" -dependencies = [ - "typenum", - "version_check", -] - -[[package]] -name = "getrandom" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - -[[package]] -name = "group" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" -dependencies = [ - "ff", - "rand_core", - "subtle", -] - -[[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - -[[package]] -name = "heck" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" - -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - -[[package]] -name = "hermit-abi" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" -dependencies = [ - "libc", -] - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "hmac" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" -dependencies = [ - "digest", -] - -[[package]] -name = "impl-codec" -version = "0.6.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" -dependencies = [ - "parity-scale-codec", -] - -[[package]] -name = "impl-rlp" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28220f89297a075ddc7245cd538076ee98b01f2a9c23a53a4f1105d5a322808" -dependencies = [ - "rlp", -] - -[[package]] -name = "impl-serde" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc88fc67028ae3db0c853baa36269d398d5f45b6982f95549ff5def78c935cd" -dependencies = [ - "serde", -] - -[[package]] -name = "impl-trait-for-tuples" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "indexmap" -version = "1.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" -dependencies = [ - "autocfg", - "hashbrown", -] - -[[package]] -name = "io-lifetimes" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7d6c6f8c91b4b9ed43484ad1a938e393caf35960fce7f82a040497207bd8e9e" -dependencies = [ - "libc", - "windows-sys", -] - -[[package]] -name = "is-terminal" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28dfb6c8100ccc63462345b67d1bbc3679177c75ee4bf59bf29c8b1d110b8189" -dependencies = [ - "hermit-abi 0.2.6", - "io-lifetimes", - "rustix", - "windows-sys", -] - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440" - -[[package]] -name = "k256" -version = "0.11.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" -dependencies = [ - "cfg-if", - "ecdsa", - "elliptic-curve", - "sha2", - "sha3", -] - -[[package]] -name = "keccak" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3afef3b6eff9ce9d8ff9b3601125eec7f0c8cbac7abd14f355d053fa56c98768" -dependencies = [ - "cpufeatures", -] - -[[package]] -name = "lalrpop" -version = "0.19.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b30455341b0e18f276fa64540aff54deafb54c589de6aca68659c63dd2d5d823" -dependencies = [ - "ascii-canvas", - "atty", - "bit-set", - "diff", - "ena", - "itertools", - "lalrpop-util", - "petgraph", - "pico-args", - "regex", - "regex-syntax", - "string_cache", - "term", - "tiny-keccak", - "unicode-xid", -] - -[[package]] -name = "lalrpop-util" -version = "0.19.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcf796c978e9b4d983414f4caedc9273aa33ee214c5b887bd55fde84c85d2dc4" -dependencies = [ - "regex", -] - -[[package]] -name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - -[[package]] -name = "libc" -version = 
"0.2.139" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" - -[[package]] -name = "linux-raw-sys" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" - -[[package]] -name = "lock_api" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" -dependencies = [ - "autocfg", - "scopeguard", -] - -[[package]] -name = "log" -version = "0.4.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "memchr" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" - -[[package]] -name = "new_debug_unreachable" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" - -[[package]] -name = "nom8" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8" -dependencies = [ - "memchr", -] - -[[package]] -name = "num-integer" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" -dependencies = [ - "autocfg", - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" -dependencies = [ - "autocfg", -] - -[[package]] -name = "once_cell" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" - -[[package]] -name = "open-fastrlp" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "786393f80485445794f6043fd3138854dd109cc6c4bd1a6383db304c9ce9b9ce" -dependencies = [ - "arrayvec", - "auto_impl", - "bytes", - "ethereum-types", - "open-fastrlp-derive", -] - -[[package]] -name = "open-fastrlp-derive" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "003b2be5c6c53c1cfeb0a238b8a1c3915cd410feb684457a36c10038f764bb1c" -dependencies = [ - "bytes", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "os_str_bytes" -version = "6.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b7820b9daea5457c9f21c69448905d723fbd21136ccf521748f23fd49e723ee" - -[[package]] -name = "parity-scale-codec" -version = "3.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3840933452adf7b3b9145e27086a5a3376c619dca1a21b1e5a5af0d54979bed" -dependencies = [ - "arrayvec", - "bitvec", - "byte-slice-cast", - "impl-trait-for-tuples", - "parity-scale-codec-derive", - "serde", -] - -[[package]] -name = "parity-scale-codec-derive" -version = "3.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b26a931f824dd4eca30b3e43bb4f31cd5f0d3a403c5f5ff27106b805bfde7b" -dependencies = [ - "proc-macro-crate", - 
"proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "parking_lot" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" -dependencies = [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba1ef8814b5c993410bb3adfad7a5ed269563e4a2f90c41f5d85be7fb47133bf" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-sys", -] - -[[package]] -name = "petgraph" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5014253a1331579ce62aa67443b4a658c5e7dd03d4bc6d302b94474888143" -dependencies = [ - "fixedbitset", - "indexmap", -] - -[[package]] -name = "phf" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "928c6535de93548188ef63bb7c4036bd415cd8f36ad25af44b9789b2ee72a48c" -dependencies = [ - "phf_macros", - "phf_shared 0.11.1", -] - -[[package]] -name = "phf_generator" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1181c94580fa345f50f19d738aaa39c0ed30a600d95cb2d3e23f94266f14fbf" -dependencies = [ - "phf_shared 0.11.1", - "rand", -] - -[[package]] -name = "phf_macros" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92aacdc5f16768709a569e913f7451034034178b05bdc8acda226659a3dccc66" -dependencies = [ - "phf_generator", - "phf_shared 0.11.1", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "phf_shared" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" -dependencies = [ - "siphasher", -] - -[[package]] -name = "phf_shared" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1fb5f6f826b772a8d4c0394209441e7d37cbbb967ae9c7e0e8134365c9ee676" -dependencies = [ - "siphasher", -] - -[[package]] -name = "pico-args" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db8bcd96cb740d03149cbad5518db9fd87126a10ab519c011893b1754134c468" - -[[package]] -name = "pkcs8" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" -dependencies = [ - "der", - "spki", -] - -[[package]] -name = "ppv-lite86" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" - -[[package]] -name = "precomputed-hash" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" - -[[package]] -name = "primitive-types" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f3486ccba82358b11a77516035647c34ba167dfa53312630de83b12bd4f3d66" -dependencies = [ - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "scale-info", - "uint", -] - -[[package]] -name = "proc-macro-crate" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66618389e4ec1c7afe67d51a9bf34ff9236480f8d51e7489b7d5ab0303c13f34" -dependencies = [ - "once_cell", - "toml_edit", -] - 
-[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "syn", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", -] - -[[package]] -name = "proc-macro2" -version = "1.0.50" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "pyrometer" -version = "0.1.0" -dependencies = [ - "ariadne", - "ethers-core", - "hex", - "petgraph", - "shared", - "solang-parser", -] - -[[package]] -name = "quote" -version = "1.0.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "radium" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha", - "rand_core", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom", -] - -[[package]] -name = "redox_syscall" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" -dependencies = [ - "bitflags", -] - -[[package]] -name = "redox_users" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" -dependencies = [ - "getrandom", - "redox_syscall", - "thiserror", -] - -[[package]] -name = "regex" -version = "1.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax", -] - -[[package]] -name = "regex-syntax" -version = "0.6.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" - -[[package]] -name = "rfc6979" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" -dependencies = [ - "crypto-bigint", - "hmac", - "zeroize", -] - -[[package]] -name = "rlp" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" -dependencies = [ - "bytes", - "rustc-hex", -] - -[[package]] -name = "rlp-derive" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33d7b2abe0c340d8797fe2907d3f20d3b5ea5908683618bfe80df7f621f672a" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "rustc-hex" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" - -[[package]] -name = "rustix" -version = "0.36.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4fdebc4b395b7fbb9ab11e462e20ed9051e7b16e42d24042c776eca0ac81b03" -dependencies = [ - "bitflags", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys", - "windows-sys", -] - -[[package]] -name = "rustversion" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5583e89e108996506031660fe09baa5011b9dd0341b89029313006d1fb508d70" - -[[package]] -name = "ryu" -version = "1.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" - -[[package]] -name = "scale-info" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "001cf62ece89779fd16105b5f515ad0e5cedcd5440d3dd806bb067978e7c3608" -dependencies = [ - "cfg-if", - "derive_more", - "parity-scale-codec", - "scale-info-derive", -] - -[[package]] -name = "scale-info-derive" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "303959cf613a6f6efd19ed4b4ad5bf79966a13352716299ad532cfb115f4205c" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "scopeguard" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" - -[[package]] -name = "sec1" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" -dependencies = [ - "base16ct", - "der", - "generic-array", - "pkcs8", - "subtle", - "zeroize", -] - -[[package]] -name = "serde" -version = "1.0.152" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.152" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "serde_json" -version = "1.0.91" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883" -dependencies = [ - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "sha2" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "sha3" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdf0c33fae925bdc080598b84bc15c55e7b9a4a43b3c704da051f977469691c9" 
-dependencies = [ - "digest", - "keccak", -] - -[[package]] -name = "shared" -version = "0.1.0" -dependencies = [ - "ethers-core", - "hex", - "lazy_static", - "petgraph", - "solang-parser", -] - -[[package]] -name = "signature" -version = "1.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" -dependencies = [ - "digest", - "rand_core", -] - -[[package]] -name = "siphasher" -version = "0.3.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" - -[[package]] -name = "smallvec" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" - -[[package]] -name = "solang-parser" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff87dae6cdccacdbf3b19e99b271083556e808de0f59c74a01482f64fdbc61fc" -dependencies = [ - "itertools", - "lalrpop", - "lalrpop-util", - "phf", - "serde", - "unicode-xid", -] - -[[package]] -name = "spki" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" -dependencies = [ - "base64ct", - "der", -] - -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "string_cache" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "213494b7a2b503146286049378ce02b482200519accc31872ee8be91fa820a08" -dependencies = [ - "new_debug_unreachable", - "once_cell", - "parking_lot", - "phf_shared 0.10.0", - "precomputed-hash", -] - -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - -[[package]] -name = "strum" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" -dependencies = [ - "strum_macros", -] - -[[package]] -name = "strum_macros" -version = "0.24.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "rustversion", - "syn", -] - -[[package]] -name = "subtle" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" - -[[package]] -name = "syn" -version = "1.0.107" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "tap" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" - -[[package]] -name = "term" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" -dependencies = [ - "dirs-next", - "rustversion", - "winapi", -] - 
-[[package]] -name = "termcolor" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "thiserror" -version = "1.0.38" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.38" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "tiny-keccak" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" -dependencies = [ - "crunchy", -] - -[[package]] -name = "toml_datetime" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4553f467ac8e3d374bc9a177a26801e5d0f9b211aa1673fb137a403afd1c9cf5" - -[[package]] -name = "toml_edit" -version = "0.18.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56c59d8dd7d0dcbc6428bf7aa2f0e823e26e43b3c9aca15bbc9475d23e5fa12b" -dependencies = [ - "indexmap", - "nom8", - "toml_datetime", -] - -[[package]] -name = "typenum" -version = "1.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" - -[[package]] -name = "uint" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" -dependencies = [ - "byteorder", - "crunchy", - "hex", - "static_assertions", -] - -[[package]] -name = "unicode-ident" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" - -[[package]] -name = "unicode-width" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" - -[[package]] -name = "unicode-xid" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" - -[[package]] -name = "version_check" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-util" -version = "0.1.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" -dependencies = [ - "winapi", -] - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "windows-sys" -version = "0.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" -dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.42.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7" - -[[package]] -name = "windows_i686_gnu" -version = "0.42.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640" - -[[package]] -name = "windows_i686_msvc" -version = "0.42.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.42.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.42.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" - -[[package]] -name = "wyz" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" -dependencies = [ - "tap", -] - -[[package]] -name = "yansi" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" - -[[package]] -name = "zeroize" -version = "1.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" diff --git a/cli/Cargo.toml b/cli/Cargo.toml deleted file mode 100644 index 9ac847ea..00000000 --- a/cli/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -name = "cli" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -clap = { version = "4.1.4", features = ["derive"] } -pyrometer = { path = "../" } -shared = { path = "../shared" } -ariadne = "0.2.0" -tracing = "0.1" -tracing-subscriber = { version = "0.3", features = ["registry", "env-filter", "fmt"] } -petgraph = "0.6.2" - -[[bin]] -name = "pyrometer" -path = "src/main.rs" - - -[profile.release] -debug = true \ No 
newline at end of file diff --git a/crates/analyzers/Cargo.toml b/crates/analyzers/Cargo.toml new file mode 100644 index 00000000..136ace93 --- /dev/null +++ b/crates/analyzers/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "analyzers" +description = "Pyrometer's builtin analyzers" + +version.workspace = true +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +graph.workspace = true +shared.workspace = true + +solang-parser.workspace = true +ariadne.workspace = true \ No newline at end of file diff --git a/src/context/analyzers/bounds.rs b/crates/analyzers/src/bounds.rs similarity index 77% rename from src/context/analyzers/bounds.rs rename to crates/analyzers/src/bounds.rs index 2ea3d72a..636147a9 100644 --- a/src/context/analyzers/bounds.rs +++ b/crates/analyzers/src/bounds.rs @@ -1,17 +1,14 @@ -use crate::analyzers::FunctionVarsBoundAnalysis; -use crate::analyzers::VarBoundAnalysis; +use crate::{FunctionVarsBoundAnalysis, LocSpan, LocStrSpan, ReportConfig, VarBoundAnalysis}; -use crate::analyzers::LocSpan; -use crate::analyzers::{LocStrSpan, ReportConfig}; - -use shared::analyzer::GraphLike; -use shared::{ - context::*, - range::{range_string::*, Range, RangeEval, SolcRange}, +use graph::{ + elem::Elem, + nodes::{Concrete, ContextNode}, + range_string::ToRangeString, + GraphBackend, Range, RangeEval, SolcRange, }; +use shared::{RangeArena, StorageLocation}; use ariadne::{Color, Fmt, Label, Span}; -use solang_parser::pt::StorageLocation; use std::collections::{BTreeMap, BTreeSet}; pub static MIN_COLOR: Color = Color::Fixed(111); @@ -77,9 +74,13 @@ pub struct OrderedAnalysis { } impl OrderedAnalysis { - pub fn from_bound_analysis(ba: VarBoundAnalysis, analyzer: &impl GraphLike) -> Self { + pub fn from_bound_analysis( + ba: VarBoundAnalysis, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Self { let mut analyses: BTreeMap> = Default::default(); - if let Some(init) = ba.init_item(analyzer) { + if let Some(init) = ba.init_item(analyzer, arena) { let source: usize = *LocSpan(init.loc.1).source(); let mut set = BTreeSet::new(); set.insert(init.into()); @@ -88,16 +89,17 @@ impl OrderedAnalysis { ba.bound_changes .iter() .enumerate() - .for_each(|(i, bound_change)| { - let (parts, unsat) = range_parts(analyzer, &ba.report_config, &bound_change.1); + .for_each(|(_i, bound_change)| { + let (parts, unsat) = + range_parts(analyzer, arena, &ba.report_config, &bound_change.1); let item = StrippedAnalysisItem { init: false, name: ba.var_display_name.clone(), loc: LocSpan(bound_change.0 .1), - order: i as i32, + order: (bound_change.0.end() - bound_change.0.start()) as i32, //i as i32, // storage: ba.storage.clone(), ctx: ba.ctx, - ctx_conditionals: ba.conditionals(analyzer), + ctx_conditionals: ba.conditionals(analyzer, arena), parts, unsat, }; @@ -110,11 +112,15 @@ impl OrderedAnalysis { Self { analyses } } - pub fn from_func_analysis(fvba: FunctionVarsBoundAnalysis, analyzer: &impl GraphLike) -> Self { + pub fn from_func_analysis( + fvba: FunctionVarsBoundAnalysis, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Self { let mut analyses = Self::default(); fvba.vars_by_ctx.iter().for_each(|(_ctx, bas)| { bas.iter().for_each(|ba| { - analyses.extend(Self::from_bound_analysis(ba.clone(), analyzer)); + analyses.extend(Self::from_bound_analysis(ba.clone(), analyzer, arena)); }) }); analyses @@ -164,39 +170,43 @@ impl RangePart { } } -impl 
Into> for AnalysisItem { - fn into(self) -> ariadne::Label { - let (color, order, priority) = if self.init { - (Color::Magenta, self.order, -1) +impl From for Label { + fn from(val: AnalysisItem) -> Self { + let (color, order, priority) = if val.init { + (Color::Magenta, val.order, -1) } else { ( - match self.storage { + match val.storage { Some(StorageLocation::Memory(..)) => Color::Blue, Some(StorageLocation::Storage(..)) => Color::Green, Some(StorageLocation::Calldata(..)) => Color::White, + Some(StorageLocation::Block(..)) => Color::Magenta, + Some(StorageLocation::Msg(..)) => Color::Cyan, None => Color::Cyan, }, - self.order, + val.order, 0, ) }; - Label::new(self.loc) + Label::new(val.loc) .with_message(format!( "{}\"{}\"{}{}", - match self.storage { + match val.storage { Some(StorageLocation::Memory(..)) => "Memory var ", Some(StorageLocation::Storage(..)) => "Storage var ", Some(StorageLocation::Calldata(..)) => "Calldata var ", + Some(StorageLocation::Block(..)) => "Block var ", + Some(StorageLocation::Msg(..)) => "Msg var ", None => "", }, - self.name, - self.parts + val.name, + val.parts .into_iter() .map(|part| part.to_cli_string()) .collect::>() .join(" "), - if self.unsat { + if val.unsat { " - unsatisfiable range, unreachable".fg(Color::Red) } else { "".fg(Color::Red) @@ -252,40 +262,41 @@ impl ToString for RangePart { /// Creates an Vec<[RangePart]> from a range based on the current [ReportConfig] pub fn range_parts( - analyzer: &impl GraphLike, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, report_config: &ReportConfig, range: &SolcRange, ) -> (Vec, bool) { let mut parts = vec![]; let min = if report_config.eval_bounds { range - .evaled_range_min(analyzer) + .evaled_range_min(analyzer, arena) .unwrap() - .to_range_string(false, analyzer) + .to_range_string(false, analyzer, arena) .s } else if report_config.simplify_bounds { range - .simplified_range_min(analyzer) + .simplified_range_min(analyzer, arena) .unwrap() - .to_range_string(false, analyzer) + .to_range_string(false, analyzer, arena) .s } else { - range.range_min().to_range_string(false, analyzer).s + range.range_min().to_range_string(false, analyzer, arena).s }; let max = if report_config.eval_bounds { range - .evaled_range_max(analyzer) + .evaled_range_max(analyzer, arena) .unwrap() - .to_range_string(true, analyzer) + .to_range_string(true, analyzer, arena) .s } else if report_config.simplify_bounds { range - .simplified_range_max(analyzer) + .simplified_range_max(analyzer, arena) .unwrap() - .to_range_string(true, analyzer) + .to_range_string(true, analyzer, arena) .s } else { - range.range_max().to_range_string(true, analyzer).s + range.range_max().to_range_string(true, analyzer, arena).s }; if min == max { @@ -300,8 +311,8 @@ pub fn range_parts( let mut excls = range_excl .iter() .map(|range| { - let min = range.to_range_string(false, analyzer).s; - let max = range.to_range_string(true, analyzer).s; + let min = range.to_range_string(false, analyzer, arena).s; + let max = range.to_range_string(true, analyzer, arena).s; if min == max { RangePart::Equal(min) } else { @@ -313,6 +324,6 @@ pub fn range_parts( excls })); } - let unsat = range.unsat(analyzer); + let unsat = range.unsat(analyzer, arena); (parts, unsat) } diff --git a/src/context/analyzers/func_analyzer/mod.rs b/crates/analyzers/src/func_analyzer/mod.rs similarity index 87% rename from src/context/analyzers/func_analyzer/mod.rs rename to crates/analyzers/src/func_analyzer/mod.rs index 3ff556b1..dc3c5bcf 100644 --- 
a/src/context/analyzers/func_analyzer/mod.rs +++ b/crates/analyzers/src/func_analyzer/mod.rs @@ -1,20 +1,18 @@ -use crate::analyzers::range_parts; -use crate::analyzers::VarBoundAnalysis; -use crate::analyzers::VarBoundAnalyzer; - -use crate::analyzers::{LocStrSpan, ReportConfig, ReportDisplay}; -use ariadne::ReportKind; -use std::collections::BTreeSet; +use crate::{ + bounds::range_parts, LocStrSpan, ReportConfig, ReportDisplay, ReportKind, VarBoundAnalysis, + VarBoundAnalyzer, +}; -use shared::analyzer::GraphLike; -use shared::{ - analyzer::{AnalyzerLike, Search}, - context::*, +use graph::{ + elem::Elem, + nodes::{Concrete, ContextNode, KilledKind}, + AnalyzerBackend, GraphBackend, }; +use shared::{RangeArena, Search}; use ariadne::{Color, Config, Fmt, Label, Report, Span}; use solang_parser::pt::CodeLocation; -use std::collections::BTreeMap; +use std::collections::{BTreeMap, BTreeSet}; mod report_display; pub use report_display::*; @@ -48,7 +46,8 @@ impl<'a> FunctionVarsBoundAnalysis { pub fn reports_for_forks( &self, file_mapping: &'a BTreeMap, - analyzer: &impl GraphLike, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, ) -> Vec> { let mut handled_ctx_switches = BTreeSet::default(); let reports = self @@ -58,20 +57,35 @@ impl<'a> FunctionVarsBoundAnalysis { // sort by display name instead of normal name let deps = ctx.ctx_deps(analyzer).unwrap(); let deps = deps - .values() - .map(|var| (var.display_name(analyzer).unwrap(), var)) + .iter() + .map(|var| (var.as_controllable_name(analyzer, arena).unwrap(), var)) .collect::>(); // create the bound strings + // let atoms = ctx.dep_atoms(analyzer).unwrap(); + // println!("had {} atoms", atoms.len()); + // let mut handled_atom = vec![]; + // let mut bounds_string: Vec = vec![]; + // atoms.iter().enumerate().for_each(|(i, atom)| { + // let atom_str = atom.to_range_string(true, analyzer, arena).s; + // if !handled_atom.contains(&atom_str) { + // handled_atom.push(atom_str.clone()); + // bounds_string.push(format!("{}. {}", i + 1, atom_str)) + // } + // }); + // let bounds_string = bounds_string.into_iter().collect::>().join("\n"); + let bounds_string = deps .iter() .enumerate() .filter_map(|(i, (name, cvar))| { let range = cvar.ref_range(analyzer).unwrap()?; - let (parts, _unsat) = range_parts(analyzer, &self.report_config, &range); + + let (parts, _unsat) = + range_parts(analyzer, arena, &self.report_config, &range); let ret = parts.into_iter().fold( format!("{}. 
{name}", i + 1), - |mut acc, part| { - acc = format!("{acc}{}", part.to_cli_string()); + |mut acc, _part| { + acc = acc.to_string(); acc }, ); @@ -108,7 +122,7 @@ impl<'a> FunctionVarsBoundAnalysis { let mut labels: Vec<_> = analyses .iter() .flat_map(|analysis| { - let mut labels = analysis.labels(analyzer); + let mut labels = analysis.labels(analyzer, arena); labels.extend( analysis .spanned_ctx_info @@ -186,6 +200,7 @@ impl<'a> FunctionVarsBoundAnalysis { let range = var.ref_range(analyzer).unwrap()?; let (parts, _unsat) = range_parts( analyzer, + arena, &self.report_config, &range, ); @@ -244,7 +259,7 @@ impl<'a> FunctionVarsBoundAnalysis { .filter_map(|(loc, var)| { let range = var.ref_range(analyzer).unwrap()?; let (parts, _unsat) = - range_parts(analyzer, &self.report_config, &range); + range_parts(analyzer, arena, &self.report_config, &range); Some( Label::new(LocStrSpan::new(file_mapping, loc)) .with_message( @@ -291,18 +306,16 @@ impl<'a> FunctionVarsBoundAnalysis { } } -impl FunctionVarsBoundAnalyzer for T where T: VarBoundAnalyzer + Search + AnalyzerLike + Sized {} -pub trait FunctionVarsBoundAnalyzer: VarBoundAnalyzer + Search + AnalyzerLike + Sized { - fn bounds_for_all<'a>( +impl FunctionVarsBoundAnalyzer for T where T: VarBoundAnalyzer + Search + AnalyzerBackend + Sized {} +pub trait FunctionVarsBoundAnalyzer: VarBoundAnalyzer + Search + AnalyzerBackend + Sized { + fn bounds_for_lineage<'a>( &'a self, + arena: &mut RangeArena>, file_mapping: &'a BTreeMap, ctx: ContextNode, + edges: Vec, report_config: ReportConfig, ) -> FunctionVarsBoundAnalysis { - let mut edges = ctx.all_edges(self).unwrap(); - if edges.is_empty() { - edges.push(ctx); - } let lineage_analyses = edges .iter() .filter_map(|fork| { @@ -314,8 +327,7 @@ pub trait FunctionVarsBoundAnalyzer: VarBoundAnalyzer + Search + AnalyzerLike + { return None; } - if !report_config.show_nonreverts - && matches!(fork.underlying(self).unwrap().killed, None) + if !report_config.show_nonreverts && fork.underlying(self).unwrap().killed.is_none() { return None; } @@ -346,10 +358,11 @@ pub trait FunctionVarsBoundAnalyzer: VarBoundAnalyzer + Search + AnalyzerLike + let is_ret = var.is_return_node_in_any(&parents, self); if is_ret | report_config.show_tmps - | (report_config.show_consts && var.is_const(self).unwrap()) + | (report_config.show_consts && var.is_const(self, arena).unwrap()) | (report_config.show_symbolics && var.is_symbolic(self).unwrap()) { Some(self.bounds_for_var_in_family_tree( + arena, file_mapping, parents.clone(), var.name(self).unwrap(), @@ -375,4 +388,18 @@ pub trait FunctionVarsBoundAnalyzer: VarBoundAnalyzer + Search + AnalyzerLike + report_config, } } + + fn bounds_for_all<'a>( + &'a self, + arena: &mut RangeArena>, + file_mapping: &'a BTreeMap, + ctx: ContextNode, + report_config: ReportConfig, + ) -> FunctionVarsBoundAnalysis { + let mut edges = ctx.all_edges(self).unwrap(); + if edges.is_empty() { + edges.push(ctx); + } + self.bounds_for_lineage(arena, file_mapping, ctx, edges, report_config) + } } diff --git a/src/context/analyzers/func_analyzer/report_display.rs b/crates/analyzers/src/func_analyzer/report_display.rs similarity index 59% rename from src/context/analyzers/func_analyzer/report_display.rs rename to crates/analyzers/src/func_analyzer/report_display.rs index 0f54d0c1..be106ff4 100644 --- a/src/context/analyzers/func_analyzer/report_display.rs +++ b/crates/analyzers/src/func_analyzer/report_display.rs @@ -1,7 +1,11 @@ -use crate::analyzers::func_analyzer::*; -use crate::analyzers::{LocStrSpan, 
ReportDisplay}; -use ariadne::{Cache, Color, Config, Fmt, Label, Report, ReportKind, Span}; -use shared::analyzer::GraphLike; +use crate::{FunctionVarsBoundAnalysis, LocStrSpan, ReportDisplay, ReportKind}; + +use graph::{elem::Elem, nodes::Concrete, GraphBackend}; + +use shared::RangeArena; + +use ariadne::{Cache, Color, Config, Fmt, Label, Report, Span}; + use std::collections::BTreeMap; pub struct CLIFunctionVarsBoundAnalysis<'a> { @@ -25,7 +29,7 @@ impl<'a> ReportDisplay for CLIFunctionVarsBoundAnalysis<'a> { fn report_kind(&self) -> ReportKind { ReportKind::Custom("Bounds", Color::Cyan) } - fn msg(&self, analyzer: &impl GraphLike) -> String { + fn msg(&self, analyzer: &impl GraphBackend, _arena: &mut RangeArena>) -> String { format!( "Bounds for function: {}", format!( @@ -39,17 +43,25 @@ impl<'a> ReportDisplay for CLIFunctionVarsBoundAnalysis<'a> { ) } - fn labels(&self, _analyzer: &impl GraphLike) -> Vec> { + fn labels( + &self, + _analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> Vec> { vec![] } - fn reports(&self, analyzer: &impl GraphLike) -> Vec> { + fn reports( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Vec> { let mut report = Report::build( self.report_kind(), self.func_var_bound_analysis.ctx_loc.source(), self.func_var_bound_analysis.ctx_loc.start(), ) - .with_message(self.msg(analyzer)) + .with_message(self.msg(analyzer, arena)) .with_config( Config::default() .with_cross_gap(false) @@ -57,7 +69,7 @@ impl<'a> ReportDisplay for CLIFunctionVarsBoundAnalysis<'a> { .with_tab_width(4), ); - report.add_labels(self.labels(analyzer)); + report.add_labels(self.labels(analyzer, arena)); if let Some((killed_span, kind)) = &self.func_var_bound_analysis.ctx_killed { report = report.with_label( Label::new(killed_span.clone()) @@ -68,23 +80,34 @@ impl<'a> ReportDisplay for CLIFunctionVarsBoundAnalysis<'a> { let mut reports = vec![report.finish()]; - reports.extend( - self.func_var_bound_analysis - .reports_for_forks(self.file_mapping, analyzer), - ); + reports.extend(self.func_var_bound_analysis.reports_for_forks( + self.file_mapping, + analyzer, + arena, + )); reports } - fn print_reports(&self, mut src: &mut impl Cache, analyzer: &impl GraphLike) { - let reports = &self.reports(analyzer); + fn print_reports( + &self, + mut src: &mut impl Cache, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) { + let reports = &self.reports(analyzer, arena); for report in reports.iter() { report.print(&mut src).unwrap(); } } - fn eprint_reports(&self, mut src: &mut impl Cache, analyzer: &impl GraphLike) { - let reports = &self.reports(analyzer); + fn eprint_reports( + &self, + mut src: &mut impl Cache, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) { + let reports = &self.reports(analyzer, arena); reports.iter().for_each(|report| { report.eprint(&mut src).unwrap(); }); diff --git a/src/context/analyzers/mod.rs b/crates/analyzers/src/lib.rs similarity index 79% rename from src/context/analyzers/mod.rs rename to crates/analyzers/src/lib.rs index 9414cbc3..f469f356 100644 --- a/src/context/analyzers/mod.rs +++ b/crates/analyzers/src/lib.rs @@ -1,10 +1,8 @@ pub mod bounds; -use crate::AnalyzerLike; -use crate::GraphLike; use ariadne::{Cache, Label, Report, ReportKind, Span}; -use bounds::*; -use shared::analyzer::Search; +use graph::{elem::Elem, nodes::Concrete, AnalyzerBackend, GraphBackend}; +use shared::{RangeArena, Search}; use solang_parser::pt::Loc; use std::collections::BTreeMap; @@ -14,11 +12,11 @@ mod var_analyzer; pub use 
var_analyzer::*; pub trait ContextAnalyzer: - AnalyzerLike + Search + VarBoundAnalyzer + FunctionVarsBoundAnalyzer + AnalyzerBackend + Search + VarBoundAnalyzer + FunctionVarsBoundAnalyzer { } impl ContextAnalyzer for T where - T: AnalyzerLike + Search + VarBoundAnalyzer + FunctionVarsBoundAnalyzer + T: AnalyzerBackend + Search + VarBoundAnalyzer + FunctionVarsBoundAnalyzer { } @@ -168,9 +166,27 @@ impl Default for ReportConfig { pub trait ReportDisplay { fn report_kind(&self) -> ReportKind; - fn msg(&self, analyzer: &impl GraphLike) -> String; - fn labels(&self, analyzer: &impl GraphLike) -> Vec>; - fn reports(&self, analyzer: &impl GraphLike) -> Vec>; - fn print_reports(&self, src: &mut impl Cache, analyzer: &impl GraphLike); - fn eprint_reports(&self, src: &mut impl Cache, analyzer: &impl GraphLike); + fn msg(&self, analyzer: &impl GraphBackend, arena: &mut RangeArena>) -> String; + fn labels( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Vec>; + fn reports( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Vec>; + fn print_reports( + &self, + src: &mut impl Cache, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ); + fn eprint_reports( + &self, + src: &mut impl Cache, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ); } diff --git a/src/context/analyzers/var_analyzer/mod.rs b/crates/analyzers/src/var_analyzer/mod.rs similarity index 73% rename from src/context/analyzers/var_analyzer/mod.rs rename to crates/analyzers/src/var_analyzer/mod.rs index 0f0fb23f..26399012 100644 --- a/src/context/analyzers/var_analyzer/mod.rs +++ b/crates/analyzers/src/var_analyzer/mod.rs @@ -1,20 +1,21 @@ -use crate::analyzers::range_parts; -use crate::analyzers::AnalysisItem; -use crate::analyzers::RangePart; -use crate::analyzers::{LocStrSpan, ReportConfig}; -use shared::analyzer::GraphLike; -use shared::{ - analyzer::{AnalyzerLike, Search}, - context::*, - range::{Range, SolcRange}, +use crate::{ + bounds::{range_parts, AnalysisItem, RangePart}, + LocStrSpan, ReportConfig, }; + +use graph::{ + elem::Elem, + nodes::{Concrete, ContextNode, ContextVarNode, KilledKind}, + AnalyzerBackend, GraphBackend, Range, SolcRange, +}; +use shared::{RangeArena, Search, StorageLocation}; + use std::collections::BTreeSet; -use solang_parser::pt::{CodeLocation, StorageLocation}; +use solang_parser::pt::CodeLocation; use std::collections::BTreeMap; mod report_display; -pub use report_display::*; #[derive(PartialOrd, Eq, PartialEq, Ord, Clone, Debug)] pub struct CtxSwitch { @@ -66,10 +67,14 @@ impl Default for VarBoundAnalysis { } impl VarBoundAnalysis { - pub fn conditionals(&self, analyzer: &impl GraphLike) -> Vec<(String, Vec)> { + pub fn conditionals( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Vec<(String, Vec)> { let deps = self.ctx.ctx_deps(analyzer).unwrap(); let deps = deps - .values() + .iter() .map(|var| (var.display_name(analyzer).unwrap(), var)) .collect::>(); // create the bound strings @@ -77,18 +82,22 @@ impl VarBoundAnalysis { .enumerate() .filter_map(|(_i, (_name, cvar))| { let range = cvar.ref_range(analyzer).unwrap()?; - let parts = range_parts(analyzer, &self.report_config, &range).0; + let parts = range_parts(analyzer, arena, &self.report_config, &range).0; Some((cvar.display_name(analyzer).unwrap(), parts)) }) .collect() } /// Creates an [AnalysisItem] if there is a initial bound for a variable - pub fn init_item(&self, analyzer: &impl GraphLike) -> Option { + pub fn init_item( + &self, + 
analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Option { let mut parts = vec![]; let mut unsat = false; if let Some(init_range) = &self.var_def.1 { - (parts, unsat) = range_parts(analyzer, &self.report_config, init_range) + (parts, unsat) = range_parts(analyzer, arena, &self.report_config, init_range) } if parts.is_empty() { None @@ -98,9 +107,9 @@ impl VarBoundAnalysis { order: -1, name: self.var_display_name.clone(), loc: self.var_def.0.clone(), - storage: self.storage.clone(), + storage: self.storage, ctx: self.ctx, - ctx_conditionals: self.conditionals(analyzer), + ctx_conditionals: self.conditionals(analyzer, arena), parts, unsat, }) @@ -108,12 +117,13 @@ impl VarBoundAnalysis { } } -impl VarBoundAnalyzer for T where T: Search + AnalyzerLike + Sized {} -pub trait VarBoundAnalyzer: Search + AnalyzerLike + Sized { +impl VarBoundAnalyzer for T where T: Search + AnalyzerBackend + Sized {} +pub trait VarBoundAnalyzer: Search + AnalyzerBackend + Sized { /// Given a lineage of a context (first element being the youngest, last element being the oldest), /// generate a bound analysis for a variable throughout the lineage fn bounds_for_var_in_family_tree( &self, + arena: &mut RangeArena>, file_mapping: &'_ BTreeMap, ordered_ctxs: Vec, var_name: String, @@ -125,6 +135,7 @@ pub trait VarBoundAnalyzer: Search + AnalyzerLike + Sized { .filter_map(|ctx| Some((ctx, ctx.var_by_name(self, &var_name)?))) .for_each(|(_ctx, cvar)| { let analysis = self.bounds_for_var_node( + arena, &inherited, file_mapping, &var_name, @@ -140,6 +151,7 @@ pub trait VarBoundAnalyzer: Search + AnalyzerLike + Sized { /// Analyzes the bounds for a variable up to the provided node fn bounds_for_var_node( &self, + arena: &mut RangeArena>, inherited: &Option, file_mapping: &'_ BTreeMap, var_name: &str, @@ -215,7 +227,7 @@ pub trait VarBoundAnalyzer: Search + AnalyzerLike + Sized { ), bound_changes: vec![], report_config, - storage: curr.underlying(self).unwrap().storage.clone(), + storage: curr.underlying(self).unwrap().storage, ctx_killed: ctx .killed_loc(self) .unwrap() @@ -224,15 +236,49 @@ pub trait VarBoundAnalyzer: Search + AnalyzerLike + Sized { } }; - if let Some(curr_range) = curr.ref_range(self).unwrap() { - let mut cr_min = curr_range.evaled_range_min(self).unwrap(); - let mut cr_max = curr_range.evaled_range_max(self).unwrap(); + let (comparator, needs_curr) = + if let Some(inherited) = curr.previous_or_inherited_version(self) { + (inherited, true) + } else { + (curr, false) + }; + + if let Some(curr_range) = comparator.ref_range(self).unwrap() { + let mut cr_min = curr_range.evaled_range_min(self, arena).unwrap(); + let mut cr_max = curr_range.evaled_range_max(self, arena).unwrap(); let mut cr_excl = curr_range.range_exclusions(); + + if needs_curr { + if let Some(next_range) = curr.ref_range(self).unwrap() { + let nr_min = next_range.evaled_range_min(self, arena).unwrap(); + let nr_max = next_range.evaled_range_max(self, arena).unwrap(); + let nr_excl = &next_range.range_exclusions(); + + // check if there was a bound change + if report_config.show_all_lines + || nr_min != cr_min + || nr_max != cr_max + || nr_excl != &cr_excl + { + cr_min = nr_min; + cr_max = nr_max; + cr_excl = nr_excl.to_vec(); + let new = ( + LocStrSpan::new(file_mapping, curr.loc(self).unwrap()), + next_range.into_owned(), + ); + if !ba.bound_changes.contains(&new) { + ba.bound_changes.push(new); + } + } + } + } + while let Some(next) = curr.next_version(self) { if let Some(next_range) = next.ref_range(self).unwrap() { - let 
nr_min = next_range.evaled_range_min(self).unwrap(); - let nr_max = next_range.evaled_range_max(self).unwrap(); - let nr_excl = &next_range.exclusions; + let nr_min = next_range.evaled_range_min(self, arena).unwrap(); + let nr_max = next_range.evaled_range_max(self, arena).unwrap(); + let nr_excl = &next_range.range_exclusions(); // check if there was a bound change if report_config.show_all_lines diff --git a/src/context/analyzers/var_analyzer/report_display.rs b/crates/analyzers/src/var_analyzer/report_display.rs similarity index 59% rename from src/context/analyzers/var_analyzer/report_display.rs rename to crates/analyzers/src/var_analyzer/report_display.rs index 66210180..a4b1e48a 100644 --- a/src/context/analyzers/var_analyzer/report_display.rs +++ b/crates/analyzers/src/var_analyzer/report_display.rs @@ -1,23 +1,32 @@ -use crate::analyzers::{LocStrSpan, ReportDisplay}; -use ariadne::{Cache, Color, Config, Fmt, Label, Report, ReportKind, Span}; -use shared::analyzer::GraphLike; +use crate::{ + bounds::{range_parts, AnalysisItem}, + LocStrSpan, ReportDisplay, ReportKind, VarBoundAnalysis, +}; -use crate::analyzers::var_analyzer::*; +use graph::{elem::Elem, nodes::Concrete, GraphBackend}; + +use shared::RangeArena; + +use ariadne::{Cache, Color, Config, Fmt, Label, Report, Span}; impl ReportDisplay for VarBoundAnalysis { fn report_kind(&self) -> ReportKind { ReportKind::Custom("Bounds", Color::Cyan) } - fn msg(&self, analyzer: &impl GraphLike) -> String { + fn msg(&self, analyzer: &impl GraphBackend, _arena: &mut RangeArena>) -> String { format!( "Bounds for {} in {}:", self.var_display_name, self.ctx.underlying(analyzer).unwrap().path ) } - fn labels(&self, analyzer: &impl GraphLike) -> Vec> { + fn labels( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Vec> { let mut labels = if self.report_config.show_initial_bounds { - if let Some(init_item) = self.init_item(analyzer) { + if let Some(init_item) = self.init_item(analyzer, arena) { vec![init_item.into()] } else { vec![] @@ -30,17 +39,17 @@ impl ReportDisplay for VarBoundAnalysis { self.bound_changes .iter() .enumerate() - .map(|(i, bound_change)| { + .map(|(_i, bound_change)| { let (parts, unsat) = - range_parts(analyzer, &self.report_config, &bound_change.1); + range_parts(analyzer, arena, &self.report_config, &bound_change.1); AnalysisItem { init: false, name: self.var_display_name.clone(), loc: bound_change.0.clone(), - order: i as i32, - storage: self.storage.clone(), + order: (bound_change.0.end() - bound_change.0.start()) as i32, + storage: self.storage, ctx: self.ctx, - ctx_conditionals: self.conditionals(analyzer), + ctx_conditionals: self.conditionals(analyzer, arena), parts, unsat, } @@ -52,13 +61,17 @@ impl ReportDisplay for VarBoundAnalysis { labels } - fn reports(&self, analyzer: &impl GraphLike) -> Vec> { + fn reports( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Vec> { let mut report = Report::build( self.report_kind(), self.var_def.0.source(), self.var_def.0.start(), ) - .with_message(self.msg(analyzer)) + .with_message(self.msg(analyzer, arena)) .with_config( Config::default() .with_cross_gap(false) @@ -66,7 +79,7 @@ impl ReportDisplay for VarBoundAnalysis { .with_tab_width(4), ); - report.add_labels(self.labels(analyzer)); + report.add_labels(self.labels(analyzer, arena)); if let Some((killed_span, kind)) = &self.ctx_killed { report = report.with_label( @@ -90,15 +103,25 @@ impl ReportDisplay for VarBoundAnalysis { reports } - fn print_reports(&self, mut src: 
&mut impl Cache, analyzer: &impl GraphLike) { - let reports = self.reports(analyzer); + fn print_reports( + &self, + mut src: &mut impl Cache, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) { + let reports = self.reports(analyzer, arena); reports.into_iter().for_each(|report| { report.print(&mut src).unwrap(); }); } - fn eprint_reports(&self, mut src: &mut impl Cache, analyzer: &impl GraphLike) { - let reports = self.reports(analyzer); + fn eprint_reports( + &self, + mut src: &mut impl Cache, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) { + let reports = self.reports(analyzer, arena); reports.into_iter().for_each(|report| { report.eprint(&mut src).unwrap(); }); diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml new file mode 100644 index 00000000..5029e551 --- /dev/null +++ b/crates/cli/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "cli" + +version.workspace = true +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +analyzers.workspace = true +shared.workspace = true +pyrometer.workspace = true +graph.workspace = true +# queries.workspace = true + +ariadne.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true +tracing-tree.workspace = true +petgraph.workspace = true +ethers-core.workspace = true + + +clap = { version = "4.1.4", features = ["derive"] } + +[[bin]] +name = "pyrometer" +path = "src/main.rs" + + +# [profile.release] +# debug = true \ No newline at end of file diff --git a/cli/src/main.rs b/crates/cli/src/main.rs similarity index 54% rename from cli/src/main.rs rename to crates/cli/src/main.rs index 5449ab71..a55d8426 100644 --- a/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -1,26 +1,24 @@ -use crate::analyzers::ReportConfig; +use analyzers::{FunctionVarsBoundAnalyzer, ReportConfig, ReportDisplay}; +use graph::{ + nodes::{ContractNode, FunctionNode}, + solvers::{AtomicSolveStatus, BruteBinSearchSolver, SolcSolver}, + Edge, +}; +use pyrometer::{Analyzer, Root, SourcePath}; +use shared::GraphDot; +use shared::Search; + use ariadne::sources; use clap::{ArgAction, Parser, ValueHint}; -use pyrometer::context::analyzers::FunctionVarsBoundAnalyzer; -use pyrometer::{ - context::{analyzers::ReportDisplay, *}, - Analyzer, -}; -use shared::nodes::FunctionNode; +use tracing_subscriber::{prelude::*, Registry}; -use shared::Edge; -use shared::{ - analyzer::{GraphLike, Search}, - nodes::ContractNode, +use std::{ + collections::{BTreeMap, HashMap}, + env::{self}, + fs, + path::PathBuf, }; -use tracing_subscriber::prelude::*; - -use std::collections::{BTreeMap, HashMap}; -use std::path::PathBuf; - -use std::env::{self}; -use std::fs; #[derive(Parser, Debug)] #[command(author, version, about, long_about = None)] @@ -58,10 +56,16 @@ struct Args { pub verbosity: u8, /// Whether to print out a dot string of the analyzed contracts #[clap(long, short, default_value = "false")] + pub mermaid: bool, + /// Whether to print out a dot string of the analyzed contracts + #[clap(long, short, default_value = "false")] pub dot: bool, /// Whether to generate and open a dot visualization of the analyzed contracts #[clap(long, short, default_value = "false")] pub open_dot: bool, + /// Whether to generate and open a mermaid visualization of the analyzed contracts + #[clap(long, default_value = "false")] + pub 
open_mermaid: bool, /// Whether to evaluate variables down to their intervals or to keep them symbolic/relational to other variables #[clap(long, short)] pub eval: Option, @@ -80,16 +84,22 @@ struct Args { /// Show non-revert paths #[clap(long)] pub show_nonreverts: Option, - // #[clap(long, short)] - // pub access_query: Vec, - // #[clap(long, short)] - // pub query: Vec, - // #[clap(long, short)] - // pub write_query: Vec, /// A debugging command to prevent bound analysis printing. Useful for debugging parse errors during development. Only prints out parse errors /// then ends the program #[clap(long)] pub debug: bool, + + /// Forces a panic on first error encountered + #[clap(long)] + pub debug_panic: bool, + + /// Max stack depth to evaluate to + #[clap(long, default_value = "200")] + pub max_stack_depth: usize, + + /// Print stats about the IR + #[clap(long)] + pub stats: bool, } pub fn subscriber() { @@ -99,10 +109,24 @@ pub fn subscriber() { .init() } +pub fn tree_subscriber() { + let subscriber = Registry::default() + .with( + tracing_tree::HierarchicalLayer::default() + .with_indent_lines(true) + .with_indent_amount(2) + .with_thread_names(true), // .with_thread_ids(true) + // .with_verbose_exit(true) + // .with_verbose_entry(true) + // .with_targets(true) + ) + .with(tracing_subscriber::filter::EnvFilter::from_default_env()); + tracing::subscriber::set_global_default(subscriber).unwrap(); +} + fn main() { - subscriber(); + tree_subscriber(); let args = Args::parse(); - let path_str = args.path.to_string(); let verbosity = args.verbosity; let config = match verbosity { 0 => ReportConfig { @@ -202,62 +226,152 @@ fn main() { show_nonreverts: args.show_nonreverts.unwrap_or(true), }, }; - - let sol = fs::read_to_string(args.path.clone()).expect("Could not find file"); - let mut analyzer = Analyzer { - root: env::current_dir().unwrap(), + max_depth: args.max_stack_depth, + root: Root::RemappingsDirectory(env::current_dir().unwrap()), ..Default::default() }; - if args.remappings.is_some() { - let remappings = args.remappings.unwrap(); - analyzer.set_remappings_and_root(remappings); - } + println!("debug panic: {}", args.debug_panic); + analyzer.debug_panic = args.debug_panic; + + let (current_path, sol) = if args.path.ends_with(".sol") { + let sol = fs::read_to_string(args.path.clone()).expect("Could not find file"); + // Remappings file only required for Solidity files + if args.remappings.is_some() { + let remappings = args.remappings.unwrap(); + analyzer.set_remappings_and_root(remappings); + } + + ( + SourcePath::SolidityFile(PathBuf::from(args.path.clone())), + sol, + ) + } else if args.path.ends_with(".json") { + let json_path_buf = PathBuf::from(args.path.clone()); + analyzer.update_with_solc_json(&json_path_buf); + let (current_path, sol, _, _) = analyzer.sources.first().unwrap().clone(); + (current_path, sol) + } else { + panic!("Unsupported file type") + }; + + let mut arena_base = Default::default(); + let arena = &mut arena_base; let t0 = std::time::Instant::now(); - let (maybe_entry, mut all_sources) = - analyzer.parse(&sol, &PathBuf::from(args.path.clone()), true); - let parse_time = t0.elapsed().as_millis(); + let maybe_entry = analyzer.parse(arena, &sol, ¤t_path, true); + let t_end = t0.elapsed(); + let parse_time = t_end.as_millis(); println!("DONE ANALYZING IN: {parse_time}ms. 
Writing to cli..."); - all_sources.push((maybe_entry, args.path, sol, 0)); - let entry = maybe_entry.unwrap(); + if args.stats { + println!("{}", analyzer.stats(t_end, arena)); + } + // println!("Arena: {:#?}", analyzer.range_arena); - let mut file_mapping: BTreeMap<_, _> = vec![(0usize, path_str)].into_iter().collect(); - file_mapping.extend( - all_sources - .iter() - .map(|(_entry, name, _src, num)| (*num, name.clone())) - .collect::>(), - ); + // use self.sources to fill a BTreeMap with the file_no and SourcePath.path_to_solidity_file + let mut file_mapping: BTreeMap = BTreeMap::new(); + let mut src_map: HashMap = HashMap::new(); + for (source_path, sol, o_file_no, _o_entry) in analyzer.sources.iter() { + if let Some(file_no) = o_file_no { + file_mapping.insert( + *file_no, + source_path.path_to_solidity_source().display().to_string(), + ); + } + src_map.insert( + source_path.path_to_solidity_source().display().to_string(), + sol.to_string(), + ); + } + let mut source_map = sources(src_map); - let mut source_map = sources( - all_sources - .iter() - .map(|(_entry, name, src, _num)| (name.clone(), src)) - .collect::>(), - ); + let entry = maybe_entry.unwrap(); + + // analyzer.print_errors(&file_mapping, &mut source_map); // let t = petgraph::algo::toposort(&analyzer.graph, None); analyzer.print_errors(&file_mapping, &mut source_map); if args.open_dot { - analyzer.open_dot() + analyzer.open_dot(arena) } if args.dot { - println!("{}", analyzer.dot_str_no_tmps()); + println!("{}", analyzer.dot_str_no_tmps(arena)); + } + + if args.mermaid { + println!("{}", analyzer.mermaid_str(arena)); } + if args.open_mermaid { + analyzer.open_mermaid(arena); + } + + // println!("{}", analyzer.range_arena.ranges.iter().map(|i| { + // let j = i.borrow(); + // let (min_cached, max_cached) = j.is_min_max_cached(&analyzer); + // format!("\t{j}, is cached: {min_cached}, {max_cached}\n") + // }).collect::>().join("")); + // println!("{}", analyzer.range_arena.map.iter().map(|(k, v)| { + // format!("\t{}: {}\n", k, v) + // }).collect::>().join("")); + if args.debug { return; } + // println!("getting contracts"); let all_contracts = analyzer .search_children(entry, &Edge::Contract) .into_iter() .map(ContractNode::from) .collect::>(); + + // TODO: clean this up to actually run on all contracts + // if args.swq { + // println!("Creating SWQ graph for {} contracts", all_contracts.len()); + // let mut cond_graph: Option = None; + // for i in 0..all_contracts.len() { + // match (&mut cond_graph, analyzer.func_query(all_contracts[i])) { + // (Some(ref mut existing), Some(new)) => { + // existing.append_graph(new); + // } + // (None, Some(new)) => { + // cond_graph = Some(new); + // } + // _ => {} + // } + // } + + // if let Some(graph) = cond_graph { + // println!("{}", graph.dot_str()); + // graph.open_dot(); + // } else { + // println!("no graph"); + // } + // } else if args.swq_mermaid { + // println!("Creating SWQ graph for {} contracts", all_contracts.len()); + // let mut cond_graph: Option = None; + // for i in 0..all_contracts.len() { + // match (&mut cond_graph, analyzer.func_query(all_contracts[i])) { + // (Some(ref mut existing), Some(new)) => { + // existing.append_graph(new); + // } + // (None, Some(new)) => { + // cond_graph = Some(new); + // } + // _ => {} + // } + // } + + // if let Some(graph) = cond_graph { + // println!("{}", graph.mermaid_str()); + // } else { + // println!("no graph"); + // } + // } else { let _t1 = std::time::Instant::now(); if args.contracts.is_empty() { let funcs = 
analyzer.search_children(entry, &Edge::Func); @@ -265,22 +379,67 @@ fn main() { if !args.funcs.is_empty() { if args.funcs.iter().any(|analyze_for| { FunctionNode::from(func) - .name(&analyzer) + .name(&mut analyzer) .unwrap() .starts_with(analyze_for) }) { if let Some(ctx) = FunctionNode::from(func).maybe_body_ctx(&mut analyzer) { - let analysis = analyzer - .bounds_for_all(&file_mapping, ctx, config) - .as_cli_compat(&file_mapping); - analysis.print_reports(&mut source_map, &analyzer); + let mut all_edges = ctx.all_edges(&analyzer).unwrap(); + all_edges.push(ctx); + all_edges.iter().for_each(|c| { + let _rets = c.return_nodes(&analyzer).unwrap(); + // if c.path(&analyzer).starts_with(r#"step(uint64, uint64, uint64, uint64, uint64, uint64, uint64, uint64, uint64, uint64)"#) + // && rets.iter().take(1).any(|ret| { + // let range = ret.1.ref_range(&analyzer).unwrap().unwrap(); + // range.evaled_range_min(&analyzer).unwrap().range_eq(&Elem::from(Concrete::from(I256::from(-1)))) + // }) + { + // step(uint64, uint64, uint64, uint64, uint64, uint64, uint64, uint64).fork{ false }.fork{ true }.fork{ true }.fork{ false }"#.to_string()) { + // println!("{:#?}", c.ctx_deps_as_controllables_str(&analyzer).unwrap()); + if let Some(mut solver) = BruteBinSearchSolver::maybe_new( + c.ctx_deps(&analyzer).unwrap(), + &mut analyzer, + arena, + ) + .unwrap() + { + match solver.solve(&mut analyzer, arena).unwrap() { + AtomicSolveStatus::Unsat => { + println!("TRUE UNSAT: {}", c.path(&analyzer)); + } + AtomicSolveStatus::Sat(ranges) => { + // println!("-----------------------"); + // println!("sat for: {}", c.path(&analyzer)); + ranges.iter().for_each(|(atomic, conc)| { + println!( + "{}: {}", + atomic.idxs[0].display_name(&analyzer).unwrap(), + conc.as_human_string() + ); + }); + } + AtomicSolveStatus::Indeterminate => { + // println!("-----------------------"); + // println!("sat for: {}", c.path(&analyzer)); + // println!("MAYBE UNSAT"); + } + } + } + // println!("-----------------------"); + let analysis = analyzer + .bounds_for_lineage(arena, &file_mapping, *c, vec![*c], config) + .as_cli_compat(&file_mapping); + analysis.print_reports(&mut source_map, &analyzer, arena); + // return; + } + }); } } } else if let Some(ctx) = FunctionNode::from(func).maybe_body_ctx(&mut analyzer) { let analysis = analyzer - .bounds_for_all(&file_mapping, ctx, config) + .bounds_for_all(arena, &file_mapping, ctx, config) .as_cli_compat(&file_mapping); - analysis.print_reports(&mut source_map, &analyzer); + analysis.print_reports(&mut source_map, &analyzer, arena); } } } else { @@ -296,23 +455,24 @@ fn main() { let funcs = contract.funcs(&analyzer); for func in funcs.into_iter() { if !args.funcs.is_empty() { - if args.funcs.contains(&func.name(&analyzer).unwrap()) { + if args.funcs.contains(&func.name(&mut analyzer).unwrap()) { let ctx = func.body_ctx(&mut analyzer); let analysis = analyzer - .bounds_for_all(&file_mapping, ctx, config) + .bounds_for_all(arena, &file_mapping, ctx, config) .as_cli_compat(&file_mapping); - analysis.print_reports(&mut source_map, &analyzer); + analysis.print_reports(&mut source_map, &analyzer, arena); } } else { let ctx = func.body_ctx(&mut analyzer); let analysis = analyzer - .bounds_for_all(&file_mapping, ctx, config) + .bounds_for_all(arena, &file_mapping, ctx, config) .as_cli_compat(&file_mapping); - analysis.print_reports(&mut source_map, &analyzer); + analysis.print_reports(&mut source_map, &analyzer, arena); } } }); } + // } // args.query.iter().for_each(|query| { // 
analyzer.taint_query(entry, query.to_string()); diff --git a/crates/graph/Cargo.toml b/crates/graph/Cargo.toml new file mode 100644 index 00000000..af3dff3a --- /dev/null +++ b/crates/graph/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "graph" +description = "Pyrometer's internal graph" + +version.workspace = true +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +shared.workspace = true +solang-parser.workspace = true + +petgraph.workspace = true +ethers-core.workspace = true +hex.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true + +itertools = "0.10.5" +lazy_static = "1.4.0" + +[dev-dependencies] +pretty_assertions = "1.4.0" diff --git a/crates/graph/src/graph_elements.rs b/crates/graph/src/graph_elements.rs new file mode 100644 index 00000000..652d643d --- /dev/null +++ b/crates/graph/src/graph_elements.rs @@ -0,0 +1,423 @@ +use crate::elem::Elem; +use crate::{nodes::*, VarType}; + +use shared::{AnalyzerLike, GraphLike, Heirarchical, NodeIdx, RangeArena}; + +use lazy_static::lazy_static; +use petgraph::{Directed, Graph}; +use solang_parser::pt::{Identifier, Loc}; + +use std::collections::HashMap; + +pub trait GraphBackend: GraphLike> {} +pub trait AnalyzerBackend: + AnalyzerLike< + Builtin = Builtin, + MsgNode = MsgNode, + BlockNode = BlockNode, + FunctionNode = FunctionNode, + FunctionParam = FunctionParam, + FunctionReturn = FunctionReturn, + Function = Function, + > + GraphBackend +{ + fn add_concrete_var( + &mut self, + ctx: ContextNode, + concrete: Concrete, + loc: Loc, + ) -> Result; +} + +pub trait AsDotStr { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> String; +} + +#[derive(Debug, Clone, Ord, Eq, PartialEq, PartialOrd)] +pub enum GraphError { + /// The analyzer thought the node was suppose to be one type, but it was a different one + NodeConfusion(String), + /// Call depth limit reached + MaxStackDepthReached(String), + /// Fork width limit reached + MaxStackWidthReached(String), + /// Tried to set the subcontext of a context that already had a subcontext + ChildRedefinition(String), + /// Tried to update a variable that is in an old context + VariableUpdateInOldContext(String), + /// Variable is detached from all contexts + DetachedVariable(String), + /// Expected a single element, found multiple + ExpectedSingle(String), + /// Expected a vector with a certain number of elements, but it was a different number of elements + StackLengthMismatch(String), + /// A variable had a cyclic reference to another variable and we were unable to break the cycle + UnbreakableRecursion(String), + /// The analyzer thought the node was suppose to be one type, but it was a different one + UnknownVariable(String), +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum Node { + /// An analyzed function body/context + Context(Context), + /// A variable attached to a context or the previous version of this variable (akin to SSA form) + ContextVar(ContextVar), + /// A fork in execution caused by an if-like statement + ContextFork, + /// A call to another function, either public or internal + FunctionCall, + /// A builtin solidity type (i.e. Address, Uint, Bytes, etc) + Builtin(Builtin), + /// A node that represents whether a variable's type is a User-defined type, builtin type or a concrete + VarType(VarType), + /// The entry node in the graph + Entry, + /// A source unit (i.e. 
a source file) + SourceUnit(SourceUnit), + /// A subcomponent of the source unit + SourceUnitPart(SourceUnitPart), + /// A contract + Contract(Contract), + /// A solidity-based function + Function(Function), + /// A solidity-based function parameter + FunctionParam(FunctionParam), + /// A solidity-based function return parameter + FunctionReturn(FunctionReturn), + /// A solidity-based struct + Struct(Struct), + /// A solidity-based enum + Enum(Enum), + /// A solidity-based error + Error(Error), + /// A solidity-based error parameter + ErrorParam(ErrorParam), + /// A solidity-based struct or contract field + Field(Field), + /// A storage or constant variable on a contract + Var(Var), + /// A solidity-based type alias + Ty(Ty), + /// An unresolved type + Unresolved(Identifier), + /// A concrete value (i.e. '1' or '0x111') + Concrete(Concrete), + /// The `msg` global in solidity + Msg(Msg), + /// The `block` global in solidity + Block(Block), +} + +pub fn as_dot_str( + idx: NodeIdx, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> String { + use crate::Node::*; + match analyzer.node(idx) { + Context(_) => ContextNode::from(idx).as_dot_str(analyzer, arena), + ContextVar(_) => ContextVarNode::from(idx).as_dot_str(analyzer, arena), + ContextFork => "Context Fork".to_string(), + FunctionCall => "Function Call".to_string(), + Builtin(bi) => bi.as_string(analyzer).unwrap(), + VarType(v_ty) => v_ty.as_string(analyzer).unwrap(), + Contract(_c) => ContractNode::from(idx).as_dot_str(analyzer, arena), + Function(_f) => FunctionNode::from(idx).as_dot_str(analyzer, arena), + FunctionParam(_fp) => FunctionParamNode::from(idx).as_dot_str(analyzer, arena), + FunctionReturn(_fr) => FunctionReturnNode::from(idx).as_dot_str(analyzer, arena), + Struct(_s) => StructNode::from(idx).as_dot_str(analyzer, arena), + Enum(_e) => EnumNode::from(idx).as_dot_str(analyzer, arena), + Field(_f) => FieldNode::from(idx).as_dot_str(analyzer, arena), + Var(_v) => VarNode::from(idx).as_dot_str(analyzer, arena), + Ty(_t) => TyNode::from(idx).as_dot_str(analyzer, arena), + // Concrete(c) => c.as_human_string(), + e => format!("{e:?}"), + } +} + +impl Node { + pub fn dot_str_color(&self) -> String { + use crate::Node::*; + let c = match self { + Context(_) => TOKYO_NIGHT_COLORS.get("purple").unwrap(), + ContextVar(_) => TOKYO_NIGHT_COLORS.get("orange").unwrap(), + FunctionCall => TOKYO_NIGHT_COLORS.get("cyan").unwrap(), + Contract(_c) => TOKYO_NIGHT_COLORS.get("green").unwrap(), + Function(_f) => TOKYO_NIGHT_COLORS.get("cyan").unwrap(), + Struct(_s) => TOKYO_NIGHT_COLORS.get("yellow").unwrap(), + Enum(_e) => TOKYO_NIGHT_COLORS.get("yellow").unwrap(), + _ => TOKYO_NIGHT_COLORS.get("default").unwrap(), + }; + c.to_string() + } +} + +pub fn num_to_color(x: usize) -> String { + let c = match x % 29 { + 0 => TOKYO_NIGHT_COLORS.get("default").unwrap(), + 1 => TOKYO_NIGHT_COLORS.get("font").unwrap(), + 2 => TOKYO_NIGHT_COLORS.get("bg_highlight").unwrap(), + 3 => TOKYO_NIGHT_COLORS.get("terminal_black").unwrap(), + 4 => TOKYO_NIGHT_COLORS.get("fg_dark").unwrap(), + 5 => TOKYO_NIGHT_COLORS.get("fg_gutter").unwrap(), + 6 => TOKYO_NIGHT_COLORS.get("dark3").unwrap(), + 7 => TOKYO_NIGHT_COLORS.get("dark5").unwrap(), + 8 => TOKYO_NIGHT_COLORS.get("blue0").unwrap(), + 9 => TOKYO_NIGHT_COLORS.get("cyan").unwrap(), + 10 => TOKYO_NIGHT_COLORS.get("blue2").unwrap(), + 11 => TOKYO_NIGHT_COLORS.get("blue5").unwrap(), + 12 => TOKYO_NIGHT_COLORS.get("blue6").unwrap(), + 13 => TOKYO_NIGHT_COLORS.get("blue7").unwrap(), + 14 => 
TOKYO_NIGHT_COLORS.get("magenta2").unwrap(), + 15 => TOKYO_NIGHT_COLORS.get("purple").unwrap(), + 16 => TOKYO_NIGHT_COLORS.get("orange").unwrap(), + 17 => TOKYO_NIGHT_COLORS.get("yellow").unwrap(), + 18 => TOKYO_NIGHT_COLORS.get("green").unwrap(), + 19 => TOKYO_NIGHT_COLORS.get("green1").unwrap(), + 20 => TOKYO_NIGHT_COLORS.get("teal").unwrap(), + 21 => TOKYO_NIGHT_COLORS.get("red").unwrap(), + 22 => TOKYO_NIGHT_COLORS.get("red1").unwrap(), + 23 => TOKYO_NIGHT_COLORS.get("cyan").unwrap(), + 24 => TOKYO_NIGHT_COLORS.get("teal").unwrap(), + 25 => TOKYO_NIGHT_COLORS.get("darkblue").unwrap(), + 26 => TOKYO_NIGHT_COLORS.get("purple").unwrap(), + 27 => TOKYO_NIGHT_COLORS.get("bg1").unwrap(), + 28 => TOKYO_NIGHT_COLORS.get("deepred").unwrap(), + _ => unreachable!(), + }; + c.to_string() +} + +lazy_static! { + pub static ref TOKYO_NIGHT_COLORS: HashMap<&'static str, &'static str> = { + let mut m = HashMap::new(); + m.insert("bg_dark", "#1f2335"); + m.insert("bg1", "#24283b"); + m.insert("bg_highlight", "#292e42"); + m.insert("terminal_black", "#414868"); + m.insert("fg_dark", "#a9b1d6"); + m.insert("fg_gutter", "#3b4261"); + m.insert("dark3", "#545c7e"); + m.insert("dark5", "#737aa2"); + m.insert("blue0", "#3d59a1"); + m.insert("cyan", "#7dcfff"); + m.insert("blue2", "#0db9d7"); + m.insert("blue5", "#89ddff"); + m.insert("blue6", "#b4f9f8"); + m.insert("blue7", "#394b70"); + m.insert("magenta2", "#ff007c"); + m.insert("purple", "#9d7cd8"); + m.insert("orange", "#ff9e64"); + m.insert("yellow", "#e0af68"); + m.insert("green", "#9ece6a"); + m.insert("green1", "#41a6b5"); + m.insert("teal", "#1abc9c"); + m.insert("red", "#f7768e"); + m.insert("red1", "#db4b4b"); + m.insert("cyan", "#73daca"); + m.insert("teal", "#2ac3de"); + m.insert("darkblue", "#7aa2f7"); + m.insert("purple", "#bb9af7"); + m.insert("bg", "#1a1b26"); + m.insert("font", "#c0caf5"); + m.insert("deepred", "#703440"); + m.insert("default", "#565f89"); + m + }; +} + +#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] +pub enum Edge { + /// A connection between a Source and the Entry + Source, + /// A connection between a SourceUnitPart and a Source + Part, + /// An edge indicating that a source or contract was imported + Import, + /// An edge that contains a subtype of edge corresponding to some + /// kind of context-based relationship + Context(ContextEdge), + /// A connection for a contract to it's parent + Contract, + /// A connection for a contract to it's parent contract + InheritedContract, + /// A connection for a field to it's parent + Field, + /// A connection for an enum to it's parent + Enum, + /// A connection for a struct to it's parent + Struct, + /// A connection for an error to it's parent + Error, + /// A connection for an error parameter to it's parent error + ErrorParam, + /// A connection for an event to it's parent + Event, + /// A connection for a storage/constant variable to it's parent + Var, + Ty, + /// A connection for a function to it's parent + Func, + /// A connection for a function parameter to it's parent function + FunctionParam, + /// A connection for a function return to it's parent function + FunctionReturn, + /// A connection for a function modifier to it's parent function, with its order + FuncModifier(usize), + /// A connection for a modifier to it's parent + Modifier, + /// A connection for a fallback function to it's parent contract + FallbackFunc, + /// A connection for a contract constructor function to it's parent contract + Constructor, + /// A connection for a receive function 
to it's parent contract + ReceiveFunc, + /// A connection for a library-based function to a contract + LibraryFunction(NodeIdx), + /// A connection for a builtin function + BuiltinFunction, +} + +impl Heirarchical for Edge { + fn heirarchical_num(&self) -> usize { + use crate::Edge::*; + match self { + Source => 0, + Part | Import => 1, + + Contract | Ty | Field | Enum | Struct | Error | Event | Var | InheritedContract + | Modifier | FallbackFunc | Constructor | ReceiveFunc | LibraryFunction(_) + | BuiltinFunction | Func => 2, + + Context(_) | ErrorParam | FunctionParam | FunctionReturn | FuncModifier(_) => 3, + } + } +} + +/// An enum denoting either a call or a fork +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub enum CallFork { + Call(ContextNode), + Fork(ContextNode, ContextNode), +} + +impl CallFork { + pub fn maybe_call(&self) -> Option { + match self { + CallFork::Call(c) => Some(*c), + _ => None, + } + } + + pub fn maybe_fork(&self) -> Option<(ContextNode, ContextNode)> { + match self { + CallFork::Fork(w1, w2) => Some((*w1, *w2)), + _ => None, + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub enum ContextEdge { + // Control flow + /// A connection for a context to a function + Context, + /// A connection for a subcontext to it's parent context + Subcontext, + /// A connection for a context to another context in which the source is the new + /// context and the target is the original context. That is: + /// ContextA -subcontext-> ContextB -subcontext> ContextAPrime + /// ^-----------------------ReturningContext--------| + ReturningContext, + /// A connection to a ContextFork to denote a fork in execution + ContextFork, + /// Currently unused + ContextMerge, + /// A call to a function, a connection from a context to a `FuncCall` node + Call, + /// A loop + Loop, + + // Context Variables + /// A new variable in cotext + Variable, + /// A connection between a variable in a new context and that variable in a parent context denoting + /// that it is inherited from a parent scope + InheritedVariable, + /// A connection between a `Var` and a context variable denoting that the variable reads from storage + InheritedStorageVariable, + /// A connection to the calldata variable + CalldataVariable, + + /// A connection between a variable and a parent variable where the child is some attribute on the parent + /// (i.e. `.length`) + AttrAccess(&'static str), + /// A connection between a variable and the index that was used to create the variable from an IndexAccess + Index, + /// A connection between a variable and a parent variable where the child is some index into the parent + /// (i.e. 
`x[1]`) + IndexAccess, + /// A connection between a variable and a parent variable where the child is some field of the parent + StructAccess, + /// A connection between a function-as-a-variable and the contract that holds that function + FuncAccess, + /// A write to a storage variable, connecting the variable that is written to the variable and the storage variable itself + StorageWrite, + /// An access to a storage slot + SlotAccess, + + // Variable incoming edges + /// Unused + Assign, + /// Unused + StorageAssign, + /// Unused + MemoryAssign, + /// A connection of a variable to the previous version of that variable + Prev, + + // Control flow + /// A connection between a variable and the context denoting that the variable is returned + Return, + /// A continuation of a context + Continue(&'static str), + /// A connection between a brand new created variable for a function's context and the variable + InputVariable, + /// A connection to a return variable that should be assigned + ReturnAssign(bool), + + // Range analysis + /// Unused + Range, +} + +#[derive(Default)] +pub(crate) struct DummyGraph { + pub range_arena: RangeArena>, +} + +impl GraphLike for DummyGraph { + type Node = Node; + type Edge = Edge; + type RangeElem = Elem; + fn graph_mut(&mut self) -> &mut Graph { + panic!("Dummy Graph") + } + + fn graph(&self) -> &Graph { + panic!("Dummy Graph") + } + fn range_arena(&self) -> &RangeArena> { + &self.range_arena + } + fn range_arena_mut(&mut self) -> &mut RangeArena> { + &mut self.range_arena + } +} + +impl GraphBackend for DummyGraph {} diff --git a/crates/graph/src/lib.rs b/crates/graph/src/lib.rs new file mode 100644 index 00000000..2f35381d --- /dev/null +++ b/crates/graph/src/lib.rs @@ -0,0 +1,10 @@ +mod graph_elements; +mod range; +mod var_type; + +pub mod nodes; +pub mod solvers; + +pub use graph_elements::*; +pub use range::*; +pub use var_type::*; diff --git a/shared/src/nodes/block.rs b/crates/graph/src/nodes/block.rs similarity index 73% rename from shared/src/nodes/block.rs rename to crates/graph/src/nodes/block.rs index e5cd8201..4ac5bf6a 100644 --- a/shared/src/nodes/block.rs +++ b/crates/graph/src/nodes/block.rs @@ -1,12 +1,7 @@ -use crate::analyzer::AsDotStr; -use crate::analyzer::GraphError; -use crate::analyzer::GraphLike; +use crate::{nodes::Concrete, range::elem::Elem, AsDotStr, GraphBackend, GraphError, Node}; +use shared::{NodeIdx, RangeArena}; -use crate::Node; -use crate::NodeIdx; -use ethers_core::types::Address; -use ethers_core::types::H256; -use ethers_core::types::U256; +use ethers_core::types::{Address, H256, U256}; /// An index in the graph that references a Block node #[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] @@ -14,9 +9,13 @@ pub struct BlockNode(pub usize); impl BlockNode { /// Gets the underlying node data for the block environment - pub fn underlying<'a>(&self, analyzer: &'a impl GraphLike) -> Result<&'a Block, GraphError> { + pub fn underlying<'a>(&self, analyzer: &'a impl GraphBackend) -> Result<&'a Block, GraphError> { match analyzer.node(*self) { Node::Block(st) => Ok(st), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), e => Err(GraphError::NodeConfusion(format!( "Node type confusion: expected node to be Msg but it was: {e:?}" ))), @@ -25,7 +24,11 @@ impl BlockNode { } impl AsDotStr for BlockNode { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + _arena: &mut 
RangeArena>, + ) -> String { format!("block {{ {:?} }}", self.underlying(analyzer).unwrap()) } } diff --git a/crates/graph/src/nodes/builtin.rs b/crates/graph/src/nodes/builtin.rs new file mode 100644 index 00000000..757ca985 --- /dev/null +++ b/crates/graph/src/nodes/builtin.rs @@ -0,0 +1,531 @@ +use crate::{nodes::Concrete, AnalyzerBackend, GraphBackend, GraphError, Node, SolcRange, VarType}; + +use crate::range::elem::*; +use shared::{NodeIdx, RangeArena}; + +use ethers_core::types::{Address, H256, I256, U256}; +use solang_parser::pt::{Expression, Loc, Type}; + +/// A builtin node +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub struct BuiltInNode(pub usize); + +impl BuiltInNode { + /// Gets the underlying builtin from the graph + pub fn underlying<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result<&'a Builtin, GraphError> { + match analyzer.node(*self) { + Node::Builtin(b) => Ok(b), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), + e => Err(GraphError::NodeConfusion(format!( + "Node type confusion: expected node to be Builtin but it was: {e:?}" + ))), + } + } + + /// Gets the size of the builtin + pub fn num_size(&self, analyzer: &impl GraphBackend) -> Result, GraphError> { + let underlying = self.underlying(analyzer)?; + Ok(underlying.num_size()) + } + + /// Checks if this builtin is implicitly castable to another builtin + pub fn implicitly_castable_to( + &self, + other: &Self, + analyzer: &impl GraphBackend, + ) -> Result { + Ok(self + .underlying(analyzer)? + .implicitly_castable_to(other.underlying(analyzer)?)) + } + + /// Gets the maximum size version of this builtin, i.e. uint16 -> uint256 + pub fn max_size(&self, analyzer: &mut impl AnalyzerBackend) -> Result { + let m = self.underlying(analyzer)?.max_size(); + Ok(analyzer.builtin_or_add(m).into()) + } + + /// Gets the underlying type of the dynamic builtin backing it. i.e. uint256[] -> uint256 + pub fn dynamic_underlying_ty( + &self, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + match self.underlying(analyzer)? { + Builtin::Array(v_ty) | Builtin::SizedArray(_, v_ty) => { + v_ty.unresolved_as_resolved(analyzer) + } + Builtin::Mapping(_, v_ty) => v_ty.unresolved_as_resolved(analyzer), + Builtin::DynamicBytes | Builtin::Bytes(_) => Ok(VarType::BuiltIn( + analyzer.builtin_or_add(Builtin::Bytes(1)).into(), + Some(SolcRange::new( + Elem::from(Concrete::from(vec![0x00])), + Elem::from(Concrete::from(vec![0xff])), + vec![], + )), + )), + e => Err(GraphError::NodeConfusion(format!( + "Node type confusion: expected node to be Builtin::Array but it was: {e:?}" + ))), + } + } + + /// Returns whether the builtin is a mapping + pub fn is_mapping(&self, analyzer: &impl GraphBackend) -> Result { + Ok(matches!(self.underlying(analyzer)?, Builtin::Mapping(_, _))) + } + + /// Returns whether the builtin is a sized array + pub fn is_sized_array(&self, analyzer: &impl GraphBackend) -> Result { + Ok(matches!( + self.underlying(analyzer)?, + Builtin::SizedArray(_, _) + )) + } + + /// Returns whether the builtin is a sized array or bytes + pub fn maybe_array_size( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + match self.underlying(analyzer)? 
{ + Builtin::SizedArray(s, _) => Ok(Some(*s)), + Builtin::Bytes(s) => Ok(Some(U256::from(*s))), + _ => Ok(None), + } + } + + /// Returns whether the builtin is a dynamic type + pub fn is_dyn(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self.underlying(analyzer)?.is_dyn()) + } + + /// Returns whether the builtin is indexable + pub fn is_indexable(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self.underlying(analyzer)?.is_indexable()) + } + + /// Returns the zero range for this builtin type, i.e. uint256 -> [0, 0] + pub fn zero_range( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + Ok(self.underlying(analyzer)?.zero_range()) + } +} + +impl From for BuiltInNode { + fn from(idx: NodeIdx) -> Self { + BuiltInNode(idx.index()) + } +} + +impl From for NodeIdx { + fn from(val: BuiltInNode) -> Self { + val.0.into() + } +} + +/// A fundamental builtin type +#[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)] +pub enum Builtin { + /// An address + Address, + /// A payable address + AddressPayable, + /// A payable address, differentiated in Solang so we differentiate + Payable, + /// A boolean + Bool, + /// A string - TODO: we should represent this as bytes internally + String, + /// A signed integer that has a size + Int(u16), + /// An unsigned integer that has a size + Uint(u16), + /// A bytes that has a size, i.e. bytes8 + Bytes(u8), + /// A rational. Rarely used in practice + Rational, + /// A byte array, i.e. bytes + DynamicBytes, + /// An array that has an internal type, i.e. uint256[] + Array(VarType), + /// An array that has an internal type and is sized, i.e. uint256[5] + SizedArray(U256, VarType), + /// A mapping, i.e. `mapping (address => uint)` + Mapping(VarType, VarType), + /// A function pointer that takes a vector of types and returns a vector of types + Func(Vec, Vec), +} + +impl Builtin { + /// Resolves the `VarType` in dynamic builtins due to parse order - i.e. we could + /// `mapping (uint => MyType)`, we may not have parsed `MyType`, so we now try to resolve it + pub fn unresolved_as_resolved( + &self, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + match self { + Builtin::Array(n) => Ok(Builtin::Array(n.unresolved_as_resolved(analyzer)?)), + Builtin::SizedArray(s, n) => { + Ok(Builtin::SizedArray(*s, n.unresolved_as_resolved(analyzer)?)) + } + Builtin::Mapping(k, v) => Ok(Builtin::Mapping( + k.unresolved_as_resolved(analyzer)?, + v.unresolved_as_resolved(analyzer)?, + )), + _ => Ok(self.clone()), + } + } + + /// Possible types that this type could have been had a literal been parsed differently - i.e. a `1` + /// could be uint8 to uint256. 
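A minimal standalone sketch of the size walk the method below performs for unsigned literals, using plain u16 widths instead of `Builtin` (the `possible_uint_sizes` helper and `main` are illustrative only; widths are assumed to be multiples of 8, as Solidity's are):

// Candidate bit-widths for a literal parsed as `uint<size>`: the method below
// walks downward in steps of 8, so a `uint24` literal could also have been a
// `uint16` or a `uint8`. This mirrors its loop with plain integers.
fn possible_uint_sizes(size: u16) -> Vec<u16> {
    let mut sizes = Vec::new();
    let mut s = size;
    while s > 0 {
        sizes.push(s);
        s -= 8;
    }
    sizes
}

fn main() {
    assert_eq!(possible_uint_sizes(24), vec![24, 16, 8]);
}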
+ pub fn possible_builtins_from_ty_inf(&self) -> Vec { + let mut builtins = vec![]; + match self { + Builtin::Uint(size) => { + let mut s = *size; + while s > 0 { + builtins.push(Builtin::Uint(s)); + s -= 8; + } + } + Builtin::Int(size) => { + let mut s = *size; + while s > 0 { + builtins.push(Builtin::Int(s)); + s -= 8; + } + } + Builtin::Bytes(size) => { + let mut s = *size; + while s > 0 { + builtins.push(Builtin::Bytes(s)); + s -= 1; + } + } + _ => {} + } + builtins + } + + /// Construct a [`SolcRange`] that is zero + pub fn zero_range(&self) -> Option { + match self { + Builtin::Address | Builtin::AddressPayable | Builtin::Payable => { + let zero = Concrete::Address(Address::from_slice(&[0x00; 20])); + Some(SolcRange::new(zero.clone().into(), zero.into(), vec![])) + } + Builtin::Bool => SolcRange::from(Concrete::from(false)), + Builtin::String => SolcRange::from(Concrete::from("".to_string())), + Builtin::Int(_) => SolcRange::from(Concrete::from(I256::from(0))), + Builtin::Uint(_) => SolcRange::from(Concrete::from(U256::from(0))), + Builtin::Bytes(s) => SolcRange::from(Concrete::Bytes(*s, H256::zero())), + Builtin::DynamicBytes | Builtin::Array(_) | Builtin::Mapping(_, _) => { + let zero = Elem::ConcreteDyn(RangeDyn::new( + Elem::from(Concrete::from(U256::zero())), + Default::default(), + Loc::Implicit, + )); + Some(SolcRange::new(zero.clone(), zero, vec![])) + } + Builtin::SizedArray(s, _) => { + let sized = Elem::ConcreteDyn(RangeDyn::new( + Elem::from(Concrete::from(*s)), + Default::default(), + Loc::Implicit, + )); + Some(SolcRange::new(sized.clone(), sized, vec![])) + } + Builtin::Rational | Builtin::Func(_, _) => None, + } + } + + /// Try to convert from a [`Type`] to a Builtin + pub fn try_from_ty( + ty: Type, + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, + ) -> Option { + use Type::*; + match ty { + Address => Some(Builtin::Address), + AddressPayable => Some(Builtin::AddressPayable), + Payable => Some(Builtin::Payable), + Bool => Some(Builtin::Bool), + String => Some(Builtin::String), + Int(size) => Some(Builtin::Int(size)), + Uint(size) => Some(Builtin::Uint(size)), + Bytes(size) => Some(Builtin::Bytes(size)), + Rational => Some(Builtin::Rational), + DynamicBytes => Some(Builtin::DynamicBytes), + Mapping { key, value, .. } => { + let key_idx = analyzer.parse_expr(arena, &key, None); + let val_idx = analyzer.parse_expr(arena, &value, None); + let key_var_ty = VarType::try_from_idx(analyzer, key_idx)?; + let val_var_ty = VarType::try_from_idx(analyzer, val_idx)?; + Some(Builtin::Mapping(key_var_ty, val_var_ty)) + } + Function { + params, + attributes: _, + returns, + } => { + let inputs = params + .iter() + .filter_map(|(_, param)| param.as_ref()) + .map(|param| analyzer.parse_expr(arena, ¶m.ty, None)) + .collect::>(); + let inputs = inputs + .iter() + .map(|idx| VarType::try_from_idx(analyzer, *idx).expect("Couldn't parse param")) + .collect::>(); + let mut outputs = vec![]; + if let Some((params, _attrs)) = returns { + let tmp_outputs = params + .iter() + .filter_map(|(_, param)| param.as_ref()) + .map(|param| analyzer.parse_expr(arena, ¶m.ty, None)) + .collect::>(); + outputs = tmp_outputs + .iter() + .map(|idx| { + VarType::try_from_idx(analyzer, *idx) + .expect("Couldn't parse output param") + }) + .collect::>(); + } + Some(Builtin::Func(inputs, outputs)) + } + } + } + + /// Returns whether the builtin is dynamic + pub fn is_dyn(&self) -> bool { + matches!( + self, + Builtin::DynamicBytes + | Builtin::Array(..) + | Builtin::SizedArray(..) 
+ | Builtin::Mapping(..) + | Builtin::String + ) + } + + /// Returns whether the builtin requires input to perform an operation on (like addition) + pub fn requires_input(&self) -> bool { + matches!( + self, + Builtin::Array(..) | Builtin::SizedArray(..) | Builtin::Mapping(..) + ) + } + + /// Returns the size of the integer if it is an integer (signed or unsigned) + pub fn num_size(&self) -> Option { + match self { + Builtin::Uint(size) => Some(*size), + Builtin::Int(size) => Some(*size), + _ => None, + } + } + + /// Returns whether the builtin is a signed integer + pub fn is_int(&self) -> bool { + matches!(self, Builtin::Int(_)) + } + + /// Returns whether the builtin is indexable (bytes, array[], array[5], mapping(..), bytes32, string) + pub fn is_indexable(&self) -> bool { + matches!( + self, + Builtin::DynamicBytes + | Builtin::Array(..) + | Builtin::SizedArray(..) + | Builtin::Mapping(..) + | Builtin::Bytes(..) + | Builtin::String + ) + } + + /// Checks if self is implicitly castable to another builtin + pub fn implicitly_castable_to(&self, other: &Self) -> bool { + use Builtin::*; + match (self, other) { + (Address, Address) => true, + (Address, AddressPayable) => true, + (Address, Payable) => true, + (AddressPayable, Address) => true, + (AddressPayable, Payable) => true, + (AddressPayable, AddressPayable) => true, + (Payable, Address) => true, + (Payable, AddressPayable) => true, + (Payable, Payable) => true, + (Bool, Bool) => true, + (Rational, Rational) => true, + (DynamicBytes, DynamicBytes) => true, + (String, String) => true, + (Uint(from_size), Uint(to_size)) => from_size <= to_size, + (Uint(from_size), Address) => *from_size == 160, + (Int(from_size), Int(to_size)) => from_size <= to_size, + (Bytes(from_size), Bytes(to_size)) => from_size <= to_size, + _ => false, + } + } + + /// Returns the max size version of this builtin + pub fn max_size(&self) -> Self { + use Builtin::*; + match self { + Uint(_) => Uint(256), + Int(_from_size) => Uint(256), + Bytes(_from_size) => Uint(32), + _ => self.clone(), + } + } + + pub fn zero_concrete(&self) -> Option { + match self { + Builtin::Uint(size) => Some(Concrete::Uint(*size, U256::zero())), + Builtin::Int(size) => Some(Concrete::Int(*size, I256::from_raw(U256::zero()))), + Builtin::Bytes(size) => { + let h = H256::default(); + Some(Concrete::Bytes(*size, h)) + } + Builtin::Address => Some(Concrete::Address(Address::from_slice(&[0x00; 20]))), + Builtin::Bool => Some(Concrete::Bool(false)), + _ => None, + } + } + + pub fn max_concrete(&self) -> Option { + match self { + Builtin::Uint(size) => { + let max = if *size == 256 { + U256::MAX + } else { + U256::from(2).pow(U256::from(*size)) - 1 + }; + Some(Concrete::Uint(*size, max)) + } + Builtin::Int(size) => { + let max: I256 = + I256::from_raw((U256::from(1u8) << U256::from(*size - 1)) - U256::from(1)); + Some(Concrete::Int(*size, max)) + } + Builtin::Bytes(size) => { + let size = *size as u16 * 8; + let max = if size == 256 { + U256::MAX + } else { + U256::from(2).pow(U256::from(size)) - 1 + }; + + let mut h = H256::default(); + max.to_big_endian(h.as_mut()); + Some(Concrete::Bytes((size / 8) as u8, h)) + } + Builtin::Address => Some(Concrete::Address(Address::from_slice(&[0xff; 20]))), + Builtin::Bool => Some(Concrete::Bool(true)), + _ => None, + } + } + + pub fn min_concrete(&self) -> Option { + match self { + Builtin::Uint(size) => Some(Concrete::Uint(*size, U256::zero())), + Builtin::Int(size) => Some(Concrete::Int(*size, I256::MIN)), + Builtin::Bytes(size) => { + let h = 
H256::default(); + Some(Concrete::Bytes(*size, h)) + } + Builtin::Address => Some(Concrete::Address(Address::from_slice(&[0x00; 20]))), + Builtin::Bool => Some(Concrete::Bool(false)), + _ => None, + } + } + + /// Converts the builtin to a string + pub fn as_string(&self, analyzer: &impl GraphBackend) -> Result { + use Builtin::*; + match self { + Address => Ok("address".to_string()), + AddressPayable => Ok("address".to_string()), + Payable => Ok("address".to_string()), + Bool => Ok("bool".to_string()), + String => Ok("string".to_string()), + Int(size) => Ok(format!("int{size}")), + Uint(size) => Ok(format!("uint{size}")), + Bytes(size) => Ok(format!("bytes{size}")), + Rational => Ok("rational".to_string()), + DynamicBytes => Ok("bytes".to_string()), + Array(v_ty) => Ok(format!( + "{}[]", + v_ty.unresolved_as_resolved(analyzer)?.as_string(analyzer)? + )), + SizedArray(s, v_ty) => Ok(format!( + "{}[{}]", + v_ty.unresolved_as_resolved(analyzer)?.as_string(analyzer)?, + s + )), + Mapping(key_ty, v_ty) => Ok(format!( + "mapping ({} => {})", + key_ty + .unresolved_as_resolved(analyzer)? + .as_string(analyzer)?, + v_ty.unresolved_as_resolved(analyzer)?.as_string(analyzer)? + )), + Func(inputs, outputs) => Ok(format!( + "function({}) returns ({})", + inputs + .iter() + .map(|input| input.as_string(analyzer).unwrap()) + .collect::>() + .join(", "), + outputs + .iter() + .map(|output| output.as_string(analyzer).unwrap()) + .collect::>() + .join(", ") + )), + } + } + + /// Converts the builtin to a string if it is not dynamic + pub fn basic_as_string(&self) -> String { + use Builtin::*; + match self { + Address => "address".to_string(), + AddressPayable => "address".to_string(), + Payable => "address".to_string(), + Bool => "bool".to_string(), + String => "string".to_string(), + Int(size) => format!("int{size}"), + Uint(size) => format!("uint{size}"), + Bytes(size) => format!("bytes{size}"), + Rational => "rational".to_string(), + DynamicBytes => "bytes".to_string(), + Array(_v_ty) => "[]".to_string(), + SizedArray(s, _v_ty) => format!("[{}]", s), + Mapping(_key_ty, _v_ty) => "mapping ( => )".to_string(), + Func(inputs, outputs) => format!( + "function({}) returns ({})", + inputs + .iter() + .map(|_input| "") + .collect::>() + .join(", "), + outputs + .iter() + .map(|_output| "") + .collect::>() + .join(", ") + ), + } + } +} diff --git a/shared/src/nodes/concrete.rs b/crates/graph/src/nodes/concrete.rs similarity index 71% rename from shared/src/nodes/concrete.rs rename to crates/graph/src/nodes/concrete.rs index 542c83f6..eeafe272 100644 --- a/shared/src/nodes/concrete.rs +++ b/crates/graph/src/nodes/concrete.rs @@ -1,8 +1,6 @@ -use crate::analyzer::GraphError; -use crate::analyzer::{AnalyzerLike, GraphLike}; -use crate::Builtin; -use crate::VarType; -use crate::{Node, NodeIdx}; +use crate::{nodes::Builtin, AnalyzerBackend, GraphBackend, GraphError, Node, VarType}; +use shared::NodeIdx; + use ethers_core::types::{Address, H256, I256, U256}; /// An index in the graph that references a [`Concrete`] node @@ -11,26 +9,32 @@ pub struct ConcreteNode(pub usize); impl ConcreteNode { /// Gets the underlying node data for the [`Concrete`] - pub fn underlying<'a>(&self, analyzer: &'a impl GraphLike) -> Result<&'a Concrete, GraphError> { + pub fn underlying<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result<&'a Concrete, GraphError> { match analyzer.node(*self) { Node::Concrete(c) => Ok(c), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + 
ident.name + ))), e => Err(GraphError::NodeConfusion(format!( "Node type confusion: expected node to be Concrete but it was: {e:?}" ))), } } - pub fn max_size( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { + /// Creates a version of this concrete that is max size + pub fn max_size(&self, analyzer: &mut impl AnalyzerBackend) -> Result { let c = self.underlying(analyzer)?.max_size(); Ok(analyzer.add_node(Node::Concrete(c)).into()) } + /// Gets the internal type of the dynamic that backs this. Panics if this is not a dynamic concrete pub fn dynamic_underlying_ty( &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), + analyzer: &mut impl AnalyzerBackend, ) -> Result { let builtin = self.underlying(analyzer)?.dynamic_underlying_ty().unwrap(); let bn = analyzer.builtin_or_add(builtin); @@ -38,19 +42,26 @@ impl ConcreteNode { Ok(v_ty) } - pub fn is_dyn(&self, analyzer: &impl GraphLike) -> Result { + /// Returns whether this is a dynamic concrete + pub fn is_dyn(&self, analyzer: &impl GraphBackend) -> Result { Ok(self.underlying(analyzer)?.is_dyn()) } - pub fn is_sized_array(&self, analyzer: &impl GraphLike) -> Result { + /// Returns whether this is a concrete sized array + pub fn is_sized_array(&self, analyzer: &impl GraphBackend) -> Result { Ok(self.underlying(analyzer)?.is_sized_array()) } - pub fn maybe_array_size(&self, analyzer: &impl GraphLike) -> Result, GraphError> { + /// Returns the size of the array size if it is an array-like concrete + pub fn maybe_array_size( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { Ok(self.underlying(analyzer)?.maybe_array_size()) } - pub fn is_indexable(&self, analyzer: &impl GraphLike) -> Result { + /// Returns whether this concrete is indexable + pub fn is_indexable(&self, analyzer: &impl GraphBackend) -> Result { Ok(self.underlying(analyzer)?.is_indexable()) } } @@ -67,12 +78,6 @@ impl From for NodeIdx { } } -#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] -pub enum DynCapacity { - Cap(U256), - Unlimited, -} - /// EVM/Solidity basic concrete types #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum Concrete { @@ -95,6 +100,12 @@ pub enum Concrete { Array(Vec), } +impl Default for Concrete { + fn default() -> Self { + Concrete::Uint(0, U256::zero()) + } +} + impl From for Concrete { fn from(u: U256) -> Self { Concrete::Uint(256, u) @@ -113,6 +124,14 @@ impl From> for Concrete { } } +impl From for Concrete { + fn from(u: u8) -> Self { + let mut h = H256::default(); + h.0[0] = u; + Concrete::Bytes(1, h) + } +} + impl From for Concrete { fn from(u: H256) -> Self { Concrete::Bytes(32, u) @@ -146,13 +165,86 @@ impl From for Concrete { } } -impl> From> for Concrete { - fn from(u: Vec) -> Self { - Concrete::Array(u.into_iter().map(|t| t.into()).collect()) +impl From<&str> for Concrete { + fn from(u: &str) -> Self { + Concrete::String(u.to_string()) } } +// impl> From> for Concrete { +// fn from(u: Vec) -> Self { +// Concrete::Array(u.into_iter().map(|t| t.into()).collect()) +// } +// } + impl Concrete { + pub fn raw_bits_u256(&self) -> Option { + match self { + Concrete::Int(_, val) => Some(val.into_raw()), + _ => self.into_u256(), + } + } + + pub fn set_indices(&mut self, other: &Self) { + match (self, other) { + (Concrete::DynBytes(s), Concrete::DynBytes(o)) => { + o.iter().enumerate().for_each(|(i, b)| { + s[i] = *b; + }); + } + (Concrete::Array(s), Concrete::Array(o)) => { + o.iter().enumerate().for_each(|(i, b)| { + s[i] = b.clone(); + }); + } + 
(Concrete::String(s), Concrete::String(o)) => { + o.chars().enumerate().for_each(|(i, b)| { + s.replace_range(i..i + 1, &b.to_string()); + }); + } + (Concrete::Bytes(size, s), Concrete::Bytes(cap, o)) => { + let mut bytes = [0u8; 32]; + s.0.into_iter() + .take((*size).into()) + .enumerate() + .for_each(|(i, b)| bytes[i] = b); + o.0.into_iter() + .take((*cap).into()) + .enumerate() + .for_each(|(i, b)| bytes[i] = b); + *s = H256(bytes); + } + _ => {} + } + } + + pub fn get_index(&self, other: &Self) -> Option { + match (self, other) { + (Concrete::DynBytes(s), Concrete::Uint(_, o)) => { + let index = o.as_usize(); + let mut bytes = [0u8; 32]; + bytes[0] = s[index]; + Some(Concrete::Bytes(1, H256(bytes))) + } + (Concrete::Array(s), Concrete::Uint(_, o)) => { + let index = o.as_usize(); + Some(s[index].clone()) + } + (Concrete::String(s), Concrete::Uint(_, o)) => { + let index = o.as_usize(); + Some(Concrete::String(s[index..index + 1].to_string())) + } + (Concrete::Bytes(_size, s), Concrete::Uint(_, o)) => { + let index = o.as_usize(); + let mut bytes = [0u8; 32]; + bytes[0] = s[index]; + Some(Concrete::Bytes(1, H256(bytes))) + } + _ => None, + } + } + + /// Returns whether this concrete is a dynamic type pub fn is_dyn(&self) -> bool { matches!( self, @@ -160,10 +252,12 @@ impl Concrete { ) } + /// Returns whether this concrete is a sized array pub fn is_sized_array(&self) -> bool { matches!(self, Concrete::DynBytes(..) | Concrete::Array(..)) } + /// Returns the internal type of this dynamic concrete pub fn dynamic_underlying_ty(&self) -> Option { match self { Concrete::DynBytes(_v) => Some(Builtin::Bytes(1)), @@ -174,6 +268,7 @@ impl Concrete { } } + /// Returns the length of the array if it is an array pub fn maybe_array_size(&self) -> Option { match self { Concrete::DynBytes(v) => Some(U256::from(v.len())), @@ -184,6 +279,7 @@ impl Concrete { } } + /// Returns whether this concrete is indexable pub fn is_indexable(&self) -> bool { self.is_dyn() || matches!(self, Concrete::Bytes(..)) } @@ -194,12 +290,16 @@ impl Concrete { /// be fine. 
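A usage sketch of the behaviour the cast-based rewrite below gives: the raw `U256` supplies the bits and the original concrete only supplies the type, so an out-of-range value is masked down to the original width. The `graph::nodes::Concrete` import path is assumed from this crate's re-exports, and the values are hypothetical:

use ethers_core::types::U256;
use graph::nodes::Concrete; // assumed import path for this crate

fn main() {
    // With the rewrite below, Uint(8, _) reinterprets the raw value via a cast
    // to uint8 instead of storing it as-is: 300 & 0xff == 44.
    let original = Concrete::Uint(8, U256::zero());
    let reinterpreted = original.u256_as_original(U256::from(300u64));
    assert_eq!(reinterpreted, Concrete::Uint(8, U256::from(44u64)));
}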
pub fn u256_as_original(&self, uint: U256) -> Self { match self { - Concrete::Uint(size, _) => Concrete::Uint(*size, uint), - Concrete::Int(size, _) => Concrete::Int(*size, I256::from_raw(uint)), + Concrete::Uint(size, _) => Concrete::Uint(256, uint) + .cast(Builtin::Uint(*size)) + .unwrap(), + Concrete::Int(size, _) => Concrete::Int(256, I256::from_raw(uint)) + .cast(Builtin::Int(*size)) + .unwrap(), Concrete::Bytes(size, _) => { let mut h = H256::default(); uint.to_big_endian(h.as_mut()); - Concrete::Bytes(*size, h) + Concrete::Bytes(32, h).cast(Builtin::Bytes(*size)).unwrap() } Concrete::Address(_) => { let mut bytes = [0u8; 32]; @@ -213,12 +313,33 @@ impl Concrete { Concrete::Bool(false) } } + Concrete::DynBytes(v) => { + let mut bytes = [0u8; 32]; + uint.to_big_endian(&mut bytes); + let new_vec = &bytes.to_vec()[0..v.len()]; + Concrete::DynBytes(new_vec.to_vec()) + } e => todo!("Unsupported: {e:?}"), } } + pub fn is_zero(&self) -> bool { + self.into_u256() == Some(U256::zero()) + } + + pub fn is_one(&self) -> bool { + self.into_u256() == Some(U256::from(1)) + } + /// Cast from one concrete variant given another concrete variant pub fn cast_from(self, other: &Self) -> Option { + if let (Concrete::DynBytes(s), Concrete::DynBytes(o)) = (&self, other) { + if s.len() < o.len() { + let mut t = s.clone(); + t.resize(o.len(), 0); + return Some(Concrete::DynBytes(t)); + } + } self.cast(other.as_builtin()) } @@ -250,10 +371,19 @@ impl Concrete { } } + /// Returns whether this concrete is an unsigned integer pub fn is_int(&self) -> bool { matches!(self, Concrete::Int(_, _)) } + pub fn size_wrap(self) -> Self { + match self { + Concrete::Int(size, val) => Concrete::Int(256, val).cast(Builtin::Int(size)).unwrap(), + _ => self, + } + } + + /// Performs a literal cast to another type pub fn literal_cast(self, builtin: Builtin) -> Option { match self { Concrete::Uint(_, val) => match builtin { @@ -279,6 +409,7 @@ impl Concrete { } } + /// Concatenate two concretes together pub fn concat(self, other: &Self) -> Option { match (self, other) { (Concrete::String(a), Concrete::String(b)) => Some(Concrete::from(format!("{a}{b}"))), @@ -290,6 +421,22 @@ impl Concrete { } } + pub fn bit_representation(&self) -> Option { + match self { + Concrete::Int(size, val) => { + let mut bytes = [0u8; 32]; + val.to_big_endian(&mut bytes); + Some(Concrete::Uint(*size, U256::from_big_endian(&bytes))) + } + Concrete::Bytes(size, _) => { + Some(Concrete::Uint(*size as u16 / 8, self.into_u256().unwrap())) + } + Concrete::Bool(_val) => Some(Concrete::Uint(8, self.into_u256().unwrap())), + Concrete::Address(_val) => Some(Concrete::Uint(20, self.into_u256().unwrap())), + _ => None, + } + } + /// Cast the concrete to another type as denoted by a [`Builtin`]. 
pub fn cast(self, builtin: Builtin) -> Option { match self { @@ -313,7 +460,7 @@ impl Concrete { if val < mask { Some(Concrete::Uint(size, val)) } else { - Some(Concrete::Uint(size, mask)) + Some(Concrete::Uint(size, val & mask)) } } } @@ -347,33 +494,44 @@ impl Concrete { val.to_big_endian(&mut bytes); Some(Concrete::Address(Address::from_slice(&bytes[12..]))) } - Builtin::Uint(size) => { - let mask = if size == 256 { - U256::MAX - } else { - U256::from(2).pow(size.into()) - 1 - }; - Some(Concrete::Uint(size, val.into_raw() & mask)) + Builtin::Uint(_size) => { + let bit_repr = self.bit_representation().unwrap(); + bit_repr.cast(builtin) } Builtin::Int(size) => { - // no op - if r_size == size { - Some(self) - } else { - let mask = if size == 256 { - U256::MAX / 2 - } else { - U256::from(2).pow((size - 1).into()) - 1 - }; - - let (sign, abs) = val.into_sign_and_abs(); - if abs < mask { + match r_size.cmp(&size) { + std::cmp::Ordering::Less => { + // upcast Some(Concrete::Int(size, val)) - } else { - Some(Concrete::Int( - size, - I256::checked_from_sign_and_abs(sign, mask).unwrap(), - )) + } + std::cmp::Ordering::Equal => { + // noop + Some(self) + } + std::cmp::Ordering::Greater => { + // downcast + let mask = if size == 256 { + U256::MAX / 2 + } else { + U256::from(2).pow((size).into()) - 1 + }; + + let raw = val.into_raw(); + + if raw < mask / U256::from(2) { + Some(Concrete::Int(size, val)) + } else { + let base_value = raw & mask; + let res = + if base_value >> (size - 1) & U256::from(1) == U256::from(1) { + let top = U256::MAX << size; + base_value | top + } else { + base_value + }; + + Some(Concrete::Int(size, I256::from_raw(res))) + } } } } @@ -545,6 +703,10 @@ impl Concrete { } } Concrete::Bytes(_, b) => Some(U256::from_big_endian(b.as_bytes())), + Concrete::DynBytes(v) if v.len() <= 32 => self + .clone() + .cast(Builtin::Bytes(v.len() as u8))? + .into_u256(), Concrete::Address(a) => Some(U256::from_big_endian(a.as_bytes())), Concrete::Bool(b) => { if *b { @@ -557,6 +719,7 @@ impl Concrete { } } + /// Returns this concrete as a max-sized version pub fn max_size(&self) -> Self { match self { Concrete::Uint(_, val) => Concrete::Uint(256, *val), @@ -567,7 +730,7 @@ impl Concrete { } /// Gets the default max for a given concrete variant. - pub fn max(&self) -> Option { + pub fn max_of_type(&self) -> Option { match self { Concrete::Uint(size, _) => { let max = if *size == 256 { @@ -678,7 +841,7 @@ impl Concrete { } /// Gets the default min for a given concrete variant. 
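For the renamed `min_of_type`/`max_of_type` helpers that follow, the unsigned upper bound boils down to `2**size - 1`; a small standalone check of that arithmetic (the `uint_max` helper name is illustrative):

use ethers_core::types::U256;

// Upper bound of a `uint<size>`, as computed in `max_concrete`/`max_of_type`:
// `U256::MAX` for 256 bits, otherwise `2**size - 1`.
fn uint_max(size: u16) -> U256 {
    if size == 256 {
        U256::MAX
    } else {
        U256::from(2).pow(U256::from(size)) - 1
    }
}

fn main() {
    assert_eq!(uint_max(8), U256::from(255u64));
    assert_eq!(uint_max(16), U256::from(65_535u64));
    // The minimum of any uint is simply zero, matching `Concrete::Uint(size, 0.into())` below.
}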
- pub fn min(&self) -> Option { + pub fn min_of_type(&self) -> Option { match self { Concrete::Uint(size, _) => Some(Concrete::Uint(*size, 0.into())), Concrete::Int(size, _) => { @@ -731,6 +894,49 @@ impl Concrete { } } + pub fn is_negative(&self) -> bool { + matches!(self, Concrete::Int(_, val) if *val < I256::from(0)) + } + + pub fn as_hex_string(&self) -> String { + match self { + Concrete::Uint(_, val) => { + let mut bytes = [0u8; 32]; + val.to_big_endian(&mut bytes); + format!("0x{}", hex::encode(bytes)) + } + Concrete::Int(_, val) => { + let mut bytes = [0u8; 32]; + val.to_big_endian(&mut bytes); + format!("0x{}", hex::encode(bytes)) + } + Concrete::Bytes(size, b) => format!( + "0x{}", + b.0.iter() + .take(*size as usize) + .map(|byte| format!("{byte:02x}")) + .collect::>() + .join("") + ), + Concrete::String(s) => hex::encode(s), + Concrete::Bool(_b) => self.bit_representation().unwrap().as_hex_string(), + Concrete::Address(_a) => self.bit_representation().unwrap().as_hex_string(), + Concrete::DynBytes(a) => { + if a.is_empty() { + "0x".to_string() + } else { + hex::encode(a) + } + } + Concrete::Array(arr) => format!( + "0x{}", + arr.iter() + .map(|i| i.as_hex_string()[2..].to_string()) + .collect::>() + .join("") + ), + } + } /// Converts to a string pub fn as_string(&self) -> String { match self { @@ -764,6 +970,28 @@ impl Concrete { } } + pub fn as_bytes(&self) -> Vec { + match self { + Concrete::Uint(_, val) => { + let mut bytes = [0; 32]; + val.to_big_endian(&mut bytes); + bytes.to_vec() + } + Concrete::Int(_, val) => { + let mut bytes = [0; 32]; + val.to_big_endian(&mut bytes); + bytes.to_vec() + } + Concrete::Bytes(size, val) => val[0..(*size as usize)].to_vec(), + Concrete::Address(_) | Concrete::Bool(_) => { + Concrete::Uint(256, self.into_u256().unwrap()).as_bytes() + } + Concrete::DynBytes(inner) => inner.clone(), + Concrete::String(inner) => inner.as_bytes().to_vec(), + Concrete::Array(inner) => inner.iter().flat_map(|i| i.as_bytes()).collect(), + } + } + /// Converts to a human readable string. For integers, this means trying to find a /// power of 2 that is close to the value. 
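The doc comment above says integers are rendered relative to a nearby power of two; this is a rough standalone sketch of that idea, not the crate's actual `as_human_string` formatting (helper name, threshold, and output format are all assumptions):

use ethers_core::types::U256;

// Render `val` as `2**n - k` when it sits just below a power of two,
// otherwise fall back to the plain decimal string.
fn human(val: U256) -> String {
    let n = 256usize - val.leading_zeros() as usize; // position of the highest set bit, plus one
    if n == 0 || n >= 256 {
        return val.to_string();
    }
    let pow = U256::from(1) << n; // 2**n
    if pow > val && pow - val <= U256::from(1_024u64) {
        format!("2**{} - {}", n, pow - val)
    } else {
        val.to_string()
    }
}

fn main() {
    assert_eq!(human(U256::from(65_535u64)), "2**16 - 1");
    assert_eq!(human(U256::from(7u64)), "2**3 - 1");
}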
pub fn as_human_string(&self) -> String { diff --git a/crates/graph/src/nodes/context/context_tys.rs b/crates/graph/src/nodes/context/context_tys.rs new file mode 100644 index 00000000..b7b6789c --- /dev/null +++ b/crates/graph/src/nodes/context/context_tys.rs @@ -0,0 +1,88 @@ +use crate::nodes::{ContextNode, ContextVarNode, ContractNode, FunctionNode, StructNode}; +use shared::NodeIdx; + +use solang_parser::pt::Loc; + +use std::collections::BTreeMap; + +/// An enum that denotes either a call or a fork of a context +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub enum CallFork { + Call(ContextNode), + Fork(ContextNode, ContextNode), +} + +impl CallFork { + /// Returns an option of the call context + pub fn maybe_call(&self) -> Option { + match self { + CallFork::Call(c) => Some(*c), + _ => None, + } + } + + /// Returns an option of the two fork contexts + pub fn maybe_fork(&self) -> Option<(ContextNode, ContextNode)> { + match self { + CallFork::Fork(w1, w2) => Some((*w1, *w2)), + _ => None, + } + } +} + +/// Holds the current modifier state +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct ModifierState { + /// The number of the current modifier being evaluated + pub num: usize, + /// The location in source + pub loc: Loc, + /// The calling function + pub parent_fn: FunctionNode, + /// The context of the caller to the function that had this modifier + pub parent_caller_ctx: ContextNode, + /// The parent context + pub parent_ctx: ContextNode, + /// Renamed inputs based on the modifier + pub renamed_inputs: BTreeMap, +} + +impl ModifierState { + /// Constructs a modifier state + pub fn new( + num: usize, + loc: Loc, + parent_fn: FunctionNode, + parent_ctx: ContextNode, + parent_caller_ctx: ContextNode, + renamed_inputs: BTreeMap, + ) -> Self { + Self { + num, + loc, + parent_fn, + parent_ctx, + parent_caller_ctx, + renamed_inputs, + } + } +} + +/// Holds cached information about the context to speed up lookups +#[derive(Default, Debug, Clone, Eq, PartialEq)] +pub struct ContextCache { + /// Variables in this context + pub vars: BTreeMap, + /// Temporary variables in this context + pub tmp_vars: BTreeMap, + /// Visible functions from this context + pub visible_funcs: Option>, + /// Visible structs from this context + pub visible_structs: Option>, + /// First ancestor of this context + pub first_ancestor: Option, + /// Associated source of this context + pub associated_source: Option, + /// Associated contract of this context + pub associated_contract: Option, +} diff --git a/shared/src/context/expr_ret.rs b/crates/graph/src/nodes/context/expr_ret.rs similarity index 71% rename from shared/src/context/expr_ret.rs rename to crates/graph/src/nodes/context/expr_ret.rs index 00640a60..2195fef9 100644 --- a/shared/src/context/expr_ret.rs +++ b/crates/graph/src/nodes/context/expr_ret.rs @@ -1,16 +1,25 @@ -use crate::analyzer::AsDotStr; -use crate::context::GraphError; -use crate::{ContextVarNode, GraphLike, Node, NodeIdx, VarType}; +use crate::{ + nodes::{context::ContextVarNode, Concrete}, + range::elem::Elem, + AsDotStr, GraphBackend, GraphError, Node, VarType, +}; +use shared::{NodeIdx, RangeArena}; +/// The reason a context was killed #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum KilledKind { + /// Execution ended here successfully Ended, + /// Unsatisifiable bounds, therefore dead code Unreachable, + /// Execution guaranteed to revert here! Revert, + /// Unexpected parse error. This is likely a bug or invalid solidity. 
See the `errors` section of the CLI output or rerun with `--debug` for more information ParseError, } impl KilledKind { + /// Returns a string explanation of the KilledKind pub fn analysis_str(&self) -> &str { use KilledKind::*; match self { @@ -22,21 +31,32 @@ impl KilledKind { } } +/// A representation of the evaluation of an expression #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum ExprRet { + /// The expression resulted in a killing of the context CtxKilled(KilledKind), + /// The expression resulted in nothing Null, + /// The expression resulted in a single element Single(NodeIdx), + /// The expression resulted in a single element that was a literal SingleLiteral(NodeIdx), + /// The expression resulted in multiple elements Multi(Vec), } impl ExprRet { - pub fn debug_str(&self, analyzer: &impl GraphLike) -> String { + /// Converts the expression return into a debug string + pub fn debug_str(&self, analyzer: &impl GraphBackend) -> String { match self { ExprRet::Single(inner) | ExprRet::SingleLiteral(inner) => match analyzer.node(*inner) { - Node::ContextVar(_) => ContextVarNode::from(*inner).display_name(analyzer).unwrap(), - e => format!("{:?}", e), + Node::ContextVar(_) => format!( + "idx_{}: {}", + inner.index(), + ContextVarNode::from(*inner).display_name(analyzer).unwrap() + ), + e => format!("idx_{}: {:?}", inner.index(), e), }, ExprRet::Multi(inner) => { format!( @@ -53,6 +73,7 @@ impl ExprRet { } } + /// Take one element from the expression return. pub fn take_one(&mut self) -> Result, GraphError> { match self { ExprRet::Single(..) | ExprRet::SingleLiteral(..) => { @@ -70,6 +91,8 @@ impl ExprRet { } } + /// Checks if the expression return is a `SingleLiteral`. It returns + /// a list of bools that match if each is a literal pub fn literals_list(&self) -> Result, GraphError> { match self { ExprRet::SingleLiteral(..) => Ok(vec![true]), @@ -88,6 +111,7 @@ impl ExprRet { } } + /// Expect the expression result to be the Single variant pub fn expect_single(&self) -> Result { match self { ExprRet::Single(inner) => Ok(*inner), @@ -99,6 +123,7 @@ impl ExprRet { } } + /// Expect the expression result to be some length pub fn expect_length(&self, len: usize) -> Result<(), GraphError> { match self { ExprRet::Single(_) | ExprRet::SingleLiteral(_) => { @@ -130,6 +155,7 @@ impl ExprRet { } } + /// Return whether the expression return is a Single or SingleLiteral pub fn is_single(&self) -> bool { match self { ExprRet::Single(_inner) => true, @@ -139,10 +165,12 @@ impl ExprRet { } } + /// Return whether the expression return resulted in the Context being killed pub fn is_killed(&self) -> bool { matches!(self, ExprRet::CtxKilled(_)) } + /// Return the kind of the killed context if it was killed pub fn killed_kind(&self) -> Option { match self { ExprRet::CtxKilled(k) => Some(*k), @@ -151,10 +179,7 @@ impl ExprRet { } } - pub fn has_fork(&self) -> bool { - false - } - + /// Check if any of the expression returns are killed pub fn has_killed(&self) -> bool { match self { ExprRet::CtxKilled(_) => true, @@ -163,6 +188,7 @@ impl ExprRet { } } + /// Check if any of the expression returns are literals pub fn has_literal(&self) -> bool { match self { ExprRet::SingleLiteral(..) => true, @@ -171,6 +197,7 @@ impl ExprRet { } } + /// Expect the return to be a multi, and return the inner list. 
Panics if not mulit pub fn expect_multi(self) -> Vec { match self { ExprRet::Multi(inner) => inner, @@ -178,14 +205,19 @@ impl ExprRet { } } - pub fn try_as_func_input_str(&self, analyzer: &impl GraphLike) -> String { + /// Try to convert to a solidity-like function input string, i.e. `(uint256, uint256, bytes32)` + pub fn try_as_func_input_str( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> String { match self { ExprRet::Single(inner) | ExprRet::SingleLiteral(inner) => { let idx = inner; match VarType::try_from_idx(analyzer, *idx) { Some(var_ty) => { if let Ok(ty) = var_ty.unresolved_as_resolved(analyzer) { - format!("({})", ty.as_dot_str(analyzer)) + format!("({})", ty.as_dot_str(analyzer, arena)) } else { "".to_string() } @@ -193,10 +225,13 @@ impl ExprRet { None => "".to_string(), } } - ExprRet::Multi(inner) if !self.has_fork() => { + ExprRet::Multi(inner) => { let mut strs = vec![]; for ret in inner.iter() { - strs.push(ret.try_as_func_input_str(analyzer).replace(['(', ')'], "")); + strs.push( + ret.try_as_func_input_str(analyzer, arena) + .replace(['(', ')'], ""), + ); } format!("({})", strs.join(", ")) } @@ -204,6 +239,7 @@ impl ExprRet { } } + /// Flatten the expression returns recursively into a single list of node indices pub fn as_flat_vec(&self) -> Vec { let mut idxs = vec![]; match self { @@ -221,6 +257,7 @@ impl ExprRet { idxs } + /// Convert to a normal vector, does not recurse pub fn as_vec(&self) -> Vec { match self { s @ ExprRet::Single(_) | s @ ExprRet::SingleLiteral(_) => vec![s.clone()], @@ -231,6 +268,7 @@ impl ExprRet { } } + /// Flatten into a single ExprRet pub fn flatten(self) -> Self { match self { ExprRet::Single(_) | ExprRet::SingleLiteral(_) => self, @@ -244,4 +282,17 @@ impl ExprRet { _ => self, } } + + pub fn len(&self) -> usize { + match self { + ExprRet::Single(_) | ExprRet::SingleLiteral(_) => 1, + ExprRet::Multi(inner) => inner.len(), + ExprRet::CtxKilled(..) 
=> 0, + ExprRet::Null => 0, + } + } + + pub fn is_empty(&self) -> bool { + self.len() == 0 + } } diff --git a/crates/graph/src/nodes/context/mod.rs b/crates/graph/src/nodes/context/mod.rs new file mode 100644 index 00000000..4e8ac1ac --- /dev/null +++ b/crates/graph/src/nodes/context/mod.rs @@ -0,0 +1,18 @@ +mod context_tys; +mod expr_ret; +mod node; +mod underlying; +mod var; + +pub use context_tys::{CallFork, ContextCache, ModifierState}; +pub use expr_ret::{ExprRet, KilledKind}; +pub use node::ContextNode; +pub use underlying::Context; +pub use var::{ContextVar, ContextVarNode, TmpConstruction}; + +// ContextNode implementations are split to ease in maintainability +mod querying; +mod solving; +mod typing; +mod variables; +mod versioning; diff --git a/crates/graph/src/nodes/context/node.rs b/crates/graph/src/nodes/context/node.rs new file mode 100644 index 00000000..6c4bb8d5 --- /dev/null +++ b/crates/graph/src/nodes/context/node.rs @@ -0,0 +1,141 @@ +use crate::{ + nodes::{Concrete, Context, ContextVarNode, KilledKind}, + range::elem::Elem, + AnalyzerBackend, AsDotStr, GraphBackend, GraphError, Node, +}; + +use shared::{NodeIdx, RangeArena}; + +use solang_parser::pt::Loc; + +#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] +/// A wrapper of a node index that corresponds to a [`Context`] +pub struct ContextNode(pub usize); + +impl AsDotStr for ContextNode { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> String { + format!("Context {{ {} }}", self.path(analyzer)) + } +} + +impl ContextNode { + pub fn add_gas_cost( + &self, + analyzer: &mut impl GraphBackend, + cost: u64, + ) -> Result<(), GraphError> { + self.associated_fn(analyzer)?.add_gas_cost(analyzer, cost) + } + + /// Gets the total context width + pub fn total_width(&self, analyzer: &mut impl AnalyzerBackend) -> Result { + self.first_ancestor(analyzer)? 
+ .number_of_live_edges(analyzer) + } + + /// Gets the total context depth + pub fn depth(&self, analyzer: &impl GraphBackend) -> usize { + self.underlying(analyzer).unwrap().depth + } + + /// The path of the underlying context + pub fn path(&self, analyzer: &impl GraphBackend) -> String { + self.underlying(analyzer).unwrap().path.clone() + } + + /// Gets a mutable reference to the underlying context in the graph + pub fn underlying_mut<'a>( + &self, + analyzer: &'a mut impl AnalyzerBackend, + ) -> Result<&'a mut Context, GraphError> { + match analyzer.node_mut(*self) { + Node::Context(c) => Ok(c), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), + e => Err(GraphError::NodeConfusion(format!( + "Node type confusion: expected node to be Context but it was: {e:?}" + ))), + } + } + + /// Gets an immutable reference to the underlying context in the graph + pub fn underlying<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result<&'a Context, GraphError> { + match analyzer.node(*self) { + Node::Context(c) => Ok(c), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), + e => Err(GraphError::NodeConfusion(format!( + "Node type confusion: expected node to be Context but it was: {e:?}" + ))), + } + } + + /// Returns an option to where the context was killed + pub fn killed_loc( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + Ok(self.underlying(analyzer)?.killed) + } + + /// Add a return node to the context + pub fn add_return_node( + &self, + ret_stmt_loc: Loc, + ret: ContextVarNode, + analyzer: &mut impl AnalyzerBackend, + ) -> Result<(), GraphError> { + self.underlying_mut(analyzer)?.ret.push((ret_stmt_loc, ret)); + self.propogate_end(analyzer)?; + Ok(()) + } + + /// Propogate that this context has ended up the context graph + pub fn propogate_end(&self, analyzer: &mut impl AnalyzerBackend) -> Result<(), GraphError> { + let underlying = &mut self.underlying_mut(analyzer)?; + let curr_live = underlying.number_of_live_edges; + underlying.number_of_live_edges = 0; + if let Some(parent) = self.underlying(analyzer)?.parent_ctx { + let live_edges = &mut parent.underlying_mut(analyzer)?.number_of_live_edges; + *live_edges = live_edges.saturating_sub(1 + curr_live); + parent.propogate_end(analyzer)?; + } + Ok(()) + } + + /// Gets the return nodes for this context + pub fn return_nodes( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + Ok(self.underlying(analyzer)?.ret.clone()) + } + + /// Returns a string for dot-string things + pub fn as_string(&mut self) -> String { + "Context".to_string() + } +} + +impl From for NodeIdx { + fn from(val: ContextNode) -> Self { + val.0.into() + } +} + +impl From for ContextNode { + fn from(idx: NodeIdx) -> Self { + ContextNode(idx.index()) + } +} diff --git a/crates/graph/src/nodes/context/querying.rs b/crates/graph/src/nodes/context/querying.rs new file mode 100644 index 00000000..16fd28a9 --- /dev/null +++ b/crates/graph/src/nodes/context/querying.rs @@ -0,0 +1,254 @@ +use crate::{ + nodes::{ + ContextNode, ContractNode, FunctionNode, SourceUnitNode, SourceUnitPartNode, StructNode, + }, + AnalyzerBackend, ContextEdge, Edge, GraphBackend, GraphError, +}; + +use shared::Search; +use std::collections::{BTreeMap, BTreeSet}; + +impl ContextNode { + /// Gets the associated contract for the function for the context + pub fn associated_contract( + &self, + analyzer: &mut impl 
AnalyzerBackend, + ) -> Result { + Ok(self + .associated_fn(analyzer)? + .maybe_associated_contract(analyzer) + .expect("No associated contract for context")) + } + + /// Tries to get the associated function for the context + pub fn maybe_associated_contract( + &self, + analyzer: &mut impl AnalyzerBackend, + ) -> Result, GraphError> { + Ok(self + .associated_fn(analyzer)? + .maybe_associated_contract(analyzer)) + } + + /// Tries to get the associated source for the context + pub fn maybe_associated_source( + &self, + analyzer: &mut impl AnalyzerBackend, + ) -> Option { + let context = self.underlying(analyzer).unwrap(); + if let Some(src) = context.cache.associated_source { + Some(src.into()) + } else if let Some(parent_ctx) = context.parent_ctx { + let src = parent_ctx.maybe_associated_source(analyzer)?; + self.underlying_mut(analyzer) + .unwrap() + .cache + .associated_source = Some(src.into()); + Some(src) + } else { + let func = self.associated_fn(analyzer).unwrap(); + func.maybe_associated_source(analyzer) + } + } + + /// Tries to get the associated source unit part for the context + pub fn associated_source_unit_part( + &self, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + if let Some(sup) = self + .associated_fn(analyzer)? + .maybe_associated_source_unit_part(analyzer) + { + Ok(sup) + } else { + Err(GraphError::NodeConfusion( + "Expected context to have an associated source but didnt".to_string(), + )) + } + } + + /// Gets visible functions + pub fn visible_modifiers( + &self, + analyzer: &mut impl AnalyzerBackend, + ) -> Result, GraphError> { + // TODO: filter privates + let Some(source) = self.maybe_associated_source(analyzer) else { + return Err(GraphError::NodeConfusion( + "Expected context to have an associated source but didnt".to_string(), + )); + }; + if let Some(contract) = self.maybe_associated_contract(analyzer)? { + let mut modifiers = contract.modifiers(analyzer); + // extend with free floating functions + modifiers.extend( + analyzer + .search_children_depth(source.into(), &Edge::Modifier, 1, 0) + .into_iter() + .map(FunctionNode::from) + .collect::>(), + ); + + // extend with inherited functions + let inherited_contracts = analyzer.search_children_exclude_via( + contract.0.into(), + &Edge::InheritedContract, + &[Edge::Func], + ); + modifiers.extend( + inherited_contracts + .into_iter() + .flat_map(|inherited_contract| { + ContractNode::from(inherited_contract).modifiers(analyzer) + }) + .collect::>(), + ); + + let mut mapping: BTreeMap> = BTreeMap::new(); + for modifier in modifiers.iter() { + let entry = mapping.entry(modifier.name(analyzer)?).or_default(); + entry.insert(*modifier); + } + mapping + .into_values() + .map(|modifier_set| { + let as_vec = modifier_set.iter().collect::>(); + + match as_vec.len() { + 2 => { + as_vec[0].get_overriding(as_vec[1], analyzer) + } + 3.. => { + panic!("3+ visible functions with the same name. 
This is invalid solidity, {as_vec:#?}") + } + _ => Ok(*as_vec[0]) + } + }) + .collect() + } else { + // we are in a free floating function, only look at free floating functions + let Some(source) = self.maybe_associated_source(analyzer) else { + return Err(GraphError::NodeConfusion( + "Expected context to have an associated source but didnt".to_string(), + )); + }; + Ok(analyzer + .search_children_depth(source.into(), &Edge::Modifier, 1, 0) + .into_iter() + .map(FunctionNode::from) + .collect::>()) + } + } + + /// Gets visible functions + pub fn visible_funcs( + &self, + analyzer: &mut impl AnalyzerBackend, + ) -> Result, GraphError> { + // TODO: filter privates + if let Some(vis) = &self.underlying(analyzer)?.cache.visible_funcs { + return Ok(vis.clone()); + } + if let Some(contract) = self.maybe_associated_contract(analyzer)? { + let mut mapping = contract.linearized_functions(analyzer)?; + // extend with free floating functions + mapping.extend( + analyzer + .search_children_depth(analyzer.entry(), &Edge::Func, 2, 0) + .into_iter() + .filter_map(|i| { + let fn_node = FunctionNode::from(i); + if let Ok(name) = fn_node.name(analyzer) { + if !mapping.contains_key(&name) { + Some((name, fn_node)) + } else { + None + } + } else { + None + } + }) + .collect::>(), + ); + let funcs: Vec<_> = mapping.values().copied().collect(); + self.underlying_mut(analyzer)?.cache.visible_funcs = Some(funcs.clone()); + Ok(funcs) + } else { + // we are in a free floating function, only look at free floating functions + let funcs = analyzer + .search_children_depth(analyzer.entry(), &Edge::Func, 2, 0) + .into_iter() + .map(FunctionNode::from) + .collect::>(); + + self.underlying_mut(analyzer)?.cache.visible_funcs = Some(funcs.clone()); + Ok(funcs) + } + } + + /// Gets all visible functions + pub fn source_funcs( + &self, + analyzer: &mut impl AnalyzerBackend, + ) -> Vec { + // TODO: filter privates + let Some(source) = self.maybe_associated_source(analyzer) else { + return vec![]; + }; + analyzer + .search_children_exclude_via( + source.into(), + &Edge::Func, + &[ + Edge::Context(ContextEdge::Context), + Edge::Context(ContextEdge::Variable), + ], + ) + .into_iter() + .map(FunctionNode::from) + .collect::>() + } + + /// Gets all visible structs + pub fn visible_structs( + &self, + analyzer: &mut impl AnalyzerBackend, + ) -> Result, GraphError> { + // TODO: filter privates + if let Some(vis) = &self.underlying(analyzer)?.cache.visible_structs { + return Ok(vis.clone()); + } + + let Some(source) = self.maybe_associated_source(analyzer) else { + return Ok(vec![]); + }; + + let mut structs = source.visible_structs(analyzer)?; + let contract = self.associated_contract(analyzer)?; + structs.extend(contract.visible_structs(analyzer)); + + structs.sort(); + structs.dedup(); + + self.underlying_mut(analyzer)?.cache.visible_structs = Some(structs.clone()); + Ok(structs) + } + + /// Gets the associated function for the context + pub fn associated_fn(&self, analyzer: &impl GraphBackend) -> Result { + let underlying = self.underlying(analyzer)?; + if let Some(fn_call) = underlying.fn_call { + Ok(fn_call) + } else if let Some(ext_fn_call) = underlying.ext_fn_call { + Ok(ext_fn_call) + } else { + Ok(underlying.parent_fn) + } + } + + /// Gets the associated function name for the context + pub fn associated_fn_name(&self, analyzer: &impl GraphBackend) -> Result { + self.associated_fn(analyzer)?.name(analyzer) + } +} diff --git a/crates/graph/src/nodes/context/solving.rs b/crates/graph/src/nodes/context/solving.rs new file mode 
100644 index 00000000..af834be9 --- /dev/null +++ b/crates/graph/src/nodes/context/solving.rs @@ -0,0 +1,160 @@ +use crate::elem::Elem; + +use crate::{ + nodes::{Concrete, ContextNode, ContextVarNode}, + range::Range, + solvers::{ + dl::{DLSolver, SolveStatus}, + Atomize, SolverAtom, + }, + AnalyzerBackend, GraphBackend, GraphError, +}; +use std::borrow::Cow; + +use shared::RangeArena; + +use std::collections::BTreeMap; + +impl ContextNode { + /// Use a Difference Logic solver to see if it is unreachable + pub fn unreachable( + &self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + // println!("checking unreachable: {}", self.path(analyzer)); + let mut solver = self.dl_solver(analyzer)?.clone(); + match solver.solve_partial(analyzer, arena)? { + SolveStatus::Unsat => { + tracing::trace!("{} is unreachable via UNSAT", self.path(analyzer)); + Ok(true) + } + _e => { + // println!("other: {e:?}"); + Ok(false) + } + } + } + + /// Get the dependencies as normalized solver atoms + pub fn dep_atoms( + &self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + let deps: Vec<_> = self.ctx_deps(analyzer)?; + let mut ranges = BTreeMap::default(); + deps.iter().try_for_each(|dep| { + let mut range = dep.range(analyzer)?.unwrap(); + let r: Cow<'_, _> = range.flattened_range(analyzer, arena)?; + ranges.insert(*dep, r.into_owned()); + Ok(()) + })?; + + Ok(ranges + .iter() + .filter_map(|(_dep, range)| { + if let Some(atom) = Elem::Arena(range.min).atomize(analyzer, arena) { + Some(atom) + } else { + Elem::Arena(range.max).atomize(analyzer, arena) + } + }) + .collect::>()) + } + + /// Get the difference logic solver associated with this context + pub fn dl_solver<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result<&'a DLSolver, GraphError> { + Ok(&self.underlying(analyzer)?.dl_solver) + } + + /// Returns a map of variable dependencies for this context + pub fn ctx_deps( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + let deps = self + .underlying(analyzer)? + .ctx_deps + .clone() + .into_iter() + .collect::>(); + Ok(deps) + } + + pub fn debug_ctx_deps( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + let deps = self.ctx_deps(analyzer)?; + deps.iter().enumerate().for_each(|(i, var)| { + println!( + "{i}. {}", + var.as_controllable_name(analyzer, arena).unwrap() + ) + }); + Ok(()) + } + + /// Adds a dependency for this context to exit successfully + pub fn add_ctx_dep( + &self, + dep: ContextVarNode, + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + tracing::trace!( + "Adding ctx ({}) dependency: {}, is_controllable: {}", + self.path(analyzer), + dep.display_name(analyzer)?, + dep.is_controllable(analyzer)? + ); + if dep.is_controllable(analyzer)? 
{ + // let underlying = self.underlying_mut(analyzer)?; + if !self.underlying(analyzer)?.ctx_deps.contains(&dep) { + // dep.cache_flattened_range(analyzer)?; + let mut range = dep.range(analyzer)?.unwrap(); + + let min = range.simplified_range_min(analyzer, arena)?; + let max = range.simplified_range_max(analyzer, arena)?; + + let true_elem = Elem::from(true); + let trivial_sat = min == true_elem && max == true_elem; + if trivial_sat || min == Elem::Null || max == Elem::Null { + return Ok(()); + } + + let r = range.flattened_range(analyzer, arena)?.into_owned(); + + // add the atomic constraint + if let Some(atom) = Elem::Arena(r.min).atomize(analyzer, arena) { + let mut solver = std::mem::take(&mut self.underlying_mut(analyzer)?.dl_solver); + let constraints = solver.add_constraints(vec![atom], analyzer, arena); + constraints + .into_iter() + .for_each(|(constraint, normalized)| { + solver.add_constraint(constraint, normalized); + }); + self.underlying_mut(analyzer)?.dl_solver = solver; + } else if let Some(atom) = Elem::Arena(r.max).atomize(analyzer, arena) { + let mut solver = std::mem::take(&mut self.underlying_mut(analyzer)?.dl_solver); + let constraints = solver.add_constraints(vec![atom], analyzer, arena); + constraints + .into_iter() + .for_each(|(constraint, normalized)| { + solver.add_constraint(constraint, normalized); + }); + self.underlying_mut(analyzer)?.dl_solver = solver; + } + + let underlying = self.underlying_mut(analyzer)?; + underlying.ctx_deps.insert(dep); + } + } + Ok(()) + } +} diff --git a/crates/graph/src/nodes/context/typing.rs b/crates/graph/src/nodes/context/typing.rs new file mode 100644 index 00000000..e8a56bda --- /dev/null +++ b/crates/graph/src/nodes/context/typing.rs @@ -0,0 +1,88 @@ +use crate::{ + nodes::{ContextNode, FunctionNode}, + AnalyzerBackend, GraphBackend, GraphError, +}; + +impl ContextNode { + /// Checks if its an anonymous function call (i.e. 
loop) + pub fn is_anonymous_fn_call(&self, analyzer: &impl GraphBackend) -> Result { + let underlying = self.underlying(analyzer)?; + + Ok(underlying.fn_call.is_none() && underlying.ext_fn_call.is_none() && !underlying.is_fork) + } + + pub fn has_continuation(&self, analyzer: &mut impl GraphBackend) -> Result { + Ok(self.underlying(analyzer)?.continuation_of.is_some()) + } + + /// Returns whether this context is killed or returned + pub fn killed_or_ret(&self, analyzer: &impl GraphBackend) -> Result { + let underlying = self.underlying(analyzer)?; + Ok(underlying.killed.is_some() + || (!underlying.ret.is_empty() && underlying.modifier_state.is_none())) + } + + /// Returns whether the context is killed + pub fn is_returned(&self, analyzer: &impl GraphBackend) -> Result { + Ok(!self.underlying(analyzer)?.ret.is_empty()) + } + + /// Returns whether the context is killed + pub fn is_killed(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self.underlying(analyzer)?.killed.is_some()) + } + + /// Returns whether the context is killed + pub fn is_ended(&self, analyzer: &impl GraphBackend) -> Result { + let underlying = self.underlying(analyzer)?; + Ok(underlying.child.is_some() || underlying.killed.is_some() || !underlying.ret.is_empty()) + } + + /// Check if this context is in an external function call + pub fn is_ext_fn(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self.underlying(analyzer)?.ext_fn_call.is_some()) + } + + /// Checks whether a function is external to the current context + pub fn is_fn_ext( + &self, + fn_node: FunctionNode, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + match fn_node.maybe_associated_contract(analyzer) { + None => Ok(false), + Some(fn_ctrt) => { + if let Some(self_ctrt) = self + .associated_fn(analyzer)? + .maybe_associated_contract(analyzer) + { + Ok(Some(self_ctrt) != Some(fn_ctrt) + && !self_ctrt + .underlying(analyzer)? + .inherits + .iter() + .any(|inherited| *inherited == fn_ctrt)) + } else { + Ok(false) + } + } + } + } + + /// Returns whether this context *currently* uses unchecked math + pub fn unchecked(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self.underlying(analyzer)?.unchecked) + } + + /// Sets the context to use unchecked math + pub fn set_unchecked(&self, analyzer: &mut impl AnalyzerBackend) -> Result<(), GraphError> { + self.underlying_mut(analyzer)?.unchecked = true; + Ok(()) + } + + /// Sets the context to use checked math + pub fn unset_unchecked(&self, analyzer: &mut impl AnalyzerBackend) -> Result<(), GraphError> { + self.underlying_mut(analyzer)?.unchecked = false; + Ok(()) + } +} diff --git a/crates/graph/src/nodes/context/underlying.rs b/crates/graph/src/nodes/context/underlying.rs new file mode 100644 index 00000000..5008c0b6 --- /dev/null +++ b/crates/graph/src/nodes/context/underlying.rs @@ -0,0 +1,331 @@ +use crate::{ + nodes::{ + CallFork, ContextCache, ContextNode, ContextVarNode, ExprRet, FunctionNode, KilledKind, + ModifierState, + }, + solvers::dl::DLSolver, + AnalyzerBackend, GraphError, +}; + +use solang_parser::pt::Loc; +use std::collections::BTreeSet; + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct Context { + /// The function associated with this context + pub parent_fn: FunctionNode, + /// Whether this function call is actually a modifier call + pub modifier_state: Option, + /// An optional parent context (i.e. 
this context is a fork or subcontext of another previous context) + pub parent_ctx: Option, + pub returning_ctx: Option, + pub continuation_of: Option, + /// Variables whose bounds are required to be met for this context fork to exist. i.e. a conditional operator + /// like an if statement + pub ctx_deps: BTreeSet, + /// A string that represents the path taken from the root context (i.e. `fn_entry.fork.1`) + pub path: String, + /// Denotes whether this context was killed by an unsatisfiable require, assert, etc. statement + pub killed: Option<(Loc, KilledKind)>, + /// Denotes whether this context is a fork of another context + pub is_fork: bool, + /// Denotes whether this context is the result of a internal function call, and points to the FunctionNode + pub fn_call: Option, + /// Denotes whether this context is the result of a internal function call, and points to the FunctionNode + pub ext_fn_call: Option, + /// The child context. This is either of the form `Call(child_context)` or `Fork(world1, world2)`. Once + /// a child is defined we should *never* evaluate an expression in this context. + pub child: Option, + /// A counter for temporary variables - this lets a context create unique temporary variables + pub tmp_var_ctr: usize, + /// The location in source of the context + pub loc: Loc, + /// The return node and the return location + pub ret: Vec<(Loc, ContextVarNode)>, + /// Depth tracker + pub depth: usize, + /// Width tracker + pub width: usize, + /// A temporary stack of ExprRets for this context + pub tmp_expr: Vec>, + /// The stack of ExprRets for this context + pub expr_ret_stack: Vec, + /// Whether the context currently uses unchecked math + pub unchecked: bool, + /// The number of live edges + pub number_of_live_edges: usize, + /// Caching related things + pub cache: ContextCache, + /// A difference logic solver used for determining reachability + pub dl_solver: DLSolver, +} + +impl Context { + /// Creates a new context from a function + pub fn new(parent_fn: FunctionNode, fn_name: String, loc: Loc) -> Self { + Context { + parent_fn, + parent_ctx: None, + returning_ctx: None, + continuation_of: None, + path: fn_name, + tmp_var_ctr: 0, + killed: None, + ctx_deps: Default::default(), + is_fork: false, + fn_call: None, + ext_fn_call: None, + child: None, + ret: vec![], + loc, + modifier_state: None, + depth: 0, + width: 0, + expr_ret_stack: Vec::with_capacity(5), + tmp_expr: vec![], + unchecked: false, + number_of_live_edges: 0, + cache: Default::default(), + dl_solver: Default::default(), + } + } + + /// Creates a new subcontext from an existing context + pub fn new_subctx( + parent_ctx: ContextNode, + returning_ctx: Option, + loc: Loc, + fork_expr: Option<&str>, + fn_call: Option, + fn_ext: bool, + analyzer: &mut impl AnalyzerBackend, + modifier_state: Option, + ) -> Result { + let mut depth = + parent_ctx.underlying(analyzer)?.depth + if fork_expr.is_some() { 0 } else { 1 }; + + let width = + parent_ctx.underlying(analyzer)?.width + if fork_expr.is_some() { 1 } else { 0 }; + + let modifier_state = if let Some(mstate) = modifier_state { + Some(mstate) + } else if fn_call.is_none() || parent_ctx.associated_fn(analyzer)? 
== fn_call.unwrap() { + parent_ctx.underlying(analyzer)?.modifier_state.clone() + } else { + None + }; + + if analyzer.max_depth() < depth { + return Err(GraphError::MaxStackDepthReached(format!( + "Stack depth limit reached: {}", + depth - 1 + ))); + } + + let tw = parent_ctx.total_width(analyzer)?; + if analyzer.max_width() < tw { + return Err(GraphError::MaxStackWidthReached(format!( + "Stack width limit reached: {}", + width - 1 + ))); + } + + let (fn_name, ext_fn_call, fn_call) = if let Some(fn_call) = fn_call { + if fn_ext { + (fn_call.name(analyzer)?, Some(fn_call), None) + } else { + (fn_call.name(analyzer)?, None, Some(fn_call)) + } + } else if let Some(returning_ctx) = returning_ctx { + let fn_node = returning_ctx.associated_fn(analyzer)?; + (fn_node.name(analyzer)?, None, Some(fn_node)) + } else { + ("anonymous_fn_call".to_string(), None, None) + }; + + let path = format!( + "{}.{}", + parent_ctx.underlying(analyzer)?.path, + if let Some(ref fork_expr) = fork_expr { + format!("fork{{ {} }}", fork_expr) + } else if let Some(returning_ctx) = returning_ctx { + depth = depth.saturating_sub(2); + format!( + "resume{{ {} }}", + returning_ctx.associated_fn_name(analyzer)? + ) + } else { + fn_name + } + ); + + let parent_fn = parent_ctx.associated_fn(analyzer)?; + + parent_ctx.underlying_mut(analyzer)?.number_of_live_edges += 1; + + tracing::trace!("new subcontext path: {path}, depth: {depth}"); + Ok(Context { + parent_fn, + parent_ctx: Some(parent_ctx), + returning_ctx, + continuation_of: None, + path, + is_fork: fork_expr.is_some(), + fn_call, + ext_fn_call, + ctx_deps: parent_ctx.underlying(analyzer)?.ctx_deps.clone(), + killed: None, + child: None, + tmp_var_ctr: parent_ctx.underlying(analyzer)?.tmp_var_ctr, + ret: vec![], + loc, + modifier_state, + depth, + width, + expr_ret_stack: if fork_expr.is_some() { + parent_ctx.underlying(analyzer)?.expr_ret_stack.clone() + } else if let Some(ret_ctx) = returning_ctx { + ret_ctx.underlying(analyzer)?.expr_ret_stack.clone() + } else { + vec![] + }, + tmp_expr: if fork_expr.is_some() { + parent_ctx.underlying(analyzer)?.tmp_expr.clone() + } else if let Some(ret_ctx) = returning_ctx { + ret_ctx.underlying(analyzer)?.tmp_expr.clone() + } else { + vec![] + }, + unchecked: if fork_expr.is_some() { + parent_ctx.underlying(analyzer)?.unchecked + } else if let Some(ret_ctx) = returning_ctx { + ret_ctx.underlying(analyzer)?.unchecked + } else { + false + }, + number_of_live_edges: 0, + cache: ContextCache { + vars: Default::default(), + tmp_vars: Default::default(), + visible_funcs: if fork_expr.is_some() { + parent_ctx.underlying(analyzer)?.cache.visible_funcs.clone() + } else if let Some(ret_ctx) = returning_ctx { + ret_ctx.underlying(analyzer)?.cache.visible_funcs.clone() + } else { + None + }, + visible_structs: if fork_expr.is_some() { + parent_ctx + .underlying(analyzer)? 
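`new_subctx` builds the child's `path` from the parent's path plus exactly one segment: `fork{ <expr> }` for a conditional fork, `resume{ <fn> }` when control returns from a call, or the callee's name for a plain call; `depth` only grows on calls and `width` only grows on forks. A standalone toy of just the string convention (illustrative only; the real code builds this inline in `new_subctx`):

    // Toy mirror of the path segment convention used by `new_subctx`.
    fn child_path(parent_path: &str, fork_expr: Option<&str>, resume_fn: Option<&str>, callee: &str) -> String {
        let segment = if let Some(expr) = fork_expr {
            format!("fork{{ {expr} }}")
        } else if let Some(fn_name) = resume_fn {
            format!("resume{{ {fn_name} }}")
        } else {
            callee.to_string()
        };
        format!("{parent_path}.{segment}")
    }

    fn main() {
        assert_eq!(child_path("fn_entry", Some("x > 5"), None, ""), "fn_entry.fork{ x > 5 }");
        assert_eq!(child_path("fn_entry.fork{ x > 5 }", None, None, "transfer"), "fn_entry.fork{ x > 5 }.transfer");
        assert_eq!(child_path("fn_entry.transfer", None, Some("fn_entry"), ""), "fn_entry.transfer.resume{ fn_entry }");
    }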
+ .cache + .visible_structs + .clone() + } else if let Some(ret_ctx) = returning_ctx { + ret_ctx.underlying(analyzer)?.cache.visible_structs.clone() + } else { + None + }, + first_ancestor: if fork_expr.is_some() { + parent_ctx.underlying(analyzer)?.cache.first_ancestor + } else if let Some(ret_ctx) = returning_ctx { + ret_ctx.underlying(analyzer)?.cache.first_ancestor + } else { + None + }, + associated_source: None, + associated_contract: None, + }, + dl_solver: parent_ctx.underlying(analyzer)?.dl_solver.clone(), + }) + } + + pub fn new_loop_subctx( + parent_ctx: ContextNode, + loc: Loc, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + let depth = parent_ctx.underlying(analyzer)?.depth + 1; + + if analyzer.max_depth() < depth { + return Err(GraphError::MaxStackDepthReached(format!( + "Stack depth limit reached: {}", + depth - 1 + ))); + } + + let fn_name = "loop"; + + let path = format!("{}.{}", parent_ctx.underlying(analyzer)?.path, fn_name); + + let parent_fn = parent_ctx.associated_fn(analyzer)?; + + parent_ctx.underlying_mut(analyzer)?.number_of_live_edges += 1; + + tracing::trace!("new subcontext path: {path}, depth: {depth}"); + Ok(Context { + parent_fn, + parent_ctx: Some(parent_ctx), + path, + returning_ctx: None, + continuation_of: None, + is_fork: false, + fn_call: None, + ext_fn_call: None, + ctx_deps: parent_ctx.underlying(analyzer)?.ctx_deps.clone(), + killed: None, + child: None, + tmp_var_ctr: parent_ctx.underlying(analyzer)?.tmp_var_ctr, + ret: vec![], + loc, + modifier_state: None, + depth, + width: 0, + expr_ret_stack: parent_ctx.underlying(analyzer)?.expr_ret_stack.clone(), + tmp_expr: parent_ctx.underlying(analyzer)?.tmp_expr.clone(), + unchecked: parent_ctx.underlying(analyzer)?.unchecked, + number_of_live_edges: 0, + cache: ContextCache { + vars: parent_ctx.underlying(analyzer)?.cache.vars.clone(), + tmp_vars: Default::default(), + visible_funcs: parent_ctx.underlying(analyzer)?.cache.visible_funcs.clone(), + visible_structs: parent_ctx + .underlying(analyzer)? 
+ .cache + .visible_structs + .clone(), + first_ancestor: parent_ctx.underlying(analyzer)?.cache.first_ancestor, + associated_source: None, + associated_contract: None, + }, + dl_solver: parent_ctx.underlying(analyzer)?.dl_solver.clone(), + }) + } + + /// Set the child context to a fork + pub fn set_child_fork(&mut self, world1: ContextNode, world2: ContextNode) -> bool { + if self.child.is_some() { + false + } else { + self.child = Some(CallFork::Fork(world1, world2)); + true + } + } + + /// Set the child context to a call + pub fn set_child_call(&mut self, call_ctx: ContextNode) -> bool { + if self.child.is_some() { + false + } else { + self.child = Some(CallFork::Call(call_ctx)); + true + } + } + + pub fn delete_child(&mut self) { + self.child = None; + } + + pub fn as_string(&mut self) -> String { + "Context".to_string() + } +} diff --git a/crates/graph/src/nodes/context/var/mod.rs b/crates/graph/src/nodes/context/var/mod.rs new file mode 100644 index 00000000..1f665e95 --- /dev/null +++ b/crates/graph/src/nodes/context/var/mod.rs @@ -0,0 +1,8 @@ +mod node; +mod ranging; +mod typing; +mod underlying; +mod versioning; + +pub use node::*; +pub use underlying::*; diff --git a/crates/graph/src/nodes/context/var/node.rs b/crates/graph/src/nodes/context/var/node.rs new file mode 100644 index 00000000..8d1df01f --- /dev/null +++ b/crates/graph/src/nodes/context/var/node.rs @@ -0,0 +1,422 @@ +use crate::{ + nodes::{Concrete, ContextNode, ContextVar, TmpConstruction, VarNode}, + range::{elem::*, range_string::ToRangeString, Range}, + AsDotStr, ContextEdge, Edge, GraphBackend, GraphError, Node, +}; + +use shared::{NodeIdx, RangeArena, Search, StorageLocation}; + +use petgraph::{visit::EdgeRef, Direction}; +use solang_parser::pt::Loc; + +use std::collections::BTreeMap; + +#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] +pub struct ContextVarNode(pub usize); + +impl AsDotStr for ContextVarNode { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> String { + let underlying = self.underlying(analyzer).unwrap(); + + let range_str = if let Some(r) = underlying.ty.ref_range(analyzer).unwrap() { + format!( + "[{}, {}]", + r.evaled_range_min(analyzer, arena) + .unwrap() + .to_range_string(false, analyzer, arena) + .s, + r.evaled_range_max(analyzer, arena) + .unwrap() + .to_range_string(true, analyzer, arena) + .s + ) + } else { + "".to_string() + }; + + format!( + "{} - {} -- {} -- range: {}", + underlying.display_name, + self.0, + underlying.ty.as_string(analyzer).unwrap(), + range_str + ) + } +} + +impl From for NodeIdx { + fn from(val: ContextVarNode) -> Self { + val.0.into() + } +} + +impl From for ContextVarNode { + fn from(idx: NodeIdx) -> Self { + ContextVarNode(idx.index()) + } +} + +impl ContextVarNode { + pub fn underlying<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result<&'a ContextVar, GraphError> { + match analyzer.node(*self) { + Node::ContextVar(c) => Ok(c), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), + e => Err(GraphError::NodeConfusion(format!( + "Node type confusion: expected node to be ContextVar but it was: {e:?}" + ))), + } + } + + pub fn underlying_mut<'a>( + &self, + analyzer: &'a mut impl GraphBackend, + ) -> Result<&'a mut ContextVar, GraphError> { + match analyzer.node_mut(*self) { + Node::ContextVar(c) => Ok(c), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + 
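Once a context has a `child` it is sealed: `set_child_call` and `set_child_fork` refuse to overwrite an existing child and return `false`, which is why the struct docs say no expression should ever be evaluated in a context after a child exists. A standalone toy mirror of that first-write-wins guard (types here are illustrative stand-ins, not the crate's):

    enum CallFork { Call(usize), Fork(usize, usize) }

    #[derive(Default)]
    struct ToyContext { child: Option<CallFork> }

    impl ToyContext {
        // Mirrors `Context::set_child_call`: the first write wins.
        fn set_child_call(&mut self, call_ctx: usize) -> bool {
            if self.child.is_some() { false } else { self.child = Some(CallFork::Call(call_ctx)); true }
        }
        // Mirrors `Context::set_child_fork`.
        fn set_child_fork(&mut self, w1: usize, w2: usize) -> bool {
            if self.child.is_some() { false } else { self.child = Some(CallFork::Fork(w1, w2)); true }
        }
    }

    fn main() {
        let mut ctx = ToyContext::default();
        assert!(ctx.set_child_call(1));      // first child is accepted
        assert!(!ctx.set_child_fork(2, 3));  // any later attempt is rejected
    }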
))), + e => Err(GraphError::NodeConfusion(format!( + "Node type confusion: expected node to be ContextVar but it was: {e:?}" + ))), + } + } + + pub fn storage<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result<&'a Option, GraphError> { + Ok(&self.underlying(analyzer)?.storage) + } + + pub fn loc(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self + .underlying(analyzer)? + .loc + .expect("No loc for ContextVar")) + } + + pub fn ctx(&self, analyzer: &impl GraphBackend) -> ContextNode { + ContextNode::from( + analyzer + .search_for_ancestor(self.0.into(), &Edge::Context(ContextEdge::Variable)) + .into_iter() + .take(1) + .next() + .expect("No associated ctx"), + ) + } + + pub fn maybe_ctx(&self, analyzer: &impl GraphBackend) -> Option { + let first = self.first_version(analyzer); + analyzer + .graph() + .edges_directed(first.0.into(), Direction::Outgoing) + .filter(|edge| *edge.weight() == Edge::Context(ContextEdge::Variable)) + .map(|edge| ContextNode::from(edge.target())) + .take(1) + .next() + } + + pub fn maybe_storage_var(&self, analyzer: &impl GraphBackend) -> Option { + Some( + analyzer + .graph() + .edges_directed(self.0.into(), Direction::Outgoing) + .find(|edge| { + *edge.weight() == Edge::Context(ContextEdge::InheritedStorageVariable) + })? + .target() + .into(), + ) + } + + pub fn name(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self.underlying(analyzer)?.name.clone()) + } + + pub fn as_controllable_name( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + if self.is_fundamental(analyzer)? { + self.display_name(analyzer) + } else if let Some(ref_range) = self.ref_range(analyzer)? { + let min_name = ref_range.range_min().simplify_minimize(analyzer, arena)?; + let min_name = min_name.to_range_string(false, analyzer, arena).s; + let max_name = ref_range + .range_max() + .simplify_maximize(analyzer, arena)? 
+ .to_range_string(true, analyzer, arena) + .s; + + if max_name == min_name { + Ok(max_name) + } else { + self.display_name(analyzer) + } + } else { + self.display_name(analyzer) + } + } + + pub fn display_name(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self.underlying(analyzer)?.display_name.clone()) + } + + pub fn return_assignments(&self, analyzer: &impl GraphBackend) -> Vec { + let latest = self.latest_version(analyzer); + let mut earlier = latest; + let mut return_assignments = vec![]; + while let Some(prev) = earlier.previous_version(analyzer) { + if earlier.is_return_assignment(analyzer) { + return_assignments.push(earlier) + } + earlier = prev; + } + return_assignments + } + + pub fn ext_return_assignments(&self, analyzer: &impl GraphBackend) -> Vec { + let latest = self.latest_version(analyzer); + let mut earlier = latest; + let mut return_assignments = vec![]; + if earlier.is_ext_return_assignment(analyzer) { + return_assignments.push(earlier) + } + while let Some(prev) = earlier.previous_version(analyzer) { + earlier = prev; + if earlier.is_ext_return_assignment(analyzer) { + return_assignments.push(earlier) + } + } + return_assignments + } + + pub fn tmp_of( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + Ok(self.underlying(analyzer)?.tmp_of()) + } + + pub fn struct_to_fields( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + if self.ref_range(analyzer)?.is_none() { + let fields = analyzer + .graph() + .edges_directed(self.first_version(analyzer).into(), Direction::Incoming) + .filter(|edge| *edge.weight() == Edge::Context(ContextEdge::AttrAccess("field"))) + .map(|edge| ContextVarNode::from(edge.source()).latest_version(analyzer)) + .collect::>(); + Ok(fields) + } else { + Ok(vec![]) + } + } + + pub fn array_to_len_var(&self, analyzer: &impl GraphBackend) -> Option { + if let Some(len) = analyzer + .graph() + .edges_directed(self.0.into(), Direction::Incoming) + .find(|edge| *edge.weight() == Edge::Context(ContextEdge::AttrAccess("length"))) + .map(|edge| edge.source()) + { + Some(len.into()) + } else if let Some(prev) = self.previous_version(analyzer) { + prev.array_to_len_var(analyzer) + } else { + None + } + } + + pub fn slot_to_storage(&self, analyzer: &impl GraphBackend) -> Option { + let slot = analyzer + .graph() + .edges_directed(self.first_version(analyzer).into(), Direction::Outgoing) + .filter(|edge| *edge.weight() == Edge::Context(ContextEdge::SlotAccess)) + .map(|edge| edge.target()) + .take(1) + .next()?; + Some(ContextVarNode::from(slot).latest_version(analyzer)) + } + + pub fn index_access_to_array(&self, analyzer: &impl GraphBackend) -> Option { + if let Some(arr) = analyzer + .graph() + .edges_directed(self.0.into(), Direction::Outgoing) + .find(|edge| *edge.weight() == Edge::Context(ContextEdge::IndexAccess)) + .map(|edge| edge.target()) + { + Some(arr.into()) + } else if let Some(prev) = self.previous_version(analyzer) { + prev.index_access_to_array(analyzer) + } else { + None + } + } + + pub fn len_var_to_array( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + if let Some(arr) = analyzer.search_for_ancestor( + self.0.into(), + &Edge::Context(ContextEdge::AttrAccess("length")), + ) { + Ok(Some(ContextVarNode::from(arr).latest_version(analyzer))) + } else { + Ok(None) + } + } + + pub fn index_to_array(&self, analyzer: &impl GraphBackend) -> Option { + let arr = analyzer + .graph() + .edges_directed(self.first_version(analyzer).into(), Direction::Outgoing) + .find(|edge| 
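These accessors navigate the graph edges left behind by array and member operations: `AttrAccess("length")` links an array to its length temporary, while `IndexAccess`/`Index` link an element access to the array and the index. A hedged sketch of walking from an element access back to its array and length variable (not part of the patch; crate types assumed in scope):

    fn array_and_len_of_access(
        access: ContextVarNode,
        analyzer: &impl GraphBackend,
    ) -> Option<(ContextVarNode, Option<ContextVarNode>)> {
        // Follow the IndexAccess edge back to the array the access came from...
        let array = access.index_access_to_array(analyzer)?;
        // ...then look for the `length` attribute variable hanging off that array.
        let len = array.array_to_len_var(analyzer);
        Some((array, len))
    }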
*edge.weight() == Edge::Context(ContextEdge::IndexAccess)) + .map(|edge| edge.target())?; + Some(ContextVarNode::from(arr).latest_version(analyzer)) + } + + /// Goes from an index access (i.e. `x[idx]`) to the index (i.e. `idx`) + pub fn index_access_to_index(&self, analyzer: &impl GraphBackend) -> Option { + let index = analyzer.find_child_exclude_via( + self.first_version(analyzer).into(), + &Edge::Context(ContextEdge::Index), + &[], + &|idx, _| Some(idx), + )?; + Some(ContextVarNode::from(index)) + } + + pub fn index_or_attr_access(&self, analyzer: &impl GraphBackend) -> Vec { + analyzer + .graph() + .edges_directed(self.0.into(), Direction::Incoming) + .filter(|edge| { + matches!( + *edge.weight(), + Edge::Context(ContextEdge::IndexAccess) + | Edge::Context(ContextEdge::AttrAccess(_)) + ) + }) + .map(|edge| ContextVarNode::from(edge.source())) + .collect() + } + + pub fn set_dependent_on(&self, analyzer: &mut impl GraphBackend) -> Result<(), GraphError> { + let mut return_self = false; + let mut first_iter = true; + let mut stack = vec![*self]; + let mut result = vec![]; + + while let Some(node) = stack.pop() { + if result.contains(&node) { + continue; + } + + let underlying = node.underlying(analyzer)?; + if let Some(tmp) = underlying.tmp_of() { + stack.push(tmp.lhs); + if let Some(rhs) = tmp.rhs { + stack.push(rhs); + } + } else if return_self { + result.push(node); + } + + if first_iter { + first_iter = false; + return_self = true; + } + } + + self.underlying_mut(analyzer)?.dep_on = Some(result); + Ok(()) + } + + pub fn dependent_on( + &self, + analyzer: &impl GraphBackend, + mut return_self: bool, + ) -> Result, GraphError> { + if let Some(dep_on) = &self.underlying(analyzer)?.dep_on { + return Ok(dep_on.to_vec()); + } + let mut first_iter = true; + let mut stack = vec![*self]; + let mut result = vec![]; + + while let Some(node) = stack.pop() { + if result.contains(&node) { + continue; + } + + let underlying = node.underlying(analyzer)?; + if let Some(tmp) = underlying.tmp_of() { + stack.push(tmp.lhs); + if let Some(rhs) = tmp.rhs { + stack.push(rhs); + } + } else if return_self { + result.push(node); + } + + if first_iter { + first_iter = false; + return_self = true; + } + } + + Ok(result) + } + + pub fn graph_dependent_on( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + let underlying = self.underlying(analyzer)?; + let mut tree = BTreeMap::default(); + if let Some(tmp) = underlying.tmp_of() { + tree.insert(*self, tmp); + tmp.lhs + .graph_dependent_on(analyzer)? + .into_iter() + .for_each(|(key, v)| { + if let Some(_v) = tree.get_mut(&key) { + panic!("here") + } else { + tree.insert(key, v); + } + }); + if let Some(rhs) = tmp.rhs { + rhs.graph_dependent_on(analyzer)? 
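`dependent_on` is a plain work-list walk: temporaries expand into their `TmpConstruction` operands, and only non-temporary leaves end up in the result (the `return_self` flag merely decides whether a leaf *self* is counted). A standalone toy of the same traversal over a tiny expression DAG, with the first-iteration bookkeeping omitted:

    use std::collections::HashMap;

    // node id -> optional (lhs, rhs) operands; absent means a leaf variable
    fn leaves(start: u32, tmp_of: &HashMap<u32, (u32, Option<u32>)>) -> Vec<u32> {
        let mut stack = vec![start];
        let mut result = vec![];
        while let Some(node) = stack.pop() {
            if result.contains(&node) { continue; }
            if let Some((lhs, rhs)) = tmp_of.get(&node) {
                stack.push(*lhs);
                if let Some(rhs) = rhs { stack.push(*rhs); }
            } else {
                result.push(node); // non-tmp leaf: a candidate "fundamental" dependency
            }
        }
        result
    }

    fn main() {
        // tmp 3 = (tmp 2 op leaf 1), tmp 2 = (leaf 0 op leaf 1)
        let tmp_of = HashMap::from([(3, (2, Some(1))), (2, (0, Some(1)))]);
        assert_eq!(leaves(3, &tmp_of), vec![1, 0]);
    }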
+ .into_iter() + .for_each(|(key, v)| { + if let Some(_v) = tree.get_mut(&key) { + panic!("here") + } else { + tree.insert(key, v); + } + }); + } + } + + Ok(tree) + } +} diff --git a/crates/graph/src/nodes/context/var/ranging.rs b/crates/graph/src/nodes/context/var/ranging.rs new file mode 100644 index 00000000..8f0fbef1 --- /dev/null +++ b/crates/graph/src/nodes/context/var/ranging.rs @@ -0,0 +1,450 @@ +use crate::range::elem::*; +use crate::{ + nodes::{Concrete, ContextVarNode}, + range::{range_string::ToRangeString, Range, RangeEval}, + AnalyzerBackend, GraphBackend, GraphError, SolcRange, VarType, +}; + +use shared::RangeArena; + +use solang_parser::pt::Loc; + +impl ContextVarNode { + pub fn range(&self, analyzer: &impl GraphBackend) -> Result, GraphError> { + self.underlying(analyzer)?.ty.range(analyzer) + } + + pub fn range_string( + &self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + if let Some(range) = self.ref_range(analyzer)? { + Ok(Some(format!( + "[ {}, {} ]", + range + .evaled_range_min(analyzer, arena)? + .to_range_string(false, analyzer, arena) + .s, + range + .evaled_range_max(analyzer, arena)? + .to_range_string(true, analyzer, arena) + .s + ))) + } else { + Ok(None) + } + } + + pub fn simplified_range_string( + &self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + if let Some(range) = self.ref_range(analyzer)? { + Ok(Some(format!( + "[ {}, {} ]", + range + .simplified_range_min(analyzer, arena)? + .to_range_string(false, analyzer, arena) + .s, + range + .simplified_range_max(analyzer, arena)? + .to_range_string(true, analyzer, arena) + .s + ))) + } else { + Ok(None) + } + } + + pub fn ref_range<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result>, GraphError> { + self.underlying(analyzer)?.ty.ref_range(analyzer) + } + + pub fn range_min( + &self, + analyzer: &impl GraphBackend, + ) -> Result>, GraphError> { + if let Some(r) = self.ref_range(analyzer)? { + Ok(Some(r.range_min().into_owned())) + } else { + Ok(None) + } + } + + pub fn range_max( + &self, + analyzer: &impl GraphBackend, + ) -> Result>, GraphError> { + if let Some(r) = self.ref_range(analyzer)? { + Ok(Some(r.range_max().into_owned())) + } else { + Ok(None) + } + } + + pub fn evaled_range_min( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result>, GraphError> { + if let Some(r) = self.ref_range(analyzer)? { + Ok(Some(r.evaled_range_min(analyzer, arena)?)) + } else { + Ok(None) + } + } + + pub fn evaled_range_max( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result>, GraphError> { + if let Some(r) = self.ref_range(analyzer)? 
{ + Ok(Some(r.evaled_range_max(analyzer, arena)?)) + } else { + Ok(None) + } + } + + pub fn as_range_elem( + &self, + analyzer: &impl GraphBackend, + loc: Loc, + ) -> Result, GraphError> { + match self.underlying(analyzer)?.ty { + VarType::Concrete(c) => Ok(Elem::Concrete(RangeConcrete { + val: c.underlying(analyzer)?.clone(), + loc, + })), + _ => Ok(Elem::from(*self)), + } + } + + pub fn cache_range( + &self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + if let Some(mut range) = self.ty_mut(analyzer)?.take_range() { + // range.cache_flatten(analyzer)?; + range.cache_eval(analyzer, arena)?; + self.set_range(analyzer, range)?; + } + Ok(()) + } + + pub fn cache_flattened_range( + &self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + if let Some(mut range) = self.ty_mut(analyzer)?.take_range() { + range.cache_flatten(analyzer, arena)?; + self.set_range(analyzer, range)?; + } + Ok(()) + } + + pub fn cache_eval_range( + &self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + if let Some(mut range) = self.ty_mut(analyzer)?.take_range() { + range.cache_eval(analyzer, arena)?; + self.set_range(analyzer, range)?; + } + Ok(()) + } + + pub fn ty_mut<'a>( + &self, + analyzer: &'a mut impl GraphBackend, + ) -> Result<&'a mut VarType, GraphError> { + Ok(&mut self.underlying_mut(analyzer)?.ty) + } + + pub fn set_range( + &self, + analyzer: &mut impl GraphBackend, + new_range: SolcRange, + ) -> Result<(), GraphError> { + let underlying = self.underlying_mut(analyzer)?; + underlying.set_range(new_range); + Ok(()) + } + + pub fn fallback_range( + &self, + analyzer: &mut impl GraphBackend, + ) -> Result, GraphError> { + let underlying = self.underlying(analyzer)?; + underlying.fallback_range(analyzer) + } + + pub fn needs_fallback(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self.underlying(analyzer)?.needs_fallback()) + } + + pub fn range_contains_elem( + &self, + elem: Elem, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + if let Some(r) = self.ref_range(analyzer)? { + Ok(r.contains_elem(&elem, analyzer, arena)) + } else { + Ok(false) + } + } + + // #[tracing::instrument(level = "trace", skip_all)] + pub fn set_range_min( + &self, + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, + mut new_min: Elem, + ) -> Result<(), GraphError> { + assert!(self.latest_version(analyzer) == *self); + if new_min + .recursive_dependent_on(analyzer, arena)? + .contains(self) + { + if let Some(prev) = self.previous_or_inherited_version(analyzer) { + new_min.filter_recursion((*self).into(), prev.into(), analyzer, arena); + } else { + return Err(GraphError::UnbreakableRecursion(format!("The variable {}'s range is self-referential and we cannot break the recursion.", self.display_name(analyzer)?))); + } + } + + tracing::trace!("new min: {new_min}"); + new_min.arenaize(analyzer, arena)?; + + // new_min.cache_flatten(analyzer)?; + // new_min.cache_minimize(analyzer)?; + + tracing::trace!( + "setting range minimum: {} (node idx: {}), current:{}, new_min:{} ({}), deps: {:#?}", + self.display_name(analyzer)?, + self.0, + self.range_min(analyzer) + .unwrap_or_default() + .unwrap_or_default(), + new_min.recurse_dearenaize(analyzer, arena), + new_min, + new_min.recursive_dependent_on(analyzer, arena)? + ); + + if self.is_concrete(analyzer)? 
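The `cache_*` helpers above all follow the same take-modify-put-back shape: pull the `SolcRange` out of the variable's type, pre-compute (evaluate or flatten) it against the arena, then write it back with `set_range`. A hedged sketch of the calling pattern only (not part of the patch; crate types assumed in scope):

    fn refresh_bounds(
        var: ContextVarNode,
        analyzer: &mut impl GraphBackend,
        arena: &mut RangeArena<Elem<Concrete>>,
    ) -> Result<(), GraphError> {
        // Evaluate and store the concrete min/max so later queries
        // (e.g. `evaled_range_min`) hit the cached result.
        var.cache_range(analyzer, arena)?;
        // Optionally also cache the flattened form used when deps are
        // normalized into solver atoms.
        var.cache_flattened_range(analyzer, arena)?;
        Ok(())
    }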
{ + let mut new_ty = self.ty(analyzer)?.clone(); + new_ty.concrete_to_builtin(analyzer)?; + self.underlying_mut(analyzer)?.ty = new_ty; + self.set_range_min(analyzer, arena, new_min)?; + } else { + let fallback = if self.needs_fallback(analyzer)? { + self.fallback_range(analyzer)? + } else { + None + }; + self.underlying_mut(analyzer)? + .set_range_min(new_min, fallback)?; + } + self.cache_range(analyzer, arena)?; + Ok(()) + } + + // #[tracing::instrument(level = "trace", skip_all)] + pub fn set_range_max( + &self, + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, + mut new_max: Elem, + ) -> Result<(), GraphError> { + assert!(self.latest_version(analyzer) == *self); + if new_max + .recursive_dependent_on(analyzer, arena)? + .contains(self) + { + if let Some(prev) = self.previous_or_inherited_version(analyzer) { + new_max.filter_recursion((*self).into(), prev.into(), analyzer, arena); + } + } + + new_max.arenaize(analyzer, arena)?; + + tracing::trace!( + "setting range maximum: {:?}, {}, current: {}, new: {}", + self, + self.display_name(analyzer)?, + self.ref_range(analyzer)?.unwrap().range_max(), // .unwrap() + new_max + ); + + if self.is_concrete(analyzer)? { + let mut new_ty = self.ty(analyzer)?.clone(); + new_ty.concrete_to_builtin(analyzer)?; + self.underlying_mut(analyzer)?.ty = new_ty; + self.set_range_max(analyzer, arena, new_max)?; + } else { + let fallback = if self.needs_fallback(analyzer)? { + self.fallback_range(analyzer)? + } else { + None + }; + + self.underlying_mut(analyzer)? + .set_range_max(new_max, fallback)?; + } + + self.cache_range(analyzer, arena)?; + Ok(()) + } + + pub fn set_range_exclusions( + &self, + analyzer: &mut impl GraphBackend, + new_exclusions: Vec, + ) -> Result<(), GraphError> { + tracing::trace!( + "setting range exclusions for {}", + self.display_name(analyzer)? + ); + assert!(*self == self.latest_version(analyzer)); + let fallback = if self.needs_fallback(analyzer)? { + self.fallback_range(analyzer)? + } else { + None + }; + + // let new_exclusions = new_exclusions + // .into_iter() + // .map(|excl| analyzer.range_arena_idx_or_upsert(excl)) + // .collect(); + + self.underlying_mut(analyzer)? + .set_range_exclusions(new_exclusions, fallback)?; + Ok(()) + } + + pub fn try_set_range_min( + &self, + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, + mut new_min: Elem, + ) -> Result { + assert!(self.latest_version(analyzer) == *self); + if new_min + .recursive_dependent_on(analyzer, arena)? + .contains(self) + { + if let Some(prev) = self.previous_version(analyzer) { + new_min.filter_recursion((*self).into(), prev.into(), analyzer, arena); + } + } + + new_min.arenaize(analyzer, arena)?; + + if self.is_concrete(analyzer)? { + let mut new_ty = self.ty(analyzer)?.clone(); + new_ty.concrete_to_builtin(analyzer)?; + self.underlying_mut(analyzer)?.ty = new_ty; + self.try_set_range_min(analyzer, arena, new_min) + } else { + let fallback = if self.needs_fallback(analyzer)? { + self.fallback_range(analyzer)? + } else { + None + }; + Ok(self + .underlying_mut(analyzer)? + .try_set_range_min(new_min, fallback)) + } + } + + pub fn try_set_range_max( + &self, + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, + mut new_max: Elem, + ) -> Result { + assert!(self.latest_version(analyzer) == *self); + if new_max + .recursive_dependent_on(analyzer, arena)? 
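Both range setters insist on being called on the newest version of the variable (`assert!(self.latest_version(analyzer) == *self)`) and guard against self-referential ranges by rewriting any reference to the previous version before arenaizing. A hedged sketch of tightening a lower bound (illustrative, not the analyzer's actual call site; crate imports such as `U256` assumed in scope):

    fn require_at_least_one(
        var: ContextVarNode,
        analyzer: &mut impl AnalyzerBackend,
        arena: &mut RangeArena<Elem<Concrete>>,
    ) -> Result<(), GraphError> {
        // Always address the latest version; older versions are frozen history.
        let latest = var.latest_version(analyzer);

        // New lower bound of 1, expressed as a concrete range element.
        let one = Elem::from(Concrete::from(U256::one()));
        latest.set_range_min(analyzer, arena, one)?;
        Ok(())
    }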
+ .contains(self) + { + if let Some(prev) = self.previous_version(analyzer) { + new_max.filter_recursion((*self).into(), prev.into(), analyzer, arena); + } + } + + new_max.arenaize(analyzer, arena)?; + + if self.is_concrete(analyzer)? { + let mut new_ty = self.ty(analyzer)?.clone(); + new_ty.concrete_to_builtin(analyzer)?; + self.underlying_mut(analyzer)?.ty = new_ty; + self.try_set_range_max(analyzer, arena, new_max) + } else { + let fallback = if self.needs_fallback(analyzer)? { + self.fallback_range(analyzer)? + } else { + None + }; + Ok(self + .underlying_mut(analyzer)? + .try_set_range_max(new_max, fallback)) + } + } + + pub fn try_set_range_exclusions( + &self, + analyzer: &mut impl GraphBackend, + new_exclusions: Vec, + ) -> Result { + tracing::trace!( + "setting range exclusions for: {}", + self.display_name(analyzer).unwrap() + ); + assert!(*self == self.latest_version(analyzer)); + let fallback = if self.needs_fallback(analyzer)? { + self.fallback_range(analyzer)? + } else { + None + }; + + // let new_exclusions = new_exclusions + // .into_iter() + // .map(|excl| analyzer.range_arena_idx_or_upsert(excl)) + // .collect(); + + Ok(self + .underlying_mut(analyzer)? + .try_set_range_exclusions(new_exclusions, fallback)) + } + + pub fn range_deps( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + if let Some(range) = self.ref_range(analyzer)? { + Ok(range.dependent_on(analyzer, arena)) + } else { + Ok(vec![]) + } + } + + pub fn sol_delete_range(&mut self, analyzer: &mut impl GraphBackend) -> Result<(), GraphError> { + let ty = self.ty(analyzer)?; + if let Some(delete_range) = ty.delete_range_result(analyzer)? { + self.set_range(analyzer, delete_range)?; + } + Ok(()) + } +} diff --git a/crates/graph/src/nodes/context/var/typing.rs b/crates/graph/src/nodes/context/var/typing.rs new file mode 100644 index 00000000..4ca75ccf --- /dev/null +++ b/crates/graph/src/nodes/context/var/typing.rs @@ -0,0 +1,614 @@ +use crate::{ + elem::Elem, + nodes::{Builtin, Concrete, ContextNode, ContextVarNode}, + range::{ + elem::{RangeElem, RangeExpr, RangeOp}, + RangeEval, + }, + AnalyzerBackend, ContextEdge, Edge, GraphBackend, GraphError, Node, VarType, +}; + +use shared::{RangeArena, Search, StorageLocation}; + +use ethers_core::types::{I256, U256}; +use petgraph::{visit::EdgeRef, Direction}; +use solang_parser::pt::Loc; + +impl ContextVarNode { + pub fn ty<'a>(&self, analyzer: &'a impl GraphBackend) -> Result<&'a VarType, GraphError> { + Ok(&self.underlying(analyzer)?.ty) + } + + pub fn ty_max_concrete( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + if let Ok(b) = self.underlying(analyzer)?.ty.as_builtin(analyzer) { + if let Some(zero) = b.zero_concrete() { + return Ok(Concrete::max_of_type(&zero)); + } + } + + Ok(None) + } + pub fn ty_min_concrete( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + if let Ok(b) = self.underlying(analyzer)?.ty.as_builtin(analyzer) { + if let Some(zero) = b.zero_concrete() { + return Ok(Concrete::min_of_type(&zero)); + } + } + + Ok(None) + } + + pub fn ty_zero_concrete( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + if let Ok(b) = self.underlying(analyzer)?.ty.as_builtin(analyzer) { + return Ok(b.zero_concrete()); + } + + Ok(None) + } + + pub fn ty_eq_ty( + &self, + other: &VarType, + analyzer: &impl GraphBackend, + ) -> Result { + self.ty(analyzer)?.ty_eq(other, analyzer) + } + + pub fn is_mapping(&self, analyzer: &impl GraphBackend) -> Result { + 
self.ty(analyzer)?.is_mapping(analyzer) + } + + pub fn is_dyn(&self, analyzer: &impl GraphBackend) -> Result { + self.ty(analyzer)?.is_dyn(analyzer) + } + + pub fn is_indexable(&self, analyzer: &impl GraphBackend) -> Result { + self.ty(analyzer)?.is_indexable(analyzer) + } + + pub fn is_storage(&self, analyzer: &impl GraphBackend) -> Result { + Ok(matches!( + self.underlying(analyzer)?.storage, + Some(StorageLocation::Storage(..)) + ) || self.is_attr_or_index_of_storage(analyzer)) + } + + pub fn is_memory(&self, analyzer: &impl GraphBackend) -> Result { + Ok(matches!( + self.underlying(analyzer)?.storage, + Some(StorageLocation::Memory(..)) + )) + } + + pub fn is_return_assignment(&self, analyzer: &impl GraphBackend) -> bool { + analyzer + .graph() + .edges_directed(self.0.into(), Direction::Incoming) + .any(|edge| { + Edge::Context(ContextEdge::ReturnAssign(true)) == *edge.weight() + || Edge::Context(ContextEdge::ReturnAssign(false)) == *edge.weight() + }) + } + + pub fn is_ext_return_assignment(&self, analyzer: &impl GraphBackend) -> bool { + analyzer + .graph() + .edges_directed(self.0.into(), Direction::Incoming) + .any(|edge| Edge::Context(ContextEdge::ReturnAssign(true)) == *edge.weight()) + } + + pub fn is_storage_or_calldata_input( + &self, + analyzer: &impl GraphBackend, + ) -> Result { + let global_first = self.global_first_version(analyzer); + Ok(global_first.is_storage(analyzer)? || global_first.is_calldata_input(analyzer)) + } + + pub fn is_fundamental(&self, analyzer: &impl GraphBackend) -> Result { + let global_first = self.global_first_version(analyzer); + let is_independent = self.is_independent(analyzer)?; + + Ok(is_independent + && ( + global_first.is_storage(analyzer)? + || global_first.is_calldata_input(analyzer) + || global_first.is_msg(analyzer)? + || global_first.is_block(analyzer)? + || ( + // if its a function input, and we are evaluating the function + // as a standalone (i.e. its internal, but we are treating it like its external) + // it wont be marked as calldata, but for the purposes + // of determining controllability it is to better to assume there is some path that lets us + // control it + global_first.is_func_input(analyzer) + && global_first.maybe_ctx(analyzer).is_some() + && !global_first.ctx(analyzer).has_parent(analyzer)? 
+ ) + || self.is_attr_or_index_of_fundamental(analyzer) + // performed last to try to not have to do this check + )) + } + + pub fn is_attr_or_index_of_fundamental(&self, analyzer: &impl GraphBackend) -> bool { + let direct_is_fundamental = analyzer + .graph() + .edges_directed(self.0.into(), Direction::Outgoing) + .any(|edge| { + if matches!( + edge.weight(), + Edge::Context(ContextEdge::AttrAccess(_)) + | Edge::Context(ContextEdge::IndexAccess) + | Edge::Context(ContextEdge::Index) + ) { + ContextVarNode::from(edge.target()) + .is_fundamental(analyzer) + .unwrap_or(false) + } else { + false + } + }); + if direct_is_fundamental { + direct_is_fundamental + } else if let Some(prev) = self.previous_global_version(analyzer) { + prev.is_attr_or_index_of_fundamental(analyzer) + } else { + false + } + } + + pub fn is_attr_or_index_of_storage(&self, analyzer: &impl GraphBackend) -> bool { + let direct_is_storage = analyzer + .graph() + .edges_directed(self.0.into(), Direction::Outgoing) + .any(|edge| { + if matches!( + edge.weight(), + Edge::Context(ContextEdge::AttrAccess(_)) + | Edge::Context(ContextEdge::IndexAccess) + | Edge::Context(ContextEdge::Index) + ) { + ContextVarNode::from(edge.target()) + .is_storage(analyzer) + .unwrap_or(false) + } else { + false + } + }); + if direct_is_storage { + direct_is_storage + } else if let Some(prev) = self.previous_or_inherited_version(analyzer) { + prev.is_attr_or_index_of_storage(analyzer) + } else { + false + } + } + + pub fn is_independent(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self.dependent_on(analyzer, false)?.is_empty() && self.tmp_of(analyzer)?.is_none()) + } + + pub fn is_controllable(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self + .dependent_on(analyzer, true)? + .iter() + .any(|dependent_on| { + if let Ok(t) = dependent_on.is_fundamental(analyzer) { + t + } else { + false + } + })) + } + + pub fn is_calldata_input(&self, analyzer: &impl GraphBackend) -> bool { + let global_first = self.global_first_version(analyzer); + analyzer + .graph() + .edges_directed(global_first.0.into(), Direction::Outgoing) + .any(|edge| Edge::Context(ContextEdge::CalldataVariable) == *edge.weight()) + } + + pub fn is_msg(&self, analyzer: &impl GraphBackend) -> Result { + Ok(matches!( + self.underlying(analyzer)?.storage, + Some(StorageLocation::Msg(..)) + )) + } + + pub fn is_block(&self, analyzer: &impl GraphBackend) -> Result { + Ok(matches!( + self.underlying(analyzer)?.storage, + Some(StorageLocation::Block(..)) + )) + } + + pub fn is_func_input(&self, analyzer: &impl GraphBackend) -> bool { + let first = self.first_version(analyzer); + analyzer + .graph() + .edges_directed(first.0.into(), Direction::Outgoing) + .any(|edge| { + Edge::Context(ContextEdge::InputVariable) == *edge.weight() + || Edge::Context(ContextEdge::CalldataVariable) == *edge.weight() + }) + } + + pub fn is_const( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + let underlying = self.underlying(analyzer)?; + underlying.ty.is_const(analyzer, arena) + } + + pub fn is_symbolic(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self.underlying(analyzer)?.is_symbolic) + } + + pub fn is_tmp(&self, analyzer: &impl GraphBackend) -> Result { + let underlying = self.underlying(analyzer)?; + Ok(underlying.is_tmp()) + } + + pub fn is_return_node(&self, analyzer: &impl GraphBackend) -> Result { + if let Some(ctx) = self.maybe_ctx(analyzer) { + return Ok(ctx + .underlying(analyzer)? 
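`is_controllable` reduces to: walk `dependent_on(.., true)` and ask whether any leaf is *fundamental*, i.e. rooted in storage, calldata, `msg`, `block`, or an input of a function analyzed standalone. A hedged usage sketch (not part of the patch; crate types assumed in scope):

    fn warn_if_attacker_controlled(
        var: ContextVarNode,
        analyzer: &impl GraphBackend,
    ) -> Result<(), GraphError> {
        if var.is_controllable(analyzer)? {
            // Some fundamental leaf is a value an external caller could pick,
            // so bounds on `var` are worth reporting to the user.
            println!("{} is attacker-influenceable", var.display_name(analyzer)?);
        }
        Ok(())
    }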
+ .ret + .iter() + .any(|(_, node)| node.name(analyzer).unwrap() == self.name(analyzer).unwrap())); + } + Ok(false) + } + + pub fn is_return_node_in_any( + &self, + ctxs: &[ContextNode], + analyzer: &impl GraphBackend, + ) -> bool { + ctxs.iter().any(|ctx| { + ctx.underlying(analyzer) + .unwrap() + .ret + .iter() + .any(|(_, node)| node.name(analyzer).unwrap() == self.name(analyzer).unwrap()) + }) + } + + pub fn is_len_var(&self, analyzer: &impl GraphBackend) -> bool { + analyzer + .search_for_ancestor( + self.first_version(analyzer).into(), + &Edge::Context(ContextEdge::AttrAccess("length")), + ) + .is_some() + } + + pub fn is_array_index_access(&self, analyzer: &impl GraphBackend) -> bool { + analyzer + .search_for_ancestor( + self.first_version(analyzer).into(), + &Edge::Context(ContextEdge::IndexAccess), + ) + .is_some() + } + + pub fn is_concrete(&self, analyzer: &impl GraphBackend) -> Result { + Ok(matches!(self.ty(analyzer)?, VarType::Concrete(_))) + } + + pub fn as_concrete(&self, analyzer: &impl GraphBackend) -> Result { + match &self.ty(analyzer)? { + VarType::Concrete(c) => Ok(c.underlying(analyzer)?.clone()), + e => Err(GraphError::NodeConfusion(format!( + "Expected variable type to be concrete but was: {e:?}" + ))), + } + } + + pub fn as_cast_tmp( + &self, + loc: Loc, + ctx: ContextNode, + cast_ty: Builtin, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + let new_underlying = self + .underlying(analyzer)? + .clone() + .as_cast_tmp(loc, ctx, cast_ty, analyzer)?; + let node = analyzer.add_node(Node::ContextVar(new_underlying)); + ctx.add_var(node.into(), analyzer)?; + analyzer.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + Ok(node.into()) + } + + pub fn as_tmp( + &self, + loc: Loc, + ctx: ContextNode, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + let new_underlying = self + .underlying(analyzer)? + .clone() + .as_tmp(loc, ctx, analyzer)?; + Ok(analyzer.add_node(Node::ContextVar(new_underlying)).into()) + } + + pub fn ty_eq( + &self, + other: &Self, + analyzer: &mut impl GraphBackend, + ) -> Result { + self.ty(analyzer)?.ty_eq(other.ty(analyzer)?, analyzer) + } + + /// Performs an in-place cast + pub fn cast_from( + &self, + other: &Self, + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + let other_ty = other.ty(analyzer)?.clone(); + if other_ty.ty_eq(&self.underlying(analyzer)?.ty, analyzer)? 
{ + return Ok(()); + } + + let min_expr = Elem::Expr(RangeExpr::new( + self.range_min(analyzer)?.expect("Should have a minimum"), + RangeOp::Cast, + Elem::from(*other), + )); + + let max_expr = Elem::Expr(RangeExpr::new( + self.range_max(analyzer)?.expect("Should have a maximum"), + RangeOp::Cast, + Elem::from(*other), + )); + + self.underlying_mut(analyzer)?.ty = other_ty; + + self.set_range_min(analyzer, arena, min_expr)?; + self.set_range_max(analyzer, arena, max_expr)?; + Ok(()) + } + + pub fn literal_cast_from( + &self, + other: &Self, + analyzer: &mut impl AnalyzerBackend, + ) -> Result<(), GraphError> { + let to_ty = other.ty(analyzer)?.clone(); + self.literal_cast_from_ty(to_ty, analyzer)?; + Ok(()) + } + + // pub fn cast_from_ty( + // &self, + // to_ty: VarType, + // analyzer: &mut (impl GraphBackend + AnalyzerBackend), + // ) -> Result<(), GraphError> { + // let new_underlying = self.underlying(analyzer)?.clone(); + // let node = analyzer.add_node(Node::ContextVar(new_underlying)); + // analyzer.add_edge(node, *self, Edge::Context(ContextEdge::Prev)); + // let new_self = ContextVarNode::from(node); + + // let from_ty = self.ty(analyzer)?.clone(); + // if !from_ty.ty_eq(&to_ty, analyzer)? { + // if let Some(new_ty) = from_ty.try_cast(&to_ty, analyzer)? { + // new_self.underlying_mut(analyzer)?.ty = new_ty; + // } + + // if let Some((new_min, new_max)) = self.cast_exprs(&to_ty, analyzer)? { + // new_self.set_range_min(analyzer, new_min)?; + // new_self.set_range_max(analyzer, new_max)?; + // } + // } + + // if let (VarType::Concrete(_), VarType::Concrete(cnode)) = (new_self.ty(analyzer)?, to_ty) { + // // update name + // let display_name = cnode.underlying(analyzer)?.as_human_string(); + // new_self.underlying_mut(analyzer)?.display_name = display_name; + // } + // Ok(()) + // } + + pub fn cast_from_ty( + &self, + to_ty: VarType, + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + let from_ty = self.ty(analyzer)?.clone(); + if !from_ty.ty_eq(&to_ty, analyzer)? { + if let Some(new_ty) = from_ty.clone().try_cast(&to_ty, analyzer)? { + self.underlying_mut(analyzer)?.ty = new_ty; + } + + if let (Some(mut r), Some(r2)) = + (self.ty_mut(analyzer)?.take_range(), to_ty.range(analyzer)?) + { + r.min.arenaize(analyzer, arena)?; + r.max.arenaize(analyzer, arena)?; + + let mut min_expr = r + .min + .clone() + .cast(r2.min.clone()) + .min(r.max.clone().cast(r2.min.clone())); + let mut max_expr = r + .min + .clone() + .cast(r2.min.clone()) + .max(r.max.clone().cast(r2.min)); + + min_expr.arenaize(analyzer, arena)?; + max_expr.arenaize(analyzer, arena)?; + + let zero = Elem::from(Concrete::from(U256::zero())); + if r.contains_elem(&zero, analyzer, arena) { + min_expr = min_expr.min(zero.clone()); + max_expr = max_expr.max(zero); + } + + if let (VarType::BuiltIn(from_bn, _), VarType::BuiltIn(to_bn, _)) = + (self.ty(analyzer)?, to_ty.clone()) + { + match (from_bn.underlying(analyzer)?, to_bn.underlying(analyzer)?) { + (Builtin::Uint(_), int @ Builtin::Int(_)) => { + // from ty is uint, to ty is int, check if type(int.min).bit_representation() + // is in range + if let Some(r) = self.ref_range(analyzer)? 
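The uint/int special cases here exist because a cast reinterprets bits, not values: `type(int8).min` shares the bit pattern 0x80 with `uint8(128)`, and `int8(-1)` shares 0xff with `type(uint8).max`, which is exactly what the `bit_representation()` containment checks account for when widening the cast range. A standalone Rust illustration of the bit-level facts being relied on (illustrative only, not part of the patch):

    fn main() {
        // uint -> int: the bit pattern of i8::MIN is 128 as a u8 ...
        assert_eq!(i8::MIN as u8, 0x80);        // 128
        // ... so a uint8 range containing 128 maps onto i8::MIN after the cast.
        assert_eq!(0x80u8 as i8, i8::MIN);      // -128

        // int -> uint: -1 reinterprets to the unsigned maximum.
        assert_eq!((-1i8) as u8, u8::MAX);      // 255
        assert_eq!(u8::MAX as i8, -1);
    }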
{ + let int_min = int.min_concrete().unwrap(); + let bit_repr = int_min.bit_representation().unwrap(); + let bit_repr = bit_repr.into(); + if r.contains_elem(&bit_repr, analyzer, arena) { + min_expr = min_expr.min(int_min.clone().into()); + max_expr = max_expr.max(int_min.into()); + } + } + } + (Builtin::Int(_), Builtin::Uint(_size)) => { + // from ty is int, to ty is uint + if let Some(r) = self.ref_range(analyzer)? { + let neg1 = Concrete::from(I256::from(-1i32)); + if r.contains_elem(&neg1.clone().into(), analyzer, arena) { + max_expr = + max_expr.max(neg1.bit_representation().unwrap().into()); + } + } + } + _ => {} + } + } + r.min = min_expr; + r.max = max_expr; + r.min.arenaize(analyzer, arena)?; + r.max.arenaize(analyzer, arena)?; + self.set_range(analyzer, r)?; + } + } + + if let (VarType::Concrete(_), VarType::Concrete(cnode)) = (self.ty(analyzer)?, to_ty) { + // update name + let display_name = cnode.underlying(analyzer)?.as_string(); + self.underlying_mut(analyzer)?.display_name = display_name; + } + Ok(()) + } + + pub fn literal_cast_from_ty( + &self, + to_ty: VarType, + analyzer: &mut impl AnalyzerBackend, + ) -> Result<(), GraphError> { + let from_ty = self.ty(analyzer)?.clone(); + if !from_ty.ty_eq(&to_ty, analyzer)? { + if let Some(new_ty) = from_ty.try_literal_cast(&to_ty, analyzer)? { + self.underlying_mut(analyzer)?.ty = new_ty; + } + // we dont need to update the ranges because a literal by definition is concrete + } + + if let (VarType::Concrete(_), VarType::Concrete(cnode)) = (self.ty(analyzer)?, to_ty) { + // update name + let display_name = cnode.underlying(analyzer)?.as_human_string(); + self.underlying_mut(analyzer)?.display_name = display_name; + } + Ok(()) + } + + pub fn try_increase_size( + &self, + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + let from_ty = self.ty(analyzer)?.clone(); + self.cast_from_ty(from_ty.max_size(analyzer)?, analyzer, arena)?; + Ok(()) + } + + pub fn is_int(&self, analyzer: &impl GraphBackend) -> Result { + self.ty(analyzer)?.is_int(analyzer) + } + + pub fn cast_exprs( + &self, + to_ty: &VarType, + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, + ) -> Result, Elem)>, GraphError> { + if let Some(to_range) = to_ty.range(analyzer)? { + let mut min_expr = (*self) + .range_min(analyzer)? + .unwrap() + .cast(to_range.min.clone()) + .min( + (*self) + .range_max(analyzer)? + .unwrap() + .cast(to_range.min.clone()), + ); + let mut max_expr = (*self) + .range_min(analyzer)? + .unwrap() + .cast(to_range.min.clone()) + .max((*self).range_max(analyzer)?.unwrap().cast(to_range.min)); + + if let Some(r) = self.ref_range(analyzer)? { + let zero = Elem::from(Concrete::from(U256::zero())); + if r.contains_elem(&zero, analyzer, arena) { + min_expr = min_expr.min(zero.clone()); + max_expr = max_expr.max(zero); + } + } + + if let (VarType::BuiltIn(from_bn, _), VarType::BuiltIn(to_bn, _)) = + (self.ty(analyzer)?, to_ty) + { + match (from_bn.underlying(analyzer)?, to_bn.underlying(analyzer)?) { + (Builtin::Uint(_), int @ Builtin::Int(_)) => { + // from ty is uint, to ty is int, check if type(int.min).bit_representation() + // is in range + if let Some(r) = self.ref_range(analyzer)? 
{ + let int_min = int.min_concrete().unwrap(); + let bit_repr = int_min.bit_representation().unwrap(); + let bit_repr = bit_repr.into(); + if r.contains_elem(&bit_repr, analyzer, arena) { + min_expr = min_expr.min(int_min.clone().into()); + max_expr = max_expr.max(int_min.into()); + } + } + } + (Builtin::Int(_), Builtin::Uint(_size)) => { + // from ty is int, to ty is uint + if let Some(r) = self.ref_range(analyzer)? { + let neg1 = Concrete::from(I256::from(-1i32)); + if r.contains_elem(&neg1.clone().into(), analyzer, arena) { + max_expr = max_expr.max(neg1.bit_representation().unwrap().into()); + } + } + } + _ => {} + } + } + + Ok(Some((min_expr, max_expr))) + } else { + Ok(None) + } + } +} diff --git a/crates/graph/src/nodes/context/var/underlying.rs b/crates/graph/src/nodes/context/var/underlying.rs new file mode 100644 index 00000000..ff31d6fb --- /dev/null +++ b/crates/graph/src/nodes/context/var/underlying.rs @@ -0,0 +1,720 @@ +use crate::{ + nodes::{ + BuiltInNode, Builtin, Concrete, ConcreteNode, ContextNode, ContextVarNode, ContractNode, + EnumNode, Field, FunctionNode, FunctionParam, FunctionReturn, StructNode, TyNode, + }, + range::Range, + AnalyzerBackend, GraphBackend, GraphError, Node, SolcRange, TypeNode, VarType, +}; + +use crate::range::elem::*; +use shared::{NodeIdx, StorageLocation}; + +use solang_parser::pt::Loc; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ContextVar { + pub loc: Option, + pub name: String, + pub display_name: String, + pub storage: Option, + pub is_tmp: bool, + pub tmp_of: Option, + pub dep_on: Option>, + pub is_symbolic: bool, + pub is_return: bool, + pub ty: VarType, +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub struct TmpConstruction { + pub lhs: ContextVarNode, + pub op: RangeOp, + pub rhs: Option, +} + +impl TmpConstruction { + pub fn new(lhs: ContextVarNode, op: RangeOp, rhs: Option) -> Self { + Self { lhs, op, rhs } + } +} + +impl ContextVar { + pub fn eq_ignore_loc(&self, other: &Self) -> bool { + self.name == other.name + && self.display_name == other.display_name + && self.storage == other.storage + && self.is_tmp == other.is_tmp + && self.tmp_of == other.tmp_of + && self.is_symbolic == other.is_symbolic + && self.is_return == other.is_return + && self.ty == other.ty + } + + pub fn is_tmp(&self) -> bool { + self.is_tmp || self.tmp_of.is_some() + } + + pub fn tmp_of(&self) -> Option { + self.tmp_of + } + + pub fn new_bin_op_tmp( + lhs_cvar: ContextVarNode, + op: RangeOp, + rhs_cvar: ContextVarNode, + ctx: ContextNode, + loc: Loc, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + Ok(ContextVar { + loc: Some(loc), + name: format!( + "tmp{}({} {} {})", + ctx.new_tmp(analyzer)?, + lhs_cvar.name(analyzer)?, + op.to_string(), + rhs_cvar.name(analyzer)? + ), + display_name: format!( + "({} {} {})", + lhs_cvar.display_name(analyzer)?, + op.to_string(), + rhs_cvar.display_name(analyzer)? + ), + storage: None, + is_tmp: true, + is_symbolic: lhs_cvar.is_symbolic(analyzer)? 
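Temporaries produced by binary ops get a deterministic name from `new_bin_op_tmp`: `tmp{counter}(lhs op rhs)` for the internal name and just `(lhs op rhs)` for the display name, with `dep_on` being the union of both operands' dependencies. A tiny standalone illustration of the naming convention only:

    // Toy mirror of the name/display_name format used by `new_bin_op_tmp`.
    fn tmp_names(ctr: usize, lhs: &str, op: &str, rhs: &str) -> (String, String) {
        (
            format!("tmp{ctr}({lhs} {op} {rhs})"),
            format!("({lhs} {op} {rhs})"),
        )
    }

    fn main() {
        let (name, display) = tmp_names(0, "x", "+", "y");
        assert_eq!(name, "tmp0(x + y)");
        assert_eq!(display, "(x + y)");
    }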
|| rhs_cvar.is_symbolic(analyzer)?, + is_return: false, + tmp_of: Some(TmpConstruction::new(lhs_cvar, op, Some(rhs_cvar))), + dep_on: { + let mut deps = lhs_cvar.dependent_on(analyzer, true)?; + deps.extend(rhs_cvar.dependent_on(analyzer, true)?); + Some(deps) + }, + ty: lhs_cvar.underlying(analyzer)?.ty.clone(), + }) + } + + pub fn new_from_concrete( + loc: Loc, + ctx: ContextNode, + concrete_node: ConcreteNode, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + let name = format!( + "tmp_{}({})", + ctx.new_tmp(analyzer)?, + concrete_node.underlying(analyzer)?.as_string() + ); + Ok(ContextVar { + loc: Some(loc), + name, + display_name: concrete_node.underlying(analyzer)?.as_human_string(), + storage: None, + is_tmp: true, + tmp_of: None, + dep_on: None, + is_symbolic: false, + is_return: false, + ty: VarType::Concrete(concrete_node), + }) + } + + pub fn as_cast_tmp( + &self, + loc: Loc, + ctx: ContextNode, + cast_ty: Builtin, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + let mut new_tmp = self.clone(); + new_tmp.loc = Some(loc); + new_tmp.is_tmp = true; + new_tmp.name = format!( + "tmp_{}({}({}))", + ctx.new_tmp(analyzer)?, + cast_ty.as_string(analyzer)?, + self.name + ); + new_tmp.display_name = format!("{}({})", cast_ty.as_string(analyzer)?, self.display_name); + Ok(new_tmp) + } + + pub fn as_tmp( + &self, + loc: Loc, + ctx: ContextNode, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + let mut new_tmp = self.clone(); + new_tmp.loc = Some(loc); + new_tmp.is_tmp = true; + new_tmp.name = format!("tmp{}({})", ctx.new_tmp(analyzer)?, self.name); + new_tmp.display_name = format!("tmp_{}", self.display_name); + Ok(new_tmp) + } + + pub fn new_from_contract( + loc: Loc, + contract_node: ContractNode, + analyzer: &impl GraphBackend, + ) -> Result { + Ok(ContextVar { + loc: Some(loc), + name: contract_node.name(analyzer)?, + display_name: contract_node.name(analyzer)?, + storage: None, + is_tmp: false, + tmp_of: None, + dep_on: None, + is_symbolic: true, + is_return: false, + ty: VarType::User( + TypeNode::Contract(contract_node), + SolcRange::try_from_builtin(&Builtin::Address), + ), + }) + } + + pub fn new_from_struct( + loc: Loc, + struct_node: StructNode, + ctx: ContextNode, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + Ok(ContextVar { + loc: Some(loc), + name: format!( + "tmp_struct_{}_{}", + ctx.new_tmp(analyzer)?, + struct_node.name(analyzer)? + ), + display_name: struct_node.name(analyzer)?, + storage: Some(StorageLocation::Memory(Loc::Implicit)), + is_tmp: false, + tmp_of: None, + dep_on: None, + is_symbolic: true, + is_return: false, + ty: VarType::User(TypeNode::Struct(struct_node), None), + }) + } + + pub fn new_from_ty( + loc: Loc, + ty_node: TyNode, + ctx: ContextNode, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + Ok(ContextVar { + loc: Some(loc), + name: format!( + "tmp_ty_{}_{}", + ctx.new_tmp(analyzer)?, + ty_node.name(analyzer)? 
+ ), + display_name: ty_node.name(analyzer)?, + storage: Some(StorageLocation::Memory(Loc::Implicit)), + is_tmp: false, + tmp_of: None, + dep_on: None, + is_symbolic: true, + is_return: false, + ty: VarType::try_from_idx(analyzer, ty_node.0.into()).unwrap(), + }) + } + + pub fn new_from_builtin( + loc: Loc, + bn_node: BuiltInNode, + analyzer: &mut impl GraphBackend, + ) -> Result { + Ok(ContextVar { + loc: Some(loc), + name: format!("tmp_{}", bn_node.underlying(analyzer)?.as_string(analyzer)?), + display_name: format!("tmp_{}", bn_node.underlying(analyzer)?.as_string(analyzer)?), + storage: None, + is_tmp: true, + tmp_of: None, + dep_on: None, + is_symbolic: false, + is_return: false, + ty: VarType::try_from_idx(analyzer, bn_node.into()).unwrap(), + }) + } + + pub fn fallback_range( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + match &self.ty { + VarType::User(TypeNode::Contract(_), ref maybe_range) => { + if let Some(range) = maybe_range { + Ok(Some(range.clone())) + } else { + Ok(SolcRange::try_from_builtin(&Builtin::Address)) + } + } + VarType::User(TypeNode::Enum(enum_node), ref maybe_range) => { + if let Some(range) = maybe_range { + Ok(Some(range.clone())) + } else { + Ok(enum_node.maybe_default_range(analyzer)?) + } + } + VarType::User(TypeNode::Ty(ty_node), ref maybe_range) => { + if let Some(range) = maybe_range { + Ok(Some(range.clone())) + } else { + let underlying = + BuiltInNode::from(ty_node.underlying(analyzer)?.ty).underlying(analyzer)?; + Ok(SolcRange::try_from_builtin(underlying)) + } + } + VarType::BuiltIn(bn, ref maybe_range) => { + if let Some(range) = maybe_range { + Ok(Some(range.clone())) + } else { + let underlying = bn.underlying(analyzer)?; + Ok(SolcRange::try_from_builtin(underlying)) + } + } + VarType::Concrete(cn) => Ok(SolcRange::from(cn.underlying(analyzer)?.clone())), + _ => Ok(None), + } + } + + pub fn set_range(&mut self, new_range: SolcRange) { + match &mut self.ty { + VarType::User(TypeNode::Contract(_), ref mut maybe_range) + | VarType::User(TypeNode::Enum(_), ref mut maybe_range) + | VarType::User(TypeNode::Ty(_), ref mut maybe_range) + | VarType::BuiltIn(_, ref mut maybe_range) => { + *maybe_range = Some(new_range); + } + VarType::Concrete(_) => {} + e => panic!("wasnt builtin: {e:?}"), + } + } + + pub fn needs_fallback(&self) -> bool { + match &self.ty { + VarType::User(TypeNode::Contract(_), ref maybe_range) + | VarType::User(TypeNode::Enum(_), ref maybe_range) + | VarType::User(TypeNode::Ty(_), ref maybe_range) + | VarType::BuiltIn(_, ref maybe_range) => maybe_range.is_none(), + _ => false, + } + } + + // #[tracing::instrument(level = "trace", skip_all)] + pub fn set_range_min( + &mut self, + new_min: Elem, + fallback_range: Option, + ) -> Result<(), GraphError> { + // tracing::trace!("Setting range min in underlying: {:?}", self.ty); + match &mut self.ty { + VarType::User(TypeNode::Contract(_), ref mut maybe_range) + | VarType::User(TypeNode::Enum(_), ref mut maybe_range) + | VarType::User(TypeNode::Ty(_), ref mut maybe_range) + | VarType::BuiltIn(_, ref mut maybe_range) => { + if let Some(range) = maybe_range { + range.set_range_min(new_min); + Ok(()) + } else if let Some(mut fr) = fallback_range { + fr.set_range_min(new_min); + *maybe_range = Some(fr); + Ok(()) + } else { + Err(GraphError::NodeConfusion(format!( + "No range and no fallback range for type: {:?}", + self.ty + ))) + } + } + VarType::Concrete(_) => Ok(()), + e => Err(GraphError::NodeConfusion(format!( + "Expected a type that has a range, but was type: 
{e:?} that had no range" + ))), + } + } + + pub fn try_set_range_min( + &mut self, + new_min: Elem, + fallback_range: Option, + ) -> bool { + match &mut self.ty { + VarType::User(TypeNode::Contract(_), ref mut maybe_range) + | VarType::User(TypeNode::Enum(_), ref mut maybe_range) + | VarType::User(TypeNode::Ty(_), ref mut maybe_range) + | VarType::BuiltIn(_, ref mut maybe_range) => { + if let Some(range) = maybe_range { + range.set_range_min(new_min); + true + } else if let Some(mut fr) = fallback_range { + fr.set_range_min(new_min); + *maybe_range = Some(fr); + true + } else { + false + } + } + VarType::Concrete(_) => true, + _ => false, + } + } + + pub fn set_range_max( + &mut self, + new_max: Elem, + fallback_range: Option, + ) -> Result<(), GraphError> { + match &mut self.ty { + VarType::User(TypeNode::Contract(_), ref mut maybe_range) + | VarType::User(TypeNode::Enum(_), ref mut maybe_range) + | VarType::User(TypeNode::Ty(_), ref mut maybe_range) + | VarType::BuiltIn(_, ref mut maybe_range) => { + if let Some(range) = maybe_range { + range.set_range_max(new_max); + Ok(()) + } else if let Some(mut fr) = fallback_range { + fr.set_range_max(new_max); + *maybe_range = Some(fr); + Ok(()) + } else { + Err(GraphError::NodeConfusion(format!( + "No range and no fallback range for type: {:?}", + self.ty + ))) + } + } + VarType::Concrete(_) => Ok(()), + e => Err(GraphError::NodeConfusion(format!( + "Expected a type that has a range, but was type: {e:?} that had no range" + ))), + } + } + + pub fn set_range_exclusions( + &mut self, + new_exclusions: Vec, + fallback_range: Option, + ) -> Result<(), GraphError> { + match &mut self.ty { + VarType::User(TypeNode::Contract(_), ref mut maybe_range) + | VarType::User(TypeNode::Enum(_), ref mut maybe_range) + | VarType::User(TypeNode::Ty(_), ref mut maybe_range) + | VarType::BuiltIn(_, ref mut maybe_range) => { + if let Some(range) = maybe_range { + range.set_range_exclusions(new_exclusions); + Ok(()) + } else if let Some(mut fr) = fallback_range { + fr.set_range_exclusions(new_exclusions); + *maybe_range = Some(fr); + Ok(()) + } else { + Err(GraphError::NodeConfusion(format!( + "No range and no fallback range for type: {:?}", + self.ty + ))) + } + } + VarType::Concrete(_) => Ok(()), + e => Err(GraphError::NodeConfusion(format!( + "Expected a type that has a range, but was type: {e:?} that had no range" + ))), + } + } + + pub fn try_set_range_max( + &mut self, + new_max: Elem, + fallback_range: Option, + ) -> bool { + match &mut self.ty { + VarType::User(TypeNode::Contract(_), ref mut maybe_range) + | VarType::User(TypeNode::Enum(_), ref mut maybe_range) + | VarType::User(TypeNode::Ty(_), ref mut maybe_range) + | VarType::BuiltIn(_, ref mut maybe_range) => { + if let Some(range) = maybe_range { + range.set_range_max(new_max); + true + } else if let Some(mut fr) = fallback_range { + fr.set_range_max(new_max); + *maybe_range = Some(fr); + true + } else { + false + } + } + VarType::Concrete(_) => true, + _ => false, + } + } + + pub fn try_set_range_exclusions( + &mut self, + new_exclusions: Vec, + fallback_range: Option, + ) -> bool { + match &mut self.ty { + VarType::User(TypeNode::Contract(_), ref mut maybe_range) + | VarType::User(TypeNode::Enum(_), ref mut maybe_range) + | VarType::User(TypeNode::Ty(_), ref mut maybe_range) + | VarType::BuiltIn(_, ref mut maybe_range) => { + if let Some(range) = maybe_range { + range.set_range_exclusions(new_exclusions); + true + } else if let Some(mut fr) = fallback_range { + fr.set_range_exclusions(new_exclusions); + 
*maybe_range = Some(fr); + true + } else { + false + } + } + VarType::Concrete(_) => true, + _ => false, + } + } + + pub fn maybe_from_user_ty( + analyzer: &impl GraphBackend, + loc: Loc, + node_idx: NodeIdx, + ) -> Option { + if let Some(ty) = VarType::try_from_idx(analyzer, node_idx) { + let (name, storage) = match analyzer.node(node_idx) { + Node::Contract(c) => { + let name = c.name.clone().expect("Contract had no name").name; + (name, None) + } + Node::Function(f) => { + let name = f.name.clone().expect("Function had no name").name; + (name, None) + } + Node::Struct(s) => { + let name = s.name.clone().expect("Struct had no name").name; + (name, None) + } + Node::Enum(e) => { + let name = e.name.clone().expect("Enum had no name").name; + (name, None) + } + Node::Var(var) => { + let name = var.name.clone().expect("Variable had no name").name; + let storage = if var.in_contract { + if !var.attrs.iter().any(|attr| { + matches!(attr, solang_parser::pt::VariableAttribute::Constant(_)) + }) { + Some(StorageLocation::Storage(var.loc)) + } else { + None + } + } else { + None + }; + (name, storage) + } + Node::Ty(ty) => { + let name = &ty.name.name; + (name.clone(), None) + } + _ => return None, + }; + + Some(ContextVar { + loc: Some(loc), + name: name.clone(), + display_name: name, + storage, + is_tmp: false, + tmp_of: None, + dep_on: None, + is_symbolic: true, + is_return: false, + ty, + }) + } else { + None + } + } + + pub fn maybe_new_from_field( + analyzer: &impl GraphBackend, + loc: Loc, + parent_var: &ContextVar, + field: Field, + ) -> Option { + if let Some(ty) = VarType::try_from_idx(analyzer, field.ty) { + Some(ContextVar { + loc: Some(loc), + name: parent_var.name.clone() + + "." + + &field.name.clone().expect("Field had no name").name, + display_name: parent_var.name.clone() + + "." + + &field.name.expect("Field had no name").name, + storage: parent_var.storage, + is_tmp: false, + tmp_of: None, + dep_on: None, + is_symbolic: true, + is_return: false, + ty, + }) + } else { + None + } + } + + pub fn new_from_enum_variant( + analyzer: &mut impl AnalyzerBackend, + ctx: ContextNode, + loc: Loc, + enum_node: EnumNode, + variant: String, + ) -> Result { + let enum_name = enum_node.name(analyzer)?; + Ok(ContextVar { + loc: Some(loc), + name: format!("{}.{}_{}", enum_name, variant, ctx.new_tmp(analyzer)?), + display_name: format!("{}.{}", enum_name, variant), + storage: None, + is_tmp: false, + tmp_of: None, + dep_on: None, + is_symbolic: true, + is_return: false, + ty: VarType::User( + TypeNode::Enum(enum_node), + Some(enum_node.range_from_variant(variant, analyzer)?), + ), + }) + } + + pub fn new_from_index( + analyzer: &mut impl AnalyzerBackend, + loc: Loc, + parent_name: String, + parent_display_name: String, + parent_storage: StorageLocation, + parent_var: &BuiltInNode, + index: ContextVarNode, + ) -> Result { + Ok(ContextVar { + loc: Some(loc), + name: parent_name + "[" + &index.name(analyzer)? + "]", + display_name: parent_display_name + "[" + &index.display_name(analyzer)? 
+ "]", + storage: Some(parent_storage), + is_tmp: false, + tmp_of: None, + dep_on: None, + is_symbolic: index.underlying(analyzer)?.is_symbolic, + is_return: false, + ty: parent_var.dynamic_underlying_ty(analyzer)?, + }) + } + + pub fn new_from_func( + analyzer: &mut impl AnalyzerBackend, + func: FunctionNode, + ) -> Result { + Ok(ContextVar { + loc: Some(func.underlying(analyzer)?.loc), + name: func.name(analyzer)?, + display_name: func.name(analyzer)?, + storage: None, + is_tmp: false, + tmp_of: None, + dep_on: None, + is_symbolic: false, + is_return: false, + ty: VarType::User(TypeNode::Func(func), None), + }) + } + + pub fn maybe_new_from_func_param( + analyzer: &impl GraphBackend, + param: FunctionParam, + ) -> Option { + if let Some(name) = param.name { + if let Some(ty) = VarType::try_from_idx(analyzer, param.ty) { + Some(ContextVar { + loc: Some(param.loc), + name: name.name.clone(), + display_name: name.name, + storage: param.storage, + is_tmp: false, + tmp_of: None, + dep_on: None, + is_symbolic: true, + is_return: false, + ty, + }) + } else { + None + } + } else { + None + } + } + + pub fn maybe_new_from_func_ret( + analyzer: &impl GraphBackend, + ret: FunctionReturn, + ) -> Option { + if let Some(name) = ret.name { + if let Some(ty) = VarType::try_from_idx(analyzer, ret.ty) { + Some(ContextVar { + loc: Some(ret.loc), + name: name.name.clone(), + display_name: name.name, + storage: ret.storage, + is_tmp: false, + tmp_of: None, + dep_on: None, + is_symbolic: true, + is_return: true, + ty, + }) + } else { + None + } + } else { + None + } + } + + pub fn new_from_func_ret( + ctx: ContextNode, + analyzer: &mut impl AnalyzerBackend, + ret: FunctionReturn, + ) -> Result, GraphError> { + let (is_tmp, name) = if let Some(name) = ret.name { + (false, name.name) + } else { + (true, format!("tmp_func_ret_{}", ctx.new_tmp(analyzer)?)) + }; + + if let Some(ty) = VarType::try_from_idx(analyzer, ret.ty) { + Ok(Some(ContextVar { + loc: Some(ret.loc), + name: name.clone(), + display_name: name, + storage: ret.storage, + is_tmp, + tmp_of: None, + dep_on: None, + is_symbolic: true, + is_return: true, + ty, + })) + } else { + Ok(None) + } + } +} diff --git a/crates/graph/src/nodes/context/var/versioning.rs b/crates/graph/src/nodes/context/var/versioning.rs new file mode 100644 index 00000000..594dfa90 --- /dev/null +++ b/crates/graph/src/nodes/context/var/versioning.rs @@ -0,0 +1,237 @@ +use crate::{ + nodes::{ContextNode, ContextVarNode}, + ContextEdge, Edge, GraphBackend, GraphError, +}; + +use shared::NodeIdx; + +use petgraph::{visit::EdgeRef, Direction}; + +impl ContextVarNode { + pub fn latest_version(&self, analyzer: &impl GraphBackend) -> Self { + let mut latest = *self; + while let Some(next) = latest.next_version(analyzer) { + latest = next; + } + latest + } + + pub fn latest_version_less_than(&self, idx: NodeIdx, analyzer: &impl GraphBackend) -> Self { + let mut latest = *self; + while let Some(next) = latest.next_version(analyzer) { + if next.0 <= idx.index() { + latest = next; + } else { + break; + } + } + latest + } + + pub fn latest_version_in_ctx( + &self, + ctx: ContextNode, + analyzer: &impl GraphBackend, + ) -> Result { + if let Some(cvar) = ctx.var_by_name(analyzer, &self.name(analyzer)?) { + Ok(cvar.latest_version(analyzer)) + } else { + Ok(*self) + } + } + + pub fn latest_version_in_ctx_less_than( + &self, + idx: NodeIdx, + ctx: ContextNode, + analyzer: &impl GraphBackend, + ) -> Result { + if let Some(cvar) = ctx.var_by_name(analyzer, &self.name(analyzer)?) 
{
+            Ok(cvar.latest_version_less_than(idx, analyzer))
+        } else {
+            Ok(*self)
+        }
+    }
+
+    pub fn global_first_version(&self, analyzer: &impl GraphBackend) -> Self {
+        let mut global_first = self.first_version(analyzer);
+
+        let mut stack = vec![global_first];
+
+        while let Some(current_node) = stack.pop() {
+            let mut pushed = false;
+            if let Some(target_node) = analyzer
+                .graph()
+                .edges_directed(current_node.0.into(), Direction::Outgoing)
+                .filter(|edge| {
+                    matches!(
+                        edge.weight(),
+                        Edge::Context(ContextEdge::InheritedVariable)
+                            | Edge::Context(ContextEdge::InputVariable)
+                    )
+                })
+                .map(|edge| ContextVarNode::from(edge.target()))
+                .take(1)
+                .next()
+            {
+                global_first = target_node.first_version(analyzer);
+                stack.push(global_first);
+                pushed = true;
+            }
+
+            if !pushed {
+                continue;
+            }
+        }
+
+        global_first
+    }
+
+    pub fn first_version(&self, analyzer: &impl GraphBackend) -> Self {
+        let mut earlier = *self;
+        while let Some(prev) = earlier.previous_version(analyzer) {
+            earlier = prev;
+        }
+        earlier
+    }
+
+    pub fn num_versions(&self, analyzer: &impl GraphBackend) -> usize {
+        let mut count = 1;
+        let mut earlier = self.latest_version(analyzer);
+        while let Some(prev) = earlier.previous_version(analyzer) {
+            earlier = prev;
+            count += 1;
+        }
+        count
+    }
+
+    pub fn curr_version_num(&self, analyzer: &impl GraphBackend) -> usize {
+        let mut count = 0;
+        let mut earlier = self.first_version(analyzer);
+        while let Some(next) = earlier.next_version(analyzer) {
+            if next == *self {
+                break;
+            }
+            earlier = next;
+            count += 1;
+        }
+        count
+    }
+
+    pub fn global_curr_version_num(&self, analyzer: &impl GraphBackend) -> usize {
+        let mut stack = vec![*self];
+        let mut total_version_num = 0;
+
+        while let Some(current_node) = stack.pop() {
+            total_version_num += current_node.curr_version_num(analyzer);
+
+            let mut pushed = false;
+            if let Some(target_node) = analyzer
+                .graph()
+                .edges_directed(current_node.0.into(), Direction::Outgoing)
+                .filter(|edge| {
+                    matches!(
+                        edge.weight(),
+                        Edge::Context(ContextEdge::InheritedVariable)
+                            | Edge::Context(ContextEdge::InputVariable)
+                    )
+                })
+                .map(|edge| ContextVarNode::from(edge.target()))
+                .take(1)
+                .next()
+            {
+                stack.push(target_node);
+                pushed = true;
+            }
+
+            if !pushed {
+                continue;
+            }
+        }
+
+        total_version_num
+    }
+
+    pub fn all_versions(&self, analyzer: &impl GraphBackend) -> Vec<Self> {
+        let mut versions = vec![];
+        let mut earlier = self.latest_version(analyzer);
+        while let Some(prev) = earlier.previous_version(analyzer) {
+            versions.push(prev);
+            earlier = prev;
+        }
+        versions
+    }
+
+    pub fn next_version(&self, analyzer: &impl GraphBackend) -> Option<Self> {
+        analyzer
+            .graph()
+            .edges_directed(self.0.into(), Direction::Incoming)
+            .filter(|edge| Edge::Context(ContextEdge::Prev) == *edge.weight())
+            .map(|edge| ContextVarNode::from(edge.source()))
+            .take(1)
+            .next()
+    }
+
+    pub fn next_version_or_inheriteds(&self, analyzer: &impl GraphBackend) -> Vec<Self> {
+        analyzer
+            .graph()
+            .edges_directed(self.0.into(), Direction::Incoming)
+            .filter(|edge| {
+                Edge::Context(ContextEdge::Prev) == *edge.weight()
+                    || Edge::Context(ContextEdge::InheritedVariable) == *edge.weight()
+            })
+            .map(|edge| ContextVarNode::from(edge.source()))
+            .collect()
+    }
+
+    pub fn other_is_version(&self, other: &Self, analyzer: &impl GraphBackend) -> bool {
+        self.all_versions(analyzer).contains(other)
+    }
+
+    pub fn previous_version(&self, analyzer: &impl GraphBackend) -> Option<Self> {
+        analyzer
+            .graph()
+            .edges_directed(self.0.into(), Direction::Outgoing)
+            .filter(|edge| Edge::Context(ContextEdge::Prev) == *edge.weight())
+            .map(|edge| ContextVarNode::from(edge.target()))
+            .take(1)
+            .next()
+    }
+
+    pub fn previous_or_inherited_version(&self, analyzer: &impl GraphBackend) -> Option<Self> {
+        if let Some(prev) = self.previous_version(analyzer) {
+            Some(prev)
+        } else {
+            analyzer
+                .graph()
+                .edges_directed(self.0.into(), Direction::Outgoing)
+                .filter(|edge| Edge::Context(ContextEdge::InheritedVariable) == *edge.weight())
+                .map(|edge| ContextVarNode::from(edge.target()))
+                .take(1)
+                .next()
+        }
+    }
+
+    pub fn previous_global_version(&self, analyzer: &impl GraphBackend) -> Option<Self> {
+        if let Some(prev) = self.previous_version(analyzer) {
+            Some(prev)
+        } else if let Some(inherited) = analyzer
+            .graph()
+            .edges_directed(self.0.into(), Direction::Outgoing)
+            .filter(|edge| Edge::Context(ContextEdge::InheritedVariable) == *edge.weight())
+            .map(|edge| ContextVarNode::from(edge.target()))
+            .take(1)
+            .next()
+        {
+            Some(inherited)
+        } else {
+            analyzer
+                .graph()
+                .edges_directed(self.0.into(), Direction::Outgoing)
+                .filter(|edge| Edge::Context(ContextEdge::InputVariable) == *edge.weight())
+                .map(|edge| ContextVarNode::from(edge.target()))
+                .take(1)
+                .next()
+        }
+    }
+}
diff --git a/crates/graph/src/nodes/context/variables.rs b/crates/graph/src/nodes/context/variables.rs
new file mode 100644
index 00000000..20ccb24e
--- /dev/null
+++ b/crates/graph/src/nodes/context/variables.rs
@@ -0,0 +1,323 @@
+use crate::{
+    nodes::{ContextNode, ContextVarNode, ExprRet},
+    AnalyzerBackend, ContextEdge, Edge, GraphBackend, GraphError, Node,
+};
+
+use solang_parser::pt::Loc;
+
+use std::collections::BTreeMap;
+
+impl ContextNode {
+    pub fn input_variables(&self, analyzer: &impl GraphBackend) -> Vec<ContextVarNode> {
+        self.vars(analyzer)
+            .iter()
+            .filter_map(|(_, var)| {
+                if var.is_func_input(analyzer) {
+                    Some(var.first_version(analyzer))
+                } else {
+                    None
+                }
+            })
+            .collect()
+    }
+
+    /// Debug print the stack
+    pub fn debug_expr_stack(&self, analyzer: &impl GraphBackend) -> Result<(), GraphError> {
+        let underlying_mut = self.underlying(analyzer)?;
+        underlying_mut
+            .expr_ret_stack
+            .iter()
+            .enumerate()
+            .for_each(|(i, elem)| println!("{i}.
{}", elem.debug_str(analyzer))); + Ok(()) + } + + /// Add a variable to this context + pub fn add_var( + &self, + var: ContextVarNode, + analyzer: &mut impl AnalyzerBackend, + ) -> Result<(), GraphError> { + // var.cache_range(analyzer)?; + if var.underlying(analyzer)?.is_tmp { + let name = var.display_name(analyzer)?; + let vars = &mut self.underlying_mut(analyzer)?.cache.tmp_vars; + vars.insert(name, var); + Ok(()) + } else { + let name = var.name(analyzer)?; + let vars = &mut self.underlying_mut(analyzer)?.cache.vars; + vars.insert(name, var); + Ok(()) + } + } + + /// Returns whether the context's cache contains a variable (by name) + pub fn contains_var( + &self, + var_name: &str, + analyzer: &impl GraphBackend, + ) -> Result { + Ok(self.underlying(analyzer)?.cache.vars.contains_key(var_name)) + } + + /// Gets a variable by name in the context + pub fn var_by_name(&self, analyzer: &impl GraphBackend, name: &str) -> Option { + self.underlying(analyzer) + .unwrap() + .cache + .vars + .get(name) + .copied() + } + + pub fn tmp_var_by_name( + &self, + analyzer: &impl GraphBackend, + name: &str, + ) -> Option { + self.underlying(analyzer) + .unwrap() + .cache + .tmp_vars + .get(name) + .copied() + } + + /// Gets a variable by name or recurses up the relevant scopes/contexts until it is found + pub fn var_by_name_or_recurse( + &self, + analyzer: &impl GraphBackend, + name: &str, + ) -> Result, GraphError> { + if let Some(var) = self.var_by_name(analyzer, name) { + Ok(Some(var)) + } else if let Some(parent) = self.ancestor_in_fn(analyzer, self.associated_fn(analyzer)?)? { + parent.var_by_name_or_recurse(analyzer, name) + } else if let Some(parent) = self.underlying(analyzer)?.continuation_of { + parent.var_by_name_or_recurse(analyzer, name) + } else { + Ok(None) + } + } + + /// Gets all variables associated with a context + pub fn vars<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> &'a BTreeMap { + &self.underlying(analyzer).unwrap().cache.vars + } + + /// Gets all variables associated with a context + pub fn local_vars<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> &'a BTreeMap { + self.vars(analyzer) + } + + /// Gets the latest version of a variable associated with a context + pub fn latest_var_by_name( + &self, + analyzer: &impl GraphBackend, + name: &str, + ) -> Option { + self.var_by_name(analyzer, name) + .map(|var| var.latest_version(analyzer)) + } + + /// Reads the current temporary counter and increments the counter + pub fn new_tmp(&self, analyzer: &mut impl AnalyzerBackend) -> Result { + let context = self.underlying_mut(analyzer)?; + let ret = context.tmp_var_ctr; + context.tmp_var_ctr += 1; + Ok(ret) + } + + /// Push an expression return into the temporary stack + pub fn push_tmp_expr( + &self, + expr_ret: ExprRet, + analyzer: &mut impl AnalyzerBackend, + ) -> Result<(), GraphError> { + let underlying_mut = self.underlying_mut(analyzer)?; + underlying_mut.tmp_expr.push(Some(expr_ret)); + Ok(()) + } + + /// Append a new expression return to an expression return + /// currently in the temporary stack + pub fn append_tmp_expr( + &self, + expr_ret: ExprRet, + analyzer: &mut impl AnalyzerBackend, + ) -> Result<(), GraphError> { + let underlying_mut = self.underlying_mut(analyzer)?; + match underlying_mut.tmp_expr.pop() { + Some(Some(s @ ExprRet::Single(_))) => { + underlying_mut + .tmp_expr + .push(Some(ExprRet::Multi(vec![s, expr_ret]))); + } + Some(Some(s @ ExprRet::SingleLiteral(_))) => { + underlying_mut + .tmp_expr + .push(Some(ExprRet::Multi(vec![s, 
expr_ret]))); + } + Some(Some(ExprRet::Multi(ref mut inner))) => { + inner.push(expr_ret); + underlying_mut + .tmp_expr + .push(Some(ExprRet::Multi(inner.to_vec()))); + } + Some(Some(s @ ExprRet::Null)) => { + underlying_mut + .tmp_expr + .push(Some(ExprRet::Multi(vec![s, expr_ret]))); + } + Some(Some(ExprRet::CtxKilled(kind))) => { + underlying_mut.tmp_expr = vec![Some(ExprRet::CtxKilled(kind))]; + underlying_mut.expr_ret_stack = vec![ExprRet::CtxKilled(kind)]; + } + _ => { + underlying_mut.tmp_expr.push(Some(expr_ret)); + } + } + Ok(()) + } + + /// Pops a from the temporary ExprRet stack + pub fn pop_tmp_expr( + &self, + loc: Loc, + analyzer: &mut impl AnalyzerBackend, + ) -> Result, GraphError> { + let underlying_mut = self.underlying_mut(analyzer)?; + if let Some(Some(expr)) = underlying_mut.tmp_expr.pop() { + Ok(Some(self.maybe_move_expr(expr, loc, analyzer)?)) + } else { + Ok(None) + } + } + + /// Pushes an ExprRet to the stack + #[tracing::instrument(level = "trace", skip_all)] + pub fn push_expr( + &self, + expr_ret: ExprRet, + analyzer: &mut impl AnalyzerBackend, + ) -> Result<(), GraphError> { + tracing::trace!( + "pushing: {}, existing: {:?}, path: {}", + expr_ret.debug_str(analyzer), + self.underlying(analyzer)? + .expr_ret_stack + .iter() + .map(|i| i.debug_str(analyzer)) + .collect::>(), + self.path(analyzer) + ); + let underlying_mut = self.underlying_mut(analyzer)?; + underlying_mut.expr_ret_stack.push(expr_ret); + Ok(()) + } + + /// May move the inner variables of an ExprRet from an old context to this context + pub fn maybe_move_expr( + &self, + expr: ExprRet, + loc: Loc, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + tracing::trace!("moving expr to {}", self.path(analyzer)); + match expr { + ExprRet::SingleLiteral(var) => Ok(ExprRet::SingleLiteral( + self.maybe_move_var(var.into(), loc, analyzer)?.into(), + )), + ExprRet::Single(var) => Ok(ExprRet::Single( + self.maybe_move_var(var.into(), loc, analyzer)?.into(), + )), + ExprRet::Multi(inner) => Ok(ExprRet::Multi( + inner + .iter() + .map(|i| self.maybe_move_expr(i.clone(), loc, analyzer)) + .collect::>()?, + )), + e => Ok(e), + } + } + + /// May move the variable from an old context to this context + pub fn maybe_move_var( + &self, + var: ContextVarNode, + loc: Loc, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + let var = var.latest_version(analyzer); + if let Some(ctx) = var.maybe_ctx(analyzer) { + if ctx != *self { + tracing::trace!( + "moving var {} from {}", + ctx.path(analyzer), + self.path(analyzer) + ); + let mut new_cvar = var.latest_version(analyzer).underlying(analyzer)?.clone(); + new_cvar.loc = Some(loc); + + let new_cvarnode = analyzer.add_node(Node::ContextVar(new_cvar)); + analyzer.add_edge(new_cvarnode, *self, Edge::Context(ContextEdge::Variable)); + analyzer.add_edge( + new_cvarnode, + var.0, + Edge::Context(ContextEdge::InheritedVariable), + ); + Ok(new_cvarnode.into()) + } else { + Ok(var) + } + } else { + Ok(var) + } + } + + /// Pop the latest expression return off the stack + #[tracing::instrument(level = "trace", skip_all)] + pub fn pop_expr_latest( + &self, + loc: Loc, + analyzer: &mut impl AnalyzerBackend, + ) -> Result, GraphError> { + let underlying_mut = self.underlying_mut(analyzer)?; + if let Some(elem) = underlying_mut.expr_ret_stack.pop() { + tracing::trace!( + "popping var {} from: {}", + elem.debug_str(analyzer), + self.path(analyzer) + ); + Ok(Some(self.maybe_move_expr(elem, loc, analyzer)?)) + } else { + Ok(None) + } + } + + /// Gets local vars that were assigned 
from a function return + pub fn vars_assigned_from_fn_ret(&self, analyzer: &impl GraphBackend) -> Vec { + self.local_vars(analyzer) + .iter() + .flat_map(|(_name, var)| var.return_assignments(analyzer)) + .collect() + } + + /// Gets local vars that were assigned from an external function return + pub fn vars_assigned_from_ext_fn_ret( + &self, + analyzer: &impl GraphBackend, + ) -> Vec { + self.local_vars(analyzer) + .iter() + .flat_map(|(_name, var)| var.ext_return_assignments(analyzer)) + .collect() + } +} diff --git a/crates/graph/src/nodes/context/versioning.rs b/crates/graph/src/nodes/context/versioning.rs new file mode 100644 index 00000000..0af39443 --- /dev/null +++ b/crates/graph/src/nodes/context/versioning.rs @@ -0,0 +1,533 @@ +use crate::nodes::Context; +use crate::ContextEdge; +use crate::Edge; +use crate::{ + nodes::{CallFork, ContextNode, FunctionNode, KilledKind}, + AnalyzerBackend, GraphBackend, GraphError, Node, +}; +use petgraph::visit::EdgeRef; +use petgraph::Direction; + +use solang_parser::pt::Loc; + +impl ContextNode { + /// Query whether this context has a parent + pub fn has_parent(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self.underlying(analyzer)?.parent_ctx.is_some()) + } + + /// Sets the continuation context + pub fn set_continuation_ctx( + &self, + analyzer: &mut impl AnalyzerBackend, + continuation_of_ctx: ContextNode, + ty: &'static str, + ) -> Result<(), GraphError> { + assert!( + self.0 > continuation_of_ctx.0, + "{} {}", + self.0, + continuation_of_ctx.0 + ); + + let parent_list = self.parent_list(analyzer)?; + // if `continuation_of` already has a continuation, build off that continuation if it is in the parent list + if let Some(cont) = analyzer + .graph() + .edges_directed(continuation_of_ctx.into(), Direction::Incoming) + .find(|edge| { + matches!(edge.weight(), Edge::Context(ContextEdge::Continue(_))) + && parent_list.contains(&ContextNode::from(edge.source())) + }) + .map(|edge| ContextNode::from(edge.source())) + { + self.set_continuation_ctx(analyzer, cont, ty) + } else { + analyzer.add_edge( + *self, + continuation_of_ctx, + Edge::Context(ContextEdge::Continue(ty)), + ); + self.underlying_mut(analyzer)?.continuation_of = Some(continuation_of_ctx); + self.underlying_mut(analyzer)?.cache.vars = + continuation_of_ctx.underlying(analyzer)?.cache.vars.clone(); + Ok(()) + } + } + + /// Gets the first ancestor of this context + pub fn first_ancestor( + &self, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + if let Some(first_ancestor) = self.underlying(analyzer)?.cache.first_ancestor { + Ok(first_ancestor) + } else if let Some(parent) = self.underlying(analyzer)?.parent_ctx { + let first = parent.first_ancestor(analyzer)?; + self.underlying_mut(analyzer)?.cache.first_ancestor = Some(first); + Ok(first) + } else { + Ok(*self) + } + } + + /// Gets the subcontexts of this context + pub fn subcontexts(&self, analyzer: &impl GraphBackend) -> Vec { + let underlying = self.underlying(analyzer).unwrap(); + match underlying.child { + Some(CallFork::Call(c)) => vec![c], + Some(CallFork::Fork(w1, w2)) => vec![w1, w2], + None => vec![], + } + } + + /// Get the first ancestor context that is in the same function + pub fn ancestor_in_fn( + &self, + analyzer: &impl GraphBackend, + associated_fn: FunctionNode, + ) -> Result, GraphError> { + if let Some(ret) = self.underlying(analyzer)?.returning_ctx { + if ret.associated_fn(analyzer)? 
== associated_fn { + return Ok(Some(ret)); + } + } + + if let Some(parent) = self.underlying(analyzer)?.parent_ctx { + if parent.associated_fn(analyzer)? == associated_fn { + Ok(Some(parent)) + } else if let Some(mod_state) = &parent.underlying(analyzer)?.modifier_state { + if mod_state.parent_fn == associated_fn { + Ok(Some(parent)) + } else { + parent.ancestor_in_fn(analyzer, associated_fn) + } + } else { + parent.ancestor_in_fn(analyzer, associated_fn) + } + } else { + Ok(None) + } + } + + /// Returns all forks associated with the context + pub fn calls(&self, analyzer: &impl GraphBackend) -> Result, GraphError> { + let descendents = self.descendents(analyzer)?; + Ok(descendents + .into_iter() + .filter_map(|c| c.maybe_call()) + .collect()) + } + + /// Returns tail contexts associated with the context + pub fn live_edges(&self, analyzer: &impl GraphBackend) -> Result, GraphError> { + if let Some(child) = self.underlying(analyzer)?.child { + let mut lineage = vec![]; + match child { + CallFork::Call(call) => { + let call_edges = call.live_edges(analyzer)?; + if call_edges.is_empty() && !call.is_ended(analyzer)? { + lineage.push(call) + } else { + lineage.extend(call_edges); + } + } + CallFork::Fork(w1, w2) => { + let fork_edges = w1.live_edges(analyzer)?; + if fork_edges.is_empty() && !w1.is_ended(analyzer)? { + lineage.push(w1) + } else { + lineage.extend(fork_edges); + } + + let fork_edges = w2.live_edges(analyzer)?; + if fork_edges.is_empty() && !w2.is_ended(analyzer)? { + lineage.push(w2) + } else { + lineage.extend(fork_edges); + } + } + } + Ok(lineage) + } else { + Ok(vec![]) + } + } + + /// Gets all reverted descendents + pub fn reverted_edges(&self, analyzer: &impl GraphBackend) -> Result, GraphError> { + if let Some(child) = self.underlying(analyzer)?.child { + let mut lineage = vec![]; + match child { + CallFork::Call(call) => { + let call_edges = call.reverted_edges(analyzer)?; + if call_edges.is_empty() && call.is_killed(analyzer)? { + lineage.push(call) + } else { + lineage.extend(call_edges); + } + } + CallFork::Fork(w1, w2) => { + let fork_edges = w1.reverted_edges(analyzer)?; + if fork_edges.is_empty() && w1.is_killed(analyzer)? { + lineage.push(w1) + } else { + lineage.extend(fork_edges); + } + + let fork_edges = w2.reverted_edges(analyzer)?; + if fork_edges.is_empty() && w2.is_killed(analyzer)? { + lineage.push(w2) + } else { + lineage.extend(fork_edges); + } + } + } + Ok(lineage) + } else { + Ok(vec![]) + } + } + + /// Gets all successful descendents + pub fn successful_edges(&self, analyzer: &impl GraphBackend) -> Result, GraphError> { + if let Some(child) = self.underlying(analyzer)?.child { + let mut lineage = vec![]; + match child { + CallFork::Call(call) => { + let call_edges = call.successful_edges(analyzer)?; + if call_edges.is_empty() && !call.is_killed(analyzer)? { + lineage.push(call) + } else { + lineage.extend(call_edges); + } + } + CallFork::Fork(w1, w2) => { + let fork_edges = w1.successful_edges(analyzer)?; + if fork_edges.is_empty() && !w1.is_killed(analyzer)? { + lineage.push(w1) + } else { + lineage.extend(fork_edges); + } + + let fork_edges = w2.successful_edges(analyzer)?; + if fork_edges.is_empty() && !w2.is_killed(analyzer)? 
{
+                        lineage.push(w2)
+                    } else {
+                        lineage.extend(fork_edges);
+                    }
+                }
+            }
+            Ok(lineage)
+        } else {
+            Ok(vec![])
+        }
+    }
+
+    /// Returns the current number of live edges
+    pub fn number_of_live_edges(&self, analyzer: &impl GraphBackend) -> Result<usize, GraphError> {
+        Ok(self.underlying(analyzer)?.number_of_live_edges)
+    }
+
+    /// Returns tail contexts associated with the context
+    pub fn all_edges(&self, analyzer: &impl GraphBackend) -> Result<Vec<ContextNode>, GraphError> {
+        if let Some(child) = self.underlying(analyzer)?.child {
+            let mut lineage = vec![];
+            match child {
+                CallFork::Call(call) => {
+                    let call_edges = call.all_edges(analyzer)?;
+                    if call_edges.is_empty() {
+                        lineage.push(call)
+                    } else {
+                        lineage.extend(call_edges);
+                    }
+                }
+                CallFork::Fork(w1, w2) => {
+                    let fork_edges = w1.all_edges(analyzer)?;
+                    if fork_edges.is_empty() {
+                        lineage.push(w1)
+                    } else {
+                        lineage.extend(fork_edges);
+                    }
+
+                    let fork_edges = w2.all_edges(analyzer)?;
+                    if fork_edges.is_empty() {
+                        lineage.push(w2)
+                    } else {
+                        lineage.extend(fork_edges);
+                    }
+                }
+            }
+            Ok(lineage)
+        } else {
+            Ok(vec![])
+        }
+    }
+
+    /// Gets all descendents recursively
+    pub fn descendents(&self, analyzer: &impl GraphBackend) -> Result<Vec<CallFork>, GraphError> {
+        if let Some(child) = self.underlying(analyzer)?.child {
+            let mut descendents = vec![child];
+            match child {
+                CallFork::Call(c) => descendents.extend(c.descendents(analyzer)?),
+                CallFork::Fork(w1, w2) => {
+                    descendents.extend(w1.descendents(analyzer)?);
+                    descendents.extend(w2.descendents(analyzer)?);
+                }
+            }
+            Ok(descendents)
+        } else {
+            Ok(vec![])
+        }
+    }
+
+    /// Adds a fork to the context
+    pub fn set_child_fork(
+        &self,
+        w1: ContextNode,
+        w2: ContextNode,
+        analyzer: &mut impl AnalyzerBackend,
+    ) -> Result<(), GraphError> {
+        assert!(matches!(analyzer.node(w1), Node::Context(_)));
+        assert!(matches!(analyzer.node(w2), Node::Context(_)));
+        assert!(*self != w1 && *self != w2, "Tried to set child to self");
+        let context = self.underlying_mut(analyzer)?;
+        if !context.set_child_fork(w1, w2) {
+            let child_str = match context.child {
+                Some(CallFork::Fork(w1, w2)) => {
+                    format!("fork {{ {}, {} }}", w1.path(analyzer), w2.path(analyzer))
+                }
+                Some(CallFork::Call(call)) => format!("call {{ {} }}", call.path(analyzer)),
+                None => unreachable!(),
+            };
+            Err(GraphError::ChildRedefinition(panic!("This is a bug.
Tried to redefine a child context, parent:\n{}, current child:\n{},\nnew child: Fork({}, {})", + self.path(analyzer), + child_str, + w1.path(analyzer), + w2.path(analyzer), + ))) + } else { + Ok(()) + } + } + + pub fn set_join_forks( + &self, + loc: Loc, + end_worlds: Vec, + analyzer: &mut impl AnalyzerBackend, + ) -> Result, GraphError> { + // if we have 4 worlds we need to represent + // we need to construct a tree like this + // a + // | + // |----------| + // a1 a2 + // | | + // |------| |------| + // a3 a4 a5 a6 + // + // each fork adds 1 world + + let _edges = self.all_edges(analyzer)?; + let mut stack = std::collections::VecDeque::new(); + stack.push_front(*self); + + for _ in 0..end_worlds.len().saturating_sub(1) { + let curr = stack.pop_front().unwrap(); + + let left_ctx = Context::new_subctx( + curr, + None, + loc, + Some("join_left"), + None, + false, + analyzer, + None, + )?; + let left_subctx = ContextNode::from(analyzer.add_node(Node::Context(left_ctx))); + let right_ctx = Context::new_subctx( + curr, + None, + loc, + Some("join_right"), + None, + false, + analyzer, + None, + )?; + let right_subctx = ContextNode::from(analyzer.add_node(Node::Context(right_ctx))); + curr.set_child_fork(left_subctx, right_subctx, analyzer)?; + left_subctx.set_continuation_ctx(analyzer, curr, "join_left")?; + right_subctx.set_continuation_ctx(analyzer, curr, "join_right")?; + + stack.push_back(left_subctx); + stack.push_back(right_subctx); + } + + self.all_edges(analyzer) + } + + /// Adds a child to the context + pub fn set_child_call( + &self, + call: ContextNode, + analyzer: &mut impl AnalyzerBackend, + ) -> Result<(), GraphError> { + assert!(matches!(analyzer.node(call), Node::Context(_))); + assert!(*self != call, "Tried to set child to self"); + let context = self.underlying_mut(analyzer)?; + if !context.set_child_call(call) { + let child_str = match context.child { + Some(CallFork::Fork(w1, w2)) => { + format!("fork {{ {}, {} }}", w1.path(analyzer), w2.path(analyzer)) + } + Some(CallFork::Call(call)) => format!("call {{ {} }}", call.path(analyzer)), + None => unreachable!(), + }; + tracing::trace!("Error setting child as a call"); + Err(GraphError::ChildRedefinition(panic!( + "This is a bug. Tried to redefine a child context, parent: {}, current child: {}, new child: {}", + self.path(analyzer), + child_str, + call.path(analyzer) + ) + )) + } else { + Ok(()) + } + } + + /// Removes the child of this context + pub fn delete_child(&self, analyzer: &mut impl AnalyzerBackend) -> Result<(), GraphError> { + if let Some(child) = self.underlying(analyzer)?.child { + match child { + CallFork::Fork(w1, w2) => { + w1.propogate_end(analyzer)?; + w2.propogate_end(analyzer)?; + } + CallFork::Call(c) => { + c.propogate_end(analyzer)?; + } + } + } + let context = self.underlying_mut(analyzer)?; + context.delete_child(); + Ok(()) + } + + /// Kills the context by denoting it as killed. 
Recurses up the contexts and kills + /// parent contexts if all subcontexts of that context are killed + pub fn kill( + &self, + analyzer: &mut impl AnalyzerBackend, + kill_loc: Loc, + kill_kind: KilledKind, + ) -> Result<(), GraphError> { + tracing::trace!("killing: {}", self.path(analyzer)); + if let Some(child) = self.underlying(analyzer)?.child { + match child { + CallFork::Call(call) => { + if !call.underlying(analyzer)?.ret.is_empty() { + return Ok(()); + } + call.kill(analyzer, kill_loc, kill_kind)?; + } + CallFork::Fork(w1, w2) => { + if !w1.underlying(analyzer)?.ret.is_empty() { + return Ok(()); + } + + if !w2.underlying(analyzer)?.ret.is_empty() { + return Ok(()); + } + + w1.kill(analyzer, kill_loc, kill_kind)?; + w2.kill(analyzer, kill_loc, kill_kind)?; + } + } + } + + let context = self.underlying_mut(analyzer)?; + let parent = context.parent_ctx; + if context.killed.is_none() { + context.killed = Some((kill_loc, kill_kind)); + } + + if let Some(parent_ctx) = parent { + parent_ctx.end_if_all_forks_ended(analyzer, kill_loc, kill_kind)?; + } + + self.propogate_end(analyzer)?; + + Ok(()) + } + + /// Kills if and only if all subcontexts are killed + pub fn end_if_all_forks_ended( + &self, + analyzer: &mut impl AnalyzerBackend, + kill_loc: Loc, + kill_kind: KilledKind, + ) -> Result<(), GraphError> { + let all_edges = self.all_edges(analyzer)?; + let reverted_edges = self.reverted_edges(analyzer)?; + if reverted_edges.len() == all_edges.len() { + tracing::trace!("killing recursively: {}", self.path(analyzer)); + let context = self.underlying_mut(analyzer)?; + if context.ret.is_empty() { + if context.killed.is_none() { + context.killed = Some((kill_loc, kill_kind)); + } + if let Some(parent_ctx) = context.parent_ctx { + parent_ctx.end_if_all_forks_ended(analyzer, kill_loc, kill_kind)?; + } + } + } + Ok(()) + } + + /// Gets parent list + pub fn parent_list( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + let context = self.underlying(analyzer)?; + let mut parents = vec![]; + if let Some(parent_ctx) = context.parent_ctx { + parents.push(parent_ctx); + parents.extend(parent_ctx.parent_list(analyzer)?); + } + Ok(parents) + } + + /// Gets all calls recursively + pub fn recursive_calls( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + // Ok( + let calls = self.calls(analyzer)?; + Ok(calls + .iter() + .flat_map(|call| { + let mut inner_calls = call.recursive_calls(analyzer).unwrap(); + inner_calls.insert(0, *call); + inner_calls + }) + .collect::>()) + } + + /// Gets the lineage for a context + /// A lineage is of the form `[ancestor N, .. , ancestor0, SELF, call0, .., call N]`. 
It + /// gives the user a full picture of control flow + pub fn lineage( + &self, + _analyzer: &impl GraphBackend, + _entry: bool, + ) -> Result, GraphError> { + todo!() + } +} diff --git a/shared/src/nodes/contract_ty.rs b/crates/graph/src/nodes/contract_ty.rs similarity index 58% rename from shared/src/nodes/contract_ty.rs rename to crates/graph/src/nodes/contract_ty.rs index e9654238..fc9ebb78 100644 --- a/shared/src/nodes/contract_ty.rs +++ b/crates/graph/src/nodes/contract_ty.rs @@ -1,14 +1,13 @@ -use crate::analyzer::GraphError; -use crate::analyzer::Search; -use crate::analyzer::{AnalyzerLike, GraphLike}; -use crate::AsDotStr; -use crate::Edge; -use crate::FunctionNode; -use crate::Node; -use crate::NodeIdx; -use crate::StructNode; +use crate::{ + nodes::{Concrete, FunctionNode, SourceUnitNode, SourceUnitPartNode, StructNode, VarNode}, + range::elem::Elem, + AnalyzerBackend, AsDotStr, Edge, GraphBackend, GraphError, Node, +}; +use shared::{NodeIdx, RangeArena, Search}; + use petgraph::{visit::EdgeRef, Direction}; use solang_parser::pt::{ContractDefinition, ContractTy, Identifier, Loc}; + use std::collections::BTreeMap; /// An index in the graph that references a [`Contract`] node @@ -16,7 +15,11 @@ use std::collections::BTreeMap; pub struct ContractNode(pub usize); impl AsDotStr for ContractNode { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> String { let underlying = self.underlying(analyzer).unwrap(); format!( "{} {}", @@ -32,16 +35,40 @@ impl AsDotStr for ContractNode { impl ContractNode { /// Gets the underlying node data for the [`Contract`] - pub fn underlying<'a>(&self, analyzer: &'a impl GraphLike) -> Result<&'a Contract, GraphError> { + pub fn underlying<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result<&'a Contract, GraphError> { match analyzer.node(*self) { Node::Contract(contract) => Ok(contract), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), + e => Err(GraphError::NodeConfusion(format!( + "Node type confusion: expected node to be Contract but it was: {e:?}" + ))), + } + } + + /// Gets the underlying node data for the [`Contract`] as mutable + pub fn underlying_mut<'a>( + &self, + analyzer: &'a mut impl GraphBackend, + ) -> Result<&'a mut Contract, GraphError> { + match analyzer.node_mut(*self) { + Node::Contract(contract) => Ok(contract), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), e => Err(GraphError::NodeConfusion(format!( "Node type confusion: expected node to be Contract but it was: {e:?}" ))), } } - pub fn super_contracts(&self, analyzer: &impl GraphLike) -> Vec { + pub fn super_contracts(&self, analyzer: &impl GraphBackend) -> Vec { analyzer .graph() .edges_directed((*self).into(), Direction::Incoming) @@ -50,10 +77,10 @@ impl ContractNode { .collect() } - pub fn inherit(&self, inherits: Vec, analyzer: &mut (impl GraphLike + AnalyzerLike)) { + pub fn inherit(&self, inherits: Vec, analyzer: &mut impl AnalyzerBackend) { let src = self.associated_source(analyzer); let all_contracts = analyzer.search_children_include_via( - src, + src.into(), &Edge::Contract, &[ Edge::Import, @@ -79,11 +106,11 @@ impl ContractNode { }); } - pub fn direct_inherited_contracts(&self, analyzer: &impl GraphLike) -> Vec { + pub fn direct_inherited_contracts(&self, analyzer: &impl GraphBackend) -> Vec { 
self.underlying(analyzer).unwrap().inherits.clone() } - pub fn all_inherited_contracts(&self, analyzer: &impl GraphLike) -> Vec { + pub fn all_inherited_contracts(&self, analyzer: &impl GraphBackend) -> Vec { let mut inherits = self.direct_inherited_contracts(analyzer); inherits.extend( inherits @@ -95,7 +122,7 @@ impl ContractNode { } /// Gets the name from the underlying node data for the [`Contract`] - pub fn name(&self, analyzer: &impl GraphLike) -> Result { + pub fn name(&self, analyzer: &impl GraphBackend) -> Result { Ok(self .underlying(analyzer)? .name @@ -105,7 +132,7 @@ impl ContractNode { } /// Tries to Get the name from the underlying node data for the [`Contract`] - pub fn maybe_name(&self, analyzer: &impl GraphLike) -> Result, GraphError> { + pub fn maybe_name(&self, analyzer: &impl GraphBackend) -> Result, GraphError> { if let Some(ident) = self.underlying(analyzer)?.name.clone() { Ok(Some(ident.name)) } else { @@ -114,12 +141,12 @@ impl ContractNode { } /// Gets the sourcecode location from the underlying node data for the [`Contract`] - pub fn loc(&self, analyzer: &impl GraphLike) -> Result { + pub fn loc(&self, analyzer: &impl GraphBackend) -> Result { Ok(self.underlying(analyzer)?.loc) } /// Gets all associated functions from the underlying node data for the [`Contract`] - pub fn funcs(&self, analyzer: &(impl GraphLike + Search)) -> Vec { + pub fn funcs(&self, analyzer: &(impl GraphBackend + Search)) -> Vec { analyzer .search_children_depth(self.0.into(), &Edge::Func, 1, 0) .into_iter() @@ -127,9 +154,38 @@ impl ContractNode { .collect() } + pub fn constructor(&self, analyzer: &(impl GraphBackend + Search)) -> Option { + analyzer + .search_children_depth(self.0.into(), &Edge::Constructor, 1, 0) + .into_iter() + .map(FunctionNode::from) + .take(1) + .next() + } + + /// Gets all associated storage vars from the underlying node data for the [`Contract`] + pub fn direct_storage_vars(&self, analyzer: &(impl GraphBackend + Search)) -> Vec { + analyzer + .search_children_depth(self.0.into(), &Edge::Var, 1, 0) + .into_iter() + .map(VarNode::from) + .collect() + } + + /// Gets all associated storage vars from the underlying node data for the [`Contract`] + pub fn all_storage_vars(&self, analyzer: &(impl GraphBackend + Search)) -> Vec { + let mut ret = self + .all_inherited_contracts(analyzer) + .iter() + .flat_map(|contract| contract.direct_storage_vars(analyzer)) + .collect::>(); + ret.extend(self.direct_storage_vars(analyzer)); + ret + } + pub fn funcs_mapping( &self, - analyzer: &(impl GraphLike + Search + AnalyzerLike), + analyzer: &mut (impl Search + AnalyzerBackend), ) -> BTreeMap { analyzer .search_children_depth(self.0.into(), &Edge::Func, 1, 0) @@ -143,25 +199,31 @@ impl ContractNode { pub fn linearized_functions( &self, - analyzer: &(impl GraphLike + Search + AnalyzerLike), - ) -> BTreeMap { - let mut mapping = self.funcs_mapping(analyzer); - self.direct_inherited_contracts(analyzer) - .iter() - .for_each(|inherited| { - inherited - .linearized_functions(analyzer) - .iter() - .for_each(|(name, func)| { - if !mapping.contains_key(name) { - mapping.insert(name.to_string(), *func); - } - }); - }); - mapping + analyzer: &mut (impl Search + AnalyzerBackend), + ) -> Result, GraphError> { + if let Some(funcs) = &self.underlying(analyzer)?.cached_functions { + Ok(funcs.clone()) + } else { + let mut mapping = self.funcs_mapping(analyzer); + self.direct_inherited_contracts(analyzer) + .iter() + .for_each(|inherited| { + inherited + .linearized_functions(analyzer) + .unwrap() + 
.iter() + .for_each(|(name, func)| { + if !mapping.contains_key(name) { + mapping.insert(name.to_string(), *func); + } + }); + }); + self.underlying_mut(analyzer)?.cached_functions = Some(mapping.clone()); + Ok(mapping) + } } - pub fn structs(&self, analyzer: &(impl GraphLike + Search)) -> Vec { + pub fn structs(&self, analyzer: &(impl GraphBackend + Search)) -> Vec { analyzer .search_children_depth(self.0.into(), &Edge::Struct, 1, 0) .into_iter() @@ -169,8 +231,20 @@ impl ContractNode { .collect() } + pub fn visible_structs(&self, analyzer: &(impl GraphBackend + Search)) -> Vec { + let mut structs = self.structs(analyzer); + let inherited = self.all_inherited_contracts(analyzer); + structs.extend( + inherited + .iter() + .flat_map(|c| c.structs(analyzer)) + .collect::>(), + ); + structs + } + /// Gets all associated modifiers from the underlying node data for the [`Contract`] - pub fn modifiers(&self, analyzer: &(impl GraphLike + Search)) -> Vec { + pub fn modifiers(&self, analyzer: &(impl GraphBackend + Search)) -> Vec { analyzer .search_children_depth(self.0.into(), &Edge::Modifier, 1, 0) .into_iter() @@ -178,17 +252,22 @@ impl ContractNode { .collect() } - pub fn associated_source_unit_part(&self, analyzer: &(impl GraphLike + Search)) -> NodeIdx { + pub fn associated_source_unit_part( + &self, + analyzer: &(impl GraphBackend + Search), + ) -> SourceUnitPartNode { analyzer .search_for_ancestor(self.0.into(), &Edge::Contract) .expect("detached contract") + .into() } - pub fn associated_source(&self, analyzer: &(impl GraphLike + Search)) -> NodeIdx { + pub fn associated_source(&self, analyzer: &(impl GraphBackend + Search)) -> SourceUnitNode { let sup = self.associated_source_unit_part(analyzer); analyzer - .search_for_ancestor(sup, &Edge::Part) + .search_for_ancestor(sup.into(), &Edge::Part) .expect("detached source unit part") + .into() } } @@ -215,6 +294,8 @@ pub struct Contract { pub name: Option, /// A list of contracts that this contract inherits (TODO: inheritance linearization) pub inherits: Vec, + /// Cached linearized functions + pub cached_functions: Option>, } impl From for Node { @@ -228,8 +309,8 @@ impl Contract { pub fn from_w_imports( con: ContractDefinition, source: NodeIdx, - imports: &[(Option, String, String, usize)], - analyzer: &impl GraphLike, + imports: &[Option], + analyzer: &impl GraphBackend, ) -> (Contract, Vec) { let mut inherits = vec![]; let mut unhandled_inherits = vec![]; @@ -249,7 +330,7 @@ impl Contract { } if !found { - for entry in imports.iter().filter_map(|import| import.0) { + for entry in imports.iter().filter_map(|&import| import) { for contract in analyzer .search_children_exclude_via(entry, &Edge::Contract, &[Edge::Func]) .into_iter() @@ -274,6 +355,7 @@ impl Contract { ty: con.ty, name: con.name, inherits, + cached_functions: None, }, unhandled_inherits, ) diff --git a/shared/src/nodes/enum_ty.rs b/crates/graph/src/nodes/enum_ty.rs similarity index 78% rename from shared/src/nodes/enum_ty.rs rename to crates/graph/src/nodes/enum_ty.rs index eaf55308..e02b0cdb 100644 --- a/shared/src/nodes/enum_ty.rs +++ b/crates/graph/src/nodes/enum_ty.rs @@ -1,10 +1,9 @@ -use crate::analyzer::GraphError; -use crate::analyzer::GraphLike; -use crate::range::SolcRange; -use crate::AsDotStr; -use crate::Concrete; -use crate::Node; -use crate::NodeIdx; +use crate::{ + nodes::Concrete, range::elem::Elem, AsDotStr, GraphBackend, GraphError, Node, SolcRange, +}; + +use shared::{NodeIdx, RangeArena}; + use ethers_core::types::U256; use 
solang_parser::pt::{EnumDefinition, Identifier, Loc}; @@ -13,7 +12,11 @@ use solang_parser::pt::{EnumDefinition, Identifier, Loc}; pub struct EnumNode(pub usize); impl AsDotStr for EnumNode { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> String { let underlying = self.underlying(analyzer).unwrap(); format!( "enum {} {{ {} }}", @@ -29,9 +32,13 @@ impl AsDotStr for EnumNode { impl EnumNode { /// Gets the underlying node data for the [`Enum`] - pub fn underlying<'a>(&self, analyzer: &'a impl GraphLike) -> Result<&'a Enum, GraphError> { + pub fn underlying<'a>(&self, analyzer: &'a impl GraphBackend) -> Result<&'a Enum, GraphError> { match analyzer.node(*self) { Node::Enum(e) => Ok(e), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), e => Err(GraphError::NodeConfusion(format!( "Node type confusion: expected node to be Contract but it was: {e:?}" ))), @@ -39,7 +46,7 @@ impl EnumNode { } /// Gets the name of the enum from the underlying node data for the [`Enum`] - pub fn name(&self, analyzer: &impl GraphLike) -> Result { + pub fn name(&self, analyzer: &impl GraphBackend) -> Result { Ok(self .underlying(analyzer)? .name @@ -48,13 +55,13 @@ impl EnumNode { .name) } - pub fn variants(&self, analyzer: &impl GraphLike) -> Result, GraphError> { + pub fn variants(&self, analyzer: &impl GraphBackend) -> Result, GraphError> { Ok(self.underlying(analyzer)?.variants()) } pub fn maybe_default_range( &self, - analyzer: &impl GraphLike, + analyzer: &impl GraphBackend, ) -> Result, GraphError> { let variants = self.variants(analyzer)?; if !variants.is_empty() { @@ -69,7 +76,7 @@ impl EnumNode { pub fn range_from_variant( &self, variant: String, - analyzer: &impl GraphLike, + analyzer: &impl GraphBackend, ) -> Result { let variants = self.variants(analyzer)?; assert!(variants.contains(&variant)); diff --git a/shared/src/nodes/err_ty.rs b/crates/graph/src/nodes/err_ty.rs similarity index 74% rename from shared/src/nodes/err_ty.rs rename to crates/graph/src/nodes/err_ty.rs index 1830035d..4ecd30e6 100644 --- a/shared/src/nodes/err_ty.rs +++ b/crates/graph/src/nodes/err_ty.rs @@ -1,15 +1,20 @@ -use crate::analyzer::GraphError; -use crate::analyzer::{AnalyzerLike, GraphLike}; -use crate::AsDotStr; -use crate::{Node, NodeIdx}; +use crate::{ + nodes::Concrete, range::elem::Elem, AnalyzerBackend, AsDotStr, GraphBackend, GraphError, Node, +}; + +use shared::{NodeIdx, RangeArena}; use solang_parser::pt::{ErrorDefinition, ErrorParameter, Expression, Identifier, Loc}; #[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] pub struct ErrorNode(pub usize); impl ErrorNode { - pub fn underlying<'a>(&self, analyzer: &'a impl GraphLike) -> Result<&'a Error, GraphError> { + pub fn underlying<'a>(&self, analyzer: &'a impl GraphBackend) -> Result<&'a Error, GraphError> { match analyzer.node(*self) { Node::Error(err) => Ok(err), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), e => Err(GraphError::NodeConfusion(format!( "Node type confusion: expected node to be Var but it was: {e:?}" ))), @@ -17,7 +22,11 @@ impl ErrorNode { } } impl AsDotStr for ErrorNode { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> String { let underlying = 
self.underlying(analyzer).unwrap(); format!( "error {}", @@ -93,12 +102,13 @@ impl From for Node { impl ErrorParam { pub fn new( - analyzer: &mut (impl GraphLike + AnalyzerLike), + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, param: ErrorParameter, ) -> Self { ErrorParam { loc: param.loc, - ty: analyzer.parse_expr(¶m.ty, None), + ty: analyzer.parse_expr(arena, ¶m.ty, None), name: param.name, } } diff --git a/shared/src/nodes/func_ty.rs b/crates/graph/src/nodes/func_ty.rs similarity index 70% rename from shared/src/nodes/func_ty.rs rename to crates/graph/src/nodes/func_ty.rs index cbcd9f12..cf60328b 100644 --- a/shared/src/nodes/func_ty.rs +++ b/crates/graph/src/nodes/func_ty.rs @@ -1,40 +1,60 @@ -use crate::analyzer::AsDotStr; -use crate::analyzer::GraphError; -use crate::analyzer::Search; -use crate::context::{ContextEdge, ContextNode}; -use crate::nodes::ContractNode; -use crate::range::SolcRange; -use crate::Edge; -use crate::VarType; use crate::{ - analyzer::{AnalyzerLike, GraphLike}, - Node, NodeIdx, + nodes::Concrete, + nodes::{ContextNode, ContractNode, SourceUnitNode, SourceUnitPartNode}, + range::elem::Elem, + AnalyzerBackend, AsDotStr, ContextEdge, Edge, GraphBackend, GraphError, Node, SolcRange, + VarType, }; + +use shared::{NodeIdx, RangeArena, Search, StorageLocation}; + use petgraph::{visit::EdgeRef, Direction}; -use solang_parser::helpers::CodeLocation; -use solang_parser::pt::ParameterList; -use solang_parser::pt::Statement; -use solang_parser::pt::Type; -use solang_parser::pt::VariableDefinition; -use solang_parser::pt::{ - Base, Expression, FunctionAttribute, FunctionDefinition, FunctionTy, Identifier, Loc, - Parameter, StorageLocation, Visibility, +use solang_parser::{ + helpers::CodeLocation, + pt::{ + Base, Expression, FunctionAttribute, FunctionDefinition, FunctionTy, Identifier, Loc, + Mutability, Parameter, ParameterList, Statement, Type, VariableDefinition, Visibility, + }, }; use std::collections::BTreeMap; #[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] pub struct FunctionNode(pub usize); impl FunctionNode { - pub fn underlying<'a>(&self, analyzer: &'a impl GraphLike) -> Result<&'a Function, GraphError> { + pub fn underlying<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result<&'a Function, GraphError> { match analyzer.node(*self) { Node::Function(func) => Ok(func), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), e => Err(GraphError::NodeConfusion(format!( "Node type confusion: expected node to be Function but it was: {e:?}" ))), } } - pub fn body_loc(&self, analyzer: &impl GraphLike) -> Result, GraphError> { + pub fn add_gas_cost( + &mut self, + analyzer: &mut impl GraphBackend, + cost: u64, + ) -> Result<(), GraphError> { + self.underlying_mut(analyzer)?.add_gas_cost(cost); + Ok(()) + } + + pub fn ty(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self.underlying(analyzer)?.ty) + } + + pub fn is_constructor(&self, analyzer: &impl GraphBackend) -> Result { + Ok(matches!(self.ty(analyzer)?, FunctionTy::Constructor)) + } + + pub fn body_loc(&self, analyzer: &impl GraphBackend) -> Result, GraphError> { if let Some(body_stmt) = &self.underlying(analyzer)?.body { Ok(Some(body_stmt.loc())) } else { @@ -42,13 +62,13 @@ impl FunctionNode { } } - pub fn definition_loc(&self, analyzer: &impl GraphLike) -> Result { + pub fn definition_loc(&self, analyzer: &impl GraphBackend) -> Result { let underlying = &self.underlying(analyzer)?; 
Ok(underlying.loc) } /// Gets an ordered list of modifiers for a given function - pub fn modifiers(&self, analyzer: &mut (impl GraphLike + AnalyzerLike)) -> Vec { + pub fn modifiers(&self, analyzer: &mut impl AnalyzerBackend) -> Vec { if let Some(mods) = &self.underlying(analyzer).unwrap().cache.modifiers { mods.values().copied().collect() } else { @@ -68,14 +88,14 @@ impl FunctionNode { } } - pub fn modifiers_set(&self, analyzer: &impl GraphLike) -> Result { + pub fn modifiers_set(&self, analyzer: &impl GraphBackend) -> Result { Ok(self.underlying(analyzer)?.modifiers_set) } pub fn modifier_input_vars( &self, mod_num: usize, - analyzer: &impl GraphLike, + analyzer: &impl GraphBackend, ) -> Result, GraphError> { let modifiers = self.underlying(analyzer)?.modifiers_as_base(); if let Some(modifier) = modifiers.get(mod_num) { @@ -91,17 +111,21 @@ impl FunctionNode { pub fn underlying_mut<'a>( &self, - analyzer: &'a mut (impl GraphLike + AnalyzerLike), + analyzer: &'a mut impl GraphBackend, ) -> Result<&'a mut Function, GraphError> { match analyzer.node_mut(*self) { Node::Function(func) => Ok(func), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), e => Err(GraphError::NodeConfusion(format!( "Node type confusion: expected node to be Function but it was: {e:?}" ))), } } - pub fn name(&self, analyzer: &impl GraphLike) -> Result { + pub fn name(&self, analyzer: &impl GraphBackend) -> Result { match self.underlying(analyzer)?.ty { FunctionTy::Constructor => Ok(format!( "constructor({})", @@ -122,9 +146,28 @@ impl FunctionNode { } } + pub fn prefix_only_name( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + match self.underlying(analyzer)?.ty { + FunctionTy::Function => Ok(Some( + self.underlying(analyzer)? 
+ .name + .clone() + .expect("Unnamed function") + .name + .chars() + .take_while(|&ch| ch != '(') + .collect::(), + )), + _ => Ok(None), + } + } + pub fn loc_specified_name( &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), + analyzer: &mut impl AnalyzerBackend, ) -> Result { if let Some(con) = self.maybe_associated_contract(analyzer) { Ok(format!("{}.{}", con.name(analyzer)?, self.name(analyzer)?)) @@ -133,7 +176,7 @@ impl FunctionNode { } } - pub fn body_ctx(&self, analyzer: &mut (impl GraphLike + AnalyzerLike)) -> ContextNode { + pub fn body_ctx(&self, analyzer: &mut impl AnalyzerBackend) -> ContextNode { if let Some(body_ctx) = self.underlying(analyzer).unwrap().cache.body_ctx { body_ctx } else { @@ -144,17 +187,16 @@ impl FunctionNode { .map(|edge| ContextNode::from(edge.source())) .take(1) .next() - .expect("No context for function"); + .unwrap_or_else(|| { + panic!("No context for function: {}", self.name(analyzer).unwrap()) + }); self.underlying_mut(analyzer).unwrap().cache.body_ctx = Some(body_ctx); body_ctx } } - pub fn maybe_body_ctx( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Option { + pub fn maybe_body_ctx(&self, analyzer: &mut impl AnalyzerBackend) -> Option { if let Some(body_ctx) = self.underlying(analyzer).unwrap().cache.body_ctx { Some(body_ctx) } else { @@ -175,7 +217,7 @@ impl FunctionNode { pub fn maybe_associated_contract( &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), + analyzer: &mut impl AnalyzerBackend, ) -> Option { if let Some(maybe_contract) = self .underlying(analyzer) @@ -217,15 +259,15 @@ impl FunctionNode { pub fn maybe_associated_source_unit_part( &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Option { + analyzer: &mut impl AnalyzerBackend, + ) -> Option { if let Some(sup) = self .underlying(analyzer) .unwrap() .cache .associated_source_unit_part { - Some(sup) + Some(sup.into()) } else { let parent = analyzer .graph() @@ -247,53 +289,53 @@ impl FunctionNode { Node::Contract(_) => { ContractNode::from(parent).associated_source_unit_part(analyzer) } - Node::SourceUnitPart(..) => parent, + Node::SourceUnitPart(..) 
=> parent.into(), _e => return None, }; self.underlying_mut(analyzer) .unwrap() .cache - .associated_source_unit_part = Some(sup); + .associated_source_unit_part = Some(sup.into()); Some(sup) } } - pub fn associated_source(&self, analyzer: &mut (impl GraphLike + AnalyzerLike)) -> NodeIdx { + pub fn associated_source(&self, analyzer: &mut impl AnalyzerBackend) -> SourceUnitNode { if let Some(src) = self.underlying(analyzer).unwrap().cache.associated_source { - src + src.into() } else { let sup = self .maybe_associated_source_unit_part(analyzer) .expect("No associated source unit part"); let src = analyzer - .search_for_ancestor(sup, &Edge::Part) + .search_for_ancestor(sup.into(), &Edge::Part) .expect("detached function"); self.underlying_mut(analyzer) .unwrap() .cache .associated_source = Some(src); - src + src.into() } } pub fn maybe_associated_source( &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Option { + analyzer: &mut impl AnalyzerBackend, + ) -> Option { if let Some(src) = self.underlying(analyzer).unwrap().cache.associated_source { - Some(src) + Some(src.into()) } else { let sup = self.maybe_associated_source_unit_part(analyzer)?; - let src = analyzer.search_for_ancestor(sup, &Edge::Part)?; + let src = analyzer.search_for_ancestor(sup.into(), &Edge::Part)?; self.underlying_mut(analyzer) .unwrap() .cache .associated_source = Some(src); - Some(src) + Some(src.into()) } } - pub fn params(&self, analyzer: &impl GraphLike) -> Vec { + pub fn params(&self, analyzer: &impl GraphBackend) -> Vec { if let Some(params) = &self.underlying(analyzer).unwrap().cache.params { params.to_vec() } else { @@ -313,9 +355,31 @@ impl FunctionNode { } } + pub fn ordered_param_names(&self, analyzer: &impl GraphBackend) -> Vec { + let param_nodes = self.params(analyzer); + param_nodes + .iter() + .map(|i| i.name(analyzer).unwrap()) + .collect() + } + + pub fn maybe_ordered_param_names(&self, analyzer: &impl GraphBackend) -> Option> { + let param_nodes = self.params(analyzer); + let names: Vec = param_nodes + .iter() + .filter_map(|i| i.maybe_name(analyzer).unwrap()) + .collect(); + if names.len() == param_nodes.len() { + Some(names) + } else { + None + } + } + pub fn set_params_and_ret( &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, ) -> Result<(), GraphError> { let underlying = self.underlying(analyzer)?.clone(); let mut params_strs = vec![]; @@ -325,7 +389,7 @@ impl FunctionNode { .enumerate() .filter_map(|(i, (_loc, input))| { if let Some(input) = input { - let param = FunctionParam::new(analyzer, input, i); + let param = FunctionParam::new(analyzer, arena, input, i); let input_node = analyzer.add_node(param); params_strs.push( FunctionParamNode::from(input_node) @@ -344,7 +408,7 @@ impl FunctionNode { .into_iter() .filter_map(|(_loc, output)| { if let Some(output) = output { - let ret = FunctionReturn::new(analyzer, output); + let ret = FunctionReturn::new(analyzer, arena, output); let output_node = analyzer.add_node(ret); analyzer.add_edge(output_node, *self, Edge::FunctionReturn); Some(output_node.into()) @@ -363,18 +427,52 @@ impl FunctionNode { Ok(()) } - pub fn returns<'a>( + // fn returns_inner( + // &self, + // analyzer: &impl GraphBackend, + // ) -> Vec { + // self.underlying(analyzer).unwrap().cache.returns.iter() + // // } else { + // // analyzer + // // .graph() + // // .edges_directed(self.0.into(), Direction::Incoming) + // // .filter(|edge| Edge::FunctionReturn == *edge.weight()) + // // .map(|edge| 
FunctionReturnNode::from(edge.source())) + // // .collect() + // // } + // } + + pub fn returns( &self, - analyzer: &'a impl GraphLike, - ) -> impl Iterator + 'a { - analyzer - .graph() - .edges_directed(self.0.into(), Direction::Incoming) - .filter(|edge| Edge::FunctionReturn == *edge.weight()) - .map(|edge| FunctionReturnNode::from(edge.source())) + arena: &mut RangeArena>, + analyzer: &mut impl AnalyzerBackend, + ) -> Vec { + if let Some(cached) = self.underlying(analyzer).unwrap().cache.returns.as_ref() { + cached.to_vec() + } else { + let underlying = self.underlying(analyzer).unwrap().clone(); + let rets = underlying + .returns + .into_iter() + .filter_map(|(_loc, output)| { + if let Some(output) = output { + let ret = FunctionReturn::new(analyzer, arena, output); + let output_node = analyzer.add_node(ret); + analyzer.add_edge(output_node, *self, Edge::FunctionReturn); + Some(output_node.into()) + } else { + None + } + }) + .collect::>(); + + let underlying_mut = self.underlying_mut(analyzer).unwrap(); + underlying_mut.cache.returns = Some(rets.clone()); + rets + } } - pub fn is_public_or_ext(&self, analyzer: &impl GraphLike) -> Result { + pub fn is_public_or_ext(&self, analyzer: &impl GraphBackend) -> Result { Ok(self.underlying(analyzer)?.attributes.iter().any(|attr| { matches!( attr, @@ -384,10 +482,26 @@ impl FunctionNode { })) } + pub fn is_pure(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self + .underlying(analyzer)? + .attributes + .iter() + .any(|attr| matches!(attr, FunctionAttribute::Mutability(Mutability::Pure(_))))) + } + + pub fn is_view(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self + .underlying(analyzer)? + .attributes + .iter() + .any(|attr| matches!(attr, FunctionAttribute::Mutability(Mutability::View(_))))) + } + pub fn get_overriding( &self, other: &Self, - analyzer: &impl GraphLike, + analyzer: &impl GraphBackend, ) -> Result { let self_attrs = &self.underlying(analyzer)?.attributes; let other_attrs = &other.underlying(analyzer)?.attributes; @@ -422,11 +536,15 @@ impl FunctionNode { } impl AsDotStr for FunctionNode { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> String { let inputs = self .params(analyzer) .iter() - .map(|param_node: &FunctionParamNode| param_node.as_dot_str(analyzer)) + .map(|param_node: &FunctionParamNode| param_node.as_dot_str(analyzer, arena)) .collect::>() .join(", "); @@ -446,11 +564,12 @@ impl AsDotStr for FunctionNode { .collect::>() .join(" "); format!( - "{} {}({}) {}", + "{} {}({}) {} -- gas: {}", self.underlying(analyzer).unwrap().ty, - self.name(analyzer).unwrap(), + self.name(analyzer).unwrap().split('(').collect::>()[0], inputs, - attrs + attrs, + self.underlying(analyzer).unwrap().estimated_gas ) } } @@ -479,6 +598,7 @@ pub struct Function { pub returns: ParameterList, pub modifiers_set: bool, pub cache: FunctionCache, + pub estimated_gas: u64, } #[derive(Debug, Clone, Eq, PartialEq, Default)] @@ -505,6 +625,7 @@ impl Default for Function { returns: vec![], modifiers_set: false, cache: Default::default(), + estimated_gas: 0, } } } @@ -519,6 +640,10 @@ impl Function { }) .collect() } + + pub fn add_gas_cost(&mut self, cost: u64) { + self.estimated_gas += cost; + } } impl From for Node { @@ -540,6 +665,7 @@ impl From for Function { returns: func.returns, modifiers_set: false, cache: Default::default(), + estimated_gas: 0, } } } @@ -662,6 +788,7 @@ impl From for Function { returns: vec![ret], 
modifiers_set: true, cache: Default::default(), + estimated_gas: 0, } } } @@ -670,16 +797,20 @@ impl From for Function { pub struct FunctionParamNode(pub usize); impl AsDotStr for FunctionParamNode { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> String { let var_ty = VarType::try_from_idx(analyzer, self.underlying(analyzer).unwrap().ty) .expect("Non-typeable as type"); format!( "{}{}{}", - var_ty.as_dot_str(analyzer), + var_ty.as_dot_str(analyzer, arena), if let Some(stor) = &self.underlying(analyzer).unwrap().storage { format!(" {stor} ") } else { - "".to_string() + " ".to_string() }, if let Some(name) = self.maybe_name(analyzer).unwrap() { name @@ -693,17 +824,21 @@ impl AsDotStr for FunctionParamNode { impl FunctionParamNode { pub fn underlying<'a>( &self, - analyzer: &'a impl GraphLike, + analyzer: &'a impl GraphBackend, ) -> Result<&'a FunctionParam, GraphError> { match analyzer.node(*self) { Node::FunctionParam(param) => Ok(param), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), e => Err(GraphError::NodeConfusion(format!( "Node type confusion: expected node to be FunctionParam but it was: {e:?}" ))), } } - pub fn name(&self, analyzer: &'_ impl GraphLike) -> Result { + pub fn name(&self, analyzer: &'_ impl GraphBackend) -> Result { Ok(self .underlying(analyzer)? .name @@ -712,7 +847,7 @@ impl FunctionParamNode { .name) } - pub fn maybe_name(&self, analyzer: &impl GraphLike) -> Result, GraphError> { + pub fn maybe_name(&self, analyzer: &impl GraphBackend) -> Result, GraphError> { if let Some(ident) = self.underlying(analyzer)?.name.clone() { Ok(Some(ident.name)) } else { @@ -720,7 +855,7 @@ impl FunctionParamNode { } } - pub fn range(&self, analyzer: &impl GraphLike) -> Result, GraphError> { + pub fn range(&self, analyzer: &impl GraphBackend) -> Result, GraphError> { let ty_node = self.underlying(analyzer)?.ty; if let Some(var_ty) = VarType::try_from_idx(analyzer, ty_node) { Ok(var_ty.range(analyzer)?) 
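[Editor's sketch, not part of the patch] The hunks above extend `FunctionNode` with mutability checks (`is_pure`, `is_view`), gas accounting (`add_gas_cost`, which feeds the new `Function::estimated_gas` field shown in `as_dot_str`), and `prefix_only_name`. A minimal usage sketch follows, written as if it lived inside the `graph` crate so the `crate::` re-exports shown in the new imports resolve; the pass name `charge_state_touching_funcs` and the flat `2_100` gas figure are illustrative assumptions only.

use crate::{nodes::FunctionNode, GraphBackend, GraphError};

/// Illustrative pass (hypothetical): charge a flat, made-up gas figure to
/// every function that is neither `pure` nor `view`, and report it under
/// its prefix-only name.
fn charge_state_touching_funcs(
    analyzer: &mut impl GraphBackend,
    funcs: &mut [FunctionNode],
) -> Result<(), GraphError> {
    for func in funcs.iter_mut() {
        // `is_pure` / `is_view` just scan the parsed mutability attributes.
        if !func.is_pure(&*analyzer)? && !func.is_view(&*analyzer)? {
            // Accumulates into `Function::estimated_gas`, which the updated
            // `as_dot_str` renders as `-- gas: <n>`. The cost is arbitrary here.
            func.add_gas_cost(analyzer, 2_100)?;
        }
        // `prefix_only_name` returns Some(..) only for plain functions,
        // stripping everything from the first '(' onward.
        if let Some(name) = func.prefix_only_name(&*analyzer)? {
            println!("charged gas for {name}");
        }
    }
    Ok(())
}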
@@ -729,18 +864,18 @@ impl FunctionParamNode { } } - pub fn loc(&self, analyzer: &impl GraphLike) -> Result { + pub fn loc(&self, analyzer: &impl GraphBackend) -> Result { Ok(self.underlying(analyzer)?.loc) } - pub fn ty_str(&self, analyzer: &impl GraphLike) -> Result { + pub fn ty_str(&self, analyzer: &impl GraphBackend) -> Result { let var_ty = VarType::try_from_idx(analyzer, self.underlying(analyzer)?.ty).ok_or( GraphError::NodeConfusion("Non-typeable as type".to_string()), )?; - Ok(var_ty.as_dot_str(analyzer)) + var_ty.as_string(analyzer) } - pub fn ty(&self, analyzer: &impl GraphLike) -> Result { + pub fn ty(&self, analyzer: &impl GraphBackend) -> Result { Ok(self.underlying(analyzer)?.ty) } } @@ -774,15 +909,16 @@ impl From for Node { impl FunctionParam { pub fn new( - analyzer: &mut (impl GraphLike + AnalyzerLike), + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, param: Parameter, order: usize, ) -> Self { FunctionParam { loc: param.loc, - ty: analyzer.parse_expr(¶m.ty, None), + ty: analyzer.parse_expr(arena, ¶m.ty, None), order, - storage: param.storage, + storage: param.storage.map(|s| s.into()), name: param.name, } } @@ -792,16 +928,20 @@ impl FunctionParam { pub struct FunctionReturnNode(pub usize); impl AsDotStr for FunctionReturnNode { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> String { let var_ty = VarType::try_from_idx(analyzer, self.underlying(analyzer).unwrap().ty) .expect("Non-typeable as type"); format!( "{}{}{}", - var_ty.as_dot_str(analyzer), + var_ty.as_dot_str(analyzer, arena), if let Some(stor) = &self.underlying(analyzer).unwrap().storage { format!(" {stor} ") } else { - "".to_string() + " ".to_string() }, if let Some(name) = self.maybe_name(analyzer).unwrap() { name @@ -815,17 +955,21 @@ impl AsDotStr for FunctionReturnNode { impl FunctionReturnNode { pub fn underlying<'a>( &self, - analyzer: &'a impl GraphLike, + analyzer: &'a impl GraphBackend, ) -> Result<&'a FunctionReturn, GraphError> { match analyzer.node(*self) { Node::FunctionReturn(ret) => Ok(ret), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), e => Err(GraphError::NodeConfusion(format!( "Node type confusion: expected node to be FunctionReturn but it was: {e:?}" ))), } } - pub fn maybe_name(&self, analyzer: &impl GraphLike) -> Result, GraphError> { + pub fn maybe_name(&self, analyzer: &impl GraphBackend) -> Result, GraphError> { if let Some(ident) = self.underlying(analyzer)?.name.clone() { Ok(Some(ident.name)) } else { @@ -833,9 +977,13 @@ impl FunctionReturnNode { } } - pub fn loc(&self, analyzer: &impl GraphLike) -> Result { + pub fn loc(&self, analyzer: &impl GraphBackend) -> Result { Ok(self.underlying(analyzer)?.loc) } + + pub fn ty(&self, analyzer: &impl GraphBackend) -> Result { + Ok(self.underlying(analyzer)?.ty) + } } impl From for NodeIdx { @@ -866,13 +1014,14 @@ pub struct FunctionReturn { impl FunctionReturn { pub fn new( - analyzer: &mut (impl GraphLike + AnalyzerLike), + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, param: Parameter, ) -> Self { FunctionReturn { loc: param.loc, - ty: analyzer.parse_expr(¶m.ty, None), - storage: param.storage, + ty: analyzer.parse_expr(arena, ¶m.ty, None), + storage: param.storage.map(|s| s.into()), name: param.name, } } diff --git a/crates/graph/src/nodes/mod.rs b/crates/graph/src/nodes/mod.rs new file mode 100644 index 00000000..819a05b2 
--- /dev/null +++ b/crates/graph/src/nodes/mod.rs @@ -0,0 +1,41 @@ +mod contract_ty; +pub use contract_ty::*; + +mod enum_ty; +pub use enum_ty::*; + +mod struct_ty; +pub use struct_ty::*; + +mod func_ty; +pub use func_ty::*; + +mod err_ty; +pub use err_ty::*; + +mod var_ty; +pub use var_ty::*; + +mod ty_ty; +pub use ty_ty::*; + +mod concrete; +pub use concrete::*; + +mod msg; +pub use msg::*; + +mod block; +pub use block::*; + +mod builtin; +pub use builtin::*; + +mod context; +pub use context::*; + +mod source_unit_part; +pub use source_unit_part::*; + +mod source_unit; +pub use source_unit::*; diff --git a/shared/src/nodes/msg.rs b/crates/graph/src/nodes/msg.rs similarity index 89% rename from shared/src/nodes/msg.rs rename to crates/graph/src/nodes/msg.rs index b7f24095..c7f71544 100644 --- a/shared/src/nodes/msg.rs +++ b/crates/graph/src/nodes/msg.rs @@ -1,24 +1,25 @@ -use crate::analyzer::AsDotStr; -use crate::analyzer::{AnalyzerLike, GraphLike}; -use crate::nodes::GraphError; -use crate::Builtin; -use crate::Concrete; -use crate::ContextNode; -use crate::ContextVar; +use crate::{ + nodes::{Builtin, Concrete, ContextNode, ContextVar}, + range::elem::Elem, + AnalyzerBackend, AsDotStr, GraphBackend, GraphError, Node, +}; -use crate::Node; -use crate::NodeIdx; -use ethers_core::types::Address; -use ethers_core::types::U256; +use shared::{NodeIdx, RangeArena}; + +use ethers_core::types::{Address, U256}; use solang_parser::pt::Loc; #[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] pub struct MsgNode(pub usize); impl MsgNode { - pub fn underlying<'a>(&self, analyzer: &'a impl GraphLike) -> Result<&'a Msg, GraphError> { + pub fn underlying<'a>(&self, analyzer: &'a impl GraphBackend) -> Result<&'a Msg, GraphError> { match analyzer.node(*self) { Node::Msg(st) => Ok(st), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), e => Err(GraphError::NodeConfusion(format!( "Node type confusion: expected node to be Msg but it was: {e:?}" ))), @@ -27,7 +28,11 @@ impl MsgNode { } impl AsDotStr for MsgNode { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> String { format!("msg {{ {:?} }}", self.underlying(analyzer).unwrap()) } } @@ -61,7 +66,7 @@ impl Msg { elem: &str, loc: Loc, ctx: ContextNode, - analyzer: &mut (impl GraphLike + AnalyzerLike), + analyzer: &mut impl AnalyzerBackend, ) -> Result { let (node, name) = match elem { "data" => { @@ -181,7 +186,7 @@ impl Msg { }; let mut var = ContextVar::new_from_concrete(loc, ctx, node.into(), analyzer)?; - var.name = name.clone(); + var.name.clone_from(&name); var.display_name = name; var.is_tmp = false; var.is_symbolic = true; diff --git a/crates/graph/src/nodes/source_unit.rs b/crates/graph/src/nodes/source_unit.rs new file mode 100644 index 00000000..dce79147 --- /dev/null +++ b/crates/graph/src/nodes/source_unit.rs @@ -0,0 +1,146 @@ +use crate::{ + nodes::{Concrete, ContractNode, FunctionNode, SourceUnitPartNode, StructNode, VarNode}, + range::elem::Elem, + AsDotStr, GraphBackend, GraphError, Node, +}; + +use shared::{NodeIdx, RangeArena}; + +#[derive(Default, Clone, Debug, PartialOrd, PartialEq, Ord, Eq)] +pub struct SourceUnit { + pub file: usize, + pub parts: Vec, +} + +impl SourceUnit { + pub fn new(file: usize) -> Self { + Self { + file, + ..Default::default() + } + } +} + +#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] +pub struct 
SourceUnitNode(pub usize); + +impl From for NodeIdx { + fn from(val: SourceUnitNode) -> Self { + val.0.into() + } +} + +impl From for SourceUnitNode { + fn from(idx: NodeIdx) -> Self { + SourceUnitNode(idx.index()) + } +} + +impl AsDotStr for SourceUnitNode { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> String { + let underlying = self.underlying(analyzer).unwrap(); + format!("SourceUnit({})", underlying.file) + } +} + +impl SourceUnitNode { + pub fn underlying<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result<&'a SourceUnit, GraphError> { + match analyzer.node(*self) { + Node::SourceUnit(c) => Ok(c), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find source unit part: {}", + ident.name + ))), + e => Err(GraphError::NodeConfusion(format!( + "Node type confusion: expected node to be SourceUnit but it was: {e:?}" + ))), + } + } + + pub fn underlying_mut<'a>( + &self, + analyzer: &'a mut impl GraphBackend, + ) -> Result<&'a mut SourceUnit, GraphError> { + match analyzer.node_mut(*self) { + Node::SourceUnit(c) => Ok(c), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find source unit: {}", + ident.name + ))), + e => Err(GraphError::NodeConfusion(format!( + "Node type confusion: expected node to be SourceUnit but it was: {e:?}" + ))), + } + } + + pub fn parts<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result<&'a Vec, GraphError> { + Ok(&self.underlying(analyzer)?.parts) + } + + pub fn visible_funcs( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + let mut nodes = vec![]; + self.parts(analyzer)?.iter().try_for_each(|part| { + nodes.extend(part.visible_funcs(analyzer)?); + Ok(()) + })?; + Ok(nodes) + } + + pub fn visible_structs( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + let mut nodes = vec![]; + self.parts(analyzer)?.iter().try_for_each(|part| { + nodes.extend(part.visible_structs(analyzer)?); + Ok(()) + })?; + Ok(nodes) + } + + pub fn visible_constants( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + let mut nodes = vec![]; + self.parts(analyzer)?.iter().try_for_each(|part| { + nodes.extend(part.visible_constants(analyzer)?); + Ok(()) + })?; + Ok(nodes) + } + + pub fn visible_contracts( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + let mut nodes = vec![]; + self.parts(analyzer)?.iter().try_for_each(|part| { + nodes.extend(part.visible_contracts(analyzer)?); + Ok(()) + })?; + Ok(nodes) + } + + pub fn add_part( + &self, + part: SourceUnitPartNode, + analyzer: &mut impl GraphBackend, + ) -> Result<(), GraphError> { + self.underlying_mut(analyzer)?.parts.push(part); + Ok(()) + } +} diff --git a/crates/graph/src/nodes/source_unit_part.rs b/crates/graph/src/nodes/source_unit_part.rs new file mode 100644 index 00000000..34c2e183 --- /dev/null +++ b/crates/graph/src/nodes/source_unit_part.rs @@ -0,0 +1,151 @@ +use crate::{ + nodes::{Concrete, ContractNode, FunctionNode, StructNode, VarNode}, + range::elem::Elem, + AsDotStr, GraphBackend, GraphError, Node, +}; + +use shared::{NodeIdx, RangeArena}; + +#[derive(Default, Clone, Debug, PartialOrd, PartialEq, Ord, Eq)] +pub struct SourceUnitPart { + pub file: usize, + pub part: usize, + pub funcs: Vec, + pub structs: Vec, + pub constants: Vec, + pub contracts: Vec, +} + +impl SourceUnitPart { + pub fn new(file: usize, part: usize) -> Self { + Self { + file, + part, + ..Default::default() + } + } +} + 
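[Editor's sketch, not part of the patch] The new `SourceUnit`/`SourceUnitPart` nodes above replace raw `NodeIdx` bookkeeping: each part records its own funcs, structs, constants, and contracts, and `SourceUnitNode`'s `visible_*` helpers flatten them across all parts. A minimal lookup sketch, again written as if inside the `graph` crate; the helper name `find_func_in_source` and the prefix-only name matching are illustrative assumptions, not APIs added by this patch.

use crate::{
    nodes::{FunctionNode, SourceUnitNode},
    GraphBackend, GraphError,
};

/// Illustrative helper (hypothetical): find a function by bare name anywhere
/// in a source unit, relying on `visible_funcs` to concatenate every part's
/// `funcs` list.
fn find_func_in_source(
    source: SourceUnitNode,
    target: &str,
    analyzer: &impl GraphBackend,
) -> Result<Option<FunctionNode>, GraphError> {
    for func in source.visible_funcs(analyzer)? {
        // `FunctionNode::name` includes the parameter list (e.g. "foo(uint256)"),
        // so compare against the prefix-only form where one exists.
        if func.prefix_only_name(analyzer)?.as_deref() == Some(target) {
            return Ok(Some(func));
        }
    }
    Ok(None)
}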
+#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] +pub struct SourceUnitPartNode(pub usize); + +impl From for NodeIdx { + fn from(val: SourceUnitPartNode) -> Self { + val.0.into() + } +} + +impl From for SourceUnitPartNode { + fn from(idx: NodeIdx) -> Self { + SourceUnitPartNode(idx.index()) + } +} + +impl AsDotStr for SourceUnitPartNode { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> String { + let underlying = self.underlying(analyzer).unwrap(); + format!("SourceUnitPart({}, {})", underlying.file, underlying.part) + } +} + +impl SourceUnitPartNode { + pub fn underlying<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result<&'a SourceUnitPart, GraphError> { + match analyzer.node(*self) { + Node::SourceUnitPart(c) => Ok(c), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find source unit part: {}", + ident.name + ))), + e => Err(GraphError::NodeConfusion(format!( + "Node type confusion: expected node to be SourceUnitPart but it was: {e:?}" + ))), + } + } + + pub fn underlying_mut<'a>( + &self, + analyzer: &'a mut impl GraphBackend, + ) -> Result<&'a mut SourceUnitPart, GraphError> { + match analyzer.node_mut(*self) { + Node::SourceUnitPart(c) => Ok(c), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find source unit part: {}", + ident.name + ))), + e => Err(GraphError::NodeConfusion(format!( + "Node type confusion: expected node to be SourceUnitPart but it was: {e:?}" + ))), + } + } + + pub fn visible_funcs<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result<&'a Vec, GraphError> { + Ok(&self.underlying(analyzer)?.funcs) + } + + pub fn visible_structs<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result<&'a Vec, GraphError> { + Ok(&self.underlying(analyzer)?.structs) + } + + pub fn visible_constants<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result<&'a Vec, GraphError> { + Ok(&self.underlying(analyzer)?.constants) + } + + pub fn visible_contracts<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result<&'a Vec, GraphError> { + Ok(&self.underlying(analyzer)?.contracts) + } + + pub fn add_func( + &self, + func: FunctionNode, + analyzer: &mut impl GraphBackend, + ) -> Result<(), GraphError> { + self.underlying_mut(analyzer)?.funcs.push(func); + Ok(()) + } + + pub fn add_struct( + &self, + strukt: StructNode, + analyzer: &mut impl GraphBackend, + ) -> Result<(), GraphError> { + self.underlying_mut(analyzer)?.structs.push(strukt); + Ok(()) + } + + pub fn add_contract( + &self, + contract: ContractNode, + analyzer: &mut impl GraphBackend, + ) -> Result<(), GraphError> { + self.underlying_mut(analyzer)?.contracts.push(contract); + Ok(()) + } + + pub fn add_constant( + &self, + constant: VarNode, + analyzer: &mut impl GraphBackend, + ) -> Result<(), GraphError> { + self.underlying_mut(analyzer)?.constants.push(constant); + Ok(()) + } +} diff --git a/shared/src/nodes/struct_ty.rs b/crates/graph/src/nodes/struct_ty.rs similarity index 71% rename from shared/src/nodes/struct_ty.rs rename to crates/graph/src/nodes/struct_ty.rs index 2ede9a0f..18ba87e9 100644 --- a/shared/src/nodes/struct_ty.rs +++ b/crates/graph/src/nodes/struct_ty.rs @@ -1,11 +1,10 @@ -use crate::analyzer::AsDotStr; -use crate::analyzer::{AnalyzerLike, GraphLike}; -use crate::nodes::GraphError; -use crate::Edge; - -use crate::Node; -use crate::NodeIdx; -use crate::VarType; +use crate::{ + nodes::Concrete, range::elem::Elem, AnalyzerBackend, AsDotStr, 
Edge, GraphBackend, GraphError, + Node, VarType, +}; + +use shared::{NodeIdx, RangeArena}; + use petgraph::{visit::EdgeRef, Direction}; use solang_parser::pt::{Expression, Identifier, Loc, StructDefinition, VariableDeclaration}; @@ -13,20 +12,27 @@ use solang_parser::pt::{Expression, Identifier, Loc, StructDefinition, VariableD pub struct StructNode(pub usize); impl StructNode { - pub fn underlying<'a>(&self, analyzer: &'a impl GraphLike) -> Result<&'a Struct, GraphError> { + pub fn underlying<'a>( + &self, + analyzer: &'a impl GraphBackend, + ) -> Result<&'a Struct, GraphError> { match analyzer.node(*self) { Node::Struct(st) => Ok(st), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), e => Err(GraphError::NodeConfusion(format!( "Node type confusion: expected node to be Struct but it was: {e:?}" ))), } } - pub fn loc(&self, analyzer: &impl GraphLike) -> Result { + pub fn loc(&self, analyzer: &impl GraphBackend) -> Result { Ok(self.underlying(analyzer)?.loc) } - pub fn name(&self, analyzer: &impl GraphLike) -> Result { + pub fn name(&self, analyzer: &impl GraphBackend) -> Result { Ok(self .underlying(analyzer)? .name @@ -35,7 +41,7 @@ impl StructNode { .to_string()) } - pub fn fields(&self, analyzer: &impl GraphLike) -> Vec { + pub fn fields(&self, analyzer: &impl GraphBackend) -> Vec { let mut fields: Vec<_> = analyzer .graph() .edges_directed(self.0.into(), Direction::Incoming) @@ -46,7 +52,11 @@ impl StructNode { fields } - pub fn find_field(&self, analyzer: &impl GraphLike, ident: &Identifier) -> Option { + pub fn find_field( + &self, + analyzer: &impl GraphBackend, + ident: &Identifier, + ) -> Option { analyzer .graph() .edges_directed(self.0.into(), Direction::Incoming) @@ -57,7 +67,11 @@ impl StructNode { } impl AsDotStr for StructNode { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> String { let underlying = self.underlying(analyzer).unwrap(); format!( "struct {} {{ {} }}", @@ -68,7 +82,7 @@ impl AsDotStr for StructNode { }, self.fields(analyzer) .iter() - .map(|field_node| { field_node.as_dot_str(analyzer) }) + .map(|field_node| { field_node.as_dot_str(analyzer, arena) }) .collect::>() .join("; ") ) @@ -121,16 +135,20 @@ impl From for Struct { pub struct FieldNode(pub usize); impl FieldNode { - pub fn underlying<'a>(&self, analyzer: &'a impl GraphLike) -> Result<&'a Field, GraphError> { + pub fn underlying<'a>(&self, analyzer: &'a impl GraphBackend) -> Result<&'a Field, GraphError> { match analyzer.node(*self) { Node::Field(field) => Ok(field), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), e => Err(GraphError::NodeConfusion(format!( "Node type confusion: expected node to be Field but it was: {e:?}" ))), } } - pub fn name(&self, analyzer: &impl GraphLike) -> Result { + pub fn name(&self, analyzer: &impl GraphBackend) -> Result { Ok(self .underlying(analyzer)? 
.name @@ -141,12 +159,16 @@ impl FieldNode { } impl AsDotStr for FieldNode { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> String { let underlying = self.underlying(analyzer).unwrap(); format!( "{} {}", if let Some(var_ty) = VarType::try_from_idx(analyzer, underlying.ty) { - var_ty.as_dot_str(analyzer) + var_ty.as_dot_str(analyzer, arena) } else { "".to_string() }, @@ -186,10 +208,11 @@ impl From for Node { impl Field { pub fn new( - analyzer: &mut (impl GraphLike + AnalyzerLike), + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, var_def: VariableDeclaration, ) -> Field { - let ty_idx = analyzer.parse_expr(&var_def.ty, None); + let ty_idx = analyzer.parse_expr(arena, &var_def.ty, None); Field { loc: var_def.loc, ty: ty_idx, diff --git a/shared/src/nodes/ty_ty.rs b/crates/graph/src/nodes/ty_ty.rs similarity index 61% rename from shared/src/nodes/ty_ty.rs rename to crates/graph/src/nodes/ty_ty.rs index 83b76527..b3d4e42d 100644 --- a/shared/src/nodes/ty_ty.rs +++ b/crates/graph/src/nodes/ty_ty.rs @@ -1,24 +1,29 @@ -use crate::analyzer::AsDotStr; -use crate::analyzer::{AnalyzerLike, GraphLike}; -use crate::nodes::GraphError; -use crate::Node; -use crate::NodeIdx; -use crate::VarType; +use crate::{ + nodes::Concrete, range::elem::Elem, AnalyzerBackend, AsDotStr, GraphBackend, GraphError, Node, + VarType, +}; + +use shared::{NodeIdx, RangeArena}; + use solang_parser::pt::{Expression, Identifier, Loc, TypeDefinition}; #[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] pub struct TyNode(pub usize); impl TyNode { - pub fn underlying<'a>(&self, analyzer: &'a impl GraphLike) -> Result<&'a Ty, GraphError> { + pub fn underlying<'a>(&self, analyzer: &'a impl GraphBackend) -> Result<&'a Ty, GraphError> { match analyzer.node(*self) { Node::Ty(ty) => Ok(ty), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), e => Err(GraphError::NodeConfusion(format!( "Node type confusion: expected node to be TypeNode but it was: {e:?}" ))), } } - pub fn name(&self, analyzer: &impl GraphLike) -> Result { + pub fn name(&self, analyzer: &impl GraphBackend) -> Result { Ok(self.underlying(analyzer)?.name.to_string()) } } @@ -36,12 +41,16 @@ impl From for TyNode { } impl AsDotStr for TyNode { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> String { let underlying = self.underlying(analyzer).unwrap(); format!( "{} {}", if let Some(var_ty) = VarType::try_from_idx(analyzer, underlying.ty) { - var_ty.as_dot_str(analyzer) + var_ty.as_dot_str(analyzer, arena) } else { "".to_string() }, @@ -65,12 +74,13 @@ impl From for Node { impl Ty { pub fn new( - analyzer: &mut (impl GraphLike + AnalyzerLike), + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, ty: TypeDefinition, ) -> Ty { Ty { loc: ty.loc, - ty: analyzer.parse_expr(&ty.ty, None), + ty: analyzer.parse_expr(arena, &ty.ty, None), name: ty.name, } } diff --git a/shared/src/nodes/var_ty.rs b/crates/graph/src/nodes/var_ty.rs similarity index 72% rename from shared/src/nodes/var_ty.rs rename to crates/graph/src/nodes/var_ty.rs index c15d1285..802315cc 100644 --- a/shared/src/nodes/var_ty.rs +++ b/crates/graph/src/nodes/var_ty.rs @@ -1,13 +1,13 @@ -use crate::analyzer::Search; -use crate::nodes::GraphError; - -use crate::ContractNode; -use crate::VarType; use 
crate::{ - analyzer::{AnalyzerLike, AsDotStr, GraphLike}, - Node, NodeIdx, + nodes::{ + Concrete, ContextVar, ContextVarNode, ContractNode, SourceUnitNode, SourceUnitPartNode, + }, + range::elem::Elem, + AnalyzerBackend, AsDotStr, ContextEdge, Edge, GraphBackend, GraphError, Node, VarType, }; -use crate::{ContextVar, Edge}; + +use shared::{NodeIdx, RangeArena, Search}; + use petgraph::{visit::EdgeRef, Direction}; use solang_parser::pt::{ Expression, Identifier, Loc, VariableAttribute, VariableDefinition, Visibility, @@ -17,9 +17,13 @@ use solang_parser::pt::{ pub struct VarNode(pub usize); impl VarNode { - pub fn underlying<'a>(&self, analyzer: &'a impl GraphLike) -> Result<&'a Var, GraphError> { + pub fn underlying<'a>(&self, analyzer: &'a impl GraphBackend) -> Result<&'a Var, GraphError> { match analyzer.node(*self) { Node::Var(func) => Ok(func), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), e => Err(GraphError::NodeConfusion(format!( "Node type confusion: expected node to be Var but it was: {e:?}" ))), @@ -28,10 +32,14 @@ impl VarNode { pub fn underlying_mut<'a>( &self, - analyzer: &'a mut impl GraphLike, + analyzer: &'a mut impl GraphBackend, ) -> Result<&'a mut Var, GraphError> { match analyzer.node_mut(*self) { Node::Var(func) => Ok(func), + Node::Unresolved(ident) => Err(GraphError::UnknownVariable(format!( + "Could not find variable: {}", + ident.name + ))), e => Err(GraphError::NodeConfusion(format!( "Node type confusion: expected node to be Var but it was: {e:?}" ))), @@ -40,12 +48,13 @@ impl VarNode { pub fn parse_initializer( &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, parent: NodeIdx, ) -> Result<(), GraphError> { if let Some(expr) = self.underlying(analyzer)?.initializer_expr.clone() { tracing::trace!("Parsing variable initializer"); - let init = analyzer.parse_expr(&expr, Some(parent)); + let init = analyzer.parse_expr(arena, &expr, Some(parent)); let underlying = self.underlying(analyzer)?.clone(); let mut set = false; if let Some(ty) = VarType::try_from_idx(analyzer, underlying.ty) { @@ -64,7 +73,7 @@ impl VarNode { Ok(()) } - pub fn maybe_associated_contract(&self, analyzer: &impl GraphLike) -> Option { + pub fn maybe_associated_contract(&self, analyzer: &impl GraphBackend) -> Option { analyzer .graph() .edges_directed(self.0.into(), Direction::Outgoing) @@ -81,7 +90,10 @@ impl VarNode { .map(ContractNode::from) } - pub fn maybe_associated_source_unit_part(&self, analyzer: &impl GraphLike) -> Option { + pub fn maybe_associated_source_unit_part( + &self, + analyzer: &impl GraphBackend, + ) -> Option { if let Some(con) = self.maybe_associated_contract(analyzer) { Some(con.associated_source_unit_part(analyzer)) } else { @@ -92,7 +104,7 @@ impl VarNode { .filter_map(|edge| { let node = edge.target(); match analyzer.node(node) { - Node::SourceUnitPart(..) => Some(node), + Node::SourceUnitPart(..) 
=> Some(node.into()), _ => None, } }) @@ -101,12 +113,17 @@ impl VarNode { } } - pub fn maybe_associated_source(&self, analyzer: &(impl GraphLike + Search)) -> Option { + pub fn maybe_associated_source( + &self, + analyzer: &(impl GraphBackend + Search), + ) -> Option { let sup = self.maybe_associated_source_unit_part(analyzer)?; - analyzer.search_for_ancestor(sup, &Edge::Part) + analyzer + .search_for_ancestor(sup.into(), &Edge::Part) + .map(Into::into) } - pub fn name(&self, analyzer: &impl GraphLike) -> Result { + pub fn name(&self, analyzer: &impl GraphBackend) -> Result { Ok(self .underlying(analyzer)? .name @@ -118,7 +135,7 @@ impl VarNode { pub fn const_value( &self, loc: Loc, - analyzer: &impl GraphLike, + analyzer: &impl GraphBackend, ) -> Result, GraphError> { let attrs = &self.underlying(analyzer)?.attrs; if attrs @@ -133,6 +150,7 @@ impl VarNode { display_name: self.name(analyzer)?, storage: None, is_tmp: false, + dep_on: None, tmp_of: None, is_symbolic: true, is_return: false, @@ -143,15 +161,33 @@ impl VarNode { } Ok(None) } + + pub fn inherited_into(&self, analyzer: &impl GraphBackend) -> Vec { + analyzer + .graph() + .edges_directed(self.0.into(), Direction::Incoming) + .filter(|edge| { + matches!( + *edge.weight(), + Edge::Context(ContextEdge::InheritedStorageVariable) + ) + }) + .map(|edge| ContextVarNode::from(edge.source())) + .collect() + } } impl AsDotStr for VarNode { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> String { let underlying = self.underlying(analyzer).unwrap(); format!( "{}{} {}", if let Some(var_ty) = VarType::try_from_idx(analyzer, underlying.ty) { - var_ty.as_dot_str(analyzer) + var_ty.as_dot_str(analyzer, arena) } else { "".to_string() }, @@ -208,12 +244,13 @@ impl From for Node { impl Var { pub fn new( - analyzer: &mut (impl GraphLike + AnalyzerLike), + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, var: VariableDefinition, in_contract: bool, ) -> Var { tracing::trace!("Parsing Var type"); - let ty = analyzer.parse_expr(&var.ty, None); + let ty = analyzer.parse_expr(arena, &var.ty, None); Var { loc: var.loc, ty, diff --git a/crates/graph/src/range/elem/concrete.rs b/crates/graph/src/range/elem/concrete.rs new file mode 100644 index 00000000..8ee3f789 --- /dev/null +++ b/crates/graph/src/range/elem/concrete.rs @@ -0,0 +1,284 @@ +use crate::{ + nodes::{Concrete, ContextVarNode}, + range::elem::{Elem, RangeArenaLike, RangeElem}, + GraphBackend, GraphError, +}; + +use shared::{NodeIdx, RangeArena}; + +use std::hash::{Hash, Hasher}; + +use ethers_core::types::{I256, U256}; +use solang_parser::pt::Loc; + +/// A concrete value for a range element +#[derive(Default, Clone, Debug, Ord, PartialOrd)] +pub struct RangeConcrete { + /// The value of the concrete + pub val: T, + /// The source code location + pub loc: Loc, +} + +pub fn rc_uint_sized(n: u128) -> RangeConcrete { + let size: u16 = ((32 - ((n.leading_zeros() + 128) / 8)) * 8).max(8) as u16; + RangeConcrete::new(Concrete::Uint(size, U256::from(n)), Loc::Implicit) +} + +pub fn rc_uint256(n: u128) -> RangeConcrete { + RangeConcrete::new(Concrete::Uint(256, U256::from(n)), Loc::Implicit) +} + +pub fn rc_int_sized(n: i128) -> RangeConcrete { + let size: u16 = ((32 - ((n.abs().leading_zeros() + 128) / 8)) * 8).max(8) as u16; + RangeConcrete::new(Concrete::Int(size, I256::from(n)), Loc::Implicit) +} + +pub fn rc_i256_sized(n: I256) -> RangeConcrete { + let size: u16 = ((32 - 
((n.abs().leading_zeros()) / 8)) * 8).max(8) as u16; + RangeConcrete::new(Concrete::Int(size, n), Loc::Implicit) +} + +pub fn rc_int256(n: i128) -> RangeConcrete { + RangeConcrete::new(Concrete::Int(256, I256::from(n)), Loc::Implicit) +} + +impl RangeConcrete { + pub fn new(val: T, loc: Loc) -> Self { + Self { val, loc } + } +} + +impl PartialEq for RangeConcrete { + fn eq(&self, other: &Self) -> bool { + self.val == other.val + } +} +impl Eq for RangeConcrete {} + +impl Hash for RangeConcrete { + fn hash(&self, state: &mut H) { + self.val.hash(state); + } +} + +impl From for RangeConcrete { + fn from(c: Concrete) -> Self { + Self { + val: c, + loc: Loc::Implicit, + } + } +} + +impl RangeConcrete { + pub fn as_bytes( + &self, + _analyzer: &impl GraphBackend, + _maximize: bool, + _arena: &mut RangeArena>, + ) -> Option> { + Some(self.val.as_bytes()) + } +} + +impl RangeElem for RangeConcrete { + type GraphError = GraphError; + fn arenaize( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + let _ = arena.idx_or_upsert(Elem::Concrete(self.clone()), analyzer); + Ok(()) + } + + fn has_cycle( + &self, + _seen: &mut Vec, + _analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> Result { + Ok(false) + } + + fn depends_on( + &self, + _var: ContextVarNode, + _seen: &mut Vec, + _analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> Result { + Ok(false) + } + + fn flatten( + &self, + _maximize: bool, + _analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> Result, GraphError> { + Ok(Elem::Concrete(self.clone())) + } + + fn is_flatten_cached( + &self, + _analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> bool { + true + } + + fn is_min_max_cached( + &self, + _analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> (bool, bool) { + (true, true) + } + + fn cache_flatten( + &mut self, + _: &mut impl GraphBackend, + _arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + Ok(()) + } + + fn range_eq(&self, other: &Self, arena: &mut RangeArena>) -> bool { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(self_val), Some(other_val)) => self_val == other_val, + _ => match (&self.val, &other.val) { + (Concrete::Int(_, s), Concrete::Int(_, o)) => s == o, + (Concrete::DynBytes(s), Concrete::DynBytes(o)) => s == o, + (Concrete::String(s), Concrete::String(o)) => s == o, + (Concrete::DynBytes(s), Concrete::String(o)) => s == o.as_bytes(), + (Concrete::String(s), Concrete::DynBytes(o)) => s.as_bytes() == o, + (Concrete::Array(a), Concrete::Array(b)) => { + if a.len() == b.len() { + a.iter().zip(b.iter()).all(|(a, b)| { + let a = RangeConcrete::new(a.clone(), self.loc); + + let b = RangeConcrete::new(b.clone(), other.loc); + + a.range_eq(&b, arena) + }) + } else { + false + } + } + _ => false, + }, + } + } + + fn range_ord( + &self, + other: &Self, + _arena: &mut RangeArena>, + ) -> Option { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(self_val), Some(other_val)) => Some(self_val.cmp(&other_val)), + (Some(_), _) => { + match other.val { + Concrete::Int(_, _) => { + // if we couldnt convert an int to uint, its negative + // so self must be > other + Some(std::cmp::Ordering::Greater) + } + _ => None, + } + } + (_, Some(_)) => { + match self.val { + Concrete::Int(_, _) => { + // if we couldnt convert an int to uint, its negative + // so self must be < other + Some(std::cmp::Ordering::Less) + } + _ => None, + } + } + _ => { + match (&self.val, &other.val) { + // two 
negatives + (Concrete::Int(_, s), Concrete::Int(_, o)) => Some(s.cmp(o)), + (Concrete::DynBytes(b0), Concrete::DynBytes(b1)) => Some(b0.cmp(b1)), + _ => None, + } + } + } + } + + fn dependent_on( + &self, + _analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> Vec { + vec![] + } + + fn filter_recursion( + &mut self, + _: NodeIdx, + _: NodeIdx, + _analyzer: &mut impl GraphBackend, + _arena: &mut RangeArena>, + ) { + } + + fn maximize( + &self, + _analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> Result, GraphError> { + Ok(Elem::Concrete(self.clone())) + } + fn minimize( + &self, + _analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> Result, GraphError> { + Ok(Elem::Concrete(self.clone())) + } + + fn simplify_maximize( + &self, + _analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> Result, GraphError> { + Ok(Elem::Concrete(self.clone())) + } + fn simplify_minimize( + &self, + _analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> Result, GraphError> { + Ok(Elem::Concrete(self.clone())) + } + + fn cache_maximize( + &mut self, + _g: &mut impl GraphBackend, + _arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + Ok(()) + } + + fn cache_minimize( + &mut self, + _g: &mut impl GraphBackend, + _arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + Ok(()) + } + fn uncache(&mut self) {} + + fn recursive_dependent_on( + &self, + _: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> Result, GraphError> { + Ok(vec![]) + } +} diff --git a/crates/graph/src/range/elem/elem_enum/arena.rs b/crates/graph/src/range/elem/elem_enum/arena.rs new file mode 100644 index 00000000..49582f96 --- /dev/null +++ b/crates/graph/src/range/elem/elem_enum/arena.rs @@ -0,0 +1,216 @@ +use crate::GraphBackend; +use crate::{ + nodes::Concrete, + range::elem::{Elem, RangeElem}, +}; +use shared::RangeArena; + +pub trait RangeArenaLike { + fn debug_str(&self, analyzer: &impl GraphBackend) -> String; + fn ranges(&self) -> &Vec; + fn ranges_mut(&mut self) -> &mut Vec; + fn idx_or_upsert(&mut self, elem: T, analyzer: &impl GraphBackend) -> usize; + fn take_nonnull(&mut self, idx: usize) -> Option; + fn idx(&self, elem: &T) -> Option; + fn to_graph( + &mut self, + analyzer: &impl GraphBackend, + ) -> Result, usize, petgraph::Directed, usize>, crate::GraphError>; +} + +impl RangeArenaLike> for RangeArena> { + fn debug_str(&self, analyzer: &impl GraphBackend) -> String { + self.ranges + .iter() + .enumerate() + .map(|(i, elem)| { + fn fmt(elem: &Elem, analyzer: &impl GraphBackend) -> String { + match elem { + Elem::Reference(reference) => { + format!( + "node_{} -- {}", + reference.idx.index(), + crate::nodes::ContextVarNode::from(reference.idx) + .display_name(analyzer) + .unwrap() + ) + } + Elem::Expr(expr) => { + format!( + "{} {} {}", + fmt(&expr.lhs, analyzer), + expr.op.to_string(), + fmt(&expr.rhs, analyzer) + ) + } + _ => format!("{elem}"), + } + }; + + format!("{i}: {}", fmt(elem, analyzer)) + }) + .collect::>() + .join("\n\t") + } + + fn to_graph( + &mut self, + analyzer: &impl GraphBackend, + ) -> Result, usize, petgraph::Directed, usize>, crate::GraphError> + { + let mut graph = petgraph::Graph::default(); + let mut added = vec![]; + let mut ids = vec![]; + + fn get_children( + elem: &Elem, + analyzer: &impl GraphBackend, + ) -> Result>, crate::GraphError> { + match elem { + Elem::Reference(r) => { + let cvar = crate::nodes::ContextVarNode::from(r.idx); + let range = cvar.ref_range(analyzer)?.unwrap(); + let min = range.min.clone(); + let max = 
range.max.clone(); + Ok(vec![min, max]) + } + _c @ Elem::Concrete(_) => Ok(vec![]), + Elem::ConcreteDyn(d) => { + let mut v = vec![(*d.len).clone()]; + v.extend(d.val.values().map(|(v, _)| v.clone()).collect::>()); + v.extend(d.val.keys().cloned().collect::>()); + Ok(v) + } + Elem::Expr(expr) => Ok(vec![(*expr.lhs).clone(), (*expr.rhs).clone()]), + Elem::Null => Ok(vec![]), + Elem::Arena(_) => Ok(vec![]), + } + } + + fn add_elem_and_children( + graph: &mut petgraph::Graph, usize, petgraph::Directed, usize>, + added: &mut Vec>, + ids: &mut Vec, + elem: &Elem, + analyzer: &impl GraphBackend, + ) -> Result<(), crate::GraphError> { + assert!(added.len() == ids.len()); + + if !added.contains(elem) { + let new_elems: Vec> = get_children(elem, analyzer)?; + let id = graph.add_node(elem.clone()); + added.push(elem.clone()); + ids.push(id.index()); + + new_elems.into_iter().try_for_each(|elem| { + add_elem_and_children(graph, added, ids, &elem, analyzer)?; + let to_id = added.iter().position(|i| i == &elem).unwrap(); + graph.add_edge(id, to_id.into(), 0); + Ok(()) + })?; + } + + Ok(()) + } + + self.ranges.iter().try_for_each(|elem: &Elem| { + add_elem_and_children(&mut graph, &mut added, &mut ids, elem, analyzer) + })?; + Ok(graph) + } + + fn idx_or_upsert(&mut self, elem: Elem, analyzer: &impl GraphBackend) -> usize { + if self.ranges.is_empty() { + self.ranges.push(Elem::Null); + self.map.insert(Elem::Null, 0); + } + + let nulls = self.ranges.iter().fold(0, |mut acc, e| { + if matches!(e, Elem::Null) { + acc += 1; + } + acc + }); + + // println!( + // "{}\nhad cycle:\n{:?}", + // self.debug_str(analyzer), + // petgraph::dot::Dot::new(&self.to_graph(analyzer).unwrap()) // petgraph::algo::toposort(&self.to_graph(analyzer).unwrap(), None).is_err() + // ); + match elem { + Elem::Arena(idx) => return idx, + Elem::Null => return 0, + _ => {} + } + + if let Some(idx) = self.idx(&elem) { + let Some(existing) = self.take_nonnull(idx) else { + self.ranges_mut()[idx] = elem; + return idx; + }; + + let (min_cached, max_cached) = existing.is_min_max_cached(analyzer, self); + let mut existing_count = 0; + if min_cached { + existing_count += 1; + } + if max_cached { + existing_count += 1; + } + if existing.is_flatten_cached(analyzer, self) { + existing_count += 1; + } + + let (min_cached, max_cached) = elem.is_min_max_cached(analyzer, self); + let mut new_count = 0; + if min_cached { + new_count += 1; + } + if max_cached { + new_count += 1; + } + if elem.is_flatten_cached(analyzer, self) { + new_count += 1; + } + + if new_count >= existing_count { + self.ranges_mut()[idx] = elem; + } else { + self.ranges_mut()[idx] = existing; + } + + idx + } else { + let idx = self.ranges.len(); + self.ranges.push(elem.clone()); + self.map.insert(elem, idx); + idx + } + } + + fn ranges(&self) -> &Vec> { + &self.ranges + } + fn ranges_mut(&mut self) -> &mut Vec> { + &mut self.ranges + } + + fn take_nonnull(&mut self, idx: usize) -> Option> { + if let Some(t) = self.ranges.get_mut(idx) { + match t { + Elem::Null => None, + _ => Some(std::mem::take(t)), + } + } else { + None + } + } + + fn idx(&self, elem: &Elem) -> Option { + if let Elem::Arena(idx) = elem { + Some(*idx) + } else { + self.map.get(elem).copied() + } + } +} diff --git a/crates/graph/src/range/elem/elem_enum/impls.rs b/crates/graph/src/range/elem/elem_enum/impls.rs new file mode 100644 index 00000000..f9aa87d8 --- /dev/null +++ b/crates/graph/src/range/elem/elem_enum/impls.rs @@ -0,0 +1,686 @@ +use crate::elem::{MinMaxed, RangeArenaLike}; +use crate::{ + 
nodes::Concrete, + range::elem::{Elem, RangeConcrete, RangeDyn, RangeElem, RangeExpr, RangeOp, Reference}, + GraphBackend, GraphError, +}; +use shared::{NodeIdx, RangeArena}; + +use ethers_core::types::I256; + +use std::collections::BTreeMap; + +impl Elem { + pub fn wrapping_add(self, other: Elem) -> Self { + let expr = RangeExpr::new(self, RangeOp::Add(true), other); + Self::Expr(expr) + } + pub fn wrapping_sub(self, other: Elem) -> Self { + let expr = RangeExpr::new(self, RangeOp::Sub(true), other); + Self::Expr(expr) + } + pub fn wrapping_mul(self, other: Elem) -> Self { + let expr = RangeExpr::new(self, RangeOp::Mul(true), other); + Self::Expr(expr) + } + pub fn wrapping_div(self, other: Elem) -> Self { + let expr = RangeExpr::new(self, RangeOp::Div(true), other); + Self::Expr(expr) + } + + /// Creates a logical AND of two range elements + pub fn and(self, other: Self) -> Self { + let expr = RangeExpr::::new(self, RangeOp::And, other); + Self::Expr(expr) + } + + /// Creates a logical OR of two range elements + pub fn or(self, other: Self) -> Self { + let expr = RangeExpr::::new(self, RangeOp::Or, other); + Self::Expr(expr) + } + + pub fn maybe_elem_min(&self) -> Option { + match self { + Elem::Concrete(RangeConcrete { val, .. }) => { + Some(Elem::from(Concrete::min_of_type(val)?)) + } + _ => None, + } + } + + pub fn maybe_elem_max(&self) -> Option { + match self { + Elem::Concrete(RangeConcrete { val, .. }) => { + Some(Elem::from(Concrete::max_of_type(val)?)) + } + _ => None, + } + } +} + +impl Elem { + pub fn node_idx(&self) -> Option { + match self { + Self::Reference(Reference { idx, .. }) => Some(*idx), + _ => None, + } + } + + pub fn concrete(&self) -> Option { + match self { + Self::Concrete(RangeConcrete { val: c, .. }) => Some(c.clone()), + _ => None, + } + } + + pub fn maybe_concrete(&self) -> Option> { + match self { + Elem::Concrete(a) => Some(a.clone()), + _ => None, + } + } + + pub fn maybe_concrete_value(&self) -> Option> { + match self { + Elem::Concrete(a) => Some(a.clone()), + _ => None, + } + } + + pub fn maybe_range_dyn(&self) -> Option> { + match self { + Elem::ConcreteDyn(a) => Some(a.clone()), + _ => None, + } + } + + pub fn is_conc(&self) -> bool { + match self { + Elem::Concrete(_a) => true, + Elem::ConcreteDyn(a) => { + a.len.maybe_concrete().is_some() + && a.val + .iter() + .all(|(key, (val, _))| key.is_conc() && val.is_conc()) + } + Elem::Expr(expr) => expr.lhs.is_conc() && expr.rhs.is_conc(), + _ => false, + } + } +} + +impl Elem { + pub fn assert_nonnull(&self) { + match self { + Elem::Expr(expr) => { + expr.lhs.assert_nonnull(); + expr.rhs.assert_nonnull(); + } + Elem::Null => panic!("was null"), + _ => {} + } + } + + pub fn contains_node(&self, node_idx: NodeIdx) -> bool { + match self { + Self::Reference(d) => d.idx == node_idx, + Self::Concrete(_) => false, + Self::Expr(expr) => expr.contains_node(node_idx), + Self::ConcreteDyn(d) => d.contains_node(node_idx), + Self::Null => false, + Elem::Arena(_) => todo!(), + } + } + + pub fn expect_into_expr(self) -> RangeExpr { + match self { + Self::Expr(expr) => expr, + _ => panic!("Not expression"), + } + } + + pub fn dyn_map(&self) -> Option<&BTreeMap> { + match self { + Self::ConcreteDyn(dyn_range) => Some(&dyn_range.val), + _ => None, + } + } + + pub fn dyn_map_mut(&mut self) -> Option<&mut BTreeMap> { + match self { + Self::ConcreteDyn(ref mut dyn_range) => Some(&mut dyn_range.val), + _ => None, + } + } + + /// Creates a new range element that is a cast from one type to another + pub fn cast(self, other: 
Self) -> Self { + let expr = RangeExpr::new(self, RangeOp::Cast, other); + Elem::Expr(expr) + } + + pub fn concat(self, other: Self) -> Self { + let expr = RangeExpr::new(self, RangeOp::Concat, other); + Elem::Expr(expr) + } + + /// Creates a new range element that is the minimum of two range elements + pub fn min(self, other: Self) -> Self { + let expr = RangeExpr::new(self, RangeOp::Min, other); + Elem::Expr(expr) + } + + /// Creates a new range element that is the maximum of two range elements + pub fn max(self, other: Self) -> Self { + let expr = RangeExpr::new(self, RangeOp::Max, other); + Elem::Expr(expr) + } + + /// Creates a new range element that is a boolean of equality of two range elements + pub fn eq(self, other: Self) -> Self { + let expr = RangeExpr::new(self, RangeOp::Eq, other); + Elem::Expr(expr) + } + + /// Creates a new range element that is a boolean of inequality of two range elements + pub fn neq(self, other: Self) -> Self { + let expr = RangeExpr::new(self, RangeOp::Neq, other); + Elem::Expr(expr) + } + + /// Creates a new range element that is one range element to the power of another + pub fn pow(self, other: Self) -> Self { + let expr = RangeExpr::new(self, RangeOp::Exp, other); + Elem::Expr(expr) + } + + /// Creates a new range element that is a memcopy of another + pub fn memcopy(self) -> Self { + let expr = RangeExpr::new(self, RangeOp::Memcopy, Elem::Null); + Elem::Expr(expr) + } + + /// Creates a new range element that applies a setting of indices of a memory object + pub fn set_indices(self, other: RangeDyn) -> Self { + let expr = RangeExpr::new(self, RangeOp::SetIndices, Elem::ConcreteDyn(other)); + Elem::Expr(expr) + } + + /// Creates a new range element that sets the length of a memory object + pub fn set_length(self, other: Self) -> Self { + let expr = RangeExpr::new(self, RangeOp::SetLength, other); + Elem::Expr(expr) + } + + /// Gets the length of a memory object + pub fn get_length(self) -> Self { + let expr = RangeExpr::new(self, RangeOp::GetLength, Elem::Null); + Elem::Expr(expr) + } + + /// Gets the length of a memory object + pub fn get_index(self, other: Self) -> Self { + let expr = RangeExpr::new(self, RangeOp::GetIndex, other); + Elem::Expr(expr) + } +} + +impl Elem { + pub fn replace_dep( + &mut self, + to_replace: NodeIdx, + replacement: Self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena, + ) { + match self { + Elem::Reference(Reference { idx, .. 
}) => { + if *idx == to_replace { + *self = replacement; + } + } + Elem::Concrete(_) => {} + Elem::Expr(expr) => { + expr.lhs + .replace_dep(to_replace, replacement.clone(), analyzer, arena); + expr.rhs + .replace_dep(to_replace, replacement, analyzer, arena); + expr.maximized = None; + expr.minimized = None; + } + Elem::ConcreteDyn(d) => { + d.len + .replace_dep(to_replace, replacement.clone(), analyzer, arena); + let vals = std::mem::take(&mut d.val); + d.val = vals + .into_iter() + .map(|(mut k, (mut v, op))| { + k.replace_dep(to_replace, replacement.clone(), analyzer, arena); + v.replace_dep(to_replace, replacement.clone(), analyzer, arena); + (k, (v, op)) + }) + .collect(); + } + Elem::Null => {} + Elem::Arena(_) => { + let mut cloned = self.dearenaize_clone(arena); + cloned.replace_dep(to_replace, replacement, analyzer, arena); + cloned.arenaize(analyzer, arena).unwrap(); + *self = cloned; + } + } + } + + pub fn recurse_dearenaize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena, + ) -> Self { + match self { + Self::Arena(arena_idx) => arena + .ranges + .get(*arena_idx) + .unwrap() + .clone() + .recurse_dearenaize(analyzer, arena), + Self::Expr(expr) => expr.recurse_dearenaize(analyzer, arena), + e => e.clone(), + } + } + + pub fn dearenaize_clone(&self, arena: &mut RangeArena) -> Self { + match self { + Self::Arena(arena_idx) => arena.ranges.get(*arena_idx).cloned().unwrap_or_default(), + _ => unreachable!(), + } + } + + pub fn dearenaize(&self, arena: &mut RangeArena) -> (Self, usize) { + match self { + Self::Arena(arena_idx) => { + ( + arena.take_nonnull(*arena_idx).unwrap_or_default(), + // arena.ranges.get(*arena_idx).cloned().unwrap_or_default(), + *arena_idx, + ) + } + _ => unreachable!(), + } + } + + pub fn rearenaize(&self, elem: Self, idx: usize, arena: &mut RangeArena) { + if !matches!(elem, Elem::Null) { + if let Some(t) = arena.ranges.get_mut(idx) { + *t = elem; + } + } + } + + pub fn arena_eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Arena(a), Self::Arena(b)) => a == b, + (Self::Concrete(a), Self::Concrete(b)) => a == b, + (Self::ConcreteDyn(a), Self::ConcreteDyn(b)) => { + a.len == b.len + && a.val.len() == b.val.len() + && a.val + .iter() + .zip(b.val.iter()) + .all(|((a, op_a), (b, op_b))| a.arena_eq(b) && op_a == op_b) + } + (Self::Reference(a), Self::Reference(b)) => a == b, + (Self::Expr(a), Self::Expr(b)) => { + a.lhs.arena_eq(&b.lhs) && a.rhs.arena_eq(&b.rhs) && a.op == b.op + } + (Elem::Null, Elem::Null) => true, + _ => false, + } + } + pub fn as_bytes( + &self, + analyzer: &impl GraphBackend, + maximize: bool, + arena: &mut RangeArena>, + ) -> Option> { + let evaled = if maximize { + self.maximize(analyzer, arena).ok()? + } else { + self.minimize(analyzer, arena).ok()? 
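// A minimal editor's sketch (not in the original patch): the arena helpers above
// follow a take/operate/put-back pattern so other `Elem::Arena(idx)` pointers stay
// valid while one caller mutates the element. Assuming the crate types in this patch:
//
//     fn with_dearenaized(e: &Elem<Concrete>, arena: &mut RangeArena<Elem<Concrete>>) {
//         if matches!(e, Elem::Arena(_)) {
//             // Move the element out of its slot (the slot holds Null meanwhile)...
//             let (inner, idx) = e.dearenaize(arena);
//             // ...inspect or transform `inner` here...
//             // ...then write it back so the index keeps pointing at real data.
//             e.rearenaize(inner, idx, arena);
//         }
//     }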
+ }; + + match evaled { + Elem::Concrete(c) => c.as_bytes(analyzer, maximize, arena), + Elem::ConcreteDyn(c) => c.as_bytes(analyzer, maximize, arena), + _ => None, + } + } + + pub fn overlaps( + &self, + other: &Self, + eval: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + match (self, other) { + (Elem::Concrete(s), Elem::Concrete(o)) => Ok(Some(o.val == s.val)), + (Elem::Reference(s), Elem::Reference(o)) => { + if s == o { + Ok(Some(true)) + } else if eval { + let lhs_min = s.minimize(analyzer, arena)?; + let rhs_max = o.maximize(analyzer, arena)?; + + match lhs_min.range_ord(&rhs_max, arena) { + Some(std::cmp::Ordering::Less) => { + // we know our min is less than the other max + // check that the max is greater than or eq their min + let lhs_max = s.maximize(analyzer, arena)?; + let rhs_min = o.minimize(analyzer, arena)?; + Ok(Some(matches!( + lhs_max.range_ord(&rhs_min, arena), + Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) + ))) + } + Some(std::cmp::Ordering::Equal) => Ok(Some(true)), + _ => Ok(Some(false)), + } + } else { + Ok(None) + } + } + (Elem::Reference(s), c @ Elem::Concrete(_)) => { + if eval { + let lhs_min = s.minimize(analyzer, arena)?; + + match lhs_min.range_ord(c, arena) { + Some(std::cmp::Ordering::Less) => { + // we know our min is less than the other max + // check that the max is greater than or eq their min + let lhs_max = s.maximize(analyzer, arena)?; + Ok(Some(matches!( + lhs_max.range_ord(c, arena), + Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) + ))) + } + Some(std::cmp::Ordering::Equal) => Ok(Some(true)), + _ => Ok(Some(false)), + } + } else { + Ok(None) + } + } + (Elem::Concrete(_), Elem::Reference(_)) => other.overlaps(self, eval, analyzer, arena), + _ => Ok(None), + } + } + + /// Given an element and a min and max, checks if the element could be equal to the RHS + pub fn overlaps_dual( + &self, + rhs_min: &Self, + rhs_max: &Self, + eval: bool, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + match self { + Self::Reference(d) => { + if eval { + let lhs_min = d.minimize(analyzer, arena)?; + let rhs_max = rhs_max.maximize(analyzer, arena)?; + + match lhs_min.range_ord(&rhs_max, arena) { + Some(std::cmp::Ordering::Less) => { + // we know our min is less than the other max + // check that the max is greater than or eq their min + let lhs_max = d.maximize(analyzer, arena)?; + let rhs_min = rhs_min.minimize(analyzer, arena)?; + Ok(Some(matches!( + lhs_max.range_ord(&rhs_min, arena), + Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) + ))) + } + Some(std::cmp::Ordering::Equal) => Ok(Some(true)), + _ => Ok(Some(false)), + } + } else if self == rhs_min || self == rhs_max { + Ok(Some(true)) + } else { + Ok(None) + } + } + Self::Concrete(_) => { + let (min, max) = if eval { + ( + rhs_min.minimize(analyzer, arena)?, + rhs_max.maximize(analyzer, arena)?, + ) + } else { + (rhs_min.clone(), rhs_max.clone()) + }; + + match min.range_ord(self, arena) { + Some(std::cmp::Ordering::Less) => Ok(Some(matches!( + max.range_ord(self, arena), + Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) + ))), + Some(std::cmp::Ordering::Equal) => Ok(Some(true)), + _ => Ok(Some(false)), + } + } + _ => Ok(None), + } + } + pub fn is_negative( + &self, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + let res = match self { + Elem::Concrete(RangeConcrete { + val: Concrete::Int(_, 
val), + .. + }) if val < &I256::zero() => true, + Elem::Reference(dy) => { + if maximize { + dy.maximize(analyzer, arena)? + .is_negative(maximize, analyzer, arena)? + } else { + dy.minimize(analyzer, arena)? + .is_negative(maximize, analyzer, arena)? + } + } + Elem::Expr(expr) => { + if maximize { + expr.maximize(analyzer, arena)? + .is_negative(maximize, analyzer, arena)? + } else { + expr.minimize(analyzer, arena)? + .is_negative(maximize, analyzer, arena)? + } + } + _ => false, + }; + Ok(res) + } + + pub fn pre_evaled_is_negative(&self) -> bool { + matches!(self, Elem::Concrete(RangeConcrete { val: Concrete::Int(_, val), ..}) if val < &I256::zero()) + } + + pub fn inverse_if_boolean(&self) -> Option { + match self { + Self::Reference(Reference { idx: _, .. }) => Some(Elem::Expr(RangeExpr::new( + self.clone(), + RangeOp::Not, + Elem::Null, + ))), + Self::Concrete(_) => Some(Elem::Expr(RangeExpr::new( + self.clone(), + RangeOp::Not, + Elem::Null, + ))), + Self::Expr(expr) => Some(Elem::Expr(expr.inverse_if_boolean()?)), + Self::ConcreteDyn(_d) => None, + Self::Null => None, + Self::Arena(_) => todo!(), + } + } + + pub fn arenaized_flattened( + &self, + max: bool, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Option>> { + if let Some(idx) = arena.idx(self) { + if let Some(t) = arena.ranges.get(idx) { + match t { + Elem::Expr(ref arenaized) => { + if max { + arenaized.flattened_max.clone() + } else { + arenaized.flattened_min.clone() + } + } + Elem::Reference(ref arenaized) => { + if max { + arenaized.flattened_max.clone() + } else { + arenaized.flattened_min.clone() + } + } + Elem::ConcreteDyn(ref arenaized) => { + if max { + arenaized.flattened_max.clone() + } else { + arenaized.flattened_min.clone() + } + } + c @ Elem::Concrete(_) => Some(Box::new(c.clone())), + c @ Elem::Null => Some(Box::new(c.clone())), + Elem::Arena(idx) => Elem::Arena(*idx).arenaized_flattened(max, analyzer, arena), + } + } else { + None + } + } else { + None + } + } + + pub fn set_arenaized_flattened( + &self, + max: bool, + elem: &Elem, + arena: &mut RangeArena>, + ) { + if let Some(idx) = arena.idx(self) { + if let Some(ref mut t) = arena.ranges.get_mut(idx) { + match &mut *t { + Elem::Expr(ref mut arenaized) => { + if max { + arenaized.flattened_max = Some(Box::new(elem.clone())); + } else { + arenaized.flattened_min = Some(Box::new(elem.clone())); + } + } + Elem::Reference(ref mut arenaized) => { + if max { + arenaized.flattened_max = Some(Box::new(elem.clone())); + } else { + arenaized.flattened_min = Some(Box::new(elem.clone())); + } + } + Elem::ConcreteDyn(ref mut arenaized) => { + if max { + arenaized.flattened_max = Some(Box::new(elem.clone())); + } else { + arenaized.flattened_min = Some(Box::new(elem.clone())); + } + } + _ => {} + } + } + } + } + + pub fn set_arenaized_cache( + &self, + max: bool, + elem: &Elem, + arena: &mut RangeArena>, + ) { + if let Some(idx) = arena.idx(self) { + if let Some(t) = arena.ranges.get_mut(idx) { + match &mut *t { + Elem::Expr(ref mut arenaized) => { + if max { + arenaized.maximized = Some(MinMaxed::Maximized(Box::new(elem.clone()))); + } else { + arenaized.minimized = Some(MinMaxed::Minimized(Box::new(elem.clone()))); + } + } + Elem::Reference(ref mut arenaized) => { + if max { + arenaized.maximized = Some(MinMaxed::Maximized(Box::new(elem.clone()))); + } else { + arenaized.minimized = Some(MinMaxed::Minimized(Box::new(elem.clone()))); + } + } + Elem::ConcreteDyn(ref mut arenaized) => { + if max { + arenaized.maximized = 
Some(MinMaxed::Maximized(Box::new(elem.clone()))); + } else { + arenaized.minimized = Some(MinMaxed::Minimized(Box::new(elem.clone()))); + } + } + _ => {} + } + } + } + } + + pub fn is_bytes(&self) -> bool { + matches!( + self, + Elem::Concrete(RangeConcrete { + val: Concrete::Bytes(..), + .. + }) + ) + } + + pub fn is_string(&self) -> bool { + matches!( + self, + Elem::Concrete(RangeConcrete { + val: Concrete::String(..), + .. + }) + ) + } + + pub fn is_uint(&self) -> bool { + matches!( + self, + Elem::Concrete(RangeConcrete { + val: Concrete::Uint(..), + .. + }) + ) + } + + pub fn is_int(&self) -> bool { + matches!( + self, + Elem::Concrete(RangeConcrete { + val: Concrete::Int(..), + .. + }) + ) + } +} diff --git a/crates/graph/src/range/elem/elem_enum/mod.rs b/crates/graph/src/range/elem/elem_enum/mod.rs new file mode 100644 index 00000000..4c9bb429 --- /dev/null +++ b/crates/graph/src/range/elem/elem_enum/mod.rs @@ -0,0 +1,28 @@ +mod arena; +mod impls; +mod ops; +mod range_elem; +mod traits; + +use crate::range::elem::{RangeConcrete, RangeDyn, RangeExpr, Reference}; +use shared::RangeArenaIdx; + +pub use arena::RangeArenaLike; + +/// A core range element. +#[derive(Default, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] +pub enum Elem { + /// A range element that is a reference to another node + Reference(Reference), + /// A concrete range element of type `T`. e.g.: some number like `10` + ConcreteDyn(RangeDyn), + /// A concrete range element of type `T`. e.g.: some number like `10` + Concrete(RangeConcrete), + /// A range element that is an expression composed of other range elements + Expr(RangeExpr), + /// A range element that is a pointer to another expression in an arena + Arena(RangeArenaIdx), + /// A null range element useful in range expressions that dont have a rhs + #[default] + Null, +} diff --git a/crates/graph/src/range/elem/elem_enum/ops.rs b/crates/graph/src/range/elem/elem_enum/ops.rs new file mode 100644 index 00000000..52e17fa1 --- /dev/null +++ b/crates/graph/src/range/elem/elem_enum/ops.rs @@ -0,0 +1,93 @@ +use crate::range::elem::{Elem, RangeExpr, RangeOp}; + +use std::ops::{Add, BitAnd, BitOr, BitXor, Div, Mul, Rem, Shl, Shr, Sub}; + +impl Add for Elem { + type Output = Self; + + fn add(self, other: Elem) -> Self { + let expr = RangeExpr::new(self, RangeOp::Add(false), other); + Self::Expr(expr) + } +} + +impl Sub for Elem { + type Output = Self; + + fn sub(self, other: Elem) -> Self { + let expr = RangeExpr::new(self, RangeOp::Sub(false), other); + Self::Expr(expr) + } +} + +impl Mul for Elem { + type Output = Self; + + fn mul(self, other: Elem) -> Self { + let expr = RangeExpr::new(self, RangeOp::Mul(false), other); + Self::Expr(expr) + } +} + +impl Div for Elem { + type Output = Self; + + fn div(self, other: Elem) -> Self { + let expr = RangeExpr::new(self, RangeOp::Div(false), other); + Self::Expr(expr) + } +} + +impl Shl for Elem { + type Output = Self; + + fn shl(self, other: Elem) -> Self { + let expr = RangeExpr::new(self, RangeOp::Shl, other); + Self::Expr(expr) + } +} + +impl Shr for Elem { + type Output = Self; + + fn shr(self, other: Elem) -> Self { + let expr = RangeExpr::new(self, RangeOp::Shr, other); + Self::Expr(expr) + } +} + +impl Rem for Elem { + type Output = Self; + + fn rem(self, other: Elem) -> Self { + let expr = RangeExpr::new(self, RangeOp::Mod, other); + Self::Expr(expr) + } +} + +impl BitAnd for Elem { + type Output = Self; + + fn bitand(self, other: Self) -> Self::Output { + let expr = RangeExpr::new(self, RangeOp::BitAnd, other); + 
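    // A minimal editor's sketch (not in the original patch): the `std::ops` impls in
    // this file are lazy -- `a + b` performs no arithmetic, it only builds an
    // `Elem::Expr` node tagged `RangeOp::Add(false)` (checked); the `wrapping_*`
    // helpers earlier in the patch use the `true` variant. Assuming the crate types above:
    fn symbolic_add_sketch() {
        use crate::nodes::Concrete;
        use ethers_core::types::U256;
        let a = Elem::from(Concrete::from(U256::from(1u64)));
        let b = Elem::from(Concrete::from(U256::from(2u64)));
        // Both results are unevaluated expression trees, not numbers.
        assert!(matches!(a.clone() + b.clone(), Elem::Expr(_)));
        assert!(matches!(a.wrapping_add(b), Elem::Expr(_)));
    }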
Self::Expr(expr) + } +} + +impl BitOr for Elem { + type Output = Self; + + fn bitor(self, other: Self) -> Self::Output { + let expr = RangeExpr::new(self, RangeOp::BitOr, other); + Self::Expr(expr) + } +} + +impl BitXor for Elem { + type Output = Self; + + fn bitxor(self, other: Self) -> Self::Output { + let expr = RangeExpr::new(self, RangeOp::BitXor, other); + Self::Expr(expr) + } +} diff --git a/crates/graph/src/range/elem/elem_enum/range_elem.rs b/crates/graph/src/range/elem/elem_enum/range_elem.rs new file mode 100644 index 00000000..dc9898f3 --- /dev/null +++ b/crates/graph/src/range/elem/elem_enum/range_elem.rs @@ -0,0 +1,617 @@ +use crate::elem::{MinMaxed, RangeArenaLike}; +use crate::{ + nodes::{Concrete, ContextVarNode}, + range::elem::{collapse, Elem, MaybeCollapsed, RangeElem}, + GraphBackend, GraphError, +}; + +use shared::{NodeIdx, RangeArena}; + +impl RangeElem for Elem { + type GraphError = GraphError; + + fn arenaize( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + match self { + Self::Arena(_) => return Ok(()), + Self::Reference(d) => d.arenaize(analyzer, arena)?, + Self::ConcreteDyn(d) => d.arenaize(analyzer, arena)?, + Self::Expr(expr) => { + expr.arenaize(analyzer, arena)?; + } + Self::Concrete(c) => c.arenaize(analyzer, arena)?, + Self::Null => {} + } + + let self_take = std::mem::take(self); + *self = Elem::Arena(arena.idx_or_upsert(self_take, analyzer)); + Ok(()) + } + + fn range_eq(&self, other: &Self, arena: &mut RangeArena>) -> bool { + match (self, other) { + (Self::Arena(a), Self::Arena(b)) => a == b, + (Self::Concrete(a), Self::Concrete(b)) => a.range_eq(b, arena), + (Self::ConcreteDyn(a), Self::ConcreteDyn(b)) => a.range_eq(b, arena), + (Self::Reference(a), Self::Reference(b)) => a.idx == b.idx, + _ => false, + } + } + + fn range_ord( + &self, + other: &Self, + arena: &mut RangeArena>, + ) -> Option { + match (self, other) { + (Self::Arena(a), Self::Arena(b)) => { + if a == b { + Some(std::cmp::Ordering::Equal) + } else { + let (l, a) = self.dearenaize(arena); + let (r, b) = other.dearenaize(arena); + let res = l.range_ord(&r, arena); + self.rearenaize(l, a, arena); + self.rearenaize(r, b, arena); + res + } + } + (Self::Concrete(a), Self::Concrete(b)) => a.range_ord(b, arena), + (c @ Self::Concrete(_), Self::Reference(r)) => { + if let (Some(MinMaxed::Minimized(min)), Some(MinMaxed::Maximized(max))) = + (&r.minimized, &r.maximized) + { + let min_ord = c.range_ord(min, arena)?; + let max_ord = c.range_ord(max, arena)?; + if min_ord == max_ord { + Some(min_ord) + } else { + None + } + } else { + None + } + } + (Self::Reference(r), c @ Self::Concrete(_)) => { + if let (Some(MinMaxed::Minimized(min)), Some(MinMaxed::Maximized(max))) = + (&r.minimized, &r.maximized) + { + let min_ord = min.range_ord(c, arena)?; + let max_ord = max.range_ord(c, arena)?; + if min_ord == max_ord { + Some(min_ord) + } else { + None + } + } else { + None + } + } + (Self::Reference(a), Self::Reference(b)) => a.range_ord(b, arena), + (Elem::Null, Elem::Null) => None, + (_a, Elem::Null) => Some(std::cmp::Ordering::Greater), + (Elem::Null, _a) => Some(std::cmp::Ordering::Less), + _ => None, + } + } + + fn flatten( + &self, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + match self { + Self::Reference(d) => d.flatten(maximize, analyzer, arena), + Self::Concrete(c) => c.flatten(maximize, analyzer, arena), + Self::Expr(expr) => expr.flatten(maximize, analyzer, arena), + 
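            // Editor's sketch (not in the original patch): `arenaize` above deduplicates
            // an element into the shared `RangeArena` and leaves a cheap pointer behind.
            // Assuming the crate types in this patch:
            //
            //     let mut e = Elem::from(Concrete::from(U256::from(7u64)));
            //     e.arenaize(analyzer, arena)?;          // upserts into the arena
            //     assert!(matches!(e, Elem::Arena(_)));  // local value is now an index
            //
            // Later lookups go through `arena.idx(..)` / `idx_or_upsert`, so identical
            // sub-expressions can share one slot and its cached bounds.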
Self::ConcreteDyn(d) => d.flatten(maximize, analyzer, arena), + Self::Null => Ok(Elem::Null), + Self::Arena(_) => { + let (de, idx) = self.dearenaize(arena); + let res = de.flatten(maximize, analyzer, arena)?; + self.rearenaize(de, idx, arena); + Ok(res) + } + } + } + + fn cache_flatten( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + if self.is_flatten_cached(analyzer, arena) { + return Ok(()); + } + + match self { + Self::Reference(d) => d.cache_flatten(analyzer, arena), + Self::Concrete(c) => c.cache_flatten(analyzer, arena), + Self::Expr(expr) => expr.cache_flatten(analyzer, arena), + Self::ConcreteDyn(d) => d.cache_flatten(analyzer, arena), + Self::Null => Ok(()), + Self::Arena(idx) => { + tracing::trace!("flattening for arena idx: {idx}"); + let (mut dearenaized, idx) = self.dearenaize(arena); + dearenaized.cache_flatten(analyzer, arena)?; + self.rearenaize(dearenaized, idx, arena); + Ok(()) + } + } + } + + fn is_flatten_cached( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> bool { + match self { + Self::Reference(d) => d.is_flatten_cached(analyzer, arena), + Self::Concrete(c) => c.is_flatten_cached(analyzer, arena), + Self::Expr(expr) => expr.is_flatten_cached(analyzer, arena), + Self::ConcreteDyn(d) => d.is_flatten_cached(analyzer, arena), + Self::Null => true, + Self::Arena(_) => { + let (t, idx) = self.dearenaize(arena); + let res = t.is_flatten_cached(analyzer, arena); + self.rearenaize(t, idx, arena); + res + } + } + } + + fn is_min_max_cached( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> (bool, bool) { + match self { + Self::Reference(d) => d.is_min_max_cached(analyzer, arena), + Self::Concrete(_c) => (true, true), + Self::Expr(expr) => expr.is_min_max_cached(analyzer, arena), + Self::ConcreteDyn(d) => d.is_min_max_cached(analyzer, arena), + Self::Null => (true, true), + Self::Arena(_) => { + let (t, idx) = self.dearenaize(arena); + let res = t.is_min_max_cached(analyzer, arena); + self.rearenaize(t, idx, arena); + res + } + } + } + + fn dependent_on( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Vec { + match self { + Self::Reference(d) => d.dependent_on(analyzer, arena), + Self::Concrete(_) => vec![], + Self::Expr(expr) => expr.dependent_on(analyzer, arena), + Self::ConcreteDyn(d) => d.dependent_on(analyzer, arena), + Self::Null => vec![], + Self::Arena(_) => { + let (t, idx) = self.dearenaize(arena); + let res = t.dependent_on(analyzer, arena); + self.rearenaize(t, idx, arena); + res + } + } + } + + fn recursive_dependent_on( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + match self { + Self::Reference(d) => d.recursive_dependent_on(analyzer, arena), + Self::Concrete(_) => Ok(vec![]), + Self::Expr(expr) => expr.recursive_dependent_on(analyzer, arena), + Self::ConcreteDyn(d) => d.recursive_dependent_on(analyzer, arena), + Self::Null => Ok(vec![]), + Self::Arena(_) => { + let (dearenaized, idx) = self.dearenaize(arena); + let res = dearenaized.recursive_dependent_on(analyzer, arena); + self.rearenaize(dearenaized, idx, arena); + res + } + } + } + + fn has_cycle( + &self, + seen: &mut Vec, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + match self { + Self::Reference(d) => d.has_cycle(seen, analyzer, arena), + Self::Concrete(_) => Ok(false), + Self::Expr(expr) => expr.has_cycle(seen, analyzer, arena), + Self::ConcreteDyn(d) => d.has_cycle(seen, 
analyzer, arena), + Self::Null => Ok(false), + Self::Arena(_) => { + let (dearenaized, idx) = self.dearenaize(arena); + let res = dearenaized.has_cycle(seen, analyzer, arena); + self.rearenaize(dearenaized, idx, arena); + res + } + } + } + + fn depends_on( + &self, + var: ContextVarNode, + seen: &mut Vec, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + match self { + Self::Reference(d) => d.depends_on(var, seen, analyzer, arena), + Self::Concrete(_) => Ok(false), + Self::Expr(expr) => expr.depends_on(var, seen, analyzer, arena), + Self::ConcreteDyn(d) => d.depends_on(var, seen, analyzer, arena), + Self::Null => Ok(false), + Self::Arena(_) => { + let (dearenaized, idx) = self.dearenaize(arena); + let res = dearenaized.depends_on(var, seen, analyzer, arena); + self.rearenaize(dearenaized, idx, arena); + res + } + } + } + + fn filter_recursion( + &mut self, + node_idx: NodeIdx, + new_idx: NodeIdx, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) { + match self { + Self::Reference(ref mut d) => { + if d.idx == node_idx { + d.idx = new_idx + } + } + Self::Concrete(_) => {} + Self::Expr(expr) => expr.filter_recursion(node_idx, new_idx, analyzer, arena), + Self::ConcreteDyn(d) => d.filter_recursion(node_idx, new_idx, analyzer, arena), + Self::Null => {} + Self::Arena(_) => { + let (mut dearenaized, idx) = self.dearenaize(arena); + dearenaized.filter_recursion(node_idx, new_idx, analyzer, arena); + self.rearenaize(dearenaized, idx, arena); + } + } + } + + fn maximize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + use Elem::*; + let res = match self { + Reference(dy) => dy.maximize(analyzer, arena)?, + Concrete(inner) => inner.maximize(analyzer, arena)?, + ConcreteDyn(inner) => inner.maximize(analyzer, arena)?, + Expr(expr) => expr.maximize(analyzer, arena)?, + Null => Elem::Null, + Arena(_) => { + let (dearenaized, idx) = self.dearenaize(arena); + let res = dearenaized.maximize(analyzer, arena)?; + self.rearenaize(dearenaized, idx, arena); + + match arena.ranges.get_mut(idx) { + Some(Self::Reference(ref mut d)) => { + if d.maximized.is_none() { + d.maximized = Some(MinMaxed::Maximized(Box::new(res.clone()))); + } + } + Some(Self::Expr(ref mut expr)) => { + if expr.maximized.is_none() { + expr.maximized = Some(MinMaxed::Maximized(Box::new(res.clone()))); + } + } + Some(Self::ConcreteDyn(ref mut d)) => { + if d.maximized.is_none() { + d.maximized = Some(MinMaxed::Maximized(Box::new(res.clone()))); + } + } + _ => {} + } + + let (_min, max) = self.is_min_max_cached(analyzer, arena); + assert!(max, "????"); + + res + } + }; + Ok(res) + } + + fn minimize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + use Elem::*; + let res = match self { + Reference(dy) => dy.minimize(analyzer, arena)?, + Concrete(inner) => inner.minimize(analyzer, arena)?, + ConcreteDyn(inner) => inner.minimize(analyzer, arena)?, + Expr(expr) => expr.minimize(analyzer, arena)?, + Null => Elem::Null, + Arena(_) => { + let (dearenaized, idx) = self.dearenaize(arena); + let res = dearenaized.minimize(analyzer, arena)?; + self.rearenaize(dearenaized, idx, arena); + + match arena.ranges.get_mut(idx) { + Some(Self::Reference(ref mut d)) => { + if d.minimized.is_none() { + d.minimized = Some(MinMaxed::Minimized(Box::new(res.clone()))); + } + } + Some(Self::Expr(ref mut expr)) => { + if expr.minimized.is_none() { + expr.minimized = Some(MinMaxed::Minimized(Box::new(res.clone()))); + } + } + 
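                        // Editor's note (not in the original patch): this match writes the
                        // freshly computed bound back into the arena-resident copy of the
                        // element, so a later `minimize`/`maximize` of any `Elem::Arena(idx)`
                        // pointing at the same slot can reuse it instead of re-evaluating.
                        // A minimal call-site sketch, assuming the crate types above:
                        //
                        //     let lo = elem.minimize(analyzer, arena)?;
                        //     let hi = elem.maximize(analyzer, arena)?;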
Some(Self::ConcreteDyn(ref mut d)) => { + if d.minimized.is_none() { + d.minimized = Some(MinMaxed::Minimized(Box::new(res.clone()))); + } + } + _ => {} + } + + let (min, _max) = self.is_min_max_cached(analyzer, arena); + assert!(min, "????"); + res + } + }; + Ok(res) + } + + fn simplify_maximize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + use Elem::*; + + if let Some(idx) = arena.idx(self) { + if let Some(t) = arena.ranges.get(idx) { + match t { + Reference(dy) => { + if let Some(max) = &dy.flattened_max { + return Ok(*max.clone()); + } + } + c @ Concrete(_) => return Ok(c.clone()), + ConcreteDyn(inner) => { + if let Some(max) = &inner.flattened_max { + return Ok(*max.clone()); + } + } + Expr(expr) => { + if let Some(max) = &expr.flattened_max { + return Ok(*max.clone()); + } + } + _ => {} + } + } + } + + match self { + Reference(dy) => dy.simplify_maximize(analyzer, arena), + Concrete(inner) => inner.simplify_maximize(analyzer, arena), + ConcreteDyn(inner) => inner.simplify_maximize(analyzer, arena), + Expr(expr) => match collapse(*expr.lhs.clone(), expr.op, *expr.rhs.clone(), arena) { + MaybeCollapsed::Collapsed(collapsed) => { + let res = collapsed.simplify_maximize(analyzer, arena)?; + collapsed.set_arenaized_flattened(true, &res, arena); + Ok(res) + } + _ => { + let res = expr.simplify_maximize(analyzer, arena)?; + expr.set_arenaized_flattened(true, res.clone(), arena); + Ok(res) + } + }, + Null => Ok(Elem::Null), + Arena(_) => { + let (dearenaized, idx) = self.dearenaize(arena); + let flat = dearenaized.flatten(true, analyzer, arena)?; + let max = flat.simplify_maximize(analyzer, arena)?; + self.rearenaize(dearenaized, idx, arena); + + match arena.ranges.get_mut(idx) { + Some(Self::Reference(ref mut d)) => { + tracing::trace!("simplify maximize cache MISS: {self}"); + d.flattened_max = Some(Box::new(max.clone())); + } + Some(Self::Expr(ref mut expr)) => { + tracing::trace!("simplify maximize cache MISS: {self}"); + expr.flattened_max = Some(Box::new(max.clone())); + } + Some(Self::ConcreteDyn(ref mut d)) => { + tracing::trace!("simplify maximize cache MISS: {self}"); + d.flattened_max = Some(Box::new(max.clone())); + } + _ => {} + } + + Ok(max) + } + } + } + + fn simplify_minimize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + use Elem::*; + + if let Some(idx) = arena.idx(self) { + if let Some(t) = arena.ranges.get(idx) { + match t { + Reference(dy) => { + if let Some(min) = &dy.flattened_min { + return Ok(*min.clone()); + } + } + c @ Concrete(_) => return Ok(c.clone()), + ConcreteDyn(inner) => { + if let Some(min) = &inner.flattened_min { + return Ok(*min.clone()); + } + } + Expr(expr) => { + if let Some(min) = &expr.flattened_min { + return Ok(*min.clone()); + } + } + Null => return Ok(Elem::Null), + _ => {} + } + } + } + + let res = match self { + Reference(dy) => dy.simplify_minimize(analyzer, arena), + Concrete(inner) => inner.simplify_minimize(analyzer, arena), + ConcreteDyn(inner) => inner.simplify_minimize(analyzer, arena), + Expr(expr) => match collapse(*expr.lhs.clone(), expr.op, *expr.rhs.clone(), arena) { + MaybeCollapsed::Collapsed(collapsed) => { + let res = collapsed.simplify_minimize(analyzer, arena)?; + collapsed.set_arenaized_flattened(false, &res, arena); + Ok(res) + } + _ => { + let res = expr.simplify_minimize(analyzer, arena)?; + expr.set_arenaized_flattened(false, res.clone(), arena); + Ok(res) + } + }, + Null => Ok(Elem::Null), + Arena(_) => { + let 
(dearenaized, idx) = self.dearenaize(arena); + let flat = dearenaized.flatten(false, analyzer, arena)?; + let min = flat.simplify_minimize(analyzer, arena)?; + self.rearenaize(dearenaized, idx, arena); + + match arena.ranges.get_mut(idx) { + Some(Self::Reference(ref mut d)) => { + tracing::trace!("simplify minimize cache MISS: {self}"); + d.flattened_min = Some(Box::new(min.clone())); + } + Some(Self::Expr(ref mut expr)) => { + tracing::trace!("simplify minimize cache MISS: {self}"); + expr.flattened_min = Some(Box::new(min.clone())); + } + Some(Self::ConcreteDyn(ref mut d)) => { + tracing::trace!("simplify minimize cache MISS: {self}"); + d.flattened_min = Some(Box::new(min.clone())); + } + _ => {} + } + + Ok(min) + } + }?; + + Ok(res) + } + + fn cache_maximize( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + use Elem::*; + match self { + Reference(dy) => dy.cache_maximize(analyzer, arena), + Concrete(inner) => inner.cache_maximize(analyzer, arena), + ConcreteDyn(inner) => inner.cache_maximize(analyzer, arena), + Expr(expr) => match collapse(*expr.lhs.clone(), expr.op, *expr.rhs.clone(), arena) { + MaybeCollapsed::Collapsed(mut collapsed) => { + collapsed.cache_maximize(analyzer, arena)?; + let max = collapsed.maximize(analyzer, arena)?; + self.set_arenaized_flattened(true, &max, arena); + *self = collapsed; + Ok(()) + } + _ => { + expr.cache_maximize(analyzer, arena)?; + let max = expr.maximize(analyzer, arena)?; + self.set_arenaized_flattened(true, &max, arena); + Ok(()) + } + }, + Null => Ok(()), + Arena(_) => { + let (mut dearenaized, idx) = self.dearenaize(arena); + dearenaized.cache_maximize(analyzer, arena)?; + self.rearenaize(dearenaized, idx, arena); + Ok(()) + } + } + } + + fn cache_minimize( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + use Elem::*; + match self { + Reference(dy) => dy.cache_minimize(analyzer, arena), + Concrete(inner) => inner.cache_minimize(analyzer, arena), + ConcreteDyn(inner) => inner.cache_minimize(analyzer, arena), + Expr(expr) => match collapse(*expr.lhs.clone(), expr.op, *expr.rhs.clone(), arena) { + MaybeCollapsed::Collapsed(mut collapsed) => { + collapsed.cache_minimize(analyzer, arena)?; + let min = collapsed.minimize(analyzer, arena)?; + self.set_arenaized_flattened(false, &min, arena); + *self = collapsed; + Ok(()) + } + _ => { + expr.cache_minimize(analyzer, arena)?; + let min = expr.minimize(analyzer, arena)?; + self.set_arenaized_flattened(false, &min, arena); + Ok(()) + } + }, + Null => Ok(()), + Arena(_) => { + let (mut dearenaized, idx) = self.dearenaize(arena); + dearenaized.cache_minimize(analyzer, arena)?; + self.rearenaize(dearenaized, idx, arena); + Ok(()) + } + } + } + fn uncache(&mut self) { + use Elem::*; + match self { + Reference(dy) => dy.uncache(), + Concrete(inner) => inner.uncache(), + ConcreteDyn(inner) => inner.uncache(), + Expr(expr) => expr.uncache(), + Null => {} + Arena(_idx) => {} + } + } +} diff --git a/crates/graph/src/range/elem/elem_enum/traits.rs b/crates/graph/src/range/elem/elem_enum/traits.rs new file mode 100644 index 00000000..a2faced0 --- /dev/null +++ b/crates/graph/src/range/elem/elem_enum/traits.rs @@ -0,0 +1,114 @@ +use crate::{ + nodes::{Concrete, ContextVarNode}, + range::elem::{Elem, RangeConcrete, RangeExpr, RangeOp, Reference}, +}; +use shared::NodeIdx; + +use solang_parser::pt::Loc; + +use std::{ + borrow::Cow, + hash::{Hash, Hasher}, +}; + +impl Hash for Elem { + fn 
hash<H: Hasher>(&self, state: &mut H) {
+        match self {
+            Elem::Reference(r) => r.hash(state),
+            Elem::Concrete(c) => c.hash(state),
+            Elem::Expr(expr) => expr.hash(state),
+            Elem::ConcreteDyn(d) => d.hash(state),
+            Elem::Null => (-1i32).hash(state),
+            Elem::Arena(idx) => idx.hash(state),
+        }
+    }
+}
+
+impl<'a> From<&'a Elem<Concrete>> for Cow<'a, Elem<Concrete>> {
+    fn from(val: &'a Elem<Concrete>) -> Self {
+        Cow::Borrowed(val)
+    }
+}
+
+impl<'a> From<Elem<Concrete>> for Cow<'a, Elem<Concrete>> {
+    fn from(val: Elem<Concrete>) -> Self {
+        Cow::Owned(val)
+    }
+}
+
+impl From<bool> for Elem<Concrete> {
+    fn from(c: bool) -> Self {
+        Elem::Concrete(RangeConcrete::new(Concrete::from(c), Loc::Implicit))
+    }
+}
+
+impl From<Reference<Concrete>> for Elem<Concrete> {
+    fn from(dy: Reference<Concrete>) -> Self {
+        Elem::Reference(dy)
+    }
+}
+
+impl From<RangeConcrete<Concrete>> for Elem<Concrete> {
+    fn from(c: RangeConcrete<Concrete>) -> Self {
+        Elem::Concrete(c)
+    }
+}
+
+impl From<NodeIdx> for Elem<Concrete> {
+    fn from(idx: NodeIdx) -> Self {
+        Elem::Reference(Reference::new(idx))
+    }
+}
+
+impl From<Concrete> for Elem<Concrete> {
+    fn from(c: Concrete) -> Self {
+        Elem::Concrete(RangeConcrete::new(c, Loc::Implicit))
+    }
+}
+
+impl From<ContextVarNode> for Elem<Concrete> {
+    fn from(c: ContextVarNode) -> Self {
+        Elem::Reference(Reference::new(c.into()))
+    }
+}
+
+impl std::fmt::Display for Elem<Concrete> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Elem::Reference(Reference { idx, .. }) => write!(f, "idx_{}", idx.index()),
+            Elem::ConcreteDyn(d) => {
+                write!(f, "{{len: {}, values: {{", d.len)?;
+                d.val
+                    .iter()
+                    .try_for_each(|(key, (val, op))| write!(f, " {key}: ({val}, {op}),"))?;
+                write!(f, "}}}}")
+            }
+            Elem::Concrete(RangeConcrete { val, .. }) => {
+                write!(f, "{}", val.as_string())
+            }
+            Elem::Expr(RangeExpr { lhs, op, rhs, .. }) => match op {
+                RangeOp::Min | RangeOp::Max => {
+                    write!(f, "{}{{{}, {}}}", op.to_string(), lhs, rhs)
+                }
+                RangeOp::Cast => match &**rhs {
+                    Elem::Concrete(RangeConcrete { val, ..
}) => { + write!( + f, + "{}({}, {})", + op.to_string(), + lhs, + val.as_builtin().basic_as_string() + ) + } + _ => write!(f, "{}({}, {})", op.to_string(), lhs, rhs), + }, + RangeOp::BitNot => { + write!(f, "~{}", lhs) + } + _ => write!(f, "({} {} {})", lhs, op.to_string(), rhs), + }, + Elem::Arena(idx) => write!(f, "arena_idx_{idx}"), + Elem::Null => write!(f, ""), + } + } +} diff --git a/crates/graph/src/range/elem/elem_trait.rs b/crates/graph/src/range/elem/elem_trait.rs new file mode 100644 index 00000000..c4bfcd28 --- /dev/null +++ b/crates/graph/src/range/elem/elem_trait.rs @@ -0,0 +1,126 @@ +use crate::{nodes::ContextVarNode, range::elem::Elem, GraphBackend, GraphError}; + +use shared::{NodeIdx, RangeArena}; +use std::hash::Hash; + +pub trait RangeElem: Hash { + type GraphError; + /// Flattens an element into an expression or concrete based purely on inputs, calldata, storage, or environment data variables + fn flatten( + &self, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, Self::GraphError>; + /// Returns whether `cache_flatten` has been called + fn is_flatten_cached( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> bool; + fn is_min_max_cached( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> (bool, bool); + /// Flattens an element and caches the result + fn cache_flatten( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), Self::GraphError>; + /// Tries to evaluate a range element down to a concrete or maximally simplified expression to its maximum value + fn maximize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, Self::GraphError>; + /// Maximizes the element and caches the result for quicker use later + fn cache_maximize( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), Self::GraphError>; + /// Tries to evaluate a range element down to a concrete or maximally simplified expression to its minimum value + fn minimize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, Self::GraphError>; + /// Minimizes the element and caches the result for quicker use later + fn cache_minimize( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), Self::GraphError>; + /// Uncaches the minimum and maximum + fn uncache(&mut self); + /// Tries to simplify to maximum(i.e.: leaves symbolic/dynamic values as they are) + fn simplify_maximize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, Self::GraphError>; + /// Tries to simplify to minimum (i.e.: leaves symbolic/dynamic values as they are) + fn simplify_minimize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, Self::GraphError>; + /// Checks if two range elements are equal + fn range_eq(&self, other: &Self, arena: &mut RangeArena>) -> bool; + /// Tries to compare the ordering of two range elements + fn range_ord( + &self, + other: &Self, + arena: &mut RangeArena>, + ) -> Option; + /// Traverses the range expression and finds all nodes that are dynamically pointed to + /// and returns it in a vector. 
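    // Editor's note (not in the original patch): the two method families above differ
    // in what they return:
    //   * `minimize` / `maximize` evaluate as far as possible toward a single concrete
    //     bound and fill the `MinMaxed` caches;
    //   * `simplify_minimize` / `simplify_maximize` only fold what is already statically
    //     known, leaving references symbolic (they use the `flattened_*` caches instead).
    // A caller that wants both views, assuming `Elem<Concrete>` as the implementor:
    //
    //     let symbolic  = elem.simplify_maximize(analyzer, arena)?;
    //     let evaluated = elem.maximize(analyzer, arena)?;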
+ fn dependent_on( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Vec; + + fn recursive_dependent_on( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, Self::GraphError>; + + fn has_cycle( + &self, + seen: &mut Vec, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result; + + fn depends_on( + &self, + var: ContextVarNode, + seen: &mut Vec, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result; + /// Attempts to replace range elements that form a cyclic dependency by replacing + /// it with a new node. Ideally no cyclic dependencies occur in ranges as of now + /// but in theory it can make sense. + /// + /// e.g.: take the basic expression `x + y`, in normal checked solidity math + /// both x and y have the requirement `var <= 2**256 - 1 - other_var`, forming a + /// cyclic dependency. + fn filter_recursion( + &mut self, + node_idx: NodeIdx, + new_idx: NodeIdx, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ); + + fn arenaize( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError>; +} diff --git a/crates/graph/src/range/elem/expr/collapse.rs b/crates/graph/src/range/elem/expr/collapse.rs new file mode 100644 index 00000000..d043005b --- /dev/null +++ b/crates/graph/src/range/elem/expr/collapse.rs @@ -0,0 +1,633 @@ +use crate::elem::expr::simplify::*; + +use crate::{ + nodes::Concrete, + range::{ + elem::{Elem, RangeConcrete, RangeElem, RangeExpr, RangeOp}, + exec_traits::*, + }, +}; + +use ethers_core::types::U256; +use shared::RangeArena; + +pub static ORD_OPS: &[RangeOp] = &[ + RangeOp::Eq, + RangeOp::Neq, + RangeOp::Lt, + RangeOp::Lte, + RangeOp::Gt, + RangeOp::Gte, + RangeOp::Min, + RangeOp::Max, +]; + +pub static EQ_OPS: &[RangeOp] = &[ + RangeOp::Eq, + RangeOp::Neq, + RangeOp::Lt, + RangeOp::Lte, + RangeOp::Gt, + RangeOp::Gte, + RangeOp::And, + RangeOp::Or, +]; + +pub static SINGLETON_EQ_OPS: &[RangeOp] = &[ + RangeOp::Eq, + RangeOp::Neq, + RangeOp::Lt, + RangeOp::Lte, + RangeOp::Gt, + RangeOp::Gte, +]; + +pub static FLIP_INEQ_OPS: &[RangeOp] = &[RangeOp::Lt, RangeOp::Lte, RangeOp::Gt, RangeOp::Gte]; + +#[derive(Debug)] +pub enum MaybeCollapsed { + Concretes(Elem, Elem), + Collapsed(Elem), + Not(Elem, Elem), +} + +pub fn collapse( + l: Elem, + op: RangeOp, + r: Elem, + arena: &mut RangeArena>, +) -> MaybeCollapsed { + let l = if let Elem::Expr(e) = l { + match collapse(*e.lhs, e.op, *e.rhs, arena) { + MaybeCollapsed::Not(l, r) => Elem::Expr(RangeExpr::new(l, e.op, r)), + MaybeCollapsed::Concretes(l, r) => Elem::Expr(RangeExpr::new(l, e.op, r)), + MaybeCollapsed::Collapsed(e) => e, + } + } else { + l + }; + + let r = if let Elem::Expr(e) = r { + match collapse(*e.lhs, e.op, *e.rhs, arena) { + MaybeCollapsed::Not(l, r) => Elem::Expr(RangeExpr::new(l, e.op, r)), + MaybeCollapsed::Concretes(l, r) => Elem::Expr(RangeExpr::new(l, e.op, r)), + MaybeCollapsed::Collapsed(e) => e, + } + } else { + r + }; + + if let Some(e) = ident_rules(&l, op, &r, arena) { + return MaybeCollapsed::Collapsed(e); + } + + let res = match (l, r) { + (l @ Elem::Arena(_), r) => { + let t = l.dearenaize_clone(arena); + match collapse(t, op, r, arena) { + MaybeCollapsed::Not(l, r) => MaybeCollapsed::Not(l, r), + MaybeCollapsed::Concretes(l, r) => MaybeCollapsed::Not(l, r), + MaybeCollapsed::Collapsed(e) => MaybeCollapsed::Collapsed(e), + } + } + (l, r @ Elem::Arena(_)) => { + let t = r.dearenaize_clone(arena); + match collapse(l, op, t, arena) { + 
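            // Editor's sketch (not in the original patch): when an operand is an
            // `Elem::Arena` pointer (as here), it is cloned out of the arena and the
            // collapse is retried on the underlying element. `collapse` itself only
            // performs algebraic rewrites; for example, assuming the crate types above
            // and some hypothetical `some_node_idx`:
            //
            //     let x: Elem<Concrete> = Elem::from(some_node_idx);
            //     let zero = Elem::from(Concrete::from(U256::zero()));
            //     // x + 0 folds away the identity instead of building an Expr node:
            //     assert!(matches!(
            //         collapse(x, RangeOp::Add(false), zero, arena),
            //         MaybeCollapsed::Collapsed(_)
            //     ));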
MaybeCollapsed::Not(l, r) => MaybeCollapsed::Not(l, r), + MaybeCollapsed::Concretes(l, r) => MaybeCollapsed::Not(l, r), + MaybeCollapsed::Collapsed(e) => MaybeCollapsed::Collapsed(e), + } + } + (l @ Elem::Concrete(_), r @ Elem::Concrete(_)) => MaybeCollapsed::Concretes(l, r), + (Elem::Expr(expr), d @ Elem::Reference(_)) => { + // try to collapse the expression + let x = &*expr.lhs; + let y = &*expr.rhs; + let z = d; + + let ords = Ords::new(x, y, &z, arena); + + match (expr.op, op) { + (RangeOp::Sub(false), _) if ORD_OPS.contains(&op) => { + if let Some(res) = sub_ord_rules(x, y, op, &z, ords, arena) { + MaybeCollapsed::Collapsed(res) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + (RangeOp::Div(_), RangeOp::Eq) => { + if ords.x_eq_z() && !ords.y_eq_one() { + // (x -|/ y) == x ==> false + MaybeCollapsed::Collapsed(Elem::from(Concrete::from(false))) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + (RangeOp::Add(_), RangeOp::Eq) => { + if (ords.x_eq_z() && !ords.y_eq_zero()) || (ords.y_eq_z() && !ords.x_eq_zero()) + { + // (x +|* k) == x ==> false + MaybeCollapsed::Collapsed(Elem::from(Concrete::from(false))) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + (RangeOp::Mul(_), RangeOp::Eq) => { + if (ords.x_eq_z() && !ords.y_eq_one()) || (ords.y_eq_z() && !ords.x_eq_one()) { + // (x +|* k) == x ==> false + MaybeCollapsed::Collapsed(Elem::from(Concrete::from(false))) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + (RangeOp::Max, RangeOp::Gte) => { + if ords.x_eq_z() || ords.y_eq_z() { + // max{ x, y } >= + MaybeCollapsed::Collapsed(Elem::from(Concrete::from(true))) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + (RangeOp::Min, RangeOp::Lte) => { + if ords.x_eq_z() || ords.y_eq_z() { + // min{ x, y } <= + MaybeCollapsed::Collapsed(Elem::from(Concrete::from(true))) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + _ => MaybeCollapsed::Not(Elem::Expr(expr), z), + } + } + // if we have an expression, it fundamentally must have a dynamic in it + (Elem::Expr(expr), c @ Elem::Concrete(_)) => { + // potentially collapsible + let x = &*expr.lhs; + let y = &*expr.rhs; + let z = c; + + let ords = Ords::new(x, y, &z, arena); + + match (expr.op, op) { + (RangeOp::Sub(false), _) if ORD_OPS.contains(&op) => { + if let Some(res) = sub_ord_rules(x, y, op, &z, ords, arena) { + MaybeCollapsed::Collapsed(res) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + (RangeOp::Add(false), _) if ORD_OPS.contains(&op) => { + if let Some(res) = add_ord_rules(x, y, op, &z, ords, arena) { + MaybeCollapsed::Collapsed(res) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + (RangeOp::Eq, RangeOp::Eq) => { + // ((x == y) == z) + // can skip if x and z eq + if ords.x_eq_z() || ords.y_eq_z() { + MaybeCollapsed::Collapsed(Elem::Expr(expr)) + } else if z.range_eq(&Elem::from(Concrete::from(true)), arena) { + MaybeCollapsed::Collapsed(Elem::Expr(expr)) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + (RangeOp::Add(l_op), RangeOp::Add(r_op)) => { + // ((x + y) + z) + let op_fn = if l_op && r_op { + // unchecked + RangeAdd::range_wrapping_add + } else { + // checked + as RangeAdd>::range_add + }; + if let Some(new) = op_fn(x, &z) { + MaybeCollapsed::Collapsed(Elem::Expr(RangeExpr::new(y.clone(), op, new))) + } else if let Some(new) = op_fn(y, &z) { + MaybeCollapsed::Collapsed(Elem::Expr(RangeExpr::new(x.clone(), op, new))) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + (RangeOp::Add(l_op), 
RangeOp::Sub(r_op)) => { + // ((x + y) - z) => k - y || x + k + if l_op == r_op { + match y.range_ord(&z, arena) { + Some(std::cmp::Ordering::Equal) | Some(std::cmp::Ordering::Greater) => { + // y and z are concrete && y >= z ==> x + (y - z) + let op_fn = if l_op { + // unchecked + RangeSub::range_wrapping_sub + } else { + // checked + as RangeSub>::range_sub + }; + if let Some(new) = op_fn(y, &z) { + let new_expr = + Elem::Expr(RangeExpr::new(x.clone(), expr.op, new)); + MaybeCollapsed::Collapsed(new_expr) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + Some(std::cmp::Ordering::Less) => { + // y and z are concrete && y < z ==> x - (z - y) + let op_fn = if l_op { + // unchecked + RangeSub::range_wrapping_sub + } else { + // checked + as RangeSub>::range_sub + }; + if let Some(new) = op_fn(&z, y) { + MaybeCollapsed::Collapsed(Elem::Expr(RangeExpr::new( + x.clone(), + RangeOp::Sub(l_op), + new, + ))) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + None => { + // x and z are concrete, if x >= z, just do (x - z) + y + // else do (y - (z - x)) + match x.range_ord(&z, arena) { + Some(std::cmp::Ordering::Equal) + | Some(std::cmp::Ordering::Greater) => { + let op_fn = if l_op { + // unchecked + RangeSub::range_wrapping_sub + } else { + // checked + as RangeSub>::range_sub + }; + if let Some(new) = op_fn(y, &z) { + MaybeCollapsed::Collapsed(Elem::Expr(RangeExpr::new( + x.clone(), + expr.op, + new, + ))) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + Some(std::cmp::Ordering::Less) => { + // (y - (z - x)) because z > x, therefore its (-k + y) ==> (y - k) where k = (x - z) + let op_fn = if l_op { + // unchecked + RangeSub::range_wrapping_sub + } else { + // checked + as RangeSub>::range_sub + }; + if let Some(new) = op_fn(&z, x) { + MaybeCollapsed::Collapsed(Elem::Expr(RangeExpr::new( + y.clone(), + RangeOp::Sub(l_op), + new, + ))) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + None => MaybeCollapsed::Not(Elem::Expr(expr), z), + } + } + } + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + (RangeOp::Sub(l_op), RangeOp::Add(r_op)) => { + // ((x - y) + z) => k - y || x + k + if l_op == r_op { + match y.range_ord(&z, arena) { + Some(std::cmp::Ordering::Equal) | Some(std::cmp::Ordering::Greater) => { + // y and z are concrete && z <= y ==> x - (y - z) + let op_fn = if l_op { + // unchecked + RangeSub::range_wrapping_sub + } else { + // checked + as RangeSub>::range_sub + }; + if let Some(new) = op_fn(y, &z) { + MaybeCollapsed::Collapsed(Elem::Expr(RangeExpr::new( + x.clone(), + expr.op, + new, + ))) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + Some(std::cmp::Ordering::Less) => { + // y and z are concrete && y < z ==> x + (z - y) + let op_fn = if l_op { + // unchecked + RangeSub::range_wrapping_sub + } else { + // checked + as RangeSub>::range_sub + }; + if let Some(new) = op_fn(&z, y) { + MaybeCollapsed::Collapsed(Elem::Expr(RangeExpr::new( + x.clone(), + RangeOp::Add(l_op), + new, + ))) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + None => { + // x and z are concrete, just add them ==> (x + z) - y + let op_fn = if l_op { + // unchecked + RangeAdd::range_wrapping_add + } else { + // checked + as RangeAdd>::range_add + }; + if let Some(new) = op_fn(x, &z) { + MaybeCollapsed::Collapsed(Elem::Expr(RangeExpr::new( + new, + expr.op, + y.clone(), + ))) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + } + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + 
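                // Editor's note (not in the original patch): the two arms just above
                // re-associate the two concrete operands into a single constant, picking
                // Add or Sub based on how `y` and `z` compare so the folded constant
                // never goes negative in the checked case; e.g. ((x + 5) - 3) becomes
                // x + 2, while ((x + 3) - 5) becomes x - 2.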
(RangeOp::Mul(l_op), RangeOp::Mul(r_op)) => { + // ((x * y) * z) + if l_op == r_op { + let op_fn = if l_op { + // unchecked + RangeMul::range_wrapping_mul + } else { + // checked + as RangeMul>::range_mul + }; + if let Some(new) = op_fn(x, &z) { + MaybeCollapsed::Collapsed(Elem::Expr(RangeExpr::new( + y.clone(), + op, + new, + ))) + } else if let Some(new) = op_fn(y, &z) { + MaybeCollapsed::Collapsed(Elem::Expr(RangeExpr::new( + x.clone(), + op, + new, + ))) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + (RangeOp::Add(wrapping), op) if EQ_OPS.contains(&op) => { + let const_op = if wrapping { + RangeSub::range_wrapping_sub + } else { + RangeSub::range_sub + }; + // ((x + y) == z) => (x == (z - y)) || (y == (z - x)) + // .. + // ((x + y) != z) => (x != (z - y)) || (y != (z - x)) + if let Some(new) = const_op(&z, y) { + MaybeCollapsed::Collapsed(Elem::Expr(RangeExpr::new(y.clone(), op, new))) + } else if let Some(new) = const_op(&z, x) { + MaybeCollapsed::Collapsed(Elem::Expr(RangeExpr::new(x.clone(), op, new))) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + (RangeOp::Sub(wrapping), op) if EQ_OPS.contains(&op) => { + let op_y = if wrapping { + as RangeAdd>::range_wrapping_add + } else { + as RangeAdd>::range_add + }; + let op_x = if wrapping { + as RangeSub>::range_wrapping_sub + } else { + as RangeSub>::range_sub + }; + // ((x - y) == z) => (x == (z + y)) || (y == (x - z)) + // ((x - y) != z) => (x != (z + y)) || (y != (x - z)) + if let Some(new) = op_y(y, &z) { + let new_expr = RangeExpr::new(x.clone(), op, new); + MaybeCollapsed::Collapsed(Elem::Expr(new_expr)) + } else if let Some(new) = op_x(x, &z) { + let new_expr = RangeExpr::new(y.clone(), op, new); + MaybeCollapsed::Collapsed(Elem::Expr(new_expr)) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + (RangeOp::Mul(wrapping), op) if EQ_OPS.contains(&op) => { + let div_op = if wrapping { + RangeDiv::range_wrapping_div + } else { + RangeDiv::range_div + }; + // ((x * y) == z) => (x == (z / y)) || (y == (z / x)) + // ((x * y) != z) => (x != (z / y)) || (y != (z / x)) + if let Some(new) = div_op(&z, x) { + let new_op = if ords.x_lt_zero() && FLIP_INEQ_OPS.contains(&op) { + op.inverse().unwrap() + } else { + op + }; + MaybeCollapsed::Collapsed(Elem::Expr(RangeExpr::new( + y.clone(), + new_op, + new, + ))) + } else if let Some(new) = div_op(&z, y) { + let new_op = if ords.y_lt_zero() && FLIP_INEQ_OPS.contains(&op) { + op.inverse().unwrap() + } else { + op + }; + MaybeCollapsed::Collapsed(Elem::Expr(RangeExpr::new( + x.clone(), + new_op, + new, + ))) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + (RangeOp::Div(wrapping), op) if EQ_OPS.contains(&op) => { + let mul_op = if wrapping { + as RangeMul>::range_wrapping_mul + } else { + as RangeMul>::range_mul + }; + let div_op = if wrapping { + as RangeDiv>::range_wrapping_div + } else { + as RangeDiv>::range_div + }; + + // ((x / y) == z) => (x == (z * y)) || (y == (x / z)) + // .. 
+ // ((x / y) != z) => (x != (z / y)) || (y != (x / z)) + if let Some(new) = mul_op(&z, y) { + let new_op = if ords.y_lt_zero() && FLIP_INEQ_OPS.contains(&op) { + op.inverse().unwrap() + } else { + op + }; + MaybeCollapsed::Collapsed(Elem::Expr(RangeExpr::new( + x.clone(), + new_op, + new, + ))) + } else if !FLIP_INEQ_OPS.contains(&op) { + if let Some(new) = div_op(x, &z) { + // y is the dynamic element + // we cant do flip ops here because we do (x / y) * y >= z * y which is a flip potentially + // but we dont know if y was negative. so we limit to just eq & neq + MaybeCollapsed::Collapsed(Elem::Expr(RangeExpr::new( + y.clone(), + op, + new, + ))) + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + (_, RangeOp::Eq) => { + // (x _ y) == z ==> (x _ y if z == true) + if z.range_eq(&Elem::from(Concrete::from(true)), arena) { + MaybeCollapsed::Collapsed(Elem::Expr(expr)) + } else if z.range_eq(&Elem::from(Concrete::from(false)), arena) { + // (!x && !y) + match ( + x.inverse_if_boolean(), + y.inverse_if_boolean(), + expr.op.logical_inverse(), + ) { + (Some(new_x), Some(new_y), Some(new_op)) => MaybeCollapsed::Collapsed( + Elem::Expr(RangeExpr::new(new_x, new_op, new_y)), + ), + _ => MaybeCollapsed::Not(Elem::Expr(expr), z), + } + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + (_, RangeOp::Neq) => { + // (x _ y) != z ==> (x _ y if z != false) + if z.range_eq(&Elem::from(Concrete::from(false)), arena) { + // != false is == true + MaybeCollapsed::Collapsed(Elem::Expr(expr)) + } else if z.range_eq(&Elem::from(Concrete::from(true)), arena) { + // != true is == false, to make it == true, inverse everything + match ( + x.inverse_if_boolean(), + y.inverse_if_boolean(), + expr.op.logical_inverse(), + ) { + (Some(new_x), Some(new_y), Some(new_op)) => MaybeCollapsed::Collapsed( + Elem::Expr(RangeExpr::new(new_x, new_op, new_y)), + ), + _ => MaybeCollapsed::Not(Elem::Expr(expr), z), + } + } else { + MaybeCollapsed::Not(Elem::Expr(expr), z) + } + } + _ => MaybeCollapsed::Not(Elem::Expr(expr), z), + } + } + (l @ Elem::Concrete(_), r @ Elem::Expr(_)) => { + if op.commutative() { + match collapse(r, op, l, arena) { + MaybeCollapsed::Collapsed(inner) => MaybeCollapsed::Collapsed(inner.clone()), + MaybeCollapsed::Not(r, l) => MaybeCollapsed::Not(l, r), + MaybeCollapsed::Concretes(r, l) => MaybeCollapsed::Concretes(l, r), + } + } else if let Some(inv) = op.non_commutative_logical_inverse() { + match collapse(r, inv, l, arena) { + MaybeCollapsed::Collapsed(inner) => MaybeCollapsed::Collapsed(inner.clone()), + MaybeCollapsed::Not(r, l) => MaybeCollapsed::Not(l, r), + MaybeCollapsed::Concretes(r, l) => MaybeCollapsed::Concretes(l, r), + } + } else { + MaybeCollapsed::Not(l, r) + } + } + (le @ Elem::Reference(_), c @ Elem::Concrete(_)) => { + let zero = Elem::from(Concrete::from(U256::zero())); + let one = Elem::from(Concrete::from(U256::one())); + match op { + RangeOp::Sub(_) | RangeOp::Add(_) => { + if matches!(c.range_ord(&zero, arena), Some(std::cmp::Ordering::Equal)) { + MaybeCollapsed::Collapsed(le.clone()) + } else { + MaybeCollapsed::Not(le, c) + } + } + RangeOp::Mul(_) | RangeOp::Div(_) => { + if matches!(c.range_ord(&one, arena), Some(std::cmp::Ordering::Equal)) { + MaybeCollapsed::Collapsed(le.clone()) + } else { + MaybeCollapsed::Not(le, c) + } + } + _ => MaybeCollapsed::Not(le, c), + } + } + (Elem::Null, real) => match op { + RangeOp::Max | RangeOp::Min => MaybeCollapsed::Collapsed(real.clone()), + RangeOp::Not => match 
real { + Elem::Concrete(RangeConcrete { + val: Concrete::Bool(c), + loc, + }) => MaybeCollapsed::Collapsed(Elem::Concrete(RangeConcrete::new( + Concrete::from(!c), + loc, + ))), + _ => MaybeCollapsed::Not(Elem::Null, real), + }, + _ => MaybeCollapsed::Not(Elem::Null, real), + }, + (real, Elem::Null) => match op { + RangeOp::Max | RangeOp::Min => MaybeCollapsed::Collapsed(real.clone()), + RangeOp::Not => match real { + Elem::Concrete(RangeConcrete { + val: Concrete::Bool(c), + loc, + }) => MaybeCollapsed::Collapsed(Elem::Concrete(RangeConcrete::new( + Concrete::from(!c), + loc, + ))), + _ => MaybeCollapsed::Not(real, Elem::Null), + }, + _ => MaybeCollapsed::Not(real, Elem::Null), + }, + (l, r) => return MaybeCollapsed::Not(l, r), + }; + + match res { + MaybeCollapsed::Collapsed(Elem::Expr(e)) => collapse(*e.lhs, e.op, *e.rhs, arena), + other => other, + } +} diff --git a/crates/graph/src/range/elem/expr/mod.rs b/crates/graph/src/range/elem/expr/mod.rs new file mode 100644 index 00000000..f69aaeb3 --- /dev/null +++ b/crates/graph/src/range/elem/expr/mod.rs @@ -0,0 +1,728 @@ +mod collapse; +pub use collapse::*; + +mod simplify; + +use crate::{ + nodes::{Concrete, ContextVarNode}, + range::{ + elem::{Elem, MinMaxed, RangeArenaLike, RangeConcrete, RangeElem, RangeOp}, + exec_traits::*, + }, + GraphBackend, GraphError, +}; +use std::hash::Hash; +use std::hash::Hasher; + +use ethers_core::types::U256; +use shared::{NodeIdx, RangeArena}; + +/// A range expression composed of other range [`Elem`] +#[derive(Clone, Debug, Ord, PartialOrd)] +pub struct RangeExpr { + pub maximized: Option>, + pub minimized: Option>, + pub flattened_min: Option>>, + pub flattened_max: Option>>, + pub lhs: Box>, + pub op: RangeOp, + pub rhs: Box>, +} + +impl std::fmt::Display for RangeExpr { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self.op { + RangeOp::Min | RangeOp::Max => { + write!(f, "{}{{{}, {}}}", self.op.to_string(), self.lhs, self.rhs) + } + RangeOp::Cast => match &*self.rhs { + Elem::Concrete(RangeConcrete { val, .. }) => { + write!( + f, + "{}({}, {})", + self.op.to_string(), + self.lhs, + val.as_builtin().basic_as_string() + ) + } + _ => write!(f, "{}({}, {})", self.op.to_string(), self.lhs, self.rhs), + }, + RangeOp::BitNot => { + write!(f, "~{}", self.lhs) + } + _ => write!(f, "({} {} {})", self.lhs, self.op.to_string(), self.rhs), + } + } +} + +impl PartialEq for RangeExpr { + fn eq(&self, other: &Self) -> bool { + self.op == other.op && self.lhs == other.lhs && self.rhs == other.rhs + } +} +impl Eq for RangeExpr {} + +impl Hash for RangeExpr { + fn hash(&self, state: &mut H) { + (*self.lhs).hash(state); + self.op.hash(state); + (*self.rhs).hash(state); + } +} + +impl RangeExpr { + pub fn is_noop(&self) -> (bool, usize) { + let one = Elem::from(Concrete::from(U256::one())); + let zero = Elem::from(Concrete::from(U256::zero())); + match self.op { + RangeOp::Mul(_) | RangeOp::Div(_) => { + if *self.lhs == one { + (true, 0) + } else if *self.rhs == one { + (true, 1) + } else { + (false, 0) + } + } + RangeOp::Add(_) | RangeOp::Sub(_) => { + if *self.lhs == zero { + (true, 0) + } else if *self.rhs == zero { + (true, 1) + } else { + (false, 0) + } + } + _ => (false, 0), + } + } + + pub fn inverse_if_boolean(&self) -> Option { + if EQ_OPS.contains(&self.op) { + if SINGLETON_EQ_OPS.contains(&self.op) { + let mut new_self = self.clone(); + new_self.uncache(); + new_self.op = new_self.op.logical_inverse()?; + Some(new_self) + } else { + // non-singleton, i.e. 
AND or OR + let mut new_self = self.clone(); + new_self.uncache(); + new_self.op = new_self.op.inverse()?; + if let Some(new_lhs) = new_self.inverse_if_boolean() { + *new_self.lhs = Elem::Expr(new_lhs); + } + if let Some(new_rhs) = new_self.inverse_if_boolean() { + *new_self.rhs = Elem::Expr(new_rhs); + } + Some(new_self) + } + } else { + None + } + } + + pub fn recurse_dearenaize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Elem { + Elem::Expr(Self::new( + self.lhs.recurse_dearenaize(analyzer, arena).clone(), + self.op, + self.rhs.recurse_dearenaize(analyzer, arena).clone(), + )) + } + + pub fn arena_idx(&self, arena: &RangeArena>) -> Option { + let expr = Elem::Expr(RangeExpr::new( + Elem::Arena(arena.idx(&self.lhs)?), + self.op, + Elem::Arena(arena.idx(&self.rhs)?), + )); + arena.idx(&expr) + } + + pub fn arenaized_cache( + &self, + max: bool, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Option> { + if let Some(idx) = self.arena_idx(arena) { + let Some(ref mut t) = arena.ranges.get_mut(idx) else { + return None; + }; + let Elem::Expr(ref mut arenaized) = *t else { + return None; + }; + return if max { + arenaized.maximized.clone() + } else { + arenaized.minimized.clone() + }; + } + None + } + + pub fn arenaized_flat_cache( + &self, + max: bool, + arena: &mut RangeArena>, + ) -> Option>> { + if let Some(idx) = self.arena_idx(arena) { + let Some(ref mut t) = arena.ranges.get_mut(idx) else { + return None; + }; + let Elem::Expr(ref mut arenaized) = *t else { + return None; + }; + return if max { + arenaized.flattened_max.clone() + } else { + arenaized.flattened_min.clone() + }; + } + None + } + + pub fn set_arenaized_flattened( + &self, + max: bool, + elem: Elem, + arena: &mut RangeArena>, + ) { + if let Some(idx) = self.arena_idx(arena) { + if let Some(t) = arena.ranges.get_mut(idx) { + let Elem::Expr(arenaized) = &mut *t else { + return; + }; + + if max { + arenaized.flattened_max = Some(Box::new(elem)); + } else { + arenaized.flattened_min = Some(Box::new(elem)); + } + } + } + } +} + +impl RangeExpr { + /// Creates a new range expression given a left hand side range [`Elem`], a [`RangeOp`], and a a right hand side range [`Elem`]. 
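+ /// e.g. `RangeExpr::new(lhs, RangeOp::Add(false), rhs)` builds an uncached expression representing `lhs + rhs`; every min/max/flatten cache field starts out as `None`.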
+ pub fn new(lhs: Elem, op: RangeOp, rhs: Elem) -> RangeExpr { + RangeExpr { + maximized: None, + minimized: None, + flattened_max: None, + flattened_min: None, + lhs: Box::new(lhs), + op, + rhs: Box::new(rhs), + } + } + + pub fn contains_node(&self, node_idx: NodeIdx) -> bool { + self.lhs.contains_node(node_idx) || self.rhs.contains_node(node_idx) + } +} + +impl RangeElem for RangeExpr { + type GraphError = GraphError; + + // #[tracing::instrument(level = "trace", skip_all)] + fn arenaize( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + if self.arena_idx(arena).is_none() { + let lhs = std::mem::take(&mut self.lhs); + let rhs = std::mem::take(&mut self.rhs); + self.lhs = Box::new(Elem::Arena(arena.idx_or_upsert(*lhs, analyzer))); + self.rhs = Box::new(Elem::Arena(arena.idx_or_upsert(*rhs, analyzer))); + let _ = arena.idx_or_upsert(Elem::Expr(self.clone()), analyzer); + } + Ok(()) + } + + fn range_eq(&self, _other: &Self, _arena: &mut RangeArena>) -> bool { + false + } + + // #[tracing::instrument(level = "trace", skip_all)] + fn flatten( + &self, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + match (maximize, &self.flattened_min, &self.flattened_max) { + (true, _, Some(flat)) | (false, Some(flat), _) => { + return Ok(*flat.clone()); + } + _ => {} + } + + if let Some(arenaized) = self.arenaized_flat_cache(maximize, arena) { + return Ok(*arenaized); + } + + Ok(Elem::Expr(RangeExpr::new( + self.lhs.flatten(maximize, analyzer, arena)?, + self.op, + self.rhs.flatten(maximize, analyzer, arena)?, + ))) + } + + fn is_flatten_cached( + &self, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> bool { + self.flattened_min.is_some() && self.flattened_max.is_some() || { + if let Some(idx) = self.arena_idx(arena) { + if let Some(t) = arena.ranges.get(idx) { + if let Elem::Expr(ref arenaized) = *t { + arenaized.flattened_min.is_some() && arenaized.flattened_max.is_some() + } else { + false + } + } else { + false + } + } else { + false + } + } + } + + fn is_min_max_cached( + &self, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> (bool, bool) { + let (arena_cached_min, arena_cached_max) = { + if let Some(idx) = self.arena_idx(arena) { + if let Some(t) = arena.ranges.get(idx) { + if let Elem::Expr(ref arenaized) = *t { + (arenaized.minimized.is_some(), arenaized.maximized.is_some()) + } else { + (false, false) + } + } else { + (false, false) + } + } else { + (false, false) + } + }; + ( + self.minimized.is_some() || arena_cached_min, + self.maximized.is_some() || arena_cached_max, + ) + } + + fn range_ord( + &self, + _other: &Self, + _arena: &mut RangeArena>, + ) -> Option { + todo!() + } + + fn dependent_on( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Vec { + let mut deps = self.lhs.dependent_on(analyzer, arena); + deps.extend(self.rhs.dependent_on(analyzer, arena)); + deps + } + + // #[tracing::instrument(level = "trace", skip_all)] + fn recursive_dependent_on( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + let mut deps = self.lhs.recursive_dependent_on(analyzer, arena)?; + deps.extend(self.rhs.recursive_dependent_on(analyzer, arena)?); + Ok(deps) + } + + // #[tracing::instrument(level = "trace", skip_all)] + fn has_cycle( + &self, + seen: &mut Vec, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + let lhs_has_cycle = self.lhs.has_cycle(seen, 
analyzer, arena)?; + let rhs_has_cycle = self.rhs.has_cycle(seen, analyzer, arena)?; + Ok(lhs_has_cycle || rhs_has_cycle) + } + + fn depends_on( + &self, + var: ContextVarNode, + seen: &mut Vec, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + let lhs_deps_on = self.lhs.depends_on(var, seen, analyzer, arena)?; + let rhs_deps_on = self.rhs.depends_on(var, seen, analyzer, arena)?; + Ok(lhs_deps_on || rhs_deps_on) + } + + // #[tracing::instrument(level = "trace", skip_all)] + fn filter_recursion( + &mut self, + node_idx: NodeIdx, + new_idx: NodeIdx, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) { + let _ = self.arenaize(analyzer, arena); + self.lhs + .filter_recursion(node_idx, new_idx, analyzer, arena); + self.rhs + .filter_recursion(node_idx, new_idx, analyzer, arena); + } + + // #[tracing::instrument(level = "trace", skip_all)] + fn maximize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + if let Some(MinMaxed::Maximized(cached)) = self.maximized.clone() { + Ok(*cached) + } else if let Some(MinMaxed::Maximized(cached)) = + self.arenaized_cache(true, analyzer, arena) + { + Ok(*cached) + } else if self.op == RangeOp::SetIndices { + self.simplify_exec_op(true, analyzer, arena) + } else { + self.exec_op(true, analyzer, arena) + } + } + + // #[tracing::instrument(level = "trace", skip_all)] + fn minimize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + if let Some(MinMaxed::Minimized(cached)) = self.minimized.clone() { + Ok(*cached) + } else if let Some(MinMaxed::Minimized(cached)) = + self.arenaized_cache(false, analyzer, arena) + { + Ok(*cached) + } else if self.op == RangeOp::SetIndices { + self.simplify_exec_op(false, analyzer, arena) + } else { + self.exec_op(false, analyzer, arena) + } + } + + // #[tracing::instrument(level = "trace", skip_all)] + fn simplify_maximize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + if let Some(simp_max) = &self.flattened_max { + return Ok(*simp_max.clone()); + } + + if let Some(arenaized) = self.arenaized_flat_cache(true, arena) { + return Ok(*arenaized); + } + + let l = self.lhs.simplify_maximize(analyzer, arena)?; + let r = self.rhs.simplify_maximize(analyzer, arena)?; + let collapsed = collapse(l, self.op, r, arena); + let res = match collapsed { + MaybeCollapsed::Concretes(l, r) => { + RangeExpr::new(l, self.op, r).exec_op(true, analyzer, arena) + } + MaybeCollapsed::Collapsed(collapsed) => Ok(collapsed), + MaybeCollapsed::Not(l, r) => { + let res = RangeExpr::new(l, self.op, r).simplify_exec_op(true, analyzer, arena)?; + match res { + Elem::Expr(expr) => match collapse(*expr.lhs, expr.op, *expr.rhs, arena) { + MaybeCollapsed::Concretes(l, r) => { + RangeExpr::new(l, expr.op, r).exec_op(true, analyzer, arena) + } + MaybeCollapsed::Collapsed(collapsed) => Ok(collapsed), + MaybeCollapsed::Not(l, r) => Ok(Elem::Expr(RangeExpr::new(l, expr.op, r))), + }, + other => Ok(other), + } + } + }?; + self.set_arenaized_flattened(true, res.clone(), arena); + Ok(res) + } + + // #[tracing::instrument(level = "trace", skip_all)] + fn simplify_minimize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + if let Some(simp_min) = &self.flattened_min { + return Ok(*simp_min.clone()); + } + + if let Some(arenaized) = self.arenaized_flat_cache(false, arena) { + return Ok(*arenaized); + } + + let l = 
self.lhs.simplify_minimize(analyzer, arena)?; + self.lhs.set_arenaized_flattened(false, &l, arena); + let r = self.rhs.simplify_minimize(analyzer, arena)?; + self.rhs.set_arenaized_flattened(false, &r, arena); + + let collapsed = collapse(l, self.op, r, arena); + let res = match collapsed { + MaybeCollapsed::Concretes(l, r) => { + RangeExpr::new(l, self.op, r).exec_op(false, analyzer, arena) + } + MaybeCollapsed::Collapsed(collapsed) => Ok(collapsed), + MaybeCollapsed::Not(l, r) => { + let res = RangeExpr::new(l, self.op, r).simplify_exec_op(false, analyzer, arena)?; + match res { + Elem::Expr(expr) => match collapse(*expr.lhs, expr.op, *expr.rhs, arena) { + MaybeCollapsed::Concretes(l, r) => { + return RangeExpr::new(l, self.op, r).exec_op(false, analyzer, arena) + } + MaybeCollapsed::Collapsed(collapsed) => return Ok(collapsed), + MaybeCollapsed::Not(l, r) => Ok(Elem::Expr(RangeExpr::new(l, expr.op, r))), + }, + other => Ok(other), + } + } + }?; + + self.set_arenaized_flattened(false, res.clone(), arena); + Ok(res) + } + + // #[tracing::instrument(level = "trace", skip_all)] + fn cache_flatten( + &mut self, + g: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + self.arenaize(g, arena)?; + + fn simp_minimize( + this: &mut Elem, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + let Elem::Expr(this) = this else { + this.cache_flatten(analyzer, arena)?; + if let Some(t) = this.arenaized_flattened(false, analyzer, arena) { + return Ok(*t); + } else { + return Ok(this.clone()); + } + }; + + if let Some(simp_min) = &this.flattened_min { + return Ok(*simp_min.clone()); + } + + if let Some(arenaized) = this.arenaized_flat_cache(false, arena) { + return Ok(*arenaized); + } + + let l = simp_minimize(&mut this.lhs, analyzer, arena)?; + let r = simp_minimize(&mut this.rhs, analyzer, arena)?; + let collapsed = collapse(l, this.op, r, arena); + let res = match collapsed { + MaybeCollapsed::Concretes(l, r) => { + RangeExpr::new(l, this.op, r).exec_op(false, analyzer, arena) + } + MaybeCollapsed::Collapsed(collapsed) => Ok(collapsed), + MaybeCollapsed::Not(l, r) => { + let res = + RangeExpr::new(l, this.op, r).simplify_exec_op(false, analyzer, arena)?; + + let idx = arena.idx_or_upsert(res.clone(), analyzer); + match res { + Elem::Expr(expr) => match collapse(*expr.lhs, expr.op, *expr.rhs, arena) { + MaybeCollapsed::Concretes(l, r) => { + let exec_res = RangeExpr::new(l, expr.op, r) + .exec_op(false, analyzer, arena)?; + Elem::Arena(idx).set_arenaized_flattened(false, &exec_res, arena); + Ok(exec_res) + } + MaybeCollapsed::Collapsed(collapsed) => { + Elem::Arena(idx).set_arenaized_flattened(false, &collapsed, arena); + Ok(collapsed) + } + MaybeCollapsed::Not(l, r) => { + Ok(Elem::Expr(RangeExpr::new(l, expr.op, r))) + } + }, + other => Ok(other), + } + } + }?; + + this.set_arenaized_flattened(false, res.clone(), arena); + Ok(res) + } + + fn simp_maximize( + this: &mut Elem, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + let Elem::Expr(this) = this else { + this.cache_flatten(analyzer, arena)?; + if let Some(t) = this.arenaized_flattened(true, analyzer, arena) { + return Ok(*t); + } else { + return Ok(this.clone()); + } + }; + + if let Some(simp_min) = &this.flattened_max { + return Ok(*simp_min.clone()); + } + + if let Some(arenaized) = this.arenaized_flat_cache(false, arena) { + return Ok(*arenaized); + } + + let l = simp_maximize(&mut this.lhs, analyzer, arena)?; + let r = 
simp_maximize(&mut this.rhs, analyzer, arena)?; + let collapsed = collapse(l, this.op, r, arena); + let res = match collapsed { + MaybeCollapsed::Concretes(l, r) => { + RangeExpr::new(l, this.op, r).exec_op(true, analyzer, arena) + } + MaybeCollapsed::Collapsed(collapsed) => Ok(collapsed), + MaybeCollapsed::Not(l, r) => { + let res = + RangeExpr::new(l, this.op, r).simplify_exec_op(true, analyzer, arena)?; + + let idx = arena.idx_or_upsert(res.clone(), analyzer); + match res { + Elem::Expr(expr) => match collapse(*expr.lhs, expr.op, *expr.rhs, arena) { + MaybeCollapsed::Concretes(l, r) => { + let exec_res = + RangeExpr::new(l, expr.op, r).exec_op(true, analyzer, arena)?; + Elem::Arena(idx).set_arenaized_flattened(true, &exec_res, arena); + Ok(exec_res) + } + MaybeCollapsed::Collapsed(collapsed) => { + Elem::Arena(idx).set_arenaized_flattened(true, &collapsed, arena); + Ok(collapsed) + } + MaybeCollapsed::Not(l, r) => { + Ok(Elem::Expr(RangeExpr::new(l, expr.op, r))) + } + }, + other => Ok(other), + } + } + }?; + + this.set_arenaized_flattened(false, res.clone(), arena); + Ok(res) + } + + if self.flattened_max.is_none() { + if let Some(idx) = self.arena_idx(arena) { + if let Some(t) = arena.ranges.get(idx) { + if let Elem::Expr(ref arenaized) = *t { + if arenaized.flattened_max.is_some() { + return Ok(()); + } + } + }; + } else { + self.arenaize(g, arena)?; + } + + self.lhs.cache_flatten(g, arena)?; + self.rhs.cache_flatten(g, arena)?; + + let mut flat_max = self.flatten(true, g, arena)?; + let simplified_flat_max = simp_maximize(&mut flat_max, g, arena)?; + simplified_flat_max.clone().arenaize(g, arena)?; + self.flattened_max = Some(Box::new(simplified_flat_max)); + } + + if self.flattened_min.is_none() { + if let Some(idx) = self.arena_idx(arena) { + if let Some(t) = arena.ranges.get(idx) { + if let Elem::Expr(ref arenaized) = *t { + if arenaized.flattened_min.is_some() { + return Ok(()); + } + } + }; + } else { + self.arenaize(g, arena)?; + } + + self.lhs.cache_flatten(g, arena)?; + self.rhs.cache_flatten(g, arena)?; + + let mut flat_min = self.flatten(false, g, arena)?; + let simplified_flat_min = simp_minimize(&mut flat_min, g, arena)?; + simplified_flat_min.clone().arenaize(g, arena)?; + self.flattened_min = Some(Box::new(simplified_flat_min)); + } + Ok(()) + } + + // #[tracing::instrument(level = "trace", skip_all)] + fn cache_maximize( + &mut self, + g: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + tracing::trace!("cache maximizing: {}", Elem::Expr(self.clone())); + self.arenaize(g, arena)?; + if self.maximized.is_none() { + self.lhs.cache_maximize(g, arena)?; + self.rhs.cache_maximize(g, arena)?; + self.cache_exec_op(true, g, arena)?; + } + Ok(()) + } + + // #[tracing::instrument(level = "trace", skip_all)] + fn cache_minimize( + &mut self, + g: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + tracing::trace!("cache minimizing: {}", Elem::Expr(self.clone())); + self.arenaize(g, arena)?; + if self.minimized.is_none() { + tracing::trace!("cache minimize lhs"); + self.lhs.cache_minimize(g, arena)?; + tracing::trace!("cache minimize rhs"); + self.rhs.cache_minimize(g, arena)?; + tracing::trace!("minimizing expr"); + self.cache_exec_op(false, g, arena)?; + } + Ok(()) + } + + fn uncache(&mut self) { + self.uncache_exec(); + } +} diff --git a/crates/graph/src/range/elem/expr/simplify/add.rs b/crates/graph/src/range/elem/expr/simplify/add.rs new file mode 100644 index 00000000..463d0885 --- /dev/null +++ 
b/crates/graph/src/range/elem/expr/simplify/add.rs @@ -0,0 +1,162 @@ +use crate::{ + nodes::Concrete, + range::{ + elem::expr::simplify::Ords, + elem::{Elem, RangeConcrete, RangeExpr, RangeOp}, + }, +}; + +use shared::RangeArena; + +pub fn add_ord_rules( + x: &Elem, + y: &Elem, + ord_op: RangeOp, + z: &Elem, + ords: Ords, + arena: &mut RangeArena>, +) -> Option> { + match ord_op { + RangeOp::Eq => { + if !ords.x_eq_z() { + return None; + } + // x + y == x + // ==> true iff y == 0, false otherwise + let res = if ords.y_eq_zero() { + Elem::from(true) + } else { + Elem::from(false) + }; + Some(res) + } + RangeOp::Neq => { + if !ords.x_eq_z() { + return None; + } + // x + y != x + // ==> true iff y != 0, false otherwise + let res = if ords.y_eq_zero() { + Elem::from(false) + } else { + Elem::from(true) + }; + Some(res) + } + RangeOp::Lt => { + // x + y < z + // ==> true if: + // x < z && y <= 0 + // ==> false if + // x >= z && y > 0 + if ords.x_lt_z() && ords.y_lte_zero() { + Some(Elem::from(true)) + } else if ords.x_gte_z() && ords.y_gt_zero() { + Some(Elem::from(false)) + } else { + None + } + } + RangeOp::Gt => { + // x + y > z + // ==> true if: + // x >= z && y > 0 || x > z && y >= 0 + let true_lhs = ords.x_gte_z() && ords.y_gt_zero(); + let true_rhs = ords.x_gt_z() && ords.y_gte_zero(); + // ==> false if + // x <= z && y < 0 + let false_cond = ords.x_lte_z() && ords.y_lt_zero(); + + if true_lhs || true_rhs { + Some(Elem::from(true)) + } else if false_cond { + Some(Elem::from(false)) + } else { + None + } + } + RangeOp::Lte => { + // x + y <= z + + // ==> true if: + // x <= z && y <= 0 + let true_cond = ords.x_lte_z() && ords.y_lte_zero(); + + // ==> false if: + // x > z && y >= 0 || x >= z && y > 0 + let false_lhs = ords.x_gt_z() && ords.y_gte_zero(); + let false_rhs = ords.x_gte_z() && ords.y_gt_zero(); + + if true_cond { + Some(Elem::from(true)) + } else if false_lhs || false_rhs { + Some(Elem::from(false)) + } else { + None + } + } + RangeOp::Gte => { + // x + y >= z + + // ==> true if: + // x >= z && y >= 0 + let true_cond = ords.x_gte_z() && ords.y_gte_zero(); + + // ==> false if: + // x < z && y <= 0 || x <= z && y < 0 + let false_lhs = ords.x_lt_z() && ords.y_lte_zero(); + let false_rhs = ords.x_lte_z() && ords.y_lt_zero(); + + if true_cond { + Some(Elem::from(true)) + } else if false_lhs || false_rhs { + Some(Elem::from(false)) + } else { + None + } + } + RangeOp::Max => { + // max{x + y, z} + // same as gt but return lhs or rhs instead + match add_ord_rules(x, y, RangeOp::Gt, z, ords, arena) { + Some(Elem::Concrete(RangeConcrete { + val: Concrete::Bool(b), + .. + })) => { + if b { + Some(Elem::Expr(RangeExpr::new( + x.clone(), + RangeOp::Add(false), + y.clone(), + ))) + } else { + Some(z.clone()) + } + } + _ => None, + } + } + RangeOp::Min => { + // min{x - y, z} + // same as lt but return lhs or rhs instead + match add_ord_rules(x, y, RangeOp::Lt, z, ords, arena) { + Some(Elem::Concrete(RangeConcrete { + val: Concrete::Bool(b), + .. 
+ })) => { + if b { + Some(Elem::Expr(RangeExpr::new( + x.clone(), + RangeOp::Add(false), + y.clone(), + ))) + } else { + Some(z.clone()) + } + } + _ => None, + } + } + _ => None, + } +} diff --git a/crates/graph/src/range/elem/expr/simplify/mod.rs b/crates/graph/src/range/elem/expr/simplify/mod.rs new file mode 100644 index 00000000..efd55aff --- /dev/null +++ b/crates/graph/src/range/elem/expr/simplify/mod.rs @@ -0,0 +1,55 @@ +mod add; +mod ords; +mod sub; + +pub use add::*; +pub use ords::*; +pub use sub::*; + +use crate::{ + nodes::Concrete, + range::elem::{Elem, RangeElem, RangeOp}, +}; + +use ethers_core::types::U256; +use shared::RangeArena; + +pub(crate) fn ident_rules( + l: &Elem, + exec_op: RangeOp, + r: &Elem, + arena: &mut RangeArena>, +) -> Option> { + let zero = Elem::from(Concrete::from(U256::zero())); + let one = Elem::from(Concrete::from(U256::one())); + match exec_op { + RangeOp::Add(_) | RangeOp::Sub(_) => { + let lhs_zero = matches!(l.range_ord(&zero, arena), Some(std::cmp::Ordering::Equal)); + let rhs_zero = matches!(r.range_ord(&zero, arena), Some(std::cmp::Ordering::Equal)); + match (lhs_zero, rhs_zero) { + (true, true) => Some(Elem::from(Concrete::from(U256::zero()))), + (true, false) => Some((*r).clone()), + (false, true) => Some((*l).clone()), + _ => None, + } + } + RangeOp::Mul(_) | RangeOp::Div(_) => { + let lhs_one = matches!(l.range_ord(&one, arena), Some(std::cmp::Ordering::Equal)); + let rhs_one = matches!(r.range_ord(&one, arena), Some(std::cmp::Ordering::Equal)); + match (lhs_one, rhs_one) { + (true, true) => Some(Elem::from(Concrete::from(U256::one()))), + (true, false) => Some((*r).clone()), + (false, true) => Some((*l).clone()), + _ => None, + } + } + RangeOp::Exp => { + if matches!(r.range_ord(&zero, arena), Some(std::cmp::Ordering::Equal)) { + Some(Elem::from(Concrete::from(U256::one()))) + } else { + None + } + } + _ => None, + } +} diff --git a/crates/graph/src/range/elem/expr/simplify/ords.rs b/crates/graph/src/range/elem/expr/simplify/ords.rs new file mode 100644 index 00000000..f140a5db --- /dev/null +++ b/crates/graph/src/range/elem/expr/simplify/ords.rs @@ -0,0 +1,122 @@ +use crate::{ + nodes::Concrete, + range::elem::{Elem, RangeElem}, +}; + +use ethers_core::types::U256; +use shared::RangeArena; + +pub struct Ords { + pub x_ord_z: Option, + pub y_ord_z: Option, + pub y_ord_zero: Option, + pub x_ord_zero: Option, + pub y_ord_one: Option, + pub x_ord_one: Option, +} + +impl Ords { + pub fn new( + x: &Elem, + y: &Elem, + z: &Elem, + arena: &mut RangeArena>, + ) -> Self { + let zero = Elem::from(Concrete::from(U256::zero())); + let one = Elem::from(Concrete::from(U256::one())); + Self { + x_ord_z: x.range_ord(z, arena), + y_ord_z: y.range_ord(z, arena), + y_ord_zero: y.range_ord(&zero, arena), + x_ord_zero: x.range_ord(&zero, arena), + y_ord_one: y.range_ord(&one, arena), + x_ord_one: x.range_ord(&one, arena), + } + } + + pub fn x_gte_z(&self) -> bool { + self.x_gt_z() || self.x_eq_z() + } + pub fn y_gte_z(&self) -> bool { + self.y_gt_z() || self.y_eq_z() + } + + pub fn x_lte_z(&self) -> bool { + self.x_lt_z() || self.x_eq_z() + } + pub fn y_lte_z(&self) -> bool { + self.y_lt_z() || self.y_eq_z() + } + + pub fn x_gt_z(&self) -> bool { + matches!(self.x_ord_z, Some(std::cmp::Ordering::Greater)) + } + + pub fn y_gt_z(&self) -> bool { + matches!(self.x_ord_z, Some(std::cmp::Ordering::Greater)) + } + + pub fn x_lt_z(&self) -> bool { + matches!(self.x_ord_z, Some(std::cmp::Ordering::Less)) + } + + pub fn y_lt_z(&self) -> bool { + 
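+ // note: like `y_gt_z` above, this inspects `x_ord_z`; the y-versus-z ordering itself is held in `y_ord_z`.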
matches!(self.x_ord_z, Some(std::cmp::Ordering::Less)) + } + + pub fn x_eq_z(&self) -> bool { + matches!(self.x_ord_z, Some(std::cmp::Ordering::Equal)) + } + + pub fn y_eq_z(&self) -> bool { + matches!(self.y_ord_z, Some(std::cmp::Ordering::Equal)) + } + + pub fn x_lt_zero(&self) -> bool { + matches!(self.x_ord_zero, Some(std::cmp::Ordering::Less)) + } + + pub fn y_lt_zero(&self) -> bool { + matches!(self.y_ord_zero, Some(std::cmp::Ordering::Less)) + } + + pub fn x_eq_zero(&self) -> bool { + matches!(self.x_ord_zero, Some(std::cmp::Ordering::Equal)) + } + + pub fn x_gt_zero(&self) -> bool { + matches!(self.x_ord_zero, Some(std::cmp::Ordering::Greater)) + } + + pub fn y_gt_zero(&self) -> bool { + matches!(self.y_ord_zero, Some(std::cmp::Ordering::Greater)) + } + + pub fn y_eq_zero(&self) -> bool { + matches!(self.y_ord_zero, Some(std::cmp::Ordering::Equal)) + } + + pub fn x_gte_zero(&self) -> bool { + self.x_gt_zero() || self.x_eq_zero() + } + + pub fn y_gte_zero(&self) -> bool { + self.y_gt_zero() || self.y_eq_zero() + } + + pub fn x_lte_zero(&self) -> bool { + self.x_lt_zero() || self.x_eq_zero() + } + + pub fn y_lte_zero(&self) -> bool { + self.y_lt_zero() || self.y_eq_zero() + } + + pub fn x_eq_one(&self) -> bool { + matches!(self.x_ord_one, Some(std::cmp::Ordering::Equal)) + } + + pub fn y_eq_one(&self) -> bool { + matches!(self.y_ord_one, Some(std::cmp::Ordering::Equal)) + } +} diff --git a/crates/graph/src/range/elem/expr/simplify/sub.rs b/crates/graph/src/range/elem/expr/simplify/sub.rs new file mode 100644 index 00000000..d99f2d85 --- /dev/null +++ b/crates/graph/src/range/elem/expr/simplify/sub.rs @@ -0,0 +1,177 @@ +use crate::{ + nodes::Concrete, + range::{ + elem::expr::simplify::Ords, + elem::{Elem, RangeConcrete, RangeExpr, RangeOp}, + }, +}; + +use shared::RangeArena; + +pub fn sub_ord_rules( + x: &Elem, + y: &Elem, + ord_op: RangeOp, + z: &Elem, + ords: Ords, + arena: &mut RangeArena>, +) -> Option> { + match ord_op { + RangeOp::Eq => { + if !ords.x_eq_z() { + return None; + } + // x - y == x + // ==> true iff y == 0, false otherwise + let res = if ords.y_eq_zero() { + Elem::from(true) + } else { + Elem::from(false) + }; + Some(res) + } + RangeOp::Neq => { + if !ords.x_eq_z() { + return None; + } + // x - y != x + // ==> true iff y != 0, false otherwise + let res = if ords.y_eq_zero() { + Elem::from(false) + } else { + Elem::from(true) + }; + Some(res) + } + RangeOp::Lt => { + // x - y < z + // ==> true if: + // x <= z && y > 0 + // ==> false if + // x == z && y < 0 + let x_lte_z = ords.x_eq_z() || ords.x_lt_z(); + if x_lte_z && ords.y_gt_zero() { + Some(Elem::from(true)) + } else if ords.x_eq_z() && ords.y_lt_zero() { + Some(Elem::from(false)) + } else { + None + } + } + RangeOp::Gt => { + // x - y > z + // ==> true if: + // x > z && y <= 0 || x >= z && y < 0 + // ==> false if + // x <= z && y > 0 + let true_lhs = ords.x_gt_z() && (ords.y_lt_zero() || ords.y_eq_zero()); + let true_rhs = (ords.x_gt_z() || ords.x_eq_z()) && ords.y_lt_zero(); + let x_lte_z = ords.x_eq_z() || ords.x_lt_z(); + + if true_lhs || true_rhs { + Some(Elem::from(true)) + } else if x_lte_z && ords.y_gt_zero() { + Some(Elem::from(false)) + } else { + None + } + } + RangeOp::Lte => { + // x - y <= z + + // ==> true if: + // x <= z && y >= 0 + let x_lte_z = ords.x_eq_z() || ords.x_lt_z(); + let y_gte_zero = ords.y_gt_zero() || ords.y_eq_zero(); + + // ==> false if: + // x > z && y <= 0 || x >= z && y < 0 + let x_gt_z = ords.x_gt_z(); + let y_lte_zero = ords.y_lt_zero() || ords.y_eq_zero(); + let lhs = 
x_gt_z && y_lte_zero; + + let x_gte_z = ords.x_gt_z() || ords.x_eq_z(); + let y_lt_zero = ords.y_lt_zero(); + let rhs = x_gte_z && y_lt_zero; + let false_cond = lhs || rhs; + + if x_lte_z && y_gte_zero { + Some(Elem::from(true)) + } else if false_cond { + Some(Elem::from(false)) + } else { + None + } + } + RangeOp::Gte => { + // x - y >= z + + // ==> true if: + // x >= z && y <= 0 + let x_gte_z = ords.x_eq_z() || ords.x_gt_z(); + let y_lte_zero = ords.y_lt_zero() || ords.y_eq_zero(); + + // ==> false if: + // x < z && y >= 0 || x <= z && y > 0 + let x_lt_z = ords.x_lt_z(); + let y_gte_zero = ords.y_gt_zero() || ords.y_eq_zero(); + let lhs = x_lt_z && y_gte_zero; + + let x_lte_z = ords.x_lt_z() || ords.x_eq_z(); + let y_gt_zero = ords.y_gt_zero(); + let rhs = x_lte_z && y_gt_zero; + let false_cond = lhs || rhs; + + if x_lte_z && y_gte_zero { + Some(Elem::from(true)) + } else if false_cond { + Some(Elem::from(false)) + } else { + None + } + } + RangeOp::Max => { + // max{x - y, z} + // same as gt but return lhs or rhs instead + match sub_ord_rules(x, y, RangeOp::Gt, z, ords, arena) { + Some(Elem::Concrete(RangeConcrete { + val: Concrete::Bool(b), + .. + })) => { + if b { + Some(Elem::Expr(RangeExpr::new( + x.clone(), + RangeOp::Sub(false), + y.clone(), + ))) + } else { + Some(z.clone()) + } + } + _ => None, + } + } + RangeOp::Min => { + // min{x - y, z} + // same as lt but return lhs or rhs instead + match sub_ord_rules(x, y, RangeOp::Lt, z, ords, arena) { + Some(Elem::Concrete(RangeConcrete { + val: Concrete::Bool(b), + .. + })) => { + if b { + Some(Elem::Expr(RangeExpr::new( + x.clone(), + RangeOp::Sub(false), + y.clone(), + ))) + } else { + Some(z.clone()) + } + } + _ => None, + } + } + _ => None, + } +} diff --git a/crates/graph/src/range/elem/map_or_array.rs b/crates/graph/src/range/elem/map_or_array.rs new file mode 100644 index 00000000..2f1f0e08 --- /dev/null +++ b/crates/graph/src/range/elem/map_or_array.rs @@ -0,0 +1,668 @@ +use crate::{ + nodes::{Builtin, Concrete, ContextVarNode}, + range::{ + elem::{Elem, MinMaxed, RangeConcrete, RangeElem}, + exec_traits::{RangeCast, RangeMemLen}, + }, + GraphBackend, GraphError, +}; + +use shared::{NodeIdx, RangeArena}; + +use ethers_core::types::{H256, U256}; +use solang_parser::pt::Loc; + +use std::collections::BTreeMap; +use std::hash::Hash; +use std::hash::Hasher; + +use super::rc_uint256; + +/// A concrete value for a range element +#[derive(Clone, Debug, Ord, PartialOrd)] +pub struct RangeDyn { + /// Cached minimized value + pub minimized: Option>, + /// Cached maximized value + pub maximized: Option>, + pub flattened_min: Option>>, + pub flattened_max: Option>>, + /// Length of the dynamic variable + pub len: Box>, + /// Values of the dynamic variable + pub val: BTreeMap, (Elem, usize)>, + /// An operations log + pub op_num: usize, + /// For recursion, sets whether to filter nulls + // pub filter_null: bool, + /// Sourcecode location + pub loc: Loc, +} + +impl PartialEq for RangeDyn { + fn eq(&self, other: &Self) -> bool { + self.len == other.len && self.val == other.val && self.op_num == other.op_num + } +} +impl Eq for RangeDyn {} + +impl Hash for RangeDyn { + fn hash(&self, state: &mut H) { + (*self.len).hash(state); + self.val.hash(state); + self.op_num.hash(state); + } +} + +impl RangeDyn { + pub fn new_w_op_nums(len: Elem, val: BTreeMap, (Elem, usize)>, loc: Loc) -> Self { + let op_num = val.iter().fold(0, |mut acc, (_k, (_v, i))| { + if i > &acc { + acc = *i; + acc + } else { + acc + } + }); + Self { + minimized: None, + 
maximized: None, + flattened_min: None, + flattened_max: None, + len: Box::new(len), + val, + op_num, + loc, + } + } + pub fn new(len: Elem, val: BTreeMap, Elem>, loc: Loc) -> Self { + let mut op_num = 0; + let val = val + .into_iter() + .map(|(k, v)| { + let res = (k, (v, op_num)); + op_num += 1; + res + }) + .collect(); + Self { + minimized: None, + maximized: None, + flattened_min: None, + flattened_max: None, + len: Box::new(len), + val, + op_num, + loc, + } + } + + pub fn new_for_indices(vals: Vec<(Elem, Elem)>, loc: Loc) -> Self { + let mut op_num = 0; + let val = vals + .into_iter() + .map(|(k, v)| { + let res = (k, (v, op_num)); + op_num += 1; + res + }) + .collect(); + Self { + minimized: None, + maximized: None, + flattened_min: None, + flattened_max: None, + len: Box::new(Elem::Null), + val, + op_num: op_num - 1, + loc, + } + } + + /// Set the length + pub fn set_len(&mut self, new_len: Elem) { + self.len = Box::new(new_len); + } + + /// Check if the node contains a reference to a node index + pub fn contains_node(&self, node_idx: NodeIdx) -> bool { + self.len.contains_node(node_idx) + // || self.val.iter().any(|(k, v)| k.contains_node(node_idx) || v.contains_node(node_idx)) + } +} + +impl RangeDyn { + pub fn as_sized_bytes(&self) -> Option> { + let len = self.range_get_length()?; + let uint_val = len.maybe_concrete()?.val.uint_val()?; + if uint_val <= 32.into() { + let v = vec![0u8; uint_val.as_usize()]; + let conc = Concrete::from(v) + .cast(Builtin::Bytes(uint_val.as_usize() as u8)) + .unwrap(); + let to_cast = RangeConcrete::new(conc, Loc::Implicit); + self.range_cast(&to_cast) + } else { + None + } + } + pub fn as_bytes( + &self, + analyzer: &impl GraphBackend, + maximize: bool, + arena: &mut RangeArena>, + ) -> Option> { + let len = if maximize { + let as_u256 = self + .len + .maximize(analyzer, arena) + .ok()? + .concrete()? + .into_u256()?; + if as_u256 > usize::MAX.into() { + usize::MAX + } else { + as_u256.as_usize() + } + } else { + let mut as_u256 = self + .len + .minimize(analyzer, arena) + .ok()? + .concrete()? + .into_u256()?; + if let Some(max_key) = self.evaled_max_key(analyzer, arena) { + if let Some(max_key) = max_key.into_u256() { + as_u256 = as_u256.max(max_key); + } + } + + if as_u256 > usize::MAX.into() { + usize::MAX + } else { + as_u256.as_usize() + } + }; + + Some( + self.val + .values() + .map(|v| v.0.as_bytes(analyzer, maximize, arena)) + .collect::>>>()? 
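+ // flatten the per-index byte vectors (iterated in key order) and truncate to the resolved length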
+ .into_iter() + .flatten() + .take(len) + .collect(), + ) + } + + pub fn evaled_max_key( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Option { + let mut evaled = self + .val + .keys() + .filter_map(|key| key.maximize(analyzer, arena).ok()) + .collect::>(); + evaled.sort_by(|a, b| a.range_ord(b, arena).unwrap_or(std::cmp::Ordering::Less)); + + evaled.iter().take(1).next()?.concrete() + } + + pub fn from_concrete(concrete: Concrete, loc: Loc) -> Option { + let (vals, len) = match concrete { + Concrete::Bytes(size, b) => ( + Some( + b.0.into_iter() + .take((size).into()) + .enumerate() + .map(|(i, b)| { + let mut h = H256::default(); + h.0[0] = b; + ( + rc_uint256(i as u128).into(), + RangeConcrete::new(Concrete::Bytes(1, h), Loc::Implicit).into(), + ) + }) + .collect::>(), + ), + Concrete::Uint(256, U256::from(size)), + ), + Concrete::DynBytes(b) => ( + Some( + b.iter() + .enumerate() + .map(|(i, by)| { + let mut h = H256::default(); + h.0[0] = *by; + ( + rc_uint256(i as u128).into(), + RangeConcrete::new(Concrete::Bytes(1, h), Loc::Implicit).into(), + ) + }) + .collect::>(), + ), + Concrete::Uint(256, U256::from(b.len())), + ), + Concrete::String(s) => ( + Some( + s.chars() + .enumerate() + .map(|(i, b): (usize, char)| { + let mut h = H256::default(); + h.0[0] = b as u8; + ( + rc_uint256(i as u128).into(), + RangeConcrete::new(Concrete::Bytes(1, h), Loc::Implicit).into(), + ) + }) + .collect::>(), + ), + Concrete::Uint(256, U256::from(s.len())), + ), + _ => (None, Concrete::Uint(256, 0.into())), + }; + + let mut s = Self::new_for_indices(vals?, loc); + s.len = Box::new(RangeConcrete::new(len, loc).into()); + Some(s) + } +} + +impl RangeElem for RangeDyn { + type GraphError = GraphError; + + fn arenaize( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + self.len.arenaize(analyzer, arena)?; + self.val = self + .val + .iter_mut() + .map(|(k, (v, op))| { + let mut new_k = k.clone(); + let mut new_v = v.clone(); + new_k.arenaize(analyzer, arena); + new_v.arenaize(analyzer, arena); + (new_k, (new_v, *op)) + }) + .collect(); + Ok(()) + } + + fn range_eq(&self, other: &Self, arena: &mut RangeArena>) -> bool { + matches!( + self.range_ord(other, arena), + Some(std::cmp::Ordering::Equal) + ) + } + + fn range_ord( + &self, + other: &Self, + arena: &mut RangeArena>, + ) -> Option { + match self.len.range_ord(&other.len, arena) { + Some(std::cmp::Ordering::Equal) => { + let mut eq = 0; + let mut self_lt = 0; + let mut self_gt = 0; + self.val.iter().zip(other.val.iter()).for_each( + |((self_key, self_val), (other_key, other_val))| { + if let Some(std::cmp::Ordering::Equal) = + self_key.clone().range_ord(other_key, arena) + { + match self_val.0.clone().range_ord(&other_val.0, arena) { + Some(std::cmp::Ordering::Equal) => eq += 1, + Some(std::cmp::Ordering::Less) => self_lt += 1, + Some(std::cmp::Ordering::Greater) => self_gt += 1, + _ => {} + } + } + }, + ); + + if self_lt == self.val.len() { + Some(std::cmp::Ordering::Less) + } else if eq == self.val.len() { + Some(std::cmp::Ordering::Equal) + } else if self_gt == self.val.len() { + Some(std::cmp::Ordering::Greater) + } else { + None + } + } + other => other, + } + } + + fn dependent_on( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Vec { + let mut deps: Vec = self.len.dependent_on(analyzer, arena); + deps.extend( + self.val + .iter() + .flat_map(|(_, val)| val.0.dependent_on(analyzer, arena)) + .collect::>(), + ); + deps + } + + fn 
recursive_dependent_on( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + let mut deps: Vec = self.len.recursive_dependent_on(analyzer, arena)?; + deps.extend( + self.val + .values() + .map(|val| val.0.recursive_dependent_on(analyzer, arena)) + .collect::>, _>>()? + .iter() + .flatten() + .collect::>(), + ); + deps.extend( + self.val + .keys() + .map(|key| key.recursive_dependent_on(analyzer, arena)) + .collect::>, _>>()? + .iter() + .flatten() + .collect::>(), + ); + Ok(deps) + } + + fn has_cycle( + &self, + seen: &mut Vec, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + let mut has_cycle = false; + has_cycle = has_cycle || self.len.has_cycle(seen, analyzer, arena)?; + self.val.iter().try_for_each(|(_, val)| { + has_cycle = has_cycle || val.0.has_cycle(seen, analyzer, arena)?; + Ok(()) + })?; + Ok(has_cycle) + } + + fn depends_on( + &self, + var: ContextVarNode, + seen: &mut Vec, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + let mut deps_on = false; + deps_on |= self.len.depends_on(var, seen, analyzer, arena)?; + self.val.iter().try_for_each(|(_, val)| { + deps_on |= val.0.depends_on(var, seen, analyzer, arena)?; + Ok(()) + })?; + Ok(deps_on) + } + + fn flatten( + &self, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + match (maximize, &self.flattened_min, &self.flattened_max) { + (true, _, Some(flat)) | (false, Some(flat), _) => return Ok(*flat.clone()), + _ => {} + } + Ok(Elem::ConcreteDyn(Self { + minimized: None, + maximized: None, + flattened_min: None, + flattened_max: None, + len: Box::new(self.len.flatten(maximize, analyzer, arena)?), + val: { + let mut map = BTreeMap::default(); + for (idx, val) in self.val.clone().into_iter() { + map.insert( + idx.flatten(maximize, analyzer, arena)?, + (val.0.flatten(maximize, analyzer, arena)?, val.1), + ); + } + map + }, + op_num: self.op_num, + loc: self.loc, + })) + } + + fn cache_flatten( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + if self.flattened_max.is_none() { + self.len.cache_flatten(analyzer, arena)?; + let mapping = std::mem::take(&mut self.val); + self.val = mapping + .into_iter() + .map(|(mut idx, mut val)| { + idx.cache_flatten(analyzer, arena).unwrap(); + val.0.cache_flatten(analyzer, arena).unwrap(); + (idx, val) + }) + .collect(); + let flat_max = self.flatten(true, analyzer, arena)?; + let simplified_flat_max = flat_max.simplify_maximize(analyzer, arena)?; + self.flattened_max = Some(Box::new(simplified_flat_max)); + } + if self.flattened_min.is_none() { + self.len.cache_flatten(analyzer, arena)?; + let mapping = std::mem::take(&mut self.val); + self.val = mapping + .into_iter() + .map(|(mut idx, mut val)| { + idx.cache_flatten(analyzer, arena).unwrap(); + val.0.cache_flatten(analyzer, arena).unwrap(); + (idx, val) + }) + .collect(); + let flat_min = self.flatten(false, analyzer, arena)?; + let simplified_flat_min = flat_min.simplify_minimize(analyzer, arena)?; + self.flattened_min = Some(Box::new(simplified_flat_min)); + } + Ok(()) + } + + fn is_flatten_cached( + &self, + _analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> bool { + self.flattened_min.is_some() && self.flattened_max.is_some() + } + + fn is_min_max_cached( + &self, + _analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> (bool, bool) { + (self.minimized.is_some(), self.maximized.is_some()) + } + + fn 
filter_recursion( + &mut self, + node_idx: NodeIdx, + new_idx: NodeIdx, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) { + self.len + .filter_recursion(node_idx, new_idx, analyzer, arena); + self.val = self + .val + .clone() + .into_iter() + .map(|(mut k, mut v)| { + k.filter_recursion(node_idx, new_idx, analyzer, arena); + v.0.filter_recursion(node_idx, new_idx, analyzer, arena); + (k, v) + }) + .collect(); + } + + fn maximize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + if let Some(MinMaxed::Maximized(cached)) = self.maximized.clone() { + return Ok(*cached); + } + + Ok(Elem::ConcreteDyn(Self::new_w_op_nums( + self.len.maximize(analyzer, arena)?, + { + let mut map: BTreeMap<_, (Elem, usize)> = BTreeMap::default(); + for (idx, val) in self.val.clone().into_iter() { + // We dont maximize the key so that any subsequent + // `get_index` can find potential values + let maximized = val.0.maximize(analyzer, arena)?; + map.insert(idx.simplify_maximize(analyzer, arena)?, (maximized, val.1)); + } + + map + }, + self.loc, + ))) + } + + fn minimize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + if let Some(MinMaxed::Minimized(cached)) = self.minimized.clone() { + return Ok(*cached); + } + + Ok(Elem::ConcreteDyn(Self::new_w_op_nums( + self.len.minimize(analyzer, arena)?, + { + let mut map: BTreeMap<_, (Elem, usize)> = BTreeMap::default(); + for (idx, val) in self.val.clone().into_iter() { + // We dont minimize the key so that any subsequent + // `get_index` can find potential values + let minimized = val.0.minimize(analyzer, arena)?; + map.insert(idx.simplify_minimize(analyzer, arena)?, (minimized, val.1)); + } + + map + }, + self.loc, + ))) + } + + fn simplify_maximize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + if let Some(max) = &self.flattened_max { + return Ok(*max.clone()); + } + Ok(Elem::ConcreteDyn(Self::new_w_op_nums( + self.len.simplify_maximize(analyzer, arena)?, + { + let mut map = BTreeMap::default(); + for (idx, val) in self.val.clone().into_iter() { + // We dont minimize the key so that any subsequent + // `get_index` can find potential values + let simplified = val.0.simplify_maximize(analyzer, arena)?; + map.insert(idx, (simplified, val.1)); + } + map + }, + self.loc, + ))) + } + fn simplify_minimize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + if let Some(min) = &self.flattened_min { + return Ok(*min.clone()); + } + + Ok(Elem::ConcreteDyn(Self::new_w_op_nums( + self.len.simplify_minimize(analyzer, arena)?, + { + let mut map = BTreeMap::default(); + for (idx, val) in self.val.clone().into_iter() { + // We dont minimize the key so that any subsequent + // `get_index` can find potential values + let simplified = val.0.simplify_minimize(analyzer, arena)?; + map.insert(idx, (simplified, val.1)); + } + map + }, + self.loc, + ))) + } + + fn cache_maximize( + &mut self, + g: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + if self.maximized.is_none() { + self.len.cache_maximize(g, arena)?; + let mapping = std::mem::take(&mut self.val); + self.val = mapping + .into_iter() + .map(|(mut idx, mut val)| { + idx.cache_maximize(g, arena).unwrap(); + val.0.cache_maximize(g, arena).unwrap(); + (idx, val) + }) + .collect(); + self.maximized = Some(MinMaxed::Maximized(Box::new(self.maximize(g, arena)?))); + } + Ok(()) + } + + fn 
cache_minimize( + &mut self, + g: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + if self.minimized.is_none() { + self.len.cache_minimize(g, arena)?; + let mapping = std::mem::take(&mut self.val); + self.val = mapping + .into_iter() + .map(|(mut idx, mut val)| { + idx.cache_minimize(g, arena).unwrap(); + val.0.cache_minimize(g, arena).unwrap(); + (idx, val) + }) + .collect(); + self.minimized = Some(MinMaxed::Minimized(Box::new(self.minimize(g, arena)?))); + } + Ok(()) + } + + fn uncache(&mut self) { + self.minimized = None; + self.maximized = None; + } +} diff --git a/shared/src/range/elem.rs b/crates/graph/src/range/elem/mod.rs similarity index 50% rename from shared/src/range/elem.rs rename to crates/graph/src/range/elem/mod.rs index 5b8d8aaf..dd1c7401 100644 --- a/shared/src/range/elem.rs +++ b/crates/graph/src/range/elem/mod.rs @@ -1,11 +1,38 @@ -use crate::analyzer::GraphError; -use crate::analyzer::GraphLike; -use crate::context::ContextVarNode; -use crate::range::elem_ty::Elem; -use crate::range::elem_ty::RangeExpr; +mod concrete; +mod elem_enum; +mod elem_trait; +mod expr; +mod map_or_array; +mod reference; -use crate::NodeIdx; -use std::collections::BTreeMap; +pub use concrete::*; +pub use elem_enum::*; +pub use elem_trait::*; +pub use expr::*; +pub use map_or_array::*; +pub use reference::*; + +#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] +pub enum MinMaxed { + Minimized(Box>), + Maximized(Box>), +} + +impl MinMaxed { + pub fn maxed(self) -> Elem { + match self { + Self::Maximized(t) => *t, + _ => panic!("MinMaxed was min not max"), + } + } + + pub fn mined(self) -> Elem { + match self { + Self::Minimized(t) => *t, + _ => panic!("MinMaxed was max not min"), + } + } +} /// An operation to be performed on a range element #[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] @@ -46,8 +73,6 @@ pub enum RangeOp { And, /// Logical OR Or, - /// Catch-all requirement statement - Where, /// Cast from one type to another Cast, /// Bitwise AND @@ -62,9 +87,70 @@ pub enum RangeOp { Exp, /// Concatenation Concat, + /// Memcopy + Memcopy, + /// Set memory indices of a memory object + SetIndices, + /// Gets an index of a memory object + GetIndex, + /// Set length of a memory object + SetLength, + /// Get Length of a memory object + GetLength, } impl RangeOp { + pub fn commutative(&self) -> bool { + use RangeOp::*; + match self { + Add(_i) => true, + Mul(_i) => true, + Sub(_i) => false, + Div(_i) => false, + Mod => false, + Exp => false, + Min => true, + Max => true, + + Eq => true, + Neq => true, + Lt => false, + Lte => false, + Gt => false, + Gte => false, + And => true, + Or => true, + Not => false, + + BitNot => false, + BitAnd => false, + BitXor => false, + BitOr => false, + Shl => false, + Shr => false, + + Cast => false, + + SetLength => false, + Memcopy => false, + GetLength => false, + SetIndices => false, + GetIndex => false, + Concat => false, + } + } + + pub fn non_commutative_logical_inverse(&self) -> Option { + use RangeOp::*; + match self { + Lt => Some(Gt), + Lte => Some(Gte), + Gt => Some(Lt), + Gte => Some(Lte), + _ => None, + } + } + /// Attempts to return the inverse range operation (e.g.: `RangeOp::Add => RangeOp::Sub`) pub fn inverse(self) -> Option { use RangeOp::*; @@ -77,6 +163,26 @@ impl RangeOp { Shr => Some(Shl), Eq => Some(Neq), Neq => Some(Eq), + Lt => Some(Gt), + Lte => Some(Gte), + Gt => Some(Lt), + Gte => Some(Lte), + _ => None, // e => panic!("tried to inverse unreversable op: {:?}", e), + } + } + + 
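+ // e.g. `Add(..)`/`Mul(..)` report `commutative() == true`, which is what lets `collapse` swap a concrete lhs with an expression rhs;
+ // for comparisons, `Lt.inverse()` is `Some(Gt)` while `Lt.logical_inverse()` (below) is `Some(Gte)`.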
/// Gets the logical inverse of a boolean operation + pub fn logical_inverse(self) -> Option { + use RangeOp::*; + match self { + Eq => Some(Neq), + Neq => Some(Eq), + Lt => Some(Gte), + Lte => Some(Gt), + Gt => Some(Lte), + Gte => Some(Lt), + Or => Some(And), + And => Some(Or), _ => None, // e => panic!("tried to inverse unreversable op: {:?}", e), } } @@ -119,57 +225,17 @@ impl ToString for RangeOp { Not => "!".to_string(), And => "&&".to_string(), Or => "||".to_string(), - Where => "where".to_string(), Cast => "cast".to_string(), BitAnd => "&".to_string(), BitOr => "|".to_string(), BitXor => "^".to_string(), BitNot => "~".to_string(), Concat => "concat".to_string(), + Memcopy => "memcopy".to_string(), + SetIndices => "set_indices".to_string(), + GetIndex => "get_index".to_string(), + GetLength => "get_length".to_string(), + SetLength => "set_length".to_string(), } } } - -pub trait RangeElem { - /// Tries to evaluate a range element down to a concrete or maximally simplified expression to its maximum value - fn maximize(&self, analyzer: &impl GraphLike) -> Result, GraphError>; - fn cache_maximize(&mut self, analyzer: &impl GraphLike) -> Result<(), GraphError>; - /// Tries to evaluate a range element down to a concrete or maximally simplified expression to its minimum value - fn minimize(&self, analyzer: &impl GraphLike) -> Result, GraphError>; - fn cache_minimize(&mut self, analyzer: &impl GraphLike) -> Result<(), GraphError>; - fn uncache(&mut self); - /// Tries to simplify to maximum(i.e.: leaves symbolic/dynamic values as they are) - fn simplify_maximize(&self, analyzer: &impl GraphLike) -> Result, GraphError>; - /// Tries to simplify to minimum (i.e.: leaves symbolic/dynamic values as they are) - fn simplify_minimize(&self, analyzer: &impl GraphLike) -> Result, GraphError>; - /// Checks if two range elements are equal - fn range_eq(&self, other: &Self) -> bool; - /// Tries to compare the ordering of two range elements - fn range_ord(&self, other: &Self) -> Option; - /// Constructs a range `Elem::Expr` given a lhs, rhs, and operation ([`RangeOp`]). - fn range_op(lhs: Elem, rhs: Elem, op: RangeOp) -> Elem - where - Self: Sized, - { - Elem::Expr(RangeExpr::new(lhs, op, rhs)) - } - /// Traverses the range expression and finds all nodes that are dynamically pointed to - /// and returns it in a vector. - fn dependent_on(&self) -> Vec; - /// Traverses the range expression and updates stale pointers from older versions - /// of a variable to a newer version. - /// - /// e.g.: `uint256 z = x + 100`, followed by `require(x < 100)`. Initially, - /// without the `require` statement, `z`'s max is `2**256 - 1`, but with - /// the introduction of the `require` statement, we do a little backtracking - /// and can update `z`'s max to be `200`. - fn update_deps(&mut self, mapping: &BTreeMap); - /// Attempts to replace range elements that form a cyclic dependency by replacing - /// it with a new node. Ideally no cyclic dependencies occur in ranges as of now - /// but in theory it can make sense. - /// - /// e.g.: take the basic expression `x + y`, in normal checked solidity math - /// both x and y have the requirement `var <= 2**256 - 1 - other_var`, forming a - /// cyclic dependency. 
- fn filter_recursion(&mut self, node_idx: NodeIdx, new_idx: NodeIdx); -} diff --git a/crates/graph/src/range/elem/reference.rs b/crates/graph/src/range/elem/reference.rs new file mode 100644 index 00000000..84af8ef5 --- /dev/null +++ b/crates/graph/src/range/elem/reference.rs @@ -0,0 +1,468 @@ +use crate::{ + nodes::{Concrete, ContextVarNode}, + range::{ + elem::{Elem, MinMaxed, RangeArenaLike, RangeConcrete, RangeElem}, + Range, + }, + GraphBackend, GraphError, TypeNode, VarType, +}; +use std::hash::Hash; +use std::hash::Hasher; + +use shared::{NodeIdx, RangeArena}; + +use solang_parser::pt::Loc; + +/// A dynamic range element value +#[derive(Clone, Debug, Ord, PartialOrd)] +pub struct Reference { + /// Index of the node that is referenced + pub idx: NodeIdx, + /// Cached minimized value + pub minimized: Option>, + /// Cached maximized value + pub maximized: Option>, + /// Cached minimized flatten value + pub flattened_min: Option>>, + /// Cached maximized flatten value + pub flattened_max: Option>>, +} + +impl Hash for Reference { + fn hash(&self, state: &mut H) { + self.idx.hash(state); + } +} + +impl PartialEq for Reference { + fn eq(&self, other: &Self) -> bool { + self.idx == other.idx + } +} +impl Eq for Reference {} + +impl Reference { + pub fn new(idx: NodeIdx) -> Self { + Self { + idx, + minimized: None, + maximized: None, + flattened_min: None, + flattened_max: None, + } + } +} + +impl RangeElem for Reference { + type GraphError = GraphError; + + fn arenaize( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + // let smol = Elem::Reference(Reference::new(self.idx)); + // if analyzer.range_arena_idx(&smol).is_none() { + let _ = arena.idx_or_upsert(Elem::Reference(self.clone()), analyzer); + // } + Ok(()) + } + + fn range_eq(&self, _other: &Self, _arena: &mut RangeArena>) -> bool { + false + } + + fn range_ord( + &self, + other: &Self, + _arena: &mut RangeArena>, + ) -> Option { + if self.idx == other.idx { + Some(std::cmp::Ordering::Equal) + } else { + None + } + } + + fn dependent_on( + &self, + _analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> Vec { + vec![self.idx.into()] + } + + fn recursive_dependent_on( + &self, + analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> Result, GraphError> { + let mut deps = ContextVarNode(self.idx.index()).dependent_on(analyzer, true)?; + deps.push(ContextVarNode(self.idx.index())); + Ok(deps) + } + + fn has_cycle( + &self, + seen: &mut Vec, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + let cvar = ContextVarNode::from(self.idx); + let mut has_cycle = false; + if seen.contains(&cvar) { + Ok(true) + } else { + seen.push(cvar); + if let Some(range) = cvar.ref_range(analyzer)? { + has_cycle = has_cycle || range.min.has_cycle(seen, analyzer, arena)?; + has_cycle = has_cycle || range.max.has_cycle(seen, analyzer, arena)?; + Ok(has_cycle) + } else { + Ok(false) + } + } + } + + fn depends_on( + &self, + var: ContextVarNode, + seen: &mut Vec, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + let cvar = ContextVarNode::from(self.idx); + if seen.contains(&cvar) { + return Ok(false); + } + + if cvar == var || cvar.name(analyzer)? == var.name(analyzer)? && self.idx >= var.0.into() { + Ok(true) + } else if let Some(range) = cvar.ref_range(analyzer)? 
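+ // otherwise recurse into this variable's own range bounds (min and max) to check for a transitive dependency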
{ + seen.push(cvar); + let mut deps_on = range.min.depends_on(var, seen, analyzer, arena)?; + deps_on |= range.max.depends_on(var, seen, analyzer, arena)?; + Ok(deps_on) + } else { + Ok(false) + } + } + + fn flatten( + &self, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + match (maximize, &self.flattened_min, &self.flattened_max) { + (true, _, Some(flat)) | (false, Some(flat), _) => { + return Ok(*flat.clone()); + } + _ => {} + } + + let cvar = ContextVarNode::from(self.idx); + if cvar.is_fundamental(analyzer)? { + return Ok(Elem::Reference(Reference::new( + cvar.global_first_version(analyzer).into(), + ))); + } + if maximize { + cvar.range_max(analyzer)? + .unwrap_or(Elem::Null) + .flatten(maximize, analyzer, arena) + } else { + let flattened = cvar + .range_min(analyzer)? + .unwrap_or(Elem::Null) + .flatten(maximize, analyzer, arena)?; + Ok(flattened) + } + } + + fn is_flatten_cached( + &self, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> bool { + self.flattened_min.is_some() && self.flattened_max.is_some() || { + if let Some(idx) = arena.idx(&Elem::Reference(Reference::new(self.idx))) { + if let Some(t) = arena.ranges.get(idx) { + if let Elem::Reference(ref arenaized) = *t { + arenaized.flattened_min.is_some() && arenaized.flattened_max.is_some() + } else { + false + } + } else { + false + } + } else { + false + } + } + } + + fn is_min_max_cached( + &self, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> (bool, bool) { + let (arena_cached_min, arena_cached_max) = { + if let Some(idx) = arena.idx(&Elem::Reference(Reference::new(self.idx))) { + if let Some(t) = arena.ranges.get(idx) { + if let Elem::Reference(ref arenaized) = *t { + (arenaized.minimized.is_some(), arenaized.maximized.is_some()) + } else { + (false, false) + } + } else { + (false, false) + } + } else { + (false, false) + } + }; + ( + self.minimized.is_some() || arena_cached_min, + self.maximized.is_some() || arena_cached_max, + ) + } + + fn cache_flatten( + &mut self, + g: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + self.arenaize(g, arena)?; + + if self.flattened_max.is_none() { + if let Some(idx) = arena.idx(&Elem::Reference(Reference::new(self.idx))) { + if let Some(t) = arena.ranges.get(idx) { + if let Elem::Reference(ref arenaized) = *t { + if arenaized.flattened_max.is_some() { + tracing::trace!("reference cache flatten hit"); + return Ok(()); + } + } + } + } + + let cvar = ContextVarNode::from(self.idx); + cvar.cache_flattened_range(g, arena)?; + let flat_max = self.flatten(true, g, arena)?; + let simplified_flat_max = flat_max.simplify_maximize(g, arena)?; + self.flattened_max = Some(Box::new(simplified_flat_max)); + } + if self.flattened_min.is_none() { + if let Some(idx) = arena.idx(&Elem::Reference(Reference::new(self.idx))) { + if let Some(t) = arena.ranges.get(idx) { + if let Elem::Reference(ref arenaized) = *t { + if arenaized.flattened_min.is_some() { + tracing::trace!("reference cache flatten hit"); + return Ok(()); + } + } + } + } + let cvar = ContextVarNode::from(self.idx); + cvar.cache_flattened_range(g, arena)?; + let flat_min = self.flatten(false, g, arena)?; + let simplified_flat_min = flat_min.simplify_minimize(g, arena)?; + self.flattened_min = Some(Box::new(simplified_flat_min)); + } + Ok(()) + } + + fn filter_recursion( + &mut self, + _: NodeIdx, + _: NodeIdx, + _analyzer: &mut impl GraphBackend, + _arena: &mut RangeArena>, + ) { + } + + fn maximize( + &self, 
+ analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + if let Some(MinMaxed::Maximized(cached)) = self.maximized.clone() { + return Ok(*cached); + } + + if let Some(idx) = arena.idx(&Elem::Reference(Reference::new(self.idx))) { + if let Some(t) = arena.ranges.get(idx) { + if let Elem::Reference(ref arenaized) = *t { + tracing::trace!("reference maximize cache hit"); + if let Some(MinMaxed::Maximized(cached)) = arenaized.maximized.clone() { + return Ok(*cached); + } + } + } + } + + let cvar = ContextVarNode::from(self.idx).underlying(analyzer)?; + match &cvar.ty { + VarType::User(TypeNode::Contract(_), maybe_range) + | VarType::User(TypeNode::Enum(_), maybe_range) + | VarType::User(TypeNode::Ty(_), maybe_range) + | VarType::BuiltIn(_, maybe_range) => { + if let Some(range) = maybe_range { + range.evaled_range_max(analyzer, arena) + } else { + Ok(Elem::Reference(self.clone())) + } + } + VarType::Concrete(concrete_node) => Ok(Elem::Concrete(RangeConcrete::new( + concrete_node.underlying(analyzer)?.clone(), + cvar.loc.unwrap_or(Loc::Implicit), + ))), + _e => Ok(Elem::Reference(self.clone())), + } + } + + fn minimize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + if let Some(MinMaxed::Minimized(cached)) = self.minimized.clone() { + return Ok(*cached); + } + + if let Some(idx) = arena.idx(&Elem::Reference(Reference::new(self.idx))) { + if let Some(t) = arena.ranges.get(idx) { + if let Elem::Reference(ref arenaized) = *t { + if let Some(MinMaxed::Minimized(cached)) = arenaized.minimized.clone() { + tracing::trace!("reference minimize cache hit"); + return Ok(*cached); + } + } + } + } + + let cvar = ContextVarNode::from(self.idx).underlying(analyzer)?; + match &cvar.ty { + VarType::User(TypeNode::Contract(_), maybe_range) + | VarType::User(TypeNode::Enum(_), maybe_range) + | VarType::User(TypeNode::Ty(_), maybe_range) + | VarType::BuiltIn(_, maybe_range) => { + if let Some(range) = maybe_range { + range.evaled_range_min(analyzer, arena) + } else { + Ok(Elem::Reference(self.clone())) + } + } + VarType::Concrete(concrete_node) => Ok(Elem::Concrete(RangeConcrete::new( + concrete_node.underlying(analyzer)?.clone(), + cvar.loc.unwrap_or(Loc::Implicit), + ))), + _e => Ok(Elem::Reference(self.clone())), + } + } + + fn simplify_maximize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + if let Some(simp_max) = &self.flattened_max { + return Ok(*simp_max.clone()); + } + + if let Some(idx) = arena.idx(&Elem::Reference(Reference::new(self.idx))) { + if let Some(t) = arena.ranges.get(idx) { + if let Elem::Reference(ref arenaized) = *t { + if arenaized.flattened_max.is_some() { + tracing::trace!("reference simplify maximize cache hit"); + return Ok(*arenaized.flattened_max.clone().unwrap()); + } + } + } + } + + let cvar = ContextVarNode::from(self.idx); + + let independent = cvar.is_fundamental(analyzer)?; + if independent { + Ok(Elem::Reference(Reference::new( + cvar.global_first_version(analyzer).into(), + ))) + } else { + self.flatten(true, analyzer, arena)? 
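+ // non-fundamental variable: flatten through its underlying range first, then simplify the result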
+ .simplify_maximize(analyzer, arena) + } + } + + fn simplify_minimize( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + let cvar = ContextVarNode::from(self.idx); + if let Some(simp_min) = &self.flattened_min { + return Ok(*simp_min.clone()); + } + + if let Some(idx) = arena.idx(&Elem::Reference(Reference::new(self.idx))) { + if let Some(t) = arena.ranges.get(idx) { + if let Elem::Reference(ref arenaized) = *t { + if arenaized.flattened_min.is_some() { + tracing::trace!("reference simplify minimize cache hit"); + return Ok(*arenaized.flattened_min.clone().unwrap()); + } + } + } + } + + if cvar.is_fundamental(analyzer)? { + Ok(Elem::Reference(Reference::new( + cvar.global_first_version(analyzer).into(), + ))) + } else { + self.flatten(false, analyzer, arena)? + .simplify_minimize(analyzer, arena) + } + } + + fn cache_maximize( + &mut self, + g: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + self.arenaize(g, arena)?; + if self.maximized.is_none() { + let cvar = ContextVarNode::from(self.idx); + cvar.cache_eval_range(g, arena)?; + let max = self.maximize(g, arena)?; + Elem::Reference(Reference::new(self.idx)).set_arenaized_cache(true, &max, arena); + self.maximized = Some(MinMaxed::Maximized(Box::new(max))); + } + Ok(()) + } + + fn cache_minimize( + &mut self, + g: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + self.arenaize(g, arena)?; + if self.minimized.is_none() { + let cvar = ContextVarNode::from(self.idx); + cvar.cache_eval_range(g, arena)?; + let min = self.minimize(g, arena)?; + Elem::Reference(Reference::new(self.idx)).set_arenaized_cache(false, &min, arena); + self.minimized = Some(MinMaxed::Minimized(Box::new(min))); + } + + Ok(()) + } + + fn uncache(&mut self) { + self.minimized = None; + self.maximized = None; + self.flattened_min = None; + self.flattened_max = None; + } +} diff --git a/crates/graph/src/range/exec/bitwise.rs b/crates/graph/src/range/exec/bitwise.rs new file mode 100644 index 00000000..087f8165 --- /dev/null +++ b/crates/graph/src/range/exec/bitwise.rs @@ -0,0 +1,816 @@ +use crate::nodes::{Builtin, Concrete}; +use crate::range::{elem::*, exec_traits::*}; +use crate::GraphBackend; + +use shared::RangeArena; + +use ethers_core::types::{H256, I256, U256}; + +impl RangeBitwise for RangeConcrete { + fn range_bit_and(&self, other: &Self) -> Option> { + match (&self.val, &other.val) { + (Concrete::Uint(s, a), Concrete::Uint(s2, b)) => { + let op_res = *a & *b; + let size = if s > s2 { s } else { s2 }; + let val = Concrete::Uint(*size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Int(s, a), Concrete::Int(s2, b)) => { + let op_res = *a & *b; + let size = if s > s2 { s } else { s2 }; + let val = Concrete::Int(*size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Uint(s, u), Concrete::Int(s2, i)) + | (Concrete::Int(s, i), Concrete::Uint(s2, u)) => { + let op_res = *u & i.into_raw(); + let size = if s > s2 { s } else { s2 }; + let val = Concrete::Uint(*size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Bytes(s, a), Concrete::Bytes(s2, b)) => { + let op_res = a & b; + let size = if s > s2 { s } else { s2 }; + let val = Concrete::Bytes(*size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::DynBytes(v), _) if v.len() <= 32 => RangeConcrete::new( + 
Concrete::DynBytes(v.clone()).cast(Builtin::Bytes(v.len() as u8))?, + self.loc, + ) + .range_bit_and(other), + (_, Concrete::DynBytes(v)) if v.len() <= 32 => self.range_bit_and(&RangeConcrete::new( + Concrete::DynBytes(v.clone()).cast(Builtin::Bytes(v.len() as u8))?, + self.loc, + )), + _ => { + if let (Some(l), Some(r)) = (self.val.into_u256(), other.val.into_u256()) { + let op_res = l & r; + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } else { + None + } + } + } + } + + fn range_bit_or(&self, other: &Self) -> Option> { + match (&self.val, &other.val) { + (Concrete::Uint(s, a), Concrete::Uint(s2, b)) => { + let op_res = *a | *b; + let size = if s > s2 { s } else { s2 }; + let val = Concrete::Uint(*size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Int(s, a), Concrete::Int(s2, b)) => { + let op_res = *a | *b; + let size = if s > s2 { s } else { s2 }; + let val = Concrete::Int(*size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Bytes(s, a), Concrete::Bytes(s2, b)) => { + let op_res = a | b; + let size = if s > s2 { s } else { s2 }; + let val = Concrete::Bytes(*size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::DynBytes(v), _) if v.len() <= 32 => RangeConcrete::new( + Concrete::DynBytes(v.clone()).cast(Builtin::Bytes(v.len() as u8))?, + self.loc, + ) + .range_bit_or(other), + (_, Concrete::DynBytes(v)) if v.len() <= 32 => self.range_bit_or(&RangeConcrete::new( + Concrete::DynBytes(v.clone()).cast(Builtin::Bytes(v.len() as u8))?, + self.loc, + )), + _ => { + if let (Some(l), Some(r)) = (self.val.into_u256(), other.val.into_u256()) { + let op_res = l | r; + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } else { + None + } + } + } + } + + fn range_bit_xor(&self, other: &Self) -> Option> { + match (&self.val, &other.val) { + (Concrete::Uint(s, a), Concrete::Uint(s2, b)) => { + let op_res = *a ^ *b; + let size = if s > s2 { s } else { s2 }; + let val = Concrete::Uint(*size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Int(s, a), Concrete::Int(s2, b)) => { + let op_res = *a ^ *b; + let size = if s > s2 { s } else { s2 }; + let val = Concrete::Int(*size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Bytes(s, a), Concrete::Bytes(s2, b)) => { + let op_res = a ^ b; + let size = if s > s2 { s } else { s2 }; + let val = Concrete::Bytes(*size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::DynBytes(v), _) if v.len() <= 32 => RangeConcrete::new( + Concrete::DynBytes(v.clone()).cast(Builtin::Bytes(v.len() as u8))?, + self.loc, + ) + .range_bit_xor(other), + (_, Concrete::DynBytes(v)) if v.len() <= 32 => self.range_bit_xor(&RangeConcrete::new( + Concrete::DynBytes(v.clone()).cast(Builtin::Bytes(v.len() as u8))?, + self.loc, + )), + _ => { + if let (Some(l), Some(r)) = (self.val.into_u256(), other.val.into_u256()) { + let op_res = l ^ r; + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } else { + None + } + } + } + } + + fn range_bit_not(&self) -> Option> { + match &self.val { + Concrete::Uint(size, a) => { + let max = Concrete::max_of_type(&self.val) + .unwrap() + .uint_val() + .unwrap(); + let val = U256( + a.0.into_iter() + .map(|i| !i) + 
.collect::>() + .try_into() + .unwrap(), + ); + let op_res = val & max; + let rc = RangeConcrete::new(Concrete::Uint(*size, op_res), self.loc); + Some(rc.into()) + } + Concrete::Int(size, a) => { + let (op_res, _) = a.overflowing_neg(); + let (op_res, _) = op_res.overflowing_sub(1.into()); + let rc = RangeConcrete::new(Concrete::Int(*size, op_res), self.loc); + Some(rc.into()) + } + Concrete::Bytes(s, a) => { + let mut op_res = H256::default(); + (0..*s).for_each(|i| { + op_res.0[i as usize] = !a.0[i as usize]; + }); + let rc = RangeConcrete::new(Concrete::Bytes(*s, op_res), self.loc); + Some(rc.into()) + } + Concrete::DynBytes(v) if v.len() <= 32 => RangeConcrete::new( + Concrete::DynBytes(v.clone()).cast(Builtin::Bytes(v.len() as u8))?, + self.loc, + ) + .range_bit_not(), + _ => None, + } + } +} + +impl RangeBitwise for Elem { + fn range_bit_and(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_bit_and(b), + _ => None, + } + } + fn range_bit_or(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_bit_or(b), + _ => None, + } + } + fn range_bit_xor(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_bit_xor(b), + _ => None, + } + } + + fn range_bit_not(&self) -> Option> { + match self { + Elem::Concrete(a) => a.range_bit_not(), + _ => None, + } + } +} + +/// Executes a bitwise `and` given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. +/// +/// ### Note +/// Signed integers use 2's complement representation so the maximum is 2size - 1 - 1, while unsigned integers are 2size - 1 +/// +/// +/// ### Truth Tables +/// Truth table for `checked div` operation: +/// +/// `todo!()` +/// +/// Truth table for `wrapping div` operation: +/// +/// `todo!()` +/// +pub fn exec_bit_and( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + match (lhs_min, lhs_max, rhs_min, rhs_max) { + (Elem::ConcreteDyn(d), _, _, _) => { + return exec_bit_and( + &d.as_sized_bytes()?, + lhs_max, + rhs_min, + rhs_max, + maximize, + analyzer, + arena, + ); + } + (_, Elem::ConcreteDyn(d), _, _) => { + return exec_bit_and( + lhs_min, + &d.as_sized_bytes()?, + rhs_min, + rhs_max, + maximize, + analyzer, + arena, + ); + } + (_, _, Elem::ConcreteDyn(d), _) => { + return exec_bit_and( + lhs_min, + lhs_max, + &d.as_sized_bytes()?, + rhs_max, + maximize, + analyzer, + arena, + ); + } + (_, _, _, Elem::ConcreteDyn(d)) => { + return exec_bit_and( + lhs_min, + lhs_max, + rhs_min, + &d.as_sized_bytes()?, + maximize, + analyzer, + arena, + ); + } + _ => {} + } + + let mut candidates = vec![]; + let bit_and = |lhs: &Elem<_>, rhs: &Elem<_>, candidates: &mut Vec>| { + if let Some(c) = lhs.range_bit_and(rhs) { + candidates.push(c); + } + }; + + // the max is the min of the maxes + match lhs_max.range_ord(rhs_max, arena) { + Some(std::cmp::Ordering::Less) => { + candidates.push(lhs_max.clone()); + } + Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) => { + candidates.push(rhs_max.clone()); + } + _ => {} + } + + bit_and(lhs_min, rhs_min, &mut candidates); + bit_and(lhs_min, rhs_max, &mut candidates); + bit_and(lhs_max, rhs_min, &mut candidates); + bit_and(lhs_max, rhs_max, &mut candidates); + + let zero = Elem::from(Concrete::from(U256::from(0))); + let negative_one = 
Elem::from(Concrete::from(I256::from(-1i32))); + + let min_contains = matches!( + rhs_min.range_ord(&zero, arena), + Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) + ); + + let max_contains = matches!( + rhs_max.range_ord(&zero, arena), + Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) + ); + + if min_contains && max_contains { + candidates.push(zero); + } + + let min_contains = matches!( + rhs_min.range_ord(&negative_one, arena), + Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) + ); + + let max_contains = matches!( + rhs_max.range_ord(&negative_one, arena), + Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) + ); + + if min_contains && max_contains { + candidates.push(lhs_min.clone()); + candidates.push(lhs_max.clone()); + } + + // Sort the candidates + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} + +/// Executes a bitwise `or` given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. +/// +/// ### Note +/// Signed integers use 2's complement representation so the maximum is 2size - 1 - 1, while unsigned integers are 2size - 1 +/// +/// +/// ### Truth Tables +/// Truth table for `checked div` operation: +/// +/// `todo!()` +/// +/// Truth table for `wrapping div` operation: +/// +/// `todo!()` +/// +pub fn exec_bit_or( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + match (lhs_min, lhs_max, rhs_min, rhs_max) { + (Elem::ConcreteDyn(d), _, _, _) => { + return exec_bit_or( + &d.as_sized_bytes()?, + lhs_max, + rhs_min, + rhs_max, + maximize, + analyzer, + arena, + ); + } + (_, Elem::ConcreteDyn(d), _, _) => { + return exec_bit_or( + lhs_min, + &d.as_sized_bytes()?, + rhs_min, + rhs_max, + maximize, + analyzer, + arena, + ); + } + (_, _, Elem::ConcreteDyn(d), _) => { + return exec_bit_or( + lhs_min, + lhs_max, + &d.as_sized_bytes()?, + rhs_max, + maximize, + analyzer, + arena, + ); + } + (_, _, _, Elem::ConcreteDyn(d)) => { + return exec_bit_or( + lhs_min, + lhs_max, + rhs_min, + &d.as_sized_bytes()?, + maximize, + analyzer, + arena, + ); + } + _ => {} + } + + let mut candidates = vec![]; + let bit_or = |lhs: &Elem<_>, rhs: &Elem<_>, candidates: &mut Vec>| { + if let Some(c) = lhs.range_bit_or(rhs) { + candidates.push(c); + } + }; + + bit_or(lhs_min, rhs_min, &mut candidates); + bit_or(lhs_min, rhs_max, &mut candidates); + bit_or(lhs_max, rhs_min, &mut candidates); + bit_or(lhs_max, rhs_max, &mut candidates); + + let negative_one = Elem::from(Concrete::from(I256::from(-1i32))); + + let min_contains = matches!( + rhs_min.range_ord(&negative_one, arena), + Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) + ); + + let max_contains = matches!( + rhs_max.range_ord(&negative_one, arena), + Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) + ); + + if min_contains && max_contains { + candidates.push(negative_one.clone()); + candidates.push(negative_one.clone()); + } + + // Sort the candidates + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + 
Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} + +/// Executes a bitwise `xor` given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. +/// +/// ### Note +/// Signed integers use 2's complement representation so the maximum is 2size - 1 - 1, while unsigned integers are 2size - 1 +/// +/// +/// ### Truth Tables +/// Truth table for `checked div` operation: +/// +/// `todo!()` +/// +/// Truth table for `wrapping div` operation: +/// +/// `todo!()` +/// +pub fn exec_bit_xor( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + match (lhs_min, lhs_max, rhs_min, rhs_max) { + (Elem::ConcreteDyn(d), _, _, _) => { + return exec_bit_xor( + &d.as_sized_bytes()?, + lhs_max, + rhs_min, + rhs_max, + maximize, + analyzer, + arena, + ); + } + (_, Elem::ConcreteDyn(d), _, _) => { + return exec_bit_xor( + lhs_min, + &d.as_sized_bytes()?, + rhs_min, + rhs_max, + maximize, + analyzer, + arena, + ); + } + (_, _, Elem::ConcreteDyn(d), _) => { + return exec_bit_xor( + lhs_min, + lhs_max, + &d.as_sized_bytes()?, + rhs_max, + maximize, + analyzer, + arena, + ); + } + (_, _, _, Elem::ConcreteDyn(d)) => { + return exec_bit_xor( + lhs_min, + lhs_max, + rhs_min, + &d.as_sized_bytes()?, + maximize, + analyzer, + arena, + ); + } + _ => {} + } + + let mut candidates = vec![ + lhs_min.range_bit_xor(rhs_min), + lhs_min.range_bit_xor(rhs_max), + lhs_max.range_bit_xor(rhs_min), + lhs_max.range_bit_xor(rhs_max), + ]; + + let zero = Elem::from(Concrete::from(U256::from(0))); + let negative_one = Elem::from(Concrete::from(I256::from(-1i32))); + + let min_contains = matches!( + rhs_min.range_ord(&zero, arena), + Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) + ); + + let max_contains = matches!( + rhs_max.range_ord(&zero, arena), + Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) + ); + + if min_contains && max_contains { + // if the rhs contains zero, in xor, thats just itself + candidates.push(lhs_max.range_bit_xor(&zero)); + } + + let min_contains = matches!( + rhs_min.range_ord(&negative_one, arena), + Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) + ); + + let max_contains = matches!( + rhs_max.range_ord(&negative_one, arena), + Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) + ); + + if min_contains && max_contains { + candidates.push(lhs_min.range_bit_xor(&negative_one)); + candidates.push(lhs_max.range_bit_xor(&negative_one)); + } + + let mut candidates = candidates.into_iter().flatten().collect::>(); + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} + +/// Executes a bitwise `not` given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. 
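These `exec_bit_*` helpers all share the same shape: collect candidate results from the four (lhs, rhs) endpoint combinations plus a few special values (zero, negative one, the "min of the maxes"), sort them, and return the smallest or largest depending on `maximize`. The following is a minimal standalone sketch of that shape for bitwise `and` on `u8` intervals; the helper name `bit_and_bounds` is illustrative only, plain `u8` values stand in for `Elem<Concrete>`, and, like the code it mirrors, it is a candidate heuristic rather than a proof of tight bounds.

```rust
// Sketch of the candidate-selection pattern on u8 intervals.
// `bit_and_bounds` is an illustrative name, not an API from this crate.
fn bit_and_bounds(lhs: (u8, u8), rhs: (u8, u8), maximize: bool) -> u8 {
    let (lhs_min, lhs_max) = lhs;
    let (rhs_min, rhs_max) = rhs;
    let mut candidates = vec![
        lhs_min & rhs_min,
        lhs_min & rhs_max,
        lhs_max & rhs_min,
        lhs_max & rhs_max,
        // "the max is the min of the maxes": x & y never exceeds either operand
        lhs_max.min(rhs_max),
    ];
    candidates.sort();
    if maximize {
        *candidates.last().unwrap()
    } else {
        candidates[0]
    }
}

fn main() {
    // lhs in [0x0f, 0xf0], rhs in [0x33, 0x3c]
    assert_eq!(bit_and_bounds((0x0f, 0xf0), (0x33, 0x3c), false), 0x03); // 0x0f & 0x33
    assert_eq!(bit_and_bounds((0x0f, 0xf0), (0x33, 0x3c), true), 0x3c); // min of the maxes
}
```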
+/// +/// ### Note +/// Signed integers use 2's complement representation so the maximum is 2size - 1 - 1, while unsigned integers are 2size - 1 +/// +/// +/// ### Truth Tables +/// Truth table for `checked div` operation: +/// +/// `todo!()` +/// +/// Truth table for `wrapping div` operation: +/// +/// `todo!()` +/// +pub fn exec_bit_not( + lhs_min: &Elem, + lhs_max: &Elem, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + match (lhs_min, lhs_max) { + (Elem::ConcreteDyn(d), _) => { + return exec_bit_not(&d.as_sized_bytes()?, lhs_max, maximize, analyzer, arena); + } + (_, Elem::ConcreteDyn(d)) => { + return exec_bit_not(lhs_min, &d.as_sized_bytes()?, maximize, analyzer, arena); + } + _ => {} + } + let mut candidates = vec![lhs_min.range_bit_not(), lhs_max.range_bit_not()]; + + let zero = Elem::from(Concrete::from(U256::from(0))); + + let min_contains = matches!( + lhs_min.range_ord(&zero, arena), + Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) + ); + + let max_contains = matches!( + lhs_max.range_ord(&zero, arena), + Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) + ); + + if min_contains && max_contains { + match lhs_min { + Elem::Concrete( + ref r @ RangeConcrete { + val: Concrete::Uint(..), + .. + }, + ) => candidates.push(Some(Concrete::max_of_type(&r.val).unwrap().into())), + Elem::Concrete( + ref r @ RangeConcrete { + val: Concrete::Int(..), + .. + }, + ) => candidates.push(Some(Concrete::min_of_type(&r.val).unwrap().into())), + _ => {} + } + } + + let mut candidates = candidates.into_iter().flatten().collect::>(); + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use ethers_core::types::{I256, U256}; + use solang_parser::pt::Loc; + + #[test] + fn and_uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(15)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(5)), Loc::Implicit); + let result = x.range_bit_and(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(5))); + } + + #[test] + fn and_int_int() { + let x = RangeConcrete::new(Concrete::Int(256, I256::from(-15i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-5i32)), Loc::Implicit); + let result = x.range_bit_and(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(-15))); + } + + #[test] + fn and_bytes_bytes() { + let mut h: [u8; 32] = [0; 32]; + h[0..4].copy_from_slice(&[1, 1, 1, 1][..]); + let mut h2: [u8; 32] = [0; 32]; + h2[0..4].copy_from_slice(&[0, 1, 0, 1][..]); + let x = RangeConcrete::new(Concrete::from(h), Loc::Implicit); + let y = RangeConcrete::new(Concrete::from(h2), Loc::Implicit); + let result = x.range_bit_and(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::from(h2)); + } + + #[test] + fn or_uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(15)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(5)), Loc::Implicit); + let result = x.range_bit_or(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(15))); + } + + #[test] + fn or_int_int() { + let x = 
RangeConcrete::new(Concrete::Int(256, I256::from(-15i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-5i32)), Loc::Implicit); + let result = x.range_bit_or(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(-5))); + } + + #[test] + fn or_bytes_bytes() { + let mut h: [u8; 32] = [0; 32]; + h[0..4].copy_from_slice(&[1, 1, 1, 1][..]); + let mut h2: [u8; 32] = [0; 32]; + h2[0..4].copy_from_slice(&[0, 1, 0, 1][..]); + let x = RangeConcrete::new(Concrete::from(h), Loc::Implicit); + let y = RangeConcrete::new(Concrete::from(h2), Loc::Implicit); + let result = x.range_bit_or(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::from(h)); + } + + #[test] + fn xor_uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(15)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(5)), Loc::Implicit); + let result = x.range_bit_xor(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(10))); + } + + #[test] + fn xor_int_int() { + let x = RangeConcrete::new(Concrete::Int(256, I256::from(-15i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-5i32)), Loc::Implicit); + let result = x.range_bit_xor(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(10))); + } + + #[test] + fn xor_bytes_bytes() { + let mut h: [u8; 32] = [0; 32]; + h[0..4].copy_from_slice(&[1, 1, 1, 1][..]); + let mut h2: [u8; 32] = [0; 32]; + h2[0..4].copy_from_slice(&[0, 1, 0, 1][..]); + let x = RangeConcrete::new(Concrete::from(h), Loc::Implicit); + let y = RangeConcrete::new(Concrete::from(h2), Loc::Implicit); + let result = x.range_bit_xor(&y).unwrap().maybe_concrete_value().unwrap(); + + let mut expected: [u8; 32] = [0; 32]; + expected[0..3].copy_from_slice(&[1, 0, 1][..]); + assert_eq!(result.val, Concrete::from(expected)); + } + + #[test] + fn not_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(15)), Loc::Implicit); + let result = x.range_bit_not().unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::MAX << 4)); + } + + #[test] + fn not_int() { + let x = RangeConcrete::new(Concrete::Int(256, I256::from(-15i32)), Loc::Implicit); + let result = x.range_bit_not().unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(14))); + } + + #[test] + fn not_bytes() { + let mut h: [u8; 32] = [0; 32]; + h[0..4].copy_from_slice(&[1; 4][..]); + let x = RangeConcrete::new(Concrete::from(h), Loc::Implicit); + let result = x.range_bit_not().unwrap().maybe_concrete_value().unwrap(); + + let mut expected: [u8; 32] = [255; 32]; + expected[0..4].copy_from_slice(&[254, 254, 254, 254][..]); + assert_eq!(result.val, Concrete::from(expected)); + } +} diff --git a/crates/graph/src/range/exec/cast.rs b/crates/graph/src/range/exec/cast.rs new file mode 100644 index 00000000..0c02d5a9 --- /dev/null +++ b/crates/graph/src/range/exec/cast.rs @@ -0,0 +1,262 @@ +use crate::nodes::{Builtin, Concrete}; +use crate::range::{elem::*, exec_traits::*}; +use crate::GraphBackend; + +use shared::RangeArena; + +use ethers_core::types::{H256, U256}; +use std::collections::BTreeMap; + +impl RangeCast for RangeConcrete { + fn range_cast(&self, other: &Self) -> Option> { + Some(Elem::Concrete(RangeConcrete::new( + self.val.clone().cast_from(&other.val)?, + self.loc, + ))) + } +} + +impl RangeCast> for 
RangeConcrete { + fn range_cast(&self, other: &RangeDyn) -> Option> { + match ( + self.val.clone(), + other.val.values().take(1).next().map(|(a, _)| a), + ) { + (Concrete::Uint(size, val), o) if o.is_none() || o.unwrap().is_bytes() => { + RangeConcrete::new( + Concrete::Uint(size, val).cast(Builtin::Bytes((size / 8) as u8))?, + self.loc, + ) + .range_cast(other) + } + (Concrete::Bytes(size, val), o) if o.is_none() || o.unwrap().is_bytes() => { + let new = val + .0 + .iter() + .enumerate() + .map(|(i, v)| { + let idx = Elem::from(Concrete::from(U256::from(i))); + let mut bytes = [0x00; 32]; + bytes[0] = *v; + let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); + (idx, v) + }) + .collect::>(); + Some(Elem::ConcreteDyn(RangeDyn::new( + Elem::from(Concrete::from(U256::from(size))), + new, + other.loc, + ))) + } + (Concrete::DynBytes(val), o) if o.is_none() || o.unwrap().is_bytes() => { + let new = val + .iter() + .enumerate() + .map(|(i, v)| { + let idx = Elem::from(Concrete::from(U256::from(i))); + let mut bytes = [0x00; 32]; + bytes[0] = *v; + let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); + (idx, v) + }) + .collect::>(); + Some(Elem::ConcreteDyn(RangeDyn::new( + Elem::from(Concrete::from(U256::from(val.len()))), + new, + other.loc, + ))) + } + (Concrete::String(val), o) if o.is_none() || o.unwrap().is_string() => { + let new = val + .chars() + .enumerate() + .map(|(i, v)| { + let idx = Elem::from(Concrete::from(U256::from(i))); + let mut bytes = [0x00; 32]; + v.encode_utf8(&mut bytes[..]); + let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); + (idx, v) + }) + .collect::>(); + Some(Elem::ConcreteDyn(RangeDyn::new( + Elem::from(Concrete::from(U256::from(val.len()))), + new, + other.loc, + ))) + } + _e => None, + } + } +} + +impl RangeCast> for RangeDyn { + fn range_cast(&self, other: &Self) -> Option> { + let val: Option<&Elem> = self.val.values().take(1).next().map(|(a, _)| a); + let o_val: Option<&Elem> = other.val.values().take(1).next().map(|(a, _)| a); + + match (val, o_val) { + (Some(elem), Some(o_elem)) + if elem.is_bytes() && o_elem.is_bytes() + || elem.is_uint() && o_elem.is_uint() + || elem.is_int() && o_elem.is_int() => + { + Some(Elem::ConcreteDyn(self.clone())) + } + (Some(elem), None) if elem.is_bytes() || elem.is_uint() || elem.is_int() => { + Some(Elem::ConcreteDyn(self.clone())) + } + (Some(Elem::Reference(_)), None) => Some(Elem::ConcreteDyn(self.clone())), + (None, Some(Elem::Reference(_))) => Some(Elem::ConcreteDyn(self.clone())), + (None, None) => Some(Elem::ConcreteDyn(self.clone())), + _ => None, + } + } +} + +impl RangeCast> for RangeDyn { + fn range_cast(&self, other: &RangeConcrete) -> Option> { + let val: &Elem<_> = self.val.values().take(1).next().map(|(a, _)| a)?; + let o_val = &other.val; + match (val, o_val) { + ( + &Elem::Concrete(RangeConcrete { + val: Concrete::Bytes(1, ..), + .. + }), + Concrete::Bytes(size, _), + ) => { + let mut h = H256::default(); + for (i, val) in self.val.values().take(*size as usize).enumerate() { + match val { + ( + Elem::Concrete(RangeConcrete { + val: Concrete::Bytes(1, v), + .. 
+ }), + _, + ) => { + // consume as many as we can + h.0[i] = v.0[0]; + } + _ => break, + } + } + Some(Elem::Concrete(Concrete::Bytes(*size, h).into())) + } + _e => None, + } + } +} + +impl RangeCast for Elem { + fn range_cast(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_cast(b), + (Elem::ConcreteDyn(a), Elem::ConcreteDyn(b)) => { + // super dumb type stuff here that makes it so we have to specify + as RangeCast>::range_cast(a, b) + } + (Elem::ConcreteDyn(a), Elem::Concrete(b)) => a.range_cast(b), + (Elem::Concrete(a), Elem::ConcreteDyn(b)) => a.range_cast(b), + _e => None, + } + } +} + +pub fn exec_cast( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + // the weird thing about cast is that we really dont know until after the cast due to sizing things + // so we should just try them all then compare + let candidates = vec![ + lhs_min.range_cast(rhs_min), + lhs_min.range_cast(rhs_max), + lhs_max.range_cast(rhs_min), + lhs_max.range_cast(rhs_max), + ]; + let mut candidates = candidates.into_iter().flatten().collect::>(); + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use ethers_core::types::I256; + use solang_parser::pt::Loc; + + #[test] + fn int_downcast() { + let x = RangeConcrete::new(Concrete::Int(256, I256::from(-1500)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(8, I256::from(0)), Loc::Implicit); + let result = x.range_cast(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(8, I256::from(36))); + } + + #[test] + fn uint_downcast() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(1500)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(8, U256::from(0)), Loc::Implicit); + let result = x.range_cast(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(8, U256::from(220))); + } + + #[test] + fn int_weirdness() { + // type(int64).max + let v = Concrete::max_of_type(&Concrete::Int(64, I256::from(0i32))) + .unwrap() + .int_val() + .unwrap(); + // int128(type(int64).max) + let x = RangeConcrete::new(Concrete::Int(128, v), Loc::Implicit); + // int128(type(int64).max) + 1 + let x = x + .range_add(&RangeConcrete::new( + Concrete::Int(256, I256::from(1)), + Loc::Implicit, + )) + .unwrap() + .maybe_concrete_value() + .unwrap(); + let expected = x.val.int_val().unwrap() * I256::from(-1i32); + let y = RangeConcrete::new(Concrete::Int(64, I256::from(0)), Loc::Implicit); + // int64(int128(type(int64).max) + 1) + let result = x.range_cast(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(64, expected)); + } + + #[test] + fn int_upcast() { + let x = rc_int_sized(-101); + let y = rc_int256(-101); + let result = x.range_cast(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(-101))); + } + + #[test] + fn bytes_upcast() { + let x = RangeConcrete::new(Concrete::from(vec![19, 55]), Loc::Implicit); + let y = RangeConcrete::new(Concrete::from(vec![0, 0, 0, 0]), Loc::Implicit); + let result = x.range_cast(&y).unwrap().maybe_concrete_value().unwrap(); + 
assert_eq!(result.val, Concrete::from(vec![19, 55, 0, 0])); + } +} diff --git a/crates/graph/src/range/exec/exec_op.rs b/crates/graph/src/range/exec/exec_op.rs new file mode 100644 index 00000000..8f774fbe --- /dev/null +++ b/crates/graph/src/range/exec/exec_op.rs @@ -0,0 +1,454 @@ +use crate::{ + nodes::Concrete, + range::{elem::*, exec::*, exec_traits::*}, + GraphBackend, GraphError, +}; +use shared::RangeArena; + +impl ExecOp for RangeExpr { + type GraphError = GraphError; + + #[tracing::instrument(level = "trace", skip_all)] + fn exec_op( + &self, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, Self::GraphError> { + let idx = self.arena_idx(arena); + if let Some(idx) = idx { + if let Some(t) = arena.ranges.get(idx) { + if let Elem::Expr(expr) = t { + tracing::trace!("hitting cache"); + if maximize { + if let Some(MinMaxed::Maximized(max)) = &expr.maximized { + return Ok(*max.clone()); + } + } else if let Some(MinMaxed::Minimized(min)) = &expr.minimized { + return Ok(*min.clone()); + } + } + } + } + + let res = self.exec(self.spread(analyzer, arena)?, maximize, analyzer, arena)?; + + if let Some(idx) = idx { + if let Some(t) = arena.ranges.get_mut(idx) { + if let Elem::Expr(expr) = &mut *t { + tracing::trace!("setting cache"); + if maximize { + expr.maximized = Some(MinMaxed::Maximized(Box::new(res.clone()))); + } else { + expr.minimized = Some(MinMaxed::Minimized(Box::new(res.clone()))); + } + } + } + } + + Ok(res) + } + + #[tracing::instrument(level = "trace", skip_all)] + fn cache_exec_op( + &mut self, + maximize: bool, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + tracing::trace!("minimize lhs"); + self.lhs.cache_minimize(analyzer, arena)?; + tracing::trace!("maximize lhs"); + self.lhs.cache_maximize(analyzer, arena)?; + tracing::trace!("minimize rhs"); + self.rhs.cache_minimize(analyzer, arena)?; + tracing::trace!("maximize rhs"); + self.rhs.cache_maximize(analyzer, arena)?; + tracing::trace!("exec"); + + let res = self.exec_op(maximize, analyzer, arena)?; + + if maximize { + self.maximized = Some(MinMaxed::Maximized(Box::new(res))); + } else { + self.minimized = Some(MinMaxed::Minimized(Box::new(res))); + } + + if let Some(idx) = self.arena_idx(arena) { + if let Some(t) = arena.ranges.get_mut(idx) { + if let Elem::Expr(expr) = &mut *t { + if maximize { + expr.maximized.clone_from(&self.maximized); + } else { + expr.minimized.clone_from(&self.minimized); + } + } + } + } + + Ok(()) + } + + fn uncache_exec(&mut self) { + self.lhs.uncache(); + self.rhs.uncache(); + } + + #[tracing::instrument(level = "trace", skip_all)] + fn simplify_exec_op( + &self, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + if maximize { + if let Some(v) = &self.flattened_max { + return Ok(*v.clone()); + } + } else if let Some(v) = &self.flattened_min { + return Ok(*v.clone()); + } + + if let Some(v) = self.arenaized_flat_cache(maximize, arena) { + return Ok(*v); + } + + let (lhs_min, lhs_max, rhs_min, rhs_max) = self.simplify_spread(analyzer, arena)?; + tracing::trace!( + "simplifying op: {} {} {}, lhs_min: {}, lhs_max: {}, rhs_min: {}, rhs_max: {}", + self.lhs, + self.op.to_string(), + self.rhs, + lhs_min, + lhs_max, + rhs_min, + rhs_max + ); + let lhs_is_conc = lhs_min.is_conc() && lhs_max.is_conc(); + let rhs_is_conc = rhs_min.is_conc() && rhs_max.is_conc(); + + let finished = false; + let mut ret = Ok(Elem::Null); + // if self.op == RangeOp::Cast { + // 
// for a cast we can *actually* evaluate dynamic elem if lhs side is concrete + // if lhs_is_conc { + // ret = self.exec_op(maximize, analyzer); + // finished = true; + // } else { + // // we can drop the cast if the max of the dynamic lhs is less than the cast + // let concretized_lhs = self.lhs.maximize(analyzer, arena)?; + // if matches!( + // concretized_lhs.range_ord(&self.rhs, analyzer), + // Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) + // ) { + // ret = Ok(*self.lhs.clone()); + // finished = true; + // } + // } + // } else if matches!(self.op, RangeOp::Concat | RangeOp::Memcopy) { + // // we can always execute a concat or memcopy + // ret = self.exec_op(maximize, analyzer); + // finished = true; + // } else if matches!( + // self.op, + // RangeOp::SetIndices | RangeOp::SetLength | RangeOp::GetLength | RangeOp::GetIndex + // ) { + // match self.op { + // RangeOp::GetLength => { + // ret = if maximize { + // Ok(lhs_max + // .range_get_length() + // .unwrap_or_else(|| Elem::Expr(self.clone()))) + // } else { + // Ok(lhs_min + // .range_get_length() + // .unwrap_or_else(|| Elem::Expr(self.clone()))) + // }; + // finished = true; + // } + // RangeOp::SetLength => { + // ret = if maximize { + // Ok(lhs_max + // .range_set_length(&rhs_max) + // .unwrap_or_else(|| Elem::Expr(self.clone()))) + // } else { + // Ok(lhs_min + // .range_set_length(&rhs_min) + // .unwrap_or_else(|| Elem::Expr(self.clone()))) + // }; + // finished = true; + // } + // RangeOp::GetIndex => { + // if maximize { + // let res = match lhs_max { + // Elem::ConcreteDyn(RangeDyn { ref val, .. }) => val + // .iter() + // .try_fold( + // None, + // |mut acc: Option>, (key, (val, _))| { + // if matches!( + // key.overlaps_dual(&rhs_min, &rhs_max, true, analyzer)?, + // Some(true) + // ) { + // if acc.is_none() + // || matches!( + // acc.clone().unwrap().range_ord(val, arena), + // Some(std::cmp::Ordering::Greater) + // ) + // { + // acc = Some(val.clone()); + // Ok(acc) + // } else { + // Ok(acc) + // } + // } else { + // Ok(acc) + // } + // }, + // )? + // .unwrap_or_else(|| Elem::Null), + // _ => Elem::Expr(self.clone()), + // }; + // ret = Ok(res); + // finished = true; + // } else { + // let res = match lhs_max { + // Elem::ConcreteDyn(RangeDyn { ref val, .. }) => val + // .iter() + // .try_fold( + // None, + // |mut acc: Option>, (key, (val, _))| { + // if matches!( + // key.overlaps_dual(&rhs_min, &rhs_max, true, analyzer)?, + // Some(true) + // ) { + // if acc.is_none() + // || matches!( + // acc.clone().unwrap().range_ord(val, arena), + // Some(std::cmp::Ordering::Less) + // ) + // { + // acc = Some(val.clone()); + // Ok(acc) + // } else { + // Ok(acc) + // } + // } else { + // Ok(acc) + // } + // }, + // )? 
+ // .unwrap_or_else(|| Elem::Null), + // _ => Elem::Expr(self.clone()), + // }; + // ret = Ok(res); + // finished = true; + // } + // } + // RangeOp::SetIndices => { + // ret = if maximize { + // Ok(lhs_max + // .range_set_indices(&rhs_max) + // .unwrap_or_else(|| Elem::Expr(self.clone()))) + // } else { + // Ok(lhs_min + // .range_set_indices(&rhs_min) + // .unwrap_or_else(|| Elem::Expr(self.clone()))) + // }; + // finished = true; + // } + // _ => unreachable!(), + // } + // } + + let parts = (lhs_min, lhs_max, rhs_min, rhs_max); + match (lhs_is_conc, rhs_is_conc, finished) { + (true, true, false) => { + ret = self.exec(parts, maximize, analyzer, arena); + } + (_, _, false) => { + ret = Ok(Elem::Expr(self.clone())); + } + _ => {} + } + + if let Some(_idx) = self.arena_idx(arena) { + self.set_arenaized_flattened(maximize, ret.clone()?, arena); + } + ret + } + + fn spread( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result< + ( + Elem, + Elem, + Elem, + Elem, + ), + GraphError, + > { + let lhs_min = self.lhs.minimize(analyzer, arena)?; + self.lhs.set_arenaized_cache(false, &lhs_min, arena); + + let lhs_max = self.lhs.maximize(analyzer, arena)?; + self.lhs.set_arenaized_cache(true, &lhs_max, arena); + + let rhs_min = self.rhs.minimize(analyzer, arena)?; + self.rhs.set_arenaized_cache(false, &rhs_min, arena); + + let rhs_max = self.rhs.maximize(analyzer, arena)?; + self.rhs.set_arenaized_cache(true, &rhs_max, arena); + + Ok((lhs_min, lhs_max, rhs_min, rhs_max)) + } + + fn simplify_spread( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result< + ( + Elem, + Elem, + Elem, + Elem, + ), + GraphError, + > { + let lhs_min = self.lhs.simplify_minimize(analyzer, arena)?; + self.lhs.set_arenaized_flattened(false, &lhs_min, arena); + + let lhs_max = self.lhs.simplify_maximize(analyzer, arena)?; + self.lhs.set_arenaized_flattened(true, &lhs_max, arena); + + let rhs_min = self.rhs.simplify_minimize(analyzer, arena)?; + self.rhs.set_arenaized_flattened(false, &rhs_min, arena); + + let rhs_max = self.rhs.simplify_maximize(analyzer, arena)?; + self.rhs.set_arenaized_flattened(true, &rhs_max, arena); + + Ok((lhs_min, lhs_max, rhs_min, rhs_max)) + } + + #[tracing::instrument(level = "trace", skip_all)] + fn exec( + &self, + (lhs_min, lhs_max, rhs_min, rhs_max): ( + Elem, + Elem, + Elem, + Elem, + ), + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + if maximize { + if let Some(MinMaxed::Maximized(v)) = self.arenaized_cache(maximize, analyzer, arena) { + tracing::trace!("avoided execing via cache"); + return Ok(*v); + } + } + + if !maximize { + if let Some(MinMaxed::Minimized(v)) = self.arenaized_cache(maximize, analyzer, arena) { + tracing::trace!("avoided execing via cache"); + return Ok(*v); + } + } + + tracing::trace!( + "executing {}: {} {} {}, lhs_min: {}, lhs_max: {}, rhs_min: {}, rhs_max: {}", + if maximize { "maximum" } else { "minimum" }, + self.lhs, + self.op.to_string(), + self.rhs, + lhs_min, + lhs_max, + rhs_min, + rhs_max + ); + + let res = match self.op { + RangeOp::GetLength => exec_get_length(&lhs_min, &lhs_max, maximize, analyzer, arena), + RangeOp::GetIndex => exec_get_index(&self.lhs, &self.rhs, maximize, analyzer, arena), + RangeOp::SetLength => exec_set_length(&lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize), + RangeOp::SetIndices => exec_set_indices( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, &self.rhs, maximize, analyzer, arena, + ), + RangeOp::Memcopy => 
exec_memcopy(&lhs_min, &lhs_max, maximize), + RangeOp::Concat => exec_concat( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, analyzer, arena, + ), + RangeOp::Add(unchecked) => exec_add( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, unchecked, analyzer, arena, + ), + RangeOp::Sub(unchecked) => exec_sub( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, unchecked, analyzer, arena, + ), + RangeOp::Mul(unchecked) => exec_mul( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, unchecked, analyzer, arena, + ), + RangeOp::Div(unchecked) => exec_div( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, unchecked, analyzer, arena, + ), + RangeOp::Mod => exec_mod( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, analyzer, arena, + ), + RangeOp::Exp => exec_exp( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, analyzer, arena, + ), + RangeOp::Min => exec_min( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, analyzer, arena, + ), + RangeOp::Max => exec_max( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, analyzer, arena, + ), + RangeOp::Gt => exec_gt(&lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize), + RangeOp::Lt => exec_lt(&lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize), + RangeOp::Gte => exec_gte(&lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize), + RangeOp::Lte => exec_lte(&lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize), + RangeOp::Eq => exec_eq_neq( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, true, analyzer, arena, + ), + RangeOp::Neq => exec_eq_neq( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, false, analyzer, arena, + ), + RangeOp::And => exec_and( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, analyzer, arena, + ), + RangeOp::Or => exec_or( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, analyzer, arena, + ), + RangeOp::Not => exec_not( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, analyzer, arena, + ), + RangeOp::BitAnd => exec_bit_and( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, analyzer, arena, + ), + RangeOp::BitOr => exec_bit_or( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, analyzer, arena, + ), + RangeOp::BitXor => exec_bit_xor( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, analyzer, arena, + ), + RangeOp::BitNot => exec_bit_not(&lhs_min, &lhs_max, maximize, analyzer, arena), + RangeOp::Shl => exec_shl( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, analyzer, arena, + ), + RangeOp::Shr => exec_shr( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, analyzer, arena, + ), + RangeOp::Cast => exec_cast( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, maximize, analyzer, arena, + ), + } + .unwrap_or_else(|| Elem::Expr(self.clone())); + tracing::trace!("result: {res}"); + Ok(res) + } +} diff --git a/crates/graph/src/range/exec/math_ops/add.rs b/crates/graph/src/range/exec/math_ops/add.rs new file mode 100644 index 00000000..980ead1f --- /dev/null +++ b/crates/graph/src/range/exec/math_ops/add.rs @@ -0,0 +1,569 @@ +use crate::nodes::Concrete; +use crate::range::{elem::*, exec_traits::*}; +use crate::GraphBackend; + +use shared::RangeArena; + +use ethers_core::types::{I256, U256}; +use solang_parser::pt::Loc; + +impl RangeAdd for RangeConcrete { + fn range_add(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + // `max_of_type` cannot fail on uint + let max_uint = Concrete::max_of_type(&self.val) + .unwrap() + .into_u256() + .unwrap(); + // min { a + b, max } to cap at maximum of lhs sizing + let op_res = 
lhs_val.saturating_add(rhs_val).min(max_uint); + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => { + match (&self.val, &other.val) { + (Concrete::Uint(lhs_size, val), Concrete::Int(_, neg_v)) + | (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { + // neg_v guaranteed to be negative here + let abs = neg_v.unsigned_abs(); + if abs > *val { + // |b| - a + let op_res = + I256::from_raw(abs.saturating_sub(*val)) * I256::from(-1i32); + let val = Concrete::Int(*lhs_size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } else { + // a - |b| + let op_res = val.saturating_sub(abs); + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + } + (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { + // `min_of_type` cannot fail on int + let min = Concrete::min_of_type(&self.val).unwrap().int_val().unwrap(); + // lhs + rhs when both are negative is effectively lhs - rhs which means + // we saturate at the minimum value of the left hand side. + // therefore, max{ l + r, min } is the result + let op_res = l.saturating_add(*r).max(min); + let val = Concrete::Int(*lhs_size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => None, + } + } + } + } + + fn range_wrapping_add(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + let op_res = lhs_val.overflowing_add(rhs_val).0; + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => match (&self.val, &other.val) { + (Concrete::Uint(..), Concrete::Int(..)) + | (Concrete::Int(..), Concrete::Uint(..)) => { + // just fall back to normal implementation because + // a positive and negative cannot overflow in addition + self.range_add(other) + } + (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { + let op_res = l.overflowing_add(*r).0; + let val = Concrete::Int(*lhs_size, op_res).size_wrap(); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => None, + }, + } + } +} + +impl RangeAdd for Elem { + fn range_add(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), _) if a.val.is_zero() => Some(other.clone()), + (_, Elem::Concrete(b)) if b.val.is_zero() => Some(self.clone()), + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_add(b), + _ => None, + } + } + fn range_wrapping_add(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), _) if a.val.is_zero() => Some(other.clone()), + (_, Elem::Concrete(b)) if b.val.is_zero() => Some(self.clone()), + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_wrapping_add(b), + _ => None, + } + } +} + +/// Executes an addition given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound of the operation. +/// +/// ### Explanation +/// A fact about addition is that the smallest value possible (in unbounded integer space), is between two _minimum_ values and the largest +/// is between two _maximum_ values. This fact is used in normal "unbounded" (really, saturating) addition calculations as well as wrapping addition as basis for another fact: +/// +/// In wrapping addition, if the bounds allow for optionally wrapping (e.g.: minimum + minimum does not wrap, but maximum + maximum does wrap), we can +/// by extension include *both* the type's maximum and minimum. 
+///
+/// For example, assume:
+/// <code>uint256 x: [100, 2<sup>256</sup>-1]</code>
+/// <code>uint256 y: [100, 2<sup>256</sup>-1]</code>
+/// <code>unchecked { x + y }</code>
+///
+/// In this addition of `x+y`, `100+100` does not wrap, but <code>2<sup>256</sup>-1 + 2<sup>256</sup>-1</code> does. We can construct a value of x and y such that
+/// the result of `x+y` is equal to <code>2<sup>256</sup>-1</code> (<code>100 + 2<sup>256</sup>-101</code>) or `0` (<code>100 + 2<sup>256</sup>-99</code>). Therefore, the new bounds
+/// on `unchecked { x + y }` are <code>[0, 2<sup>256</sup>-1]</code>.
+///
+/// ### Note
+/// Signed integers use 2's complement representation so the maximum is <code>2<sup>size - 1</sup> - 1</code>, while unsigned integers are <code>2<sup>size</sup> - 1</code>
+///
+/// ### Truth Tables
+/// Truth table for `checked add` operation:
+///
+///| Add         | Uint | Int | BytesX | Address | Bytes | String |
+///|-------------|------|-----|--------|---------|-------|--------|
+///| **Uint**    | min: lhs<sub>min</sub> + rhs<sub>min</sub><br>max: lhs<sub>max</sub> + rhs<sub>max</sub> | min: lhs<sub>min</sub> + rhs<sub>min</sub><br>max: lhs<sub>max</sub> + rhs<sub>max</sub> | N/A | N/A | N/A | N/A |
+///| **Int**     | min: lhs<sub>min</sub> + rhs<sub>min</sub><br>max: lhs<sub>max</sub> + rhs<sub>max</sub> | min: lhs<sub>min</sub> + rhs<sub>min</sub><br>max: lhs<sub>max</sub> + rhs<sub>max</sub> | N/A | N/A | N/A | N/A |
+///| **BytesX**  | N/A | N/A | N/A | N/A | N/A | N/A |
+///| **Address** | N/A | N/A | N/A | N/A | N/A | N/A |
+///| **Bytes**   | N/A | N/A | N/A | N/A | N/A | N/A |
+///| **String**  | N/A | N/A | N/A | N/A | N/A | N/A |
+///
+/// Truth table for `wrapping add` operation:
+///
+///| Wrapping Add | Uint | Int | BytesX | Address | Bytes | String |
+///|--------------|------|-----|--------|---------|-------|--------|
+///| **Uint**     | min: {0, lhs<sub>min</sub> + rhs<sub>min</sub>}<br>max: {2<sup>size</sup> - 1, lhs<sub>max</sub> + rhs<sub>max</sub>} | min: {0, lhs<sub>min</sub> + rhs<sub>min</sub>}<br>max: {2<sup>size</sup> - 1, lhs<sub>max</sub> + rhs<sub>max</sub>} | N/A | N/A | N/A | N/A |
+///| **Int**      | min: {-2<sup>size - 1</sup>, lhs<sub>min</sub> + rhs<sub>min</sub>}<br>max: {2<sup>size</sup> - 1, lhs<sub>max</sub> + rhs<sub>max</sub>} | min: {0, lhs<sub>min</sub> + rhs<sub>min</sub>}<br>
max: {2size - 1, lhsmax + rhsmax} | N/A | N/A | N/A | N/A | +///| **BytesX** | N/A | N/A | N/A | N/A | N/A | N/A | +///| **Address** | N/A | N/A | N/A | N/A | N/A | N/A | +///| **Bytes** | N/A | N/A | N/A | N/A | N/A | N/A | +///| **String** | N/A | N/A | N/A | N/A | N/A | N/A | +pub fn exec_add( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + wrapping: bool, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + tracing::trace!("exec add: unchecked - {wrapping}; maximize - {maximize};"); + if wrapping { + let mut candidates = vec![]; + let mut all_overflowed = true; + let mut one_overflowed = false; + + let zero = Elem::Concrete(RangeConcrete::new( + Concrete::from(U256::zero()), + Loc::Implicit, + )); + let add_candidate = |lhs: &Elem, + rhs: &Elem, + candidates: &mut Vec>, + all_overflowed: &mut bool, + one_overflowed: &mut bool, + arena: &mut RangeArena>| { + if let Some(c) = lhs.range_wrapping_add(rhs) { + let lhs_neg = matches!(lhs.range_ord(&zero, arena), Some(std::cmp::Ordering::Less)); + let rhs_neg = matches!(rhs.range_ord(&zero, arena), Some(std::cmp::Ordering::Less)); + let signed = lhs_neg || rhs_neg; + + let overflowed = if signed { + // signed safemath: (rhs >= 0 && c >= lhs) || (rhs < 0 && c < lhs) ==> no overflowed --invert-> overflowed + (rhs_neg + || matches!(c.range_ord(lhs, arena), Some(std::cmp::Ordering::Greater))) + && (!rhs_neg + || matches!( + c.range_ord(lhs, arena), + Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) + )) + } else { + // unsigned safemath: c < a ==> overflowed + matches!(c.range_ord(lhs, arena), Some(std::cmp::Ordering::Less)) + }; + + if *all_overflowed && !overflowed { + *all_overflowed = false; + } + + if !*one_overflowed && overflowed { + *one_overflowed = true; + } + + candidates.push(c); + } + }; + + add_candidate( + lhs_min, + rhs_min, + &mut candidates, + &mut all_overflowed, + &mut one_overflowed, + arena, + ); + add_candidate( + lhs_min, + rhs_max, + &mut candidates, + &mut all_overflowed, + &mut one_overflowed, + arena, + ); + add_candidate( + lhs_max, + rhs_min, + &mut candidates, + &mut all_overflowed, + &mut one_overflowed, + arena, + ); + add_candidate( + lhs_max, + rhs_max, + &mut candidates, + &mut all_overflowed, + &mut one_overflowed, + arena, + ); + + // We need to check if there is a value in [lhs_min, lhs_max] that when added to a value in [rhs_min, rhs_max] + // will not overflow + // + // If that is the case we can additionally compare saturating addition cases to the candidates + if !all_overflowed { + // We didnt overflow in some case, add saturating addition candidates + let saturating_add = + |lhs: &Elem<_>, rhs: &Elem<_>, candidates: &mut Vec>| -> bool { + if let Some(c) = lhs.range_add(rhs) { + candidates.push(c); + true + } else { + false + } + }; + // if max + max returned a result, that saturating addition will be largest possible candidate + if !saturating_add(lhs_max, rhs_max, &mut candidates) { + saturating_add(lhs_min, rhs_min, &mut candidates); + saturating_add(lhs_min, rhs_max, &mut candidates); + saturating_add(lhs_max, rhs_min, &mut candidates); + } + } + + // We need to check if there is a value in [lhs_min, lhs_max] that when added to a value in [rhs_min, rhs_max] + // will overflow and can result in the minimum value of the type + // + // We can do this by checking if we can conditionally overflow. 
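To make the conditional-overflow reasoning in the comment above concrete, here is a minimal, self-contained sketch of the same idea using plain `u8` intervals instead of `Elem<Concrete>` and `RangeArena`. The helper name `wrapping_add_bounds` is made up for illustration, and the sketch omits the saturating candidates that the surrounding implementation also mixes in when not every combination overflows.

```rust
// Sketch: when only some endpoint combinations of a wrapping add overflow,
// the type's minimum and maximum both become reachable, so they join the candidates.
fn wrapping_add_bounds(lhs: (u8, u8), rhs: (u8, u8)) -> (u8, u8) {
    let pairs = [(lhs.0, rhs.0), (lhs.0, rhs.1), (lhs.1, rhs.0), (lhs.1, rhs.1)];
    // Wrapped results of the four endpoint combinations are always candidates.
    let mut candidates: Vec<u8> = pairs.iter().map(|&(a, b)| a.wrapping_add(b)).collect();
    let overflows: Vec<bool> = pairs.iter().map(|&(a, b)| a.checked_add(b).is_none()).collect();
    let some_overflow = overflows.iter().any(|&o| o);
    let all_overflow = overflows.iter().all(|&o| o);
    // If only *some* combinations overflow, the (contiguous) unbounded sum range
    // straddles 2^8, so both u8::MAX and u8::MIN are reachable after wrapping.
    if some_overflow && !all_overflow {
        candidates.push(u8::MIN);
        candidates.push(u8::MAX);
    }
    (
        *candidates.iter().min().unwrap(),
        *candidates.iter().max().unwrap(),
    )
}

fn main() {
    // x: [100, 255], y: [100, 255] => unchecked x + y covers all of [0, 255].
    assert_eq!(wrapping_add_bounds((100, 255), (100, 255)), (0, 255));
    // x: [1, 2], y: [3, 4] => nothing overflows, bounds stay [4, 6].
    assert_eq!(wrapping_add_bounds((1, 2), (3, 4)), (4, 6));
}
```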
+ let conditional_overflow = !all_overflowed && one_overflowed; + if conditional_overflow { + let add_min = |elem: &Elem, candidates: &mut Vec>| { + if let Some(c) = elem.maybe_concrete() { + if let Some(min) = Concrete::min_of_type(&c.val) { + candidates.push(RangeConcrete::new(min, c.loc).into()); + } + + if let Some(max) = Concrete::max_of_type(&c.val) { + candidates.push(RangeConcrete::new(max, c.loc).into()); + } + } + }; + // We are able to conditionally overflow, so add min + add_min(lhs_min, &mut candidates); + add_min(lhs_max, &mut candidates); + } + + // Sort the candidates + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } + } else if maximize { + // if we are maximizing, the largest value will always just be the the largest value + the largest value + lhs_max.range_add(rhs_max) + } else { + // if we are minimizing, the smallest value will always just be the the smallest value + the smallest value + lhs_min.range_add(rhs_min) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::DummyGraph; + use ethers_core::types::U256; + use solang_parser::pt::Loc; + + #[test] + fn uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(15)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(5)), Loc::Implicit); + let result = x.range_add(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(20))); + } + + #[test] + fn saturating_uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::MAX), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::MAX), Loc::Implicit); + let result = x.range_add(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::MAX)); + } + + #[test] + fn sized_saturating_uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(8, U256::from(254)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(8, U256::from(254)), Loc::Implicit); + let result = x.range_add(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(8, U256::from(255))); + } + + #[test] + fn int_big_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(15)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-1i32)), Loc::Implicit); + let result = x.range_add(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(14))); + } + + #[test] + fn big_int_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(1)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-15i32)), Loc::Implicit); + let result = x.range_add(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(-14i32))); + } + + #[test] + fn int_int() { + let x = RangeConcrete::new(Concrete::Int(256, I256::from(-15i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-15i32)), Loc::Implicit); + let result = x.range_add(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(-30i32))); + } + + #[test] + fn min_int_min_int() { + let x = RangeConcrete::new(Concrete::Int(256, I256::MIN), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::MIN), Loc::Implicit); + let result = 
x.range_add(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::MIN)); + } + + #[test] + fn saturating_int_int() { + let x = RangeConcrete::new( + Concrete::Int(256, I256::MIN + I256::from(1i32)), + Loc::Implicit, + ); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-2i32)), Loc::Implicit); + let result = x.range_add(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::MIN)); + } + + #[test] + fn sized_saturating_int_int() { + let x = RangeConcrete::new(Concrete::Int(8, I256::from(-127i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(8, I256::from(-2i32)), Loc::Implicit); + let result = x.range_add(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(8, I256::from(-128i32))); + } + + #[test] + fn wrapping_uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::MAX), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(2)), Loc::Implicit); + let result = x + .range_wrapping_add(&y) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(1))); + } + + #[test] + fn sized_wrapping_uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(8, U256::from(255)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(8, U256::from(2)), Loc::Implicit); + let result = x + .range_wrapping_add(&y) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::Uint(8, U256::from(1))); + } + + #[test] + fn wrapping_big_int_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(1)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-15i32)), Loc::Implicit); + let result = x.range_add(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(-14i32))); + } + + #[test] + fn wrapping_int_int() { + let x = RangeConcrete::new(Concrete::Int(256, I256::MIN), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-1i32)), Loc::Implicit); + let result = x + .range_wrapping_add(&y) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::MAX)); + } + + #[test] + fn sized_wrapping_int_int() { + let x = RangeConcrete::new(Concrete::Int(8, I256::from(-128i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(8, I256::from(-1i32)), Loc::Implicit); + let result = x + .range_wrapping_add(&y) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::Int(8, I256::from(127i32))); + } + + #[test] + fn exec_wrapping_min_int_min_int() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let min = RangeConcrete::new(Concrete::Int(256, I256::MIN), Loc::Implicit).into(); + let max = RangeConcrete::new(Concrete::Int(256, I256::MAX), Loc::Implicit).into(); + let max_result = exec_add(&min, &min, &min, &max, true, true, &g, &mut arena) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Int(256, I256::MAX)); + let min_result = exec_add(&min, &min, &min, &max, false, true, &g, &mut arena) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Int(256, I256::MIN)); + } + + #[test] + fn exec_sized_uint_uint_saturating() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_uint_sized(105).into(); + let lhs_max = rc_uint_sized(150).into(); + let rhs_min = rc_uint_sized(10).into(); 
+ let rhs_max = rc_uint_sized(200).into(); + + let max_result = exec_add( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, true, false, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Uint(8, U256::from(255))); + let min_result = exec_add( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, false, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Uint(8, U256::from(115))); + } + + #[test] + fn exec_sized_wrapping_uint_uint() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_uint_sized(105).into(); + let lhs_max = rc_uint_sized(150).into(); + let rhs_min = rc_uint_sized(10).into(); + let rhs_max = rc_uint_sized(200).into(); + + let max_result = exec_add( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, true, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Uint(8, U256::from(255))); + let min_result = exec_add( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Uint(8, U256::from(0))); + } + + #[test] + fn exec_sized_wrapping_int_uint() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_int_sized(-128).into(); + let lhs_max = rc_int_sized(127).into(); + let rhs_min = rc_uint_sized(0).into(); + let rhs_max = rc_uint_sized(255).into(); + + let max_result = exec_add( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, true, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Int(8, I256::from(127i32))); + let min_result = exec_add( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Int(8, I256::from(-128i32))); + } + + #[test] + fn exec_sized_wrapping_int_int_max() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_int_sized(-128).into(); + let lhs_max = rc_int_sized(-100).into(); + let rhs_min = rc_int_sized(-5).into(); + let rhs_max = rc_int_sized(5).into(); + + let max_result = exec_add( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, true, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Int(8, I256::from(127i32))); + let min_result = exec_add( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Int(8, I256::from(-128i32))); + } +} diff --git a/crates/graph/src/range/exec/math_ops/div.rs b/crates/graph/src/range/exec/math_ops/div.rs new file mode 100644 index 00000000..00ed109d --- /dev/null +++ b/crates/graph/src/range/exec/math_ops/div.rs @@ -0,0 +1,494 @@ +use crate::nodes::Concrete; +use crate::range::{elem::*, exec_traits::*}; +use crate::GraphBackend; + +use shared::RangeArena; + +use ethers_core::types::{I256, U256}; +use solang_parser::pt::Loc; + +impl RangeDiv for RangeConcrete { + fn range_div(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + if rhs_val == 0.into() { + None + } else { + let op_res = lhs_val / rhs_val; + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + } + _ => match (&self.val, &other.val) { + (Concrete::Uint(lhs_size, val), 
Concrete::Int(_, neg_v)) => { + // Divisor cannot be zero because it would have been converted + // to a uint + let abs = neg_v.into_sign_and_abs().1; + let op_res = I256::from_raw(val / abs).saturating_div(I256::from(-1i32)); + let val = Concrete::Int(*lhs_size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { + if val == &U256::from(0) { + None + } else { + let abs = neg_v.into_sign_and_abs().1; + let op_res = I256::from_raw(abs / *val).saturating_div(I256::from(-1i32)); + let val = Concrete::Int(*lhs_size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + } + (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { + if r == &I256::from(0) { + None + } else { + let (op_res, overflow) = l.overflowing_div(*r); + if overflow { + let max = Concrete::max_of_type(&self.val).unwrap().int_val().unwrap(); + let val = Concrete::Int(*lhs_size, max); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } else { + let val = Concrete::Int(*lhs_size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + } + } + _ => None, + }, + } + } + + fn range_wrapping_div(&self, other: &Self) -> Option> { + // Only negative Int / negative Int needs overflowing_div + match (&self.val, &other.val) { + (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) + if *l < I256::from(0i32) && *r < I256::from(0i32) => + { + let op_res = l.overflowing_div(*r).0; + let val = Concrete::Int(*lhs_size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => self.range_div(other), + } + } +} + +impl RangeDiv for Elem { + fn range_div(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_div(b), + _ => None, + } + } + + fn range_wrapping_div(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_div(b), + _ => None, + } + } +} + +/// Executes an division given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. 
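+///
+/// For intuition, a hypothetical sketch in the style of the unit tests at the bottom of this file
+/// (`DummyGraph` and `rc_uint_sized` are the test helpers used there; the values are arbitrary and the
+/// snippet is illustrative rather than a compiled doc-test):
+///
+/// ```ignore
+/// let g = DummyGraph::default();
+/// let mut arena = Default::default();
+/// // x in [10, 100], y in [2, 5] (uint8-sized)
+/// let (lhs_min, lhs_max) = (rc_uint_sized(10).into(), rc_uint_sized(100).into());
+/// let (rhs_min, rhs_max) = (rc_uint_sized(2).into(), rc_uint_sized(5).into());
+/// // Maximum pairs the largest numerator with the smallest denominator: 100 / 2 = 50.
+/// let max = exec_div(&lhs_min, &lhs_max, &rhs_min, &rhs_max, true, false, &g, &mut arena);
+/// // Minimum pairs the smallest numerator with the largest denominator: 10 / 5 = 2.
+/// let min = exec_div(&lhs_min, &lhs_max, &rhs_min, &rhs_max, false, false, &g, &mut arena);
+/// ```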
+/// +/// ### Note +/// Signed integers use 2's complement representation so the maximum is 2size - 1 - 1, while unsigned integers are 2size - 1 +/// +/// +/// ### Truth Tables +/// Truth table for `checked div` operation: +/// +/// `todo!()` +/// +/// Truth table for `wrapping div` operation: +/// +/// `todo!()` +/// +pub fn exec_div( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + wrapping: bool, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + let mut candidates = vec![]; + let saturating_div = |lhs: &Elem<_>, rhs: &Elem<_>, candidates: &mut Vec>| { + if let Some(c) = lhs.range_div(rhs) { + candidates.push(c); + } + }; + + let one = Elem::from(Concrete::from(U256::from(1))); + let negative_one = Elem::from(Concrete::from(I256::from(-1i32))); + + let min_contains = matches!( + rhs_min.range_ord(&one, arena), + Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) + ); + + let max_contains = matches!( + rhs_max.range_ord(&one, arena), + Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) + ); + + // for division, if 1 is contained by the denominator, we can just add the left hand side as candidates + if min_contains && max_contains { + candidates.push(lhs_min.clone()); + candidates.push(lhs_max.clone()); + } + + let min_contains_neg_one = matches!( + rhs_min.range_ord(&negative_one, arena), + Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) + ); + + let max_contains_neg_one = matches!( + rhs_max.range_ord(&negative_one, arena), + Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) + ); + + if min_contains_neg_one && max_contains_neg_one { + // if the divisor contains -1, we can just saturating multiply by -1 + if matches!( + lhs_min.range_ord(&negative_one, arena), + Some(std::cmp::Ordering::Less) + ) { + // lhs can be negative, check if it contains int_min + let type_min = Concrete::min_of_type(&lhs_min.maybe_concrete().unwrap().val).unwrap(); + let int_val = type_min.int_val().unwrap(); + let min = Elem::from(type_min); + let min_plus_one = Elem::Concrete(rc_i256_sized(int_val + I256::from(1i32))); + + let lhs_contains_int_min = matches!( + lhs_min.range_ord(&min, arena), + Some(std::cmp::Ordering::Equal) + ); + + let max_contains_int_min_plus_one = matches!( + rhs_max.range_ord(&min_plus_one, arena), + Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) + ); + + // add int max to candidates + if lhs_contains_int_min && max_contains_int_min_plus_one { + if let Some(c) = min_plus_one.range_mul(&negative_one) { + candidates.push(c); + } + } else if let Some(c) = lhs_min.range_mul(&negative_one) { + // add min * -1 + candidates.push(c); + } + } else if let Some(c) = lhs_min.range_mul(&negative_one) { + // add min * -1 + candidates.push(c); + } + + if let Some(c) = lhs_max.range_mul(&negative_one) { + candidates.push(c); + } + } + + if wrapping { + let mut all_overflowed = true; + let mut one_overflowed = false; + let add_candidate = |lhs: &Elem, + rhs: &Elem, + candidates: &mut Vec>, + all_overflowed: &mut bool, + one_overflowed: &mut bool, + arena: &mut RangeArena>| { + if let Some(c) = lhs.range_wrapping_div(rhs) { + let mut overflowed = false; + let neg_one = + RangeConcrete::new(Concrete::Int(8, I256::from(-1i32)), Loc::Implicit).into(); + if matches!( + lhs.range_ord(&neg_one, arena), + Some(std::cmp::Ordering::Less) + ) { + // rhs == -1 + let div_neg_one = matches!( + rhs.range_ord(&neg_one, arena), + Some(std::cmp::Ordering::Equal) + ); 
+ + let type_min = + Concrete::min_of_type(&lhs.maybe_concrete().unwrap().val).unwrap(); + let min = RangeConcrete::new(type_min, Loc::Implicit).into(); + + // lhs == INT_MIN + let num_int_min = + matches!(lhs.range_ord(&min, arena), Some(std::cmp::Ordering::Equal)); + if div_neg_one && num_int_min { + overflowed = true; + } + } + + if *all_overflowed && !overflowed { + *all_overflowed = false; + } + + if !*one_overflowed && overflowed { + *one_overflowed = true; + } + + candidates.push(c); + } + }; + + add_candidate( + lhs_min, + rhs_min, + &mut candidates, + &mut all_overflowed, + &mut one_overflowed, + arena, + ); + add_candidate( + lhs_min, + rhs_max, + &mut candidates, + &mut all_overflowed, + &mut one_overflowed, + arena, + ); + add_candidate( + lhs_max, + rhs_min, + &mut candidates, + &mut all_overflowed, + &mut one_overflowed, + arena, + ); + add_candidate( + lhs_max, + rhs_max, + &mut candidates, + &mut all_overflowed, + &mut one_overflowed, + arena, + ); + + if one_overflowed { + let add_min = |elem: &Elem, candidates: &mut Vec>| { + if let Some(c) = elem.maybe_concrete() { + if let Some(min) = Concrete::min_of_type(&c.val) { + candidates.push(RangeConcrete::new(min, c.loc).into()); + } + } + }; + add_min(lhs_min, &mut candidates); + add_min(lhs_max, &mut candidates); + } + } else { + // without inspecting types of lhs and rhs, its easiest just to run them all and + // sort + saturating_div(lhs_min, rhs_min, &mut candidates); + saturating_div(lhs_min, rhs_max, &mut candidates); + saturating_div(lhs_max, rhs_min, &mut candidates); + saturating_div(lhs_max, rhs_max, &mut candidates); + } + + // Sort the candidates + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::DummyGraph; + use solang_parser::pt::Loc; + + #[test] + fn uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(15)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(5)), Loc::Implicit); + let result = x.range_div(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(3))); + } + + #[test] + fn uint_int() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(15)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(5i32)), Loc::Implicit); + let result = x.range_div(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(3))); + } + + #[test] + fn uint_neg_int() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(15)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-5i32)), Loc::Implicit); + let result = x.range_div(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(-3i32))); + } + + #[test] + fn neg_int_uint() { + let x = RangeConcrete::new(Concrete::Int(256, I256::from(-15i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(5)), Loc::Implicit); + let result = x.range_div(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(-3i32))); + } + + #[test] + fn neg_int_neg_int() { + let x = RangeConcrete::new(Concrete::Int(256, I256::from(-15i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, 
I256::from(-5i32)), Loc::Implicit); + let result = x.range_div(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(3i32))); + } + + #[test] + fn uint_zero() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(15)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(0)), Loc::Implicit); + assert!(x.range_div(&y).is_none()); + } + + #[test] + fn int_zero() { + let x = RangeConcrete::new(Concrete::Int(256, I256::from(-15i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(0)), Loc::Implicit); + assert!(x.range_div(&y).is_none()); + } + + #[test] + fn wrapping_int_int() { + let x = RangeConcrete::new(Concrete::Int(256, I256::MIN), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-1i32)), Loc::Implicit); + let result = x.range_wrapping_div(&y).unwrap(); + let expected = x.clone(); + assert_eq!(result, expected.into()); + } + + #[test] + fn nonwrapping_int_int() { + let x = RangeConcrete::new(Concrete::Int(256, I256::MIN), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-1i32)), Loc::Implicit); + let result = x.range_div(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::MAX)); + } + + #[test] + fn exec_sized_uint_uint_saturating() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_uint_sized(5).into(); + let lhs_max = rc_uint_sized(15).into(); + let rhs_min = rc_uint_sized(1).into(); + let rhs_max = rc_uint_sized(20).into(); + + let max_result = exec_div( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, true, false, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Uint(8, U256::from(15))); + let min_result = exec_div( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, false, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Uint(8, U256::from(0))); + } + + #[test] + fn exec_sized_wrapping_uint_uint() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_uint_sized(5).into(); + let lhs_max = rc_uint_sized(15).into(); + let rhs_min = rc_uint_sized(1).into(); + let rhs_max = rc_uint_sized(16).into(); + + let max_result = exec_div( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, true, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Uint(8, U256::from(15))); + let min_result = exec_div( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Uint(8, U256::from(0))); + } + + #[test] + fn exec_sized_wrapping_int_uint() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_int_sized(-128).into(); + let lhs_max = rc_int_sized(127).into(); + let rhs_min = rc_uint_sized(0).into(); + let rhs_max = rc_uint_sized(255).into(); + + let max_result = exec_div( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, true, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Int(8, I256::from(127i32))); + let min_result = exec_div( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Int(8, I256::from(-128i32))); + } + + #[test] + fn exec_sized_wrapping_int_int_max() { + let g = 
DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_int_sized(-128).into(); + let lhs_max = rc_int_sized(-100).into(); + let rhs_min = rc_int_sized(-5).into(); + let rhs_max = rc_int_sized(5).into(); + + let max_result = exec_div( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, true, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Int(8, I256::from(127i32))); + let min_result = exec_div( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Int(8, I256::from(-128i32))); + } +} diff --git a/crates/graph/src/range/exec/math_ops/exp.rs b/crates/graph/src/range/exec/math_ops/exp.rs new file mode 100644 index 00000000..aad9faeb --- /dev/null +++ b/crates/graph/src/range/exec/math_ops/exp.rs @@ -0,0 +1,201 @@ +use crate::nodes::Concrete; +use crate::range::{elem::*, exec_traits::*}; +use crate::GraphBackend; + +use shared::RangeArena; + +use ethers_core::types::U256; + +impl RangeExp for RangeConcrete { + fn range_exp(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + let max = Concrete::max_of_type(&self.val).unwrap(); + + let op_res = lhs_val.checked_pow(rhs_val); + let op_res = if let Some(num) = op_res { + num.min(max.into_u256().unwrap()) + } else { + max.into_u256().unwrap() + }; + + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => match (&self.val, &other.val.into_u256()) { + // exponent must be positive otherwise return None. + (Concrete::Int(lhs_size, neg_v), Some(val)) => { + let pow2 = val % U256::from(2) == 0.into(); + let val = if val > &U256::from(u32::MAX) { + if pow2 { + Concrete::max_of_type(&self.val).unwrap() + } else { + Concrete::min_of_type(&self.val).unwrap() + } + } else { + let min = Concrete::min_of_type(&self.val).unwrap().int_val().unwrap(); + let max = Concrete::max_of_type(&self.val).unwrap().int_val().unwrap(); + + let op_res = neg_v.checked_pow(val.as_u32()); + if let Some(num) = op_res { + if pow2 { + Concrete::Int(*lhs_size, num.min(max)) + } else { + Concrete::Int(*lhs_size, num.max(min)) + } + } else if pow2 { + Concrete::max_of_type(&self.val).unwrap() + } else { + Concrete::min_of_type(&self.val).unwrap() + } + }; + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => None, + }, + } + } +} + +impl RangeExp for Elem { + fn range_exp(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_exp(b), + (Elem::Concrete(a), _) if a.val.is_zero() => Some(Concrete::from(U256::from(1)).into()), + (_, Elem::Concrete(b)) if b.val.is_zero() => Some(other.clone()), + _ => None, + } + } +} + +/// Executes the `exponentiation` operation given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. 
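+///
+/// For intuition, a hypothetical sketch in the style of the unit tests at the bottom of this file
+/// (`DummyGraph` and `rc_uint_sized` are the test helpers used there; values are arbitrary and the
+/// snippet is illustrative only):
+///
+/// ```ignore
+/// let g = DummyGraph::default();
+/// let mut arena = Default::default();
+/// // x in [2, 3], y in [4, 5] (uint8-sized)
+/// let (lhs_min, lhs_max) = (rc_uint_sized(2).into(), rc_uint_sized(3).into());
+/// let (rhs_min, rhs_max) = (rc_uint_sized(4).into(), rc_uint_sized(5).into());
+/// // Maximum: the largest base raised to the largest exponent, 3^5 = 243.
+/// let max = exec_exp(&lhs_min, &lhs_max, &rhs_min, &rhs_max, true, &g, &mut arena);
+/// // Minimum: the smallest base raised to the smallest exponent, 2^4 = 16.
+/// let min = exec_exp(&lhs_min, &lhs_max, &rhs_min, &rhs_max, false, &g, &mut arena);
+/// ```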
+/// +/// TODO: Add wrapping/unchecked version +pub fn exec_exp( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + // TODO: Improve this + let candidates = vec![ + lhs_min.range_exp(rhs_min), + lhs_min.range_exp(rhs_max), + lhs_max.range_exp(rhs_min), + lhs_max.range_exp(rhs_max), + ]; + let mut candidates = candidates.into_iter().flatten().collect::>(); + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::DummyGraph; + use ethers_core::types::{I256, U256}; + use solang_parser::pt::Loc; + + #[test] + fn uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(15)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(5)), Loc::Implicit); + let result = x.range_exp(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(759375))); + } + + #[test] + fn saturating_uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::MAX), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::MAX), Loc::Implicit); + let result = x.range_exp(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::MAX)); + } + + #[test] + fn sized_saturating_uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(8, U256::from(254)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(8, U256::from(254)), Loc::Implicit); + let result = x.range_exp(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(8, U256::from(255))); + } + + #[test] + fn int_uint() { + let x = RangeConcrete::new(Concrete::Int(256, I256::from(-1i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(15)), Loc::Implicit); + let result = x.range_exp(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(-1i32))); + } + + #[test] + fn int_uint_2() { + let x = RangeConcrete::new(Concrete::Int(256, I256::from(-15i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(15i32)), Loc::Implicit); + let result = x.range_exp(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!( + result.val, + Concrete::Int(256, I256::from(-437893890380859375i128)) + ); + } + + #[test] + fn saturating_int_int() { + let x = RangeConcrete::new( + Concrete::Int(256, I256::MIN + I256::from(1i32)), + Loc::Implicit, + ); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(2i32)), Loc::Implicit); + let result = x.range_exp(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::MAX)); + } + + #[test] + fn sized_saturating_int_int() { + let x = RangeConcrete::new(Concrete::Int(8, I256::from(-127i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(8, I256::from(-2i32)), Loc::Implicit); + let result = x.range_add(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(8, I256::from(-128i32))); + } + + #[test] + fn exec_sized_uint_uint_saturating() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_uint_sized(2).into(); + let lhs_max = rc_uint_sized(150).into(); + let rhs_min = 
rc_uint_sized(3).into(); + let rhs_max = rc_uint_sized(200).into(); + + let max_result = exec_exp(&lhs_min, &lhs_max, &rhs_min, &rhs_max, true, &g, &mut arena) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Uint(8, U256::from(255))); + let min_result = exec_exp( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Uint(8, U256::from(8))); + } +} diff --git a/crates/graph/src/range/exec/math_ops/mod.rs b/crates/graph/src/range/exec/math_ops/mod.rs new file mode 100644 index 00000000..fe4a92c5 --- /dev/null +++ b/crates/graph/src/range/exec/math_ops/mod.rs @@ -0,0 +1,12 @@ +mod add; +mod div; +mod exp; +mod modulo; +mod mul; +mod sub; +pub use add::exec_add; +pub use div::exec_div; +pub use exp::exec_exp; +pub use modulo::exec_mod; +pub use mul::exec_mul; +pub use sub::exec_sub; diff --git a/crates/graph/src/range/exec/math_ops/modulo.rs b/crates/graph/src/range/exec/math_ops/modulo.rs new file mode 100644 index 00000000..54e653eb --- /dev/null +++ b/crates/graph/src/range/exec/math_ops/modulo.rs @@ -0,0 +1,329 @@ +use crate::nodes::Concrete; +use crate::range::{elem::*, exec_traits::*}; +use crate::GraphBackend; + +use shared::RangeArena; + +use ethers_core::types::{I256, U256}; + +impl RangeMod for RangeConcrete { + fn range_mod(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + if rhs_val == 0.into() { + return None; + } + let op_res = lhs_val % rhs_val; + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => match (&self.val, &other.val) { + (Concrete::Uint(lhs_size, val), Concrete::Int(_, neg_v)) => { + let op_res = I256::from_raw(*val) % *neg_v; + let val = Concrete::Int(*lhs_size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) if *val != 0.into() => { + let op_res = *neg_v % I256::from_raw(*val); + let val = Concrete::Int(*lhs_size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { + // the wrapping never actually occurs mathematically. See ethers-rs docs for more info + let op_res = l.wrapping_rem(*r); + let val = Concrete::Int(*lhs_size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => None, + }, + } + } +} + +impl RangeMod for Elem { + fn range_mod(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_mod(b), + _ => None, + } + } +} + +/// Executes an modulus given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. 
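+///
+/// For intuition, a hypothetical sketch mirroring the `exec_sized_uint_uint_1` unit test at the bottom of
+/// this file (`DummyGraph` and `rc_uint_sized` are the test helpers; values are arbitrary and the snippet
+/// is illustrative rather than a compiled doc-test):
+///
+/// ```ignore
+/// let g = DummyGraph::default();
+/// let mut arena = Default::default();
+/// // x in [5, 15], y in [1, 20] (uint8-sized)
+/// let (lhs_min, lhs_max) = (rc_uint_sized(5).into(), rc_uint_sized(15).into());
+/// let (rhs_min, rhs_max) = (rc_uint_sized(1).into(), rc_uint_sized(20).into());
+/// // x is entirely smaller than y's maximum, so the largest remainder is x's maximum: 15.
+/// let max = exec_mod(&lhs_min, &lhs_max, &rhs_min, &rhs_max, true, &g, &mut arena);
+/// // y's minimum is 1, and any value taken mod 1 is 0, so the smallest remainder is 0.
+/// let min = exec_mod(&lhs_min, &lhs_max, &rhs_min, &rhs_max, false, &g, &mut arena);
+/// ```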
+/// +/// ### Note +/// Signed integers use 2's complement representation so the maximum is 2size - 1 - 1, while unsigned integers are 2size - 1 +/// +/// +/// ### Truth Tables +/// Truth table for `checked mod` operation: +/// +/// `todo!()` +pub fn exec_mod( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + let is_const = |l: &Elem<_>, r: &Elem<_>, arena: &mut RangeArena>| -> bool { + matches!(l.range_ord(r, arena), Some(std::cmp::Ordering::Equal)) + }; + + if is_const(lhs_min, lhs_max, arena) && is_const(rhs_min, rhs_max, arena) { + return lhs_min.range_mod(rhs_min); + } + + let zero = Elem::from(Concrete::from(U256::zero())); + + let lhs_min_is_pos = matches!( + lhs_min.range_ord(&zero, arena), + Some(std::cmp::Ordering::Equal) | Some(std::cmp::Ordering::Greater) + ); + + let lhs_max_is_pos = matches!( + lhs_max.range_ord(&zero, arena), + Some(std::cmp::Ordering::Equal) | Some(std::cmp::Ordering::Greater) + ); + let mod_min_is_pos = matches!( + rhs_min.range_ord(&zero, arena), + Some(std::cmp::Ordering::Equal) | Some(std::cmp::Ordering::Greater) + ); + + let mod_max_is_pos = matches!( + rhs_max.range_ord(&zero, arena), + Some(std::cmp::Ordering::Equal) | Some(std::cmp::Ordering::Greater) + ); + + // check if all lhs values are less than rhs values + if maximize + && lhs_max_is_pos + && mod_max_is_pos + && matches!( + lhs_max.range_ord(rhs_max, arena), + Some(std::cmp::Ordering::Less) + ) + { + // the lhs is entirely smaller than the modulo, so its effectively a noop, just return + // the min or max + return Some(lhs_max.clone()); + } + + let mut candidates = vec![]; + let one = Elem::from(Concrete::from(U256::from(1))); + let negative_one = Elem::from(Concrete::from(I256::from(-1i32))); + if !mod_min_is_pos { + if let Some(r) = rhs_min.range_add(&one) { + candidates.push(r); + } + } else if let Some(r) = rhs_min.range_sub(&one) { + candidates.push(r); + } + + if !mod_max_is_pos { + if let Some(r) = rhs_max.range_add(&one) { + candidates.push(r); + } + } else if let Some(r) = rhs_max.range_sub(&one) { + candidates.push(r); + } + + if !lhs_min_is_pos { + if let Some(neg_max) = rhs_max.range_mul(&negative_one) { + match neg_max.range_ord(lhs_min, arena) { + None => {} + Some(std::cmp::Ordering::Less) => candidates.push(lhs_min.clone()), + Some(std::cmp::Ordering::Greater) => { + candidates.push(neg_max.range_add(&one).unwrap()) + } + _ => candidates.push(lhs_min.clone()), + } + } + } + + // Sort the candidates + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::DummyGraph; + use solang_parser::pt::Loc; + + #[test] + fn uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(17)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(5)), Loc::Implicit); + let result = x.range_mod(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(2))); + } + + #[test] + fn uint_int() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(17)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(5i32)), Loc::Implicit); + let result = x.range_mod(&y).unwrap().maybe_concrete_value().unwrap(); + 
assert_eq!(result.val, Concrete::Uint(256, U256::from(2))); + } + + #[test] + fn uint_neg_int() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(17)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-5i32)), Loc::Implicit); + let result = x.range_mod(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(2))); + } + + #[test] + fn neg_int_uint() { + let x = RangeConcrete::new(Concrete::Int(256, I256::from(-17i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(5)), Loc::Implicit); + let result = x.range_mod(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(-2i32))); + } + + #[test] + fn neg_int_neg_int() { + let x = RangeConcrete::new(Concrete::Int(256, I256::from(-17i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-5i32)), Loc::Implicit); + let result = x.range_mod(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(-2i32))); + } + + #[test] + fn uint_zero() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(17)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(0)), Loc::Implicit); + assert!(x.range_mod(&y).is_none()); + } + + #[test] + fn int_zero() { + let x = RangeConcrete::new(Concrete::Int(256, I256::from(-17i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(0)), Loc::Implicit); + assert!(x.range_mod(&y).is_none()); + } + + #[test] + fn int_int() { + let x = RangeConcrete::new(Concrete::Int(256, I256::MIN), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-1i32)), Loc::Implicit); + let result = x.range_mod(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(0i32))); + } + + #[test] + fn exec_sized_uint_uint_1() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_uint_sized(5).into(); + let lhs_max = rc_uint_sized(15).into(); + let rhs_min = rc_uint_sized(1).into(); + let rhs_max = rc_uint_sized(20).into(); + + let max_result = exec_mod(&lhs_min, &lhs_max, &rhs_min, &rhs_max, true, &g, &mut arena) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Uint(8, U256::from(15))); + let min_result = exec_mod( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Uint(8, U256::from(0))); + } + + #[test] + fn exec_sized_uint_uint_2() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_uint_sized(16).into(); + let lhs_max = rc_uint_sized(160).into(); + let rhs_min = rc_uint_sized(1).into(); + let rhs_max = rc_uint_sized(16).into(); + + let max_result = exec_mod(&lhs_min, &lhs_max, &rhs_min, &rhs_max, true, &g, &mut arena) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Uint(8, U256::from(15))); + let min_result = exec_mod( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Uint(8, U256::from(0))); + } + + #[test] + fn exec_sized_int_uint() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_int_sized(-128).into(); + let lhs_max = rc_int_sized(127).into(); + let rhs_min = rc_uint_sized(0).into(); + let rhs_max = 
rc_uint_sized(255).into(); + + let max_result = exec_mod(&lhs_min, &lhs_max, &rhs_min, &rhs_max, true, &g, &mut arena) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Int(8, I256::from(127i32))); + let min_result = exec_mod( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Int(8, I256::from(-128i32))); + } + + #[test] + fn exec_sized_int_int_max() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_int_sized(-128).into(); + let lhs_max = rc_int_sized(-100).into(); + let rhs_min = rc_int_sized(-5).into(); + let rhs_max = rc_int_sized(5).into(); + + let max_result = exec_mod(&lhs_min, &lhs_max, &rhs_min, &rhs_max, true, &g, &mut arena) + .unwrap() + .maybe_concrete() + .unwrap(); + // TODO: improve mod calc to consider lhs being entirely negative + // assert_eq!(max_result.val, Concrete::Int(8, I256::from(0i32))); + assert_eq!(max_result.val, Concrete::Int(8, I256::from(4i32))); + let min_result = exec_mod( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Int(8, I256::from(-4i32))); + } +} diff --git a/crates/graph/src/range/exec/math_ops/mul.rs b/crates/graph/src/range/exec/math_ops/mul.rs new file mode 100644 index 00000000..a1cb33a0 --- /dev/null +++ b/crates/graph/src/range/exec/math_ops/mul.rs @@ -0,0 +1,507 @@ +use crate::nodes::Concrete; +use crate::range::{elem::*, exec_traits::*}; +use crate::GraphBackend; + +use shared::RangeArena; + +use ethers_core::types::{I256, U256}; + +impl RangeMul for RangeConcrete { + fn range_mul(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + let max = Concrete::max_of_type(&self.val) + .unwrap() + .into_u256() + .unwrap(); + let mut op_res = lhs_val.saturating_mul(rhs_val).min(max); + if let Some(min) = Concrete::min_of_type(&self.val).unwrap().into_u256() { + op_res = op_res.max(min); + } + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => match (&self.val, &other.val) { + (Concrete::Uint(lhs_size, val), Concrete::Int(_, neg_v)) + | (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { + let tmp = Concrete::Int(*lhs_size, I256::from(0i32)); + let min = Concrete::min_of_type(&tmp).unwrap().int_val().unwrap(); + let max = Concrete::max_of_type(&tmp).unwrap().int_val().unwrap(); + + let op_res = neg_v.saturating_mul(I256::from_raw(*val)).max(min).min(max); + let val = Concrete::Int(*lhs_size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { + let tmp = Concrete::Int(*lhs_size, I256::from(0i32)); + let min = Concrete::min_of_type(&tmp).unwrap().int_val().unwrap(); + let max = Concrete::max_of_type(&tmp).unwrap().int_val().unwrap(); + + let op_res = l.saturating_mul(*r).min(max).max(min); + let val = Concrete::Int(*lhs_size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => None, + }, + } + } + + fn range_wrapping_mul(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + let op_res = lhs_val.overflowing_mul(rhs_val).0; + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + 
                Some(rc.into())
+            }
+            _ => match (&self.val, &other.val) {
+                (Concrete::Uint(lhs_size, val), Concrete::Int(_, neg_v))
+                | (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => {
+                    let op_res = neg_v.overflowing_mul(I256::from_raw(*val)).0;
+                    let val = Concrete::Int(*lhs_size, op_res).size_wrap();
+                    let rc = RangeConcrete::new(val, self.loc);
+                    Some(rc.into())
+                }
+                (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => {
+                    let op_res = l.overflowing_mul(*r).0;
+                    let val = Concrete::Int(*lhs_size, op_res).size_wrap();
+                    let rc = RangeConcrete::new(val, self.loc);
+                    Some(rc.into())
+                }
+                _ => None,
+            },
+        }
+    }
+}
+
+impl RangeMul<Concrete> for Elem<Concrete> {
+    fn range_mul(&self, other: &Self) -> Option<Elem<Concrete>> {
+        match (self, other) {
+            (Elem::Concrete(a), Elem::Concrete(b)) => a.range_mul(b),
+            (Elem::Concrete(a), _) if a.val.is_zero() => Some(self.clone()),
+            (_, Elem::Concrete(b)) if b.val.is_zero() => Some(other.clone()),
+            (Elem::Concrete(a), b) if a.val.is_one() => Some(b.clone()),
+            (a, Elem::Concrete(b)) if b.val.is_one() => Some(a.clone()),
+            _ => None,
+        }
+    }
+
+    fn range_wrapping_mul(&self, other: &Self) -> Option<Elem<Concrete>> {
+        match (self, other) {
+            (Elem::Concrete(a), Elem::Concrete(b)) => a.range_wrapping_mul(b),
+            (Elem::Concrete(a), _) if a.val.is_zero() => Some(self.clone()),
+            (_, Elem::Concrete(b)) if b.val.is_zero() => Some(other.clone()),
+            (Elem::Concrete(a), b) if a.val.is_one() => Some(b.clone()),
+            (a, Elem::Concrete(b)) if b.val.is_one() => Some(a.clone()),
+            _ => None,
+        }
+    }
+}
+
+/// Executes a multiplication given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound
+/// of the operation.
+///
+/// ### Explanation
+/// A fact about multiplication is that the smallest value possible (in unbounded unsigned integer space) is obtained from the two _minimum_
+/// values and the largest from the two _maximum_ values. In the unbounded signed integer space, the smallest value will be
+/// the product of the most negative value and the most positive value.
+///
+/// Pyrometer _overestimates_ the product of a multiplication in both the saturating and wrapping cases. This is due
+/// to the fact that the multiplicands may not contain factors of the maximum/minimum. That is to say,
+/// the factors of, for example, 2^size - 1 may not exactly be in the left hand side
+/// and right hand side. By default, we allow this overestimation. The only case we cover is when the product of the two elements' minimums
+/// always overflows.
+///
+/// For example, assume:
+///
+///     uint256 x: [2^240, 2^256 - 1]
+///     uint256 y: [2^16, 2^256 - 1]
+///     unchecked { x * y }
+///
+/// In this multiplication of `x * y`, it will always overflow, so the minimum is still `x.min * y.min` and the maximum is still `x.max * y.max`. However,
+/// had `x.min * y.min` _not_ overflowed, the maximum would have been `type(uint256).max` (despite not knowing whether the factors of `type(uint256).max` are contained
+/// in x & y) and the minimum would be `type(uint256).min` (despite not knowing whether `unchecked { type(uint256).max + 1 }`'s factors are contained in x & y). Therefore,
+/// we would have potentially underestimated the minimum and overestimated the maximum of the product. Factorization of large integers is untenable from a performance standpoint,
+/// so this concession on precision is accepted (and remains sound, but can result in false positive analyses if depended on).
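+///
+/// For intuition, a hypothetical sketch of the sized, wrapping case, mirroring the
+/// `exec_sized_wrapping_uint_uint_full_overflow` unit test at the bottom of this file (`DummyGraph` and
+/// `rc_uint_sized` are the test helpers; values are arbitrary and the snippet is illustrative only):
+///
+/// ```ignore
+/// let g = DummyGraph::default();
+/// let mut arena = Default::default();
+/// // x in [126, 127], y in [252, 255] (uint8-sized): every product overflows a uint8.
+/// let (lhs_min, lhs_max) = (rc_uint_sized(126).into(), rc_uint_sized(127).into());
+/// let (rhs_min, rhs_max) = (rc_uint_sized(252).into(), rc_uint_sized(255).into());
+/// // Because the surviving factors are not tracked, the result is over/underestimated to the
+/// // full uint8 range: maximum 255 ...
+/// let max = exec_mul(&lhs_min, &lhs_max, &rhs_min, &rhs_max, true, true, &g, &mut arena);
+/// // ... and minimum 0.
+/// let min = exec_mul(&lhs_min, &lhs_max, &rhs_min, &rhs_max, false, true, &g, &mut arena);
+/// ```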
+///
+/// ### Note
+/// Signed integers use 2's complement representation, so the signed maximum is 2^(size - 1) - 1, while the unsigned maximum is 2^size - 1.
+///
+/// ### Truth Tables
+/// Truth table for `checked mul` operation:
+///
+///| Mul         | Uint                                           | Int                                            | BytesX | Address | Bytes | String |
+///|-------------|------------------------------------------------|------------------------------------------------|--------|---------|-------|--------|
+///| **Uint**    | min: lhs_min * rhs_min, max: lhs_max * rhs_max | min: lhs_min * rhs_min, max: lhs_max * rhs_max | N/A    | N/A     | N/A   | N/A    |
+///| **Int**     | min: lhs_min * rhs_min, max: lhs_max * rhs_max | min: lhs_min * rhs_min, max: lhs_max * rhs_max | N/A    | N/A     | N/A   | N/A    |
+///| **BytesX**  | N/A                                            | N/A                                            | N/A    | N/A     | N/A   | N/A    |
+///| **Address** | N/A                                            | N/A                                            | N/A    | N/A     | N/A   | N/A    |
+///| **Bytes**   | N/A                                            | N/A                                            | N/A    | N/A     | N/A   | N/A    |
+///| **String**  | N/A                                            | N/A                                            | N/A    | N/A     | N/A   | N/A    |
+///
+/// Truth table for `wrapping mul` operation:
+///
+/// `todo!()`
+///
+// | Wrapping Mul | Uint                                                                                                                 | Int                                                               | BytesX | Address | Bytes | String |
+// |--------------|----------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------|--------|---------|-------|--------|
+// | **Uint**     | min: {0, lhs_min * rhs_min}, max: {2^size - 1, lhs_max * rhs_max}                                                      | min: {0, lhs_min + rhs_min}, max: {2^size - 1, lhs_max + rhs_max} | N/A    | N/A     | N/A   | N/A    |
+// | **Int**      | min: {-2^(size - 1), lhs_min * rhs_min, lhs_min * rhs_max, lhs_max * rhs_min}, max: {2^size - 1, lhs_max * rhs_max}    | min: {0, lhs_min * rhs_min}, max: {2^size - 1, lhs_max + rhs_max} | N/A    | N/A     | N/A   | N/A    |
+// | **BytesX**   | N/A                                                                                                                    | N/A                                                               | N/A    | N/A     | N/A   | N/A    |
+// | **Address**  | N/A                                                                                                                    | N/A                                                               | N/A    | N/A     | N/A   | N/A    |
+// | **Bytes**    | N/A                                                                                                                    | N/A                                                               | N/A    | N/A     | N/A   | N/A    |
+// | **String**   | N/A                                                                                                                    | N/A                                                               | N/A    | N/A     | N/A   | N/A    |
+pub fn exec_mul(
+    lhs_min: &Elem<Concrete>,
+    lhs_max: &Elem<Concrete>,
+    rhs_min: &Elem<Concrete>,
+    rhs_max: &Elem<Concrete>,
+    maximize: bool,
+    wrapping: bool,
+    _analyzer: &impl GraphBackend,
+    arena: &mut RangeArena<Elem<Concrete>>,
+) -> Option<Elem<Concrete>> {
+    let mut candidates = vec![];
+    let saturating_mul = |lhs: &Elem<_>, rhs: &Elem<_>, candidates: &mut Vec<Elem<Concrete>>| {
+        if let Some(c) = lhs.range_mul(rhs) {
+            candidates.push(c);
+        }
+    };
+
+    if wrapping {
+        let zero = Elem::from(Concrete::from(U256::zero()));
+        let mut all_overflowed = true;
+        let mut one_overflowed = false;
+        let add_candidate = |lhs: &Elem<Concrete>,
+                             rhs: &Elem<Concrete>,
+                             candidates: &mut Vec<Elem<Concrete>>,
+                             all_overflowed: &mut bool,
+                             one_overflowed: &mut bool,
+                             arena: &mut RangeArena<_>| {
+            if let Some(c) = lhs.range_wrapping_mul(rhs) {
+                if !matches!(lhs.range_ord(&zero, arena), Some(std::cmp::Ordering::Equal)) {
+                    let reverse = c.range_div(lhs).unwrap();
+                    let overflowed = !matches!(
+                        reverse.range_ord(rhs, arena).unwrap(),
+                        std::cmp::Ordering::Equal
+                    );
+                    if *all_overflowed && !overflowed {
+                        *all_overflowed = false;
+                    }
+
+                    if !*one_overflowed && overflowed {
+                        *one_overflowed = true;
+                    }
+                }
+
+                candidates.push(c);
+            }
+        };
+
+        add_candidate(
+            lhs_min,
+            rhs_min,
+            &mut candidates,
+            &mut all_overflowed,
+            &mut one_overflowed,
+            arena,
+        );
+        add_candidate(
+            lhs_min,
+            rhs_max,
+            &mut candidates,
+            &mut all_overflowed,
+            &mut one_overflowed,
+            arena,
+        );
+        add_candidate(
+            lhs_max,
+            rhs_min,
+            &mut candidates,
+            &mut all_overflowed,
+            &mut one_overflowed,
+            arena,
+        );
+        add_candidate(
+            lhs_max,
+            rhs_max,
+            &mut candidates,
+            &mut all_overflowed,
+            &mut one_overflowed,
+            arena,
+        );
+
+        if all_overflowed || one_overflowed {
+            // We overflowed in every case, or had a conditional overflow.
+ // In this case we just under/overestimate + saturating_mul(lhs_max, rhs_max, &mut candidates); + saturating_mul(lhs_min, rhs_min, &mut candidates); + saturating_mul(lhs_min, rhs_max, &mut candidates); + saturating_mul(lhs_max, rhs_min, &mut candidates); + + let add_min = |elem: &Elem, candidates: &mut Vec>| { + if let Some(c) = elem.maybe_concrete() { + if let Some(min) = Concrete::min_of_type(&c.val) { + candidates.push(RangeConcrete::new(min, c.loc).into()); + } + } + }; + // We are able to conditionally overflow, so add min of both types + add_min(lhs_min, &mut candidates); + add_min(lhs_max, &mut candidates); + add_min(rhs_min, &mut candidates); + add_min(rhs_max, &mut candidates); + } + } else { + // without inspecting types of lhs and rhs, its easiest just to run them all and + // sort + saturating_mul(lhs_min, rhs_min, &mut candidates); + saturating_mul(lhs_min, rhs_max, &mut candidates); + saturating_mul(lhs_max, rhs_min, &mut candidates); + saturating_mul(lhs_max, rhs_max, &mut candidates); + } + + // Sort the candidates + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::DummyGraph; + use ethers_core::types::U256; + use solang_parser::pt::Loc; + + #[test] + fn sized_uint_uint() { + let x = rc_uint_sized(255); + let y = rc_uint_sized(255); + let result = x.range_mul(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(8, U256::from(255))); + } + + #[test] + fn sized_wrapping_uint_uint() { + let x = rc_uint_sized(255); + let y = rc_uint_sized(255); + let result = x + .range_wrapping_mul(&y) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::Uint(8, U256::from(1))); + } + + #[test] + fn sized_int_int() { + let x = rc_int_sized(-127); + let y = rc_int_sized(-127); + let result = x.range_mul(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(8, I256::from(127i32))); + } + + #[test] + fn sized_int_int_one() { + let x = rc_int_sized(-1); + let y = rc_int_sized(-128); + let result = x.range_mul(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(8, I256::from(127i32))); + } + + #[test] + fn sized_int_uint() { + let x = rc_int_sized(-127); + let y = rc_int_sized(127); + let y2 = rc_uint_sized(127); + let result = x.range_mul(&y).unwrap().maybe_concrete_value().unwrap(); + let result2 = x.range_mul(&y2).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result, result2); + assert_eq!(result.val, Concrete::Int(8, I256::from(-128i32))); + } + #[test] + fn sized_uint_int() { + let x = rc_int_sized(127); + let x2 = rc_uint_sized(127); + let y = rc_int_sized(-127); + let result = x.range_mul(&y).unwrap().maybe_concrete_value().unwrap(); + let result2 = x2.range_mul(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result, result2); + assert_eq!(result.val, Concrete::Int(8, I256::from(-128i32))); + } + + #[test] + fn sized_wrapping_int_int() { + let x = RangeConcrete::new(Concrete::Int(8, I256::from(-127i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(8, I256::from(-127i32)), Loc::Implicit); + let result = x + .range_wrapping_mul(&y) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::Int(8, I256::from(1i32))); + } + + 
#[test] + fn sized_wrapping_int_uint() { + let x = RangeConcrete::new(Concrete::Int(8, I256::from(-127i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(8, I256::from(127i32)), Loc::Implicit); + let result = x + .range_wrapping_mul(&y) + .unwrap() + .maybe_concrete_value() + .unwrap(); + let y2 = RangeConcrete::new(Concrete::Uint(8, U256::from(127i32)), Loc::Implicit); + let result2 = x + .range_wrapping_mul(&y2) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result, result2); + assert_eq!(result.val, Concrete::Int(8, I256::from(-1i32))); + } + + #[test] + fn exec_sized_uint_uint_saturating() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_uint_sized(5).into(); + let lhs_max = rc_uint_sized(15).into(); + let rhs_min = rc_uint_sized(1).into(); + let rhs_max = rc_uint_sized(20).into(); + + let max_result = exec_mul( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, true, false, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Uint(8, U256::from(255))); + let min_result = exec_mul( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, false, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Uint(8, U256::from(5))); + } + + #[test] + fn exec_sized_wrapping_uint_uint_no_overflow() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_uint_sized(5).into(); + let lhs_max = rc_uint_sized(15).into(); + let rhs_min = rc_uint_sized(1).into(); + let rhs_max = rc_uint_sized(16).into(); + + let max_result = exec_mul( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, true, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Uint(8, U256::from(240))); + let min_result = exec_mul( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Uint(8, U256::from(5))); + } + + #[test] + fn exec_sized_wrapping_uint_uint_full_overflow() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_uint_sized(126).into(); + let lhs_max = rc_uint_sized(127).into(); + let rhs_min = rc_uint_sized(252).into(); + let rhs_max = rc_uint_sized(255).into(); + + let max_result = exec_mul( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, true, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + // we just have to overestimate + assert_eq!(max_result.val, Concrete::Uint(8, U256::from(255))); + let min_result = exec_mul( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + // we just have to underestimate + assert_eq!(min_result.val, Concrete::Uint(8, U256::from(0))); + } + + #[test] + fn exec_sized_wrapping_int_uint_cond_overflow() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_int_sized(-128).into(); + let lhs_max = rc_int_sized(127).into(); + let rhs_min = rc_uint_sized(0).into(); + let rhs_max = rc_uint_sized(255).into(); + + let max_result = exec_mul( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, true, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Int(8, I256::from(127i32))); + let min_result = exec_mul( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, 
Concrete::Int(8, I256::from(-128i32))); + } + + #[test] + fn exec_sized_wrapping_int_uint_no_overflow() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_int_sized(-5).into(); + let lhs_max = rc_int_sized(5).into(); + let rhs_min = rc_uint_sized(0).into(); + let rhs_max = rc_uint_sized(3).into(); + + let max_result = exec_mul( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, true, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Int(8, I256::from(15i32))); + let min_result = exec_mul( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Int(8, I256::from(-15i32))); + } +} diff --git a/crates/graph/src/range/exec/math_ops/sub.rs b/crates/graph/src/range/exec/math_ops/sub.rs new file mode 100644 index 00000000..acc5e1d1 --- /dev/null +++ b/crates/graph/src/range/exec/math_ops/sub.rs @@ -0,0 +1,567 @@ +use crate::nodes::Concrete; +use crate::range::{elem::*, exec_traits::*}; +use crate::GraphBackend; + +use shared::RangeArena; + +use ethers_core::types::{I256, U256}; +use solang_parser::pt::Loc; + +impl RangeSub for RangeConcrete { + fn range_sub(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + if lhs_val > rhs_val { + let op_res = lhs_val.saturating_sub(rhs_val); + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } else { + match self.val { + Concrete::Int(size, val) => { + let min_int = + Concrete::min_of_type(&self.val).unwrap().int_val().unwrap(); + let op_res = val.saturating_sub(I256::from_raw(rhs_val)).max(min_int); + let val = Concrete::Int(size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => { + // TODO: this should cause a revert + let op_res = lhs_val.saturating_sub(rhs_val); + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + } + } + } + _ => match (&self.val, &other.val) { + (Concrete::Uint(lhs_size, val), Concrete::Int(_, neg_v)) => { + let tmp = Concrete::Uint(*lhs_size, U256::zero()); + let max = Concrete::max_of_type(&tmp).unwrap().uint_val().unwrap(); + let abs = neg_v.unsigned_abs(); + let op_res = val.saturating_add(abs).min(max); + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { + let tmp = Concrete::Int(*lhs_size, I256::from(0i32)); + let min = Concrete::min_of_type(&tmp).unwrap().int_val().unwrap(); + + let op_res = neg_v.saturating_sub(I256::from_raw(*val).max(min)); + let val = Concrete::Int(*lhs_size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { + let tmp = Concrete::Int(*lhs_size, I256::from(0i32)); + let min = Concrete::min_of_type(&tmp).unwrap().int_val().unwrap(); + let max = Concrete::max_of_type(&tmp).unwrap().int_val().unwrap(); + + let op_res = l.saturating_sub(*r).max(min).min(max); + let val = Concrete::Int(*lhs_size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => None, + }, + } + } + + fn range_wrapping_sub(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + if lhs_val > rhs_val { + 
let op_res = lhs_val.overflowing_sub(rhs_val).0; + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } else { + match self.val { + Concrete::Int(size, val) => { + let op_res = val.overflowing_sub(I256::from_raw(rhs_val)).0; + let val = Concrete::Int(size, op_res).size_wrap(); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => { + let op_res = lhs_val.overflowing_sub(rhs_val).0; + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + } + } + } + _ => match (&self.val, &other.val) { + (Concrete::Uint(_lhs_size, val), Concrete::Int(_, neg_v)) => { + let op_res = val.overflowing_add(neg_v.into_raw()).0; + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { + let op_res = I256::from_raw(neg_v.into_raw().overflowing_sub(*val).0); + let val = Concrete::Int(*lhs_size, op_res).size_wrap(); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { + Some(Elem::Concrete(RangeConcrete::new( + Concrete::Int(*lhs_size, l.overflowing_sub(*r).0).size_wrap(), + self.loc, + ))) + } + _ => None, + }, + } + } +} + +impl RangeSub for Elem { + fn range_sub(&self, other: &Self) -> Option> { + match (self, other) { + (_, Elem::Concrete(b)) if b.val.into_u256() == Some(U256::zero()) => Some(self.clone()), + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_sub(b), + _ => None, + } + } + + fn range_wrapping_sub(&self, other: &Self) -> Option> { + match (self, other) { + (_, Elem::Concrete(b)) if b.val.into_u256() == Some(U256::zero()) => Some(self.clone()), + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_wrapping_sub(b), + _ => None, + } + } +} + +/// Executes an subtraction given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound of the operation. +/// +/// ### Explanation +/// A fact about subtraction is that the largest value possible (in unbounded integer space), is between the left hand side _maximum_ and right hand side _minimum_ and the smallest +/// is between the left hand side _minimum_ and right hand side _maximum_ values. This fact is used in normal "unbounded" (really, saturating) subtraction calculations as well as wrapping subtraction as basis for another fact: +/// +/// In wrapping subtraction, if the bounds allow for optionally wrapping (e.g.: maximum - minimum does not wrap, but minimum - maximum does wrap), we can +/// by extension include *both* the type's maximum and minimum. +/// +/// For example, assume: +///uint256 x: [101, 2256-1] +///uint256 y: [100, 2256-1] +///unchecked { x - y } +/// +/// +/// In this subtraction of `x - y`, `101 - 100` does not wrap, but `101 - 102` does (unsigned integer). We can construct a value of x and y such that +/// the result of `x - y` is equal to 2256-1 (`101 - 102`) or `0` (`101 - 101`). Therefore, the new bounds +/// on `unchecked { x - y }` is [0, 2256-1]. 
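+///
+/// The same reasoning at a smaller width: for `uint8`, with `x: [101, 255]` and `y: [100, 255]`,
+/// `101 - 100 = 1` does not wrap while `101 - 102` wraps around to `255`, so the bounds of
+/// `unchecked { x - y }` widen to the full `uint8` range `[0, 255]`.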
+///
+/// ### Note
+/// Signed integers use 2's complement representation, so the signed maximum is 2^(size - 1) - 1, while the unsigned maximum is 2^size - 1.
+///
+/// ### Truth Tables
+/// Truth table for `checked sub` operation:
+///
+///| Sub         | Uint                                                 | Int                                                  | BytesX | Address | Bytes | String |
+///|-------------|------------------------------------------------------|------------------------------------------------------|--------|---------|-------|--------|
+///| **Uint**    | _min_: lhs_min - rhs_max<br>_max_: lhs_max - rhs_min | _min_: lhs_min - rhs_max<br>_max_: lhs_max - rhs_min | N/A    | N/A     | N/A   | N/A    |
+///| **Int**     | _min_: lhs_min - rhs_max<br>_max_: lhs_max - rhs_min | _min_: lhs_min - rhs_max<br>_max_: lhs_max - rhs_min | N/A    | N/A     | N/A   | N/A    |
+///| **BytesX**  | N/A                                                  | N/A                                                  | N/A    | N/A     | N/A   | N/A    |
+///| **Address** | N/A                                                  | N/A                                                  | N/A    | N/A     | N/A   | N/A    |
+///| **Bytes**   | N/A                                                  | N/A                                                  | N/A    | N/A     | N/A   | N/A    |
+///| **String**  | N/A                                                  | N/A                                                  | N/A    | N/A     | N/A   | N/A    |
+///
+/// Truth table for `wrapping sub` operation:
+///
+///| Wrapping Sub | Uint                                                                                   | Int                                                                                    | BytesX | Address | Bytes | String |
+///|--------------|----------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------|--------|---------|-------|--------|
+///| **Uint**     | _min_: {0, lhs_min - rhs_max}<br>_max_: {2^size - 1, lhs_max - rhs_min}                 | _min_: {0, lhs_min - rhs_max}<br>_max_: {2^size - 1, lhs_max - rhs_min}                 | N/A    | N/A     | N/A   | N/A    |
+///| **Int**      | _min_: {-2^(size-1), lhs_min - rhs_max}<br>_max_: {2^(size-1) - 1, lhs_max - rhs_min}   | _min_: {-2^(size-1), lhs_min - rhs_max}<br>
_max_: {2size-1 - 1, lhsmax - rhsmin} | N/A | N/A | N/A | N/A | +///| **BytesX** | N/A | N/A | N/A | N/A | N/A | N/A | +///| **Address** | N/A | N/A | N/A | N/A | N/A | N/A | +///| **Bytes** | N/A | N/A | N/A | N/A | N/A | N/A | +///| **String** | N/A | N/A | N/A | N/A | N/A | N/A | +pub fn exec_sub( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + wrapping: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + // quick check if rhs is const and zero, if so return min or max + if wrapping { + let mut candidates = vec![]; + let mut all_overflowed = true; + let mut one_overflowed = false; + let zero = Elem::Concrete(RangeConcrete::new( + Concrete::from(U256::zero()), + Loc::Implicit, + )); + let add_candidate = |lhs: &Elem, + rhs: &Elem, + candidates: &mut Vec>, + all_overflowed: &mut bool, + one_overflowed: &mut bool, + arena: &mut RangeArena>| { + if let Some(c) = lhs.range_wrapping_sub(rhs) { + let lhs_neg = matches!(lhs.range_ord(&zero, arena), Some(std::cmp::Ordering::Less)); + let rhs_neg = matches!(rhs.range_ord(&zero, arena), Some(std::cmp::Ordering::Less)); + let signed = lhs_neg || rhs_neg; + + let overflowed = if signed { + // signed safemath: (rhs >= 0 && c <= lhs) || (rhs < 0 && c > lhs) ==> no overflowed --invert-> overflowed + // ( rhs < 0 ∣∣ c > lhs) && ( rhs ≥ 0 ∣∣ c ≤ lhs) + + (rhs_neg + || matches!(c.range_ord(lhs, arena), Some(std::cmp::Ordering::Greater))) + && (!rhs_neg + || matches!( + c.range_ord(lhs, arena), + Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) + )) + } else { + // unsigned safemath: c < a ==> overflowed + matches!(c.range_ord(lhs, arena), Some(std::cmp::Ordering::Greater)) + }; + + if *all_overflowed && !overflowed { + *all_overflowed = false; + } + + if !*one_overflowed && overflowed { + *one_overflowed = true; + } + + candidates.push(c); + } + }; + + add_candidate( + lhs_min, + rhs_min, + &mut candidates, + &mut all_overflowed, + &mut one_overflowed, + arena, + ); + add_candidate( + lhs_min, + rhs_max, + &mut candidates, + &mut all_overflowed, + &mut one_overflowed, + arena, + ); + add_candidate( + lhs_max, + rhs_min, + &mut candidates, + &mut all_overflowed, + &mut one_overflowed, + arena, + ); + add_candidate( + lhs_max, + rhs_max, + &mut candidates, + &mut all_overflowed, + &mut one_overflowed, + arena, + ); + + // If we have a conditional overflow, add the min and max of the type of lhs to the candidates + if !all_overflowed && one_overflowed { + let add_extremes = |elem: &Elem, candidates: &mut Vec>| { + if let Some(c) = elem.maybe_concrete() { + if let Some(max) = Concrete::max_of_type(&c.val) { + candidates.push(RangeConcrete::new(max, c.loc).into()); + } + + if let Some(min) = Concrete::min_of_type(&c.val) { + candidates.push(RangeConcrete::new(min, c.loc).into()); + } + } + }; + + add_extremes(lhs_min, &mut candidates); + add_extremes(lhs_max, &mut candidates); + } + + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } + } else if maximize { + // if we are maximizing, the largest value will always just be the the largest value - the smallest value + lhs_max.range_sub(rhs_min) + } else { + // if we are minimizing, the smallest value will always be smallest value - largest value + lhs_min.range_sub(rhs_max) + } +} + +#[cfg(test)] +mod tests 
{ + use super::*; + use crate::DummyGraph; + use ethers_core::types::U256; + use solang_parser::pt::Loc; + + #[test] + fn uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(15)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(5)), Loc::Implicit); + let result = x.range_sub(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(10))); + } + + #[test] + fn saturating_uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(1)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::MAX), Loc::Implicit); + let result = x.range_sub(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::zero())); + } + + #[test] + fn sized_saturating_uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(8, U256::from(254)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(8, U256::from(255)), Loc::Implicit); + let result = x.range_sub(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(8, U256::zero())); + } + + #[test] + fn int_big_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(15)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-1i32)), Loc::Implicit); + let result = x.range_sub(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(16))); + } + + #[test] + fn big_int_uint() { + let x = RangeConcrete::new(Concrete::Uint(256, U256::from(1)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-15i32)), Loc::Implicit); + let result = x.range_sub(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(16))); + } + + #[test] + fn int_int() { + let x = RangeConcrete::new(Concrete::Int(256, I256::from(-15i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(-15i32)), Loc::Implicit); + let result = x.range_sub(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(0i32))); + } + + #[test] + fn max_int_int() { + let x = RangeConcrete::new(Concrete::Int(256, I256::MAX), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::MIN), Loc::Implicit); + let result = x.range_sub(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::MAX)); + } + + #[test] + fn int_max_int() { + let x = RangeConcrete::new(Concrete::Int(256, I256::MIN), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::MAX), Loc::Implicit); + let result = x.range_sub(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::MIN)); + } + + #[test] + fn saturating_int_int() { + let x = RangeConcrete::new( + Concrete::Int(256, I256::MIN + I256::from(1i32)), + Loc::Implicit, + ); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(2i32)), Loc::Implicit); + let result = x.range_sub(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::MIN)); + } + + #[test] + fn sized_saturating_int_int() { + let x = RangeConcrete::new(Concrete::Int(8, I256::from(-127i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(8, I256::from(2i32)), Loc::Implicit); + let result = x.range_sub(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, Concrete::Int(8, I256::from(-128i32))); + } + + #[test] + fn wrapping_uint_uint() { + let x = 
RangeConcrete::new(Concrete::Uint(256, U256::zero()), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(256, U256::from(1)), Loc::Implicit); + let result = x + .range_wrapping_sub(&y) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::MAX)); + } + + #[test] + fn sized_wrapping_uint_uint() { + let x = RangeConcrete::new(Concrete::Uint(8, U256::zero()), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Uint(8, U256::from(1)), Loc::Implicit); + let result = x + .range_wrapping_sub(&y) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::Uint(8, U256::from(255))); + } + + #[test] + fn wrapping_int_int() { + let x = RangeConcrete::new(Concrete::Int(256, I256::from(-1)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(15i32)), Loc::Implicit); + let result = x + .range_wrapping_sub(&y) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::from(-16i32))); + } + + #[test] + fn wrapping_int_int_2() { + let x = RangeConcrete::new(Concrete::Int(256, I256::MIN), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(256, I256::from(1i32)), Loc::Implicit); + let result = x + .range_wrapping_sub(&y) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::Int(256, I256::MAX)); + } + + #[test] + fn sized_wrapping_int_int() { + let x = RangeConcrete::new(Concrete::Int(8, I256::from(-128i32)), Loc::Implicit); + let y = RangeConcrete::new(Concrete::Int(8, I256::from(1i32)), Loc::Implicit); + let result = x + .range_wrapping_sub(&y) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::Int(8, I256::from(127i32))); + } + + #[test] + fn exec_sized_uint_uint_saturating() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_uint_sized(105).into(); + let lhs_max = rc_uint_sized(150).into(); + let rhs_min = rc_uint_sized(10).into(); + let rhs_max = rc_uint_sized(200).into(); + + let max_result = exec_sub( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, true, false, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Uint(8, U256::from(140))); + let min_result = exec_sub( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, false, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Uint(8, U256::from(0))); + } + + #[test] + fn exec_sized_wrapping_uint_uint() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_uint_sized(105).into(); + let lhs_max = rc_uint_sized(150).into(); + let rhs_min = rc_uint_sized(10).into(); + let rhs_max = rc_uint_sized(200).into(); + + let max_result = exec_sub( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, true, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Uint(8, U256::from(255))); + let min_result = exec_sub( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Uint(8, U256::from(0))); + } + + #[test] + fn exec_sized_wrapping_int_uint() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_int_sized(-128).into(); + let lhs_max = rc_int_sized(127).into(); + let rhs_min = rc_uint_sized(0).into(); + let rhs_max = rc_uint_sized(255).into(); + + let max_result = exec_sub( + &lhs_min, 
&lhs_max, &rhs_min, &rhs_max, true, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Int(8, I256::from(127i32))); + let min_result = exec_sub( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Int(8, I256::from(-128i32))); + } + + #[test] + fn exec_sized_wrapping_int_int_max() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let lhs_min = rc_int_sized(-128).into(); + let lhs_max = rc_int_sized(-100).into(); + let rhs_min = rc_int_sized(-5).into(); + let rhs_max = rc_int_sized(5).into(); + + let max_result = exec_sub( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, true, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(max_result.val, Concrete::Int(8, I256::from(127i32))); + let min_result = exec_sub( + &lhs_min, &lhs_max, &rhs_min, &rhs_max, false, true, &g, &mut arena, + ) + .unwrap() + .maybe_concrete() + .unwrap(); + assert_eq!(min_result.val, Concrete::Int(8, I256::from(-128i32))); + } +} diff --git a/crates/graph/src/range/exec/max.rs b/crates/graph/src/range/exec/max.rs new file mode 100644 index 00000000..b2b32de6 --- /dev/null +++ b/crates/graph/src/range/exec/max.rs @@ -0,0 +1,82 @@ +use crate::nodes::Concrete; +use crate::range::{elem::*, exec_traits::*}; +use crate::GraphBackend; + +use shared::RangeArena; + +impl RangeMax for RangeConcrete { + fn range_max(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + let op_res = lhs_val.max(rhs_val); + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => match (&self.val, &other.val) { + (Concrete::Uint(lhs_size, val), Concrete::Int(_, _)) + | (Concrete::Int(lhs_size, _), Concrete::Uint(_, val)) => { + let val = Concrete::Uint(*lhs_size, *val); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { + let val = Concrete::Int(*lhs_size, *l.max(r)); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => None, + }, + } + } +} + +impl RangeMax for Elem { + fn range_max(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_max(b), + (Elem::ConcreteDyn(a), Elem::ConcreteDyn(b)) => match a.op_num.cmp(&b.op_num) { + std::cmp::Ordering::Greater => Some(self.clone()), + std::cmp::Ordering::Less => Some(other.clone()), + _ => None, + }, + (_, Elem::Null) => Some(self.clone()), + (Elem::Null, _) => Some(other.clone()), + _ => None, + } + } +} + +/// Executes the maximum given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. 
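+///
+/// All four pairwise `range_max` combinations of the lhs/rhs bounds are collected as candidates;
+/// after sorting, the largest candidate is returned when `maximize` is true, otherwise the
+/// smallest.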
+pub fn exec_max( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + let candidates = vec![ + lhs_min.range_max(rhs_min), + lhs_min.range_max(rhs_max), + lhs_max.range_max(rhs_min), + lhs_max.range_max(rhs_max), + ]; + let mut candidates = candidates.into_iter().flatten().collect::>(); + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} diff --git a/crates/graph/src/range/exec/mem_ops/concat.rs b/crates/graph/src/range/exec/mem_ops/concat.rs new file mode 100644 index 00000000..3abf3bef --- /dev/null +++ b/crates/graph/src/range/exec/mem_ops/concat.rs @@ -0,0 +1,318 @@ +use crate::nodes::Concrete; +use crate::range::{elem::*, exec_traits::*}; +use crate::GraphBackend; + +use shared::RangeArena; + +use ethers_core::types::{H256, U256}; +use std::collections::BTreeMap; + +impl RangeConcat for RangeConcrete { + fn range_concat(&self, other: &Self) -> Option> { + Some(Elem::Concrete(RangeConcrete::new( + self.val.clone().concat(&other.val)?, + self.loc, + ))) + } +} + +impl RangeConcat> for RangeDyn { + fn range_concat(&self, other: &RangeConcrete) -> Option> { + let inner = self.val.values().take(1).next().map(|(a, _)| a); + match (other.val.clone(), inner) { + (Concrete::DynBytes(val), inner) if inner.is_none() || inner.unwrap().is_bytes() => { + let last = self.len.clone(); + let mut existing = self.val.clone(); + let new = val + .iter() + .enumerate() + .map(|(i, v)| { + let idx = Elem::from(Concrete::from(U256::from(i))); + let idx = *last.clone() + idx; + let mut bytes = [0x00; 32]; + bytes[0] = *v; + let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); + (idx, (v, self.op_num + i + 1)) + }) + .collect::>(); + existing.extend(new); + Some(Elem::ConcreteDyn(RangeDyn::new_w_op_nums( + (*self.len).clone() + Elem::from(Concrete::from(U256::from(val.len()))), + existing, + other.loc, + ))) + } + (Concrete::String(val), inner) + if inner.is_none() || inner.unwrap().is_string() || inner.unwrap().is_bytes() => + { + let last = self.len.clone(); + let mut existing = self.val.clone(); + let new = val + .chars() + .enumerate() + .map(|(i, v)| { + let idx = Elem::from(Concrete::from(U256::from(i))); + let idx = *last.clone() + idx; + let mut bytes = [0x00; 32]; + v.encode_utf8(&mut bytes[..]); + let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); + (idx, (v, self.op_num + i + 1)) + }) + .collect::>(); + existing.extend(new); + Some(Elem::ConcreteDyn(RangeDyn::new_w_op_nums( + (*self.len).clone() + Elem::from(Concrete::from(U256::from(val.len()))), + existing, + other.loc, + ))) + } + e => { + debug_assert!(false, "was not concattable type: {e:#?}"); + None + } + } + } +} + +impl RangeConcat> for RangeDyn { + fn range_concat(&self, other: &Self) -> Option> { + let val = self.val.values().take(1).next().map(|(a, _)| a); + let o_val = other.val.values().take(1).next().map(|(a, _)| a); + match (val, o_val) { + (Some(v), Some(o)) if v.is_bytes() && o.is_bytes() => { + let last = self.len.clone(); + let mut existing = self.val.clone(); + let other_vals = other + .val + .clone() + .into_iter() + .enumerate() + .map(|(i, (key, (v, _op)))| (key + *last.clone(), (v, self.op_num + i + 1))) + .collect::>(); + + existing.extend(other_vals); + + 
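+                // `other`'s entries were re-keyed past `self.len` and renumbered above, so the
+                // combined map is returned with a length equal to the sum of both lengths.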
Some(Elem::ConcreteDyn(RangeDyn::new_w_op_nums( + *self.len.clone() + *other.len.clone(), + existing, + other.loc, + ))) + } + (Some(l @ Elem::Reference(_)), None) => Some(l.clone()), + (None, Some(r @ Elem::Reference(_))) => Some(r.clone()), + (None, None) => Some(Elem::ConcreteDyn(self.clone())), + _e => None, + } + } +} + +impl RangeConcat for Elem { + fn range_concat(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_concat(b), + (Elem::ConcreteDyn(a), Elem::ConcreteDyn(b)) => a.range_concat(b), + (Elem::Concrete(c), Elem::ConcreteDyn(d)) + | (Elem::ConcreteDyn(d), Elem::Concrete(c)) => d.range_concat(c), + _e => None, + } + } +} + +/// Executes a concatenation of bytes. +pub fn exec_concat( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + // TODO: improve with smarter stuff + let candidates = vec![ + lhs_min.range_concat(rhs_min), + lhs_min.range_concat(rhs_max), + lhs_max.range_concat(rhs_min), + lhs_max.range_concat(rhs_max), + ]; + let mut candidates = candidates.into_iter().flatten().collect::>(); + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::DummyGraph; + use pretty_assertions::assert_eq; + use solang_parser::pt::Loc; + + #[test] + fn concrete_concrete_bytes() { + let x = RangeConcrete::new( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o']), + Loc::Implicit, + ); + let y = RangeConcrete::new( + Concrete::from(vec![b'w', b'o', b'r', b'l', b'd']), + Loc::Implicit, + ); + let expected = Concrete::from(vec![ + b'h', b'e', b'l', b'l', b'o', b'w', b'o', b'r', b'l', b'd', + ]); + let result = x.range_concat(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, expected); + } + + #[test] + fn concrete_concrete_bytes_str_fail() { + let x = RangeConcrete::new( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o']), + Loc::Implicit, + ); + let y = RangeConcrete::new(Concrete::from("world"), Loc::Implicit); + assert!(x.range_concat(&y).is_none()); + } + + #[test] + fn concrete_concrete_bytes_none() { + let x = RangeConcrete::new( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o']), + Loc::Implicit, + ); + let y = RangeConcrete::new(Concrete::DynBytes(vec![]), Loc::Implicit); + let result = x.range_concat(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, x.val); + } + + #[test] + fn concrete_concrete_str() { + let x = RangeConcrete::new(Concrete::from("hello"), Loc::Implicit); + let y = RangeConcrete::new(Concrete::from("world"), Loc::Implicit); + let expected = Concrete::from("helloworld"); + let result = x.range_concat(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, expected); + } + + #[test] + fn concrete_concrete_str_bytes_fail() { + let x = RangeConcrete::new(Concrete::from("world"), Loc::Implicit); + let y = RangeConcrete::new( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o']), + Loc::Implicit, + ); + assert!(x.range_concat(&y).is_none()); + } + + #[test] + fn concrete_concrete_str_none() { + let x = RangeConcrete::new(Concrete::from("hello"), Loc::Implicit); + let y = RangeConcrete::new(Concrete::from(""), Loc::Implicit); + let result = 
x.range_concat(&y).unwrap().maybe_concrete_value().unwrap(); + assert_eq!(result.val, x.val); + } + + #[test] + fn dyn_concrete_bytes() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let x = RangeDyn::from_concrete( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o']), + Loc::Implicit, + ) + .unwrap(); + let y = RangeConcrete::new( + Concrete::from(vec![b'w', b'o', b'r', b'l', b'd']), + Loc::Implicit, + ); + let expected: Elem<_> = Elem::ConcreteDyn( + RangeDyn::from_concrete( + Concrete::from(vec![ + b'h', b'e', b'l', b'l', b'o', b'w', b'o', b'r', b'l', b'd', + ]), + Loc::Implicit, + ) + .unwrap(), + ); + let result = x + .range_concat(&y) + .unwrap() + .maximize(&g, &mut arena) + .unwrap(); + assert_eq!(result, expected); + } + + #[test] + fn dyn_dyn_bytes() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let x = RangeDyn::from_concrete( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o']), + Loc::Implicit, + ) + .unwrap(); + let y = RangeDyn::from_concrete( + Concrete::from(vec![b'w', b'o', b'r', b'l', b'd']), + Loc::Implicit, + ) + .unwrap(); + let expected: Elem<_> = Elem::ConcreteDyn( + RangeDyn::from_concrete( + Concrete::from(vec![ + b'h', b'e', b'l', b'l', b'o', b'w', b'o', b'r', b'l', b'd', + ]), + Loc::Implicit, + ) + .unwrap(), + ); + let result = x + .range_concat(&y) + .unwrap() + .maximize(&g, &mut arena) + .unwrap(); + assert_eq!(result, expected); + } + + #[test] + fn dyn_concrete_str() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let x = RangeDyn::from_concrete(Concrete::from("hello"), Loc::Implicit).unwrap(); + let y = RangeConcrete::new(Concrete::from("world"), Loc::Implicit); + let expected: Elem<_> = Elem::ConcreteDyn( + RangeDyn::from_concrete(Concrete::from("helloworld"), Loc::Implicit).unwrap(), + ); + let result = x.range_concat(&y).unwrap(); + let result = result.maximize(&g, &mut arena).unwrap(); + assert_eq!(result, expected); + } + + #[test] + fn dyn_dyn_str() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let x = RangeDyn::from_concrete(Concrete::from("hello"), Loc::Implicit).unwrap(); + let y = RangeDyn::from_concrete(Concrete::from("world"), Loc::Implicit).unwrap(); + let expected: Elem<_> = Elem::ConcreteDyn( + RangeDyn::from_concrete(Concrete::from("helloworld"), Loc::Implicit).unwrap(), + ); + let result = x + .range_concat(&y) + .unwrap() + .maximize(&g, &mut arena) + .unwrap(); + assert_eq!(result, expected); + } +} diff --git a/crates/graph/src/range/exec/mem_ops/mem_get.rs b/crates/graph/src/range/exec/mem_ops/mem_get.rs new file mode 100644 index 00000000..62d3c99e --- /dev/null +++ b/crates/graph/src/range/exec/mem_ops/mem_get.rs @@ -0,0 +1,409 @@ +use crate::GraphBackend; +use crate::{ + nodes::Concrete, + range::{elem::*, exec_traits::*}, +}; + +use shared::RangeArena; + +use ethers_core::types::U256; +use solang_parser::pt::Loc; + +impl RangeMemLen for RangeDyn { + fn range_get_length(&self) -> Option> { + Some(*self.len.clone()) + } +} + +impl> + Clone> RangeMemGet for RangeDyn { + fn range_get_index(&self, index: &Rhs) -> Option> { + self.val + .get(&(index.clone().into())) + .map(|(v, _)| v.clone()) + } +} + +impl RangeMemGet for RangeConcrete { + fn range_get_index(&self, index: &RangeConcrete) -> Option> { + self.val.get_index(&index.val).map(Elem::from) + } +} + +impl RangeMemLen for RangeConcrete { + fn range_get_length(&self) -> Option> { + Some(RangeConcrete::new(Concrete::from(self.val.maybe_array_size()?), self.loc).into()) 
+ } +} + +impl RangeMemLen for Elem { + fn range_get_length(&self) -> Option> { + match self { + Elem::Concrete(a) => a.range_get_length(), + Elem::ConcreteDyn(a) => Some(*a.len.clone()), + _e => None, + } + } +} + +impl RangeMemGet> for Elem { + fn range_get_index(&self, index: &Elem) -> Option> { + match (self, index) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_get_index(b), + (Elem::ConcreteDyn(a), idx @ Elem::Concrete(_)) => { + if let Some((val, _)) = a.val.get(idx).cloned() { + Some(val) + } else { + None + } + } + (Elem::ConcreteDyn(a), idx @ Elem::Reference(_)) => { + if let Some((val, _)) = a.val.get(idx).cloned() { + Some(val) + } else { + None + } + } + _e => None, + } + } +} + +/// Executes the `get_length` operation given the minimum and maximum of an element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. +pub fn exec_get_length( + lhs_min: &Elem, + lhs_max: &Elem, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + if maximize { + let new = lhs_max.clone(); + let new_max = new.simplify_maximize(analyzer, arena).ok()?; + + new_max.range_get_length() + } else { + let new_min = lhs_min.simplify_minimize(analyzer, arena).ok()?; + + new_min.range_get_length() + } +} + +/// Executes the `range_get_index` operation given the minimum and maximum of an element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. +pub fn exec_get_index( + lhs: &Elem, + rhs: &Elem, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + // for each key in LHS, check if it overlaps the RHS index range + // e.g.: + // lhs: { + // [12, 100]: val, + // [220, 1000]: val, + // } + // + // if: + // rhs: [0, 2**224] + // all values would be added to candidates + // + // if: + // rhs: [0, 2] + // No values would be added to candidates + // + // if: + // rhs: [50, 50] + // the first value would be added to candidates + + let mut candidates = vec![]; + fn match_lhs( + lhs: &Elem, + rhs: &Elem, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + candidates: &mut Vec>, + ) { + match lhs { + Elem::Arena(_) => { + let (d, idx) = lhs.dearenaize(arena); + match_lhs(&d, rhs, analyzer, arena, candidates); + lhs.rearenaize(d, idx, arena); + } + Elem::Reference(_) => { + if let Ok(min) = lhs.minimize(analyzer, arena) { + match_lhs(&min, rhs, analyzer, arena, candidates); + } + + if let Ok(max) = lhs.maximize(analyzer, arena) { + match_lhs(&max, rhs, analyzer, arena, candidates); + } + } + Elem::ConcreteDyn(d) => { + d.val.iter().for_each(|(k, (v, _op))| { + if let Ok(Some(true)) = k.overlaps(rhs, true, analyzer, arena) { + candidates.push(v.clone()) + } + }); + } + Elem::Concrete(c) => { + if let Some(size) = c.val.maybe_array_size() { + let min = U256::zero(); + // Iterates through concrete indices to check if RHS contains the index + let mut curr = min; + while curr < size { + let as_rc = RangeConcrete::new(Concrete::from(curr), Loc::Implicit); + let as_elem = Elem::from(as_rc.clone()); + if let Ok(Some(true)) = as_elem.overlaps(rhs, true, analyzer, arena) { + if let Some(val) = c.range_get_index(&as_rc) { + candidates.push(val) + } + } + curr += U256::from(1); + } + } + } + _ => {} + }; + } + + match_lhs(lhs, rhs, analyzer, arena, &mut candidates); + + candidates = candidates + .into_iter() + .filter_map(|val| { + if maximize { + val.maximize(analyzer, arena).ok() + } else { + val.minimize(analyzer, arena).ok() + } + }) + .collect(); + + // Sort the candidates 
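+    // `range_ord` returns `None` when two candidates are not directly comparable; the fallback
+    // simply orders those as `Less` so every pair gets a defined ordering.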
+ candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return Some(Elem::Null); + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::DummyGraph; + use ethers_core::types::U256; + use pretty_assertions::assert_eq; + use solang_parser::pt::Loc; + + #[test] + fn concrete_len() { + let x: RangeConcrete = RangeConcrete::new( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o']), + Loc::Implicit, + ); + let expected = rc_uint256(5); + let result = Elem::from(x) + .range_get_length() + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, expected.val); + } + + #[test] + fn dyn_len() { + let x = RangeDyn::from_concrete( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o']), + Loc::Implicit, + ) + .unwrap(); + let expected = rc_uint256(5); + let result = x + .range_get_length() + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, expected.val); + } + + #[test] + fn concrete_concrete_index() { + let x = RangeConcrete::new( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o']), + Loc::Implicit, + ); + let idx = rc_uint256(2); + let result = x + .range_get_index(&idx) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::from(b'l')); + } + + #[test] + fn dyn_concrete_index() { + let x = RangeDyn::from_concrete( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o']), + Loc::Implicit, + ) + .unwrap(); + let idx = rc_uint256(2); + let result = x + .range_get_index(&idx) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::from(b'l')); + } + + #[test] + fn dyn_ref_index() { + let idx = Elem::Reference(Reference::new(1.into())); + let rand: Elem<_> = rc_uint256(0).into(); + let val = rc_uint256(200).into(); + let x = RangeDyn::new_for_indices( + vec![(rand.clone(), rand), (idx.clone(), val)], + Loc::Implicit, + ); + + let result = x + .range_get_index(&idx) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(200))); + } + + #[test] + fn exec_dyn_get_ref_idx_low() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let idx0 = test_reference(1, 12.into(), 100.into()); + let idx1 = test_reference(2, 220.into(), 1000.into()); + let val0 = rc_uint256(200).into(); + let val1 = rc_uint256(201).into(); + let x = RangeDyn::new_for_indices(vec![(idx0, val0), (idx1, val1)], Loc::Implicit); + + let get_idx = test_reference(3, 0.into(), 12.into()); + + let result = exec_get_index(&Elem::ConcreteDyn(x), &get_idx, true, &g, &mut arena) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(200))); + } + + #[test] + fn exec_dyn_get_ref_idx_high() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let idx0 = test_reference(1, 12.into(), 100.into()); + let idx1 = test_reference(2, 220.into(), 1000.into()); + let val0 = rc_uint256(200).into(); + let val1 = rc_uint256(201).into(); + let x = RangeDyn::new_for_indices(vec![(idx0, val0), (idx1, val1)], Loc::Implicit); + + let get_idx = test_reference(3, 400.into(), 400.into()); + + let result = exec_get_index(&Elem::ConcreteDyn(x), &get_idx, true, &g, &mut arena) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(201))); + } + + #[test] + fn 
exec_dyn_get_ref_idx_all() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let idx0 = test_reference(1, 12.into(), 100.into()); + let idx1 = test_reference(2, 220.into(), 1000.into()); + let val0 = rc_uint256(200).into(); + let val1 = rc_uint256(201).into(); + let x = RangeDyn::new_for_indices(vec![(idx0, val0), (idx1, val1)], Loc::Implicit); + + let get_idx = test_reference(3, 0.into(), U256::MAX); + + let result = exec_get_index(&Elem::ConcreteDyn(x), &get_idx, true, &g, &mut arena) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::Uint(256, U256::from(201))); + } + + #[test] + fn exec_dyn_get_ref_idx_null() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let idx0 = test_reference(1, 12.into(), 100.into()); + let idx1 = test_reference(2, 220.into(), 1000.into()); + let val0 = rc_uint256(200).into(); + let val1 = rc_uint256(201).into(); + let x = RangeDyn::new_for_indices(vec![(idx0, val0), (idx1, val1)], Loc::Implicit); + + let get_idx = test_reference(3, 0.into(), 2.into()); + + let result = exec_get_index(&Elem::ConcreteDyn(x), &get_idx, true, &g, &mut arena); + assert_eq!(result.unwrap(), Elem::Null); + } + + #[test] + fn exec_concrete_get_ref_idx_low() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let x: RangeConcrete = RangeConcrete::new( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o']), + Loc::Implicit, + ); + let get_idx = test_reference(1, 0.into(), 2.into()); + + let result = exec_get_index(&Elem::Concrete(x), &get_idx, true, &g, &mut arena) + .unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, Concrete::from(b'l')); + } + + #[test] + fn exec_concrete_get_ref_idx_null() { + let g = DummyGraph::default(); + let mut arena = Default::default(); + let x: RangeConcrete = RangeConcrete::new( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o']), + Loc::Implicit, + ); + let get_idx = test_reference(1, 6.into(), 8.into()); + + let result = exec_get_index(&Elem::Concrete(x), &get_idx, true, &g, &mut arena); + assert_eq!(result.unwrap(), Elem::Null); + } + + fn test_reference(id: usize, min: U256, max: U256) -> Elem { + let mut re = Reference::new(id.into()); + let mi = Box::new(Elem::Concrete(RangeConcrete::new( + Concrete::from(min), + Loc::Implicit, + ))); + let ma = Box::new(Elem::Concrete(RangeConcrete::new( + Concrete::from(max), + Loc::Implicit, + ))); + re.minimized = Some(MinMaxed::Minimized(mi.clone())); + re.maximized = Some(MinMaxed::Maximized(ma.clone())); + re.flattened_min = Some(mi); + re.flattened_max = Some(ma); + Elem::Reference(re) + } +} diff --git a/crates/graph/src/range/exec/mem_ops/mem_set.rs b/crates/graph/src/range/exec/mem_ops/mem_set.rs new file mode 100644 index 00000000..acc6ab72 --- /dev/null +++ b/crates/graph/src/range/exec/mem_ops/mem_set.rs @@ -0,0 +1,427 @@ +use crate::GraphBackend; +use crate::{ + nodes::Concrete, + range::{elem::*, exec_traits::*}, +}; + +use shared::RangeArena; + +use ethers_core::types::{H256, U256}; + +use std::collections::BTreeMap; + +impl RangeMemSet for RangeDyn { + fn range_set_indices(&self, range: &Self) -> Option> { + let mut new_val = self.val.clone(); + let mut op_num = self.op_num; + range.val.iter().for_each(|(k, (v, _))| { + op_num += 1; + new_val.insert(k.clone(), (v.clone(), op_num)); + }); + + Some(Elem::ConcreteDyn(RangeDyn::new_w_op_nums( + *self.len.clone(), + new_val, + range.loc, + ))) + } + + fn range_set_length(&self, other: &Self) -> Option> { + let mut a = 
self.clone(); + a.len.clone_from(&other.len); + Some(Elem::ConcreteDyn(a)) + } +} + +impl RangeMemSet> for RangeDyn { + fn range_set_indices(&self, range: &RangeConcrete) -> Option> { + match ( + range.val.clone(), + self.val.values().take(1).next().map(|(a, _)| a), + ) { + (Concrete::DynBytes(val), s) if s.is_none() || s.unwrap().is_bytes() => { + let mut existing = self.val.clone(); + let new = val + .iter() + .enumerate() + .map(|(i, v)| { + let mut bytes = [0x00; 32]; + bytes[0] = *v; + let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); + ( + Elem::from(Concrete::from(U256::from(i))), + (v, self.op_num + i), + ) + }) + .collect::>(); + existing.extend(new); + Some(Elem::ConcreteDyn(RangeDyn::new_w_op_nums( + *self.len.clone(), + existing, + range.loc, + ))) + } + (Concrete::String(val), s) if s.is_none() || s.unwrap().is_string() => { + let mut existing = self.val.clone(); + let new = val + .chars() + .enumerate() + .map(|(i, v)| { + let mut bytes = [0x00; 32]; + v.encode_utf8(&mut bytes[..]); + let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); + ( + Elem::from(Concrete::from(U256::from(i))), + (v, i + self.op_num), + ) + }) + .collect::>(); + existing.extend(new); + Some(Elem::ConcreteDyn(RangeDyn::new_w_op_nums( + *self.len.clone(), + existing, + range.loc, + ))) + } + _e => None, + } + } + + fn range_set_length(&self, other: &RangeConcrete) -> Option> { + let mut a = self.clone(); + a.len = Box::new(Elem::Concrete(other.clone())); + Some(Elem::ConcreteDyn(a)) + } +} + +impl RangeMemSet for RangeConcrete { + fn range_set_indices(&self, range: &Self) -> Option> { + let mut new_val = self.val.clone(); + new_val.set_indices(&range.val); + Some(Elem::Concrete(RangeConcrete::new(new_val, range.loc))) + } + + fn range_set_length(&self, other: &Self) -> Option> { + match other.val.into_u256() { + Some(len) if len <= U256::from(32) => match self.val { + Concrete::DynBytes(ref val) => Some(Elem::Concrete(RangeConcrete::new( + Concrete::DynBytes({ + let mut v = val.clone(); + v.resize(len.as_usize(), 0); + v + }), + self.loc, + ))), + Concrete::String(ref val) => Some(Elem::Concrete(RangeConcrete::new( + Concrete::String({ + let mut v = val.clone(); + v.push_str(&" ".repeat(len.as_usize() - v.chars().count())); + v + }), + self.loc, + ))), + Concrete::Bytes(_, val) => Some(Elem::Concrete(RangeConcrete::new( + Concrete::Bytes(len.as_u32() as u8, val), + self.loc, + ))), + _ => None, + }, + _ => { + let new = match self.val { + Concrete::DynBytes(ref val) => Some( + val.iter() + .enumerate() + .map(|(i, v)| { + let mut bytes = [0x00; 32]; + bytes[0] = *v; + let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); + (Elem::from(Concrete::from(U256::from(i))), (v, i)) + }) + .collect::>(), + ), + Concrete::String(ref val) => Some( + val.chars() + .enumerate() + .map(|(i, v)| { + let mut bytes = [0x00; 32]; + v.encode_utf8(&mut bytes[..]); + let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); + (Elem::from(Concrete::from(U256::from(i))), (v, i)) + }) + .collect::>(), + ), + Concrete::Array(ref val) => Some( + val.iter() + .enumerate() + .map(|(i, v)| { + let t = Elem::Concrete(RangeConcrete::new(v.clone(), self.loc)); + (Elem::from(Concrete::from(U256::from(i))), (t, i)) + }) + .collect::>(), + ), + Concrete::Bytes(size, val) => Some( + val.0 + .iter() + .take(size as usize) + .enumerate() + .map(|(i, v)| { + let mut bytes = [0x00; 32]; + bytes[0] = *v; + let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); + (Elem::from(Concrete::from(U256::from(i))), (v, i)) + }) + 
.collect::>(), + ), + _ => None, + }; + Some(Elem::ConcreteDyn(RangeDyn::new_w_op_nums( + Elem::Concrete(other.clone()), + new?, + self.loc, + ))) + } + } + } +} + +impl RangeMemSet> for RangeConcrete { + fn range_set_indices(&self, _range: &RangeDyn) -> Option> { + todo!() + } + + fn range_set_length(&self, _other: &RangeDyn) -> Option> { + unreachable!() + } +} + +impl RangeMemSet for Elem { + fn range_set_indices(&self, other: &Elem) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_set_indices(b), + (Elem::ConcreteDyn(a), Elem::Concrete(b)) => a.range_set_indices(b), + (Elem::Concrete(a), Elem::ConcreteDyn(b)) => a.range_set_indices(b), + (Elem::ConcreteDyn(a), Elem::ConcreteDyn(b)) => a.range_set_indices(b), + _e => None, + } + } + + fn range_set_length(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::ConcreteDyn(a), Elem::ConcreteDyn(b)) => a.range_set_length(b), + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_set_length(b), + (Elem::ConcreteDyn(a), _) => { + let mut a = a.clone(); + a.len = Box::new(other.clone()); + Some(Elem::ConcreteDyn(a)) + } + _e => None, + } + } +} + +pub fn exec_set_length( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, +) -> Option> { + if maximize { + lhs_max.range_set_length(rhs_max) + } else { + lhs_min.range_set_length(rhs_min) + } +} + +pub fn exec_set_indices( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + rhs: &Elem, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + if maximize { + if let Some(t) = lhs_max.range_set_indices(rhs_max) { + Some(t) + } else { + let max = rhs.simplify_maximize(analyzer, arena).ok()?; + lhs_max.range_set_indices(&max) + } + } else if let Some(t) = lhs_min.range_set_indices(rhs_min) { + Some(t) + } else { + let min = rhs.simplify_minimize(analyzer, arena).ok()?; + lhs_min.range_set_indices(&min) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + use ethers_core::types::U256; + use pretty_assertions::assert_eq; + use solang_parser::pt::Loc; + + #[test] + fn concrete_set_len() { + let x: RangeConcrete = RangeConcrete::new( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o']), + Loc::Implicit, + ); + let new_len = rc_uint256(10); + let result = x.range_set_length(&new_len).unwrap(); + assert_eq!(result.range_get_length().unwrap(), Elem::Concrete(new_len)); + } + + #[test] + fn dyn_set_len() { + let x = RangeDyn::from_concrete( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o']), + Loc::Implicit, + ) + .unwrap(); + let new_len = rc_uint256(10); + let result = x.range_set_length(&new_len).unwrap(); + assert_eq!(result.range_get_length().unwrap(), Elem::Concrete(new_len)); + } + + #[test] + fn dyn_set_ref_len() { + let x = RangeDyn::from_concrete( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o']), + Loc::Implicit, + ) + .unwrap(); + let new_len = test_reference(0, 6.into(), 10.into()); + let result = Elem::ConcreteDyn(x).range_set_length(&new_len).unwrap(); + assert_eq!(result.range_get_length().unwrap(), new_len); + } + + #[test] + fn concrete_concrete_set_indices() { + let x = RangeConcrete::new( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o', b's']), + Loc::Implicit, + ); + let y = RangeConcrete::new( + Concrete::from(vec![b'w', b'o', b'r', b'l', b'd']), + Loc::Implicit, + ); + + let expected = RangeConcrete::new( + Concrete::from(vec![b'w', b'o', b'r', b'l', b'd', b's']), + Loc::Implicit, + ); + let result = x + .range_set_indices(&y) + 
.unwrap() + .maybe_concrete_value() + .unwrap(); + assert_eq!(result.val, expected.val); + } + + #[test] + fn dyn_concrete_index() { + let x = RangeDyn::from_concrete( + Concrete::from(vec![b'h', b'e', b'l', b'l', b'o', b's']), + Loc::Implicit, + ) + .unwrap(); + let y = RangeConcrete::new( + Concrete::from(vec![b'w', b'o', b'r', b'l', b'd']), + Loc::Implicit, + ); + + let expected = RangeDyn::new_w_op_nums( + rc_uint256(6).into(), + vec![ + ( + Elem::from(rc_uint256(0)), + ( + Elem::Concrete(RangeConcrete::new(Concrete::from(b'w'), Loc::Implicit)), + 5usize, + ), + ), + ( + Elem::from(rc_uint256(1)), + ( + Elem::Concrete(RangeConcrete::new(Concrete::from(b'o'), Loc::Implicit)), + 6usize, + ), + ), + ( + Elem::from(rc_uint256(2)), + ( + Elem::Concrete(RangeConcrete::new(Concrete::from(b'r'), Loc::Implicit)), + 7usize, + ), + ), + ( + Elem::from(rc_uint256(3)), + ( + Elem::Concrete(RangeConcrete::new(Concrete::from(b'l'), Loc::Implicit)), + 8usize, + ), + ), + ( + Elem::from(rc_uint256(4)), + ( + Elem::Concrete(RangeConcrete::new(Concrete::from(b'd'), Loc::Implicit)), + 9usize, + ), + ), + ( + Elem::from(rc_uint256(5)), + ( + Elem::Concrete(RangeConcrete::new(Concrete::from(b's'), Loc::Implicit)), + 5usize, + ), + ), + ] + .into_iter() + .collect::, (Elem<_>, usize)>>(), + Loc::Implicit, + ); + + let result = x.range_set_indices(&y).unwrap(); + assert_eq!(result.dyn_map().unwrap(), &expected.val); + } + + #[test] + fn dyn_ref_set_indices() { + let idx = test_reference(0, 0.into(), 2000.into()); + let rand: Elem<_> = rc_uint256(1337).into(); + let val: Elem<_> = rc_uint256(200).into(); + let x = RangeDyn::new_for_indices(vec![(rand.clone(), rand.clone())], Loc::Implicit); + + let y = RangeDyn::new_for_indices(vec![(idx.clone(), val.clone())], Loc::Implicit); + + let expected = Elem::ConcreteDyn(RangeDyn::new_for_indices( + vec![(rand.clone(), rand), (idx.clone(), val)], + Loc::Implicit, + )); + let result = x.range_set_indices(&y).unwrap(); + assert_eq!(result, expected); + } + + fn test_reference(id: usize, min: U256, max: U256) -> Elem { + let mut re = Reference::new(id.into()); + let mi = Box::new(Elem::Concrete(RangeConcrete::new( + Concrete::from(min), + Loc::Implicit, + ))); + let ma = Box::new(Elem::Concrete(RangeConcrete::new( + Concrete::from(max), + Loc::Implicit, + ))); + re.minimized = Some(MinMaxed::Minimized(mi.clone())); + re.maximized = Some(MinMaxed::Maximized(ma.clone())); + re.flattened_min = Some(mi); + re.flattened_max = Some(ma); + Elem::Reference(re) + } +} diff --git a/crates/graph/src/range/exec/mem_ops/memcopy.rs b/crates/graph/src/range/exec/mem_ops/memcopy.rs new file mode 100644 index 00000000..1df79ff9 --- /dev/null +++ b/crates/graph/src/range/exec/mem_ops/memcopy.rs @@ -0,0 +1,25 @@ +use crate::elem::Elem; +use crate::exec_traits::RangeMemOps; +use crate::nodes::Concrete; + +impl RangeMemOps for Elem { + fn range_memcopy(&self) -> Option> { + match self { + Elem::Concrete(_a) => Some(self.clone()), + Elem::ConcreteDyn(_a) => Some(self.clone()), + _e => None, + } + } +} + +pub fn exec_memcopy( + lhs_min: &Elem, + lhs_max: &Elem, + maximize: bool, +) -> Option> { + if maximize { + Some(lhs_max.clone()) + } else { + Some(lhs_min.clone()) + } +} diff --git a/crates/graph/src/range/exec/mem_ops/mod.rs b/crates/graph/src/range/exec/mem_ops/mod.rs new file mode 100644 index 00000000..2b8c69f7 --- /dev/null +++ b/crates/graph/src/range/exec/mem_ops/mod.rs @@ -0,0 +1,9 @@ +mod concat; +mod mem_get; +mod mem_set; +mod memcopy; + +pub use concat::exec_concat; +pub use 
mem_get::{exec_get_index, exec_get_length}; +pub use mem_set::{exec_set_indices, exec_set_length}; +pub use memcopy::exec_memcopy; diff --git a/crates/graph/src/range/exec/min.rs b/crates/graph/src/range/exec/min.rs new file mode 100644 index 00000000..adb818ad --- /dev/null +++ b/crates/graph/src/range/exec/min.rs @@ -0,0 +1,90 @@ +use crate::nodes::Concrete; +use crate::range::{elem::*, exec_traits::*}; +use crate::GraphBackend; + +use shared::RangeArena; + +impl RangeMin for RangeConcrete { + fn range_min(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + let op_res = lhs_val.min(rhs_val); + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => match (&self.val, &other.val) { + (Concrete::Uint(lhs_size, _), Concrete::Int(_, neg_v)) + | (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, _)) => { + let val = Concrete::Int(*lhs_size, *neg_v); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { + let val = Concrete::Int(*lhs_size, *l.min(r)); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + _ => None, + }, + } + } +} + +impl RangeMin for Elem { + fn range_min(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_min(b), + (Elem::ConcreteDyn(a), Elem::ConcreteDyn(b)) => match a.op_num.cmp(&b.op_num) { + std::cmp::Ordering::Greater => Some(self.clone()), + std::cmp::Ordering::Less => Some(other.clone()), + _ => None, + }, + (c @ Elem::Concrete(_), Elem::ConcreteDyn(b)) + | (Elem::ConcreteDyn(b), c @ Elem::Concrete(_)) => { + if b.op_num == 0 { + Some(c.clone()) + } else { + None + } + } + (_, Elem::Null) => Some(self.clone()), + (Elem::Null, _) => Some(other.clone()), + _ => None, + } + } +} + +/// Executes the minimum given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. 
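+///
+/// Mirrors `exec_max`: all four pairwise `range_min` combinations of the bounds are collected,
+/// sorted, and the requested extreme is returned.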
+pub fn exec_min( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + let candidates = vec![ + lhs_min.range_min(rhs_min), + lhs_min.range_min(rhs_max), + lhs_max.range_min(rhs_min), + lhs_max.range_min(rhs_max), + ]; + let mut candidates = candidates.into_iter().flatten().collect::>(); + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} diff --git a/crates/graph/src/range/exec/mod.rs b/crates/graph/src/range/exec/mod.rs new file mode 100644 index 00000000..a12a21e8 --- /dev/null +++ b/crates/graph/src/range/exec/mod.rs @@ -0,0 +1,29 @@ +pub mod exec_op; + +mod bitwise; +pub use bitwise::{exec_bit_and, exec_bit_not, exec_bit_or, exec_bit_xor}; + +mod cast; +pub use cast::exec_cast; + +mod max; +pub use max::exec_max; + +mod min; +pub use min::exec_min; + +mod shift; +pub use shift::{exec_shl, exec_shr}; + +mod math_ops; +pub use math_ops::{exec_add, exec_div, exec_exp, exec_mod, exec_mul, exec_sub}; + +mod truthy_ops; +pub use truthy_ops::{ + exec_and, exec_eq_neq, exec_gt, exec_gte, exec_lt, exec_lte, exec_not, exec_or, +}; + +mod mem_ops; +pub use mem_ops::{ + exec_concat, exec_get_index, exec_get_length, exec_memcopy, exec_set_indices, exec_set_length, +}; diff --git a/crates/graph/src/range/exec/shift.rs b/crates/graph/src/range/exec/shift.rs new file mode 100644 index 00000000..13436542 --- /dev/null +++ b/crates/graph/src/range/exec/shift.rs @@ -0,0 +1,213 @@ +use crate::nodes::Concrete; +use crate::range::{elem::*, exec_traits::*}; +use crate::GraphBackend; + +use shared::RangeArena; + +use ethers_core::types::{I256, U256}; + +impl RangeShift for RangeConcrete { + fn range_shl(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + if rhs_val > 256.into() { + let val = self.val.u256_as_original(U256::zero()); + let rc = RangeConcrete::new(val, self.loc); + return Some(rc.into()); + } + + let max = Concrete::max_of_type(&self.val) + .unwrap() + .into_u256() + .unwrap(); + if self.val.int_val().is_some() { + // ints get weird treatment because they can push into the negatives + let size = self.val.int_size().unwrap(); + let op_res = I256::from_raw(lhs_val << rhs_val); + let val = Concrete::Int(size, op_res); + Some(RangeConcrete::new(val, self.loc).into()) + } else if rhs_val > lhs_val.leading_zeros().into() { + Some(RangeConcrete::new(max.into(), self.loc).into()) + } else { + let op_res = (lhs_val << rhs_val).min(max); + let val = self.val.u256_as_original(op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + } + _ => match (&self.val, &other.val) { + (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { + if val == &U256::zero() { + return Some(Elem::Concrete(self.clone())); + } + + let tmp = Concrete::Int(*lhs_size, I256::from(0i32)); + let min = Concrete::min_of_type(&tmp).unwrap().int_val().unwrap(); + + let (abs, is_min) = neg_v.overflowing_abs(); + if is_min { + if val > &U256::zero() { + Some(self.clone().into()) + } else { + let val = Concrete::Int(*lhs_size, I256::zero()); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + } else if val > &U256::from(abs.leading_zeros()) { + let val = Concrete::Int(*lhs_size, 
I256::zero()); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } else { + let raw = I256::from_raw(abs.into_raw() << val); + let as_int = if raw == I256::MIN { + raw + } else { + I256::from(-1i32) * raw + }; + + let op_res = as_int.max(min); + let val = Concrete::Int(*lhs_size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + } + _ => None, + }, + } + } + + fn range_shr(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + if rhs_val == U256::zero() { + Some(Elem::Concrete(self.clone())) + } else if rhs_val > U256::from(256) { + let op_res = self.val.u256_as_original(U256::zero()); + let rc = RangeConcrete::new(op_res, self.loc); + Some(rc.into()) + } else { + let op_res = self.val.u256_as_original(lhs_val >> rhs_val); + let rc = RangeConcrete::new(op_res, self.loc); + Some(rc.into()) + } + } + _ => match (&self.val, &other.val) { + (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { + if val == &U256::zero() { + Some(Elem::Concrete(self.clone())) + } else if val > &U256::from(*lhs_size) { + let val = Concrete::Int(*lhs_size, I256::from(-1i32)); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } else { + let tmp = Concrete::Int(*lhs_size, I256::from(0i32)); + let min = Concrete::min_of_type(&tmp).unwrap().int_val().unwrap(); + let (abs, is_min) = neg_v.overflowing_abs(); + let bits = if is_min { + 255 + } else { + 255 - abs.leading_zeros() + }; + + if val >= &U256::from(bits) { + let val = Concrete::Int(*lhs_size, I256::from(-1i32)); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } else { + let shr_val = abs.into_raw() >> val; + let as_int = I256::from_raw(shr_val); + let op_res = (I256::from(-1i32) * as_int).max(min); + let val = Concrete::Int(*lhs_size, op_res); + let rc = RangeConcrete::new(val, self.loc); + Some(rc.into()) + } + } + } + _ => None, + }, + } + } +} + +impl RangeShift for Elem { + fn range_shl(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_shl(b), + _ => None, + } + } + fn range_shr(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_shr(b), + _ => None, + } + } +} + +/// Executes the `shift left` operation given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. +pub fn exec_shl( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + let candidates = vec![ + lhs_min.range_shl(rhs_min), + lhs_min.range_shl(rhs_max), + lhs_max.range_shl(rhs_min), + lhs_max.range_shl(rhs_max), + ]; + let mut candidates = candidates.into_iter().flatten().collect::>(); + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} + +/// Executes the `shift right` operation given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. 
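+///
+/// Per `range_shr` above: a shift amount of zero leaves the value unchanged, unsigned values
+/// shifted by more than 256 bits collapse to zero, and arithmetic right shifts of negative
+/// values saturate to `-1` once every significant bit has been shifted out.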
+pub fn exec_shr( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + let candidates = vec![ + lhs_min.range_shr(rhs_min), + lhs_min.range_shr(rhs_max), + lhs_max.range_shr(rhs_min), + lhs_max.range_shr(rhs_max), + ]; + let mut candidates = candidates.into_iter().flatten().collect::>(); + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} diff --git a/crates/graph/src/range/exec/truthy_ops/logical.rs b/crates/graph/src/range/exec/truthy_ops/logical.rs new file mode 100644 index 00000000..aa4796b7 --- /dev/null +++ b/crates/graph/src/range/exec/truthy_ops/logical.rs @@ -0,0 +1,145 @@ +use crate::nodes::Concrete; +use crate::range::{elem::*, exec_traits::*}; +use crate::GraphBackend; + +use shared::RangeArena; + +impl RangeUnary for RangeConcrete { + fn range_not(&self) -> Option> { + match self.val { + Concrete::Bool(b) => Some(RangeConcrete::new(Concrete::Bool(!b), self.loc).into()), + _ => None, + } + } + + fn range_and(&self, other: &Self) -> Option> { + match (&self.val, &other.val) { + (Concrete::Bool(a), Concrete::Bool(b)) => { + Some(RangeConcrete::new(Concrete::Bool(*a && *b), self.loc).into()) + } + _ => None, + } + } + + fn range_or(&self, other: &Self) -> Option> { + match (&self.val, &other.val) { + (Concrete::Bool(a), Concrete::Bool(b)) => { + Some(RangeConcrete::new(Concrete::Bool(*a || *b), self.loc).into()) + } + _ => None, + } + } +} + +impl RangeUnary for Elem { + fn range_not(&self) -> Option> { + match self { + Elem::Concrete(a) => a.range_not(), + _ => None, + } + } + fn range_and(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_and(b), + _ => None, + } + } + fn range_or(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_or(b), + _ => None, + } + } +} + +pub fn exec_and( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + let candidates = vec![ + lhs_min.range_and(rhs_min), + lhs_min.range_and(rhs_max), + lhs_max.range_and(rhs_min), + lhs_max.range_and(rhs_max), + ]; + let mut candidates = candidates.into_iter().flatten().collect::>(); + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} + +pub fn exec_or( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + let candidates = vec![ + lhs_min.range_or(rhs_min), + lhs_min.range_or(rhs_max), + lhs_max.range_or(rhs_min), + lhs_max.range_or(rhs_max), + ]; + let mut candidates = candidates.into_iter().flatten().collect::>(); + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} + +pub fn exec_not( + lhs_min: 
&Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + _analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + assert!(matches!(rhs_min, Elem::Null) && matches!(rhs_max, Elem::Null)); + let candidates = vec![lhs_min.range_not(), lhs_max.range_not()]; + let mut candidates = candidates.into_iter().flatten().collect::>(); + candidates.sort_by(|a, b| match a.range_ord(b, arena) { + Some(r) => r, + _ => std::cmp::Ordering::Less, + }); + + if candidates.is_empty() { + return None; + } + + if maximize { + Some(candidates.remove(candidates.len() - 1)) + } else { + Some(candidates.remove(0)) + } +} diff --git a/crates/graph/src/range/exec/truthy_ops/mod.rs b/crates/graph/src/range/exec/truthy_ops/mod.rs new file mode 100644 index 00000000..4a100732 --- /dev/null +++ b/crates/graph/src/range/exec/truthy_ops/mod.rs @@ -0,0 +1,5 @@ +mod logical; +mod ord; + +pub use logical::{exec_and, exec_not, exec_or}; +pub use ord::{exec_eq_neq, exec_gt, exec_gte, exec_lt, exec_lte}; diff --git a/crates/graph/src/range/exec/truthy_ops/ord.rs b/crates/graph/src/range/exec/truthy_ops/ord.rs new file mode 100644 index 00000000..b5bc1046 --- /dev/null +++ b/crates/graph/src/range/exec/truthy_ops/ord.rs @@ -0,0 +1,333 @@ +use crate::nodes::Concrete; +use crate::range::{elem::*, exec_traits::*}; +use crate::GraphBackend; + +use shared::RangeArena; + +use solang_parser::pt::Loc; + +impl RangeOrd for RangeConcrete { + fn range_ord_eq(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + let rc = RangeConcrete::new(Concrete::Bool(lhs_val == rhs_val), self.loc); + Some(rc.into()) + } + _ => match (&self.val, &other.val) { + (Concrete::Uint(_, _), Concrete::Int(_, _)) + | (Concrete::Int(_, _), Concrete::Uint(_, _)) => { + Some(RangeConcrete::new(Concrete::Bool(false), self.loc).into()) + } + (Concrete::Int(_lhs_size, l), Concrete::Int(_rhs_size, r)) => { + Some(RangeConcrete::new(Concrete::Bool(l == r), self.loc).into()) + } + _ => None, + }, + } + } + + fn range_neq(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + Some(RangeConcrete::new(Concrete::Bool(lhs_val != rhs_val), self.loc).into()) + } + _ => match (&self.val, &other.val) { + (Concrete::Uint(_, _), Concrete::Int(_, _)) + | (Concrete::Int(_, _), Concrete::Uint(_, _)) => { + Some(RangeConcrete::new(Concrete::Bool(true), self.loc).into()) + } + (Concrete::Int(_lhs_size, l), Concrete::Int(_rhs_size, r)) => { + Some(RangeConcrete::new(Concrete::Bool(l != r), self.loc).into()) + } + _ => None, + }, + } + } + + fn range_gt(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + Some(RangeConcrete::new(Concrete::Bool(lhs_val > rhs_val), self.loc).into()) + } + _ => match (&self.val, &other.val) { + (Concrete::Uint(_lhs_size, _val), Concrete::Int(_, _)) => { + Some(RangeConcrete::new(Concrete::Bool(true), self.loc).into()) + } + (Concrete::Int(_lhs_size, _), Concrete::Uint(_, _val)) => { + Some(RangeConcrete::new(Concrete::Bool(false), self.loc).into()) + } + (Concrete::Int(_lhs_size, l), Concrete::Int(_rhs_size, r)) => { + Some(RangeConcrete::new(Concrete::Bool(l > r), self.loc).into()) + } + _ => None, + }, + } + } + + fn range_lt(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + 
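
// Standalone sketch (i128 stand-ins, not the crate's Concrete) of the two-stage
// comparison used by the ordering impls below: try the unsigned view first; if either
// side is negative (its unsigned conversion fails), fall back to sign rules, where a
// non-negative value is always greater than a negative one.
fn concrete_gt(lhs: i128, rhs: i128) -> bool {
    let to_unsigned = |v: i128| u128::try_from(v).ok(); // None for negatives
    match (to_unsigned(lhs), to_unsigned(rhs)) {
        (Some(a), Some(b)) => a > b, // both "uint-like"
        (Some(_), None) => true,     // uint > negative int
        (None, Some(_)) => false,    // negative int > uint is never true
        (None, None) => lhs > rhs,   // both negative ints
    }
}
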
Some(RangeConcrete::new(Concrete::Bool(lhs_val < rhs_val), self.loc).into()) + } + _ => match (&self.val, &other.val) { + (Concrete::Uint(_lhs_size, _val), Concrete::Int(_, _)) => { + Some(RangeConcrete::new(Concrete::Bool(false), self.loc).into()) + } + (Concrete::Int(_lhs_size, _), Concrete::Uint(_, _val)) => { + Some(RangeConcrete::new(Concrete::Bool(true), self.loc).into()) + } + (Concrete::Int(_lhs_size, l), Concrete::Int(_rhs_size, r)) => { + Some(RangeConcrete::new(Concrete::Bool(l < r), self.loc).into()) + } + _ => None, + }, + } + } + + fn range_gte(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + Some(RangeConcrete::new(Concrete::Bool(lhs_val >= rhs_val), self.loc).into()) + } + _ => match (&self.val, &other.val) { + (Concrete::Uint(_lhs_size, _val), Concrete::Int(_, _)) => { + Some(RangeConcrete::new(Concrete::Bool(true), self.loc).into()) + } + (Concrete::Int(_lhs_size, _), Concrete::Uint(_, _val)) => { + Some(RangeConcrete::new(Concrete::Bool(false), self.loc).into()) + } + (Concrete::Int(_lhs_size, l), Concrete::Int(_rhs_size, r)) => { + Some(RangeConcrete::new(Concrete::Bool(l >= r), self.loc).into()) + } + _ => None, + }, + } + } + + fn range_lte(&self, other: &Self) -> Option> { + match (self.val.into_u256(), other.val.into_u256()) { + (Some(lhs_val), Some(rhs_val)) => { + Some(RangeConcrete::new(Concrete::Bool(lhs_val <= rhs_val), self.loc).into()) + } + _ => match (&self.val, &other.val) { + (Concrete::Uint(_lhs_size, _val), Concrete::Int(_, _)) => { + Some(RangeConcrete::new(Concrete::Bool(false), self.loc).into()) + } + (Concrete::Int(_lhs_size, _), Concrete::Uint(_, _val)) => { + Some(RangeConcrete::new(Concrete::Bool(true), self.loc).into()) + } + (Concrete::Int(_lhs_size, l), Concrete::Int(_rhs_size, r)) => { + Some(RangeConcrete::new(Concrete::Bool(l <= r), self.loc).into()) + } + _ => None, + }, + } + } +} + +impl RangeOrd for Elem { + fn range_ord_eq(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_ord_eq(b), + _ => None, + } + } + fn range_neq(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_neq(b), + _ => None, + } + } + fn range_gt(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_gt(b), + _ => None, + } + } + + fn range_lt(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_lt(b), + _ => None, + } + } + + fn range_gte(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_gte(b), + _ => None, + } + } + + fn range_lte(&self, other: &Self) -> Option> { + match (self, other) { + (Elem::Concrete(a), Elem::Concrete(b)) => a.range_lte(b), + _ => None, + } + } +} + +/// Executes the `greater than` operation given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. +pub fn exec_gt( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, +) -> Option> { + if maximize { + lhs_max.range_gt(rhs_min) + } else { + lhs_min.range_gt(rhs_max) + } +} + +/// Executes the `less than` operation given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. 
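
// Standalone i64 sketch of the bound selection used by exec_gt above and exec_lt below:
// the optimistic answer pairs the "strong" bound of one side with the "weak" bound of
// the other, and the pessimistic answer does the opposite.
fn exec_gt_sketch(lhs: (i64, i64), rhs: (i64, i64), maximize: bool) -> bool {
    if maximize {
        lhs.1 > rhs.0 // can lhs ever exceed rhs? (lhs_max vs rhs_min)
    } else {
        lhs.0 > rhs.1 // must lhs always exceed rhs? (lhs_min vs rhs_max)
    }
}
// For lhs = [1, 10] and rhs = [5, 7]:
//   maximize => 10 > 5 => true   (a greater outcome is possible)
//   minimize => 1  > 7 => false  (it is not guaranteed)
// so the resulting boolean range is [false, true].
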
+pub fn exec_lt( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, +) -> Option> { + if maximize { + lhs_min.range_lt(rhs_max) + } else { + lhs_max.range_lt(rhs_min) + } +} + +/// Executes the `greater than or equal` operation given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. +pub fn exec_gte( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, +) -> Option> { + if maximize { + lhs_max.range_gte(rhs_min) + } else { + lhs_min.range_gte(rhs_max) + } +} + +/// Executes the `less than or equal` operation given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. +pub fn exec_lte( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, +) -> Option> { + if maximize { + lhs_min.range_lte(rhs_max) + } else { + lhs_max.range_lte(rhs_min) + } +} + +/// Executes the `equal` operation or `not equal` operation given the minimum and maximum of each element. It returns either the _minimum_ bound or _maximum_ bound +/// of the operation. +pub fn exec_eq_neq( + lhs_min: &Elem, + lhs_max: &Elem, + rhs_min: &Elem, + rhs_max: &Elem, + maximize: bool, + eq: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, +) -> Option> { + // prevent trying to eval when we have dependents + if !lhs_min.dependent_on(analyzer, arena).is_empty() + || !lhs_max.dependent_on(analyzer, arena).is_empty() + || !rhs_min.dependent_on(analyzer, arena).is_empty() + || !rhs_max.dependent_on(analyzer, arena).is_empty() + { + return None; + } + + let loc = if let Some(c) = lhs_min.maybe_concrete() { + c.loc + } else if let Some(c) = lhs_max.maybe_concrete() { + c.loc + } else if let Some(c) = rhs_min.maybe_concrete() { + c.loc + } else if let Some(c) = rhs_max.maybe_concrete() { + c.loc + } else { + Loc::Implicit + }; + + // We want to prove that there exists some values for LHS and RHS that are equal + // We do this for equality maximization and inequality minimization + let overlap_test = eq && maximize || !eq && !maximize; + + if overlap_test { + // check for any overlap + // + // Check if lhs max > rhs min + // LHS: <--?---| max + // RHS: min |----?----> + let lhs_max_rhs_min_ord = lhs_max.range_ord(rhs_min, arena); + + // Check if lhs min < rhs max + // LHS: min |----?----> + // RHS: <--?---| max + let lhs_min_rhs_max_ord = lhs_min.range_ord(rhs_max, arena); + + // if lhs max is less than the rhs min, it has to be false + if matches!(lhs_max_rhs_min_ord, Some(std::cmp::Ordering::Less)) { + return Some(Elem::Concrete(RangeConcrete { + val: Concrete::Bool(!eq), + loc, + })); + } + + // if lhs min is greater than the rhs max, it has to be false + if matches!(lhs_min_rhs_max_ord, Some(std::cmp::Ordering::Greater)) { + return Some(Elem::Concrete(RangeConcrete { + val: Concrete::Bool(!eq), + loc, + })); + } + + // lhs_max >= rhs_min + // lhs_min <= rhs_max + Some(Elem::Concrete(RangeConcrete { + val: Concrete::Bool(eq), + loc, + })) + } else { + // We want to check that there is *some* case in which they can be *not* equal. 
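
// Standalone i64 sketch of the overlap test used by exec_eq_neq below: when proving the
// optimistic case (equality can hold, or inequality can hold), it is enough to check
// whether the two intervals overlap at all.
fn can_be_equal(lhs: (i64, i64), rhs: (i64, i64)) -> bool {
    // overlap  <=>  lhs_max >= rhs_min  &&  lhs_min <= rhs_max
    lhs.1 >= rhs.0 && lhs.0 <= rhs.1
}
// can_be_equal((1, 10), (5, 7)) => true   (they share [5, 7], so == may be true)
// can_be_equal((1, 4),  (5, 7)) => false  (disjoint, so == is definitely false)
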
+ // This only occurs when both sides are constant and equal + match ( + // check if lhs is constant + lhs_min.range_ord(lhs_max, arena), + // check if rhs is constant + rhs_min.range_ord(rhs_max, arena), + // check if lhs is equal to rhs + lhs_min.range_ord(rhs_min, arena), + ) { + // LHS & RHS are constant and equal + ( + Some(std::cmp::Ordering::Equal), + Some(std::cmp::Ordering::Equal), + Some(std::cmp::Ordering::Equal), + ) => Some(Elem::Concrete(RangeConcrete { + val: Concrete::Bool(eq), + loc, + })), + // LHS or RHS is not constant or they are constant and unequal + _ => Some(Elem::Concrete(RangeConcrete { + val: Concrete::Bool(!eq), + loc, + })), + } + } +} diff --git a/crates/graph/src/range/exec_traits.rs b/crates/graph/src/range/exec_traits.rs new file mode 100644 index 00000000..061b466d --- /dev/null +++ b/crates/graph/src/range/exec_traits.rs @@ -0,0 +1,183 @@ +use crate::{range::elem::Elem, GraphBackend}; +use shared::RangeArena; + +use std::hash::Hash; + +/// For execution of operations to be performed on range expressions +pub trait ExecOp { + type GraphError; + /// Attempts to execute ops by evaluating expressions and applying the op for the left-hand-side + /// and right-hand-side + fn exec_op( + &self, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, Self::GraphError>; + + fn exec( + &self, + parts: (Elem, Elem, Elem, Elem), + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, Self::GraphError>; + /// Cache execution + fn cache_exec_op( + &mut self, + maximize: bool, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), Self::GraphError>; + + fn spread( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(Elem, Elem, Elem, Elem), Self::GraphError>; + + fn simplify_spread( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(Elem, Elem, Elem, Elem), Self::GraphError>; + + fn uncache_exec(&mut self); + + fn simplify_exec_op( + &self, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, Self::GraphError>; + + /// Attempts to simplify an expression (i.e. 
just apply constant folding) + fn simplify_exec( + &self, + parts: (Elem, Elem, Elem, Elem), + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, Self::GraphError> { + self.exec(parts, maximize, analyzer, arena) + } +} + +pub trait RangeAdd { + /// Perform addition between two range elements + fn range_add(&self, other: &Rhs) -> Option>; + fn range_wrapping_add(&self, other: &Rhs) -> Option>; +} + +pub trait RangeSub { + /// Perform subtraction between two range elements + fn range_sub(&self, other: &Rhs) -> Option>; + fn range_wrapping_sub(&self, other: &Rhs) -> Option>; +} + +pub trait RangeDiv { + /// Perform division between two range elements + fn range_div(&self, other: &Rhs) -> Option>; + + fn range_wrapping_div(&self, other: &Rhs) -> Option>; +} + +pub trait RangeExp { + /// Perform exponentiation between two range elements + fn range_exp(&self, other: &Rhs) -> Option>; +} + +pub trait RangeMul { + /// Perform multiplication between two range elements + fn range_mul(&self, other: &Rhs) -> Option>; + fn range_wrapping_mul(&self, other: &Rhs) -> Option>; +} + +pub trait RangeMod { + /// Perform modulo between two range elements + fn range_mod(&self, other: &Rhs) -> Option>; +} + +pub trait RangeBitwise { + /// Perform a bitwise AND + fn range_bit_and(&self, other: &Rhs) -> Option>; + /// Perform a bitwise OR + fn range_bit_or(&self, other: &Rhs) -> Option>; + /// Perform a bitwise XOR + fn range_bit_xor(&self, other: &Rhs) -> Option>; + /// Perform a bitwise NOT + fn range_bit_not(&self) -> Option>; +} + +pub trait RangeShift { + /// Perform a bitwise shift left + fn range_shl(&self, other: &Rhs) -> Option>; + /// Perform a bitwise shift right + fn range_shr(&self, other: &Rhs) -> Option>; +} + +pub trait RangeCast { + /// Perform a cast on an element to the type of the right hand side + fn range_cast(&self, other: &Rhs) -> Option>; +} + +pub trait RangeUnary { + /// Perform a logical NOT + fn range_not(&self) -> Option>; + /// Perform a logical AND + fn range_and(&self, other: &Rhs) -> Option>; + /// Perform a logical OR + fn range_or(&self, other: &Rhs) -> Option>; +} + +pub trait RangeMax { + /// Take the maximum of two range elements + fn range_max(&self, other: &Rhs) -> Option>; +} + +pub trait RangeMin { + /// Take the minimum of two range elements + fn range_min(&self, other: &Rhs) -> Option>; +} + +pub trait RangeOrd { + /// Perform a logical equality test + fn range_ord_eq(&self, other: &Rhs) -> Option>; + /// Perform a logical inequality test + fn range_neq(&self, other: &Rhs) -> Option>; + /// Perform a logical greater than test + fn range_gt(&self, other: &Rhs) -> Option>; + /// Perform a logical less than test + fn range_lt(&self, other: &Rhs) -> Option>; + /// Perform a logical greater than or equal test + fn range_gte(&self, other: &Rhs) -> Option>; + /// Perform a logical less than or equal test + fn range_lte(&self, other: &Rhs) -> Option>; +} + +pub trait RangeMemOps: RangeMemSet + RangeConcat + Sized { + /// Perform a memory copy + fn range_memcopy(&self) -> Option>; +} + +pub trait RangeConcat { + /// Perform a cast on an element to the type of the right hand side + fn range_concat(&self, other: &Rhs) -> Option>; +} + +pub trait RangeMemSet { + /// Applies a transformation of indices + fn range_set_indices(&self, other: &Rhs) -> Option>; + /// Applies a transformation of length + fn range_set_length(&self, other: &Rhs) -> Option>; +} + +pub trait RangeMemGet: RangeMemLen { + /// Gets an index + fn range_get_index(&self, 
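
// Standalone sketch (toy types, not the crate's generics) of the trait pattern used by
// the Range* traits in this file: each arithmetic capability is its own small trait
// returning Option, where None means "cannot be evaluated concretely", so each element
// type implements exactly the operations that make sense for it.
struct Bound(i64);

trait RangeAddSketch<Rhs = Self> {
    fn range_add(&self, other: &Rhs) -> Option<Bound>;
}

impl RangeAddSketch for Bound {
    fn range_add(&self, other: &Self) -> Option<Bound> {
        // checked_add models "give up rather than silently overflow"
        self.0.checked_add(other.0).map(Bound)
    }
}
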
other: &Rhs) -> Option>; +} + +pub trait RangeMemLen { + /// Gets the length + fn range_get_length(&self) -> Option>; +} diff --git a/crates/graph/src/range/mod.rs b/crates/graph/src/range/mod.rs new file mode 100644 index 00000000..119e48ac --- /dev/null +++ b/crates/graph/src/range/mod.rs @@ -0,0 +1,20 @@ +//! Ranges consist of a minimum range element and a maximum range element. +//! +//! +//! +//! We define an algebra of types. This means we can perform calculations between two range elements. +//! +//! +//! +//! +//! + +pub mod elem; +pub mod exec; +pub mod exec_traits; +pub mod range_string; +mod range_trait; +mod solc_range; + +pub use range_trait::*; +pub use solc_range::*; diff --git a/shared/src/range/range_string.rs b/crates/graph/src/range/range_string.rs similarity index 53% rename from shared/src/range/range_string.rs rename to crates/graph/src/range/range_string.rs index 98bf18ec..d845012f 100644 --- a/shared/src/range/range_string.rs +++ b/crates/graph/src/range/range_string.rs @@ -1,16 +1,12 @@ -use crate::analyzer::GraphLike; -use crate::context::ContextVarNode; -use crate::range::elem::RangeElem; -use crate::range::elem::RangeOp; -use crate::range::elem_ty::Dynamic; -use crate::range::elem_ty::RangeExpr; -use crate::range::Elem; -use crate::range::RangeDyn; -use crate::Concrete; - -use std::collections::BTreeMap; +use crate::{ + nodes::{Concrete, ContextVarNode}, + range::elem::*, + GraphBackend, +}; +use shared::RangeArena; use solang_parser::pt::Loc; +use std::collections::BTreeMap; /// A range element string consisting of a string and a location #[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)] @@ -43,59 +39,80 @@ impl RangeString { /// String related functions for ranges pub trait ToRangeString { /// Gets the definition string of the range element - fn def_string(&self, analyzer: &impl GraphLike) -> RangeElemString; + fn def_string( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> RangeElemString; /// Converts a range to a human string - fn to_range_string(&self, maximize: bool, analyzer: &impl GraphLike) -> RangeElemString; + fn to_range_string( + &self, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> RangeElemString; } impl ToRangeString for Elem { - fn def_string(&self, analyzer: &impl GraphLike) -> RangeElemString { + fn def_string( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> RangeElemString { match self { Elem::Concrete(c) => RangeElemString::new(c.val.as_human_string(), c.loc), - Elem::Dynamic(Dynamic { idx, .. }) => { + Elem::Reference(Reference { idx, .. }) => { let cvar = ContextVarNode::from(*idx) .first_version(analyzer) .underlying(analyzer) .unwrap(); RangeElemString::new(cvar.display_name.clone(), cvar.loc.unwrap_or(Loc::Implicit)) } - Elem::ConcreteDyn(rd) => rd.def_string(analyzer), - Elem::Expr(expr) => expr.def_string(analyzer), + Elem::ConcreteDyn(rd) => rd.def_string(analyzer, arena), + Elem::Expr(expr) => expr.def_string(analyzer, arena), Elem::Null => RangeElemString::new("null".to_string(), Loc::Implicit), + Elem::Arena(_) => self.dearenaize_clone(arena).def_string(analyzer, arena), } } - fn to_range_string(&self, maximize: bool, analyzer: &impl GraphLike) -> RangeElemString { + fn to_range_string( + &self, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> RangeElemString { match self { Elem::Concrete(c) => RangeElemString::new(c.val.as_human_string(), c.loc), - Elem::Dynamic(Dynamic { idx, .. 
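
// Minimal standalone illustration of the "algebra over ranges" idea described in the
// module docs above: a range is a (min, max) pair and binary operations combine the
// corresponding bounds. Plain i64 here, not the crate's Elem<Concrete>.
#[derive(Debug, PartialEq)]
struct IntRange {
    min: i64,
    max: i64,
}

impl IntRange {
    fn add(&self, other: &IntRange) -> IntRange {
        IntRange {
            min: self.min + other.min, // smallest possible sum
            max: self.max + other.max, // largest possible sum
        }
    }
}
// IntRange { min: 0, max: 10 }.add(&IntRange { min: 5, max: 20 })
//   => IntRange { min: 5, max: 30 }
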
}) => { + Elem::Reference(Reference { idx, .. }) => { let as_var = ContextVarNode::from(*idx); - let name = as_var.display_name(analyzer).unwrap(); + let name = as_var.as_controllable_name(analyzer, arena).unwrap(); RangeElemString::new(name, as_var.loc(analyzer).unwrap()) } - Elem::ConcreteDyn(rd) => rd.to_range_string(maximize, analyzer), - Elem::Expr(expr) => expr.to_range_string(maximize, analyzer), + Elem::ConcreteDyn(rd) => rd.to_range_string(maximize, analyzer, arena), + Elem::Expr(expr) => expr.to_range_string(maximize, analyzer, arena), Elem::Null => RangeElemString::new("null".to_string(), Loc::Implicit), + Elem::Arena(_) => self + .dearenaize_clone(arena) + .to_range_string(maximize, analyzer, arena), } } } impl ToRangeString for RangeDyn { - fn def_string(&self, analyzer: &impl GraphLike) -> RangeElemString { - let displayed_vals = self - .val - .iter() - .take(20) - .map(|(key, val)| (key.minimize(analyzer).unwrap(), val)) - .collect::>(); + fn def_string( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> RangeElemString { + let displayed_vals = self.val.iter().take(20).collect::>(); let val_str = displayed_vals .iter() - .map(|(key, val)| { + .map(|(key, (val, _))| { format!( "{}: {}", - key.def_string(analyzer).s, - val.def_string(analyzer).s + key.def_string(analyzer, arena).s, + val.def_string(analyzer, arena).s ) }) .collect::>() @@ -104,37 +121,36 @@ impl ToRangeString for RangeDyn { RangeElemString::new( format!( "{{len: {}, indices: [{}]}}", - self.len.to_range_string(false, analyzer).s, + self.len.to_range_string(false, analyzer, arena).s, val_str ), self.loc, ) } - fn to_range_string(&self, maximize: bool, analyzer: &impl GraphLike) -> RangeElemString { + fn to_range_string( + &self, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> RangeElemString { let val_str = if self.val.len() > 10 { let displayed_vals = self .val .iter() .take(5) - .map(|(key, val)| { - if maximize { - (key.maximize(analyzer).unwrap(), val) - } else { - (key.minimize(analyzer).unwrap(), val) - } + .filter(|(_key, (val, _op))| *val != Elem::Null) + .map(|(key, (val, _op))| { + ( + key.to_range_string(maximize, analyzer, arena).s, + val.to_range_string(maximize, analyzer, arena).s, + ) }) .collect::>(); let val_str_head = displayed_vals .iter() - .map(|(key, val)| { - format!( - "{}: {}", - key.def_string(analyzer).s, - val.def_string(analyzer).s - ) - }) + .map(|(key, val)| format!("{}: {}", key, val)) .collect::>() .join(", "); @@ -143,24 +159,19 @@ impl ToRangeString for RangeDyn { .iter() .rev() .take(5) - .map(|(key, val)| { - if maximize { - (key.maximize(analyzer).unwrap(), val) - } else { - (key.minimize(analyzer).unwrap(), val) - } + .filter(|(_key, (val, _op))| *val != Elem::Null) + .map(|(key, (val, _op))| { + // (key.to_range_string(maximize, analyzer).s, val.to_range_string(maximize, analyzer).s) + ( + key.to_range_string(maximize, analyzer, arena).s, + val.to_range_string(maximize, analyzer, arena).s, + ) }) .collect::>(); let val_str_tail = displayed_vals_tail .iter() - .map(|(key, val)| { - format!( - "{}: {}", - key.def_string(analyzer).s, - val.def_string(analyzer).s - ) - }) + .map(|(key, val)| format!("{}: {}", key, val)) .collect::>() .join(", "); format!("{val_str_head} ... 
{val_str_tail}") @@ -169,24 +180,18 @@ impl ToRangeString for RangeDyn { .val .iter() .take(10) - .map(|(key, val)| { - if maximize { - (key.maximize(analyzer).unwrap(), val) - } else { - (key.minimize(analyzer).unwrap(), val) - } + .filter(|(_key, (val, _op))| *val != Elem::Null) + .map(|(key, (val, _op))| { + ( + key.to_range_string(maximize, analyzer, arena).s, + val.to_range_string(maximize, analyzer, arena).s, + ) }) .collect::>(); displayed_vals .iter() - .map(|(key, val)| { - format!( - "{}: {}", - key.def_string(analyzer).s, - val.def_string(analyzer).s - ) - }) + .map(|(key, val)| format!("{}: {}", key, val,)) .collect::>() .join(", ") }; @@ -194,7 +199,7 @@ impl ToRangeString for RangeDyn { RangeElemString::new( format!( "{{len: {}, indices: {{{}}}}}", - self.len.to_range_string(maximize, analyzer).s, + self.len.to_range_string(maximize, analyzer, arena).s, val_str ), self.loc, @@ -203,12 +208,26 @@ impl ToRangeString for RangeDyn { } impl ToRangeString for RangeExpr { - fn def_string(&self, analyzer: &impl GraphLike) -> RangeElemString { - self.lhs.def_string(analyzer) + fn def_string( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> RangeElemString { + self.lhs.def_string(analyzer, arena) } - fn to_range_string(&self, maximize: bool, analyzer: &impl GraphLike) -> RangeElemString { - let lhs_r_str = self.lhs.to_range_string(maximize, analyzer); + fn to_range_string( + &self, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> RangeElemString { + if let MaybeCollapsed::Collapsed(collapsed) = + collapse(*self.lhs.clone(), self.op, *self.rhs.clone(), arena) + { + return collapsed.to_range_string(maximize, analyzer, arena); + } + let lhs_r_str = self.lhs.to_range_string(maximize, analyzer, arena); let lhs_str = match *self.lhs { Elem::Expr(_) => { let new_str = format!("({})", lhs_r_str.s); @@ -217,7 +236,7 @@ impl ToRangeString for RangeExpr { _ => lhs_r_str, }; - let rhs_r_str = self.rhs.to_range_string(maximize, analyzer); + let rhs_r_str = self.rhs.to_range_string(maximize, analyzer, arena); let rhs_str = match *self.rhs { Elem::Expr(_) => { @@ -229,14 +248,14 @@ impl ToRangeString for RangeExpr { if matches!(self.op, RangeOp::Min | RangeOp::Max) { RangeElemString::new( - format!("{}({}, {})", self.op.to_string(), lhs_str.s, rhs_str.s), + format!("{}{{{}, {}}}", self.op.to_string(), lhs_str.s, rhs_str.s), lhs_str.loc, ) - } else if matches!(self.op, RangeOp::Cast | RangeOp::Concat) { + } else if matches!(self.op, RangeOp::Cast) { let rhs = if maximize { - self.rhs.maximize(analyzer).unwrap() + self.rhs.maximize(analyzer, arena).unwrap() } else { - self.rhs.minimize(analyzer).unwrap() + self.rhs.minimize(analyzer, arena).unwrap() }; match rhs { @@ -255,15 +274,29 @@ impl ToRangeString for RangeExpr { } } else if matches!(self.op, RangeOp::BitNot) { let lhs = if maximize { - self.lhs.maximize(analyzer).unwrap() + self.lhs.maximize(analyzer, arena).unwrap() } else { - self.lhs.minimize(analyzer).unwrap() + self.lhs.minimize(analyzer, arena).unwrap() }; match lhs { Elem::Concrete(_c) => RangeElemString::new(format!("~{}", lhs_str.s), lhs_str.loc), _ => RangeElemString::new(format!("~{}", lhs_str.s), lhs_str.loc), } + } else if matches!(self.op, RangeOp::SetIndices) { + RangeElemString::new( + format!("set_indicies({}, {})", lhs_str.s, rhs_str.s), + lhs_str.loc, + ) + } else if matches!(self.op, RangeOp::GetLength) { + RangeElemString::new(format!("get_length({})", lhs_str.s), lhs_str.loc) + } else if matches!(self.op, 
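
// Quick standalone sketch mirroring the formatting choices of this to_range_string impl:
// min/max use brace syntax, unary and memory operations use call syntax, and everything
// else falls back to infix. The op names here are illustrative strings, not RangeOp.
fn render(op: &str, lhs: &str, rhs: &str) -> String {
    match op {
        "min" | "max" => format!("{op}{{{lhs}, {rhs}}}"), // e.g. "max{x, 0}"
        "~" => format!("~{lhs}"),                         // bitwise not
        "get_length" => format!("get_length({lhs})"),
        "set_length" | "set_indices" | "concat" => format!("{op}({lhs}, {rhs})"),
        _ => format!("{lhs} {op} {rhs}"),                 // e.g. "x + 1"
    }
}
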
RangeOp::SetLength) { + RangeElemString::new( + format!("set_length({}, {})", lhs_str.s, rhs_str.s), + lhs_str.loc, + ) + } else if matches!(self.op, RangeOp::Concat) { + RangeElemString::new(format!("concat({}, {})", lhs_str.s, rhs_str.s), lhs_str.loc) } else { RangeElemString::new( format!("{} {} {}", lhs_str.s, self.op.to_string(), rhs_str.s), diff --git a/crates/graph/src/range/range_trait.rs b/crates/graph/src/range/range_trait.rs new file mode 100644 index 00000000..e5b1bd75 --- /dev/null +++ b/crates/graph/src/range/range_trait.rs @@ -0,0 +1,134 @@ +use crate::FlattenedRange; +use crate::{range::elem::RangeElem, GraphBackend}; +use shared::{NodeIdx, RangeArena}; +use std::{borrow::Cow, hash::Hash}; + +pub trait Range { + type GraphError; + type ElemTy: RangeElem + Clone + Hash; + /// Evaluate both the minimum and the maximum - cache along the way + fn cache_eval( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena, + ) -> Result<(), Self::GraphError>; + /// Evaluate the range minimum + fn evaled_range_min( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena, + ) -> Result; + /// Evaluate the range maximum + fn evaled_range_max( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena, + ) -> Result; + /// Simplify the minimum, leaving references in place + fn simplified_range_min( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena, + ) -> Result; + /// Simplify the maximum, leaving references in place + fn simplified_range_max( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena, + ) -> Result; + /// Return the range minimum + fn range_min(&self) -> std::borrow::Cow<'_, Self::ElemTy>; + /// Return the range maximum + fn range_max(&self) -> std::borrow::Cow<'_, Self::ElemTy>; + /// Uncache the range minimum + fn uncache_range_min(&mut self) { + self.range_min_mut().uncache(); + } + /// Uncache the range maximum + fn uncache_range_max(&mut self) { + self.range_max_mut().uncache(); + } + /// Get a mutable reference to the minimum + fn range_min_mut(&mut self) -> &mut Self::ElemTy; + /// Get a mutable reference to the maximum + fn range_max_mut(&mut self) -> &mut Self::ElemTy; + /// Get the range exclusions + fn range_exclusions(&self) -> Vec + where + Self: std::marker::Sized; + /// Set the range minimum + fn set_range_min(&mut self, new: Self::ElemTy); + /// Set the range maximum + fn set_range_max(&mut self, new: Self::ElemTy); + /// Set the range exclusions + fn set_range_exclusions(&mut self, new: Vec) + where + Self: std::marker::Sized; + /// Add an exclusion value to the range + fn add_range_exclusion(&mut self, new: usize) + where + Self: std::marker::Sized; + /// Replace a potential recursion causing node index with a new index + fn filter_min_recursion( + &mut self, + self_idx: NodeIdx, + new_idx: NodeIdx, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena, + ); + /// Replace a potential recursion causing node index with a new index + fn filter_max_recursion( + &mut self, + self_idx: NodeIdx, + new_idx: NodeIdx, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena, + ); + /// Cache the flattened range + fn cache_flatten( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena, + ) -> Result<(), Self::GraphError>; + /// Produce a flattened range or use the cached flattened range + fn flattened_range<'a>( + &'a mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena, + ) -> Result, Self::GraphError> + where + Self: Sized + Clone; + + fn 
take_flattened_range( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena, + ) -> Result + where + Self: Sized; +} + +pub trait RangeEval + Hash> { + fn sat(&self, analyzer: &impl GraphBackend, arena: &mut RangeArena) -> bool; + fn unsat(&self, analyzer: &impl GraphBackend, arena: &mut RangeArena) -> bool { + !self.sat(analyzer, arena) + } + fn contains( + &self, + other: &Self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena, + ) -> bool; + fn contains_elem( + &self, + other: &T, + analyzer: &impl GraphBackend, + arena: &mut RangeArena, + ) -> bool; + fn overlaps( + &self, + other: &Self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena, + ) -> bool; +} diff --git a/crates/graph/src/range/solc_range.rs b/crates/graph/src/range/solc_range.rs new file mode 100644 index 00000000..eddff194 --- /dev/null +++ b/crates/graph/src/range/solc_range.rs @@ -0,0 +1,887 @@ +use crate::{ + nodes::{Builtin, Concrete, ContextVarNode}, + range::{elem::*, range_string::*, Range, RangeEval}, + AsDotStr, GraphBackend, GraphError, +}; + +use shared::{NodeIdx, RangeArena}; + +use ethers_core::types::{Address, H256, I256, U256}; +use solang_parser::pt::Loc; + +use std::{borrow::Cow, collections::BTreeMap}; + +#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash)] +pub struct FlattenedRange { + pub min: usize, + pub max: usize, + pub exclusions: Vec, +} + +impl From for SolcRange { + fn from(range: FlattenedRange) -> Self { + SolcRange::new( + Elem::Arena(range.min), + Elem::Arena(range.max), + range.exclusions, + ) + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash)] +pub struct SolcRange { + pub min: Elem, + pub min_cached: Option, + pub max: Elem, + pub max_cached: Option, + pub exclusions: Vec, + pub flattened: Option, +} + +impl AsDotStr for SolcRange { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> String { + format!( + "[{}, {}] excluding: [{}]", + self.evaled_range_min(analyzer, arena) + .unwrap() + .to_range_string(false, analyzer, arena) + .s, + self.evaled_range_max(analyzer, arena) + .unwrap() + .to_range_string(true, analyzer, arena) + .s, + self.exclusions + .iter() + .map(|excl| Elem::Arena(*excl).to_range_string(false, analyzer, arena).s) + .collect::>() + .join(", ") + ) + } +} + +impl From for SolcRange { + fn from(b: bool) -> Self { + let val = Elem::Concrete(RangeConcrete::new(Concrete::Bool(b), Loc::Implicit)); + Self::new(val.clone(), val, vec![]) + } +} + +impl From> for SolcRange { + fn from(elem: Elem) -> Self { + Self::new(elem.clone(), elem, vec![]) + } +} + +impl SolcRange { + /// Get all ContextVarNodes that this range references + pub fn dependent_on( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Vec { + let mut deps = self.range_min().dependent_on(analyzer, arena); + deps.extend(self.range_max().dependent_on(analyzer, arena)); + deps.dedup(); + + deps.into_iter().map(ContextVarNode::from).collect() + } + + pub fn recursive_dependent_on( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + let mut deps = self.range_min().recursive_dependent_on(analyzer, arena)?; + deps.extend(self.range_max().recursive_dependent_on(analyzer, arena)?); + deps.dedup(); + + Ok(deps) + } + + pub fn new(min: Elem, max: Elem, exclusions: Vec) -> Self { + Self { + min, + min_cached: None, + max, + max_cached: None, + exclusions, + flattened: None, + } + } + + pub fn replace_dep( + &mut self, + to_replace: NodeIdx, + 
replacement: Elem, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) { + if let Some(ref mut flattened) = &mut self.flattened { + Elem::Arena(flattened.min).replace_dep( + to_replace, + replacement.clone(), + analyzer, + arena, + ); + Elem::Arena(flattened.max).replace_dep( + to_replace, + replacement.clone(), + analyzer, + arena, + ); + } + self.min + .replace_dep(to_replace, replacement.clone(), analyzer, arena); + self.max + .replace_dep(to_replace, replacement, analyzer, arena); + self.min_cached = None; + self.max_cached = None; + } + + pub fn is_const( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + let min = self.evaled_range_min(analyzer, arena)?; + let max = self.evaled_range_max(analyzer, arena)?; + Ok(min.range_eq(&max, arena)) + } + + pub fn min_is_negative( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + self.min.is_negative(false, analyzer, arena) + } + + pub fn default_bool() -> Self { + let min = Elem::Concrete(RangeConcrete::new(Concrete::Bool(false), Loc::Implicit)); + let max = Elem::Concrete(RangeConcrete::new(Concrete::Bool(true), Loc::Implicit)); + Self::new(min, max, vec![]) + } + pub fn from(c: Concrete) -> Option { + match c { + c @ Concrete::Uint(_, _) + | c @ Concrete::Int(_, _) + | c @ Concrete::Bool(_) + | c @ Concrete::Address(_) + | c @ Concrete::Bytes(_, _) => Some(SolcRange::new( + Elem::Concrete(RangeConcrete::new(c.clone(), Loc::Implicit)), + Elem::Concrete(RangeConcrete::new(c, Loc::Implicit)), + vec![], + )), + Concrete::String(s) => { + let val = s + .chars() + .enumerate() + .map(|(i, v)| { + let idx = Elem::from(Concrete::from(U256::from(i))); + let mut bytes = [0x00; 32]; + v.encode_utf8(&mut bytes[..]); + let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); + (idx, v) + }) + .collect::>(); + let r = Elem::ConcreteDyn(RangeDyn::new( + Elem::from(Concrete::from(U256::from(s.len()))), + val, + Loc::Implicit, + )); + Some(SolcRange::new(r.clone(), r, vec![])) + } + Concrete::DynBytes(b) => { + let val = b + .iter() + .enumerate() + .map(|(i, v)| { + let idx = Elem::from(Concrete::from(U256::from(i))); + let mut bytes = [0x00; 32]; + bytes[0] = *v; + let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); + (idx, v) + }) + .collect::>(); + let r = Elem::ConcreteDyn(RangeDyn::new( + Elem::from(Concrete::from(U256::from(b.len()))), + val, + Loc::Implicit, + )); + Some(SolcRange::new(r.clone(), r, vec![])) + } + _e => None, + } + } + + pub fn try_from_builtin(builtin: &Builtin) -> Option { + match builtin { + Builtin::Uint(size) => { + if *size == 256 { + Some(SolcRange::new( + Elem::Concrete(RangeConcrete::new( + Concrete::Uint(*size, 0.into()), + Loc::Implicit, + )), + Elem::Concrete(RangeConcrete::new( + Concrete::Uint(*size, U256::MAX), + Loc::Implicit, + )), + vec![], + )) + } else { + Some(SolcRange::new( + Elem::Concrete(RangeConcrete::new( + Concrete::Uint(*size, 0.into()), + Loc::Implicit, + )), + Elem::Concrete(RangeConcrete::new( + Concrete::Uint(*size, U256::from(2).pow(U256::from(*size)) - 1), + Loc::Implicit, + )), + vec![], + )) + } + } + Builtin::Int(size) => { + if *size == 256 { + Some(SolcRange::new( + Elem::Concrete(RangeConcrete::new( + Concrete::Int(*size, I256::MIN), + Loc::Implicit, + )), + Elem::Concrete(RangeConcrete::new( + Concrete::Int(*size, I256::MAX), + Loc::Implicit, + )), + vec![], + )) + } else { + let max: I256 = + I256::from_raw(U256::from(1u8) << U256::from(size - 1)) - I256::from(1); + let min = max * 
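
// Worked, standalone version of the bound computation in try_from_builtin (i128 so it
// stays runnable for small widths): for int<N>, max = 2^(N-1) - 1 and
// min = -max - 1 = -2^(N-1); for uint<N> the range is [0, 2^N - 1].
fn int_bounds(bits: u32) -> (i128, i128) {
    assert!(bits >= 8 && bits < 128);
    let max = (1i128 << (bits - 1)) - 1;
    let min = -max - 1;
    (min, max)
}
// int_bounds(8)  => (-128, 127)
// int_bounds(16) => (-32768, 32767)
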
I256::from(-1i32) - I256::from(1i32); + Some(SolcRange::new( + Elem::Concrete(RangeConcrete::new( + Concrete::Int(*size, min), + Loc::Implicit, + )), + Elem::Concrete(RangeConcrete::new( + Concrete::Int(*size, max), + Loc::Implicit, + )), + vec![], + )) + } + } + Builtin::Bool => Some(SolcRange::new( + Elem::Concrete(RangeConcrete::new(Concrete::Bool(false), Loc::Implicit)), + Elem::Concrete(RangeConcrete::new(Concrete::Bool(true), Loc::Implicit)), + vec![], + )), + Builtin::Address | Builtin::Payable | Builtin::AddressPayable => Some(SolcRange::new( + Elem::Concrete(RangeConcrete::new( + Concrete::Address(Address::from_slice(&[0x00; 20])), + Loc::Implicit, + )), + Elem::Concrete(RangeConcrete::new( + Concrete::Address(Address::from_slice(&[0xff; 20])), + Loc::Implicit, + )), + vec![], + )), + Builtin::Bytes(size) => { + let v: Vec<_> = (0..32u8) + .map(|i| if i < *size { 0xff } else { 0x00 }) + .collect(); + Some(SolcRange::new( + Elem::Concrete(RangeConcrete::new( + Concrete::Bytes(*size, H256::from_slice(&[0x00; 32])), + Loc::Implicit, + )), + Elem::Concrete(RangeConcrete::new( + Concrete::Bytes(*size, H256::from_slice(&v[..])), + Loc::Implicit, + )), + vec![], + )) + } + Builtin::DynamicBytes + | Builtin::String + | Builtin::Array(_) + | Builtin::Mapping(_, _) => Some(SolcRange::new( + Elem::ConcreteDyn(RangeDyn::new( + Elem::from(Concrete::from(U256::zero())), + Default::default(), + Loc::Implicit, + )), + Elem::ConcreteDyn(RangeDyn::new( + Elem::from(Concrete::from(U256::MAX)), + Default::default(), + Loc::Implicit, + )), + vec![], + )), + Builtin::SizedArray(s, _) => Some(SolcRange::new( + Elem::ConcreteDyn(RangeDyn::new( + Elem::from(Concrete::from(*s)), + Default::default(), + Loc::Implicit, + )), + Elem::ConcreteDyn(RangeDyn::new( + Elem::from(Concrete::from(*s)), + Default::default(), + Loc::Implicit, + )), + vec![], + )), + _ => None, + } + } + + pub fn lte_dyn(self, other: ContextVarNode) -> Self { + Self::new(self.min, self.max.min(Elem::from(other)), self.exclusions) + } + + pub fn gte_dyn(self, other: ContextVarNode) -> Self { + Self::new(self.min.max(Elem::from(other)), self.max, self.exclusions) + } + + pub fn lt_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min, + self.max.min( + Elem::from(other) + - Elem::Concrete(RangeConcrete::new(U256::from(1).into(), Loc::Implicit)), + ), + self.exclusions, + ) + } + + pub fn gt_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min.max( + Elem::from(other) + + Elem::Concrete(RangeConcrete::new(U256::from(1).into(), Loc::Implicit)), + ), + self.max, + self.exclusions, + ) + } + + pub fn dyn_fn_from_op(op: RangeOp) -> &'static dyn Fn(SolcRange, ContextVarNode) -> SolcRange { + match op { + RangeOp::Add(false) => &Self::add_dyn, + RangeOp::Add(true) => &Self::wrapping_add_dyn, + RangeOp::Sub(false) => &Self::sub_dyn, + RangeOp::Sub(true) => &Self::wrapping_sub_dyn, + RangeOp::Mul(false) => &Self::mul_dyn, + RangeOp::Mul(true) => &Self::wrapping_mul_dyn, + RangeOp::Div(false) => &Self::div_dyn, + RangeOp::Div(true) => &Self::wrapping_mul_dyn, + RangeOp::Shr => &Self::shr_dyn, + RangeOp::Shl => &Self::shl_dyn, + RangeOp::Mod => &Self::mod_dyn, + RangeOp::Min => &Self::min_dyn, + RangeOp::Max => &Self::max_dyn, + RangeOp::Lt => &Self::lt_dyn, + RangeOp::Lte => &Self::lte_dyn, + RangeOp::Gt => &Self::gt_dyn, + RangeOp::Gte => &Self::gte_dyn, + RangeOp::Eq => &Self::eq_dyn, + RangeOp::Neq => &Self::neq_dyn, + RangeOp::Exp => &Self::exp_dyn, + RangeOp::BitAnd => &Self::bit_and_dyn, + RangeOp::BitOr => 
&Self::bit_or_dyn, + RangeOp::BitXor => &Self::bit_xor_dyn, + e => unreachable!("Comparator operations shouldn't exist in a range: {:?}", e), + } + } + + pub fn add_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min + Elem::from(other), + self.max + Elem::from(other), + self.exclusions, + ) + } + + pub fn wrapping_add_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min.wrapping_add(Elem::from(other)), + self.max.wrapping_add(Elem::from(other)), + self.exclusions, + ) + } + + pub fn sub_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min - Elem::from(other), + self.max - Elem::from(other), + self.exclusions, + ) + } + + pub fn wrapping_sub_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min.wrapping_sub(Elem::from(other)), + self.max.wrapping_sub(Elem::from(other)), + self.exclusions, + ) + } + + pub fn mul_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min * Elem::from(other), + self.max * Elem::from(other), + self.exclusions, + ) + } + + pub fn wrapping_mul_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min.wrapping_mul(Elem::from(other)), + self.max.wrapping_mul(Elem::from(other)), + self.exclusions, + ) + } + + pub fn exp_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min.pow(Elem::from(other)), + self.max.pow(Elem::from(other)), + self.exclusions, + ) + } + + pub fn bit_and_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min & Elem::from(other), + self.max & Elem::from(other), + self.exclusions, + ) + } + + pub fn bit_or_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min | Elem::from(other), + self.max | Elem::from(other), + self.exclusions, + ) + } + + pub fn bit_xor_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min ^ Elem::from(other), + self.max ^ Elem::from(other), + self.exclusions, + ) + } + + pub fn div_dyn(self, other: ContextVarNode) -> Self { + let elem = Elem::from(other); + Self::new(self.min / elem.clone(), self.max / elem, self.exclusions) + } + + pub fn wrapping_div_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min.wrapping_div(Elem::from(other)), + self.max.wrapping_div(Elem::from(other)), + self.exclusions, + ) + } + + pub fn shl_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min << Elem::from(other), + self.max << Elem::from(other), + self.exclusions, + ) + } + + pub fn shr_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min >> Elem::from(other), + self.max >> Elem::from(other), + self.exclusions, + ) + } + + pub fn mod_dyn(self, other: ContextVarNode) -> Self { + let elem = Elem::from(other); + Self::new( + Elem::from(Concrete::from(U256::zero())), + elem.clone() - Elem::from(Concrete::from(U256::from(1))).cast(elem), + self.exclusions, + ) + } + + pub fn min_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min.min(Elem::from(other)), + self.max.min(Elem::from(other)), + self.exclusions, + ) + } + + pub fn max_dyn(self, other: ContextVarNode) -> Self { + Self::new( + self.min.max(Elem::from(other)), + self.max.max(Elem::from(other)), + self.exclusions, + ) + } + + pub fn eq_dyn(self, other: ContextVarNode) -> Self { + let min = self.min.eq(Elem::from(other)); + let max = self.max.eq(Elem::from(other)); + Self::new(min.clone().max(max.clone()), min.max(max), self.exclusions) + } + + pub fn neq_dyn(self, other: ContextVarNode) -> Self { + let min = self.min.neq(Elem::from(other)); + let max = self.max.neq(Elem::from(other)); + 
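
// Standalone sketch of the dispatch-table pattern used by dyn_fn_from_op earlier in this
// impl: resolve an operator tag to a function pointer once, then apply it uniformly.
// Toy i64 arithmetic here, not SolcRange/ContextVarNode.
fn dispatch(op: char) -> fn(i64, i64) -> i64 {
    match op {
        '+' => |a, b| a.wrapping_add(b),
        '-' => |a, b| a.wrapping_sub(b),
        '*' => |a, b| a.wrapping_mul(b),
        _ => |a, _| a, // unknown ops pass the lhs through in this sketch
    }
}
// dispatch('+')(2, 3) => 5
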
Self::new(min.clone().max(max.clone()), min.max(max), self.exclusions) + } + + pub fn into_flattened_range( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + if let Some(cached) = &self.flattened { + return Ok(cached.clone()); + } + + let mut min = Elem::Arena(arena.idx_or_upsert(self.min.clone(), analyzer)); + let mut max = Elem::Arena(arena.idx_or_upsert(self.max.clone(), analyzer)); + min.cache_flatten(analyzer, arena)?; + max.cache_flatten(analyzer, arena)?; + + self.min = min.clone(); + self.max = max.clone(); + + let simp_min = min.simplify_minimize(analyzer, arena)?; + let simp_max = max.simplify_maximize(analyzer, arena)?; + let min = arena.idx_or_upsert(simp_min, analyzer); + let max = arena.idx_or_upsert(simp_max, analyzer); + + let flat_range = FlattenedRange { + min, + max, + exclusions: self.exclusions.clone(), + }; + self.flattened = Some(flat_range.clone()); + + Ok(flat_range) + } +} + +impl Range for SolcRange { + type GraphError = GraphError; + type ElemTy = Elem; + fn range_min(&self) -> std::borrow::Cow<'_, Self::ElemTy> { + std::borrow::Cow::Borrowed(&self.min) + } + fn range_max(&self) -> std::borrow::Cow<'_, Self::ElemTy> { + std::borrow::Cow::Borrowed(&self.max) + } + fn range_min_mut(&mut self) -> &mut Self::ElemTy { + &mut self.min + } + fn range_max_mut(&mut self) -> &mut Self::ElemTy { + &mut self.max + } + + fn cache_eval( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), GraphError> { + let min = std::mem::take(&mut self.min); + let max = std::mem::take(&mut self.max); + self.min = Elem::Arena(arena.idx_or_upsert(min, analyzer)); + self.max = Elem::Arena(arena.idx_or_upsert(max, analyzer)); + if self.max_cached.is_none() { + let max = self.range_max_mut(); + max.cache_maximize(analyzer, arena)?; + let res = self.range_max().maximize(analyzer, arena)?; + self.max_cached = Some(arena.idx_or_upsert(res, analyzer)); + } + if self.min_cached.is_none() { + let min = self.range_min_mut(); + min.cache_minimize(analyzer, arena)?; + let res = self.range_min().minimize(analyzer, arena)?; + self.min_cached = Some(arena.idx_or_upsert(res, analyzer)); + } + Ok(()) + } + + fn evaled_range_min( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + if let Some(cached) = &self.min_cached { + Ok(Elem::Arena(*cached).dearenaize_clone(arena)) + } else { + self.range_min().minimize(analyzer, arena) + } + } + + fn evaled_range_max( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + if let Some(cached) = &self.max_cached { + Ok(Elem::Arena(*cached).dearenaize_clone(arena)) + } else { + self.range_max().maximize(analyzer, arena) + } + } + + fn simplified_range_min( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + self.range_min() + .flatten(false, analyzer, arena)? + .simplify_minimize(analyzer, arena) + } + fn simplified_range_max( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + self.range_max() + .flatten(true, analyzer, arena)? 
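
// Standalone sketch of the caching discipline used by into_flattened_range and
// cache_eval above: the expensive simplification runs once, the result is stored on the
// range, and later callers reuse it until a setter invalidates it (as set_range_min and
// set_range_max do below by clearing the cached fields).
struct CachedRange {
    min: i64,
    max: i64,
    flattened: Option<(i64, i64)>,
}

impl CachedRange {
    fn flattened(&mut self) -> (i64, i64) {
        if let Some(cached) = self.flattened {
            return cached; // reuse previous work
        }
        let computed = (self.min, self.max); // stand-in for the real simplification
        self.flattened = Some(computed);
        computed
    }

    fn set_min(&mut self, new: i64) {
        self.min = new;
        self.flattened = None; // the cached result is stale now
    }
}
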
+ .simplify_maximize(analyzer, arena) + } + + fn range_exclusions(&self) -> Vec { + self.exclusions + .clone() + .into_iter() + .map(Elem::Arena) + .collect() + } + fn set_range_min(&mut self, new: Self::ElemTy) { + self.min_cached = None; + self.flattened = None; + self.min = new; + } + fn set_range_max(&mut self, new: Self::ElemTy) { + self.max_cached = None; + self.flattened = None; + self.max = new; + } + + fn add_range_exclusion(&mut self, new: usize) { + if !self.exclusions.contains(&new) { + self.exclusions.push(new); + } + } + fn set_range_exclusions(&mut self, new: Vec) { + self.exclusions = new; + } + fn filter_min_recursion( + &mut self, + self_idx: NodeIdx, + new_idx: NodeIdx, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) { + self.min + .filter_recursion(self_idx, new_idx, analyzer, arena); + } + fn filter_max_recursion( + &mut self, + self_idx: NodeIdx, + new_idx: NodeIdx, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) { + self.max + .filter_recursion(self_idx, new_idx, analyzer, arena); + } + + fn cache_flatten( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(), Self::GraphError> { + if self.flattened.is_none() { + self.into_flattened_range(analyzer, arena)?; + } + Ok(()) + } + /// Produce a flattened range or use the cached flattened range + fn flattened_range<'a>( + &'a mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, Self::GraphError> + where + Self: Sized + Clone, + { + if self.flattened.is_none() { + self.cache_flatten(analyzer, arena)?; + let Some(flat) = &self.flattened else { + unreachable!(); + }; + return Ok(Cow::Borrowed(flat)); + } else if let Some(flat) = &self.flattened { + return Ok(Cow::Borrowed(flat)); + } else { + unreachable!() + } + } + + /// Produce a flattened range or use the cached flattened range + fn take_flattened_range( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result + where + Self: Sized, + { + let taken = std::mem::take(&mut self.flattened); + if let Some(flat) = taken { + Ok(flat) + } else { + self.cache_flatten(analyzer, arena)?; + self.take_flattened_range(analyzer, arena) + } + } +} + +impl RangeEval> for SolcRange { + #[tracing::instrument(level = "trace", skip_all)] + fn sat(&self, analyzer: &impl GraphBackend, arena: &mut RangeArena>) -> bool { + matches!( + self.evaled_range_min(analyzer, arena) + .unwrap() + .range_ord(&self.evaled_range_max(analyzer, arena).unwrap(), arena), + None | Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) + ) + } + + fn contains( + &self, + other: &Self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> bool { + let min_contains = matches!( + self.evaled_range_min(analyzer, arena) + .unwrap() + .range_ord(&other.evaled_range_min(analyzer, arena).unwrap(), arena), + Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) + ); + + let max_contains = matches!( + self.evaled_range_max(analyzer, arena) + .unwrap() + .range_ord(&other.evaled_range_max(analyzer, arena).unwrap(), arena), + Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) + ); + + min_contains && max_contains + } + + fn contains_elem( + &self, + other: &Elem, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> bool { + let min_contains = match self + .evaled_range_min(analyzer, arena) + .unwrap() + .range_ord(&other.minimize(analyzer, arena).unwrap(), arena) + { + Some(std::cmp::Ordering::Less) => true, + 
Some(std::cmp::Ordering::Equal) => return true, + _ => false, + }; + + let max_contains = match self + .evaled_range_max(analyzer, arena) + .unwrap() + .range_ord(&other.maximize(analyzer, arena).unwrap(), arena) + { + Some(std::cmp::Ordering::Greater) => true, + Some(std::cmp::Ordering::Equal) => return true, + _ => false, + }; + + min_contains && max_contains + } + + fn overlaps( + &self, + other: &Self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> bool { + let lhs_min = self.evaled_range_min(analyzer, arena).unwrap(); + let rhs_max = other.evaled_range_max(analyzer, arena).unwrap(); + + match lhs_min.range_ord(&rhs_max, arena) { + Some(std::cmp::Ordering::Less) => { + // we know our min is less than the other max + // check that the max is greater than or eq their min + let lhs_max = self.evaled_range_max(analyzer, arena).unwrap(); + let rhs_min = other.evaled_range_min(analyzer, arena).unwrap(); + matches!( + lhs_max.range_ord(&rhs_min, arena), + Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) + ) + } + Some(std::cmp::Ordering::Equal) => true, + _ => false, + } + } +} + +impl RangeEval> for FlattenedRange { + fn sat(&self, analyzer: &impl GraphBackend, arena: &mut RangeArena>) -> bool { + >::into(self.clone()).sat(analyzer, arena) + } + fn unsat(&self, analyzer: &impl GraphBackend, arena: &mut RangeArena>) -> bool { + !self.sat(analyzer, arena) + } + fn contains( + &self, + other: &Self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> bool { + let other = >::into(other.clone()); + >::into(self.clone()).contains(&other, analyzer, arena) + } + fn contains_elem( + &self, + other: &Elem, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> bool { + >::into(self.clone()) + .contains_elem(other, analyzer, arena) + } + fn overlaps( + &self, + other: &Self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> bool { + let other = >::into(other.clone()); + >::into(self.clone()).overlaps(&other, analyzer, arena) + } +} diff --git a/crates/graph/src/solvers/atoms.rs b/crates/graph/src/solvers/atoms.rs new file mode 100644 index 00000000..27f0d3c3 --- /dev/null +++ b/crates/graph/src/solvers/atoms.rs @@ -0,0 +1,430 @@ +use crate::elem::{collapse, MaybeCollapsed}; +use crate::range::exec_traits::ExecOp; +use crate::{ + nodes::{Concrete, ContextVarNode}, + range::{ + elem::{Elem, RangeElem, RangeExpr, RangeOp, Reference}, + range_string::{RangeElemString, ToRangeString}, + }, + GraphBackend, +}; +use shared::RangeArena; + +use ethers_core::types::U256; +use std::{collections::BTreeMap, rc::Rc}; + +#[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)] +pub enum AtomOrPart { + Part(Elem), + Atom(SolverAtom), +} + +impl AtomOrPart { + pub fn into_elem(&self) -> Elem { + match self { + AtomOrPart::Part(part) => part.clone(), + AtomOrPart::Atom(atom) => atom.into_expr_elem(), + } + } + + pub fn as_solver_atom(&self) -> SolverAtom { + match self { + AtomOrPart::Part(_) => SolverAtom { + ty: OpType::DL, + lhs: Rc::new(self.clone()), + op: RangeOp::Sub(false), + rhs: Rc::new(AtomOrPart::Part(Elem::from(Concrete::from(U256::zero())))), + }, + AtomOrPart::Atom(atom) => atom.clone(), + } + } + + pub fn replace_deps( + &self, + solves: &BTreeMap>, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Self { + match self { + AtomOrPart::Part(part) => { + let mut new_part = part.clone(); + solves.iter().for_each(|(dep, replacement)| { + new_part.replace_dep(dep.0.into(), replacement.clone(), analyzer, 
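
// Standalone i64 sketch of the interval logic implemented by RangeEval::contains and
// RangeEval::overlaps above: containment compares like bounds, overlap compares
// opposite bounds.
fn contains(outer: (i64, i64), inner: (i64, i64)) -> bool {
    outer.0 <= inner.0 && outer.1 >= inner.1
}

fn overlaps(a: (i64, i64), b: (i64, i64)) -> bool {
    a.0 <= b.1 && a.1 >= b.0
}
// contains((0, 100), (5, 50)) => true
// overlaps((0, 10), (5, 20))  => true
// overlaps((0, 4),  (5, 20))  => false
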
arena) + }); + AtomOrPart::Part(new_part) + } + AtomOrPart::Atom(atom) => AtomOrPart::Atom(atom.replace_deps(solves, analyzer, arena)), + } + } + + pub fn maybe_max_ty(&self) -> Option { + match self { + AtomOrPart::Part(_part) => None, + AtomOrPart::Atom(atom) => Some(atom.max_ty()), + } + } + + pub fn is_part(&self) -> bool { + matches!(self, AtomOrPart::Part(_)) + } + + pub fn is_atom(&self) -> bool { + matches!(self, AtomOrPart::Atom(_)) + } + + pub fn expect_atom(&self) -> SolverAtom { + if let AtomOrPart::Atom(atom) = self { + atom.clone() + } else { + panic!("Expected atom, was part: {self:?}") + } + } + + pub fn expect_part(&self) -> Elem { + if let AtomOrPart::Part(part) = self { + part.clone() + } else { + panic!("Expected part, was atom: {self:?}") + } + } + + pub fn dependent_on( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Vec { + match self { + AtomOrPart::Part(e) => e.dependent_on(analyzer, arena), + AtomOrPart::Atom(a) => a.dependent_on(analyzer, arena), + } + } +} + +#[repr(u8)] +#[derive(Debug, Clone, Copy, Ord, PartialOrd, Eq, PartialEq, Hash)] +pub enum OpType { + Const, + DL, + Linear, + Other, +} + +impl OpType { + pub fn new(op: RangeOp) -> Self { + if LIA_OPS.contains(&op) { + OpType::Linear + } else if DL_OPS.contains(&op) { + OpType::DL + } else if CONST_OPS.contains(&op) { + OpType::Const + } else { + OpType::Other + } + } +} + +#[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)] +pub struct SolverAtom { + pub ty: OpType, + pub lhs: Rc, + pub op: RangeOp, + pub rhs: Rc, +} + +impl ToRangeString for SolverAtom { + fn def_string( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> RangeElemString { + self.into_expr_elem().def_string(analyzer, arena) + } + fn to_range_string( + &self, + maximize: bool, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> RangeElemString { + self.into_expr_elem() + .to_range_string(maximize, analyzer, arena) + } +} + +impl SolverAtom { + pub fn assert_nonnull(&self) { + self.lhs.into_elem().assert_nonnull(); + self.rhs.into_elem().assert_nonnull(); + } + + pub fn replace_deps( + &self, + solves: &BTreeMap>, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Self { + SolverAtom { + ty: self.ty, + lhs: Rc::new(self.lhs.clone().replace_deps(solves, analyzer, arena)), + op: self.op, + rhs: Rc::new(self.rhs.clone().replace_deps(solves, analyzer, arena)), + } + } + + pub fn max_ty(&self) -> OpType { + let mut max = OpType::new(self.op); + if let Some(lhs_max_ty) = self.lhs.maybe_max_ty() { + if lhs_max_ty > max { + max = lhs_max_ty; + } + } + if let Some(rhs_max_ty) = self.rhs.maybe_max_ty() { + if rhs_max_ty > max { + max = rhs_max_ty; + } + } + max + } + + pub fn update_max_ty(&mut self) { + self.ty = self.max_ty(); + } + + pub fn dependent_on( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Vec { + let mut deps = self.lhs.dependent_on(analyzer, arena); + deps.extend(self.rhs.dependent_on(analyzer, arena)); + deps + } + + pub fn into_expr_elem(&self) -> Elem { + Elem::Expr(RangeExpr::new( + self.lhs.into_elem(), + self.op, + self.rhs.into_elem(), + )) + } + + pub fn add_rhs(&self, op: RangeOp, rhs: AtomOrPart) -> Self { + let new_ty = OpType::new(op); + if self.ty >= new_ty { + // keep ty + Self { + ty: self.ty, + lhs: Rc::new(AtomOrPart::Atom(self.clone())), + op, + rhs: Rc::new(rhs), + } + } else { + Self { + ty: new_ty, + lhs: Rc::new(AtomOrPart::Atom(self.clone())), + op, + rhs: Rc::new(rhs), + } + } + } + + pub 
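+    // counterpart of `add_rhs`: `self` becomes the right-hand operand and the new atom keeps the greater of the two OpTypes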
fn add_lhs(&self, op: RangeOp, lhs: AtomOrPart) -> Self { + let new_ty = OpType::new(op); + + if self.ty >= new_ty { + // keep ty + Self { + ty: self.ty, + lhs: Rc::new(lhs), + op, + rhs: Rc::new(AtomOrPart::Atom(self.clone())), + } + } else { + Self { + ty: new_ty, + lhs: Rc::new(lhs), + op, + rhs: Rc::new(AtomOrPart::Atom(self.clone())), + } + } + } +} + +pub static CONST_OPS: &[RangeOp] = &[RangeOp::Eq]; +pub static DL_OPS: &[RangeOp] = &[ + RangeOp::Neq, + RangeOp::Add(true), + RangeOp::Add(false), + RangeOp::Sub(true), + RangeOp::Sub(false), + RangeOp::Lt, + RangeOp::Lte, + RangeOp::Gt, + RangeOp::Gte, + RangeOp::And, + RangeOp::Or, +]; +pub static LIA_OPS: &[RangeOp] = &[ + RangeOp::Mul(true), + RangeOp::Mul(false), + RangeOp::Div(true), + RangeOp::Div(false), + RangeOp::Mod, + RangeOp::Exp, +]; + +pub trait Atomize { + fn atoms_or_part( + &self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> AtomOrPart; + fn atomize( + &self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Option; +} + +impl Atomize for Elem { + #[tracing::instrument(level = "trace", skip_all)] + fn atoms_or_part( + &self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> AtomOrPart { + match self { + Elem::Arena(_) => self.dearenaize_clone(arena).atoms_or_part(analyzer, arena), + Elem::Concrete(_) | Elem::Reference(_) => AtomOrPart::Part(self.clone()), + Elem::ConcreteDyn(_) => AtomOrPart::Part(self.clone()), + _e @ Elem::Expr(expr) => { + // println!("collapsing: {e}"); + match collapse(*expr.lhs.clone(), expr.op, *expr.rhs.clone(), arena) { + MaybeCollapsed::Concretes(_l, _r) => { + let exec_res = expr.exec_op(true, analyzer, arena).unwrap(); + return exec_res.atoms_or_part(analyzer, arena); + } + MaybeCollapsed::Collapsed(elem) => { + return elem.atoms_or_part(analyzer, arena); + } + MaybeCollapsed::Not(..) => {} + } + + match ( + expr.lhs.atoms_or_part(analyzer, arena), + expr.rhs.atoms_or_part(analyzer, arena), + ) { + (ref lp @ AtomOrPart::Part(ref l), ref rp @ AtomOrPart::Part(ref r)) => { + // println!("part part"); + match (l, r) { + (_, Elem::Arena(_)) => todo!(), + (Elem::Arena(_), _) => todo!(), + (Elem::Reference(Reference { .. }), Elem::Concrete(_)) + | (Elem::Concrete(_), Elem::Reference(Reference { .. })) => { + let ty = OpType::new(expr.op); + let atom = SolverAtom { + ty, + lhs: Rc::new(lp.clone()), + op: expr.op, + rhs: Rc::new(rp.clone()), + }; + AtomOrPart::Atom(atom) + } + ( + Elem::Reference(Reference { .. }), + Elem::Reference(Reference { .. }), + ) => { + let ty = if DL_OPS.contains(&expr.op) { + OpType::DL + } else if CONST_OPS.contains(&expr.op) { + OpType::Const + } else { + OpType::Other + }; + let atom = SolverAtom { + ty, + lhs: Rc::new(lp.clone()), + op: expr.op, + rhs: Rc::new(rp.clone()), + }; + AtomOrPart::Atom(atom) + } + (Elem::Expr(_), Elem::Expr(_)) => { + todo!("here"); + } + (Elem::Expr(_), Elem::Reference(Reference { .. })) => { + todo!("here1"); + } + (Elem::Reference(Reference { .. 
}), Elem::Expr(_)) => { + todo!("here2"); + } + (Elem::Expr(_), Elem::Concrete(_)) => { + todo!("here3"); + } + (Elem::Concrete(_), Elem::Expr(_)) => { + todo!("here4"); + } + (Elem::Concrete(_), Elem::Concrete(_)) => { + let _ = expr.clone().arenaize(analyzer, arena); + let res = expr.exec_op(true, analyzer, arena).unwrap(); + if res == Elem::Expr(expr.clone()) { + AtomOrPart::Part(res) + } else { + res.atoms_or_part(analyzer, arena) + } + } + (Elem::ConcreteDyn(_), _) => AtomOrPart::Part(Elem::Null), + (_, Elem::ConcreteDyn(_)) => AtomOrPart::Part(Elem::Null), + (Elem::Null, _) => AtomOrPart::Part(Elem::Null), + (_, Elem::Null) => AtomOrPart::Part(Elem::Null), + } + } + (AtomOrPart::Atom(l_atom), r @ AtomOrPart::Part(_)) => { + // println!("atom part"); + + AtomOrPart::Atom(l_atom.add_rhs(expr.op, r)) + } + (l @ AtomOrPart::Part(_), AtomOrPart::Atom(r_atom)) => { + // println!("part atom"); + AtomOrPart::Atom(r_atom.add_lhs(expr.op, l)) + } + (AtomOrPart::Atom(l_atoms), AtomOrPart::Atom(r_atoms)) => { + // println!("atom atom"); + AtomOrPart::Atom(r_atoms.add_lhs(expr.op, AtomOrPart::Atom(l_atoms))) + } + } + } + Elem::Null => AtomOrPart::Part(self.clone()), + } + } + + #[tracing::instrument(level = "trace", skip_all)] + fn atomize( + &self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Option { + use Elem::*; + tracing::trace!("atomize: {}", self); + match self { + Reference(_) => None, //{ println!("was dyn"); None}, + Null => None, //{ println!("was null"); None}, + Concrete(_c) => None, //{ println!("was conc: {}", _c.val.as_human_string()); None }, + ConcreteDyn(_) => None, //{ println!("was concDyn"); None}, + Expr(_) => { + // println!("atomized: was expr"); + let AtomOrPart::Atom(mut a) = self.atoms_or_part(analyzer, arena) else { + // println!("returning none"); + return None; + }; + a.update_max_ty(); + Some(a) + } + Arena(_) => { + let (dearenized, idx) = self.dearenaize(arena); + let res = dearenized.atomize(analyzer, arena); + self.rearenaize(dearenized, idx, arena); + res + } + } + } +} diff --git a/crates/graph/src/solvers/brute.rs b/crates/graph/src/solvers/brute.rs new file mode 100644 index 00000000..a810c170 --- /dev/null +++ b/crates/graph/src/solvers/brute.rs @@ -0,0 +1,941 @@ +use crate::{ + elem::{Elem, RangeElem}, + nodes::{Concrete, ContextVarNode, VarNode}, + solvers::{ + dl::{DLSolver, SolveStatus}, + Atomize, SolverAtom, + }, + AnalyzerBackend, GraphBackend, GraphError, Range, RangeEval, SolcRange, +}; + +use shared::RangeArena; + +use ethers_core::types::U256; +use std::collections::BTreeMap; + +pub trait SolcSolver { + fn simplify(&mut self, analyzer: &impl AnalyzerBackend, arena: &mut RangeArena>); + fn solve( + &mut self, + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, + ) -> Result; + fn recurse_check( + &mut self, + idx: usize, + solved_atomics: &mut Vec, + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, + ) -> Result; + fn check( + &mut self, + solved_for: usize, + lmr: (Elem, Elem, Elem), + solved_atomics: &mut Vec, + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, + ) -> Result<(bool, Option), GraphError>; +} + +pub enum AtomicSolveStatus { + Unsat, + Sat(AtomicSolveMap), + Indeterminate, +} + +pub type AtomicSolveMap = BTreeMap; + +#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq)] +pub struct Atomic { + pub idxs: Vec, +} + +#[derive(Clone, Debug)] +pub struct BruteBinSearchSolver { + pub deps: Vec, + pub solves: BTreeMap>, + pub atomics: Vec, + // This is private due to 
wanting to ensure we construct the ranges correctly via `as_simplified_range` + ranges: BTreeMap, + atomic_ranges: BTreeMap, + pub lmrs: Vec, + pub intermediate_ranges: BTreeMap, + pub intermediate_atomic_ranges: BTreeMap, + pub sat: bool, + pub start_idx: usize, + pub successful_passes: usize, +} + +#[derive(Clone, Debug)] +pub struct LMR { + pub low: Elem, + pub mid: Elem, + pub high: Elem, +} + +impl From<(Elem, Elem, Elem)> for LMR { + fn from((low, mid, high): (Elem, Elem, Elem)) -> Self { + Self { low, mid, high } + } +} + +pub enum HintOrRanges { + Higher, + Lower, + Ranges(BTreeMap), +} + +impl BruteBinSearchSolver { + pub fn maybe_new( + deps: Vec, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, GraphError> { + let mut atomic_idxs = vec![]; + + let mut ranges = BTreeMap::default(); + let mut atomic_ranges = BTreeMap::default(); + deps.iter().try_for_each(|dep| { + let mut range = dep.range(analyzer)?.unwrap(); + if range.unsat(analyzer, arena) { + panic!( + "initial range for {} not sat", + dep.display_name(analyzer).unwrap() + ); + } + let r: SolcRange = range.flattened_range(analyzer, arena)?.into_owned().into(); + atomic_idxs.extend(r.dependent_on(analyzer, arena)); + ranges.insert(*dep, r); + Ok(()) + })?; + + // Sometimes a storage variable will be split due to a context fork. We recombine them here + atomic_idxs.sort(); + atomic_idxs.dedup(); + // atomic_idxs.iter().for_each(|dep| { + // println!( + // "atomic dep: {} - {}", + // dep.display_name(analyzer).unwrap(), + // dep.0 + // ) + // }); + // let atomics = atomic_idxs; + let mut storage_atomics: BTreeMap> = BTreeMap::default(); + let mut calldata_atomics = vec![]; + atomic_idxs.into_iter().try_for_each(|atomic| { + if atomic.is_storage(analyzer)? { + // its a storage variable, get the parent var + if atomic.is_dyn(analyzer)? 
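+                    // dynamic storage atomics (e.g. mappings/arrays) fall through to the empty branch below and are neither grouped nor solved for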
{ + } else { + let entry = storage_atomics + .entry(atomic.maybe_storage_var(analyzer).unwrap()) + .or_default(); + entry.push(atomic); + entry.sort(); + entry.dedup(); + } + } else { + calldata_atomics.push(atomic); + } + Ok(()) + })?; + + let mut atomics: Vec = vec![]; + storage_atomics + .into_iter() + .for_each(|(_k, same_atomics)| atomics.push(Atomic { idxs: same_atomics })); + atomics.extend( + calldata_atomics + .into_iter() + .map(|atomic| Atomic { idxs: vec![atomic] }) + .collect::>(), + ); + + atomics.iter().try_for_each(|atomic| { + let range = atomic.idxs[0].range(analyzer)?.unwrap(); + atomic_ranges.insert(atomic.clone(), range); + Ok(()) + })?; + if let Some((dep, unsat_range)) = ranges + .iter() + .find(|(_, range)| range.unsat(analyzer, arena)) + { + panic!( + "Initial ranges not sat for dep {}: {} {}", + dep.display_name(analyzer).unwrap(), + unsat_range.min, + unsat_range.max + ); + } + + if ranges.len() != deps.len() { + panic!("HERE"); + } + + let mut s = Self { + deps, + solves: Default::default(), + atomics, + intermediate_ranges: ranges.clone(), + ranges, + intermediate_atomic_ranges: atomic_ranges.clone(), + atomic_ranges, + lmrs: vec![], + sat: true, + start_idx: 0, + successful_passes: 0, + }; + + s.reset_lmrs(analyzer, arena); + Ok(Some(s)) + } + + pub fn lmr( + &self, + atomic: &Atomic, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> (Elem, Elem, Elem) { + let range = &self.atomic_ranges[atomic]; + let mut min = range.evaled_range_min(analyzer, arena).unwrap(); + min.cache_minimize(analyzer, arena).unwrap(); + // println!("min: {}", min.minimize(analyzer).unwrap().to_range_string(false, analyzer, arena).s); + let mut max = range.evaled_range_max(analyzer, arena).unwrap(); + max.cache_maximize(analyzer, arena).unwrap(); + let mut mid = (min.clone() + max.clone()) / Elem::from(Concrete::from(U256::from(2))); + mid.cache_maximize(analyzer, arena).unwrap(); + (min, mid, max) + } + + pub fn reset_lmrs( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) { + self.lmrs = vec![]; + (0..self.atomic_ranges.len()).for_each(|i| { + self.lmrs + .push(self.lmr(&self.atomics[i], analyzer, arena).into()); + }); + } + + pub fn reset_lmr( + &mut self, + i: usize, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) { + self.lmrs[i] = self.lmr(&self.atomics[i], analyzer, arena).into(); + } + + pub fn raise_lmr( + &mut self, + i: usize, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> bool { + // move the low to low + mid / 2 + // reset the mid + let mut curr_lmr = self.lmrs[i].clone(); + curr_lmr.low = (curr_lmr.low + curr_lmr.mid) + / Elem::from(Concrete::from(U256::from(2))) + .minimize(analyzer, arena) + .unwrap(); + curr_lmr.mid = (curr_lmr.low.clone() + curr_lmr.high.clone()) + / Elem::from(Concrete::from(U256::from(2))) + .minimize(analyzer, arena) + .unwrap(); + + let new_mid_conc = curr_lmr.mid.maximize(analyzer, arena).unwrap(); + let old_mid_conc = self.lmrs[i].mid.maximize(analyzer, arena).unwrap(); + + if matches!( + new_mid_conc.range_ord(&old_mid_conc, arena), + Some(std::cmp::Ordering::Equal) + ) { + return false; + } + self.lmrs[i] = curr_lmr; + true + } + + pub fn lower_lmr( + &mut self, + i: usize, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> bool { + // println!("lowering mid"); + // move the high to high + mid / 2 + // reset the mid + let mut curr_lmr = self.lmrs[i].clone(); + curr_lmr.high = (curr_lmr.mid.minimize(analyzer, arena).unwrap() + + 
curr_lmr.high.minimize(analyzer, arena).unwrap()) + / Elem::from(Concrete::from(U256::from(2))) + .minimize(analyzer, arena) + .unwrap(); + curr_lmr.mid = (curr_lmr.low.minimize(analyzer, arena).unwrap() + + curr_lmr.high.minimize(analyzer, arena).unwrap()) + / Elem::from(Concrete::from(U256::from(2))) + .minimize(analyzer, arena) + .unwrap(); + + let new_high_conc = curr_lmr.high.minimize(analyzer, arena).unwrap(); + let old_high_conc = self.lmrs[i].high.minimize(analyzer, arena).unwrap(); + + if matches!( + new_high_conc.range_ord(&old_high_conc, arena), + Some(std::cmp::Ordering::Equal) + ) { + return false; + } + self.lmrs[i] = curr_lmr; + true + } + + pub fn increase_start(&mut self) -> bool { + self.start_idx += 1; + self.start_idx < self.atomic_ranges.len() + } +} + +impl SolcSolver for BruteBinSearchSolver { + fn simplify( + &mut self, + _analyzer: &impl AnalyzerBackend, + _arena: &mut RangeArena>, + ) { + } + + fn solve( + &mut self, + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, + ) -> Result { + // pick a value for a variable. check if it satisfies all dependendies + // if is sat, try to reduce using bin search? Not sure how that will + // affect other dependencies If it doesnt, + // raise or lower + + let atoms = self + .ranges + .iter() + .filter_map(|(_dep, range)| { + // println!("dep: {}", dep.display_name(analyzer).unwrap()); + + // println!("atom: {atom:#?}"); + if let Some(atom) = range.min.atomize(analyzer, arena) { + Some(atom) + } else { + range.max.atomize(analyzer, arena) + } + }) + .collect::>(); + + let mut dl_solver = DLSolver::new(atoms, analyzer, arena); + let mut atomic_solves: BTreeMap<_, _>; + + match dl_solver.solve_partial(analyzer, arena)? { + SolveStatus::Unsat => { + return Ok(AtomicSolveStatus::Unsat); + } + SolveStatus::Sat { + const_solves, + dl_solves, + } => { + atomic_solves = const_solves + .into_iter() + .filter_map(|(dep, solve)| { + Some(( + self.atomics + .iter() + .find(|atomic| atomic.idxs.contains(&dep))? + .clone(), + solve + .maximize(analyzer, arena) + .unwrap() + .maybe_concrete()? + .val, + )) + }) + .collect(); + atomic_solves.extend( + dl_solves + .into_iter() + .filter_map(|(dep, solve)| { + Some(( + self.atomics + .iter() + .find(|atomic| atomic.idxs.contains(&dep))? + .clone(), + solve + .maximize(analyzer, arena) + .unwrap() + .maybe_concrete()? + .val, + )) + }) + .collect::>(), + ); + } + SolveStatus::Indeterminate { const_solves } => { + atomic_solves = const_solves + .into_iter() + .filter_map(|(dep, solve)| { + Some(( + self.atomics + .iter() + .find(|atomic| atomic.idxs.contains(&dep))? + .clone(), + solve + .maximize(analyzer, arena) + .unwrap() + .maybe_concrete()? + .val, + )) + }) + .collect() + } + } + // println!("solved for: {:#?}", atomic_solves); + + if atomic_solves.len() == self.atomics.len() { + return Ok(AtomicSolveStatus::Sat(atomic_solves)); + } else { + atomic_solves.iter().for_each(|(atomic, val)| { + self.intermediate_ranges.iter_mut().for_each(|(_dep, r)| { + atomic.idxs.iter().for_each(|idx| { + r.replace_dep(idx.0.into(), Elem::from(val.clone()), analyzer, arena) + }); + }); + }); + + atomic_solves.clone().into_iter().for_each(|(atomic, val)| { + self.intermediate_atomic_ranges.insert( + atomic, + SolcRange::new(val.clone().into(), val.into(), vec![]), + ); + }); + } + + let mut solved_for = atomic_solves + .keys() + .filter_map(|k| self.atomics.iter().position(|r| r == k)) + .collect(); + while self.recurse_check(self.start_idx, &mut solved_for, analyzer, arena)? 
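+        // empty-bodied loop: keep re-running recurse_check until it returns Ok(false), i.e. no further progress can be made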
{} + if self.successful_passes == self.atomics.len() { + let mapping = self + .intermediate_atomic_ranges + .iter() + .filter_map(|(name, range)| { + if !range.is_const(analyzer, arena).ok()? { + None + } else { + Some(( + name.clone(), + range + .evaled_range_min(analyzer, arena) + .unwrap() + .maybe_concrete() + .unwrap() + .val, + )) + } + }) + .collect::>(); + if mapping.len() == self.intermediate_atomic_ranges.len() { + let all_good = self.ranges.iter().all(|(_dep, range)| { + let mut new_range = range.clone(); + self.intermediate_atomic_ranges + .iter() + .for_each(|(atomic, range)| { + atomic.idxs.iter().for_each(|idx| { + new_range.replace_dep( + idx.0.into(), + range.min.clone(), + analyzer, + arena, + ); + }); + }); + new_range.cache_eval(analyzer, arena).unwrap(); + // println!("{}, original range: [{}, {}], new range: [{}, {}]", dep.display_name(analyzer).unwrap(), range.min, range.max, new_range.min_cached.clone().unwrap(), new_range.max_cached.clone().unwrap()); + new_range.sat(analyzer, arena) + }); + if all_good { + Ok(AtomicSolveStatus::Sat(mapping)) + } else { + println!("thought we solved but we didnt"); + Ok(AtomicSolveStatus::Indeterminate) + } + } else { + Ok(AtomicSolveStatus::Indeterminate) + } + } else { + Ok(AtomicSolveStatus::Indeterminate) + } + } + + fn recurse_check( + &mut self, + i: usize, + solved_atomics: &mut Vec, + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, + ) -> Result { + // println!("recurse check for: {}", self.atomics[i].idxs[0].display_name(analyzer).unwrap()); + if i >= self.lmrs.len() { + return Ok(false); + } + + if solved_atomics.contains(&i) { + self.increase_start(); + self.successful_passes += 1; + return Ok(true); + } + + let _atomic = &self.atomics[i]; + + let lmr = self.lmrs[i].clone(); + // println!("solving: {i}, {}, successful passes: {}", atomic.idxs[0].display_name(analyzer).unwrap(), self.successful_passes); + // println!("initial range: [{min_s},{max_s}], is_const: {}", atomic.idxs[0].is_const(analyzer)?); + match self.check( + i, + (lmr.low, lmr.mid, lmr.high), + solved_atomics, + analyzer, + arena, + )? 
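+        // (sat, hint): a sat result carries refreshed intermediate ranges; an unsat result may carry a direction hint for the binary search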
{ + (true, Some(HintOrRanges::Ranges(new_ranges))) => { + // sat, try solving next var with new intermediate ranges + solved_atomics.push(i); + self.intermediate_ranges = new_ranges; + self.successful_passes += 1; + self.increase_start(); + Ok(true) + } + (false, Some(HintOrRanges::Higher)) => { + self.successful_passes = 0; + *solved_atomics = vec![]; + // unsat, try raising + if self.raise_lmr(i, analyzer, arena) { + self.recurse_check(i, solved_atomics, analyzer, arena) + } else { + // we couldn't solve, try increasing global start + if self.increase_start() { + self.intermediate_ranges = self.ranges.clone(); + self.recurse_check(self.start_idx, solved_atomics, analyzer, arena) + } else { + Ok(false) + } + } + } + (false, Some(HintOrRanges::Lower)) => { + // unsat, try lowering + self.successful_passes = 0; + *solved_atomics = vec![]; + if self.lower_lmr(i, analyzer, arena) { + self.recurse_check(i, solved_atomics, analyzer, arena) + } else { + // we couldn't solve, try increasing global start + if self.increase_start() { + self.intermediate_ranges = self.ranges.clone(); + self.recurse_check(self.start_idx, solved_atomics, analyzer, arena) + } else { + Ok(false) + } + } + } + (false, None) => { + // unsat, try lowering + self.successful_passes = 0; + *solved_atomics = vec![]; + if self.lower_lmr(i, analyzer, arena) { + self.recurse_check(i, solved_atomics, analyzer, arena) + } else { + // we couldn't solve, try increasing global start + if self.increase_start() { + self.intermediate_ranges = self.ranges.clone(); + self.recurse_check(self.start_idx, solved_atomics, analyzer, arena) + } else { + Ok(false) + } + } + } + _ => unreachable!(), + } + } + + fn check( + &mut self, + solved_for_idx: usize, + (low, mid, high): (Elem, Elem, Elem), + solved_atomics: &mut Vec, + analyzer: &mut impl AnalyzerBackend, + arena: &mut RangeArena>, + ) -> Result<(bool, Option), GraphError> { + let solved_dep = &self.atomics[solved_for_idx].clone(); + + fn match_check( + this: &mut BruteBinSearchSolver, + solved_for_idx: usize, + solved_dep: &Atomic, + (low, mid, high): (Elem, Elem, Elem), + low_done: bool, + mut mid_done: bool, + mut high_done: bool, + solved_atomics: &mut Vec, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(bool, Option), GraphError> { + let res = if !low_done { + check_for_lmr( + this, + solved_for_idx, + solved_dep, + low.clone(), + solved_atomics, + analyzer, + arena, + ) + } else if !mid_done { + check_for_lmr( + this, + solved_for_idx, + solved_dep, + mid.clone(), + solved_atomics, + analyzer, + arena, + ) + } else { + check_for_lmr( + this, + solved_for_idx, + solved_dep, + high.clone(), + solved_atomics, + analyzer, + arena, + ) + }; + + match res { + Ok((true, ranges)) => Ok((true, ranges)), + Ok((false, _)) => { + if high_done { + res + } else { + high_done = mid_done; + mid_done = true; + match_check( + this, + solved_for_idx, + solved_dep, + (low, mid, high), + true, + mid_done, + high_done, + solved_atomics, + analyzer, + arena, + ) + } + } + Err(e) => Err(e), + } + } + + fn check_for_lmr( + this: &mut BruteBinSearchSolver, + solved_for_idx: usize, + solved_dep: &Atomic, + conc: Elem, + solved_atomics: &mut Vec, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result<(bool, Option), GraphError> { + // println!("checking: {}, conc: {}, {}", this.atomics[solved_for_idx].idxs[0].display_name(analyzer).unwrap(), conc.maximize(analyzer, arena)?.to_range_string(true, analyzer, arena).s, conc.minimize(analyzer)?.to_range_string(false, 
analyzer, arena).s); + solved_atomics.push(solved_for_idx); + let mut new_ranges = BTreeMap::default(); + this.intermediate_atomic_ranges.insert( + solved_dep.clone(), + SolcRange::new(conc.clone(), conc.clone(), vec![]), + ); + let atoms = this + .intermediate_ranges + .iter() + .filter_map(|(_, range)| { + if let Some(atom) = range + .min + .simplify_minimize(analyzer, arena) + .unwrap() + .atomize(analyzer, arena) + { + Some(atom) + } else { + range + .max + .simplify_maximize(analyzer, arena) + .unwrap() + .atomize(analyzer, arena) + } + }) + .collect::>(); + + let mut dl_solver = DLSolver::new(atoms, analyzer, arena); + let mut atomic_solves: BTreeMap<_, _>; + + match dl_solver.solve_partial(analyzer, arena)? { + SolveStatus::Unsat => { + println!("TRUE UNSAT"); + return Ok((false, None)); + } + SolveStatus::Sat { + const_solves, + dl_solves, + } => { + atomic_solves = const_solves + .into_iter() + .filter_map(|(dep, solve)| { + Some(( + this.atomics + .iter() + .find(|atomic| atomic.idxs.contains(&dep))? + .clone(), + solve + .maximize(analyzer, arena) + .unwrap() + .maybe_concrete()? + .val, + )) + }) + .collect(); + atomic_solves.extend( + dl_solves + .into_iter() + .filter_map(|(dep, solve)| { + Some(( + this.atomics + .iter() + .find(|atomic| atomic.idxs.contains(&dep))? + .clone(), + solve + .maximize(analyzer, arena) + .unwrap() + .maybe_concrete()? + .val, + )) + }) + .collect::>(), + ); + } + SolveStatus::Indeterminate { const_solves } => { + atomic_solves = const_solves + .into_iter() + .filter_map(|(dep, solve)| { + Some(( + this.atomics + .iter() + .find(|atomic| atomic.idxs.contains(&dep))? + .clone(), + solve + .maximize(analyzer, arena) + .unwrap() + .maybe_concrete()? + .val, + )) + }) + .collect() + } + } + + atomic_solves.iter().for_each(|(atomic, val)| { + this.intermediate_ranges.iter_mut().for_each(|(_dep, r)| { + atomic.idxs.iter().for_each(|idx| { + r.replace_dep(idx.0.into(), Elem::from(val.clone()), analyzer, arena) + }); + }); + }); + + atomic_solves.clone().into_iter().for_each(|(atomic, val)| { + this.intermediate_atomic_ranges.insert( + atomic, + SolcRange::new(val.clone().into(), val.into(), vec![]), + ); + }); + // println!("new solves: {atomic_solves:#?}"); + + for dep in this.deps.iter() { + let range = this + .intermediate_ranges + .get(dep) + .expect("No range for dep?"); + // if dep.display_name(analyzer).unwrap() == "(p2 < (61 * p3)) == true" { + // println!("range: {:#?}\n{:#?}", range.min, range.max); + // println!("simplified range: {:#?}\n{:#?}", range.min.simplify_minimize(&mut vec![], analyzer), range.max.simplify_maximize(&mut vec![], analyzer)); + // } + // println!("atomizing dep: {}", dep.display_name(analyzer).unwrap()); + // println!("min atomized: {:#?}, max atomized: {:#?}", range.min.simplify_minimize(&mut vec![], analyzer)?.atomize(), range.max.simplify_maximize(&mut vec![], analyzer)?.atomize()); + if solved_dep.idxs.contains(dep) { + // println!("FOR SOLVED DEP"); + continue; + } + // check that the concrete value doesn't break any + let mut new_range = range.clone(); + + // check if const now + // if let Some((Some(idx), const_ineq)) = new_range.min.maybe_const_inequality() { + // println!("min const ineq: {} for {}", const_ineq.maybe_concrete().unwrap().val.as_human_string(), ContextVarNode::from(idx).display_name(analyzer).unwrap()); + + // if let Some(position) = this.atomics.iter().position(|atomic| atomic.idxs.contains(&ContextVarNode::from(idx))) { + // // check and return) + // if !solved_atomics.contains(&position) { + // 
println!("inner min const ineq"); + // return check_for_lmr(this, position, &this.atomics[position].clone(), const_ineq, solved_atomics, analyzer); + // } + // } + + // } + // if let Some((Some(idx), const_ineq)) = new_range.max.maybe_const_inequality() { + // println!("max const ineq: {} for {} ({}), {:#?}", const_ineq.maybe_concrete().unwrap().val.as_human_string(), ContextVarNode::from(idx).display_name(analyzer).unwrap(), idx.index(), this.atomics); + // if let Some(position) = this.atomics.iter().position(|atomic| atomic.idxs.contains(&ContextVarNode::from(idx))) { + // // check and return + // if !solved_atomics.contains(&position) { + // println!("inner max const ineq"); + // return check_for_lmr(this, position, &this.atomics[position].clone(), const_ineq, solved_atomics, analyzer); + // } + // } + // } + + // check if the new range is dependent on the solved variable + let is_dependent_on_solved = new_range + .dependent_on(analyzer, arena) + .iter() + .any(|dep| solved_dep.idxs.contains(dep)); + + // dont run sat check on non-dependent range + if !is_dependent_on_solved { + new_ranges.insert(*dep, new_range); + continue; + } + + // println!("new range for {} dependent_on: {:?}, replacing {:?}, is dependent on solved: {is_dependent_on_solved}", dep.display_name(analyzer).unwrap(), new_range.dependent_on(), solved_dep.idxs); + // println!("dep {}:\n\tinitial range: [{}, {}],\n\tcurr range: [{}, {}]", + // dep.display_name(analyzer).unwrap(), + // dep.evaled_range_min(analyzer, arena)?.unwrap().to_range_string(false, analyzer, arena).s, + // dep.evaled_range_max(analyzer, arena)?.unwrap().to_range_string(true, analyzer, arena).s, + // new_range.evaled_range_min(analyzer, arena)?.to_range_string(false, analyzer, arena).s, + // new_range.evaled_range_max(analyzer, arena)?.to_range_string(true, analyzer, arena).s, + // // new_range.range_min() + // ); + + // println!("dep {} range: {:#?} {:#?}", dep.display_name(analyzer).unwrap(), new_range.min, new_range.max); + if new_range.unsat(analyzer, arena) { + return Ok((false, None)); + // panic!("initial range unsat???") + } + + this.atomics[solved_for_idx] + .idxs + .iter() + .for_each(|atomic_alias| { + new_range.replace_dep(atomic_alias.0.into(), conc.clone(), analyzer, arena); + }); + new_range.cache_eval(analyzer, arena)?; + + // println!("new range: [{}, {}], [{}, {}]", + // new_range.evaled_range_min(analyzer, arena)?.to_range_string(false, analyzer, arena).s, + // new_range.evaled_range_max(analyzer, arena)?.to_range_string(true, analyzer, arena).s, + // new_range.min.to_range_string(false, analyzer, arena).s, + // new_range.max.to_range_string(true, analyzer, arena).s, + // ); + if new_range.unsat(analyzer, arena) { + // figure out *where* we need to increase or decrease + // work on the unreplace range for now + let min_is_dependent = !range.min.dependent_on(analyzer, arena).is_empty(); + let max_is_dependent = !range.max.dependent_on(analyzer, arena).is_empty(); + + match (min_is_dependent, max_is_dependent) { + (true, true) => { + // both sides dependent + // println!("both"); + } + (false, true) => { + // just max is dependent + // println!("just max"); + } + (true, false) => { + // just min is dependent + // println!("just min"); + } + (false, false) => { + // panic!("this shouldnt happen"); + } + } + + // println!("new unsat range: [{}, {}]", + // new_range.evaled_range_min(analyzer, arena)?.to_range_string(false, analyzer, arena).s, + // new_range.evaled_range_max(analyzer, arena)?.to_range_string(true, analyzer, arena).s, + 
// ); + // compare new range to prev range to see if they moved down or up + + // panic!("here"); + let min_change = new_range + .evaled_range_min(analyzer, arena)? + .range_ord(&range.evaled_range_min(analyzer, arena)?, arena); + let max_change = new_range + .evaled_range_max(analyzer, arena)? + .range_ord(&range.evaled_range_max(analyzer, arena)?, arena); + match (min_change, max_change) { + (Some(std::cmp::Ordering::Less), Some(std::cmp::Ordering::Greater)) => { + // panic!("initial range must have been unsat to start"); + } + (Some(std::cmp::Ordering::Greater), Some(std::cmp::Ordering::Less)) => { + // we shrank our range, dont give a hint + // println!("None, dep isnt sat: {}, dep initial range: {}", dep.display_name(analyzer).unwrap(), dep.range_string(analyzer).unwrap().unwrap()); + return Ok((false, None)); + } + (Some(std::cmp::Ordering::Greater), _) => { + // both grew, try lowering + // println!("Lower, dep isnt sat: {}, dep initial range: {}", dep.display_name(analyzer).unwrap(), dep.range_string(analyzer).unwrap().unwrap()); + return Ok((false, Some(HintOrRanges::Lower))); + } + + (Some(std::cmp::Ordering::Less), _) => { + // both grew, try lowering + // println!("Higher, dep isnt sat: {}, dep initial range: {}", dep.display_name(analyzer).unwrap(), dep.range_string(analyzer).unwrap().unwrap()); + return Ok((false, Some(HintOrRanges::Higher))); + } + // (Some(std::cmp::Ordering::Equal), _) => { + // panic!("here"); + // } + // (_, Some(std::cmp::Ordering::Equal)) => { + // panic!("here"); + // } + _ => { + // println!("None empty, dep isnt sat: {}, dep initial range: {}", dep.display_name(analyzer).unwrap(), dep.range_string(analyzer).unwrap().unwrap()); + return Ok((false, None)); + } + } + } else { + new_ranges.insert(*dep, new_range); + } + } + Ok((true, Some(HintOrRanges::Ranges(new_ranges)))) + } + + match_check( + self, + solved_for_idx, + solved_dep, + (low, mid, high), + false, + false, + false, + solved_atomics, + analyzer, + arena, + ) + } +} diff --git a/crates/graph/src/solvers/dl.rs b/crates/graph/src/solvers/dl.rs new file mode 100644 index 00000000..68782961 --- /dev/null +++ b/crates/graph/src/solvers/dl.rs @@ -0,0 +1,1215 @@ +use crate::{ + nodes::{Concrete, ContextVarNode}, + range::elem::*, + range::range_string::ToRangeString, + solvers::{AtomOrPart, Atomize, OpType, SolverAtom}, + GraphBackend, GraphError, +}; + +use shared::RangeArena; + +use ethers_core::types::{I256, U256}; +use itertools::Itertools; +use petgraph::{ + graph::NodeIndex, + stable_graph::StableGraph, + visit::{EdgeRef, IntoNodeIdentifiers, NodeIndexable, VisitMap, Visitable}, + Directed, +}; + +use std::{collections::BTreeMap, rc::Rc}; + +pub type DLGraph = StableGraph; + +#[derive(Debug, Clone, Default)] +pub struct DLSolver { + constraints: Vec, + normalized_constraints: BTreeMap>>, + root_node: NodeIndex, + pub const_solves: SolveMap, + pub cached_dl_solves: Option, + pub graph: DLGraph, + pub graph_map: BTreeMap>, + pub var_to_atom_idx: BTreeMap>, +} + +impl PartialEq for DLSolver { + fn eq(&self, other: &Self) -> bool { + self.constraints() == other.constraints() + && self.graph_map == other.graph_map + && self.var_to_atom_idx == other.var_to_atom_idx + } +} + +impl Eq for DLSolver {} + +#[derive(Debug)] +pub enum SolveStatus { + Unsat, + Sat { + const_solves: SolveMap, + dl_solves: SolveMap, + }, + Indeterminate { + const_solves: SolveMap, + }, +} + +pub type SolveMap = BTreeMap>; + +#[derive(Debug)] +pub struct DLSolveResult { + pub status: SolveStatus, + pub added_atoms: Vec, + 
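+    // together with `added_deps` below, this lets `remove_added` roll back nodes inserted for a speculative solve, keeping the base graph intact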
pub added_deps: Vec, +} + +impl DLSolver { + pub fn new( + mut constraints: Vec, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Self { + constraints.iter_mut().for_each(|c| { + c.update_max_ty(); + }); + let mut graph: DLGraph = Default::default(); + let root_node = graph.add_node(AtomOrPart::Part(Elem::Null)); + let mut s = Self { + constraints, + graph, + root_node, + ..Default::default() + }; + s.add_constraints(vec![], analyzer, arena); + s + } + + pub fn constraints(&self) -> &[SolverAtom] { + &self.constraints + } + + pub fn add_constraint( + &mut self, + mut constraint: SolverAtom, + normalized_forms: Vec>, + ) { + // println!("adding constraint"); + if !self.constraints.contains(&constraint) { + // println!("didnt contain"); + constraint.update_max_ty(); + self.constraints.push(constraint.clone()); + self.normalized_constraints + .insert(constraint.clone(), normalized_forms); + self.cached_dl_solves = None; + } + } + + pub fn add_constraints( + &mut self, + constraints: Vec, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> BTreeMap>> { + // println!("adding constriants: {constraints:#?}"); + let constraints: Vec<_> = constraints + .into_iter() + .flat_map(|mut constraint| { + if let AtomOrPart::Part(c) = &*constraint.lhs { + if let Some(mut c) = c.maybe_concrete() { + c.val = c.val.max_size(); + constraint.lhs = AtomOrPart::Part(Elem::Concrete(c)).into() + } + } + + if let AtomOrPart::Part(c) = &*constraint.rhs { + if let Some(mut c) = c.maybe_concrete() { + c.val = c.val.max_size(); + constraint.rhs = AtomOrPart::Part(Elem::Concrete(c)).into() + } + } + + if constraint.op == RangeOp::And { + vec![ + SolverAtom { + ty: OpType::Const, + lhs: constraint.lhs, + op: RangeOp::Eq, + rhs: Rc::new(AtomOrPart::Part(Elem::from(Concrete::from(true)))), + }, + SolverAtom { + ty: OpType::Const, + lhs: constraint.rhs, + op: RangeOp::Eq, + rhs: Rc::new(AtomOrPart::Part(Elem::from(Concrete::from(true)))), + }, + ] + } else { + vec![constraint] + } + }) + .collect(); + let mut dep_to_solve_ty: BTreeMap> = BTreeMap::default(); + self.constraints.iter().for_each(|constraint| { + let deps = constraint.dependent_on(analyzer, arena); + deps.into_iter().for_each(|dep| { + if let Some(entry) = dep_to_solve_ty.get_mut(&dep) { + if constraint.ty == OpType::Const { + *entry = vec![constraint.clone()]; + } else if entry[0].ty != OpType::Const { + // Constant requirements always take precedent + entry.push(constraint.clone()); + } + } else { + dep_to_solve_ty.insert(dep, vec![constraint.clone()]); + } + }); + }); + + // println!("dep_to_solve_ty: {dep_to_solve_ty:#?}"); + + let constraints: Vec<_> = constraints + .iter() + .filter(|c| !self.constraints.contains(c)) + .collect(); + + // println!("unique constraints: {constraints:#?}"); + constraints.iter().for_each(|constraint| { + let deps = constraint.dependent_on(analyzer, arena); + deps.into_iter().for_each(|dep| { + if let Some(entry) = dep_to_solve_ty.get_mut(&dep) { + if constraint.ty == OpType::Const { + *entry = vec![(*constraint).clone()]; + } else if entry[0].ty != OpType::Const { + // Constant requirements always take precedent + entry.push((*constraint).clone()); + } + } else { + dep_to_solve_ty.insert(dep, vec![(*constraint).clone()]); + } + }); + }); + + // println!("dep_to_solve_ty2: {dep_to_solve_ty:#?}"); + + // filter out self equality + let non_self_equality: Vec<_> = dep_to_solve_ty + .iter() + .filter_map(|(dep, atoms)| { + if atoms.len() == 1 && atoms[0].op == RangeOp::Eq && atoms[0].lhs == 
atoms[0].rhs { + return None; + } + Some((*dep, atoms)) + }) + .collect(); + + // println!("non_self_equality: {non_self_equality:#?}"); + // solve constant deps + let const_solves = non_self_equality + .iter() + .filter_map(|(dep, atoms)| { + if atoms.len() == 1 && atoms[0].ty == OpType::Const { + if atoms[0].rhs.is_part() { + return Some((*dep, atoms[0].rhs.into_elem())); + } else { + return Some((*dep, atoms[0].lhs.into_elem())); + } + } + None + }) + .collect::>(); + // println!("const_solves: {const_solves:#?}"); + self.const_solves = const_solves.clone(); + + // widdle down constraints based on if we constant solved them + let still_unknown_constraints: Vec<_> = constraints + .into_iter() + .filter(|constraint| { + let deps = constraint.dependent_on(analyzer, arena); + !deps.iter().all(|dep| const_solves.contains_key(dep)) + }) + .cloned() + .collect(); + + // println!("still_unknown_constraints: {still_unknown_constraints:#?}"); + + if still_unknown_constraints.is_empty() { + return Default::default(); + } + + let res = still_unknown_constraints + .into_iter() + .filter(|constraint| { + let deps = constraint.dependent_on(analyzer, arena); + deps.iter().all(|dep| { + dep_to_solve_ty + .get(dep) + .unwrap() + .iter() + .all(|constraint| constraint.ty == OpType::DL) + }) + }) + .collect::>() + .iter() + .map(|constraint| { + let t = Self::dl_atom_normalize(constraint.clone().clone(), analyzer, arena); + t.map(|t| (constraint.clone(), t)) + }) + .collect::>>>>(); + // println!("normalized map: {res:#?}"); + match res { + Some(t) => t, + None => Default::default(), + } + } + + pub fn dl_solvable_constraints(&self) -> Vec>> { + self.normalized_constraints.values().cloned().collect() + } + + pub fn solve_partial( + &mut self, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + // println!("constraints {:#?}", self.constraints); + let mut dep_to_solve_ty: BTreeMap> = BTreeMap::default(); + self.constraints.iter().for_each(|constraint| { + let deps = constraint.dependent_on(analyzer, arena); + deps.into_iter().for_each(|dep| { + if let Some(entry) = dep_to_solve_ty.get_mut(&dep) { + if constraint.ty == OpType::Const { + *entry = vec![constraint.clone()]; + } else if entry[0].ty != OpType::Const { + // Constant requirements always take precedent + entry.push(constraint.clone()); + } + } else { + dep_to_solve_ty.insert(dep, vec![constraint.clone()]); + } + }); + }); + + // println!("dep to solve: {dep_to_solve_ty:#?}"); + + if let Some(_self_inequality) = dep_to_solve_ty.iter().find(|(_dep, atoms)| { + atoms.iter().any(|atom| { + atom.op == RangeOp::Neq + && atom.lhs == atom.rhs + && !atom.lhs.dependent_on(analyzer, arena).is_empty() + }) + }) { + return Ok(SolveStatus::Unsat); + } + + // filter out self equality + let non_self_equality: Vec<_> = dep_to_solve_ty + .iter() + .filter_map(|(dep, atoms)| { + if atoms.len() == 1 && atoms[0].op == RangeOp::Eq && atoms[0].lhs == atoms[0].rhs { + return None; + } + Some((*dep, atoms)) + }) + .collect(); + + // println!("non_self_equality: {non_self_equality:#?}"); + // solve constant deps + let const_solves = non_self_equality + .iter() + .filter_map(|(dep, atoms)| { + if atoms.len() == 1 && atoms[0].ty == OpType::Const { + if atoms[0].rhs.is_part() { + return Some((*dep, atoms[0].rhs.into_elem())); + } else { + return Some((*dep, atoms[0].lhs.into_elem())); + } + } + None + }) + .collect::>(); + + // println!("const solves: {const_solves:#?}"); + + // widdle down constraints based on if we constant solved them + let 
still_unknown_constraints: Vec<_> = self + .constraints + .clone() + .into_iter() + .filter(|constraint| { + let deps = constraint.dependent_on(analyzer, arena); + !deps.iter().all(|dep| const_solves.contains_key(dep)) + }) + .collect(); + + // println!("still unknown: {still_unknown_constraints:#?}"); + + if still_unknown_constraints.is_empty() { + // TODO: Check that the constraints still hold + return Ok(SolveStatus::Sat { + const_solves, + dl_solves: Default::default(), + }); + } + + let dl_solvable = self.dl_solvable_constraints(); + // println!("dl solvable: {dl_solvable:#?}"); + // constraints -> variable -> paths + + // [ + // var1: [ + // constraint1: [path1, path2], + // constraint2: [path1], + // ] + // ] + let basic: Vec = dl_solvable + .iter() + .filter_map(|var| { + let res: Vec<_> = var + .iter() + .filter_map(|constraint| { + if constraint.len() == 1 { + Some(constraint.clone()) + } else { + None + } + }) + .collect(); + if res.len() == 1 { + Some(res) + } else { + None + } + }) + .flatten() + .flatten() + .collect(); + + // check if basics are unsat, if so the extra constraints wont help that + // so its truly unsat + // println!("basic: {basic:#?}"); + let basic_solve = self.dl_solve(basic.clone(), analyzer, arena)?; + if matches!(basic_solve.status, SolveStatus::Unsat) { + return Ok(SolveStatus::Unsat); + } + + // println!("basic solve: {basic_solve:?}"); + + let multi: Vec<_> = dl_solvable + .iter() + .filter_map(|var| { + let res: Vec<_> = var + .iter() + .filter_map(|constraint| { + if constraint.len() > 1 { + Some(constraint.clone()) + } else { + None + } + }) + .collect(); + if res.len() > 1 { + Some(res) + } else { + None + } + }) + .collect(); + + // println!("multi: {multi:?}"); + + if multi.is_empty() { + // we had no branches, just use the basic solve + return match basic_solve.status { + SolveStatus::Unsat => Ok(SolveStatus::Unsat), + SolveStatus::Sat { dl_solves, .. } => Ok(SolveStatus::Sat { + const_solves, + dl_solves, + }), + SolveStatus::Indeterminate { .. } => { + Ok(SolveStatus::Indeterminate { const_solves }) + } + }; + } else if !basic.is_empty() { + // println!("was multi"); + let mut cnt = 0; + let mut unsat = 0; + for permutation in multi.iter().multi_cartesian_product() { + cnt += 1; + // flatten out the permutation + let mut flattened: Vec = permutation + .into_iter() + .flat_map(|constraint| constraint.clone()) + .collect(); + // add the constant paths + flattened.extend(basic.clone()); + let solve = self.dl_solve(flattened, analyzer, arena)?; + // remove the added constraints, keeping the basic graph in tact + self.remove_added(&solve); + // now that we checked that + match solve.status { + SolveStatus::Sat { dl_solves, .. } => { + return Ok(SolveStatus::Sat { + const_solves, + dl_solves, + }); + } + SolveStatus::Unsat => { + unsat += 1; + continue; + } + SolveStatus::Indeterminate { .. 
} => continue, + } + } + + if cnt == unsat { + return Ok(SolveStatus::Unsat); + } + } + + Ok(SolveStatus::Indeterminate { const_solves }) + } + + pub fn remove_added(&mut self, result: &DLSolveResult) { + result.added_atoms.iter().for_each(|c| { + let idx = self.graph_map.remove(c).unwrap(); + self.graph.remove_node(idx); + }); + result.added_deps.iter().for_each(|dep| { + self.var_to_atom_idx.remove(dep); + }); + } + + #[tracing::instrument(level = "trace", skip_all)] + pub fn dl_solve( + &mut self, + normalized_constraints: Vec, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + if self.graph.node_count() == 0 { + let root_node = self.graph.add_node(AtomOrPart::Part(Elem::Null)); + self.root_node = root_node; + } + + // println!("constraints:"); + // normalized_constraints.iter().for_each(|c| { + // println!("{}", c.into_expr_elem()); + // }); + let mut added_atoms = vec![]; + let mut added_deps = vec![]; + if normalized_constraints.is_empty() { + return Ok(DLSolveResult { + status: SolveStatus::Indeterminate { + const_solves: Default::default(), + }, + added_atoms, + added_deps, + }); + } + let zero_part = AtomOrPart::Part(Elem::from(Concrete::from(U256::zero()))); + let mut indeterminate = false; + normalized_constraints.iter().for_each(|constraint| { + let a = if let Some(idx) = self.graph_map.get(&constraint.lhs) { + *idx + } else { + let idx = self.graph.add_node((*constraint.lhs).clone()); + self.graph_map.insert((*constraint.lhs).clone(), idx); + added_atoms.push((*constraint.lhs).clone()); + idx + }; + + let rhs_atom = constraint.rhs.expect_atom(); + let rhs_lhs_deps = rhs_atom.lhs.dependent_on(analyzer, arena); + let rhs_rhs_deps = rhs_atom.rhs.dependent_on(analyzer, arena); + let ((dyn_elem, dep), const_elem) = + match (!rhs_lhs_deps.is_empty(), !rhs_rhs_deps.is_empty()) { + (true, true) => { + // panic!("here: {} {} {}", constraint.lhs.into_elem(), constraint.op.to_string(), rhs_atom.into_expr_elem()); + indeterminate = true; + return; + } + (true, false) => { + if matches!(rhs_atom.op, RangeOp::Sub(_)) { + let const_elem = (rhs_atom.rhs.into_elem() + * Elem::from(Concrete::from(I256::from(-1)))) + .maximize(analyzer, arena) + .unwrap(); + ( + (rhs_atom.lhs, Some(rhs_lhs_deps[0])), + Rc::new(AtomOrPart::Part(const_elem)), + ) + } else { + ((rhs_atom.lhs, Some(rhs_lhs_deps[0])), rhs_atom.rhs) + } + } + (false, true) => { + if matches!(rhs_atom.op, RangeOp::Sub(_)) { + let const_elem = (rhs_atom.lhs.into_elem() + * Elem::from(Concrete::from(I256::from(-1)))) + .maximize(analyzer, arena) + .unwrap(); + ( + (rhs_atom.rhs, Some(rhs_rhs_deps[0])), + Rc::new(AtomOrPart::Part(const_elem)), + ) + } else { + ((rhs_atom.rhs, Some(rhs_rhs_deps[0])), rhs_atom.lhs) + } + } + (false, false) => { + if *rhs_atom.rhs == zero_part { + ((rhs_atom.rhs, None), rhs_atom.lhs) + } else { + ((rhs_atom.lhs, None), rhs_atom.rhs) + } + } + }; + + let b = if let Some(idx) = self.graph_map.get(&dyn_elem) { + *idx + } else { + let idx = self.graph.add_node((*dyn_elem).clone()); + added_atoms.push((*dyn_elem).clone()); + self.graph_map.insert((*dyn_elem).clone(), idx); + if let Some(dep) = dep { + if self.var_to_atom_idx.get(&dep).is_none() { + added_deps.push(dep); + self.var_to_atom_idx.insert(dep, idx); + } + } + idx + }; + + self.graph.add_edge(a, b, (*const_elem).clone()); + }); + + let root_node = self.root_node; + added_atoms.iter().for_each(|c| { + let idx = self.graph_map.get(c).unwrap(); + self.graph.add_edge( + root_node, + *idx, + 
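+                // every newly added atom gets a zero-weight edge from the virtual root node so Bellman-Ford can reach it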
AtomOrPart::Part(Elem::from(Concrete::from(U256::zero()))), + ); + }); + + if find_negative_cycle(&self.graph, root_node, analyzer, arena).is_some() { + return Ok(DLSolveResult { + status: SolveStatus::Unsat, + added_atoms, + added_deps, + }); + } + + let (mut dists, _) = bellman_ford_initialize_relax(&self.graph, root_node, analyzer, arena); + + dists = dists + .into_iter() + .map(|dist| { + (dist * Elem::from(Concrete::from(I256::from(-1)))) + .maximize(analyzer, arena) + .unwrap() + }) + .collect(); + + let res = self + .var_to_atom_idx + .iter() + .map(|(dep, idx)| (*dep, dists[idx.index()].clone())) + .collect(); + + if indeterminate { + return Ok(DLSolveResult { + status: SolveStatus::Indeterminate { + const_solves: Default::default(), + }, + added_atoms, + added_deps, + }); + } + + Ok(DLSolveResult { + status: SolveStatus::Sat { + const_solves: Default::default(), + dl_solves: res, + }, + added_atoms, + added_deps, + }) + } + + /// Normalizes a DL atom into x <= y - k, where x and y are variables and k is a constant. + /// Needed for running negative cycle check. Additionally, if we have an `OR`, we + #[tracing::instrument(level = "trace", skip_all)] + pub fn dl_atom_normalize( + constraint: SolverAtom, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, + ) -> Option>> { + constraint.assert_nonnull(); + tracing::trace!( + "constraint: {}, {constraint:#?}", + constraint.to_range_string(false, analyzer, arena).s + ); + let zero_part = AtomOrPart::Part(Elem::from(Concrete::from(U256::zero()))); + let false_part = AtomOrPart::Part(Elem::from(Concrete::from(false))); + let true_part = AtomOrPart::Part(Elem::from(Concrete::from(true))); + + match ( + *constraint.lhs == true_part || *constraint.lhs == false_part, + *constraint.rhs == true_part || *constraint.rhs == false_part, + ) { + (true, true) => { + if constraint.lhs == constraint.rhs { + // true == true || false == false, just disregard this atom + return Some(vec![vec![]]); + } else { + panic!("During normalization of a DL atom, got true == false"); + } + } + (true, false) => { + // lhs is just a boolean, drop it + return Self::dl_atom_normalize(constraint.rhs.as_solver_atom(), analyzer, arena); + } + (false, true) => { + // rhs is just a boolean, drop it + return Self::dl_atom_normalize(constraint.lhs.as_solver_atom(), analyzer, arena); + } + _ => {} + } + + // x <==> y + // x + x + y => AtomOrPart::Atom(Atom { lhs x, op: +, rhs: AtomOrPart::Atom(Atom { lhs: x, op: +, rhs: y})}) + let lhs_symbs = constraint.lhs.dependent_on(analyzer, arena); + let rhs_symbs = constraint.rhs.dependent_on(analyzer, arena); + let constraint = match (!lhs_symbs.is_empty(), !rhs_symbs.is_empty()) { + (true, true) => { + // TODO: in theory could have x x + y + // which should simplify to 0 y + constraint + } + (true, false) => { + // check for two vars on lhs + if lhs_symbs.len() > 1 { + // two or more + let lhs = constraint.lhs.expect_atom(); + match lhs.op { + RangeOp::Sub(_) => { + // x - y z ==> x z + y + SolverAtom { + ty: OpType::DL, + lhs: lhs.lhs, + op: constraint.op, + rhs: Rc::new(AtomOrPart::Atom(SolverAtom { + ty: OpType::DL, + lhs: constraint.rhs, + op: RangeOp::Add(true), + rhs: lhs.rhs, + })), + } + } + RangeOp::Add(_) => { + // x + y z ==> x z - y + SolverAtom { + ty: OpType::DL, + lhs: lhs.lhs, + op: constraint.op, + rhs: Rc::new(AtomOrPart::Atom(SolverAtom { + ty: OpType::DL, + lhs: constraint.rhs, + op: RangeOp::Sub(true), + rhs: lhs.rhs, + })), + } + } + _ => constraint, + } + } else { + // good + constraint + } + } + 
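+            // symmetric to the arm above: a constant lhs with two symbols on the rhs is rebalanced (for +/- expressions) so each side keeps at most one symbolic term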
(false, true) => { + // check for two vars on lhs + if rhs_symbs.len() > 1 { + // two or more + let rhs = constraint.rhs.expect_atom(); + match rhs.op { + RangeOp::Sub(_) => { + // z x - y ==> z + y x + SolverAtom { + ty: OpType::DL, + lhs: Rc::new(AtomOrPart::Atom(SolverAtom { + ty: OpType::DL, + lhs: constraint.lhs, + op: RangeOp::Add(true), + rhs: rhs.rhs, + })), + op: constraint.op, + rhs: rhs.lhs, + } + } + RangeOp::Add(_) => { + // z x + y ==> z - y x + SolverAtom { + ty: OpType::DL, + lhs: Rc::new(AtomOrPart::Atom(SolverAtom { + ty: OpType::DL, + lhs: constraint.lhs, + op: RangeOp::Sub(true), + rhs: rhs.rhs, + })), + op: constraint.op, + rhs: rhs.lhs, + } + } + _ => constraint, + } + } else { + // good + constraint + } + } + _ => constraint, + }; + + // println!("normalizing: {}", constraint.into_expr_elem()); + let res = match constraint.op { + RangeOp::Eq => { + // convert `x == y` into `x <= y - 0 || y <= x - 0` + let mut res = Self::dl_atom_normalize( + SolverAtom { + ty: OpType::DL, + lhs: constraint.lhs.clone(), + op: RangeOp::Lte, + rhs: Rc::new(AtomOrPart::Atom(SolverAtom { + ty: OpType::DL, + lhs: constraint.rhs.clone(), + op: RangeOp::Sub(true), + rhs: Rc::new(zero_part.clone()), + })), + }, + analyzer, + arena, + )?; + + assert!(res.len() == 1); + res[0].extend( + Self::dl_atom_normalize( + SolverAtom { + ty: OpType::DL, + lhs: constraint.rhs, + op: RangeOp::Lte, + rhs: Rc::new(AtomOrPart::Atom(SolverAtom { + ty: OpType::DL, + lhs: constraint.lhs, + op: RangeOp::Sub(true), + rhs: Rc::new(zero_part.clone()), + })), + }, + analyzer, + arena, + )? + .remove(0), + ); + Some(res) + } + RangeOp::Neq => { + // convert `x != y` into `x <= y - 1 || y <= x - 1` + let mut res = Self::dl_atom_normalize( + SolverAtom { + ty: OpType::DL, + lhs: constraint.lhs.clone(), + op: RangeOp::Lte, + rhs: Rc::new(AtomOrPart::Atom(SolverAtom { + ty: OpType::DL, + lhs: constraint.rhs.clone(), + op: RangeOp::Sub(true), + rhs: Rc::new(AtomOrPart::Part(Elem::from(Concrete::from(U256::from( + 1, + ))))), + })), + }, + analyzer, + arena, + )?; + + assert!(res.len() == 1); + + res[0].extend( + Self::dl_atom_normalize( + SolverAtom { + ty: OpType::DL, + lhs: constraint.rhs, + op: RangeOp::Lte, + rhs: Rc::new(AtomOrPart::Atom(SolverAtom { + ty: OpType::DL, + lhs: constraint.lhs, + op: RangeOp::Sub(true), + rhs: Rc::new(AtomOrPart::Part(Elem::from(Concrete::from( + U256::from(1), + )))), + })), + }, + analyzer, + arena, + )? 
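+                    // as in the Eq arm, the recursive Lte normalization is expected to produce exactly one path, so index 0 is taken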
+ .remove(0), + ); + Some(res) + } + RangeOp::Lt => { + // x < y + // x <= y - 1 + let new_rhs = constraint + .rhs + .into_elem() + .wrapping_sub(Elem::from(Concrete::from(U256::one()))) + .atoms_or_part(analyzer, arena); + Self::dl_atom_normalize( + SolverAtom { + ty: OpType::DL, + lhs: constraint.lhs, + op: RangeOp::Lte, + rhs: Rc::new(new_rhs), + }, + analyzer, + arena, + ) + } + RangeOp::Gte => Self::dl_atom_normalize( + SolverAtom { + ty: OpType::DL, + lhs: constraint.rhs, + op: RangeOp::Lte, + rhs: constraint.lhs, + }, + analyzer, + arena, + ), + RangeOp::Gt => Self::dl_atom_normalize( + SolverAtom { + ty: OpType::DL, + lhs: constraint.rhs, + op: RangeOp::Lt, + rhs: constraint.lhs, + }, + analyzer, + arena, + ), + RangeOp::Or => { + let mut res = + Self::dl_atom_normalize(constraint.lhs.as_solver_atom(), analyzer, arena)?; + res.extend(Self::dl_atom_normalize( + constraint.rhs.as_solver_atom(), + analyzer, + arena, + )?); + Some(res) + } + RangeOp::Lte => { + if constraint.lhs.is_atom() { + // some form of (x + k <= y) + let lhs_atom = constraint.lhs.expect_atom(); + let atom_lhs_is_symb = !lhs_atom.lhs.dependent_on(analyzer, arena).is_empty(); + let atom_rhs_is_symb = !lhs_atom.rhs.dependent_on(analyzer, arena).is_empty(); + + match lhs_atom.op { + RangeOp::Sub(_) => { + match (atom_lhs_is_symb, atom_rhs_is_symb) { + (false, _) => { + // (k - x <= y) + // ==> (-k + x >= y) + // ==> (y <= x - k) + Self::dl_atom_normalize( + SolverAtom { + ty: constraint.ty, + lhs: constraint.rhs, + op: constraint.op, + rhs: Rc::new(AtomOrPart::Atom(SolverAtom { + ty: constraint.ty, + lhs: lhs_atom.rhs, + op: RangeOp::Sub(true), + rhs: lhs_atom.lhs, + })), + }, + analyzer, + arena, + ) + } + _ => { + // (x - k <= y) + // ==> (x <= y + k) + Self::dl_atom_normalize( + SolverAtom { + ty: constraint.ty, + lhs: lhs_atom.lhs, + op: constraint.op, + rhs: Rc::new(AtomOrPart::Atom(SolverAtom { + ty: constraint.ty, + lhs: constraint.rhs, + op: RangeOp::Add(true), + rhs: lhs_atom.rhs, + })), + }, + analyzer, + arena, + ) + } + } + } + RangeOp::Add(_) => { + if lhs_atom.lhs == zero_part.clone().into() { + Self::dl_atom_normalize( + SolverAtom { + ty: constraint.ty, + lhs: lhs_atom.rhs, + op: constraint.op, + rhs: constraint.rhs, + }, + analyzer, + arena, + ) + } else if lhs_atom.rhs == zero_part.into() { + Self::dl_atom_normalize( + SolverAtom { + ty: constraint.ty, + lhs: lhs_atom.lhs, + op: constraint.op, + rhs: constraint.rhs, + }, + analyzer, + arena, + ) + } else if lhs_atom.lhs.dependent_on(analyzer, arena).is_empty() { + // (k + x <= y) + // ==> (x <= y - k) + Self::dl_atom_normalize( + SolverAtom { + ty: constraint.ty, + lhs: lhs_atom.rhs, + op: constraint.op, + rhs: Rc::new(AtomOrPart::Atom(SolverAtom { + ty: constraint.ty, + lhs: constraint.rhs, + op: RangeOp::Sub(true), + rhs: lhs_atom.lhs, + })), + }, + analyzer, + arena, + ) + } else { + // (x + k <= y) + // ==> (x <= y - k) + Self::dl_atom_normalize( + SolverAtom { + ty: constraint.ty, + lhs: lhs_atom.lhs, + op: constraint.op, + rhs: Rc::new(AtomOrPart::Atom(SolverAtom { + ty: constraint.ty, + lhs: constraint.rhs, + op: RangeOp::Sub(true), + rhs: lhs_atom.rhs, + })), + }, + analyzer, + arena, + ) + } + } + RangeOp::And => { + let mut res = Self::dl_atom_normalize( + SolverAtom { + ty: constraint.ty, + lhs: lhs_atom.lhs, + op: constraint.op, + rhs: constraint.rhs.clone(), + }, + analyzer, + arena, + )?; + + let mut rhs = Self::dl_atom_normalize( + SolverAtom { + ty: constraint.ty, + lhs: lhs_atom.rhs, + op: constraint.op, + rhs: 
constraint.rhs.clone(), + }, + analyzer, + arena, + )?; + let res = match (res.len() > 1, rhs.len() > 1) { + (true, true) => { + res.extend(rhs); + res + } + (true, false) => { + res.iter_mut().for_each(|path| path.extend(rhs[0].clone())); + res + } + (false, true) => { + rhs.iter_mut().for_each(|path| path.extend(res[0].clone())); + rhs + } + (false, false) => { + res[0].extend(rhs.remove(0)); + res + } + }; + Some(res) + } + other => panic!("other op: {}, {constraint:#?}", other.to_string()), + } + } else if constraint.rhs.is_part() { + let new_rhs = AtomOrPart::Atom(SolverAtom { + ty: OpType::DL, + lhs: constraint.rhs, + op: RangeOp::Sub(true), + rhs: Rc::new(AtomOrPart::Part(Elem::from(Concrete::from(U256::zero())))), + }); + + Self::dl_atom_normalize( + SolverAtom { + ty: constraint.ty, + lhs: constraint.lhs, + op: constraint.op, + rhs: Rc::new(new_rhs), + }, + analyzer, + arena, + ) + } else { + Some(vec![vec![constraint]]) + } + } + other => { + tracing::trace!( + "unable to dl normalize -- other: {}, {}", + other.to_string(), + constraint.into_expr_elem() + ); + None + } + }; + res + } +} + +pub fn find_negative_cycle( + g: &DLGraph, + source: NodeIndex, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, +) -> Option>> { + let ix = |i| g.to_index(i); + let mut path = Vec::>::new(); + + // Step 1: initialize and relax + let (distance, predecessor) = bellman_ford_initialize_relax(g, source, analyzer, arena); + + // Step 2: Check for negative weight cycle + 'outer: for i in g.node_identifiers() { + for edge in g.edges(i) { + let j = edge.target(); + let w = edge.weight(); + let dist = (distance[ix(i)].clone() + w.into_elem()) + .maximize(analyzer, arena) + .unwrap(); + let lt = matches!( + dist.range_ord(&distance[ix(j)], arena), + Some(std::cmp::Ordering::Less) + ); + if lt { + // Step 3: negative cycle found + let start = j; + let mut node = start; + let mut visited = g.visit_map(); + // Go backward in the predecessor chain + loop { + let ancestor = match predecessor[ix(node)] { + Some(predecessor_node) => predecessor_node, + None => node, // no predecessor, self cycle + }; + // We have only 2 ways to find the cycle and break the loop: + // 1. start is reached + if ancestor == start { + path.push(ancestor); + break; + } + // 2. some node was reached twice + else if visited.is_visited(&ancestor) { + // Drop any node in path that is before the first ancestor + let pos = path + .iter() + .position(|&p| p == ancestor) + .expect("we should always have a position"); + path = path[pos..path.len()].to_vec(); + + break; + } + + // None of the above, some middle path node + path.push(ancestor); + visited.visit(ancestor); + node = ancestor; + } + // We are done here + break 'outer; + } + } + } + if !path.is_empty() { + // Users will probably need to follow the path of the negative cycle + // so it should be in the reverse order than it was found by the algorithm. + path.reverse(); + Some(path) + } else { + None + } +} + +// Perform Step 1 and Step 2 of the Bellman-Ford algorithm. 
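+// Step 1 initializes every distance to the `U256::MAX` sentinel ("infinity") and the
+// source distance to zero. Step 2 relaxes every edge up to |V| - 1 times, taking
+// d[j] = min(d[j], d[i] + w(i, j)) and recording the predecessor that produced the
+// improvement, with an early exit once a full pass makes no update. If an edge can
+// still be relaxed afterwards, the graph contains a negative cycle, which
+// `find_negative_cycle` above reconstructs by walking the predecessor chain.
+// For intuition: the difference constraints `x - y <= -1` and `y - x <= 0` sum to
+// `0 <= -1`, a contradiction, and in the constraint graph that contradiction appears
+// as a cycle whose edge weights total -1.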
+#[inline(always)] +fn bellman_ford_initialize_relax( + g: &DLGraph, + source: NodeIndex, + analyzer: &mut impl GraphBackend, + arena: &mut RangeArena>, +) -> (Vec>, Vec>>) { + // Step 1: initialize graph + let mut predecessor = vec![None; g.node_bound()]; + let mut distance = vec![Elem::from(Concrete::from(U256::MAX)); g.node_bound()]; + let ix = |i| g.to_index(i); + distance[ix(source)] = Elem::from(Concrete::from(U256::zero())); + + // Step 2: relax edges repeatedly + for _ in 1..g.node_count() { + let mut did_update = false; + for i in g.node_identifiers() { + for edge in g.edges(i) { + let j = edge.target(); + let w = edge.weight(); + let dist = (distance[ix(i)].clone() + w.into_elem()) + .maximize(analyzer, arena) + .unwrap(); + let lt = matches!( + dist.range_ord(&distance[ix(j)], arena), + Some(std::cmp::Ordering::Less) + ); + if lt { + distance[ix(j)] = dist; + predecessor[ix(j)] = Some(i); + did_update = true; + } + } + } + if !did_update { + break; + } + } + (distance, predecessor) +} diff --git a/crates/graph/src/solvers/mod.rs b/crates/graph/src/solvers/mod.rs new file mode 100644 index 00000000..9711d3e3 --- /dev/null +++ b/crates/graph/src/solvers/mod.rs @@ -0,0 +1,6 @@ +mod atoms; +mod brute; +pub mod dl; + +pub use atoms::*; +pub use brute::*; diff --git a/crates/graph/src/var_type.rs b/crates/graph/src/var_type.rs new file mode 100644 index 00000000..23112944 --- /dev/null +++ b/crates/graph/src/var_type.rs @@ -0,0 +1,821 @@ +use crate::{ + nodes::{ + BuiltInNode, Builtin, Concrete, ConcreteNode, ContractNode, EnumNode, FunctionNode, + StructNode, TyNode, + }, + range::{ + elem::{Elem, RangeElem}, + Range, SolcRange, + }, + AnalyzerBackend, AsDotStr, GraphBackend, GraphError, Node, +}; +use shared::RangeArena; + +use shared::NodeIdx; + +use ethers_core::types::{Address, U256}; + +#[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)] +pub enum VarType { + User(TypeNode, Option), + BuiltIn(BuiltInNode, Option), + Concrete(ConcreteNode), +} + +impl AsDotStr for VarType { + fn as_dot_str( + &self, + analyzer: &impl GraphBackend, + _arena: &mut RangeArena>, + ) -> String { + self.as_string(analyzer).unwrap() + } +} + +impl VarType { + pub fn set_range(&mut self, new_range: SolcRange) -> Result<(), GraphError> { + match self { + VarType::User(TypeNode::Enum(_), ref mut r) + | VarType::User(TypeNode::Contract(_), ref mut r) + | VarType::User(TypeNode::Ty(_), ref mut r) + | VarType::BuiltIn(_, ref mut r) => { + *r = Some(new_range); + Ok(()) + } + _ => Err(GraphError::NodeConfusion( + "This type cannot have a range".to_string(), + )), + } + } + + pub fn take_range(&mut self) -> Option { + match self { + VarType::User(TypeNode::Enum(_), ref mut r) + | VarType::User(TypeNode::Contract(_), ref mut r) + | VarType::User(TypeNode::Ty(_), ref mut r) + | VarType::BuiltIn(_, ref mut r) => r.take(), + _ => None, + } + } + + pub fn possible_builtins_from_ty_inf(&self, analyzer: &impl GraphBackend) -> Vec { + match self { + Self::BuiltIn(bn, _) => bn + .underlying(analyzer) + .unwrap() + .possible_builtins_from_ty_inf(), + Self::Concrete(c) => c + .underlying(analyzer) + .unwrap() + .possible_builtins_from_ty_inf(), + _ => vec![], + } + } + + pub fn ty_idx(&self) -> NodeIdx { + match self { + Self::User(ty_node, _) => (*ty_node).into(), + Self::BuiltIn(bn, _) => (*bn).into(), + Self::Concrete(c) => (*c).into(), + } + } + + pub fn empty_ty(&self) -> VarType { + match self { + Self::User(ty_node, _) => Self::User(*ty_node, None), + Self::BuiltIn(bn, _) => Self::BuiltIn(*bn, None), + 
Self::Concrete(c) => Self::Concrete(*c), + } + } + + pub fn is_dyn_builtin(&self, analyzer: &impl GraphBackend) -> Result { + match self { + Self::BuiltIn(node, _) => node.is_dyn(analyzer), + _ => Ok(false), + } + } + + pub fn unresolved_as_resolved(&self, analyzer: &impl GraphBackend) -> Result { + match self { + VarType::User(TypeNode::Unresolved(n), _) => match analyzer.node(*n) { + Node::Unresolved(ident) => Err(GraphError::NodeConfusion(format!( + "Expected the type \"{}\" to be resolved by now", + ident.name + ))), + _ => { + if let Some(ty) = VarType::try_from_idx(analyzer, *n) { + Ok(ty) + } else { + Err(GraphError::NodeConfusion( + "Tried to type a non-typeable element".to_string(), + )) + } + } + }, + _ => Ok(self.clone()), + } + } + + pub fn resolve_unresolved(&mut self, analyzer: &impl GraphBackend) -> Result<(), GraphError> { + match self { + VarType::User(TypeNode::Unresolved(n), _) => match analyzer.node(*n) { + Node::Unresolved(ident) => Err(GraphError::NodeConfusion(format!( + "Expected the type \"{}\" to be resolved by now", + ident.name + ))), + _ => { + if let Some(ty) = VarType::try_from_idx(analyzer, *n) { + *self = ty; + Ok(()) + } else { + Err(GraphError::NodeConfusion( + "Tried to type a non-typeable element".to_string(), + )) + } + } + }, + _ => Ok(()), + } + } + + pub fn concrete_to_builtin( + &mut self, + analyzer: &mut impl AnalyzerBackend, + ) -> Result<(), GraphError> { + if let VarType::Concrete(cnode) = self { + let c = cnode.underlying(analyzer)?.clone(); + match c { + Concrete::Uint(ref size, _) => { + let new_ty = VarType::BuiltIn( + BuiltInNode::from(analyzer.builtin_or_add(Builtin::Uint(*size))), + SolcRange::from(c), + ); + *self = new_ty; + } + Concrete::Int(ref size, _) => { + let new_ty = VarType::BuiltIn( + BuiltInNode::from(analyzer.builtin_or_add(Builtin::Int(*size))), + SolcRange::from(c), + ); + *self = new_ty; + } + Concrete::Bool(_) => { + let new_ty = VarType::BuiltIn( + BuiltInNode::from(analyzer.builtin_or_add(Builtin::Bool)), + SolcRange::from(c), + ); + *self = new_ty; + } + Concrete::Address(_) => { + let new_ty = VarType::BuiltIn( + BuiltInNode::from(analyzer.builtin_or_add(Builtin::Address)), + SolcRange::from(c), + ); + *self = new_ty; + } + Concrete::Bytes(ref s, _) => { + let new_ty = VarType::BuiltIn( + BuiltInNode::from(analyzer.builtin_or_add(Builtin::Bytes(*s))), + SolcRange::from(c), + ); + *self = new_ty; + } + Concrete::String(_) => { + let new_ty = VarType::BuiltIn( + BuiltInNode::from(analyzer.builtin_or_add(Builtin::String)), + SolcRange::from(c), + ); + *self = new_ty; + } + Concrete::DynBytes(_) => { + let new_ty = VarType::BuiltIn( + BuiltInNode::from(analyzer.builtin_or_add(Builtin::DynamicBytes)), + SolcRange::from(c), + ); + *self = new_ty; + } + // Concrete::Array(Vec), + _ => {} + } + } + Ok(()) + } + + pub fn try_from_idx(analyzer: &impl GraphBackend, node: NodeIdx) -> Option { + // get node, check if typeable and convert idx into vartype + match analyzer.node(node) { + Node::VarType(a) => Some(a.clone()), + Node::Builtin(b) => Some(VarType::BuiltIn( + node.into(), + SolcRange::try_from_builtin(b), + )), + Node::Contract(_) => Some(VarType::User( + TypeNode::Contract(node.into()), + SolcRange::try_from_builtin(&Builtin::Address), + )), + Node::Function(_) => Some(VarType::User(TypeNode::Func(node.into()), None)), + Node::Struct(_) => Some(VarType::User(TypeNode::Struct(node.into()), None)), + Node::Enum(enu) => { + let variants = enu.variants(); + let range = if !variants.is_empty() { + let min = 
Concrete::from(U256::zero()).into(); + let max = Concrete::from(U256::from(variants.len() - 1)).into(); + Some(SolcRange::new(min, max, vec![])) + } else { + None + }; + Some(VarType::User(TypeNode::Enum(node.into()), range)) + } + Node::Unresolved(_n) => Some(VarType::User(TypeNode::Unresolved(node), None)), + Node::Concrete(_) => Some(VarType::Concrete(node.into())), + Node::ContextVar(cvar) => Some(cvar.ty.clone()), + Node::Var(var) => VarType::try_from_idx(analyzer, var.ty), + Node::Ty(ty) => { + let range = SolcRange::try_from_builtin( + BuiltInNode::from(ty.ty).underlying(analyzer).unwrap(), + )?; + Some(VarType::User(TypeNode::Ty(node.into()), Some(range))) + } + Node::FunctionParam(inner) => VarType::try_from_idx(analyzer, inner.ty), + Node::Error(..) + | Node::ContextFork + | Node::FunctionCall + | Node::FunctionReturn(..) + | Node::ErrorParam(..) + | Node::Field(..) + | Node::SourceUnitPart(..) + | Node::SourceUnit(..) + | Node::Entry + | Node::Context(..) + | Node::Msg(_) + | Node::Block(_) => None, + } + } + + pub fn requires_input(&self, analyzer: &impl GraphBackend) -> Result { + match self { + VarType::BuiltIn(bn, _) => Ok(bn.underlying(analyzer)?.requires_input()), + _ => Ok(false), + } + } + + pub fn try_cast( + self, + other: &Self, + analyzer: &mut impl AnalyzerBackend, + ) -> Result, GraphError> { + match (self, other) { + (l, Self::User(TypeNode::Ty(ty), o_r)) => { + let t = Self::BuiltIn(BuiltInNode::from(ty.underlying(analyzer)?.ty), o_r.clone()); + l.try_cast(&t, analyzer) + } + (Self::BuiltIn(from_bn, sr), Self::User(TypeNode::Contract(cn), _)) => { + match from_bn.underlying(analyzer)? { + Builtin::Address | Builtin::AddressPayable | Builtin::Payable => { + Ok(Some(Self::User(TypeNode::Contract(*cn), sr))) + } + _ => Ok(None), + } + } + (Self::User(TypeNode::Contract(_cn), sr), Self::BuiltIn(to_bn, _)) => { + match to_bn.underlying(analyzer)? { + Builtin::Address | Builtin::AddressPayable | Builtin::Payable => { + Ok(Some(Self::BuiltIn(*to_bn, sr))) + } + _ => Ok(None), + } + } + (Self::BuiltIn(from_bn, sr), Self::BuiltIn(to_bn, _)) => { + if from_bn.implicitly_castable_to(to_bn, analyzer)? 
{ + Ok(Some(Self::BuiltIn(*to_bn, sr))) + } else { + Ok(None) + } + } + (Self::Concrete(from_c), Self::BuiltIn(to_bn, _)) => { + let c = from_c.underlying(analyzer)?.clone(); + let b = to_bn.underlying(analyzer)?; + if let Some(casted) = c.cast(b.clone()) { + let node = analyzer.add_node(Node::Concrete(casted)); + Ok(Some(Self::Concrete(node.into()))) + } else { + Ok(None) + } + } + (Self::Concrete(from_c), Self::Concrete(to_c)) => { + let c = from_c.underlying(analyzer)?.clone(); + let to_c = to_c.underlying(analyzer)?; + if let Some(casted) = c.cast_from(to_c) { + let node = analyzer.add_node(Node::Concrete(casted)); + Ok(Some(Self::Concrete(node.into()))) + } else { + Ok(None) + } + } + _ => Ok(None), + } + } + + pub fn try_literal_cast( + self, + other: &Self, + analyzer: &mut impl AnalyzerBackend, + ) -> Result, GraphError> { + match (self, other) { + (Self::BuiltIn(from_bn, sr), Self::User(TypeNode::Ty(ty), _)) => { + if ty.underlying(analyzer)?.ty == from_bn.into() { + Ok(Some(Self::User(TypeNode::Ty(*ty), sr))) + } else { + Ok(None) + } + } + (Self::Concrete(from_c), Self::User(TypeNode::Ty(ty), _)) => { + let concrete_underlying = from_c.underlying(analyzer)?.clone(); + let as_bn = analyzer.builtin_or_add(concrete_underlying.as_builtin()); + if ty.underlying(analyzer)?.ty == as_bn { + Ok(Some(Self::User( + TypeNode::Ty(*ty), + SolcRange::from(concrete_underlying), + ))) + } else { + Ok(None) + } + } + (Self::BuiltIn(from_bn, sr), Self::BuiltIn(to_bn, _)) => { + if from_bn.implicitly_castable_to(to_bn, analyzer)? { + Ok(Some(Self::BuiltIn(*to_bn, sr))) + } else { + Ok(None) + } + } + (Self::Concrete(from_c), Self::BuiltIn(to_bn, _)) => { + let c = from_c.underlying(analyzer)?.clone(); + let b = to_bn.underlying(analyzer)?; + if let Some(casted) = c.literal_cast(b.clone()) { + let node = analyzer.add_node(Node::Concrete(casted)); + Ok(Some(Self::Concrete(node.into()))) + } else { + Ok(None) + } + } + (Self::Concrete(from_c), Self::Concrete(to_c)) => { + let c = from_c.underlying(analyzer)?.clone(); + let to_c = to_c.underlying(analyzer)?; + if let Some(casted) = c.literal_cast_from(to_c) { + let node = analyzer.add_node(Node::Concrete(casted)); + Ok(Some(Self::Concrete(node.into()))) + } else { + Ok(None) + } + } + _ => Ok(None), + } + } + + pub fn implicitly_castable_to( + &self, + other: &Self, + analyzer: &impl GraphBackend, + ) -> Result { + match (self, other) { + (Self::BuiltIn(from_bn, _), Self::BuiltIn(to_bn, _)) => { + from_bn.implicitly_castable_to(to_bn, analyzer) + } + (Self::Concrete(from_c), Self::BuiltIn(to_bn, _)) => { + let to = to_bn.underlying(analyzer)?; + Ok(from_c + .underlying(analyzer)? 
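+                // view the concrete value as its builtin type, then defer to the
+                // builtin-to-builtin implicit castability rules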
+ .as_builtin() + .implicitly_castable_to(to)) + } + _ => Ok(false), + } + } + + pub fn max_size(&self, analyzer: &mut impl AnalyzerBackend) -> Result { + match self { + Self::BuiltIn(from_bn, _r) => { + let bn = from_bn.max_size(analyzer)?; + Ok(Self::BuiltIn( + bn, + SolcRange::try_from_builtin(bn.underlying(analyzer)?), + )) + } + Self::Concrete(from_c) => Ok(Self::Concrete(from_c.max_size(analyzer)?)), + _ => Ok(self.clone()), + } + } + + pub fn range(&self, analyzer: &impl GraphBackend) -> Result, GraphError> { + match self { + Self::User(_, Some(range)) => Ok(Some(range.clone())), + Self::BuiltIn(_, Some(range)) => Ok(Some(range.clone())), + Self::BuiltIn(bn, None) => Ok(SolcRange::try_from_builtin(bn.underlying(analyzer)?)), + Self::Concrete(cnode) => Ok(SolcRange::from(cnode.underlying(analyzer)?.clone())), + _ => Ok(None), + } + } + + pub fn ref_range( + &self, + analyzer: &impl GraphBackend, + ) -> Result>, GraphError> { + match self { + Self::User(_, Some(range)) => Ok(Some(std::borrow::Cow::Borrowed(range))), + Self::BuiltIn(_, Some(range)) => Ok(Some(std::borrow::Cow::Borrowed(range))), + Self::BuiltIn(bn, None) => { + if let Some(r) = SolcRange::try_from_builtin(bn.underlying(analyzer)?) { + Ok(Some(std::borrow::Cow::Owned(r))) + } else { + Ok(None) + } + } + Self::Concrete(cnode) => { + if let Some(r) = SolcRange::from(cnode.underlying(analyzer)?.clone()) { + Ok(Some(std::borrow::Cow::Owned(r))) + } else { + Ok(None) + } + } + _ => Ok(None), + } + } + + pub fn delete_range_result( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + match self { + Self::User(TypeNode::Contract(_), _) => { + let zero = Concrete::Address(Address::from_slice(&[0x00; 20])); + Ok(Some(SolcRange::new( + zero.clone().into(), + zero.into(), + vec![], + ))) + } + Self::User(TypeNode::Enum(enum_node), _) => { + if let Some(first) = enum_node.variants(analyzer)?.first() { + let zero = Concrete::from(first.clone()); + Ok(Some(SolcRange::new( + zero.clone().into(), + zero.into(), + vec![], + ))) + } else { + Ok(None) + } + } + Self::User(TypeNode::Ty(ty), _) => { + BuiltInNode::from(ty.underlying(analyzer)?.ty).zero_range(analyzer) + } + Self::BuiltIn(bn, None) => bn.zero_range(analyzer), + Self::Concrete(cnode) => Ok(cnode.underlying(analyzer)?.as_builtin().zero_range()), + _ => Ok(None), + } + } + + pub fn default_range( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + match self { + Self::User(TypeNode::Contract(_), _) => { + Ok(SolcRange::try_from_builtin(&Builtin::Address)) + } + Self::User(TypeNode::Enum(enu), _) => enu.maybe_default_range(analyzer), + Self::User(TypeNode::Ty(ty), _) => Ok(SolcRange::try_from_builtin( + BuiltInNode::from(ty.underlying(analyzer)?.ty).underlying(analyzer)?, + )), + Self::BuiltIn(bn, _) => Ok(SolcRange::try_from_builtin(bn.underlying(analyzer)?)), + Self::Concrete(cnode) => Ok(SolcRange::from(cnode.underlying(analyzer)?.clone())), + _ => Ok(None), + } + } + + pub fn is_const( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result { + match self { + Self::Concrete(_) => Ok(true), + Self::User(TypeNode::Func(_), _) => Ok(false), + _ => { + if let Some(range) = self.ref_range(analyzer)? 
{ + let min = range.evaled_range_min(analyzer, arena)?; + let max = range.evaled_range_max(analyzer, arena)?; + Ok(min.range_eq(&max, arena)) + } else { + Ok(false) + } + } + } + } + + pub fn func_node(&self, _analyzer: &impl GraphBackend) -> Option { + match self { + Self::User(TypeNode::Func(func_node), _) => Some(*func_node), + _ => None, + } + } + + pub fn evaled_range( + &self, + analyzer: &impl GraphBackend, + arena: &mut RangeArena>, + ) -> Result, Elem)>, GraphError> { + Ok(self.ref_range(analyzer)?.map(|range| { + ( + range.evaled_range_min(analyzer, arena).unwrap(), + range.evaled_range_max(analyzer, arena).unwrap(), + ) + })) + } + + // pub fn try_match_index_dynamic_ty( + // &self, + // index: ContextVarNode, + // analyzer: &mut (impl GraphBackend + AnalyzerBackend), + // ) -> Result, GraphError> { + // match self { + // Self::BuiltIn(_node, None) => Ok(None), + // Self::BuiltIn(node, Some(r)) => { + // if let Builtin::Bytes(size) = node.underlying(analyzer)? { + // if r.is_const(analyzer)? && index.is_const(analyzer)? { + // let Some(min) = r.evaled_range_min(analyzer, arena)?.maybe_concrete() else { + // return Ok(None); + // }; + // let Concrete::Bytes(_, val) = min.val else { + // return Ok(None); + // }; + // let Some(idx) = index.evaled_range_min(analyzer, arena)?.unwrap().maybe_concrete() + // else { + // return Ok(None); + // }; + // let Concrete::Uint(_, idx) = idx.val else { + // return Ok(None); + // }; + // if idx.low_u32() < (*size as u32) { + // let mut h = H256::default(); + // h.0[0] = val.0[idx.low_u32() as usize]; + // let ret_val = Concrete::Bytes(1, h); + // let node = analyzer.add_node(Node::Concrete(ret_val)); + // return Ok(Some(node)); + // } + // } + // Ok(None) + // } else { + // // check if the index exists as a key + // let min = r.range_min(); + // if let Some(map) = min.dyn_map() { + // let name = index.name(analyzer)?; + // let is_const = index.is_const(analyzer)?; + // if let Some((_k, val)) = map.iter().find(|(k, _v)| match k { + // Elem::Reference(Reference { idx, .. }) => match analyzer.node(*idx) { + // Node::ContextVar(_) => { + // let cvar = ContextVarNode::from(*idx); + // cvar.name(analyzer).unwrap() == name + // } + // _ => false, + // }, + // c @ Elem::Concrete(..) if is_const => { + // let index_val = index.evaled_range_min(analyzer, arena).unwrap().unwrap(); + // index_val.range_eq(c) + // } + // _ => false, + // }) { + // if let Some(idx) = val.0.node_idx() { + // return Ok(idx.into()); + // } else if let Some(c) = val.0.concrete() { + // let cnode = analyzer.add_node(Node::Concrete(c)); + // return Ok(cnode.into()); + // } + // } + // } + // Ok(None) + // } + // } + // Self::Concrete(node) => { + // if index.is_const(analyzer)? { + // let idx = index + // .evaled_range_min(analyzer, arena) + // .unwrap() + // .unwrap() + // .concrete() + // .unwrap() + // .uint_val() + // .unwrap(); + // match node.underlying(analyzer)? 
{ + // Concrete::Bytes(size, val) => { + // if idx.low_u32() < (*size as u32) { + // let mut h = H256::default(); + // h.0[0] = val.0[idx.low_u32() as usize]; + // let ret_val = Concrete::Bytes(1, h); + // let node = analyzer.add_node(Node::Concrete(ret_val)); + // return Ok(Some(node)); + // } + // } + // Concrete::DynBytes(elems) => { + // if idx.low_u32() < (elems.len() as u32) { + // let mut h = H256::default(); + // h.0[0] = elems[idx.low_u32() as usize]; + // let ret_val = Concrete::Bytes(1, h); + // let node = analyzer.add_node(Node::Concrete(ret_val)); + // return Ok(Some(node)); + // } + // } + // Concrete::String(st) => { + // if idx.low_u32() < (st.len() as u32) { + // let mut h = H256::default(); + // h.0[0] = st.as_bytes()[idx.low_u32() as usize]; + // let ret_val = Concrete::Bytes(1, h); + // let node = analyzer.add_node(Node::Concrete(ret_val)); + // return Ok(Some(node)); + // } + // } + // Concrete::Array(elems) => { + // if idx.low_u32() < (elems.len() as u32) { + // let elem = &elems[idx.low_u32() as usize]; + // let node = analyzer.add_node(Node::Concrete(elem.clone())); + // return Ok(Some(node)); + // } + // } + // _ => {} + // } + // } + // Ok(None) + // } + // _ => Ok(None), + // } + // } + + pub fn dynamic_underlying_ty( + &self, + analyzer: &mut impl AnalyzerBackend, + ) -> Result { + match self { + Self::BuiltIn(node, _) => node.dynamic_underlying_ty(analyzer), + Self::Concrete(node) => node.dynamic_underlying_ty(analyzer), + e => Err(GraphError::NodeConfusion(format!( + "Node type confusion: expected node to be Builtin but it was: {e:?}" + ))), + } + } + + pub fn is_mapping(&self, analyzer: &impl GraphBackend) -> Result { + match self { + Self::BuiltIn(node, _) => Ok(node.is_mapping(analyzer)?), + _ => Ok(false), + } + } + + pub fn is_sized_array(&self, analyzer: &impl GraphBackend) -> Result { + match self { + Self::BuiltIn(node, _) => node.is_sized_array(analyzer), + Self::Concrete(node) => node.is_sized_array(analyzer), + _ => Ok(false), + } + } + + pub fn maybe_array_size( + &self, + analyzer: &impl GraphBackend, + ) -> Result, GraphError> { + match self { + Self::BuiltIn(node, _) => node.maybe_array_size(analyzer), + Self::Concrete(node) => node.maybe_array_size(analyzer), + _ => Ok(None), + } + } + + pub fn is_dyn(&self, analyzer: &impl GraphBackend) -> Result { + match self { + Self::BuiltIn(node, _) => Ok(node.is_dyn(analyzer)?), + Self::Concrete(node) => Ok(node.is_dyn(analyzer)?), + _ => Ok(false), + } + } + + pub fn is_indexable(&self, analyzer: &impl GraphBackend) -> Result { + match self { + Self::BuiltIn(node, _) => Ok(node.is_indexable(analyzer)?), + Self::Concrete(node) => Ok(node.is_indexable(analyzer)?), + _ => Ok(false), + } + } + + pub fn ty_eq(&self, other: &Self, analyzer: &impl GraphBackend) -> Result { + match (self, other) { + (VarType::User(s, _), VarType::User(o, _)) => { + Ok(s.unresolved_as_resolved(analyzer)? == o.unresolved_as_resolved(analyzer)?) + } + (VarType::BuiltIn(s, _), VarType::BuiltIn(o, _)) => { + match (s.underlying(analyzer)?, o.underlying(analyzer)?) { + (Builtin::Array(l), Builtin::Array(r)) => Ok(l + .unresolved_as_resolved(analyzer)? + == r.unresolved_as_resolved(analyzer)?), + (Builtin::SizedArray(l_size, l), Builtin::SizedArray(r_size, r)) => Ok(l + .unresolved_as_resolved(analyzer)? + == r.unresolved_as_resolved(analyzer)? + && l_size == r_size), + (Builtin::Mapping(lk, lv), Builtin::Mapping(rk, rv)) => Ok(lk + .unresolved_as_resolved(analyzer)? + == rk.unresolved_as_resolved(analyzer)? 
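+                // a mapping type matches only when both its key type and its value type agree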
+ && lv.unresolved_as_resolved(analyzer)? + == rv.unresolved_as_resolved(analyzer)?), + (l, r) => Ok(l == r), + } + } + (VarType::Concrete(s), VarType::Concrete(o)) => Ok(s + .underlying(analyzer)? + .equivalent_ty(o.underlying(analyzer)?)), + _ => Ok(false), + } + } + + pub fn as_string(&self, analyzer: &impl GraphBackend) -> Result { + match self { + VarType::User(ty_node, _) => ty_node.as_string(analyzer), + VarType::BuiltIn(bn, _) => match analyzer.node(*bn) { + Node::Builtin(bi) => bi.as_string(analyzer), + _ => unreachable!(), + }, + VarType::Concrete(c) => c.underlying(analyzer)?.as_builtin().as_string(analyzer), + } + } + + pub fn is_int(&self, analyzer: &impl GraphBackend) -> Result { + match self { + VarType::BuiltIn(bn, _) => Ok(bn.underlying(analyzer)?.is_int()), + VarType::Concrete(c) => Ok(c.underlying(analyzer)?.is_int()), + _ => Ok(false), + } + } + + pub fn as_builtin(&self, analyzer: &impl GraphBackend) -> Result { + match self { + VarType::BuiltIn(bn, _) => Ok(bn.underlying(analyzer)?.clone()), + VarType::Concrete(c) => Ok(c.underlying(analyzer)?.as_builtin()), + e => Err(GraphError::NodeConfusion(format!( + "Expected to be builtin castable but wasnt: {e:?}" + ))), + } + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub enum TypeNode { + Contract(ContractNode), + Struct(StructNode), + Enum(EnumNode), + Ty(TyNode), + Func(FunctionNode), + Unresolved(NodeIdx), +} + +impl TypeNode { + pub fn as_string(&self, analyzer: &impl GraphBackend) -> Result { + match self { + TypeNode::Contract(n) => n.name(analyzer), + TypeNode::Struct(n) => n.name(analyzer), + TypeNode::Enum(n) => n.name(analyzer), + TypeNode::Ty(n) => n.name(analyzer), + TypeNode::Func(n) => Ok(format!("function {}", n.name(analyzer)?)), + TypeNode::Unresolved(n) => Ok(format!("UnresolvedType<{:?}>", analyzer.node(*n))), + } + } + + pub fn unresolved_as_resolved(&self, analyzer: &impl GraphBackend) -> Result { + match self { + TypeNode::Unresolved(n) => match analyzer.node(*n) { + Node::Unresolved(ident) => Err(GraphError::NodeConfusion(format!( + "Expected the type \"{}\" to be resolved by now", + ident.name + ))), + Node::Contract(..) => Ok(TypeNode::Contract((*n).into())), + Node::Struct(..) => Ok(TypeNode::Struct((*n).into())), + Node::Enum(..) => Ok(TypeNode::Enum((*n).into())), + Node::Ty(..) => Ok(TypeNode::Ty((*n).into())), + Node::Function(..) 
=> Ok(TypeNode::Func((*n).into())), + _ => Err(GraphError::NodeConfusion( + "Tried to type a non-typeable element".to_string(), + )), + }, + _ => Ok(*self), + } + } +} + +impl From for NodeIdx { + fn from(val: TypeNode) -> Self { + match val { + TypeNode::Contract(n) => n.into(), + TypeNode::Struct(n) => n.into(), + TypeNode::Enum(n) => n.into(), + TypeNode::Ty(n) => n.into(), + TypeNode::Func(n) => n.into(), + TypeNode::Unresolved(n) => n, + } + } +} diff --git a/crates/pyrometer/Cargo.toml b/crates/pyrometer/Cargo.toml new file mode 100644 index 00000000..fd35e3e8 --- /dev/null +++ b/crates/pyrometer/Cargo.toml @@ -0,0 +1,36 @@ +[package] +name = "pyrometer" +description = "Core Pyrometer library and analyzer implementation" + +version.workspace = true +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +analyzers.workspace = true +graph.workspace = true +solc-expressions.workspace = true +shared.workspace = true + +petgraph.workspace = true +solang-parser.workspace = true +ethers-core.workspace = true +ariadne.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true +ahash.workspace = true +serde_json = "1" + + + + +[dev-dependencies] +criterion = { version = "0.4"} # benching + +[[bench]] +name = "parse" +harness = false \ No newline at end of file diff --git a/benches/README.md b/crates/pyrometer/benches/README.md similarity index 100% rename from benches/README.md rename to crates/pyrometer/benches/README.md diff --git a/benches/flamegraphs/parse.svg b/crates/pyrometer/benches/flamegraphs/parse.svg similarity index 100% rename from benches/flamegraphs/parse.svg rename to crates/pyrometer/benches/flamegraphs/parse.svg diff --git a/benches/parse.rs b/crates/pyrometer/benches/parse.rs similarity index 72% rename from benches/parse.rs rename to crates/pyrometer/benches/parse.rs index baf7e115..d4e5a37f 100644 --- a/benches/parse.rs +++ b/crates/pyrometer/benches/parse.rs @@ -1,6 +1,6 @@ use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; -use pyrometer::Analyzer; -use std::path::PathBuf; +use pyrometer::{Analyzer, SourcePath}; +use std::path::{Path, PathBuf}; use std::env::{self}; use std::fs; @@ -23,17 +23,18 @@ criterion_main!(benches); /// /// A vector of tuples representing the targets. fn get_targets(bench_contracts_root: PathBuf) -> Vec<(String, PathBuf, usize)> { - let mut targets = vec![]; - targets.push(( - "ctoken".to_string(), - bench_contracts_root.join("flat_ctoken.sol"), - 50, // range of tens ms - )); - targets.push(( - "comptroller".to_string(), - bench_contracts_root.join("flat_comptroller.sol"), - 10, // range of singles seconds. 10 samples is lowest - )); + let targets = vec![ + ( + "ctoken".to_string(), + bench_contracts_root.join("flat_ctoken.sol"), + 50, // range of tens ms + ), + ( + "comptroller".to_string(), + bench_contracts_root.join("flat_comptroller.sol"), + 10, // range of singles seconds. 
10 samples is lowest + ), + ]; targets } @@ -59,8 +60,8 @@ fn bench(c: &mut Criterion) { parsing_group.sample_size(sample_size); let sol = fs::read_to_string(path.clone()).expect("Could not find file"); let bench_id = BenchmarkId::new("parse", sample_size); - parsing_group.bench_with_input(bench_id, &(path, &sol), |b, (path, &ref sol)| { - b.iter(|| parse(path, sol.clone())); + parsing_group.bench_with_input(bench_id, &(path, &sol), |b, (path, sol)| { + b.iter(|| parse(path, sol.to_string())); }); parsing_group.finish(); } @@ -72,10 +73,10 @@ fn bench(c: &mut Criterion) { /// /// * `path` - A `PathBuf` representing the path to the source code file. /// * `sol` - A string containing the Solidity source code. -fn parse(path: &PathBuf, sol: String) { - let mut analyzer = Analyzer { - root: path.clone(), - ..Default::default() - }; - let (_maybe_entry, mut _all_sources) = analyzer.parse(&sol, &path, true); +fn parse(path: &Path, sol: String) { + let mut analyzer = Analyzer::default(); + let mut arena_base = Default::default(); + let arena = &mut arena_base; + let current_path = SourcePath::SolidityFile(path.to_path_buf()); + let _maybe_entry = analyzer.parse(arena, &sol, ¤t_path, true); } diff --git a/crates/pyrometer/src/analyzer.rs b/crates/pyrometer/src/analyzer.rs new file mode 100644 index 00000000..8dee588d --- /dev/null +++ b/crates/pyrometer/src/analyzer.rs @@ -0,0 +1,1452 @@ +use crate::builtin_fns; +use graph::elem::Elem; +use shared::RangeArena; + +use analyzers::LocStrSpan; +use graph::{nodes::*, ContextEdge, Edge, Node, VarType}; +use shared::{AnalyzerLike, GraphLike, JoinStats, NodeIdx, Search}; +use solc_expressions::{ExprErr, FnCallBuilder, IntoExprErr, StatementParser}; + +use ahash::AHashMap; +use ariadne::{Cache, Color, Config, Fmt, Label, Report, ReportKind, Source, Span}; +use petgraph::{graph::*, stable_graph::StableGraph, Directed}; +use serde_json::Value; +use solang_parser::{ + diagnostics::Diagnostic, + helpers::CodeLocation, + pt::{ + ContractDefinition, ContractPart, EnumDefinition, ErrorDefinition, Expression, + FunctionDefinition, FunctionTy, Identifier, Import, SourceUnit, SourceUnitPart, + StructDefinition, TypeDefinition, Using, UsingList, VariableDefinition, + }, +}; + +use std::{ + collections::BTreeMap, + fs, + path::{Path, PathBuf}, +}; + +/// A path to either a single solidity file or a Solc Standard JSON file +#[derive(Debug, Clone)] +pub enum Root { + /// A path to a single solidity file + SolidityFile(PathBuf), + /// A path to a Solc Standard JSON file + SolcJSON(PathBuf), + /// A path to a directory containing a remappings file + RemappingsDirectory(PathBuf), +} + +impl Default for Root { + fn default() -> Self { + Root::SolidityFile(PathBuf::new()) + } +} + +/// An intermediate representation of a path to a solidity source +/// +/// This is done so that any source can be fetched from the filesystem again if needed +#[derive(Debug, Clone, Ord, PartialEq, PartialOrd, Eq)] +pub enum SourcePath { + /// A path to a solidity file + SolidityFile(PathBuf), + /// A path to a Solc JSON file and the path within pointing to the solidity source + SolcJSON(PathBuf, String), +} + +impl SourcePath { + pub fn path_to_solidity_source(&self) -> PathBuf { + match self { + SourcePath::SolidityFile(path) => path.clone(), + SourcePath::SolcJSON(_path_to_json, path) => path.clone().into(), + } + } +} + +#[derive(Debug, Clone, Default)] +pub struct FinalPassItem { + pub funcs: Vec, + pub usings: Vec<(Using, NodeIdx)>, + pub inherits: Vec<(ContractNode, Vec)>, + pub vars: 
Vec<(VarNode, NodeIdx)>, +} +impl FinalPassItem { + pub fn new( + funcs: Vec, + usings: Vec<(Using, NodeIdx)>, + inherits: Vec<(ContractNode, Vec)>, + vars: Vec<(VarNode, NodeIdx)>, + ) -> Self { + Self { + funcs, + usings, + inherits, + vars, + } + } +} + +#[derive(Debug, Clone)] +pub struct Analyzer { + /// The root of the path to either the contract or solc json file to be analyzed + pub root: Root, + /// Solidity remappings - as would be passed into the solidity compiler + pub remappings: Vec<(String, String)>, + /// Solidity sources - tuple of SourcePath, solidity string, file number (None until parsed), and entry node (None until parsed) + pub sources: Vec<(SourcePath, String, Option, Option)>, + /// Since we use a staged approach to analysis, we analyze all user types first then go through and patch up any missing or unresolved + /// parts of a contract (i.e. we parsed a struct which is used as an input to a function signature, we have to know about the struct) + pub final_pass_items: Vec, + /// The next file number to use when parsing a new file + pub file_no: usize, + /// The index of the current `msg` node + pub msg: MsgNode, + /// The index of the current `msg` node + pub tmp_msg: Option, + /// The index of the current `block` node + pub block: BlockNode, + /// The underlying graph holding all of the elements of the contracts + pub graph: Graph, + /// The entry node - this is the root of the dag, all relevant things should eventually point back to this (otherwise can be discarded) + pub entry: NodeIdx, + /// A mapping of a solidity builtin to the index in the graph + pub builtins: AHashMap, + /// A mapping of a user type's name to the index in the graph (i.e. `struct A` would mapped `A` -> index) + pub user_types: AHashMap, + /// A mapping of solidity builtin function to a [Function] struct, i.e. `ecrecover` -> `Function { name: "ecrecover", ..}` + pub builtin_fns: AHashMap, + /// A mapping of solidity builtin functions to their indices in the graph + pub builtin_fn_nodes: AHashMap, + /// A mapping of solidity builtin function names to their parameters and returns, i.e. `ecrecover` -> `([hash, r, s, v], [signer])` + pub builtin_fn_inputs: AHashMap, Vec)>, + /// Accumulated errors that happened while analyzing + pub expr_errs: Vec, + /// The maximum depth to analyze to (i.e. call depth) + pub max_depth: usize, + /// The maximum number of forks throughout the lifetime of the analysis. + pub max_width: usize, + /// Dummy function used during parsing to attach contexts to for more complex first-pass parsing (i.e. 
before `final_pass`) + pub parse_fn: FunctionNode, + /// Whether to force a panic on first error encountered + pub debug_panic: bool, + /// Per function, a list of functions that are called + pub fn_calls_fns: BTreeMap>, + + pub join_stats: JoinStats, + /// An arena of ranges + pub range_arena: RangeArena>, + /// Parsed functions + pub handled_funcs: Vec, +} + +impl Default for Analyzer { + fn default() -> Self { + let mut a = Self { + root: Default::default(), + remappings: Default::default(), + sources: Default::default(), + final_pass_items: Default::default(), + file_no: 0, + msg: MsgNode(0), + tmp_msg: None, + block: BlockNode(0), + graph: Default::default(), + entry: NodeIndex::from(0), + builtins: Default::default(), + user_types: Default::default(), + builtin_fns: builtin_fns::builtin_fns(), + builtin_fn_nodes: Default::default(), + builtin_fn_inputs: Default::default(), + expr_errs: Default::default(), + max_depth: 200, + max_width: 2_i32.pow(14) as usize, + parse_fn: NodeIdx::from(0).into(), + debug_panic: false, + fn_calls_fns: Default::default(), + join_stats: JoinStats::default(), + range_arena: RangeArena { + ranges: vec![Elem::Null], + map: { + let mut map: AHashMap, usize> = Default::default(); + map.insert(Elem::Null, 0); + map + }, + }, + handled_funcs: Vec::default(), + }; + a.builtin_fn_inputs = builtin_fns::builtin_fns_inputs(&mut a); + + let msg = Msg::default(); + let block = Block::default(); + let msg = a.graph.add_node(Node::Msg(msg)).into(); + let block = a.graph.add_node(Node::Block(block)).into(); + a.msg = msg; + a.block = block; + a.entry = a.add_node(Node::Entry); + let pf = Function { + name: Some(Identifier { + loc: solang_parser::pt::Loc::Implicit, + name: "".into(), + }), + ..Default::default() + }; + let parser_fn = FunctionNode::from(a.add_node(pf)); + a.add_edge(parser_fn, a.entry, Edge::Func); + a.parse_fn = parser_fn; + a + } +} + +impl Analyzer { + pub fn stats( + &self, + duration: std::time::Duration, + arena: &RangeArena>, + ) -> String { + let num_nodes = self.graph.node_count(); + let num_contracts = self.number_of_contracts(); + let num_funcs = self.number_of_functions(); + let num_vars = self.number_of_variables(); + let num_contexts = self.number_of_contexts(); + + vec![ + format!(""), + format!(" Analyzer stats"), + format!("====================================="), + format!(""), + format!( + " Number of nodes: {}, {} nodes/ms", + num_nodes, + num_nodes as f64 / duration.as_millis() as f64 + ), + format!( + " Number of Contracts: {}, {} contracts/ms", + num_contracts, + num_contracts as f64 / duration.as_millis() as f64 + ), + format!( + " Number of Functions: {}, {} functions/ms", + num_funcs, + num_funcs as f64 / duration.as_millis() as f64 + ), + format!( + " Number of Variables: {}, {} variables/ms", + num_vars, + num_vars as f64 / duration.as_millis() as f64 + ), + format!(""), + format!( + " Unique Range Elements: {}", + arena.ranges.len() + ), + format!(""), + format!( + " Number of Contexts: {}, {} contexts/ms", + num_contexts, + num_contexts as f64 / duration.as_millis() as f64 + ), + format!(" Max depth of Contexts: {}", self.max_context_depth()), + format!(" Max width of Contexts: {}", self.max_context_width()), + format!(""), + format!(" Number of joins: {}, {} completed, {} variables reduced", self.join_stats.total_joins(), self.join_stats.completed_joins(), self.join_stats.reduced_vars()), + format!(" Number of pure joins: {}, {} completed, {} variables reduced", self.join_stats.total_pure_joins(), 
self.join_stats.completed_pure_joins(), self.join_stats.pure_reduced_vars()), + format!(" Number of simple pure joins: {}, {} completed, {} variables reduced", self.join_stats.pure_no_children_joins.num_joins, self.join_stats.pure_no_children_joins.completed_joins, self.join_stats.pure_no_children_joins.vars_reduced), + format!(" Number of children pure joins: {}, {} completed, {} variables reduced", self.join_stats.pure_children_no_forks_joins.num_joins, self.join_stats.pure_children_no_forks_joins.completed_joins, self.join_stats.pure_children_no_forks_joins.vars_reduced), + format!(" Number of fork children pure joins: {}, {} completed, {} variables reduced", self.join_stats.pure_children_forks_joins.num_joins, self.join_stats.pure_children_forks_joins.completed_joins, self.join_stats.pure_children_forks_joins.vars_reduced), + format!(" Number of view joins: {}, {} completed, {} variables reduced", self.join_stats.total_view_joins(), self.join_stats.completed_view_joins(), self.join_stats.view_reduced_vars()), + format!(" Number of simple view joins: {}, {} completed, {} variables reduced", self.join_stats.view_no_children_joins.num_joins, self.join_stats.view_no_children_joins.completed_joins, self.join_stats.view_no_children_joins.vars_reduced), + format!(" Number of children view joins: {}, {} completed, {} variables reduced", self.join_stats.view_children_no_forks_joins.num_joins, self.join_stats.view_children_no_forks_joins.completed_joins, self.join_stats.view_children_no_forks_joins.vars_reduced), + format!(" Number of fork children view joins: {}, {} completed, {} variables reduced", self.join_stats.view_children_forks_joins.num_joins, self.join_stats.view_children_forks_joins.completed_joins, self.join_stats.view_children_forks_joins.vars_reduced), + format!(" Number of mut joins: {}, {} completed, {} variables reduced", self.join_stats.total_mut_joins(), self.join_stats.completed_mut_joins(), self.join_stats.mut_reduced_vars()), + format!(" Number of simple mut joins: {}, {} completed, {} variables reduced", self.join_stats.mut_no_children_joins.num_joins, self.join_stats.mut_no_children_joins.completed_joins, self.join_stats.mut_no_children_joins.vars_reduced), + format!(" Number of children mut joins: {}, {} completed, {} variables reduced", self.join_stats.mut_children_no_forks_joins.num_joins, self.join_stats.mut_children_no_forks_joins.completed_joins, self.join_stats.mut_children_no_forks_joins.vars_reduced), + format!(" Number of fork children mut joins: {}, {} completed, {} variables reduced", self.join_stats.mut_children_forks_joins.num_joins, self.join_stats.mut_children_forks_joins.completed_joins, self.join_stats.mut_children_forks_joins.vars_reduced), + format!(""), + format!("====================================="), + ] + .join("\n") + } + + pub fn number_of_contexts(&self) -> usize { + self.graph + .node_weights() + .filter(|n| matches!(n, Node::Context(_))) + .count() + } + + pub fn number_of_forks(&self) -> usize { + self.graph + .node_weights() + .filter(|n| matches!(n, Node::ContextFork)) + .count() + } + + pub fn number_of_variables(&self) -> usize { + self.graph + .node_weights() + .filter(|n| matches!(n, Node::ContextVar(_))) + .count() + } + + pub fn number_of_functions(&self) -> usize { + self.graph + .node_weights() + .filter(|n| matches!(n, Node::Function(_))) + .count() + } + + pub fn number_of_contracts(&self) -> usize { + self.graph + .node_weights() + .filter(|n| matches!(n, Node::Contract(_))) + .count() + } + + pub fn max_context_depth(&self) -> 
usize { + self.graph + .node_weights() + .filter_map(|n| { + if let Node::Context(c) = n { + Some(c) + } else { + None + } + }) + .fold(0, |mut acc, c| { + if c.depth > acc { + acc = c.depth; + acc + } else { + acc + } + }) + } + + pub fn max_context_width(&self) -> usize { + self.graph + .node_weights() + .filter_map(|n| { + if let Node::Context(c) = n { + Some(c) + } else { + None + } + }) + .fold(0, |mut acc, c| { + if c.width > acc { + acc = c.width; + acc + } else { + acc + } + }) + } + + pub fn complicated_parse( + &mut self, + arena: &mut RangeArena>, + expr: &Expression, + parent: Option, + ) -> Option { + tracing::trace!("Parsing required compile-time evaluation"); + + let ctx = if let Some(parent) = parent { + let pf = Function { + name: Some(Identifier { + loc: solang_parser::pt::Loc::Implicit, + name: "".into(), + }), + ..Default::default() + }; + let parser_fn = FunctionNode::from(self.add_node(pf)); + self.add_edge(parser_fn, parent, Edge::Func); + + let dummy_ctx = Context::new(parser_fn, "".to_string(), expr.loc()); + let ctx = ContextNode::from(self.add_node(Node::Context(dummy_ctx))); + self.add_edge(ctx, parser_fn, Edge::Context(ContextEdge::Context)); + ctx + } else { + let dummy_ctx = Context::new(self.parse_fn, "".to_string(), expr.loc()); + let ctx = ContextNode::from(self.add_node(Node::Context(dummy_ctx))); + self.add_edge(ctx, self.entry(), Edge::Context(ContextEdge::Context)); + ctx + }; + + let full_stmt = solang_parser::pt::Statement::Return(expr.loc(), Some(expr.clone())); + self.parse_ctx_statement(arena, &full_stmt, false, Some(ctx)); + let edges = self.add_if_err(ctx.successful_edges(self).into_expr_err(expr.loc()))?; + if edges.len() == 1 { + let res = edges[0].return_nodes(self).into_expr_err(expr.loc()); + + let res = self.add_if_err(res); + + if let Some(res) = res { + res.last().map(|last| ExprRet::Single(last.1.into())) + } else { + None + } + } else if edges.is_empty() { + let res = ctx.return_nodes(self).into_expr_err(expr.loc()); + + let res = self.add_if_err(res); + + if let Some(res) = res { + res.last().map(|last| ExprRet::Single(last.1.into())) + } else { + None + } + } else { + self.add_expr_err(ExprErr::ParseError(expr.loc(), "Expected this to be compile-time evaluatable, but it was nondeterministic likely due to an external call via an interface".to_string())); + None + } + } + + pub fn set_remappings_and_root(&mut self, remappings_path: String) { + let parent_path_buf = PathBuf::from(&remappings_path) + .parent() + .unwrap() + .to_path_buf(); + self.root = Root::RemappingsDirectory(parent_path_buf); + + let remappings_file = fs::read_to_string(remappings_path) + .map_err(|err| err.to_string()) + .expect("Remappings file not found"); + + self.remappings = remappings_file + .lines() + .map(|x| x.trim()) + .filter(|x| !x.is_empty()) + .map(|x| x.split_once('=').expect("Invalid remapping")) + .map(|(name, path)| (name.to_owned(), path.to_owned())) + .collect(); + } + + pub fn update_with_solc_json(&mut self, path_to_json: &PathBuf) { + self.root = Root::SolcJSON(path_to_json.clone()); + + // iterate over the Solc JSON and add all the sources + let json_file = fs::read_to_string(path_to_json) + .unwrap_or_else(|_| panic!("Solc JSON file not found: {}", path_to_json.display())); + let solc_json: Value = serde_json::from_str(&json_file).unwrap(); + let sources = solc_json["sources"].as_object().unwrap(); + for (name, value_obj) in sources { + // value_obj is a Value with a `content` field -> save the `content` field's solidity string + let 
sol_source = value_obj.as_object().unwrap()["content"].as_str().unwrap(); + // create SourcePath with the path to the JSON and the name of the source + let source_path = SourcePath::SolcJSON(path_to_json.clone(), name.to_owned()); + // Don't know the solang file no yet, so set it to None + let source = (source_path.clone(), sol_source.to_owned(), None, None); + self.sources.push(source); + } + + // also iterate over the Solc JSON and add all the remappings + // settings (optional) -> remappings (optional) -> iterate over all remappings + let remappings = solc_json["settings"]["remappings"].as_array(); + if let Some(remappings) = remappings { + // vec of strings + for remapping in remappings { + // split the remapping string into two parts + let remapping = remapping.as_str().unwrap(); + let remapping = remapping.split_once('=').expect("Invalid remapping"); + // remapping.0 is the name of the remapping + // remapping.1 is the path of the remapping + self.remappings.push(( + remapping.0.to_owned().to_string(), + remapping.1.to_owned().to_string(), + )); + } + } + } + + pub fn print_errors( + &self, + file_mapping: &'_ BTreeMap, + mut src: &mut impl Cache, + ) { + if self.expr_errs.is_empty() { + } else { + self.expr_errs.iter().for_each(|error| { + let str_span = LocStrSpan::new(file_mapping, error.loc()); + let report = Report::build(ReportKind::Error, str_span.source(), str_span.start()) + .with_message(error.report_msg()) + .with_config( + Config::default() + .with_cross_gap(false) + .with_underlines(true) + .with_tab_width(4), + ) + .with_label( + Label::new(str_span) + .with_color(Color::Red) + .with_message(format!("{}", error.msg().fg(Color::Red))), + ); + report.finish().print(&mut src).unwrap(); + }); + } + } + + #[tracing::instrument(level = "trace", skip_all)] + pub fn parse( + &mut self, + arena: &mut RangeArena>, + src: &str, + current_path: &SourcePath, + entry: bool, + ) -> Option { + let file_no = self.file_no; + self.sources + .push((current_path.clone(), src.to_string(), Some(file_no), None)); + match solang_parser::parse(src, file_no) { + Ok((source_unit, _comments)) => { + let parent = + self.add_node(Node::SourceUnit(graph::nodes::SourceUnit::new(file_no))); + self.add_edge(parent, self.entry, Edge::Source); + let final_pass_part = self.parse_source_unit( + arena, + source_unit, + file_no, + parent.into(), + current_path, + ); + self.final_pass_items.push(final_pass_part); + if entry { + self.final_pass(arena); + } + + Some(parent) + } + Err(diagnostics) => { + print_diagnostics_report(src, ¤t_path.path_to_solidity_source(), diagnostics) + .unwrap(); + panic!("Failed to parse Solidity code for {current_path:?}."); + } + } + } + + pub fn final_pass(&mut self, arena: &mut RangeArena>) { + let elems = self.final_pass_items.clone(); + elems.iter().for_each(|final_pass_item| { + final_pass_item.funcs.iter().for_each(|func| { + func.set_params_and_ret(self, arena).unwrap(); + }); + }); + elems.iter().for_each(|final_pass_item| { + final_pass_item + .inherits + .iter() + .for_each(|(contract, inherits)| { + contract.inherit(inherits.to_vec(), self); + }); + // final_pass_item.funcs.iter().for_each(|func| { + // // add params now that parsing is done + // func.set_params_and_ret(self, arena).unwrap(); + // }); + + final_pass_item + .usings + .iter() + .for_each(|(using, scope_node)| { + self.parse_using(arena, using, *scope_node); + }); + final_pass_item.vars.iter().for_each(|(var, parent)| { + let loc = var.underlying(self).unwrap().loc; + let res = var + 
.parse_initializer(self, arena, *parent) + .into_expr_err(loc); + let _ = self.add_if_err(res); + }); + }); + + elems.into_iter().for_each(|final_pass_item| { + final_pass_item + .funcs + .iter() + .for_each(|func| self.analyze_fn_calls(*func)); + let mut func_mapping = BTreeMap::default(); + let mut call_dep_graph: StableGraph = StableGraph::default(); + let fn_calls_fns = std::mem::take(&mut self.fn_calls_fns); + fn_calls_fns.iter().for_each(|(func, calls)| { + if !calls.is_empty() { + let func_idx = if let Some(idx) = func_mapping.get(func) { + *idx + } else { + let idx = call_dep_graph.add_node(*func); + func_mapping.insert(func, idx); + idx + }; + + calls.iter().for_each(|call| { + let call_idx = if let Some(idx) = func_mapping.get(call) { + *idx + } else { + let idx = call_dep_graph.add_node(*call); + func_mapping.insert(call, idx); + idx + }; + + call_dep_graph.add_edge(func_idx, call_idx, 0); + }); + } else { + self.handled_funcs.push(*func); + if let Some(body) = &func.underlying(self).unwrap().body.clone() { + self.parse_ctx_statement(arena, body, false, Some(*func)); + } + } + }); + + let mut res = petgraph::algo::toposort(&call_dep_graph, None); + while let Err(cycle) = res { + call_dep_graph.remove_node(cycle.node_id()); + res = petgraph::algo::toposort(&call_dep_graph, None); + } + + let indices = res.unwrap(); + + indices.iter().for_each(|idx| { + let func = call_dep_graph.node_weight(*idx).unwrap(); + if !self.handled_funcs.contains(func) { + self.handled_funcs.push(*func); + if let Some(body) = &func.underlying(self).unwrap().body.clone() { + self.parse_ctx_statement(arena, body, false, Some(*func)); + } + } + }); + + final_pass_item.funcs.into_iter().for_each(|func| { + if !self.handled_funcs.contains(&func) { + if let Some(body) = &func.underlying(self).unwrap().body.clone() { + self.parse_ctx_statement(arena, body, false, Some(func)); + } + } + }); + + self.fn_calls_fns = fn_calls_fns; + }); + } + + #[tracing::instrument(level = "trace", skip_all)] + pub fn parse_source_unit( + &mut self, + arena: &mut RangeArena>, + source_unit: SourceUnit, + file_no: usize, + parent: SourceUnitNode, + current_path: &SourcePath, + ) -> FinalPassItem { + let mut all_funcs = vec![]; + let mut all_usings = vec![]; + let mut all_inherits = vec![]; + let mut all_vars = vec![]; + source_unit + .0 + .iter() + .enumerate() + .for_each(|(unit_part, source_unit_part)| { + let (sup, funcs, usings, inherits, vars) = self.parse_source_unit_part( + arena, + source_unit_part, + file_no, + unit_part, + parent, + current_path, + ); + parent.add_part(sup, self).unwrap(); + all_funcs.extend(funcs); + all_usings.extend(usings); + all_inherits.extend(inherits); + all_vars.extend(vars); + }); + FinalPassItem::new(all_funcs, all_usings, all_inherits, all_vars) + } + + #[tracing::instrument(level = "trace", skip_all)] + pub fn parse_source_unit_part( + &mut self, + arena: &mut RangeArena>, + sup: &SourceUnitPart, + file_no: usize, + unit_part: usize, + parent: SourceUnitNode, + // imported: &mut Vec<(Option, String, String, usize)>, + current_path: &SourcePath, + ) -> ( + SourceUnitPartNode, + Vec, + Vec<(Using, NodeIdx)>, + Vec<(ContractNode, Vec)>, + Vec<(VarNode, NodeIdx)>, + ) { + use SourceUnitPart::*; + + let sup_node = self.add_node(Node::SourceUnitPart(graph::nodes::SourceUnitPart::new( + file_no, unit_part, + ))); + let s_node = SourceUnitPartNode::from(sup_node); + self.add_edge(sup_node, parent, Edge::Part); + + let mut func_nodes = vec![]; + let mut usings = vec![]; + let mut inherits = vec![]; + 
let mut vars = vec![]; + + match sup { + ContractDefinition(def) => { + let (node, funcs, con_usings, unhandled_inherits, unhandled_vars) = + self.parse_contract_def(arena, def, parent); + s_node.add_contract(node, self).unwrap(); + self.add_edge(node, sup_node, Edge::Contract); + func_nodes.extend(funcs); + usings.extend(con_usings); + inherits.push((node, unhandled_inherits)); + vars.extend(unhandled_vars); + } + StructDefinition(def) => { + let node = self.parse_struct_def(arena, def); + s_node.add_struct(node, self).unwrap(); + self.add_edge(node, sup_node, Edge::Struct); + } + EnumDefinition(def) => { + let node = self.parse_enum_def(def); + self.add_edge(node, sup_node, Edge::Enum); + } + ErrorDefinition(def) => { + let node = self.parse_err_def(arena, def); + self.add_edge(node, sup_node, Edge::Error); + } + VariableDefinition(def) => { + let (node, maybe_func, needs_final_pass) = self.parse_var_def(arena, def, false); + s_node.add_constant(node, self).unwrap(); + if let Some(func) = maybe_func { + let func = self.handle_func(func, None); + func_nodes.push(func); + s_node.add_func(func, self).unwrap(); + } + + if needs_final_pass { + vars.push((node, parent.into())); + } + + self.add_edge(node, sup_node, Edge::Var); + } + FunctionDefinition(def) => { + let node = self.parse_func_def(def, None); + s_node.add_func(node, self).unwrap(); + func_nodes.push(node); + + self.add_edge(node, sup_node, Edge::Func); + } + TypeDefinition(def) => { + let node = self.parse_ty_def(arena, def); + self.add_edge(node, sup_node, Edge::Ty); + } + EventDefinition(_def) => todo!(), + Annotation(_anno) => todo!(), + Using(using) => usings.push((*using.clone(), parent.into())), + StraySemicolon(_loc) => todo!(), + PragmaDirective(_, _, _) => {} + ImportDirective(import) => { + self.parse_import(arena, import, current_path, parent); + } + } + + (s_node, func_nodes, usings, inherits, vars) + } + + #[tracing::instrument(level = "trace", skip_all)] + pub fn parse_import( + &mut self, + arena: &mut RangeArena>, + import: &Import, + current_path: &SourcePath, + parent: SourceUnitNode, + ) { + let (import_path, remapping) = match import { + Import::Plain(import_path, _) => { + tracing::trace!("parse_import, path: {:?}", import_path); + // find the longest remapping that the import_path starts with + let remapping = self + .remappings + .iter() + .filter_map(|(name, target)| { + let str_lit = &import_path.string; + if str_lit.starts_with(name) { + Some((name, target)) + } else { + None + } + }) + .max_by(|(name1, _), (name2, _)| name1.len().cmp(&name2.len())); + (import_path, remapping) + } + Import::Rename(import_path, _elems, _) => { + tracing::trace!("parse_import, path: {:?}, Rename", import_path); + // find the longest remapping that the import_path starts with + let remapping = self + .remappings + .iter() + .filter_map(|(name, target)| { + let str_lit = &import_path.string; + if str_lit.starts_with(name) { + Some((name, target)) + } else { + None + } + }) + .max_by(|(name1, _), (name2, _)| name1.len().cmp(&name2.len())); + (import_path, remapping) + } + e => todo!("import {:?}", e), + }; + /* + Cases to handle: + current path SolidityFile, remapping found + - Root is RemappingsDirectory + current path SolidityFile, no remapping found + - Root is SolidityFile + - Root is RemappingsDirectory + current path SolcJSON, remapping found + - Root is SolcJSON + current path SolcJSON, no remapping found + - Root is SolcJSON + */ + let (remapped, sol) = match current_path { + SourcePath::SolidityFile(sol_file_path) => 
{ + // check for remappings found + let remapped = if let Some((name, target)) = remapping { + // Found matching remapping name and target, check for remapping within the root path + match &self.root { + Root::RemappingsDirectory(root_path) => { + let remapped_path = root_path.join(target).join( + import_path + .string + .replacen(name, "", 1) + .trim_start_matches('/'), + ); + SourcePath::SolidityFile(remapped_path) + } + Root::SolcJSON(_) => { + panic!("Please report this as a bug, root is SolcJSON but current path is a SolidityFile w/ remapping found") + } + Root::SolidityFile(_) => { + panic!("Please report this as a bug, root is SolidityFile but remappings are available") + } + } + } else { + // no remapping found, check for import within the root path + match &self.root { + Root::RemappingsDirectory(_) | Root::SolidityFile(_) => { + // _root_path is not used, should be equal to sol_file_path for first level imports, but different for chains of imports + // should be a relative import from sol_file_path + let remapped_path = sol_file_path + .parent() + .unwrap() + .join(import_path.string.trim_start_matches('/')); + SourcePath::SolidityFile(remapped_path) + } + Root::SolcJSON(_) => { + panic!("Please report this as a bug, root is SolcJSON but current path is a SolidityFile w/ no remapping found") + } + } + }; + + let canonical = fs::canonicalize(remapped.path_to_solidity_source()) + .unwrap_or_else(|_| panic!( + "Could not find file: {remapped:?}{}", + if self.remappings.is_empty() { + ". It looks like you didn't pass in any remappings. Try adding the `--remappings ./path/to/remappings.txt` to the command line input" + } else { "" } + ) + ); + let sol = fs::read_to_string(&canonical).unwrap_or_else(|_| { + panic!( + "Could not find file for dependency: {canonical:?}{}", + if self.remappings.is_empty() { + ". It looks like you didn't pass in any remappings. Try adding the `--remappings ./path/to/remappings.txt` to the command line input (where `remappings.txt` is the output of `forge remappings > remappings.txt`)" + } else { "" } + ) + }); + let canonical_source = SourcePath::SolidityFile(canonical); + (canonical_source, sol) + } + SourcePath::SolcJSON(_json_path, current_name) => { + // can use the import_path and remappings to find the import amongst self.sources + let (remapped, sol) = match &self.root { + Root::SolcJSON(_solc_path) => { + // check for remappings found + if let Some((name, target)) = remapping { + // First, take the import_path and remove the remapping name + let import_path_str = import_path.string.replacen(name, "", 1); + let remapped_path = import_path_str.trim_start_matches('/'); + // the source that matches should be "{target}/{remapped_path}". 
Create PathBuf for this + let remapped_path_buf = + PathBuf::from(format!("{}/{}", target, remapped_path)); + // look for this path in self.sources + let normalized_remapped_path_buf = normalize_path(&remapped_path_buf); + if let Some((confirmed_source_path, sol, _file_no, _entry)) = + self.sources.iter().find(|(path, _sol, _file_no, _entry)| { + normalize_path(path.path_to_solidity_source()) + == normalized_remapped_path_buf + }) + { + // found the path, return the source_path + (confirmed_source_path.clone(), sol.clone()) + } else { + // didn't find the path, panic + panic!("Could not find file: {:#?}", remapped_path_buf); + } + } else { + // need to find name of the file in self.sources + // import will be relative to the current_name + let current_path_buf = PathBuf::from(current_name); + let current_name_parent = current_path_buf + .parent() + .expect("no parent found for current file"); + + let import_path_str = import_path.string.as_str(); + // convert to a PathBuf + let import_path_buf = PathBuf::from(import_path_str); + // check if the import_path begins with an '@' + if import_path_str.starts_with('@') { + // if lib, look for this path in self.sources + let normalized_import = normalize_path(&import_path_buf); + if let Some((confirmed_source_path, sol, _file_no, _entry)) = + self.sources.iter().find(|(path, _sol, _file_no, _entry)| { + normalize_path(path.path_to_solidity_source()) + == normalized_import + }) + { + // found the path, return the source_path + (confirmed_source_path.clone(), sol.clone()) + } else { + // didn't find the path, panic + panic!("Could not find file: {:#?}", normalized_import); + } + } else { + tracing::debug!("import_path_buf is relative"); + + // if relative, join to the current_name_parent + let joined = current_name_parent.join(import_path_buf); + // look for this path in self.sources + let normalized_joined = normalize_path(joined); + + if let Some((confirmed_source_path, sol, _file_no, _entry)) = + self.sources.iter().find(|(path, _sol, _file_no, _entry)| { + normalize_path(path.path_to_solidity_source()) + == normalized_joined + }) + { + // found the path, return the source_path + (confirmed_source_path.clone(), sol.clone()) + } else { + // didn't find the path, panic + panic!("Could not find file: {:#?}", normalized_joined); + } + } + } + } + Root::SolidityFile(_) | Root::RemappingsDirectory(_) => { + panic!("Please report this as a bug, root is SolidityFile or RemappingsDirectory but current path is a SolcJSON") + } + }; + + (remapped, sol) + } + }; + + // check for entry in self.sources that has a matching SourcePath + let normalized_remapped = normalize_path(remapped.path_to_solidity_source()); + if let Some((_, _, _, optional_entry)) = self.sources.iter().find(|(path, _, _, _)| { + normalize_path(path.path_to_solidity_source()) == normalized_remapped + }) { + // if found, add an edge from the parent to the entry + if let Some(o_e) = optional_entry { + self.add_edge(*o_e, parent, Edge::Import); + } + } else { + // if not found, add it + self.sources + .push((remapped.clone(), sol.clone(), None, None)); + } + + let normalized_remapped = normalize_path(remapped.path_to_solidity_source()); + // take self.sources entry with the same path as remapped and update the file_no + if let Some((_, _, optional_file_no, _)) = + self.sources.iter_mut().find(|(path, _, _, _)| { + normalize_path(path.path_to_solidity_source()) == normalized_remapped + }) + { + if optional_file_no.is_some() { + // if the file_no is already set, don't recurse, just return + 
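+                // (a Some(file_no) means this source was already assigned a file number and parsed,
+                // or is mid-parse higher up the import chain, so bail out instead of re-parsing it)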
return; + } + self.file_no += 1; + let file_no = self.file_no; + *optional_file_no = Some(file_no); + } + + let maybe_entry = self.parse(arena, &sol, &remapped, false); + + // take self.sources entry with the same path as remapped and update the entry node + if let Some((_, _, _, optional_entry)) = self.sources.iter_mut().find(|(path, _, _, _)| { + normalize_path(path.path_to_solidity_source()) == normalized_remapped + }) { + *optional_entry = maybe_entry; + } + + if let Some(other_entry) = maybe_entry { + self.add_edge(other_entry, parent, Edge::Import); + }; + } + + // #[tracing::instrument(name = "parse_contract_def", skip_all, fields(name = format!("{:?}", contract_def.name)))] + #[tracing::instrument(level = "trace", skip_all)] + pub fn parse_contract_def( + &mut self, + arena: &mut RangeArena>, + contract_def: &ContractDefinition, + source: SourceUnitNode, + ) -> ( + ContractNode, + Vec, + Vec<(Using, NodeIdx)>, + Vec, + Vec<(VarNode, NodeIdx)>, + ) { + tracing::trace!( + "Parsing contract {}", + if let Some(ident) = &contract_def.name { + ident.name.clone() + } else { + "interface".to_string() + } + ); + use ContractPart::*; + + let import_nodes = self + .sources + .iter() + .map(|(_, _, _, maybe_node)| *maybe_node) + .collect::>(); + // convert vec to slice + let import_nodes = import_nodes.as_slice(); + + let (contract, unhandled_inherits) = + Contract::from_w_imports(contract_def.clone(), source.into(), import_nodes, self); + + let inherits = contract.inherits.clone(); + let con_name = contract.name.clone().unwrap().name; + let con_node: ContractNode = + if let Some(user_ty_node) = self.user_types.get(&con_name).cloned() { + let unresolved = self.node_mut(user_ty_node); + *unresolved = Node::Contract(contract); + user_ty_node.into() + } else { + let node = self.add_node(Node::Contract(contract)); + self.user_types.insert(con_name, node); + node.into() + }; + + inherits.iter().for_each(|contract_node| { + self.add_edge(*contract_node, con_node, Edge::InheritedContract); + }); + let mut usings = vec![]; + let mut func_nodes = vec![]; + let mut vars = vec![]; + contract_def.parts.iter().for_each(|cpart| match cpart { + StructDefinition(def) => { + let node = self.parse_struct_def(arena, def); + self.add_edge(node, con_node, Edge::Struct); + } + EnumDefinition(def) => { + let node = self.parse_enum_def(def); + self.add_edge(node, con_node, Edge::Enum); + } + ErrorDefinition(def) => { + let node = self.parse_err_def(arena, def); + self.add_edge(node, con_node, Edge::Error); + } + VariableDefinition(def) => { + let (node, maybe_func, needs_final_pass) = self.parse_var_def(arena, def, true); + if let Some(func) = maybe_func { + func_nodes.push(self.handle_func(func, Some(con_node))); + } + + if needs_final_pass { + vars.push((node, con_node.into())); + } + + self.add_edge(node, con_node, Edge::Var); + } + FunctionDefinition(def) => { + let node = self.parse_func_def(def, Some(con_node)); + func_nodes.push(node); + } + TypeDefinition(def) => { + let node = self.parse_ty_def(arena, def); + self.add_edge(node, con_node, Edge::Ty); + } + EventDefinition(_def) => {} + Annotation(_anno) => todo!(), + Using(using) => usings.push((*using.clone(), con_node.0.into())), + StraySemicolon(_loc) => todo!(), + }); + (con_node, func_nodes, usings, unhandled_inherits, vars) + } + + #[tracing::instrument(level = "trace", skip_all)] + pub fn parse_using( + &mut self, + arena: &mut RangeArena>, + using_def: &Using, + scope_node: NodeIdx, + ) { + tracing::trace!("Parsing \"using\" {:?}", using_def); + let 
Some(ref using_def_ty) = using_def.ty else { + self.add_expr_err(ExprErr::Todo( + using_def.loc(), + "Using statements with wildcards currently unsupported".to_string(), + )); + return; + }; + let maybe_cvar_idx = self.parse_expr(arena, using_def_ty, None); + let ty_idx = match VarType::try_from_idx(self, maybe_cvar_idx) { + Some(v_ty) => v_ty.ty_idx(), + None => { + self.add_expr_err(ExprErr::Unresolved( + using_def.loc(), + "Unable to deduce the type for which to apply the `using` statement to" + .to_string(), + )); + return; + } + }; + + match &using_def.list { + UsingList::Library(ident_paths) => { + ident_paths.identifiers.iter().for_each(|ident| { + if let Some(hopefully_contract) = self.user_types.get(&ident.name) { + match self.node(*hopefully_contract) { + Node::Contract(_) => { + let funcs = ContractNode::from(*hopefully_contract).funcs(self); + let relevant_funcs: Vec<_> = funcs + .iter() + .filter_map(|func| { + let first_param: FunctionParamNode = + *func.params(self).iter().take(1).next()?; + let param_ty = first_param.ty(self).unwrap(); + if param_ty == ty_idx { + Some(func) + } else { + None + } + }) + .copied() + .collect(); + relevant_funcs.iter().for_each(|func| { + self.add_edge(ty_idx, *func, Edge::LibraryFunction(scope_node)); + }); + } + _ => self.add_expr_err(ExprErr::ParseError( + using_def.loc(), + "Tried to use a non-contract as a contract in a `using` statement" + .to_string(), + )), + } + } else { + panic!("Cannot find library contract {}", ident.name); + } + }); + } + UsingList::Functions(vec_ident_paths) => { + vec_ident_paths.iter().for_each(|ident_paths| { + if ident_paths.path.identifiers.len() == 2 { + if let Some(hopefully_contract) = + self.user_types.get(&ident_paths.path.identifiers[0].name) + { + if let Some(func) = ContractNode::from(*hopefully_contract) + .funcs(self) + .iter() + .find(|func| { + func.name(self) + .unwrap() + .starts_with(&ident_paths.path.identifiers[1].name) + }) + { + self.add_edge(ty_idx, *func, Edge::LibraryFunction(scope_node)); + } else { + panic!( + "Cannot find library function {}.{}", + ident_paths.path.identifiers[0].name, + ident_paths.path.identifiers[1].name + ); + } + } else { + panic!( + "Cannot find library contract {}", + ident_paths.path.identifiers[0].name + ); + } + } else { + // looking for free floating function + let funcs = match self.node(scope_node) { + Node::Contract(_) => self.search_children( + ContractNode::from(scope_node) + .associated_source(self) + .into(), + &Edge::Func, + ), + Node::SourceUnit(..) 
=> self.search_children(scope_node, &Edge::Func), + _ => unreachable!(), + }; + if let Some(func) = funcs.iter().find(|func| { + FunctionNode::from(**func) + .name(self) + .unwrap() + .starts_with(&ident_paths.path.identifiers[0].name) + }) { + self.add_edge(ty_idx, *func, Edge::LibraryFunction(scope_node)); + } else { + panic!( + "Cannot find library function {}", + ident_paths.path.identifiers[0].name + ); + } + } + }); + } + UsingList::Error => todo!(), + } + } + + #[tracing::instrument(level = "trace", skip_all)] + pub fn parse_enum_def(&mut self, enum_def: &EnumDefinition) -> EnumNode { + tracing::trace!("Parsing enum {:?}", enum_def); + let enu = Enum::from(enum_def.clone()); + let name = enu.name.clone().expect("Enum was not named").name; + + // check if we have an unresolved type by the same name + let enu_node: EnumNode = if let Some(user_ty_node) = self.user_types.get(&name).cloned() { + let unresolved = self.node_mut(user_ty_node); + *unresolved = Node::Enum(enu); + user_ty_node.into() + } else { + let node = self.add_node(enu); + self.user_types.insert(name, node); + node.into() + }; + + enu_node + } + + #[tracing::instrument(level = "trace", skip_all)] + pub fn parse_struct_def( + &mut self, + arena: &mut RangeArena>, + struct_def: &StructDefinition, + ) -> StructNode { + tracing::trace!("Parsing struct {:?}", struct_def.name); + let strukt = Struct::from(struct_def.clone()); + + let name = strukt.name.clone().expect("Struct was not named").name; + + // check if we have an unresolved type by the same name + let strukt_node: StructNode = + if let Some(user_ty_node) = self.user_types.get(&name).cloned() { + let unresolved = self.node_mut(user_ty_node); + *unresolved = Node::Struct(strukt); + user_ty_node.into() + } else { + let node = self.add_node(strukt); + self.user_types.insert(name, node); + node.into() + }; + + struct_def.fields.iter().for_each(|field| { + let f = Field::new(self, arena, field.clone()); + let field_node = self.add_node(f); + self.add_edge(field_node, strukt_node, Edge::Field); + }); + strukt_node + } + + #[tracing::instrument(level = "trace", skip_all)] + pub fn parse_err_def( + &mut self, + arena: &mut RangeArena>, + err_def: &ErrorDefinition, + ) -> ErrorNode { + tracing::trace!("Parsing error {:?}", err_def); + let err_node = ErrorNode(self.add_node(Error::from(err_def.clone())).index()); + err_def.fields.iter().for_each(|field| { + let param = ErrorParam::new(self, arena, field.clone()); + let field_node = self.add_node(param); + self.add_edge(field_node, err_node, Edge::ErrorParam); + }); + err_node + } + + #[tracing::instrument(level = "trace", skip_all)] + pub fn parse_func_def( + &mut self, + func_def: &FunctionDefinition, + con_node: Option, + ) -> FunctionNode { + let func = Function::from(func_def.clone()); + tracing::trace!( + "Parsing function {:?}", + func.name + .clone() + .unwrap_or_else(|| solang_parser::pt::Identifier { + loc: solang_parser::pt::Loc::Implicit, + name: "".to_string() + }) + .name + ); + self.handle_func(func, con_node) + } + + pub fn handle_func(&mut self, func: Function, con_node: Option) -> FunctionNode { + match func.ty { + FunctionTy::Constructor => { + let node = self.add_node(func); + let func_node = node.into(); + + if let Some(con_node) = con_node { + self.add_edge(node, con_node, Edge::Constructor); + } + func_node + } + FunctionTy::Fallback => { + let node = self.add_node(func); + let func_node = node.into(); + + if let Some(con_node) = con_node { + self.add_edge(node, con_node, Edge::FallbackFunc); + } + + 
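+                // return the fallback's FunctionNode so callers can track it alongside the other parsed functions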
func_node + } + FunctionTy::Receive => { + // receive function cannot have input/output + let node = self.add_node(func); + if let Some(con_node) = con_node { + self.add_edge(node, con_node, Edge::ReceiveFunc); + } + FunctionNode::from(node) + } + FunctionTy::Function => { + let fn_node = self.add_node(func); + if let Some(con_node) = con_node { + self.add_edge(fn_node, con_node, Edge::Func); + } + fn_node.into() + } + FunctionTy::Modifier => { + let fn_node = self.add_node(func); + if let Some(con_node) = con_node { + self.add_edge(fn_node, con_node, Edge::Modifier); + } + fn_node.into() + } + } + } + + pub fn parse_var_def( + &mut self, + arena: &mut RangeArena>, + var_def: &VariableDefinition, + in_contract: bool, + ) -> (VarNode, Option, bool) { + tracing::trace!("Parsing variable definition: {:?}", var_def.name); + let var = Var::new(self, arena, var_def.clone(), in_contract); + let mut func = None; + if var.is_public() { + func = Some(Function::from(var_def.clone())); + } + let needs_final_pass = var.initializer_expr.is_some(); + let var_node = VarNode::from(self.add_node(var)); + self.user_types + .insert(var_node.name(self).unwrap(), var_node.into()); + (var_node, func, needs_final_pass) + } + + pub fn parse_ty_def( + &mut self, + arena: &mut RangeArena>, + ty_def: &TypeDefinition, + ) -> TyNode { + tracing::trace!("Parsing type definition"); + let ty = Ty::new(self, arena, ty_def.clone()); + let name = ty.name.name.clone(); + let ty_node: TyNode = if let Some(user_ty_node) = self.user_types.get(&name).cloned() { + let unresolved = self.node_mut(user_ty_node); + *unresolved = Node::Ty(ty); + user_ty_node.into() + } else { + let node = self.add_node(Node::Ty(ty)); + self.user_types.insert(name, node); + node.into() + }; + ty_node + } +} + +/// Print the report of parser's diagnostics +pub fn print_diagnostics_report( + content: &str, + path: &Path, + diagnostics: Vec, +) -> std::io::Result<()> { + let filename = path.file_name().unwrap().to_string_lossy().to_string(); + for diag in diagnostics { + let (start, end) = (diag.loc.start(), diag.loc.end()); + let mut report = Report::build(ReportKind::Error, &filename, start) + .with_message(format!("{:?}", diag.ty)) + .with_label( + Label::new((&filename, start..end)) + .with_color(Color::Red) + .with_message(format!("{}", diag.message.fg(Color::Red))), + ); + + for note in diag.notes { + report = report.with_note(note.message); + } + + report.finish().print((&filename, Source::from(content)))?; + } + Ok(()) +} + +/// Normalize the path by resolving the `.` and `..` components in order to do path comparison. +/// +/// This is used instead of `std::fs::canonicalize()` in cases where the path is not present on the filesystem (e.g. 
in the case of a Solc Standard JSON) +/// +/// ## Examples +/// +/// ``` +/// use std::path::{Path, PathBuf}; +/// use pyrometer::normalize_path; +/// +/// let path = Path::new("/src/contracts/./Main.sol"); +/// assert_eq!(normalize_path(path), PathBuf::from("/src/contracts/Main.sol")); +/// +/// let path = Path::new("/src/contracts/../Main.sol"); +/// assert_eq!(normalize_path(path), PathBuf::from("/src/Main.sol")); +/// ``` +pub fn normalize_path>(path: P) -> PathBuf { + let mut normalized_path = PathBuf::new(); + + for component in path.as_ref().components() { + match component { + std::path::Component::CurDir => {} // Ignore current dir component + std::path::Component::ParentDir => { + // Handle parent dir component + normalized_path.pop(); + } + _ => normalized_path.push(component), + } + } + + normalized_path +} diff --git a/crates/pyrometer/src/analyzer_backend.rs b/crates/pyrometer/src/analyzer_backend.rs new file mode 100644 index 00000000..b4834d50 --- /dev/null +++ b/crates/pyrometer/src/analyzer_backend.rs @@ -0,0 +1,279 @@ +use crate::Analyzer; + +use graph::{ + elem::Elem, + nodes::{ + BlockNode, Builtin, Concrete, ConcreteNode, ContextVar, Function, FunctionNode, + FunctionParam, FunctionParamNode, FunctionReturn, MsgNode, + }, + AnalyzerBackend, Edge, Node, VarType, +}; +use shared::{AnalyzerLike, GraphLike, JoinStats, NodeIdx, RangeArena}; +use solc_expressions::{ExprErr, IntoExprErr}; + +use ahash::AHashMap; +use ethers_core::types::U256; +use solang_parser::{ + helpers::CodeLocation, + pt::{Expression, Loc}, +}; + +use std::collections::BTreeMap; + +impl AnalyzerBackend for Analyzer { + fn add_concrete_var( + &mut self, + ctx: graph::nodes::ContextNode, + concrete: Concrete, + loc: Loc, + ) -> Result { + let cnode = self.add_node(Node::Concrete(concrete)); + let var = ContextVar::new_from_concrete(loc, ctx, cnode.into(), self); + let cnode = self.add_node(Node::ContextVar(var.into_expr_err(loc)?)); + Ok(cnode.into()) + } +} + +impl AnalyzerLike for Analyzer { + type Expr = Expression; + type ExprErr = ExprErr; + type MsgNode = MsgNode; + type BlockNode = BlockNode; + + type Function = Function; + type FunctionNode = FunctionNode; + type FunctionParam = FunctionParam; + type FunctionReturn = FunctionReturn; + type Builtin = Builtin; + + fn builtin_fn_nodes(&self) -> &AHashMap { + &self.builtin_fn_nodes + } + + fn builtin_fn_nodes_mut(&mut self) -> &mut AHashMap { + &mut self.builtin_fn_nodes + } + + fn max_depth(&self) -> usize { + self.max_depth + } + + fn max_width(&self) -> usize { + self.max_width + } + + fn add_expr_err(&mut self, err: ExprErr) { + if self.debug_panic() { + panic!("Encountered an error: {err:?}"); + } + if !self.expr_errs.contains(&err) { + self.expr_errs.push(err); + } + } + + fn expr_errs(&self) -> Vec { + self.expr_errs.clone() + } + + fn entry(&self) -> NodeIdx { + self.entry + } + + fn parse_fn(&self) -> FunctionNode { + self.parse_fn + } + + fn msg(&mut self) -> MsgNode { + self.msg + } + + fn block(&mut self) -> BlockNode { + self.block + } + + fn builtin_fns(&self) -> &AHashMap { + &self.builtin_fns + } + + fn builtin_fn_inputs(&self) -> &AHashMap, Vec)> { + &self.builtin_fn_inputs + } + + fn builtins(&self) -> &AHashMap { + &self.builtins + } + fn builtins_mut(&mut self) -> &mut AHashMap { + &mut self.builtins + } + fn user_types(&self) -> &AHashMap { + &self.user_types + } + fn user_types_mut(&mut self) -> &mut AHashMap { + &mut self.user_types + } + + fn parse_expr( + &mut self, + arena: &mut RangeArena>, + expr: &Expression, + parent: 
Option, + ) -> NodeIdx { + use Expression::*; + match expr { + Type(_loc, ty) => { + if let Some(builtin) = Builtin::try_from_ty(ty.clone(), self, arena) { + if let Some(idx) = self.builtins.get(&builtin) { + *idx + } else { + let idx = self.add_node(Node::Builtin(builtin.clone())); + self.builtins.insert(builtin, idx); + idx + } + } else if let Some(idx) = self.complicated_parse(arena, expr, parent) { + self.add_if_err(idx.expect_single().into_expr_err(expr.loc())) + .unwrap_or(0.into()) + } else { + 0.into() + } + } + Variable(ident) => { + if let Some(idx) = self.user_types.get(&ident.name) { + *idx + } else { + let node = self.add_node(Node::Unresolved(ident.clone())); + self.user_types.insert(ident.name.clone(), node); + node + } + } + ArraySubscript(_loc, ty_expr, None) => { + let inner_ty = self.parse_expr(arena, ty_expr, parent); + if let Some(var_type) = VarType::try_from_idx(self, inner_ty) { + let dyn_b = Builtin::Array(var_type); + if let Some(idx) = self.builtins.get(&dyn_b) { + *idx + } else { + let idx = self.add_node(Node::Builtin(dyn_b.clone())); + self.builtins.insert(dyn_b, idx); + idx + } + } else { + inner_ty + } + } + ArraySubscript(loc, ty_expr, Some(idx_expr)) => { + let inner_ty = self.parse_expr(arena, ty_expr, parent); + let idx = self.parse_expr(arena, idx_expr, parent); + if let Some(var_type) = VarType::try_from_idx(self, inner_ty) { + let res = ConcreteNode::from(idx) + .underlying(self) + .into_expr_err(*loc) + .cloned(); + if let Some(concrete) = self.add_if_err(res) { + if let Some(size) = concrete.uint_val() { + let dyn_b = Builtin::SizedArray(size, var_type); + if let Some(idx) = self.builtins.get(&dyn_b) { + *idx + } else { + let idx = self.add_node(Node::Builtin(dyn_b.clone())); + self.builtins.insert(dyn_b, idx); + idx + } + } else { + inner_ty + } + } else { + inner_ty + } + } else { + inner_ty + } + } + NumberLiteral(_loc, integer, exponent, _unit) => { + let int = U256::from_dec_str(integer).unwrap(); + let val = if !exponent.is_empty() { + let exp = U256::from_dec_str(exponent).unwrap(); + int * U256::from(10).pow(exp) + } else { + int + }; + + self.add_node(Node::Concrete(Concrete::Uint(256, val))) + } + _ => { + if let Some(idx) = self.complicated_parse(arena, expr, parent) { + self.add_if_err(idx.expect_single().into_expr_err(expr.loc())) + .unwrap_or(0.into()) + } else { + 0.into() + } + } + } + } + + fn builtin_or_add(&mut self, builtin: Builtin) -> NodeIdx { + if let Some(idx) = self.builtins().get(&builtin) { + *idx + } else { + let idx = self.add_node(Node::Builtin(builtin.clone())); + self.builtins_mut().insert(builtin, idx); + idx + } + } + + fn builtin_fn_or_maybe_add(&mut self, builtin_name: &str) -> Option + where + Self: std::marker::Sized, + { + if let Some(idx) = self.builtin_fn_nodes().get(builtin_name) { + Some(*idx) + } else if let Some(func) = self.builtin_fns().get(builtin_name) { + let (inputs, outputs) = self + .builtin_fn_inputs() + .get(builtin_name) + .expect("builtin func but no inputs") + .clone(); + let func_node = self.add_node(Node::Function(func.clone())); + let mut params_strs = vec![]; + inputs.into_iter().for_each(|input| { + let input_node = self.add_node(input); + params_strs.push(FunctionParamNode::from(input_node).ty_str(self).unwrap()); + self.add_edge(input_node, func_node, Edge::FunctionParam); + }); + outputs.into_iter().for_each(|output| { + let output_node = self.add_node(output); + self.add_edge(output_node, func_node, Edge::FunctionReturn); + }); + + self.add_edge(func_node, self.entry(), Edge::Func); 
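+            // cache the freshly created builtin function node so later lookups reuse it
+            // instead of rebuilding its params/returns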
+ + self.builtin_fn_nodes_mut() + .insert(builtin_name.to_string(), func_node); + Some(func_node) + } else { + None + } + } + + fn debug_panic(&self) -> bool { + self.debug_panic + } + + fn fn_calls_fns(&self) -> &BTreeMap> { + &self.fn_calls_fns + } + fn fn_calls_fns_mut(&mut self) -> &mut BTreeMap> { + &mut self.fn_calls_fns + } + + fn join_stats_mut(&mut self) -> &mut JoinStats { + &mut self.join_stats + } + + fn handled_funcs(&self) -> &[FunctionNode] { + &self.handled_funcs + } + + fn handled_funcs_mut(&mut self) -> &mut Vec { + &mut self.handled_funcs + } +} diff --git a/src/builtin_fns.rs b/crates/pyrometer/src/builtin_fns.rs similarity index 98% rename from src/builtin_fns.rs rename to crates/pyrometer/src/builtin_fns.rs index f6832307..1c3caeaa 100644 --- a/src/builtin_fns.rs +++ b/crates/pyrometer/src/builtin_fns.rs @@ -1,8 +1,9 @@ -use crate::Builtin; -use crate::{Function, FunctionParam, FunctionReturn}; -use shared::analyzer::{AnalyzerLike, GraphLike}; -use solang_parser::pt::{FunctionAttribute, Identifier, Loc, StorageLocation, Visibility}; -use std::collections::HashMap; +use graph::nodes::{Builtin, Function, FunctionParam, FunctionReturn}; +use shared::{AnalyzerLike, StorageLocation}; + +use solang_parser::pt::{FunctionAttribute, Identifier, Loc, Visibility}; + +use ahash::AHashMap; macro_rules! builtin_fn { ($($field:ident : $value:expr),* $(,)?) => { @@ -16,7 +17,7 @@ macro_rules! builtin_fn { } // A list of all Solidity builtins functions -pub fn builtin_fns() -> HashMap { +pub fn builtin_fns() -> AHashMap { let funcs = [ builtin_fn!( name: Some(Identifier { @@ -324,8 +325,8 @@ pub fn builtin_fns() -> HashMap { } pub fn builtin_fns_inputs( - analyzer: &mut (impl GraphLike + AnalyzerLike), -) -> HashMap, Vec)> { + analyzer: &mut impl AnalyzerLike, +) -> AHashMap, Vec)> { let funcs = [ ("wrap", vec![], vec![]), ("unwrap", vec![], vec![]), diff --git a/crates/pyrometer/src/graph_backend.rs b/crates/pyrometer/src/graph_backend.rs new file mode 100644 index 00000000..3512cbdd --- /dev/null +++ b/crates/pyrometer/src/graph_backend.rs @@ -0,0 +1,698 @@ +use crate::Analyzer; +use graph::elem::Elem; +use graph::nodes::Concrete; +use shared::RangeArena; +// use std::collections::hash_map::DefaultHasher; +// use std::hash::Hash; +// use std::hash::Hasher; + +use graph::{ + as_dot_str, nodes::ContextNode, AnalyzerBackend, AsDotStr, ContextEdge, Edge, GraphBackend, + Node, +}; +use shared::{GraphDot, GraphLike, NodeIdx, Search}; + +use petgraph::{dot::Dot, graph::EdgeIndex, visit::EdgeRef, Directed, Direction, Graph}; + +use std::{ + collections::BTreeSet, + sync::{Arc, Mutex}, +}; + +impl GraphLike for Analyzer { + type Node = Node; + type Edge = Edge; + type RangeElem = Elem; + fn graph_mut(&mut self) -> &mut Graph { + &mut self.graph + } + + fn graph(&self) -> &Graph { + &self.graph + } + + fn range_arena(&self) -> &RangeArena> { + &self.range_arena + } + + fn range_arena_mut(&mut self) -> &mut RangeArena> { + &mut self.range_arena + } + + // fn range_arena_idx(&self, elem: &Self::RangeElem) -> Option { + // if let Elem::Arena(idx) = elem { + // Some(*idx) + // } else { + // self.range_arena().map.get(elem).copied() + // } + // } +} + +// fn calculate_hash(t: &T) -> u64 { +// let mut s = DefaultHasher::new(); +// t.hash(&mut s); +// s.finish() +// } + +impl GraphBackend for Analyzer {} + +impl GraphDot for Analyzer { + type T = Elem; + fn cluster_str( + &self, + arena: &mut RangeArena>, + node: NodeIdx, + cluster_num: &mut usize, + is_killed: bool, + handled_nodes: Arc>>, + 
handled_edges: Arc>>>, + depth: usize, + as_mermaid: bool, + ) -> Option + where + Self: std::marker::Sized, + { + *cluster_num += 1; + let curr_cluster = *cluster_num; + + // only used for mermaid + let curr_cluster_name = format!( + "cluster_{cluster_num}_{}", + if is_killed && curr_cluster % 2 == 0 { + "bgcolor_7a0b0b" + } else if is_killed { + "bgcolor_e04646" + } else if curr_cluster % 2 == 0 { + "bgcolor_252C46" + } else { + "bgcolor_1a1b26" + } + ); + + if self + .graph() + .edges_directed(node, Direction::Outgoing) + .collect::>() + .is_empty() + { + return None; + } + let new_graph = self.graph().filter_map( + |_idx, node| match node { + Node::ContextVar(_cvar) => Some(node.clone()), + _ => Some(node.clone()), + }, + |_idx, edge| Some(*edge), + ); + + let g = &G { graph: &new_graph }; + let children = g.children_exclude(node, 0, &[Edge::Context(ContextEdge::Subcontext)]); + let mut children_edges = g + .edges_for_nodes(&children) + .into_iter() + .filter(|(_, _, e, _)| *e != Edge::Context(ContextEdge::InputVariable)) + .collect::>(); + children_edges.extend( + self.graph() + .edges_directed(node, Direction::Incoming) + .filter(|edge| *edge.weight() != Edge::Context(ContextEdge::InputVariable)) + .map(|edge| (edge.source(), edge.target(), *edge.weight(), edge.id())) + .collect::)>>(), + ); + let preindent = " ".repeat(4 * depth.saturating_sub(1)); + let indent = " ".repeat(4 * depth); + let child_node_str = children + .iter() + .filter_map(|child| { + if handled_nodes.lock().unwrap().contains(child) { + return None; + } + + let post_str = match self.node(*child) { + Node::Context(c) => { + *cluster_num += 2; + if let Some(inner) = self.cluster_str( + arena, + *child, + cluster_num, + c.killed.is_some(), + handled_nodes.clone(), + handled_edges.clone(), + depth + 1, + as_mermaid, + ) { + inner + } else { + "".to_string() + } + } + Node::ContextFork => { + let children = g.children_exclude(*child, 0, &[]); + let mut child_iter = children.iter(); + let l_fork = child_iter.next()?; + let r_fork = child_iter.next()?; + let l_ctx = ContextNode::from(*l_fork); + let r_ctx = ContextNode::from(*r_fork); + *cluster_num += 1; + let l_fork = if let Some(inner) = self.cluster_str( + arena, + *l_fork, + cluster_num, + l_ctx.is_killed(self).ok()?, + handled_nodes.clone(), + handled_edges.clone(), + depth + 1, + as_mermaid, + ) { + inner + } else { + "".to_string() + }; + + *cluster_num += 2; + let r_fork = if let Some(inner) = self.cluster_str( + arena, + *r_fork, + cluster_num, + r_ctx.is_killed(self).ok()?, + handled_nodes.clone(), + handled_edges.clone(), + depth + 1, + as_mermaid, + ) { + inner + } else { + "".to_string() + }; + + format!("{l_fork}\n{r_fork}\n") + } + Node::FunctionCall => { + let children = g.children_exclude(*child, 0, &[]); + let mut child_iter = children.iter(); + let func = child_iter.next()?; + let func_ctx = ContextNode::from(*func); + if let Some(inner) = self.cluster_str( + arena, + *func, + cluster_num, + func_ctx.is_killed(self).ok()?, + handled_nodes.clone(), + handled_edges.clone(), + depth + 1, + as_mermaid, + ) { + inner + } else { + "".to_string() + } + } + Node::ContextVar(_) => { + let mut children = g.children_exclude( + *child, + usize::MAX, + &[Edge::Context(ContextEdge::InputVariable)], + ); + children.insert(*child); + children + .iter() + .map(|child| { + if !handled_nodes.lock().unwrap().contains(child) { + handled_nodes.lock().unwrap().insert(*child); + } + mermaid_node( + self, + arena, + &indent, + *child, + true, + true, + 
Some(&curr_cluster_name), + ) + }) + .collect::>() + .join("\n") + } + _ => "".to_string(), + }; + + if as_mermaid { + if handled_nodes.lock().unwrap().contains(child) { + return if !post_str.is_empty() { + Some(post_str) + } else { + None + }; + } else { + handled_nodes.lock().unwrap().insert(*child); + } + Some(format!( + "{}\n{indent}{post_str}", + mermaid_node( + self, + arena, + &indent, + *child, + true, + true, + Some(&curr_cluster_name), + ) + )) + } else { + { + if handled_nodes.lock().unwrap().contains(child) { + return None; + } else { + handled_nodes.lock().unwrap().insert(*child); + } + } + Some(format!( + "{indent}{} [label = \"{}\", color = \"{}\"]\n{}", + petgraph::graph::GraphIndex::index(child), + as_dot_str(*child, g, arena).replace('\"', "\'"), + self.node(*child).dot_str_color(), + post_str + )) + } + }) + .collect::>() + .join("\n"); + + let edge_str = children_edges + .iter() + .filter(|(_, _, _, idx)| !handled_edges.lock().unwrap().contains(idx)) + .map(|(from, to, edge, idx)| { + handled_edges.lock().unwrap().insert(*idx); + let from = petgraph::graph::GraphIndex::index(from); + let to = petgraph::graph::GraphIndex::index(to); + let edge_idx = idx.index(); + let edge_str = format!("{edge:?}").replace('"', "\'"); + if as_mermaid { + format!("{indent}{from:} -->|\"{edge_str}\"| {to:}\n{indent}class {to} linkSource{edge_idx}\n{indent}class {from} linkTarget{edge_idx}") + } else { + format!("{indent}{from:} -> {to:} [label = \"{edge_str}\"]",) + } + }) + .collect::>() + .join("\n"); + + if as_mermaid { + let node_str = { + if handled_nodes.lock().unwrap().contains(&node) { + "".to_string() + } else { + { + handled_nodes.lock().unwrap().insert(node); + } + mermaid_node( + self, + arena, + &indent, + node, + true, + true, + Some(&curr_cluster_name), + ) + } + }; + + let child_node_str = if child_node_str.is_empty() { + "".into() + } else { + format!("\n{child_node_str}") + }; + let edge_str = if edge_str.is_empty() { + "".into() + } else { + format!("\n{edge_str}") + }; + if node_str.is_empty() && child_node_str.is_empty() && edge_str.is_empty() { + return None; + } + Some(format!( + "{preindent}subgraph {curr_cluster_name}\n{node_str}{child_node_str}{edge_str}\n{preindent}end", + )) + } else { + Some(format!( + "{preindent}subgraph cluster_{} {{\n{indent}{}\n{indent}{} [label = \"{}\", color = \"{}\"]\n{}\n{}\n}}", + cluster_num, + if is_killed && curr_cluster % 2 == 0 { + "bgcolor=\"#7a0b0b\"" + } else if is_killed { + "bgcolor=\"#e04646\"" + } else if curr_cluster % 2 == 0 { + "bgcolor=\"#545e87\"" + } else { + "bgcolor=\"#1a1b26\"" + }, + node.index(), + as_dot_str(node, g, arena).replace('\"', "\'"), + self.node(node).dot_str_color(), + child_node_str, + edge_str, + )) + } + } + + fn dot_str(&self, arena: &mut RangeArena>) -> String + where + Self: std::marker::Sized, + Self: AnalyzerBackend, + { + let mut dot_str = Vec::new(); + let raw_start_str = r##"digraph G { + node [shape=box, style="filled, rounded", color="#565f89", fontcolor="#d5daf0", fontname="Helvetica", fillcolor="#24283b"]; + edge [color="#414868", fontcolor="#c0caf5", fontname="Helvetica"]; + bgcolor="#1a1b26"; rankdir="BT"; splines=ortho;"##; + dot_str.push(raw_start_str.to_string()); + let handled_edges = Arc::new(Mutex::new(BTreeSet::new())); + let handled_nodes = Arc::new(Mutex::new(BTreeSet::new())); + let (nodes, edges) = ( + self.graph().node_indices().collect::>(), + self.graph().edge_indices().collect::>(), + ); + let mut cluster_num = 0; + let mut skip = BTreeSet::default(); + let nodes_str 
= nodes + .iter() + .filter_map(|node| { + if self + .graph() + .edges_directed(*node, Direction::Outgoing) + .collect::>() + .is_empty() + && !matches!(self.node(*node), Node::Entry) + { + skip.insert(*node); + return None; + } + if !handled_nodes.lock().unwrap().contains(node) { + match self.node(*node) { + Node::Function(_) => { + cluster_num += 2; + Some(self.cluster_str( + arena, + *node, + &mut cluster_num, + false, + handled_nodes.clone(), + handled_edges.clone(), + 2, + false, + )?) + } + n => Some(format!( + " {} [label = \"{}\", color = \"{}\"]", + petgraph::graph::GraphIndex::index(node), + as_dot_str(*node, self, arena).replace('\"', "\'"), + n.dot_str_color() + )), + } + } else { + None + } + }) + .collect::>() + .join("\n"); + let edges_str = edges + .into_iter() + .filter_map(|edge| { + if !handled_edges.lock().unwrap().contains(&edge) { + let (from, to) = self.graph().edge_endpoints(edge).unwrap(); + if skip.contains(&from) || skip.contains(&to) { + return None; + } + let from = from.index(); + let to = to.index(); + Some(format!( + " {from:} -> {to:} [label = \"{}\"]", + format!("{:?}", self.graph().edge_weight(edge).unwrap()).replace('"', "\'") + )) + } else { + None + } + }) + .collect::>() + .join("\n"); + dot_str.push(nodes_str); + dot_str.push(edges_str); + let raw_end_str = r#"}"#; + dot_str.push(raw_end_str.to_string()); + dot_str.join("\n") + } + + fn dot_str_no_tmps(&self, arena: &mut RangeArena>) -> String + where + Self: std::marker::Sized, + Self: GraphLike + AnalyzerBackend, + { + let new_graph = self.graph().filter_map( + |_idx, node| match node { + Node::ContextVar(cvar) => { + if !cvar.is_symbolic || cvar.tmp_of.is_some() { + None + } else { + Some(node.clone()) + } + } + _ => Some(node.clone()), + }, + |_idx, edge| Some(*edge), + ); + let mut dot_str = Vec::new(); + let raw_start_str = r##"digraph G { + node [shape=box, style="filled, rounded", color="#565f89", fontcolor="#d5daf0", fontname="Helvetica", fillcolor="#24283b"]; + edge [color="#414868", fontcolor="#c0caf5", fontname="Helvetica"]; + bgcolor="#1a1b26";"##; + dot_str.push(raw_start_str.to_string()); + let nodes_and_edges_str = format!( + "{:?}", + Dot::with_attr_getters( + &new_graph, + &[ + petgraph::dot::Config::GraphContentOnly, + petgraph::dot::Config::NodeNoLabel, + petgraph::dot::Config::EdgeNoLabel + ], + &|_graph, edge_ref| { + match edge_ref.weight() { + Edge::Context(edge) => { + format!("label = \"{}\"", format!("{edge:?}").replace('"', "\'")) + } + e => format!("label = \"{}\"", format!("{e:?}").replace('"', "\'")), + } + }, + &|_graph, (idx, node_ref)| { + let inner = match node_ref { + Node::ContextVar(cvar) => { + let range_str = if let Some(r) = cvar.ty.ref_range(self).unwrap() { + r.as_dot_str(self, &mut arena.clone()) + // format!("[{}, {}]", r.min.eval(self).to_range_string(self).s, r.max.eval(self).to_range_string(self).s) + } else { + "".to_string() + }; + + format!( + "{} -- {} -- range: {}", + cvar.display_name, + cvar.ty.as_string(self).unwrap(), + range_str + ) + } + _ => as_dot_str(idx, &G { graph: &new_graph }, &mut arena.clone()), + }; + format!( + "label = \"{}\", color = \"{}\"", + inner.replace('\"', "\'"), + node_ref.dot_str_color() + ) + } + ) + ); + dot_str.push(nodes_and_edges_str); + let raw_end_str = r#"}"#; + dot_str.push(raw_end_str.to_string()); + dot_str.join("\n") + } + + fn mermaid_str(&self, arena: &mut RangeArena>) -> String + where + Self: std::marker::Sized, + Self: AnalyzerBackend, + { + let mut dot_str = Vec::new(); + let raw_start_str = r#" +%%{ + 
init : { + 'theme': 'base', + 'themeVariables': { + 'primaryColor': '#1a1b26', + 'primaryTextColor': '#d5daf0', + 'primaryBorderColor': '#7C0000', + 'lineColor': '#414868', + 'secondaryColor': '#24283b', + 'tertiaryColor': '#24283b' + }, + "flowchart" : { + "defaultRenderer": "elk" + } + } +}%% + +flowchart BT +"#; + dot_str.push(raw_start_str.to_string()); + let handled_edges = Arc::new(Mutex::new(BTreeSet::new())); + let handled_nodes = Arc::new(Mutex::new(BTreeSet::new())); + let (nodes, edges) = ( + self.graph().node_indices().collect::>(), + self.graph().edge_indices().collect::>(), + ); + let mut cluster_num = 0; + let mut skip = BTreeSet::default(); + let nodes_str = nodes + .iter() + .filter_map(|node| { + if self + .graph() + .edges_directed(*node, Direction::Outgoing) + .collect::>() + .is_empty() + && !matches!(self.node(*node), Node::Entry) + { + skip.insert(*node); + return None; + } + if !handled_nodes.lock().unwrap().contains(node) { + match self.node(*node) { + Node::Function(_) => { + cluster_num += 2; + Some(self.cluster_str( + arena, + *node, + &mut cluster_num, + false, + handled_nodes.clone(), + handled_edges.clone(), + 2, + true, + )?) + } + Node::ContextVar(_) => None, + n => { + handled_nodes.lock().unwrap().insert(*node); + Some(format!( + " {}(\"{}\")\n style {} stroke:{}", + petgraph::graph::GraphIndex::index(node), + as_dot_str(*node, self, arena).replace('\"', "\'"), + petgraph::graph::GraphIndex::index(node), + n.dot_str_color() + )) + } + } + } else { + None + } + }) + .collect::>() + .join("\n"); + let edges_str = edges + .into_iter() + .filter_map(|edge| { + if !handled_edges.lock().unwrap().contains(&edge) { + let (from, to) = self.graph().edge_endpoints(edge).unwrap(); + if skip.contains(&from) || skip.contains(&to) { + return None; + } + let from = from.index(); + let to = to.index(); + let edge_idx = edge.index(); + Some(format!( + " {from:} -->|\"{}\"| {to:}\n class {to} linkSource{edge_idx}\n class {from} linkTarget{edge_idx}", + format!("{:?}", self.graph().edge_weight(edge).unwrap()).replace('"', "\'") + )) + } else { + None + } + }) + .collect::>() + .join("\n"); + dot_str.push(nodes_str); + dot_str.push(edges_str); + dot_str.join("\n") + } +} + +struct G<'a> { + pub graph: &'a Graph, +} + +impl GraphLike for G<'_> { + type Node = Node; + type Edge = Edge; + type RangeElem = Elem; + fn graph_mut(&mut self) -> &mut Graph { + panic!("Should not call this") + } + + fn graph(&self) -> &Graph { + self.graph + } + fn range_arena(&self) -> &RangeArena> { + panic!("Should not call this") + } + fn range_arena_mut(&mut self) -> &mut RangeArena> { + panic!("Should not call this") + } +} + +impl GraphBackend for G<'_> {} + +pub fn mermaid_node( + g: &impl GraphBackend, + arena: &mut RangeArena>, + indent: &str, + node: NodeIdx, + style: bool, + loc: bool, + class: Option<&str>, +) -> String { + let mut node_str = format!( + "{indent}{}(\"{}\")", + petgraph::graph::GraphIndex::index(&node), + as_dot_str(node, g, arena).replace('\"', "\'"), + ); + + if style { + node_str.push_str(&format!( + "\n{indent}style {} stroke:{}", + petgraph::graph::GraphIndex::index(&node), + g.node(node).dot_str_color() + )); + } + + if loc { + match g.node(node) { + Node::ContextVar(..) 
=> { + if let solang_parser::pt::Loc::File(f, s, e) = + graph::nodes::ContextVarNode::from(node).loc(g).unwrap() + { + node_str.push_str(&format!( + "\n{indent}class {} loc_{f}_{s}_{e}", + petgraph::graph::GraphIndex::index(&node) + )); + } + } + _ => {} + } + } + + if let Some(class) = class { + node_str.push_str(&format!( + "\n{indent}class {} {class}", + petgraph::graph::GraphIndex::index(&node), + )); + } + + node_str +} diff --git a/crates/pyrometer/src/lib.rs b/crates/pyrometer/src/lib.rs new file mode 100644 index 00000000..bedfe8d6 --- /dev/null +++ b/crates/pyrometer/src/lib.rs @@ -0,0 +1,6 @@ +mod analyzer; +mod analyzer_backend; +mod builtin_fns; +mod graph_backend; + +pub use analyzer::*; diff --git a/tests/benches/flat_comptroller.sol b/crates/pyrometer/tests/benches/flat_comptroller.sol similarity index 100% rename from tests/benches/flat_comptroller.sol rename to crates/pyrometer/tests/benches/flat_comptroller.sol diff --git a/tests/benches/flat_ctoken.sol b/crates/pyrometer/tests/benches/flat_ctoken.sol similarity index 100% rename from tests/benches/flat_ctoken.sol rename to crates/pyrometer/tests/benches/flat_ctoken.sol diff --git a/tests/challenges/apron.sol b/crates/pyrometer/tests/challenges/apron.sol similarity index 77% rename from tests/challenges/apron.sol rename to crates/pyrometer/tests/challenges/apron.sol index c15db140..d4352d34 100644 --- a/tests/challenges/apron.sol +++ b/crates/pyrometer/tests/challenges/apron.sol @@ -1,4 +1,8 @@ // Realistically this challenge requires `join` functionality to run in a normal time frame (15 seconds currently) + +// uint256 constant ITERS = 20; +// int256 constant ITERS2 = int(ITERS) - 1; + contract Apron { uint256 k; uint256 i; @@ -15,7 +19,7 @@ contract Apron { } function bb1_t() public { - if (i <= 99) { + if (i <= 50) { bb2(); } } @@ -23,12 +27,12 @@ contract Apron { function bb2() public { i += 1; k += 1; - if (i <= 99) { + if (i <= 50) { bb1(); } } function bb1_f() public { - require(-1 * int256(i) <= -100); + require(-1 * int256(i) <= -51); } } \ No newline at end of file diff --git a/tests/challenges/func_stress.sol b/crates/pyrometer/tests/challenges/func_stress.sol similarity index 100% rename from tests/challenges/func_stress.sol rename to crates/pyrometer/tests/challenges/func_stress.sol diff --git a/tests/challenges/reverse_bound_propogation.sol b/crates/pyrometer/tests/challenges/reverse_bound_propogation.sol similarity index 100% rename from tests/challenges/reverse_bound_propogation.sol rename to crates/pyrometer/tests/challenges/reverse_bound_propogation.sol diff --git a/crates/pyrometer/tests/helpers.rs b/crates/pyrometer/tests/helpers.rs new file mode 100644 index 00000000..02f28c97 --- /dev/null +++ b/crates/pyrometer/tests/helpers.rs @@ -0,0 +1,112 @@ +use analyzers::FunctionVarsBoundAnalyzer; +use analyzers::ReportConfig; +use analyzers::ReportDisplay; +use ariadne::sources; +use graph::{ + elem::Elem, + nodes::{Concrete, FunctionNode}, + Edge, +}; +use pyrometer::{Analyzer, SourcePath}; +use shared::NodeIdx; +use shared::RangeArena; +use shared::Search; +use std::collections::BTreeMap; +use std::collections::HashMap; +use std::path::PathBuf; + +pub fn assert_no_parse_errors(path_str: String) { + let sol = std::fs::read_to_string(path_str.clone()).unwrap(); + let mut analyzer = Analyzer::default(); + let mut arena_base = Default::default(); + let arena = &mut arena_base; + let current_path = SourcePath::SolidityFile(PathBuf::from(path_str.clone())); + let _ = analyzer.parse(arena, &sol, ¤t_path, true); 
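+    // the returned entry node is not needed here; we only care that parsing produced no errors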
+ assert!( + analyzer.expr_errs.is_empty(), + "Analyzer encountered parse errors in {}", + path_str + ); +} + +pub fn assert_no_ctx_killed(path_str: String, sol: &str) { + let mut analyzer = Analyzer::default(); + let mut arena_base = Default::default(); + let arena = &mut arena_base; + let current_path = SourcePath::SolidityFile(PathBuf::from(path_str.clone())); + let maybe_entry = analyzer.parse(arena, sol, ¤t_path, true); + let entry = maybe_entry.unwrap(); + no_ctx_killed(analyzer, arena, entry); +} + +pub fn remapping_assert_no_ctx_killed(path_str: String, remapping_file: String, sol: &str) { + let mut analyzer = Analyzer::default(); + analyzer.set_remappings_and_root(remapping_file); + let current_path = SourcePath::SolidityFile(PathBuf::from(path_str.clone())); + let mut arena_base = Default::default(); + let arena = &mut arena_base; + let maybe_entry = analyzer.parse(arena, sol, ¤t_path, true); + let entry = maybe_entry.unwrap(); + no_ctx_killed(analyzer, arena, entry); +} + +pub fn no_ctx_killed( + mut analyzer: Analyzer, + arena: &mut RangeArena>, + entry: NodeIdx, +) { + assert!( + analyzer.expr_errs.is_empty(), + "Analyzer encountered parse errors" + ); + + let config = ReportConfig { + eval_bounds: true, + simplify_bounds: false, + show_tmps: true, + show_consts: true, + show_symbolics: true, + show_initial_bounds: true, + show_all_lines: true, + show_reverts: true, + show_unreachables: true, + show_nonreverts: true, + }; + let mut file_mapping: BTreeMap = BTreeMap::new(); + let mut src_map: HashMap = HashMap::new(); + for (source_path, sol, o_file_no, _o_entry) in analyzer.sources.iter() { + if let Some(file_no) = o_file_no { + file_mapping.insert( + *file_no, + source_path.path_to_solidity_source().display().to_string(), + ); + } + src_map.insert( + source_path.path_to_solidity_source().display().to_string(), + sol.to_string(), + ); + } + let mut source_map = sources(src_map); + + let funcs = analyzer.search_children(entry, &Edge::Func); + for func in funcs.into_iter() { + if let Some(ctx) = FunctionNode::from(func).maybe_body_ctx(&mut analyzer) { + if ctx.killed_loc(&analyzer).unwrap().is_some() { + analyzer + .bounds_for_all(arena, &file_mapping, ctx, config) + .as_cli_compat(&file_mapping) + .print_reports(&mut source_map, &analyzer, arena); + panic!("Killed context in test"); + } + ctx.all_edges(&analyzer).unwrap().iter().for_each(|subctx| { + if subctx.killed_loc(&analyzer).unwrap().is_some() { + analyzer + .bounds_for_all(arena, &file_mapping, *subctx, config) + .as_cli_compat(&file_mapping) + .print_reports(&mut source_map, &analyzer, arena); + panic!("Killed context in test"); + } + }); + } + } +} diff --git a/tests/no_killed_ctxs.rs b/crates/pyrometer/tests/no_killed_ctxs.rs similarity index 92% rename from tests/no_killed_ctxs.rs rename to crates/pyrometer/tests/no_killed_ctxs.rs index 7dd80a84..b56de34f 100644 --- a/tests/no_killed_ctxs.rs +++ b/crates/pyrometer/tests/no_killed_ctxs.rs @@ -150,8 +150,7 @@ fn test_interface() { fn test_const_var() { let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); let path_str = format!("{manifest_dir}/tests/test_data/const_var.sol"); - let sol = include_str!("./test_data/const_var.sol"); - assert_no_ctx_killed(path_str, sol); + assert_no_parse_errors(path_str); } #[test] @@ -173,3 +172,15 @@ fn test_remapping_import() { sol, ); } + +#[test] +fn test_repros() { + let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); + let path_str = format!("{manifest_dir}/tests/test_data/repros/"); + let paths = 
std::fs::read_dir(path_str).unwrap(); + for path in paths { + let path_str = path.unwrap().path().display().to_string(); + println!("checking parse errors in: {path_str}"); + assert_no_parse_errors(path_str); + } +} diff --git a/tests/test_data/abstract.sol b/crates/pyrometer/tests/test_data/abstract.sol similarity index 100% rename from tests/test_data/abstract.sol rename to crates/pyrometer/tests/test_data/abstract.sol diff --git a/tests/test_data/assembly.sol b/crates/pyrometer/tests/test_data/assembly.sol similarity index 100% rename from tests/test_data/assembly.sol rename to crates/pyrometer/tests/test_data/assembly.sol diff --git a/tests/test_data/bitwise.sol b/crates/pyrometer/tests/test_data/bitwise.sol similarity index 99% rename from tests/test_data/bitwise.sol rename to crates/pyrometer/tests/test_data/bitwise.sol index 8c34d50d..2d198c18 100644 --- a/tests/test_data/bitwise.sol +++ b/crates/pyrometer/tests/test_data/bitwise.sol @@ -137,7 +137,7 @@ contract BitNot { require(~type(uint24).max == 0); require(bit_not(50) == 115792089237316195423570985008687907853269984665640564039457584007913129639885); } - + function int_bit_not(int256 x) public returns (int256) { return ~x; } diff --git a/tests/test_data/cast.sol b/crates/pyrometer/tests/test_data/cast.sol similarity index 94% rename from tests/test_data/cast.sol rename to crates/pyrometer/tests/test_data/cast.sol index 4b91ecb2..b31a7228 100644 --- a/tests/test_data/cast.sol +++ b/crates/pyrometer/tests/test_data/cast.sol @@ -231,19 +231,37 @@ contract Cast { require(b == x); } + + function downcast_uint_conc() public returns (uint64) { + uint128 y = type(uint128).max; + y -= type(uint32).max; + return uint64(y); + } + + function downcast_int_conc() public returns (int64) { + int128 x = type(int128).max; + x -= type(int32).max; + return int64(x); + } + function userInt() internal { int256 x = -100; - MyUint a = MyInt.wrap(x); + MyInt a = MyInt.wrap(x); int256 b = MyInt.unwrap(a); require(b == x); } - function int_uint_int() internal { + function int_uint_int_conc() internal { int256 a = -100; uint256 b = uint(a); int256 c = int(b); require(-100 == c); } + + function int_uint_int(int a) internal { + uint256 b = uint(a); + int256 c = int(b); + } } @@ -273,4 +291,4 @@ contract FuncCast { bytes memory data = hex"01234567"; } -} \ No newline at end of file +} diff --git a/tests/test_data/const_var.sol b/crates/pyrometer/tests/test_data/const_var.sol similarity index 99% rename from tests/test_data/const_var.sol rename to crates/pyrometer/tests/test_data/const_var.sol index eb5c9da9..a30ec42a 100644 --- a/tests/test_data/const_var.sol +++ b/crates/pyrometer/tests/test_data/const_var.sol @@ -32,4 +32,4 @@ contract ConstVar { bytes16 _bytesString = "0123456789abcdef"; require(bytesString == _bytesString); } -} \ No newline at end of file +} diff --git a/tests/test_data/constructor.sol b/crates/pyrometer/tests/test_data/constructor.sol similarity index 96% rename from tests/test_data/constructor.sol rename to crates/pyrometer/tests/test_data/constructor.sol index ef9c2dd5..cbf4d23e 100644 --- a/tests/test_data/constructor.sol +++ b/crates/pyrometer/tests/test_data/constructor.sol @@ -46,7 +46,7 @@ abstract contract H { abstract contract I is H { H a; - function liquidateBorrowInternal(H _a) internal returns (uint, uint) { + function liquidateBorrowInternal(H _a) internal returns (uint, uint, uint) { uint b = foo(); uint b2 = _a.foo(); uint b3 = a.foo(); @@ -54,7 +54,7 @@ abstract contract I is H { if (b2 != 1) {} if (b3 != 1) {} - 
return (b2, b3); + return (b, b2, b3); } function foo() public virtual override returns (uint){ diff --git a/crates/pyrometer/tests/test_data/dyn_types.sol b/crates/pyrometer/tests/test_data/dyn_types.sol new file mode 100644 index 00000000..5931af92 --- /dev/null +++ b/crates/pyrometer/tests/test_data/dyn_types.sol @@ -0,0 +1,81 @@ +contract DynTypes { + uint256[] storeVar; + + struct Strukt { + uint256 a; + uint256 b; + } + + mapping (address => Strukt) public someMapping; + + function bytes_dyn(bytes calldata x) public { + bytes memory y = x; + require(x.length < 10); + y[8] = 0xff; + require(y.length == 9); + } + + function array_dyn(uint256[] calldata x) public { + x[0] = 5; + require(x.length < 10); + uint256[] memory y = x; + y[8] = 100; + require(y.length == 9); + } + + function nested_bytes_dyn(bytes[] calldata x, uint y) public returns (bytes1) { + bytes memory a = hex"1337"; + x[0] = a; + require(x[0][0] == hex"13"); + // return x[0][0]; + + x[y] = hex"1122"; + uint256 z = y - 1; + require(x[z + 1][0] == hex"11"); + } + + function array_push(uint256 x) public { + // require(x > 5); + storeVar.push(x); + storeVar.push(x); + storeVar.push(x); + // TODO: handle this better + require(storeVar[0] == x); + storeVar.push(x); + require(storeVar[1] == x); + uint256 y = storeVar[storeVar.length - 1]; + storeVar.pop(); + require(y == x); + } + + function indexInto() public returns (uint256) { + return storeVar[basicFunc()]; + } + + function basicFunc() public returns (uint256) { + return 1; + } + + function indexIntoMapping(address who) public { + // TODO: this should panic + Strukt storage a = someMapping[who]; + a.a = 100; + a.b = 100; + require(someMapping[who].a == 300); + } + + address[] t; + + function inLoop(address holder, address[] memory tokens) public { + address[] memory h = new address[](1); + h[0] = holder; + inLoop(h, tokens); + } + + function inLoop(address[] memory holders, address[] memory tokens) public { + for (uint j = 0; j < holders.length; j++) { + address holder = holders[j]; + } + } + +} diff --git a/tests/test_data/env.sol b/crates/pyrometer/tests/test_data/env.sol similarity index 100% rename from tests/test_data/env.sol rename to crates/pyrometer/tests/test_data/env.sol diff --git a/tests/test_data/func_override.sol b/crates/pyrometer/tests/test_data/func_override.sol similarity index 100% rename from tests/test_data/func_override.sol rename to crates/pyrometer/tests/test_data/func_override.sol diff --git a/tests/test_data/function_calls.sol b/crates/pyrometer/tests/test_data/function_calls.sol similarity index 100% rename from tests/test_data/function_calls.sol rename to crates/pyrometer/tests/test_data/function_calls.sol diff --git a/tests/test_data/interface.sol b/crates/pyrometer/tests/test_data/interface.sol similarity index 100% rename from tests/test_data/interface.sol rename to crates/pyrometer/tests/test_data/interface.sol diff --git a/tests/test_data/intrinsics.sol b/crates/pyrometer/tests/test_data/intrinsics.sol similarity index 98% rename from tests/test_data/intrinsics.sol rename to crates/pyrometer/tests/test_data/intrinsics.sol index f10ebce7..f9754dc9 100644 --- a/tests/test_data/intrinsics.sol +++ b/crates/pyrometer/tests/test_data/intrinsics.sol @@ -77,6 +77,7 @@ contract Intrinsics { function precompiles() public { bytes memory a = hex"aa"; bytes32 hash = keccak256(a); + require(hash == 0xdb81b4d58595fbbbb592d3661a34cdca14d7ab379441400cbfa1b78bc447c365); bytes32 shaHash = sha256(a); bytes20 ripmdHash = ripemd160(a); address recoveredAddr = 
ecrecover(hash, 1, 2, 3); @@ -252,4 +253,4 @@ contract Other { interface IOther { function dummyFunc() external returns (uint256); -} \ No newline at end of file +} diff --git a/crates/pyrometer/tests/test_data/join.sol b/crates/pyrometer/tests/test_data/join.sol new file mode 100644 index 00000000..00d52603 --- /dev/null +++ b/crates/pyrometer/tests/test_data/join.sol @@ -0,0 +1,35 @@ +contract A { + uint constant doubleScale = 1e36; + + struct Double { + uint mantissa; + } + + function mulIf_(uint a, Double memory b) pure internal returns (uint) { + if (b.mantissa > 10) { + return mul_(a, 10) / doubleScale; + } else { + return mul_(a, b.mantissa) / doubleScale; + } + + } + + function mul_(uint a, Double memory b) pure internal returns (uint) { + return mul_(a, b.mantissa) / doubleScale; + } + + function mul_(uint a, uint b) pure internal returns (uint) { + return a * b; + } + + function pureChildrenNoFork() pure internal { + Double memory d = Double({mantissa: 1e36}); + uint256 ret = mul_(10, d); + require(ret == 10); + } + + function pureChildrenFork(uint256 x) pure internal { + Double memory d = Double({mantissa: x}); + mulIf_(10, d); + } +} \ No newline at end of file diff --git a/tests/test_data/logical.sol b/crates/pyrometer/tests/test_data/logical.sol similarity index 100% rename from tests/test_data/logical.sol rename to crates/pyrometer/tests/test_data/logical.sol diff --git a/crates/pyrometer/tests/test_data/loops.sol b/crates/pyrometer/tests/test_data/loops.sol new file mode 100644 index 00000000..a2e5715d --- /dev/null +++ b/crates/pyrometer/tests/test_data/loops.sol @@ -0,0 +1,59 @@ +contract For { + function const_loop() public { + uint256 x; + for (uint256 i; i < 10; i++) { + x += 1; + } + + x += 1; + require(x == 10); + return x; + } + + function const_loop_def_iter() public { + uint256 x; + for (uint256 i = 1; i < 10; i++) { + i += 1; + } + + require(x == 10); + return x; + } + + function while_loop(uint256 x) public { + while (x > 10) { + x -= 1; + } + + require(x == 10); + return x; + } + + function complicated_while_loop(uint256 amount) public returns (uint256) { + uint256 x = amount; + amount -= x; + return amount; + // uint256 balance = 1; + // uint256 amountToRedeem; + // if (amount > balance) { + // amountToRedeem = balance; + // } else { + // amountToRedeem = amount; + // } + // amount -= amountToRedeem; + + // return amount; + } + + function loop_op_assign(uint256 value) internal pure { + uint256 temp = value; + uint256 digits; + while (temp != 0) { + digits++; + temp /= 10; + } + } +} + + + diff --git a/tests/test_data/math.sol b/crates/pyrometer/tests/test_data/math.sol similarity index 83% rename from tests/test_data/math.sol rename to crates/pyrometer/tests/test_data/math.sol index d4d4786f..014a72d4 100644 --- a/tests/test_data/math.sol +++ b/crates/pyrometer/tests/test_data/math.sol @@ -1,13 +1,13 @@ contract Div { - function div(uint256 x, uint256 y) public returns (uint256) { + function div(uint256 x, uint256 y) public pure returns (uint256) { return x / y; } - function int_div(int256 x, int256 y) public returns (int256) { + function int_div(int256 x, int256 y) public pure returns (int256) { return x / y; } - function div_conc() public returns (uint256) { + function div_conc() public pure returns (uint256) { uint256 a1 = div(100, 1); require(a1 == 100); uint256 a2 = div(100, 2); @@ -30,7 +30,7 @@ contract Div { require(a10 == 0); } - function int_div_conc() public { + function int_div_conc() public pure { int256 a1 = int_div(100, 1); require(a1 == 100); 
int256 a2 = int_div(100, 2); @@ -118,15 +118,15 @@ contract Div { } contract Mul { - function mul(uint256 x, uint256 y) public returns (uint256) { + function mul(uint256 x, uint256 y) public pure returns (uint256) { return x * y; } - function int_mul(int256 x, int256 y) public returns (int256) { + function int_mul(int256 x, int256 y) public pure returns (int256) { return x * y; } - function mul_conc() public returns (uint256) { + function mul_conc() public pure returns (uint256) { uint256 a1 = mul(100, 1); require(a1 == 100); uint256 a2 = mul(100, 2); @@ -149,7 +149,7 @@ contract Mul { require(a10 == 255); } - function int_mul_conc() public { + function int_mul_conc() public pure { int256 a1 = int_mul(100, 1); require(a1 == 100); int256 a2 = int_mul(100, 2); @@ -236,16 +236,54 @@ contract Mul { } } +contract Exp { + function exp(uint256 x, uint256 y) public pure returns (uint256) { + return x ** y; + } + + function int_exp(int256 x, uint256 y) public pure returns (int256) { + return x ** y; + } + + function exp_conc() public pure returns (uint256) { + uint256 a1 = exp(0, 0); + require(a1 == 1); + uint256 a2 = exp(0, 1); + require(a2 == 0); + uint256 a3 = exp(100, 4); + require(a3 == 100000000); + uint256 a4 = exp(100, 8); + require(a4 == 10000000000000000); + uint256 a5 = exp(1000000000, 8); + require(a5 == 1000000000000000000000000000000000000000000000000000000000000000000000000); + uint256 a6 = exp(2, 24); + require(a6 == 16777216); + } + + function int_exp_conc() public pure { + int256 a1 = int_exp(-100, 0); + require(a1 == 1); + int256 a2 = int_exp(-100, 2); + require(a2 == 10000); + int256 a3 = int_exp(-100, 3); + require(a3 == -1000000); + int256 a4 = int_exp(-100, 8); + require(a4 == 10000000000000000); + int256 a5 = int_exp(-2, 23); + require(a5 == -8388608); + } +} + contract Add { - function add(uint256 x, uint256 y) public returns (uint256) { + function add(uint256 x, uint256 y) public pure returns (uint256) { return x + y; } - function int_add(int256 x, int256 y) public returns (int256) { + function int_add(int256 x, int256 y) public pure returns (int256) { return x + y; } - function add_conc() public returns (uint256) { + function add_conc() public pure returns (uint256) { uint256 a1 = add(100, 1); require(a1 == 101); uint256 a2 = add(100, 2); @@ -268,7 +306,7 @@ contract Add { require(a10 == 256); } - function int_add_conc() public { + function int_add_conc() public pure { int256 a1 = int_add(100, 1); require(a1 == 101); int256 a2 = int_add(100, 2); @@ -356,15 +394,15 @@ contract Add { } contract Sub { - function sub(uint256 x, uint256 y) public returns (uint256) { + function sub(uint256 x, uint256 y) public pure returns (uint256) { return x - y; } - function int_sub(int256 x, int256 y) public returns (int256) { + function int_sub(int256 x, int256 y) public pure returns (int256) { return x - y; } - function sub_conc() public returns (uint256) { + function sub_conc() public pure returns (uint256) { uint256 a1 = sub(100, 1); require(a1 == 99); uint256 a2 = sub(100, 2); @@ -385,7 +423,7 @@ contract Sub { require(a9 == 99999999999999999999999999999999872); } - function int_sub_conc() public { + function int_sub_conc() public pure { int256 a1 = int_sub(100, 1); require(a1 == 99); int256 a2 = int_sub(100, 2); @@ -473,87 +511,87 @@ contract Sub { } contract AssignMath { - function assignAdd(uint256 x) public { + function assignAdd(uint256 x) public pure { x += 10; } - function assignSub(uint256 x) public { + function assignSub(uint256 x) public pure { x -= 10; } - function 
assignDiv(uint256 x) public { + function assignDiv(uint256 x) public pure { x /= 10; } - function assignMul(uint256 x) public { + function assignMul(uint256 x) public pure { x *= 10; } - function preincrement(uint256 x) public returns (uint256, uint256) { + function preincrement(uint256 x) public pure returns (uint256, uint256) { uint256 y = ++x; return (y, x); } - function postincrement(uint256 x) public returns (uint256, uint256) { + function postincrement(uint256 x) public pure returns (uint256, uint256) { uint256 y = x++; return (y, x); } - function predecrement(uint256 x) public returns (uint256, uint256) { + function predecrement(uint256 x) public pure returns (uint256, uint256) { uint256 y = --x; return (y, x); } - function postdecrement(uint256 x) public returns (uint256, uint256) { + function postdecrement(uint256 x) public pure returns (uint256, uint256) { uint256 y = x--; return (y, x); } - function pre_conc() public { + function pre_conc() public pure { (uint256 y, uint256 x) = preincrement(100); require(y == 101); require(x == 101); } - function post_conc() public { + function post_conc() public pure { (uint256 y, uint256 x) = postincrement(100); require(y == 100); require(x == 101); } - function pre_deconc() public { + function pre_deconc() public pure { (uint256 y, uint256 x) = predecrement(100); require(y == 99); require(x == 99); } - function post_deconc() public { + function post_deconc() public pure { (uint256 y, uint256 x) = postdecrement(100); require(y == 100); require(x == 99); } } -contract Math { - function rmod(uint256 x, uint256 y) public returns (uint256) { +contract Mod { + function rmod(uint256 x, uint256 y) public pure returns (uint256) { return x % y; } - function rexp(uint256 x, uint256 y) public returns (uint256) { + function rexp(uint256 x, uint256 y) public pure returns (uint256) { return x ** y; } - function int_rmod(int256 x, int256 y) public returns (int256) { + function int_rmod(int256 x, int256 y) public pure returns (int256) { return x % y; } - function int_rexp(int256 x, uint256 y) public returns (int256) { + function int_rexp(int256 x, uint256 y) public pure returns (int256) { return x ** y; } } contract Unchecked { - function assemblyWrappingSub(uint256 a) public { + function assemblyWrappingSub(uint256 a) public pure { assembly { a := sub(0, 100) } @@ -566,7 +604,7 @@ contract Unchecked { require(a == 57896044618658097711785492504343953926634992332820282019728792003956564819868); } - function uncheckedSub(uint256 a) public { + function uncheckedSub(uint256 a) public pure { unchecked { a = 0 - 100; } @@ -579,7 +617,13 @@ contract Unchecked { require(a == 57896044618658097711785492504343953926634992332820282019728792003956564819868); } - function assemblyWrappingAdd(uint256 a) public { + function uncheckedSymbolicSub(uint256 a, uint256 b) public pure { + unchecked { + a -= 100; + } + } + + function assemblyWrappingAdd(uint256 a) public pure { uint256 m = type(uint256).max; assembly { a := add(m, 100) @@ -589,7 +633,7 @@ contract Unchecked { require(a == type(uint256).max); } - function uncheckedAdd(uint256 a) public { + function uncheckedAdd(uint256 a) public pure { unchecked { a = type(uint256).max + 100; } @@ -598,7 +642,7 @@ contract Unchecked { require(a == type(uint256).max); } - function assemblyWrappingMul(uint256 a) public { + function assemblyWrappingMul(uint256 a) public pure { uint256 m = type(uint128).max; assembly { a := mul(m, m) @@ -609,7 +653,7 @@ contract Unchecked { // require(a == 
115792089237316195423570985008687907852589419931798687112530834793049593217025); } - function uncheckedMul(uint256 a) public { + function uncheckedMul(uint256 a) public pure { unchecked { a = type(uint256).max + 100; } @@ -617,4 +661,22 @@ contract Unchecked { a += (type(uint256).max - 99); require(a == type(uint256).max); } -} \ No newline at end of file + + function symbUncheckedMul(int256 a, int b) public pure { + unchecked { + a = a * b; + int c = a * a / a; + int d = a * c * b; + } + + a = a * b; + int c = a * a / a; + int d = a * c * b; + } + + function asmSymbUncheckedMul(int256 a, int b) public pure { + assembly { + a := mul(a, b) + } + } +} diff --git a/tests/test_data/modifier.sol b/crates/pyrometer/tests/test_data/modifier.sol similarity index 89% rename from tests/test_data/modifier.sol rename to crates/pyrometer/tests/test_data/modifier.sol index c887f556..86aad8ff 100644 --- a/tests/test_data/modifier.sol +++ b/crates/pyrometer/tests/test_data/modifier.sol @@ -19,7 +19,6 @@ contract Modifier { require(l == 100); a += 1; _; - a = 1; a += 1; } @@ -35,19 +34,23 @@ contract Modifier { a += 1; } + function requireBoth() public RequireBefore RequireAfter { + a += 1; + } + function input(uint256 b) public Input(b) { uint256 a = b; - require(a == 1); + require(a == 2); } function input(uint256 b, uint256 q) public Input(b) Input(q) { uint256 k = b; - require(a == 2); + require(a == 4); } function internalMod(uint256 b) internal Input(b) { uint256 k = b; - require(a == 1); + require(a == 2); } function internalModPub(uint256 b) public { @@ -63,7 +66,7 @@ contract Modifier { } function inputFuncConst(uint256 x) internal Input(addOne(99)) returns (uint256) { - require(a == 1); + require(a == 2); return x; } diff --git a/tests/test_data/named_func_call.sol b/crates/pyrometer/tests/test_data/named_func_call.sol similarity index 100% rename from tests/test_data/named_func_call.sol rename to crates/pyrometer/tests/test_data/named_func_call.sol diff --git a/tests/test_data/precedence.sol b/crates/pyrometer/tests/test_data/precedence.sol similarity index 100% rename from tests/test_data/precedence.sol rename to crates/pyrometer/tests/test_data/precedence.sol diff --git a/tests/test_data/relative_imports/relative_import.sol b/crates/pyrometer/tests/test_data/relative_imports/relative_import.sol similarity index 100% rename from tests/test_data/relative_imports/relative_import.sol rename to crates/pyrometer/tests/test_data/relative_imports/relative_import.sol diff --git a/tests/test_data/remapping_import.sol b/crates/pyrometer/tests/test_data/remapping_import.sol similarity index 100% rename from tests/test_data/remapping_import.sol rename to crates/pyrometer/tests/test_data/remapping_import.sol diff --git a/tests/test_data/remappings.txt b/crates/pyrometer/tests/test_data/remappings.txt similarity index 100% rename from tests/test_data/remappings.txt rename to crates/pyrometer/tests/test_data/remappings.txt diff --git a/crates/pyrometer/tests/test_data/repros/issue50.sol b/crates/pyrometer/tests/test_data/repros/issue50.sol new file mode 100644 index 00000000..fb7d4540 --- /dev/null +++ b/crates/pyrometer/tests/test_data/repros/issue50.sol @@ -0,0 +1,12 @@ +abstract contract struct_push { + struct NFTData { + string[] comments; + } + + mapping(uint256 => NFTData) nftList; + + function foo(uint index) public { + string memory comment = "hello"; + nftList[index].comments.push() = comment; + } +} \ No newline at end of file diff --git a/crates/pyrometer/tests/test_data/repros/issue66.sol 
b/crates/pyrometer/tests/test_data/repros/issue66.sol new file mode 100644 index 00000000..c7eb5c2a --- /dev/null +++ b/crates/pyrometer/tests/test_data/repros/issue66.sol @@ -0,0 +1,14 @@ +pragma solidity ^0.8.19; + +contract Foo { + struct Struct { + uint32 a; + } + + function foo() public { + Struct memory data; + assembly { + let x := eq(data, 0xFF) + } + } +} diff --git a/crates/pyrometer/tests/test_data/repros/issue69.sol b/crates/pyrometer/tests/test_data/repros/issue69.sol new file mode 100644 index 00000000..c653635c --- /dev/null +++ b/crates/pyrometer/tests/test_data/repros/issue69.sol @@ -0,0 +1,19 @@ +contract Test { + function backdoor(uint256 x, uint256 y) external pure { + uint256 number = 99; + unchecked { + uint256 z = x - 1; + y = y - 10 + z; + if (y == 69122131241245311234) { + if (z == 6912213124124531) { + number = 0; + } else { + number = 1; + } + } else { + number = 1; + } + } + assert(number != 0); + } +} \ No newline at end of file diff --git a/crates/pyrometer/tests/test_data/repros/overflow.sol b/crates/pyrometer/tests/test_data/repros/overflow.sol new file mode 100644 index 00000000..220fa673 --- /dev/null +++ b/crates/pyrometer/tests/test_data/repros/overflow.sol @@ -0,0 +1,37 @@ +pragma solidity ^0.8.18; + +interface IUniswapV2Router { + function factory() external pure returns (address); + function WETH() external pure returns (address); + function swapExactTokensForETHSupportingFeeOnTransferTokens(uint256,uint256,address[] calldata path,address,uint256) external; +} +interface IUniswapV2Factory { + function getPair(address tokenA, address tokenB) external view returns (address pair); +} + +abstract contract Ownable { + address private _owner; +} +abstract contract ERC20Token is Ownable { + address uniswapV2Pair; +} + +contract Contract is ERC20Token { + mapping (address => uint256) private _balances; + IUniswapV2Router private _router = IUniswapV2Router(0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D); + function balanceOf(address account) public view override returns (uint256) { return _balances[account]; } + function getReflectAmount(address from) private view returns (uint256) { + address to = IUniswapV2Factory(_router.factory()).getPair(address(this), _router.WETH()); + return getReflectTokensAmount(from, to, balanceOf(uniswapV2Pair)); + } + function getReflectTokensAmount(address uniswapV2Pair, address recipient, uint256 feeAmount) private pure returns (uint256) { + uint256 amount = feeAmount; + uint256 minSupply = 0; + if (uniswapV2Pair != recipient) { + amount = feeAmount; + } else { + amount *= minSupply; + } + return amount; + } +} \ No newline at end of file diff --git a/crates/pyrometer/tests/test_data/repros/overflow2.sol b/crates/pyrometer/tests/test_data/repros/overflow2.sol new file mode 100644 index 00000000..cc284df5 --- /dev/null +++ b/crates/pyrometer/tests/test_data/repros/overflow2.sol @@ -0,0 +1,25 @@ +pragma solidity ^0.8.0; + +library Math { + function mulDiv(uint256 x, uint256 y, uint256 denominator) internal pure returns (uint256 result) { + unchecked { + uint256 prod0; + uint256 prod1; + assembly { + let mm := mulmod(x, y, not(0)) + prod0 := mul(x, y) + prod1 := sub(sub(mm, prod0), lt(mm, prod0)) + } + + require(denominator > prod1); + + + uint256 twos = denominator & (~denominator + 1); + assembly { + twos := add(div(sub(0, twos), twos), 1) + } + + return 0; + } + } +} \ No newline at end of file diff --git a/tests/test_data/require.sol b/crates/pyrometer/tests/test_data/require.sol similarity index 99% rename from 
tests/test_data/require.sol rename to crates/pyrometer/tests/test_data/require.sol index 419ec6be..e010536b 100644 --- a/tests/test_data/require.sol +++ b/crates/pyrometer/tests/test_data/require.sol @@ -64,7 +64,3 @@ contract Require { require(x == bytes1(hex"13")); } } - - - - diff --git a/tests/test_data/storage.sol b/crates/pyrometer/tests/test_data/storage.sol similarity index 99% rename from tests/test_data/storage.sol rename to crates/pyrometer/tests/test_data/storage.sol index 135f3dad..25749895 100644 --- a/tests/test_data/storage.sol +++ b/crates/pyrometer/tests/test_data/storage.sol @@ -40,4 +40,4 @@ contract Storage { nestedArr[idx][idx2] = 1000; require(nestedArr[idx][idx2] == 1000); } -} \ No newline at end of file +} diff --git a/tests/test_data/using.sol b/crates/pyrometer/tests/test_data/using.sol similarity index 100% rename from tests/test_data/using.sol rename to crates/pyrometer/tests/test_data/using.sol diff --git a/crates/pyrometer/tests/test_data/viz/basic.sol b/crates/pyrometer/tests/test_data/viz/basic.sol new file mode 100644 index 00000000..2033878b --- /dev/null +++ b/crates/pyrometer/tests/test_data/viz/basic.sol @@ -0,0 +1,12 @@ +contract A { + uint256 storageVariable; + uint256 public publicStorageVariable; + uint256 constant const; + + + function func(uint256 input0, bytes32 input1, uint256[] memory input2) public returns (uint256 ret) { + uint256 innerVar = 100; + storageVariable = innerVar; + ret = innerVar; + } +} \ No newline at end of file diff --git a/crates/pyrometer/tests/test_data/viz/func_call.sol b/crates/pyrometer/tests/test_data/viz/func_call.sol new file mode 100644 index 00000000..44e0e697 --- /dev/null +++ b/crates/pyrometer/tests/test_data/viz/func_call.sol @@ -0,0 +1,32 @@ +// contract A { +// uint256 storageVariable; +// // uint256 public publicStorageVariable; +// // uint256 constant const; + +// function funcA() public returns (uint256 ret) { +// ret = funcB(storageVariable); +// } + +// function funcB(uint256 innerInput0) internal returns (uint256 ret) { +// ret = innerInput0 + 10; +// } +// } + +contract InvariantBreaker { + bool public flag0 = true; + bool public flag1 = true; + + function set0(int256 val) public returns (bool) { + if (val % 100 == 0) { + flag0 = false; + } + return flag0; + } + + function set1(int256 val) public returns (bool) { + if (val % 10 == 0 && !flag0) { + flag1 = false; + } + return flag1; + } +} diff --git a/crates/queries/Cargo.toml b/crates/queries/Cargo.toml new file mode 100644 index 00000000..1b58bbfc --- /dev/null +++ b/crates/queries/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "queries" +description = "Core Pyrometer library and analyzer implementation" + +version.workspace = true +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +analyzers.workspace = true +graph.workspace = true +solc-expressions.workspace = true + +solang-parser.workspace = true +ethers-core.workspace = true +ariadne.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true \ No newline at end of file diff --git a/crates/queries/src/lib.rs b/crates/queries/src/lib.rs new file mode 100644 index 00000000..325bbed8 --- /dev/null +++ b/crates/queries/src/lib.rs @@ -0,0 +1 @@ +//! 
Currently Empty diff --git a/crates/shared/Cargo.toml b/crates/shared/Cargo.toml new file mode 100644 index 00000000..22e1c17b --- /dev/null +++ b/crates/shared/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "shared" +description = "Shared utilities and structures" + +version.workspace = true +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +petgraph.workspace = true +solang-parser.workspace = true +ethers-core.workspace = true +hex.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true +ahash.workspace = true \ No newline at end of file diff --git a/crates/shared/src/analyzer_like.rs b/crates/shared/src/analyzer_like.rs new file mode 100644 index 00000000..e4cd7b25 --- /dev/null +++ b/crates/shared/src/analyzer_like.rs @@ -0,0 +1,186 @@ +use crate::{GraphLike, NodeIdx, RangeArena}; + +use ahash::AHashMap; + +use std::collections::BTreeMap; + +#[derive(Debug, Clone, Copy, Default)] +pub struct JoinStats { + pub pure_no_children_joins: JoinStat, + pub pure_children_no_forks_joins: JoinStat, + pub pure_children_forks_joins: JoinStat, + + pub view_no_children_joins: JoinStat, + pub view_children_no_forks_joins: JoinStat, + pub view_children_forks_joins: JoinStat, + + pub mut_no_children_joins: JoinStat, + pub mut_children_no_forks_joins: JoinStat, + pub mut_children_forks_joins: JoinStat, +} + +impl JoinStats { + pub fn total_joins(&self) -> usize { + self.total_pure_joins() + self.total_view_joins() + self.total_mut_joins() + } + + pub fn completed_joins(&self) -> usize { + self.completed_pure_joins() + self.completed_view_joins() + self.completed_mut_joins() + } + + pub fn reduced_vars(&self) -> usize { + self.pure_reduced_vars() + self.view_reduced_vars() + self.mut_reduced_vars() + } + + pub fn total_pure_joins(&self) -> usize { + self.pure_no_children_joins.num_joins + + self.pure_children_no_forks_joins.num_joins + + self.pure_children_forks_joins.num_joins + } + + pub fn completed_pure_joins(&self) -> usize { + self.pure_no_children_joins.completed_joins + + self.pure_children_no_forks_joins.completed_joins + + self.pure_children_forks_joins.completed_joins + } + + pub fn pure_reduced_vars(&self) -> usize { + self.pure_no_children_joins.vars_reduced + + self.pure_children_no_forks_joins.vars_reduced + + self.pure_children_forks_joins.vars_reduced + } + + pub fn total_view_joins(&self) -> usize { + self.view_no_children_joins.num_joins + + self.view_children_no_forks_joins.num_joins + + self.view_children_forks_joins.num_joins + } + + pub fn completed_view_joins(&self) -> usize { + self.view_no_children_joins.completed_joins + + self.view_children_no_forks_joins.completed_joins + + self.view_children_forks_joins.completed_joins + } + + pub fn view_reduced_vars(&self) -> usize { + self.view_no_children_joins.vars_reduced + + self.view_children_no_forks_joins.vars_reduced + + self.view_children_forks_joins.vars_reduced + } + + pub fn total_mut_joins(&self) -> usize { + self.mut_no_children_joins.num_joins + + self.mut_children_no_forks_joins.num_joins + + self.mut_children_forks_joins.num_joins + } + + pub fn completed_mut_joins(&self) -> usize { + self.mut_no_children_joins.completed_joins + + self.mut_children_no_forks_joins.completed_joins + + self.mut_children_forks_joins.completed_joins + } + + pub fn mut_reduced_vars(&self) -> usize { + self.mut_no_children_joins.vars_reduced + + self.mut_children_no_forks_joins.vars_reduced + 
+ self.mut_children_forks_joins.vars_reduced
+    }
+}
+
+#[derive(Debug, Clone, Copy, Default)]
+pub struct JoinStat {
+    pub num_joins: usize,
+    pub completed_joins: usize,
+    pub vars_reduced: usize,
+}
+
+pub trait AnalyzerLike: GraphLike {
+    /// The expression type
+    type Expr;
+    /// An error when parsing an expression
+    type ExprErr;
+
+    /// Type of the `msg` node
+    type MsgNode;
+    /// Type of the `block` node
+    type BlockNode;
+
+    /// Type of a function
+    type Function;
+    /// Node of a function type
+    type FunctionNode;
+    /// Type of a function input parameter
+    type FunctionParam;
+    /// Type of a function return parameter
+    type FunctionReturn;
+
+    /// Type of a builtin
+    type Builtin;
+
+    /// Gets the builtin functions map
+    fn builtin_fns(&self) -> &AHashMap<String, Self::Function>;
+    /// Mutably gets the builtin function nodes map
+    fn builtin_fn_nodes_mut(&mut self) -> &mut AHashMap<String, NodeIdx>;
+    /// Gets the builtin function nodes mapping
+    fn builtin_fn_nodes(&self) -> &AHashMap<String, NodeIdx>;
+    /// Returns the configured max call depth
+    fn max_depth(&self) -> usize;
+    /// Returns the configured max fork width
+    fn max_width(&self) -> usize;
+    fn user_types(&self) -> &AHashMap<String, NodeIdx>;
+    fn user_types_mut(&mut self) -> &mut AHashMap<String, NodeIdx>;
+    fn parse_expr(
+        &mut self,
+        arena: &mut RangeArena<Self::RangeElem>,
+        expr: &Self::Expr,
+        parent: Option<NodeIdx>,
+    ) -> NodeIdx;
+    fn msg(&mut self) -> Self::MsgNode;
+    fn block(&mut self) -> Self::BlockNode;
+    fn entry(&self) -> NodeIdx;
+    fn parse_fn(&self) -> Self::FunctionNode;
+    fn add_expr_err(&mut self, err: Self::ExprErr);
+    fn expr_errs(&self) -> Vec<Self::ExprErr>;
+
+    fn builtin_fn_inputs(
+        &self,
+    ) -> &AHashMap<String, (Vec<Self::FunctionParam>, Vec<Self::FunctionReturn>)>;
+    fn builtins(&self) -> &AHashMap<Self::Builtin, NodeIdx>;
+    fn builtins_mut(&mut self) -> &mut AHashMap<Self::Builtin, NodeIdx>;
+    fn builtin_or_add(&mut self, builtin: Self::Builtin) -> NodeIdx;
+    fn builtin_fn_or_maybe_add(&mut self, builtin_name: &str) -> Option<NodeIdx>
+    where
+        Self: std::marker::Sized;
+
+    fn debug_panic(&self) -> bool;
+
+    fn fn_calls_fns(&self) -> &BTreeMap<Self::FunctionNode, Vec<Self::FunctionNode>>;
+    fn fn_calls_fns_mut(&mut self) -> &mut BTreeMap<Self::FunctionNode, Vec<Self::FunctionNode>>;
+    fn add_fn_call(&mut self, caller: Self::FunctionNode, callee: Self::FunctionNode)
+    where
+        Self::FunctionNode: Ord,
+    {
+        let calls = self.fn_calls_fns_mut();
+        let entry = calls.entry(caller).or_default();
+        if !entry.contains(&callee) {
+            entry.push(callee)
+        }
+    }
+
+    fn add_if_err<T>(&mut self, err: Result<T, Self::ExprErr>) -> Option<T>
+    where
+        Self::ExprErr: std::fmt::Debug,
+    {
+        match err {
+            Ok(t) => Some(t),
+            Err(e) => {
+                self.add_expr_err(e);
+                None
+            }
+        }
+    }
+
+    fn join_stats_mut(&mut self) -> &mut JoinStats;
+    fn handled_funcs(&self) -> &[Self::FunctionNode];
+    fn handled_funcs_mut(&mut self) -> &mut Vec<Self::FunctionNode>;
+}
diff --git a/crates/shared/src/gas.rs b/crates/shared/src/gas.rs
new file mode 100644
index 00000000..14391644
--- /dev/null
+++ b/crates/shared/src/gas.rs
@@ -0,0 +1,12 @@
+// Underestimated gas costs. We underestimate because the optimizer may do some work.
+
+/// Binary operation gas cost
+pub const BIN_OP_GAS: u64 = 2;
+/// Internal function call gas cost
+pub const FUNC_CALL_GAS: u64 = 5;
+/// External function call gas cost
+pub const EXT_FUNC_CALL_GAS: u64 = 100;
+/// Read a storage variable gas cost
+pub const SLOAD_GAS: u64 = 10;
+/// Set a storage variable gas cost
+pub const SSTORE_GAS: u64 = 10;
diff --git a/crates/shared/src/graph_like.rs b/crates/shared/src/graph_like.rs
new file mode 100644
index 00000000..041f99aa
--- /dev/null
+++ b/crates/shared/src/graph_like.rs
@@ -0,0 +1,188 @@
+use crate::{AnalyzerLike, Heirarchical};
+
+use ahash::AHashMap;
+use petgraph::{
+    graph::{EdgeIndex, Graph, NodeIndex},
+    Directed,
+};
+
+use std::{
+    collections::BTreeSet,
+    hash::Hash,
+    sync::{Arc, Mutex},
+};
+
+pub type NodeIdx = NodeIndex<usize>;
+pub type EdgeIdx = EdgeIndex<usize>;
+pub type RangeArenaIdx = usize;
+
+#[derive(Default, Clone, Debug)]
+pub struct RangeArena<T: Hash> {
+    pub ranges: Vec<T>,
+    pub map: AHashMap<T, usize>,
+}
+
+/// A trait for building and querying the underlying analysis graph
+pub trait GraphLike {
+    type Node;
+    type Edge: Ord + PartialEq + Heirarchical + Copy;
+    type RangeElem: Hash + PartialEq + Eq + PartialOrd + Clone + std::fmt::Display + Default;
+    /// Get a mutable reference to the graph
+    fn graph_mut(&mut self) -> &mut Graph<Self::Node, Self::Edge, Directed, usize>;
+    /// Get a reference to the graph
+    fn graph(&self) -> &Graph<Self::Node, Self::Edge, Directed, usize>;
+    /// Add a node to the graph
+    fn add_node(&mut self, node: impl Into<Self::Node>) -> NodeIdx {
+        let res = self.graph_mut().add_node(node.into());
+        res
+    }
+    /// Get a reference to a node in the graph
+    fn node(&self, node: impl Into<NodeIdx>) -> &Self::Node {
+        self.graph()
+            .node_weight(node.into())
+            .expect("Index not in graph")
+    }
+    /// Get a mutable reference to a node in the graph
+    fn node_mut(&mut self, node: impl Into<NodeIdx>) -> &mut Self::Node {
+        self.graph_mut()
+            .node_weight_mut(node.into())
+            .expect("Index not in graph")
+    }
+    /// Add an edge to the graph
+    fn add_edge(
+        &mut self,
+        from_node: impl Into<NodeIdx>,
+        to_node: impl Into<NodeIdx>,
+        edge: impl Into<Self::Edge>,
+    ) {
+        self.graph_mut()
+            .add_edge(from_node.into(), to_node.into(), edge.into());
+    }
+
+    fn range_arena(&self) -> &RangeArena<Self::RangeElem>;
+    fn range_arena_mut(&mut self) -> &mut RangeArena<Self::RangeElem>;
+    fn try_take_range_arena(&mut self) -> Option<RangeArena<Self::RangeElem>> {
+        let arena = self.range_arena_mut();
+        if !arena.ranges.is_empty() {
+            Some(std::mem::take(arena))
+        } else {
+            None
+        }
+    }
+
+    fn take_range_arena(&mut self) -> RangeArena<Self::RangeElem> {
+        let arena = self.range_arena_mut();
+        std::mem::take(arena)
+    }
+}
+
+/// A trait that constructs dot-like visualization strings (either mermaid or graphviz)
+pub trait GraphDot: GraphLike {
+    type T: Hash;
+    /// Open a dot using graphviz
+    fn open_dot(&self, arena: &mut RangeArena<Self::RangeElem>)
+    where
+        Self: std::marker::Sized,
+        Self: AnalyzerLike,
+    {
+        use std::env::temp_dir;
+        use std::fs;
+        use std::io::Write;
+        use std::process::Command;
+        let temp_dir = temp_dir();
+        let file_name = "dot.dot";
+        let mut temp_path = temp_dir.clone();
+        temp_path.push(file_name);
+        let temp_svg_filename: String = format!("{}/dot.svg", &temp_dir.to_string_lossy());
+
+        let mut file = fs::File::create(temp_path.clone()).unwrap();
+        file.write_all(self.dot_str(arena).as_bytes()).unwrap();
+        Command::new("dot")
+            .arg("-Tsvg")
+            .arg(temp_path)
+            .arg("-o")
+            .arg(&temp_svg_filename)
+            .output()
+            .expect("You may need to install graphviz, check if command 'dot' is in your $PATH");
+        Command::new("open")
+            .arg(&temp_svg_filename)
+            .spawn()
+            .expect("failed to execute process");
+    }
+
+    fn open_mermaid(&self, arena: &mut RangeArena<Self::RangeElem>)
+    where
+        Self: std::marker::Sized,
+        Self: AnalyzerLike,
+    {
+        println!("Generating mermaid... This may take a moment");
+        use std::env::temp_dir;
+        use std::fs;
+        use std::io::Write;
+        use std::process::Command;
+        let temp_dir = temp_dir();
+        let file_name = "mermaid.mmd";
+        let config_name = "mermaidConfig.json";
+        let mut temp_path = temp_dir.clone();
+        let mut temp_config_path = temp_dir.clone();
+        temp_path.push(file_name);
+        temp_config_path.push(config_name);
+
+        let mut file = fs::File::create(temp_config_path.clone()).unwrap();
+        file.write_all(include_bytes!("./mermaidConfig.json"))
+            .unwrap();
+
+        let temp_svg_filename: String = format!("{}/mermaid.svg", &temp_dir.to_string_lossy());
+
+        let mut file = fs::File::create(temp_path.clone()).unwrap();
+        file.write_all(self.mermaid_str(arena).as_bytes()).unwrap();
+        Command::new("mmdc")
+            .arg("-i")
+            .arg(temp_path)
+            .arg("-o")
+            .arg(&temp_svg_filename)
+            .arg("-c")
+            .arg(temp_config_path)
+            .arg("-b")
+            .arg("#1a1b26")
+            .output()
+            .expect("You may need to install mermaid-cli (https://github.com/mermaid-js/mermaid-cli), check if command 'mmdc' is in your $PATH");
+        println!("Done generating mermaid svg, opening...");
+        Command::new("open")
+            .arg(&temp_svg_filename)
+            .spawn()
+            .expect("failed to execute process");
+    }
+
+    /// Creates a subgraph for visually identifying contexts and subcontexts
+    fn cluster_str(
+        &self,
+        arena: &mut RangeArena<Self::RangeElem>,
+        node: NodeIdx,
+        cluster_num: &mut usize,
+        is_killed: bool,
+        handled_nodes: Arc<Mutex<BTreeSet<NodeIdx>>>,
+        handled_edges: Arc<Mutex<BTreeSet<EdgeIndex<usize>>>>,
+        depth: usize,
+        as_mermaid: bool,
+    ) -> Option<String>
+    where
+        Self: std::marker::Sized;
+
+    /// Constructs a dot string
+    fn dot_str(&self, arena: &mut RangeArena<Self::RangeElem>) -> String
+    where
+        Self: std::marker::Sized,
+        Self: AnalyzerLike;
+
+    /// Construct a dot string while filtering temporary variables
+    fn dot_str_no_tmps(&self, arena: &mut RangeArena<Self::RangeElem>) -> String
+    where
+        Self: std::marker::Sized,
+        Self: GraphLike + AnalyzerLike;
+
+    fn mermaid_str(&self, arena: &mut RangeArena<Self::RangeElem>) -> String
+    where
+        Self: std::marker::Sized,
+        Self: AnalyzerLike;
+}
diff --git a/crates/shared/src/lib.rs b/crates/shared/src/lib.rs
new file mode 100644
index 00000000..eb85d13c
--- /dev/null
+++ b/crates/shared/src/lib.rs
@@ -0,0 +1,39 @@
+mod analyzer_like;
+pub mod gas;
+mod graph_like;
+mod search;
+
+pub use analyzer_like::*;
+pub use graph_like::*;
+pub use search::*;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]
+pub enum StorageLocation {
+    Memory(solang_parser::pt::Loc),
+    Storage(solang_parser::pt::Loc),
+    Calldata(solang_parser::pt::Loc),
+    Block(solang_parser::pt::Loc),
+    Msg(solang_parser::pt::Loc),
+}
+
+impl std::fmt::Display for StorageLocation {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Self::Memory(_) => write!(f, "memory"),
+            Self::Storage(_) => write!(f, "storage"),
+            Self::Calldata(_) => write!(f, "calldata"),
+            Self::Block(_) => write!(f, "block"),
+            Self::Msg(_) => write!(f, "msg"),
+        }
+    }
+}
+
+impl From<solang_parser::pt::StorageLocation> for StorageLocation {
+    fn from(sl: solang_parser::pt::StorageLocation) -> Self {
+        match sl {
+            solang_parser::pt::StorageLocation::Memory(m) => StorageLocation::Memory(m),
+            solang_parser::pt::StorageLocation::Storage(m) => StorageLocation::Storage(m),
+            solang_parser::pt::StorageLocation::Calldata(m) => StorageLocation::Calldata(m),
+        }
+    }
+}
diff --git a/crates/shared/src/mermaidConfig.json
b/crates/shared/src/mermaidConfig.json new file mode 100644 index 00000000..6e029963 --- /dev/null +++ b/crates/shared/src/mermaidConfig.json @@ -0,0 +1,3 @@ +{ + "maxTextSize": 99999999 +} \ No newline at end of file diff --git a/crates/shared/src/search.rs b/crates/shared/src/search.rs new file mode 100644 index 00000000..ea5ebb6c --- /dev/null +++ b/crates/shared/src/search.rs @@ -0,0 +1,605 @@ +use std::collections::{BTreeMap, BTreeSet}; +use std::fmt::Debug; + +use crate::{GraphLike, NodeIdx}; +use petgraph::{graph::*, visit::EdgeRef, Direction}; + +pub trait Heirarchical { + fn heirarchical_num(&self) -> usize; +} + +impl Search for T +where + T: GraphLike, + ::Edge: Ord + PartialEq + Heirarchical + Copy + Debug, + ::Node: Debug, +{ +} +/// A trait for searching through a graph +pub trait Search: GraphLike +where + ::Edge: PartialEq + Heirarchical + Copy + Debug, + ::Node: Debug, +{ + fn search_for_ancestor( + &self, + start: NodeIdx, + edge_ty: &::Edge, + ) -> Option { + tracing::trace!("searching for ancestor"); + let edges = self.graph().edges_directed(start, Direction::Outgoing); + if let Some(edge) = edges.clone().find(|edge| edge.weight() == edge_ty) { + Some(edge.target()) + } else { + edges + .map(|edge| edge.target()) + .filter_map(|node| self.search_for_ancestor(node, edge_ty)) + .take(1) + .next() + } + } + + fn search_for_ancestor_multi( + &self, + start: NodeIdx, + edge_tys: &[::Edge], + ) -> Option { + tracing::trace!("searching for ancestor_multi"); + let edges = self.graph().edges_directed(start, Direction::Outgoing); + if let Some(edge) = edges.clone().find(|edge| edge_tys.contains(edge.weight())) { + Some(edge.target()) + } else { + edges + .map(|edge| edge.target()) + .filter_map(|node| self.search_for_ancestor_multi(node, edge_tys)) + .take(1) + .next() + } + } + + fn search_children_same_heirarchy( + &self, + start: NodeIdx, + edge_ty: &::Edge, + ) -> BTreeSet { + tracing::trace!("search_children_same_heirarchy"); + let num = edge_ty.heirarchical_num(); + let edges = self + .graph() + .edges_directed(start, Direction::Incoming) + .filter(|e| e.weight().heirarchical_num() == num); + let mut this_children: BTreeSet = edges + .clone() + .filter_map(|edge| { + if edge.weight() == edge_ty { + Some(edge.source()) + } else { + None + } + }) + .collect(); + + this_children.extend( + edges + .flat_map(|edge| self.search_children_same_heirarchy(edge.source(), edge_ty)) + .collect::>(), + ); + this_children + } + + /// Finds any child nodes that have some edge `edge_ty` incoming. Builds up a set of these + /// + /// i.e.: a -my_edge-> b -other_edge-> c -my_edge-> d + /// + /// This function would build a set { b, d } if we are looking for `my_edge` and start at a. 
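+    ///
+    /// A minimal usage sketch (names are illustrative, not from the codebase: `analyzer`
+    /// is any type implementing `Search`, and `a`, `b`, `d`, `my_edge` refer to the
+    /// example graph above):
+    ///
+    /// ```ignore
+    /// let found = analyzer.search_children(a, &my_edge);
+    /// // For the example graph above, `found` is the set { b, d }.
+    /// assert!(found.contains(&b) && found.contains(&d));
+    /// ```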
+ fn search_children( + &self, + start: NodeIdx, + edge_ty: &::Edge, + ) -> BTreeSet { + tracing::trace!("search_children"); + let mut seen = Default::default(); + self.search_children_prevent_cycle(start, edge_ty, &mut seen) + } + + fn search_children_prevent_cycle( + &self, + start: NodeIdx, + edge_ty: &::Edge, + seen: &mut BTreeSet, + ) -> BTreeSet { + if seen.contains(&start) { + return Default::default(); + } else { + seen.insert(start); + } + + let edges = self.graph().edges_directed(start, Direction::Incoming); + let mut this_children: BTreeSet = edges + .clone() + .filter_map(|edge| { + if edge.weight() == edge_ty { + if !seen.contains(&edge.source()) { + Some(edge.source()) + } else { + None + } + } else { + None + } + }) + .collect(); + + this_children.extend( + edges + .flat_map(|edge| self.search_children_prevent_cycle(edge.source(), edge_ty, seen)) + .collect::>(), + ); + this_children + } + + fn find_child_exclude_via( + &self, + start: NodeIdx, + edge_ty: &::Edge, + exclude_edges: &[::Edge], + find_fn: &impl Fn(NodeIdx, &Self) -> Option, + ) -> Option { + tracing::trace!("find_child_exclude_via"); + let mut seen = Default::default(); + self.find_child_exclude_via_prevent_cycle(start, edge_ty, exclude_edges, find_fn, &mut seen) + } + + fn find_child_exclude_via_prevent_cycle( + &self, + start: NodeIdx, + edge_ty: &::Edge, + exclude_edges: &[::Edge], + find_fn: &impl Fn(NodeIdx, &Self) -> Option, + seen: &mut BTreeSet, + ) -> Option { + if seen.contains(&start) { + return None; + } else { + seen.insert(start); + } + + let edges = self + .graph() + .edges_directed(start, Direction::Incoming) + .filter(|edge| !exclude_edges.contains(edge.weight())); + if let Some(node) = edges + .clone() + .filter_map(|edge| { + if edge.weight() == edge_ty { + Some(edge.source()) + } else { + None + } + }) + .find(|node| find_fn(*node, self).is_some()) + { + Some(node) + } else { + edges + .clone() + .map(|edge| edge.source()) + .find_map(|node| self.find_child_exclude_via(node, edge_ty, exclude_edges, find_fn)) + } + } + + fn search_children_exclude_via( + &self, + start: NodeIdx, + edge_ty: &::Edge, + exclude_edges: &[::Edge], + ) -> BTreeSet { + tracing::trace!("search_children_exclude_via"); + let mut seen = Default::default(); + self.search_children_exclude_via_prevent_cycle(start, edge_ty, exclude_edges, &mut seen) + } + + fn search_children_exclude_via_prevent_cycle( + &self, + start: NodeIdx, + edge_ty: &::Edge, + exclude_edges: &[::Edge], + seen: &mut BTreeSet, + ) -> BTreeSet { + if seen.contains(&start) { + return Default::default(); + } else { + seen.insert(start); + } + + let edges = self + .graph() + .edges_directed(start, Direction::Incoming) + .filter(|edge| !exclude_edges.contains(edge.weight())); + let mut this_children: BTreeSet = edges + .clone() + .filter_map(|edge| { + if edge.weight() == edge_ty { + if !seen.contains(&edge.source()) { + Some(edge.source()) + } else { + None + } + } else { + None + } + }) + .collect(); + seen.insert(start); + + this_children.extend( + edges + .flat_map(|edge| { + if !seen.contains(&edge.source()) { + self.search_children_exclude_via_prevent_cycle( + edge.source(), + edge_ty, + exclude_edges, + seen, + ) + } else { + Default::default() + } + }) + .collect::>(), + ); + this_children + } + + fn search_children_include_via( + &self, + start: NodeIdx, + edge_ty: &::Edge, + include_edges: &[::Edge], + ) -> BTreeSet { + tracing::trace!("search_children_include_via"); + let mut seen = Default::default(); + 
self.search_children_include_via_prevent_cycle(start, edge_ty, include_edges, &mut seen) + } + + fn search_children_include_via_prevent_cycle( + &self, + start: NodeIdx, + edge_ty: &::Edge, + include_edges: &[::Edge], + seen: &mut BTreeSet, + ) -> BTreeSet { + if seen.contains(&start) { + return Default::default(); + } else { + seen.insert(start); + } + + let mut edges: Vec<_> = self + .graph() + .edges_directed(start, Direction::Incoming) + .collect(); + edges = edges + .into_iter() + .filter(|edge| include_edges.contains(edge.weight())) + .collect::>(); + let mut this_children: BTreeSet = edges + .iter() + .filter_map(|edge| { + if edge.weight() == edge_ty { + Some(edge.source()) + } else { + None + } + }) + .collect(); + + this_children.extend( + edges + .clone() + .iter() + .flat_map(|edge| { + self.search_children_include_via_prevent_cycle( + edge.source(), + edge_ty, + include_edges, + seen, + ) + }) + .collect::>(), + ); + this_children + } + + fn search_children_depth( + &self, + start: NodeIdx, + edge_ty: &::Edge, + max_depth: usize, + curr_depth: usize, + ) -> BTreeSet { + tracing::trace!("search_children_depth"); + let mut seen = Default::default(); + self.search_children_depth_prevent_cylce(start, edge_ty, max_depth, curr_depth, &mut seen) + } + + fn search_children_depth_prevent_cylce( + &self, + start: NodeIdx, + edge_ty: &::Edge, + max_depth: usize, + curr_depth: usize, + seen: &mut BTreeSet, + ) -> BTreeSet { + if seen.contains(&start) { + return Default::default(); + } else { + seen.insert(start); + } + + let edges = self.graph().edges_directed(start, Direction::Incoming); + let mut this_children: BTreeSet = edges + .clone() + .filter_map(|edge| { + if edge.weight() == edge_ty { + Some(edge.source()) + } else { + None + } + }) + .collect(); + + if curr_depth < max_depth { + this_children.extend( + edges + .flat_map(|edge| { + self.search_children_depth_prevent_cylce( + edge.source(), + edge_ty, + max_depth, + curr_depth + 1, + seen, + ) + }) + .collect::>(), + ); + } + this_children + } + + /// Gets all children recursively, removing nodes that are connected via an excluded edge + fn children_exclude( + &self, + start: NodeIdx, + max_depth: usize, + exclude_edges: &[::Edge], + ) -> BTreeSet { + tracing::trace!("children"); + let mut seen = Default::default(); + self.children_exclude_prevent_cycle(start, 0, max_depth, exclude_edges, &mut seen) + } + + /// Gets all children recursively up to a certain depth + fn children_exclude_prevent_cycle( + &self, + start: NodeIdx, + curr_depth: usize, + max_depth: usize, + exclude_edges: &[::Edge], + seen: &mut BTreeSet, + ) -> BTreeSet { + if curr_depth > max_depth { + return Default::default(); + } + + if seen.contains(&start) { + return Default::default(); + } else { + seen.insert(start); + } + + let edges = self + .graph() + .edges_directed(start, Direction::Incoming) + .filter(|edge| !exclude_edges.contains(edge.weight())); + + let mut this_children: BTreeSet = + edges.clone().map(|edge| edge.source()).collect(); + + this_children.extend( + edges + .flat_map(|edge| { + self.children_exclude_prevent_cycle( + edge.source(), + curr_depth + 1, + max_depth, + exclude_edges, + seen, + ) + }) + .collect::>(), + ); + this_children + } + + /// Gets all children recursively + fn children(&self, start: NodeIdx) -> BTreeSet { + tracing::trace!("children"); + let mut seen = Default::default(); + self.children_prevent_cycle(start, &mut seen) + } + + /// Gets all children recursively + fn children_prevent_cycle( + &self, + start: NodeIdx, + 
seen: &mut BTreeSet, + ) -> BTreeSet { + if seen.contains(&start) { + return Default::default(); + } else { + seen.insert(start); + } + + let edges = self.graph().edges_directed(start, Direction::Incoming); + let mut this_children: BTreeSet = + edges.clone().map(|edge| edge.source()).collect(); + + this_children.extend( + edges + .flat_map(|edge| self.children_prevent_cycle(edge.source(), seen)) + .collect::>(), + ); + this_children + } + + /// Gets all children edges recursively + fn edges_for_nodes( + &self, + nodes: &BTreeSet, + ) -> BTreeSet<( + NodeIdx, + NodeIdx, + ::Edge, + EdgeIndex, + )> + where + ::Edge: Ord, + { + tracing::trace!("children_edges"); + + nodes + .iter() + .flat_map(|node| { + self.graph() + .edges_directed(*node, Direction::Incoming) + .map(|edge| (edge.source(), edge.target(), *edge.weight(), edge.id())) + .collect::::Edge, + EdgeIndex, + )>>() + }) + .collect() + } + + /// Gets all children edges recursively + fn children_edges( + &self, + start: NodeIdx, + ) -> BTreeSet<( + NodeIdx, + NodeIdx, + ::Edge, + EdgeIndex, + )> + where + ::Edge: Ord, + { + tracing::trace!("children_edges"); + let mut seen = Default::default(); + self.children_edges_prevent_cycle(start, &mut seen) + } + + fn children_edges_prevent_cycle( + &self, + start: NodeIdx, + seen: &mut BTreeSet, + ) -> BTreeSet<( + NodeIdx, + NodeIdx, + ::Edge, + EdgeIndex, + )> + where + ::Edge: Ord, + { + if seen.contains(&start) { + return Default::default(); + } else { + seen.insert(start); + } + + let edges = self.graph().edges_directed(start, Direction::Incoming); + let mut this_children_edges: BTreeSet<( + NodeIdx, + NodeIdx, + ::Edge, + EdgeIndex, + )> = edges + .clone() + .map(|edge| (edge.source(), edge.target(), *edge.weight(), edge.id())) + .collect(); + + this_children_edges.extend( + edges + .flat_map(|edge| self.children_edges_prevent_cycle(edge.source(), seen)) + .collect::::Edge, + EdgeIndex, + )>>(), + ); + this_children_edges + } + + /// Finds any child nodes that have some edge `edge_ty` incoming. Builds up a mapping of these + /// + /// i.e.: a -my_edge-> b -other_edge-> c -my_edge-> d + /// + /// This function would build a map { a: [b], c: [d] } if we are looking for `my_edge` and start at a. 
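+    ///
+    /// A minimal usage sketch (same illustrative names as the `search_children` example above):
+    ///
+    /// ```ignore
+    /// let grouped = analyzer.nodes_with_children(a, &my_edge);
+    /// // For the example graph above this is Some({ a: {b}, c: {d} });
+    /// // `None` is returned when no matching children are found.
+    /// assert!(grouped.is_some());
+    /// ```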
+ fn nodes_with_children( + &self, + start: NodeIdx, + edge_ty: &::Edge, + ) -> Option>> { + tracing::trace!("nodes_with_children"); + let mut seen = Default::default(); + self.nodes_with_children_prevent_cycles(start, edge_ty, &mut seen) + } + + fn nodes_with_children_prevent_cycles( + &self, + start: NodeIdx, + edge_ty: &::Edge, + seen: &mut BTreeSet, + ) -> Option>> { + if seen.contains(&start) { + return None; + } else { + seen.insert(start); + } + let edges = self.graph().edges_directed(start, Direction::Incoming); + let mut map: BTreeMap> = Default::default(); + + let this_children: BTreeSet = edges + .clone() + .filter_map(|edge| { + if edge.weight() == edge_ty { + Some(edge.source()) + } else { + None + } + }) + .collect(); + + if !this_children.is_empty() { + map.insert(start, this_children); + } + map.extend( + edges + .filter_map(|edge| { + self.nodes_with_children_prevent_cycles(edge.source(), edge_ty, seen) + }) + .flatten() + .collect::>>(), + ); + if map.is_empty() { + None + } else { + Some(map) + } + } +} diff --git a/crates/solc-expressions/Cargo.toml b/crates/solc-expressions/Cargo.toml new file mode 100644 index 00000000..d94fd868 --- /dev/null +++ b/crates/solc-expressions/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "solc-expressions" +description = "Pyrometer's parsing of solidity-based expressions" + +version.workspace = true +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +analyzers.workspace = true +graph.workspace = true +shared.workspace = true + +petgraph.workspace = true +solang-parser.workspace = true +ethers-core.workspace = true +hex.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true + +keccak-hash = "0.10.0" diff --git a/crates/solc-expressions/src/array.rs b/crates/solc-expressions/src/array.rs new file mode 100644 index 00000000..a1d61761 --- /dev/null +++ b/crates/solc-expressions/src/array.rs @@ -0,0 +1,485 @@ +use crate::{ + require::Require, variable::Variable, ContextBuilder, ExprErr, ExpressionParser, IntoExprErr, + ListAccess, +}; + +use graph::{ + elem::{Elem, RangeDyn, RangeOp}, + nodes::{Builtin, Concrete, ContextNode, ContextVar, ContextVarNode, ExprRet, TmpConstruction}, + AnalyzerBackend, ContextEdge, Edge, Node, VarType, +}; +use shared::RangeArena; + +use solang_parser::{ + helpers::CodeLocation, + pt::{Expression, Loc}, +}; + +impl Array for T where T: AnalyzerBackend + Sized {} +/// Handles arrays +pub trait Array: AnalyzerBackend + Sized { + /// Gets the array type + #[tracing::instrument(level = "trace", skip_all)] + fn array_ty( + &mut self, + arena: &mut RangeArena>, + ty_expr: &Expression, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + self.parse_ctx_expr(arena, ty_expr, ctx)?; + self.apply_to_edges(ctx, ty_expr.loc(), arena, &|analyzer, arena, ctx, loc| { + if let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
{ + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.match_ty(ctx, ty_expr, ret) + } else { + Err(ExprErr::NoLhs( + loc, + "No array specified for getting array type".to_string(), + )) + } + }) + } + + fn match_ty( + &mut self, + ctx: ContextNode, + ty_expr: &Expression, + ret: ExprRet, + ) -> Result<(), ExprErr> { + match ret { + ExprRet::Single(inner_ty) | ExprRet::SingleLiteral(inner_ty) => { + if let Some(var_type) = VarType::try_from_idx(self, inner_ty) { + let dyn_b = Builtin::Array(var_type); + if let Some(idx) = self.builtins().get(&dyn_b) { + ctx.push_expr(ExprRet::Single(*idx), self) + .into_expr_err(ty_expr.loc())?; + } else { + let idx = self.add_node(Node::Builtin(dyn_b.clone())); + self.builtins_mut().insert(dyn_b, idx); + ctx.push_expr(ExprRet::Single(idx), self) + .into_expr_err(ty_expr.loc())?; + } + Ok(()) + } else { + Err(ExprErr::ArrayTy(ty_expr.loc(), "Expected to be able to convert to a var type from an index to determine array type. This is a bug. Please report it at github.com/nascentxyz/pyrometer.".to_string())) + } + } + ExprRet::Multi(inner) => { + inner + .into_iter() + .map(|i| self.match_ty(ctx, ty_expr, i)) + .collect::, ExprErr>>()?; + Ok(()) + } + ExprRet::CtxKilled(kind) => { + ctx.kill(self, ty_expr.loc(), kind) + .into_expr_err(ty_expr.loc())?; + Ok(()) + } + ExprRet::Null => Ok(()), + } + } + + /// Indexes into an array + #[tracing::instrument(level = "trace", skip_all)] + fn index_into_array( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + ty_expr: &Expression, + index_expr: &Expression, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + tracing::trace!("Indexing into array"); + self.parse_ctx_expr(arena, index_expr, ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(index_tys) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs( + loc, + "Could not find the index variable".to_string(), + )); + }; + if matches!(index_tys, ExprRet::CtxKilled(_)) { + ctx.push_expr(index_tys, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.parse_ctx_expr(arena, ty_expr, ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(inner_tys) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { + return Err(ExprErr::NoLhs(loc, "Could not find the array".to_string())); + }; + if matches!(inner_tys, ExprRet::CtxKilled(_)) { + ctx.push_expr(inner_tys, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.index_into_array_inner( + arena, + ctx, + loc, + inner_tys.flatten(), + index_tys.clone().flatten(), + ) + }) + }) + } + + #[tracing::instrument(level = "trace", skip_all)] + fn index_into_array_inner( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + inner_paths: ExprRet, + index_paths: ExprRet, + ) -> Result<(), ExprErr> { + match (inner_paths, index_paths) { + (_, ExprRet::Null) | (ExprRet::Null, _) => Ok(()), + (_, ExprRet::CtxKilled(kind)) => { + ctx.kill(self, loc, kind).into_expr_err(loc) + } + (ExprRet::CtxKilled(kind), _) => { + ctx.kill(self, loc, kind).into_expr_err(loc) + } + (ExprRet::Single(parent), ExprRet::Single(index)) | (ExprRet::Single(parent), ExprRet::SingleLiteral(index)) => { + let index = ContextVarNode::from(index).latest_version(self); + let parent = ContextVarNode::from(parent).latest_version(self); + let _ = self.index_into_array_raw(arena, ctx, loc, index, parent, true, false)?; + Ok(()) + } + e => Err(ExprErr::ArrayIndex(loc, format!("Expected single expr evaluation of index expression, but was: {e:?}. This is a bug. Please report it at github.com/nascentxyz/pyrometer."))), + } + } + + fn index_into_array_raw( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + index: ContextVarNode, + parent: ContextVarNode, + length_requirement: bool, + return_var: bool, + ) -> Result, ExprErr> { + let idx = self.advance_var_in_ctx(index, loc, ctx)?; + if length_requirement + && !parent.is_mapping(self).into_expr_err(loc)? + && parent.is_indexable(self).into_expr_err(loc)? + { + let len_var = self + .get_length(arena, ctx, loc, parent, true)? + .unwrap() + .latest_version(self); + self.require( + arena, + len_var.latest_version(self), + idx.latest_version(self), + ctx, + loc, + RangeOp::Gt, + RangeOp::Lt, + (RangeOp::Lte, RangeOp::Gte), + )?; + } + + let name = format!( + "{}[{}]", + parent.name(self).into_expr_err(loc)?, + index.name(self).into_expr_err(loc)? + ); + if let Some(index_var) = ctx.var_by_name_or_recurse(self, &name).into_expr_err(loc)? { + let index_var = index_var.latest_version(self); + let index_var = self.advance_var_in_ctx(index_var, loc, ctx)?; + if !return_var { + ctx.push_expr(ExprRet::Single(index_var.into()), self) + .into_expr_err(loc)?; + Ok(None) + } else { + Ok(Some(index_var)) + } + } else { + let ty = parent.ty(self).into_expr_err(loc)?.clone(); + + let ty = ty.dynamic_underlying_ty(self).into_expr_err(loc)?; + let has_range = ty.ref_range(self).into_expr_err(loc)?.is_some(); + let index_access_var = ContextVar { + loc: Some(loc), + name: name.clone(), + display_name: format!( + "{}[{}]", + parent.display_name(self).into_expr_err(loc)?, + index.display_name(self).into_expr_err(loc)? 
+ ), + storage: *parent.storage(self).into_expr_err(loc)?, + is_tmp: false, + tmp_of: Some(TmpConstruction::new( + parent, + RangeOp::SetIndices, + Some(index), + )), + dep_on: { + let mut deps = parent.dependent_on(self, true).into_expr_err(loc)?; + deps.extend(index.dependent_on(self, true).into_expr_err(loc)?); + Some(deps) + }, + is_symbolic: true, + is_return: false, + ty, + }; + + let idx_access_node = self.add_node(Node::ContextVar(index_access_var)); + self.add_edge( + idx_access_node, + parent, + Edge::Context(ContextEdge::IndexAccess), + ); + self.add_edge(idx_access_node, ctx, Edge::Context(ContextEdge::Variable)); + ctx.add_var(idx_access_node.into(), self) + .into_expr_err(loc)?; + self.add_edge(index, idx_access_node, Edge::Context(ContextEdge::Index)); + + let idx_access_cvar = if has_range { + let min = Elem::from(parent) + .get_index(index.into()) + .max(ContextVarNode::from(idx_access_node).into()); //.range_min(self).unwrap().unwrap()); + let max = Elem::from(parent) + .get_index(index.into()) + .min(ContextVarNode::from(idx_access_node).into()); //.range_max(self).unwrap().unwrap()); + + let idx_access_cvar = + self.advance_var_in_ctx(ContextVarNode::from(idx_access_node), loc, ctx)?; + + idx_access_cvar + .set_range_min(self, arena, min) + .into_expr_err(loc)?; + idx_access_cvar + .set_range_max(self, arena, max) + .into_expr_err(loc)?; + + if idx_access_cvar + .underlying(self) + .into_expr_err(loc)? + .ty + .is_dyn_builtin(self) + .into_expr_err(loc)? + { + // if the index access is also an array, produce a length variable + // we specify to return the variable because we dont want it on the stack + let _ = self.get_length(arena, ctx, loc, idx_access_node.into(), true)?; + } + idx_access_cvar + } else { + ContextVarNode::from(idx_access_node) + }; + + if !return_var { + ctx.push_expr( + ExprRet::Single(idx_access_cvar.latest_version(self).into()), + self, + ) + .into_expr_err(loc)?; + Ok(None) + } else { + Ok(Some(idx_access_cvar.latest_version(self))) + } + } + } + + fn update_array_if_index_access( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + maybe_index_access: ContextVarNode, + new_value: ContextVarNode, + ) -> Result<(), ExprErr> { + if let Some(arr) = maybe_index_access.index_access_to_array(self) { + // Was indeed an indexed value + if let Some(index) = maybe_index_access.index_access_to_index(self) { + // Found the associated index + let next_arr = self.advance_var_in_ctx(arr.latest_version(self), loc, ctx)?; + if next_arr + .underlying(self) + .into_expr_err(loc)? + .ty + .is_dyn_builtin(self) + .into_expr_err(loc)? + { + // update the range + let min = Elem::from(arr).set_indices(RangeDyn::new_for_indices( + vec![(index.into(), new_value.into())], + loc, + )); + let max = Elem::from(arr).set_indices(RangeDyn::new_for_indices( + vec![(index.into(), new_value.into())], + loc, + )); + + next_arr + .set_range_min(self, arena, min) + .into_expr_err(loc)?; + next_arr + .set_range_max(self, arena, max) + .into_expr_err(loc)?; + } + + // handle nested arrays, i.e. 
if: + // uint256[][] memory z; + // z[x][y] = 5; + // first pass sets z[x][y] = 5, second pass needs to set z[x] = x + self.update_array_if_index_access( + arena, + ctx, + loc, + next_arr.latest_version(self), + next_arr.latest_version(self), + )?; + } + } + Ok(()) + } + + fn update_array_if_length_var( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + maybe_length: ContextVarNode, + ) -> Result<(), ExprErr> { + if let Some(backing_arr) = maybe_length.len_var_to_array(self).into_expr_err(loc)? { + let next_arr = self.advance_var_in_ctx(backing_arr.latest_version(self), loc, ctx)?; + let new_len = Elem::from(backing_arr).set_length(maybe_length.into()); + next_arr + .set_range_min(self, arena, new_len.clone()) + .into_expr_err(loc)?; + next_arr + .set_range_max(self, arena, new_len) + .into_expr_err(loc)?; + } + Ok(()) + } + + fn set_var_as_length( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + new_length: ContextVarNode, + backing_arr: ContextVarNode, + ) -> Result<(), ExprErr> { + let next_arr = self.advance_var_in_ctx(backing_arr.latest_version(self), loc, ctx)?; + let new_len = Elem::from(backing_arr).get_length().max(new_length.into()); + let min = Elem::from(backing_arr).set_length(new_len); + + let new_len = Elem::from(backing_arr).get_length().min(new_length.into()); + let max = Elem::from(backing_arr).set_length(new_len); + + next_arr + .set_range_min(self, arena, min) + .into_expr_err(loc)?; + next_arr + .set_range_max(self, arena, max) + .into_expr_err(loc)?; + + self.add_edge( + new_length, + next_arr, + Edge::Context(ContextEdge::AttrAccess("length")), + ); + Ok(()) + } + + fn update_array_from_index_access( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + index: ContextVarNode, + access: ContextVarNode, + backing_arr: ContextVarNode, + ) -> Result<(), ExprErr> { + let next_arr = self.advance_var_in_ctx(backing_arr.latest_version(self), loc, ctx)?; + if next_arr + .underlying(self) + .into_expr_err(loc)? + .ty + .is_dyn_builtin(self) + .into_expr_err(loc)? + { + // update the range + let min = Elem::from(backing_arr).set_indices(RangeDyn::new_for_indices( + vec![(index.into(), access.into())], + loc, + )); + let max = Elem::from(backing_arr).set_indices(RangeDyn::new_for_indices( + vec![(index.into(), access.into())], + loc, + )); + next_arr + .set_range_min(self, arena, min) + .into_expr_err(loc)?; + next_arr + .set_range_max(self, arena, max) + .into_expr_err(loc)?; + } + + // handle nested arrays + if let (Some(backing_arr), Some(parent_nested_index)) = ( + next_arr.index_access_to_array(self), + next_arr.index_access_to_index(self), + ) { + self.update_array_from_index_access( + arena, + ctx, + loc, + parent_nested_index, + next_arr, + backing_arr.latest_version(self), + ) + } else { + Ok(()) + } + } + + fn update_array_min_if_length( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + maybe_length: ContextVarNode, + ) -> Result<(), ExprErr> { + if let Some(backing_arr) = maybe_length.len_var_to_array(self).into_expr_err(loc)? 
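+        // `maybe_length` only matters if it is the `.length` variable of some
+        // backing array; if so, raise that array's minimum length to match it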
{ + let next_arr = self.advance_var_in_ctx(backing_arr.latest_version(self), loc, ctx)?; + let new_len = Elem::from(backing_arr) + .get_length() + .max(maybe_length.into()); + let min = Elem::from(backing_arr).set_length(new_len); + next_arr + .set_range_min(self, arena, min) + .into_expr_err(loc)?; + } + Ok(()) + } + + fn update_array_max_if_length( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + maybe_length: ContextVarNode, + ) -> Result<(), ExprErr> { + if let Some(backing_arr) = maybe_length.len_var_to_array(self).into_expr_err(loc)? { + let next_arr = self.advance_var_in_ctx(backing_arr.latest_version(self), loc, ctx)?; + let new_len = Elem::from(backing_arr) + .get_length() + .min(maybe_length.into()); + let max = Elem::from(backing_arr).set_length(new_len); + next_arr + .set_range_max(self, arena, max) + .into_expr_err(loc)?; + } + Ok(()) + } +} diff --git a/crates/solc-expressions/src/assign.rs b/crates/solc-expressions/src/assign.rs new file mode 100644 index 00000000..38c89a6e --- /dev/null +++ b/crates/solc-expressions/src/assign.rs @@ -0,0 +1,269 @@ +use crate::{ + array::Array, variable::Variable, ContextBuilder, ExprErr, ExpressionParser, IntoExprErr, + ListAccess, +}; + +use graph::{ + elem::{Elem, RangeElem}, + nodes::{Concrete, ContextNode, ContextVarNode, ExprRet}, + AnalyzerBackend, ContextEdge, Edge, GraphError, Node, +}; + +use shared::RangeArena; +use solang_parser::pt::{Expression, Loc}; + +impl Assign for T where T: AnalyzerBackend + Sized {} +/// Handles assignments +pub trait Assign: AnalyzerBackend + Sized { + #[tracing::instrument(level = "trace", skip_all)] + /// Parse an assignment expression + fn assign_exprs( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + lhs_expr: &Expression, + rhs_expr: &Expression, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + self.parse_ctx_expr(arena, rhs_expr, ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs( + loc, + "Assign operation had no right hand side".to_string(), + )); + }; + + if matches!(rhs_paths, ExprRet::CtxKilled(_)) { + ctx.push_expr(rhs_paths, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.parse_ctx_expr(arena, lhs_expr, ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { + return Err(ExprErr::NoLhs( + loc, + "Assign operation had no left hand side".to_string(), + )); + }; + if matches!(lhs_paths, ExprRet::CtxKilled(_)) { + ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.match_assign_sides(arena, ctx, loc, &lhs_paths.flatten(), &rhs_paths)?; + Ok(()) + }) + }) + } + + /// Match on the [`ExprRet`]s of an assignment expression + fn match_assign_sides( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + lhs_paths: &ExprRet, + rhs_paths: &ExprRet, + ) -> Result<(), ExprErr> { + match (lhs_paths, rhs_paths) { + (_, ExprRet::Null) | (ExprRet::Null, _) => Ok(()), + (ExprRet::CtxKilled(kind), _) | (_, ExprRet::CtxKilled(kind)) => { + ctx.kill(self, loc, *kind).into_expr_err(loc)?; + Ok(()) + } + (ExprRet::Single(lhs), ExprRet::SingleLiteral(rhs)) => { + let lhs_cvar = ContextVarNode::from(*lhs).latest_version(self); + let rhs_cvar = ContextVarNode::from(*rhs).latest_version(self); + // let res = rhs_cvar + // .literal_cast_from(&lhs_cvar, self) + // .into_expr_err(loc); + // let _ = self.add_if_err(res); + ctx.push_expr(self.assign(arena, loc, lhs_cvar, rhs_cvar, ctx)?, self) + .into_expr_err(loc)?; + Ok(()) + } + (ExprRet::Single(lhs), ExprRet::Single(rhs)) => { + let lhs_cvar = ContextVarNode::from(*lhs).latest_version(self); + let rhs_cvar = ContextVarNode::from(*rhs).latest_version(self); + ctx.push_expr(self.assign(arena, loc, lhs_cvar, rhs_cvar, ctx)?, self) + .into_expr_err(loc)?; + Ok(()) + } + (l @ ExprRet::Single(_), ExprRet::Multi(rhs_sides)) => rhs_sides + .iter() + .try_for_each(|expr_ret| self.match_assign_sides(arena, ctx, loc, l, expr_ret)), + (ExprRet::Multi(lhs_sides), r @ ExprRet::Single(_) | r @ ExprRet::SingleLiteral(_)) => { + lhs_sides + .iter() + .try_for_each(|expr_ret| self.match_assign_sides(arena, ctx, loc, expr_ret, r)) + } + (ExprRet::Multi(lhs_sides), ExprRet::Multi(rhs_sides)) => { + // try to zip sides if they are the same length + if lhs_sides.len() == rhs_sides.len() { + lhs_sides.iter().zip(rhs_sides.iter()).try_for_each( + |(lhs_expr_ret, rhs_expr_ret)| { + self.match_assign_sides(arena, ctx, loc, lhs_expr_ret, rhs_expr_ret) + }, + ) + } else { + rhs_sides.iter().try_for_each(|rhs_expr_ret| { + self.match_assign_sides(arena, ctx, loc, lhs_paths, rhs_expr_ret) + }) + } + } + (e, f) => todo!("any: {:?} {:?}", e, f), + } + } + + /// Perform an assignment + fn assign( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + lhs_cvar: ContextVarNode, + rhs_cvar: ContextVarNode, + ctx: ContextNode, + ) -> Result { + tracing::trace!( + "assigning: {} to {}", + rhs_cvar.display_name(self).unwrap(), + lhs_cvar.display_name(self).unwrap(), + ); + + rhs_cvar + .cast_from(&lhs_cvar, self, arena) + .into_expr_err(loc)?; + + let (new_lower_bound, new_upper_bound) = ( + Elem::from(rhs_cvar.latest_version(self)), + Elem::from(rhs_cvar.latest_version(self)), + ); + + let needs_forcible = new_lower_bound + .depends_on(lhs_cvar, &mut vec![], self, arena) + .into_expr_err(loc)? + || new_upper_bound + .depends_on(lhs_cvar, &mut vec![], self, arena) + .into_expr_err(loc)?; + + let new_lhs = if needs_forcible { + self.advance_var_in_ctx_forcible(lhs_cvar.latest_version(self), loc, ctx, true)? + } else { + self.advance_var_in_ctx(lhs_cvar.latest_version(self), loc, ctx)? 
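+            // the new bounds do not refer back to the lhs (e.g. `x = y + 1`),
+            // so a plain version advance of the lhs is sufficient here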
+ }; + + new_lhs.underlying_mut(self).into_expr_err(loc)?.tmp_of = + rhs_cvar.tmp_of(self).into_expr_err(loc)?; + + if let Some(ref mut dep_on) = new_lhs.underlying_mut(self).into_expr_err(loc)?.dep_on { + dep_on.push(rhs_cvar) + } else { + new_lhs.set_dependent_on(self).into_expr_err(loc)?; + } + + if lhs_cvar.is_storage(self).into_expr_err(loc)? { + self.add_edge(new_lhs, rhs_cvar, Edge::Context(ContextEdge::StorageWrite)); + } + + if rhs_cvar.underlying(self).into_expr_err(loc)?.is_return { + if let Some(rhs_ctx) = rhs_cvar.maybe_ctx(self) { + self.add_edge( + rhs_cvar, + new_lhs, + Edge::Context(ContextEdge::ReturnAssign( + rhs_ctx.underlying(self).unwrap().ext_fn_call.is_some(), + )), + ); + } else { + return Err(ExprErr::GraphError( + loc, + GraphError::DetachedVariable(format!( + "No context for variable: {}, node idx: {}, curr ctx: {}, lhs ctx: {}", + rhs_cvar.display_name(self).unwrap(), + rhs_cvar.0, + ctx.path(self), + lhs_cvar.ctx(self).path(self) + )), + )); + } + } + + if !lhs_cvar.ty_eq(&rhs_cvar, self).into_expr_err(loc)? { + let cast_to_min = match lhs_cvar.range_min(self).into_expr_err(loc)? { + Some(v) => v, + None => { + return Err(ExprErr::BadRange( + loc, + format!( + "No range during cast? {:?}, {:?}", + lhs_cvar.underlying(self).unwrap(), + rhs_cvar.underlying(self).unwrap(), + ), + )) + } + }; + + let cast_to_max = match lhs_cvar.range_max(self).into_expr_err(loc)? { + Some(v) => v, + None => { + return Err(ExprErr::BadRange( + loc, + format!( + "No range during cast? {:?}, {:?}", + lhs_cvar.underlying(self).unwrap(), + rhs_cvar.underlying(self).unwrap(), + ), + )) + } + }; + + let _ = new_lhs.try_set_range_min(self, arena, new_lower_bound.cast(cast_to_min)); + let _ = new_lhs.try_set_range_max(self, arena, new_upper_bound.cast(cast_to_max)); + } else { + let _ = new_lhs.try_set_range_min(self, arena, new_lower_bound); + let _ = new_lhs.try_set_range_max(self, arena, new_upper_bound); + } + if let Some(rhs_range) = rhs_cvar.ref_range(self).into_expr_err(loc)? { + let res = new_lhs + .try_set_range_exclusions(self, rhs_range.exclusions.clone()) + .into_expr_err(loc); + let _ = self.add_if_err(res); + } + + if rhs_cvar.is_indexable(self).into_expr_err(loc)? { + // rhs is indexable. 
get the length attribute, create a new length for the lhs, + // and perform assign + let rhs_len_cvar = self.get_length(arena, ctx, loc, rhs_cvar, true)?.unwrap(); + let lhs_len_cvar = self.get_length(arena, ctx, loc, lhs_cvar, true)?.unwrap(); + self.assign(arena, loc, lhs_len_cvar, rhs_len_cvar, ctx)?; + // update the range + self.update_array_if_length_var(arena, ctx, loc, lhs_len_cvar.latest_version(self))?; + } + + self.update_array_if_index_access(arena, ctx, loc, lhs_cvar, rhs_cvar)?; + + // handle struct assignment + if let Ok(fields) = rhs_cvar.struct_to_fields(self) { + if !fields.is_empty() { + fields.into_iter().for_each(|field| { + let mut new_var = field.underlying(self).unwrap().clone(); + let field_name = field.name(self).unwrap(); + let field_name = field_name.split('.').collect::>()[1]; + let new_name = format!("{}.{field_name}", lhs_cvar.name(self).unwrap()); + new_var.name.clone_from(&new_name); + new_var.display_name = new_name; + let new_field = ContextVarNode::from(self.add_node(Node::ContextVar(new_var))); + self.add_edge( + new_field, + lhs_cvar.first_version(self), + Edge::Context(ContextEdge::AttrAccess("field")), + ); + }) + } + } + + // advance the rhs variable to avoid recursion issues + self.advance_var_in_ctx_forcible(rhs_cvar.latest_version(self), loc, ctx, true)?; + Ok(ExprRet::Single(new_lhs.into())) + } +} diff --git a/crates/solc-expressions/src/bin_op.rs b/crates/solc-expressions/src/bin_op.rs new file mode 100644 index 00000000..57aad7ff --- /dev/null +++ b/crates/solc-expressions/src/bin_op.rs @@ -0,0 +1,747 @@ +use crate::{ + require::Require, variable::Variable, ContextBuilder, ExprErr, ExpressionParser, IntoExprErr, +}; + +use graph::{ + elem::*, + nodes::{ + Concrete, ContextNode, ContextVar, ContextVarNode, ExprRet, KilledKind, TmpConstruction, + }, + AnalyzerBackend, ContextEdge, Edge, Node, +}; +use shared::RangeArena; + +use ethers_core::types::U256; +use solang_parser::pt::{Expression, Loc}; + +impl BinOp for T where T: AnalyzerBackend + Sized {} +/// Handles binary operations (`+`, `-`, `/`, etc.) +pub trait BinOp: AnalyzerBackend + Sized { + /// Evaluate and execute a binary operation expression + #[tracing::instrument(level = "trace", skip_all)] + fn op_expr( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + lhs_expr: &Expression, + rhs_expr: &Expression, + ctx: ContextNode, + op: RangeOp, + assign: bool, + ) -> Result<(), ExprErr> { + ctx.add_gas_cost(self, shared::gas::BIN_OP_GAS) + .into_expr_err(loc)?; + self.parse_ctx_expr(arena, rhs_expr, ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs(loc, "Binary operation had no right hand side".to_string())) + }; + if matches!(rhs_paths, ExprRet::CtxKilled(_)) { + ctx.push_expr(rhs_paths, analyzer).into_expr_err(loc)?; + return Ok(()); + } + let rhs_paths = rhs_paths.flatten(); + let rhs_ctx = ctx; + analyzer.parse_ctx_expr(arena, lhs_expr, ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { + return Err(ExprErr::NoLhs(loc, format!("Binary operation had no left hand side, Expr: {lhs_expr:#?}, rhs ctx: {}, curr ctx: {}", rhs_ctx.path(analyzer), ctx.path(analyzer)))) + }; + if matches!(lhs_paths, ExprRet::CtxKilled(_)) { + ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; + return Ok(()); + } + let lhs_paths = lhs_paths.flatten(); + analyzer.op_match(arena, ctx, loc, &lhs_paths, &rhs_paths, op, assign) + }) + }) + } + + fn op_match( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + lhs_paths: &ExprRet, + rhs_paths: &ExprRet, + op: RangeOp, + assign: bool, + ) -> Result<(), ExprErr> { + match (lhs_paths, rhs_paths) { + (ExprRet::Null, _) => Err(ExprErr::NoLhs( + loc, + "No left hand side provided for binary operation".to_string(), + )), + (_, ExprRet::Null) => Err(ExprErr::NoRhs( + loc, + "No right hand side provided for binary operation".to_string(), + )), + (ExprRet::SingleLiteral(lhs), ExprRet::SingleLiteral(rhs)) => { + let lhs_cvar = ContextVarNode::from(*lhs).latest_version(self); + let rhs_cvar = ContextVarNode::from(*rhs).latest_version(self); + lhs_cvar.try_increase_size(self, arena).into_expr_err(loc)?; + rhs_cvar.try_increase_size(self, arena).into_expr_err(loc)?; + ctx.push_expr( + self.op(arena, loc, lhs_cvar, rhs_cvar, ctx, op, assign)?, + self, + ) + .into_expr_err(loc)?; + Ok(()) + } + (ExprRet::SingleLiteral(lhs), ExprRet::Single(rhs)) => { + ContextVarNode::from(*lhs) + .cast_from(&ContextVarNode::from(*rhs), self, arena) + .into_expr_err(loc)?; + let lhs_cvar = ContextVarNode::from(*lhs).latest_version(self); + let rhs_cvar = ContextVarNode::from(*rhs).latest_version(self); + ctx.push_expr( + self.op(arena, loc, lhs_cvar, rhs_cvar, ctx, op, assign)?, + self, + ) + .into_expr_err(loc)?; + Ok(()) + } + (ExprRet::Single(lhs), ExprRet::SingleLiteral(rhs)) => { + ContextVarNode::from(*rhs) + .cast_from(&ContextVarNode::from(*lhs), self, arena) + .into_expr_err(loc)?; + let lhs_cvar = ContextVarNode::from(*lhs).latest_version(self); + let rhs_cvar = ContextVarNode::from(*rhs).latest_version(self); + ctx.push_expr( + self.op(arena, loc, lhs_cvar, rhs_cvar, ctx, op, assign)?, + self, + ) + .into_expr_err(loc)?; + Ok(()) + } + (ExprRet::Single(lhs), ExprRet::Single(rhs)) => { + let lhs_cvar = ContextVarNode::from(*lhs).latest_version(self); + let rhs_cvar = ContextVarNode::from(*rhs).latest_version(self); + ctx.push_expr( + self.op(arena, loc, lhs_cvar, rhs_cvar, ctx, op, assign)?, + self, + ) + .into_expr_err(loc)?; + Ok(()) + } + (lhs @ ExprRet::Single(..), ExprRet::Multi(rhs_sides)) => { + rhs_sides + .iter() + .map(|expr_ret| self.op_match(arena, ctx, loc, lhs, expr_ret, op, assign)) + .collect::, ExprErr>>()?; + Ok(()) + } + (ExprRet::Multi(lhs_sides), rhs @ ExprRet::Single(..)) => { + lhs_sides + .iter() + .map(|expr_ret| self.op_match(arena, ctx, loc, expr_ret, rhs, op, assign)) + .collect::, ExprErr>>()?; + Ok(()) + } + (_, ExprRet::CtxKilled(kind)) => ctx.kill(self, loc, *kind).into_expr_err(loc), + (ExprRet::CtxKilled(kind), _) => ctx.kill(self, loc, *kind).into_expr_err(loc), + (ExprRet::Multi(lhs_sides), ExprRet::Multi(rhs_sides)) => Err(ExprErr::UnhandledCombo( + loc, + format!("Unhandled combination in binop: {lhs_sides:?} {rhs_sides:?}"), + )), + (l, r) => Err(ExprErr::UnhandledCombo( + loc, + format!("Unhandled combination in binop: {l:?} {r:?}"), + )), + } + } + + /// Execute a binary operation after parsing the expressions + #[tracing::instrument(level = "trace", skip_all)] + fn op( + &mut self, + arena: &mut 
RangeArena>, + loc: Loc, + lhs_cvar: ContextVarNode, + rhs_cvar: ContextVarNode, + ctx: ContextNode, + op: RangeOp, + assign: bool, + ) -> Result { + tracing::trace!( + "binary op: {} {} {}, assign: {}", + lhs_cvar.display_name(self).into_expr_err(loc)?, + op.to_string(), + rhs_cvar.display_name(self).into_expr_err(loc)?, + assign + ); + + let unchecked = match op { + RangeOp::Add(u) | RangeOp::Sub(u) | RangeOp::Mul(u) | RangeOp::Div(u) => u, + _ => false, + }; + + let new_lhs = if assign { + let new = self.advance_var_in_ctx_forcible(lhs_cvar, loc, ctx, true)?; + let underlying = new.underlying_mut(self).into_expr_err(loc)?; + underlying.tmp_of = Some(TmpConstruction::new(lhs_cvar, op, Some(rhs_cvar))); + + if let Some(ref mut dep_on) = underlying.dep_on { + dep_on.push(rhs_cvar) + } else { + new.set_dependent_on(self).into_expr_err(loc)?; + } + + new + } else { + // TODO: simplify the expression such that we match an existing tmp if possible + let mut new_lhs_underlying = + ContextVar::new_bin_op_tmp(lhs_cvar, op, rhs_cvar, ctx, loc, self) + .into_expr_err(loc)?; + if let Ok(Some(existing)) = + self.get_unchanged_tmp_variable(arena, &new_lhs_underlying.display_name, ctx) + { + self.advance_var_in_ctx_forcible(existing, loc, ctx, true)? + } else { + // will potentially mutate the ty from concrete to builtin with a concrete range + new_lhs_underlying + .ty + .concrete_to_builtin(self) + .into_expr_err(loc)?; + + let new_var = self.add_node(Node::ContextVar(new_lhs_underlying)); + ctx.add_var(new_var.into(), self).into_expr_err(loc)?; + self.add_edge(new_var, ctx, Edge::Context(ContextEdge::Variable)); + ContextVarNode::from(new_var) + } + }; + + let new_rhs = rhs_cvar.latest_version(self); + + let expr = Elem::Expr(RangeExpr::::new( + Elem::from(Reference::new(lhs_cvar.latest_version(self).into())), + op, + Elem::from(Reference::new(rhs_cvar.latest_version(self).into())), + )); + let new_lhs = new_lhs.latest_version(self); + new_lhs + .set_range_min(self, arena, expr.clone()) + .into_expr_err(loc)?; + new_lhs + .set_range_max(self, arena, expr) + .into_expr_err(loc)?; + + // to prevent some recursive referencing, forcibly increase lhs_cvar + self.advance_var_in_ctx_forcible(lhs_cvar.latest_version(self), loc, ctx, true)?; + + if !unchecked { + match op { + RangeOp::Div(..) | RangeOp::Mod => { + if let Some(killed) = + self.checked_require_mod_div(arena, lhs_cvar, new_rhs, loc, ctx)? + { + return Ok(killed); + } + } + RangeOp::Sub(..) => { + if let Some(killed) = + self.checked_require_sub(arena, lhs_cvar, new_lhs, new_rhs, loc, ctx)? + { + return Ok(killed); + } + } + RangeOp::Add(..) => { + if let Some(killed) = + self.checked_require_add(arena, lhs_cvar, new_lhs, new_rhs, loc, ctx)? + { + return Ok(killed); + } + } + RangeOp::Mul(..) => { + if let Some(killed) = + self.checked_require_mul(arena, lhs_cvar, new_lhs, new_rhs, loc, ctx)? + { + return Ok(killed); + } + } + RangeOp::Exp => { + if let Some(killed) = + self.checked_require_exp(arena, lhs_cvar, new_lhs, new_rhs, loc, ctx)? + { + return Ok(killed); + } + } + _ => {} + } + } + + Ok(ExprRet::Single(new_lhs.latest_version(self).into())) + } + + #[tracing::instrument(level = "trace", skip_all)] + fn bit_not( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + lhs_expr: &Expression, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + self.parse_ctx_expr(arena, lhs_expr, ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { + return Err(ExprErr::NoRhs( + loc, + "Not operation had no element".to_string(), + )); + }; + + if matches!(lhs, ExprRet::CtxKilled(_)) { + ctx.push_expr(lhs, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.bit_not_inner(arena, ctx, loc, lhs.flatten()) + }) + } + + #[tracing::instrument(level = "trace", skip_all)] + fn bit_not_inner( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + lhs_expr: ExprRet, + ) -> Result<(), ExprErr> { + match lhs_expr { + ExprRet::CtxKilled(kind) => { + ctx.kill(self, loc, kind).into_expr_err(loc)?; + ctx.push_expr(lhs_expr, self).into_expr_err(loc)?; + Ok(()) + } + ExprRet::SingleLiteral(lhs) => { + // TODO: try to pop from the stack and if there is a single element there + // use it as a type hint, then place it back on the stack + ContextVarNode::from(lhs) + .try_increase_size(self, arena) + .into_expr_err(loc)?; + self.bit_not_inner(arena, ctx, loc, ExprRet::Single(lhs))?; + Ok(()) + } + ExprRet::Single(lhs) => { + let lhs_cvar = ContextVarNode::from(lhs); + tracing::trace!( + "bitwise not: {}", + lhs_cvar.display_name(self).into_expr_err(loc)? + ); + let out_var = ContextVar { + loc: Some(loc), + name: format!( + "tmp{}(~{})", + ctx.new_tmp(self).into_expr_err(loc)?, + lhs_cvar.name(self).into_expr_err(loc)?, + ), + display_name: format!("~{}", lhs_cvar.display_name(self).into_expr_err(loc)?,), + storage: None, + is_tmp: true, + tmp_of: Some(TmpConstruction::new(lhs_cvar, RangeOp::BitNot, None)), + dep_on: Some(lhs_cvar.dependent_on(self, true).into_expr_err(loc)?), + is_symbolic: lhs_cvar.is_symbolic(self).into_expr_err(loc)?, + is_return: false, + ty: lhs_cvar.underlying(self).into_expr_err(loc)?.ty.clone(), + }; + + let expr = Elem::Expr(RangeExpr::::new( + Elem::from(Reference::new(lhs_cvar.latest_version(self).into())), + RangeOp::BitNot, + Elem::Null, + )); + + let out_var = ContextVarNode::from(self.add_node(Node::ContextVar(out_var))); + + out_var + .set_range_min(self, arena, expr.clone()) + .into_expr_err(loc)?; + out_var + .set_range_max(self, arena, expr) + .into_expr_err(loc)?; + + self.advance_var_in_ctx_forcible(lhs_cvar, loc, ctx, true)?; + ctx.push_expr(ExprRet::Single(out_var.into()), self) + .into_expr_err(loc)?; + Ok(()) + } + ExprRet::Multi(f) => Err(ExprErr::MultiNot( + loc, + format!("Multiple elements in bitwise not expression: {f:?}"), + )), + ExprRet::Null => Err(ExprErr::NoRhs( + loc, + "No right hand side in `not` expression".to_string(), + )), + } + } + + fn checked_require_mod_div( + &mut self, + arena: &mut RangeArena>, + lhs: ContextVarNode, + rhs: ContextVarNode, + loc: Loc, + ctx: ContextNode, + ) -> Result, ExprErr> { + // x / y || x % y + // revert if div or mod by 0 + if rhs.is_const(self, arena).into_expr_err(loc)? + && rhs + .evaled_range_min(self, arena) + .into_expr_err(loc)? + .expect("No range?") + .range_eq(&Elem::from(Concrete::from(U256::zero())), arena) + { + let res = ctx.kill(self, loc, KilledKind::Revert).into_expr_err(loc); + let _ = self.add_if_err(res); + + return Ok(Some(ExprRet::CtxKilled(KilledKind::Revert))); + } + + // otherwise, require rhs != 0 + let tmp_rhs = self.advance_var_in_ctx(rhs, loc, ctx)?; + let zero_node = self.add_concrete_var(ctx, Concrete::from(U256::zero()), loc)?; + + if self + .require( + arena, + tmp_rhs, + zero_node, + ctx, + loc, + RangeOp::Neq, + RangeOp::Neq, + (RangeOp::Eq, RangeOp::Neq), + )? 
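+            // `require` yields None when the constraint can never hold, i.e. the
+            // rhs must be zero, so treat the division/modulo as a guaranteed revert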
+ .is_none() + { + return Ok(Some(ExprRet::CtxKilled(KilledKind::Revert))); + } + Ok(None) + } + + fn checked_require_sub( + &mut self, + arena: &mut RangeArena>, + lhs: ContextVarNode, + new_lhs: ContextVarNode, + rhs: ContextVarNode, + loc: Loc, + ctx: ContextNode, + ) -> Result, ExprErr> { + // x - y >= type(x).min + let new_lhs = new_lhs.latest_version(self); + let tmp_lhs = self.advance_var_in_ctx_forcible(new_lhs, loc, ctx, true)?; + + // in checked subtraction, we have to make sure x - y >= type(x).min ==> x >= type(x).min + y + // get the lhs min + let min_conc = lhs.ty_min_concrete(self).into_expr_err(loc)?.unwrap(); + let min: ContextVarNode = self.add_concrete_var(ctx, min_conc, loc)?; + + // require lhs - rhs >= type(lhs).min + if self + .require( + arena, + tmp_lhs.latest_version(self), + min, + ctx, + loc, + RangeOp::Gte, + RangeOp::Lte, + (RangeOp::Lte, RangeOp::Gte), + )? + .is_none() + { + return Ok(Some(ExprRet::CtxKilled(KilledKind::Revert))); + } + + // If x and y are signed ints, we have to check that x - -y <= type(x).max + // because it could overflow in the positive direction + let lhs_is_int = lhs.is_int(self).into_expr_err(loc)?; + let rhs_is_int = rhs.is_int(self).into_expr_err(loc)?; + if lhs_is_int && rhs_is_int { + let rhs_min = rhs + .evaled_range_min(self, arena) + .into_expr_err(loc)? + .expect("No range?"); + if rhs_min.is_negative(false, self, arena).into_expr_err(loc)? { + // rhs can be negative, require that lhs <= type(x).max + -rhs + // get the lhs max + let max_conc = lhs.ty_max_concrete(self).into_expr_err(loc)?.unwrap(); + let max: ContextVarNode = self.add_concrete_var(ctx, max_conc, loc)?; + + if self + .require( + arena, + tmp_lhs.latest_version(self), + max, + ctx, + loc, + RangeOp::Lte, + RangeOp::Gte, + (RangeOp::Gte, RangeOp::Lte), + )? + .is_none() + { + return Ok(Some(ExprRet::CtxKilled(KilledKind::Revert))); + } + } + } + Ok(None) + } + + fn checked_require_add( + &mut self, + arena: &mut RangeArena>, + lhs: ContextVarNode, + new_lhs: ContextVarNode, + rhs: ContextVarNode, + loc: Loc, + ctx: ContextNode, + ) -> Result, ExprErr> { + // lhs + rhs <= type(lhs).max + let new_lhs = new_lhs.latest_version(self); + let tmp_lhs = self.advance_var_in_ctx_forcible(new_lhs, loc, ctx, true)?; + + // get type(lhs).max + let max_conc = lhs.ty_max_concrete(self).into_expr_err(loc)?.unwrap(); + let max = self.add_concrete_var(ctx, max_conc, loc)?; + + // require lhs + rhs <= type(lhs).max + if self + .require( + arena, + tmp_lhs.latest_version(self), + max, + ctx, + loc, + RangeOp::Lte, + RangeOp::Gte, + (RangeOp::Gte, RangeOp::Lte), + )? + .is_none() + { + return Ok(Some(ExprRet::CtxKilled(KilledKind::Revert))); + } + + // If x and y are signed ints, we have to check that x + -y >= type(x).min + // because it could overflow in the negative direction + let lhs_is_int = lhs.is_int(self).into_expr_err(loc)?; + let rhs_is_int = rhs.is_int(self).into_expr_err(loc)?; + if lhs_is_int && rhs_is_int { + let rhs_min_is_negative = rhs + .evaled_range_min(self, arena) + .into_expr_err(loc)? + .expect("No range?") + .is_negative(false, self, arena) + .into_expr_err(loc)?; + if rhs_min_is_negative { + // rhs can be negative, require that lhs + rhs >= type(x).min + // get the lhs min + let min_conc = lhs.ty_min_concrete(self).into_expr_err(loc)?.unwrap(); + let min = self.add_concrete_var(ctx, min_conc, loc)?; + + if self + .require( + arena, + new_lhs.latest_version(self), + min, + ctx, + loc, + RangeOp::Gte, + RangeOp::Lte, + (RangeOp::Lte, RangeOp::Gte), + )? 
+ .is_none() + { + return Ok(Some(ExprRet::CtxKilled(KilledKind::Revert))); + } + } + } + + Ok(None) + } + + fn checked_require_mul( + &mut self, + arena: &mut RangeArena>, + lhs: ContextVarNode, + new_lhs: ContextVarNode, + rhs: ContextVarNode, + loc: Loc, + ctx: ContextNode, + ) -> Result, ExprErr> { + // lhs * rhs <= type(lhs).max + let new_lhs = new_lhs.latest_version(self); + let tmp_lhs = self.advance_var_in_ctx_forcible(new_lhs, loc, ctx, true)?; + + // get type(lhs).max + let max_conc = lhs.ty_max_concrete(self).into_expr_err(loc)?.unwrap(); + let max = self.add_concrete_var(ctx, max_conc, loc)?; + + // require lhs * rhs <= type(lhs).max + if self + .require( + arena, + tmp_lhs.latest_version(self), + max, + ctx, + loc, + RangeOp::Lte, + RangeOp::Gte, + (RangeOp::Gte, RangeOp::Lte), + )? + .is_none() + { + return Ok(Some(ExprRet::CtxKilled(KilledKind::Revert))); + } + + // If x and y are signed ints, we have to check that x * -y >= type(x).min + // because it could overflow in the negative direction + let lhs_is_int = lhs.is_int(self).into_expr_err(loc)?; + let rhs_is_int = rhs.is_int(self).into_expr_err(loc)?; + if lhs_is_int || rhs_is_int { + let rhs_min_is_negative = rhs + .evaled_range_min(self, arena) + .into_expr_err(loc)? + .expect("No range?") + .is_negative(false, self, arena) + .into_expr_err(loc)?; + let lhs_min_is_negative = lhs + .evaled_range_min(self, arena) + .into_expr_err(loc)? + .expect("No range?") + .is_negative(false, self, arena) + .into_expr_err(loc)?; + let rhs_max_is_positive = !rhs + .evaled_range_max(self, arena) + .into_expr_err(loc)? + .expect("No range?") + .is_negative(true, self, arena) + .into_expr_err(loc)?; + let lhs_max_is_positive = !lhs + .evaled_range_max(self, arena) + .into_expr_err(loc)? + .expect("No range?") + .is_negative(true, self, arena) + .into_expr_err(loc)?; + + let can_go_very_negative = lhs_min_is_negative && rhs_max_is_positive + || rhs_min_is_negative && lhs_max_is_positive; + if can_go_very_negative { + // signs can be opposite so require that lhs * rhs >= type(x).min + // get the lhs min + let min_conc = lhs.ty_min_concrete(self).into_expr_err(loc)?.unwrap(); + let min = self.add_concrete_var(ctx, min_conc, loc)?; + + if self + .require( + arena, + new_lhs.latest_version(self), + min, + ctx, + loc, + RangeOp::Gte, + RangeOp::Lte, + (RangeOp::Lte, RangeOp::Gte), + )? + .is_none() + { + return Ok(Some(ExprRet::CtxKilled(KilledKind::Revert))); + } + } + } + + Ok(None) + } + + fn checked_require_exp( + &mut self, + arena: &mut RangeArena>, + lhs: ContextVarNode, + new_lhs: ContextVarNode, + rhs: ContextVarNode, + loc: Loc, + ctx: ContextNode, + ) -> Result, ExprErr> { + // exponent must be greater or equal to zero + let zero = rhs.ty_zero_concrete(self).into_expr_err(loc)?.unwrap(); + let zero = self.add_concrete_var(ctx, zero, loc)?; + if self + .require( + arena, + rhs, + zero, + ctx, + loc, + RangeOp::Gte, + RangeOp::Lte, + (RangeOp::Lte, RangeOp::Gte), + )? 
+ .is_none() + { + return Ok(Some(ExprRet::CtxKilled(KilledKind::Revert))); + } + + // lhs ** rhs <= type(lhs).max + let new_lhs = new_lhs.latest_version(self); + let tmp_lhs = self.advance_var_in_ctx_forcible(new_lhs, loc, ctx, true)?; + + // get type(lhs).max + let max_conc = lhs.ty_max_concrete(self).into_expr_err(loc)?.unwrap(); + let max = self.add_concrete_var(ctx, max_conc, loc)?; + + // require lhs ** rhs <= type(lhs).max + if self + .require( + arena, + tmp_lhs.latest_version(self), + max, + ctx, + loc, + RangeOp::Lte, + RangeOp::Gte, + (RangeOp::Gte, RangeOp::Lte), + )? + .is_none() + { + return Ok(Some(ExprRet::CtxKilled(KilledKind::Revert))); + } + + // If x is signed int, we have to check that x ** y >= type(x).min + // because it could overflow in the negative direction + let lhs_is_int = lhs.is_int(self).into_expr_err(loc)?; + if lhs_is_int { + let lhs_min_is_negative = lhs + .evaled_range_min(self, arena) + .into_expr_err(loc)? + .expect("No range?") + .is_negative(false, self, arena) + .into_expr_err(loc)?; + if lhs_min_is_negative { + // rhs can be negative, require that lhs + rhs >= type(x).min + // get the lhs min + let min_conc = lhs.ty_min_concrete(self).into_expr_err(loc)?.unwrap(); + let min = self.add_concrete_var(ctx, min_conc, loc)?; + + if self + .require( + arena, + new_lhs.latest_version(self), + min, + ctx, + loc, + RangeOp::Gte, + RangeOp::Lte, + (RangeOp::Lte, RangeOp::Gte), + )? + .is_none() + { + return Ok(Some(ExprRet::CtxKilled(KilledKind::Revert))); + } + } + } + + Ok(None) + } +} diff --git a/src/context/exprs/cmp.rs b/crates/solc-expressions/src/cmp.rs similarity index 70% rename from src/context/exprs/cmp.rs rename to crates/solc-expressions/src/cmp.rs index f7fe03a1..e66b53f7 100644 --- a/src/context/exprs/cmp.rs +++ b/crates/solc-expressions/src/cmp.rs @@ -1,43 +1,54 @@ -use crate::context::exprs::IntoExprErr; -use crate::context::ExprErr; -use crate::ContextBuilder; -use shared::analyzer::GraphError; +use crate::{ContextBuilder, ExprErr, ExpressionParser, IntoExprErr}; -use shared::{ - analyzer::AnalyzerLike, - context::*, - nodes::*, - range::{ - elem::{RangeElem, RangeOp}, - elem_ty::{Elem, RangeConcrete, RangeExpr}, - Range, SolcRange, +use graph::{ + elem::*, + nodes::{ + BuiltInNode, Builtin, Concrete, ContextNode, ContextVar, ContextVarNode, ExprRet, + TmpConstruction, }, - Node, + AnalyzerBackend, GraphError, Node, Range, SolcRange, VarType, }; +use shared::RangeArena; use solang_parser::pt::{Expression, Loc}; use std::cmp::Ordering; -impl Cmp for T where T: AnalyzerLike + Sized {} -pub trait Cmp: AnalyzerLike + Sized { +impl Cmp for T where T: AnalyzerBackend + Sized {} +/// Handles comparator operations, i.e: `!` +pub trait Cmp: AnalyzerBackend + Sized { #[tracing::instrument(level = "trace", skip_all)] - fn not(&mut self, loc: Loc, lhs_expr: &Expression, ctx: ContextNode) -> Result<(), ExprErr> { - self.parse_ctx_expr(lhs_expr, ctx)?; - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { + fn not( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + lhs_expr: &Expression, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + self.parse_ctx_expr(arena, lhs_expr, ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { let Some(lhs) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoRhs(loc, "Not operation had no element".to_string())) + return Err(ExprErr::NoRhs( + loc, + "Not operation had no element".to_string(), + )); }; if matches!(lhs, ExprRet::CtxKilled(_)) { ctx.push_expr(lhs, analyzer).into_expr_err(loc)?; return Ok(()); } - analyzer.not_inner(ctx, loc, lhs.flatten()) + analyzer.not_inner(arena, ctx, loc, lhs.flatten()) }) } #[tracing::instrument(level = "trace", skip_all)] - fn not_inner(&mut self, ctx: ContextNode, loc: Loc, lhs_expr: ExprRet) -> Result<(), ExprErr> { + fn not_inner( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + lhs_expr: ExprRet, + ) -> Result<(), ExprErr> { match lhs_expr { ExprRet::CtxKilled(kind) => { ctx.kill(self, loc, kind).into_expr_err(loc)?; @@ -47,7 +58,16 @@ pub trait Cmp: AnalyzerLike + Sized { ExprRet::Single(lhs) | ExprRet::SingleLiteral(lhs) => { let lhs_cvar = ContextVarNode::from(lhs); tracing::trace!("not: {}", lhs_cvar.display_name(self).into_expr_err(loc)?); - let range = self.not_eval(ctx, loc, lhs_cvar)?; + + let mut elem = Elem::Expr(RangeExpr::new( + Elem::from(lhs_cvar), + RangeOp::Not, + Elem::Null, + )); + elem.arenaize(self, arena); + let mut range = SolcRange::new(elem.clone(), elem, vec![]); + + range.cache_eval(self, arena).into_expr_err(loc)?; let out_var = ContextVar { loc: Some(loc), name: format!( @@ -59,6 +79,7 @@ pub trait Cmp: AnalyzerLike + Sized { storage: None, is_tmp: true, tmp_of: Some(TmpConstruction::new(lhs_cvar, RangeOp::Not, None)), + dep_on: Some(lhs_cvar.dependent_on(self, true).into_expr_err(loc)?), is_symbolic: lhs_cvar.is_symbolic(self).into_expr_err(loc)?, is_return: false, ty: VarType::BuiltIn( @@ -88,17 +109,21 @@ pub trait Cmp: AnalyzerLike + Sized { #[tracing::instrument(level = "trace", skip_all)] fn cmp( &mut self, + arena: &mut RangeArena>, loc: Loc, lhs_expr: &Expression, op: RangeOp, rhs_expr: &Expression, ctx: ContextNode, ) -> Result<(), ExprErr> { - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - analyzer.parse_ctx_expr(rhs_expr, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + analyzer.parse_ctx_expr(arena, rhs_expr, ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Cmp operation had no right hand side".to_string())) + return Err(ExprErr::NoRhs( + loc, + "Cmp operation had no right hand side".to_string(), + )); }; let rhs_paths = rhs_paths.flatten(); @@ -107,17 +132,21 @@ pub trait Cmp: AnalyzerLike + Sized { return Ok(()); } - analyzer.parse_ctx_expr(lhs_expr, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Cmp operation had no left hand side".to_string())) + analyzer.parse_ctx_expr(arena, lhs_expr, ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
+ else { + return Err(ExprErr::NoLhs( + loc, + "Cmp operation had no left hand side".to_string(), + )); }; if matches!(lhs_paths, ExprRet::CtxKilled(_)) { ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; return Ok(()); } - analyzer.cmp_inner(ctx, loc, &lhs_paths.flatten(), op, &rhs_paths) + analyzer.cmp_inner(arena, ctx, loc, &lhs_paths.flatten(), op, &rhs_paths) }) }) }) @@ -126,6 +155,7 @@ pub trait Cmp: AnalyzerLike + Sized { #[tracing::instrument(level = "trace", skip_all)] fn cmp_inner( &mut self, + arena: &mut RangeArena>, ctx: ContextNode, loc: Loc, lhs_paths: &ExprRet, @@ -138,14 +168,15 @@ pub trait Cmp: AnalyzerLike + Sized { ContextVarNode::from(*lhs) .literal_cast_from(&ContextVarNode::from(*rhs), self) .into_expr_err(loc)?; - self.cmp_inner(ctx, loc, &ExprRet::Single(*rhs), op, rhs_paths) + self.cmp_inner(arena, ctx, loc, &ExprRet::Single(*rhs), op, rhs_paths) } (ExprRet::SingleLiteral(lhs), ExprRet::SingleLiteral(rhs)) => { let lhs_cvar = ContextVarNode::from(*lhs).latest_version(self); let rhs_cvar = ContextVarNode::from(*rhs).latest_version(self); - lhs_cvar.try_increase_size(self).into_expr_err(loc)?; - rhs_cvar.try_increase_size(self).into_expr_err(loc)?; + lhs_cvar.try_increase_size(self, arena).into_expr_err(loc)?; + rhs_cvar.try_increase_size(self, arena).into_expr_err(loc)?; self.cmp_inner( + arena, ctx, loc, &ExprRet::Single(lhs_cvar.into()), @@ -157,7 +188,7 @@ pub trait Cmp: AnalyzerLike + Sized { ContextVarNode::from(*rhs) .literal_cast_from(&ContextVarNode::from(*lhs), self) .into_expr_err(loc)?; - self.cmp_inner(ctx, loc, lhs_paths, op, &ExprRet::Single(*rhs)) + self.cmp_inner(arena, ctx, loc, lhs_paths, op, &ExprRet::Single(*rhs)) } (ExprRet::Single(lhs), ExprRet::Single(rhs)) => { let lhs_cvar = ContextVarNode::from(*lhs); @@ -169,34 +200,20 @@ pub trait Cmp: AnalyzerLike + Sized { rhs_cvar.display_name(self).unwrap() ); let range = { - let elem = Elem::Expr(RangeExpr { - minimized: None, - maximized: None, - lhs: Box::new(Elem::from(lhs_cvar)), + let elem = Elem::Expr(RangeExpr::new( + Elem::from(lhs_cvar), op, - rhs: Box::new(Elem::from(rhs_cvar)), - }); + Elem::from(rhs_cvar), + )); let exclusions = lhs_cvar .ref_range(self) .into_expr_err(loc)? 
.expect("No lhs range") - .range_exclusions(); + .exclusions + .clone(); SolcRange::new(elem.clone(), elem, exclusions) }; - // println!("{:?}", range.evaled_range_max(self)); - // println!("{:?}", range.evaled_range_min(self)); - - // println!( - // "cmp: {} {} {}, [{}, {}], [{}, {}] ", - // lhs_cvar.name(self).into_expr_err(loc)?, - // op.to_string(), - // rhs_cvar.name(self).into_expr_err(loc)?, - // lhs_cvar.evaled_range_min(self).into_expr_err(loc)?.unwrap().to_range_string(false, self).s, - // lhs_cvar.evaled_range_max(self).into_expr_err(loc)?.unwrap().to_range_string(true, self).s, - // rhs_cvar.evaled_range_min(self).into_expr_err(loc)?.unwrap().to_range_string(false, self).s, - // rhs_cvar.evaled_range_max(self).into_expr_err(loc)?.unwrap().to_range_string(true, self).s - // ); let out_var = ContextVar { loc: Some(loc), @@ -223,6 +240,11 @@ pub trait Cmp: AnalyzerLike + Sized { .into_expr_err(loc)?, is_return: false, tmp_of: Some(TmpConstruction::new(lhs_cvar, op, Some(rhs_cvar))), + dep_on: { + let mut deps = lhs_cvar.dependent_on(self, true).into_expr_err(loc)?; + deps.extend(rhs_cvar.dependent_on(self, true).into_expr_err(loc)?); + Some(deps) + }, ty: VarType::BuiltIn( BuiltInNode::from(self.builtin_or_add(Builtin::Bool)), Some(range), @@ -238,13 +260,13 @@ pub trait Cmp: AnalyzerLike + Sized { (l @ ExprRet::Single(_lhs), ExprRet::Multi(rhs_sides)) => { rhs_sides .iter() - .try_for_each(|expr_ret| self.cmp_inner(ctx, loc, l, op, expr_ret))?; + .try_for_each(|expr_ret| self.cmp_inner(arena, ctx, loc, l, op, expr_ret))?; Ok(()) } (ExprRet::Multi(lhs_sides), r @ ExprRet::Single(_)) => { lhs_sides .iter() - .try_for_each(|expr_ret| self.cmp_inner(ctx, loc, expr_ret, op, r))?; + .try_for_each(|expr_ret| self.cmp_inner(arena, ctx, loc, expr_ret, op, r))?; Ok(()) } (ExprRet::Multi(lhs_sides), ExprRet::Multi(rhs_sides)) => { @@ -252,13 +274,13 @@ pub trait Cmp: AnalyzerLike + Sized { if lhs_sides.len() == rhs_sides.len() { lhs_sides.iter().zip(rhs_sides.iter()).try_for_each( |(lhs_expr_ret, rhs_expr_ret)| { - self.cmp_inner(ctx, loc, lhs_expr_ret, op, rhs_expr_ret) + self.cmp_inner(arena, ctx, loc, lhs_expr_ret, op, rhs_expr_ret) }, )?; Ok(()) } else { rhs_sides.iter().try_for_each(|rhs_expr_ret| { - self.cmp_inner(ctx, loc, lhs_paths, op, rhs_expr_ret) + self.cmp_inner(arena, ctx, loc, lhs_paths, op, rhs_expr_ret) })?; Ok(()) } @@ -270,47 +292,46 @@ pub trait Cmp: AnalyzerLike + Sized { } } - fn not_eval( - &self, - _ctx: ContextNode, - loc: Loc, - lhs_cvar: ContextVarNode, - ) -> Result { - if let Some(lhs_range) = lhs_cvar.range(self).into_expr_err(loc)? { - let lhs_min = lhs_range.evaled_range_min(self).into_expr_err(loc)?; + // fn not_eval( + // &mut self, + // _ctx: ContextNode, + // loc: Loc, + // lhs_cvar: ContextVarNode, + // ) -> Result { + // if let Some(lhs_range) = lhs_cvar.ref_range(self).into_expr_err(loc)? { + // let lhs_min = lhs_range.evaled_range_min(self, arena).into_expr_err(loc)?; - // invert - if lhs_min.range_eq(&lhs_range.evaled_range_max(self).into_expr_err(loc)?) 
{ - let val = Elem::Expr(RangeExpr { - minimized: None, - maximized: None, - lhs: Box::new(lhs_range.range_min().into_owned()), - op: RangeOp::Not, - rhs: Box::new(Elem::Null), - }); + // // invert + // if lhs_min.range_eq(&lhs_range.minimize(self, arena).into_expr_err(loc)?, self) { + // let val = Elem::Expr(RangeExpr::new( + // lhs_range.range_min().into_owned(), + // RangeOp::Not, + // Elem::Null, + // )); - return Ok(SolcRange::new(val.clone(), val, lhs_range.exclusions)); - } - } + // return Ok(SolcRange::new(val.clone(), val, lhs_range.exclusions.clone())); + // } + // } - let min = RangeConcrete { - val: Concrete::Bool(false), - loc, - }; + // let min = Elem::Concrete(RangeConcrete { + // val: Concrete::Bool(false), + // loc, + // }).arenaize(self); - let max = RangeConcrete { - val: Concrete::Bool(true), - loc, - }; - Ok(SolcRange::new( - Elem::Concrete(min), - Elem::Concrete(max), - vec![], - )) - } + // let max = Elem::Concrete(RangeConcrete { + // val: Concrete::Bool(true), + // loc, + // }).arenaize(self); + // Ok(SolcRange::new( + // min, + // max, + // vec![], + // )) + // } fn range_eval( &self, + arena: &mut RangeArena>, _ctx: ContextNode, lhs_cvar: ContextVarNode, rhs_cvar: ContextVarNode, @@ -323,17 +344,17 @@ pub trait Cmp: AnalyzerLike + Sized { // if lhs_max < rhs_min, we know this cmp will evaluate to // true - let lhs_max = lhs_range.evaled_range_max(self)?; - let rhs_min = rhs_range.evaled_range_min(self)?; - if let Some(Ordering::Less) = lhs_max.range_ord(&rhs_min) { + let lhs_max = lhs_range.evaled_range_max(self, arena)?; + let rhs_min = rhs_range.evaled_range_min(self, arena)?; + if let Some(Ordering::Less) = lhs_max.range_ord(&rhs_min, arena) { return Ok(true.into()); } // Similarly if lhs_min >= rhs_max, we know this cmp will evaluate to // false - let lhs_min = lhs_range.evaled_range_min(self)?; - let rhs_max = rhs_range.evaled_range_max(self)?; - match lhs_min.range_ord(&rhs_max) { + let lhs_min = lhs_range.evaled_range_min(self, arena)?; + let rhs_max = rhs_range.evaled_range_max(self, arena)?; + match lhs_min.range_ord(&rhs_max, arena) { Some(Ordering::Greater) => { return Ok(false.into()); } @@ -346,17 +367,17 @@ pub trait Cmp: AnalyzerLike + Sized { RangeOp::Gt => { // if lhs_min > rhs_max, we know this cmp will evaluate to // true - let lhs_min = lhs_range.evaled_range_min(self)?; - let rhs_max = rhs_range.evaled_range_max(self)?; - if let Some(Ordering::Greater) = lhs_min.range_ord(&rhs_max) { + let lhs_min = lhs_range.evaled_range_min(self, arena)?; + let rhs_max = rhs_range.evaled_range_max(self, arena)?; + if let Some(Ordering::Greater) = lhs_min.range_ord(&rhs_max, arena) { return Ok(true.into()); } // if lhs_max <= rhs_min, we know this cmp will evaluate to // false - let lhs_max = lhs_range.evaled_range_max(self)?; - let rhs_min = rhs_range.evaled_range_min(self)?; - match lhs_max.range_ord(&rhs_min) { + let lhs_max = lhs_range.evaled_range_max(self, arena)?; + let rhs_min = rhs_range.evaled_range_min(self, arena)?; + match lhs_max.range_ord(&rhs_min, arena) { Some(Ordering::Less) => { return Ok(false.into()); } @@ -369,9 +390,9 @@ pub trait Cmp: AnalyzerLike + Sized { RangeOp::Lte => { // if lhs_max <= rhs_min, we know this cmp will evaluate to // true - let lhs_max = lhs_range.evaled_range_max(self)?; - let rhs_min = rhs_range.evaled_range_min(self)?; - match lhs_max.range_ord(&rhs_min) { + let lhs_max = lhs_range.evaled_range_max(self, arena)?; + let rhs_min = rhs_range.evaled_range_min(self, arena)?; + match lhs_max.range_ord(&rhs_min, arena) 
{ Some(Ordering::Less) => { return Ok(true.into()); } @@ -383,18 +404,18 @@ pub trait Cmp: AnalyzerLike + Sized { // Similarly if lhs_min > rhs_max, we know this cmp will evaluate to // false - let lhs_min = lhs_range.evaled_range_min(self)?; - let rhs_max = rhs_range.evaled_range_max(self)?; - if let Some(Ordering::Greater) = lhs_min.range_ord(&rhs_max) { + let lhs_min = lhs_range.evaled_range_min(self, arena)?; + let rhs_max = rhs_range.evaled_range_max(self, arena)?; + if let Some(Ordering::Greater) = lhs_min.range_ord(&rhs_max, arena) { return Ok(false.into()); } } RangeOp::Gte => { // if lhs_min >= rhs_max, we know this cmp will evaluate to // true - let lhs_min = lhs_range.evaled_range_min(self)?; - let rhs_max = rhs_range.evaled_range_max(self)?; - match lhs_min.range_ord(&rhs_max) { + let lhs_min = lhs_range.evaled_range_min(self, arena)?; + let rhs_max = rhs_range.evaled_range_max(self, arena)?; + match lhs_min.range_ord(&rhs_max, arena) { Some(Ordering::Greater) => { return Ok(true.into()); } @@ -406,30 +427,30 @@ pub trait Cmp: AnalyzerLike + Sized { // if lhs_max < rhs_min, we know this cmp will evaluate to // false - let lhs_max = lhs_range.evaled_range_max(self)?; - let rhs_min = rhs_range.evaled_range_min(self)?; - if let Some(Ordering::Less) = lhs_max.range_ord(&rhs_min) { + let lhs_max = lhs_range.evaled_range_max(self, arena)?; + let rhs_min = rhs_range.evaled_range_min(self, arena)?; + if let Some(Ordering::Less) = lhs_max.range_ord(&rhs_min, arena) { return Ok(false.into()); } } RangeOp::Eq => { // if all elems are equal we know its true // we dont know anything else - let lhs_min = lhs_range.evaled_range_min(self)?; - let lhs_max = lhs_range.evaled_range_max(self)?; - let rhs_min = rhs_range.evaled_range_min(self)?; - let rhs_max = rhs_range.evaled_range_max(self)?; + let lhs_min = lhs_range.evaled_range_min(self, arena)?; + let lhs_max = lhs_range.evaled_range_max(self, arena)?; + let rhs_min = rhs_range.evaled_range_min(self, arena)?; + let rhs_max = rhs_range.evaled_range_max(self, arena)?; if let ( Some(Ordering::Equal), Some(Ordering::Equal), Some(Ordering::Equal), ) = ( // check lhs_min == lhs_max, ensures lhs is const - lhs_min.range_ord(&lhs_max), + lhs_min.range_ord(&lhs_max, arena), // check lhs_min == rhs_min, checks if lhs == rhs - lhs_min.range_ord(&rhs_min), + lhs_min.range_ord(&rhs_min, arena), // check rhs_min == rhs_max, ensures rhs is const - rhs_min.range_ord(&rhs_max), + rhs_min.range_ord(&rhs_max, arena), ) { return Ok(true.into()); } @@ -437,21 +458,21 @@ pub trait Cmp: AnalyzerLike + Sized { RangeOp::Neq => { // if all elems are equal we know its true // we dont know anything else - let lhs_min = lhs_range.evaled_range_min(self)?; - let lhs_max = lhs_range.evaled_range_max(self)?; - let rhs_min = rhs_range.evaled_range_min(self)?; - let rhs_max = rhs_range.evaled_range_max(self)?; + let lhs_min = lhs_range.evaled_range_min(self, arena)?; + let lhs_max = lhs_range.evaled_range_max(self, arena)?; + let rhs_min = rhs_range.evaled_range_min(self, arena)?; + let rhs_max = rhs_range.evaled_range_max(self, arena)?; if let ( Some(Ordering::Equal), Some(Ordering::Equal), Some(Ordering::Equal), ) = ( // check lhs_min == lhs_max, ensures lhs is const - lhs_min.range_ord(&lhs_max), + lhs_min.range_ord(&lhs_max, arena), // check lhs_min == rhs_min, checks if lhs == rhs - lhs_min.range_ord(&rhs_min), + lhs_min.range_ord(&rhs_min, arena), // check rhs_min == rhs_max, ensures rhs is const - rhs_min.range_ord(&rhs_max), + rhs_min.range_ord(&rhs_max, arena), ) { 
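+                        // lhs and rhs are each constant and equal to one another,
+                        // so `lhs != rhs` is provably false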
return Ok(false.into()); } diff --git a/src/context/exprs/cond_op.rs b/crates/solc-expressions/src/cond_op.rs similarity index 58% rename from src/context/exprs/cond_op.rs rename to crates/solc-expressions/src/cond_op.rs index 3938a0d3..c2d0f178 100644 --- a/src/context/exprs/cond_op.rs +++ b/crates/solc-expressions/src/cond_op.rs @@ -1,23 +1,33 @@ -use crate::context::exprs::IntoExprErr; -use crate::context::ExprErr; -use crate::{exprs::Require, AnalyzerLike, ContextBuilder}; -use shared::{context::*, Edge, Node, NodeIdx}; +use crate::{ + require::Require, ContextBuilder, ExprErr, ExpressionParser, IntoExprErr, StatementParser, +}; + +use graph::{ + elem::Elem, + nodes::{Concrete, Context, ContextNode}, + AnalyzerBackend, ContextEdge, Edge, Node, +}; +use shared::{NodeIdx, RangeArena}; use solang_parser::pt::CodeLocation; use solang_parser::pt::{Expression, Loc, Statement}; -impl CondOp for T where T: AnalyzerLike + Require + Sized {} -pub trait CondOp: AnalyzerLike + Require + Sized { +impl CondOp for T where T: AnalyzerBackend + Require + Sized +{} +/// Handles conditional operations, like `if .. else ..` and ternary operations +pub trait CondOp: AnalyzerBackend + Require + Sized { #[tracing::instrument(level = "trace", skip_all)] + /// Handles a conditional operation like `if .. else ..` fn cond_op_stmt( &mut self, + arena: &mut RangeArena>, loc: Loc, if_expr: &Expression, true_stmt: &Statement, false_stmt: &Option>, ctx: ContextNode, ) -> Result<(), ExprErr> { - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { let tctx = Context::new_subctx(ctx, None, loc, Some("true"), None, false, analyzer, None) .into_expr_err(loc)?; @@ -28,6 +38,12 @@ pub trait CondOp: AnalyzerLike + Require + let false_subctx = ContextNode::from(analyzer.add_node(Node::Context(fctx))); ctx.set_child_fork(true_subctx, false_subctx, analyzer) .into_expr_err(loc)?; + true_subctx + .set_continuation_ctx(analyzer, ctx, "fork_true") + .into_expr_err(loc)?; + false_subctx + .set_continuation_ctx(analyzer, ctx, "fork_false") + .into_expr_err(loc)?; let ctx_fork = analyzer.add_node(Node::ContextFork); analyzer.add_edge(ctx_fork, ctx, Edge::Context(ContextEdge::ContextFork)); analyzer.add_edge( @@ -42,9 +58,13 @@ pub trait CondOp: AnalyzerLike + Require + ); // we want to check if the true branch is possible to take - analyzer.true_fork_if_cvar(if_expr.clone(), true_subctx)?; + analyzer.true_fork_if_cvar(arena, if_expr.clone(), true_subctx)?; let mut true_killed = false; - if true_subctx.is_killed(analyzer).into_expr_err(loc)? { + if true_subctx.is_killed(analyzer).into_expr_err(loc)? + || true_subctx + .unreachable(analyzer, arena) + .into_expr_err(loc)? + { // it was killed, therefore true branch is unreachable. // since it is unreachable, we want to not create // unnecessary subcontexts @@ -52,9 +72,13 @@ pub trait CondOp: AnalyzerLike + Require + } // we want to check if the false branch is possible to take - analyzer.false_fork_if_cvar(if_expr.clone(), false_subctx)?; + analyzer.false_fork_if_cvar(arena, if_expr.clone(), false_subctx)?; let mut false_killed = false; - if false_subctx.is_killed(analyzer).into_expr_err(loc)? { + if false_subctx.is_killed(analyzer).into_expr_err(loc)? + || false_subctx + .unreachable(analyzer, arena) + .into_expr_err(loc)? + { // it was killed, therefore true branch is unreachable. 
// since it is unreachable, we want to not create // unnecessary subcontexts @@ -64,27 +88,36 @@ pub trait CondOp: AnalyzerLike + Require + match (true_killed, false_killed) { (true, true) => { // both have been killed, delete the child and dont process the bodies + // println!("BOTH KILLED"); ctx.delete_child(analyzer).into_expr_err(loc)?; } (true, false) => { + // println!("TRUE KILLED"); // the true context has been killed, delete child, process the false fork expression // in the parent context and parse the false body ctx.delete_child(analyzer).into_expr_err(loc)?; - analyzer.false_fork_if_cvar(if_expr.clone(), ctx)?; + analyzer.false_fork_if_cvar(arena, if_expr.clone(), ctx)?; if let Some(false_stmt) = false_stmt { - return analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, _loc| { - analyzer.parse_ctx_statement(false_stmt, false, Some(ctx)); - Ok(()) - }); + return analyzer.apply_to_edges( + ctx, + loc, + arena, + &|analyzer, arena, ctx, _loc| { + analyzer.parse_ctx_statement(arena, false_stmt, false, Some(ctx)); + Ok(()) + }, + ); } } (false, true) => { + // println!("FALSE KILLED"); // the false context has been killed, delete child, process the true fork expression // in the parent context and parse the true body ctx.delete_child(analyzer).into_expr_err(loc)?; - analyzer.true_fork_if_cvar(if_expr.clone(), ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, _loc| { + analyzer.true_fork_if_cvar(arena, if_expr.clone(), ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, _loc| { analyzer.parse_ctx_statement( + arena, true_stmt, ctx.unchecked(analyzer).into_expr_err(loc)?, Some(ctx), @@ -93,21 +126,29 @@ pub trait CondOp: AnalyzerLike + Require + })?; } (false, false) => { + // println!("NEITHER KILLED"); // both branches are reachable. process each body - analyzer.apply_to_edges(true_subctx, loc, &|analyzer, ctx, _loc| { - analyzer.parse_ctx_statement( - true_stmt, - ctx.unchecked(analyzer).into_expr_err(loc)?, - Some(ctx), - ); - Ok(()) - })?; + analyzer.apply_to_edges( + true_subctx, + loc, + arena, + &|analyzer, arena, ctx, _loc| { + analyzer.parse_ctx_statement( + arena, + true_stmt, + ctx.unchecked(analyzer).into_expr_err(loc)?, + Some(ctx), + ); + Ok(()) + }, + )?; if let Some(false_stmt) = false_stmt { return analyzer.apply_to_edges( false_subctx, loc, - &|analyzer, ctx, _loc| { - analyzer.parse_ctx_statement(false_stmt, false, Some(ctx)); + arena, + &|analyzer, arena, ctx, _loc| { + analyzer.parse_ctx_statement(arena, false_stmt, false, Some(ctx)); Ok(()) }, ); @@ -118,11 +159,13 @@ pub trait CondOp: AnalyzerLike + Require + }) } + /// Handles a conditional expression like `if .. else ..` /// When we have a conditional operator, we create a fork in the context. One side of the fork is /// if the expression is true, the other is if it is false. 
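+    /// For example, in a ternary such as `x > 5 ? a : b`, the "true" subcontext
+    /// requires `x > 5` and parses `a`, while the "false" subcontext requires the
+    /// inverted condition and parses `b`.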
#[tracing::instrument(level = "trace", skip_all)] fn cond_op_expr( &mut self, + arena: &mut RangeArena>, loc: Loc, if_expr: &Expression, true_expr: &Expression, @@ -130,7 +173,7 @@ pub trait CondOp: AnalyzerLike + Require + ctx: ContextNode, ) -> Result<(), ExprErr> { tracing::trace!("conditional operator"); - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { let tctx = Context::new_subctx(ctx, None, loc, Some("true"), None, false, analyzer, None) .into_expr_err(loc)?; @@ -141,6 +184,12 @@ pub trait CondOp: AnalyzerLike + Require + let false_subctx = ContextNode::from(analyzer.add_node(Node::Context(fctx))); ctx.set_child_fork(true_subctx, false_subctx, analyzer) .into_expr_err(loc)?; + true_subctx + .set_continuation_ctx(analyzer, ctx, "fork_true") + .into_expr_err(loc)?; + false_subctx + .set_continuation_ctx(analyzer, ctx, "fork_false") + .into_expr_err(loc)?; let ctx_fork = analyzer.add_node(Node::ContextFork); analyzer.add_edge(ctx_fork, ctx, Edge::Context(ContextEdge::ContextFork)); analyzer.add_edge( @@ -154,14 +203,14 @@ pub trait CondOp: AnalyzerLike + Require + Edge::Context(ContextEdge::Subcontext), ); - analyzer.true_fork_if_cvar(if_expr.clone(), true_subctx)?; - analyzer.apply_to_edges(true_subctx, loc, &|analyzer, ctx, _loc| { - analyzer.parse_ctx_expr(true_expr, ctx) + analyzer.true_fork_if_cvar(arena, if_expr.clone(), true_subctx)?; + analyzer.apply_to_edges(true_subctx, loc, arena, &|analyzer, arena, ctx, _loc| { + analyzer.parse_ctx_expr(arena, true_expr, ctx) })?; - analyzer.false_fork_if_cvar(if_expr.clone(), false_subctx)?; - analyzer.apply_to_edges(false_subctx, loc, &|analyzer, ctx, _loc| { - analyzer.parse_ctx_expr(false_expr, ctx) + analyzer.false_fork_if_cvar(arena, if_expr.clone(), false_subctx)?; + analyzer.apply_to_edges(false_subctx, loc, arena, &|analyzer, arena, ctx, _loc| { + analyzer.parse_ctx_expr(arena, false_expr, ctx) }) }) } @@ -169,25 +218,32 @@ pub trait CondOp: AnalyzerLike + Require + /// Creates the true_fork cvar (updates bounds assuming its true) fn true_fork_if_cvar( &mut self, + arena: &mut RangeArena>, if_expr: Expression, true_fork_ctx: ContextNode, ) -> Result<(), ExprErr> { - self.apply_to_edges(true_fork_ctx, if_expr.loc(), &|analyzer, ctx, _loc| { - analyzer.handle_require(&[if_expr.clone()], ctx)?; - Ok(()) - }) + self.apply_to_edges( + true_fork_ctx, + if_expr.loc(), + arena, + &|analyzer, arena, ctx, _loc| { + analyzer.handle_require(arena, &[if_expr.clone()], ctx)?; + Ok(()) + }, + ) } /// Creates the false_fork cvar (inverts the expression and sets the bounds assuming its false) fn false_fork_if_cvar( &mut self, + arena: &mut RangeArena>, if_expr: Expression, false_fork_ctx: ContextNode, ) -> Result<(), ExprErr> { let loc = if_expr.loc(); let inv_if_expr = self.inverse_expr(if_expr); - self.apply_to_edges(false_fork_ctx, loc, &|analyzer, ctx, _loc| { - analyzer.handle_require(&[inv_if_expr.clone()], ctx)?; + self.apply_to_edges(false_fork_ctx, loc, arena, &|analyzer, arena, ctx, _loc| { + analyzer.handle_require(arena, &[inv_if_expr.clone()], ctx)?; Ok(()) }) } diff --git a/crates/solc-expressions/src/context_builder/expr.rs b/crates/solc-expressions/src/context_builder/expr.rs new file mode 100644 index 00000000..ded16451 --- /dev/null +++ b/crates/solc-expressions/src/context_builder/expr.rs @@ -0,0 +1,436 @@ +use crate::func_call::intrinsic_call::IntrinsicFuncCaller; +use crate::{ + context_builder::ContextBuilder, func_call::func_caller::FuncCaller, 
variable::Variable, + ExprErr, ExprTyParser, IntoExprErr, +}; + +use graph::{ + elem::*, + nodes::{Builtin, Concrete, ContextNode, ContextVar, ContextVarNode, ExprRet}, + AnalyzerBackend, ContextEdge, Edge, Node, +}; +use shared::RangeArena; + +use ethers_core::types::I256; +use solang_parser::{ + helpers::CodeLocation, + pt::{Expression, Loc}, +}; + +impl ExpressionParser for T where + T: AnalyzerBackend + Sized + ExprTyParser +{ +} + +/// Solidity expression parser +pub trait ExpressionParser: + AnalyzerBackend + Sized + ExprTyParser +{ + /// Perform setup for parsing an expression + fn parse_ctx_expr( + &mut self, + arena: &mut RangeArena>, + expr: &Expression, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + if !ctx.killed_or_ret(self).unwrap() { + let edges = ctx.live_edges(self).into_expr_err(expr.loc())?; + if edges.is_empty() { + self.parse_ctx_expr_inner(arena, expr, ctx) + } else { + edges + .iter() + .try_for_each(|fork_ctx| self.parse_ctx_expr(arena, expr, *fork_ctx))?; + Ok(()) + } + } else { + Ok(()) + } + } + + #[tracing::instrument(level = "trace", skip_all, fields(ctx = %ctx.path(self).replace('.', "\n\t.")))] + /// Perform parsing of an expression + fn parse_ctx_expr_inner( + &mut self, + arena: &mut RangeArena>, + expr: &Expression, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + use Expression::*; + // tracing::trace!( + // "ctx: {}, current stack: {:?}, \nexpr: {:?}\n", + // ctx.underlying(self).unwrap().path, + // ctx.underlying(self) + // .unwrap() + // .expr_ret_stack + // .iter() + // .map(|i| i.debug_str(self)) + // .collect::>(), + // expr + // ); + match expr { + // literals + NumberLiteral(loc, int, exp, unit) => { + self.number_literal(ctx, *loc, int, exp, false, unit) + } + AddressLiteral(loc, addr) => self.address_literal(ctx, *loc, addr), + StringLiteral(lits) => lits + .iter() + .try_for_each(|lit| self.string_literal(ctx, lit.loc, &lit.string)), + BoolLiteral(loc, b) => self.bool_literal(ctx, *loc, *b), + HexNumberLiteral(loc, b, _unit) => self.hex_num_literal(ctx, *loc, b, false), + HexLiteral(hexes) => self.hex_literals(ctx, hexes), + RationalNumberLiteral(loc, integer, fraction, exp, unit) => { + self.rational_number_literal(arena, ctx, *loc, integer, fraction, exp, unit) + } + Negate(_loc, expr) => match &**expr { + NumberLiteral(loc, int, exp, unit) => { + self.number_literal(ctx, *loc, int, exp, true, unit) + } + HexNumberLiteral(loc, b, _unit) => self.hex_num_literal(ctx, *loc, b, true), + e => { + self.parse_ctx_expr(arena, e, ctx)?; + self.apply_to_edges(ctx, e.loc(), arena, &|analyzer, arena, ctx, loc| { + tracing::trace!("Negate variable pop"); + let Some(rhs_paths) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "No variable present to negate".to_string(), + )); + }; + if matches!(rhs_paths, ExprRet::CtxKilled(_)) { + ctx.push_expr(rhs_paths, analyzer).into_expr_err(loc)?; + return Ok(()); + } + + // Solidity is dumb and used to allow negation of unsigned integers. + // That means we have to cast this as a int256. + let var = rhs_paths.expect_single().into_expr_err(loc)?; + + let zero = + analyzer.add_node(Node::Concrete(Concrete::from(I256::from(0i32)))); + let zero = ContextVar::new_from_concrete( + Loc::Implicit, + ctx, + zero.into(), + analyzer, + ) + .into_expr_err(loc)?; + let zero = analyzer.add_node(Node::ContextVar(zero)); + let new_underlying = ContextVarNode::from(var) + .underlying(analyzer) + .into_expr_err(loc)? 
+ .clone() + .as_cast_tmp(loc, ctx, Builtin::Int(256), analyzer) + .into_expr_err(loc)?; + let node = analyzer.add_node(Node::ContextVar(new_underlying)); + ctx.add_var(node.into(), analyzer).into_expr_err(loc)?; + analyzer.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + + ContextVarNode::from(node) + .cast_from(&ContextVarNode::from(zero), analyzer, arena) + .into_expr_err(loc)?; + + let lhs_paths = ExprRet::Single(zero); + analyzer.op_match( + arena, + ctx, + loc, + &lhs_paths, + &ExprRet::Single( + ContextVarNode::from(node).latest_version(analyzer).into(), + ), + RangeOp::Sub(true), + false, + ) + }) + } // e => todo!("UnaryMinus unexpected rhs: {e:?}"), + }, + UnaryPlus(_loc, e) => todo!("UnaryPlus unexpected rhs: {e:?}"), + + // Binary ops + Power(loc, lhs_expr, rhs_expr) => { + self.op_expr(arena, *loc, lhs_expr, rhs_expr, ctx, RangeOp::Exp, false) + } + Add(loc, lhs_expr, rhs_expr) => self.op_expr( + arena, + *loc, + lhs_expr, + rhs_expr, + ctx, + RangeOp::Add(ctx.unchecked(self).into_expr_err(*loc)?), + false, + ), + AssignAdd(loc, lhs_expr, rhs_expr) => self.op_expr( + arena, + *loc, + lhs_expr, + rhs_expr, + ctx, + RangeOp::Add(ctx.unchecked(self).into_expr_err(*loc)?), + true, + ), + Subtract(loc, lhs_expr, rhs_expr) => self.op_expr( + arena, + *loc, + lhs_expr, + rhs_expr, + ctx, + RangeOp::Sub(ctx.unchecked(self).into_expr_err(*loc)?), + false, + ), + AssignSubtract(loc, lhs_expr, rhs_expr) => self.op_expr( + arena, + *loc, + lhs_expr, + rhs_expr, + ctx, + RangeOp::Sub(ctx.unchecked(self).into_expr_err(*loc)?), + true, + ), + Multiply(loc, lhs_expr, rhs_expr) => self.op_expr( + arena, + *loc, + lhs_expr, + rhs_expr, + ctx, + RangeOp::Mul(ctx.unchecked(self).into_expr_err(*loc)?), + false, + ), + AssignMultiply(loc, lhs_expr, rhs_expr) => self.op_expr( + arena, + *loc, + lhs_expr, + rhs_expr, + ctx, + RangeOp::Mul(ctx.unchecked(self).into_expr_err(*loc)?), + true, + ), + Divide(loc, lhs_expr, rhs_expr) => self.op_expr( + arena, + *loc, + lhs_expr, + rhs_expr, + ctx, + RangeOp::Div(false), + false, + ), + AssignDivide(loc, lhs_expr, rhs_expr) => self.op_expr( + arena, + *loc, + lhs_expr, + rhs_expr, + ctx, + RangeOp::Div(false), + true, + ), + Modulo(loc, lhs_expr, rhs_expr) => { + self.op_expr(arena, *loc, lhs_expr, rhs_expr, ctx, RangeOp::Mod, false) + } + AssignModulo(loc, lhs_expr, rhs_expr) => { + self.op_expr(arena, *loc, lhs_expr, rhs_expr, ctx, RangeOp::Mod, true) + } + ShiftLeft(loc, lhs_expr, rhs_expr) => { + self.op_expr(arena, *loc, lhs_expr, rhs_expr, ctx, RangeOp::Shl, false) + } + AssignShiftLeft(loc, lhs_expr, rhs_expr) => { + self.op_expr(arena, *loc, lhs_expr, rhs_expr, ctx, RangeOp::Shl, true) + } + ShiftRight(loc, lhs_expr, rhs_expr) => { + self.op_expr(arena, *loc, lhs_expr, rhs_expr, ctx, RangeOp::Shr, false) + } + AssignShiftRight(loc, lhs_expr, rhs_expr) => { + self.op_expr(arena, *loc, lhs_expr, rhs_expr, ctx, RangeOp::Shr, true) + } + ConditionalOperator(loc, if_expr, true_expr, false_expr) => { + self.cond_op_expr(arena, *loc, if_expr, true_expr, false_expr, ctx) + } + + // Bitwise ops + BitwiseAnd(loc, lhs_expr, rhs_expr) => { + self.op_expr(arena, *loc, lhs_expr, rhs_expr, ctx, RangeOp::BitAnd, false) + } + AssignAnd(loc, lhs_expr, rhs_expr) => { + self.op_expr(arena, *loc, lhs_expr, rhs_expr, ctx, RangeOp::BitAnd, true) + } + BitwiseXor(loc, lhs_expr, rhs_expr) => { + self.op_expr(arena, *loc, lhs_expr, rhs_expr, ctx, RangeOp::BitXor, false) + } + AssignXor(loc, lhs_expr, rhs_expr) => { + self.op_expr(arena, *loc, lhs_expr, rhs_expr, 
ctx, RangeOp::BitXor, true) + } + BitwiseOr(loc, lhs_expr, rhs_expr) => { + self.op_expr(arena, *loc, lhs_expr, rhs_expr, ctx, RangeOp::BitOr, false) + } + AssignOr(loc, lhs_expr, rhs_expr) => { + self.op_expr(arena, *loc, lhs_expr, rhs_expr, ctx, RangeOp::BitOr, true) + } + BitwiseNot(loc, lhs_expr) => self.bit_not(arena, *loc, lhs_expr, ctx), + + // assign + Assign(loc, lhs_expr, rhs_expr) => { + self.assign_exprs(arena, *loc, lhs_expr, rhs_expr, ctx) + } + List(loc, params) => self.list(arena, ctx, *loc, params), + // array + ArraySubscript(_loc, ty_expr, None) => self.array_ty(arena, ty_expr, ctx), + ArraySubscript(loc, ty_expr, Some(index_expr)) => { + self.index_into_array(arena, *loc, ty_expr, index_expr, ctx) + } + ArraySlice(loc, _lhs_expr, _maybe_middle_expr, _maybe_rhs) => Err(ExprErr::Todo( + *loc, + "Array slicing not currently supported".to_string(), + )), + ArrayLiteral(loc, _) => Err(ExprErr::Todo( + *loc, + "Array literal not currently supported".to_string(), + )), + + // Comparator + Equal(loc, lhs, rhs) => self.cmp(arena, *loc, lhs, RangeOp::Eq, rhs, ctx), + NotEqual(loc, lhs, rhs) => self.cmp(arena, *loc, lhs, RangeOp::Neq, rhs, ctx), + Less(loc, lhs, rhs) => self.cmp(arena, *loc, lhs, RangeOp::Lt, rhs, ctx), + More(loc, lhs, rhs) => self.cmp(arena, *loc, lhs, RangeOp::Gt, rhs, ctx), + LessEqual(loc, lhs, rhs) => self.cmp(arena, *loc, lhs, RangeOp::Lte, rhs, ctx), + MoreEqual(loc, lhs, rhs) => self.cmp(arena, *loc, lhs, RangeOp::Gte, rhs, ctx), + + // Logical + Not(loc, expr) => self.not(arena, *loc, expr, ctx), + And(loc, lhs, rhs) => self.cmp(arena, *loc, lhs, RangeOp::And, rhs, ctx), + Or(loc, lhs, rhs) => self.cmp(arena, *loc, lhs, RangeOp::Or, rhs, ctx), + + // Function calls + FunctionCallBlock(loc, _func_expr, _input_exprs) => { + // TODO: update msg node + Err(ExprErr::Todo( + *loc, + "Function call block is unsupported. We shouldn't have hit this code path" + .to_string(), + )) + } + NamedFunctionCall(loc, func_expr, input_args) => { + self.named_fn_call_expr(arena, ctx, loc, func_expr, input_args) + } + FunctionCall(loc, func_expr, input_exprs) => { + let updated_func_expr = match **func_expr { + FunctionCallBlock(_loc, ref inner_func_expr, ref _call_block) => { + // we dont currently handle the `{value: .. gas: ..}` msg updating + // println!("call block: {call_block:#?}"); + + // let mut tmp_msg = Msg { + + // } + // self.add_expr_err(ExprErr::FunctionCallBlockTodo(call_block.loc(), "Function call block is currently unsupported. 
Relevant changes on `msg` will not take effect".to_string())); + inner_func_expr.clone() + } + _ => func_expr.clone(), + }; + + self.fn_call_expr(arena, ctx, loc, &updated_func_expr, input_exprs) + } + // member + New(loc, expr) => { + match &**expr { + Expression::FunctionCall(_loc, func, inputs) => { + // parse the type + self.new_call(arena, loc, func, inputs, ctx) + } + _ => panic!("Bad new call"), + } + } + This(loc) => { + let var = ContextVar::new_from_contract( + *loc, + ctx.associated_contract(self).into_expr_err(*loc)?, + self, + ) + .into_expr_err(*loc)?; + let cvar = self.add_node(Node::ContextVar(var)); + ctx.add_var(cvar.into(), self).into_expr_err(*loc)?; + self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); + ctx.push_expr(ExprRet::Single(cvar), self) + .into_expr_err(*loc)?; + Ok(()) + } + MemberAccess(loc, member_expr, ident) => { + self.member_access(arena, *loc, member_expr, ident, ctx) + } + + Delete(loc, expr) => { + fn delete_match( + ctx: ContextNode, + loc: &Loc, + analyzer: &mut impl AnalyzerBackend, + ret: ExprRet, + ) { + match ret { + ExprRet::CtxKilled(kind) => { + let _ = ctx.kill(analyzer, *loc, kind); + } + ExprRet::Single(cvar) | ExprRet::SingleLiteral(cvar) => { + let mut new_var = + analyzer.advance_var_in_ctx(cvar.into(), *loc, ctx).unwrap(); + let res = new_var.sol_delete_range(analyzer).into_expr_err(*loc); + let _ = analyzer.add_if_err(res); + } + ExprRet::Multi(inner) => { + inner + .iter() + .for_each(|i| delete_match(ctx, loc, analyzer, i.clone())); + } + ExprRet::Null => {} + } + } + + self.parse_ctx_expr(arena, expr, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + tracing::trace!("Delete variable pop"); + let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs( + loc, + "Delete operation had no right hand side".to_string(), + )); + }; + + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + + delete_match(ctx, &loc, analyzer, ret); + Ok(()) + }) + } + + // de/increment stuff + PreIncrement(loc, expr) => self.pre_increment(arena, expr, *loc, ctx), + PostIncrement(loc, expr) => self.post_increment(arena, expr, *loc, ctx), + PreDecrement(loc, expr) => self.pre_decrement(arena, expr, *loc, ctx), + PostDecrement(loc, expr) => self.post_decrement(arena, expr, *loc, ctx), + + // Misc. 
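The `Delete` arm above walks an `ExprRet` tree and resets the range of every variable it reaches; the recursive traversal is the interesting part. A minimal stand-alone sketch of that pattern, with a toy enum in place of the real `ExprRet` and a callback in place of `sol_delete_range`:

    // Simplified stand-in for ExprRet; only the shape of the recursion matters here.
    #[derive(Clone, Debug)]
    enum ToyRet {
        Single(usize),
        Multi(Vec<ToyRet>),
        Null,
    }

    fn visit_vars(ret: &ToyRet, on_var: &mut dyn FnMut(usize)) {
        match ret {
            ToyRet::Single(idx) => on_var(*idx),
            ToyRet::Multi(inner) => {
                for r in inner {
                    visit_vars(r, on_var);
                }
            }
            ToyRet::Null => {}
        }
    }

    fn main() {
        let ret = ToyRet::Multi(vec![
            ToyRet::Single(1),
            ToyRet::Multi(vec![ToyRet::Single(2), ToyRet::Null]),
        ]);
        let mut deleted = Vec::new();
        visit_vars(&ret, &mut |idx| deleted.push(idx)); // stand-in for sol_delete_range
        assert_eq!(deleted, vec![1usize, 2]);
    }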
+ Variable(ident) => self.variable(arena, ident, ctx, None), + Type(loc, ty) => { + if let Some(builtin) = Builtin::try_from_ty(ty.clone(), self, arena) { + if let Some(idx) = self.builtins().get(&builtin) { + ctx.push_expr(ExprRet::Single(*idx), self) + .into_expr_err(*loc)?; + Ok(()) + } else { + let idx = self.add_node(Node::Builtin(builtin.clone())); + self.builtins_mut().insert(builtin, idx); + ctx.push_expr(ExprRet::Single(idx), self) + .into_expr_err(*loc)?; + Ok(()) + } + } else { + ctx.push_expr(ExprRet::Null, self).into_expr_err(*loc)?; + Ok(()) + } + } + Parenthesis(_loc, expr) => self.parse_ctx_expr(arena, expr, ctx), + } + } +} diff --git a/crates/solc-expressions/src/context_builder/fn_calls.rs b/crates/solc-expressions/src/context_builder/fn_calls.rs new file mode 100644 index 00000000..34dcc8e7 --- /dev/null +++ b/crates/solc-expressions/src/context_builder/fn_calls.rs @@ -0,0 +1,322 @@ +use crate::{ExprErr, ExpressionParser, StatementParser}; +use solang_parser::helpers::CodeLocation; + +use graph::{ + nodes::{Context, ContextNode, FunctionNode}, + AnalyzerBackend, Node, +}; + +use solang_parser::pt::{Expression, Statement}; + +impl FnCallBuilder for T where + T: AnalyzerBackend + + Sized + + StatementParser + + ExpressionParser +{ +} + +/// Dispatcher for building up a context of a function +pub trait FnCallBuilder: + AnalyzerBackend + Sized + StatementParser + ExpressionParser +{ + fn analyze_fn_calls(&mut self, caller: FunctionNode) { + self.fn_calls_fns_mut().entry(caller).or_default(); + if let Some(body) = caller.underlying(self).unwrap().body.clone() { + self.analyze_fn_calls_stmt(caller, body); + } + } + + fn analyze_fn_calls_stmt(&mut self, caller: FunctionNode, stmt: Statement) { + use Statement::*; + match stmt { + Block { statements, .. } => { + statements.iter().for_each(|stmt| { + self.analyze_fn_calls_stmt(caller, stmt.clone()); + }); + } + Assembly { .. 
} => {} + Args(_, args) => { + args.iter().for_each(|arg| { + self.analyze_fn_calls_expr(caller, arg.expr.clone()); + }); + } + If(_, expr, stmt_true, maybe_stmt_false) => { + self.analyze_fn_calls_expr(caller, expr); + self.analyze_fn_calls_stmt(caller, *stmt_true); + if let Some(stmt_false) = maybe_stmt_false { + self.analyze_fn_calls_stmt(caller, *stmt_false); + } + } + While(_, expr, stmt) => { + self.analyze_fn_calls_expr(caller, expr); + self.analyze_fn_calls_stmt(caller, *stmt); + } + Expression(_, expr) => self.analyze_fn_calls_expr(caller, expr), + VariableDefinition(_, var_decl, maybe_expr) => { + self.analyze_fn_calls_expr(caller, var_decl.ty); + if let Some(expr) = maybe_expr { + self.analyze_fn_calls_expr(caller, expr); + } + } + For(_, maybe_stmt, maybe_expr, maybe_stmt_1, maybe_stmt_2) => { + if let Some(stmt) = maybe_stmt { + self.analyze_fn_calls_stmt(caller, *stmt); + } + + if let Some(expr) = maybe_expr { + self.analyze_fn_calls_expr(caller, *expr); + } + + if let Some(stmt1) = maybe_stmt_1 { + self.analyze_fn_calls_stmt(caller, *stmt1); + } + + if let Some(stmt2) = maybe_stmt_2 { + self.analyze_fn_calls_stmt(caller, *stmt2); + } + } + DoWhile(_, stmt, expr) => { + self.analyze_fn_calls_stmt(caller, *stmt); + self.analyze_fn_calls_expr(caller, expr); + } + Continue(_) => {} + Break(_) => {} + Return(_, maybe_expr) => { + if let Some(expr) = maybe_expr { + self.analyze_fn_calls_expr(caller, expr); + } + } + Revert(_, _, exprs) => { + exprs.iter().for_each(|expr| { + self.analyze_fn_calls_expr(caller, expr.clone()); + }); + } + RevertNamedArgs(_, _, args) => { + args.iter().for_each(|arg| { + self.analyze_fn_calls_expr(caller, arg.expr.clone()); + }); + } + Emit(_, expr) => { + self.analyze_fn_calls_expr(caller, expr); + } + Try(_, expr, maybe_tuple, catch_clauses) => { + self.analyze_fn_calls_expr(caller, expr); + // Option<(ParameterList, Box)> + if let Some((param_list, stmt)) = maybe_tuple { + param_list.iter().for_each(|(_, maybe_param)| { + if let Some(param) = maybe_param { + self.analyze_fn_calls_expr(caller, param.ty.clone()); + } + }); + self.analyze_fn_calls_stmt(caller, *stmt); + } + + catch_clauses + .iter() + .for_each(|catch_clause| match catch_clause { + solang_parser::pt::CatchClause::Simple(_, maybe_param, stmt) => { + if let Some(param) = maybe_param { + self.analyze_fn_calls_expr(caller, param.ty.clone()); + } + self.analyze_fn_calls_stmt(caller, stmt.clone()); + } + solang_parser::pt::CatchClause::Named(_, _, param, stmt) => { + self.analyze_fn_calls_expr(caller, param.ty.clone()); + self.analyze_fn_calls_stmt(caller, stmt.clone()); + } + }) + } + Error(_) => {} + } + } + + fn analyze_fn_calls_expr(&mut self, caller: FunctionNode, expr: Expression) { + use Expression::*; + match expr { + BoolLiteral(_, _) + | NumberLiteral(_, _, _, _) + | RationalNumberLiteral(_, _, _, _, _) + | HexNumberLiteral(_, _, _) + | StringLiteral(_) + | HexLiteral(_) + | AddressLiteral(_, _) + | Variable(_) + | This(_) => {} + + PostIncrement(_, expr) + | PostDecrement(_, expr) + | New(_, expr) + | Parenthesis(_, expr) + | MemberAccess(_, expr, _) + | Not(_, expr) + | Delete(_, expr) + | PreIncrement(_, expr) + | PreDecrement(_, expr) + | BitwiseNot(_, expr) + | Negate(_, expr) + | UnaryPlus(_, expr) => { + self.analyze_fn_calls_expr(caller, *expr); + } + + Power(_, expr, expr1) + | Multiply(_, expr, expr1) + | Divide(_, expr, expr1) + | Modulo(_, expr, expr1) + | Add(_, expr, expr1) + | Subtract(_, expr, expr1) + | ShiftLeft(_, expr, expr1) + | ShiftRight(_, expr, expr1) + | 
BitwiseAnd(_, expr, expr1) + | BitwiseXor(_, expr, expr1) + | BitwiseOr(_, expr, expr1) + | Less(_, expr, expr1) + | More(_, expr, expr1) + | LessEqual(_, expr, expr1) + | MoreEqual(_, expr, expr1) + | Equal(_, expr, expr1) + | NotEqual(_, expr, expr1) + | And(_, expr, expr1) + | Or(_, expr, expr1) + | Assign(_, expr, expr1) + | AssignOr(_, expr, expr1) + | AssignAnd(_, expr, expr1) + | AssignXor(_, expr, expr1) + | AssignShiftLeft(_, expr, expr1) + | AssignShiftRight(_, expr, expr1) + | AssignAdd(_, expr, expr1) + | AssignSubtract(_, expr, expr1) + | AssignMultiply(_, expr, expr1) + | AssignDivide(_, expr, expr1) + | AssignModulo(_, expr, expr1) => { + self.analyze_fn_calls_expr(caller, *expr); + self.analyze_fn_calls_expr(caller, *expr1); + } + + ArraySubscript(_, expr, maybe_expr) => { + self.analyze_fn_calls_expr(caller, *expr); + if let Some(expr1) = maybe_expr { + self.analyze_fn_calls_expr(caller, *expr1); + } + } + ArraySlice(_, expr, maybe_expr, maybe_expr1) => { + self.analyze_fn_calls_expr(caller, *expr); + if let Some(expr1) = maybe_expr { + self.analyze_fn_calls_expr(caller, *expr1); + } + + if let Some(expr2) = maybe_expr1 { + self.analyze_fn_calls_expr(caller, *expr2); + } + } + ConditionalOperator(_, expr, expr1, expr2) => { + self.analyze_fn_calls_expr(caller, *expr); + self.analyze_fn_calls_expr(caller, *expr1); + self.analyze_fn_calls_expr(caller, *expr2); + } + List(_, param_list) => { + param_list.iter().for_each(|(_, maybe_param)| { + if let Some(param) = maybe_param { + self.analyze_fn_calls_expr(caller, param.ty.clone()); + } + }); + } + ArrayLiteral(_, exprs) => { + exprs.into_iter().for_each(|expr| { + self.analyze_fn_calls_expr(caller, expr); + }); + } + + Type(_, ty) => match ty { + solang_parser::pt::Type::Mapping { key, value, .. } => { + self.analyze_fn_calls_expr(caller, *key); + self.analyze_fn_calls_expr(caller, *value); + } + solang_parser::pt::Type::Function { + params, returns, .. 
+ } => { + params.iter().for_each(|(_, maybe_param)| { + if let Some(param) = maybe_param { + self.analyze_fn_calls_expr(caller, param.ty.clone()); + } + }); + if let Some((param_list, _)) = returns { + param_list.iter().for_each(|(_, maybe_param)| { + if let Some(param) = maybe_param { + self.analyze_fn_calls_expr(caller, param.ty.clone()); + } + }); + } + } + _ => {} + }, + + FunctionCallBlock(_, func_expr, _input_exprs) => { + if let Variable(ref ident) = *func_expr { + let loc = func_expr.loc(); + let ctx = Context::new( + caller, + format!("<{}_parser_fn>", caller.name(self).unwrap()), + loc, + ); + let ctx = ContextNode::from(self.add_node(Node::Context(ctx))); + let visible_funcs = ctx.visible_funcs(self).unwrap(); + let possible_funcs: Vec<_> = visible_funcs + .into_iter() + .filter(|f| f.name(self).unwrap().starts_with(&ident.name)) + .collect(); + if possible_funcs.len() == 1 { + let func = possible_funcs[0]; + self.add_fn_call(caller, func); + } + } + } + NamedFunctionCall(_, func_expr, input_args) => { + if let Variable(ref ident) = *func_expr { + let loc = func_expr.loc(); + let ctx = Context::new( + caller, + format!("<{}_parser_fn>", caller.name(self).unwrap()), + loc, + ); + let ctx = ContextNode::from(self.add_node(Node::Context(ctx))); + let visible_funcs = ctx.visible_funcs(self).unwrap(); + let mut possible_funcs: Vec<_> = visible_funcs + .into_iter() + .filter(|f| f.name(self).unwrap().starts_with(&ident.name)) + .collect(); + possible_funcs.retain(|func| func.params(self).len() == input_args.len()); + if possible_funcs.len() == 1 { + let func = possible_funcs[0]; + self.add_fn_call(caller, func); + } + } + } + FunctionCall(_, func_expr, input_exprs) => { + if let Variable(ref ident) = *func_expr { + let loc = func_expr.loc(); + let ctx = Context::new( + caller, + format!("<{}_parser_fn>", caller.name(self).unwrap()), + loc, + ); + let ctx = ContextNode::from(self.add_node(Node::Context(ctx))); + let visible_funcs = ctx.visible_funcs(self).unwrap(); + let mut possible_funcs: Vec<_> = visible_funcs + .into_iter() + .filter(|f| f.name(self).unwrap().starts_with(&ident.name)) + .collect(); + possible_funcs.retain(|func| func.params(self).len() == input_exprs.len()); + if possible_funcs.len() == 1 { + let func = possible_funcs[0]; + self.add_fn_call(caller, func); + } + } + + input_exprs.iter().for_each(|expr| { + self.analyze_fn_calls_expr(caller, expr.clone()); + }) + } + } + } +} diff --git a/crates/solc-expressions/src/context_builder/mod.rs b/crates/solc-expressions/src/context_builder/mod.rs new file mode 100644 index 00000000..d79b71d2 --- /dev/null +++ b/crates/solc-expressions/src/context_builder/mod.rs @@ -0,0 +1,206 @@ +//! Trait and blanket implementation for the core parsing loop +use crate::{ExprErr, IntoExprErr}; + +use graph::{ + elem::Elem, + nodes::{Concrete, ContextNode, ContextVar, ContextVarNode, ExprRet, KilledKind}, + AnalyzerBackend, ContextEdge, Edge, GraphError, Node, +}; +use shared::RangeArena; + +use solang_parser::pt::{Expression, Loc}; + +impl ContextBuilder for T where + T: AnalyzerBackend + Sized + StatementParser +{ +} + +mod expr; +mod fn_calls; +mod stmt; + +pub use expr::*; +pub use fn_calls::*; +pub use stmt::*; + +/// Dispatcher for building up a context of a function +pub trait ContextBuilder: + AnalyzerBackend + Sized + StatementParser +{ + /// TODO: rename this. 
Sometimes we dont want to kill a context if we hit an error + fn widen_if_limit_hit(&mut self, ctx: ContextNode, maybe_err: Result<(), ExprErr>) -> bool { + match maybe_err { + Err(ExprErr::FunctionCallBlockTodo(_, _s)) => { + // dont kill for this one + false + } + Err(e @ ExprErr::GraphError(_, GraphError::MaxStackWidthReached(..), ..)) => { + // TODO: we should ideally peak at each if statement body and only widen variables referenced in there + // but for now we just delete the forks, and reset all local variables + self.add_expr_err(e); + true + } + Err(e) => { + let res = ctx + .kill(self, e.loc(), KilledKind::ParseError) + .into_expr_err(e.loc()); + let _ = self.add_if_err(res); + self.add_expr_err(e); + false + } + _ => false, + } + } + + /// Match on the [`ExprRet`]s of a return statement and performs the return + fn return_match( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: &Loc, + paths: &ExprRet, + idx: usize, + ) { + match paths { + ExprRet::CtxKilled(kind) => { + let _ = ctx.kill(self, *loc, *kind); + } + ExprRet::Single(expr) | ExprRet::SingleLiteral(expr) => { + // construct a variable from the return type + let target_var = ctx + .associated_fn(self) + .map(|func| { + let rets = func.returns(arena, self); + let Some(ret) = rets.get(idx) else { + return Ok(None) + }; + + ret.underlying(self) + .cloned() + .map(|underlying| { + ContextVar::new_from_func_ret(ctx, self, underlying).map(|var| { + var.map(|var| { + ContextVarNode::from(self.add_node(Node::ContextVar(var))) + }).ok_or(GraphError::NodeConfusion("Could not construct a context variable from function return".to_string())) + .map(Some) + }).and_then(|i| i) + }) + .and_then(|i| i) + }) + .and_then(|i| i) + .into_expr_err(*loc); + + let latest = ContextVarNode::from(*expr).latest_version(self); + + match target_var { + Ok(Some(target_var)) => { + // perform a cast + let next = self + .advance_var_in_ctx_forcible(latest, *loc, ctx, true) + .unwrap(); + let res = next.cast_from(&target_var, self, arena).into_expr_err(*loc); + self.add_if_err(res); + } + Ok(None) => {} + Err(e) => self.add_expr_err(e), + } + + // let ret = self.advance_var_in_ctx(latest, *loc, *ctx); + let path = ctx.path(self); + let res = latest.underlying_mut(self).into_expr_err(*loc); + match res { + Ok(var) => { + tracing::trace!("Returning: {}, {}", path, var.display_name); + var.is_return = true; + + self.add_edge(latest, ctx, Edge::Context(ContextEdge::Return)); + + let res = ctx.add_return_node(*loc, latest, self).into_expr_err(*loc); + // ctx.kill(self, *loc, KilledKind::Ended); + let _ = self.add_if_err(res); + } + Err(e) => self.add_expr_err(e), + } + } + ExprRet::Multi(rets) => { + rets.iter().enumerate().for_each(|(i, expr_ret)| { + self.return_match(arena, ctx, loc, expr_ret, i); + }); + } + ExprRet::Null => {} + } + } + + /// Apply an expression or statement to all *live* edges of a context. This is used everywhere + /// to ensure we only ever update *live* contexts. If a context has a subcontext, we *never* + /// want to update the original context. We only ever want to operate on the latest edges. + fn apply_to_edges( + &mut self, + ctx: ContextNode, + loc: Loc, + arena: &mut RangeArena>, + closure: &impl Fn( + &mut Self, + &mut RangeArena>, + ContextNode, + Loc, + ) -> Result<(), ExprErr>, + ) -> Result<(), ExprErr> { + let live_edges = ctx.live_edges(self).into_expr_err(loc)?; + tracing::trace!( + "Applying to live edges of: {}. 
edges: {:#?}", + ctx.path(self), + live_edges.iter().map(|i| i.path(self)).collect::>(), + ); + if !ctx.killed_or_ret(self).into_expr_err(loc)? { + if ctx.underlying(self).into_expr_err(loc)?.child.is_some() { + if live_edges.is_empty() { + Ok(()) + } else { + live_edges + .iter() + .try_for_each(|ctx| closure(self, arena, *ctx, loc)) + } + } else if live_edges.is_empty() { + closure(self, arena, ctx, loc) + } else { + live_edges + .iter() + .try_for_each(|ctx| closure(self, arena, *ctx, loc)) + } + } else { + Ok(()) + } + } + + /// The inverse of [`apply_to_edges`], used only for modifiers because modifiers have extremely weird + /// dynamics. + fn take_from_edge( + &mut self, + ctx: ContextNode, + loc: Loc, + arena: &mut RangeArena>, + closure: &impl Fn( + &mut Self, + &mut RangeArena>, + ContextNode, + Loc, + ) -> Result, + ) -> Result, ExprErr> { + let live_edges = ctx.live_edges(self).into_expr_err(loc)?; + tracing::trace!( + "Taking from live edges of: {}. edges: {:#?}", + ctx.path(self), + live_edges.iter().map(|i| i.path(self)).collect::>(), + ); + + if live_edges.is_empty() { + Ok(vec![closure(self, arena, ctx, loc)?]) + } else { + live_edges + .iter() + .map(|ctx| closure(self, arena, *ctx, loc)) + .collect::, ExprErr>>() + } + } +} diff --git a/crates/solc-expressions/src/context_builder/stmt.rs b/crates/solc-expressions/src/context_builder/stmt.rs new file mode 100644 index 00000000..07137a0f --- /dev/null +++ b/crates/solc-expressions/src/context_builder/stmt.rs @@ -0,0 +1,572 @@ +use crate::{ + context_builder::ContextBuilder, + func_call::{func_caller::FuncCaller, modifier::ModifierCaller}, + loops::Looper, + yul::YulBuilder, + ExprErr, ExpressionParser, IntoExprErr, +}; + +use graph::{ + elem::Elem, + nodes::{ + Concrete, Context, ContextNode, ContextVar, ContextVarNode, ExprRet, FunctionNode, + FunctionParamNode, FunctionReturnNode, KilledKind, + }, + AnalyzerBackend, ContextEdge, Edge, Node, +}; +use shared::{NodeIdx, RangeArena}; + +use petgraph::{visit::EdgeRef, Direction}; +use solang_parser::{ + helpers::CodeLocation, + pt::{Expression, Statement, YulStatement}, +}; + +impl StatementParser for T where + T: AnalyzerBackend + Sized + ExpressionParser +{ +} + +/// Solidity statement parser +pub trait StatementParser: + AnalyzerBackend + Sized + ExpressionParser +{ + /// Performs setup for parsing a solidity statement + fn parse_ctx_statement( + &mut self, + arena: &mut RangeArena>, + stmt: &Statement, + unchecked: bool, + parent_ctx: Option + Copy>, + ) where + Self: Sized, + { + if let Some(parent) = parent_ctx { + match self.node(parent) { + Node::Context(_) => { + let ctx = ContextNode::from(parent.into()); + if !ctx.killed_or_ret(self).unwrap() { + if let Some(live_edges) = + self.add_if_err(ctx.live_edges(self).into_expr_err(stmt.loc())) + { + if live_edges.is_empty() { + self.parse_ctx_stmt_inner(arena, stmt, unchecked, parent_ctx) + } else { + live_edges.iter().for_each(|fork_ctx| { + self.parse_ctx_stmt_inner( + arena, + stmt, + unchecked, + Some(*fork_ctx), + ); + }); + } + } + } + } + _ => self.parse_ctx_stmt_inner(arena, stmt, unchecked, parent_ctx), + } + } else { + self.parse_ctx_stmt_inner(arena, stmt, unchecked, parent_ctx) + } + } + + #[tracing::instrument(level = "trace", skip_all)] + /// Performs parsing of a solidity statement + fn parse_ctx_stmt_inner( + &mut self, + arena: &mut RangeArena>, + stmt: &Statement, + unchecked: bool, + parent_ctx: Option + Copy>, + ) where + Self: Sized, + { + use Statement::*; + // tracing::trace!("stmt: {:#?}, node: 
{:#?}", stmt, if let Some(node) = parent_ctx { Some(self.node(node.into())) } else { None}); + + // at the end of a statement we shouldn't have anything in the stack? + if let Some(ctx) = parent_ctx { + if let Node::Context(_) = self.node(ctx) { + let c = ContextNode::from(ctx.into()); + let _ = c.pop_expr_latest(stmt.loc(), self); + if unchecked { + let _ = c.set_unchecked(self); + } else { + let _ = c.unset_unchecked(self); + } + + if c.killed_or_ret(self).unwrap() { + return; + } + } + } + + match stmt { + Block { + loc, + unchecked, + statements, + } => { + tracing::trace!("parsing block"); + let parent = parent_ctx.expect("Free floating contexts shouldn't happen"); + let mut entry_loc = None; + let mut mods_set = false; + let ctx_node = match self.node(parent) { + Node::Function(fn_node) => { + mods_set = fn_node.modifiers_set; + entry_loc = Some(fn_node.loc); + let ctx = Context::new( + FunctionNode::from(parent.into()), + self.add_if_err( + FunctionNode::from(parent.into()) + .name(self) + .into_expr_err(stmt.loc()), + ) + .unwrap(), + *loc, + ); + let ctx_node = self.add_node(Node::Context(ctx)); + self.add_edge(ctx_node, parent, Edge::Context(ContextEdge::Context)); + + ctx_node + } + Node::Context(_) => { + // let ctx = Context::new_subctx( + // ContextNode::from(parent.into()), + // *loc, + // false, + // self, + // ); + // let ctx_node = self.add_node(Node::Context(ctx)); + // self.add_edge(ctx_node, parent, Edge::Context(ContextEdge::Subcontext)); + // ctx_node + parent.into() + } + e => todo!( + "Expected a context to be created by a function or context but got: {:?}", + e + ), + }; + + // optionally add named input and named outputs into context + let (params, inputs): (Vec<_>, Vec<_>) = self + .graph() + .edges_directed(parent.into(), Direction::Incoming) + .filter(|edge| *edge.weight() == Edge::FunctionParam) + .map(|edge| FunctionParamNode::from(edge.source())) + .collect::>() + .into_iter() + .filter_map(|param_node| { + let res = param_node + .underlying(self) + .into_expr_err(stmt.loc()) + .cloned(); + let func_param = self.add_if_err(res)?; + if let Some(cvar) = ContextVar::maybe_new_from_func_param(self, func_param) + { + let cvar_node = self.add_node(Node::ContextVar(cvar)); + ContextNode::from(ctx_node) + .add_var(cvar_node.into(), self) + .unwrap(); + self.add_edge( + cvar_node, + ctx_node, + Edge::Context(ContextEdge::Variable), + ); + + self.add_edge( + cvar_node, + ctx_node, + Edge::Context(ContextEdge::CalldataVariable), + ); + + Some((param_node, ContextVarNode::from(cvar_node))) + } else { + None + } + }) + .unzip(); + + self.graph() + .edges_directed(parent.into(), Direction::Incoming) + .filter(|edge| *edge.weight() == Edge::FunctionReturn) + .map(|edge| FunctionReturnNode::from(edge.source())) + .collect::>() + .iter() + .for_each(|ret_node| { + let res = ret_node.underlying(self).into_expr_err(stmt.loc()).cloned(); + let func_ret = self.add_if_err(res).unwrap(); + if let Some(cvar) = ContextVar::maybe_new_from_func_ret(self, func_ret) { + let cvar_node = self.add_node(Node::ContextVar(cvar)); + ContextNode::from(ctx_node) + .add_var(cvar_node.into(), self) + .unwrap(); + self.add_edge( + cvar_node, + ctx_node, + Edge::Context(ContextEdge::Variable), + ); + } + }); + + if let Some(fn_loc) = entry_loc { + if !mods_set { + let parent = FunctionNode::from(parent.into()); + let _ = self + .set_modifiers(arena, parent, ctx_node.into()) + .map_err(|e| self.add_expr_err(e)); + } + + let res = self.func_call_inner( + arena, + true, + ctx_node.into(), + 
parent.into().into(), + fn_loc, + &inputs, + ¶ms, + None, + &None, + ); + if self.widen_if_limit_hit(ctx_node.into(), res) { + return; + } + let res = self.apply_to_edges( + ctx_node.into(), + *loc, + arena, + &|analyzer, arena, ctx, loc| { + if ctx.killed_or_ret(analyzer).into_expr_err(loc)? { + tracing::trace!("killing due to bad funciton call"); + let res = ContextNode::from(ctx_node) + .kill( + analyzer, + fn_loc, + ctx.underlying(analyzer).unwrap().killed.unwrap().1, + ) + .into_expr_err(fn_loc); + let _ = analyzer.add_if_err(res); + } + Ok(()) + }, + ); + + if self.widen_if_limit_hit(ctx_node.into(), res) { + return; + } + + return; + } + + let res = self.apply_to_edges( + ctx_node.into(), + *loc, + arena, + &|analyzer, arena, ctx, _loc| { + statements.iter().for_each(|stmt| { + analyzer.parse_ctx_statement(arena, stmt, *unchecked, Some(ctx)) + }); + Ok(()) + }, + ); + if self.widen_if_limit_hit(ctx_node.into(), res) {} + } + VariableDefinition(loc, var_decl, maybe_expr) => { + let ctx = ContextNode::from( + parent_ctx + .expect("No context for variable definition?") + .into(), + ); + tracing::trace!( + "parsing variable definition, {:?} {var_decl:?}", + ctx.path(self) + ); + + if let Some(rhs) = maybe_expr { + match self.parse_ctx_expr(arena, rhs, ctx) { + Ok(()) => { + let res = self.apply_to_edges( + ctx, + *loc, + arena, + &|analyzer, arena, ctx, loc| { + if !ctx.killed_or_ret(analyzer).into_expr_err(loc)? { + let Some(rhs_paths) = ctx + .pop_expr_latest(loc, analyzer) + .into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + format!( + "Variable definition had no right hand side, {}", + ctx.path(analyzer) + ), + )); + }; + + if matches!(rhs_paths, ExprRet::CtxKilled(_)) { + ctx.push_expr(rhs_paths, analyzer) + .into_expr_err(loc)?; + return Ok(()); + } + + analyzer.parse_ctx_expr(arena, &var_decl.ty, ctx)?; + analyzer.apply_to_edges( + ctx, + loc, + arena, + &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = ctx + .pop_expr_latest(loc, analyzer) + .into_expr_err(loc)? + else { + return Err(ExprErr::NoLhs( + loc, + "Variable definition had no left hand side" + .to_string(), + )); + }; + + if matches!(lhs_paths, ExprRet::CtxKilled(_)) { + ctx.push_expr(lhs_paths, analyzer) + .into_expr_err(loc)?; + return Ok(()); + } + analyzer.match_var_def( + arena, + ctx, + var_decl, + loc, + &lhs_paths, + Some(&rhs_paths), + )?; + Ok(()) + }, + ) + } else { + Ok(()) + } + }, + ); + let _ = self.widen_if_limit_hit(ctx, res); + } + ret => { + let _ = self.widen_if_limit_hit(ctx, ret); + } + } + } else { + let res = self.parse_ctx_expr(arena, &var_decl.ty, ctx); + if self.widen_if_limit_hit(ctx, res) { + return; + } + let res = + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
+ else { + return Err(ExprErr::NoLhs( + loc, + "Variable definition had no left hand side".to_string(), + )); + }; + if matches!(lhs_paths, ExprRet::CtxKilled(_)) { + ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.match_var_def(arena, ctx, var_decl, loc, &lhs_paths, None)?; + Ok(()) + }); + let _ = self.widen_if_limit_hit(ctx, res); + } + } + Args(_loc, _args) => { + tracing::trace!("parsing args, {_args:?}"); + } + If(loc, if_expr, true_expr, maybe_false_expr) => { + tracing::trace!("parsing if, {if_expr:?}"); + let ctx = ContextNode::from(parent_ctx.expect("Dangling if statement").into()); + let res = self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + analyzer.cond_op_stmt(arena, loc, if_expr, true_expr, maybe_false_expr, ctx) + }); + let _ = self.widen_if_limit_hit(ctx, res); + } + While(loc, cond, body) => { + tracing::trace!("parsing while, {cond:?}"); + if let Some(parent) = parent_ctx { + let res = self.apply_to_edges( + ContextNode::from(parent.into()), + *loc, + arena, + &|analyzer, arena, ctx, loc| { + analyzer.while_loop(arena, loc, ctx, cond, body) + }, + ); + let _ = self.widen_if_limit_hit(parent.into().into(), res); + } + } + Expression(loc, expr) => { + tracing::trace!("parsing expr, {expr:?}"); + if let Some(parent) = parent_ctx { + let ctx = parent.into().into(); + match self.parse_ctx_expr(arena, expr, ctx) { + Ok(()) => { + let res = self.apply_to_edges( + ctx, + *loc, + arena, + &|analyzer, arena, ctx, loc| { + if ctx.killed_or_ret(analyzer).into_expr_err(loc)? { + tracing::trace!("killing due to bad expr"); + ContextNode::from(parent.into()) + .kill( + analyzer, + loc, + ctx.underlying(analyzer).unwrap().killed.unwrap().1, + ) + .into_expr_err(loc)?; + } + Ok(()) + }, + ); + let _ = self.widen_if_limit_hit(ctx, res); + } + e => { + let _ = self.widen_if_limit_hit(ctx, e); + } + } + } + } + For(loc, maybe_for_start, maybe_for_middle, maybe_for_end, maybe_for_body) => { + tracing::trace!("parsing for loop"); + if let Some(parent) = parent_ctx { + let res = self.apply_to_edges( + parent.into().into(), + *loc, + arena, + &|analyzer, arena, ctx, loc| { + analyzer.for_loop( + arena, + loc, + ctx, + maybe_for_start, + maybe_for_middle, + maybe_for_end, + maybe_for_body, + ) + }, + ); + let _ = self.widen_if_limit_hit(parent.into().into(), res); + } + } + DoWhile(loc, while_stmt, while_expr) => { + tracing::trace!("parsing `do while`, {while_expr:?}"); + if let Some(parent) = parent_ctx { + let res = self.apply_to_edges( + ContextNode::from(parent.into()), + *loc, + arena, + &|analyzer, arena, ctx, loc| { + analyzer.while_loop(arena, loc, ctx, while_expr, while_stmt) + }, + ); + let _ = self.widen_if_limit_hit(parent.into().into(), res); + } + } + Continue(_loc) => { + tracing::trace!("parsing continue"); + // TODO: We cheat in loops by just widening so continues dont matter yet + } + Break(_loc) => { + tracing::trace!("parsing break"); + // TODO: We cheat in loops by just widening so breaks dont matter yet + } + Assembly { + loc, + dialect: _, + flags: _, + block: yul_block, + } => { + tracing::trace!("parsing assembly"); + let ctx = ContextNode::from( + parent_ctx + .expect("No context for variable definition?") + .into(), + ); + let res = self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, _loc| { + analyzer.parse_ctx_yul_statement( + arena, + &YulStatement::Block(yul_block.clone()), + ctx, + ); + Ok(()) + }); + let _ = self.widen_if_limit_hit(ctx, res); + } + Return(loc, maybe_ret_expr) => { + 
tracing::trace!("parsing return"); + if let Some(ret_expr) = maybe_ret_expr { + if let Some(parent) = parent_ctx { + let res = self.parse_ctx_expr(arena, ret_expr, parent.into().into()); + if self.widen_if_limit_hit(parent.into().into(), res) { + return; + } + let res = self.apply_to_edges( + parent.into().into(), + *loc, + arena, + &|analyzer, arena, ctx, loc| { + let Ok(Some(ret)) = ctx.pop_expr_latest(loc, analyzer) else { + return Err(ExprErr::NoLhs( + loc, + "Return did not have a associated expression".to_string(), + )); + }; + + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + + let paths = ret.flatten(); + if paths.is_killed() { + tracing::trace!("killing due to bad return"); + let res = ContextNode::from(parent.into()) + .kill(analyzer, loc, paths.killed_kind().unwrap()) + .into_expr_err(loc); + let _ = analyzer.add_if_err(res); + return Ok(()); + } + analyzer.return_match(arena, ctx, &loc, &paths, 0); + Ok(()) + }, + ); + let _ = self.widen_if_limit_hit(parent.into().into(), res); + } + } + } + Revert(loc, _maybe_err_path, _exprs) => { + tracing::trace!("parsing revert"); + if let Some(parent) = parent_ctx { + let parent = ContextNode::from(parent.into()); + let res = + self.apply_to_edges(parent, *loc, arena, &|analyzer, arena, ctx, loc| { + let res = ctx + .kill(analyzer, loc, KilledKind::Revert) + .into_expr_err(loc); + let _ = analyzer.add_if_err(res); + Ok(()) + }); + let _ = self.add_if_err(res); + } + } + RevertNamedArgs(_loc, _maybe_err_path, _named_args) => { + tracing::trace!("parsing named revert"); + todo!("revert named args") + } + Emit(_loc, _emit_expr) => {} + Try(_loc, _try_expr, _maybe_returns, _clauses) => {} + Error(_loc) => {} + } + } +} diff --git a/src/context/exprs/env.rs b/crates/solc-expressions/src/env.rs similarity index 90% rename from src/context/exprs/env.rs rename to crates/solc-expressions/src/env.rs index ba36b1c2..414e6f12 100644 --- a/src/context/exprs/env.rs +++ b/crates/solc-expressions/src/env.rs @@ -1,32 +1,36 @@ -use crate::context::exprs::IntoExprErr; -use crate::context::func_call::FuncCaller; -use crate::context::ExprErr; -use crate::{context::ContextNode, AnalyzerLike}; -use shared::context::ExprRet; -use shared::context::{ContextEdge, ContextVar}; -use shared::nodes::Builtin; -use shared::nodes::Concrete; -use shared::Edge; -use shared::Node; -use solang_parser::pt::Expression; -use solang_parser::pt::Loc; +use crate::{ + func_call::helper::CallerHelper, func_call::modifier::ModifierCaller, ExprErr, IntoExprErr, +}; -use solang_parser::pt::Identifier; +use graph::{ + elem::Elem, + nodes::{Builtin, Concrete, ContextNode, ContextVar, ExprRet}, + AnalyzerBackend, ContextEdge, Edge, Node, +}; +use shared::{RangeArena, StorageLocation}; -impl Env for T where T: AnalyzerLike + Sized {} -pub trait Env: AnalyzerLike + Sized { +use solang_parser::pt::{Expression, Identifier, Loc}; + +impl Env for T where T: AnalyzerBackend + Sized {} +/// Handles environment based things like `msg`, `block`, etc. 
+pub trait Env: AnalyzerBackend + Sized { fn env_variable( &mut self, + arena: &mut RangeArena>, ident: &Identifier, ctx: ContextNode, ) -> Result, ExprErr> { match &*ident.name { "msg" | "tx" => { + ctx.add_gas_cost(self, shared::gas::BIN_OP_GAS) + .into_expr_err(ident.loc)?; ctx.push_expr(ExprRet::Single(self.msg().into()), self) .into_expr_err(ident.loc)?; Ok(Some(())) } "block" => { + ctx.add_gas_cost(self, shared::gas::BIN_OP_GAS) + .into_expr_err(ident.loc)?; ctx.push_expr(ExprRet::Single(self.block().into()), self) .into_expr_err(ident.loc)?; Ok(Some(())) @@ -40,7 +44,9 @@ pub trait Env: AnalyzerLike + Sized { .modifier_state .clone() { - self.resume_from_modifier(ctx, mod_state.clone())?; + ctx.add_gas_cost(self, shared::gas::FUNC_CALL_GAS) + .into_expr_err(ident.loc)?; + self.resume_from_modifier(arena, ctx, mod_state.clone())?; self.modifier_inherit_return(ctx, mod_state.parent_ctx); Ok(Some(())) } else { @@ -78,6 +84,7 @@ pub trait Env: AnalyzerLike + Sized { var.display_name = "block.blockhash".to_string(); var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Block(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -99,6 +106,7 @@ pub trait Env: AnalyzerLike + Sized { var.display_name = "block.basefee".to_string(); var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Block(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -120,6 +128,7 @@ pub trait Env: AnalyzerLike + Sized { var.display_name = "block.chainid".to_string(); var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Block(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -141,6 +150,7 @@ pub trait Env: AnalyzerLike + Sized { var.display_name = "block.coinbase".to_string(); var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Block(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -162,6 +172,7 @@ pub trait Env: AnalyzerLike + Sized { var.display_name = "block.difficulty".to_string(); var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Block(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -183,6 +194,7 @@ pub trait Env: AnalyzerLike + Sized { var.display_name = "block.gaslimit".to_string(); var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Block(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -204,6 +216,7 @@ pub trait Env: AnalyzerLike + Sized { var.display_name = "block.number".to_string(); var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Block(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -225,6 +238,7 @@ pub trait Env: AnalyzerLike + Sized { 
var.display_name = "block.prevrandao".to_string(); var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Block(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -246,6 +260,7 @@ pub trait Env: AnalyzerLike + Sized { var.display_name = "block.timestamp".to_string(); var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Block(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -260,10 +275,11 @@ pub trait Env: AnalyzerLike + Sized { } }; let mut var = ContextVar::new_from_concrete(loc, ctx, node, self).into_expr_err(loc)?; - var.name = name.clone(); + var.name.clone_from(&name); var.display_name = name; var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Block(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -300,6 +316,7 @@ pub trait Env: AnalyzerLike + Sized { var.display_name = "msg.data".to_string(); var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Msg(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -321,6 +338,7 @@ pub trait Env: AnalyzerLike + Sized { var.display_name = "msg.sender".to_string(); var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Msg(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -342,6 +360,7 @@ pub trait Env: AnalyzerLike + Sized { var.display_name = "msg.sig".to_string(); var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Msg(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -363,6 +382,7 @@ pub trait Env: AnalyzerLike + Sized { var.display_name = "msg.value".to_string(); var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Msg(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -384,6 +404,7 @@ pub trait Env: AnalyzerLike + Sized { var.display_name = "tx.origin".to_string(); var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Msg(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -405,6 +426,7 @@ pub trait Env: AnalyzerLike + Sized { var.display_name = "tx.gasprice".to_string(); var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Msg(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -421,6 +443,7 @@ pub trait Env: AnalyzerLike + Sized { .into_expr_err(loc)?; var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Msg(loc)); let cvar = self.add_node(Node::ContextVar(var)); 
ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); @@ -436,10 +459,11 @@ pub trait Env: AnalyzerLike + Sized { }; let mut var = ContextVar::new_from_concrete(loc, ctx, node, self).into_expr_err(loc)?; - var.name = name.clone(); + var.name.clone_from(&name); var.display_name = name; var.is_tmp = false; var.is_symbolic = true; + var.storage = Some(StorageLocation::Msg(loc)); let cvar = self.add_node(Node::ContextVar(var)); ctx.add_var(cvar.into(), self).into_expr_err(loc)?; self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); diff --git a/crates/solc-expressions/src/func_call/func_caller.rs b/crates/solc-expressions/src/func_call/func_caller.rs new file mode 100644 index 00000000..143b316b --- /dev/null +++ b/crates/solc-expressions/src/func_call/func_caller.rs @@ -0,0 +1,608 @@ +//! Traits & blanket implementations that facilitate performing various forms of function calls. + +use crate::{ + func_call::join::FuncJoiner, func_call::modifier::ModifierCaller, helper::CallerHelper, + internal_call::InternalFuncCaller, intrinsic_call::IntrinsicFuncCaller, + namespaced_call::NameSpaceFuncCaller, ContextBuilder, ExprErr, ExpressionParser, IntoExprErr, + StatementParser, +}; +use std::cell::RefCell; +use std::rc::Rc; + +use graph::{ + elem::Elem, + nodes::{ + Concrete, Context, ContextNode, ContextVar, ContextVarNode, ExprRet, FunctionNode, + FunctionParamNode, ModifierState, + }, + AnalyzerBackend, ContextEdge, Edge, GraphBackend, Node, +}; +use shared::{NodeIdx, RangeArena}; + +use solang_parser::pt::{Expression, Loc, NamedArgument}; + +use std::collections::BTreeMap; + +#[derive(Debug)] +pub enum NamedOrUnnamedArgs<'a> { + Named(&'a [NamedArgument]), + Unnamed(&'a [Expression]), +} + +impl<'a> NamedOrUnnamedArgs<'a> { + pub fn named_args(&self) -> Option<&'a [NamedArgument]> { + match self { + NamedOrUnnamedArgs::Named(inner) => Some(inner), + _ => None, + } + } + + pub fn unnamed_args(&self) -> Option<&'a [Expression]> { + match self { + NamedOrUnnamedArgs::Unnamed(inner) => Some(inner), + _ => None, + } + } + + pub fn len(&self) -> usize { + match self { + NamedOrUnnamedArgs::Unnamed(inner) => inner.len(), + NamedOrUnnamedArgs::Named(inner) => inner.len(), + } + } + + pub fn is_empty(&self) -> bool { + match self { + NamedOrUnnamedArgs::Unnamed(inner) => inner.len() == 0, + NamedOrUnnamedArgs::Named(inner) => inner.len() == 0, + } + } + + pub fn exprs(&self) -> Vec { + match self { + NamedOrUnnamedArgs::Unnamed(inner) => inner.to_vec(), + NamedOrUnnamedArgs::Named(inner) => inner.iter().map(|i| i.expr.clone()).collect(), + } + } + + pub fn parse( + &self, + arena: &mut RangeArena>, + analyzer: &mut (impl AnalyzerBackend + Sized), + ctx: ContextNode, + loc: Loc, + ) -> Result<(), ExprErr> { + match self { + NamedOrUnnamedArgs::Unnamed(inner) => analyzer.parse_inputs(arena, ctx, loc, inner), + NamedOrUnnamedArgs::Named(inner) => { + let append = Rc::new(RefCell::new(false)); + inner.iter().try_for_each(|arg| { + analyzer.parse_input(arena, ctx, loc, &arg.expr, &append)?; + Ok(()) + })?; + if !inner.is_empty() { + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(ret) = ctx.pop_tmp_expr(loc, analyzer).into_expr_err(loc)? 
else { + return Err(ExprErr::NoLhs( + loc, + "Inputs did not have left hand sides".to_string(), + )); + }; + ctx.push_expr(ret, analyzer).into_expr_err(loc) + }) + } else { + Ok(()) + } + } + } + } + + pub fn parse_n( + &self, + arena: &mut RangeArena>, + n: usize, + analyzer: &mut (impl AnalyzerBackend + Sized), + ctx: ContextNode, + loc: Loc, + ) -> Result<(), ExprErr> { + let append = Rc::new(RefCell::new(false)); + match self { + NamedOrUnnamedArgs::Unnamed(inner) => { + inner.iter().take(n).try_for_each(|arg| { + analyzer.parse_input(arena, ctx, loc, arg, &append)?; + Ok(()) + })?; + if !inner.is_empty() { + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(ret) = ctx.pop_tmp_expr(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoLhs( + loc, + "Inputs did not have left hand sides".to_string(), + )); + }; + ctx.push_expr(ret, analyzer).into_expr_err(loc) + }) + } else { + Ok(()) + } + } + NamedOrUnnamedArgs::Named(inner) => { + inner.iter().take(n).try_for_each(|arg| { + analyzer.parse_input(arena, ctx, loc, &arg.expr, &append)?; + Ok(()) + })?; + if !inner.is_empty() { + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(ret) = ctx.pop_tmp_expr(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoLhs( + loc, + "Inputs did not have left hand sides".to_string(), + )); + }; + ctx.push_expr(ret, analyzer).into_expr_err(loc) + }) + } else { + Ok(()) + } + } + } + } + + pub fn order(&self, inputs: ExprRet, ordered_params: Vec) -> ExprRet { + if inputs.len() < 2 { + inputs + } else { + match self { + NamedOrUnnamedArgs::Unnamed(_inner) => inputs, + NamedOrUnnamedArgs::Named(inner) => ExprRet::Multi( + ordered_params + .iter() + .map(|param| { + let index = inner + .iter() + .enumerate() + .find(|(_i, arg)| &arg.name.name == param) + .unwrap() + .0; + match &inputs { + ExprRet::Multi(inner) => inner[index].clone(), + _ => panic!("Mismatched ExprRet type"), + } + }) + .collect(), + ), + } + } + } +} + +impl FuncCaller for T where + T: AnalyzerBackend + Sized + GraphBackend + CallerHelper +{ +} +/// A trait for calling a function +pub trait FuncCaller: + GraphBackend + AnalyzerBackend + Sized +{ + #[tracing::instrument(level = "trace", skip_all)] + /// Perform a function call with named inputs + fn named_fn_call_expr( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: &Loc, + func_expr: &Expression, + input_exprs: &[NamedArgument], + ) -> Result<(), ExprErr> { + use solang_parser::pt::Expression::*; + match func_expr { + MemberAccess(loc, member_expr, ident) => self.call_name_spaced_func( + arena, + ctx, + loc, + member_expr, + ident, + NamedOrUnnamedArgs::Named(input_exprs), + ), + Variable(ident) => self.call_internal_named_func(arena, ctx, loc, ident, input_exprs), + e => Err(ExprErr::IntrinsicNamedArgs( + *loc, + format!("Cannot call intrinsic functions with named arguments. 
Call: {e:?}"), + )), + } + } + #[tracing::instrument(level = "trace", skip_all)] + /// Perform a function call + fn fn_call_expr( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: &Loc, + func_expr: &Expression, + input_exprs: &[Expression], + ) -> Result<(), ExprErr> { + use solang_parser::pt::Expression::*; + match func_expr { + MemberAccess(loc, member_expr, ident) => self.call_name_spaced_func( + arena, + ctx, + loc, + member_expr, + ident, + NamedOrUnnamedArgs::Unnamed(input_exprs), + ), + Variable(ident) => self.call_internal_func( + arena, + ctx, + loc, + ident, + func_expr, + NamedOrUnnamedArgs::Unnamed(input_exprs), + ), + _ => { + self.parse_ctx_expr(arena, func_expr, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoLhs( + loc, + "Function call to nonexistent function".to_string(), + )); + }; + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.match_intrinsic_fallback( + arena, + ctx, + &loc, + &NamedOrUnnamedArgs::Unnamed(input_exprs), + ret, + ) + }) + } + } + } + + /// Perform an intrinsic function call + fn match_intrinsic_fallback( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: &Loc, + input_exprs: &NamedOrUnnamedArgs, + ret: ExprRet, + ) -> Result<(), ExprErr> { + match ret { + ExprRet::Single(func_idx) | ExprRet::SingleLiteral(func_idx) => { + self.intrinsic_func_call(arena, loc, input_exprs, func_idx, ctx) + } + ExprRet::Multi(inner) => inner.into_iter().try_for_each(|ret| { + self.match_intrinsic_fallback(arena, ctx, loc, input_exprs, ret) + }), + ExprRet::CtxKilled(kind) => ctx.kill(self, *loc, kind).into_expr_err(*loc), + ExprRet::Null => Ok(()), + } + } + + /// Setups up storage variables for a function call and calls it + fn setup_fn_call( + &mut self, + arena: &mut RangeArena>, + loc: &Loc, + inputs: &ExprRet, + func_idx: NodeIdx, + ctx: ContextNode, + func_call_str: Option<&str>, + ) -> Result<(), ExprErr> { + // if we have a single match thats our function + let var = match ContextVar::maybe_from_user_ty(self, *loc, func_idx) { + Some(v) => v, + None => panic!( + "Could not create context variable from user type: {:?}", + self.node(func_idx) + ), + }; + + let new_cvarnode = self.add_node(Node::ContextVar(var)); + ctx.add_var(new_cvarnode.into(), self).into_expr_err(*loc)?; + self.add_edge(new_cvarnode, ctx, Edge::Context(ContextEdge::Variable)); + if let Some(func_node) = ContextVarNode::from(new_cvarnode) + .ty(self) + .into_expr_err(*loc)? 
+ .func_node(self) + { + self.func_call(arena, ctx, *loc, inputs, func_node, func_call_str, None) + } else { + unreachable!() + } + } + + /// Matches the input kinds and performs the call + fn func_call( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + input_paths: &ExprRet, + func: FunctionNode, + func_call_str: Option<&str>, + modifier_state: Option, + ) -> Result<(), ExprErr> { + let params = func.params(self); + let input_paths = input_paths.clone().flatten(); + if input_paths.has_killed() { + return ctx + .kill(self, loc, input_paths.killed_kind().unwrap()) + .into_expr_err(loc); + } + match input_paths { + ExprRet::Single(input_var) | ExprRet::SingleLiteral(input_var) => { + // if we get a single var, we expect the func to only take a single + // variable + let inputs = vec![ContextVarNode::from(input_var).latest_version(self)]; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + analyzer.func_call_inner( + arena, + false, + ctx, + func, + loc, + &inputs, + ¶ms, + func_call_str, + &modifier_state, + ) + }) + } + ExprRet::Multi(ref inputs) => { + if ExprRet::Multi(inputs.to_vec()).flatten().has_killed() { + return ctx + .kill( + self, + loc, + ExprRet::Multi(inputs.to_vec()).killed_kind().unwrap(), + ) + .into_expr_err(loc); + } + // check if the inputs length matchs func params length + // if they do, check that none are forks + if inputs.len() == params.len() { + let input_vars = inputs + .iter() + .map(|expr_ret| { + let var = expr_ret.expect_single().into_expr_err(loc)?; + Ok(ContextVarNode::from(var).latest_version(self)) + }) + .collect::, ExprErr>>()?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + analyzer.func_call_inner( + arena, + false, + ctx, + func, + loc, + &input_vars, + ¶ms, + func_call_str, + &modifier_state, + ) + }) + } else { + Err(ExprErr::InvalidFunctionInput( + loc, + format!( + "Length mismatch: {inputs:?} {params:?}, inputs as vars: {}, ctx: {}", + ExprRet::Multi(inputs.to_vec()).debug_str(self), + ctx.path(self) + ), + )) + } + } + e => todo!("here: {:?}", e), + } + } + + /// Checks if there are any modifiers and executes them prior to executing the function + #[tracing::instrument(level = "trace", skip_all)] + fn func_call_inner( + &mut self, + arena: &mut RangeArena>, + entry_call: bool, + ctx: ContextNode, + func_node: FunctionNode, + loc: Loc, + inputs: &[ContextVarNode], + params: &[FunctionParamNode], + func_call_str: Option<&str>, + modifier_state: &Option, + ) -> Result<(), ExprErr> { + if !entry_call { + if let Ok(true) = self.join(arena, ctx, loc, func_node, params, inputs, &mut vec![]) { + return Ok(()); + } + } + + // pseudocode: + // 1. Create context for the call + // 2. Check for modifiers + // 3. Call modifier 0, then 1, then 2, ... then N. + // 4. Call this function + // 5. Finish modifier N.. then 2, then 1, then 0 + let callee_ctx = if entry_call { + ctx + } else { + self.create_call_ctx(ctx, loc, func_node, modifier_state.clone())? + }; + + // handle remapping of variable names and bringing variables into the new context + let renamed_inputs = + self.map_inputs_to_params(arena, loc, entry_call, params, inputs, callee_ctx)?; + + // begin modifier handling by making sure modifiers were set + if !func_node.modifiers_set(self).into_expr_err(loc)? 
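+ // resolve the function's modifiers on first use so the walk below sees them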
{ + self.set_modifiers(arena, func_node, ctx)?; + } + + // get modifiers + let mods = func_node.modifiers(self); + self.apply_to_edges( + callee_ctx, + loc, + arena, + &|analyzer, arena, callee_ctx, loc| { + if let Some(mod_state) = + &ctx.underlying(analyzer).into_expr_err(loc)?.modifier_state + { + // we are iterating through modifiers + if mod_state.num + 1 < mods.len() { + // use the next modifier + let mut mstate = mod_state.clone(); + mstate.num += 1; + analyzer.call_modifier_for_fn(arena, loc, callee_ctx, func_node, mstate) + } else { + // out of modifiers, execute the actual function call + analyzer.execute_call_inner( + arena, + loc, + ctx, + callee_ctx, + func_node, + &renamed_inputs, + func_call_str, + ) + } + } else if !mods.is_empty() { + // we have modifiers and havent executed them, start the process of executing them + let state = ModifierState::new( + 0, + loc, + func_node, + callee_ctx, + ctx, + renamed_inputs.clone(), + ); + analyzer.call_modifier_for_fn(arena, loc, callee_ctx, func_node, state) + } else { + // no modifiers, just execute the function + analyzer.execute_call_inner( + arena, + loc, + ctx, + callee_ctx, + func_node, + &renamed_inputs, + func_call_str, + ) + } + }, + ) + } + + /// Actually executes the function + // #[tracing::instrument(level = "trace", skip_all)] + fn execute_call_inner( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + caller_ctx: ContextNode, + callee_ctx: ContextNode, + func_node: FunctionNode, + _renamed_inputs: &BTreeMap, + func_call_str: Option<&str>, + ) -> Result<(), ExprErr> { + tracing::trace!("executing: {}", func_node.name(self).into_expr_err(loc)?); + if let Some(body) = func_node.underlying(self).into_expr_err(loc)?.body.clone() { + // add return nodes into the subctx + #[allow(clippy::unnecessary_to_owned)] + func_node.returns(arena, self).into_iter().for_each(|ret| { + if let Some(var) = + ContextVar::maybe_new_from_func_ret(self, ret.underlying(self).unwrap().clone()) + { + let cvar = self.add_node(Node::ContextVar(var)); + callee_ctx.add_var(cvar.into(), self).unwrap(); + self.add_edge(cvar, callee_ctx, Edge::Context(ContextEdge::Variable)); + } + }); + + // parse the function body + self.parse_ctx_statement(arena, &body, false, Some(callee_ctx)); + if let Some(mod_state) = &callee_ctx + .underlying(self) + .into_expr_err(loc)? + .modifier_state + .clone() + { + if mod_state.num == 0 { + return self.ctx_rets(arena, loc, mod_state.parent_caller_ctx, callee_ctx); + } + } + + if callee_ctx != caller_ctx { + self.ctx_rets(arena, loc, caller_ctx, callee_ctx) + } else { + Ok(()) + } + } else { + let ret_ctx = Context::new_subctx( + callee_ctx, + Some(caller_ctx), + loc, + None, + None, + false, + self, + caller_ctx + .underlying(self) + .into_expr_err(loc)? 
+ .modifier_state + .clone(), + ) + .unwrap(); + let ret_subctx = ContextNode::from(self.add_node(Node::Context(ret_ctx))); + ret_subctx + .set_continuation_ctx(self, caller_ctx, "execute_call_inner") + .into_expr_err(loc)?; + + let res = callee_ctx + .set_child_call(ret_subctx, self) + .into_expr_err(loc); + let _ = self.add_if_err(res); + self.apply_to_edges(callee_ctx, loc, arena, &|analyzer, arena, ctx, loc| { + #[allow(clippy::unnecessary_to_owned)] + func_node + .returns(arena, analyzer) + .into_iter() + .try_for_each(|ret| { + let underlying = ret.underlying(analyzer).unwrap(); + let mut var = + ContextVar::new_from_func_ret(ctx, analyzer, underlying.clone()) + .unwrap() + .expect("No type for return variable?"); + if let Some(func_call) = &func_call_str { + var.name = + format!("{}_{}", func_call, callee_ctx.new_tmp(analyzer).unwrap()); + var.display_name = func_call.to_string(); + } + + if ctx.contains_var(&var.name, analyzer).into_expr_err(loc)? { + var.name = format!( + "{}_ret{}", + var.name, + ctx.new_tmp(analyzer).into_expr_err(loc)? + ); + var.display_name.clone_from(&var.name); + } + + let node = analyzer.add_node(Node::ContextVar(var)); + ctx.add_var(node.into(), analyzer).into_expr_err(loc)?; + analyzer.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + analyzer.add_edge(node, ctx, Edge::Context(ContextEdge::Return)); + ctx.push_expr(ExprRet::Single(node), analyzer) + .into_expr_err(loc)?; + Ok(()) + }) + }) + } + } +} diff --git a/crates/solc-expressions/src/func_call/helper.rs b/crates/solc-expressions/src/func_call/helper.rs new file mode 100644 index 00000000..e5d87812 --- /dev/null +++ b/crates/solc-expressions/src/func_call/helper.rs @@ -0,0 +1,630 @@ +//! Helper traits & blanket implementations that help facilitate performing function calls. +use crate::{ + member_access::ListAccess, variable::Variable, ContextBuilder, ExprErr, ExpressionParser, + IntoExprErr, +}; + +use graph::{ + elem::Elem, + nodes::{ + CallFork, Concrete, Context, ContextNode, ContextVar, ContextVarNode, ExprRet, + FunctionNode, FunctionParamNode, ModifierState, + }, + AnalyzerBackend, ContextEdge, Edge, Node, Range, VarType, +}; +use shared::{NodeIdx, RangeArena, StorageLocation}; + +use solang_parser::pt::{CodeLocation, Expression, Loc}; + +use std::{cell::RefCell, collections::BTreeMap, rc::Rc}; + +impl CallerHelper for T where T: AnalyzerBackend + Sized {} +/// Helper trait for performing function calls +pub trait CallerHelper: AnalyzerBackend + Sized { + /// Maps inputs to function parameters such that if there is a renaming i.e. `a(uint256 x)` is called via `a(y)`, + /// we map `y -> x` for future lookups + fn map_inputs_to_params( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + entry_call: bool, + params: &[FunctionParamNode], + inputs: &[ContextVarNode], + callee_ctx: ContextNode, + ) -> Result, ExprErr> { + Ok(params + .iter() + .zip(inputs.iter()) + .filter_map(|(param, input)| { + if !entry_call { + if let Some(name) = + self.add_if_err(param.maybe_name(self).into_expr_err(loc))? 
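+ // only parameters that actually have a name get a callee-context variable
+ // seeded from the caller's input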
+ { + let res = input + .latest_version(self) + .underlying(self) + .into_expr_err(loc) + .cloned(); + let mut new_cvar = self.add_if_err(res)?; + new_cvar.loc = Some(param.loc(self).unwrap()); + new_cvar.name.clone_from(&name); + new_cvar.display_name = name; + new_cvar.is_tmp = false; + new_cvar.storage = if let Some(StorageLocation::Storage(_)) = + param.underlying(self).unwrap().storage + { + new_cvar.storage + } else { + None + }; + + let node = ContextVarNode::from(self.add_node(Node::ContextVar(new_cvar))); + + self.add_edge( + node, + input.latest_version(self), + Edge::Context(ContextEdge::InputVariable), + ); + + if let Some(param_ty) = VarType::try_from_idx(self, param.ty(self).unwrap()) + { + if !node.ty_eq_ty(¶m_ty, self).unwrap() { + node.cast_from_ty(param_ty, self, arena).unwrap(); + } + } + + self.add_edge( + node, + input.latest_version(self), + Edge::Context(ContextEdge::InputVariable), + ); + + if let Some(_len_var) = input.array_to_len_var(self) { + // bring the length variable along as well + self.get_length(arena, callee_ctx, loc, node, false) + .unwrap(); + } + let node = node.latest_version(self); + + if let (Some(r), Some(r2)) = + (node.range(self).unwrap(), param.range(self).unwrap()) + { + let new_min = + r.range_min().into_owned().cast(r2.range_min().into_owned()); + let new_max = + r.range_max().into_owned().cast(r2.range_max().into_owned()); + let res = node + .latest_version(self) + .try_set_range_min(self, arena, new_min) + .into_expr_err(loc); + self.add_if_err(res); + let res = node + .latest_version(self) + .try_set_range_max(self, arena, new_max) + .into_expr_err(loc); + self.add_if_err(res); + let res = node + .latest_version(self) + .try_set_range_exclusions(self, r.exclusions.clone()) + .into_expr_err(loc); + self.add_if_err(res); + } + callee_ctx.add_var(node, self).unwrap(); + self.add_edge(node, callee_ctx, Edge::Context(ContextEdge::Variable)); + Some((*input, node)) + } else { + None + } + } else { + None + } + }) + .collect::>()) + } + + #[tracing::instrument(level = "trace", skip_all)] + /// Parses input expressions into [`ExprRet`]s and adds them to the expr ret stack + fn parse_inputs( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + inputs: &[Expression], + ) -> Result<(), ExprErr> { + let append = if ctx.underlying(self).into_expr_err(loc)?.tmp_expr.is_empty() { + Rc::new(RefCell::new(true)) + } else { + Rc::new(RefCell::new(false)) + }; + + inputs + .iter() + .try_for_each(|input| self.parse_input(arena, ctx, loc, input, &append))?; + + if !inputs.is_empty() { + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(ret) = ctx.pop_tmp_expr(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoLhs( + loc, + "Inputs did not have left hand sides".to_string(), + )); + }; + ctx.push_expr(ret, analyzer).into_expr_err(loc) + }) + } else { + Ok(()) + } + } + + fn parse_input( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + _loc: Loc, + input: &Expression, + append: &Rc>, + ) -> Result<(), ExprErr> { + self.parse_ctx_expr(arena, input, ctx)?; + self.apply_to_edges(ctx, input.loc(), arena, &|analyzer, arena, ctx, loc| { + let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { + return Err(ExprErr::NoLhs( + loc, + "Inputs did not have left hand sides".to_string(), + )); + }; + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + if *append.borrow() { + ctx.append_tmp_expr(ret, analyzer).into_expr_err(loc) + } else { + *append.borrow_mut() = true; + ctx.push_tmp_expr(ret, analyzer).into_expr_err(loc) + } + }) + } + + /// Creates a new context for a call + fn create_call_ctx( + &mut self, + curr_ctx: ContextNode, + loc: Loc, + func_node: FunctionNode, + modifier_state: Option, + ) -> Result { + let fn_ext = curr_ctx.is_fn_ext(func_node, self).into_expr_err(loc)?; + if fn_ext { + curr_ctx + .add_gas_cost(self, shared::gas::EXT_FUNC_CALL_GAS) + .into_expr_err(loc)?; + } else { + curr_ctx + .add_gas_cost(self, shared::gas::FUNC_CALL_GAS) + .into_expr_err(loc)?; + } + let ctx = Context::new_subctx( + curr_ctx, + None, + loc, + None, + Some(func_node), + fn_ext, + self, + modifier_state, + ) + .into_expr_err(loc)?; + let callee_ctx = ContextNode::from(self.add_node(Node::Context(ctx))); + curr_ctx + .set_child_call(callee_ctx, self) + .into_expr_err(loc)?; + let ctx_fork = self.add_node(Node::FunctionCall); + self.add_edge(ctx_fork, curr_ctx, Edge::Context(ContextEdge::Subcontext)); + self.add_edge(ctx_fork, func_node, Edge::Context(ContextEdge::Call)); + self.add_edge( + NodeIdx::from(callee_ctx.0), + ctx_fork, + Edge::Context(ContextEdge::Subcontext), + ); + Ok(callee_ctx) + } + + /// Disambiguates a function call by their inputs (length & type) + fn disambiguate_fn_call( + &mut self, + arena: &mut RangeArena>, + fn_name: &str, + literals: Vec, + input_paths: &ExprRet, + funcs: &[FunctionNode], + ) -> Option { + let input_paths = input_paths.clone().flatten(); + // try to find the function based on naive signature + // This doesnt do type inference on NumberLiterals (i.e. 
100 could be uintX or intX, and there could + // be a function that takes an int256 but we evaled as uint256) + let fn_sig = format!( + "{}{}", + fn_name, + input_paths.try_as_func_input_str(self, arena) + ); + if let Some(func) = funcs.iter().find(|func| func.name(self).unwrap() == fn_sig) { + return Some(*func); + } + + // filter by input len + let inputs = input_paths.as_flat_vec(); + let funcs: Vec<&FunctionNode> = funcs + .iter() + .filter(|func| func.params(self).len() == inputs.len()) + .collect(); + + if funcs.len() == 1 { + return Some(*funcs[0]); + } + + if !literals.iter().any(|i| *i) { + None + } else { + let funcs = funcs + .iter() + .filter(|func| { + let params = func.params(self); + params + .iter() + .zip(&inputs) + .enumerate() + .all(|(i, (param, input))| { + let param_ty = VarType::try_from_idx(self, (*param).into()).unwrap(); + let input_ty = ContextVarNode::from(*input).ty(self).unwrap(); + if param_ty.ty_eq(input_ty, self).unwrap() { + true + } else if literals[i] { + let possibilities = ContextVarNode::from(*input) + .ty(self) + .unwrap() + .possible_builtins_from_ty_inf(self); + let param_ty = param.ty(self).unwrap(); + match self.node(param_ty) { + Node::Builtin(b) => possibilities.contains(b), + _ => false, + } + } else { + false + } + }) + }) + .collect::>(); + if funcs.len() == 1 { + Some(**funcs[0]) + } else { + // this would be invalid solidity, likely the user needs to perform a cast + None + } + } + } + + /// Handle returns for a function call + fn ctx_rets( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + caller_ctx: ContextNode, + callee_ctx: ContextNode, + ) -> Result<(), ExprErr> { + tracing::trace!( + "Handling function call return for: {}, {}, depth: {:?}, {:?}", + caller_ctx.path(self), + callee_ctx.path(self), + caller_ctx.depth(self), + callee_ctx.depth(self), + ); + + if callee_ctx + .underlying(self) + .unwrap() + .modifier_state + .is_some() + { + if let Some(ret_ctx) = callee_ctx.underlying(self).into_expr_err(loc)?.parent_ctx { + let ret = ret_ctx.underlying(self).into_expr_err(loc)?.ret.clone(); + ret.iter().try_for_each(|(loc, ret)| { + let cvar = self.advance_var_in_forced_ctx(*ret, *loc, callee_ctx)?; + callee_ctx + .add_return_node(*loc, cvar, self) + .into_expr_err(*loc)?; + self.add_edge(cvar, callee_ctx, Edge::Context(ContextEdge::Return)); + + Ok(()) + })?; + } + } + + match callee_ctx.underlying(self).into_expr_err(loc)?.child { + Some(CallFork::Fork(w1, w2)) => { + self.ctx_rets(arena, loc, caller_ctx, w1)?; + self.ctx_rets(arena, loc, caller_ctx, w2)?; + Ok(()) + } + Some(CallFork::Call(c)) + if c.underlying(self).into_expr_err(loc)?.depth + >= caller_ctx.underlying(self).into_expr_err(loc)?.depth => + { + // follow rabbit hole + self.ctx_rets(arena, loc, caller_ctx, c)?; + Ok(()) + } + _ => { + if callee_ctx.is_anonymous_fn_call(self).into_expr_err(loc)? { + return Ok(()); + } + + if callee_ctx.is_killed(self).into_expr_err(loc)? { + return Ok(()); + } + + if callee_ctx + .underlying(self) + .into_expr_err(loc)? + .child + .is_some() + { + return Ok(()); + } + + let ctx = Context::new_subctx( + callee_ctx, + Some(caller_ctx), + loc, + None, + None, + false, + self, + caller_ctx + .underlying(self) + .into_expr_err(loc)? 
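+ // the fresh return subcontext carries over the caller's modifier state, if any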
+ .modifier_state + .clone(), + ) + .into_expr_err(loc)?; + let ret_subctx = ContextNode::from(self.add_node(Node::Context(ctx))); + ret_subctx + .set_continuation_ctx(self, caller_ctx, "ctx_rets") + .into_expr_err(loc)?; + + let res = callee_ctx + .set_child_call(ret_subctx, self) + .into_expr_err(loc); + let _ = self.add_if_err(res); + + let mut rets = callee_ctx.underlying(self).unwrap().ret.clone(); + + if rets.is_empty() { + let func_rets = callee_ctx + .associated_fn(self) + .into_expr_err(loc)? + .returns(arena, self); + func_rets + .iter() + .filter_map(|ret| { + let n: String = ret.maybe_name(self).ok()??; + let ret_loc: Loc = ret.loc(self).ok()?; + Some((n, ret_loc)) + }) + .collect::>() + .into_iter() + .try_for_each(|(name, ret_loc)| { + if let Some(cvar) = callee_ctx + .var_by_name_or_recurse(self, &name) + .into_expr_err(loc)? + { + let cvar = cvar.latest_version(self); + // let ret_loc = ret.loc(self).into_expr_err(loc)?; + callee_ctx + .add_return_node(ret_loc, cvar, self) + .into_expr_err(loc)?; + self.add_edge(cvar, callee_ctx, Edge::Context(ContextEdge::Return)); + } + Ok(()) + })?; + + // add unnamed rets + func_rets + .into_iter() + .filter(|ret| ret.maybe_name(self).unwrap().is_none()) + .collect::>() + .iter() + .try_for_each(|ret| { + let ret_loc = ret.loc(self).into_expr_err(loc)?; + let cvar = ContextVar::new_from_func_ret( + callee_ctx, + self, + ret.underlying(self).into_expr_err(loc)?.clone(), + ) + .into_expr_err(loc)? + .unwrap(); + let cvar = ContextVarNode::from(self.add_node(Node::ContextVar(cvar))); + callee_ctx.add_var(cvar, self).into_expr_err(loc)?; + self.add_edge(cvar, callee_ctx, Edge::Context(ContextEdge::Variable)); + callee_ctx + .add_return_node(ret_loc, cvar, self) + .into_expr_err(loc)?; + self.add_edge(cvar, callee_ctx, Edge::Context(ContextEdge::Return)); + Ok(()) + })?; + rets.clone_from(&callee_ctx.underlying(self).unwrap().ret); + } + + let target_rets = + if let Some(mod_state) = &callee_ctx.underlying(self).unwrap().modifier_state { + mod_state + .parent_ctx + .associated_fn(self) + .into_expr_err(loc)? + .returns(arena, self) + } else { + callee_ctx + .associated_fn(self) + .into_expr_err(loc)? + .returns(arena, self) + }; + + let ret = rets + .into_iter() + .zip(target_rets.iter()) + .enumerate() + .map(|(i, ((_, node), target_ret))| { + let target_ty = target_ret.ty(self).unwrap(); + let target_ty = VarType::try_from_idx(self, target_ty).unwrap(); + + let tmp_ret = node + .as_tmp(callee_ctx.underlying(self).unwrap().loc, ret_subctx, self) + .unwrap(); + tmp_ret.cast_from_ty(target_ty, self, arena).unwrap(); + tmp_ret.underlying_mut(self).into_expr_err(loc)?.is_return = true; + tmp_ret + .underlying_mut(self) + .into_expr_err(loc)? 
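+ // display the temporary return as "<function name>.<return index>"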
+ .display_name = + format!("{}.{}", callee_ctx.associated_fn_name(self).unwrap(), i); + ret_subctx.add_var(tmp_ret, self).into_expr_err(loc)?; + self.add_edge(tmp_ret, ret_subctx, Edge::Context(ContextEdge::Variable)); + Ok(ExprRet::Single(tmp_ret.into())) + }) + .collect::>()?; + + ret_subctx + .push_expr(ExprRet::Multi(ret), self) + .into_expr_err(loc)?; + Ok(()) + } + } + } + + /// Inherit the input changes from a function call + fn inherit_input_changes( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + to_ctx: ContextNode, + from_ctx: ContextNode, + renamed_inputs: &BTreeMap, + ) -> Result<(), ExprErr> { + if to_ctx != from_ctx { + self.apply_to_edges(to_ctx, loc, arena, &|analyzer, arena, to_ctx, loc| { + renamed_inputs + .iter() + .try_for_each(|(input_var, updated_var)| { + let new_input = analyzer.advance_var_in_ctx( + input_var.latest_version(analyzer), + loc, + to_ctx, + )?; + let latest_updated = updated_var.latest_version(analyzer); + if let Some(updated_var_range) = + latest_updated.range(analyzer).into_expr_err(loc)? + { + let res = new_input + .set_range_min( + analyzer, + arena, + updated_var_range.range_min().into_owned(), + ) + .into_expr_err(loc); + let _ = analyzer.add_if_err(res); + let res = new_input + .set_range_max( + analyzer, + arena, + updated_var_range.range_max().into_owned(), + ) + .into_expr_err(loc); + let _ = analyzer.add_if_err(res); + let res = new_input + .set_range_exclusions( + analyzer, + updated_var_range.exclusions.clone(), + ) + .into_expr_err(loc); + let _ = analyzer.add_if_err(res); + } + Ok(()) + }) + })?; + } + Ok(()) + } + + /// Inherit the input changes from a function call + fn modifier_inherit_return(&mut self, mod_ctx: ContextNode, fn_ctx: ContextNode) { + let ret = fn_ctx.underlying(self).unwrap().ret.clone(); + mod_ctx.underlying_mut(self).unwrap().ret = ret; + } + + /// Inherit the storage changes from a function call + fn inherit_storage_changes( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + inheritor_ctx: ContextNode, + grantor_ctx: ContextNode, + ) -> Result<(), ExprErr> { + if inheritor_ctx != grantor_ctx { + return self.apply_to_edges( + inheritor_ctx, + loc, + arena, + &|analyzer, arena, inheritor_ctx, loc| { + let vars = grantor_ctx.local_vars(analyzer).clone(); + vars.iter().try_for_each(|(name, old_var)| { + let var = old_var.latest_version(analyzer); + let underlying = var.underlying(analyzer).into_expr_err(loc)?; + if var.is_storage(analyzer).into_expr_err(loc)? { + if let Some(inheritor_var) = inheritor_ctx.var_by_name(analyzer, name) { + let inheritor_var = inheritor_var.latest_version(analyzer); + if let Some(r) = underlying.ty.range(analyzer).into_expr_err(loc)? 
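+ // copy the grantor's latest storage range onto the inheritor's version of the variable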
{ + let new_inheritor_var = analyzer + .advance_var_in_ctx( + inheritor_var, + underlying.loc.expect("No loc for val change"), + inheritor_ctx, + ) + .unwrap(); + let _ = new_inheritor_var.set_range_min( + analyzer, + arena, + r.range_min().into_owned(), + ); + let _ = new_inheritor_var.set_range_max( + analyzer, + arena, + r.range_max().into_owned(), + ); + let _ = new_inheritor_var + .set_range_exclusions(analyzer, r.exclusions.clone()); + } + } else { + let new_in_inheritor = + analyzer.add_node(Node::ContextVar(underlying.clone())); + inheritor_ctx + .add_var(new_in_inheritor.into(), analyzer) + .into_expr_err(loc)?; + analyzer.add_edge( + new_in_inheritor, + inheritor_ctx, + Edge::Context(ContextEdge::Variable), + ); + analyzer.add_edge( + new_in_inheritor, + var, + Edge::Context(ContextEdge::InheritedVariable), + ); + } + } + Ok(()) + }) + }, + ); + } + Ok(()) + } +} diff --git a/crates/solc-expressions/src/func_call/internal_call.rs b/crates/solc-expressions/src/func_call/internal_call.rs new file mode 100644 index 00000000..d771fbab --- /dev/null +++ b/crates/solc-expressions/src/func_call/internal_call.rs @@ -0,0 +1,335 @@ +//! Traits & blanket implementations that facilitate performing locally scoped function calls. + +use crate::func_caller::NamedOrUnnamedArgs; +use crate::{ + assign::Assign, func_call::func_caller::FuncCaller, helper::CallerHelper, ContextBuilder, + ExprErr, ExpressionParser, IntoExprErr, +}; + +use graph::{ + elem::Elem, + nodes::{Builtin, Concrete, ContextNode, ContextVar, ContextVarNode, ExprRet}, + AnalyzerBackend, ContextEdge, Edge, GraphBackend, Node, VarType, +}; +use shared::RangeArena; + +use solang_parser::pt::{Expression, Identifier, Loc, NamedArgument}; + +impl InternalFuncCaller for T where + T: AnalyzerBackend + Sized + GraphBackend + CallerHelper +{ +} +/// A trait for performing internally scoped function calls (i.e. 
*NOT* `MyContract.func(...)`) +pub trait InternalFuncCaller: + AnalyzerBackend + Sized + GraphBackend + CallerHelper +{ + #[tracing::instrument(level = "trace", skip_all)] + /// Perform a named function call + fn call_internal_named_func( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: &Loc, + ident: &Identifier, + input_args: &[NamedArgument], + ) -> Result<(), ExprErr> { + // It is a function call, check if we have the ident in scope + let funcs = ctx.visible_funcs(self).into_expr_err(*loc)?; + // filter down all funcs to those that match + let possible_funcs = funcs + .iter() + .filter(|func| { + let named_correctly = func + .name(self) + .unwrap() + .starts_with(&format!("{}(", ident.name)); + if !named_correctly { + false + } else { + // filter by params + let params = func.params(self); + if params.len() != input_args.len() { + false + } else { + params.iter().all(|param| { + input_args + .iter() + .any(|input| input.name.name == param.name(self).unwrap()) + }) + } + } + }) + .copied() + .collect::>(); + + if possible_funcs.is_empty() { + // check structs + let structs = ctx.visible_structs(self).into_expr_err(*loc)?; + let possible_structs = structs + .iter() + .filter(|strukt| { + let named_correctly = strukt + .name(self) + .unwrap() + .starts_with(&ident.name.to_string()); + if !named_correctly { + false + } else { + // filter by params + let fields = strukt.fields(self); + if fields.len() != input_args.len() { + false + } else { + fields.iter().all(|field| { + input_args + .iter() + .any(|input| input.name.name == field.name(self).unwrap()) + }) + } + } + }) + .copied() + .collect::>(); + if possible_structs.is_empty() { + Err(ExprErr::FunctionNotFound( + *loc, + format!( + "No functions or structs found for named function call: {:?}", + ident.name + ), + )) + } else if possible_structs.len() == 1 { + let strukt = possible_structs[0]; + let var = + ContextVar::new_from_struct(*loc, strukt, ctx, self).into_expr_err(*loc)?; + let cvar = self.add_node(Node::ContextVar(var)); + ctx.add_var(cvar.into(), self).into_expr_err(*loc)?; + self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); + + strukt.fields(self).iter().try_for_each(|field| { + let field_cvar = ContextVar::maybe_new_from_field( + self, + *loc, + ContextVarNode::from(cvar) + .underlying(self) + .into_expr_err(*loc)?, + field.underlying(self).unwrap().clone(), + ) + .expect("Invalid struct field"); + + let fc_node = self.add_node(Node::ContextVar(field_cvar)); + self.add_edge( + fc_node, + cvar, + Edge::Context(ContextEdge::AttrAccess("field")), + ); + self.add_edge(fc_node, ctx, Edge::Context(ContextEdge::Variable)); + ctx.add_var(fc_node.into(), self).into_expr_err(*loc)?; + let field_as_ret = ExprRet::Single(fc_node); + let input = input_args + .iter() + .find(|arg| arg.name.name == field.name(self).unwrap()) + .expect("No field in struct in struct construction"); + self.parse_ctx_expr(arena, &input.expr, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(assignment) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
+ else { + return Err(ExprErr::NoRhs(loc, "Array creation failed".to_string())); + }; + + if matches!(assignment, ExprRet::CtxKilled(_)) { + ctx.push_expr(assignment, analyzer).into_expr_err(loc)?; + return Ok(()); + } + + analyzer.match_assign_sides(arena, ctx, loc, &field_as_ret, &assignment)?; + let _ = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)?; + Ok(()) + }) + })?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, _loc| { + ctx.push_expr(ExprRet::Single(cvar), analyzer) + .into_expr_err(*loc)?; + Ok(()) + })?; + Ok(()) + } else { + Err(ExprErr::Todo( + *loc, + "Disambiguation of struct construction not currently supported".to_string(), + )) + } + } else if possible_funcs.len() == 1 { + let func = possible_funcs[0]; + let params = func.params(self); + let inputs: Vec<_> = params + .iter() + .map(|param| { + let input = input_args + .iter() + .find(|arg| arg.name.name == param.name(self).unwrap()) + .expect( + "No parameter with named provided in named parameter function call", + ); + input.expr.clone() + }) + .collect(); + self.parse_inputs(arena, ctx, *loc, &inputs[..])?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let inputs = ctx + .pop_expr_latest(loc, analyzer) + .into_expr_err(loc)? + .unwrap_or_else(|| ExprRet::Multi(vec![])); + analyzer.setup_fn_call(arena, &ident.loc, &inputs, func.into(), ctx, None) + }) + } else { + todo!("Disambiguate named function call"); + } + } + + #[tracing::instrument(level = "trace", skip_all)] + fn call_internal_func( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: &Loc, + ident: &Identifier, + func_expr: &Expression, + input_exprs: NamedOrUnnamedArgs, + ) -> Result<(), ExprErr> { + tracing::trace!("function call: {}(..)", ident.name); + // It is a function call, check if we have the ident in scope + let funcs = ctx.visible_funcs(self).into_expr_err(*loc)?; + + // filter down all funcs to those that match + let possible_funcs = funcs + .iter() + .filter(|func| { + let named_correctly = func + .name(self) + .unwrap() + .starts_with(&format!("{}(", ident.name)); + if !named_correctly { + false + } else { + // filter by params + let params = func.params(self); + if params.len() != input_exprs.len() { + false + } else if matches!(input_exprs, NamedOrUnnamedArgs::Named(_)) { + params.iter().all(|param| { + input_exprs + .named_args() + .unwrap() + .iter() + .any(|input| input.name.name == param.name(self).unwrap()) + }) + } else { + true + } + } + }) + .copied() + .collect::>(); + + match possible_funcs.len() { + 0 => { + // this is a builtin, cast, or unknown function + self.parse_ctx_expr(arena, func_expr, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let ret = ctx + .pop_expr_latest(loc, analyzer) + .into_expr_err(loc)? + .unwrap_or_else(|| ExprRet::Multi(vec![])); + let ret = ret.flatten(); + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.match_intrinsic_fallback(arena, ctx, &loc, &input_exprs, ret) + }) + } + 1 => { + // there is only a single possible function + input_exprs.parse(arena, self, ctx, *loc)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let mut inputs = ctx + .pop_expr_latest(loc, analyzer) + .into_expr_err(loc)? 
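+ // an empty expression stack here simply means a zero-argument call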
+ .unwrap_or_else(|| ExprRet::Multi(vec![])); + inputs = if let Some(ordered_param_names) = + possible_funcs[0].maybe_ordered_param_names(analyzer) + { + input_exprs.order(inputs, ordered_param_names) + } else { + inputs + }; + let inputs = inputs.flatten(); + if matches!(inputs, ExprRet::CtxKilled(_)) { + ctx.push_expr(inputs, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.setup_fn_call( + arena, + &ident.loc, + &inputs, + (possible_funcs[0]).into(), + ctx, + None, + ) + }) + } + _ => { + // this is the annoying case due to function overloading & type inference on number literals + input_exprs.parse(arena, self, ctx, *loc)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let inputs = ctx + .pop_expr_latest(loc, analyzer) + .into_expr_err(loc)? + .unwrap_or_else(|| ExprRet::Multi(vec![])); + let inputs = inputs.flatten(); + if matches!(inputs, ExprRet::CtxKilled(_)) { + ctx.push_expr(inputs, analyzer).into_expr_err(loc)?; + return Ok(()); + } + let resizeables: Vec<_> = inputs.as_flat_vec() + .iter() + .map(|idx| { + match VarType::try_from_idx(analyzer, *idx) { + Some(VarType::BuiltIn(bn, _)) => { + matches!(analyzer.node(bn), Node::Builtin(Builtin::Uint(_)) | Node::Builtin(Builtin::Int(_)) | Node::Builtin(Builtin::Bytes(_))) + } + Some(VarType::Concrete(c)) => { + matches!(analyzer.node(c), Node::Concrete(Concrete::Uint(_, _)) | Node::Concrete(Concrete::Int(_, _)) | Node::Concrete(Concrete::Bytes(_, _))) + } + _ => false + } + }) + .collect(); + if let Some(func) = analyzer.disambiguate_fn_call( + arena, + &ident.name, + resizeables, + &inputs, + &possible_funcs, + ) { + analyzer.setup_fn_call(arena, &loc, &inputs, func.into(), ctx, None) + } else { + Err(ExprErr::FunctionNotFound( + loc, + format!( + "Could not disambiguate function, default input types: {}, possible functions: {:#?}", + inputs.try_as_func_input_str(analyzer, arena), + possible_funcs + .iter() + .map(|i| i.name(analyzer).unwrap()) + .collect::>() + ), + )) + } + }) + } + } + } +} diff --git a/crates/solc-expressions/src/func_call/intrinsic_call/abi.rs b/crates/solc-expressions/src/func_call/intrinsic_call/abi.rs new file mode 100644 index 00000000..589b729d --- /dev/null +++ b/crates/solc-expressions/src/func_call/intrinsic_call/abi.rs @@ -0,0 +1,135 @@ +use crate::func_caller::NamedOrUnnamedArgs; +use crate::{ContextBuilder, ExprErr, ExpressionParser, IntoExprErr}; + +use graph::{ + elem::Elem, + nodes::{Builtin, Concrete, ContextNode, ContextVar, ExprRet}, + AnalyzerBackend, ContextEdge, Edge, Node, +}; +use shared::RangeArena; + +use solang_parser::pt::{Expression, Loc}; + +impl AbiCaller for T where T: AnalyzerBackend + Sized {} + +/// Trait for calling abi-namespaced intrinsic functions +pub trait AbiCaller: AnalyzerBackend + Sized { + /// Perform an `abi.<..>` function call + fn abi_call( + &mut self, + arena: &mut RangeArena>, + func_name: String, + input_exprs: &NamedOrUnnamedArgs, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + match &*func_name { + "abi.decode" => { + // we skip the first because that is what is being decoded. 
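+ // e.g. (illustrative): for `abi.decode(data, (uint256, address))` only the second
+ // argument is parsed; fresh, unconstrained variables of the listed types are pushed
+ // onto the expression stack, and `data` itself is not inspected.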
+ // TODO: check if we have a concrete bytes value + fn match_decode( + ctx: ContextNode, + loc: &Loc, + ret: ExprRet, + analyzer: &mut impl AnalyzerBackend, + ) -> Result<(), ExprErr> { + match ret { + ExprRet::Single(ty) => match analyzer.node(ty) { + Node::Builtin(_) => { + let var = ContextVar::new_from_builtin(*loc, ty.into(), analyzer) + .into_expr_err(*loc)?; + let node = analyzer.add_node(Node::ContextVar(var)); + ctx.add_var(node.into(), analyzer).into_expr_err(*loc)?; + analyzer.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + ctx.push_expr(ExprRet::Single(node), analyzer) + .into_expr_err(*loc)?; + Ok(()) + } + Node::ContextVar(cvar) => { + let bn = analyzer + .builtin_or_add( + cvar.ty.as_builtin(analyzer).into_expr_err(*loc)?, + ) + .into(); + let var = ContextVar::new_from_builtin(*loc, bn, analyzer) + .into_expr_err(*loc)?; + let node = analyzer.add_node(Node::ContextVar(var)); + ctx.add_var(node.into(), analyzer).into_expr_err(*loc)?; + analyzer.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + ctx.push_expr(ExprRet::Single(node), analyzer) + .into_expr_err(*loc)?; + Ok(()) + } + Node::Struct(_) => { + let var = + ContextVar::new_from_struct(*loc, ty.into(), ctx, analyzer) + .into_expr_err(*loc)?; + let node = analyzer.add_node(Node::ContextVar(var)); + ctx.add_var(node.into(), analyzer).into_expr_err(*loc)?; + analyzer.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + ctx.push_expr(ExprRet::Single(node), analyzer) + .into_expr_err(*loc)?; + Ok(()) + } + Node::Contract(_) => { + let var = ContextVar::new_from_contract(*loc, ty.into(), analyzer) + .into_expr_err(*loc)?; + let node = analyzer.add_node(Node::ContextVar(var)); + ctx.add_var(node.into(), analyzer).into_expr_err(*loc)?; + analyzer.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + ctx.push_expr(ExprRet::Single(node), analyzer) + .into_expr_err(*loc)?; + Ok(()) + } + e => todo!("Unhandled type in abi.decode: {e:?}"), + }, + ExprRet::Multi(inner) => inner + .iter() + .try_for_each(|i| match_decode(ctx, loc, i.clone(), analyzer)), + ExprRet::CtxKilled(kind) => { + ctx.kill(analyzer, *loc, kind).into_expr_err(*loc) + } + e => panic!("This is invalid solidity: {:?}", e), + } + } + let input_exprs = input_exprs.unnamed_args().unwrap(); + self.parse_ctx_expr(arena, &input_exprs[1], ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { + return Err(ExprErr::NoRhs( + loc, + "abi.decode was not given the types for decoding".to_string(), + )); + }; + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + match_decode(ctx, &loc, ret, analyzer) + }) + } + "abi.encode" + | "abi.encodePacked" + | "abi.encodeCall" + | "abi.encodeWithSignature" + | "abi.encodeWithSelector" => { + // TODO: Support concrete abi encoding + let bn = self.builtin_or_add(Builtin::DynamicBytes); + let cvar = ContextVar::new_from_builtin(loc, bn.into(), self).into_expr_err(loc)?; + let node = self.add_node(Node::ContextVar(cvar)); + ctx.add_var(node.into(), self).into_expr_err(loc)?; + self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + ctx.push_expr(ExprRet::Single(node), self) + .into_expr_err(loc)?; + Ok(()) + } + _ => Err(ExprErr::FunctionNotFound( + loc, + format!( + "Could not find abi function: \"{func_name}\", context: {}", + ctx.path(self), + ), + )), + } + } +} diff --git a/crates/solc-expressions/src/func_call/intrinsic_call/address.rs b/crates/solc-expressions/src/func_call/intrinsic_call/address.rs new file mode 100644 index 00000000..0abe0230 --- /dev/null +++ b/crates/solc-expressions/src/func_call/intrinsic_call/address.rs @@ -0,0 +1,80 @@ +use crate::func_caller::NamedOrUnnamedArgs; +use crate::{ExprErr, IntoExprErr}; + +use graph::{ + nodes::{Builtin, ContextNode, ContextVar, ExprRet}, + AnalyzerBackend, ContextEdge, Edge, Node, +}; + +use solang_parser::pt::{Expression, Loc}; + +impl AddressCaller for T where T: AnalyzerBackend + Sized {} + +/// Trait for calling address-based intrinsic functions +pub trait AddressCaller: AnalyzerBackend + Sized { + /// Perform an `address.<..>` function call + fn address_call( + &mut self, + func_name: String, + _input_exprs: &NamedOrUnnamedArgs, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + match &*func_name { + "delegatecall" | "staticcall" | "call" => self.external_call(&func_name, loc, ctx), + "code" => { + // TODO: try to be smarter based on the address input + let bn = self.builtin_or_add(Builtin::DynamicBytes); + let cvar = ContextVar::new_from_builtin(loc, bn.into(), self).into_expr_err(loc)?; + let node = self.add_node(Node::ContextVar(cvar)); + ctx.add_var(node.into(), self).into_expr_err(loc)?; + self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + ctx.push_expr(ExprRet::Single(node), self) + .into_expr_err(loc)?; + Ok(()) + } + "balance" => { + // TODO: try to be smarter based on the address input + let bn = self.builtin_or_add(Builtin::Uint(256)); + let cvar = ContextVar::new_from_builtin(loc, bn.into(), self).into_expr_err(loc)?; + let node = self.add_node(Node::ContextVar(cvar)); + ctx.add_var(node.into(), self).into_expr_err(loc)?; + self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + ctx.push_expr(ExprRet::Single(node), self) + .into_expr_err(loc)?; + Ok(()) + } + _ => Err(ExprErr::FunctionNotFound( + loc, + format!( + "Could not find builtin address function: \"{func_name}\", context: {}", + ctx.path(self), + ), + )), + } + } + + fn external_call(&mut self, _ty: &str, loc: Loc, ctx: ContextNode) -> Result<(), ExprErr> { + // TODO: Check if we have the code for the address + // if we dont, model it as a unrestricted call that can make other calls + ctx.pop_expr_latest(loc, self).into_expr_err(loc)?; + // TODO: try to be smarter based on the address input + let booln = self.builtin_or_add(Builtin::Bool); + let bool_cvar = ContextVar::new_from_builtin(loc, 
booln.into(), self).into_expr_err(loc)?; + let bool_node = self.add_node(Node::ContextVar(bool_cvar)); + ctx.add_var(bool_node.into(), self).into_expr_err(loc)?; + self.add_edge(bool_node, ctx, Edge::Context(ContextEdge::Variable)); + + let bn = self.builtin_or_add(Builtin::DynamicBytes); + let cvar = ContextVar::new_from_builtin(loc, bn.into(), self).into_expr_err(loc)?; + let node = self.add_node(Node::ContextVar(cvar)); + ctx.add_var(node.into(), self).into_expr_err(loc)?; + self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + ctx.push_expr( + ExprRet::Multi(vec![ExprRet::Single(bool_node), ExprRet::Single(node)]), + self, + ) + .into_expr_err(loc)?; + Ok(()) + } +} diff --git a/crates/solc-expressions/src/func_call/intrinsic_call/array.rs b/crates/solc-expressions/src/func_call/intrinsic_call/array.rs new file mode 100644 index 00000000..5a3286f4 --- /dev/null +++ b/crates/solc-expressions/src/func_call/intrinsic_call/array.rs @@ -0,0 +1,357 @@ +use crate::func_caller::NamedOrUnnamedArgs; +use crate::{ + array::Array, bin_op::BinOp, ContextBuilder, ExprErr, ExpressionParser, IntoExprErr, ListAccess, +}; + +use graph::{ + elem::*, + nodes::{Concrete, ContextNode, ContextVar, ContextVarNode, ExprRet}, + AnalyzerBackend, Node, +}; +use shared::RangeArena; + +use ethers_core::types::U256; +use solang_parser::pt::{Expression, Loc}; + +impl ArrayCaller for T where T: AnalyzerBackend + Sized {} + +/// Trait for calling array-based intrinsic functions +pub trait ArrayCaller: AnalyzerBackend + Sized { + /// Perform an `array.<..>` function call + fn array_call( + &mut self, + arena: &mut RangeArena>, + func_name: String, + input_exprs: &NamedOrUnnamedArgs, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + match &*func_name { + "push" => { + if input_exprs.len() == 1 { + // array.push() is valid syntax. It pushes a new + // empty element onto the expr ret stack + self.parse_ctx_expr(arena, &input_exprs.unnamed_args().unwrap()[0], ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(array) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "array[].push(..) was not given an element to push".to_string(), + )); + }; + + if matches!(array, ExprRet::CtxKilled(_)) { + ctx.push_expr(array, analyzer).into_expr_err(loc)?; + return Ok(()); + } + + // get length + let arr = array.expect_single().into_expr_err(loc)?; + let arr = ContextVarNode::from(arr).latest_version(analyzer); + + // get length + let len = analyzer + .get_length(arena, ctx, loc, arr, true)? 
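+ // fetch the array's length variable; it is bumped by one further down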
+ .unwrap() + .latest_version(analyzer); + + // get the index access and add it to the stack + let _ = analyzer + .index_into_array_raw(arena, ctx, loc, len, arr, false, false)?; + + // create a temporary 1 variable + let cnode = + analyzer.add_node(Node::Concrete(Concrete::from(U256::from(1)))); + let tmp_one = Node::ContextVar( + ContextVar::new_from_concrete( + Loc::Implicit, + ctx, + cnode.into(), + analyzer, + ) + .into_expr_err(loc)?, + ); + let one = ContextVarNode::from(analyzer.add_node(tmp_one)); + + // add 1 to the length + let tmp_len = + analyzer.op(arena, loc, len, one, ctx, RangeOp::Add(false), false)?; + + let tmp_len = ContextVarNode::from(tmp_len.expect_single().unwrap()); + tmp_len.underlying_mut(analyzer).unwrap().is_tmp = false; + + analyzer.set_var_as_length( + arena, + ctx, + loc, + tmp_len, + arr.latest_version(analyzer), + )?; + + Ok(()) + }) + } else if input_exprs.len() == 2 { + // array.push(value) + self.parse_ctx_expr(arena, &input_exprs.unnamed_args().unwrap()[0], ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(array) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoLhs( + loc, + "array[].push(..) was not an array to push to".to_string(), + )); + }; + if matches!(array, ExprRet::CtxKilled(_)) { + ctx.push_expr(array, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.parse_ctx_expr( + arena, + &input_exprs.unnamed_args().unwrap()[1], + ctx, + )?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(new_elem) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "array[].push(..) was not given an element to push".to_string(), + )); + }; + + if matches!(new_elem, ExprRet::CtxKilled(_)) { + ctx.push_expr(new_elem, analyzer).into_expr_err(loc)?; + return Ok(()); + } + let pushed_value = + ContextVarNode::from(new_elem.expect_single().unwrap()); + + // get length + let arr = array.expect_single().into_expr_err(loc)?; + let arr = ContextVarNode::from(arr).latest_version(analyzer); + + // get length + let len = analyzer + .get_length(arena, ctx, loc, arr, true)? + .unwrap() + .latest_version(analyzer); + + // get the index access for the *previous* length + let index_access = analyzer + .index_into_array_raw(arena, ctx, loc, len, arr, false, true)? 
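+ // this slot (at the old length) receives the pushed value once its range is set below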
+ .unwrap(); + // create a temporary 1 variable + let cnode = + analyzer.add_node(Node::Concrete(Concrete::from(U256::from(1)))); + let tmp_one = Node::ContextVar( + ContextVar::new_from_concrete( + Loc::Implicit, + ctx, + cnode.into(), + analyzer, + ) + .into_expr_err(loc)?, + ); + let one = ContextVarNode::from(analyzer.add_node(tmp_one)); + + // add 1 to the length + let tmp_len = analyzer.op( + arena, + loc, + len, + one, + ctx, + RangeOp::Add(false), + false, + )?; + + let tmp_len = ContextVarNode::from(tmp_len.expect_single().unwrap()); + tmp_len.underlying_mut(analyzer).unwrap().is_tmp = false; + + // set the new length + analyzer.set_var_as_length( + arena, + ctx, + loc, + tmp_len, + arr.latest_version(analyzer), + )?; + + // update the index access's range + let elem = Elem::from(pushed_value); + index_access + .set_range_min(analyzer, arena, elem.clone()) + .into_expr_err(loc)?; + index_access + .set_range_max(analyzer, arena, elem.clone()) + .into_expr_err(loc)?; + + // update the array using the index access + analyzer.update_array_from_index_access( + arena, + ctx, + loc, + len, + index_access.latest_version(analyzer), + arr.latest_version(analyzer), + ) + }) + }) + } else { + return Err(ExprErr::InvalidFunctionInput( + loc, + format!( + "array[].push(..) expected 0 or 1 inputs, got: {}", + input_exprs.len() + ), + )); + } + } + "pop" => { + if input_exprs.len() != 1 { + return Err(ExprErr::InvalidFunctionInput( + loc, + format!( + "array[].pop() expected 0 inputs, got: {}", + input_exprs.len() + ), + )); + } + self.parse_ctx_expr(arena, &input_exprs.unnamed_args().unwrap()[0], ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(array) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs( + loc, + "array[].pop() was not given an array".to_string(), + )); + }; + + if matches!(array, ExprRet::CtxKilled(_)) { + ctx.push_expr(array, analyzer).into_expr_err(loc)?; + return Ok(()); + } + + // get length + let arr = array.expect_single().into_expr_err(loc)?; + let arr = ContextVarNode::from(arr).latest_version(analyzer); + + // get length + let len = analyzer + .get_length(arena, ctx, loc, arr, true)? + .unwrap() + .latest_version(analyzer); + + // create a temporary 1 variable + let cnode = analyzer.add_node(Node::Concrete(Concrete::from(U256::from(1)))); + let tmp_one = Node::ContextVar( + ContextVar::new_from_concrete(Loc::Implicit, ctx, cnode.into(), analyzer) + .into_expr_err(loc)?, + ); + let one = ContextVarNode::from(analyzer.add_node(tmp_one)); + + // subtract 1 from the length + let tmp_len = + analyzer.op(arena, loc, len, one, ctx, RangeOp::Sub(false), false)?; + + let tmp_len = ContextVarNode::from(tmp_len.expect_single().unwrap()); + tmp_len.underlying_mut(analyzer).unwrap().is_tmp = false; + + // get the index access + let index_access = analyzer + .index_into_array_raw(arena, ctx, loc, tmp_len, arr, false, true)? + .unwrap(); + + analyzer.set_var_as_length( + arena, + ctx, + loc, + tmp_len, + arr.latest_version(analyzer), + )?; + index_access + .set_range_min(analyzer, arena, Elem::Null) + .into_expr_err(loc)?; + index_access + .set_range_max(analyzer, arena, Elem::Null) + .into_expr_err(loc)?; + + analyzer.update_array_from_index_access( + arena, + ctx, + loc, + tmp_len, + index_access.latest_version(analyzer), + arr.latest_version(analyzer), + ) + // let Some(array) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { + // return Err(ExprErr::NoLhs( + // loc, + // "array[].pop() was not an array to pop from".to_string(), + // )); + // }; + // if matches!(array, ExprRet::CtxKilled(_)) { + // ctx.push_expr(array, analyzer).into_expr_err(loc)?; + // return Ok(()); + // } + + // let arr = array.expect_single().into_expr_err(loc)?; + // let arr = ContextVarNode::from(arr).latest_version(analyzer); + // // get length + // let len = analyzer.get_length(ctx, loc, arr, true)?.unwrap().latest_version(analyzer); + + // // Subtract one from it + // let cnode = analyzer.add_node(Node::Concrete(Concrete::from(U256::from(1)))); + // let tmp_one = Node::ContextVar( + // ContextVar::new_from_concrete(Loc::Implicit, ctx, cnode.into(), analyzer) + // .into_expr_err(loc)?, + // ); + // let one = ContextVarNode::from(analyzer.add_node(tmp_one.clone())); + // let new_len_expr = analyzer.op( + // loc, + // len, + // one, + // ctx, + // RangeOp::Sub(false), + // false, + // )?; + + // if matches!(new_len_expr, ExprRet::CtxKilled(_)) { + // ctx.push_expr(new_len_expr, analyzer).into_expr_err(loc)?; + // return Ok(()); + // } + + // // connect the new length + // let new_len = ContextVarNode::from(new_len_expr.expect_single().unwrap()).latest_version(analyzer); + // let next_arr = analyzer.advance_var_in_ctx(arr.latest_version(analyzer), loc, ctx)?; + // analyzer.add_edge(new_len.latest_version(analyzer), next_arr, Edge::Context(ContextEdge::AttrAccess("length"))); + + // let min = Elem::from(arr).set_indices(RangeDyn::new_for_indices(vec![(new_len.into(), Elem::Null)], loc)); //.set_length(new_len.into()); + // let max = Elem::from(arr).set_indices(RangeDyn::new_for_indices(vec![(new_len.into(), Elem::Null)], loc)); //.set_length(new_len.into()); + + // let cnode = analyzer.add_node(Node::Concrete(Concrete::from(U256::zero()))); + // let tmp_zero = Node::ContextVar( + // ContextVar::new_from_concrete(Loc::Implicit, ctx, cnode.into(), analyzer) + // .into_expr_err(loc)?, + // ); + // let zero = ContextVarNode::from(analyzer.add_node(tmp_one)); + // analyzer.add_edge(zero, next_arr.latest_version(analyzer), Edge::Context(ContextEdge::StorageWrite)); + // next_arr + // .set_range_min(analyzer, min) + // .into_expr_err(loc)?; + // next_arr + // .set_range_max(analyzer, max) + // .into_expr_err(loc) + }) + } + _ => Err(ExprErr::FunctionNotFound( + loc, + format!( + "Could not find builtin array function: \"{func_name}\", context: {}", + ctx.path(self), + ), + )), + } + } +} diff --git a/crates/solc-expressions/src/func_call/intrinsic_call/block.rs b/crates/solc-expressions/src/func_call/intrinsic_call/block.rs new file mode 100644 index 00000000..36755007 --- /dev/null +++ b/crates/solc-expressions/src/func_call/intrinsic_call/block.rs @@ -0,0 +1,61 @@ +use crate::func_caller::NamedOrUnnamedArgs; +use crate::{ContextBuilder, ExprErr, IntoExprErr}; + +use graph::{ + elem::Elem, + nodes::{Builtin, Concrete, ContextNode, ContextVar, ExprRet}, + AnalyzerBackend, Node, +}; +use shared::RangeArena; + +use solang_parser::pt::{Expression, Loc}; + +impl BlockCaller for T where T: AnalyzerBackend + Sized {} + +/// Trait for calling block-based intrinsic functions +pub trait BlockCaller: AnalyzerBackend + Sized { + /// Perform a `block` function call + fn block_call( + &mut self, + arena: &mut RangeArena>, + func_name: String, + input_exprs: &NamedOrUnnamedArgs, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + match &*func_name { + "blockhash" => { + input_exprs.parse_n(arena, 1, self, ctx, loc)?; + 
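+ // e.g. (illustrative): `blockhash(block.number - 1)` parses the argument but does not
+ // use its value; the result is modeled as an unconstrained `bytes32` variable.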
self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(input) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs( + loc, + "blockhash function was not provided a block number".to_string(), + )); + }; + if matches!(input, ExprRet::CtxKilled(_)) { + ctx.push_expr(input, analyzer).into_expr_err(loc)?; + return Ok(()); + } + let var = ContextVar::new_from_builtin( + loc, + analyzer.builtin_or_add(Builtin::Bytes(32)).into(), + analyzer, + ) + .into_expr_err(loc)?; + let cvar = analyzer.add_node(Node::ContextVar(var)); + ctx.push_expr(ExprRet::Single(cvar), analyzer) + .into_expr_err(loc)?; + Ok(()) + }) + } + _ => Err(ExprErr::FunctionNotFound( + loc, + format!( + "Could not find builtin block function: \"{func_name}\", context: {}", + ctx.path(self), + ), + )), + } + } +} diff --git a/crates/solc-expressions/src/func_call/intrinsic_call/constructors.rs b/crates/solc-expressions/src/func_call/intrinsic_call/constructors.rs new file mode 100644 index 00000000..88ce7772 --- /dev/null +++ b/crates/solc-expressions/src/func_call/intrinsic_call/constructors.rs @@ -0,0 +1,225 @@ +use crate::func_caller::NamedOrUnnamedArgs; +use crate::{ + assign::Assign, func_call::helper::CallerHelper, ContextBuilder, ExprErr, ExpressionParser, + IntoExprErr, +}; + +use graph::{ + elem::*, + nodes::{Concrete, ContextNode, ContextVar, ContextVarNode, ExprRet, StructNode}, + AnalyzerBackend, ContextEdge, Edge, Node, Range, VarType, +}; +use shared::{NodeIdx, RangeArena}; + +use solang_parser::pt::{Expression, Loc}; + +impl ConstructorCaller for T where + T: AnalyzerBackend + Sized + CallerHelper +{ +} + +/// Trait for constructing compound types like contracts, structs and arrays +pub trait ConstructorCaller: + AnalyzerBackend + Sized + CallerHelper +{ + /// Construct an array + fn construct_array( + &mut self, + arena: &mut RangeArena>, + func_idx: NodeIdx, + input_exprs: &NamedOrUnnamedArgs, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + // create a new list + self.parse_ctx_expr(arena, &input_exprs.unnamed_args().unwrap()[0], ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(len_var) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs(loc, "Array creation failed".to_string())); + }; + + if matches!(len_var, ExprRet::CtxKilled(_)) { + ctx.push_expr(len_var, analyzer).into_expr_err(loc)?; + return Ok(()); + } + let len_cvar = len_var.expect_single().into_expr_err(loc)?; + + let ty = VarType::try_from_idx(analyzer, func_idx); + + let new_arr = ContextVar { + loc: Some(loc), + name: format!("tmp_arr{}", ctx.new_tmp(analyzer).into_expr_err(loc)?), + display_name: "arr".to_string(), + storage: None, + is_tmp: true, + is_symbolic: false, + is_return: false, + tmp_of: None, + dep_on: None, + ty: ty.expect("No type for node"), + }; + + let arr = ContextVarNode::from(analyzer.add_node(Node::ContextVar(new_arr))); + + let len_var = ContextVar { + loc: Some(loc), + name: arr.name(analyzer).into_expr_err(loc)? + ".length", + display_name: arr.display_name(analyzer).unwrap() + ".length", + storage: None, + is_tmp: true, + tmp_of: None, + dep_on: None, + is_symbolic: true, + is_return: false, + ty: ContextVarNode::from(len_cvar) + .underlying(analyzer) + .into_expr_err(loc)? 
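+ // the temporary array's length variable mirrors the type of the parsed length expression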
+ .ty + .clone(), + }; + + let len_cvar = analyzer.add_node(Node::ContextVar(len_var)); + analyzer.add_edge(arr, ctx, Edge::Context(ContextEdge::Variable)); + ctx.add_var(arr, analyzer).into_expr_err(loc)?; + analyzer.add_edge(len_cvar, ctx, Edge::Context(ContextEdge::Variable)); + ctx.add_var(len_cvar.into(), analyzer).into_expr_err(loc)?; + analyzer.add_edge( + len_cvar, + arr, + Edge::Context(ContextEdge::AttrAccess("length")), + ); + + // update the length + if let Some(r) = arr.ref_range(analyzer).into_expr_err(loc)? { + let min = r.evaled_range_min(analyzer, arena).into_expr_err(loc)?; + let max = r.evaled_range_max(analyzer, arena).into_expr_err(loc)?; + + if let Some(mut rd) = min.maybe_range_dyn() { + rd.len = Box::new(Elem::from(len_cvar)); + arr.set_range_min(analyzer, arena, Elem::ConcreteDyn(rd)) + .into_expr_err(loc)?; + } + + if let Some(mut rd) = max.maybe_range_dyn() { + rd.len = Box::new(Elem::from(len_cvar)); + arr.set_range_min(analyzer, arena, Elem::ConcreteDyn(rd)) + .into_expr_err(loc)?; + } + } + + ctx.push_expr(ExprRet::Single(arr.into()), analyzer) + .into_expr_err(loc)?; + Ok(()) + }) + } + + /// Construct a contract + fn construct_contract( + &mut self, + arena: &mut RangeArena>, + func_idx: NodeIdx, + input_exprs: &NamedOrUnnamedArgs, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + // construct a new contract + if !input_exprs.is_empty() { + self.parse_ctx_expr(arena, &input_exprs.unnamed_args().unwrap()[0], ctx)?; + } + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + if !input_exprs.is_empty() { + let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs(loc, "Contract creation failed".to_string())); + }; + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + } + + let var = match ContextVar::maybe_from_user_ty(analyzer, loc, func_idx) { + Some(v) => v, + None => { + return Err(ExprErr::VarBadType( + loc, + format!( + "Could not create context variable from user type: {:?}", + analyzer.node(func_idx) + ), + )) + } + }; + // let idx = ret.expect_single().into_expr_err(loc)?; + let contract_cvar = ContextVarNode::from(analyzer.add_node(Node::ContextVar(var))); + // contract_cvar + // .set_range_min(analyzer, Elem::from(idx)) + // .into_expr_err(loc)?; + // contract_cvar + // .set_range_max(analyzer, Elem::from(idx)) + // .into_expr_err(loc)?; + ctx.push_expr(ExprRet::Single(contract_cvar.into()), analyzer) + .into_expr_err(loc) + }) + } + + /// Construct a struct + fn construct_struct( + &mut self, + arena: &mut RangeArena>, + func_idx: NodeIdx, + input_exprs: &NamedOrUnnamedArgs, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + // struct construction + let strukt = StructNode::from(func_idx); + let var = ContextVar::new_from_struct(loc, strukt, ctx, self).into_expr_err(loc)?; + let cvar = self.add_node(Node::ContextVar(var)); + ctx.add_var(cvar.into(), self).into_expr_err(loc)?; + self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); + + input_exprs.parse(arena, self, ctx, loc)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(inputs) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { + return Err(ExprErr::NoRhs( + loc, + "Struct Function call failed".to_string(), + )); + }; + + let inputs = inputs.as_vec(); + // set struct fields + strukt + .fields(analyzer) + .iter() + .zip(inputs) + .try_for_each(|(field, input)| { + let field_cvar = ContextVar::maybe_new_from_field( + analyzer, + loc, + ContextVarNode::from(cvar) + .underlying(analyzer) + .into_expr_err(loc)?, + field.underlying(analyzer).unwrap().clone(), + ) + .expect("Invalid struct field"); + + let fc_node = analyzer.add_node(Node::ContextVar(field_cvar)); + analyzer.add_edge( + fc_node, + cvar, + Edge::Context(ContextEdge::AttrAccess("field")), + ); + analyzer.add_edge(fc_node, ctx, Edge::Context(ContextEdge::Variable)); + ctx.add_var(fc_node.into(), analyzer).into_expr_err(loc)?; + let field_as_ret = ExprRet::Single(fc_node); + analyzer.match_assign_sides(arena, ctx, loc, &field_as_ret, &input)?; + let _ = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)?; + Ok(()) + })?; + + ctx.push_expr(ExprRet::Single(cvar), analyzer) + .into_expr_err(loc) + }) + } +} diff --git a/crates/solc-expressions/src/func_call/intrinsic_call/dyn_builtin.rs b/crates/solc-expressions/src/func_call/intrinsic_call/dyn_builtin.rs new file mode 100644 index 00000000..d7f88fac --- /dev/null +++ b/crates/solc-expressions/src/func_call/intrinsic_call/dyn_builtin.rs @@ -0,0 +1,265 @@ +use crate::func_caller::NamedOrUnnamedArgs; +use crate::{ + variable::Variable, ContextBuilder, ExprErr, ExpressionParser, IntoExprErr, ListAccess, +}; + +use graph::{ + elem::{Elem, RangeElem}, + nodes::{Builtin, Concrete, ContextNode, ContextVarNode, ExprRet}, + AnalyzerBackend, ContextEdge, Edge, Node, SolcRange, VarType, +}; +use shared::RangeArena; + +use solang_parser::pt::{Expression, Loc}; + +impl DynBuiltinCaller for T where T: AnalyzerBackend + Sized +{} + +/// Trait for calling dynamic builtin-based intrinsic functions, like `concat` +pub trait DynBuiltinCaller: AnalyzerBackend + Sized { + /// Perform a dynamic builtin type's builtin function call + fn dyn_builtin_call( + &mut self, + arena: &mut RangeArena>, + func_name: String, + input_exprs: &NamedOrUnnamedArgs, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + match &*func_name { + "concat" => self.concat(arena, &loc, input_exprs, ctx), + _ => Err(ExprErr::FunctionNotFound( + loc, + format!( + "Could not find builtin dynamic builtin function: \"{func_name}\", context: {}", + ctx.path(self), + ), + )), + } + } + + #[tracing::instrument(level = "trace", skip_all)] + /// Concatenate two dynamic builtins + fn concat( + &mut self, + arena: &mut RangeArena>, + loc: &Loc, + input_exprs: &NamedOrUnnamedArgs, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + input_exprs.unnamed_args().unwrap()[1..] + .iter() + .try_for_each(|expr| { + self.parse_ctx_expr(arena, expr, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let input = ctx + .pop_expr_latest(loc, analyzer) + .into_expr_err(loc)? + .unwrap_or(ExprRet::Null); + ctx.append_tmp_expr(input, analyzer).into_expr_err(loc) + }) + })?; + + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(inputs) = ctx.pop_tmp_expr(loc, analyzer).into_expr_err(loc)? 
else { + return Err(ExprErr::NoRhs(loc, "Concatenation failed".to_string())); + }; + if matches!(inputs, ExprRet::CtxKilled(_)) { + ctx.push_expr(inputs, analyzer).into_expr_err(loc)?; + return Ok(()); + } + let inputs = inputs.as_vec(); + if inputs.is_empty() { + ctx.push_expr(ExprRet::Multi(vec![]), analyzer) + .into_expr_err(loc)?; + Ok(()) + } else { + let start = &inputs[0]; + if inputs.len() > 1 { + analyzer.match_concat(arena, ctx, loc, start.clone(), &inputs[1..], false) + } else { + analyzer.match_concat(arena, ctx, loc, start.clone(), &[], false) + } + } + }) + } + + /// Match on the expression returns + fn match_concat( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + curr: ExprRet, + inputs: &[ExprRet], + has_accum_node: bool, + ) -> Result<(), ExprErr> { + if has_accum_node { + match curr.flatten() { + ExprRet::Single(var) | ExprRet::SingleLiteral(var) => { + // pop the accumulation node off the stack + let accum_node = ctx + .pop_expr_latest(loc, self) + .into_expr_err(loc)? + .unwrap() + .expect_single() + .unwrap(); + + let accum_node = self.advance_var_in_ctx(accum_node.into(), loc, ctx)?; + let name = accum_node.display_name(self).into_expr_err(loc)?; + let next_var = ContextVarNode::from(var); + let next_name = next_var.display_name(self).into_expr_err(loc)?; + accum_node + .underlying_mut(self) + .into_expr_err(loc)? + .display_name = format!("concat({name}, {next_name})"); + + // concat into it + self.concat_inner(arena, loc, accum_node, next_var)?; + + // add it back to the stack + ctx.push_expr(ExprRet::Single(accum_node.into()), self) + .into_expr_err(loc)?; + + Ok(()) + } + ExprRet::Null => Ok(()), + ExprRet::Multi(inner) => inner + .into_iter() + .try_for_each(|i| self.match_concat(arena, ctx, loc, i, inputs, true)), + ExprRet::CtxKilled(kind) => ctx.kill(self, loc, kind).into_expr_err(loc), + } + } else { + match curr.flatten() { + ExprRet::Single(var) | ExprRet::SingleLiteral(var) => { + let acc = ContextVarNode::from(var) + .as_tmp(loc, ctx, self) + .into_expr_err(loc)?; + self.add_edge(acc.0, ctx, Edge::Context(ContextEdge::Variable)); + ctx.add_var(acc, self).into_expr_err(loc)?; + + ctx.push_expr(ExprRet::Single(acc.into()), self) + .into_expr_err(loc)?; + + inputs + .iter() + .map(|i| self.match_concat(arena, ctx, loc, i.clone(), inputs, true)) + .collect::, ExprErr>>()?; + + // create the length variable + let _ = self.tmp_length(arena, acc.latest_version(self), ctx, loc); + + Ok(()) + } + ExprRet::Null => Err(ExprErr::NoRhs( + loc, + "No input provided to concat function".to_string(), + )), + ExprRet::Multi(inner) => inner + .into_iter() + .try_for_each(|i| self.match_concat(arena, ctx, loc, i, inputs, false)), + ExprRet::CtxKilled(kind) => ctx.kill(self, loc, kind).into_expr_err(loc), + } + } + } + + /// Perform the concatenation + fn concat_inner( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + accum: ContextVarNode, + right: ContextVarNode, + ) -> Result<(), ExprErr> { + match ( + accum.ty(self).into_expr_err(loc)?, + right.ty(self).into_expr_err(loc)?, + ) { + (VarType::Concrete(accum_cnode), VarType::Concrete(right_cnode)) => { + let new_ty = match ( + accum_cnode.underlying(self).into_expr_err(loc)?, + right_cnode.underlying(self).into_expr_err(loc)?, + ) { + (accum_node @ Concrete::String(..), right_node @ Concrete::String(..)) => { + let new_val = accum_node.clone().concat(right_node).unwrap(); + let new_cnode = self.add_node(Node::Concrete(new_val)); + VarType::Concrete(new_cnode.into()) + } + (accum_node @ 
Concrete::DynBytes(..), right_node @ Concrete::DynBytes(..)) => { + let new_val = accum_node.clone().concat(right_node).unwrap(); + let new_cnode = self.add_node(Node::Concrete(new_val)); + VarType::Concrete(new_cnode.into()) + } + (a, b) => { + // Invalid solidity + return Err(ExprErr::InvalidFunctionInput(loc, format!("Type mismatch: {a:?} for left hand side and type: {b:?} for right hand side"))); + } + }; + accum.underlying_mut(self).into_expr_err(loc)?.ty = new_ty; + Ok(()) + } + (VarType::Concrete(accum_cnode), VarType::BuiltIn(_bn, Some(r2))) => { + let underlying = accum_cnode.underlying(self).into_expr_err(loc)?; + // let val = match underlying { + // Concrete::String(val) => { + // val + // .chars() + // .enumerate() + // .map(|(i, v)| { + // let idx = Elem::from(Concrete::from(U256::from(i))); + // let mut bytes = [0x00; 32]; + // v.encode_utf8(&mut bytes[..]); + // let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); + // (idx, v) + // }) + // .collect::>() + // } + // Concrete::DynBytes(val) => { + // val + // .iter() + // .enumerate() + // .map(|(i, v)| { + // let idx = Elem::from(Concrete::from(U256::from(i))); + // let mut bytes = [0x00; 32]; + // bytes[0] = *v; + // let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); + // (idx, v) + // }) + // .collect::>() + // } + // b => return Err(ExprErr::InvalidFunctionInput(loc, format!("Type mismatch: expected String or Bytes for concat input but found: {b:?}"))) + // }; + // TODO: Extend with bn + + let range = SolcRange::from(underlying.clone()).unwrap(); + let min = range.min.clone().concat(r2.min.clone()); + let max = range.max.clone().concat(r2.max.clone()); + accum.set_range_min(self, arena, min).into_expr_err(loc)?; + accum.set_range_max(self, arena, max).into_expr_err(loc)?; + + let new_ty = + VarType::BuiltIn(self.builtin_or_add(Builtin::String).into(), Some(range)); + accum.underlying_mut(self).into_expr_err(loc)?.ty = new_ty; + Ok(()) + } + (VarType::BuiltIn(_bn, Some(r)), VarType::BuiltIn(_bn2, Some(r2))) => { + let min = r + .min + .clone() + .concat(r2.min.clone()) + .simplify_minimize(self, arena) + .into_expr_err(loc)?; + let max = r + .max + .clone() + .concat(r2.max.clone()) + .simplify_maximize(self, arena) + .into_expr_err(loc)?; + accum.set_range_min(self, arena, min).into_expr_err(loc)?; + accum.set_range_max(self, arena, max).into_expr_err(loc)?; + Ok(()) + } + (_, _) => Ok(()), + } + } +} diff --git a/crates/solc-expressions/src/func_call/intrinsic_call/intrinsic_caller.rs b/crates/solc-expressions/src/func_call/intrinsic_call/intrinsic_caller.rs new file mode 100644 index 00000000..c3c21974 --- /dev/null +++ b/crates/solc-expressions/src/func_call/intrinsic_call/intrinsic_caller.rs @@ -0,0 +1,320 @@ +use crate::context_builder::ExpressionParser; +use crate::func_call::func_caller::FuncCaller; +use crate::func_caller::NamedOrUnnamedArgs; +use crate::{ + func_call::helper::CallerHelper, + intrinsic_call::{ + AbiCaller, AddressCaller, ArrayCaller, BlockCaller, ConstructorCaller, DynBuiltinCaller, + MsgCaller, PrecompileCaller, SolidityCaller, TypesCaller, + }, + ContextBuilder, ExprErr, IntoExprErr, +}; +use graph::nodes::ContextVar; +use graph::nodes::ContextVarNode; +use graph::nodes::ContractNode; + +use graph::{ + elem::Elem, + nodes::{Builtin, Concrete, ContextNode, ExprRet}, + AnalyzerBackend, Node, +}; +use shared::{NodeIdx, RangeArena}; + +use solang_parser::pt::{Expression, Loc}; + +/// Supertrait of individual types of calls like abi, address, etc. 
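+/// Implemented automatically (via the blanket impl below) for any analyzer that implements each of the individual caller traits (abi, address, array, block, dynamic builtins, precompiles, solidity builtins, types, constructors, and msg) plus `CallerHelper`.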
+pub trait CallerParts: + AbiCaller + + AddressCaller + + ArrayCaller + + BlockCaller + + DynBuiltinCaller + + PrecompileCaller + + SolidityCaller + + TypesCaller + + ConstructorCaller + + MsgCaller + + CallerHelper +{ +} + +impl CallerParts for T where + T: AbiCaller + + AddressCaller + + ArrayCaller + + BlockCaller + + DynBuiltinCaller + + PrecompileCaller + + SolidityCaller + + TypesCaller + + ConstructorCaller + + MsgCaller + + CallerHelper +{ +} + +impl IntrinsicFuncCaller for T where + T: AnalyzerBackend + Sized + CallerParts +{ +} + +/// Perform calls to intrinsic functions like `abi.encode`, `array.push`, `require`, and constructors etc. +pub trait IntrinsicFuncCaller: + AnalyzerBackend + Sized + CallerParts +{ + fn new_call( + &mut self, + arena: &mut RangeArena>, + loc: &Loc, + ty_expr: &Expression, + inputs: &[Expression], + ctx: ContextNode, + ) -> Result<(), ExprErr> { + self.parse_ctx_expr(arena, ty_expr, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(ty) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoLhs( + loc, + "No type given for call to `new`".to_string(), + )); + }; + let ty_idx = ty.expect_single().into_expr_err(loc)?; + match analyzer.node(ty_idx) { + Node::Builtin(Builtin::Array(_)) | Node::Builtin(Builtin::DynamicBytes) => { + // construct a new list + analyzer.construct_array(arena,ty_idx, &NamedOrUnnamedArgs::Unnamed(inputs), loc, ctx) + } + Node::Contract(_c) => { + let cnode = ContractNode::from(ty_idx); + if let Some(constructor) = cnode.constructor(analyzer) { + let params = constructor.params(analyzer); + if params.is_empty() { + // call the constructor + let inputs = ExprRet::Multi(vec![]); + analyzer.func_call( + arena, + ctx, + loc, + &inputs, + constructor, + None, + None, + )?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let var = match ContextVar::maybe_from_user_ty(analyzer, loc, ty_idx) { + Some(v) => v, + None => { + return Err(ExprErr::VarBadType( + loc, + format!( + "Could not create context variable from user type: {:?}", + analyzer.node(ty_idx) + ), + )) + } + }; + let contract_cvar = + ContextVarNode::from(analyzer.add_node(Node::ContextVar(var))); + ctx.push_expr(ExprRet::Single(contract_cvar.into()), analyzer) + .into_expr_err(loc) + }) + } else { + analyzer.parse_inputs(arena,ctx, loc, inputs)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(input_paths) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
+ else { + return Err(ExprErr::NoRhs( + loc, + "No inputs for constructor and expected some".to_string(), + )); + }; + // call the constructor + analyzer.func_call( + arena, + ctx, + loc, + &input_paths, + constructor, + None, + None, + )?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let var = match ContextVar::maybe_from_user_ty(analyzer, loc, ty_idx) { + Some(v) => v, + None => { + return Err(ExprErr::VarBadType( + loc, + format!( + "Could not create context variable from user type: {:?}", + analyzer.node(ty_idx) + ), + )) + } + }; + let contract_cvar = + ContextVarNode::from(analyzer.add_node(Node::ContextVar(var))); + ctx.push_expr(ExprRet::Single(contract_cvar.into()), analyzer) + .into_expr_err(loc) + }) + }) + } + } else { + let var = match ContextVar::maybe_from_user_ty(analyzer, loc, ty_idx) { + Some(v) => v, + None => { + return Err(ExprErr::VarBadType( + loc, + format!( + "Could not create context variable from user type: {:?}", + analyzer.node(ty_idx) + ), + )) + } + }; + let contract_cvar = + ContextVarNode::from(analyzer.add_node(Node::ContextVar(var))); + ctx.push_expr(ExprRet::Single(contract_cvar.into()), analyzer) + .into_expr_err(loc) + } + } + e => Err(ExprErr::ParseError(loc, format!("Tried to construct a new element of a type ({e:?}) that doesn't support the `new` keyword"))) + } + }) + } + + /// Calls an intrinsic/builtin function call (casts, require, etc.) + #[tracing::instrument(level = "trace", skip_all)] + fn intrinsic_func_call( + &mut self, + arena: &mut RangeArena>, + loc: &Loc, + input_exprs: &NamedOrUnnamedArgs, + func_idx: NodeIdx, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + match self.node(func_idx) { + Node::Function(underlying) => { + if let Some(func_name) = &underlying.name { + match &*func_name.name { + // abi + _ if func_name.name.starts_with("abi.") => { + self.abi_call(arena, func_name.name.clone(), input_exprs, *loc, ctx) + } + // address + "delegatecall" | "staticcall" | "call" | "code" | "balance" => { + self.address_call(func_name.name.clone(), input_exprs, *loc, ctx) + } + // array + "push" | "pop" => { + self.array_call(arena, func_name.name.clone(), input_exprs, *loc, ctx) + } + // block + "blockhash" => { + self.block_call(arena, func_name.name.clone(), input_exprs, *loc, ctx) + } + // dynamic sized builtins + "concat" => self.dyn_builtin_call( + arena, + func_name.name.clone(), + input_exprs, + *loc, + ctx, + ), + // msg + "gasleft" => self.msg_call(func_name.name.clone(), input_exprs, *loc, ctx), + // precompiles + "sha256" | "ripemd160" | "ecrecover" => self.precompile_call( + arena, + func_name.name.clone(), + func_idx, + input_exprs, + *loc, + ctx, + ), + // solidity + "keccak256" | "addmod" | "mulmod" | "require" | "assert" => self + .solidity_call(arena, func_name.name.clone(), input_exprs, *loc, ctx), + // typing + "type" | "wrap" | "unwrap" => self.types_call( + arena, + func_name.name.clone(), + func_idx, + input_exprs, + *loc, + ctx, + ), + e => Err(ExprErr::Todo( + *loc, + format!("builtin function: {e:?} doesn't exist or isn't implemented"), + )), + } + } else { + panic!("unnamed builtin?") + } + } + Node::Builtin(Builtin::Array(_)) => { + // construct a new array + self.construct_array(arena, func_idx, input_exprs, *loc, ctx) + } + Node::Contract(_) => { + // construct a new contract + self.construct_contract(arena, func_idx, input_exprs, *loc, ctx) + } + Node::Struct(_) => { + // construct a struct + self.construct_struct(arena, func_idx, input_exprs, *loc, ctx) + } + Node::Builtin(ty) 
=> { + // cast to type + self.cast(arena, ty.clone(), func_idx, input_exprs, *loc, ctx) + } + Node::ContextVar(_c) => { + // it's a user type, just push it onto the stack + ctx.push_expr(ExprRet::Single(func_idx), self) + .into_expr_err(*loc)?; + Ok(()) + } + Node::Unresolved(_) => { + // Try to give a nice error + input_exprs.parse(arena, self, ctx, *loc)?; + + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(inputs) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs(loc, "Function call failed".to_string())) + }; + + if matches!(inputs, ExprRet::CtxKilled(_)) { + ctx.push_expr(inputs, analyzer).into_expr_err(loc)?; + return Ok(()); + } + let visible_funcs = ctx.visible_funcs(analyzer).into_expr_err(loc)? + .iter() + .map(|func| func.name(analyzer).unwrap()) + .collect::>(); + + if let Node::Unresolved(ident) = analyzer.node(func_idx) { + Err(ExprErr::FunctionNotFound( + loc, + format!( + "Could not find function: \"{}{}\", context: {}, visible functions: {:#?}", + ident.name, + inputs.try_as_func_input_str(analyzer, arena), + ctx.path(analyzer), + visible_funcs + ) + )) + } else { + unreachable!() + } + }) + } + e => Err(ExprErr::FunctionNotFound( + *loc, + format!("Unhandled function call type: {e:?}"), + )), + } + } +} diff --git a/crates/solc-expressions/src/func_call/intrinsic_call/mod.rs b/crates/solc-expressions/src/func_call/intrinsic_call/mod.rs new file mode 100644 index 00000000..9a33e6da --- /dev/null +++ b/crates/solc-expressions/src/func_call/intrinsic_call/mod.rs @@ -0,0 +1,24 @@ +//! Traits & blanket implementations that facilitate performing intrinsic function calls. +mod abi; +mod address; +mod array; +mod block; +mod constructors; +mod dyn_builtin; +mod intrinsic_caller; +mod msg; +mod precompile; +mod solidity; +mod types; + +pub use abi::*; +pub use address::*; +pub use array::*; +pub use block::*; +pub use constructors::*; +pub use dyn_builtin::*; +pub use intrinsic_caller::*; +pub use msg::*; +pub use precompile::*; +pub use solidity::*; +pub use types::*; diff --git a/crates/solc-expressions/src/func_call/intrinsic_call/msg.rs b/crates/solc-expressions/src/func_call/intrinsic_call/msg.rs new file mode 100644 index 00000000..26b1da9d --- /dev/null +++ b/crates/solc-expressions/src/func_call/intrinsic_call/msg.rs @@ -0,0 +1,45 @@ +use crate::func_caller::NamedOrUnnamedArgs; +use crate::{ExprErr, IntoExprErr}; + +use graph::{ + nodes::{Builtin, ContextNode, ContextVar, ExprRet}, + AnalyzerBackend, Node, +}; + +use solang_parser::pt::{Expression, Loc}; + +impl MsgCaller for T where T: AnalyzerBackend + Sized {} + +/// Trait for calling msg-based intrinsic functions, like `gasleft` +pub trait MsgCaller: AnalyzerBackend + Sized { + /// Perform a msg's builtin function call, like `gasleft()` + fn msg_call( + &mut self, + func_name: String, + _input_exprs: &NamedOrUnnamedArgs, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + match &*func_name { + "gasleft" => { + let var = ContextVar::new_from_builtin( + loc, + self.builtin_or_add(Builtin::Uint(64)).into(), + self, + ) + .into_expr_err(loc)?; + let cvar = self.add_node(Node::ContextVar(var)); + ctx.push_expr(ExprRet::Single(cvar), self) + .into_expr_err(loc)?; + Ok(()) + } + _ => Err(ExprErr::FunctionNotFound( + loc, + format!( + "Could not find builtin msg function: \"{func_name}\", context: {}", + ctx.path(self), + ), + )), + } + } +} diff --git a/crates/solc-expressions/src/func_call/intrinsic_call/precompile.rs
b/crates/solc-expressions/src/func_call/intrinsic_call/precompile.rs new file mode 100644 index 00000000..7dcd2249 --- /dev/null +++ b/crates/solc-expressions/src/func_call/intrinsic_call/precompile.rs @@ -0,0 +1,206 @@ +use crate::func_caller::NamedOrUnnamedArgs; +use crate::{ + func_call::helper::CallerHelper, ContextBuilder, ExprErr, ExpressionParser, IntoExprErr, +}; +use graph::nodes::FunctionNode; + +use graph::{ + elem::Elem, + nodes::{Builtin, Concrete, Context, ContextNode, ContextVar, ContextVarNode, ExprRet}, + AnalyzerBackend, ContextEdge, Edge, Node, +}; +use shared::{NodeIdx, RangeArena}; + +use solang_parser::pt::{Expression, Loc}; + +impl PrecompileCaller for T where + T: AnalyzerBackend + Sized + CallerHelper +{ +} + +/// Trait for calling precompile intrinsic functions, like `ecrecover` +pub trait PrecompileCaller: + AnalyzerBackend + Sized + CallerHelper +{ + /// Perform a precompile's function call, like `ecrecover` + fn precompile_call( + &mut self, + arena: &mut RangeArena>, + func_name: String, + func_idx: NodeIdx, + input_exprs: &NamedOrUnnamedArgs, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + match &*func_name { + "sha256" => { + self.parse_ctx_expr(arena, &input_exprs.unnamed_args().unwrap()[0], ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(input) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs( + loc, + "sha256 call was not given input".to_string(), + )); + }; + if matches!(input, ExprRet::CtxKilled(_)) { + ctx.push_expr(input, analyzer).into_expr_err(loc)?; + return Ok(()); + } + let var = ContextVar::new_from_builtin( + loc, + analyzer.builtin_or_add(Builtin::Bytes(32)).into(), + analyzer, + ) + .into_expr_err(loc)?; + let cvar = analyzer.add_node(Node::ContextVar(var)); + ctx.push_expr(ExprRet::Single(cvar), analyzer) + .into_expr_err(loc)?; + Ok(()) + }) + } + "ripemd160" => { + self.parse_ctx_expr(arena, &input_exprs.unnamed_args().unwrap()[0], ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(input) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs( + loc, + "ripemd160 was not given input".to_string(), + )); + }; + if matches!(input, ExprRet::CtxKilled(_)) { + ctx.push_expr(input, analyzer).into_expr_err(loc)?; + return Ok(()); + } + let var = ContextVar::new_from_builtin( + loc, + analyzer.builtin_or_add(Builtin::Bytes(32)).into(), + analyzer, + ) + .into_expr_err(loc)?; + let cvar = analyzer.add_node(Node::ContextVar(var)); + ctx.push_expr(ExprRet::Single(cvar), analyzer) + .into_expr_err(loc)?; + Ok(()) + }) + } + "ecrecover" => { + input_exprs.parse(arena, self, ctx, loc)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let cctx = Context::new_subctx( + ctx, + None, + loc, + None, + Some(func_idx.into()), + true, + analyzer, + None, + ) + .into_expr_err(loc)?; + let call_ctx = analyzer.add_node(Node::Context(cctx)); + ctx.set_child_call(call_ctx.into(), analyzer) + .into_expr_err(loc)?; + let call_node = analyzer.add_node(Node::FunctionCall); + analyzer.add_edge(call_node, func_idx, Edge::Context(ContextEdge::Call)); + analyzer.add_edge(call_node, ctx, Edge::Context(ContextEdge::Subcontext)); + analyzer.add_edge(call_ctx, call_node, Edge::Context(ContextEdge::Subcontext)); + + let Some(input) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { + return Err(ExprErr::NoRhs( + loc, + "ecrecover did not receive inputs".to_string(), + )); + }; + + let input = if let Some(ordered_param_names) = + FunctionNode::from(func_idx).maybe_ordered_param_names(analyzer) + { + input_exprs.order(input, ordered_param_names) + } else { + input + }; + + if matches!(input, ExprRet::CtxKilled(_)) { + ctx.push_expr(input, analyzer).into_expr_err(loc)?; + return Ok(()); + } + + let mut inner_vals = vec![]; + match input { + ExprRet::Single(var) | ExprRet::SingleLiteral(var) => { + inner_vals + .push(ContextVarNode::from(var).display_name(analyzer).unwrap()); + } + _ => inner_vals.push("".to_string()), + } + let inner_name = inner_vals.into_iter().collect::>().join(", "); + let mut var = ContextVar::new_from_builtin( + loc, + analyzer.builtin_or_add(Builtin::Address).into(), + analyzer, + ) + .into_expr_err(loc)?; + var.display_name = format!("ecrecover({})", inner_name); + var.is_symbolic = true; + var.is_return = true; + let cvar = analyzer.add_node(Node::ContextVar(var)); + ctx.add_var(cvar.into(), analyzer).into_expr_err(loc)?; + analyzer.add_edge(cvar, call_ctx, Edge::Context(ContextEdge::Variable)); + analyzer.add_edge(cvar, call_ctx, Edge::Context(ContextEdge::Return)); + ContextNode::from(call_ctx) + .add_return_node(loc, cvar.into(), analyzer) + .into_expr_err(loc)?; + + let rctx = Context::new_subctx( + call_ctx.into(), + Some(ctx), + loc, + None, + None, + true, + analyzer, + None, + ) + .into_expr_err(loc)?; + let ret_ctx = analyzer.add_node(Node::Context(rctx)); + ContextNode::from(call_ctx) + .set_child_call(ret_ctx.into(), analyzer) + .into_expr_err(loc)?; + + // the return is a continuation of the ctx not the ecrecover ctx + ContextNode::from(ret_ctx) + .set_continuation_ctx(analyzer, ctx, "ecrecover") + .into_expr_err(loc)?; + + let tmp_ret = ContextVarNode::from(cvar) + .as_tmp( + ContextNode::from(call_ctx) + .underlying(analyzer) + .unwrap() + .loc, + ret_ctx.into(), + analyzer, + ) + .unwrap(); + tmp_ret.underlying_mut(analyzer).unwrap().is_return = true; + tmp_ret.underlying_mut(analyzer).unwrap().display_name = + format!("ecrecover({}).return", inner_name); + ctx.add_var(tmp_ret, analyzer).into_expr_err(loc)?; + analyzer.add_edge(tmp_ret, ret_ctx, Edge::Context(ContextEdge::Variable)); + + ContextNode::from(ret_ctx) + .push_expr(ExprRet::Single(tmp_ret.into()), analyzer) + .into_expr_err(loc)?; + Ok(()) + }) + } + _ => Err(ExprErr::FunctionNotFound( + loc, + format!( + "Could not find precompile function: \"{func_name}\", context: {}", + ctx.path(self), + ), + )), + } + } +} diff --git a/crates/solc-expressions/src/func_call/intrinsic_call/solidity.rs b/crates/solc-expressions/src/func_call/intrinsic_call/solidity.rs new file mode 100644 index 00000000..056e3ca0 --- /dev/null +++ b/crates/solc-expressions/src/func_call/intrinsic_call/solidity.rs @@ -0,0 +1,130 @@ +use crate::func_caller::NamedOrUnnamedArgs; +use crate::{ + func_call::helper::CallerHelper, require::Require, ContextBuilder, ExprErr, ExpressionParser, + IntoExprErr, +}; + +use graph::{ + elem::Elem, + nodes::{Builtin, Concrete, ConcreteNode, ContextNode, ContextVar, ContextVarNode, ExprRet}, + AnalyzerBackend, Node, +}; +use shared::RangeArena; + +use ethers_core::types::H256; +use solang_parser::pt::{Expression, Loc}; + +impl SolidityCaller for T where + T: AnalyzerBackend + Sized + CallerHelper +{ +} + +/// Trait for calling solidity's intrinsic functions, like `keccak256` +pub trait SolidityCaller: + AnalyzerBackend + Sized + CallerHelper +{ + /// Perform a 
solidity intrinsic function call, like `keccak256` + fn solidity_call( + &mut self, + arena: &mut RangeArena>, + func_name: String, + input_exprs: &NamedOrUnnamedArgs, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + match &*func_name { + "keccak256" => { + self.parse_ctx_expr(arena, &input_exprs.unnamed_args().unwrap()[0], ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(input) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs(loc, "No input into keccak256".to_string())); + }; + + let cvar = if let Ok(var) = input.expect_single() { + ContextVarNode::from(var) + } else { + return Err(ExprErr::NoRhs(loc, "No input into keccak256".to_string())); + }; + + if cvar.is_const(analyzer, arena).into_expr_err(loc)? { + let bytes = cvar + .evaled_range_min(analyzer, arena) + .unwrap() + .unwrap() + .as_bytes(analyzer, true, arena) + .unwrap(); + let mut out = [0; 32]; + keccak_hash::keccak_256(&bytes, &mut out); + + let hash = Node::Concrete(Concrete::from(H256(out))); + let hash_node = ConcreteNode::from(analyzer.add_node(hash)); + let var = ContextVar::new_from_concrete(loc, ctx, hash_node, analyzer) + .into_expr_err(loc)?; + let cvar = analyzer.add_node(Node::ContextVar(var)); + ctx.push_expr(ExprRet::Single(cvar), analyzer) + .into_expr_err(loc)?; + } else { + let var = ContextVar::new_from_builtin( + loc, + analyzer.builtin_or_add(Builtin::Bytes(32)).into(), + analyzer, + ) + .into_expr_err(loc)?; + let cvar = analyzer.add_node(Node::ContextVar(var)); + ctx.push_expr(ExprRet::Single(cvar), analyzer) + .into_expr_err(loc)?; + } + + Ok(()) + }) + } + "addmod" => { + // TODO: actually calculate this if possible + input_exprs.parse(arena, self, ctx, loc)?; + + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)?; + let var = ContextVar::new_from_builtin( + loc, + analyzer.builtin_or_add(Builtin::Uint(256)).into(), + analyzer, + ) + .into_expr_err(loc)?; + let cvar = analyzer.add_node(Node::ContextVar(var)); + ctx.push_expr(ExprRet::Single(cvar), analyzer) + .into_expr_err(loc)?; + Ok(()) + }) + } + "mulmod" => { + // TODO: actually calculate this if possible + input_exprs.parse(arena, self, ctx, loc)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)?; + let var = ContextVar::new_from_builtin( + loc, + analyzer.builtin_or_add(Builtin::Uint(256)).into(), + analyzer, + ) + .into_expr_err(loc)?; + let cvar = analyzer.add_node(Node::ContextVar(var)); + ctx.push_expr(ExprRet::Single(cvar), analyzer) + .into_expr_err(loc)?; + Ok(()) + }) + } + "require" | "assert" => { + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, _loc| { + analyzer.handle_require(arena, input_exprs.unnamed_args().unwrap(), ctx) + }) + } + _ => Err(ExprErr::FunctionNotFound( + loc, + format!( + "Could not find builtin solidity function: \"{func_name}\", context: {}", + ctx.path(self), + ), + )), + } + } +} diff --git a/crates/solc-expressions/src/func_call/intrinsic_call/types.rs b/crates/solc-expressions/src/func_call/intrinsic_call/types.rs new file mode 100644 index 00000000..fbc84e29 --- /dev/null +++ b/crates/solc-expressions/src/func_call/intrinsic_call/types.rs @@ -0,0 +1,228 @@ +use crate::func_caller::NamedOrUnnamedArgs; +use crate::ListAccess; +use crate::{variable::Variable, ContextBuilder, ExprErr, ExpressionParser, IntoExprErr}; +use graph::nodes::FunctionNode; + +use
graph::{ + elem::*, + nodes::{ + BuiltInNode, Builtin, Concrete, ContextNode, ContextVar, ContextVarNode, ExprRet, TyNode, + }, + AnalyzerBackend, Node, VarType, +}; +use shared::{NodeIdx, RangeArena}; + +use solang_parser::pt::{Expression, Loc}; + +impl TypesCaller for T where T: AnalyzerBackend + Sized {} + +/// Trait for calling type-based intrinsic functions, like `wrap` +pub trait TypesCaller: AnalyzerBackend + Sized { + /// Perform a type-based intrinsic function call, like `wrap` + fn types_call( + &mut self, + arena: &mut RangeArena>, + func_name: String, + func_idx: NodeIdx, + input_exprs: &NamedOrUnnamedArgs, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + match &*func_name { + "type" => self.parse_ctx_expr(arena, &input_exprs.unnamed_args().unwrap()[0], ctx), + "wrap" => { + if input_exprs.len() != 2 { + return Err(ExprErr::InvalidFunctionInput(loc, format!("Expected a member type and an input to the wrap function, but got: {:?}", input_exprs))); + } + + input_exprs.parse(arena, self, ctx, loc)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(input) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs( + loc, + ".wrap(..) did not receive an input".to_string(), + )); + }; + + let input = if let Some(ordered_param_names) = + FunctionNode::from(func_idx).maybe_ordered_param_names(analyzer) + { + input_exprs.order(input, ordered_param_names) + } else { + input + }; + + input.expect_length(2).into_expr_err(loc)?; + let ret = input.as_vec(); + let wrapping_ty = ret[0].expect_single().into_expr_err(loc)?; + let var = + ContextVar::new_from_ty(loc, TyNode::from(wrapping_ty), ctx, analyzer) + .into_expr_err(loc)?; + let to_be_wrapped = ret[1].expect_single().into_expr_err(loc)?; + let cvar = ContextVarNode::from(analyzer.add_node(Node::ContextVar(var))); + let next = analyzer.advance_var_in_ctx(cvar, loc, ctx)?; + let expr = Elem::Expr(RangeExpr::new( + Elem::from(to_be_wrapped), + RangeOp::Cast, + Elem::from(cvar), + )); + next.set_range_min(analyzer, arena, expr.clone()) + .into_expr_err(loc)?; + next.set_range_max(analyzer, arena, expr) + .into_expr_err(loc)?; + ctx.push_expr(ExprRet::Single(cvar.into()), analyzer) + .into_expr_err(loc) + }) + } + "unwrap" => { + input_exprs.parse(arena, self, ctx, loc)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(input) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs( + loc, + ".unwrap(..) did not receive an input".to_string(), + )); + }; + + let input = if let Some(ordered_param_names) = + FunctionNode::from(func_idx).maybe_ordered_param_names(analyzer) + { + input_exprs.order(input, ordered_param_names) + } else { + input + }; + + input.expect_length(2).into_expr_err(loc)?; + let ret = input.as_vec(); + let wrapping_ty = ret[0].expect_single().into_expr_err(loc)?; + let mut var = ContextVar::new_from_builtin( + loc, + BuiltInNode::from( + TyNode::from(wrapping_ty) + .underlying(analyzer) + .into_expr_err(loc)? + .ty, + ), + analyzer, + ) + .into_expr_err(loc)?; + let to_be_unwrapped = ret[1].expect_single().into_expr_err(loc)?; + var.display_name = format!( + "{}.unwrap({})", + TyNode::from(wrapping_ty) + .name(analyzer) + .into_expr_err(loc)?, + ContextVarNode::from(to_be_unwrapped) + .display_name(analyzer) + .into_expr_err(loc)? 
+ ); + + let cvar = ContextVarNode::from(analyzer.add_node(Node::ContextVar(var))); + cvar.set_range_min(analyzer, arena, Elem::from(to_be_unwrapped)) + .into_expr_err(loc)?; + cvar.set_range_max(analyzer, arena, Elem::from(to_be_unwrapped)) + .into_expr_err(loc)?; + let next = analyzer.advance_var_in_ctx(cvar, loc, ctx)?; + let expr = Elem::Expr(RangeExpr::new( + Elem::from(to_be_unwrapped), + RangeOp::Cast, + Elem::from(cvar), + )); + next.set_range_min(analyzer, arena, expr.clone()) + .into_expr_err(loc)?; + next.set_range_max(analyzer, arena, expr) + .into_expr_err(loc)?; + ctx.push_expr(ExprRet::Single(cvar.into()), analyzer) + .into_expr_err(loc) + }) + } + _ => Err(ExprErr::FunctionNotFound( + loc, + format!( + "Could not find builtin types function: \"{func_name}\", context: {}", + ctx.path(self), + ), + )), + } + } + + /// Perform a cast of a type + fn cast( + &mut self, + arena: &mut RangeArena>, + ty: Builtin, + func_idx: NodeIdx, + input_exprs: &NamedOrUnnamedArgs, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + // it is a cast + fn cast_match( + ctx: ContextNode, + loc: Loc, + analyzer: &mut impl ListAccess, + arena: &mut RangeArena>, + ty: &Builtin, + ret: ExprRet, + func_idx: NodeIdx, + ) -> Result<(), ExprErr> { + match ret { + ExprRet::CtxKilled(kind) => ctx.kill(analyzer, loc, kind).into_expr_err(loc), + ExprRet::Null => Ok(()), + ExprRet::Single(cvar) | ExprRet::SingleLiteral(cvar) => { + let cvar = ContextVarNode::from(cvar); + let new_var = cvar + .as_cast_tmp(loc, ctx, ty.clone(), analyzer) + .into_expr_err(loc)?; + + let v_ty = VarType::try_from_idx(analyzer, func_idx).expect(""); + let maybe_new_range = + cvar.cast_exprs(&v_ty, analyzer, arena).into_expr_err(loc)?; + new_var.underlying_mut(analyzer).into_expr_err(loc)?.ty = v_ty; + + if let Some((new_min, new_max)) = maybe_new_range { + new_var + .set_range_min(analyzer, arena, new_min) + .into_expr_err(loc)?; + new_var + .set_range_max(analyzer, arena, new_max) + .into_expr_err(loc)?; + } + + if cvar.is_indexable(analyzer).into_expr_err(loc)? { + // input is indexable. get the length attribute, create a new length for the casted type + let _ = analyzer.create_length( + arena, + ctx, + loc, + new_var, + new_var.latest_version(analyzer), + false, + )?; + } + + ctx.push_expr(ExprRet::Single(new_var.into()), analyzer) + .into_expr_err(loc)?; + Ok(()) + } + ExprRet::Multi(inner) => inner + .into_iter() + .try_for_each(|i| cast_match(ctx, loc, analyzer, arena, ty, i, func_idx)), + } + } + + self.parse_ctx_expr(arena, &input_exprs.unnamed_args().unwrap()[0], ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { + return Err(ExprErr::NoRhs(loc, "Cast had no target type".to_string())); + }; + + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + + cast_match(ctx, loc, analyzer, arena, &ty, ret, func_idx) + }) + } +} diff --git a/crates/solc-expressions/src/func_call/join.rs b/crates/solc-expressions/src/func_call/join.rs new file mode 100644 index 00000000..0aba321d --- /dev/null +++ b/crates/solc-expressions/src/func_call/join.rs @@ -0,0 +1,661 @@ +use crate::context_builder::StatementParser; +use crate::member_access::ListAccess; +use crate::variable::Variable; +use crate::{helper::CallerHelper, ExprErr, IntoExprErr}; + +use graph::{ + elem::{Elem, RangeElem, RangeExpr, RangeOp}, + nodes::{ + Concrete, ContextNode, ContextVar, ContextVarNode, ExprRet, FunctionNode, + FunctionParamNode, KilledKind, + }, + AnalyzerBackend, ContextEdge, Edge, GraphBackend, Node, Range, SolcRange, VarType, +}; +use shared::{AnalyzerLike, NodeIdx, RangeArena, StorageLocation}; + +use solang_parser::pt::{Expression, Loc}; + +use std::collections::BTreeMap; + +impl FuncJoiner for T where + T: AnalyzerBackend + + Sized + + GraphBackend + + CallerHelper + + JoinStatTracker +{ +} +/// A trait for calling a function +pub trait FuncJoiner: + GraphBackend + AnalyzerBackend + Sized + JoinStatTracker +{ + #[tracing::instrument(level = "trace", skip_all)] + fn join( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + func: FunctionNode, + params: &[FunctionParamNode], + func_inputs: &[ContextVarNode], + seen: &mut Vec, + ) -> Result { + tracing::trace!( + "Trying to join function: {}", + func.name(self).into_expr_err(loc)? + ); + // ensure no modifiers (for now) + // if pure function: + // grab requirements for context + // grab return node's simplified range + // replace fundamentals with function inputs + // update ctx name in place + + if func.is_pure(self).into_expr_err(loc)? { + // pure functions are guaranteed to not require the use of state, so + // the only things we care about are function inputs and function outputs + if let Some(body_ctx) = func.maybe_body_ctx(self) { + if body_ctx + .underlying(self) + .into_expr_err(loc)? + .child + .is_some() + { + tracing::trace!("Joining function: {}", func.name(self).into_expr_err(loc)?); + let edges = body_ctx.successful_edges(self).into_expr_err(loc)?; + match edges.len() { + 0 => {} + 1 => { + self.join_pure( + arena, + loc, + func, + params, + func_inputs, + body_ctx, + edges[0], + ctx, + false, + )?; + return Ok(true); + } + 2.. => { + tracing::trace!( + "Branching pure join function: {}", + func.name(self).into_expr_err(loc)? + ); + // self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let new_forks = ctx.set_join_forks(loc, edges.clone(), self).unwrap(); + edges.into_iter().zip(new_forks.iter()).try_for_each( + |(edge, new_fork)| { + let res = self.join_pure( + arena, + loc, + func, + params, + func_inputs, + body_ctx, + edge, + *new_fork, + true, + )?; + if !res { + new_fork + .kill(self, loc, KilledKind::Unreachable) + .into_expr_err(loc)?; + Ok(()) + } else { + Ok(()) + } + }, + )?; + return Ok(true); + } + } + } else { + tracing::trace!( + "Childless pure join: {}", + func.name(self).into_expr_err(loc)? 
+ ); + self.join_pure( + arena, + loc, + func, + params, + func_inputs, + body_ctx, + body_ctx, + ctx, + false, + )?; + return Ok(true); + } + } else { + tracing::trace!("Pure function not processed"); + if ctx.associated_fn(self) == Ok(func) { + return Ok(false); + } + + if seen.contains(&func) { + return Ok(false); + } + + self.handled_funcs_mut().push(func); + if let Some(body) = &func.underlying(self).unwrap().body.clone() { + self.parse_ctx_statement(arena, body, false, Some(func)); + } + + seen.push(func); + return self.join(arena, ctx, loc, func, params, func_inputs, seen); + } + } else if func.is_view(self).into_expr_err(loc)? { + if let Some(body_ctx) = func.maybe_body_ctx(self) { + if body_ctx + .underlying(self) + .into_expr_err(loc)? + .child + .is_some() + { + let edges = body_ctx.successful_edges(self).into_expr_err(loc)?; + if edges.len() == 1 { + tracing::trace!( + "View join function: {}", + func.name(self).into_expr_err(loc)? + ); + self.add_completed_view(false, false, false, body_ctx); + } else { + tracing::trace!( + "Branching view join function: {}", + func.name(self).into_expr_err(loc)? + ); + self.add_completed_view(false, false, true, body_ctx); + } + } else { + tracing::trace!( + "Childless view join function: {}", + func.name(self).into_expr_err(loc)? + ); + self.add_completed_view(false, true, false, body_ctx); + } + } else { + tracing::trace!("View function not processed"); + } + } else if let Some(body_ctx) = func.maybe_body_ctx(self) { + if body_ctx + .underlying(self) + .into_expr_err(loc)? + .child + .is_some() + { + let edges = body_ctx.successful_edges(self).into_expr_err(loc)?; + if edges.len() == 1 { + tracing::trace!("Mut join function: {}", func.name(self).into_expr_err(loc)?); + self.add_completed_mut(false, false, false, body_ctx); + } else { + tracing::trace!( + "Branching mut join function: {}", + func.name(self).into_expr_err(loc)? + ); + self.add_completed_mut(false, false, true, body_ctx); + } + } else { + tracing::trace!( + "Childless mut join function: {}", + func.name(self).into_expr_err(loc)? + ); + self.add_completed_mut(false, true, false, body_ctx); + } + } else { + tracing::trace!("Mut function not processed"); + } + + Ok(false) + } + + fn join_pure( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + func: FunctionNode, + params: &[FunctionParamNode], + func_inputs: &[ContextVarNode], + body_ctx: ContextNode, + resulting_edge: ContextNode, + target_ctx: ContextNode, + forks: bool, + ) -> Result { + let replacement_map = + self.basic_inputs_replacement_map(arena, body_ctx, loc, params, func_inputs)?; + let mut rets: Vec<_> = resulting_edge + .return_nodes(self) + .into_expr_err(loc)? 
+ .iter() + .enumerate() + .map(|(i, (_, ret_node))| { + let mut new_var = ret_node.underlying(self).unwrap().clone(); + let new_name = format!("{}.{i}", func.name(self).unwrap()); + new_var.name.clone_from(&new_name); + new_var.display_name = new_name; + if let Some(mut range) = new_var.ty.take_range() { + let mut range: SolcRange = + range.take_flattened_range(self, arena).unwrap().into(); + replacement_map.iter().for_each(|(replace, replacement)| { + range.replace_dep(*replace, replacement.0.clone(), self, arena); + }); + + range.cache_eval(self, arena).unwrap(); + // TODO: change ty here to match ret type + new_var.ty.set_range(range).unwrap(); + } + + if let Some(ref mut dep_on) = &mut new_var.dep_on { + dep_on.iter_mut().for_each(|d| { + if let Some((_, r)) = replacement_map.get(&(*d).into()) { + *d = *r + } + }); + } + + let mut new_cvar = ContextVarNode::from(self.add_node(Node::ContextVar(new_var))); + self.add_edge(new_cvar, target_ctx, Edge::Context(ContextEdge::Variable)); + target_ctx.add_var(new_cvar, self).unwrap(); + + // handle the case where the return node is a struct + if let Ok(fields) = ret_node.struct_to_fields(self) { + if !fields.is_empty() { + fields.iter().for_each(|field| { + let mut new_var = field.underlying(self).unwrap().clone(); + let new_name = format!( + "{}.{i}.{}", + func.name(self).unwrap(), + field.name(self).unwrap() + ); + new_var.name.clone_from(&new_name); + new_var.display_name = new_name; + if let Some(mut range) = new_var.ty.take_range() { + let mut range: SolcRange = + range.take_flattened_range(self, arena).unwrap().into(); + replacement_map.iter().for_each(|(replace, replacement)| { + range.replace_dep(*replace, replacement.0.clone(), self, arena); + }); + + range.cache_eval(self, arena).unwrap(); + + new_var.ty.set_range(range).unwrap(); + } + + if let Some(ref mut dep_on) = &mut new_var.dep_on { + dep_on.iter_mut().for_each(|d| { + if let Some((_, r)) = replacement_map.get(&(*d).into()) { + *d = *r + } + }); + } + let new_field = + ContextVarNode::from(self.add_node(Node::ContextVar(new_var))); + self.add_edge( + new_field, + new_cvar, + Edge::Context(ContextEdge::AttrAccess("field")), + ); + }); + } + } else { + let next_cvar = self + .advance_var_in_ctx_forcible(new_cvar, loc, target_ctx, true) + .unwrap(); + let casted = Elem::Expr(RangeExpr::new( + Elem::from(new_cvar), + RangeOp::Cast, + Elem::from(*ret_node), + )); + next_cvar + .set_range_min(self, arena, casted.clone()) + .unwrap(); + next_cvar.set_range_max(self, arena, casted).unwrap(); + + new_cvar = next_cvar; + } + + ExprRet::Single(new_cvar.latest_version(self).into()) + }) + .collect(); + + let mut unsat = false; + + resulting_edge + .ctx_deps(self) + .into_expr_err(loc)? + .iter() + .try_for_each(|dep| { + let mut new_var = dep.underlying(self)?.clone(); + if let Some(mut range) = new_var.ty.take_range() { + // let mut range: SolcRange = + // range.take_flattened_range(self).unwrap().into(); + let mut range: SolcRange = + range.flattened_range(self, arena)?.into_owned().into(); + replacement_map.iter().for_each(|(replace, replacement)| { + range.replace_dep(*replace, replacement.0.clone(), self, arena); + }); + + range.cache_eval(self, arena)?; + new_var.ty.set_range(range)?; + } + + if let Some(ref mut dep_on) = &mut new_var.dep_on { + dep_on.iter_mut().for_each(|d| { + if let Some((_, r)) = replacement_map.get(&(*d).into()) { + *d = *r + } + }); + } + let new_cvar = ContextVarNode::from(self.add_node(Node::ContextVar(new_var))); + + if new_cvar.is_const(self, arena)? 
+ && new_cvar.evaled_range_min(self, arena)? + == Some(Elem::from(Concrete::from(false))) + { + unsat = true; + } + self.add_edge(new_cvar, target_ctx, Edge::Context(ContextEdge::Variable)); + target_ctx.add_var(new_cvar, self)?; + target_ctx.add_ctx_dep(new_cvar, self, arena) + }) + .into_expr_err(loc)?; + + if unsat { + return Ok(false); + } + + #[allow(clippy::unnecessary_to_owned)] + func.returns(arena, self).into_iter().for_each(|ret| { + if let Some(var) = + ContextVar::maybe_new_from_func_ret(self, ret.underlying(self).unwrap().clone()) + { + let cvar = self.add_node(Node::ContextVar(var)); + target_ctx.add_var(cvar.into(), self).unwrap(); + self.add_edge(cvar, target_ctx, Edge::Context(ContextEdge::Variable)); + rets.push(ExprRet::Single(cvar)); + } + }); + + target_ctx.underlying_mut(self).into_expr_err(loc)?.path = format!( + "{}.{}.resume{{ {} }}", + target_ctx.path(self), + resulting_edge.path(self), + target_ctx.associated_fn_name(self).unwrap() + ); + target_ctx + .push_expr(ExprRet::Multi(rets), self) + .into_expr_err(loc)?; + self.add_completed_pure(true, false, forks, resulting_edge); + Ok(true) + } + + fn basic_inputs_replacement_map( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + params: &[FunctionParamNode], + func_inputs: &[ContextVarNode], + ) -> Result, ContextVarNode)>, ExprErr> { + let inputs = ctx.input_variables(self); + let mut replacement_map: BTreeMap, ContextVarNode)> = + BTreeMap::default(); + params + .iter() + .zip(func_inputs.iter()) + .try_for_each(|(param, func_input)| { + if let Some(name) = param.maybe_name(self).into_expr_err(loc)? { + let mut new_cvar = func_input + .latest_version(self) + .underlying(self) + .into_expr_err(loc)? + .clone(); + new_cvar.loc = Some(param.loc(self).unwrap()); + // new_cvar.name = name.clone(); + // new_cvar.display_name = name.clone(); + new_cvar.is_tmp = false; + new_cvar.storage = if let Some(StorageLocation::Storage(_)) = + param.underlying(self).unwrap().storage + { + new_cvar.storage + } else { + None + }; + + let replacement = + ContextVarNode::from(self.add_node(Node::ContextVar(new_cvar))); + + self.add_edge( + replacement, + *func_input, + Edge::Context(ContextEdge::InputVariable), + ); + + if let Some(param_ty) = VarType::try_from_idx(self, param.ty(self).unwrap()) { + if !replacement.ty_eq_ty(¶m_ty, self).into_expr_err(loc)? 
{ + replacement + .cast_from_ty(param_ty, self, arena) + .into_expr_err(loc)?; + } + } + + if let Some(_len_var) = replacement.array_to_len_var(self) { + // bring the length variable along as well + self.get_length(arena, ctx, loc, *func_input, false) + .unwrap(); + } + + if let (Some(r), Some(r2)) = + (replacement.range(self).unwrap(), param.range(self).unwrap()) + { + let new_min = r.range_min().into_owned().cast(r2.range_min().into_owned()); + let new_max = r.range_max().into_owned().cast(r2.range_max().into_owned()); + replacement + .latest_version(self) + .try_set_range_min(self, arena, new_min) + .into_expr_err(loc)?; + replacement + .latest_version(self) + .try_set_range_max(self, arena, new_max) + .into_expr_err(loc)?; + replacement + .latest_version(self) + .try_set_range_exclusions(self, r.exclusions) + .into_expr_err(loc)?; + } + + ctx.add_var(replacement, self).unwrap(); + self.add_edge(replacement, ctx, Edge::Context(ContextEdge::Variable)); + + let Some(correct_input) = inputs + .iter() + .find(|input| input.name(self).unwrap() == name) + else { + return Err(ExprErr::InvalidFunctionInput( + loc, + "Could not match input to parameter".to_string(), + )); + }; + + if let Ok(fields) = correct_input.struct_to_fields(self) { + if !fields.is_empty() { + let replacement_fields = func_input.struct_to_fields(self).unwrap(); + fields.iter().for_each(|field| { + let field_name = field.name(self).unwrap(); + let to_replace_field_name = + field_name.split('.').collect::>()[1]; + if let Some(replacement_field) = + replacement_fields.iter().find(|replacement_field| { + let name = replacement_field.name(self).unwrap(); + let replacement_name = + name.split('.').collect::>()[1]; + to_replace_field_name == replacement_name + }) + { + let mut replacement_field_as_elem = + Elem::from(*replacement_field); + replacement_field_as_elem.arenaize(self, arena).unwrap(); + if let Some(next) = field.next_version(self) { + replacement_map.insert( + next.0.into(), + (replacement_field_as_elem.clone(), *replacement_field), + ); + } + replacement_map.insert( + field.0.into(), + (replacement_field_as_elem, *replacement_field), + ); + } + }); + } + } + + let mut replacement_as_elem = Elem::from(replacement); + replacement_as_elem + .arenaize(self, arena) + .into_expr_err(loc)?; + + if let Some(next) = correct_input.next_version(self) { + replacement_map + .insert(next.0.into(), (replacement_as_elem.clone(), replacement)); + } + replacement_map + .insert(correct_input.0.into(), (replacement_as_elem, replacement)); + } + Ok(()) + })?; + Ok(replacement_map) + } +} + +impl JoinStatTracker for T where T: AnalyzerLike + GraphBackend {} + +pub trait JoinStatTracker: AnalyzerLike { + fn add_completed_pure( + &mut self, + completed: bool, + no_children: bool, + forks: bool, + target_ctx: ContextNode, + ) where + Self: Sized + GraphBackend, + { + match (no_children, forks) { + (true, _) => { + let num_vars = target_ctx.vars(self).len(); + let stats = self.join_stats_mut(); + stats.pure_no_children_joins.num_joins += 1; + if completed { + stats.pure_no_children_joins.completed_joins += 1; + } + stats.pure_no_children_joins.vars_reduced += num_vars; + } + (false, false) => { + let mut parents = target_ctx.parent_list(self).unwrap(); + parents.reverse(); + parents.push(target_ctx); + let vars_reduced = parents.iter().fold(0, |mut acc, ctx| { + acc += ctx.vars(self).len(); + acc + }); + let stats = self.join_stats_mut(); + stats.pure_children_no_forks_joins.num_joins += 1; + if completed { + 
stats.pure_children_no_forks_joins.completed_joins += 1; + } + stats.pure_children_no_forks_joins.vars_reduced += vars_reduced; + } + (false, true) => { + let stats = self.join_stats_mut(); + stats.pure_children_forks_joins.num_joins += 1; + if completed { + stats.pure_children_forks_joins.completed_joins += 1; + } + } + } + } + + fn add_completed_view( + &mut self, + completed: bool, + no_children: bool, + forks: bool, + target_ctx: ContextNode, + ) where + Self: Sized + GraphBackend, + { + match (no_children, forks) { + (true, _) => { + let num_vars = target_ctx.vars(self).len(); + let stats = self.join_stats_mut(); + stats.view_no_children_joins.num_joins += 1; + if completed { + stats.view_no_children_joins.completed_joins += 1; + } + stats.view_no_children_joins.vars_reduced += num_vars; + } + (false, false) => { + let mut parents = target_ctx.parent_list(self).unwrap(); + parents.reverse(); + parents.push(target_ctx); + let vars_reduced = parents.iter().fold(0, |mut acc, ctx| { + acc += ctx.vars(self).len(); + acc + }); + let stats = self.join_stats_mut(); + stats.view_children_no_forks_joins.num_joins += 1; + if completed { + stats.view_children_no_forks_joins.completed_joins += 1; + } + // parents is now: [body_ctx, ..., edges[0]] + stats.view_children_no_forks_joins.vars_reduced += vars_reduced; + } + (false, true) => { + let stats = self.join_stats_mut(); + stats.view_children_forks_joins.num_joins += 1; + if completed { + stats.view_children_forks_joins.completed_joins += 1; + } + } + } + } + + fn add_completed_mut( + &mut self, + completed: bool, + no_children: bool, + forks: bool, + target_ctx: ContextNode, + ) where + Self: Sized + GraphBackend, + { + match (no_children, forks) { + (true, _) => { + let num_vars = target_ctx.vars(self).len(); + let stats = self.join_stats_mut(); + stats.mut_no_children_joins.num_joins += 1; + if completed { + stats.mut_no_children_joins.completed_joins += 1; + } + stats.mut_no_children_joins.vars_reduced += num_vars; + } + (false, false) => { + let stats = self.join_stats_mut(); + stats.mut_children_no_forks_joins.num_joins += 1; + if completed { + stats.mut_children_no_forks_joins.completed_joins += 1; + } + } + (false, true) => { + let stats = self.join_stats_mut(); + stats.mut_children_forks_joins.num_joins += 1; + if completed { + stats.mut_children_forks_joins.completed_joins += 1; + } + } + } + } +} diff --git a/crates/solc-expressions/src/func_call/mod.rs b/crates/solc-expressions/src/func_call/mod.rs new file mode 100644 index 00000000..2d8fcd1a --- /dev/null +++ b/crates/solc-expressions/src/func_call/mod.rs @@ -0,0 +1,7 @@ +pub mod func_caller; +pub mod helper; +pub mod internal_call; +pub mod intrinsic_call; +pub mod join; +pub mod modifier; +pub mod namespaced_call; diff --git a/crates/solc-expressions/src/func_call/modifier.rs b/crates/solc-expressions/src/func_call/modifier.rs new file mode 100644 index 00000000..3b355863 --- /dev/null +++ b/crates/solc-expressions/src/func_call/modifier.rs @@ -0,0 +1,283 @@ +//! Traits & blanket implementations that facilitate performing modifier function calls. 
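+//! Handles invoking each modifier of a function in order (`call_modifier_for_fn`), resuming the wrapped function body once a modifier completes (`resume_from_modifier`), and resolving and attaching a function's modifiers (`modifiers` / `set_modifiers`).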
+ +use crate::{ + func_caller::FuncCaller, helper::CallerHelper, ContextBuilder, ExprErr, ExpressionParser, + IntoExprErr, +}; + +use graph::{ + elem::Elem, + nodes::{Concrete, Context, ContextNode, ExprRet, FunctionNode, ModifierState}, + AnalyzerBackend, Edge, GraphBackend, Node, +}; +use shared::RangeArena; + +use solang_parser::pt::{CodeLocation, Expression, Loc}; + +impl ModifierCaller for T where + T: AnalyzerBackend + + Sized + + GraphBackend + + FuncCaller + + CallerHelper +{ +} +/// A trait for dealing with modifier calls +pub trait ModifierCaller: + GraphBackend + + AnalyzerBackend + + Sized + + FuncCaller + + CallerHelper +{ + /// Calls a modifier for a function + #[tracing::instrument(level = "trace", skip_all)] + fn call_modifier_for_fn( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + func_ctx: ContextNode, + func_node: FunctionNode, + mod_state: ModifierState, + ) -> Result<(), ExprErr> { + let mod_node = func_node.modifiers(self)[mod_state.num]; + tracing::trace!( + "calling modifier {} for func {}", + mod_node.name(self).into_expr_err(loc)?, + func_node.name(self).into_expr_err(loc)? + ); + + let input_exprs = func_node + .modifier_input_vars(mod_state.num, self) + .into_expr_err(loc)?; + + input_exprs + .iter() + .try_for_each(|expr| self.parse_ctx_expr(arena, expr, func_ctx))?; + self.apply_to_edges(func_ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let input_paths = if input_exprs.is_empty() { + ExprRet::Multi(vec![]) + } else { + let Some(input_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + format!("No inputs to modifier, expected: {}", input_exprs.len()), + )); + }; + + if matches!(input_paths, ExprRet::CtxKilled(_)) { + ctx.push_expr(input_paths, analyzer).into_expr_err(loc)?; + return Ok(()); + } + input_paths + }; + + analyzer.func_call( + arena, + ctx, + loc, + &input_paths, + mod_node, + None, + Some(mod_state.clone()), + ) + }) + } + + /// Resumes the parent function of a modifier + #[tracing::instrument(level = "trace", skip_all)] + fn resume_from_modifier( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + modifier_state: ModifierState, + ) -> Result<(), ExprErr> { + tracing::trace!( + "resuming from modifier: {}", + ctx.associated_fn_name(self) + .into_expr_err(modifier_state.loc)? + ); + + let mods = modifier_state.parent_fn.modifiers(self); + self.apply_to_edges( + ctx, + modifier_state.loc, + arena, + &|analyzer, arena, ctx, loc| { + if modifier_state.num + 1 < mods.len() { + // use the next modifier + let mut mstate = modifier_state.clone(); + mstate.num += 1; + + let loc = mods[mstate.num] + .underlying(analyzer) + .into_expr_err(mstate.loc)? + .loc; + + let pctx = Context::new_subctx( + ctx, + Some(modifier_state.parent_ctx), + loc, + None, + None, + false, + analyzer, + Some(modifier_state.clone()), + ) + .unwrap(); + let new_parent_subctx = + ContextNode::from(analyzer.add_node(Node::Context(pctx))); + + new_parent_subctx + .set_continuation_ctx( + analyzer, + modifier_state.parent_ctx, + "resume_from_modifier_nonfinal", + ) + .into_expr_err(loc)?; + ctx.set_child_call(new_parent_subctx, analyzer) + .into_expr_err(modifier_state.loc)?; + + analyzer.call_modifier_for_fn( + arena, + mods[mstate.num] + .underlying(analyzer) + .into_expr_err(mstate.loc)? 
+ .loc, + new_parent_subctx, + mstate.parent_fn, + mstate, + )?; + Ok(()) + } else { + let pctx = Context::new_subctx( + ctx, + Some(modifier_state.parent_ctx), + modifier_state.loc, + None, + None, + false, + analyzer, + None, + ) + .unwrap(); + let new_parent_subctx = + ContextNode::from(analyzer.add_node(Node::Context(pctx))); + new_parent_subctx + .set_continuation_ctx( + analyzer, + modifier_state.parent_ctx, + "resume_from_modifier_final", + ) + .into_expr_err(loc)?; + ctx.set_child_call(new_parent_subctx, analyzer) + .into_expr_err(modifier_state.loc)?; + + // actually execute the parent function + analyzer.execute_call_inner( + arena, + modifier_state.loc, + ctx, + new_parent_subctx, + modifier_state.parent_fn, + &modifier_state.renamed_inputs, + None, + ) + } + }, + ) + } + + fn modifiers( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + func: FunctionNode, + ) -> Result, ExprErr> { + use std::fmt::Write; + let binding = func.underlying(self).unwrap().clone(); + let modifiers = binding.modifiers_as_base(); + if modifiers.is_empty() { + Ok(vec![]) + } else { + let res = modifiers + .iter() + .map(|modifier| { + assert_eq!(modifier.name.identifiers.len(), 1); + // construct arg string for function selector + let mut mod_name = format!("{}", modifier.name.identifiers[0]); + if let Some(args) = &modifier.args { + let args_str = args + .iter() + .map(|expr| { + let mctx = Context::new_subctx( + ctx, + None, + Loc::Implicit, + None, + None, + false, + self, + None, + ) + .into_expr_err(Loc::Implicit)?; + let callee_ctx = + ContextNode::from(self.add_node(Node::Context(mctx))); + let _res = ctx.set_child_call(callee_ctx, self); + self.parse_ctx_expr(arena, expr, callee_ctx)?; + let f: Vec = self.take_from_edge( + ctx, + expr.loc(), + arena, + &|analyzer, arena, ctx, loc| { + let ret = ctx + .pop_expr_latest(loc, analyzer) + .into_expr_err(loc)? + .unwrap(); + Ok(ret.try_as_func_input_str(analyzer, arena)) + }, + )?; + + ctx.delete_child(self).into_expr_err(expr.loc())?; + Ok(f.first().unwrap().clone()) + }) + .collect::, ExprErr>>()? + .join(", "); + let _ = write!(mod_name, "{args_str}"); + } else { + let _ = write!(mod_name, "()"); + } + let _ = write!(mod_name, ""); + let found: Option = ctx + .visible_modifiers(self) + .unwrap() + .iter() + .find(|modifier| modifier.name(self).unwrap() == mod_name) + .copied(); + Ok(found) + }) + .collect::>, ExprErr>>()? + .into_iter() + .flatten() + .collect::>(); + Ok(res) + } + } + + /// Sets the modifiers for a function + fn set_modifiers( + &mut self, + arena: &mut RangeArena>, + func: FunctionNode, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + let modifiers = self.modifiers(arena, ctx, func)?; + modifiers + .iter() + .enumerate() + .for_each(|(i, modifier)| self.add_edge(*modifier, func, Edge::FuncModifier(i))); + func.underlying_mut(self).unwrap().modifiers_set = true; + Ok(()) + } +} diff --git a/crates/solc-expressions/src/func_call/namespaced_call.rs b/crates/solc-expressions/src/func_call/namespaced_call.rs new file mode 100644 index 00000000..a4c989e7 --- /dev/null +++ b/crates/solc-expressions/src/func_call/namespaced_call.rs @@ -0,0 +1,555 @@ +//! Traits & blanket implementations that facilitate performing namespaced function calls. 
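A standalone sketch of the first resolution step this namespaced-call path performs (the selector names and driver below are assumptions for illustration, not this crate's API): visible member functions are narrowed to those whose selector-style name starts with `<ident>(`, and only when several candidates survive does literal-based disambiguation run.

// Standalone sketch of candidate filtering for `member.ident(...)` calls,
// assuming the selector-style names below. With more than one survivor, the
// caller must disambiguate further (e.g. using which inputs are number
// literals, since literals can be implicitly cast to several widths).
fn main() {
    let visible_funcs = [
        "transfer(address,uint256)",
        "transfer(address)",
        "balanceOf(address)",
    ];
    let ident = "transfer";
    let prefix = format!("{ident}(");
    let possible: Vec<_> = visible_funcs
        .iter()
        .filter(|name| name.starts_with(&prefix))
        .collect();
    println!("candidates: {possible:?}");
}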
+ +use crate::assign::Assign; +use crate::{ + func_call::func_caller::{FuncCaller, NamedOrUnnamedArgs}, + func_call::helper::CallerHelper, + intrinsic_call::IntrinsicFuncCaller, + member_access::MemberAccess, + ContextBuilder, ExprErr, ExpressionParser, IntoExprErr, +}; +use graph::nodes::{Concrete, ContextVar}; +use graph::ContextEdge; +use graph::Edge; +use graph::VarType; + +use graph::{ + elem::Elem, + nodes::{ContextNode, ContextVarNode, ExprRet, FunctionNode}, + AnalyzerBackend, GraphBackend, Node, +}; + +use shared::{NodeIdx, RangeArena}; + +use solang_parser::pt::{Expression, Identifier, Loc}; + +impl NameSpaceFuncCaller for T where + T: AnalyzerBackend + Sized + GraphBackend + CallerHelper +{ +} +/// A trait for performing namespaced function calls (i.e. `MyContract.myFunc(...)`) +pub trait NameSpaceFuncCaller: + AnalyzerBackend + Sized + GraphBackend + CallerHelper +{ + #[tracing::instrument(level = "trace", skip_all)] + /// Call a namedspaced function, i.e. `MyContract.myFunc(...)` + fn call_name_spaced_func( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: &Loc, + member_expr: &Expression, + ident: &Identifier, + input_exprs: NamedOrUnnamedArgs, + ) -> Result<(), ExprErr> { + use solang_parser::pt::Expression::*; + tracing::trace!("Calling name spaced function"); + if let Variable(Identifier { name, .. }) = member_expr { + if name == "abi" { + let func_name = format!("abi.{}", ident.name); + let fn_node = self + .builtin_fn_or_maybe_add(&func_name) + .unwrap_or_else(|| panic!("No builtin function with name {func_name}")); + return self.intrinsic_func_call(arena, loc, &input_exprs, fn_node, ctx); + } else if name == "super" { + if let Some(contract) = ctx.maybe_associated_contract(self).into_expr_err(*loc)? { + let supers = contract.super_contracts(self); + let possible_funcs: Vec<_> = supers + .iter() + .filter_map(|con_node| { + con_node + .linearized_functions(self) + .ok()? + .into_iter() + .find(|(func_name, _func_node)| func_name.starts_with(&ident.name)) + .map(|(_, node)| node) + }) + .collect(); + + if possible_funcs.is_empty() { + return Err(ExprErr::FunctionNotFound( + *loc, + "Could not find function in super".to_string(), + )); + } + input_exprs.parse(arena, self, ctx, *loc)?; + return self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let inputs = if let Some(inputs) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + { + inputs + } else { + ExprRet::Multi(vec![]) + }; + if possible_funcs.len() == 1 { + let mut inputs = if let Some(ordered_param_names) = + possible_funcs[0].maybe_ordered_param_names(analyzer) + { + input_exprs.order(inputs, ordered_param_names).as_vec() + } else { + inputs.as_vec() + }; + let func = possible_funcs[0]; + if func.params(analyzer).len() < inputs.len() { + inputs = inputs[1..].to_vec(); + } + let inputs = ExprRet::Multi(inputs); + if inputs.has_killed() { + return ctx + .kill(analyzer, loc, inputs.killed_kind().unwrap()) + .into_expr_err(loc); + } + analyzer.setup_fn_call( + arena, + &ident.loc, + &inputs, + func.into(), + ctx, + None, + ) + } else { + // this is the annoying case due to function overloading & type inference on number literals + let mut lits = vec![false]; + lits.extend( + input_exprs + .exprs() + .iter() + .map(|expr| { + match expr { + Negate(_, expr) => { + // negative number potentially + matches!(**expr, NumberLiteral(..) | HexLiteral(..)) + } + NumberLiteral(..) | HexLiteral(..) 
=> true, + _ => false, + } + }) + .collect::>(), + ); + + if inputs.has_killed() { + return ctx + .kill(analyzer, loc, inputs.killed_kind().unwrap()) + .into_expr_err(loc); + } + if let Some(func) = analyzer.disambiguate_fn_call( + arena, + &ident.name, + lits, + &inputs, + &possible_funcs, + ) { + analyzer.setup_fn_call(arena, &loc, &inputs, func.into(), ctx, None) + } else { + Err(ExprErr::FunctionNotFound( + loc, + "Could not find function in super".to_string(), + )) + } + } + }); + } + } + } + + self.parse_ctx_expr(arena, member_expr, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoLhs( + loc, + "Namespace function call had no namespace".to_string(), + )); + }; + + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + + analyzer.match_namespaced_member(arena, ctx, loc, member_expr, ident, &input_exprs, ret) + }) + } + + /// Match the expression return for getting the member node + fn match_namespaced_member( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + member_expr: &Expression, + ident: &Identifier, + input_exprs: &NamedOrUnnamedArgs, + ret: ExprRet, + ) -> Result<(), ExprErr> { + match ret { + ExprRet::Single(inner) | ExprRet::SingleLiteral(inner) => self + .call_name_spaced_func_inner( + arena, + ctx, + loc, + member_expr, + ident, + input_exprs, + inner, + true, + ), + ExprRet::Multi(inner) => inner.into_iter().try_for_each(|ret| { + self.match_namespaced_member(arena, ctx, loc, member_expr, ident, input_exprs, ret) + }), + ExprRet::CtxKilled(kind) => ctx.kill(self, loc, kind).into_expr_err(loc), + ExprRet::Null => Err(ExprErr::NoLhs( + loc, + "No function found due to null".to_string(), + )), + } + } + + #[tracing::instrument(level = "trace", skip_all)] + /// Actually perform the namespaced function call + fn call_name_spaced_func_inner( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + member_expr: &Expression, + ident: &Identifier, + input_exprs: &NamedOrUnnamedArgs, + member: NodeIdx, + member_is_lit: bool, + ) -> Result<(), ExprErr> { + use solang_parser::pt::Expression::*; + tracing::trace!( + "namespaced function call: {:?}.{:?}(..)", + ContextVarNode::from(member).display_name(self), + ident.name + ); + + let funcs = self.visible_member_funcs(ctx, loc, member)?; + // filter down all funcs to those that match + let possible_funcs = funcs + .iter() + .filter(|func| { + func.name(self) + .unwrap() + .starts_with(&format!("{}(", ident.name)) + }) + .copied() + .collect::>(); + + ctx.push_expr(ExprRet::Single(member), self) + .into_expr_err(loc)?; + + input_exprs.parse(arena, self, ctx, loc)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(mut inputs) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { + return Err(ExprErr::NoLhs( + loc, + "Namespace function call had no inputs".to_string(), + )); + }; + + if matches!(inputs, ExprRet::CtxKilled(_)) { + ctx.push_expr(inputs, analyzer).into_expr_err(loc)?; + return Ok(()); + } + if possible_funcs.is_empty() { + // check structs + let structs = ctx.visible_structs(analyzer).into_expr_err(loc)?; + let possible_structs = structs + .iter() + .filter(|strukt| { + let named_correctly = strukt + .name(analyzer) + .unwrap() + .starts_with(&ident.name.to_string()); + if !named_correctly { + false + } else { + // filter by params + let fields = strukt.fields(analyzer); + fields.len() == input_exprs.len() + } + }) + .copied() + .collect::>(); + + if possible_structs.len() == 1 { + let strukt = possible_structs[0]; + let var = ContextVar::new_from_struct(loc, strukt, ctx, analyzer) + .into_expr_err(loc)?; + let cvar = analyzer.add_node(Node::ContextVar(var)); + ctx.add_var(cvar.into(), analyzer).into_expr_err(loc)?; + analyzer.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); + + strukt.fields(analyzer).iter().try_for_each(|field| { + let field_cvar = ContextVar::maybe_new_from_field( + analyzer, + loc, + ContextVarNode::from(cvar) + .underlying(analyzer) + .into_expr_err(loc)?, + field.underlying(analyzer).unwrap().clone(), + ) + .expect("Invalid struct field"); + + let fc_node = analyzer.add_node(Node::ContextVar(field_cvar)); + analyzer.add_edge(fc_node, cvar, Edge::Context(ContextEdge::AttrAccess("field"))); + analyzer.add_edge(fc_node, ctx, Edge::Context(ContextEdge::Variable)); + ctx.add_var(fc_node.into(), analyzer).into_expr_err(loc)?; + let field_as_ret = ExprRet::Single(fc_node); + let Some(assignment) = inputs.take_one().into_expr_err(loc)? else { + return Err(ExprErr::NoRhs(loc, "Struct creation failed".to_string())); + }; + + if matches!(assignment, ExprRet::CtxKilled(_)) { + ctx.push_expr(assignment, analyzer).into_expr_err(loc)?; + return Ok(()); + } + + analyzer.match_assign_sides(arena, ctx, loc, &field_as_ret, &assignment)?; + let _ = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)?; + Ok(()) + })?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, _loc| { + ctx.push_expr(ExprRet::Single(cvar), analyzer) + .into_expr_err(loc)?; + Ok(()) + })?; + return Ok(()); + } + // TODO: this is extremely ugly. + if inputs.has_killed() { + return ctx + .kill(analyzer, loc, inputs.killed_kind().unwrap()) + .into_expr_err(loc); + } + let mut inputs = inputs.as_vec(); + if let Node::ContextVar(_) = analyzer.node(member) { + inputs.insert(0, ExprRet::Single(member)) + } + if let Node::ContextVar(_) = analyzer.node(member) { + if member_is_lit { + inputs.insert(0, ExprRet::SingleLiteral(member)) + } else { + inputs.insert(0, ExprRet::Single(member)) + } + } + let inputs = ExprRet::Multi(inputs); + + let as_input_str = inputs.try_as_func_input_str(analyzer, arena); + + let lits = inputs.literals_list().into_expr_err(loc)?; + if lits.iter().any(|i| *i) { + // try to disambiguate + let ty = if let Node::ContextVar(cvar) = analyzer.node(member) { + cvar.ty.ty_idx() + } else { + member + }; + + let possible_builtins: Vec<_> = analyzer + .builtin_fn_inputs() + .iter() + .filter_map(|(func_name, (inputs, _))| { + if func_name.starts_with(&ident.name) { + if let Some(input) = inputs.first() { + let try_cast = VarType::try_from_idx(analyzer, ty)? 
+ .implicitly_castable_to( + &VarType::try_from_idx(analyzer, input.ty)?, + analyzer, + ); + let Ok(implicitly_castable) = try_cast else { + return None; + }; + if implicitly_castable { + Some(func_name.clone()) + } else { + None + } + } else { + // generic builtin function, return it + Some(func_name.clone()) + } + } else { + None + } + }) + .collect::>(); + let possible_builtins: Vec<_> = possible_builtins + .into_iter() + .filter_map(|name| { + analyzer + .builtin_fn_or_maybe_add(&name) + .map(FunctionNode::from) + }) + .collect(); + + let maybe_func = if possible_builtins.len() == 1 { + Some(possible_builtins[0]) + } else { + analyzer.disambiguate_fn_call( + arena, + &ident.name, + lits, + &inputs, + &possible_builtins, + ) + }; + if let Some(func) = maybe_func { + let expr = &MemberAccess( + loc, + Box::new(member_expr.clone()), + Identifier { + loc: ident.loc, + name: func + .name(analyzer) + .into_expr_err(loc)? + .split('(') + .collect::>()[0] + .to_string(), + }, + ); + analyzer.parse_ctx_expr(arena, expr, ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(ret) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoLhs( + loc, + "Fallback function parse failure".to_string(), + )); + }; + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + let mut modifier_input_exprs = vec![member_expr.clone()]; + modifier_input_exprs.extend(input_exprs.exprs()); + analyzer.match_intrinsic_fallback( + arena, + ctx, + &loc, + &NamedOrUnnamedArgs::Unnamed(&modifier_input_exprs), + ret, + ) + }) + } else { + // analyzer.match_intrinsic_fallback(ctx, &loc, &modifier_input_exprs, ret) + Err(ExprErr::FunctionNotFound( + loc, + format!( + "Could not disambiguate builtin function, possible builtin functions: {:#?}", + possible_builtins + .iter() + .map(|i| i.name(analyzer).unwrap()) + .collect::>() + ), + )) + } + } else { + let expr = &MemberAccess( + loc, + Box::new(member_expr.clone()), + Identifier { + loc: ident.loc, + name: format!("{}{}", ident.name, as_input_str), + }, + ); + analyzer.parse_ctx_expr(arena, expr, ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
+ else { + return Err(ExprErr::NoLhs( + loc, + "Fallback function parse failure".to_string(), + )); + }; + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + let mut modifier_input_exprs = vec![member_expr.clone()]; + modifier_input_exprs.extend(input_exprs.exprs()); + analyzer.match_intrinsic_fallback( + arena, + ctx, + &loc, + &NamedOrUnnamedArgs::Unnamed(&modifier_input_exprs), + ret, + ) + }) + } + } else if possible_funcs.len() == 1 { + let mut inputs = if let Some(ordered_param_names) = + possible_funcs[0].maybe_ordered_param_names(analyzer) + { + input_exprs.order(inputs, ordered_param_names).as_vec() + } else { + inputs.as_vec() + }; + let func = possible_funcs[0]; + if func.params(analyzer).len() > inputs.len() { + // Add the member back in if its a context variable + if let Node::ContextVar(_) = analyzer.node(member) { + inputs.insert(0, ExprRet::Single(member)) + } + } + let inputs = ExprRet::Multi(inputs); + if inputs.has_killed() { + return ctx + .kill(analyzer, loc, inputs.killed_kind().unwrap()) + .into_expr_err(loc); + } + + analyzer.setup_fn_call(arena, &ident.loc, &inputs, func.into(), ctx, None) + } else { + // Add the member back in if its a context variable + let mut inputs = inputs.as_vec(); + if let Node::ContextVar(_) = analyzer.node(member) { + inputs.insert(0, ExprRet::Single(member)) + } + let inputs = ExprRet::Multi(inputs); + // this is the annoying case due to function overloading & type inference on number literals + let mut lits = vec![false]; + lits.extend( + input_exprs + .exprs() + .iter() + .map(|expr| { + match expr { + Negate(_, expr) => { + // negative number potentially + matches!(**expr, NumberLiteral(..) | HexLiteral(..)) + } + NumberLiteral(..) | HexLiteral(..) 
=> true, + _ => false, + } + }) + .collect::>(), + ); + + if inputs.has_killed() { + return ctx + .kill(analyzer, loc, inputs.killed_kind().unwrap()) + .into_expr_err(loc); + } + if let Some(func) = + analyzer.disambiguate_fn_call(arena, &ident.name, lits, &inputs, &possible_funcs) + { + analyzer.setup_fn_call(arena, &loc, &inputs, func.into(), ctx, None) + } else { + Err(ExprErr::FunctionNotFound( + loc, + format!( + "Could not disambiguate function, possible functions: {:#?}", + possible_funcs + .iter() + .map(|i| i.name(analyzer).unwrap()) + .collect::>() + ), + )) + } + } + }) + } +} diff --git a/src/context/exprs/mod.rs b/crates/solc-expressions/src/lib.rs similarity index 56% rename from src/context/exprs/mod.rs rename to crates/solc-expressions/src/lib.rs index e3d2c8eb..95fe792b 100644 --- a/src/context/exprs/mod.rs +++ b/crates/solc-expressions/src/lib.rs @@ -1,42 +1,77 @@ -use shared::analyzer::GraphError; use solang_parser::pt::Loc; mod array; +mod assign; mod bin_op; mod cmp; mod cond_op; +mod context_builder; mod env; +mod func_call; mod list; mod literal; +mod loops; mod member_access; +mod pre_post_in_decrement; mod require; mod variable; +pub mod yul; pub use array::*; +pub use assign::*; pub use bin_op::*; pub use cmp::*; pub use cond_op::*; +pub use context_builder::*; pub use env::*; +pub use func_call::*; pub use list::*; pub use literal::*; +pub use loops::*; pub use member_access::*; +pub use pre_post_in_decrement::*; pub use require::*; pub use variable::*; -pub trait ExprParser: - BinOp + Require + Variable + Literal + Array + MemberAccess + Cmp + CondOp + List + Env +/// Supertrait for parsing expressions +pub trait ExprTyParser: + BinOp + + Require + + Variable + + Literal + + Array + + MemberAccess + + Cmp + + CondOp + + List + + Env + + PrePostIncDecrement + + Assign { } -impl ExprParser for T where - T: BinOp + Require + Variable + Literal + Array + MemberAccess + Cmp + CondOp + List + Env +impl ExprTyParser for T where + T: BinOp + + Require + + Variable + + Literal + + Array + + MemberAccess + + Cmp + + CondOp + + List + + Env + + PrePostIncDecrement + + Assign { } +/// Convert some error into an expression error by attaching a code source location pub trait IntoExprErr { + /// Convert into a ExprErr fn into_expr_err(self, loc: Loc) -> Result; } -impl IntoExprErr for Result { +impl IntoExprErr for Result { fn into_expr_err(self, loc: Loc) -> Result { match self { Ok(v) => Ok(v), @@ -46,6 +81,7 @@ impl IntoExprErr for Result { } #[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq)] +/// An error that arose from the analyzer when interpreting expressions and statements pub enum ExprErr { ParseError(Loc, String), NoLhs(Loc, String), @@ -69,17 +105,19 @@ pub enum ExprErr { IntrinsicNamedArgs(Loc, String), InvalidFunctionInput(Loc, String), TakeFromFork(Loc, String), - GraphError(Loc, GraphError), + GraphError(Loc, graph::GraphError), Unresolved(Loc, String), } impl ExprErr { - pub fn from_graph_err(loc: Loc, graph_err: GraphError) -> Self { + /// Convert from a graph error + pub fn from_graph_err(loc: Loc, graph_err: graph::GraphError) -> Self { Self::GraphError(loc, graph_err) } } impl ExprErr { + /// Get the code source location of the error pub fn loc(&self) -> Loc { use ExprErr::*; match self { @@ -108,6 +146,7 @@ impl ExprErr { } } + /// Get the error message pub fn msg(&self) -> &str { use ExprErr::*; match self { @@ -132,20 +171,20 @@ impl ExprErr { InvalidFunctionInput(_, msg, ..) => msg, TakeFromFork(_, msg, ..) => msg, Unresolved(_, msg, ..) 
=> msg, - GraphError(_loc, shared::analyzer::GraphError::NodeConfusion(msg), ..) => msg, - GraphError(_loc, shared::analyzer::GraphError::MaxStackDepthReached(msg), ..) => msg, - GraphError(_loc, shared::analyzer::GraphError::MaxStackWidthReached(msg), ..) => msg, - GraphError(_loc, shared::analyzer::GraphError::ChildRedefinition(msg), ..) => msg, - GraphError(_loc, shared::analyzer::GraphError::DetachedVariable(msg), ..) => msg, - GraphError(_loc, shared::analyzer::GraphError::VariableUpdateInOldContext(msg), ..) => { - msg - } - GraphError(_loc, shared::analyzer::GraphError::ExpectedSingle(msg), ..) => msg, - GraphError(_loc, shared::analyzer::GraphError::StackLengthMismatch(msg), ..) => msg, - GraphError(_loc, shared::analyzer::GraphError::UnbreakableRecursion(msg), ..) => msg, + GraphError(_loc, graph::GraphError::NodeConfusion(msg), ..) => msg, + GraphError(_loc, graph::GraphError::MaxStackDepthReached(msg), ..) => msg, + GraphError(_loc, graph::GraphError::MaxStackWidthReached(msg), ..) => msg, + GraphError(_loc, graph::GraphError::ChildRedefinition(msg), ..) => msg, + GraphError(_loc, graph::GraphError::DetachedVariable(msg), ..) => msg, + GraphError(_loc, graph::GraphError::VariableUpdateInOldContext(msg), ..) => msg, + GraphError(_loc, graph::GraphError::ExpectedSingle(msg), ..) => msg, + GraphError(_loc, graph::GraphError::StackLengthMismatch(msg), ..) => msg, + GraphError(_loc, graph::GraphError::UnbreakableRecursion(msg), ..) => msg, + GraphError(_loc, graph::GraphError::UnknownVariable(msg), ..) => msg, } } + /// Get the top-level report message pub fn report_msg(&self) -> &str { use ExprErr::*; match self { @@ -170,15 +209,16 @@ impl ExprErr { IntrinsicNamedArgs(..) => "Arguments in calls to intrinsic functions cannot be named", InvalidFunctionInput(..) => "Arguments to this function call do not match required types", TakeFromFork(..) => "IR Error: Tried to take from an child context that ended up forking", - GraphError(_loc, shared::analyzer::GraphError::NodeConfusion(_), ..) => "Graph IR Error: Node type confusion. This is potentially a bug. Please report it at https://github.com/nascentxyz/pyrometer", - GraphError(_loc, shared::analyzer::GraphError::MaxStackDepthReached(_), ..) => "Max call depth reached - either recursion or loop", - GraphError(_loc, shared::analyzer::GraphError::MaxStackWidthReached(_), ..) => "TODO: Max fork width reached - Need to widen variables and remove contexts", - GraphError(_loc, shared::analyzer::GraphError::ChildRedefinition(_), ..) => "Graph IR Error: Child redefintion. This is potentially a bug. Please report it at https://github.com/nascentxyz/pyrometer", - GraphError(_loc, shared::analyzer::GraphError::DetachedVariable(_), ..) => "Graph IR Error: Detached Variable. This is potentially a bug. Please report it at https://github.com/nascentxyz/pyrometer", - GraphError(_loc, shared::analyzer::GraphError::VariableUpdateInOldContext(_), ..) => "Graph IR Error: Variable update in an old context. This is potentially a bug. Please report it at https://github.com/nascentxyz/pyrometer", - GraphError(_loc, shared::analyzer::GraphError::ExpectedSingle(_), ..) => "Graph IR Error: Expecting single expression return, got multiple. This is potentially a bug. Please report it at https://github.com/nascentxyz/pyrometer", - GraphError(_loc, shared::analyzer::GraphError::StackLengthMismatch(_), ..) => "Graph IR Error: Expected a particular number of elements on the context stack but found a different amount. This is potentially a bug. 
Please report it at https://github.com/nascentxyz/pyrometer", - GraphError(_loc, shared::analyzer::GraphError::UnbreakableRecursion(_), ..) => "Graph IR Error: Unbreakable recursion in variable range. This is potentially a bug. Please report it at https://github.com/nascentxyz/pyrometer", + GraphError(_loc, graph::GraphError::NodeConfusion(_), ..) => "Graph IR Error: Node type confusion. This is potentially a bug. Please report it at https://github.com/nascentxyz/pyrometer", + GraphError(_loc, graph::GraphError::MaxStackDepthReached(_), ..) => "Max call depth reached - either recursion or loop", + GraphError(_loc, graph::GraphError::MaxStackWidthReached(_), ..) => "TODO: Max fork width reached - Need to widen variables and remove contexts", + GraphError(_loc, graph::GraphError::ChildRedefinition(_), ..) => "Graph IR Error: Child redefintion. This is potentially a bug. Please report it at https://github.com/nascentxyz/pyrometer", + GraphError(_loc, graph::GraphError::DetachedVariable(_), ..) => "Graph IR Error: Detached Variable. This is potentially a bug. Please report it at https://github.com/nascentxyz/pyrometer", + GraphError(_loc, graph::GraphError::VariableUpdateInOldContext(_), ..) => "Graph IR Error: Variable update in an old context. This is potentially a bug. Please report it at https://github.com/nascentxyz/pyrometer", + GraphError(_loc, graph::GraphError::ExpectedSingle(_), ..) => "Graph IR Error: Expecting single expression return, got multiple. This is potentially a bug. Please report it at https://github.com/nascentxyz/pyrometer", + GraphError(_loc, graph::GraphError::StackLengthMismatch(_), ..) => "Graph IR Error: Expected a particular number of elements on the context stack but found a different amount. This is potentially a bug. Please report it at https://github.com/nascentxyz/pyrometer", + GraphError(_loc, graph::GraphError::UnbreakableRecursion(_), ..) => "Graph IR Error: Unbreakable recursion in variable range. This is potentially a bug. Please report it at https://github.com/nascentxyz/pyrometer", + GraphError(_loc, graph::GraphError::UnknownVariable(_), ..) => "Graph IR Error: Unknown variable. 
This is potentially a bug, but more likely a variable name is mistyped.", } } } diff --git a/src/context/exprs/list.rs b/crates/solc-expressions/src/list.rs similarity index 57% rename from src/context/exprs/list.rs rename to crates/solc-expressions/src/list.rs index a5fae6e1..ef19a590 100644 --- a/src/context/exprs/list.rs +++ b/crates/solc-expressions/src/list.rs @@ -1,43 +1,56 @@ -use crate::context::exprs::IntoExprErr; -use crate::context::ContextBuilder; -use crate::context::ExprErr; -use shared::{analyzer::AnalyzerLike, context::*, nodes::*, Edge, Node}; -use solang_parser::pt::Expression; +use crate::{ContextBuilder, ExprErr, ExpressionParser, IntoExprErr}; -use solang_parser::pt::{Parameter, ParameterList}; +use graph::{ + elem::Elem, + nodes::{Concrete, ContextNode, ContextVar, ExprRet}, + AnalyzerBackend, ContextEdge, Edge, Node, VarType, +}; +use shared::RangeArena; -use solang_parser::pt::Loc; +use solang_parser::pt::{Expression, Loc, Parameter, ParameterList}; -impl List for T where T: AnalyzerLike + Sized {} - -pub trait List: AnalyzerLike + Sized { +impl List for T where T: AnalyzerBackend + Sized {} +/// Dealing with list parsing and operations +pub trait List: AnalyzerBackend + Sized { #[tracing::instrument(level = "trace", skip_all)] - fn list(&mut self, ctx: ContextNode, loc: Loc, params: &ParameterList) -> Result<(), ExprErr> { - params - .iter() - .try_for_each(|(loc, input)| { - if let Some(input) = input { - self.parse_ctx_expr(&input.ty, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "List did not have left hand sides".to_string())); - }; - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - ctx.append_tmp_expr(analyzer.match_ty(ctx, &loc, &ret, input)?, analyzer).into_expr_err(loc) - }) - } else { - // create a dummy var - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - ctx.append_tmp_expr(ExprRet::Null, analyzer).into_expr_err(loc) - }) - } - })?; - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { + fn list( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + params: &ParameterList, + ) -> Result<(), ExprErr> { + params.iter().try_for_each(|(loc, input)| { + if let Some(input) = input { + self.parse_ctx_expr(arena, &input.ty, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoLhs( + loc, + "List did not have left hand sides".to_string(), + )); + }; + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + ctx.append_tmp_expr(analyzer.match_ty(ctx, &loc, &ret, input)?, analyzer) + .into_expr_err(loc) + }) + } else { + // create a dummy var + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + ctx.append_tmp_expr(ExprRet::Null, analyzer) + .into_expr_err(loc) + }) + } + })?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { let Some(ret) = ctx.pop_tmp_expr(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoLhs(loc, "List did not have left hand sides".to_string())); + return Err(ExprErr::NoLhs( + loc, + "List did not have left hand sides".to_string(), + )); }; ctx.push_expr(ret, analyzer).into_expr_err(loc) }) @@ -59,10 +72,11 @@ pub trait List: AnalyzerLike + Sized { loc: Some(*loc), name: input_name.to_string(), display_name: input_name.to_string(), - storage: input.storage.clone(), + storage: input.storage.as_ref().map(|s| s.clone().into()), is_tmp: false, is_symbolic: false, tmp_of: None, + dep_on: None, is_return: false, ty, }; @@ -84,10 +98,11 @@ pub trait List: AnalyzerLike + Sized { loc: Some(*loc), name: format!("tmp{tmp_num}"), display_name: format!("tmp{tmp_num}"), - storage: input.storage.clone(), + storage: input.storage.as_ref().map(|s| s.clone().into()), is_tmp: true, is_symbolic: false, tmp_of: None, + dep_on: None, is_return: false, ty, }; diff --git a/src/context/exprs/literal.rs b/crates/solc-expressions/src/literal.rs similarity index 83% rename from src/context/exprs/literal.rs rename to crates/solc-expressions/src/literal.rs index ec8c4b8f..fb118816 100644 --- a/src/context/exprs/literal.rs +++ b/crates/solc-expressions/src/literal.rs @@ -1,26 +1,21 @@ -use crate::context::exprs::IntoExprErr; -use crate::ExprErr; -use ethers_core::types::H256; -use ethers_core::types::I256; -use shared::context::ExprRet; -use shared::nodes::Builtin; -use shared::range::elem_ty::Elem; -use shared::{ - analyzer::AnalyzerLike, - context::*, - nodes::{Concrete, ConcreteNode}, - Edge, Node, +use crate::{ExprErr, IntoExprErr}; + +use graph::{ + elem::*, + nodes::{Builtin, Concrete, ConcreteNode, ContextNode, ContextVar, ContextVarNode, ExprRet}, + AnalyzerBackend, ContextEdge, Edge, Node, }; -use solang_parser::pt::HexLiteral; -use solang_parser::pt::Identifier; +use shared::RangeArena; + +use ethers_core::types::{Address, H256, I256, U256}; +use solang_parser::pt::{HexLiteral, Identifier, Loc}; -use ethers_core::types::{Address, U256}; -use solang_parser::pt::Loc; use std::str::FromStr; -impl Literal for T where T: AnalyzerLike + Sized {} +impl Literal for T where T: AnalyzerBackend + Sized {} -pub trait Literal: AnalyzerLike + Sized { +/// Dealing with literal expression and parsing them into nodes +pub trait Literal: AnalyzerBackend + Sized { fn number_literal( &mut self, ctx: ContextNode, @@ -28,6 +23,7 @@ pub trait Literal: AnalyzerLike + Sized { integer: &str, exponent: &str, negative: bool, + unit: &Option, ) -> Result<(), ExprErr> { let int = U256::from_dec_str(integer).unwrap(); let val = if !exponent.is_empty() { @@ -37,7 +33,13 @@ pub trait Literal: AnalyzerLike + Sized { int }; - let size: u16 = ((32 - (val.leading_zeros() / 8)) * 8) as u16; + let val = if let Some(unit) = unit { + val * self.unit_to_uint(unit) + } else { + val + }; + + let size: u16 = ((32 - (val.leading_zeros() / 8)) * 8).max(8) as u16; let concrete_node = if negative { let val = if val == U256::from(2).pow(255.into()) { // no need to set upper bit @@ -61,14 +63,27 @@ pub trait Literal: AnalyzerLike + Sized { Ok(()) } + fn unit_to_uint(&self, unit: &Identifier) -> U256 { + match &*unit.name { + "gwei" => U256::from(10).pow(9.into()), + "ether" => U256::from(10).pow(18.into()), + "minutes" => U256::from(60), + "hours" => U256::from(3600), + "days" => U256::from(86400), + "weeks" => U256::from(604800), + _ => U256::from(1), + } + } + fn rational_number_literal( &mut self, + arena: &mut RangeArena>, ctx: ContextNode, loc: Loc, integer: &str, fraction: &str, exponent: &str, - _unit: 
&Option, + unit: &Option, ) -> Result<(), ExprErr> { let int = U256::from_dec_str(integer).unwrap(); let exp = if !exponent.is_empty() { @@ -85,16 +100,21 @@ pub trait Literal: AnalyzerLike + Sized { Elem::from(Concrete::from(U256::from(1))), ); let exp_elem = Elem::from(Concrete::from(exp)); - let rational_range = (Elem::from(Concrete::from(fraction)) + let mut rational_range = (Elem::from(Concrete::from(fraction)) + int_elem * Elem::from(Concrete::from(fraction_denom))) * Elem::from(Concrete::from(U256::from(10))).pow(exp_elem); + + if let Some(unit) = unit { + rational_range = rational_range * Elem::from(Concrete::from(self.unit_to_uint(unit))) + } + let cvar = ContextVar::new_from_builtin(loc, self.builtin_or_add(Builtin::Uint(256)).into(), self) .into_expr_err(loc)?; let node = ContextVarNode::from(self.add_node(Node::ContextVar(cvar))); - node.set_range_max(self, rational_range.clone()) + node.set_range_max(self, arena, rational_range.clone()) .into_expr_err(loc)?; - node.set_range_min(self, rational_range) + node.set_range_min(self, arena, rational_range) .into_expr_err(loc)?; ctx.add_var(node, self).into_expr_err(loc)?; diff --git a/src/context/loops.rs b/crates/solc-expressions/src/loops.rs similarity index 56% rename from src/context/loops.rs rename to crates/solc-expressions/src/loops.rs index 449b4d6e..aa6f4488 100644 --- a/src/context/loops.rs +++ b/crates/solc-expressions/src/loops.rs @@ -1,19 +1,30 @@ -use crate::context::exprs::IntoExprErr; -use crate::ExprErr; -use solang_parser::pt::Loc; -use solang_parser::pt::Statement; +use crate::{variable::Variable, ContextBuilder, ExprErr, IntoExprErr, StatementParser}; +use graph::ContextEdge; +use graph::Edge; -use crate::context::ContextBuilder; -use shared::analyzer::GraphLike; -use shared::context::*; -use shared::{analyzer::AnalyzerLike, Node}; -use solang_parser::pt::Expression; +use graph::{ + elem::Elem, + nodes::{Concrete, Context, ContextNode}, + AnalyzerBackend, GraphBackend, Node, +}; +use shared::RangeArena; -impl Looper for T where T: AnalyzerLike + Sized + GraphLike {} -pub trait Looper: GraphLike + AnalyzerLike + Sized { +use solang_parser::pt::{Expression, Loc, Statement}; + +impl Looper for T where + T: AnalyzerBackend + Sized + GraphBackend +{ +} + +/// Dealing with loops +pub trait Looper: + GraphBackend + AnalyzerBackend + Sized +{ #[tracing::instrument(level = "trace", skip_all)] + /// Handles a for loop. 
Needs improvement fn for_loop( &mut self, + arena: &mut RangeArena>, loc: Loc, ctx: ContextNode, maybe_init: &Option>, @@ -23,26 +34,33 @@ pub trait Looper: GraphLike + AnalyzerLike ) -> Result<(), ExprErr> { // TODO: improve this if let Some(initer) = maybe_init { - self.parse_ctx_statement(initer, false, Some(ctx)); + self.parse_ctx_statement(arena, initer, false, Some(ctx)); } if let Some(body) = maybe_body { - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - analyzer.reset_vars(loc, ctx, body) + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + analyzer.reset_vars(arena, loc, ctx, body) }) } else { Ok(()) } } - fn reset_vars(&mut self, loc: Loc, ctx: ContextNode, body: &Statement) -> Result<(), ExprErr> { + /// Resets all variables referenced in the loop because we don't elegantly handle loops + fn reset_vars( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + ctx: ContextNode, + body: &Statement, + ) -> Result<(), ExprErr> { let og_ctx = ctx; - let sctx = Context::new_subctx(ctx, None, loc, None, None, false, self, None) - .into_expr_err(loc)?; + let sctx = Context::new_loop_subctx(ctx, loc, self).into_expr_err(loc)?; let subctx = ContextNode::from(self.add_node(Node::Context(sctx))); ctx.set_child_call(subctx, self).into_expr_err(loc)?; - self.parse_ctx_statement(body, false, Some(subctx)); - self.apply_to_edges(subctx, loc, &|analyzer, ctx, loc| { + self.add_edge(subctx, ctx, Edge::Context(ContextEdge::Loop)); + self.parse_ctx_statement(arena, body, false, Some(subctx)); + self.apply_to_edges(subctx, loc, arena, &|analyzer, arena, ctx, loc| { let vars = subctx.local_vars(analyzer).clone(); vars.iter().for_each(|(name, var)| { // widen to max range @@ -59,11 +77,11 @@ pub trait Looper: GraphLike + AnalyzerLike .advance_var_in_ctx(inheritor_var, loc, ctx) .unwrap(); let res = new_inheritor_var - .set_range_min(analyzer, r.min) + .set_range_min(analyzer, arena, r.min) .into_expr_err(loc); let _ = analyzer.add_if_err(res); let res = new_inheritor_var - .set_range_max(analyzer, r.max) + .set_range_max(analyzer, arena, r.max) .into_expr_err(loc); let _ = analyzer.add_if_err(res); } @@ -78,16 +96,18 @@ pub trait Looper: GraphLike + AnalyzerLike }) } + /// Handles a while-loop fn while_loop( &mut self, + arena: &mut RangeArena>, loc: Loc, ctx: ContextNode, _limiter: &Expression, body: &Statement, ) -> Result<(), ExprErr> { // TODO: improve this - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - analyzer.reset_vars(loc, ctx, body) + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + analyzer.reset_vars(arena, loc, ctx, body) }) } } diff --git a/crates/solc-expressions/src/member_access/builtin_access.rs b/crates/solc-expressions/src/member_access/builtin_access.rs new file mode 100644 index 00000000..3ea01028 --- /dev/null +++ b/crates/solc-expressions/src/member_access/builtin_access.rs @@ -0,0 +1,298 @@ +use crate::{ExprErr, IntoExprErr, LibraryAccess}; + +use graph::{ + nodes::{BuiltInNode, Builtin, Concrete, ContextNode, ContextVar, ExprRet}, + AnalyzerBackend, ContextEdge, Edge, Node, +}; + +use ethers_core::types::{I256, U256}; +use solang_parser::pt::{Expression, Identifier, Loc}; + +impl BuiltinAccess for T where + T: LibraryAccess + AnalyzerBackend + Sized +{ +} + +/// Trait for performing member access on builtin types +pub trait BuiltinAccess: + LibraryAccess + AnalyzerBackend + Sized +{ + /// Perform member access on builtin types + fn builtin_member_access( + &mut self, + loc: Loc, + ctx: ContextNode, + node: BuiltInNode, + 
is_storage: bool, + ident: &Identifier, + ) -> Result { + tracing::trace!("Looking for builtin member function"); + if let Some(ret) = self.library_func_search(ctx, node.0.into(), ident) { + Ok(ret) + } else { + match node.underlying(self).into_expr_err(loc)?.clone() { + Builtin::Address | Builtin::AddressPayable | Builtin::Payable => { + match &*ident.name { + "delegatecall" + | "call" + | "staticcall" + | "delegatecall(address, bytes)" + | "call(address, bytes)" + | "staticcall(address, bytes)" => { + // TODO: check if the address is known to be a certain type and the function signature is known + // and call into the function + let builtin_name = ident.name.split('(').collect::>()[0]; + let func_node = self.builtin_fn_or_maybe_add(builtin_name).unwrap(); + Ok(ExprRet::Single(func_node)) + } + "code" => { + // TODO: try to be smarter based on the address input + let bn = self.builtin_or_add(Builtin::DynamicBytes); + let cvar = ContextVar::new_from_builtin(loc, bn.into(), self) + .into_expr_err(loc)?; + let node = self.add_node(Node::ContextVar(cvar)); + ctx.add_var(node.into(), self).into_expr_err(loc)?; + self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + Ok(ExprRet::Single(node)) + } + "codehash" => { + // TODO: try to be smarter based on the address input + let bn = self.builtin_or_add(Builtin::Bytes(32)); + let cvar = ContextVar::new_from_builtin(loc, bn.into(), self) + .into_expr_err(loc)?; + let node = self.add_node(Node::ContextVar(cvar)); + ctx.add_var(node.into(), self).into_expr_err(loc)?; + self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + Ok(ExprRet::Single(node)) + } + "balance" => { + // TODO: try to be smarter based on the address input + let bn = self.builtin_or_add(Builtin::Uint(256)); + let cvar = ContextVar::new_from_builtin(loc, bn.into(), self) + .into_expr_err(loc)?; + let node = self.add_node(Node::ContextVar(cvar)); + ctx.add_var(node.into(), self).into_expr_err(loc)?; + self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + Ok(ExprRet::Single(node)) + } + _ if ident.name.starts_with("send") => { + let bn = self.builtin_or_add(Builtin::Bool); + let cvar = ContextVar::new_from_builtin(loc, bn.into(), self) + .into_expr_err(loc)?; + let node = self.add_node(Node::ContextVar(cvar)); + ctx.add_var(node.into(), self).into_expr_err(loc)?; + self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + Ok(ExprRet::Single(node)) + } + _ if ident.name.starts_with("transfer") => Ok(ExprRet::Multi(vec![])), + _ => Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unknown member access on address: {:?}, ctx: {}", + ident.name, + ctx.path(self) + ), + )), + } + } + Builtin::Bool => Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unknown member access on bool: {:?}, ctx: {}", + ident.name, + ctx.path(self) + ), + )), + Builtin::String => match ident.name.split('(').collect::>()[0] { + "concat" => { + let fn_node = self.builtin_fn_or_maybe_add("concat").unwrap(); + Ok(ExprRet::Single(fn_node)) + } + _ => Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unknown member access on string: {:?}, ctx: {}", + ident.name, + ctx.path(self) + ), + )), + }, + Builtin::Bytes(size) => Err(ExprErr::MemberAccessNotFound( + loc, + format!("Unknown member access on bytes{}: {:?}", size, ident.name), + )), + Builtin::Rational => Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unknown member access on rational: {:?}, ctx: {}", + ident.name, + ctx.path(self) + ), + )), + Builtin::DynamicBytes => match 
ident.name.split('(').collect::>()[0] { + "concat" => { + let fn_node = self.builtin_fn_or_maybe_add("concat").unwrap(); + Ok(ExprRet::Single(fn_node)) + } + _ => Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unknown member access on bytes: {:?}, ctx: {}", + ident.name, + ctx.path(self) + ), + )), + }, + Builtin::Array(_) => { + if ident.name.starts_with("push") { + if is_storage { + let fn_node = self.builtin_fn_or_maybe_add("push").unwrap(); + Ok(ExprRet::Single(fn_node)) + } else { + Err(ExprErr::NonStoragePush( + loc, + "Trying to push to nonstorage array is not supported".to_string(), + )) + } + } else if ident.name.starts_with("pop") { + if is_storage { + let fn_node = self.builtin_fn_or_maybe_add("pop").unwrap(); + Ok(ExprRet::Single(fn_node)) + } else { + Err(ExprErr::NonStoragePush( + loc, + "Trying to pop from nonstorage array is not supported".to_string(), + )) + } + } else { + Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unknown member access on array[]: {:?}, ctx: {}", + ident.name, + ctx.path(self) + ), + )) + } + } + Builtin::SizedArray(s, _) => Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unknown member access on array[{s}]: {:?}, ctx: {}", + ident.name, + ctx.path(self) + ), + )), + Builtin::Mapping(_, _) => Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unknown member access on mapping: {:?}, ctx: {}", + ident.name, + ctx.path(self) + ), + )), + Builtin::Func(_, _) => Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unknown member access on func: {:?}, ctx: {}", + ident.name, + ctx.path(self) + ), + )), + Builtin::Int(size) => { + let max = if size == 256 { + I256::MAX + } else { + I256::from_raw(U256::from(1u8) << U256::from(size - 1)) - I256::from(1) + }; + match &*ident.name { + "max" => { + let c = Concrete::Int(size, max); + let node = self.add_node(Node::Concrete(c)).into(); + let mut var = ContextVar::new_from_concrete(loc, ctx, node, self) + .into_expr_err(loc)?; + var.name = format!("int{size}.max"); + var.display_name.clone_from(&var.name); + var.is_tmp = true; + var.is_symbolic = false; + let cvar = self.add_node(Node::ContextVar(var)); + ctx.add_var(cvar.into(), self).into_expr_err(loc)?; + self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); + Ok(ExprRet::Single(cvar)) + } + "min" => { + let min = max * I256::from(-1i32) - I256::from(1i32); + let c = Concrete::Int(size, min); + let node = self.add_node(Node::Concrete(c)).into(); + let mut var = ContextVar::new_from_concrete(loc, ctx, node, self) + .into_expr_err(loc)?; + var.name = format!("int{size}.min"); + var.display_name.clone_from(&var.name); + var.is_tmp = true; + var.is_symbolic = false; + let cvar = self.add_node(Node::ContextVar(var)); + ctx.add_var(cvar.into(), self).into_expr_err(loc)?; + self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); + Ok(ExprRet::Single(cvar)) + } + e => Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unknown type attribute on int{size}: {e:?}, ctx: {}", + ctx.path(self) + ), + )), + } + } + Builtin::Uint(size) => match &*ident.name { + "max" => { + let max = if size == 256 { + U256::MAX + } else { + U256::from(2).pow(U256::from(size)) - 1 + }; + let c = Concrete::Uint(size, max); + let node = self.add_node(Node::Concrete(c)).into(); + let mut var = ContextVar::new_from_concrete(loc, ctx, node, self) + .into_expr_err(loc)?; + var.name = format!("uint{size}.max"); + var.display_name.clone_from(&var.name); + var.is_tmp = true; + var.is_symbolic = false; + let cvar = 
self.add_node(Node::ContextVar(var)); + ctx.add_var(cvar.into(), self).into_expr_err(loc)?; + self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); + Ok(ExprRet::Single(cvar)) + } + "min" => { + let min = U256::zero(); + let c = Concrete::from(min); + let node = self.add_node(Node::Concrete(c)).into(); + let mut var = ContextVar::new_from_concrete(loc, ctx, node, self) + .into_expr_err(loc)?; + var.name = format!("uint{size}.min"); + var.display_name.clone_from(&var.name); + var.is_tmp = true; + var.is_symbolic = false; + let cvar = self.add_node(Node::ContextVar(var)); + ctx.add_var(cvar.into(), self).into_expr_err(loc)?; + self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); + Ok(ExprRet::Single(cvar)) + } + "call" | "delegatecall" | "staticcall" if size == 160 => { + let builtin_name = ident.name.split('(').collect::>()[0]; + let func_node = self.builtin_fn_or_maybe_add(builtin_name).unwrap(); + Ok(ExprRet::Single(func_node)) + } + e => Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unknown type attribute on uint{size}: {e:?}, ctx: {}", + ctx.path(self) + ), + )), + }, + } + } + } +} diff --git a/crates/solc-expressions/src/member_access/contract_access.rs b/crates/solc-expressions/src/member_access/contract_access.rs new file mode 100644 index 00000000..d64a3ff0 --- /dev/null +++ b/crates/solc-expressions/src/member_access/contract_access.rs @@ -0,0 +1,164 @@ +use crate::{ExprErr, IntoExprErr}; + +use graph::{ + nodes::{Builtin, Concrete, ContextNode, ContextVar, ContractNode, ExprRet}, + AnalyzerBackend, ContextEdge, Edge, Node, +}; +use shared::NodeIdx; + +use solang_parser::pt::{Expression, Identifier, Loc}; + +impl ContractAccess for T where T: AnalyzerBackend + Sized {} + +/// Trait for performing member access on a Contract +pub trait ContractAccess: AnalyzerBackend + Sized { + /// Perform member access on a contract + fn contract_member_access( + &mut self, + member_idx: NodeIdx, + con_node: ContractNode, + ident: &Identifier, + ctx: ContextNode, + loc: Loc, + maybe_parent: Option, + ) -> Result { + tracing::trace!( + "Contract member access: {}.{}", + con_node + .maybe_name(self) + .into_expr_err(loc)? + .unwrap_or_else(|| "interface".to_string()), + ident.name + ); + + if let Some(func) = con_node + .funcs(self) + .into_iter() + .find(|func_node| func_node.name(self).unwrap() == ident.name) + { + if let Some(func_cvar) = ContextVar::maybe_from_user_ty(self, loc, func.0.into()) { + let fn_node = self.add_node(Node::ContextVar(func_cvar)); + // this prevents attaching a dummy node to the parent which could cause a cycle in the graph + if maybe_parent.is_some() { + self.add_edge(fn_node, member_idx, Edge::Context(ContextEdge::FuncAccess)); + } + Ok(ExprRet::Single(fn_node)) + } else { + Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unable to construct the function \"{}\" in contract \"{}\"", + ident.name, + con_node.name(self).into_expr_err(loc)? 
+ ), + )) + } + } else if let Some(func) = con_node + .structs(self) + .into_iter() + .find(|struct_node| struct_node.name(self).unwrap() == ident.name) + { + if let Some(struct_cvar) = ContextVar::maybe_from_user_ty(self, loc, func.0.into()) { + let struct_node = self.add_node(Node::ContextVar(struct_cvar)); + // this prevents attaching a dummy node to the parent which could cause a cycle in the graph + if maybe_parent.is_some() { + self.add_edge( + struct_node, + member_idx, + Edge::Context(ContextEdge::StructAccess), + ); + } + return Ok(ExprRet::Single(struct_node)); + } else { + return Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unable to construct the struct \"{}\" in contract \"{}\"", + ident.name, + con_node.name(self).into_expr_err(loc)? + ), + )); + } + } else { + match &*ident.name { + "name" => { + let c = Concrete::from(con_node.name(self).unwrap()); + let cnode = self.add_node(Node::Concrete(c)); + let cvar = ContextVar::new_from_concrete(loc, ctx, cnode.into(), self) + .into_expr_err(loc)?; + let node = self.add_node(Node::ContextVar(cvar)); + ctx.add_var(node.into(), self).into_expr_err(loc)?; + self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + return Ok(ExprRet::Single(node)); + } + "creationCode" | "runtimeCode" => { + let bn = self.builtin_or_add(Builtin::DynamicBytes); + let cvar = + ContextVar::new_from_builtin(loc, bn.into(), self).into_expr_err(loc)?; + let node = self.add_node(Node::ContextVar(cvar)); + ctx.add_var(node.into(), self).into_expr_err(loc)?; + self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + return Ok(ExprRet::Single(node)); + } + "interfaceId" => { + // TODO: actually calculate this + let bn = self.builtin_or_add(Builtin::Bytes(4)); + let cvar = + ContextVar::new_from_builtin(loc, bn.into(), self).into_expr_err(loc)?; + let node = self.add_node(Node::ContextVar(cvar)); + ctx.add_var(node.into(), self).into_expr_err(loc)?; + self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); + return Ok(ExprRet::Single(node)); + } + _ => { + // try to match just prefix + if let Some(func) = con_node.funcs(self).into_iter().find(|func_node| { + if let Some(prefix) = func_node.prefix_only_name(self).unwrap() { + prefix == ident.name + } else { + false + } + }) { + if let Some(func_cvar) = + ContextVar::maybe_from_user_ty(self, loc, func.0.into()) + { + let fn_node = self.add_node(Node::ContextVar(func_cvar)); + // this prevents attaching a dummy node to the parent which could cause a cycle in the graph + if maybe_parent.is_some() { + self.add_edge( + fn_node, + member_idx, + Edge::Context(ContextEdge::FuncAccess), + ); + } + Ok(ExprRet::Single(fn_node)) + } else { + Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unable to construct the function \"{}\" in contract \"{}\"", + ident.name, + con_node.name(self).into_expr_err(loc)? + ), + )) + } + } else { + return Err(ExprErr::ContractFunctionNotFound( + loc, + format!( + "No function or struct with name {:?} in contract: {:?}. 
Functions: {:#?}", + ident.name, + con_node.name(self).unwrap(), + con_node + .funcs(self) + .iter() + .map(|func| func.name(self).unwrap()) + .collect::>() + ), + )); + } + } + } + } + } +} diff --git a/crates/solc-expressions/src/member_access/enum_access.rs b/crates/solc-expressions/src/member_access/enum_access.rs new file mode 100644 index 00000000..c373e772 --- /dev/null +++ b/crates/solc-expressions/src/member_access/enum_access.rs @@ -0,0 +1,57 @@ +use crate::{ExprErr, IntoExprErr, LibraryAccess}; + +use graph::{ + nodes::{ContextNode, ContextVar, EnumNode, ExprRet}, + AnalyzerBackend, ContextEdge, Edge, Node, +}; +use shared::NodeIdx; + +use solang_parser::pt::{Expression, Identifier, Loc}; + +impl EnumAccess for T where + T: LibraryAccess + AnalyzerBackend + Sized +{ +} + +/// Trait for performing member access on an enum +pub trait EnumAccess: + LibraryAccess + AnalyzerBackend + Sized +{ + /// Perform member access on an enum + fn enum_member_access( + &mut self, + _member_idx: NodeIdx, + enum_node: EnumNode, + ident: &Identifier, + ctx: ContextNode, + loc: Loc, + ) -> Result { + tracing::trace!("Enum member access: {}", ident.name); + + if let Some(variant) = enum_node + .variants(self) + .into_expr_err(loc)? + .iter() + .find(|variant| **variant == ident.name) + { + let var = + ContextVar::new_from_enum_variant(self, ctx, loc, enum_node, variant.to_string()) + .into_expr_err(loc)?; + let cvar = self.add_node(Node::ContextVar(var)); + ctx.add_var(cvar.into(), self).into_expr_err(loc)?; + self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); + Ok(ExprRet::Single(cvar)) + } else if let Some(ret) = self.library_func_search(ctx, enum_node.0.into(), ident) { + Ok(ret) + } else { + Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unknown member access \"{}\" on enum \"{}\"", + ident.name, + enum_node.name(self).into_expr_err(loc)? 
+ ), + )) + } + } +} diff --git a/crates/solc-expressions/src/member_access/func_access.rs b/crates/solc-expressions/src/member_access/func_access.rs new file mode 100644 index 00000000..e69de29b diff --git a/crates/solc-expressions/src/member_access/library_access.rs b/crates/solc-expressions/src/member_access/library_access.rs new file mode 100644 index 00000000..164b5f0b --- /dev/null +++ b/crates/solc-expressions/src/member_access/library_access.rs @@ -0,0 +1,66 @@ +use crate::ExprErr; + +use graph::{ + nodes::{ContextNode, ExprRet, FunctionNode}, + AnalyzerBackend, Edge, +}; +use shared::NodeIdx; + +use petgraph::{visit::EdgeRef, Direction}; +use solang_parser::pt::{Expression, Identifier}; + +use std::collections::BTreeSet; + +impl LibraryAccess for T where T: AnalyzerBackend + Sized {} + +/// Trait for getting library functions for a type +pub trait LibraryAccess: AnalyzerBackend + Sized { + /// Search for a library function by name + fn library_func_search( + &mut self, + ctx: ContextNode, + ty: NodeIdx, + ident: &Identifier, + ) -> Option { + self.possible_library_funcs(ctx, ty) + .iter() + .filter_map(|func| { + if let Ok(name) = func.name(self) { + Some((name, func)) + } else { + None + } + }) + .find_map(|(name, func)| { + if name == ident.name { + Some(ExprRet::Single((*func).into())) + } else { + None + } + }) + } + + /// Get all possible library functions + fn possible_library_funcs(&mut self, ctx: ContextNode, ty: NodeIdx) -> BTreeSet { + let mut funcs: BTreeSet = BTreeSet::new(); + if let Some(associated_contract) = ctx.maybe_associated_contract(self).unwrap() { + // search for contract scoped `using` statements + funcs.extend( + self.graph().edges_directed(ty, Direction::Outgoing).filter(|edge| { + matches!(*edge.weight(), Edge::LibraryFunction(scope) if scope == associated_contract.into()) + }).map(|edge| edge.target().into()).collect::>() + ); + } + + // Search for global `using` funcs + if let Some(source) = ctx.maybe_associated_source(self) { + funcs.extend( + self.graph().edges_directed(ty, Direction::Outgoing).filter(|edge| { + matches!(*edge.weight(), Edge::LibraryFunction(scope) if scope == source.into()) + }).map(|edge| edge.target().into()).collect::>() + ); + } + + funcs + } +} diff --git a/crates/solc-expressions/src/member_access/list_access.rs b/crates/solc-expressions/src/member_access/list_access.rs new file mode 100644 index 00000000..ab1be6ad --- /dev/null +++ b/crates/solc-expressions/src/member_access/list_access.rs @@ -0,0 +1,299 @@ +use crate::{ContextBuilder, ExprErr, ExpressionParser, IntoExprErr, Variable}; + +use graph::{ + elem::*, + nodes::{BuiltInNode, Builtin, Concrete, ContextNode, ContextVar, ContextVarNode, ExprRet}, + AnalyzerBackend, ContextEdge, Edge, Node, Range, SolcRange, VarType, +}; +use shared::RangeArena; + +use ethers_core::types::U256; +use solang_parser::pt::{Expression, Loc}; + +impl ListAccess for T where T: AnalyzerBackend + Sized {} +/// Handles list/array member access (indices, length, etc) +pub trait ListAccess: AnalyzerBackend + Sized { + #[tracing::instrument(level = "trace", skip_all)] + /// Get the length member of an array/list + fn length( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + input_expr: &Expression, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + self.parse_ctx_expr(arena, input_expr, ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { + return Err(ExprErr::NoLhs( + loc, + "Attempted to perform member access without a left-hand side".to_string(), + )); + }; + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.match_length(arena, ctx, loc, ret, true) + }) + } + + #[tracing::instrument(level = "trace", skip_all)] + /// Get the length member of an array/list + fn match_length( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + elem_path: ExprRet, + _update_len_bound: bool, + ) -> Result<(), ExprErr> { + match elem_path { + ExprRet::Null => { + ctx.push_expr(ExprRet::Null, self).into_expr_err(loc)?; + Ok(()) + } + ExprRet::CtxKilled(kind) => ctx.kill(self, loc, kind).into_expr_err(loc), + ExprRet::Single(arr) => { + self.get_length(arena, ctx, loc, arr.into(), false)?; + Ok(()) + } + e => todo!("here: {e:?}"), + } + } + + fn get_length( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + array: ContextVarNode, + return_var: bool, + ) -> Result, ExprErr> { + let next_arr = self.advance_var_in_ctx(array.latest_version(self), loc, ctx)?; + // search for latest length + if let Some(len_var) = next_arr.array_to_len_var(self) { + let len_node = self.advance_var_in_ctx(len_var.latest_version(self), loc, ctx)?; + if !return_var { + ctx.push_expr(ExprRet::Single(len_node.into()), self) + .into_expr_err(loc)?; + Ok(None) + } else { + Ok(Some(len_node)) + } + } else { + self.create_length(arena, ctx, loc, array, next_arr, return_var) + // no length variable, create one + // let name = format!("{}.length", array.name(self).into_expr_err(loc)?); + + // // Create the range from the current length or default to [0, uint256.max] + + // let len_min = Elem::from(next_arr) + // .get_length() + // .max(Elem::from(Concrete::from(U256::zero()))); + // let len_max = Elem::from(next_arr) + // .get_length() + // .min(Elem::from(Concrete::from(U256::MAX))); + // let range = SolcRange::new(len_min, len_max, vec![]); + + // let len_var = ContextVar { + // loc: Some(loc), + // name, + // display_name: array.display_name(self).into_expr_err(loc)? 
+ ".length", + // storage: None, + // is_tmp: false, + // tmp_of: None, + // is_symbolic: true, + // is_return: false, + // ty: VarType::BuiltIn( + // BuiltInNode::from(self.builtin_or_add(Builtin::Uint(256))), + // Some(range), + // ), + // }; + // let len_node = ContextVarNode::from(self.add_node(Node::ContextVar(len_var))); + // self.add_edge( + // len_node, + // array, + // Edge::Context(ContextEdge::AttrAccess("length")), + // ); + // self.add_edge(len_node, ctx, Edge::Context(ContextEdge::Variable)); + // ctx.add_var(len_node, self).into_expr_err(loc)?; + + // // we have to force here to avoid length <-> array recursion + // let next_next_arr = + // self.advance_var_in_ctx_forcible(array.latest_version(self), loc, ctx, true)?; + // let update_array_len = + // Elem::from(next_arr.latest_version(self)).set_length(len_node.into()); + + // // Update the array + // next_next_arr + // .set_range_min(self, update_array_len.clone()) + // .into_expr_err(loc)?; + // next_next_arr + // .set_range_max(self, update_array_len.clone()) + // .into_expr_err(loc)?; + + // if !return_var { + // ctx.push_expr(ExprRet::Single(len_node.into()), self) + // .into_expr_err(loc)?; + // Ok(None) + // } else { + // Ok(Some(len_node)) + // } + } + } + + fn create_length( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + loc: Loc, + array: ContextVarNode, + target_array: ContextVarNode, + return_var: bool, + ) -> Result, ExprErr> { + // no length variable, create one + let name = format!("{}.length", array.name(self).into_expr_err(loc)?); + + // Create the range from the current length or default to [0, uint256.max] + let len_min = Elem::from(array) + .get_length() + .max(Elem::from(Concrete::from(U256::zero()))); + let len_max = Elem::from(array) + .get_length() + .min(Elem::from(Concrete::from(U256::MAX))); + let range = SolcRange::new(len_min, len_max, vec![]); + + let len_var = ContextVar { + loc: Some(loc), + name, + display_name: array.display_name(self).into_expr_err(loc)? 
+ ".length", + storage: None, + is_tmp: false, + tmp_of: None, + dep_on: None, + is_symbolic: true, + is_return: false, + ty: VarType::BuiltIn( + BuiltInNode::from(self.builtin_or_add(Builtin::Uint(256))), + Some(range), + ), + }; + let len_node = ContextVarNode::from(self.add_node(Node::ContextVar(len_var))); + self.add_edge( + len_node, + target_array, + Edge::Context(ContextEdge::AttrAccess("length")), + ); + self.add_edge(len_node, ctx, Edge::Context(ContextEdge::Variable)); + ctx.add_var(len_node, self).into_expr_err(loc)?; + + // we have to force here to avoid length <-> array recursion + let next_target_arr = + self.advance_var_in_ctx_forcible(target_array.latest_version(self), loc, ctx, true)?; + let update_array_len = + Elem::from(target_array.latest_version(self)).set_length(len_node.into()); + + // Update the array + next_target_arr + .set_range_min(self, arena, update_array_len.clone()) + .into_expr_err(loc)?; + next_target_arr + .set_range_max(self, arena, update_array_len.clone()) + .into_expr_err(loc)?; + + if !return_var { + ctx.push_expr(ExprRet::Single(len_node.into()), self) + .into_expr_err(loc)?; + Ok(None) + } else { + Ok(Some(len_node)) + } + } + + #[tracing::instrument(level = "trace", skip_all)] + /// Get the length member of an array/list and create it as a temporary variable + fn tmp_length( + &mut self, + arena: &mut RangeArena>, + arr: ContextVarNode, + array_ctx: ContextNode, + loc: Loc, + ) -> ContextVarNode { + let arr = arr.first_version(self); + let name = format!("{}.length", arr.name(self).unwrap()); + tracing::trace!("Length access: {}", name); + if let Some(attr_var) = array_ctx.var_by_name_or_recurse(self, &name).unwrap() { + attr_var.latest_version(self) + } else { + let range = if let Ok(Some(size)) = arr.ty(self).unwrap().maybe_array_size(self) { + SolcRange::from(Concrete::from(size)) + } else { + SolcRange::try_from_builtin(&Builtin::Uint(256)) + }; + + let len_var = ContextVar { + loc: Some(loc), + name: arr.name(self).unwrap() + ".length", + display_name: arr.display_name(self).unwrap() + ".length", + storage: None, + is_tmp: false, + tmp_of: None, + dep_on: None, + is_symbolic: true, + is_return: false, + ty: VarType::BuiltIn( + BuiltInNode::from(self.builtin_or_add(Builtin::Uint(256))), + range, + ), + }; + let len_node = self.add_node(Node::ContextVar(len_var)); + + let next_arr = self + .advance_var_in_ctx(arr.latest_version(self), loc, array_ctx) + .unwrap(); + if next_arr + .underlying(self) + .unwrap() + .ty + .is_dyn_builtin(self) + .unwrap() + { + if let Some(r) = next_arr.ref_range(self).unwrap() { + let min = r.simplified_range_min(self, arena).unwrap(); + let max = r.simplified_range_max(self, arena).unwrap(); + if let Some(mut rd) = min.maybe_range_dyn() { + ContextVarNode::from(len_node) + .set_range_min(self, arena, *rd.len.clone()) + .unwrap(); + rd.len = Box::new(Elem::from(len_node)); + let res = next_arr + .set_range_min(self, arena, Elem::ConcreteDyn(rd)) + .into_expr_err(loc); + let _ = self.add_if_err(res); + } + + if let Some(mut rd) = max.maybe_range_dyn() { + ContextVarNode::from(len_node) + .set_range_max(self, arena, *rd.len.clone()) + .unwrap(); + rd.len = Box::new(Elem::from(len_node)); + let res = next_arr + .set_range_max(self, arena, Elem::ConcreteDyn(rd)) + .into_expr_err(loc); + let _ = self.add_if_err(res); + } + } + } + + self.add_edge( + len_node, + arr, + Edge::Context(ContextEdge::AttrAccess("length")), + ); + self.add_edge(len_node, array_ctx, Edge::Context(ContextEdge::Variable)); + 
array_ctx.add_var(len_node.into(), self).unwrap(); + len_node.into() + } + } +} diff --git a/crates/solc-expressions/src/member_access/member_trait.rs b/crates/solc-expressions/src/member_access/member_trait.rs new file mode 100644 index 00000000..ab26fe1b --- /dev/null +++ b/crates/solc-expressions/src/member_access/member_trait.rs @@ -0,0 +1,330 @@ +use crate::{ + BuiltinAccess, ContextBuilder, ContractAccess, EnumAccess, Env, ExprErr, ExpressionParser, + IntoExprErr, ListAccess, StructAccess, +}; + +use graph::{ + elem::Elem, + nodes::{ + BuiltInNode, Concrete, ConcreteNode, ContextNode, ContextVar, ContextVarNode, ContractNode, + EnumNode, ExprRet, FunctionNode, StructNode, TyNode, + }, + AnalyzerBackend, Node, TypeNode, VarType, +}; +use shared::{NodeIdx, RangeArena}; + +use solang_parser::pt::{Expression, Identifier, Loc}; + +impl MemberAccessParts for T where + T: BuiltinAccess + ContractAccess + EnumAccess + ListAccess + StructAccess +{ +} + +/// Supertrait that coalesces various member access traits +pub trait MemberAccessParts: + BuiltinAccess + ContractAccess + EnumAccess + ListAccess + StructAccess +{ +} + +impl MemberAccess for T where + T: MemberAccessParts + AnalyzerBackend + Sized +{ +} + +/// Toplevel trait for performing member access. Utilizes other `..Access` traits +pub trait MemberAccess: + MemberAccessParts + AnalyzerBackend + Sized +{ + /// Entry function for perform a member access + #[tracing::instrument(level = "trace", skip_all)] + fn member_access( + &mut self, + arena: &mut RangeArena>, + loc: Loc, + member_expr: &Expression, + ident: &Identifier, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + // TODO: this is wrong as it overwrites a function call of the form elem.length(...) i believe + if ident.name == "length" { + return self.length(arena, loc, member_expr, ctx); + } + + self.parse_ctx_expr(arena, member_expr, ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { + return Err(ExprErr::NoLhs( + loc, + "Attempted to perform member access without a left-hand side".to_string(), + )); + }; + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.match_member(ctx, loc, ident, ret) + }) + } + + /// Match on [`ExprRet`]s and call the member access for each + fn match_member( + &mut self, + ctx: ContextNode, + loc: Loc, + ident: &Identifier, + ret: ExprRet, + ) -> Result<(), ExprErr> { + match ret { + ExprRet::Single(idx) | ExprRet::SingleLiteral(idx) => { + ctx.push_expr(self.member_access_inner(loc, idx, ident, ctx)?, self) + .into_expr_err(loc)?; + Ok(()) + } + ExprRet::Multi(inner) => inner + .into_iter() + .try_for_each(|ret| self.match_member(ctx, loc, ident, ret)), + ExprRet::CtxKilled(kind) => ctx.kill(self, loc, kind).into_expr_err(loc), + ExprRet::Null => Ok(()), + } + } + + /// Perform the member access + fn member_access_inner( + &mut self, + loc: Loc, + member_idx: NodeIdx, + ident: &Identifier, + ctx: ContextNode, + ) -> Result { + match self.node(member_idx) { + Node::ContextVar(cvar) => { + self.member_access_var_ty(cvar.clone(), loc, member_idx, ident, ctx) + } + Node::Contract(_c) => self.contract_member_access( + member_idx, + ContractNode::from(member_idx), + ident, + ctx, + loc, + None, + ), + Node::Struct(_c) => self.struct_member_access( + member_idx, + StructNode::from(member_idx), + ident, + ctx, + loc, + None, + ), + Node::Enum(_c) => { + self.enum_member_access(member_idx, EnumNode::from(member_idx), ident, ctx, loc) + } + Node::Ty(_ty) => { + self.ty_member_access(member_idx, TyNode::from(member_idx), ident, ctx, loc, None) + } + Node::Msg(_msg) => self.msg_access(loc, ctx, &ident.name), + Node::Block(_b) => self.block_access(loc, ctx, &ident.name), + Node::Builtin(ref _b) => { + self.builtin_member_access(loc, ctx, BuiltInNode::from(member_idx), false, ident) + } + e => Err(ExprErr::Todo( + loc, + format!("Member access on type: {e:?} is not yet supported"), + )), + } + } + + /// Get visible functions for this member + fn visible_member_funcs( + &mut self, + ctx: ContextNode, + loc: Loc, + member_idx: NodeIdx, + ) -> Result, ExprErr> { + let res = match self.node(member_idx) { + Node::ContextVar(cvar) => match &cvar.ty { + VarType::User(TypeNode::Contract(con_node), _) => { + let cnode = *con_node; + let mut funcs = cnode.linearized_functions(self).into_expr_err(loc)?; + self + .possible_library_funcs(ctx, cnode.0.into()) + .into_iter() + .for_each(|func| { + let name = func.name(self).unwrap(); + funcs.entry(name).or_insert(func); + }); + funcs.values().copied().collect() + }, + VarType::BuiltIn(bn, _) => self + .possible_library_funcs(ctx, bn.0.into()) + .into_iter() + .collect::>(), + VarType::Concrete(cnode) => { + let b = cnode.underlying(self).unwrap().as_builtin(); + let bn = self.builtin_or_add(b); + self.possible_library_funcs(ctx, bn) + .into_iter() + .collect::>() + } + VarType::User(TypeNode::Struct(sn), _) => self + .possible_library_funcs(ctx, sn.0.into()) + .into_iter() + .collect::>(), + VarType::User(TypeNode::Enum(en), _) => self + .possible_library_funcs(ctx, en.0.into()) + .into_iter() + .collect::>(), + VarType::User(TypeNode::Ty(ty), _) => self + .possible_library_funcs(ctx, ty.0.into()) + .into_iter() + .collect::>(), + VarType::User(TypeNode::Func(func_node), _) => self + .possible_library_funcs(ctx, func_node.0.into()) + .into_iter() + .collect::>(), + VarType::User(TypeNode::Unresolved(n), _) => { + match self.node(*n) { + 
Node::Unresolved(ident) => { + return Err(ExprErr::Unresolved(loc, format!("The type \"{}\" is currently unresolved but should have been resolved by now. This is a bug.", ident.name))) + } + _ => unreachable!() + } + } + }, + Node::Contract(_) => ContractNode::from(member_idx).funcs(self), + Node::Concrete(_) + | Node::Ty(_) + | Node::Struct(_) + | Node::Function(_) + | Node::Enum(_) + | Node::Builtin(_) => self + .possible_library_funcs(ctx, member_idx) + .into_iter() + .collect::>(), + e => { + return Err(ExprErr::MemberAccessNotFound( + loc, + format!("This type cannot have member functions: {:?}", e), + )) + } + }; + Ok(res) + } + + /// Perform member access for a variable type + fn member_access_var_ty( + &mut self, + cvar: ContextVar, + loc: Loc, + member_idx: NodeIdx, + ident: &Identifier, + ctx: ContextNode, + ) -> Result { + match &cvar.ty { + VarType::User(TypeNode::Struct(struct_node), _) => { + self.struct_member_access(member_idx, *struct_node, ident, ctx, loc, Some(cvar)) + } + VarType::User(TypeNode::Enum(enum_node), _) => { + self.enum_member_access(member_idx, *enum_node, ident, ctx, loc) + } + VarType::User(TypeNode::Func(func_node), _) => { + self.func_member_access(*func_node, ident, ctx, loc) + } + VarType::User(TypeNode::Ty(ty_node), _) => { + self.ty_member_access(member_idx, *ty_node, ident, ctx, loc, Some(cvar)) + } + VarType::User(TypeNode::Contract(con_node), _) => { + self.contract_member_access(member_idx, *con_node, ident, ctx, loc, Some(cvar)) + } + VarType::BuiltIn(bn, _) => self.builtin_member_access( + loc, + ctx, + *bn, + ContextVarNode::from(member_idx) + .is_storage(self) + .into_expr_err(loc)?, + ident, + ), + VarType::Concrete(cn) => { + let builtin = cn.underlying(self).into_expr_err(loc)?.as_builtin(); + let bn = self.builtin_or_add(builtin).into(); + self.builtin_member_access( + loc, + ctx, + bn, + ContextVarNode::from(member_idx) + .is_storage(self) + .into_expr_err(loc)?, + ident, + ) + } + e => Err(ExprErr::UnhandledCombo( + loc, + format!("Unhandled member access: {:?}, {:?}", e, ident), + )), + } + } + + /// Perform a `TyNode` member access + fn ty_member_access( + &mut self, + _member_idx: NodeIdx, + ty_node: TyNode, + ident: &Identifier, + ctx: ContextNode, + loc: Loc, + _maybe_parent: Option, + ) -> Result { + let name = ident.name.split('(').collect::>()[0]; + if let Some(func) = self.library_func_search(ctx, ty_node.0.into(), ident) { + Ok(func) + } else if let Some(func) = self.builtin_fn_or_maybe_add(name) { + Ok(ExprRet::Single(func)) + } else { + Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unknown member access \"{}\" on struct \"{}\"", + ident.name, + ty_node.name(self).into_expr_err(loc)? + ), + )) + } + } + + /// Access function members + fn func_member_access( + &mut self, + func_node: FunctionNode, + ident: &Identifier, + ctx: ContextNode, + loc: Loc, + ) -> Result { + let prefix_only_name = func_node + .prefix_only_name(self) + .into_expr_err(loc)? 
+ .unwrap(); + let name = format!("{}.{}", prefix_only_name, ident.name); + tracing::trace!("Function member access: {}", name); + match &*ident.name { + "selector" => { + let mut out = [0; 32]; + keccak_hash::keccak_256(prefix_only_name.as_bytes(), &mut out); + let selector: [u8; 4] = [out[0], out[1], out[2], out[3]]; + let selector_conc = Node::Concrete(Concrete::from(selector)); + let selector_node = ConcreteNode::from(self.add_node(selector_conc)); + let var = ContextVar::new_from_concrete(loc, ctx, selector_node, self) + .into_expr_err(loc)?; + let cvar = self.add_node(Node::ContextVar(var)); + Ok(ExprRet::Single(cvar)) + } + _ => Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unknown member access \"{}\" on function \"{}\"", + ident.name, prefix_only_name + ), + )), + } + } +} diff --git a/crates/solc-expressions/src/member_access/mod.rs b/crates/solc-expressions/src/member_access/mod.rs new file mode 100644 index 00000000..59766091 --- /dev/null +++ b/crates/solc-expressions/src/member_access/mod.rs @@ -0,0 +1,18 @@ +//! This module consists of traits & blanket implementations that facilitate performing member access operations +//! like `MyStruct.field` or `MyContract.myFunc` + +mod builtin_access; +mod contract_access; +mod enum_access; +mod library_access; +mod list_access; +mod member_trait; +mod struct_access; + +pub use builtin_access::*; +pub use contract_access::*; +pub use enum_access::*; +pub use library_access::*; +pub use list_access::*; +pub use member_trait::*; +pub use struct_access::*; diff --git a/crates/solc-expressions/src/member_access/struct_access.rs b/crates/solc-expressions/src/member_access/struct_access.rs new file mode 100644 index 00000000..84b7bfb8 --- /dev/null +++ b/crates/solc-expressions/src/member_access/struct_access.rs @@ -0,0 +1,74 @@ +use crate::{ExprErr, IntoExprErr, LibraryAccess}; + +use graph::{ + nodes::{ContextNode, ContextVar, ContextVarNode, ExprRet, StructNode}, + AnalyzerBackend, ContextEdge, Edge, Node, +}; +use shared::NodeIdx; + +use solang_parser::pt::{Expression, Identifier, Loc}; + +impl StructAccess for T where + T: LibraryAccess + AnalyzerBackend + Sized +{ +} +/// Trait for performing member accesses on Structs +pub trait StructAccess: + LibraryAccess + AnalyzerBackend + Sized +{ + /// Perform member access on a struct + fn struct_member_access( + &mut self, + member_idx: NodeIdx, + struct_node: StructNode, + ident: &Identifier, + ctx: ContextNode, + loc: Loc, + maybe_parent: Option, + ) -> Result { + let name = format!( + "{}.{}", + struct_node.name(self).into_expr_err(loc)?, + ident.name + ); + tracing::trace!("Struct member access: {}", name); + if let Some(attr_var) = ctx.var_by_name_or_recurse(self, &name).into_expr_err(loc)? 
{ + Ok(ExprRet::Single(attr_var.latest_version(self).into())) + } else if let Some(field) = struct_node.find_field(self, ident) { + let cvar = if let Some(parent) = maybe_parent { + parent + } else { + ContextVar::maybe_from_user_ty(self, loc, struct_node.into()).unwrap() + }; + if let Some(field_cvar) = ContextVar::maybe_new_from_field( + self, + loc, + &cvar, + field.underlying(self).unwrap().clone(), + ) { + let fc_node = self.add_node(Node::ContextVar(field_cvar)); + self.add_edge( + fc_node, + ContextVarNode::from(member_idx).first_version(self), + Edge::Context(ContextEdge::AttrAccess("field")), + ); + ctx.add_var(fc_node.into(), self).into_expr_err(loc)?; + self.add_edge(fc_node, ctx, Edge::Context(ContextEdge::Variable)); + Ok(ExprRet::Single(fc_node)) + } else { + panic!("Couldn't create field variable"); + } + } else if let Some(func) = self.library_func_search(ctx, struct_node.0.into(), ident) { + Ok(func) + } else { + Err(ExprErr::MemberAccessNotFound( + loc, + format!( + "Unknown member access \"{}\" on struct \"{}\"", + ident.name, + struct_node.name(self).into_expr_err(loc)? + ), + )) + } + } +} diff --git a/crates/solc-expressions/src/pre_post_in_decrement.rs b/crates/solc-expressions/src/pre_post_in_decrement.rs new file mode 100644 index 00000000..275c979a --- /dev/null +++ b/crates/solc-expressions/src/pre_post_in_decrement.rs @@ -0,0 +1,226 @@ +use crate::{ + context_builder::ContextBuilder, variable::Variable, ExprErr, ExpressionParser, IntoExprErr, +}; + +use graph::{ + elem::*, + nodes::{Concrete, ContextNode, ContextVarNode, ExprRet}, + AnalyzerBackend, +}; +use shared::RangeArena; + +use ethers_core::types::U256; +use solang_parser::pt::{Expression, Loc}; + +impl PrePostIncDecrement for T where + T: AnalyzerBackend + Sized +{ +} +/// Handles pre and post increment and decrement +pub trait PrePostIncDecrement: + AnalyzerBackend + Sized +{ + /// Handle a preincrement + fn pre_increment( + &mut self, + arena: &mut RangeArena>, + expr: &Expression, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + self.parse_ctx_expr(arena, expr, ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + tracing::trace!("PreIncrement variable pop"); + let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs( + loc, + "PreIncrement operation had no right hand side".to_string(), + )); + }; + + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.match_in_de_crement(arena, ctx, true, true, loc, &ret) + }) + } + + /// Handle a postincrement + fn post_increment( + &mut self, + arena: &mut RangeArena>, + expr: &Expression, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + self.parse_ctx_expr(arena, expr, ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + tracing::trace!("PostIncrement variable pop"); + let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { + return Err(ExprErr::NoRhs( + loc, + "PostIncrement operation had no right hand side".to_string(), + )); + }; + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.match_in_de_crement(arena, ctx, false, true, loc, &ret) + }) + } + + /// Handle a predecrement + fn pre_decrement( + &mut self, + arena: &mut RangeArena>, + expr: &Expression, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + self.parse_ctx_expr(arena, expr, ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + tracing::trace!("PreDecrement variable pop"); + let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs( + loc, + "PreDecrement operation had no right hand side".to_string(), + )); + }; + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.match_in_de_crement(arena, ctx, true, false, loc, &ret) + }) + } + + /// Handle a postdecrement + fn post_decrement( + &mut self, + arena: &mut RangeArena>, + expr: &Expression, + loc: Loc, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + self.parse_ctx_expr(arena, expr, ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + tracing::trace!("PostDecrement variable pop"); + let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoRhs( + loc, + "PostDecrement operation had no right hand side".to_string(), + )); + }; + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.match_in_de_crement(arena, ctx, false, false, loc, &ret) + }) + } + + /// Match on the [`ExprRet`]s of a pre-or-post in/decrement and performs it + fn match_in_de_crement( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + pre: bool, + increment: bool, + loc: Loc, + rhs: &ExprRet, + ) -> Result<(), ExprErr> { + match rhs { + ExprRet::CtxKilled(kind) => { + ctx.kill(self, loc, *kind).into_expr_err(loc)?; + Ok(()) + } + ExprRet::SingleLiteral(var) => { + ContextVarNode::from(*var) + .try_increase_size(self, arena) + .into_expr_err(loc)?; + self.match_in_de_crement(arena, ctx, pre, increment, loc, &ExprRet::Single(*var)) + } + ExprRet::Single(var) => { + let cvar = ContextVarNode::from(*var).latest_version(self); + let elem = Elem::from(cvar); + let one = Elem::from(Concrete::from(U256::from(1))).cast(elem.clone()); + + // if let Some(r) = cvar.range(self).into_expr_err(loc)? 
{ + if increment { + if pre { + let dup = cvar.as_tmp(loc, ctx, self).into_expr_err(loc)?; + dup.set_range_min(self, arena, elem.clone() + one.clone()) + .into_expr_err(loc)?; + dup.set_range_max(self, arena, elem.clone() + one.clone()) + .into_expr_err(loc)?; + let new_cvar = self.advance_var_in_ctx(cvar, loc, ctx)?; + new_cvar + .set_range_min(self, arena, elem.clone() + one.clone()) + .into_expr_err(loc)?; + new_cvar + .set_range_max(self, arena, elem + one) + .into_expr_err(loc)?; + ctx.push_expr(ExprRet::Single(dup.latest_version(self).into()), self) + .into_expr_err(loc)?; + Ok(()) + } else { + let dup = cvar.as_tmp(loc, ctx, self).into_expr_err(loc)?; + dup.set_range_min(self, arena, elem.clone()) + .into_expr_err(loc)?; + dup.set_range_max(self, arena, elem.clone()) + .into_expr_err(loc)?; + let new_cvar = self.advance_var_in_ctx(cvar, loc, ctx)?; + let res = new_cvar + .set_range_min(self, arena, elem.clone() + one.clone()) + .into_expr_err(loc); + let _ = self.add_if_err(res); + new_cvar + .set_range_max(self, arena, elem + one) + .into_expr_err(loc)?; + ctx.push_expr(ExprRet::Single(dup.latest_version(self).into()), self) + .into_expr_err(loc)?; + Ok(()) + } + } else if pre { + let dup = cvar.as_tmp(loc, ctx, self).into_expr_err(loc)?; + dup.set_range_min(self, arena, elem.clone() - one.clone()) + .into_expr_err(loc)?; + dup.set_range_max(self, arena, elem.clone() - one.clone()) + .into_expr_err(loc)?; + let new_cvar = self.advance_var_in_ctx(cvar, loc, ctx)?; + new_cvar + .set_range_min(self, arena, elem.clone() - one.clone()) + .into_expr_err(loc)?; + new_cvar + .set_range_max(self, arena, elem - one) + .into_expr_err(loc)?; + ctx.push_expr(ExprRet::Single(dup.latest_version(self).into()), self) + .into_expr_err(loc)?; + Ok(()) + } else { + let dup = cvar.as_tmp(loc, ctx, self).into_expr_err(loc)?; + dup.set_range_min(self, arena, elem.clone()) + .into_expr_err(loc)?; + dup.set_range_max(self, arena, elem.clone()) + .into_expr_err(loc)?; + let new_cvar = self.advance_var_in_ctx(cvar, loc, ctx)?; + new_cvar + .set_range_min(self, arena, elem.clone() - one.clone()) + .into_expr_err(loc)?; + new_cvar + .set_range_max(self, arena, elem - one) + .into_expr_err(loc)?; + ctx.push_expr(ExprRet::Single(dup.into()), self) + .into_expr_err(loc)?; + Ok(()) + } + } + ExprRet::Multi(inner) => inner.iter().try_for_each(|expr| { + self.match_in_de_crement(arena, ctx, pre, increment, loc, expr) + }), + ExprRet::Null => Ok(()), + } + } +} diff --git a/src/context/exprs/require.rs b/crates/solc-expressions/src/require.rs similarity index 59% rename from src/context/exprs/require.rs rename to crates/solc-expressions/src/require.rs index 0852ad30..28d4f608 100644 --- a/src/context/exprs/require.rs +++ b/crates/solc-expressions/src/require.rs @@ -1,30 +1,28 @@ -use crate::context::exprs::IntoExprErr; -use crate::context::ExprErr; -use crate::{ - exprs::{BinOp, Variable}, - AnalyzerLike, Concrete, ConcreteNode, ContextBuilder, Node, -}; -use shared::range::elem_ty::RangeExpr; -use shared::range::range_string::ToRangeString; - -use shared::{ - context::*, - nodes::{BuiltInNode, Builtin, VarType}, - range::{ - elem::{RangeElem, RangeOp}, - elem_ty::{Elem, RangeConcrete}, - Range, RangeEval, SolcRange, +use crate::{BinOp, ContextBuilder, ExprErr, ExpressionParser, IntoExprErr, Variable}; + +use graph::{ + elem::*, + nodes::{ + BuiltInNode, Builtin, Concrete, ConcreteNode, ContextNode, ContextVar, ContextVarNode, + ExprRet, KilledKind, TmpConstruction, }, - Edge, + range_string::ToRangeString, + 
AnalyzerBackend, ContextEdge, Edge, Node, Range, RangeEval, SolcRange, VarType, }; -use solang_parser::helpers::CodeLocation; +use shared::RangeArena; use ethers_core::types::I256; -use solang_parser::pt::{Expression, Loc}; +use solang_parser::{ + helpers::CodeLocation, + pt::{Expression, Loc}, +}; + use std::cmp::Ordering; -impl Require for T where T: Variable + BinOp + Sized + AnalyzerLike {} -pub trait Require: AnalyzerLike + Variable + BinOp + Sized { +impl Require for T where T: Variable + BinOp + Sized + AnalyzerBackend {} + +/// Deals with require and assert statements, as well as adjusts bounds for variables +pub trait Require: AnalyzerBackend + Variable + BinOp + Sized { /// Inverts a comparator expression fn inverse_expr(&self, expr: Expression) -> Expression { match expr { @@ -37,6 +35,9 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { // Expression::And(loc, lhs, rhs) => { // Expression::Or(loc, Box::new(self.inverse_expr(*lhs)), Box::new(self.inverse_expr(*rhs))) // } + // Expression::Or(loc, lhs, rhs) => { + // Expression::And(loc, Box::new(self.inverse_expr(*lhs)), Box::new(self.inverse_expr(*rhs))) + // } // Expression::Not(loc, lhs) => { // Expression::Equal(loc, lhs, Box::new(Expression::BoolLiteral(loc, true))) // } @@ -46,13 +47,24 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { /// Handles a require expression #[tracing::instrument(level = "trace", skip_all)] - fn handle_require(&mut self, inputs: &[Expression], ctx: ContextNode) -> Result<(), ExprErr> { - match inputs.get(0).expect("No lhs input for require statement") { + fn handle_require( + &mut self, + arena: &mut RangeArena>, + inputs: &[Expression], + ctx: ContextNode, + ) -> Result<(), ExprErr> { + ctx.add_gas_cost(self, shared::gas::BIN_OP_GAS) + .into_expr_err(inputs[0].loc())?; + match inputs.first().expect("No lhs input for require statement") { Expression::Equal(loc, lhs, rhs) => { - self.parse_ctx_expr(rhs, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Require operation `==` had no right hand side".to_string())) + self.parse_ctx_expr(arena, rhs, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "Require operation `==` had no right hand side".to_string(), + )); }; let rhs_paths = rhs_paths.flatten(); @@ -62,10 +74,15 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { return Ok(()); } - analyzer.parse_ctx_expr(lhs, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Require operation `==` had no left hand side".to_string())) + analyzer.parse_ctx_expr(arena, lhs, ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
+ else { + return Err(ExprErr::NoLhs( + loc, + "Require operation `==` had no left hand side".to_string(), + )); }; if matches!(lhs_paths, ExprRet::CtxKilled(_)) { @@ -73,22 +90,27 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { return Ok(()); } analyzer.handle_require_inner( + arena, ctx, loc, &lhs_paths.flatten(), &rhs_paths, RangeOp::Eq, - RangeOp::Neq, + RangeOp::Eq, (RangeOp::Neq, RangeOp::Eq), ) }) }) } Expression::NotEqual(loc, lhs, rhs) => { - self.parse_ctx_expr(rhs, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Require operation `!=` had no right hand side".to_string())) + self.parse_ctx_expr(arena, rhs, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "Require operation `!=` had no right hand side".to_string(), + )); }; let rhs_paths = rhs_paths.flatten(); @@ -96,32 +118,42 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { ctx.push_expr(rhs_paths, analyzer).into_expr_err(loc)?; return Ok(()); } - analyzer.parse_ctx_expr(lhs, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Require operation `!=` had no left hand side".to_string())) + analyzer.parse_ctx_expr(arena, lhs, ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoLhs( + loc, + "Require operation `!=` had no left hand side".to_string(), + )); }; if matches!(lhs_paths, ExprRet::CtxKilled(_)) { ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; return Ok(()); } analyzer.handle_require_inner( + arena, ctx, loc, &lhs_paths.flatten(), &rhs_paths, RangeOp::Neq, - RangeOp::Eq, + RangeOp::Neq, (RangeOp::Eq, RangeOp::Neq), ) }) }) } Expression::Less(loc, lhs, rhs) => { - self.parse_ctx_expr(rhs, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Require operation `<` had no right hand side".to_string())) + self.parse_ctx_expr(arena, rhs, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "Require operation `<` had no right hand side".to_string(), + )); }; let rhs_paths = rhs_paths.flatten(); @@ -130,16 +162,22 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { return Ok(()); } - analyzer.parse_ctx_expr(lhs, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Require operation `<` had no left hand side".to_string())) + analyzer.parse_ctx_expr(arena, lhs, ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
+ else { + return Err(ExprErr::NoLhs( + loc, + "Require operation `<` had no left hand side".to_string(), + )); }; if matches!(lhs_paths, ExprRet::CtxKilled(_)) { ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; return Ok(()); } analyzer.handle_require_inner( + arena, ctx, loc, &lhs_paths.flatten(), @@ -152,10 +190,14 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { }) } Expression::More(loc, lhs, rhs) => { - self.parse_ctx_expr(rhs, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Require operation `>` had no right hand side".to_string())) + self.parse_ctx_expr(arena, rhs, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "Require operation `>` had no right hand side".to_string(), + )); }; let rhs_paths = rhs_paths.flatten(); @@ -164,16 +206,22 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { return Ok(()); } - analyzer.parse_ctx_expr(lhs, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Require operation `>` had no left hand side".to_string())) + analyzer.parse_ctx_expr(arena, lhs, ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoLhs( + loc, + "Require operation `>` had no left hand side".to_string(), + )); }; if matches!(lhs_paths, ExprRet::CtxKilled(_)) { ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; return Ok(()); } analyzer.handle_require_inner( + arena, ctx, loc, &lhs_paths.flatten(), @@ -186,10 +234,14 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { }) } Expression::MoreEqual(loc, lhs, rhs) => { - self.parse_ctx_expr(rhs, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Require operation `>=` had no right hand side".to_string())) + self.parse_ctx_expr(arena, rhs, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "Require operation `>=` had no right hand side".to_string(), + )); }; let rhs_paths = rhs_paths.flatten(); @@ -198,16 +250,22 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { return Ok(()); } - analyzer.parse_ctx_expr(lhs, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Require operation `>=` had no left hand side".to_string())) + analyzer.parse_ctx_expr(arena, lhs, ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
+ else { + return Err(ExprErr::NoLhs( + loc, + "Require operation `>=` had no left hand side".to_string(), + )); }; if matches!(lhs_paths, ExprRet::CtxKilled(_)) { ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; return Ok(()); } analyzer.handle_require_inner( + arena, ctx, loc, &lhs_paths.flatten(), @@ -220,10 +278,14 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { }) } Expression::LessEqual(loc, lhs, rhs) => { - self.parse_ctx_expr(rhs, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Require operation `<=` had no right hand side".to_string())) + self.parse_ctx_expr(arena, rhs, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "Require operation `<=` had no right hand side".to_string(), + )); }; let rhs_paths = rhs_paths.flatten(); @@ -232,16 +294,22 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { return Ok(()); } - analyzer.parse_ctx_expr(lhs, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Require operation `<=` had no left hand side".to_string())) + analyzer.parse_ctx_expr(arena, lhs, ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoLhs( + loc, + "Require operation `<=` had no left hand side".to_string(), + )); }; if matches!(lhs_paths, ExprRet::CtxKilled(_)) { ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; return Ok(()); } analyzer.handle_require_inner( + arena, ctx, loc, &lhs_paths.flatten(), @@ -254,17 +322,22 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { }) } Expression::Not(loc, lhs) => { - self.parse_ctx_expr(lhs, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Require operation `NOT` had no left hand side".to_string())) + self.parse_ctx_expr(arena, lhs, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
+ else { + return Err(ExprErr::NoLhs( + loc, + "Require operation `NOT` had no left hand side".to_string(), + )); }; if matches!(lhs_paths, ExprRet::CtxKilled(_)) { ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; return Ok(()); } - let cnode = - ConcreteNode::from(analyzer.add_node(Node::Concrete(Concrete::Bool(false)))); + let cnode = ConcreteNode::from( + analyzer.add_node(Node::Concrete(Concrete::Bool(false))), + ); let tmp_false = Node::ContextVar( ContextVar::new_from_concrete(Loc::Implicit, ctx, cnode, analyzer) .into_expr_err(loc)?, @@ -272,21 +345,26 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { let rhs_paths = ExprRet::Single(ContextVarNode::from(analyzer.add_node(tmp_false)).into()); analyzer.handle_require_inner( + arena, ctx, loc, &lhs_paths, &rhs_paths, RangeOp::Eq, - RangeOp::Neq, + RangeOp::Eq, (RangeOp::Neq, RangeOp::Eq), ) }) } Expression::And(loc, lhs, rhs) => { - self.parse_ctx_expr(lhs, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Require operation `&&` had no left hand side".to_string())) + self.parse_ctx_expr(arena, lhs, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoLhs( + loc, + "Require operation `&&` had no left hand side".to_string(), + )); }; if matches!(lhs_paths, ExprRet::CtxKilled(_)) { @@ -294,10 +372,15 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { return Ok(()); } - analyzer.parse_ctx_expr(rhs, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Require operation `&&` had no left hand side".to_string())) + analyzer.parse_ctx_expr(arena, rhs, ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(rhs_paths) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
+ else { + return Err(ExprErr::NoLhs( + loc, + "Require operation `&&` had no left hand side".to_string(), + )); }; if matches!(rhs_paths, ExprRet::CtxKilled(_)) { @@ -305,7 +388,9 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { return Ok(()); } - let cnode = ConcreteNode::from(analyzer.add_node(Node::Concrete(Concrete::Bool(true)))); + let cnode = ConcreteNode::from( + analyzer.add_node(Node::Concrete(Concrete::Bool(true))), + ); let tmp_true = Node::ContextVar( ContextVar::new_from_concrete(Loc::Implicit, ctx, cnode, analyzer) .into_expr_err(loc)?, @@ -315,79 +400,98 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { analyzer.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); let tmp_rhs_paths = ExprRet::Single(node); - analyzer.handle_require_inner( - ctx, - loc, - &lhs_paths, - &tmp_rhs_paths, - RangeOp::Eq, - RangeOp::Neq, - (RangeOp::Neq, RangeOp::Eq), - )?; - - analyzer.handle_require_inner( - ctx, - loc, - &rhs_paths, - &tmp_rhs_paths, - RangeOp::Eq, - RangeOp::Neq, - (RangeOp::Neq, RangeOp::Eq), - )?; - + // NOTE: the following is *sequence dependent* + // we want to update the parts *before* the `And` op + // to ensure the ctx_dep is correct // update the part's bounds - let lhs_cvar = ContextVarNode::from(lhs_paths.expect_single().into_expr_err(loc)?); + let lhs_cvar = + ContextVarNode::from(lhs_paths.expect_single().into_expr_err(loc)?); let underlying = lhs_cvar.underlying(analyzer).into_expr_err(loc)?; if let Some(tmp) = underlying.tmp_of { - if let Some((op, inv_op, pair)) = tmp.op.require_parts() { + if let Some((op, _inv_op, pair)) = tmp.op.require_parts() { analyzer.handle_require_inner( + arena, ctx, loc, &ExprRet::Single(tmp.lhs.into()), &ExprRet::Single(tmp.rhs.unwrap().into()), op, - inv_op, + op, pair, )?; } } // update the part's bounds - let rhs_cvar = ContextVarNode::from(rhs_paths.expect_single().into_expr_err(loc)?); + let rhs_cvar = + ContextVarNode::from(rhs_paths.expect_single().into_expr_err(loc)?); let underlying = rhs_cvar.underlying(analyzer).into_expr_err(loc)?; if let Some(tmp) = underlying.tmp_of { - if let Some((op, inv_op, pair)) = tmp.op.require_parts() { + if let Some((op, _inv_op, pair)) = tmp.op.require_parts() { analyzer.handle_require_inner( + arena, ctx, loc, &ExprRet::Single(tmp.lhs.into()), &ExprRet::Single(tmp.rhs.unwrap().into()), op, - inv_op, + op, pair, )?; } } + + analyzer.handle_require_inner( + arena, + ctx, + loc, + &lhs_paths, + &tmp_rhs_paths, + RangeOp::Eq, + RangeOp::Eq, + (RangeOp::Neq, RangeOp::Eq), + )?; + + analyzer.handle_require_inner( + arena, + ctx, + loc, + &rhs_paths, + &tmp_rhs_paths, + RangeOp::Eq, + RangeOp::Eq, + (RangeOp::Neq, RangeOp::Eq), + )?; + Ok(()) }) }) } Expression::Or(loc, lhs, rhs) => { - self.parse_ctx_expr(lhs, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Require operation `||` had no left hand side".to_string())) + self.parse_ctx_expr(arena, lhs, ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
+ else { + return Err(ExprErr::NoLhs( + loc, + "Require operation `||` had no left hand side".to_string(), + )); }; if matches!(lhs_paths, ExprRet::CtxKilled(_)) { ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; return Ok(()); } - analyzer.parse_ctx_expr(rhs, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Require operation `||` had no left hand side".to_string())) + analyzer.parse_ctx_expr(arena, rhs, ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(rhs_paths) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoLhs( + loc, + "Require operation `||` had no left hand side".to_string(), + )); }; if matches!(rhs_paths, ExprRet::CtxKilled(_)) { @@ -395,10 +499,16 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { return Ok(()); } - let lhs_cvar = ContextVarNode::from(lhs_paths.expect_single().into_expr_err(loc)?); - let rhs_cvar = ContextVarNode::from(rhs_paths.expect_single().into_expr_err(loc)?); + let lhs_cvar = + ContextVarNode::from(lhs_paths.expect_single().into_expr_err(loc)?); + let rhs_cvar = + ContextVarNode::from(rhs_paths.expect_single().into_expr_err(loc)?); - let elem = Elem::Expr(RangeExpr::new(lhs_cvar.into(), RangeOp::Or, rhs_cvar.into())); + let elem = Elem::Expr(RangeExpr::new( + lhs_cvar.into(), + RangeOp::Or, + rhs_cvar.into(), + )); let range = SolcRange::new(elem.clone(), elem, vec![]); let new_lhs_underlying = ContextVar { @@ -421,11 +531,30 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { is_symbolic: lhs_cvar.is_symbolic(analyzer).into_expr_err(loc)? || rhs_cvar.is_symbolic(analyzer).into_expr_err(loc)?, is_return: false, - tmp_of: Some(TmpConstruction::new(lhs_cvar, RangeOp::Or, Some(rhs_cvar))), - ty: VarType::BuiltIn(analyzer.builtin_or_add(Builtin::Bool).into(), Some(range)) + tmp_of: Some(TmpConstruction::new( + lhs_cvar, + RangeOp::Or, + Some(rhs_cvar), + )), + dep_on: { + let mut deps = + lhs_cvar.dependent_on(analyzer, true).into_expr_err(loc)?; + deps.extend( + rhs_cvar.dependent_on(analyzer, true).into_expr_err(loc)?, + ); + Some(deps) + }, + ty: VarType::BuiltIn( + analyzer.builtin_or_add(Builtin::Bool).into(), + Some(range), + ), }; - let or_var = ContextVarNode::from(analyzer.add_node(Node::ContextVar(new_lhs_underlying))); - let cnode = ConcreteNode::from(analyzer.add_node(Node::Concrete(Concrete::Bool(true)))); + let or_var = ContextVarNode::from( + analyzer.add_node(Node::ContextVar(new_lhs_underlying)), + ); + let cnode = ConcreteNode::from( + analyzer.add_node(Node::Concrete(Concrete::Bool(true))), + ); let tmp_true = Node::ContextVar( ContextVar::new_from_concrete(Loc::Implicit, ctx, cnode, analyzer) .into_expr_err(loc)?, @@ -435,28 +564,34 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { analyzer.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); let rhs_paths = ExprRet::Single(node); analyzer.handle_require_inner( + arena, ctx, loc, &ExprRet::Single(or_var.into()), &rhs_paths, RangeOp::Eq, - RangeOp::Neq, + RangeOp::Eq, (RangeOp::Neq, RangeOp::Eq), ) }) }) } other => { - self.parse_ctx_expr(other, ctx)?; - self.apply_to_edges(ctx, other.loc(), &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoLhs(loc, "Require operation had no left hand side".to_string())) + self.parse_ctx_expr(arena, other, ctx)?; + self.apply_to_edges(ctx, other.loc(), arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoLhs( + loc, + "Require operation had no left hand side".to_string(), + )); }; if matches!(lhs_paths, ExprRet::CtxKilled(_)) { ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; return Ok(()); } - let cnode = ConcreteNode::from(analyzer.add_node(Node::Concrete(Concrete::Bool(true)))); + let cnode = + ConcreteNode::from(analyzer.add_node(Node::Concrete(Concrete::Bool(true)))); let tmp_true = Node::ContextVar( ContextVar::new_from_concrete(Loc::Implicit, ctx, cnode, analyzer) .into_expr_err(other.loc())?, @@ -464,12 +599,13 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { let rhs_paths = ExprRet::Single(ContextVarNode::from(analyzer.add_node(tmp_true)).into()); analyzer.handle_require_inner( + arena, ctx, loc, &lhs_paths, &rhs_paths, RangeOp::Eq, - RangeOp::Neq, + RangeOp::Eq, (RangeOp::Neq, RangeOp::Eq), ) }) @@ -477,8 +613,10 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { } } + /// Do matching on [`ExprRet`]s to actually perform the require statement evaluation fn handle_require_inner( &mut self, + arena: &mut RangeArena>, ctx: ContextNode, loc: Loc, lhs_paths: &ExprRet, @@ -492,9 +630,10 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { (_, ExprRet::CtxKilled(..)) | (ExprRet::CtxKilled(..), _) => Ok(()), (ExprRet::SingleLiteral(lhs), ExprRet::Single(rhs)) => { ContextVarNode::from(*lhs) - .cast_from(&ContextVarNode::from(*rhs), self) + .cast_from(&ContextVarNode::from(*rhs), self, arena) .into_expr_err(loc)?; self.handle_require_inner( + arena, ctx, loc, &ExprRet::Single(*lhs), @@ -506,9 +645,10 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { } (ExprRet::Single(lhs), ExprRet::SingleLiteral(rhs)) => { ContextVarNode::from(*rhs) - .cast_from(&ContextVarNode::from(*lhs), self) + .cast_from(&ContextVarNode::from(*lhs), self, arena) .into_expr_err(loc)?; self.handle_require_inner( + arena, ctx, loc, lhs_paths, @@ -524,17 +664,35 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { let rhs_cvar = ContextVarNode::from(*rhs).latest_version(self); let new_rhs = self.advance_var_in_ctx(rhs_cvar, loc, ctx)?; - self.require(new_lhs, new_rhs, ctx, loc, op, rhs_op, recursion_ops)?; + self.require(arena, new_lhs, new_rhs, ctx, loc, op, rhs_op, recursion_ops)?; Ok(()) } (l @ ExprRet::Single(_) | l @ ExprRet::SingleLiteral(_), ExprRet::Multi(rhs_sides)) => { rhs_sides.iter().try_for_each(|expr_ret| { - self.handle_require_inner(ctx, loc, l, expr_ret, op, rhs_op, recursion_ops) + self.handle_require_inner( + arena, + ctx, + loc, + l, + expr_ret, + op, + rhs_op, + recursion_ops, + ) }) } (ExprRet::Multi(lhs_sides), r @ ExprRet::Single(_) | r @ ExprRet::SingleLiteral(_)) => { lhs_sides.iter().try_for_each(|expr_ret| { - self.handle_require_inner(ctx, loc, expr_ret, r, op, rhs_op, recursion_ops) + self.handle_require_inner( + arena, + ctx, + loc, + expr_ret, + r, + op, + rhs_op, + recursion_ops, + ) }) } (ExprRet::Multi(lhs_sides), ExprRet::Multi(rhs_sides)) => { @@ -543,6 +701,7 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { lhs_sides.iter().zip(rhs_sides.iter()).try_for_each( |(lhs_expr_ret, rhs_expr_ret)| { self.handle_require_inner( + arena, ctx, loc, lhs_expr_ret, @@ -556,6 +715,7 @@ pub 
trait Require: AnalyzerLike + Variable + BinOp + Sized { } else { rhs_sides.iter().try_for_each(|rhs_expr_ret| { self.handle_require_inner( + arena, ctx, loc, lhs_paths, @@ -580,13 +740,14 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { #[tracing::instrument(level = "trace", skip_all)] fn require( &mut self, + arena: &mut RangeArena>, mut new_lhs: ContextVarNode, mut new_rhs: ContextVarNode, ctx: ContextNode, loc: Loc, op: RangeOp, rhs_op: RangeOp, - recursion_ops: (RangeOp, RangeOp), + _recursion_ops: (RangeOp, RangeOp), ) -> Result, ExprErr> { tracing::trace!( "require: {} {} {}", @@ -598,24 +759,19 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { let mut tmp_cvar = None; if let Some(lhs_range) = new_lhs - .underlying(self) - .into_expr_err(loc)? - .ty + .latest_version(self) .range(self) .into_expr_err(loc)? { let lhs_range_fn = SolcRange::dyn_fn_from_op(op); - // lhs_range.update_deps(new_lhs, ctx, self); let mut new_var_range = lhs_range_fn(lhs_range.clone(), new_rhs); if let Some(rhs_range) = new_rhs.range(self).into_expr_err(loc)? { - let lhs_is_const = new_lhs.is_const(self).into_expr_err(loc)?; - // println!("is const: {lhs_is_const},[{}, {}]", new_lhs.evaled_range_min(self).unwrap().expect("REASON").to_range_string(false, self).s, new_lhs.evaled_range_max(self).unwrap().expect("REASON").to_range_string(true, self).s); - let rhs_is_const = new_rhs.is_const(self).into_expr_err(loc)?; - // println!("is const: {rhs_is_const}, [{}, {}]", new_rhs.evaled_range_min(self).unwrap().expect("REASON").to_range_string(false, self).s, new_rhs.evaled_range_max(self).unwrap().expect("REASON").to_range_string(true, self).s); + let lhs_is_const = new_lhs.is_const(self, arena).into_expr_err(loc)?; + let rhs_is_const = new_rhs.is_const(self, arena).into_expr_err(loc)?; match (lhs_is_const, rhs_is_const) { (true, true) => { - if self.const_killable(op, lhs_range, rhs_range) { + if self.const_killable(arena, op, lhs_range, rhs_range) { tracing::trace!("const killable"); ctx.kill(self, loc, KilledKind::Revert).into_expr_err(loc)?; return Ok(None); @@ -625,16 +781,18 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { // flip the new range around to be in terms of rhs let rhs_range_fn = SolcRange::dyn_fn_from_op(rhs_op); new_var_range = rhs_range_fn(rhs_range.clone(), new_lhs); - if self - .update_nonconst_from_const(loc, rhs_op, new_lhs, new_rhs, rhs_range)? - { + if self.update_nonconst_from_const( + arena, ctx, loc, rhs_op, new_lhs, new_rhs, rhs_range, + )? { tracing::trace!("half-const killable"); ctx.kill(self, loc, KilledKind::Revert).into_expr_err(loc)?; return Ok(None); } } (false, true) => { - if self.update_nonconst_from_const(loc, op, new_rhs, new_lhs, lhs_range)? { + if self.update_nonconst_from_const( + arena, ctx, loc, op, new_rhs, new_lhs, lhs_range, + )? { tracing::trace!("half-const killable"); ctx.kill(self, loc, KilledKind::Revert).into_expr_err(loc)?; return Ok(None); @@ -642,7 +800,7 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { } (false, false) => { if self.update_nonconst_from_nonconst( - loc, op, new_lhs, new_rhs, lhs_range, rhs_range, + arena, ctx, loc, op, new_lhs, new_rhs, lhs_range, rhs_range, )? 
{ tracing::trace!("nonconst killable"); ctx.kill(self, loc, KilledKind::Revert).into_expr_err(loc)?; @@ -659,103 +817,111 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { ) )); } + tracing::trace!("done range updating"); + new_rhs = new_rhs.latest_version(self); + new_lhs = new_lhs.latest_version(self); - if let Some(backing_arr) = new_lhs.len_var_to_array(self).into_expr_err(loc)? { - if let Some(r) = backing_arr.ref_range(self).into_expr_err(loc)? { - let min = r.range_min().into_owned(); - let max = r.range_max().into_owned(); + let rhs_display_name = new_rhs.display_name(self).into_expr_err(loc)?; + let display_name = if rhs_display_name == "true" { + (new_lhs.display_name(self).into_expr_err(loc)?).to_string() + } else { + format!( + "({} {} {rhs_display_name})", + new_lhs.display_name(self).into_expr_err(loc)?, + op.to_string(), + ) + }; - if let Some(mut rd) = min.maybe_range_dyn() { - rd.len = Elem::from(new_lhs); - backing_arr - .set_range_min(self, Elem::ConcreteDyn(Box::new(rd))) - .into_expr_err(loc)?; - } + // we take the previous version because for the solver we actually dont want the updated range + let base = Elem::Expr(RangeExpr::new( + new_lhs.previous_version(self).unwrap_or(new_lhs).into(), + op, + new_rhs.previous_version(self).unwrap_or(new_rhs).into(), + )); - if let Some(mut rd) = max.maybe_range_dyn() { - rd.len = Elem::from(new_lhs); - backing_arr - .set_range_max(self, Elem::ConcreteDyn(Box::new(rd))) - .into_expr_err(loc)?; - } - } - } else if let Some(arr) = new_lhs.index_to_array(self) { - if let Some(index) = new_lhs.index_access_to_index(self) { - let next_arr = self.advance_var_in_ctx(arr.latest_version(self), loc, ctx)?; - if next_arr - .underlying(self) - .into_expr_err(loc)? - .ty - .is_dyn_builtin(self) - .into_expr_err(loc)? - { - if let Some(r) = next_arr.ref_range(self).into_expr_err(loc)? { - let min = r.evaled_range_min(self).into_expr_err(loc)?; - let max = r.evaled_range_max(self).into_expr_err(loc)?; - - if let Some(mut rd) = min.maybe_range_dyn() { - rd.val.insert(Elem::from(index), Elem::from(new_rhs)); - next_arr - .set_range_min(self, Elem::ConcreteDyn(Box::new(rd))) - .into_expr_err(loc)?; - } - - if let Some(mut rd) = max.maybe_range_dyn() { - rd.val.insert(Elem::from(index), Elem::from(new_rhs)); - next_arr - .set_range_max(self, Elem::ConcreteDyn(Box::new(rd))) - .into_expr_err(loc)?; - } - } - } - } - } + // construct a temporary variable that represent the conditional we just checked + let conditional_var = ContextVar { + loc: Some(loc), + name: format!( + "tmp{}({} {} {})", + ctx.new_tmp(self).into_expr_err(loc)?, + new_lhs.name(self).into_expr_err(loc)?, + op.to_string(), + new_rhs.name(self).into_expr_err(loc)?, + ), + display_name: display_name.clone(), + storage: None, + is_tmp: true, + tmp_of: Some(TmpConstruction::new(new_lhs, op, Some(new_rhs))), + dep_on: { + let mut deps = new_lhs.dependent_on(self, true).into_expr_err(loc)?; + deps.extend(new_rhs.dependent_on(self, true).into_expr_err(loc)?); + Some(deps) + }, + is_symbolic: new_lhs.is_symbolic(self).into_expr_err(loc)? + || new_rhs.is_symbolic(self).into_expr_err(loc)?, + is_return: false, + ty: VarType::BuiltIn( + BuiltInNode::from(self.builtin_or_add(Builtin::Bool)), + // we set the minimum to `true` so that if `elem` evaluates to false, + // we can discover the unsatisifiability + Some(SolcRange::new(base.clone(), base, vec![])), + ), + }; - if let Some(backing_arr) = new_rhs.len_var_to_array(self).into_expr_err(loc)? 
{ - if let Some(r) = backing_arr.ref_range(self).into_expr_err(loc)? { - let min = r.range_min().into_owned(); - let max = r.range_max().into_owned(); + let conditional_cvar = + ContextVarNode::from(self.add_node(Node::ContextVar(conditional_var))); + ctx.add_var(conditional_cvar, self).into_expr_err(loc)?; + self.add_edge(conditional_cvar, ctx, Edge::Context(ContextEdge::Variable)); - if let Some(mut rd) = min.maybe_range_dyn() { - rd.len = Elem::from(new_lhs); - backing_arr - .set_range_min(self, Elem::ConcreteDyn(Box::new(rd))) - .into_expr_err(loc)?; - } + let cnode = ConcreteNode::from(self.add_node(Node::Concrete(Concrete::Bool(true)))); + let tmp_true = Node::ContextVar( + ContextVar::new_from_concrete(Loc::Implicit, ctx, cnode, self) + .into_expr_err(loc)?, + ); - if let Some(mut rd) = max.maybe_range_dyn() { - rd.len = Elem::from(new_lhs); - backing_arr - .set_range_max(self, Elem::ConcreteDyn(Box::new(rd))) - .into_expr_err(loc)?; - } - } - } + // construct a temporary `true` node + let tmp_true = ContextVarNode::from(self.add_node(tmp_true)); + // construct a temporary var that will be used as the ctx dependency let tmp_var = ContextVar { loc: Some(loc), name: format!( - "tmp{}({} {} {})", + "tmp{}(({} {} {}) == true)", ctx.new_tmp(self).into_expr_err(loc)?, new_lhs.name(self).into_expr_err(loc)?, op.to_string(), new_rhs.name(self).into_expr_err(loc)?, ), - display_name: format!( - "({} {} {})", - new_lhs.display_name(self).into_expr_err(loc)?, - op.to_string(), - new_rhs.display_name(self).into_expr_err(loc)?, - ), + display_name: format!("{display_name} == true"), storage: None, is_tmp: true, - tmp_of: Some(TmpConstruction::new(new_lhs, op, Some(new_rhs))), + tmp_of: Some(TmpConstruction::new( + tmp_true, + RangeOp::Eq, + Some(conditional_cvar), + )), + dep_on: { + let mut deps = tmp_true.dependent_on(self, true).into_expr_err(loc)?; + deps.extend( + conditional_cvar + .dependent_on(self, true) + .into_expr_err(loc)?, + ); + Some(deps) + }, is_symbolic: new_lhs.is_symbolic(self).into_expr_err(loc)? || new_rhs.is_symbolic(self).into_expr_err(loc)?, is_return: false, ty: VarType::BuiltIn( BuiltInNode::from(self.builtin_or_add(Builtin::Bool)), - SolcRange::from(Concrete::Bool(true)), + // we set the minimum to `true` so that if `elem` evaluates to false, + // we can discover the unsatisifiability + Some(SolcRange::new( + Elem::from(Concrete::from(true)), + Elem::from(conditional_cvar), + vec![], + )), ), }; @@ -765,17 +931,18 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { tmp_cvar = Some(cvar); - any_unsat |= new_var_range.unsat(self); - if any_unsat { + tracing::trace!("checking unsat"); + any_unsat |= new_var_range.unsat(self, arena); + + ctx.add_ctx_dep(conditional_cvar, self, arena) + .into_expr_err(loc)?; + + if any_unsat || ctx.unreachable(self, arena).into_expr_err(loc)? { ctx.kill(self, loc, KilledKind::Revert).into_expr_err(loc)?; return Ok(None); } - - ctx.add_ctx_dep(cvar, self).into_expr_err(loc)?; } - new_lhs.update_deps(ctx, self).into_expr_err(loc)?; - new_rhs.update_deps(ctx, self).into_expr_err(loc)?; tracing::trace!( "{} is tmp: {}", new_lhs.display_name(self).unwrap(), @@ -783,12 +950,12 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { ); if let Some(tmp) = new_lhs.tmp_of(self).into_expr_err(loc)? 
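// Once the conditional tmp above is registered as a context dependency, the path is killed
// with `KilledKind::Revert` if the narrowed range is unsatisfiable or the context as a whole
// has become unreachable. A minimal, self-contained picture of that check, using a
// hypothetical integer interval instead of `SolcRange`:
#[derive(Clone, Copy)]
struct Interval {
    min: i128,
    max: i128,
}

impl Interval {
    fn intersect(self, other: Interval) -> Interval {
        Interval { min: self.min.max(other.min), max: self.max.min(other.max) }
    }
    fn unsat(self) -> bool {
        self.min > self.max
    }
}

fn main() {
    // require(x <= 10) on x in [20, 100] leaves an empty interval, so the path reverts
    let x = Interval { min: 20, max: 100 };
    let le_ten = Interval { min: i128::MIN, max: 10 };
    assert!(x.intersect(le_ten).unsat());
}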
{ if tmp.op.inverse().is_some() && !matches!(op, RangeOp::Eq | RangeOp::Neq) { - self.range_recursion(tmp, recursion_ops, new_rhs, ctx, loc, &mut any_unsat)?; + // self.range_recursion(tmp, recursion_ops, new_rhs, ctx, loc, &mut any_unsat)?; } else { match tmp.op { RangeOp::Not => {} _ => { - self.uninvertable_range_recursion(tmp, new_lhs, new_rhs, loc, ctx); + self.uninvertable_range_recursion(arena, tmp, new_lhs, new_rhs, loc, ctx); } } } @@ -798,50 +965,56 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { } /// Checks and returns whether the require statement is killable (i.e. impossible) - fn const_killable(&mut self, op: RangeOp, lhs_range: SolcRange, rhs_range: SolcRange) -> bool { + fn const_killable( + &mut self, + arena: &mut RangeArena>, + op: RangeOp, + lhs_range: SolcRange, + rhs_range: SolcRange, + ) -> bool { // check that the op is satisfied, return it as a bool match op { RangeOp::Eq => !lhs_range - .evaled_range_min(self) + .evaled_range_min(self, arena) .unwrap() - .range_eq(&rhs_range.evaled_range_min(self).unwrap()), + .range_eq(&rhs_range.evaled_range_min(self, arena).unwrap(), arena), RangeOp::Neq => lhs_range - .evaled_range_min(self) + .evaled_range_min(self, arena) .unwrap() - .range_eq(&rhs_range.evaled_range_min(self).unwrap()), + .range_eq(&rhs_range.evaled_range_min(self, arena).unwrap(), arena), RangeOp::Gt => { matches!( lhs_range - .evaled_range_min(self) + .evaled_range_min(self, arena) .unwrap() - .range_ord(&rhs_range.evaled_range_min(self).unwrap()), + .range_ord(&rhs_range.evaled_range_min(self, arena).unwrap(), arena), Some(Ordering::Equal) | Some(Ordering::Less) ) } RangeOp::Gte => { matches!( lhs_range - .evaled_range_min(self) + .evaled_range_min(self, arena) .unwrap() - .range_ord(&rhs_range.evaled_range_min(self).unwrap()), + .range_ord(&rhs_range.evaled_range_min(self, arena).unwrap(), arena), Some(Ordering::Less) ) } RangeOp::Lt => { matches!( lhs_range - .evaled_range_min(self) + .evaled_range_min(self, arena) .unwrap() - .range_ord(&rhs_range.evaled_range_min(self).unwrap()), + .range_ord(&rhs_range.evaled_range_min(self, arena).unwrap(), arena), Some(Ordering::Equal) | Some(Ordering::Greater) ) } RangeOp::Lte => { matches!( lhs_range - .evaled_range_min(self) + .evaled_range_min(self, arena) .unwrap() - .range_ord(&rhs_range.evaled_range_min(self).unwrap()), + .range_ord(&rhs_range.evaled_range_min(self, arena).unwrap(), arena), Some(Ordering::Greater) ) } @@ -853,6 +1026,8 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { #[tracing::instrument(level = "trace", skip_all)] fn update_nonconst_from_const( &mut self, + arena: &mut RangeArena>, + _ctx: ContextNode, loc: Loc, op: RangeOp, const_var: ContextVarNode, @@ -864,19 +1039,23 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { RangeOp::Eq => { // check that the constant is contained in the nonconst var range let elem = Elem::from(const_var.latest_version(self)); - let evaled_min = nonconst_range.evaled_range_min(self).into_expr_err(loc)?; + let evaled_min = nonconst_range + .evaled_range_min(self, arena) + .into_expr_err(loc)?; if evaled_min.maybe_concrete().is_none() { - return Err(ExprErr::BadRange(loc, format!("Expected to have a concrete range by now. This is likely a bug. Min: {}", evaled_min.to_range_string(false, self).s))); + return Err(ExprErr::BadRange(loc, format!("Expected to have a concrete range by now. This is likely a bug (update nonconst from const: Eq). 
Min: {}", evaled_min.to_range_string(false, self, arena).s))); } - if !nonconst_range.contains_elem(&elem, self) { + if !nonconst_range.contains_elem(&elem, self, arena) { return Ok(true); } // if its contained, we can set the min & max to it nonconst_var - .set_range_min(self, elem.clone()) + .set_range_min(self, arena, elem.clone()) + .into_expr_err(loc)?; + nonconst_var + .set_range_max(self, arena, elem) .into_expr_err(loc)?; - nonconst_var.set_range_max(self, elem).into_expr_err(loc)?; Ok(false) } RangeOp::Neq => { @@ -885,36 +1064,44 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { // potentially add the const var as a range exclusion if let Some(Ordering::Equal) = nonconst_range - .evaled_range_min(self) + .evaled_range_min(self, arena) .into_expr_err(loc)? - .range_ord(&elem) + .range_ord(&elem, arena) { // mins are equivalent, add 1 instead of adding an exclusion - let min = nonconst_range.evaled_range_min(self).into_expr_err(loc)?; - let Some(min) = min - .maybe_concrete() else { - return Err(ExprErr::BadRange(loc, format!("Expected to have a concrete range by now. This is likely a bug. Min: {}", min.to_range_string(false, self).s))); + let min = nonconst_range + .evaled_range_min(self, arena) + .into_expr_err(loc)?; + let Some(min) = min.maybe_concrete() else { + return Err(ExprErr::BadRange(loc, format!("Expected to have a concrete range by now. This is likely a bug (update nonconst from const: Neq). Min: {}", min.to_range_string(false, self, arena).s))); }; let one = Concrete::one(&min.val).expect("Cannot increment range elem by one"); let min = nonconst_range.range_min().into_owned() + Elem::from(one); - nonconst_var.set_range_min(self, min).into_expr_err(loc)?; + nonconst_var + .set_range_min(self, arena, min) + .into_expr_err(loc)?; } else if let Some(std::cmp::Ordering::Equal) = nonconst_range - .evaled_range_max(self) + .evaled_range_max(self, arena) .into_expr_err(loc)? - .range_ord(&elem) + .range_ord(&elem, arena) { // maxs are equivalent, subtract 1 instead of adding an exclusion - let max = nonconst_range.evaled_range_max(self).into_expr_err(loc)?; + let max = nonconst_range + .evaled_range_max(self, arena) + .into_expr_err(loc)?; let Some(max) = max.maybe_concrete() else { - return Err(ExprErr::BadRange(loc, format!("Expected to have a concrete range by now. This is likely a bug. Max: {}", max.to_range_string(true, self).s))); + return Err(ExprErr::BadRange(loc, format!("Expected to have a concrete range by now. This is likely a bug (update nonconst from const: Neq 2). 
Max: {}", max.to_range_string(true, self, arena).s))); }; let one = Concrete::one(&max.val).expect("Cannot decrement range elem by one"); let max = nonconst_range.range_max().into_owned() - Elem::from(one); - nonconst_var.set_range_max(self, max).into_expr_err(loc)?; + nonconst_var + .set_range_max(self, arena, max) + .into_expr_err(loc)?; } else { // just add as an exclusion - nonconst_range.add_range_exclusion(elem); + let idx = arena.idx_or_upsert(elem, self); + nonconst_range.add_range_exclusion(idx); nonconst_var .set_range_exclusions(self, nonconst_range.exclusions) .into_expr_err(loc)?; @@ -926,22 +1113,38 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { let elem = Elem::from(const_var.latest_version(self)); // if nonconst max is <= const, we can't make this true - let max = nonconst_range.evaled_range_max(self).into_expr_err(loc)?; + let max = nonconst_range + .evaled_range_max(self, arena) + .into_expr_err(loc)?; if matches!( - max.range_ord(&elem.minimize(self).into_expr_err(loc)?), + max.range_ord(&elem.minimize(self, arena).into_expr_err(loc)?, arena), Some(Ordering::Less) | Some(Ordering::Equal) ) { return Ok(true); } // we add one to the element because its strict > - let Some(max_conc) = max.maybe_concrete() else { - return Err(ExprErr::BadRange(loc, format!("Expected to have a concrete range by now. This is likely a bug. Max: {}", max.to_range_string(true, self).s))); + let Some(max_conc) = const_var + .evaled_range_min(self, arena) + .unwrap() + .unwrap() + .maybe_concrete() + else { + return Err(ExprErr::BadRange(loc, format!( + "Expected {} to have a concrete range by now. This is likely a bug (update nonconst from const: Gt). Max: {}, expr: {} {} {}, const value: {}", + nonconst_var.display_name(self).unwrap(), + nonconst_range.max, + nonconst_var.display_name(self).unwrap(), + op.to_string(), + const_var.display_name(self).unwrap(), + const_var.evaled_range_min(self, arena).unwrap().unwrap() + ))); }; let one = Concrete::one(&max_conc.val).expect("Cannot decrement range elem by one"); nonconst_var .set_range_min( self, + arena, (elem + one.into()).max(nonconst_range.range_min().into_owned()), ) .into_expr_err(loc)?; @@ -953,16 +1156,20 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { // if nonconst max is < const, we can't make this true if matches!( nonconst_range - .evaled_range_max(self) + .evaled_range_max(self, arena) .into_expr_err(loc)? 
- .range_ord(&elem.minimize(self).into_expr_err(loc)?), + .range_ord(&elem.minimize(self, arena).into_expr_err(loc)?, arena), Some(Ordering::Less) ) { return Ok(true); } nonconst_var - .set_range_min(self, elem.max(nonconst_range.range_min().into_owned())) + .set_range_min( + self, + arena, + elem.max(nonconst_range.range_min().into_owned()), + ) .into_expr_err(loc)?; Ok(false) } @@ -970,9 +1177,11 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { let elem = Elem::from(const_var.latest_version(self)); // if nonconst min is >= const, we can't make this true - let min = nonconst_range.evaled_range_min(self).into_expr_err(loc)?; + let min = nonconst_range + .evaled_range_min(self, arena) + .into_expr_err(loc)?; if matches!( - min.range_ord(&elem.minimize(self).into_expr_err(loc)?), + min.range_ord(&elem.minimize(self, arena).into_expr_err(loc)?, arena), Some(Ordering::Greater) | Some(Ordering::Equal) ) { return Ok(true); @@ -981,13 +1190,14 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { // we add one to the element because its strict > let Some(min_conc) = min.maybe_concrete() else { - return Err(ExprErr::BadRange(loc, format!("Expected to have a concrete range by now. This is likely a bug. Min: {}", min.to_range_string(true, self).s))); + return Err(ExprErr::BadRange(loc, format!("Expected {} to have a concrete range by now. This is likely a bug (update nonconst from const: Lt). Min: {}", nonconst_var.display_name(self).unwrap(), nonconst_range.min))); }; let one = Concrete::one(&min_conc.val).expect("Cannot decrement range elem by one"); nonconst_var .set_range_max( self, + arena, (elem - one.into()).min(nonconst_range.range_max().into_owned()), ) .into_expr_err(loc)?; @@ -997,16 +1207,22 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { let elem = Elem::from(const_var.latest_version(self)); // if nonconst min is > const, we can't make this true - let min = nonconst_range.evaled_range_min(self).into_expr_err(loc)?; + let min = nonconst_range + .evaled_range_min(self, arena) + .into_expr_err(loc)?; if matches!( - min.range_ord(&elem.minimize(self).into_expr_err(loc)?), + min.range_ord(&elem.minimize(self, arena).into_expr_err(loc)?, arena), Some(Ordering::Greater) ) { return Ok(true); } nonconst_var - .set_range_max(self, elem.min(nonconst_range.range_max().into_owned())) + .set_range_max( + self, + arena, + elem.min(nonconst_range.range_max().into_owned()), + ) .into_expr_err(loc)?; Ok(false) } @@ -1017,6 +1233,8 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { /// Given a const var and a nonconst range, update the range based on the op. Returns whether its impossible fn update_nonconst_from_nonconst( &mut self, + arena: &mut RangeArena>, + _ctx: ContextNode, loc: Loc, op: RangeOp, new_lhs: ContextVarNode, @@ -1028,26 +1246,28 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { match op { RangeOp::Eq => { // check that there is overlap in the ranges - if !lhs_range.overlaps(&rhs_range, self) { + if !lhs_range.overlaps(&rhs_range, self, arena) { return Ok(true); } // take the tighest range match lhs_range - .evaled_range_min(self) + .evaled_range_min(self, arena) .into_expr_err(loc)? - .range_ord(&rhs_range.evaled_range_min(self).into_expr_err(loc)?) 
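// The const-vs-nonconst arms above all follow the same pattern: clamp one bound of the
// non-constant side against the constant, bumping by one for the strict comparisons (hence
// the repeated `Concrete::one` calls). A self-contained integer sketch of that narrowing,
// not the real `SolcRange` API:
#[derive(Clone, Copy, Debug, PartialEq)]
struct Interval {
    min: i128,
    max: i128,
}

// Narrow `x` so that `x <op> c` can still hold; `None` means the requirement is impossible
// and the caller would kill the path.
fn narrow_against_const(x: Interval, op: &str, c: i128) -> Option<Interval> {
    let out = match op {
        ">" => Interval { min: x.min.max(c + 1), ..x },  // strict: add one
        ">=" => Interval { min: x.min.max(c), ..x },
        "<" => Interval { max: x.max.min(c - 1), ..x },  // strict: subtract one
        "<=" => Interval { max: x.max.min(c), ..x },
        "==" => Interval { min: c.max(x.min), max: c.min(x.max) },
        _ => x,
    };
    (out.min <= out.max).then_some(out)
}

fn main() {
    let x = Interval { min: 0, max: 255 };
    assert_eq!(narrow_against_const(x, ">", 10), Some(Interval { min: 11, max: 255 }));
    assert_eq!(narrow_against_const(x, "<", 0), None); // require(x < 0) on an unsigned value
}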
- { + .range_ord( + &rhs_range.evaled_range_min(self, arena).into_expr_err(loc)?, + arena, + ) { Some(Ordering::Greater) => { // take lhs range min as its tigher new_rhs - .set_range_min(self, Elem::from(new_rhs)) + .set_range_min(self, arena, Elem::from(new_rhs)) .into_expr_err(loc)?; } Some(Ordering::Less) => { // take rhs range min as its tigher new_lhs - .set_range_min(self, rhs_range.range_min().into_owned()) + .set_range_min(self, arena, rhs_range.range_min().into_owned()) .into_expr_err(loc)?; } _ => { @@ -1057,20 +1277,22 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { // take the tighest range match lhs_range - .evaled_range_max(self) + .evaled_range_max(self, arena) .into_expr_err(loc)? - .range_ord(&rhs_range.evaled_range_max(self).into_expr_err(loc)?) - { + .range_ord( + &rhs_range.evaled_range_max(self, arena).into_expr_err(loc)?, + arena, + ) { Some(Ordering::Less) => { // take lhs range min as its tigher new_rhs - .set_range_max(self, lhs_range.range_max().into_owned()) + .set_range_max(self, arena, lhs_range.range_max().into_owned()) .into_expr_err(loc)?; } Some(Ordering::Greater) => { // take rhs range min as its tigher new_lhs - .set_range_max(self, rhs_range.range_max().into_owned()) + .set_range_max(self, arena, rhs_range.range_max().into_owned()) .into_expr_err(loc)?; } _ => { @@ -1081,16 +1303,22 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { Ok(false) } RangeOp::Neq => { + if new_rhs == new_lhs { + return Ok(true); + } + let rhs_elem = Elem::from(new_rhs.latest_version(self)); // just add as an exclusion - lhs_range.add_range_exclusion(rhs_elem); + let idx = arena.idx_or_upsert(rhs_elem, self); + lhs_range.add_range_exclusion(idx); new_lhs .set_range_exclusions(self, lhs_range.exclusions) .into_expr_err(loc)?; let lhs_elem = Elem::from(new_lhs.latest_version(self)); // just add as an exclusion - rhs_range.add_range_exclusion(lhs_elem); + let idx = arena.idx_or_upsert(lhs_elem, self); + rhs_range.add_range_exclusion(idx); new_rhs .set_range_exclusions(self, rhs_range.exclusions) .into_expr_err(loc)?; @@ -1101,55 +1329,76 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { let lhs_elem = Elem::from(new_lhs.latest_version(self)); // if lhs.max is <= rhs.min, we can't make this true - let max = lhs_range.evaled_range_max(self).into_expr_err(loc)?; + let max = lhs_range.evaled_range_max(self, arena).into_expr_err(loc)?; if matches!( - max.range_ord(&rhs_elem.minimize(self).into_expr_err(loc)?), + max.range_ord(&rhs_elem.minimize(self, arena).into_expr_err(loc)?, arena), Some(Ordering::Less) | Some(Ordering::Equal) ) { return Ok(true); } let Some(max_conc) = max.maybe_concrete() else { - return Err(ExprErr::BadRange(loc, format!("Expected to have a concrete range by now. This is likely a bug. Max: {}", max.to_range_string(true, self).s))); + return Err(ExprErr::BadRange(loc, format!("Expected to have a concrete range by now. This is likely a bug (update nonconst from nonconst: Gt). 
Max: {}", max.to_range_string(true, self, arena).s))); }; let one = Concrete::one(&max_conc.val).expect("Cannot decrement range elem by one"); // we add/sub one to the element because its strict > new_lhs + .latest_version(self) .set_range_min( self, + arena, (rhs_elem + one.clone().into()).max(lhs_range.range_min().into_owned()), ) .into_expr_err(loc)?; new_rhs + .latest_version(self) .set_range_max( self, + arena, (lhs_elem - one.into()).min(rhs_range.range_max().into_owned()), ) .into_expr_err(loc)?; + Ok(false) } RangeOp::Gte => { + // lhs >= rhs + // lhs min is the max of current lhs_min and rhs_min + let rhs_elem = Elem::from(new_rhs.latest_version(self)); let lhs_elem = Elem::from(new_lhs.latest_version(self)); // if lhs.max is < rhs.min, we can't make this true - if matches!( - lhs_range - .evaled_range_max(self) - .into_expr_err(loc)? - .range_ord(&rhs_elem.minimize(self).into_expr_err(loc)?), - Some(Ordering::Less) - ) { + let max = lhs_range.evaled_range_max(self, arena).into_expr_err(loc)?; + let min = rhs_elem.minimize(self, arena).into_expr_err(loc)?; + if let Some(Ordering::Less) = max.range_ord(&min, arena) { return Ok(true); } - new_lhs - .set_range_min(self, rhs_elem.max(lhs_range.range_min().into_owned())) + let new_min = Elem::Expr(RangeExpr::new( + new_lhs.latest_version(self).into(), + RangeOp::Max, + rhs_elem, + )); + let new_max = Elem::Expr(RangeExpr::new( + new_rhs.latest_version(self).into(), + RangeOp::Min, + lhs_elem, + )); + + let new_new_lhs = self.advance_var_in_curr_ctx(new_lhs, loc)?; + let new_new_rhs = self.advance_var_in_curr_ctx(new_rhs, loc)?; + + new_new_lhs + .set_range_min(self, arena, new_min.clone()) .into_expr_err(loc)?; - new_rhs - .set_range_max(self, lhs_elem.min(rhs_range.range_max().into_owned())) + new_new_rhs + .set_range_min(self, arena, new_max.clone()) + .into_expr_err(loc)?; + new_new_rhs + .set_range_max(self, arena, new_max) .into_expr_err(loc)?; Ok(false) } @@ -1158,9 +1407,9 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { let lhs_elem = Elem::from(new_lhs.latest_version(self)); // if lhs min is >= rhs.max, we can't make this true - let min = lhs_range.evaled_range_min(self).into_expr_err(loc)?; + let min = lhs_range.evaled_range_min(self, arena).into_expr_err(loc)?; if matches!( - min.range_ord(&rhs_elem.maximize(self).into_expr_err(loc)?), + min.range_ord(&rhs_elem.maximize(self, arena).into_expr_err(loc)?, arena), Some(Ordering::Greater) | Some(Ordering::Equal) ) { return Ok(true); @@ -1168,20 +1417,27 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { // we add/sub one to the element because its strict > let Some(min_conc) = min.maybe_concrete() else { - return Err(ExprErr::BadRange(loc, format!("Expected to have a concrete range by now. This is likely a bug. Min: {}", min.to_range_string(false, self).s))); + return Err(ExprErr::BadRange(loc, format!("Expected to have a concrete range by now. This is likely a bug (update nonconst from const: Lt). 
Min: {}", min.to_range_string(false, self, arena).s))); }; let one = Concrete::one(&min_conc.val).expect("Cannot decrement range elem by one"); - new_lhs + let new_new_lhs = self.advance_var_in_curr_ctx(new_lhs, loc)?; + let new_new_rhs = self.advance_var_in_curr_ctx(new_rhs, loc)?; + + new_new_lhs + .latest_version(self) .set_range_max( self, + arena, (rhs_elem - one.clone().into()).min(lhs_range.range_max().into_owned()), ) .into_expr_err(loc)?; - new_rhs + new_new_rhs + .latest_version(self) .set_range_min( self, + arena, (lhs_elem + one.into()).max(rhs_range.range_min().into_owned()), ) .into_expr_err(loc)?; @@ -1189,22 +1445,33 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { } RangeOp::Lte => { let rhs_elem = Elem::from(new_rhs.latest_version(self)); - let lhs_elem = Elem::from(new_lhs.latest_version(self)); + let lhs_elem = Elem::from(new_lhs.latest_version(self)) + .max(rhs_range.range_min().into_owned()); // if nonconst min is > const, we can't make this true - let min = lhs_range.evaled_range_min(self).into_expr_err(loc)?; + let min = lhs_range.evaled_range_min(self, arena).into_expr_err(loc)?; if matches!( - min.range_ord(&rhs_elem.maximize(self).into_expr_err(loc)?), + min.range_ord(&rhs_elem.maximize(self, arena).into_expr_err(loc)?, arena), Some(Ordering::Greater) ) { return Ok(true); } new_lhs - .set_range_max(self, rhs_elem.min(lhs_range.range_max().into_owned())) + .latest_version(self) + .set_range_max( + self, + arena, + rhs_elem.min(lhs_range.range_max().into_owned()), + ) .into_expr_err(loc)?; new_rhs - .set_range_min(self, lhs_elem.max(rhs_range.range_min().into_owned())) + .latest_version(self) + .set_range_min(self, arena, lhs_elem.clone()) + .into_expr_err(loc)?; + new_rhs + .latest_version(self) + .set_range_max(self, arena, lhs_elem) .into_expr_err(loc)?; Ok(false) } @@ -1214,24 +1481,26 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { fn uninvertable_range_recursion( &mut self, + arena: &mut RangeArena>, tmp_construction: TmpConstruction, _new_lhs_core: ContextVarNode, _rhs_cvar: ContextVarNode, loc: Loc, ctx: ContextNode, ) { - if !tmp_construction.lhs.is_const(self).unwrap() { + if !tmp_construction.lhs.is_const(self, arena).unwrap() { // widen to maximum range :( let new_underlying_lhs = self .advance_var_in_ctx(tmp_construction.lhs.latest_version(self), loc, ctx) .unwrap(); if let Some(lhs_range) = tmp_construction.lhs.ref_range(self).unwrap() { - if let Elem::Concrete(c) = lhs_range.evaled_range_min(self).unwrap() { + if let Elem::Concrete(c) = lhs_range.evaled_range_min(self, arena).unwrap() { new_underlying_lhs .set_range_min( self, + arena, Elem::Concrete(RangeConcrete { - val: Concrete::min(&c.val).unwrap_or_else(|| c.val.clone()), + val: Concrete::min_of_type(&c.val).unwrap_or_else(|| c.val.clone()), loc, }), ) @@ -1239,8 +1508,9 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { new_underlying_lhs .set_range_max( self, + arena, Elem::Concrete(RangeConcrete { - val: Concrete::max(&c.val).unwrap_or(c.val), + val: Concrete::max_of_type(&c.val).unwrap_or(c.val), loc, }), ) @@ -1253,6 +1523,7 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { /// Recursively updates the range for a fn range_recursion( &mut self, + arena: &mut RangeArena>, tmp_construction: TmpConstruction, (flip_op, no_flip_op): (RangeOp, RangeOp), rhs_cvar: ContextVarNode, @@ -1262,16 +1533,19 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { ) -> Result<(), ExprErr> { tracing::trace!("Recursing through range"); // 
handle lhs - let Some(inverse) = tmp_construction - .op - .inverse() else { + let Some(inverse) = tmp_construction.op.inverse() else { return Ok(()); }; - if !tmp_construction.lhs.is_const(self).into_expr_err(loc)? { + if !tmp_construction + .lhs + .is_const(self, arena) + .into_expr_err(loc)? + { tracing::trace!("handling lhs range recursion"); let adjusted_gt_rhs = ContextVarNode::from({ let tmp = self.op( + arena, loc, rhs_cvar, tmp_construction.rhs.expect("No rhs in tmp_construction"), @@ -1305,19 +1579,20 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { let new_lhs_range = lhs_range_fn(lhs_range, adjusted_gt_rhs); new_underlying_lhs - .set_range_min(self, new_lhs_range.range_min().into_owned()) + .set_range_min(self, arena, new_lhs_range.range_min().into_owned()) .into_expr_err(loc)?; new_underlying_lhs - .set_range_max(self, new_lhs_range.range_max().into_owned()) + .set_range_max(self, arena, new_lhs_range.range_max().into_owned()) .into_expr_err(loc)?; - if new_lhs_range.unsat(self) { + if new_lhs_range.unsat(self, arena) { *any_unsat = true; ctx.kill(self, loc, KilledKind::Revert).into_expr_err(loc)?; return Ok(()); } if let Some(tmp) = new_underlying_lhs.tmp_of(self).into_expr_err(loc)? { self.range_recursion( + arena, tmp, (flip_op, no_flip_op), adjusted_gt_rhs, @@ -1332,7 +1607,7 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { // handle rhs if let Some(rhs) = tmp_construction.rhs { - if !rhs.is_const(self).into_expr_err(loc)? { + if !rhs.is_const(self, arena).into_expr_err(loc)? { tracing::trace!("handling rhs range recursion"); let (needs_inverse, adjusted_gt_rhs) = match tmp_construction.op { RangeOp::Sub(..) => { @@ -1346,8 +1621,15 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { ContextVarNode::from(self.add_node(Node::ContextVar(lhs_cvar))); // tmp_rhs = rhs_cvar * -1 - let tmp_rhs = - self.op(loc, rhs_cvar, tmp_lhs, ctx, RangeOp::Mul(false), false)?; + let tmp_rhs = self.op( + arena, + loc, + rhs_cvar, + tmp_lhs, + ctx, + RangeOp::Mul(false), + false, + )?; if matches!(tmp_rhs, ExprRet::CtxKilled(_)) { ctx.push_expr(tmp_rhs, self).into_expr_err(loc)?; return Ok(()); @@ -1356,8 +1638,15 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { ContextVarNode::from(tmp_rhs.expect_single().into_expr_err(loc)?); // new_rhs = (rhs_cvar * -1) + tmp_construction.lhs - let new_rhs = - self.op(loc, tmp_rhs, tmp_construction.lhs, ctx, inverse, false)?; + let new_rhs = self.op( + arena, + loc, + tmp_rhs, + tmp_construction.lhs, + ctx, + inverse, + false, + )?; if matches!(new_rhs, ExprRet::CtxKilled(_)) { ctx.push_expr(new_rhs, self).into_expr_err(loc)?; return Ok(()); @@ -1367,8 +1656,15 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { (true, new_rhs) } RangeOp::Add(..) => { - let new_rhs = - self.op(loc, rhs_cvar, tmp_construction.lhs, ctx, inverse, false)?; + let new_rhs = self.op( + arena, + loc, + rhs_cvar, + tmp_construction.lhs, + ctx, + inverse, + false, + )?; if matches!(new_rhs, ExprRet::CtxKilled(_)) { ctx.push_expr(new_rhs, self).into_expr_err(loc)?; return Ok(()); @@ -1378,8 +1674,15 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { (false, new_rhs) } RangeOp::Mul(..) 
=> { - let new_rhs = - self.op(loc, rhs_cvar, tmp_construction.lhs, ctx, inverse, false)?; + let new_rhs = self.op( + arena, + loc, + rhs_cvar, + tmp_construction.lhs, + ctx, + inverse, + false, + )?; if matches!(new_rhs, ExprRet::CtxKilled(_)) { ctx.push_expr(new_rhs, self).into_expr_err(loc)?; return Ok(()); @@ -1389,8 +1692,15 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { (false, new_rhs) } RangeOp::Div(..) => { - let new_rhs = - self.op(loc, rhs_cvar, tmp_construction.lhs, ctx, inverse, false)?; + let new_rhs = self.op( + arena, + loc, + rhs_cvar, + tmp_construction.lhs, + ctx, + inverse, + false, + )?; if matches!(new_rhs, ExprRet::CtxKilled(_)) { ctx.push_expr(new_rhs, self).into_expr_err(loc)?; return Ok(()); @@ -1400,8 +1710,15 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { (false, new_rhs) } RangeOp::Shl => { - let new_rhs = - self.op(loc, rhs_cvar, tmp_construction.lhs, ctx, inverse, false)?; + let new_rhs = self.op( + arena, + loc, + rhs_cvar, + tmp_construction.lhs, + ctx, + inverse, + false, + )?; if matches!(new_rhs, ExprRet::CtxKilled(_)) { ctx.push_expr(new_rhs, self).into_expr_err(loc)?; return Ok(()); @@ -1411,8 +1728,15 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { (false, new_rhs) } RangeOp::Shr => { - let new_rhs = - self.op(loc, rhs_cvar, tmp_construction.lhs, ctx, inverse, false)?; + let new_rhs = self.op( + arena, + loc, + rhs_cvar, + tmp_construction.lhs, + ctx, + inverse, + false, + )?; if matches!(new_rhs, ExprRet::CtxKilled(_)) { ctx.push_expr(new_rhs, self).into_expr_err(loc)?; return Ok(()); @@ -1422,8 +1746,15 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { (false, new_rhs) } RangeOp::Eq => { - let new_rhs = - self.op(loc, rhs_cvar, tmp_construction.lhs, ctx, inverse, false)?; + let new_rhs = self.op( + arena, + loc, + rhs_cvar, + tmp_construction.lhs, + ctx, + inverse, + false, + )?; if matches!(new_rhs, ExprRet::CtxKilled(_)) { ctx.push_expr(new_rhs, self).into_expr_err(loc)?; return Ok(()); @@ -1433,8 +1764,15 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { (false, new_rhs) } RangeOp::Neq => { - let new_rhs = - self.op(loc, rhs_cvar, tmp_construction.lhs, ctx, inverse, false)?; + let new_rhs = self.op( + arena, + loc, + rhs_cvar, + tmp_construction.lhs, + ctx, + inverse, + false, + )?; if matches!(new_rhs, ExprRet::CtxKilled(_)) { ctx.push_expr(new_rhs, self).into_expr_err(loc)?; return Ok(()); @@ -1471,13 +1809,13 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { }; new_underlying_rhs - .set_range_min(self, new_lhs_range.range_min().into_owned()) + .set_range_min(self, arena, new_lhs_range.range_min().into_owned()) .into_expr_err(loc)?; new_underlying_rhs - .set_range_max(self, new_lhs_range.range_max().into_owned()) + .set_range_max(self, arena, new_lhs_range.range_max().into_owned()) .into_expr_err(loc)?; - if new_lhs_range.unsat(self) { + if new_lhs_range.unsat(self, arena) { *any_unsat = true; ctx.kill(self, loc, KilledKind::Revert).into_expr_err(loc)?; return Ok(()); @@ -1485,6 +1823,7 @@ pub trait Require: AnalyzerLike + Variable + BinOp + Sized { if let Some(tmp) = new_underlying_rhs.tmp_of(self).into_expr_err(loc)? 
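// `range_recursion` (whose call site earlier in this change is commented out) and the match
// above walk a temporary's construction (`tmp = lhs <op> rhs`) and push a new bound back onto
// the operands through the inverse operation, with special handling for subtraction via the
// `* -1` step visible above. A reduced sketch of the inverse-propagation idea for Add/Sub
// only; the real code also covers Mul, Div, Shl, Shr, Eq and Neq:
#[derive(Clone, Copy)]
enum Op {
    Add,
    Sub,
}

fn inverse(op: Op) -> Op {
    match op {
        Op::Add => Op::Sub,
        Op::Sub => Op::Add,
    }
}

// Given `tmp = lhs <op> rhs` and an upper bound on `tmp`, derive an upper bound for `lhs`.
fn propagate_upper_bound(tmp_upper: i128, op: Op, rhs: i128) -> i128 {
    match inverse(op) {
        Op::Sub => tmp_upper - rhs, // tmp = lhs + rhs  =>  lhs <= tmp_upper - rhs
        Op::Add => tmp_upper + rhs, // tmp = lhs - rhs  =>  lhs <= tmp_upper + rhs
    }
}

fn main() {
    // tmp = a + 3; require(tmp <= 10)  =>  a <= 7
    assert_eq!(propagate_upper_bound(10, Op::Add, 3), 7);
}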
{ self.range_recursion( + arena, tmp, (flip_op, no_flip_op), adjusted_gt_rhs, diff --git a/crates/solc-expressions/src/variable.rs b/crates/solc-expressions/src/variable.rs new file mode 100644 index 00000000..d7d774ee --- /dev/null +++ b/crates/solc-expressions/src/variable.rs @@ -0,0 +1,525 @@ +use crate::{assign::Assign, env::Env, ContextBuilder, ExprErr, IntoExprErr}; + +use graph::{ + elem::Elem, + nodes::{Concrete, ContextNode, ContextVar, ContextVarNode, ExprRet, VarNode}, + AnalyzerBackend, ContextEdge, Edge, GraphError, Node, VarType, +}; +use shared::RangeArena; + +use solang_parser::pt::{Expression, Identifier, Loc, VariableDeclaration}; + +impl Variable for T where T: AnalyzerBackend + Sized {} +/// Deals with variable retrieval, parsing, and versioning +pub trait Variable: AnalyzerBackend + Sized { + #[tracing::instrument(level = "trace", skip_all)] + /// Get a variable based on an identifier + fn variable( + &mut self, + arena: &mut RangeArena>, + ident: &Identifier, + ctx: ContextNode, + recursion_target: Option, + ) -> Result<(), ExprErr> { + tracing::trace!( + "Getting variable: {}, loc: {:?}, ctx: {}", + &ident.name, + ident.loc, + ctx.path(self) + ); + let target_ctx = if let Some(recursion_target) = recursion_target { + recursion_target + } else { + ctx + }; + + // solang doesnt have `super` as a keyword + if let Some(cvar) = ctx.var_by_name(self, &ident.name) { + let cvar = cvar.latest_version(self); + self.apply_to_edges( + target_ctx, + ident.loc, + arena, + &|analyzer, arena, edge_ctx, _loc| { + let var = analyzer.advance_var_in_ctx(cvar, ident.loc, edge_ctx)?; + edge_ctx + .push_expr(ExprRet::Single(var.into()), analyzer) + .into_expr_err(ident.loc) + }, + ) + } else if ident.name == "_" { + self.env_variable(arena, ident, target_ctx)?; + Ok(()) + } else if let Some(cvar) = ctx + .var_by_name_or_recurse(self, &ident.name) + .into_expr_err(ident.loc)? + { + // check if we can inherit it + let cvar = cvar.latest_version(self); + self.apply_to_edges( + target_ctx, + ident.loc, + arena, + &|analyzer, arena, edge_ctx, _loc| { + let var = analyzer.advance_var_in_ctx(cvar, ident.loc, edge_ctx)?; + edge_ctx + .push_expr(ExprRet::Single(var.into()), analyzer) + .into_expr_err(ident.loc) + }, + ) + // if let Some(recursion_target) = recursion_target { + // self.variable(ident, parent_ctx, Some(recursion_target)) + // } else { + // self.variable(ident, parent_ctx, Some(target_ctx)) + // } + } else if (self.env_variable(arena, ident, target_ctx)?).is_some() { + Ok(()) + } else if let Some(idx) = self.user_types().get(&ident.name).cloned() { + let const_var = if let Node::Var(_v) = self.node(idx) { + VarNode::from(idx) + .const_value(ident.loc, self) + .into_expr_err(ident.loc)? 
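// `variable` resolves an identifier by trying, in order: a variable already in this context,
// the `_` / environment variables, a variable inherited from a parent context, a known user
// type (using the constant value of a storage variable when available), a builtin function,
// a visible function (currently a Todo error), and finally a placeholder `Unresolved` node
// recorded in `user_types`. The toy resolver below collapses the environment and inherited
// fallbacks and uses made-up names; it is not the pyrometer API.
enum Resolution {
    LocalVar,     // ctx.var_by_name hit
    EnvVar,       // `_` or an environment variable
    InheritedVar, // found by recursing into parent contexts (omitted in the toy resolver)
    UserType,     // storage variable, enum, contract, ...
    BuiltinFn,    // a known builtin function name
    Unresolved,   // remembered so the name can resolve once it is defined
}

struct Scope<'a> {
    locals: &'a [&'a str],
    user_types: &'a [&'a str],
    builtins: &'a [&'a str],
}

fn resolve(scope: &Scope, name: &str) -> Resolution {
    if scope.locals.contains(&name) {
        Resolution::LocalVar
    } else if name == "_" {
        Resolution::EnvVar
    } else if scope.user_types.contains(&name) {
        Resolution::UserType
    } else if scope.builtins.contains(&name) {
        Resolution::BuiltinFn
    } else {
        Resolution::Unresolved
    }
}

fn main() {
    let scope = Scope { locals: &["x"], user_types: &["MyEnum"], builtins: &["keccak256"] };
    assert!(matches!(resolve(&scope, "x"), Resolution::LocalVar));
    assert!(matches!(resolve(&scope, "y"), Resolution::Unresolved));
}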
+ } else { + None + }; + + let var = if let Some(con) = const_var { + con + } else { + match self.node(idx) { + Node::Var(_) | Node::Enum(_) => { + match ContextVar::maybe_from_user_ty(self, ident.loc, idx) { + Some(v) => v, + None => { + return Err(ExprErr::VarBadType( + ident.loc, + format!( + "Could not create context variable from user type: {:?}", + self.node(idx) + ), + )) + } + } + } + _ => { + return target_ctx + .push_expr(ExprRet::Single(idx), self) + .into_expr_err(ident.loc) + } + } + }; + + let new_cvarnode = self.add_node(Node::ContextVar(var)); + + ctx.add_var(new_cvarnode.into(), self) + .into_expr_err(ident.loc)?; + self.add_edge( + new_cvarnode, + target_ctx, + Edge::Context(ContextEdge::Variable), + ); + target_ctx + .push_expr(ExprRet::Single(new_cvarnode), self) + .into_expr_err(ident.loc)?; + Ok(()) + } else if let Some(func_node) = self.builtin_fn_or_maybe_add(&ident.name) { + target_ctx + .push_expr(ExprRet::Single(func_node), self) + .into_expr_err(ident.loc)?; + Ok(()) + } else if let Some(_func) = target_ctx + .visible_funcs(self) + .into_expr_err(ident.loc)? + .iter() + .find(|func| func.name(self).unwrap() == ident.name) + { + Err(ExprErr::Todo( + ident.loc, + "Function as variables has limited support".to_string(), + )) + } else { + let node = self.add_node(Node::Unresolved(ident.clone())); + self.user_types_mut().insert(ident.name.clone(), node); + target_ctx + .push_expr(ExprRet::Single(node), self) + .into_expr_err(ident.loc)?; + Ok(()) + } + } + + fn get_tmp_variable(&mut self, name: &str, ctx: ContextNode) -> Option { + let cvar = ctx.tmp_var_by_name(self, name)?; + Some(cvar.latest_version(self)) + } + + fn get_unchanged_tmp_variable( + &mut self, + arena: &mut RangeArena>, + name: &str, + ctx: ContextNode, + ) -> Result, GraphError> { + let Some(var) = self.get_tmp_variable(name, ctx) else { + return Ok(None); + }; + + if let Some(tmp) = var.tmp_of(self)? 
{ + if tmp.lhs.latest_version(self) != tmp.lhs { + let latest = tmp.lhs.latest_version(self); + let newest_min = latest.evaled_range_min(self, arena)?; + let curr_min = tmp.lhs.evaled_range_min(self, arena)?; + if newest_min != curr_min { + return Ok(None); + } + let newest_max = latest.evaled_range_max(self, arena)?; + let curr_max = tmp.lhs.evaled_range_max(self, arena)?; + if newest_max != curr_max { + return Ok(None); + } + } + + if let Some(rhs) = tmp.rhs { + if rhs.latest_version(self) != rhs { + let latest = rhs.latest_version(self); + let newest_min = latest.evaled_range_min(self, arena)?; + let curr_min = rhs.evaled_range_min(self, arena)?; + if newest_min != curr_min { + return Ok(None); + } + let newest_max = latest.evaled_range_max(self, arena)?; + let curr_max = rhs.evaled_range_max(self, arena)?; + if newest_max != curr_max { + return Ok(None); + } + } + } + + Ok(Some(var)) + } else { + Ok(Some(var)) + } + } + + /// Match on the [`ExprRet`]s of a variable definition and construct the variable + fn match_var_def( + &mut self, + arena: &mut RangeArena>, + ctx: ContextNode, + var_decl: &VariableDeclaration, + loc: Loc, + lhs_paths: &ExprRet, + rhs_paths: Option<&ExprRet>, + ) -> Result { + match (lhs_paths, rhs_paths) { + (ExprRet::CtxKilled(kind), _) | (_, Some(ExprRet::CtxKilled(kind))) => { + ctx.kill(self, loc, *kind).into_expr_err(loc)?; + Ok(true) + } + (ExprRet::Single(ty), Some(ExprRet::SingleLiteral(rhs))) => { + let ty = VarType::try_from_idx(self, *ty).expect("Not a known type"); + let rhs_cvar = ContextVarNode::from(*rhs).latest_version(self); + let res = rhs_cvar.literal_cast_from_ty(ty, self).into_expr_err(loc); + let _ = self.add_if_err(res); + self.match_var_def( + arena, + ctx, + var_decl, + loc, + lhs_paths, + Some(&ExprRet::Single(rhs_cvar.into())), + ) + } + (ExprRet::Single(ty), Some(ExprRet::Single(rhs))) => { + let name = var_decl.name.clone().expect("Variable wasn't named"); + let ty = VarType::try_from_idx(self, *ty).expect("Not a known type"); + let var = ContextVar { + loc: Some(loc), + name: name.to_string(), + display_name: name.to_string(), + storage: var_decl.storage.as_ref().map(|s| s.clone().into()), + is_tmp: false, + is_symbolic: true, + tmp_of: None, + dep_on: None, + is_return: false, + ty, + }; + let lhs = ContextVarNode::from(self.add_node(Node::ContextVar(var))); + ctx.add_var(lhs, self).into_expr_err(loc)?; + self.add_edge(lhs, ctx, Edge::Context(ContextEdge::Variable)); + let rhs = ContextVarNode::from(*rhs); + + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let _ = analyzer.assign(arena, loc, lhs, rhs, ctx)?; + // match_assign_ret(analyzer, ctx, ret); + Ok(()) + })?; + + Ok(false) + } + (ExprRet::Single(ty), None) => { + let name = var_decl.name.clone().expect("Variable wasn't named"); + let ty = VarType::try_from_idx(self, *ty).expect("Not a known type"); + let var = ContextVar { + loc: Some(loc), + name: name.to_string(), + display_name: name.to_string(), + storage: var_decl.storage.as_ref().map(|s| s.clone().into()), + is_tmp: false, + is_symbolic: true, + tmp_of: None, + dep_on: None, + is_return: false, + ty, + }; + let lhs = ContextVarNode::from(self.add_node(Node::ContextVar(var))); + ctx.add_var(lhs, self).into_expr_err(loc)?; + self.add_edge(lhs, ctx, Edge::Context(ContextEdge::Variable)); + Ok(false) + } + (l @ ExprRet::Single(_lhs), Some(ExprRet::Multi(rhs_sides))) => Ok(rhs_sides + .iter() + .map(|expr_ret| self.match_var_def(arena, ctx, var_decl, loc, l, Some(expr_ret))) + .collect::, ExprErr>>()? 
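// `match_var_def` pairs the declaration's type side with the initializer side recursively:
// equal-arity `ExprRet::Multi` sides are zipped element-wise (see the arms around this
// point), otherwise the whole lhs is reused against each rhs element. A small standalone
// illustration of that pairing rule over generic slices instead of `ExprRet`:
fn pair_sides<'a, T>(lhs: &'a [T], rhs: &'a [T]) -> Vec<(&'a [T], &'a T)> {
    if lhs.len() == rhs.len() {
        // equal arity: zip element with element
        lhs.iter()
            .zip(rhs.iter())
            .map(|(l, r)| (std::slice::from_ref(l), r))
            .collect()
    } else {
        // otherwise: match the full lhs against every rhs element
        rhs.iter().map(|r| (lhs, r)).collect()
    }
}

fn main() {
    let lhs = ["uint256", "bool"];
    let rhs = ["1", "true"];
    assert_eq!(pair_sides(&lhs, &rhs).len(), 2);
}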
+ .iter() + .all(|e| *e)), + (ExprRet::Multi(lhs_sides), r @ Some(ExprRet::Single(_))) => Ok(lhs_sides + .iter() + .map(|expr_ret| self.match_var_def(arena, ctx, var_decl, loc, expr_ret, r)) + .collect::, ExprErr>>()? + .iter() + .all(|e| *e)), + (ExprRet::Multi(lhs_sides), None) => Ok(lhs_sides + .iter() + .map(|expr_ret| self.match_var_def(arena, ctx, var_decl, loc, expr_ret, None)) + .collect::, ExprErr>>()? + .iter() + .all(|e| *e)), + (ExprRet::Multi(lhs_sides), Some(ExprRet::Multi(rhs_sides))) => { + // try to zip sides if they are the same length + if lhs_sides.len() == rhs_sides.len() { + Ok(lhs_sides + .iter() + .zip(rhs_sides.iter()) + .map(|(lhs_expr_ret, rhs_expr_ret)| { + self.match_var_def( + arena, + ctx, + var_decl, + loc, + lhs_expr_ret, + Some(rhs_expr_ret), + ) + }) + .collect::, ExprErr>>()? + .iter() + .all(|e| *e)) + } else { + Ok(rhs_sides + .iter() + .map(|rhs_expr_ret| { + self.match_var_def( + arena, + ctx, + var_decl, + loc, + lhs_paths, + Some(rhs_expr_ret), + ) + }) + .collect::, ExprErr>>()? + .iter() + .all(|e| *e)) + } + } + (_e, _f) => Err(ExprErr::Todo( + loc, + "Unhandled ExprRet combination in `match_var_def`".to_string(), + )), + } + } + + #[tracing::instrument(level = "trace", skip_all, fields(ctx = %ctx.path(self)))] + /// Creates a newer version of a variable in the context. It may or may not actually + /// create this new variable depending on if there are two successively identical version. + fn advance_var_in_ctx( + &mut self, + cvar_node: ContextVarNode, + loc: Loc, + ctx: ContextNode, + ) -> Result { + self.advance_var_in_ctx_forcible(cvar_node, loc, ctx, false) + } + + #[tracing::instrument(level = "trace", skip_all, fields(ctx = %ctx.path(self)))] + /// Creates a new version of a variable in a context. Takes an additional parameter + /// denoting whether or not to force the creation, skipping an optimization. + fn advance_var_in_ctx_forcible( + &mut self, + cvar_node: ContextVarNode, + loc: Loc, + ctx: ContextNode, + force: bool, + ) -> Result { + tracing::trace!( + "advancing variable: {}", + cvar_node.display_name(self).into_expr_err(loc)? + ); + if let Some(cvar) = cvar_node.next_version(self) { + panic!( + "Not latest version of: {}", + cvar.display_name(self).unwrap() + ); + } + if let Some(child) = ctx.underlying(self).into_expr_err(loc)?.child { + return Err(ExprErr::GraphError( + loc, + GraphError::VariableUpdateInOldContext(format!( + "Variable update of {} in old context: parent: {}, child: {:#?}", + cvar_node.display_name(self).unwrap(), + ctx.path(self), + child + )), + )); + } + let mut new_cvar = cvar_node + .latest_version(self) + .underlying(self) + .into_expr_err(loc)? 
+ .clone(); + // get the old context + let new_cvarnode; + + 'a: { + if let Some(old_ctx) = cvar_node.maybe_ctx(self) { + if !force { + // get the previous version to remove and prevent spurious nodes + if let Some(prev) = cvar_node.latest_version(self).previous_version(self) { + let prev_version = prev.underlying(self).into_expr_err(loc)?; + // check if there was no change between the previous version and the latest version + if prev_version.eq_ignore_loc(&new_cvar) && old_ctx == ctx { + // there was no change in the current context, just give them the current variable + new_cvarnode = cvar_node.into(); + break 'a; + } + } + } + + new_cvar.loc = Some(loc); + new_cvarnode = self.add_node(Node::ContextVar(new_cvar)); + if old_ctx != ctx { + ctx.add_var(new_cvarnode.into(), self).into_expr_err(loc)?; + self.add_edge(new_cvarnode, ctx, Edge::Context(ContextEdge::Variable)); + self.add_edge( + new_cvarnode, + cvar_node.0, + Edge::Context(ContextEdge::InheritedVariable), + ); + } else { + self.add_edge(new_cvarnode, cvar_node.0, Edge::Context(ContextEdge::Prev)); + } + } else { + new_cvar.loc = Some(loc); + new_cvarnode = self.add_node(Node::ContextVar(new_cvar)); + self.add_edge(new_cvarnode, cvar_node.0, Edge::Context(ContextEdge::Prev)); + } + } + + Ok(ContextVarNode::from(new_cvarnode)) + } + + fn advance_var_in_forced_ctx( + &mut self, + cvar_node: ContextVarNode, + loc: Loc, + ctx: ContextNode, + ) -> Result { + let mut new_cvar = cvar_node + .latest_version(self) + .underlying(self) + .into_expr_err(loc)? + .clone(); + // get the old context + let new_cvarnode; + + 'a: { + if let Some(old_ctx) = cvar_node.maybe_ctx(self) { + // get the previous version to remove and prevent spurious nodes + if let Some(prev) = cvar_node.latest_version(self).previous_version(self) { + let prev_version = prev.underlying(self).into_expr_err(loc)?; + // check if there was no change between the previous version and the latest version + if prev_version.eq_ignore_loc(&new_cvar) && old_ctx == ctx { + // there was no change in the current context, just give them the current variable + new_cvarnode = cvar_node.into(); + break 'a; + } + } + + new_cvar.loc = Some(loc); + // new_cvar.display_name = format!("{}_{}", new_cvar.name, cvar_node.prev_versions(self)); + new_cvarnode = self.add_node(Node::ContextVar(new_cvar)); + if old_ctx != ctx { + ctx.add_var(new_cvarnode.into(), self).into_expr_err(loc)?; + self.add_edge(new_cvarnode, ctx, Edge::Context(ContextEdge::Variable)); + self.add_edge( + new_cvarnode, + cvar_node.0, + Edge::Context(ContextEdge::InheritedVariable), + ); + } else { + self.add_edge(new_cvarnode, cvar_node.0, Edge::Context(ContextEdge::Prev)); + } + } else { + new_cvar.loc = Some(loc); + new_cvarnode = self.add_node(Node::ContextVar(new_cvar)); + self.add_edge(new_cvarnode, cvar_node.0, Edge::Context(ContextEdge::Prev)); + } + } + + Ok(ContextVarNode::from(new_cvarnode)) + } + + /// Creates a new version of a variable in it's current context + fn advance_var_in_curr_ctx( + &mut self, + cvar_node: ContextVarNode, + loc: Loc, + ) -> Result { + tracing::trace!( + "advancing variable: {}", + cvar_node.display_name(self).into_expr_err(loc)? + ); + if let Some(cvar) = cvar_node.next_version(self) { + panic!( + "Not latest version of: {}", + cvar.display_name(self).unwrap() + ); + } + let mut new_cvar = cvar_node + .latest_version(self) + .underlying(self) + .into_expr_err(loc)? 
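// The `advance_var_in_ctx*` family implements versioning: writing to a variable clones its
// latest `ContextVar`, links the clone to its predecessor with a `Prev` (or, across contexts,
// `InheritedVariable`) edge, and skips the clone entirely when nothing changed and `force` is
// not set. A toy version chain showing that dedup-on-advance behaviour:
#[derive(Clone, PartialEq)]
struct ToyVar {
    name: String,
    value: i128,
}

struct VersionChain {
    versions: Vec<ToyVar>,
}

impl VersionChain {
    fn latest(&self) -> &ToyVar {
        self.versions.last().expect("at least one version")
    }

    // Returns the index of the version that should be used after the "write".
    fn advance(&mut self, new: ToyVar, force: bool) -> usize {
        if !force && self.latest() == &new {
            // no change in the same context: reuse the current version, avoid a spurious node
            return self.versions.len() - 1;
        }
        self.versions.push(new);
        self.versions.len() - 1
    }
}

fn main() {
    let mut chain = VersionChain { versions: vec![ToyVar { name: "x".into(), value: 1 }] };
    assert_eq!(chain.advance(ToyVar { name: "x".into(), value: 1 }, false), 0); // deduped
    assert_eq!(chain.advance(ToyVar { name: "x".into(), value: 2 }, false), 1); // new version
}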
+ .clone(); + new_cvar.loc = Some(loc); + + let new_cvarnode = self.add_node(Node::ContextVar(new_cvar)); + self.add_edge(new_cvarnode, cvar_node.0, Edge::Context(ContextEdge::Prev)); + + Ok(ContextVarNode::from(new_cvarnode)) + } + + /// Clones a variable and adds it to the graph + fn advance_var_underlying(&mut self, cvar_node: ContextVarNode, loc: Loc) -> &mut ContextVar { + assert_eq!(None, cvar_node.next_version(self)); + let mut new_cvar = cvar_node + .latest_version(self) + .underlying(self) + .unwrap() + .clone(); + new_cvar.loc = Some(loc); + let new_cvarnode = self.add_node(Node::ContextVar(new_cvar)); + self.add_edge(new_cvarnode, cvar_node.0, Edge::Context(ContextEdge::Prev)); + ContextVarNode::from(new_cvarnode) + .underlying_mut(self) + .unwrap() + } +} diff --git a/crates/solc-expressions/src/yul/mod.rs b/crates/solc-expressions/src/yul/mod.rs new file mode 100644 index 00000000..d7a4fd29 --- /dev/null +++ b/crates/solc-expressions/src/yul/mod.rs @@ -0,0 +1,8 @@ +//! Traits and blanket implementations for parsing with yul statements and expressions + +mod yul_builder; +mod yul_cond_op; +mod yul_funcs; +pub use yul_builder::*; +pub use yul_cond_op::*; +pub use yul_funcs::*; diff --git a/crates/solc-expressions/src/yul/yul_builder.rs b/crates/solc-expressions/src/yul/yul_builder.rs new file mode 100644 index 00000000..7383337e --- /dev/null +++ b/crates/solc-expressions/src/yul/yul_builder.rs @@ -0,0 +1,470 @@ +//! Trait and blanket implementation for parsing yul-based statements and expressions + +use crate::{ + yul::YulCondOp, yul::YulFuncCaller, ContextBuilder, ExprErr, ExpressionParser, IntoExprErr, +}; + +use graph::{ + elem::Elem, + nodes::{ + BuiltInNode, Builtin, Concrete, Context, ContextNode, ContextVar, ContextVarNode, ExprRet, + }, + AnalyzerBackend, ContextEdge, Edge, Node, SolcRange, VarType, +}; +use shared::RangeArena; + +use solang_parser::{ + helpers::CodeLocation, + pt::{Expression, Loc, YulExpression, YulFor, YulStatement, YulSwitch}, +}; + +impl YulBuilder for T where + T: AnalyzerBackend + Sized + ExpressionParser +{ +} +/// Trait that processes Yul statements and expressions +pub trait YulBuilder: + AnalyzerBackend + Sized + ExpressionParser +{ + #[tracing::instrument(level = "trace", skip_all, fields(ctx = %ctx.path(self)))] + /// Parse a yul statement + fn parse_ctx_yul_statement( + &mut self, + arena: &mut RangeArena>, + stmt: &YulStatement, + ctx: ContextNode, + ) where + Self: Sized, + { + if let Some(true) = self.add_if_err(ctx.is_ended(self).into_expr_err(stmt.loc())) { + return; + } + if let Some(live_edges) = self.add_if_err(ctx.live_edges(self).into_expr_err(stmt.loc())) { + if live_edges.is_empty() { + self.parse_ctx_yul_stmt_inner(arena, stmt, ctx) + } else { + live_edges.iter().for_each(|fork_ctx| { + self.parse_ctx_yul_stmt_inner(arena, stmt, *fork_ctx); + }); + } + } + } + + #[tracing::instrument(level = "trace", skip_all)] + /// After doing some setup in `parse_ctx_yul_statement`, actually parse a yul statement + fn parse_ctx_yul_stmt_inner( + &mut self, + arena: &mut RangeArena>, + stmt: &YulStatement, + ctx: ContextNode, + ) where + Self: Sized, + { + use YulStatement::*; + // println!("ctx: {}, yul stmt: {:?}", ctx.path(self), stmt); + + let res = ctx + .pop_expr_latest(stmt.loc(), self) + .into_expr_err(stmt.loc()); + let _ = self.add_if_err(res); + + if ctx.is_killed(self).unwrap() { + return; + } + let ret = self.apply_to_edges(ctx, stmt.loc(), arena, &|analyzer, arena, ctx, _loc| { + match stmt { + Assign(loc, yul_exprs, yul_expr) 
=> { + match yul_exprs + .iter() + .try_for_each(|expr| analyzer.parse_ctx_yul_expr(arena, expr, ctx)) + { + Ok(()) => analyzer.apply_to_edges( + ctx, + *loc, + arena, + &|analyzer, arena, ctx, loc| { + let Some(lhs_side) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoLhs( + loc, + "No left hand side assignments in yul block".to_string(), + )); + }; + if matches!(lhs_side, ExprRet::CtxKilled(_)) { + ctx.push_expr(lhs_side, analyzer).into_expr_err(loc)?; + return Ok(()); + } + + analyzer.parse_ctx_yul_expr(arena, yul_expr, ctx)?; + analyzer.apply_to_edges( + ctx, + loc, + arena, + &|analyzer, arena, ctx, loc| { + let Some(rhs_side) = ctx + .pop_expr_latest(loc, analyzer) + .into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "No right hand side assignments in yul block" + .to_string(), + )); + }; + + if matches!(rhs_side, ExprRet::CtxKilled(_)) { + ctx.push_expr(rhs_side, analyzer).into_expr_err(loc)?; + return Ok(()); + } + + analyzer.match_assign_sides( + arena, ctx, loc, &lhs_side, &rhs_side, + ) + }, + ) + }, + ), + Err(e) => Err(e), + } + } + VariableDeclaration(loc, yul_idents, maybe_yul_expr) => { + let nodes = yul_idents + .iter() + .map(|ident| { + let b_ty = analyzer.builtin_or_add(Builtin::Uint(256)); + let var = ContextVar { + loc: Some(ident.loc), + name: ident.id.name.clone(), + display_name: ident.id.name.clone(), + storage: None, + is_tmp: false, + tmp_of: None, + dep_on: None, + is_symbolic: true, + is_return: false, + ty: VarType::try_from_idx(analyzer, b_ty).unwrap(), + }; + let cvar = + ContextVarNode::from(analyzer.add_node(Node::ContextVar(var))); + ctx.add_var(cvar, analyzer).unwrap(); + analyzer.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); + analyzer.advance_var_in_ctx(cvar, *loc, ctx).unwrap() + }) + .collect::>(); + + if let Some(yul_expr) = maybe_yul_expr { + analyzer.parse_ctx_yul_expr(arena, yul_expr, ctx)?; + analyzer.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(ret) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
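// Yul identifiers carry no declared type: values are 256-bit words, so the declaration arm
// above models each new yul variable as a `uint256` context variable. A trivial sketch of
// that default; the names here are illustrative, not the pyrometer API:
const YUL_WORD_BITS: u16 = 256;

struct ToyYulVar {
    name: String,
    bits: u16,
}

fn declare_yul_vars(idents: &[&str]) -> Vec<ToyYulVar> {
    idents
        .iter()
        .map(|name| ToyYulVar { name: (*name).to_string(), bits: YUL_WORD_BITS })
        .collect()
}

fn main() {
    let vars = declare_yul_vars(&["x", "y"]);
    assert!(vars.iter().all(|v| v.bits == 256));
}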
+ else { + return Err(ExprErr::NoRhs( + loc, + "No right hand side assignments in yul block".to_string(), + )); + }; + + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + + analyzer.match_assign_yul(ctx, loc, &nodes, ret) + }) + } else { + Ok(()) + } + } + If(loc, yul_expr, yul_block) => { + analyzer.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let ret = analyzer.yul_cond_op_stmt(arena, loc, yul_expr, yul_block, ctx); + let _ = analyzer.add_if_err(ret); + Ok(()) + }) + } + For(YulFor { + loc, + init_block: _, + condition: _, + post_block: _, + execution_block: _, + }) => { + let sctx = + Context::new_subctx(ctx, None, *loc, None, None, false, analyzer, None) + .into_expr_err(*loc)?; + let subctx = ContextNode::from(analyzer.add_node(Node::Context(sctx))); + ctx.set_child_call(subctx, analyzer).into_expr_err(*loc)?; + analyzer.apply_to_edges(subctx, *loc, arena, &|analyzer, arena, subctx, loc| { + let vars = subctx.local_vars(analyzer).clone(); + vars.iter().for_each(|(name, var)| { + // widen to max range + if let Some(inheritor_var) = ctx.var_by_name(analyzer, name) { + let inheritor_var = inheritor_var.latest_version(analyzer); + if let Some(r) = var + .underlying(analyzer) + .unwrap() + .ty + .default_range(analyzer) + .unwrap() + { + let new_inheritor_var = analyzer + .advance_var_in_ctx(inheritor_var, loc, ctx) + .unwrap(); + let res = new_inheritor_var + .set_range_min(analyzer, arena, r.min) + .into_expr_err(loc); + let _ = analyzer.add_if_err(res); + let res = new_inheritor_var + .set_range_max(analyzer, arena, r.max) + .into_expr_err(loc); + let _ = analyzer.add_if_err(res); + } + } + }); + Ok(()) + }) + } + Switch(YulSwitch { + loc, + condition, + cases, + default, + }) => analyzer.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + analyzer.yul_switch_stmt( + arena, + loc, + condition.clone(), + cases.to_vec(), + default.clone(), + ctx, + ) + }), + Leave(loc) => Err(ExprErr::Todo( + *loc, + "Yul `leave` statements are not currently supported".to_string(), + )), + Break(loc) => Err(ExprErr::Todo( + *loc, + "Yul `break` statements are not currently supported".to_string(), + )), + Continue(loc) => Err(ExprErr::Todo( + *loc, + "Yul `continue` statements are not currently supported".to_string(), + )), + Block(yul_block) => { + yul_block + .statements + .iter() + .for_each(|stmt| analyzer.parse_ctx_yul_stmt_inner(arena, stmt, ctx)); + Ok(()) + } + FunctionDefinition(yul_func_def) => Err(ExprErr::Todo( + yul_func_def.loc(), + "Yul `function` defintions are not currently supported".to_string(), + )), + FunctionCall(yul_func_call) => analyzer.yul_func_call(arena, yul_func_call, ctx), + Error(loc) => Err(ExprErr::ParseError( + *loc, + "Could not parse this yul statement".to_string(), + )), + } + }); + let _ = self.add_if_err(ret); + } + + #[tracing::instrument(level = "trace", skip_all)] + /// Parse a yul expression + fn parse_ctx_yul_expr( + &mut self, + arena: &mut RangeArena>, + expr: &YulExpression, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + tracing::trace!("Parsing yul expression: {expr:?}"); + + let edges = ctx.live_edges(self).into_expr_err(expr.loc())?; + if edges.is_empty() { + self.parse_ctx_yul_expr_inner(arena, expr, ctx) + } else { + edges + .iter() + .try_for_each(|fork_ctx| self.parse_ctx_yul_expr(arena, expr, *fork_ctx))?; + Ok(()) + } + } + + /// After performing some setup in `parse_ctx_yul_expr`, actually parse the yul expression + fn parse_ctx_yul_expr_inner( + 
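// Yul `for` loops are not unrolled: the `For` arm above creates a subcontext and any variable
// it shares with the parent context appears to be widened to its type's full default range,
// a conservative over-approximation of "the loop may write anything". A compact sketch of
// that widening with a hypothetical integer interval:
#[derive(Clone, Copy)]
struct Interval {
    min: i128,
    max: i128,
}

// Conservatively forget everything known about `var` by resetting it to the type's default range.
fn widen_for_loop(var: &mut Interval, type_default: Interval) {
    *var = type_default;
}

fn main() {
    let mut x = Interval { min: 0, max: 10 };
    widen_for_loop(&mut x, Interval { min: i128::MIN, max: i128::MAX });
    assert_eq!((x.min, x.max), (i128::MIN, i128::MAX));
}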
&mut self, + arena: &mut RangeArena>, + expr: &YulExpression, + ctx: ContextNode, + ) -> Result<(), ExprErr> { + use YulExpression::*; + match expr { + BoolLiteral(loc, b, _) => self.bool_literal(ctx, *loc, *b), + NumberLiteral(loc, int, expr, unit) => { + self.number_literal(ctx, *loc, int, expr, false, unit) + } + HexNumberLiteral(loc, b, _unit) => self.hex_num_literal(ctx, *loc, b, false), + HexStringLiteral(lit, _) => self.hex_literals(ctx, &[lit.clone()]), + StringLiteral(lit, _) => self.string_literal(ctx, lit.loc, &lit.string), + Variable(ident) => { + self.variable(arena, ident, ctx, None)?; + self.apply_to_edges(ctx, ident.loc, arena, &|analyzer, arena, edge_ctx, loc| { + if let Some(ret) = edge_ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? { + if ContextVarNode::from(ret.expect_single().into_expr_err(loc)?) + .is_memory(analyzer) + .into_expr_err(loc)? + { + // its a memory based variable, push a uint instead + let b = Builtin::Uint(256); + let var = ContextVar::new_from_builtin( + loc, + analyzer.builtin_or_add(b).into(), + analyzer, + ) + .into_expr_err(loc)?; + let node = analyzer.add_node(Node::ContextVar(var)); + edge_ctx + .push_expr(ExprRet::Single(node), analyzer) + .into_expr_err(loc) + } else { + edge_ctx.push_expr(ret, analyzer).into_expr_err(loc) + } + } else { + Err(ExprErr::Unresolved( + ident.loc, + format!("Could not find variable with name: {}", ident.name), + )) + } + }) + } + FunctionCall(yul_func_call) => self.yul_func_call(arena, yul_func_call, ctx), + SuffixAccess(loc, yul_member_expr, ident) => { + self.parse_inputs(arena, ctx, *loc, &[*yul_member_expr.clone()])?; + + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Ok(Some(lhs)) = ctx.pop_expr_latest(loc, analyzer) else { + return Err(ExprErr::NoLhs( + loc, + "`.slot` had no left hand side".to_string(), + )); + }; + match &*ident.name { + "slot" => { + let slot_var = analyzer.slot( + ctx, + loc, + lhs.expect_single().into_expr_err(loc)?.into(), + ); + ctx.push_expr(ExprRet::Single(slot_var.into()), analyzer) + .into_expr_err(loc)?; + Ok(()) + } + _ => Err(ExprErr::Todo( + expr.loc(), + format!("Yul member access `{}` not yet supported", ident.name), + )), + } + }) + } + } + } + + /// Match [`ExprRet`] from the sides of an `YulAssign` to perform the assignment + fn match_assign_yul( + &mut self, + _ctx: ContextNode, + loc: Loc, + nodes: &[ContextVarNode], + ret: ExprRet, + ) -> Result<(), ExprErr> { + match ret { + s @ ExprRet::Single(_) | s @ ExprRet::SingleLiteral(_) => { + self.match_assign_yul_inner(loc, &nodes[0], s)?; + } + ExprRet::Multi(inner) => { + if inner.len() == nodes.len() { + inner + .into_iter() + .zip(nodes.iter()) + .map(|(ret, node)| self.match_assign_yul_inner(loc, node, ret)) + .collect::, ExprErr>>()?; + } else { + return Err(ExprErr::Todo( + loc, + format!("Differing number of assignees and assignors in yul expression, assignors: {}, assignees: {}", nodes.len(), inner.len()), + )); + }; + } + ExprRet::CtxKilled(_kind) => {} + ExprRet::Null => {} + } + + Ok(()) + } + + /// Perform the actual yul assignment + fn match_assign_yul_inner( + &mut self, + loc: Loc, + node: &ContextVarNode, + ret: ExprRet, + ) -> Result<(), ExprErr> { + match ret.flatten() { + ExprRet::Single(idx) | ExprRet::SingleLiteral(idx) => { + let assign = ContextVarNode::from(idx); + let assign_ty = assign.underlying(self).into_expr_err(loc)?.ty.clone(); + if assign_ty.is_dyn(self).into_expr_err(loc)? 
{ + let b_ty = self.builtin_or_add(Builtin::Bytes(32)); + node.underlying_mut(self).into_expr_err(loc)?.ty = + VarType::try_from_idx(self, b_ty).unwrap(); + } else { + node.underlying_mut(self).into_expr_err(loc)?.ty = assign_ty; + } + } + ExprRet::Multi(_inner) => { + return Err(ExprErr::Todo( + loc, + "Multi in single assignment yul expression is unhandled".to_string(), + )) + } + ExprRet::CtxKilled(..) => {} + ExprRet::Null => {} + } + Ok(()) + } + + fn slot(&mut self, ctx: ContextNode, loc: Loc, lhs: ContextVarNode) -> ContextVarNode { + let lhs = lhs.first_version(self); + let name = format!("{}.slot", lhs.name(self).unwrap()); + tracing::trace!("Slot access: {}", name); + if let Some(attr_var) = ctx.var_by_name_or_recurse(self, &name).unwrap() { + attr_var.latest_version(self) + } else { + let slot_var = ContextVar { + loc: Some(loc), + name: lhs.name(self).unwrap() + ".slot", + display_name: lhs.display_name(self).unwrap() + ".slot", + storage: None, + is_tmp: false, + tmp_of: None, + dep_on: None, + is_symbolic: true, + is_return: false, + ty: VarType::BuiltIn( + BuiltInNode::from(self.builtin_or_add(Builtin::Uint(256))), + SolcRange::try_from_builtin(&Builtin::Uint(256)), + ), + }; + let slot_node = self.add_node(Node::ContextVar(slot_var)); + + self.add_edge(slot_node, lhs, Edge::Context(ContextEdge::SlotAccess)); + self.add_edge(slot_node, ctx, Edge::Context(ContextEdge::Variable)); + ctx.add_var(slot_node.into(), self).unwrap(); + slot_node.into() + } + } +} diff --git a/src/context/yul/yul_cond_op.rs b/crates/solc-expressions/src/yul/yul_cond_op.rs similarity index 60% rename from src/context/yul/yul_cond_op.rs rename to crates/solc-expressions/src/yul/yul_cond_op.rs index 59d35621..6a63b839 100644 --- a/src/context/yul/yul_cond_op.rs +++ b/crates/solc-expressions/src/yul/yul_cond_op.rs @@ -1,35 +1,38 @@ -use crate::context::exprs::IntoExprErr; -use crate::context::yul::YulBuilder; -use crate::context::ContextBuilder; -use crate::context::ExprErr; -use crate::Concrete; -use crate::ConcreteNode; -use crate::{exprs::Require, AnalyzerLike}; +use crate::{require::Require, yul::YulBuilder, ContextBuilder, ExprErr, IntoExprErr}; + +use graph::{ + elem::*, + nodes::{Concrete, ConcreteNode, Context, ContextNode, ContextVar, ContextVarNode, ExprRet}, + AnalyzerBackend, ContextEdge, Edge, Node, +}; +use shared::{NodeIdx, RangeArena}; + use ethers_core::types::U256; -use shared::context::ExprRet; -use shared::range::elem::RangeOp; -use shared::{context::*, Edge, Node, NodeIdx}; -use solang_parser::pt::Identifier; -use solang_parser::pt::YulBlock; -use solang_parser::pt::YulFunctionCall; -use solang_parser::pt::YulSwitchOptions; +use solang_parser::pt::{ + CodeLocation, Expression, Identifier, Loc, YulBlock, YulExpression, YulFunctionCall, + YulStatement, YulSwitchOptions, +}; -use solang_parser::pt::CodeLocation; -use solang_parser::pt::{Expression, Loc}; -use solang_parser::pt::{YulExpression, YulStatement}; +impl YulCondOp for T where + T: AnalyzerBackend + Require + Sized +{ +} -impl YulCondOp for T where T: AnalyzerLike + Require + Sized -{} -pub trait YulCondOp: AnalyzerLike + Require + Sized { +/// Trait for dealing with conditional operations in yul +pub trait YulCondOp: + AnalyzerBackend + Require + Sized +{ #[tracing::instrument(level = "trace", skip_all)] + /// Handle a yul conditional operation statement fn yul_cond_op_stmt( &mut self, + arena: &mut RangeArena>, loc: Loc, if_expr: &YulExpression, true_stmt: &YulBlock, ctx: ContextNode, ) -> Result<(), ExprErr> { - 
self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { let tctx = Context::new_subctx(ctx, None, loc, Some("true"), None, false, analyzer, None) .into_expr_err(loc)?; @@ -40,6 +43,12 @@ pub trait YulCondOp: AnalyzerLike + Requir let false_subctx = ContextNode::from(analyzer.add_node(Node::Context(fctx))); ctx.set_child_fork(true_subctx, false_subctx, analyzer) .into_expr_err(loc)?; + true_subctx + .set_continuation_ctx(analyzer, ctx, "yul_fork_true") + .into_expr_err(loc)?; + false_subctx + .set_continuation_ctx(analyzer, ctx, "yul_fork_false") + .into_expr_err(loc)?; let ctx_fork = analyzer.add_node(Node::ContextFork); analyzer.add_edge(ctx_fork, ctx, Edge::Context(ContextEdge::ContextFork)); analyzer.add_edge( @@ -53,10 +62,13 @@ pub trait YulCondOp: AnalyzerLike + Requir Edge::Context(ContextEdge::Subcontext), ); - analyzer.parse_ctx_yul_expr(if_expr, true_subctx)?; - analyzer.apply_to_edges(true_subctx, loc, &|analyzer, ctx, loc| { + analyzer.parse_ctx_yul_expr(arena, if_expr, true_subctx)?; + analyzer.apply_to_edges(true_subctx, loc, arena, &|analyzer, arena, ctx, loc| { let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "True conditional had no lhs".to_string())); + return Err(ExprErr::NoLhs( + loc, + "True conditional had no lhs".to_string(), + )); }; if matches!(ret, ExprRet::CtxKilled(_)) { @@ -64,10 +76,14 @@ pub trait YulCondOp: AnalyzerLike + Requir return Ok(()); } - analyzer.match_yul_true(ctx, if_expr.loc(), &ret) + analyzer.match_yul_true(arena, ctx, if_expr.loc(), &ret) })?; - analyzer.parse_ctx_yul_statement(&YulStatement::Block(true_stmt.clone()), true_subctx); + analyzer.parse_ctx_yul_statement( + arena, + &YulStatement::Block(true_stmt.clone()), + true_subctx, + ); // let false_expr = YulExpression::FunctionCall(Box::new(YulFunctionCall { // loc, // id: Identifier { @@ -76,10 +92,13 @@ pub trait YulCondOp: AnalyzerLike + Requir // }, // arguments: vec![if_expr.clone()], // })); - analyzer.parse_ctx_yul_expr(if_expr, false_subctx)?; - analyzer.apply_to_edges(false_subctx, loc, &|analyzer, ctx, loc| { + analyzer.parse_ctx_yul_expr(arena, if_expr, false_subctx)?; + analyzer.apply_to_edges(false_subctx, loc, arena, &|analyzer, arena, ctx, loc| { let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoLhs(loc, "False conditional had no lhs".to_string())); + return Err(ExprErr::NoLhs( + loc, + "False conditional had no lhs".to_string(), + )); }; if matches!(ret, ExprRet::CtxKilled(_)) { @@ -87,35 +106,37 @@ pub trait YulCondOp: AnalyzerLike + Requir return Ok(()); } - analyzer.match_yul_false(ctx, if_expr.loc(), &ret) + analyzer.match_yul_false(arena, ctx, if_expr.loc(), &ret) }) }) } #[tracing::instrument(level = "trace", skip_all)] + /// Handle a yul if-else fn yul_if_else( &mut self, + arena: &mut RangeArena>, loc: Loc, if_else_chain: &IfElseChain, ctx: ContextNode, ) -> Result<(), ExprErr> { - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let tctx = Context::new_subctx(ctx, None, loc, Some("true"), None, false, analyzer, None) - .into_expr_err(loc)?; - let true_subctx = ContextNode::from( - analyzer.add_node(Node::Context( - tctx - )), - ); - let fctx = Context::new_subctx(ctx, None, loc, Some("false"), None, false, analyzer, None) - .into_expr_err(loc)?; - let false_subctx = ContextNode::from( - analyzer.add_node(Node::Context( - fctx - )), - ); + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let tctx = + Context::new_subctx(ctx, None, loc, Some("true"), None, false, analyzer, None) + .into_expr_err(loc)?; + let true_subctx = ContextNode::from(analyzer.add_node(Node::Context(tctx))); + let fctx = + Context::new_subctx(ctx, None, loc, Some("false"), None, false, analyzer, None) + .into_expr_err(loc)?; + let false_subctx = ContextNode::from(analyzer.add_node(Node::Context(fctx))); ctx.set_child_fork(true_subctx, false_subctx, analyzer) .into_expr_err(loc)?; + true_subctx + .set_continuation_ctx(analyzer, ctx, "yul_fork_true") + .into_expr_err(loc)?; + false_subctx + .set_continuation_ctx(analyzer, ctx, "yul_fork_false") + .into_expr_err(loc)?; let ctx_fork = analyzer.add_node(Node::ContextFork); analyzer.add_edge(ctx_fork, ctx, Edge::Context(ContextEdge::ContextFork)); analyzer.add_edge( @@ -129,41 +150,53 @@ pub trait YulCondOp: AnalyzerLike + Requir Edge::Context(ContextEdge::Subcontext), ); - let if_expr_loc = if_else_chain.if_expr.loc(); - analyzer.apply_to_edges(true_subctx, if_expr_loc, &|analyzer, ctx, loc| { - analyzer.parse_ctx_yul_expr(&if_else_chain.if_expr, true_subctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, _loc| { - let Some(true_vars) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Yul switch statement was missing a case discriminator".to_string())) - }; + analyzer.apply_to_edges( + true_subctx, + if_expr_loc, + arena, + &|analyzer, arena, ctx, loc| { + analyzer.parse_ctx_yul_expr(arena, &if_else_chain.if_expr, true_subctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, _loc| { + let Some(true_vars) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
+ else { + return Err(ExprErr::NoRhs( + loc, + "Yul switch statement was missing a case discriminator".to_string(), + )); + }; - if matches!(true_vars, ExprRet::CtxKilled(_)) { - ctx.push_expr(true_vars, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.match_yul_true(ctx, loc, &true_vars)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, _loc| { - analyzer.parse_ctx_yul_statement(&if_else_chain.true_stmt, ctx); - Ok(()) + if matches!(true_vars, ExprRet::CtxKilled(_)) { + ctx.push_expr(true_vars, analyzer).into_expr_err(loc)?; + return Ok(()); + } + analyzer.match_yul_true(arena, ctx, loc, &true_vars)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, _loc| { + analyzer.parse_ctx_yul_statement(arena, &if_else_chain.true_stmt, ctx); + Ok(()) + }) }) - }) - })?; - + }, + )?; if let Some(next) = &if_else_chain.next { match next { - ElseOrDefault::Default(default) => { - analyzer.apply_to_edges(false_subctx, loc, &|analyzer, ctx, _loc| { - analyzer.parse_ctx_yul_statement(default, ctx); + ElseOrDefault::Default(default) => analyzer.apply_to_edges( + false_subctx, + loc, + arena, + &|analyzer, arena, ctx, _loc| { + analyzer.parse_ctx_yul_statement(arena, default, ctx); Ok(()) - }) - } - ElseOrDefault::Else(iec) => { - analyzer.apply_to_edges(false_subctx, loc, &|analyzer, ctx, loc| { - analyzer.yul_if_else(loc, iec, ctx) - }) - } + }, + ), + ElseOrDefault::Else(iec) => analyzer.apply_to_edges( + false_subctx, + loc, + arena, + &|analyzer, arena, ctx, loc| analyzer.yul_if_else(arena, loc, iec, ctx), + ), } } else { Ok(()) @@ -171,8 +204,10 @@ pub trait YulCondOp: AnalyzerLike + Requir }) } + /// Helper for the `true` evaluation of a yul conditional fn match_yul_true( &mut self, + arena: &mut RangeArena>, ctx: ContextNode, loc: Loc, true_cvars: &ExprRet, @@ -191,6 +226,7 @@ pub trait YulCondOp: AnalyzerLike + Requir ExprRet::Single(ContextVarNode::from(self.add_node(tmp_true)).into()); self.handle_require_inner( + arena, ctx, loc, true_cvars, @@ -206,15 +242,17 @@ pub trait YulCondOp: AnalyzerLike + Requir true_paths .iter() .take(1) - .try_for_each(|expr_ret| self.match_yul_true(ctx, loc, expr_ret))?; + .try_for_each(|expr_ret| self.match_yul_true(arena, ctx, loc, expr_ret))?; } ExprRet::Null => {} } Ok(()) } + /// Helper for the `false` evaluation of a yul conditional fn match_yul_false( &mut self, + arena: &mut RangeArena>, ctx: ContextNode, loc: Loc, false_cvars: &ExprRet, @@ -233,6 +271,7 @@ pub trait YulCondOp: AnalyzerLike + Requir ExprRet::Single(ContextVarNode::from(self.add_node(tmp_true)).into()); self.handle_require_inner( + arena, ctx, loc, false_cvars, @@ -248,7 +287,7 @@ pub trait YulCondOp: AnalyzerLike + Requir false_paths .iter() .take(1) - .try_for_each(|expr_ret| self.match_yul_false(ctx, loc, expr_ret))?; + .try_for_each(|expr_ret| self.match_yul_false(arena, ctx, loc, expr_ret))?; } ExprRet::Null => {} } @@ -257,8 +296,10 @@ pub trait YulCondOp: AnalyzerLike + Requir } #[tracing::instrument(level = "trace", skip_all)] + /// Handle a yul swithc statement by converting it into an if-else chain fn yul_switch_stmt( &mut self, + arena: &mut RangeArena>, loc: Loc, condition: YulExpression, cases: Vec, @@ -266,13 +307,14 @@ pub trait YulCondOp: AnalyzerLike + Requir ctx: ContextNode, ) -> Result<(), ExprErr> { let iec = IfElseChain::from(loc, (condition, cases, default))?; - self.apply_to_edges(ctx, loc, &|analyzer, ctx, _loc| { - analyzer.yul_if_else(loc, &iec, ctx) + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, _loc| { + 
analyzer.yul_if_else(arena, loc, &iec, ctx) }) } } #[derive(Clone, Debug)] +/// A yul-based if-else chain, which represents a switch statement pub struct IfElseChain { pub if_expr: YulExpression, pub true_stmt: YulStatement, @@ -280,6 +322,7 @@ pub struct IfElseChain { } #[derive(Clone, Debug)] +/// Wrapper over a switch statement that denotes either another else statement or the default case pub enum ElseOrDefault { Else(Box), Default(YulStatement), @@ -329,11 +372,17 @@ impl IfElseChain { child = Some(chain_part.into()); }); let Some(child) = child else { - return Err(ExprErr::NoRhs(loc, "No cases or default found for switch statement".to_string())) + return Err(ExprErr::NoRhs( + loc, + "No cases or default found for switch statement".to_string(), + )); }; let Some(iec) = IfElseChain::from_child(child) else { - return Err(ExprErr::NoRhs(loc, "No cases or default found for switch statement".to_string())) + return Err(ExprErr::NoRhs( + loc, + "No cases or default found for switch statement".to_string(), + )); }; Ok(iec) } diff --git a/src/context/yul/yul_funcs.rs b/crates/solc-expressions/src/yul/yul_funcs.rs similarity index 59% rename from src/context/yul/yul_funcs.rs rename to crates/solc-expressions/src/yul/yul_funcs.rs index 5f151e5f..6702fb51 100644 --- a/src/context/yul/yul_funcs.rs +++ b/crates/solc-expressions/src/yul/yul_funcs.rs @@ -1,39 +1,34 @@ -use crate::context::exprs::BinOp; -use crate::context::exprs::Cmp; -use crate::context::exprs::Env; -use crate::context::exprs::IntoExprErr; -use crate::context::yul::YulBuilder; -use crate::context::ContextBuilder; -use crate::context::ExprErr; -use crate::Concrete; -use crate::ConcreteNode; -use crate::Node; +use crate::{ + assign::Assign, variable::Variable, yul::YulBuilder, BinOp, Cmp, ContextBuilder, Env, ExprErr, + IntoExprErr, +}; + +use graph::{ + elem::*, + nodes::{ + Builtin, Concrete, ConcreteNode, ContextNode, ContextVar, ContextVarNode, ExprRet, + KilledKind, + }, + AnalyzerBackend, ContextEdge, Edge, GraphBackend, Node, SolcRange, VarType, +}; +use shared::{RangeArena, StorageLocation}; + use ethers_core::types::U256; -use shared::analyzer::AnalyzerLike; -use shared::analyzer::GraphLike; -use shared::context::ExprRet; -use shared::nodes::VarType; -use shared::range::elem_ty::RangeExpr; +use solang_parser::pt::{Expression, Loc, YulExpression, YulFunctionCall}; -use solang_parser::pt::YulExpression; use std::cell::RefCell; use std::rc::Rc; -use shared::range::{elem_ty::Elem, SolcRange}; -use shared::{context::ContextEdge, nodes::Builtin, Edge}; -use shared::{context::*, range::elem::RangeOp}; -use solang_parser::pt::YulFunctionCall; -use solang_parser::pt::{Expression, Loc, StorageLocation}; - impl YulFuncCaller for T where - T: AnalyzerLike + Sized + GraphLike + T: AnalyzerBackend + Sized + GraphBackend { } pub trait YulFuncCaller: - GraphLike + AnalyzerLike + Sized + GraphBackend + AnalyzerBackend + Sized { fn yul_func_call( &mut self, + arena: &mut RangeArena>, func_call: &YulFunctionCall, ctx: ContextNode, ) -> Result<(), ExprErr> { @@ -74,19 +69,21 @@ pub trait YulFuncCaller: ctx.kill(self, *loc, KilledKind::Revert).into_expr_err(*loc) } "return" => { - self.parse_ctx_yul_expr(&arguments[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(offset) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoRhs(loc, "Yul Return had no offset".to_string())) + self.parse_ctx_yul_expr(arena, &arguments[0], ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(offset) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs(loc, "Yul Return had no offset".to_string())); }; if matches!(offset, ExprRet::CtxKilled(_)) { ctx.push_expr(offset, analyzer).into_expr_err(loc)?; return Ok(()); } - analyzer.parse_ctx_yul_expr(&arguments[1], ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(size) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Yul Return had no size".to_string())) + analyzer.parse_ctx_yul_expr(arena, &arguments[1], ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(size) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoLhs(loc, "Yul Return had no size".to_string())); }; if matches!(size, ExprRet::CtxKilled(_)) { ctx.push_expr(size, analyzer).into_expr_err(loc)?; @@ -112,17 +109,20 @@ pub trait YulFuncCaller: )); } - self.parse_ctx_yul_expr(&arguments[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { + self.parse_ctx_yul_expr(arena, &arguments[0], ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { let Some(lhs) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Not operation had no element".to_string())) + return Err(ExprErr::NoRhs( + loc, + "Not operation had no element".to_string(), + )); }; if matches!(lhs, ExprRet::CtxKilled(_)) { ctx.push_expr(lhs, analyzer).into_expr_err(loc)?; return Ok(()); } - analyzer.bit_not_inner(ctx, loc, lhs.flatten()) + analyzer.bit_not_inner(arena, ctx, loc, lhs.flatten()) }) } "add" | "sub" | "mul" | "div" | "sdiv" | "mod" | "smod" | "exp" | "and" | "or" @@ -160,10 +160,14 @@ pub trait YulFuncCaller: vec![arguments[0].clone(), arguments[1].clone()] }; - self.parse_inputs(ctx, *loc, &inputs)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(inputs) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Yul Binary operation had no inputs".to_string())) + self.parse_inputs(arena, ctx, *loc, &inputs)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(inputs) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "Yul Binary operation had no inputs".to_string(), + )); }; if matches!(inputs, ExprRet::CtxKilled(_)) { ctx.push_expr(inputs, analyzer).into_expr_err(loc)?; @@ -176,12 +180,27 @@ pub trait YulFuncCaller: // we have to cast the inputs into an EVM word, which is effectively a u256. 
let word_ty = analyzer.builtin_or_add(Builtin::Uint(256)); let cast_ty = VarType::try_from_idx(analyzer, word_ty).unwrap(); - let lhs_paths = ContextVarNode::from(inputs[0].expect_single().into_expr_err(loc)?); - lhs_paths.cast_from_ty(cast_ty.clone(), analyzer).into_expr_err(loc)?; - let rhs_paths = ContextVarNode::from(inputs[1].expect_single().into_expr_err(loc)?); - rhs_paths.cast_from_ty(cast_ty, analyzer).into_expr_err(loc)?; + let lhs_paths = + ContextVarNode::from(inputs[0].expect_single().into_expr_err(loc)?); + lhs_paths + .cast_from_ty(cast_ty.clone(), analyzer, arena) + .into_expr_err(loc)?; - analyzer.op_match(ctx, loc, &ExprRet::Single(lhs_paths.into()), &ExprRet::Single(rhs_paths.into()), op, false) + let rhs_paths = + ContextVarNode::from(inputs[1].expect_single().into_expr_err(loc)?); + rhs_paths + .cast_from_ty(cast_ty, analyzer, arena) + .into_expr_err(loc)?; + + analyzer.op_match( + arena, + ctx, + loc, + &ExprRet::Single(lhs_paths.latest_version(analyzer).into()), + &ExprRet::Single(rhs_paths.latest_version(analyzer).into()), + op, + false, + ) }) } "lt" | "gt" | "slt" | "sgt" | "eq" => { @@ -203,10 +222,14 @@ pub trait YulFuncCaller: )); } - self.parse_ctx_yul_expr(&arguments[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Yul Binary operation had no right hand side".to_string())) + self.parse_ctx_yul_expr(arena, &arguments[0], ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "Yul Binary operation had no right hand side".to_string(), + )); }; if matches!(lhs_paths, ExprRet::CtxKilled(_)) { @@ -214,19 +237,28 @@ pub trait YulFuncCaller: return Ok(()); } - analyzer.parse_ctx_yul_expr(&arguments[1], ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Yul Binary operation had no left hand side".to_string())) + analyzer.parse_ctx_yul_expr(arena, &arguments[1], ctx)?; + analyzer.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(rhs_paths) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoLhs( + loc, + "Yul Binary operation had no left hand side".to_string(), + )); }; if matches!(rhs_paths, ExprRet::CtxKilled(_)) { ctx.push_expr(rhs_paths, analyzer).into_expr_err(loc)?; return Ok(()); } - analyzer.cmp_inner(ctx, loc, &lhs_paths, op, &rhs_paths)?; - let Some(result) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Yul Binary operation had no return".to_string())) + analyzer.cmp_inner(arena, ctx, loc, &lhs_paths, op, &rhs_paths)?; + let Some(result) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
+ else { + return Err(ExprErr::NoLhs( + loc, + "Yul Binary operation had no return".to_string(), + )); }; let res = ContextVarNode::from(result.expect_single().into_expr_err(loc)?); @@ -234,16 +266,15 @@ pub trait YulFuncCaller: let expr = Elem::Expr(RangeExpr::new( Elem::from(res), RangeOp::Cast, - Elem::from(Concrete::Uint(1, U256::zero())) + Elem::from(Concrete::Uint(256, U256::zero())), )); - next.set_range_min(analyzer, expr.clone()).into_expr_err(loc)?; - next.set_range_max(analyzer, expr).into_expr_err(loc)?; - ctx.push_expr( - ExprRet::Single(next.into()), - analyzer, - ) - .into_expr_err(loc) + next.set_range_min(analyzer, arena, expr.clone()) + .into_expr_err(loc)?; + next.set_range_max(analyzer, arena, expr) + .into_expr_err(loc)?; + ctx.push_expr(ExprRet::Single(next.into()), analyzer) + .into_expr_err(loc) }) }) } @@ -258,10 +289,14 @@ pub trait YulFuncCaller: )); } - self.parse_ctx_yul_expr(&arguments[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Yul `iszero` operation had no input".to_string())) + self.parse_ctx_yul_expr(arena, &arguments[0], ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "Yul `iszero` operation had no input".to_string(), + )); }; if matches!(lhs_paths, ExprRet::CtxKilled(_)) { ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; @@ -278,7 +313,7 @@ pub trait YulFuncCaller: let rhs_paths = ExprRet::Single(ContextVarNode::from(analyzer.add_node(tmp_true)).into()); - analyzer.cmp_inner(ctx, loc, &lhs_paths, RangeOp::Eq, &rhs_paths) + analyzer.cmp_inner(arena, ctx, loc, &lhs_paths, RangeOp::Eq, &rhs_paths) }) } "addmod" | "mulmod" => { @@ -304,6 +339,16 @@ pub trait YulFuncCaller: // TODO: actually handle this. @MemoryModel Ok(()) } + "calldatasize" => { + // TODO: actually handle this. @MemoryModel + let b = Builtin::Uint(256); + let var = ContextVar::new_from_builtin(*loc, self.builtin_or_add(b).into(), self) + .into_expr_err(*loc)?; + let node = self.add_node(Node::ContextVar(var)); + ctx.push_expr(ExprRet::Single(node), self) + .into_expr_err(*loc)?; + Ok(()) + } "calldataload" => { if arguments.len() != 1 { return Err(ExprErr::InvalidFunctionInput( @@ -315,17 +360,29 @@ pub trait YulFuncCaller: )); } - self.parse_ctx_yul_expr(&arguments[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Yul `calldataload` operation had no input".to_string())) + self.parse_ctx_yul_expr(arena, &arguments[0], ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
+ else { + return Err(ExprErr::NoRhs( + loc, + "Yul `calldataload` operation had no input".to_string(), + )); }; // TODO: check const version let b = Builtin::Uint(256); - let mut var = ContextVar::new_from_builtin(loc, analyzer.builtin_or_add(b).into(), analyzer) - .into_expr_err(loc)?; + let mut var = ContextVar::new_from_builtin( + loc, + analyzer.builtin_or_add(b).into(), + analyzer, + ) + .into_expr_err(loc)?; let elem = ContextVarNode::from(lhs_paths.expect_single().into_expr_err(loc)?); - var.display_name = format!("calldata[{}:{}+32]", elem.display_name(analyzer).into_expr_err(loc)?, elem.display_name(analyzer).into_expr_err(loc)?); + var.display_name = format!( + "calldata[{}:{}+32]", + elem.display_name(analyzer).into_expr_err(loc)?, + elem.display_name(analyzer).into_expr_err(loc)? + ); let node = analyzer.add_node(Node::ContextVar(var)); ctx.push_expr(ExprRet::Single(node), analyzer) .into_expr_err(loc) @@ -395,9 +452,13 @@ pub trait YulFuncCaller: let res = latest_var.ty(self).into_expr_err(*loc)?; if let Some(r) = res.default_range(self).unwrap() { let new_var = self.advance_var_in_ctx(latest_var, *loc, ctx).unwrap(); - let res = new_var.set_range_min(self, r.min).into_expr_err(*loc); + let res = new_var + .set_range_min(self, arena, r.min) + .into_expr_err(*loc); let _ = self.add_if_err(res); - let res = new_var.set_range_max(self, r.max).into_expr_err(*loc); + let res = new_var + .set_range_max(self, arena, r.max) + .into_expr_err(*loc); let _ = self.add_if_err(res); } } @@ -408,24 +469,72 @@ pub trait YulFuncCaller: Ok(()) } "sstore" => { - // TODO: improve this. Right now we are extremely pessimistic and just say we know nothing about storage variables anymore. - // We should check if the location is a reference to an existing var and update based on that - let vars = ctx.local_vars(self).clone(); - vars.iter().try_for_each(|(_name, var)| { - // widen to any max range - let latest_var = var.latest_version(self); - if matches!( - latest_var.underlying(self).into_expr_err(*loc)?.storage, - Some(StorageLocation::Storage(_)) - ) { - let res = latest_var.ty(self).into_expr_err(*loc)?; - if let Some(r) = res.default_range(self).unwrap() { - let new_var = self.advance_var_in_ctx(latest_var, *loc, ctx).unwrap(); - let res = new_var.set_range_min(self, r.min).into_expr_err(*loc); - let _ = self.add_if_err(res); - let res = new_var.set_range_max(self, r.max).into_expr_err(*loc); - let _ = self.add_if_err(res); - } + if arguments.len() != 2 { + return Err(ExprErr::InvalidFunctionInput( + *loc, + format!( + "Yul function: `{}` expects 2 arguments found: {:?}", + id.name, + arguments.len() + ), + )); + } + + self.parse_inputs(arena, ctx, *loc, arguments)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(mut lhs_paths) = + ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::InvalidFunctionInput( + loc, + "Yul `sload` operation had no inputs".to_string(), + )); + }; + + if lhs_paths.expect_length(2).into_expr_err(loc).is_err() { + return Err(ExprErr::NoRhs( + loc, + "Yul `sload` operation had no rhs".to_string(), + )); + } + let value = lhs_paths.take_one().into_expr_err(loc)?.unwrap(); + let slot = lhs_paths.take_one().into_expr_err(loc)?.unwrap(); + let cvar = ContextVarNode::from(slot.expect_single().unwrap()); + + if let Some(slot) = cvar.slot_to_storage(analyzer) { + analyzer.match_assign_sides( + arena, + ctx, + loc, + &ExprRet::Single(slot.into()), + &value, + )?; + } else { + // TODO: improve this. 
We now handle `slot` but should try to figure out storage layout + let vars = ctx.local_vars(analyzer).clone(); + vars.iter().try_for_each(|(_name, var)| { + // widen to any max range + let latest_var = var.latest_version(analyzer); + if matches!( + latest_var.underlying(analyzer).into_expr_err(loc)?.storage, + Some(StorageLocation::Storage(_)) + ) { + let res = latest_var.ty(analyzer).into_expr_err(loc)?; + if let Some(r) = res.default_range(analyzer).unwrap() { + let new_var = + analyzer.advance_var_in_ctx(latest_var, loc, ctx).unwrap(); + let res = new_var + .set_range_min(analyzer, arena, r.min) + .into_expr_err(loc); + let _ = analyzer.add_if_err(res); + let res = new_var + .set_range_max(analyzer, arena, r.max) + .into_expr_err(loc); + let _ = analyzer.add_if_err(res); + } + } + Ok(()) + })?; } Ok(()) })?; @@ -434,17 +543,28 @@ pub trait YulFuncCaller: Ok(()) } "balance" => { - self.parse_ctx_yul_expr(&arguments[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Yul `balance` operation had no input".to_string())) + self.parse_ctx_yul_expr(arena, &arguments[0], ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "Yul `balance` operation had no input".to_string(), + )); }; let b = Builtin::Uint(256); - let mut var = ContextVar::new_from_builtin(loc, analyzer.builtin_or_add(b).into(), analyzer) - .into_expr_err(loc)?; + let mut var = ContextVar::new_from_builtin( + loc, + analyzer.builtin_or_add(b).into(), + analyzer, + ) + .into_expr_err(loc)?; let elem = ContextVarNode::from(lhs_paths.expect_single().into_expr_err(loc)?); - var.display_name = format!("balance({})", elem.display_name(analyzer).into_expr_err(loc)?); + var.display_name = format!( + "balance({})", + elem.display_name(analyzer).into_expr_err(loc)? + ); let node = analyzer.add_node(Node::ContextVar(var)); ctx.push_expr(ExprRet::Single(node), analyzer) .into_expr_err(loc) @@ -471,17 +591,28 @@ pub trait YulFuncCaller: .into_expr_err(*loc) } "extcodesize" => { - self.parse_ctx_yul_expr(&arguments[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Yul `extcodesize` operation had no input".to_string())) + self.parse_ctx_yul_expr(arena, &arguments[0], ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "Yul `extcodesize` operation had no input".to_string(), + )); }; let b = Builtin::Uint(256); - let mut var = ContextVar::new_from_builtin(loc, analyzer.builtin_or_add(b).into(), analyzer) - .into_expr_err(loc)?; + let mut var = ContextVar::new_from_builtin( + loc, + analyzer.builtin_or_add(b).into(), + analyzer, + ) + .into_expr_err(loc)?; let elem = ContextVarNode::from(lhs_paths.expect_single().into_expr_err(loc)?); - var.display_name = format!("extcodesize({})", elem.display_name(analyzer).into_expr_err(loc)?); + var.display_name = format!( + "extcodesize({})", + elem.display_name(analyzer).into_expr_err(loc)? 
+ ); let node = analyzer.add_node(Node::ContextVar(var)); ctx.push_expr(ExprRet::Single(node), analyzer) .into_expr_err(loc) @@ -509,10 +640,14 @@ pub trait YulFuncCaller: )); } - self.parse_inputs(ctx, *loc, arguments)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(_lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Yul `codecopy` operation had no input".to_string())) + self.parse_inputs(arena, ctx, *loc, arguments)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(_lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "Yul `codecopy` operation had no input".to_string(), + )); }; ctx.push_expr(ExprRet::Multi(vec![]), analyzer) .into_expr_err(loc) @@ -530,27 +665,42 @@ pub trait YulFuncCaller: )); } - self.parse_inputs(ctx, *loc, arguments)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(_lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Yul `extcodecopy` operation had no input".to_string())) + self.parse_inputs(arena, ctx, *loc, arguments)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(_lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "Yul `extcodecopy` operation had no input".to_string(), + )); }; ctx.push_expr(ExprRet::Multi(vec![]), analyzer) .into_expr_err(loc) }) } "extcodehash" => { - self.parse_ctx_yul_expr(&arguments[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Yul `extcodesize` operation had no input".to_string())) + self.parse_ctx_yul_expr(arena, &arguments[0], ctx)?; + self.apply_to_edges(ctx, *loc, arena, &|analyzer, arena, ctx, loc| { + let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? + else { + return Err(ExprErr::NoRhs( + loc, + "Yul `extcodesize` operation had no input".to_string(), + )); }; let b = Builtin::Bytes(32); - let mut var = ContextVar::new_from_builtin(loc, analyzer.builtin_or_add(b).into(), analyzer) - .into_expr_err(loc)?; + let mut var = ContextVar::new_from_builtin( + loc, + analyzer.builtin_or_add(b).into(), + analyzer, + ) + .into_expr_err(loc)?; let elem = ContextVarNode::from(lhs_paths.expect_single().into_expr_err(loc)?); - var.display_name = format!("extcodehash({})", elem.display_name(analyzer).into_expr_err(loc)?); + var.display_name = format!( + "extcodehash({})", + elem.display_name(analyzer).into_expr_err(loc)? + ); let node = analyzer.add_node(Node::ContextVar(var)); ctx.push_expr(ExprRet::Single(node), analyzer) .into_expr_err(loc) @@ -558,7 +708,7 @@ pub trait YulFuncCaller: } _ => Err(ExprErr::Todo( *loc, - format!("Unhandled builtin yul function: {id:?}"), + format!("Unhandled yul function: \"{}\"", id.name), )), } } @@ -612,6 +762,7 @@ pub trait YulFuncCaller: #[tracing::instrument(level = "trace", skip_all)] fn parse_inputs( &mut self, + arena: &mut RangeArena>, ctx: ContextNode, loc: Loc, inputs: &[YulExpression], @@ -622,30 +773,34 @@ pub trait YulFuncCaller: Rc::new(RefCell::new(false)) }; - inputs - .iter() - .try_for_each(|input| { - self.parse_ctx_yul_expr(input, ctx)?; - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoLhs(loc, "Inputs did not have left hand sides".to_string())); - }; - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - if *append.borrow() { - ctx.append_tmp_expr(ret, analyzer).into_expr_err(loc) - } else { - *append.borrow_mut() = true; - ctx.push_tmp_expr(ret, analyzer).into_expr_err(loc) - } - }) - })?; + inputs.iter().try_for_each(|input| { + self.parse_ctx_yul_expr(arena, input, ctx)?; + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { + let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { + return Err(ExprErr::NoLhs( + loc, + "Inputs did not have left hand sides".to_string(), + )); + }; + if matches!(ret, ExprRet::CtxKilled(_)) { + ctx.push_expr(ret, analyzer).into_expr_err(loc)?; + return Ok(()); + } + if *append.borrow() { + ctx.append_tmp_expr(ret, analyzer).into_expr_err(loc) + } else { + *append.borrow_mut() = true; + ctx.push_tmp_expr(ret, analyzer).into_expr_err(loc) + } + }) + })?; if !inputs.is_empty() { - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { + self.apply_to_edges(ctx, loc, arena, &|analyzer, arena, ctx, loc| { let Some(ret) = ctx.pop_tmp_expr(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Inputs did not have left hand sides".to_string())); + return Err(ExprErr::NoLhs( + loc, + "Inputs did not have left hand sides".to_string(), + )); }; ctx.push_expr(ret, analyzer).into_expr_err(loc) }) diff --git a/shared/Cargo.lock b/shared/Cargo.lock deleted file mode 100644 index 24f92325..00000000 --- a/shared/Cargo.lock +++ /dev/null @@ -1,1427 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 3 - -[[package]] -name = "aho-corasick" -version = "0.7.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" -dependencies = [ - "memchr", -] - -[[package]] -name = "arrayvec" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" - -[[package]] -name = "ascii-canvas" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6" -dependencies = [ - "term", -] - -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi", - "libc", - "winapi", -] - -[[package]] -name = "auto_impl" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a8c1df849285fbacd587de7818cc7d13be6cd2cbcd47a04fb1801b0e2706e33" -dependencies = [ - "proc-macro-error", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "autocfg" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" - -[[package]] -name = "base16ct" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" - -[[package]] -name = "base64ct" -version = "1.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b645a089122eccb6111b4f81cbc1a49f5900ac4666bb93ac027feaecf15607bf" - -[[package]] -name = "bit-set" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" -dependencies = [ - "bit-vec", -] - -[[package]] -name = "bit-vec" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bitvec" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" -dependencies = [ - "funty", - "radium", - "tap", - "wyz", -] - -[[package]] -name = "block-buffer" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" -dependencies = [ - "generic-array", -] - -[[package]] -name = "byte-slice-cast" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" - -[[package]] -name = "byteorder" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" - -[[package]] -name = "bytes" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" -dependencies = [ - "serde", -] - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "chrono" -version = "0.4.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f" -dependencies = [ - "num-integer", - "num-traits", -] - -[[package]] -name = "const-oid" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cec318a675afcb6a1ea1d4340e2d377e56e47c266f28043ceccbf4412ddfdd3b" - -[[package]] -name = "cpufeatures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" -dependencies = [ - "libc", -] - -[[package]] -name = "crunchy" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" - -[[package]] -name = "crypto-bigint" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" -dependencies = [ - "generic-array", - "rand_core", - "subtle", - "zeroize", -] - -[[package]] -name = "crypto-common" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "der" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" -dependencies = [ - "const-oid", - "zeroize", -] - -[[package]] -name = "derive_more" -version = "0.99.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "diff" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" - -[[package]] -name = "digest" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" -dependencies = [ - "block-buffer", - "crypto-common", - "subtle", -] - -[[package]] -name = "dirs-next" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" -dependencies = [ - "cfg-if", - "dirs-sys-next", -] - -[[package]] -name = "dirs-sys-next" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" -dependencies = [ - "libc", - "redox_users", - "winapi", -] - -[[package]] -name = "ecdsa" -version = "0.14.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" -dependencies = [ - "der", - "elliptic-curve", - "rfc6979", - "signature", -] - -[[package]] -name = "either" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" - -[[package]] -name = "elliptic-curve" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" -dependencies = [ - "base16ct", - "crypto-bigint", - "der", - "digest", - "ff", - "generic-array", - "group", - "rand_core", - "sec1", - "subtle", - "zeroize", -] - -[[package]] -name = "ena" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7402b94a93c24e742487327a7cd839dc9d36fec9de9fb25b09f2dae459f36c3" -dependencies = [ - "log", -] - -[[package]] -name = "ethabi" -version = "18.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7413c5f74cc903ea37386a8965a936cbeb334bd270862fdece542c1b2dcbc898" -dependencies = [ - "ethereum-types", - "hex", - "once_cell", - "regex", - "serde", - "serde_json", - "sha3", - "thiserror", - "uint", -] - -[[package]] -name = "ethbloom" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c22d4b5885b6aa2fe5e8b9329fb8d232bf739e434e6b87347c63bdd00c120f60" -dependencies = [ - "crunchy", - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "scale-info", - "tiny-keccak", -] - -[[package]] -name = "ethereum-types" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02d215cbf040552efcbe99a38372fe80ab9d00268e20012b79fcd0f073edd8ee" -dependencies = [ - "ethbloom", - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "primitive-types", - "scale-info", - "uint", -] - -[[package]] -name = "ethers-core" -version = "1.0.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade3e9c97727343984e1ceada4fdab11142d2ee3472d2c67027d56b1251d4f15" -dependencies = [ - "arrayvec", - "bytes", - "chrono", - "elliptic-curve", - "ethabi", - "generic-array", - "hex", - "k256", - "open-fastrlp", - "rand", - "rlp", - "rlp-derive", - "serde", - "serde_json", - "strum", - "thiserror", - "tiny-keccak", - "unicode-xid", -] - -[[package]] -name = "ff" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" -dependencies = [ - "rand_core", - "subtle", -] - -[[package]] -name = "fixed-hash" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" -dependencies = [ - "byteorder", - "rand", - "rustc-hex", - "static_assertions", -] - -[[package]] -name = "fixedbitset" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" - -[[package]] -name = "funty" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" - -[[package]] -name = "generic-array" -version = "0.14.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" -dependencies = [ - "typenum", - "version_check", -] - -[[package]] -name = "getrandom" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - -[[package]] -name = "group" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" -dependencies = [ - "ff", - "rand_core", - "subtle", -] - -[[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - -[[package]] -name = "heck" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" - -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "hmac" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" -dependencies = [ - "digest", -] - -[[package]] -name = "impl-codec" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" -dependencies = [ - "parity-scale-codec", -] - -[[package]] -name = "impl-rlp" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f28220f89297a075ddc7245cd538076ee98b01f2a9c23a53a4f1105d5a322808" -dependencies = [ - "rlp", -] - -[[package]] -name = "impl-serde" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc88fc67028ae3db0c853baa36269d398d5f45b6982f95549ff5def78c935cd" -dependencies = [ - "serde", -] - -[[package]] -name = "impl-trait-for-tuples" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "indexmap" -version = "1.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" -dependencies = [ - "autocfg", - "hashbrown", -] - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440" - -[[package]] -name = "k256" -version = "0.11.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" -dependencies = [ - "cfg-if", - "ecdsa", - "elliptic-curve", - "sha2", - "sha3", -] - -[[package]] -name = "keccak" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3afef3b6eff9ce9d8ff9b3601125eec7f0c8cbac7abd14f355d053fa56c98768" -dependencies = [ - "cpufeatures", -] - -[[package]] -name = "lalrpop" -version = "0.19.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b30455341b0e18f276fa64540aff54deafb54c589de6aca68659c63dd2d5d823" -dependencies = [ - "ascii-canvas", - "atty", - "bit-set", - "diff", - "ena", - "itertools", - "lalrpop-util", - "petgraph", - "pico-args", - "regex", - "regex-syntax", - "string_cache", - "term", - "tiny-keccak", - "unicode-xid", -] - -[[package]] -name = "lalrpop-util" -version = "0.19.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcf796c978e9b4d983414f4caedc9273aa33ee214c5b887bd55fde84c85d2dc4" -dependencies = [ - "regex", -] - -[[package]] -name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - -[[package]] -name = "libc" -version = "0.2.139" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" - -[[package]] -name = "lock_api" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" -dependencies = [ - "autocfg", - "scopeguard", -] - -[[package]] -name = "log" -version = "0.4.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "memchr" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" - -[[package]] -name = 
"new_debug_unreachable" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" - -[[package]] -name = "nom8" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8" -dependencies = [ - "memchr", -] - -[[package]] -name = "num-integer" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" -dependencies = [ - "autocfg", - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" -dependencies = [ - "autocfg", -] - -[[package]] -name = "once_cell" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" - -[[package]] -name = "open-fastrlp" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "786393f80485445794f6043fd3138854dd109cc6c4bd1a6383db304c9ce9b9ce" -dependencies = [ - "arrayvec", - "auto_impl", - "bytes", - "ethereum-types", - "open-fastrlp-derive", -] - -[[package]] -name = "open-fastrlp-derive" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "003b2be5c6c53c1cfeb0a238b8a1c3915cd410feb684457a36c10038f764bb1c" -dependencies = [ - "bytes", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "parity-scale-codec" -version = "3.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3840933452adf7b3b9145e27086a5a3376c619dca1a21b1e5a5af0d54979bed" -dependencies = [ - "arrayvec", - "bitvec", - "byte-slice-cast", - "impl-trait-for-tuples", - "parity-scale-codec-derive", - "serde", -] - -[[package]] -name = "parity-scale-codec-derive" -version = "3.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b26a931f824dd4eca30b3e43bb4f31cd5f0d3a403c5f5ff27106b805bfde7b" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "parking_lot" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" -dependencies = [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba1ef8814b5c993410bb3adfad7a5ed269563e4a2f90c41f5d85be7fb47133bf" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-sys", -] - -[[package]] -name = "petgraph" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5014253a1331579ce62aa67443b4a658c5e7dd03d4bc6d302b94474888143" -dependencies = [ - "fixedbitset", - "indexmap", -] - -[[package]] -name = "phf" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "928c6535de93548188ef63bb7c4036bd415cd8f36ad25af44b9789b2ee72a48c" -dependencies = [ - "phf_macros", - "phf_shared 0.11.1", -] - -[[package]] -name = "phf_generator" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b1181c94580fa345f50f19d738aaa39c0ed30a600d95cb2d3e23f94266f14fbf" -dependencies = [ - "phf_shared 0.11.1", - "rand", -] - -[[package]] -name = "phf_macros" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92aacdc5f16768709a569e913f7451034034178b05bdc8acda226659a3dccc66" -dependencies = [ - "phf_generator", - "phf_shared 0.11.1", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "phf_shared" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" -dependencies = [ - "siphasher", -] - -[[package]] -name = "phf_shared" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1fb5f6f826b772a8d4c0394209441e7d37cbbb967ae9c7e0e8134365c9ee676" -dependencies = [ - "siphasher", -] - -[[package]] -name = "pico-args" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db8bcd96cb740d03149cbad5518db9fd87126a10ab519c011893b1754134c468" - -[[package]] -name = "pkcs8" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" -dependencies = [ - "der", - "spki", -] - -[[package]] -name = "ppv-lite86" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" - -[[package]] -name = "precomputed-hash" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" - -[[package]] -name = "primitive-types" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f3486ccba82358b11a77516035647c34ba167dfa53312630de83b12bd4f3d66" -dependencies = [ - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "scale-info", - "uint", -] - -[[package]] -name = "proc-macro-crate" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66618389e4ec1c7afe67d51a9bf34ff9236480f8d51e7489b7d5ab0303c13f34" -dependencies = [ - "once_cell", - "toml_edit", -] - -[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "syn", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", -] - -[[package]] -name = "proc-macro2" -version = "1.0.50" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "quote" -version = "1.0.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "radium" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha", - "rand_core", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom", -] - -[[package]] -name = "redox_syscall" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" -dependencies = [ - "bitflags", -] - -[[package]] -name = "redox_users" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" -dependencies = [ - "getrandom", - "redox_syscall", - "thiserror", -] - -[[package]] -name = "regex" -version = "1.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax", -] - -[[package]] -name = "regex-syntax" -version = "0.6.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" - -[[package]] -name = "rfc6979" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" -dependencies = [ - "crypto-bigint", - "hmac", - "zeroize", -] - -[[package]] -name = "rlp" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" -dependencies = [ - "bytes", - "rustc-hex", -] - -[[package]] -name = "rlp-derive" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33d7b2abe0c340d8797fe2907d3f20d3b5ea5908683618bfe80df7f621f672a" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "rustc-hex" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" - -[[package]] -name = "rustversion" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5583e89e108996506031660fe09baa5011b9dd0341b89029313006d1fb508d70" - -[[package]] -name = "ryu" -version = "1.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" - -[[package]] -name = "scale-info" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "001cf62ece89779fd16105b5f515ad0e5cedcd5440d3dd806bb067978e7c3608" -dependencies = [ - "cfg-if", - "derive_more", - "parity-scale-codec", - "scale-info-derive", -] - -[[package]] -name = "scale-info-derive" -version = "2.3.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "303959cf613a6f6efd19ed4b4ad5bf79966a13352716299ad532cfb115f4205c" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "scopeguard" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" - -[[package]] -name = "sec1" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" -dependencies = [ - "base16ct", - "der", - "generic-array", - "pkcs8", - "subtle", - "zeroize", -] - -[[package]] -name = "serde" -version = "1.0.152" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.152" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "serde_json" -version = "1.0.91" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883" -dependencies = [ - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "sha2" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "sha3" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdf0c33fae925bdc080598b84bc15c55e7b9a4a43b3c704da051f977469691c9" -dependencies = [ - "digest", - "keccak", -] - -[[package]] -name = "shared" -version = "0.1.0" -dependencies = [ - "ethers-core", - "hex", - "lazy_static", - "petgraph", - "solang-parser", -] - -[[package]] -name = "signature" -version = "1.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" -dependencies = [ - "digest", - "rand_core", -] - -[[package]] -name = "siphasher" -version = "0.3.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" - -[[package]] -name = "smallvec" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" - -[[package]] -name = "solang-parser" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e9653f278d9531b60f042c29732bb835d519943f4167b1e5684c7e820dd9fec" -dependencies = [ - "itertools", - "lalrpop", - "lalrpop-util", - "phf", - "serde", - "unicode-xid", -] - -[[package]] -name = "spki" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" -dependencies = [ - "base64ct", - "der", -] - -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "string_cache" -version = 
"0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "213494b7a2b503146286049378ce02b482200519accc31872ee8be91fa820a08" -dependencies = [ - "new_debug_unreachable", - "once_cell", - "parking_lot", - "phf_shared 0.10.0", - "precomputed-hash", -] - -[[package]] -name = "strum" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" -dependencies = [ - "strum_macros", -] - -[[package]] -name = "strum_macros" -version = "0.24.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "rustversion", - "syn", -] - -[[package]] -name = "subtle" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" - -[[package]] -name = "syn" -version = "1.0.107" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "tap" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" - -[[package]] -name = "term" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" -dependencies = [ - "dirs-next", - "rustversion", - "winapi", -] - -[[package]] -name = "thiserror" -version = "1.0.38" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.38" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "tiny-keccak" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" -dependencies = [ - "crunchy", -] - -[[package]] -name = "toml_datetime" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4553f467ac8e3d374bc9a177a26801e5d0f9b211aa1673fb137a403afd1c9cf5" - -[[package]] -name = "toml_edit" -version = "0.18.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56c59d8dd7d0dcbc6428bf7aa2f0e823e26e43b3c9aca15bbc9475d23e5fa12b" -dependencies = [ - "indexmap", - "nom8", - "toml_datetime", -] - -[[package]] -name = "typenum" -version = "1.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" - -[[package]] -name = "uint" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" -dependencies = [ - "byteorder", - "crunchy", - "hex", - "static_assertions", -] - -[[package]] -name = "unicode-ident" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" - -[[package]] -name = "unicode-xid" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" - -[[package]] -name = "version_check" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "windows-sys" -version = "0.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" -dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.42.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7" - -[[package]] -name = "windows_i686_gnu" -version = "0.42.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640" - -[[package]] -name = "windows_i686_msvc" -version = "0.42.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.42.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.42.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" - -[[package]] -name = "wyz" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" -dependencies = [ - "tap", -] - -[[package]] -name = "zeroize" -version = "1.5.7" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" diff --git a/shared/Cargo.toml b/shared/Cargo.toml deleted file mode 100644 index 342414da..00000000 --- a/shared/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -name = "shared" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -ethers-core = "*" -petgraph = "0.6.2" -solang-parser = { version = "0.2.4", features = ["pt-serde"] } -lazy_static = "1.4.0" -hex = "0.4.3" -tracing = { version = "0.1", features = ["attributes"] } -tracing-subscriber = "0.3" \ No newline at end of file diff --git a/shared/src/analyzer.rs b/shared/src/analyzer.rs deleted file mode 100644 index 5ca67036..00000000 --- a/shared/src/analyzer.rs +++ /dev/null @@ -1,810 +0,0 @@ -use crate::as_dot_str; - -use crate::FunctionParamNode; - -use crate::range::Range; -use crate::BlockNode; - -use crate::MsgNode; -use std::sync::Arc; -use std::sync::Mutex; - -use crate::context::ContextVarNode; -use crate::range::range_string::ToRangeString; -use crate::{Builtin, Edge, Function, FunctionParam, FunctionReturn, Node, NodeIdx}; -use petgraph::visit::EdgeRef; -use std::collections::BTreeMap; -use std::collections::BTreeSet; - -use petgraph::dot::Dot; -use petgraph::{graph::*, Directed, Direction}; -use std::collections::HashMap; - -#[derive(Debug, Clone, Ord, Eq, PartialEq, PartialOrd)] -pub enum GraphError { - NodeConfusion(String), - MaxStackDepthReached(String), - MaxStackWidthReached(String), - ChildRedefinition(String), - VariableUpdateInOldContext(String), - DetachedVariable(String), - ExpectedSingle(String), - StackLengthMismatch(String), - UnbreakableRecursion(String), -} - -pub trait AnalyzerLike: GraphLike { - type Expr; - type ExprErr; - /// Gets the builtin functions map - fn builtin_fns(&self) -> &HashMap; - /// Mutably gets the builtin functions map - fn builtin_fn_nodes_mut(&mut self) -> &mut HashMap; - /// Gets the builtin function nodes mapping - fn builtin_fn_nodes(&self) -> &HashMap; - /// Returns the configured max call depth - fn max_depth(&self) -> usize; - /// Returns the configured max fork width - fn max_width(&self) -> usize; - fn builtin_fn_inputs(&self) -> &HashMap, Vec)>; - fn builtins(&self) -> &HashMap; - fn builtins_mut(&mut self) -> &mut HashMap; - fn builtin_or_add(&mut self, builtin: Builtin) -> NodeIdx { - if let Some(idx) = self.builtins().get(&builtin) { - *idx - } else { - let idx = self.add_node(Node::Builtin(builtin.clone())); - self.builtins_mut().insert(builtin, idx); - idx - } - } - fn builtin_fn_or_maybe_add(&mut self, builtin_name: &str) -> Option - where - Self: std::marker::Sized, - { - if let Some(idx) = self.builtin_fn_nodes().get(builtin_name) { - Some(*idx) - } else if let Some(func) = self.builtin_fns().get(builtin_name) { - let (inputs, outputs) = self - .builtin_fn_inputs() - .get(builtin_name) - .expect("builtin func but no inputs") - .clone(); - let func_node = self.add_node(Node::Function(func.clone())); - let mut params_strs = vec![]; - inputs.into_iter().for_each(|input| { - let input_node = self.add_node(input); - params_strs.push(FunctionParamNode::from(input_node).ty_str(self).unwrap()); - self.add_edge(input_node, func_node, Edge::FunctionParam); - }); - outputs.into_iter().for_each(|output| { - let output_node = self.add_node(output); - self.add_edge(output_node, func_node, Edge::FunctionReturn); - }); - - 
self.add_edge(func_node, self.entry(), Edge::Func); - - self.builtin_fn_nodes_mut() - .insert(builtin_name.to_string(), func_node); - Some(func_node) - } else { - None - } - } - fn user_types(&self) -> &HashMap; - fn user_types_mut(&mut self) -> &mut HashMap; - fn parse_expr(&mut self, expr: &Self::Expr, parent: Option) -> NodeIdx; - fn msg(&mut self) -> MsgNode; - fn block(&mut self) -> BlockNode; - fn entry(&self) -> NodeIdx; - - fn add_expr_err(&mut self, err: Self::ExprErr); - - fn add_if_err(&mut self, err: Result) -> Option { - match err { - Ok(t) => Some(t), - Err(e) => { - self.add_expr_err(e); - None - } - } - } - - fn expr_errs(&self) -> Vec; -} - -pub struct G<'a> { - pub graph: &'a Graph, -} -impl GraphLike for G<'_> { - fn graph_mut(&mut self) -> &mut Graph { - panic!("Should call this") - } - - fn graph(&self) -> &Graph { - self.graph - } -} - -pub trait GraphLike { - fn graph_mut(&mut self) -> &mut Graph; - fn graph(&self) -> &Graph; - - fn add_node(&mut self, node: impl Into) -> NodeIdx { - self.graph_mut().add_node(node.into()) - } - - fn node(&self, node: impl Into) -> &Node { - self.graph() - .node_weight(node.into()) - .expect("Index not in graph") - } - - fn node_mut(&mut self, node: impl Into) -> &mut Node { - self.graph_mut() - .node_weight_mut(node.into()) - .expect("Index not in graph") - } - - fn open_dot(&self) - where - Self: std::marker::Sized, - Self: AnalyzerLike, - { - use std::env::temp_dir; - use std::fs; - use std::io::Write; - use std::process::Command; - let temp_dir = temp_dir(); - let file_name = "dot.dot"; - let mut temp_path = temp_dir.clone(); - temp_path.push(file_name); - let temp_svg_filename: String = format!("{}/dot.svg", &temp_dir.to_string_lossy()); - - let mut file = fs::File::create(temp_path.clone()).unwrap(); - file.write_all(self.dot_str().as_bytes()).unwrap(); - Command::new("dot") - .arg("-Tsvg") - .arg(temp_path) - .arg("-o") - .arg(&temp_svg_filename) - .output() - .expect("You may need to install graphviz, check if command 'dot' is in your $PATH"); - Command::new("open") - .arg(&temp_svg_filename) - .spawn() - .expect("failed to execute process"); - } - - fn add_edge( - &mut self, - from_node: impl Into, - to_node: impl Into, - edge: impl Into, - ) { - self.graph_mut() - .add_edge(from_node.into(), to_node.into(), edge.into()); - } - - fn cluster_str( - &self, - node: NodeIdx, - cluster_num: usize, - is_killed: bool, - handled_nodes: Arc>>, - handled_edges: Arc>>>, - ) -> String { - if self - .graph() - .edges_directed(node, Direction::Outgoing) - .collect::>() - .is_empty() - { - return "".to_string(); - } - let new_graph = self.graph().filter_map( - |_idx, node| match node { - Node::ContextVar(_cvar) => { - // if !cvar.is_symbolic { - // None - // } else { - Some(node.clone()) - // } - } - _ => Some(node.clone()), - }, - |_idx, edge| Some(*edge), - ); - - let g = &G { graph: &new_graph }; - let children = g.children(node); - let children_edges = g.children_edges(node); - let mut cn = cluster_num + 1; - let child_node_str = children - .iter() - .map(|child| { - { - handled_nodes.lock().unwrap().insert(*child); - } - - if g.graph - .edges_directed(*child, Direction::Outgoing) - .collect::>() - .is_empty() - { - return "".to_string(); - } - let post_str = match self.node(*child) { - Node::Context(c) => { - cn += 2; - self.cluster_str( - *child, - cn, - c.killed.is_some(), - handled_nodes.clone(), - handled_edges.clone(), - ) - } - _ => "".to_string(), - }; - - format!( - " {} [label = \"{}\", color = \"{}\"]\n{}\n", - 
petgraph::graph::GraphIndex::index(child), - as_dot_str(*child, g).replace('\"', "\'"), - self.node(*child).dot_str_color(), - post_str - ) - }) - .collect::>() - .join(""); - - let edge_str = children_edges - .iter() - .filter(|(_, _, _, idx)| !handled_edges.lock().unwrap().contains(idx)) - .map(|(from, to, edge, idx)| { - handled_edges.lock().unwrap().insert(*idx); - let from = petgraph::graph::GraphIndex::index(from); - let to = petgraph::graph::GraphIndex::index(to); - format!(" {from:} -> {to:} [label = \"{edge:?}\"]\n",) - }) - .collect::>() - .join(""); - format!( - " subgraph cluster_{} {{\n{}\n{}\n{}\n{}\n}}", - cluster_num, - if is_killed && cluster_num % 2 == 0 { - " bgcolor=\"#7a0b0b\"" - } else if is_killed { - " bgcolor=\"#e04646\"" - } else if cluster_num % 2 == 0 { - " bgcolor=\"#545e87\"" - } else { - " bgcolor=\"#1a1b26\"" - }, - format!( - " {} [label = \"{}\", color = \"{}\"]\n", - node.index(), - as_dot_str(node, g).replace('\"', "\'"), - self.node(node).dot_str_color() - ), - child_node_str, - edge_str, - ) - } - - fn dot_str(&self) -> String - where - Self: std::marker::Sized, - Self: AnalyzerLike, - { - let mut dot_str = Vec::new(); - let raw_start_str = r##"digraph G { - node [shape=box, style="filled, rounded", color="#565f89", fontcolor="#d5daf0", fontname="Helvetica", fillcolor="#24283b"]; - edge [color="#414868", fontcolor="#c0caf5", fontname="Helvetica"]; - bgcolor="#1a1b26"; rankdir="BT""##; - dot_str.push(raw_start_str.to_string()); - let handled_edges = Arc::new(Mutex::new(BTreeSet::new())); - let handled_nodes = Arc::new(Mutex::new(BTreeSet::new())); - let (nodes, edges) = ( - self.graph().node_indices().collect::>(), - self.graph().edge_indices().collect::>(), - ); - let mut cluster_num = 0; - let mut skip = BTreeSet::default(); - let nodes_str = nodes - .iter() - .filter_map(|node| { - if self - .graph() - .edges_directed(*node, Direction::Outgoing) - .collect::>() - .is_empty() - && !matches!(self.node(*node), Node::Entry) - { - skip.insert(*node); - return None; - } - if !handled_nodes.lock().unwrap().contains(node) { - match self.node(*node) { - Node::Function(_) => { - cluster_num += 2; - Some(self.cluster_str( - *node, - cluster_num, - false, - handled_nodes.clone(), - handled_edges.clone(), - )) - } - n => Some(format!( - "{} [label = \"{}\", color = \"{}\"]", - petgraph::graph::GraphIndex::index(node), - as_dot_str(*node, self).replace('\"', "\'"), - n.dot_str_color() - )), - } - } else { - None - } - }) - .collect::>() - .join("\n "); - let edges_str = edges - .into_iter() - .filter_map(|edge| { - if !handled_edges.lock().unwrap().contains(&edge) { - let (from, to) = self.graph().edge_endpoints(edge).unwrap(); - if skip.contains(&from) || skip.contains(&to) { - return None; - } - let from = from.index(); - let to = to.index(); - Some(format!( - "{from:} -> {to:} [label = \"{:?}\"]", - self.graph().edge_weight(edge).unwrap() - )) - } else { - None - } - }) - .collect::>() - .join("\n "); - - dot_str.push(nodes_str); - dot_str.push(edges_str); - let raw_end_str = r#"}"#; - dot_str.push(raw_end_str.to_string()); - dot_str.join("\n") - } - - fn dot_str_no_tmps(&self) -> String - where - Self: std::marker::Sized, - Self: GraphLike + AnalyzerLike, - { - let new_graph = self.graph().filter_map( - |_idx, node| match node { - Node::ContextVar(cvar) => { - if !cvar.is_symbolic || cvar.tmp_of.is_some() { - None - } else { - Some(node.clone()) - } - } - _ => Some(node.clone()), - }, - |_idx, edge| Some(*edge), - ); - let mut dot_str = Vec::new(); - let 
raw_start_str = r##"digraph G { - node [shape=box, style="filled, rounded", color="#565f89", fontcolor="#d5daf0", fontname="Helvetica", fillcolor="#24283b"]; - edge [color="#414868", fontcolor="#c0caf5", fontname="Helvetica"]; - bgcolor="#1a1b26";"##; - dot_str.push(raw_start_str.to_string()); - let nodes_and_edges_str = format!( - "{:?}", - Dot::with_attr_getters( - &new_graph, - &[ - petgraph::dot::Config::GraphContentOnly, - petgraph::dot::Config::NodeNoLabel, - petgraph::dot::Config::EdgeNoLabel - ], - &|_graph, edge_ref| { - match edge_ref.weight() { - Edge::Context(edge) => format!("label = \"{edge:?}\""), - e => format!("label = \"{e:?}\""), - } - }, - &|_graph, (idx, node_ref)| { - let inner = match node_ref { - Node::ContextVar(cvar) => { - let range_str = if let Some(r) = cvar.ty.ref_range(self).unwrap() { - r.as_dot_str(self) - // format!("[{}, {}]", r.min.eval(self).to_range_string(self).s, r.max.eval(self).to_range_string(self).s) - } else { - "".to_string() - }; - - format!( - "{} -- {} -- range: {}", - cvar.display_name, - cvar.ty.as_string(self).unwrap(), - range_str - ) - } - _ => as_dot_str(idx, &G { graph: &new_graph }), - }; - format!( - "label = \"{}\", color = \"{}\"", - inner.replace('\"', "\'"), - node_ref.dot_str_color() - ) - } - ) - ); - dot_str.push(nodes_and_edges_str); - let raw_end_str = r#"}"#; - dot_str.push(raw_end_str.to_string()); - dot_str.join("\n") - } - - fn dot_str_no_tmps_for_ctx(&self, fork_name: String) -> String - where - Self: GraphLike + AnalyzerLike, - Self: Sized, - { - let new_graph = self.graph().filter_map( - |idx, node| match node { - Node::Context(ctx) => { - if ctx.path != fork_name { - None - } else { - Some(node.clone()) - } - } - Node::ContextVar(cvar) => { - if let Some(ctx) = ContextVarNode::from(idx).maybe_ctx(self) { - if ctx.underlying(self).unwrap().path == fork_name && !cvar.is_symbolic { - Some(node.clone()) - } else { - None - } - } else { - None - } - } - _ => Some(node.clone()), - }, - |_idx, edge| Some(*edge), - ); - let mut dot_str = Vec::new(); - let raw_start_str = r##"digraph G { - node [shape=box, style="filled, rounded", color="#565f89", fontcolor="#d5daf0", fontname="Helvetica", fillcolor="#24283b"]; - edge [color="#414868", fontcolor="#c0caf5", fontname="Helvetica"]; - bgcolor="#1a1b26";"##; - dot_str.push(raw_start_str.to_string()); - let nodes_and_edges_str = format!( - "{:?}", - Dot::with_attr_getters( - &new_graph, - &[ - petgraph::dot::Config::GraphContentOnly, - petgraph::dot::Config::NodeNoLabel, - petgraph::dot::Config::EdgeNoLabel - ], - &|_graph, edge_ref| { - match edge_ref.weight() { - Edge::Context(edge) => format!("label = \"{edge:?}\""), - e => format!("label = \"{e:?}\""), - } - }, - &|_graph, (idx, node_ref)| { - let inner = match node_ref { - Node::ContextVar(cvar) => { - let range_str = if let Some(r) = cvar.ty.ref_range(self).unwrap() { - format!( - "[{}, {}]", - r.evaled_range_min(self) - .unwrap() - .to_range_string(false, self) - .s, - r.evaled_range_max(self) - .unwrap() - .to_range_string(true, self) - .s - ) - } else { - "".to_string() - }; - - format!( - "{} -- {} -- range: {}", - cvar.display_name, - cvar.ty.as_string(self).unwrap(), - range_str - ) - } - _ => as_dot_str(idx, &G { graph: &new_graph }), - }; - format!( - "label = \"{}\", color = \"{}\"", - inner.replace('\"', "\'"), - node_ref.dot_str_color() - ) - } - ) - ); - dot_str.push(nodes_and_edges_str); - let raw_end_str = r#"}"#; - dot_str.push(raw_end_str.to_string()); - dot_str.join("\n") - } -} - -impl Search for T where T: 
GraphLike {} -pub trait Search: GraphLike { - fn search_for_ancestor(&self, start: NodeIdx, edge_ty: &Edge) -> Option { - let edges = self.graph().edges_directed(start, Direction::Outgoing); - if let Some(edge) = edges.clone().find(|edge| edge.weight() == edge_ty) { - Some(edge.target()) - } else { - edges - .map(|edge| edge.target()) - .filter_map(|node| self.search_for_ancestor(node, edge_ty)) - .take(1) - .next() - } - } - - fn search_for_ancestor_multi(&self, start: NodeIdx, edge_tys: &[Edge]) -> Option { - let edges = self.graph().edges_directed(start, Direction::Outgoing); - if let Some(edge) = edges.clone().find(|edge| edge_tys.contains(edge.weight())) { - Some(edge.target()) - } else { - edges - .map(|edge| edge.target()) - .filter_map(|node| self.search_for_ancestor_multi(node, edge_tys)) - .take(1) - .next() - } - } - /// Finds any child nodes that have some edge `edge_ty` incoming. Builds up a set of these - /// - /// i.e.: a -my_edge-> b -other_edge-> c -my_edge-> d - /// - /// This function would build a set { b, d } if we are looking for `my_edge` and start at a. - fn search_children(&self, start: NodeIdx, edge_ty: &Edge) -> BTreeSet { - let edges = self.graph().edges_directed(start, Direction::Incoming); - let mut this_children: BTreeSet = edges - .clone() - .filter_map(|edge| { - if edge.weight() == edge_ty { - Some(edge.source()) - } else { - None - } - }) - .collect(); - - this_children.extend( - edges - .flat_map(|edge| self.search_children(edge.source(), edge_ty)) - .collect::>(), - ); - this_children - } - - fn find_child_exclude_via( - &self, - start: NodeIdx, - edge_ty: &Edge, - exclude_edges: &[Edge], - find_fn: &impl Fn(NodeIdx, &Self) -> Option, - ) -> Option { - let edges = self - .graph() - .edges_directed(start, Direction::Incoming) - .filter(|edge| !exclude_edges.contains(edge.weight())); - if let Some(node) = edges - .clone() - .filter_map(|edge| { - if edge.weight() == edge_ty { - Some(edge.source()) - } else { - None - } - }) - .find(|node| find_fn(*node, self).is_some()) - { - Some(node) - } else { - edges - .clone() - .map(|edge| edge.source()) - .find_map(|node| self.find_child_exclude_via(node, edge_ty, exclude_edges, find_fn)) - } - } - - fn search_children_exclude_via( - &self, - start: NodeIdx, - edge_ty: &Edge, - exclude_edges: &[Edge], - ) -> BTreeSet { - let edges = self - .graph() - .edges_directed(start, Direction::Incoming) - .filter(|edge| !exclude_edges.contains(edge.weight())); - let mut this_children: BTreeSet = edges - .clone() - .filter_map(|edge| { - if edge.weight() == edge_ty { - Some(edge.source()) - } else { - None - } - }) - .collect(); - - this_children.extend( - edges - .flat_map(|edge| { - self.search_children_exclude_via(edge.source(), edge_ty, exclude_edges) - }) - .collect::>(), - ); - this_children - } - - fn search_children_include_via( - &self, - start: NodeIdx, - edge_ty: &Edge, - include_edges: &[Edge], - ) -> BTreeSet { - let mut edges: Vec<_> = self - .graph() - .edges_directed(start, Direction::Incoming) - .collect(); - edges = edges - .into_iter() - .filter(|edge| include_edges.contains(edge.weight())) - .collect::>(); - let mut this_children: BTreeSet = edges - .iter() - .filter_map(|edge| { - if edge.weight() == edge_ty { - Some(edge.source()) - } else { - None - } - }) - .collect(); - - this_children.extend( - edges - .clone() - .iter() - .flat_map(|edge| { - self.search_children_include_via(edge.source(), edge_ty, include_edges) - }) - .collect::>(), - ); - this_children - } - - fn search_children_depth( - &self, 
- start: NodeIdx, - edge_ty: &Edge, - max_depth: usize, - curr_depth: usize, - ) -> BTreeSet { - let edges = self.graph().edges_directed(start, Direction::Incoming); - let mut this_children: BTreeSet = edges - .clone() - .filter_map(|edge| { - if edge.weight() == edge_ty { - Some(edge.source()) - } else { - None - } - }) - .collect(); - - if curr_depth < max_depth { - this_children.extend( - edges - .flat_map(|edge| { - self.search_children_depth( - edge.source(), - edge_ty, - max_depth, - curr_depth + 1, - ) - }) - .collect::>(), - ); - } - this_children - } - - /// Gets all children recursively - fn children(&self, start: NodeIdx) -> BTreeSet { - let edges = self.graph().edges_directed(start, Direction::Incoming); - let mut this_children: BTreeSet = - edges.clone().map(|edge| edge.source()).collect(); - - this_children.extend( - edges - .flat_map(|edge| self.children(edge.source())) - .collect::>(), - ); - this_children - } - - /// Gets all children edges recursively - fn children_edges( - &self, - start: NodeIdx, - ) -> BTreeSet<(NodeIdx, NodeIdx, Edge, EdgeIndex)> { - let edges = self.graph().edges_directed(start, Direction::Incoming); - let mut this_children_edges: BTreeSet<(NodeIdx, NodeIdx, Edge, EdgeIndex)> = edges - .clone() - .map(|edge| (edge.source(), edge.target(), *edge.weight(), edge.id())) - .collect(); - - this_children_edges.extend( - edges - .flat_map(|edge| self.children_edges(edge.source())) - .collect::)>>(), - ); - this_children_edges - } - - /// Finds any child nodes that have some edge `edge_ty` incoming. Builds up a mapping of these - /// - /// i.e.: a -my_edge-> b -other_edge-> c -my_edge-> d - /// - /// This function would build a map { a: [b], c: [d] } if we are looking for `my_edge` and start at a. - fn nodes_with_children( - &self, - start: NodeIdx, - edge_ty: &Edge, - ) -> Option>> { - let edges = self.graph().edges_directed(start, Direction::Incoming); - let mut map: BTreeMap> = Default::default(); - - let this_children: BTreeSet = edges - .clone() - .filter_map(|edge| { - if edge.weight() == edge_ty { - Some(edge.source()) - } else { - None - } - }) - .collect(); - - if !this_children.is_empty() { - map.insert(start, this_children); - } - map.extend( - edges - .filter_map(|edge| self.nodes_with_children(edge.source(), edge_ty)) - .flatten() - .collect::>>(), - ); - if map.is_empty() { - None - } else { - Some(map) - } - } -} - -pub trait AsDotStr { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String; -} diff --git a/shared/src/context/mod.rs b/shared/src/context/mod.rs deleted file mode 100644 index 6e0eac7c..00000000 --- a/shared/src/context/mod.rs +++ /dev/null @@ -1,1663 +0,0 @@ -use crate::analyzer::GraphError; -use crate::analyzer::{AnalyzerLike, GraphLike, Search}; -use crate::as_dot_str; -use crate::nodes::FunctionNode; - -use crate::AsDotStr; -use crate::ContractNode; -use crate::FunctionParamNode; -use crate::StructNode; -use petgraph::dot::Dot; -use std::collections::BTreeSet; - -use crate::{Edge, Node, NodeIdx}; - -use solang_parser::pt::Loc; -use std::collections::BTreeMap; - -mod var; -pub use var::*; -mod expr_ret; -pub use expr_ret::*; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub enum CallFork { - Call(ContextNode), - Fork(ContextNode, ContextNode), -} - -impl CallFork { - pub fn maybe_call(&self) -> Option { - match self { - CallFork::Call(c) => Some(*c), - _ => None, - } - } - - pub fn maybe_fork(&self) -> Option<(ContextNode, ContextNode)> { - match self { - CallFork::Fork(w1, w2) => Some((*w1, 
*w2)), - _ => None, - } - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub enum ContextEdge { - // Control flow - Context, - Subcontext, - ContextFork, - ContextMerge, - Call, - - // Context Variables - Variable, - InheritedVariable, - - AttrAccess, - Index, - IndexAccess, - StructAccess, - FuncAccess, - - // Variable incoming edges - Assign, - StorageAssign, - MemoryAssign, - Prev, - - // Control flow - Return, - Continue, - InputVariable, - ReturnAssign(bool), - - // Range analysis - Range, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct ModifierState { - pub num: usize, - pub loc: Loc, - pub parent_fn: FunctionNode, - pub parent_caller_ctx: ContextNode, - pub parent_ctx: ContextNode, - pub renamed_inputs: BTreeMap, -} - -impl ModifierState { - pub fn new( - num: usize, - loc: Loc, - parent_fn: FunctionNode, - parent_ctx: ContextNode, - parent_caller_ctx: ContextNode, - renamed_inputs: BTreeMap, - ) -> Self { - Self { - num, - loc, - parent_fn, - parent_ctx, - parent_caller_ctx, - renamed_inputs, - } - } -} - -#[derive(Default, Debug, Clone, Eq, PartialEq)] -pub struct ContextCache { - pub vars: BTreeMap, - pub visible_funcs: Option>, - pub first_ancestor: Option, - pub associated_source: Option, - pub associated_contract: Option, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct Context { - /// The function associated with this context - pub parent_fn: FunctionNode, - /// Whether this function call is actually a modifier call - pub modifier_state: Option, - /// An optional parent context (i.e. this context is a fork or subcontext of another previous context) - pub parent_ctx: Option, - pub returning_ctx: Option, - /// Variables whose bounds are required to be met for this context fork to exist. i.e. a conditional operator - /// like an if statement - pub ctx_deps: BTreeMap, - /// A string that represents the path taken from the root context (i.e. `fn_entry.fork.1`) - pub path: String, - /// Denotes whether this context was killed by an unsatisfiable require, assert, etc. statement - pub killed: Option<(Loc, KilledKind)>, - /// Denotes whether this context is a fork of another context - pub is_fork: bool, - /// Denotes whether this context is the result of a internal function call, and points to the FunctionNode - pub fn_call: Option, - /// Denotes whether this context is the result of a internal function call, and points to the FunctionNode - pub ext_fn_call: Option, - /// The child context. This is either of the form `Call(child_context)` or `Fork(world1, world2)`. Once - /// a child is defined we should *never* evaluate an expression in this context. 
- pub child: Option, - /// A counter for temporary variables - this lets a context create unique temporary variables - pub tmp_var_ctr: usize, - /// The location in source of the context - pub loc: Loc, - /// The return node and the return location - pub ret: Vec<(Loc, ContextVarNode)>, - /// Depth tracker - pub depth: usize, - /// Width tracker - pub width: usize, - pub tmp_expr: Vec>, - pub expr_ret_stack: Vec, - pub unchecked: bool, - pub number_of_live_edges: usize, - - // caching related things - pub cache: ContextCache, -} - -impl Context { - /// Creates a new context from a function - pub fn new(parent_fn: FunctionNode, fn_name: String, loc: Loc) -> Self { - Context { - parent_fn, - parent_ctx: None, - returning_ctx: None, - path: fn_name, - tmp_var_ctr: 0, - killed: None, - ctx_deps: Default::default(), - is_fork: false, - fn_call: None, - ext_fn_call: None, - child: None, - ret: vec![], - loc, - modifier_state: None, - depth: 0, - width: 0, - expr_ret_stack: Vec::with_capacity(5), - tmp_expr: vec![], - unchecked: false, - number_of_live_edges: 0, - cache: Default::default(), - } - } - - /// Creates a new subcontext from an existing context - pub fn new_subctx( - parent_ctx: ContextNode, - returning_ctx: Option, - loc: Loc, - fork_expr: Option<&str>, - fn_call: Option, - fn_ext: bool, - analyzer: &mut impl AnalyzerLike, - modifier_state: Option, - ) -> Result { - let mut depth = - parent_ctx.underlying(analyzer)?.depth + if fork_expr.is_some() { 0 } else { 1 }; - - let width = - parent_ctx.underlying(analyzer)?.width + if fork_expr.is_some() { 1 } else { 0 }; - - if analyzer.max_depth() < depth { - return Err(GraphError::MaxStackDepthReached(format!( - "Stack depth limit reached: {}", - depth - 1 - ))); - } - - let tw = parent_ctx.total_width(analyzer)?; - if analyzer.max_width() < tw { - return Err(GraphError::MaxStackWidthReached(format!( - "Stack width limit reached: {}", - width - 1 - ))); - } - - let (fn_name, ext_fn_call, fn_call) = if let Some(fn_call) = fn_call { - if fn_ext { - (fn_call.name(analyzer)?, Some(fn_call), None) - } else { - (fn_call.name(analyzer)?, None, Some(fn_call)) - } - } else if let Some(returning_ctx) = returning_ctx { - let fn_node = returning_ctx.associated_fn(analyzer)?; - (fn_node.name(analyzer)?, None, Some(fn_node)) - } else { - ("anonymous_fn_call".to_string(), None, None) - }; - - let path = format!( - "{}.{}", - parent_ctx.underlying(analyzer)?.path, - if let Some(ref fork_expr) = fork_expr { - format!("fork{{ {} }}", fork_expr) - } else if let Some(returning_ctx) = returning_ctx { - depth = depth.saturating_sub(2); - format!( - "resume{{ {} }}", - returning_ctx.associated_fn_name(analyzer)? 
- ) - } else { - fn_name - } - ); - - let parent_fn = parent_ctx.associated_fn(analyzer)?; - - parent_ctx.underlying_mut(analyzer)?.number_of_live_edges += 1; - - tracing::trace!("new subcontext path: {path}, depth: {depth}"); - Ok(Context { - parent_fn, - parent_ctx: Some(parent_ctx), - returning_ctx, - path, - is_fork: fork_expr.is_some(), - fn_call, - ext_fn_call, - ctx_deps: parent_ctx.underlying(analyzer)?.ctx_deps.clone(), - killed: None, - child: None, - tmp_var_ctr: parent_ctx.underlying(analyzer)?.tmp_var_ctr, - ret: vec![], - loc, - modifier_state, - depth, - width, - expr_ret_stack: if fork_expr.is_some() { - parent_ctx.underlying(analyzer)?.expr_ret_stack.clone() - } else if let Some(ret_ctx) = returning_ctx { - ret_ctx.underlying(analyzer)?.expr_ret_stack.clone() - } else { - vec![] - }, - tmp_expr: if fork_expr.is_some() { - parent_ctx.underlying(analyzer)?.tmp_expr.clone() - } else if let Some(ret_ctx) = returning_ctx { - ret_ctx.underlying(analyzer)?.tmp_expr.clone() - } else { - vec![] - }, - unchecked: if fork_expr.is_some() { - parent_ctx.underlying(analyzer)?.unchecked - } else if let Some(ret_ctx) = returning_ctx { - ret_ctx.underlying(analyzer)?.unchecked - } else { - false - }, - number_of_live_edges: 0, - cache: ContextCache { - vars: Default::default(), - visible_funcs: if fork_expr.is_some() { - parent_ctx.underlying(analyzer)?.cache.visible_funcs.clone() - } else if let Some(ret_ctx) = returning_ctx { - ret_ctx.underlying(analyzer)?.cache.visible_funcs.clone() - } else { - None - }, - first_ancestor: if fork_expr.is_some() { - parent_ctx.underlying(analyzer)?.cache.first_ancestor - } else if let Some(ret_ctx) = returning_ctx { - ret_ctx.underlying(analyzer)?.cache.first_ancestor - } else { - None - }, - associated_source: None, - associated_contract: None, - }, - }) - } - - /// Set the child context to a fork - pub fn set_child_fork(&mut self, world1: ContextNode, world2: ContextNode) -> bool { - if self.child.is_some() { - false - } else { - self.child = Some(CallFork::Fork(world1, world2)); - true - } - } - - /// Set the child context to a call - pub fn set_child_call(&mut self, call_ctx: ContextNode) -> bool { - if self.child.is_some() { - false - } else { - self.child = Some(CallFork::Call(call_ctx)); - true - } - } - - pub fn delete_child(&mut self) { - self.child = None; - } - - pub fn as_string(&mut self) -> String { - "Context".to_string() - } -} - -#[derive(Debug, Clone)] -pub struct CtxTree { - pub node: ContextNode, - pub lhs: Option>, - pub rhs: Option>, -} - -#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] -/// A wrapper of a node index that corresponds to a [`Context`] -pub struct ContextNode(pub usize); - -impl AsDotStr for ContextNode { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String { - format!("Context {{ {} }}", self.path(analyzer)) - } -} - -impl ContextNode { - // pub fn called_functions(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - // self.underlying(analyzer)?.children.iter().filter_map(|child| { - // match child.maybe_call()?.underlying(analyzer) { - // Ok(underlying) => { - // match (underlying.fn_call, underlying.ext_fn_call) { - // (Some(fn_call), _) => Some(Ok(fn_call)), - // (_, Some(ext_fn_call)) => Some(Ok(ext_fn_call)), - // (None, None) => None - // } - // } - // Err(_) => None - // } - // }).collect() - // } - - pub fn join( - &self, - _func: FunctionNode, - _mapping: &BTreeMap, - _analyzer: &mut (impl GraphLike + AnalyzerLike), - ) { - todo!("Joining not supported yet"); - // 
println!("joining"); - // if let Some(body_ctx) = func.maybe_body_ctx(analyzer) { - // let vars: Vec<_> = body_ctx.vars(analyzer).values().map(|var| var.latest_version(analyzer)).collect(); - // println!("vars: {vars:#?}"); - // let replacements: Vec<(ContextVarNode, ContextVarNode)> = mapping.iter().filter_map(|(input_var, param)| { - // vars.iter().find(|var| var.name(analyzer).unwrap() == param.name(analyzer).unwrap()).map(|var| { - // (*var, *input_var) - // }) - // }).collect(); - - // let mut mapping = BTreeMap::default(); - // replacements.into_iter().for_each(|(var, replacement)| { - // mapping.insert(var, replacement); - // let mut latest = var; - // while let Some(next) = latest.next_version(analyzer) { - // latest = next; - // mapping.insert(latest, replacement); - // } - // }); - - // println!("mapping: {mapping:#?}"); - - // vars.iter().for_each(|var| { - // let mut latest = *var; - // let mut range = latest.range(analyzer).unwrap().unwrap(); - // println!("var: {var:?}, depends on: {:#?}, {range:#?}", var.range_deps(analyzer)); - // range.uncache_range_min(); - // range.uncache_range_max(); - // mapping.iter().for_each(|(to_replace, replacement)| { - // // range.filter_min_recursion((*to_replace).into(), (*replacement).into()); - // // range.filter_max_recursion((*to_replace).into(), (*replacement).into()); - // }); - // latest.set_range(analyzer, range).unwrap(); - // while let Some(next) = latest.next_version(analyzer) { - // latest = next; - // let mut range = latest.range(analyzer).unwrap().unwrap(); - // range.uncache_range_min(); - // range.uncache_range_max(); - // mapping.iter().for_each(|(to_replace, replacement)| { - // // range.filter_min_recursion((*to_replace).into(), (*replacement).into()); - // // range.filter_max_recursion((*to_replace).into(), (*replacement).into()); - // }); - // latest.set_range(analyzer, range).unwrap(); - // } - // }); - - // } else { - // // need to process the function - // } - } - - pub fn is_ext_fn(&self, analyzer: &impl GraphLike) -> Result { - Ok(self.underlying(analyzer)?.ext_fn_call.is_some()) - } - - pub fn add_var( - &self, - var: ContextVarNode, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - let name = var.name(analyzer)?; - let vars = &mut self.underlying_mut(analyzer)?.cache.vars; - vars.insert(name, var); - Ok(()) - } - - pub fn first_ancestor( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - if let Some(first_ancestor) = self.underlying(analyzer)?.cache.first_ancestor { - Ok(first_ancestor) - } else if let Some(parent) = self.underlying(analyzer)?.parent_ctx { - let first = parent.first_ancestor(analyzer)?; - self.underlying_mut(analyzer)?.cache.first_ancestor = Some(first); - Ok(first) - } else { - Ok(*self) - } - } - - pub fn total_width( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - self.first_ancestor(analyzer)? 
- .number_of_live_edges(analyzer) - } - - pub fn unchecked(&self, analyzer: &impl GraphLike) -> Result { - Ok(self.underlying(analyzer)?.unchecked) - } - - pub fn set_unchecked( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - self.underlying_mut(analyzer)?.unchecked = true; - Ok(()) - } - - pub fn unset_unchecked( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - self.underlying_mut(analyzer)?.unchecked = false; - Ok(()) - } - - pub fn push_tmp_expr( - &self, - expr_ret: ExprRet, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - let underlying_mut = self.underlying_mut(analyzer)?; - underlying_mut.tmp_expr.push(Some(expr_ret)); - Ok(()) - } - - pub fn append_tmp_expr( - &self, - expr_ret: ExprRet, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - let underlying_mut = self.underlying_mut(analyzer)?; - match underlying_mut.tmp_expr.pop() { - Some(Some(s @ ExprRet::Single(_))) => { - underlying_mut - .tmp_expr - .push(Some(ExprRet::Multi(vec![s, expr_ret]))); - } - Some(Some(s @ ExprRet::SingleLiteral(_))) => { - underlying_mut - .tmp_expr - .push(Some(ExprRet::Multi(vec![s, expr_ret]))); - } - Some(Some(ExprRet::Multi(ref mut inner))) => { - inner.push(expr_ret); - underlying_mut - .tmp_expr - .push(Some(ExprRet::Multi(inner.to_vec()))); - } - Some(Some(s @ ExprRet::Null)) => { - underlying_mut - .tmp_expr - .push(Some(ExprRet::Multi(vec![s, expr_ret]))); - } - Some(Some(ExprRet::CtxKilled(kind))) => { - underlying_mut.tmp_expr = vec![Some(ExprRet::CtxKilled(kind))]; - underlying_mut.expr_ret_stack = vec![ExprRet::CtxKilled(kind)]; - } - _ => { - underlying_mut.tmp_expr.push(Some(expr_ret)); - } - } - Ok(()) - } - - pub fn pop_tmp_expr( - &self, - loc: Loc, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result, GraphError> { - let underlying_mut = self.underlying_mut(analyzer)?; - if let Some(Some(expr)) = underlying_mut.tmp_expr.pop() { - Ok(Some(self.maybe_move_expr(expr, loc, analyzer)?)) - } else { - Ok(None) - } - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn push_expr( - &self, - expr_ret: ExprRet, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - tracing::trace!( - "pushing: {}, existing: {:?}, path: {}", - expr_ret.debug_str(analyzer), - self.underlying(analyzer)? 
- .expr_ret_stack - .iter() - .map(|i| i.debug_str(analyzer)) - .collect::>(), - self.path(analyzer) - ); - let underlying_mut = self.underlying_mut(analyzer)?; - underlying_mut.expr_ret_stack.push(expr_ret); - Ok(()) - } - - pub fn maybe_move_expr( - &self, - expr: ExprRet, - loc: Loc, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - match expr { - ExprRet::SingleLiteral(var) => Ok(ExprRet::SingleLiteral( - self.maybe_move_var(var.into(), loc, analyzer)?.into(), - )), - ExprRet::Single(var) => Ok(ExprRet::Single( - self.maybe_move_var(var.into(), loc, analyzer)?.into(), - )), - ExprRet::Multi(inner) => Ok(ExprRet::Multi( - inner - .iter() - .map(|i| self.maybe_move_expr(i.clone(), loc, analyzer)) - .collect::>()?, - )), - e => Ok(e), - } - } - - pub fn maybe_move_var( - &self, - var: ContextVarNode, - loc: Loc, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - if let Some(ctx) = var.maybe_ctx(analyzer) { - if ctx != *self { - let mut new_cvar = var.latest_version(analyzer).underlying(analyzer)?.clone(); - new_cvar.loc = Some(loc); - - let new_cvarnode = analyzer.add_node(Node::ContextVar(new_cvar)); - analyzer.add_edge(new_cvarnode, *self, Edge::Context(ContextEdge::Variable)); - analyzer.add_edge( - new_cvarnode, - var.0, - Edge::Context(ContextEdge::InheritedVariable), - ); - Ok(new_cvarnode.into()) - } else { - Ok(var) - } - } else { - Ok(var) - } - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn pop_expr( - &self, - _loc: Loc, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result, GraphError> { - tracing::trace!("popping var from: {}", self.path(analyzer)); - let underlying_mut = self.underlying_mut(analyzer)?; - - let new: Vec = Vec::with_capacity(5); - - let old = std::mem::replace(&mut underlying_mut.expr_ret_stack, new); - if old.is_empty() { - Ok(None) - } else { - Ok(Some(ExprRet::Multi(old))) - } - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn pop_expr_latest( - &self, - loc: Loc, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result, GraphError> { - let underlying_mut = self.underlying_mut(analyzer)?; - if let Some(elem) = underlying_mut.expr_ret_stack.pop() { - tracing::trace!( - "popping var {} from: {}", - elem.debug_str(analyzer), - self.path(analyzer) - ); - Ok(Some(self.maybe_move_expr(elem, loc, analyzer)?)) - } else { - Ok(None) - } - } - - pub fn vars_assigned_from_fn_ret(&self, analyzer: &impl GraphLike) -> Vec { - self.local_vars(analyzer) - .iter() - .flat_map(|(_name, var)| var.return_assignments(analyzer)) - .collect() - } - - pub fn vars_assigned_from_ext_fn_ret(&self, analyzer: &impl GraphLike) -> Vec { - self.local_vars(analyzer) - .iter() - .flat_map(|(_name, var)| var.ext_return_assignments(analyzer)) - .collect() - } - - pub fn depth(&self, analyzer: &impl GraphLike) -> usize { - self.underlying(analyzer).unwrap().depth - } - - /// The path of the underlying context - pub fn path(&self, analyzer: &impl GraphLike) -> String { - self.underlying(analyzer).unwrap().path.clone() - } - - /// *All* subcontexts (including subcontexts of subcontexts, recursively) - pub fn subcontexts(&self, analyzer: &impl GraphLike) -> Vec { - let underlying = self.underlying(analyzer).unwrap(); - match underlying.child { - Some(CallFork::Call(c)) => vec![c], - Some(CallFork::Fork(w1, w2)) => vec![w1, w2], - None => vec![], - } - } - - /// Gets the associated contract for the function for the context - pub fn associated_contract( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - 
) -> Result { - Ok(self - .associated_fn(analyzer)? - .maybe_associated_contract(analyzer) - .expect("No associated contract for context")) - } - - /// Tries to get the associated function for the context - pub fn maybe_associated_contract( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result, GraphError> { - Ok(self - .associated_fn(analyzer)? - .maybe_associated_contract(analyzer)) - } - - pub fn maybe_associated_source( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Option { - let context = self.underlying(analyzer).unwrap(); - if let Some(src) = context.cache.associated_source { - Some(src) - } else if let Some(parent_ctx) = context.parent_ctx { - let src = parent_ctx.maybe_associated_source(analyzer)?; - self.underlying_mut(analyzer) - .unwrap() - .cache - .associated_source = Some(src); - Some(src) - } else { - let func = self.associated_fn(analyzer).unwrap(); - func.maybe_associated_source(analyzer) - } - } - - pub fn associated_source_unit_part( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - if let Some(sup) = self - .associated_fn(analyzer)? - .maybe_associated_source_unit_part(analyzer) - { - Ok(sup) - } else { - Err(GraphError::NodeConfusion( - "Expected context to have an associated source but didnt".to_string(), - )) - } - } - - /// Gets visible functions - pub fn visible_modifiers( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result, GraphError> { - // TODO: filter privates - let Some(source) = self.maybe_associated_source(analyzer) else { - return Err(GraphError::NodeConfusion("Expected context to have an associated source but didnt".to_string())) - }; - if let Some(contract) = self.maybe_associated_contract(analyzer)? { - let mut modifiers = contract.modifiers(analyzer); - // extend with free floating functions - modifiers.extend( - analyzer - .search_children_depth(source, &Edge::Modifier, 1, 0) - .into_iter() - .map(FunctionNode::from) - .collect::>(), - ); - - // extend with inherited functions - let inherited_contracts = analyzer.search_children_exclude_via( - contract.0.into(), - &Edge::InheritedContract, - &[Edge::Func], - ); - modifiers.extend( - inherited_contracts - .into_iter() - .flat_map(|inherited_contract| { - ContractNode::from(inherited_contract).modifiers(analyzer) - }) - .collect::>(), - ); - - let mut mapping: BTreeMap> = BTreeMap::new(); - for modifier in modifiers.iter() { - let entry = mapping.entry(modifier.name(analyzer)?).or_default(); - entry.insert(*modifier); - } - mapping - .into_values() - .map(|modifier_set| { - let as_vec = modifier_set.iter().collect::>(); - - if as_vec.len() > 2 { - println!("{}", as_vec.iter().map(|i| i.name(analyzer).unwrap()).collect::>().join(", ")); - panic!("3+ visible functions with the same name. 
This is invalid solidity, {as_vec:#?}") - } else if as_vec.len() == 2 { - as_vec[0].get_overriding(as_vec[1], analyzer) - } else { - Ok(*as_vec[0]) - } - }) - .collect() - } else { - // we are in a free floating function, only look at free floating functions - let Some(source) = self.maybe_associated_source(analyzer) else { - return Err(GraphError::NodeConfusion("Expected context to have an associated source but didnt".to_string())); - }; - Ok(analyzer - .search_children_depth(source, &Edge::Modifier, 1, 0) - .into_iter() - .map(FunctionNode::from) - .collect::>()) - } - } - - /// Gets visible functions - pub fn visible_funcs( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result, GraphError> { - // TODO: filter privates - if let Some(vis) = &self.underlying(analyzer)?.cache.visible_funcs { - return Ok(vis.clone()); - } - if let Some(contract) = self.maybe_associated_contract(analyzer)? { - let mut mapping = contract.linearized_functions(analyzer); - // extend with free floating functions - mapping.extend( - analyzer - .search_children_depth(analyzer.entry(), &Edge::Func, 2, 0) - .into_iter() - .filter_map(|i| { - let fn_node = FunctionNode::from(i); - if let Ok(name) = fn_node.name(analyzer) { - if !mapping.contains_key(&name) { - Some((name, fn_node)) - } else { - None - } - } else { - None - } - }) - .collect::>(), - ); - let funcs: Vec<_> = mapping.values().copied().collect(); - self.underlying_mut(analyzer)?.cache.visible_funcs = Some(funcs.clone()); - Ok(funcs) - } else { - // we are in a free floating function, only look at free floating functions - let funcs = analyzer - .search_children_depth(analyzer.entry(), &Edge::Func, 2, 0) - .into_iter() - .map(FunctionNode::from) - .collect::>(); - - self.underlying_mut(analyzer)?.cache.visible_funcs = Some(funcs.clone()); - Ok(funcs) - } - } - - /// Gets all visible functions - pub fn source_funcs( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Vec { - // TODO: filter privates - let Some(source) = self.maybe_associated_source(analyzer) else { - return vec![] - }; - analyzer - .search_children_exclude_via( - source, - &Edge::Func, - &[ - Edge::Context(ContextEdge::Context), - Edge::Context(ContextEdge::Variable), - ], - ) - .into_iter() - .map(FunctionNode::from) - .collect::>() - } - - /// Gets all visible structs - pub fn visible_structs( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Vec { - // TODO: filter privates - let Some(source) = self.maybe_associated_source(analyzer) else { - return vec![] - }; - - analyzer - .search_children_exclude_via(source, &Edge::Struct, &[Edge::Func]) - .into_iter() - .map(StructNode::from) - .collect::>() - } - - /// Gets the associated function for the context - pub fn associated_fn(&self, analyzer: &impl GraphLike) -> Result { - let underlying = self.underlying(analyzer)?; - if let Some(fn_call) = underlying.fn_call { - Ok(fn_call) - } else if let Some(ext_fn_call) = underlying.ext_fn_call { - Ok(ext_fn_call) - } else { - Ok(underlying.parent_fn) - } - } - - /// Checks whether a function is external to the current context - pub fn is_fn_ext( - &self, - fn_node: FunctionNode, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - match fn_node.maybe_associated_contract(analyzer) { - None => Ok(false), - Some(fn_ctrt) => { - if let Some(self_ctrt) = self - .associated_fn(analyzer)? - .maybe_associated_contract(analyzer) - { - Ok(Some(self_ctrt) != Some(fn_ctrt) - && !self_ctrt - .underlying(analyzer)? 
- .inherits - .iter() - .any(|inherited| *inherited == fn_ctrt)) - } else { - Ok(false) - } - } - } - } - - /// Gets the associated function name for the context - pub fn associated_fn_name(&self, analyzer: &impl GraphLike) -> Result { - self.associated_fn(analyzer)?.name(analyzer) - } - - /// Gets a mutable reference to the underlying context in the graph - pub fn underlying_mut<'a>( - &self, - analyzer: &'a mut (impl GraphLike + AnalyzerLike), - ) -> Result<&'a mut Context, GraphError> { - match analyzer.node_mut(*self) { - Node::Context(c) => Ok(c), - e => Err(GraphError::NodeConfusion(format!( - "Node type confusion: expected node to be Context but it was: {e:?}" - ))), - } - } - - /// Gets an immutable reference to the underlying context in the graph - pub fn underlying<'a>(&self, analyzer: &'a impl GraphLike) -> Result<&'a Context, GraphError> { - match analyzer.node(*self) { - Node::Context(c) => Ok(c), - e => Err(GraphError::NodeConfusion(format!( - "Node type confusion: expected node to be Context but it was: {e:?}" - ))), - } - } - - /// Gets a variable by name in the context - pub fn var_by_name(&self, analyzer: &impl GraphLike, name: &str) -> Option { - self.underlying(analyzer) - .unwrap() - .cache - .vars - .get(name) - .copied() - } - - pub fn var_by_name_or_recurse( - &self, - analyzer: &impl GraphLike, - name: &str, - ) -> Result, GraphError> { - if let Some(var) = self.var_by_name(analyzer, name) { - Ok(Some(var)) - } else if let Some(parent) = self.ancestor_in_fn(analyzer, self.associated_fn(analyzer)?)? { - parent.var_by_name_or_recurse(analyzer, name) - } else { - Ok(None) - } - } - - pub fn ancestor_in_fn( - &self, - analyzer: &impl GraphLike, - associated_fn: FunctionNode, - ) -> Result, GraphError> { - if let Some(ret) = self.underlying(analyzer)?.returning_ctx { - if ret.associated_fn(analyzer)? == associated_fn { - return Ok(Some(ret)); - } - } - - if let Some(parent) = self.underlying(analyzer)?.parent_ctx { - if parent.associated_fn(analyzer)? 
== associated_fn { - Ok(Some(parent)) - } else if let Some(mod_state) = &parent.underlying(analyzer)?.modifier_state { - if mod_state.parent_fn == associated_fn { - Ok(Some(parent)) - } else { - parent.ancestor_in_fn(analyzer, associated_fn) - } - } else { - parent.ancestor_in_fn(analyzer, associated_fn) - } - } else { - Ok(None) - } - } - - /// Gets all variables associated with a context - pub fn vars<'a>(&self, analyzer: &'a impl GraphLike) -> &'a BTreeMap { - &self.underlying(analyzer).unwrap().cache.vars - } - - /// Gets all variables associated with a context - pub fn local_vars<'a>( - &self, - analyzer: &'a impl GraphLike, - ) -> &'a BTreeMap { - self.vars(analyzer) - } - - /// Gets the latest version of a variable associated with a context - pub fn latest_var_by_name( - &self, - analyzer: &impl GraphLike, - name: &str, - ) -> Option { - self.var_by_name(analyzer, name) - .map(|var| var.latest_version(analyzer)) - } - - /// Reads the current temporary counter and increments the counter - pub fn new_tmp( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - let context = self.underlying_mut(analyzer)?; - let ret = context.tmp_var_ctr; - context.tmp_var_ctr += 1; - Ok(ret) - } - - /// Returns all forks associated with the context - pub fn calls(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - let descendents = self.descendents(analyzer)?; - Ok(descendents - .into_iter() - .filter_map(|c| c.maybe_call()) - .collect()) - } - - /// Returns all forks associated with the context - // pub fn forks(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - // todo!() - // let descendents = self.descendents(analyzer)?; - // Ok(descendents.into_iter().filter_map(|c| c.maybe_fork()).collect()) - // } - - // /// Returns all *live* forks associated with the context - // pub fn live_edges(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - // let forks = self.forks(analyzer)?; - // let mut live = vec![]; - // for fork in forks { - // if !fork.is_ended(analyzer)? { - // live.push(fork); - // } - // } - // Ok(live) - // } - - /// Returns tail contexts associated with the context - pub fn live_edges(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - if let Some(child) = self.underlying(analyzer)?.child { - let mut lineage = vec![]; - match child { - CallFork::Call(call) => { - let call_edges = call.live_edges(analyzer)?; - if call_edges.is_empty() && !call.is_ended(analyzer)? { - lineage.push(call) - } else { - lineage.extend(call_edges); - } - } - CallFork::Fork(w1, w2) => { - let fork_edges = w1.live_edges(analyzer)?; - if fork_edges.is_empty() && !w1.is_ended(analyzer)? { - lineage.push(w1) - } else { - lineage.extend(fork_edges); - } - - let fork_edges = w2.live_edges(analyzer)?; - if fork_edges.is_empty() && !w2.is_ended(analyzer)? { - lineage.push(w2) - } else { - lineage.extend(fork_edges); - } - } - } - Ok(lineage) - } else { - Ok(vec![]) - } - } - - pub fn reverted_edges(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - if let Some(child) = self.underlying(analyzer)?.child { - let mut lineage = vec![]; - match child { - CallFork::Call(call) => { - let call_edges = call.reverted_edges(analyzer)?; - if call_edges.is_empty() && call.is_killed(analyzer)? { - lineage.push(call) - } else { - lineage.extend(call_edges); - } - } - CallFork::Fork(w1, w2) => { - let fork_edges = w1.reverted_edges(analyzer)?; - if fork_edges.is_empty() && w1.is_killed(analyzer)? 
{ - lineage.push(w1) - } else { - lineage.extend(fork_edges); - } - - let fork_edges = w2.reverted_edges(analyzer)?; - if fork_edges.is_empty() && w2.is_killed(analyzer)? { - lineage.push(w2) - } else { - lineage.extend(fork_edges); - } - } - } - Ok(lineage) - } else { - Ok(vec![]) - } - } - - pub fn number_of_live_edges(&self, analyzer: &impl GraphLike) -> Result { - Ok(self.underlying(analyzer)?.number_of_live_edges) - // if let Some(child) = self.underlying(analyzer)?.child { - // let mut edges = 0; - // match child { - // CallFork::Call(call) => { - // let call_edges = call.number_of_live_edges(analyzer)?; - // if call_edges == 0 && !call.is_ended(analyzer)? { - // edges += 1; - // } else { - // edges += call_edges; - // } - // } - // CallFork::Fork(w1, w2) => { - // let fork_edges = w1.number_of_live_edges(analyzer)?; - // if fork_edges == 0 && !w1.is_ended(analyzer)? { - // edges += 1; - // } else { - // edges += fork_edges; - // } - - // let fork_edges = w2.number_of_live_edges(analyzer)?; - // if fork_edges == 0 && !w2.is_ended(analyzer)? { - // edges += 1; - // } else { - // edges += fork_edges; - // } - // } - // } - // Ok(edges) - // } else { - // Ok(0) - // } - } - - /// Returns tail contexts associated with the context - pub fn all_edges(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - if let Some(child) = self.underlying(analyzer)?.child { - let mut lineage = vec![]; - match child { - CallFork::Call(call) => { - let call_edges = call.all_edges(analyzer)?; - if call_edges.is_empty() { - lineage.push(call) - } else { - lineage.extend(call_edges); - } - } - CallFork::Fork(w1, w2) => { - let fork_edges = w1.all_edges(analyzer)?; - if fork_edges.is_empty() { - lineage.push(w1) - } else { - lineage.extend(fork_edges); - } - - let fork_edges = w2.all_edges(analyzer)?; - if fork_edges.is_empty() { - lineage.push(w2) - } else { - lineage.extend(fork_edges); - } - } - } - Ok(lineage) - } else { - Ok(vec![]) - } - } - - pub fn descendents(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - if let Some(child) = self.underlying(analyzer)?.child { - let mut descendents = vec![child]; - match child { - CallFork::Call(c) => descendents.extend(c.descendents(analyzer)?), - CallFork::Fork(w1, w2) => { - descendents.extend(w1.descendents(analyzer)?); - descendents.extend(w2.descendents(analyzer)?); - } - } - Ok(descendents) - } else { - Ok(vec![]) - } - } - - /// Adds a fork to the context - pub fn set_child_fork( - &self, - w1: ContextNode, - w2: ContextNode, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - assert!(matches!(analyzer.node(w1), Node::Context(_))); - assert!(matches!(analyzer.node(w2), Node::Context(_))); - assert!(*self != w1 && *self != w2, "Tried to set child to self"); - let context = self.underlying_mut(analyzer)?; - if !context.set_child_fork(w1, w2) { - let child_str = match context.child { - Some(CallFork::Fork(w1, w2)) => { - format!("fork {{ {}, {} }}", w1.path(analyzer), w2.path(analyzer)) - } - Some(CallFork::Call(call)) => format!("call {{ {} }}", call.path(analyzer)), - None => unreachable!(), - }; - Err(GraphError::ChildRedefinition(format!( - "This is a bug. 
Tried to redefine a child context, parent:\n{}, current child:\n{},\nnew child: Fork({}, {})", - self.path(analyzer), - child_str, - w1.path(analyzer), - w2.path(analyzer), - ))) - } else { - Ok(()) - } - } - - /// Adds a child to the context - pub fn set_child_call( - &self, - call: ContextNode, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - assert!(matches!(analyzer.node(call), Node::Context(_))); - assert!(*self != call, "Tried to set child to self"); - let context = self.underlying_mut(analyzer)?; - if !context.set_child_call(call) { - let child_str = match context.child { - Some(CallFork::Fork(w1, w2)) => { - format!("fork {{ {}, {} }}", w1.path(analyzer), w2.path(analyzer)) - } - Some(CallFork::Call(call)) => format!("call {{ {} }}", call.path(analyzer)), - None => unreachable!(), - }; - tracing::trace!("Error setting child as a call"); - Err(GraphError::ChildRedefinition(format!( - "This is a bug. Tried to redefine a child context, parent: {}, current child: {}, new child: {}", - self.path(analyzer), - child_str, - call.path(analyzer) - ) - )) - } else { - Ok(()) - } - } - - pub fn delete_child( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - if let Some(child) = self.underlying(analyzer)?.child { - match child { - CallFork::Fork(w1, w2) => { - w1.propogate_end(analyzer)?; - w2.propogate_end(analyzer)?; - } - CallFork::Call(c) => { - c.propogate_end(analyzer)?; - } - } - } - let context = self.underlying_mut(analyzer)?; - context.delete_child(); - Ok(()) - } - - /// Kills the context by denoting it as killed. Recurses up the contexts and kills - /// parent contexts if all subcontexts of that context are killed - pub fn kill( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - kill_loc: Loc, - kill_kind: KilledKind, - ) -> Result<(), GraphError> { - tracing::trace!("killing: {}", self.path(analyzer)); - if let Some(child) = self.underlying(analyzer)?.child { - match child { - CallFork::Call(call) => { - if !call.underlying(analyzer)?.ret.is_empty() { - return Ok(()); - } - call.kill(analyzer, kill_loc, kill_kind)?; - } - CallFork::Fork(w1, w2) => { - if !w1.underlying(analyzer)?.ret.is_empty() { - return Ok(()); - } - - if !w2.underlying(analyzer)?.ret.is_empty() { - return Ok(()); - } - - w1.kill(analyzer, kill_loc, kill_kind)?; - w2.kill(analyzer, kill_loc, kill_kind)?; - } - } - } - - let context = self.underlying_mut(analyzer)?; - let parent = context.parent_ctx; - if context.killed.is_none() { - context.killed = Some((kill_loc, kill_kind)); - } - - if let Some(parent_ctx) = parent { - parent_ctx.end_if_all_forks_ended(analyzer, kill_loc, kill_kind)?; - } - - self.propogate_end(analyzer)?; - - Ok(()) - } - - /// Kills if and only if all subcontexts are killed - pub fn end_if_all_forks_ended( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - kill_loc: Loc, - kill_kind: KilledKind, - ) -> Result<(), GraphError> { - let all_edges = self.all_edges(analyzer)?; - let reverted_edges = self.reverted_edges(analyzer)?; - if reverted_edges.len() == all_edges.len() { - tracing::trace!("killing recursively: {}", self.path(analyzer)); - let context = self.underlying_mut(analyzer)?; - if context.ret.is_empty() { - if context.killed.is_none() { - context.killed = Some((kill_loc, kill_kind)); - } - if let Some(parent_ctx) = context.parent_ctx { - parent_ctx.end_if_all_forks_ended(analyzer, kill_loc, kill_kind)?; - } - } - } - Ok(()) - } - - /// Gets parent list - pub fn parent_list(&self, 
analyzer: &impl GraphLike) -> Result, GraphError> { - let context = self.underlying(analyzer)?; - let mut parents = vec![]; - if let Some(parent_ctx) = context.parent_ctx { - parents.push(parent_ctx); - parents.extend(parent_ctx.parent_list(analyzer)?); - } - Ok(parents) - } - - pub fn recursive_calls( - &self, - analyzer: &impl GraphLike, - ) -> Result, GraphError> { - // Ok( - let calls = self.calls(analyzer)?; - Ok(calls - .iter() - .flat_map(|call| { - let mut inner_calls = call.recursive_calls(analyzer).unwrap(); - inner_calls.insert(0, *call); - inner_calls - }) - .collect::>()) - } - - /// Gets the lineage for a context - /// A lineage is of the form `[ancestor N, .. , ancestor0, SELF, call0, .., call N]`. It - /// gives the user a full picture of control flow - pub fn lineage( - &self, - _analyzer: &impl GraphLike, - _entry: bool, - ) -> Result, GraphError> { - todo!() - } - - pub fn terminal_child_list( - &self, - analyzer: &impl GraphLike, - ) -> Result, GraphError> { - let calls = self.calls(analyzer)?; - if calls.is_empty() { - Ok(vec![*self]) - } else { - let mut children = vec![]; - - for child in calls.into_iter() { - children.extend(child.terminal_child_list(analyzer)?) - } - Ok(children) - } - } - - /// Returns whether the context is killed - pub fn is_killed(&self, analyzer: &impl GraphLike) -> Result { - Ok(self.underlying(analyzer)?.killed.is_some()) - } - - /// Returns whether the context is killed - pub fn is_ended(&self, analyzer: &impl GraphLike) -> Result { - let underlying = self.underlying(analyzer)?; - Ok(underlying.child.is_some() || underlying.killed.is_some() || !underlying.ret.is_empty()) - } - - pub fn killed_or_ret(&self, analyzer: &impl GraphLike) -> Result { - let underlying = self.underlying(analyzer)?; - Ok(underlying.killed.is_some() - || (!underlying.ret.is_empty() && underlying.modifier_state.is_none())) - } - - /// Returns an option to where the context was killed - pub fn killed_loc( - &self, - analyzer: &impl GraphLike, - ) -> Result, GraphError> { - Ok(self.underlying(analyzer)?.killed) - } - - /// Returns a map of variable dependencies for this context - pub fn ctx_deps( - &self, - analyzer: &impl GraphLike, - ) -> Result, GraphError> { - Ok(self.underlying(analyzer)?.ctx_deps.clone()) - } - - /// Returns a vector of variable dependencies for this context - pub fn add_ctx_dep( - &self, - dep: ContextVarNode, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - tracing::trace!("Adding ctx dependency: {}", dep.display_name(analyzer)?); - if dep.is_symbolic(analyzer)? 
{ - let dep_name = dep.name(analyzer)?; - let underlying = self.underlying_mut(analyzer)?; - underlying.ctx_deps.insert(dep_name, dep); - } - Ok(()) - } - - pub fn add_return_node( - &self, - ret_stmt_loc: Loc, - ret: ContextVarNode, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - self.underlying_mut(analyzer)?.ret.push((ret_stmt_loc, ret)); - self.propogate_end(analyzer)?; - Ok(()) - } - - pub fn propogate_end( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - let underlying = &mut self.underlying_mut(analyzer)?; - let curr_live = underlying.number_of_live_edges; - underlying.number_of_live_edges = 0; - if let Some(parent) = self.underlying(analyzer)?.parent_ctx { - let live_edges = &mut parent.underlying_mut(analyzer)?.number_of_live_edges; - *live_edges = live_edges.saturating_sub(1 + curr_live); - parent.propogate_end(analyzer)?; - } - Ok(()) - } - - pub fn return_nodes( - &self, - analyzer: &impl GraphLike, - ) -> Result, GraphError> { - Ok(self.underlying(analyzer)?.ret.clone()) - } - - pub fn as_string(&mut self) -> String { - "Context".to_string() - } - - pub fn deps_dag(&self, g: &impl GraphLike) -> Result<(), GraphError> { - let deps = self.ctx_deps(g)?; - #[derive(Debug, Copy, Clone)] - pub enum DepEdge { - Lhs, - Rhs, - } - - let mut gr: petgraph::Graph = - petgraph::Graph::default(); - deps.iter().try_for_each(|(_, dep)| { - let mapping = dep.graph_dependent_on(g)?; - mapping.into_iter().for_each(|(k, tmp)| { - let top = gr.add_node(k.into()); - let lhs = gr.add_node(tmp.lhs.into()); - gr.add_edge(top, lhs, DepEdge::Lhs); - if let Some(rhs) = tmp.rhs { - let rhs = gr.add_node(rhs.into()); - gr.add_edge(top, rhs, DepEdge::Rhs); - } - }); - Ok(()) - })?; - - let mut dot_str = Vec::new(); - let raw_start_str = r##"digraph G { - node [shape=box, style="filled, rounded", color="#565f89", fontcolor="#d5daf0", fontname="Helvetica", fillcolor="#24283b"]; - edge [color="#414868", fontcolor="#c0caf5", fontname="Helvetica"]; - bgcolor="#1a1b26";"##; - dot_str.push(raw_start_str.to_string()); - let nodes_and_edges_str = format!( - "{:?}", - Dot::with_attr_getters( - &gr, - &[ - petgraph::dot::Config::GraphContentOnly, - petgraph::dot::Config::NodeNoLabel, - petgraph::dot::Config::EdgeNoLabel - ], - &|_graph, edge_ref| { - let e = edge_ref.weight(); - format!("label = \"{e:?}\"") - }, - &|_graph, (idx, node_ref)| { - let inner = match g.node(*node_ref) { - Node::ContextVar(cvar) => { - let range_str = if let Some(r) = cvar.ty.ref_range(g).unwrap() { - r.as_dot_str(g) - // format!("[{}, {}]", r.min.eval(self).to_range_string(self).s, r.max.eval(self).to_range_string(self).s) - } else { - "".to_string() - }; - - format!( - "{} -- {} -- range: {}", - cvar.display_name, - cvar.ty.as_string(g).unwrap(), - range_str - ) - } - _ => as_dot_str(idx, g), - }; - format!( - "label = \"{}\", color = \"{}\"", - inner.replace('\"', "\'"), - g.node(*node_ref).dot_str_color() - ) - } - ) - ); - dot_str.push(nodes_and_edges_str); - let raw_end_str = r#"}"#; - dot_str.push(raw_end_str.to_string()); - let dot_str = dot_str.join("\n"); - - println!("{dot_str}"); - use std::env::temp_dir; - use std::fs; - use std::io::Write; - use std::process::Command; - let mut dir = temp_dir(); - let file_name = "dot.dot"; - dir.push(file_name); - - let mut file = fs::File::create(dir.clone()).unwrap(); - file.write_all(dot_str.as_bytes()).unwrap(); - Command::new("dot") - .arg("-Tsvg") - .arg(dir) - .arg("-o") - .arg("dot.svg") - .output() - 
.expect("failed to execute process"); - Command::new("open") - .arg("dot.svg") - .output() - .expect("failed to execute process"); - Ok(()) - } -} - -impl From for NodeIdx { - fn from(val: ContextNode) -> Self { - val.0.into() - } -} - -impl From for ContextNode { - fn from(idx: NodeIdx) -> Self { - ContextNode(idx.index()) - } -} - -// 2023-05-13T04:28:34.318383Z TRACE parse:parse_ctx_stmt_inner:func_call_inner:execute_call_inner:parse_ctx_stmt_inner:parse_ctx_stmt_inner:parse_ctx_expr_inner{ctx=getAtProbablyRecentBlock(History, uint256).toUint32(uint256).resume{ getAtProbablyRecentBlock(History, uint256) }.fork{ true }.sqrt(uint256).resume{ getAtProbablyRecentBlock(History, uint256) }}:fn_call_expr:call_internal_func:func_call_inner:execute_call_inner:parse_ctx_stmt_inner:parse_ctx_stmt_inner: pyrometer::context: Applying to live edges of: getAtProbablyRecentBlock(History, uint256).toUint32(uint256).resume{ getAtProbablyRecentBlock(History, uint256) }.fork{ true }.sqrt(uint256).resume{ getAtProbablyRecentBlock(History, uint256) }._upperBinaryLookup(Checkpoint[], uint32, uint256, uint256).anonymous_fn_call. edges: [ -// "getAtProbablyRecentBlock(History, uint256).toUint32(uint256).resume{ getAtProbablyRecentBlock(History, uint256) }.fork{ true }.sqrt(uint256).resume{ getAtProbablyRecentBlock(History, uint256) }._upperBinaryLookup(Checkpoint[], uint32, uint256, uint256).anonymous_fn_call.average(uint256, uint256).resume{ _upperBinaryLookup(Checkpoint[], uint32, uint256, uint256) }.fork{ true }._unsafeAccess(Checkpoint[], uint256).resume{ _upperBinaryLookup(Checkpoint[], uint32, uint256, uint256) }", -// "getAtProbablyRecentBlock(History, uint256).toUint32(uint256).resume{ getAtProbablyRecentBlock(History, uint256) }.fork{ true }.sqrt(uint256).resume{ getAtProbablyRecentBlock(History, uint256) }._upperBinaryLookup(Checkpoint[], uint32, uint256, uint256).anonymous_fn_call.average(uint256, uint256).resume{ _upperBinaryLookup(Checkpoint[], uint32, uint256, uint256) }.fork{ false }._unsafeAccess(Checkpoint[], uint256).resume{ _upperBinaryLookup(Checkpoint[], uint32, uint256, uint256) }", -// "getAtProbablyRecentBlock(History, uint256).toUint32(uint256).resume{ getAtProbablyRecentBlock(History, uint256) }.fork{ true }.sqrt(uint256).resume{ getAtProbablyRecentBlock(History, uint256) }._upperBinaryLookup(Checkpoint[], uint32, uint256, uint256)" -// ] -// 2023-05-13T04:28:34.318505Z TRACE parse:parse_ctx_stmt_inner:func_call_inner:execute_call_inner:parse_ctx_stmt_inner:parse_ctx_stmt_inner:parse_ctx_expr_inner{ctx=getAtProbablyRecentBlock(History, uint256).toUint32(uint256).resume{ getAtProbablyRecentBlock(History, uint256) }.fork{ true }.sqrt(uint256).resume{ getAtProbablyRecentBlock(History, uint256) }}:fn_call_expr:call_internal_func:func_call_inner:execute_call_inner:parse_ctx_stmt_inner:parse_ctx_stmt_inner:advance_var_in_ctx{ctx=getAtProbablyRecentBlock(History, uint256).toUint32(uint256).resume{ getAtProbablyRecentBlock(History, uint256) }.fork{ true }.sqrt(uint256).resume{ getAtProbablyRecentBlock(History, uint256) }._upperBinaryLookup(Checkpoint[], uint32, uint256, uint256)}: pyrometer::context: advancing variable: high -// thread 'main' panicked at 'Variable update of high in old context: -// parent: -// , child: Call( -// ContextNode( -// 140171, -// ), -// )' diff --git a/shared/src/context/var.rs b/shared/src/context/var.rs deleted file mode 100644 index 8c227bb4..00000000 --- a/shared/src/context/var.rs +++ /dev/null @@ -1,1506 +0,0 @@ -use crate::analyzer::{AnalyzerLike, GraphLike}; 
-use crate::context::GraphError; -use crate::range::elem::RangeElem; -use crate::TyNode; - -use crate::range::elem_ty::Elem; -use crate::range::elem_ty::RangeConcrete; -use crate::range::range_string::ToRangeString; -use crate::range::Range; -use crate::range::SolcRange; -use std::collections::BTreeMap; - -use crate::AsDotStr; -use crate::BuiltInNode; -use crate::Builtin; -use crate::Concrete; -use crate::ContractNode; -use crate::EnumNode; -use crate::FunctionNode; - -use crate::StructNode; -use crate::TypeNode; -use crate::{ - analyzer::Search, context::ContextNode, nodes::ConcreteNode, range::elem::RangeOp, ContextEdge, - Edge, Field, FunctionParam, FunctionReturn, Node, NodeIdx, VarType, -}; - -use petgraph::visit::EdgeRef; -use petgraph::Direction; -use solang_parser::pt::{Loc, StorageLocation}; - -#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] -pub struct ContextVarNode(pub usize); -impl AsDotStr for ContextVarNode { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String { - let underlying = self.underlying(analyzer).unwrap(); - - let range_str = if let Some(r) = underlying.ty.ref_range(analyzer).unwrap() { - format!( - "[{}, {}]", - r.evaled_range_min(analyzer) - .unwrap() - .to_range_string(false, analyzer) - .s, - r.evaled_range_max(analyzer) - .unwrap() - .to_range_string(true, analyzer) - .s - ) - } else { - "".to_string() - }; - - format!( - "{} - {} -- {} -- range: {}", - underlying.display_name, - self.0, - underlying.ty.as_string(analyzer).unwrap(), - range_str - ) - } -} - -impl From for NodeIdx { - fn from(val: ContextVarNode) -> Self { - val.0.into() - } -} - -impl From for ContextVarNode { - fn from(idx: NodeIdx) -> Self { - ContextVarNode(idx.index()) - } -} - -impl ContextVarNode { - pub fn underlying<'a>( - &self, - analyzer: &'a impl GraphLike, - ) -> Result<&'a ContextVar, GraphError> { - match analyzer.node(*self) { - Node::ContextVar(c) => Ok(c), - e => Err(GraphError::NodeConfusion(format!( - "Node type confusion: expected node to be ContextVar but it was: {e:?}" - ))), - } - } - - pub fn underlying_mut<'a>( - &self, - analyzer: &'a mut impl GraphLike, - ) -> Result<&'a mut ContextVar, GraphError> { - match analyzer.node_mut(*self) { - Node::ContextVar(c) => Ok(c), - e => Err(GraphError::NodeConfusion(format!( - "Node type confusion: expected node to be ContextVar but it was: {e:?}" - ))), - } - } - - pub fn storage<'a>( - &self, - analyzer: &'a impl GraphLike, - ) -> Result<&'a Option, GraphError> { - Ok(&self.underlying(analyzer)?.storage) - } - - pub fn is_storage(&self, analyzer: &impl GraphLike) -> Result { - Ok(matches!( - self.underlying(analyzer)?.storage, - Some(StorageLocation::Storage(..)) - )) - } - - pub fn ty<'a>(&self, analyzer: &'a impl GraphLike) -> Result<&'a VarType, GraphError> { - Ok(&self.underlying(analyzer)?.ty) - } - - pub fn is_mapping(&self, analyzer: &impl GraphLike) -> Result { - self.ty(analyzer)?.is_mapping(analyzer) - } - - pub fn is_dyn(&self, analyzer: &impl GraphLike) -> Result { - self.ty(analyzer)?.is_dyn(analyzer) - } - - pub fn is_indexable(&self, analyzer: &impl GraphLike) -> Result { - self.ty(analyzer)?.is_indexable(analyzer) - } - - pub fn loc(&self, analyzer: &impl GraphLike) -> Result { - Ok(self - .underlying(analyzer)? 
- .loc - .expect("No loc for ContextVar")) - } - - pub fn ctx(&self, analyzer: &impl GraphLike) -> ContextNode { - ContextNode::from( - analyzer - .search_for_ancestor(self.0.into(), &Edge::Context(ContextEdge::Variable)) - .into_iter() - .take(1) - .next() - .expect("No associated ctx"), - ) - } - - pub fn maybe_ctx(&self, analyzer: &impl GraphLike) -> Option { - let first = self.first_version(analyzer); - analyzer - .graph() - .edges_directed(first.0.into(), Direction::Outgoing) - .filter(|edge| *edge.weight() == Edge::Context(ContextEdge::Variable)) - .map(|edge| ContextNode::from(edge.target())) - .take(1) - .next() - // Some(ContextNode::from( - // analyzer - // .search_for_ancestor(self.0.into(), &Edge::Context(ContextEdge::Variable)) - // .into_iter() - // .take(1) - // .next()?, - // )) - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn update_deps( - &mut self, - ctx: ContextNode, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - if let Some(mut range) = self.range(analyzer)? { - range.update_deps(*self, ctx, analyzer); - self.set_range_min(analyzer, range.min)?; - self.set_range_max(analyzer, range.max)?; - } - Ok(()) - } - - pub fn name(&self, analyzer: &impl GraphLike) -> Result { - Ok(self.underlying(analyzer)?.name.clone()) - } - - pub fn display_name(&self, analyzer: &impl GraphLike) -> Result { - Ok(self.underlying(analyzer)?.display_name.clone()) - } - - pub fn range(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - self.underlying(analyzer)?.ty.range(analyzer) - } - - pub fn ref_range<'a>( - &self, - analyzer: &'a impl GraphLike, - ) -> Result>, GraphError> { - self.underlying(analyzer)?.ty.ref_range(analyzer) - } - - pub fn range_min( - &self, - analyzer: &impl GraphLike, - ) -> Result>, GraphError> { - if let Some(r) = self.ref_range(analyzer)? { - Ok(Some(r.range_min().into_owned())) - } else { - Ok(None) - } - } - - pub fn range_max( - &self, - analyzer: &impl GraphLike, - ) -> Result>, GraphError> { - if let Some(r) = self.ref_range(analyzer)? { - Ok(Some(r.range_max().into_owned())) - } else { - Ok(None) - } - } - - pub fn evaled_range_min( - &self, - analyzer: &impl GraphLike, - ) -> Result>, GraphError> { - if let Some(r) = self.ref_range(analyzer)? { - Ok(Some(r.evaled_range_min(analyzer)?)) - } else { - Ok(None) - } - } - - pub fn evaled_range_max( - &self, - analyzer: &impl GraphLike, - ) -> Result>, GraphError> { - if let Some(r) = self.ref_range(analyzer)? 
{ - Ok(Some(r.evaled_range_max(analyzer)?)) - } else { - Ok(None) - } - } - - pub fn return_assignments(&self, analyzer: &impl GraphLike) -> Vec { - let latest = self.latest_version(analyzer); - let mut earlier = latest; - let mut return_assignments = vec![]; - while let Some(prev) = earlier.previous_version(analyzer) { - if earlier.is_return_assignment(analyzer) { - return_assignments.push(earlier) - } - earlier = prev; - } - return_assignments - } - - pub fn ext_return_assignments(&self, analyzer: &impl GraphLike) -> Vec { - let latest = self.latest_version(analyzer); - let mut earlier = latest; - let mut return_assignments = vec![]; - if earlier.is_ext_return_assignment(analyzer) { - return_assignments.push(earlier) - } - while let Some(prev) = earlier.previous_version(analyzer) { - earlier = prev; - if earlier.is_ext_return_assignment(analyzer) { - return_assignments.push(earlier) - } - } - return_assignments - } - - pub fn is_return_assignment(&self, analyzer: &impl GraphLike) -> bool { - analyzer - .graph() - .edges_directed(self.0.into(), Direction::Incoming) - .any(|edge| { - Edge::Context(ContextEdge::ReturnAssign(true)) == *edge.weight() - || Edge::Context(ContextEdge::ReturnAssign(false)) == *edge.weight() - }) - } - - pub fn is_ext_return_assignment(&self, analyzer: &impl GraphLike) -> bool { - analyzer - .graph() - .edges_directed(self.0.into(), Direction::Incoming) - .any(|edge| Edge::Context(ContextEdge::ReturnAssign(true)) == *edge.weight()) - } - - pub fn is_const(&self, analyzer: &impl GraphLike) -> Result { - let underlying = self.underlying(analyzer)?; - underlying.ty.is_const(analyzer) - } - - pub fn is_symbolic(&self, analyzer: &impl GraphLike) -> Result { - Ok(self.underlying(analyzer)?.is_symbolic) - } - - pub fn is_tmp(&self, analyzer: &impl GraphLike) -> Result { - let underlying = self.underlying(analyzer)?; - Ok(underlying.is_tmp()) - } - - pub fn is_return_node(&self, analyzer: &impl GraphLike) -> Result { - if let Some(ctx) = self.maybe_ctx(analyzer) { - return Ok(ctx - .underlying(analyzer)? 
- .ret - .iter() - .any(|(_, node)| node.name(analyzer).unwrap() == self.name(analyzer).unwrap())); - } - Ok(false) - } - - pub fn is_return_node_in_any(&self, ctxs: &[ContextNode], analyzer: &impl GraphLike) -> bool { - ctxs.iter().any(|ctx| { - ctx.underlying(analyzer) - .unwrap() - .ret - .iter() - .any(|(_, node)| node.name(analyzer).unwrap() == self.name(analyzer).unwrap()) - }) - } - - pub fn tmp_of(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - Ok(self.underlying(analyzer)?.tmp_of()) - } - - pub fn is_len_var(&self, analyzer: &impl GraphLike) -> Result { - Ok(self.name(analyzer)?.ends_with(".length") - && analyzer - .search_for_ancestor( - self.first_version(analyzer).into(), - &Edge::Context(ContextEdge::AttrAccess), - ) - .is_some()) - } - - pub fn is_array_index_access(&self, analyzer: &impl GraphLike) -> bool { - analyzer - .search_for_ancestor( - self.first_version(analyzer).into(), - &Edge::Context(ContextEdge::IndexAccess), - ) - .is_some() - } - - pub fn len_var_to_array( - &self, - analyzer: &impl GraphLike, - ) -> Result, GraphError> { - if self.name(analyzer)?.ends_with(".length") { - if let Some(arr) = analyzer.search_for_ancestor( - self.first_version(analyzer).into(), - &Edge::Context(ContextEdge::AttrAccess), - ) { - Ok(Some(ContextVarNode::from(arr).latest_version(analyzer))) - } else { - Ok(None) - } - } else { - Ok(None) - } - } - - pub fn index_to_array(&self, analyzer: &impl GraphLike) -> Option { - let arr = analyzer - .graph() - .edges_directed(self.first_version(analyzer).into(), Direction::Outgoing) - .filter(|edge| *edge.weight() == Edge::Context(ContextEdge::IndexAccess)) - .map(|edge| edge.target()) - .take(1) - .next()?; - Some(ContextVarNode::from(arr).latest_version(analyzer)) - } - - pub fn index_access_to_index(&self, analyzer: &impl GraphLike) -> Option { - let index = analyzer.find_child_exclude_via( - self.first_version(analyzer).into(), - &Edge::Context(ContextEdge::Index), - &[], - &|idx, _| Some(idx), - )?; - Some(ContextVarNode::from(index)) - } - - pub fn as_range_elem( - &self, - analyzer: &impl GraphLike, - loc: Loc, - ) -> Result, GraphError> { - match self.underlying(analyzer)?.ty { - VarType::Concrete(c) => Ok(Elem::Concrete(RangeConcrete { - val: c.underlying(analyzer)?.clone(), - loc, - })), - _ => Ok(Elem::from(*self)), - } - } - - pub fn cache_range(&self, analyzer: &mut impl GraphLike) -> Result<(), GraphError> { - if let Some(mut range) = self.range(analyzer)? 
{ - range.cache_eval(analyzer)?; - self.set_range(analyzer, range)?; - } - Ok(()) - } - - pub fn set_range( - &self, - analyzer: &mut impl GraphLike, - new_range: SolcRange, - ) -> Result<(), GraphError> { - let underlying = self.underlying_mut(analyzer)?; - underlying.set_range(new_range); - Ok(()) - } - - pub fn fallback_range( - &self, - analyzer: &mut impl GraphLike, - ) -> Result, GraphError> { - let underlying = self.underlying(analyzer)?; - underlying.fallback_range(analyzer) - } - - pub fn needs_fallback(&self, analyzer: &impl GraphLike) -> Result { - Ok(self.underlying(analyzer)?.needs_fallback()) - } - // #[tracing::instrument(level = "trace", skip_all)] - pub fn set_range_min( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - mut new_min: Elem, - ) -> Result<(), GraphError> { - if new_min.contains_node((*self).into()) { - if let Some(prev) = self.previous_or_inherited_version(analyzer) { - new_min.filter_recursion((*self).into(), prev.into()); - } else { - return Err(GraphError::UnbreakableRecursion(format!("The variable {}'s range is self-referential and we cannot break the recursion.", self.display_name(analyzer)?))); - } - } - - tracing::trace!( - "setting range minimum: {} (node idx: {}), current:\n{:#?}, new_min:\n{:#?}", - self.display_name(analyzer)?, - self.0, - self.range_min(analyzer)?, - new_min - ); - - if self.is_concrete(analyzer)? { - let mut new_ty = self.ty(analyzer)?.clone(); - new_ty.concrete_to_builtin(analyzer)?; - self.underlying_mut(analyzer)?.ty = new_ty; - self.set_range_min(analyzer, new_min)?; - } else { - let fallback = if self.needs_fallback(analyzer)? { - self.fallback_range(analyzer)? - } else { - None - }; - self.underlying_mut(analyzer)? - .set_range_min(new_min, fallback); - } - self.cache_range(analyzer)?; - Ok(()) - } - - // #[tracing::instrument(level = "trace", skip_all)] - pub fn set_range_max( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - mut new_max: Elem, - ) -> Result<(), GraphError> { - if new_max.contains_node((*self).into()) { - if let Some(prev) = self.previous_or_inherited_version(analyzer) { - new_max.filter_recursion((*self).into(), prev.into()); - } - } - - tracing::trace!( - "setting range maximum: {:?}, {}, current:\n{:#?}, new:\n{:#?}", - self, - self.display_name(analyzer)?, - self.ref_range(analyzer)?.unwrap().range_max(), // .unwrap() - new_max - ); - - if self.is_concrete(analyzer)? { - let mut new_ty = self.ty(analyzer)?.clone(); - new_ty.concrete_to_builtin(analyzer)?; - self.underlying_mut(analyzer)?.ty = new_ty; - self.set_range_max(analyzer, new_max)?; - } else { - let fallback = if self.needs_fallback(analyzer)? { - self.fallback_range(analyzer)? - } else { - None - }; - - self.underlying_mut(analyzer)? - .set_range_max(new_max, fallback) - } - - self.cache_range(analyzer)?; - Ok(()) - } - - pub fn set_range_exclusions( - &self, - analyzer: &mut impl GraphLike, - new_exclusions: Vec>, - ) -> Result<(), GraphError> { - let fallback = if self.needs_fallback(analyzer)? { - self.fallback_range(analyzer)? - } else { - None - }; - self.underlying_mut(analyzer)? - .set_range_exclusions(new_exclusions, fallback); - Ok(()) - } - - pub fn try_set_range_min( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - mut new_min: Elem, - ) -> Result { - if new_min.contains_node((*self).into()) { - if let Some(prev) = self.previous_version(analyzer) { - new_min.filter_recursion((*self).into(), prev.into()); - } - } - - if self.is_concrete(analyzer)? 
{ - let mut new_ty = self.ty(analyzer)?.clone(); - new_ty.concrete_to_builtin(analyzer)?; - self.underlying_mut(analyzer)?.ty = new_ty; - self.try_set_range_min(analyzer, new_min) - } else { - let fallback = if self.needs_fallback(analyzer)? { - self.fallback_range(analyzer)? - } else { - None - }; - Ok(self - .underlying_mut(analyzer)? - .try_set_range_min(new_min, fallback)) - } - } - - pub fn try_set_range_max( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - mut new_max: Elem, - ) -> Result { - if new_max.contains_node((*self).into()) { - if let Some(prev) = self.previous_version(analyzer) { - new_max.filter_recursion((*self).into(), prev.into()); - } - } - - if self.is_concrete(analyzer)? { - let mut new_ty = self.ty(analyzer)?.clone(); - new_ty.concrete_to_builtin(analyzer)?; - self.underlying_mut(analyzer)?.ty = new_ty; - self.try_set_range_max(analyzer, new_max) - } else { - let fallback = if self.needs_fallback(analyzer)? { - self.fallback_range(analyzer)? - } else { - None - }; - Ok(self - .underlying_mut(analyzer)? - .try_set_range_max(new_max, fallback)) - } - } - - pub fn try_set_range_exclusions( - &self, - analyzer: &mut impl GraphLike, - new_exclusions: Vec>, - ) -> Result { - let fallback = if self.needs_fallback(analyzer)? { - self.fallback_range(analyzer)? - } else { - None - }; - Ok(self - .underlying_mut(analyzer)? - .try_set_range_exclusions(new_exclusions, fallback)) - } - - pub fn latest_version(&self, analyzer: &impl GraphLike) -> Self { - let mut latest = *self; - while let Some(next) = latest.next_version(analyzer) { - latest = next; - } - latest - } - - pub fn latest_version_less_than(&self, idx: NodeIdx, analyzer: &impl GraphLike) -> Self { - let mut latest = *self; - while let Some(next) = latest.next_version(analyzer) { - if next.0 <= idx.index() { - latest = next; - } else { - break; - } - } - latest - } - - pub fn latest_version_in_ctx( - &self, - ctx: ContextNode, - analyzer: &impl GraphLike, - ) -> Result { - if let Some(cvar) = ctx.var_by_name(analyzer, &self.name(analyzer)?) { - Ok(cvar.latest_version(analyzer)) - } else { - Ok(*self) - } - } - - pub fn latest_version_in_ctx_less_than( - &self, - idx: NodeIdx, - ctx: ContextNode, - analyzer: &impl GraphLike, - ) -> Result { - if let Some(cvar) = ctx.var_by_name(analyzer, &self.name(analyzer)?) 
{ - Ok(cvar.latest_version_less_than(idx, analyzer)) - } else { - Ok(*self) - } - } - - pub fn first_version(&self, analyzer: &impl GraphLike) -> Self { - let mut earlier = *self; - while let Some(prev) = earlier.previous_version(analyzer) { - earlier = prev; - } - earlier - } - - pub fn num_versions(&self, analyzer: &impl GraphLike) -> usize { - let mut count = 1; - let mut earlier = self.latest_version(analyzer); - while let Some(prev) = earlier.previous_version(analyzer) { - earlier = prev; - count += 1; - } - count - } - - pub fn next_version(&self, analyzer: &impl GraphLike) -> Option { - analyzer - .graph() - .edges_directed(self.0.into(), Direction::Incoming) - .filter(|edge| Edge::Context(ContextEdge::Prev) == *edge.weight()) - .map(|edge| ContextVarNode::from(edge.source())) - .take(1) - .next() - } - - pub fn previous_version(&self, analyzer: &impl GraphLike) -> Option { - analyzer - .graph() - .edges_directed(self.0.into(), Direction::Outgoing) - .filter(|edge| Edge::Context(ContextEdge::Prev) == *edge.weight()) - .map(|edge| ContextVarNode::from(edge.target())) - .take(1) - .next() - } - - pub fn previous_or_inherited_version(&self, analyzer: &impl GraphLike) -> Option { - if let Some(prev) = self.previous_version(analyzer) { - Some(prev) - } else { - analyzer - .graph() - .edges_directed(self.0.into(), Direction::Outgoing) - .filter(|edge| Edge::Context(ContextEdge::InheritedVariable) == *edge.weight()) - .map(|edge| ContextVarNode::from(edge.target())) - .take(1) - .next() - } - } - - pub fn range_deps(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - if let Some(range) = self.ref_range(analyzer)? { - Ok(range.dependent_on()) - } else { - Ok(vec![]) - } - } - - pub fn dependent_on( - &self, - analyzer: &impl GraphLike, - return_self: bool, - ) -> Result, GraphError> { - let underlying = self.underlying(analyzer)?; - if let Some(tmp) = underlying.tmp_of() { - let mut nodes = tmp.lhs.dependent_on(analyzer, true)?; - if let Some(rhs) = tmp.rhs { - nodes.extend(rhs.dependent_on(analyzer, true)?); - } - Ok(nodes) - } else if return_self { - Ok(vec![*self]) - } else { - Ok(vec![]) - } - } - - pub fn graph_dependent_on( - &self, - analyzer: &impl GraphLike, - ) -> Result, GraphError> { - let underlying = self.underlying(analyzer)?; - let mut tree = BTreeMap::default(); - if let Some(tmp) = underlying.tmp_of() { - tree.insert(*self, tmp); - tmp.lhs - .graph_dependent_on(analyzer)? - .into_iter() - .for_each(|(key, v)| { - if let Some(_v) = tree.get_mut(&key) { - panic!("here") - } else { - tree.insert(key, v); - } - }); - if let Some(rhs) = tmp.rhs { - rhs.graph_dependent_on(analyzer)? - .into_iter() - .for_each(|(key, v)| { - if let Some(_v) = tree.get_mut(&key) { - panic!("here") - } else { - tree.insert(key, v); - } - }); - } - } - - Ok(tree) - } - - pub fn is_concrete(&self, analyzer: &impl GraphLike) -> Result { - Ok(matches!(self.ty(analyzer)?, VarType::Concrete(_))) - } - - pub fn as_concrete(&self, analyzer: &impl GraphLike) -> Result { - match &self.ty(analyzer)? { - VarType::Concrete(c) => Ok(c.underlying(analyzer)?.clone()), - e => Err(GraphError::NodeConfusion(format!( - "Expected variable type to be concrete but was: {e:?}" - ))), - } - } - - pub fn as_cast_tmp( - &self, - loc: Loc, - ctx: ContextNode, - cast_ty: Builtin, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - let new_underlying = self - .underlying(analyzer)? 
- .clone() - .as_cast_tmp(loc, ctx, cast_ty, analyzer)?; - let node = analyzer.add_node(Node::ContextVar(new_underlying)); - ctx.add_var(node.into(), analyzer)?; - analyzer.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); - Ok(node.into()) - } - - pub fn as_tmp( - &self, - loc: Loc, - ctx: ContextNode, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - let new_underlying = self - .underlying(analyzer)? - .clone() - .as_tmp(loc, ctx, analyzer)?; - Ok(analyzer.add_node(Node::ContextVar(new_underlying)).into()) - } - - pub fn ty_eq(&self, other: &Self, analyzer: &mut impl GraphLike) -> Result { - self.ty(analyzer)?.ty_eq(other.ty(analyzer)?, analyzer) - } - - pub fn cast_from( - &self, - other: &Self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - let to_ty = other.ty(analyzer)?.clone(); - self.cast_from_ty(to_ty, analyzer)?; - Ok(()) - } - - pub fn literal_cast_from( - &self, - other: &Self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - let to_ty = other.ty(analyzer)?.clone(); - self.literal_cast_from_ty(to_ty, analyzer)?; - Ok(()) - } - - pub fn cast_from_ty( - &self, - to_ty: VarType, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - let from_ty = self.ty(analyzer)?.clone(); - if !from_ty.ty_eq(&to_ty, analyzer)? { - if let Some(new_ty) = from_ty.try_cast(&to_ty, analyzer)? { - self.underlying_mut(analyzer)?.ty = new_ty; - } - if let (Some(r), Some(r2)) = (self.range(analyzer)?, to_ty.range(analyzer)?) { - let min = r.min.cast(r2.min); - let max = r.max.cast(r2.max); - self.set_range_min(analyzer, min)?; - self.set_range_max(analyzer, max)?; - } - } - - if let (VarType::Concrete(_), VarType::Concrete(cnode)) = (self.ty(analyzer)?, to_ty) { - // update name - let display_name = cnode.underlying(analyzer)?.as_string(); - self.underlying_mut(analyzer)?.display_name = display_name; - } - Ok(()) - } - - pub fn literal_cast_from_ty( - &self, - to_ty: VarType, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - let from_ty = self.ty(analyzer)?.clone(); - if !from_ty.ty_eq(&to_ty, analyzer)? { - if let Some(new_ty) = from_ty.try_literal_cast(&to_ty, analyzer)? { - self.underlying_mut(analyzer)?.ty = new_ty; - } - // we dont need to update the ranges because a literal by definition is concrete - } - - if let (VarType::Concrete(_), VarType::Concrete(cnode)) = (self.ty(analyzer)?, to_ty) { - // update name - let display_name = cnode.underlying(analyzer)?.as_string(); - self.underlying_mut(analyzer)?.display_name = display_name; - } - Ok(()) - } - - pub fn try_increase_size( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - let from_ty = self.ty(analyzer)?.clone(); - self.cast_from_ty(from_ty.max_size(analyzer)?, analyzer)?; - Ok(()) - } - - pub fn is_int(&self, analyzer: &impl GraphLike) -> Result { - self.ty(analyzer)?.is_int(analyzer) - } - - pub fn sol_delete_range(&mut self, analyzer: &mut impl GraphLike) -> Result<(), GraphError> { - let ty = self.ty(analyzer)?; - if let Some(delete_range) = ty.delete_range_result(analyzer)? 
{ - self.set_range(analyzer, delete_range)?; - } - Ok(()) - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ContextVar { - pub loc: Option, - pub name: String, - pub display_name: String, - pub storage: Option, - pub is_tmp: bool, - pub tmp_of: Option, - pub is_symbolic: bool, - pub is_return: bool, - pub ty: VarType, -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub struct TmpConstruction { - pub lhs: ContextVarNode, - pub op: RangeOp, - pub rhs: Option, -} - -impl TmpConstruction { - pub fn new(lhs: ContextVarNode, op: RangeOp, rhs: Option) -> Self { - Self { lhs, op, rhs } - } -} - -impl ContextVar { - pub fn eq_ignore_loc(&self, other: &Self) -> bool { - self.name == other.name - && self.display_name == other.display_name - && self.storage == other.storage - && self.is_tmp == other.is_tmp - && self.tmp_of == other.tmp_of - && self.is_symbolic == other.is_symbolic - && self.is_return == other.is_return - && self.ty == other.ty - } - - pub fn is_tmp(&self) -> bool { - self.is_tmp || self.tmp_of.is_some() - } - - pub fn tmp_of(&self) -> Option { - self.tmp_of - } - - pub fn new_from_concrete( - loc: Loc, - ctx: ContextNode, - concrete_node: ConcreteNode, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - let name = format!( - "tmp_{}({})", - ctx.new_tmp(analyzer)?, - concrete_node.underlying(analyzer)?.as_string() - ); - Ok(ContextVar { - loc: Some(loc), - name, - display_name: concrete_node.underlying(analyzer)?.as_string(), - storage: None, - is_tmp: true, - tmp_of: None, - is_symbolic: false, - is_return: false, - ty: VarType::Concrete(concrete_node), - }) - } - - pub fn as_cast_tmp( - &self, - loc: Loc, - ctx: ContextNode, - cast_ty: Builtin, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - let mut new_tmp = self.clone(); - new_tmp.loc = Some(loc); - new_tmp.is_tmp = true; - new_tmp.name = format!( - "tmp_{}({}({}))", - ctx.new_tmp(analyzer)?, - cast_ty.as_string(analyzer)?, - self.name - ); - new_tmp.display_name = format!("{}({})", cast_ty.as_string(analyzer)?, self.display_name); - Ok(new_tmp) - } - - pub fn as_tmp( - &self, - loc: Loc, - ctx: ContextNode, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - let mut new_tmp = self.clone(); - new_tmp.loc = Some(loc); - new_tmp.is_tmp = true; - new_tmp.name = format!("tmp{}({})", ctx.new_tmp(analyzer)?, self.name); - new_tmp.display_name = format!("tmp_{}", self.name); - Ok(new_tmp) - } - - pub fn new_from_contract( - loc: Loc, - contract_node: ContractNode, - analyzer: &impl GraphLike, - ) -> Result { - Ok(ContextVar { - loc: Some(loc), - name: contract_node.name(analyzer)?, - display_name: contract_node.name(analyzer)?, - storage: None, - is_tmp: false, - tmp_of: None, - is_symbolic: true, - is_return: false, - ty: VarType::User( - TypeNode::Contract(contract_node), - SolcRange::try_from_builtin(&Builtin::Address), - ), - }) - } - - pub fn new_from_struct( - loc: Loc, - struct_node: StructNode, - ctx: ContextNode, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - Ok(ContextVar { - loc: Some(loc), - name: format!( - "tmp_struct_{}_{}", - ctx.new_tmp(analyzer)?, - struct_node.name(analyzer)? 
- ), - display_name: struct_node.name(analyzer)?, - storage: Some(StorageLocation::Memory(Loc::Implicit)), - is_tmp: false, - tmp_of: None, - is_symbolic: true, - is_return: false, - ty: VarType::User(TypeNode::Struct(struct_node), None), - }) - } - - pub fn new_from_ty( - loc: Loc, - ty_node: TyNode, - ctx: ContextNode, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - Ok(ContextVar { - loc: Some(loc), - name: format!( - "tmp_ty_{}_{}", - ctx.new_tmp(analyzer)?, - ty_node.name(analyzer)? - ), - display_name: ty_node.name(analyzer)?, - storage: Some(StorageLocation::Memory(Loc::Implicit)), - is_tmp: false, - tmp_of: None, - is_symbolic: true, - is_return: false, - ty: VarType::try_from_idx(analyzer, ty_node.0.into()).unwrap(), - }) - } - - pub fn new_from_builtin( - loc: Loc, - bn_node: BuiltInNode, - analyzer: &impl GraphLike, - ) -> Result { - Ok(ContextVar { - loc: Some(loc), - name: format!("tmp_{}", bn_node.underlying(analyzer)?.as_string(analyzer)?), - display_name: format!("tmp_{}", bn_node.underlying(analyzer)?.as_string(analyzer)?), - storage: None, - is_tmp: true, - tmp_of: None, - is_symbolic: false, - is_return: false, - ty: VarType::try_from_idx(analyzer, bn_node.into()).unwrap(), - }) - } - - pub fn fallback_range( - &self, - analyzer: &impl GraphLike, - ) -> Result, GraphError> { - match &self.ty { - VarType::User(TypeNode::Contract(_), ref maybe_range) => { - if let Some(range) = maybe_range { - Ok(Some(range.clone())) - } else { - Ok(SolcRange::try_from_builtin(&Builtin::Address)) - } - } - VarType::User(TypeNode::Enum(enum_node), ref maybe_range) => { - if let Some(range) = maybe_range { - Ok(Some(range.clone())) - } else { - Ok(enum_node.maybe_default_range(analyzer)?) - } - } - VarType::User(TypeNode::Ty(ty_node), ref maybe_range) => { - if let Some(range) = maybe_range { - Ok(Some(range.clone())) - } else { - let underlying = - BuiltInNode::from(ty_node.underlying(analyzer)?.ty).underlying(analyzer)?; - Ok(SolcRange::try_from_builtin(underlying)) - } - } - VarType::BuiltIn(bn, ref maybe_range) => { - if let Some(range) = maybe_range { - Ok(Some(range.clone())) - } else { - let underlying = bn.underlying(analyzer)?; - Ok(SolcRange::try_from_builtin(underlying)) - } - } - VarType::Concrete(cn) => Ok(SolcRange::from(cn.underlying(analyzer)?.clone())), - _ => Ok(None), - } - } - - pub fn set_range(&mut self, new_range: SolcRange) { - match &mut self.ty { - VarType::User(TypeNode::Contract(_), ref mut maybe_range) - | VarType::User(TypeNode::Enum(_), ref mut maybe_range) - | VarType::User(TypeNode::Ty(_), ref mut maybe_range) - | VarType::BuiltIn(_, ref mut maybe_range) => { - *maybe_range = Some(new_range); - } - VarType::Concrete(_) => {} - e => panic!("wasnt builtin: {e:?}"), - } - } - - pub fn needs_fallback(&self) -> bool { - match &self.ty { - VarType::User(TypeNode::Contract(_), ref maybe_range) - | VarType::User(TypeNode::Enum(_), ref maybe_range) - | VarType::User(TypeNode::Ty(_), ref maybe_range) - | VarType::BuiltIn(_, ref maybe_range) => maybe_range.is_some(), - _ => false, - } - } - - // #[tracing::instrument(level = "trace", skip_all)] - pub fn set_range_min(&mut self, new_min: Elem, fallback_range: Option) { - // tracing::trace!("Setting range min in underlying: {:?}", self.ty); - match &mut self.ty { - VarType::User(TypeNode::Contract(_), ref mut maybe_range) - | VarType::User(TypeNode::Enum(_), ref mut maybe_range) - | VarType::User(TypeNode::Ty(_), ref mut maybe_range) - | VarType::BuiltIn(_, ref mut maybe_range) => { - if let Some(range) 
= maybe_range { - range.set_range_min(new_min); - } else { - let mut fr = fallback_range.expect("No range and no fallback_range"); - fr.set_range_min(new_min); - *maybe_range = Some(fr); - } - } - VarType::Concrete(_) => {} - e => panic!("wasnt builtin: {e:?}"), - } - } - - pub fn try_set_range_min( - &mut self, - new_min: Elem, - fallback_range: Option, - ) -> bool { - match &mut self.ty { - VarType::User(TypeNode::Contract(_), ref mut maybe_range) - | VarType::User(TypeNode::Enum(_), ref mut maybe_range) - | VarType::User(TypeNode::Ty(_), ref mut maybe_range) - | VarType::BuiltIn(_, ref mut maybe_range) => { - if let Some(range) = maybe_range { - range.set_range_min(new_min); - true - } else { - let mut fr = fallback_range.expect("No range and no fallback_range"); - fr.set_range_min(new_min); - *maybe_range = Some(fr); - true - } - } - VarType::Concrete(_) => true, - _ => false, - } - } - - pub fn set_range_max(&mut self, new_max: Elem, fallback_range: Option) { - match &mut self.ty { - VarType::User(TypeNode::Contract(_), ref mut maybe_range) - | VarType::User(TypeNode::Enum(_), ref mut maybe_range) - | VarType::User(TypeNode::Ty(_), ref mut maybe_range) - | VarType::BuiltIn(_, ref mut maybe_range) => { - if let Some(range) = maybe_range { - range.set_range_max(new_max); - } else { - let mut fr = fallback_range.expect("No range and no fallback_range"); - fr.set_range_max(new_max); - *maybe_range = Some(fr); - } - } - VarType::Concrete(_) => {} - e => panic!("wasnt builtin or concrete: {e:?}"), - } - } - - pub fn set_range_exclusions( - &mut self, - new_exclusions: Vec>, - fallback_range: Option, - ) { - match &mut self.ty { - VarType::User(TypeNode::Contract(_), ref mut maybe_range) - | VarType::User(TypeNode::Enum(_), ref mut maybe_range) - | VarType::User(TypeNode::Ty(_), ref mut maybe_range) - | VarType::BuiltIn(_, ref mut maybe_range) => { - if let Some(range) = maybe_range { - range.set_range_exclusions(new_exclusions); - } else { - let mut fr = fallback_range.expect("No range and no fallback_range"); - fr.set_range_exclusions(new_exclusions); - *maybe_range = Some(fr); - } - } - VarType::Concrete(_) => {} - e => panic!("wasnt builtin or concrete: {e:?}"), - } - } - - pub fn try_set_range_max( - &mut self, - new_max: Elem, - fallback_range: Option, - ) -> bool { - match &mut self.ty { - VarType::User(TypeNode::Contract(_), ref mut maybe_range) - | VarType::User(TypeNode::Enum(_), ref mut maybe_range) - | VarType::User(TypeNode::Ty(_), ref mut maybe_range) - | VarType::BuiltIn(_, ref mut maybe_range) => { - if let Some(range) = maybe_range { - range.set_range_max(new_max); - true - } else { - let mut fr = fallback_range.expect("No range and no fallback_range"); - fr.set_range_max(new_max); - *maybe_range = Some(fr); - true - } - } - VarType::Concrete(_) => true, - _ => false, - } - } - - pub fn try_set_range_exclusions( - &mut self, - new_exclusions: Vec>, - fallback_range: Option, - ) -> bool { - match &mut self.ty { - VarType::User(TypeNode::Contract(_), ref mut maybe_range) - | VarType::User(TypeNode::Enum(_), ref mut maybe_range) - | VarType::User(TypeNode::Ty(_), ref mut maybe_range) - | VarType::BuiltIn(_, ref mut maybe_range) => { - if let Some(range) = maybe_range { - range.set_range_exclusions(new_exclusions); - true - } else { - let mut fr = fallback_range.expect("No range and no fallback_range"); - fr.set_range_exclusions(new_exclusions); - *maybe_range = Some(fr); - true - } - } - VarType::Concrete(_) => true, - _ => false, - } - } - - pub fn maybe_from_user_ty( - 
analyzer: &impl GraphLike, - loc: Loc, - node_idx: NodeIdx, - ) -> Option { - if let Some(ty) = VarType::try_from_idx(analyzer, node_idx) { - let (name, storage) = match analyzer.node(node_idx) { - Node::Contract(c) => { - let name = c.name.clone().expect("Contract had no name").name; - (name, None) - } - Node::Function(f) => { - let name = f.name.clone().expect("Function had no name").name; - (name, None) - } - Node::Struct(s) => { - let name = s.name.clone().expect("Struct had no name").name; - (name, None) - } - Node::Enum(e) => { - let name = e.name.clone().expect("Enum had no name").name; - (name, None) - } - Node::Var(var) => { - let name = var.name.clone().expect("Variable had no name").name; - let storage = if var.in_contract { - if !var.attrs.iter().any(|attr| { - matches!(attr, solang_parser::pt::VariableAttribute::Constant(_)) - }) { - Some(StorageLocation::Storage(var.loc)) - } else { - None - } - } else { - None - }; - (name, storage) - } - Node::Ty(ty) => { - let name = &ty.name.name; - (name.clone(), None) - } - _ => return None, - }; - - Some(ContextVar { - loc: Some(loc), - name: name.clone(), - display_name: name, - storage, - is_tmp: false, - tmp_of: None, - is_symbolic: true, - is_return: false, - ty, - }) - } else { - None - } - } - - pub fn maybe_new_from_field( - analyzer: &impl GraphLike, - loc: Loc, - parent_var: &ContextVar, - field: Field, - ) -> Option { - if let Some(ty) = VarType::try_from_idx(analyzer, field.ty) { - Some(ContextVar { - loc: Some(loc), - name: parent_var.name.clone() - + "." - + &field.name.clone().expect("Field had no name").name, - display_name: parent_var.name.clone() - + "." - + &field.name.expect("Field had no name").name, - storage: parent_var.storage.clone(), - is_tmp: false, - tmp_of: None, - is_symbolic: true, - is_return: false, - ty, - }) - } else { - None - } - } - - pub fn new_from_enum_variant( - analyzer: &mut (impl GraphLike + AnalyzerLike), - ctx: ContextNode, - loc: Loc, - enum_node: EnumNode, - variant: String, - ) -> Result { - let enum_name = enum_node.name(analyzer)?; - Ok(ContextVar { - loc: Some(loc), - name: format!("{}.{}_{}", enum_name, variant, ctx.new_tmp(analyzer)?), - display_name: format!("{}.{}", enum_name, variant), - storage: None, - is_tmp: false, - tmp_of: None, - is_symbolic: true, - is_return: false, - ty: VarType::User( - TypeNode::Enum(enum_node), - Some(enum_node.range_from_variant(variant, analyzer)?), - ), - }) - } - - pub fn new_from_index( - analyzer: &mut (impl GraphLike + AnalyzerLike), - loc: Loc, - parent_name: String, - parent_display_name: String, - parent_storage: StorageLocation, - parent_var: &BuiltInNode, - index: ContextVarNode, - ) -> Result { - Ok(ContextVar { - loc: Some(loc), - name: parent_name + "[" + &index.name(analyzer)? + "]", - display_name: parent_display_name + "[" + &index.display_name(analyzer)? 
+ "]", - storage: Some(parent_storage), - is_tmp: false, - tmp_of: None, - is_symbolic: index.underlying(analyzer)?.is_symbolic, - is_return: false, - ty: parent_var.dynamic_underlying_ty(analyzer)?, - }) - } - - pub fn new_from_func( - analyzer: &mut (impl GraphLike + AnalyzerLike), - func: FunctionNode, - ) -> Result { - Ok(ContextVar { - loc: Some(func.underlying(analyzer)?.loc), - name: func.name(analyzer)?, - display_name: func.name(analyzer)?, - storage: None, - is_tmp: false, - tmp_of: None, - is_symbolic: false, - is_return: false, - ty: VarType::User(TypeNode::Func(func), None), - }) - } - - pub fn maybe_new_from_func_param( - analyzer: &impl GraphLike, - param: FunctionParam, - ) -> Option { - if let Some(name) = param.name { - if let Some(ty) = VarType::try_from_idx(analyzer, param.ty) { - Some(ContextVar { - loc: Some(param.loc), - name: name.name.clone(), - display_name: name.name, - storage: param.storage, - is_tmp: false, - tmp_of: None, - is_symbolic: true, - is_return: false, - ty, - }) - } else { - None - } - } else { - None - } - } - - pub fn maybe_new_from_func_ret(analyzer: &impl GraphLike, ret: FunctionReturn) -> Option { - if let Some(name) = ret.name { - if let Some(ty) = VarType::try_from_idx(analyzer, ret.ty) { - Some(ContextVar { - loc: Some(ret.loc), - name: name.name.clone(), - display_name: name.name, - storage: ret.storage, - is_tmp: false, - tmp_of: None, - is_symbolic: true, - is_return: true, - ty, - }) - } else { - None - } - } else { - None - } - } - - pub fn new_from_func_ret( - ctx: ContextNode, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ret: FunctionReturn, - ) -> Result, GraphError> { - let (is_tmp, name) = if let Some(name) = ret.name { - (false, name.name) - } else { - (true, format!("tmp_func_ret_{}", ctx.new_tmp(analyzer)?)) - }; - - if let Some(ty) = VarType::try_from_idx(analyzer, ret.ty) { - Ok(Some(ContextVar { - loc: Some(ret.loc), - name: name.clone(), - display_name: name, - storage: ret.storage, - is_tmp, - tmp_of: None, - is_symbolic: true, - is_return: true, - ty, - })) - } else { - Ok(None) - } - } -} diff --git a/shared/src/lib.rs b/shared/src/lib.rs deleted file mode 100644 index 1f86957f..00000000 --- a/shared/src/lib.rs +++ /dev/null @@ -1,136 +0,0 @@ -use crate::analyzer::GraphLike; -use crate::context::ContextVarNode; -use std::collections::HashMap; - -use crate::analyzer::AsDotStr; -use crate::context::ContextNode; -use crate::{ - context::{Context, ContextEdge, ContextVar}, - nodes::*, -}; -use lazy_static::lazy_static; -use petgraph::graph::*; -use solang_parser::pt::Identifier; - -pub mod analyzer; -pub mod context; -pub mod nodes; -pub mod range; - -pub type NodeIdx = NodeIndex; -pub type EdgeIdx = EdgeIndex; - -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum Node { - Context(Context), - ContextVar(ContextVar), - ContextFork, - FunctionCall, - Builtin(Builtin), - VarType(VarType), - Entry, - SourceUnit(usize), - SourceUnitPart(usize, usize), - Contract(Contract), - Function(Function), - FunctionParam(FunctionParam), - FunctionReturn(FunctionReturn), - Struct(Struct), - Enum(Enum), - Error(Error), - ErrorParam(ErrorParam), - Field(Field), - Var(Var), - Ty(Ty), - Unresolved(Identifier), - Concrete(Concrete), - Msg(Msg), - Block(Block), -} - -pub fn as_dot_str(idx: NodeIdx, analyzer: &impl GraphLike) -> String { - use crate::Node::*; - match analyzer.node(idx) { - Context(_) => ContextNode::from(idx).as_dot_str(analyzer), - ContextVar(_) => ContextVarNode::from(idx).as_dot_str(analyzer), - ContextFork => "Context 
Fork".to_string(), - FunctionCall => "Function Call".to_string(), - Builtin(bi) => bi.as_string(analyzer).unwrap(), - VarType(v_ty) => v_ty.as_dot_str(analyzer), - Contract(_c) => ContractNode::from(idx).as_dot_str(analyzer), - Function(_f) => FunctionNode::from(idx).as_dot_str(analyzer), - FunctionParam(_fp) => FunctionParamNode::from(idx).as_dot_str(analyzer), - FunctionReturn(_fr) => FunctionReturnNode::from(idx).as_dot_str(analyzer), - Struct(_s) => StructNode::from(idx).as_dot_str(analyzer), - Enum(_e) => EnumNode::from(idx).as_dot_str(analyzer), - Field(_f) => FieldNode::from(idx).as_dot_str(analyzer), - Var(_v) => VarNode::from(idx).as_dot_str(analyzer), - Ty(_t) => TyNode::from(idx).as_dot_str(analyzer), - // Concrete(c) => c.as_human_string(), - e => format!("{e:?}"), - } -} - -impl Node { - pub fn dot_str_color(&self) -> String { - use crate::Node::*; - let c = match self { - Context(_) => TOKYO_NIGHT_COLORS.get("purple").unwrap(), - ContextVar(_) => TOKYO_NIGHT_COLORS.get("orange").unwrap(), - FunctionCall => TOKYO_NIGHT_COLORS.get("cyan").unwrap(), - Contract(_c) => TOKYO_NIGHT_COLORS.get("green").unwrap(), - Function(_f) => TOKYO_NIGHT_COLORS.get("cyan").unwrap(), - Struct(_s) => TOKYO_NIGHT_COLORS.get("yellow").unwrap(), - Enum(_e) => TOKYO_NIGHT_COLORS.get("yellow").unwrap(), - _ => TOKYO_NIGHT_COLORS.get("default").unwrap(), - }; - c.to_string() - } -} - -lazy_static! { - pub static ref TOKYO_NIGHT_COLORS: HashMap<&'static str, &'static str> = { - let mut m = HashMap::new(); - m.insert("red", "#f7768e"); - m.insert("orange", "#ff9e64"); - m.insert("yellow", "#e0af68"); - m.insert("green", "#9ece6a"); - m.insert("cyan", "#73daca"); - m.insert("teal", "#2ac3de"); - m.insert("darkblue", "#7aa2f7"); - m.insert("purple", "#bb9af7"); - m.insert("bg", "#1a1b26"); - m.insert("font", "#c0caf5"); - m.insert("deepred", "#703440"); - m.insert("default", "#565f89"); - m - }; -} - -#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] -pub enum Edge { - Source, - Part, - Import, - Context(ContextEdge), - Contract, - InheritedContract, - Field, - Enum, - Struct, - Error, - ErrorParam, - Event, - Var, - Ty, - Func, - FunctionParam, - FunctionReturn, - FuncModifier(usize), - Modifier, - FallbackFunc, - Constructor, - ReceiveFunc, - LibraryFunction(NodeIdx), - BuiltinFunction, -} diff --git a/shared/src/nodes/mod.rs b/shared/src/nodes/mod.rs deleted file mode 100644 index 4e9b9dee..00000000 --- a/shared/src/nodes/mod.rs +++ /dev/null @@ -1,1189 +0,0 @@ -//! 
Solidity and EVM specific representations as nodes in the graph -use crate::analyzer::AsDotStr; -use crate::analyzer::GraphError; -use crate::analyzer::{AnalyzerLike, GraphLike}; -use crate::range::elem::RangeElem; -use crate::range::elem_ty::Dynamic; -use crate::range::elem_ty::Elem; -use crate::range::elem_ty::RangeDyn; -use crate::range::Range; -use crate::range::SolcRange; -use crate::ContextVarNode; - -use crate::Node; -use crate::NodeIdx; -use ethers_core::types::Address; -use ethers_core::types::H256; -use ethers_core::types::I256; -use ethers_core::types::U256; -use solang_parser::pt::{Expression, Loc, Type}; - -mod contract_ty; -pub use contract_ty::*; -mod enum_ty; -pub use enum_ty::*; -mod struct_ty; -pub use struct_ty::*; -mod func_ty; -pub use func_ty::*; -mod err_ty; -pub use err_ty::*; -mod var_ty; -pub use var_ty::*; -mod ty_ty; -pub use ty_ty::*; -mod concrete; -pub use concrete::*; -mod msg; -pub use msg::*; -mod block; -pub use block::*; - -#[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)] -pub enum VarType { - User(TypeNode, Option), - BuiltIn(BuiltInNode, Option), - Concrete(ConcreteNode), -} - -impl AsDotStr for VarType { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String { - self.as_string(analyzer).unwrap() - } -} - -impl VarType { - pub fn set_range(&mut self, new_range: SolcRange) -> Result<(), GraphError> { - match self { - VarType::User(TypeNode::Enum(_), ref mut r) - | VarType::User(TypeNode::Contract(_), ref mut r) - | VarType::User(TypeNode::Ty(_), ref mut r) - | VarType::BuiltIn(_, ref mut r) => { - *r = Some(new_range); - Ok(()) - } - _ => Err(GraphError::NodeConfusion( - "This type cannot have a range".to_string(), - )), - } - } - - pub fn possible_builtins_from_ty_inf(&self, analyzer: &impl GraphLike) -> Vec { - match self { - Self::BuiltIn(bn, _) => bn - .underlying(analyzer) - .unwrap() - .possible_builtins_from_ty_inf(), - Self::Concrete(c) => c - .underlying(analyzer) - .unwrap() - .possible_builtins_from_ty_inf(), - _ => vec![], - } - } - - pub fn ty_idx(&self) -> NodeIdx { - match self { - Self::User(ty_node, _) => (*ty_node).into(), - Self::BuiltIn(bn, _) => (*bn).into(), - Self::Concrete(c) => (*c).into(), - } - } - - pub fn is_dyn_builtin(&self, analyzer: &impl GraphLike) -> Result { - match self { - Self::BuiltIn(node, _) => node.is_dyn(analyzer), - _ => Ok(false), - } - } - - pub fn unresolved_as_resolved(&self, analyzer: &impl GraphLike) -> Result { - match self { - VarType::User(TypeNode::Unresolved(n), _) => match analyzer.node(*n) { - Node::Unresolved(ident) => Err(GraphError::NodeConfusion(format!( - "Expected the type \"{}\" to be resolved by now", - ident.name - ))), - _ => { - if let Some(ty) = VarType::try_from_idx(analyzer, *n) { - Ok(ty) - } else { - Err(GraphError::NodeConfusion( - "Tried to type a non-typeable element".to_string(), - )) - } - } - }, - _ => Ok(self.clone()), - } - } - - pub fn concrete_to_builtin( - &mut self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result<(), GraphError> { - if let VarType::Concrete(cnode) = self { - let c = cnode.underlying(analyzer)?.clone(); - match c { - crate::Concrete::Uint(ref size, _) => { - let new_ty = VarType::BuiltIn( - BuiltInNode::from(analyzer.builtin_or_add(Builtin::Uint(*size))), - SolcRange::from(c), - ); - *self = new_ty; - } - crate::Concrete::Int(ref size, _) => { - let new_ty = VarType::BuiltIn( - BuiltInNode::from(analyzer.builtin_or_add(Builtin::Int(*size))), - SolcRange::from(c), - ); - *self = new_ty; - } - crate::Concrete::Bool(_) => 
{ - let new_ty = VarType::BuiltIn( - BuiltInNode::from(analyzer.builtin_or_add(Builtin::Bool)), - SolcRange::from(c), - ); - *self = new_ty; - } - crate::Concrete::Address(_) => { - let new_ty = VarType::BuiltIn( - BuiltInNode::from(analyzer.builtin_or_add(Builtin::Address)), - SolcRange::from(c), - ); - *self = new_ty; - } - crate::Concrete::Bytes(ref s, _) => { - let new_ty = VarType::BuiltIn( - BuiltInNode::from(analyzer.builtin_or_add(Builtin::Bytes(*s))), - SolcRange::from(c), - ); - *self = new_ty; - } - crate::Concrete::String(_) => { - let new_ty = VarType::BuiltIn( - BuiltInNode::from(analyzer.builtin_or_add(Builtin::String)), - SolcRange::from(c), - ); - *self = new_ty; - } - crate::Concrete::DynBytes(_) => { - let new_ty = VarType::BuiltIn( - BuiltInNode::from(analyzer.builtin_or_add(Builtin::DynamicBytes)), - SolcRange::from(c), - ); - *self = new_ty; - } - // Concrete::Array(Vec), - _ => {} - } - } - Ok(()) - } - - pub fn try_from_idx(analyzer: &impl GraphLike, node: NodeIdx) -> Option { - // get node, check if typeable and convert idx into vartype - match analyzer.node(node) { - Node::VarType(a) => Some(a.clone()), - Node::Builtin(b) => Some(VarType::BuiltIn( - node.into(), - SolcRange::try_from_builtin(b), - )), - Node::Contract(_) => Some(VarType::User( - TypeNode::Contract(node.into()), - SolcRange::try_from_builtin(&Builtin::Address), - )), - Node::Function(_) => Some(VarType::User(TypeNode::Func(node.into()), None)), - Node::Struct(_) => Some(VarType::User(TypeNode::Struct(node.into()), None)), - Node::Enum(enu) => { - let variants = enu.variants(); - let range = if !variants.is_empty() { - let min = Concrete::from(U256::zero()).into(); - let max = Concrete::from(U256::from(variants.len() - 1)).into(); - Some(SolcRange::new(min, max, vec![])) - } else { - None - }; - Some(VarType::User(TypeNode::Enum(node.into()), range)) - } - Node::Unresolved(_n) => Some(VarType::User(TypeNode::Unresolved(node), None)), - Node::Concrete(_) => Some(VarType::Concrete(node.into())), - Node::ContextVar(cvar) => Some(cvar.ty.clone()), - Node::Var(var) => VarType::try_from_idx(analyzer, var.ty), - Node::Ty(ty) => { - let range = SolcRange::try_from_builtin( - BuiltInNode::from(ty.ty).underlying(analyzer).unwrap(), - )?; - Some(VarType::User(TypeNode::Ty(node.into()), Some(range))) - } - Node::FunctionParam(inner) => VarType::try_from_idx(analyzer, inner.ty), - Node::Error(..) - | Node::ContextFork - | Node::FunctionCall - | Node::FunctionReturn(..) - | Node::ErrorParam(..) - | Node::Field(..) - | Node::SourceUnitPart(..) - | Node::SourceUnit(..) - | Node::Entry - | Node::Context(..) - | Node::Msg(_) - | Node::Block(_) => None, - } - } - - pub fn requires_input(&self, analyzer: &impl GraphLike) -> Result { - match self { - VarType::BuiltIn(bn, _) => Ok(bn.underlying(analyzer)?.requires_input()), - _ => Ok(false), - } - } - - pub fn try_cast( - self, - other: &Self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result, GraphError> { - match (self, other) { - (l, Self::User(TypeNode::Ty(ty), o_r)) => { - let t = Self::BuiltIn(BuiltInNode::from(ty.underlying(analyzer)?.ty), o_r.clone()); - l.try_cast(&t, analyzer) - } - (Self::BuiltIn(from_bn, sr), Self::User(TypeNode::Contract(cn), _)) => { - match from_bn.underlying(analyzer)? 
{ - Builtin::Address | Builtin::AddressPayable | Builtin::Payable => { - Ok(Some(Self::User(TypeNode::Contract(*cn), sr))) - } - _ => Ok(None), - } - } - (Self::User(TypeNode::Contract(_cn), sr), Self::BuiltIn(to_bn, _)) => { - match to_bn.underlying(analyzer)? { - Builtin::Address | Builtin::AddressPayable | Builtin::Payable => { - Ok(Some(Self::BuiltIn(*to_bn, sr))) - } - _ => Ok(None), - } - } - (Self::BuiltIn(from_bn, sr), Self::BuiltIn(to_bn, _)) => { - if from_bn.implicitly_castable_to(to_bn, analyzer)? { - Ok(Some(Self::BuiltIn(*to_bn, sr))) - } else { - Ok(None) - } - } - (Self::Concrete(from_c), Self::BuiltIn(to_bn, _)) => { - let c = from_c.underlying(analyzer)?.clone(); - let b = to_bn.underlying(analyzer)?; - if let Some(casted) = c.cast(b.clone()) { - let node = analyzer.add_node(Node::Concrete(casted)); - Ok(Some(Self::Concrete(node.into()))) - } else { - Ok(None) - } - } - (Self::Concrete(from_c), Self::Concrete(to_c)) => { - let c = from_c.underlying(analyzer)?.clone(); - let to_c = to_c.underlying(analyzer)?; - if let Some(casted) = c.cast_from(to_c) { - let node = analyzer.add_node(Node::Concrete(casted)); - Ok(Some(Self::Concrete(node.into()))) - } else { - Ok(None) - } - } - _ => Ok(None), - } - } - - pub fn try_literal_cast( - self, - other: &Self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result, GraphError> { - match (self, other) { - (Self::BuiltIn(from_bn, sr), Self::User(TypeNode::Ty(ty), _)) => { - if ty.underlying(analyzer)?.ty == from_bn.into() { - Ok(Some(Self::User(TypeNode::Ty(*ty), sr))) - } else { - Ok(None) - } - } - (Self::Concrete(from_c), Self::User(TypeNode::Ty(ty), _)) => { - let concrete_underlying = from_c.underlying(analyzer)?.clone(); - let as_bn = analyzer.builtin_or_add(concrete_underlying.as_builtin()); - if ty.underlying(analyzer)?.ty == as_bn { - Ok(Some(Self::User( - TypeNode::Ty(*ty), - SolcRange::from(concrete_underlying), - ))) - } else { - Ok(None) - } - } - (Self::BuiltIn(from_bn, sr), Self::BuiltIn(to_bn, _)) => { - if from_bn.implicitly_castable_to(to_bn, analyzer)? 
{ - Ok(Some(Self::BuiltIn(*to_bn, sr))) - } else { - Ok(None) - } - } - (Self::Concrete(from_c), Self::BuiltIn(to_bn, _)) => { - let c = from_c.underlying(analyzer)?.clone(); - let b = to_bn.underlying(analyzer)?; - if let Some(casted) = c.literal_cast(b.clone()) { - let node = analyzer.add_node(Node::Concrete(casted)); - Ok(Some(Self::Concrete(node.into()))) - } else { - Ok(None) - } - } - (Self::Concrete(from_c), Self::Concrete(to_c)) => { - let c = from_c.underlying(analyzer)?.clone(); - let to_c = to_c.underlying(analyzer)?; - if let Some(casted) = c.literal_cast_from(to_c) { - let node = analyzer.add_node(Node::Concrete(casted)); - Ok(Some(Self::Concrete(node.into()))) - } else { - Ok(None) - } - } - _ => Ok(None), - } - } - - pub fn implicitly_castable_to( - &self, - other: &Self, - analyzer: &impl GraphLike, - ) -> Result { - match (self, other) { - (Self::BuiltIn(from_bn, _), Self::BuiltIn(to_bn, _)) => { - from_bn.implicitly_castable_to(to_bn, analyzer) - } - (Self::Concrete(from_c), Self::BuiltIn(_to_bn, _)) => { - todo!("here, {from_c:?}") - } - _ => Ok(false), - } - } - - pub fn max_size( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - match self { - Self::BuiltIn(from_bn, _r) => { - let bn = from_bn.max_size(analyzer)?; - Ok(Self::BuiltIn( - bn, - SolcRange::try_from_builtin(bn.underlying(analyzer)?), - )) - } - Self::Concrete(from_c) => Ok(Self::Concrete(from_c.max_size(analyzer)?)), - _ => Ok(self.clone()), - } - } - - pub fn range(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - match self { - Self::User(_, Some(range)) => Ok(Some(range.clone())), - Self::BuiltIn(_, Some(range)) => Ok(Some(range.clone())), - Self::BuiltIn(bn, None) => Ok(SolcRange::try_from_builtin(bn.underlying(analyzer)?)), - Self::Concrete(cnode) => Ok(SolcRange::from(cnode.underlying(analyzer)?.clone())), - _ => Ok(None), - } - } - - pub fn ref_range( - &self, - analyzer: &impl GraphLike, - ) -> Result>, GraphError> { - match self { - Self::User(_, Some(range)) => Ok(Some(std::borrow::Cow::Borrowed(range))), - Self::BuiltIn(_, Some(range)) => Ok(Some(std::borrow::Cow::Borrowed(range))), - Self::BuiltIn(bn, None) => { - if let Some(r) = SolcRange::try_from_builtin(bn.underlying(analyzer)?) 
{ - Ok(Some(std::borrow::Cow::Owned(r))) - } else { - Ok(None) - } - } - Self::Concrete(cnode) => { - if let Some(r) = SolcRange::from(cnode.underlying(analyzer)?.clone()) { - Ok(Some(std::borrow::Cow::Owned(r))) - } else { - Ok(None) - } - } - _ => Ok(None), - } - } - - pub fn delete_range_result( - &self, - analyzer: &impl GraphLike, - ) -> Result, GraphError> { - match self { - Self::User(TypeNode::Contract(_), _) => { - let zero = Concrete::Address(Address::from_slice(&[0x00; 20])); - Ok(Some(SolcRange::new( - zero.clone().into(), - zero.into(), - vec![], - ))) - } - Self::User(TypeNode::Enum(enum_node), _) => { - if let Some(first) = enum_node.variants(analyzer)?.first() { - let zero = Concrete::from(first.clone()); - Ok(Some(SolcRange::new( - zero.clone().into(), - zero.into(), - vec![], - ))) - } else { - Ok(None) - } - } - Self::User(TypeNode::Ty(ty), _) => { - BuiltInNode::from(ty.underlying(analyzer)?.ty).zero_range(analyzer) - } - Self::BuiltIn(bn, None) => bn.zero_range(analyzer), - Self::Concrete(cnode) => Ok(cnode.underlying(analyzer)?.as_builtin().zero_range()), - _ => Ok(None), - } - } - - pub fn default_range( - &self, - analyzer: &impl GraphLike, - ) -> Result, GraphError> { - match self { - Self::User(TypeNode::Contract(_), _) => { - Ok(SolcRange::try_from_builtin(&Builtin::Address)) - } - Self::User(TypeNode::Enum(enu), _) => enu.maybe_default_range(analyzer), - Self::User(TypeNode::Ty(ty), _) => Ok(SolcRange::try_from_builtin( - BuiltInNode::from(ty.underlying(analyzer)?.ty).underlying(analyzer)?, - )), - Self::BuiltIn(bn, _) => Ok(SolcRange::try_from_builtin(bn.underlying(analyzer)?)), - Self::Concrete(cnode) => Ok(SolcRange::from(cnode.underlying(analyzer)?.clone())), - _ => Ok(None), - } - } - - pub fn is_const(&self, analyzer: &impl GraphLike) -> Result { - match self { - Self::Concrete(_) => Ok(true), - Self::User(TypeNode::Func(_), _) => Ok(false), - _ => { - if let Some(range) = self.ref_range(analyzer)? { - let min = range.evaled_range_min(analyzer)?; - let max = range.evaled_range_max(analyzer)?; - Ok(min.range_eq(&max)) - } else { - Ok(false) - } - } - } - } - - pub fn func_node(&self, _analyzer: &impl GraphLike) -> Option { - match self { - Self::User(TypeNode::Func(func_node), _) => Some(*func_node), - _ => None, - } - } - - pub fn evaled_range( - &self, - analyzer: &impl GraphLike, - ) -> Result, Elem)>, GraphError> { - Ok(self.ref_range(analyzer)?.map(|range| { - ( - range.evaled_range_min(analyzer).unwrap(), - range.evaled_range_max(analyzer).unwrap(), - ) - })) - } - - pub fn try_match_index_dynamic_ty( - &self, - index: ContextVarNode, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result, GraphError> { - match self { - Self::BuiltIn(_node, None) => Ok(None), - Self::BuiltIn(node, Some(r)) => { - if let Builtin::Bytes(size) = node.underlying(analyzer)? { - if r.is_const(analyzer)? && index.is_const(analyzer)? 
{ - let Some(min) = r.evaled_range_min(analyzer)?.maybe_concrete() else { - return Ok(None); - }; - let Concrete::Bytes(_, val) = min.val else { - return Ok(None); - }; - let Some(idx) = index.evaled_range_min(analyzer)?.unwrap().maybe_concrete() else { - return Ok(None) - }; - let Concrete::Uint(_, idx) = idx.val else { - return Ok(None); - }; - if idx.low_u32() < (*size as u32) { - let mut h = H256::default(); - h.0[0] = val.0[idx.low_u32() as usize]; - let ret_val = Concrete::Bytes(1, h); - let node = analyzer.add_node(Node::Concrete(ret_val)); - return Ok(Some(node)); - } - } - Ok(None) - } else { - // check if the index exists as a key - let min = r.range_min(); - if let Some(map) = min.dyn_map() { - let name = index.name(analyzer)?; - let is_const = index.is_const(analyzer)?; - if let Some((_k, val)) = map.iter().find(|(k, _v)| match k { - Elem::Dynamic(Dynamic { idx, .. }) => match analyzer.node(*idx) { - Node::ContextVar(_) => { - let cvar = ContextVarNode::from(*idx); - cvar.name(analyzer).unwrap() == name - } - _ => false, - }, - c @ Elem::Concrete(..) if is_const => { - let index_val = index.evaled_range_min(analyzer).unwrap().unwrap(); - index_val.range_eq(c) - } - _ => false, - }) { - if let Some(idx) = val.node_idx() { - return Ok(idx.into()); - } else if let Some(c) = val.concrete() { - let cnode = analyzer.add_node(Node::Concrete(c)); - return Ok(cnode.into()); - } - } - } - Ok(None) - } - } - Self::Concrete(node) => { - if index.is_const(analyzer)? { - let idx = index - .evaled_range_min(analyzer) - .unwrap() - .unwrap() - .concrete() - .unwrap() - .uint_val() - .unwrap(); - match node.underlying(analyzer)? { - Concrete::Bytes(size, val) => { - if idx.low_u32() < (*size as u32) { - let mut h = H256::default(); - h.0[0] = val.0[idx.low_u32() as usize]; - let ret_val = Concrete::Bytes(1, h); - let node = analyzer.add_node(Node::Concrete(ret_val)); - return Ok(Some(node)); - } - } - Concrete::DynBytes(elems) => { - if idx.low_u32() < (elems.len() as u32) { - let mut h = H256::default(); - h.0[0] = elems[idx.low_u32() as usize]; - let ret_val = Concrete::Bytes(1, h); - let node = analyzer.add_node(Node::Concrete(ret_val)); - return Ok(Some(node)); - } - } - Concrete::String(st) => { - if idx.low_u32() < (st.len() as u32) { - let mut h = H256::default(); - h.0[0] = st.as_bytes()[idx.low_u32() as usize]; - let ret_val = Concrete::Bytes(1, h); - let node = analyzer.add_node(Node::Concrete(ret_val)); - return Ok(Some(node)); - } - } - Concrete::Array(elems) => { - if idx.low_u32() < (elems.len() as u32) { - let elem = &elems[idx.low_u32() as usize]; - let node = analyzer.add_node(Node::Concrete(elem.clone())); - return Ok(Some(node)); - } - } - _ => {} - } - } - Ok(None) - } - _ => Ok(None), - } - } - - pub fn get_index_dynamic_ty( - &self, - index: ContextVarNode, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - if let Some(var_ty) = self.try_match_index_dynamic_ty(index, analyzer)? 
{ - Ok(VarType::try_from_idx(analyzer, var_ty).unwrap()) - } else { - match self { - Self::BuiltIn(node, _) => node.dynamic_underlying_ty(analyzer), - Self::Concrete(node) => node.dynamic_underlying_ty(analyzer), - e => Err(GraphError::NodeConfusion(format!( - "Node type confusion: expected node to be Builtin but it was: {e:?}" - ))), - } - } - } - - pub fn dynamic_underlying_ty( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - match self { - Self::BuiltIn(node, _) => node.dynamic_underlying_ty(analyzer), - Self::Concrete(node) => node.dynamic_underlying_ty(analyzer), - e => Err(GraphError::NodeConfusion(format!( - "Node type confusion: expected node to be Builtin but it was: {e:?}" - ))), - } - } - - pub fn is_mapping(&self, analyzer: &impl GraphLike) -> Result { - match self { - Self::BuiltIn(node, _) => Ok(node.is_mapping(analyzer)?), - _ => Ok(false), - } - } - - pub fn is_sized_array(&self, analyzer: &impl GraphLike) -> Result { - match self { - Self::BuiltIn(node, _) => node.is_sized_array(analyzer), - Self::Concrete(node) => node.is_sized_array(analyzer), - _ => Ok(false), - } - } - - pub fn maybe_array_size(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - match self { - Self::BuiltIn(node, _) => node.maybe_array_size(analyzer), - Self::Concrete(node) => node.maybe_array_size(analyzer), - _ => Ok(None), - } - } - - pub fn is_dyn(&self, analyzer: &impl GraphLike) -> Result { - match self { - Self::BuiltIn(node, _) => Ok(node.is_dyn(analyzer)?), - Self::Concrete(node) => Ok(node.is_dyn(analyzer)?), - _ => Ok(false), - } - } - - pub fn is_indexable(&self, analyzer: &impl GraphLike) -> Result { - match self { - Self::BuiltIn(node, _) => Ok(node.is_indexable(analyzer)?), - Self::Concrete(node) => Ok(node.is_indexable(analyzer)?), - _ => Ok(false), - } - } - - pub fn ty_eq(&self, other: &Self, analyzer: &impl GraphLike) -> Result { - match (self, other) { - (VarType::User(s, _), VarType::User(o, _)) => { - Ok(s.unresolved_as_resolved(analyzer)? == o.unresolved_as_resolved(analyzer)?) - } - (VarType::BuiltIn(s, _), VarType::BuiltIn(o, _)) => { - match (s.underlying(analyzer)?, o.underlying(analyzer)?) { - (Builtin::Array(l), Builtin::Array(r)) => Ok(l - .unresolved_as_resolved(analyzer)? - == r.unresolved_as_resolved(analyzer)?), - (Builtin::SizedArray(l_size, l), Builtin::SizedArray(r_size, r)) => Ok(l - .unresolved_as_resolved(analyzer)? - == r.unresolved_as_resolved(analyzer)? - && l_size == r_size), - (Builtin::Mapping(lk, lv), Builtin::Mapping(rk, rv)) => Ok(lk - .unresolved_as_resolved(analyzer)? - == rk.unresolved_as_resolved(analyzer)? - && lv.unresolved_as_resolved(analyzer)? - == rv.unresolved_as_resolved(analyzer)?), - (l, r) => Ok(l == r), - } - } - (VarType::Concrete(s), VarType::Concrete(o)) => Ok(s - .underlying(analyzer)? 
- .equivalent_ty(o.underlying(analyzer)?)), - _ => Ok(false), - } - } - - pub fn as_string(&self, analyzer: &impl GraphLike) -> Result { - match self { - VarType::User(ty_node, _) => ty_node.as_string(analyzer), - VarType::BuiltIn(bn, _) => match analyzer.node(*bn) { - Node::Builtin(bi) => bi.as_string(analyzer), - _ => unreachable!(), - }, - VarType::Concrete(c) => c.underlying(analyzer)?.as_builtin().as_string(analyzer), - } - } - - pub fn is_int(&self, analyzer: &impl GraphLike) -> Result { - match self { - VarType::BuiltIn(bn, _) => Ok(bn.underlying(analyzer)?.is_int()), - VarType::Concrete(c) => Ok(c.underlying(analyzer)?.is_int()), - _ => Ok(false), - } - } - - pub fn as_builtin(&self, analyzer: &impl GraphLike) -> Result { - match self { - VarType::BuiltIn(bn, _) => Ok(bn.underlying(analyzer)?.clone()), - VarType::Concrete(c) => Ok(c.underlying(analyzer)?.as_builtin()), - e => Err(GraphError::NodeConfusion(format!( - "Expected to be builtin castable but wasnt: {e:?}" - ))), - } - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub enum TypeNode { - Contract(ContractNode), - Struct(StructNode), - Enum(EnumNode), - Ty(TyNode), - Func(FunctionNode), - Unresolved(NodeIdx), -} - -impl TypeNode { - pub fn as_string(&self, analyzer: &impl GraphLike) -> Result { - match self { - TypeNode::Contract(n) => n.name(analyzer), - TypeNode::Struct(n) => n.name(analyzer), - TypeNode::Enum(n) => n.name(analyzer), - TypeNode::Ty(n) => n.name(analyzer), - TypeNode::Func(n) => Ok(format!("function {}", n.name(analyzer)?)), - TypeNode::Unresolved(n) => Ok(format!("UnresolvedType<{:?}>", analyzer.node(*n))), - } - } - - pub fn unresolved_as_resolved(&self, analyzer: &impl GraphLike) -> Result { - match self { - TypeNode::Unresolved(n) => match analyzer.node(*n) { - Node::Unresolved(ident) => Err(GraphError::NodeConfusion(format!( - "Expected the type \"{}\" to be resolved by now", - ident.name - ))), - Node::Contract(..) => Ok(TypeNode::Contract((*n).into())), - Node::Struct(..) => Ok(TypeNode::Struct((*n).into())), - Node::Enum(..) => Ok(TypeNode::Enum((*n).into())), - Node::Ty(..) => Ok(TypeNode::Ty((*n).into())), - Node::Function(..) => Ok(TypeNode::Func((*n).into())), - _ => Err(GraphError::NodeConfusion( - "Tried to type a non-typeable element".to_string(), - )), - }, - _ => Ok(*self), - } - } -} - -impl From for NodeIdx { - fn from(val: TypeNode) -> Self { - match val { - TypeNode::Contract(n) => n.into(), - TypeNode::Struct(n) => n.into(), - TypeNode::Enum(n) => n.into(), - TypeNode::Ty(n) => n.into(), - TypeNode::Func(n) => n.into(), - TypeNode::Unresolved(n) => n, - } - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct BuiltInNode(pub usize); - -impl BuiltInNode { - pub fn underlying<'a>(&self, analyzer: &'a impl GraphLike) -> Result<&'a Builtin, GraphError> { - match analyzer.node(*self) { - Node::Builtin(b) => Ok(b), - e => Err(GraphError::NodeConfusion(format!( - "Node type confusion: expected node to be Builtin but it was: {e:?}" - ))), - } - } - - pub fn num_size(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - let underlying = self.underlying(analyzer)?; - Ok(underlying.num_size()) - } - - pub fn implicitly_castable_to( - &self, - other: &Self, - analyzer: &impl GraphLike, - ) -> Result { - Ok(self - .underlying(analyzer)? 
- .implicitly_castable_to(other.underlying(analyzer)?)) - } - - pub fn max_size( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - let m = self.underlying(analyzer)?.max_size(); - Ok(analyzer.builtin_or_add(m).into()) - } - - pub fn dynamic_underlying_ty( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - match self.underlying(analyzer)? { - Builtin::Array(v_ty) | Builtin::SizedArray(_, v_ty) => { - v_ty.unresolved_as_resolved(analyzer) - } - Builtin::Mapping(_, v_ty) => v_ty.unresolved_as_resolved(analyzer), - Builtin::DynamicBytes | Builtin::Bytes(_) => Ok(VarType::BuiltIn( - analyzer.builtin_or_add(Builtin::Bytes(1)).into(), - Some(SolcRange::new( - Elem::from(Concrete::from(vec![0x00])), - Elem::from(Concrete::from(vec![0xff])), - vec![], - )), - )), - e => Err(GraphError::NodeConfusion(format!( - "Node type confusion: expected node to be Builtin::Array but it was: {e:?}" - ))), - } - } - - pub fn is_mapping(&self, analyzer: &impl GraphLike) -> Result { - Ok(matches!(self.underlying(analyzer)?, Builtin::Mapping(_, _))) - } - - pub fn is_sized_array(&self, analyzer: &impl GraphLike) -> Result { - Ok(matches!( - self.underlying(analyzer)?, - Builtin::SizedArray(_, _) - )) - } - - pub fn maybe_array_size(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - match self.underlying(analyzer)? { - Builtin::SizedArray(s, _) => Ok(Some(*s)), - Builtin::Bytes(s) => Ok(Some(U256::from(*s))), - _ => Ok(None), - } - } - - pub fn is_dyn(&self, analyzer: &impl GraphLike) -> Result { - Ok(self.underlying(analyzer)?.is_dyn()) - } - - pub fn is_indexable(&self, analyzer: &impl GraphLike) -> Result { - Ok(self.underlying(analyzer)?.is_indexable()) - } - - pub fn zero_range(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - Ok(self.underlying(analyzer)?.zero_range()) - } -} - -impl From for BuiltInNode { - fn from(idx: NodeIdx) -> Self { - BuiltInNode(idx.index()) - } -} - -impl From for NodeIdx { - fn from(val: BuiltInNode) -> Self { - val.0.into() - } -} - -#[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)] -pub enum Builtin { - Address, - AddressPayable, - Payable, - Bool, - String, - Int(u16), - Uint(u16), - Bytes(u8), - Rational, - DynamicBytes, - Array(VarType), - SizedArray(U256, VarType), - Mapping(VarType, VarType), - Func(Vec, Vec), -} - -impl Builtin { - pub fn unresolved_as_resolved( - &self, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Result { - match self { - Builtin::Array(n) => Ok(Builtin::Array(n.unresolved_as_resolved(analyzer)?)), - Builtin::SizedArray(s, n) => { - Ok(Builtin::SizedArray(*s, n.unresolved_as_resolved(analyzer)?)) - } - Builtin::Mapping(k, v) => Ok(Builtin::Mapping( - k.unresolved_as_resolved(analyzer)?, - v.unresolved_as_resolved(analyzer)?, - )), - _ => Ok(self.clone()), - } - } - - pub fn possible_builtins_from_ty_inf(&self) -> Vec { - let mut builtins = vec![]; - match self { - Builtin::Uint(size) => { - let mut s = *size; - while s > 0 { - builtins.push(Builtin::Uint(s)); - s -= 8; - } - } - Builtin::Int(size) => { - let mut s = *size; - while s > 0 { - builtins.push(Builtin::Int(s)); - s -= 8; - } - } - Builtin::Bytes(size) => { - let mut s = *size; - while s > 0 { - builtins.push(Builtin::Bytes(s)); - s -= 1; - } - } - _ => {} - } - builtins - } - - pub fn zero_range(&self) -> Option { - match self { - Builtin::Address | Builtin::AddressPayable | Builtin::Payable => { - let zero = Concrete::Address(Address::from_slice(&[0x00; 20])); - 
Some(SolcRange::new(zero.clone().into(), zero.into(), vec![])) - } - Builtin::Bool => SolcRange::from(Concrete::from(false)), - Builtin::String => SolcRange::from(Concrete::from("".to_string())), - Builtin::Int(_) => SolcRange::from(Concrete::from(I256::from(0))), - Builtin::Uint(_) => SolcRange::from(Concrete::from(U256::from(0))), - Builtin::Bytes(s) => SolcRange::from(Concrete::Bytes(*s, H256::zero())), - Builtin::DynamicBytes | Builtin::Array(_) | Builtin::Mapping(_, _) => { - let zero = Elem::ConcreteDyn(Box::new(RangeDyn { - minimized: None, - maximized: None, - len: Elem::from(Concrete::from(U256::zero())), - val: Default::default(), - loc: Loc::Implicit, - })); - Some(SolcRange::new(zero.clone(), zero, vec![])) - } - Builtin::SizedArray(s, _) => { - let sized = Elem::ConcreteDyn(Box::new(RangeDyn { - minimized: None, - maximized: None, - len: Elem::from(Concrete::from(*s)), - val: Default::default(), - loc: Loc::Implicit, - })); - Some(SolcRange::new(sized.clone(), sized, vec![])) - } - Builtin::Rational | Builtin::Func(_, _) => None, - } - } - pub fn try_from_ty( - ty: Type, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ) -> Option { - use Type::*; - match ty { - Address => Some(Builtin::Address), - AddressPayable => Some(Builtin::AddressPayable), - Payable => Some(Builtin::Payable), - Bool => Some(Builtin::Bool), - String => Some(Builtin::String), - Int(size) => Some(Builtin::Int(size)), - Uint(size) => Some(Builtin::Uint(size)), - Bytes(size) => Some(Builtin::Bytes(size)), - Rational => Some(Builtin::Rational), - DynamicBytes => Some(Builtin::DynamicBytes), - Mapping { key, value, .. } => { - let key_idx = analyzer.parse_expr(&key, None); - let val_idx = analyzer.parse_expr(&value, None); - let key_var_ty = VarType::try_from_idx(analyzer, key_idx)?; - let val_var_ty = VarType::try_from_idx(analyzer, val_idx)?; - Some(Builtin::Mapping(key_var_ty, val_var_ty)) - } - Function { - params, - attributes: _, - returns, - } => { - let inputs = params - .iter() - .filter_map(|(_, param)| param.as_ref()) - .map(|param| analyzer.parse_expr(¶m.ty, None)) - .collect::>(); - let inputs = inputs - .iter() - .map(|idx| VarType::try_from_idx(analyzer, *idx).expect("Couldn't parse param")) - .collect::>(); - let mut outputs = vec![]; - if let Some((params, _attrs)) = returns { - let tmp_outputs = params - .iter() - .filter_map(|(_, param)| param.as_ref()) - .map(|param| analyzer.parse_expr(¶m.ty, None)) - .collect::>(); - outputs = tmp_outputs - .iter() - .map(|idx| { - VarType::try_from_idx(analyzer, *idx) - .expect("Couldn't parse output param") - }) - .collect::>(); - } - Some(Builtin::Func(inputs, outputs)) - } - } - } - - pub fn is_dyn(&self) -> bool { - matches!( - self, - Builtin::DynamicBytes - | Builtin::Array(..) - | Builtin::SizedArray(..) - | Builtin::Mapping(..) - | Builtin::String - ) - } - - pub fn requires_input(&self) -> bool { - matches!( - self, - Builtin::Array(..) | Builtin::SizedArray(..) | Builtin::Mapping(..) - ) - } - - pub fn num_size(&self) -> Option { - match self { - Builtin::Uint(size) => Some(*size), - Builtin::Int(size) => Some(*size), - _ => None, - } - } - - pub fn is_int(&self) -> bool { - matches!(self, Builtin::Int(_)) - } - - pub fn is_indexable(&self) -> bool { - matches!( - self, - Builtin::DynamicBytes - | Builtin::Array(..) - | Builtin::SizedArray(..) - | Builtin::Mapping(..) - | Builtin::Bytes(..) 
-                | Builtin::String
-        )
-    }
-
-    pub fn implicitly_castable_to(&self, other: &Self) -> bool {
-        use Builtin::*;
-        match (self, other) {
-            (Address, Address) => true,
-            (Address, AddressPayable) => true,
-            (Address, Payable) => true,
-            (AddressPayable, Address) => true,
-            (AddressPayable, Payable) => true,
-            (AddressPayable, AddressPayable) => true,
-            (Payable, Address) => true,
-            (Payable, AddressPayable) => true,
-            (Payable, Payable) => true,
-            (Bool, Bool) => true,
-            (Rational, Rational) => true,
-            (DynamicBytes, DynamicBytes) => true,
-            (String, String) => true,
-            (Uint(from_size), Uint(to_size)) => from_size <= to_size,
-            (Int(from_size), Int(to_size)) => from_size <= to_size,
-            (Bytes(from_size), Bytes(to_size)) => from_size <= to_size,
-            _ => false,
-        }
-    }
-
-    pub fn max_size(&self) -> Self {
-        use Builtin::*;
-        match self {
-            Uint(_) => Uint(256),
-            Int(_from_size) => Uint(256),
-            Bytes(_from_size) => Uint(32),
-            _ => self.clone(),
-        }
-    }
-
-    pub fn as_string(&self, analyzer: &impl GraphLike) -> Result<String, GraphError> {
-        use Builtin::*;
-        match self {
-            Address => Ok("address".to_string()),
-            AddressPayable => Ok("address".to_string()),
-            Payable => Ok("address".to_string()),
-            Bool => Ok("bool".to_string()),
-            String => Ok("string".to_string()),
-            Int(size) => Ok(format!("int{size}")),
-            Uint(size) => Ok(format!("uint{size}")),
-            Bytes(size) => Ok(format!("bytes{size}")),
-            Rational => Ok("rational".to_string()),
-            DynamicBytes => Ok("bytes".to_string()),
-            Array(v_ty) => Ok(format!(
-                "{}[]",
-                v_ty.unresolved_as_resolved(analyzer)?.as_string(analyzer)?
-            )),
-            SizedArray(s, v_ty) => Ok(format!(
-                "{}[{}]",
-                v_ty.unresolved_as_resolved(analyzer)?.as_string(analyzer)?,
-                s
-            )),
-            Mapping(key_ty, v_ty) => Ok(format!(
-                "mapping ({} => {})",
-                key_ty
-                    .unresolved_as_resolved(analyzer)?
-                    .as_string(analyzer)?,
-                v_ty.unresolved_as_resolved(analyzer)?.as_string(analyzer)?
- )), - Func(inputs, outputs) => Ok(format!( - "function({}) returns ({})", - inputs - .iter() - .map(|input| input.as_string(analyzer).unwrap()) - .collect::>() - .join(", "), - outputs - .iter() - .map(|output| output.as_string(analyzer).unwrap()) - .collect::>() - .join(", ") - )), - } - } -} diff --git a/shared/src/range/elem_ty.rs b/shared/src/range/elem_ty.rs deleted file mode 100644 index 8cd49f88..00000000 --- a/shared/src/range/elem_ty.rs +++ /dev/null @@ -1,2258 +0,0 @@ -use crate::analyzer::GraphError; -use crate::context::ContextVarNode; -use crate::nodes::{TypeNode, VarType}; -use crate::range::range_ops::*; -use crate::range::Range; -use crate::range::{elem::RangeOp, *}; -use crate::{Concrete, NodeIdx}; -use solang_parser::pt::Loc; -use std::collections::BTreeMap; -use std::ops::*; - -/// A dynamic range element value -#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] -pub struct Dynamic { - /// Index of the node that is referenced - pub idx: NodeIdx, - pub minimized: Option>, - pub maximized: Option>, -} - -impl Dynamic { - pub fn new(idx: NodeIdx) -> Self { - Self { - idx, - minimized: None, - maximized: None, - } - } -} - -impl RangeElem for Dynamic { - fn range_eq(&self, _other: &Self) -> bool { - false - } - - fn range_ord(&self, _other: &Self) -> Option { - todo!() - } - - fn dependent_on(&self) -> Vec { - vec![ContextVarNode::from(self.idx)] - } - - fn update_deps(&mut self, mapping: &BTreeMap) { - if let Some(new) = mapping.get(&ContextVarNode::from(self.idx)) { - self.idx = NodeIdx::from(new.0); - } - } - - fn filter_recursion(&mut self, _: NodeIdx, _: NodeIdx) {} - - fn maximize(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - if let Some(MinMaxed::Maximized(cached)) = self.maximized.clone() { - return Ok(*cached); - } - - let cvar = ContextVarNode::from(self.idx).underlying(analyzer)?; - match &cvar.ty { - VarType::User(TypeNode::Contract(_), maybe_range) - | VarType::User(TypeNode::Enum(_), maybe_range) - | VarType::User(TypeNode::Ty(_), maybe_range) - | VarType::BuiltIn(_, maybe_range) => { - if let Some(range) = maybe_range { - range.evaled_range_max(analyzer) - } else { - Ok(Elem::Dynamic(self.clone())) - } - } - VarType::Concrete(concrete_node) => Ok(Elem::Concrete(RangeConcrete { - val: concrete_node.underlying(analyzer)?.clone(), - loc: cvar.loc.unwrap_or(Loc::Implicit), - })), - _e => Ok(Elem::Dynamic(self.clone())), - } - } - - fn minimize(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - if let Some(MinMaxed::Minimized(cached)) = self.minimized.clone() { - return Ok(*cached); - } - - let cvar = ContextVarNode::from(self.idx).underlying(analyzer)?; - match &cvar.ty { - VarType::User(TypeNode::Contract(_), maybe_range) - | VarType::User(TypeNode::Enum(_), maybe_range) - | VarType::User(TypeNode::Ty(_), maybe_range) - | VarType::BuiltIn(_, maybe_range) => { - if let Some(range) = maybe_range { - range.evaled_range_min(analyzer) - } else { - Ok(Elem::Dynamic(self.clone())) - } - } - VarType::Concrete(concrete_node) => Ok(Elem::Concrete(RangeConcrete { - val: concrete_node.underlying(analyzer)?.clone(), - loc: cvar.loc.unwrap_or(Loc::Implicit), - })), - _e => Ok(Elem::Dynamic(self.clone())), - } - } - - fn simplify_maximize(&self, _analyzer: &impl GraphLike) -> Result, GraphError> { - // let cvar = ContextVarNode::from(self.idx); - // if cvar.is_symbolic(analyzer)? { - Ok(Elem::Dynamic(self.clone())) - // } - // if !cvar.is_tmp(analyzer)? 
{ - // return Ok(Elem::Dynamic(self.clone())) - // } - // let cvar = cvar.underlying(analyzer)?; - // match &cvar.ty { - // VarType::User(TypeNode::Contract(_), maybe_range) - // | VarType::User(TypeNode::Enum(_), maybe_range) - // | VarType::User(TypeNode::Ty(_), maybe_range) - // | VarType::BuiltIn(_, maybe_range) => { - // if let Some(range) = maybe_range { - // range.simplified_range_max(analyzer) - // } else { - // Ok(Elem::Dynamic(self.clone())) - // } - // } - // VarType::Concrete(concrete_node) => Ok(Elem::Concrete(RangeConcrete { - // val: concrete_node.underlying(analyzer)?.clone(), - // loc: cvar.loc.unwrap_or(Loc::Implicit), - // })), - // _e => Ok(Elem::Dynamic(self.clone())), - // } - } - fn simplify_minimize(&self, _analyzer: &impl GraphLike) -> Result, GraphError> { - // let cvar = ContextVarNode::from(self.idx); - // if cvar.is_symbolic(analyzer)? { - Ok(Elem::Dynamic(self.clone())) - // } - // if !cvar.is_tmp(analyzer)? { - // return Ok(Elem::Dynamic(self.clone())) - // } - // let cvar = cvar.underlying(analyzer)?; - - // match &cvar.ty { - // VarType::User(TypeNode::Contract(_), maybe_range) - // | VarType::User(TypeNode::Enum(_), maybe_range) - // | VarType::User(TypeNode::Ty(_), maybe_range) - // | VarType::BuiltIn(_, maybe_range) => { - // if let Some(range) = maybe_range { - // range.simplified_range_min(analyzer) - // } else { - // Ok(Elem::Dynamic(self.clone())) - // } - // } - // VarType::Concrete(concrete_node) => Ok(Elem::Concrete(RangeConcrete { - // val: concrete_node.underlying(analyzer)?.clone(), - // loc: cvar.loc.unwrap_or(Loc::Implicit), - // })), - // _e => Ok(Elem::Dynamic(self.clone())), - // } - } - - fn cache_maximize(&mut self, g: &impl GraphLike) -> Result<(), GraphError> { - if self.maximized.is_none() { - self.maximized = Some(MinMaxed::Maximized(Box::new(self.maximize(g)?))); - } - Ok(()) - } - - fn cache_minimize(&mut self, g: &impl GraphLike) -> Result<(), GraphError> { - if self.minimized.is_none() { - self.minimized = Some(MinMaxed::Minimized(Box::new(self.minimize(g)?))); - } - Ok(()) - } - - fn uncache(&mut self) { - self.minimized = None; - self.maximized = None; - } -} - -/// A concrete value for a range element -#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] -pub struct RangeDyn { - pub minimized: Option>, - pub maximized: Option>, - pub len: Elem, - pub val: BTreeMap, Elem>, - pub loc: Loc, -} -impl RangeDyn { - pub fn set_len(&mut self, new_len: Elem) { - self.len = new_len; - } - - pub fn contains_node(&self, node_idx: NodeIdx) -> bool { - self.len.contains_node(node_idx) - // || self.val.iter().any(|(k, v)| k.contains_node(node_idx) || v.contains_node(node_idx)) - } -} - -impl RangeElem for RangeDyn { - fn range_eq(&self, _other: &Self) -> bool { - false - } - - fn range_ord(&self, _other: &Self) -> Option { - todo!() - } - - fn dependent_on(&self) -> Vec { - let mut deps: Vec = self.len.dependent_on(); - deps.extend( - self.val - .iter() - .flat_map(|(_, val)| val.dependent_on()) - .collect::>(), - ); - deps - } - - fn update_deps(&mut self, mapping: &BTreeMap) { - self.len.update_deps(mapping); - self.val - .iter_mut() - .for_each(|(_, val)| val.update_deps(mapping)); - } - - fn filter_recursion(&mut self, node_idx: NodeIdx, new_idx: NodeIdx) { - self.len.filter_recursion(node_idx, new_idx); - self.val = self - .val - .clone() - .into_iter() - .map(|(mut k, mut v)| { - k.filter_recursion(node_idx, new_idx); - v.filter_recursion(node_idx, new_idx); - (k, v) - }) - .collect(); - } - - fn maximize(&self, analyzer: 
&impl GraphLike) -> Result, GraphError> { - if let Some(MinMaxed::Maximized(cached)) = self.maximized.clone() { - return Ok(*cached); - } - - Ok(Elem::ConcreteDyn(Box::new(Self { - minimized: None, - maximized: None, - len: self.len.maximize(analyzer)?, - val: { - let mut map = BTreeMap::default(); - for (idx, val) in self.val.clone().into_iter() { - map.insert(idx, val.maximize(analyzer)?); - } - map - }, - loc: self.loc, - }))) - } - - fn minimize(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - if let Some(MinMaxed::Minimized(cached)) = self.minimized.clone() { - return Ok(*cached); - } - - Ok(Elem::ConcreteDyn(Box::new(Self { - minimized: None, - maximized: None, - len: self.len.minimize(analyzer)?, - val: { - let mut map = BTreeMap::default(); - for (idx, val) in self.val.clone().into_iter() { - map.insert(idx, val.minimize(analyzer)?); - } - map - }, - loc: self.loc, - }))) - } - - fn simplify_maximize(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - Ok(Elem::ConcreteDyn(Box::new(Self { - minimized: None, - maximized: None, - len: self.len.simplify_maximize(analyzer)?, - val: { - let mut map = BTreeMap::default(); - for (idx, val) in self.val.clone().into_iter() { - map.insert(idx, val.simplify_maximize(analyzer)?); - } - map - }, - loc: self.loc, - }))) - } - fn simplify_minimize(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - Ok(Elem::ConcreteDyn(Box::new(Self { - minimized: None, - maximized: None, - len: self.len.simplify_minimize(analyzer)?, - val: { - let mut map = BTreeMap::default(); - for (idx, val) in self.val.clone().into_iter() { - map.insert(idx, val.simplify_minimize(analyzer)?); - } - map - }, - loc: self.loc, - }))) - } - - fn cache_maximize(&mut self, g: &impl GraphLike) -> Result<(), GraphError> { - if self.maximized.is_none() { - self.maximized = Some(MinMaxed::Maximized(Box::new(self.maximize(g)?))); - } - Ok(()) - } - - fn cache_minimize(&mut self, g: &impl GraphLike) -> Result<(), GraphError> { - if self.minimized.is_none() { - self.minimized = Some(MinMaxed::Minimized(Box::new(self.minimize(g)?))); - } - Ok(()) - } - - fn uncache(&mut self) { - self.minimized = None; - self.maximized = None; - } -} - -/// A concrete value for a range element -#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] -pub struct RangeConcrete { - pub val: T, - pub loc: Loc, -} - -impl From for RangeConcrete { - fn from(c: Concrete) -> Self { - Self { - val: c, - loc: Loc::Implicit, - } - } -} - -impl RangeElem for RangeConcrete { - // fn simplify(&self, _analyzer: &impl GraphLike) -> Elem { - // Elem::Concrete(self.clone()) - // } - - fn range_eq(&self, other: &Self) -> bool { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(self_val), Some(other_val)) => self_val == other_val, - _ => match (&self.val, &other.val) { - (Concrete::Int(_, s), Concrete::Int(_, o)) => s == o, - (Concrete::DynBytes(s), Concrete::DynBytes(o)) => s == o, - (Concrete::String(s), Concrete::String(o)) => s == o, - (Concrete::DynBytes(s), Concrete::String(o)) => s == o.as_bytes(), - (Concrete::String(s), Concrete::DynBytes(o)) => s.as_bytes() == o, - (Concrete::Array(a), Concrete::Array(b)) => { - if a.len() == b.len() { - a.iter().zip(b.iter()).all(|(a, b)| { - let a = RangeConcrete { - val: a.clone(), - loc: self.loc, - }; - - let b = RangeConcrete { - val: b.clone(), - loc: other.loc, - }; - - a.range_eq(&b) - }) - } else { - false - } - } - _ => false, - }, - } - } - - fn range_ord(&self, other: &Self) -> Option { - match (self.val.into_u256(), 
other.val.into_u256()) { - (Some(self_val), Some(other_val)) => Some(self_val.cmp(&other_val)), - (Some(_), _) => { - match other.val { - Concrete::Int(_, _) => { - // if we couldnt convert an int to uint, its negative - // so self must be > other - Some(std::cmp::Ordering::Greater) - } - _ => None, - } - } - (_, Some(_)) => { - match self.val { - Concrete::Int(_, _) => { - // if we couldnt convert an int to uint, its negative - // so self must be < other - Some(std::cmp::Ordering::Less) - } - _ => None, - } - } - _ => { - match (&self.val, &other.val) { - // two negatives - (Concrete::Int(_, s), Concrete::Int(_, o)) => Some(s.cmp(o)), - (Concrete::DynBytes(b0), Concrete::DynBytes(b1)) => Some(b0.cmp(b1)), - _ => None, - } - } - } - } - - fn dependent_on(&self) -> Vec { - vec![] - } - fn update_deps(&mut self, _mapping: &BTreeMap) {} - - fn filter_recursion(&mut self, _: NodeIdx, _: NodeIdx) {} - - fn maximize(&self, _analyzer: &impl GraphLike) -> Result, GraphError> { - Ok(Elem::Concrete(self.clone())) - } - fn minimize(&self, _analyzer: &impl GraphLike) -> Result, GraphError> { - Ok(Elem::Concrete(self.clone())) - } - - fn simplify_maximize(&self, _analyzer: &impl GraphLike) -> Result, GraphError> { - Ok(Elem::Concrete(self.clone())) - } - fn simplify_minimize(&self, _analyzer: &impl GraphLike) -> Result, GraphError> { - Ok(Elem::Concrete(self.clone())) - } - - fn cache_maximize(&mut self, _g: &impl GraphLike) -> Result<(), GraphError> { - Ok(()) - } - - fn cache_minimize(&mut self, _g: &impl GraphLike) -> Result<(), GraphError> { - Ok(()) - } - fn uncache(&mut self) {} -} - -#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] -pub enum MinMaxed { - Minimized(Box>), - Maximized(Box>), -} - -/// A range expression composed of other range [`Elem`] -#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] -pub struct RangeExpr { - pub maximized: Option>, - pub minimized: Option>, - pub lhs: Box>, - pub op: RangeOp, - pub rhs: Box>, -} - -impl RangeExpr { - /// Creates a new range expression given a left hand side range [`Elem`], a [`RangeOp`], and a a right hand side range [`Elem`]. 
- pub fn new(lhs: Elem, op: RangeOp, rhs: Elem) -> RangeExpr { - RangeExpr { - maximized: None, - minimized: None, - lhs: Box::new(lhs), - op, - rhs: Box::new(rhs), - } - } - - pub fn contains_node(&self, node_idx: NodeIdx) -> bool { - self.lhs.contains_node(node_idx) || self.rhs.contains_node(node_idx) - } -} - -impl RangeElem for RangeExpr { - fn range_eq(&self, _other: &Self) -> bool { - false - } - - fn range_ord(&self, _other: &Self) -> Option { - todo!() - } - - fn dependent_on(&self) -> Vec { - let mut deps = self.lhs.dependent_on(); - deps.extend(self.rhs.dependent_on()); - deps - } - - fn update_deps(&mut self, mapping: &BTreeMap) { - self.lhs.update_deps(mapping); - self.rhs.update_deps(mapping); - } - - fn filter_recursion(&mut self, node_idx: NodeIdx, new_idx: NodeIdx) { - self.lhs.filter_recursion(node_idx, new_idx); - self.rhs.filter_recursion(node_idx, new_idx); - } - - fn maximize(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - if let Some(MinMaxed::Maximized(cached)) = self.maximized.clone() { - Ok(*cached) - } else { - self.exec_op(true, analyzer) - } - } - fn minimize(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - if let Some(MinMaxed::Minimized(cached)) = self.minimized.clone() { - Ok(*cached) - } else { - self.exec_op(false, analyzer) - } - } - - fn simplify_maximize(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - self.simplify_exec_op(true, analyzer) - } - fn simplify_minimize(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - self.simplify_exec_op(false, analyzer) - } - - fn cache_maximize(&mut self, g: &impl GraphLike) -> Result<(), GraphError> { - if self.maximized.is_none() { - self.cache_exec_op(true, g)?; - } - Ok(()) - } - - fn cache_minimize(&mut self, g: &impl GraphLike) -> Result<(), GraphError> { - if self.minimized.is_none() { - self.cache_exec_op(false, g)?; - } - Ok(()) - } - - fn uncache(&mut self) { - self.uncache_exec(); - } -} - -/// A core range element. -#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] -pub enum Elem { - /// A range element that is a reference to another node - Dynamic(Dynamic), - /// A concrete range element of type `T`. e.g.: some number like `10` - ConcreteDyn(Box>), - /// A concrete range element of type `T`. e.g.: some number like `10` - Concrete(RangeConcrete), - /// A range element that is an expression composed of other range elements - Expr(RangeExpr), - /// A null range element useful in range expressions that dont have a rhs - Null, -} - -impl std::fmt::Display for Elem { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Elem::Dynamic(Dynamic { idx, .. }) => write!(f, "idx_{}", idx.index()), - Elem::ConcreteDyn(..) => write!(f, "range_elem"), - Elem::Concrete(RangeConcrete { val, .. }) => { - write!(f, "{}", val.as_string()) - } - Elem::Expr(RangeExpr { lhs, op, rhs, .. 
}) => { - write!(f, "({} {} {})", op.to_string(), lhs, rhs) - } - _ => write!(f, ""), - } - } -} - -impl From for Elem { - fn from(c: Concrete) -> Self { - Elem::Concrete(RangeConcrete { - val: c, - loc: Loc::Implicit, - }) - } -} - -impl From for Elem { - fn from(c: ContextVarNode) -> Self { - Elem::Dynamic(Dynamic::new(c.into())) - } -} - -impl From for Elem { - fn from(idx: NodeIdx) -> Self { - Elem::Dynamic(Dynamic::new(idx)) - } -} - -impl Elem { - pub fn contains_node(&self, node_idx: NodeIdx) -> bool { - match self { - Self::Dynamic(d) => d.idx == node_idx, - Self::Concrete(_) => false, - Self::Expr(expr) => expr.contains_node(node_idx), - Self::ConcreteDyn(d) => d.contains_node(node_idx), - Self::Null => false, - } - } - - pub fn dyn_map(&self) -> Option<&BTreeMap> { - match self { - Self::ConcreteDyn(dyn_range) => Some(&dyn_range.val), - _ => None, - } - } - - pub fn dyn_map_mut(&mut self) -> Option<&mut BTreeMap> { - match self { - Self::ConcreteDyn(ref mut dyn_range) => Some(&mut dyn_range.val), - _ => None, - } - } - - /// Creates a new range element that is a cast from one type to another - pub fn cast(self, other: Self) -> Self { - let expr = RangeExpr::new(self, RangeOp::Cast, other); - Elem::Expr(expr) - } - - pub fn concat(self, other: Self) -> Self { - let expr = RangeExpr::new(self, RangeOp::Concat, other); - Elem::Expr(expr) - } - - /// Creates a new range element that is the minimum of two range elements - pub fn min(self, other: Self) -> Self { - let expr = RangeExpr::new(self, RangeOp::Min, other); - Elem::Expr(expr) - } - - /// Creates a new range element that is the maximum of two range elements - pub fn max(self, other: Self) -> Self { - let expr = RangeExpr::new(self, RangeOp::Max, other); - Elem::Expr(expr) - } - - /// Creates a new range element that is a boolean of equality of two range elements - pub fn eq(self, other: Self) -> Self { - let expr = RangeExpr::new(self, RangeOp::Eq, other); - Elem::Expr(expr) - } - - /// Creates a new range element that is a boolean of inequality of two range elements - pub fn neq(self, other: Self) -> Self { - let expr = RangeExpr::new(self, RangeOp::Neq, other); - Elem::Expr(expr) - } - - /// Creates a new range element that is one range element to the power of another - pub fn pow(self, other: Self) -> Self { - let expr = RangeExpr::new(self, RangeOp::Exp, other); - Elem::Expr(expr) - } -} - -impl From for Elem { - fn from(dy: Dynamic) -> Self { - Elem::Dynamic(dy) - } -} - -impl From> for Elem { - fn from(c: RangeConcrete) -> Self { - Elem::Concrete(c) - } -} - -impl Elem { - pub fn node_idx(&self) -> Option { - match self { - Self::Dynamic(Dynamic { idx, .. }) => Some(*idx), - _ => None, - } - } - - pub fn concrete(&self) -> Option { - match self { - Self::Concrete(RangeConcrete { val: c, .. }) => Some(c.clone()), - _ => None, - } - } - - pub fn is_negative( - &self, - maximize: bool, - analyzer: &impl GraphLike, - ) -> Result { - let res = match self { - Elem::Concrete(RangeConcrete { - val: Concrete::Int(_, val), - .. - }) if val < &I256::zero() => true, - Elem::Dynamic(dy) => { - if maximize { - dy.maximize(analyzer)?.is_negative(maximize, analyzer)? - } else { - dy.minimize(analyzer)?.is_negative(maximize, analyzer)? - } - } - Elem::Expr(expr) => { - if maximize { - expr.maximize(analyzer)?.is_negative(maximize, analyzer)? - } else { - expr.minimize(analyzer)?.is_negative(maximize, analyzer)? 
- } - } - _ => false, - }; - Ok(res) - } - - pub fn pre_evaled_is_negative(&self) -> bool { - matches!(self, Elem::Concrete(RangeConcrete { val: Concrete::Int(_, val), ..}) if val < &I256::zero()) - } - - pub fn maybe_concrete(&self) -> Option> { - match self { - Elem::Concrete(a) => Some(a.clone()), - _ => None, - } - } - - pub fn maybe_range_dyn(&self) -> Option> { - match self { - Elem::ConcreteDyn(a) => Some(*a.clone()), - _ => None, - } - } -} - -impl RangeElem for Elem { - fn range_eq(&self, other: &Self) -> bool { - match (self, other) { - (Self::Concrete(a), Self::Concrete(b)) => a.range_eq(b), - _ => false, - } - } - - fn range_ord(&self, other: &Self) -> Option { - match (self, other) { - (Self::Concrete(a), Self::Concrete(b)) => { - let ord = a.range_ord(b); - if ord.is_none() { - println!("couldnt compare: {a:?} {b:?}"); - } - - ord - } - _ => None, - } - } - - fn dependent_on(&self) -> Vec { - match self { - Self::Dynamic(d) => d.dependent_on(), - Self::Concrete(_) => vec![], - Self::Expr(expr) => expr.dependent_on(), - Self::ConcreteDyn(d) => d.dependent_on(), - Self::Null => vec![], - } - } - - fn update_deps(&mut self, mapping: &BTreeMap) { - match self { - Self::Dynamic(d) => d.update_deps(mapping), - Self::Concrete(_) => {} - Self::Expr(expr) => expr.update_deps(mapping), - Self::ConcreteDyn(d) => d.update_deps(mapping), - Self::Null => {} - } - } - - fn filter_recursion(&mut self, node_idx: NodeIdx, new_idx: NodeIdx) { - match self { - Self::Dynamic(ref mut d) => { - if d.idx == node_idx { - d.idx = new_idx - } - } - Self::Concrete(_) => {} - Self::Expr(expr) => expr.filter_recursion(node_idx, new_idx), - Self::ConcreteDyn(d) => d.filter_recursion(node_idx, new_idx), - Self::Null => {} - } - } - - fn maximize(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - use Elem::*; - let res = match self { - Dynamic(dy) => dy.maximize(analyzer)?, - Concrete(inner) => inner.maximize(analyzer)?, - ConcreteDyn(inner) => inner.maximize(analyzer)?, - Expr(expr) => expr.maximize(analyzer)?, - Null => Elem::Null, - }; - Ok(res) - } - - fn minimize(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - use Elem::*; - let res = match self { - Dynamic(dy) => dy.minimize(analyzer)?, - Concrete(inner) => inner.minimize(analyzer)?, - ConcreteDyn(inner) => inner.minimize(analyzer)?, - Expr(expr) => expr.minimize(analyzer)?, - Null => Elem::Null, - }; - Ok(res) - } - - fn simplify_maximize(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - use Elem::*; - let res = match self { - Dynamic(dy) => dy.simplify_maximize(analyzer)?, - Concrete(inner) => inner.simplify_maximize(analyzer)?, - ConcreteDyn(inner) => inner.simplify_maximize(analyzer)?, - Expr(expr) => expr.simplify_maximize(analyzer)?, - Null => Elem::Null, - }; - Ok(res) - } - - fn simplify_minimize(&self, analyzer: &impl GraphLike) -> Result, GraphError> { - use Elem::*; - let res = match self { - Dynamic(dy) => dy.simplify_minimize(analyzer)?, - Concrete(inner) => inner.simplify_minimize(analyzer)?, - ConcreteDyn(inner) => inner.simplify_minimize(analyzer)?, - Expr(expr) => expr.simplify_minimize(analyzer)?, - Null => Elem::Null, - }; - Ok(res) - } - - fn cache_maximize(&mut self, analyzer: &impl GraphLike) -> Result<(), GraphError> { - use Elem::*; - match self { - Dynamic(dy) => dy.cache_maximize(analyzer), - Concrete(inner) => inner.cache_maximize(analyzer), - ConcreteDyn(inner) => inner.cache_maximize(analyzer), - Expr(expr) => expr.cache_maximize(analyzer), - Null => Ok(()), - } - } - - fn 
cache_minimize(&mut self, analyzer: &impl GraphLike) -> Result<(), GraphError> { - use Elem::*; - match self { - Dynamic(dy) => dy.cache_minimize(analyzer), - Concrete(inner) => inner.cache_minimize(analyzer), - ConcreteDyn(inner) => inner.cache_minimize(analyzer), - Expr(expr) => expr.cache_minimize(analyzer), - Null => Ok(()), - } - } - fn uncache(&mut self) { - use Elem::*; - match self { - Dynamic(dy) => dy.uncache(), - Concrete(inner) => inner.uncache(), - ConcreteDyn(inner) => inner.uncache(), - Expr(expr) => expr.uncache(), - Null => {} - } - } -} - -impl Add for Elem { - type Output = Self; - - fn add(self, other: Elem) -> Self { - let expr = RangeExpr::new(self, RangeOp::Add(false), other); - Self::Expr(expr) - } -} - -impl Sub for Elem { - type Output = Self; - - fn sub(self, other: Elem) -> Self { - let expr = RangeExpr::new(self, RangeOp::Sub(false), other); - Self::Expr(expr) - } -} - -impl Mul for Elem { - type Output = Self; - - fn mul(self, other: Elem) -> Self { - let expr = RangeExpr::new(self, RangeOp::Mul(false), other); - Self::Expr(expr) - } -} - -impl Div for Elem { - type Output = Self; - - fn div(self, other: Elem) -> Self { - let expr = RangeExpr::new(self, RangeOp::Div(false), other); - Self::Expr(expr) - } -} - -impl Shl for Elem { - type Output = Self; - - fn shl(self, other: Elem) -> Self { - let expr = RangeExpr::new(self, RangeOp::Shl, other); - Self::Expr(expr) - } -} - -impl Shr for Elem { - type Output = Self; - - fn shr(self, other: Elem) -> Self { - let expr = RangeExpr::new(self, RangeOp::Shr, other); - Self::Expr(expr) - } -} - -impl Rem for Elem { - type Output = Self; - - fn rem(self, other: Elem) -> Self { - let expr = RangeExpr::new(self, RangeOp::Mod, other); - Self::Expr(expr) - } -} - -impl BitAnd for Elem { - type Output = Self; - - fn bitand(self, other: Self) -> Self::Output { - let expr = RangeExpr::new(self, RangeOp::BitAnd, other); - Self::Expr(expr) - } -} - -impl BitOr for Elem { - type Output = Self; - - fn bitor(self, other: Self) -> Self::Output { - let expr = RangeExpr::new(self, RangeOp::BitOr, other); - Self::Expr(expr) - } -} - -impl BitXor for Elem { - type Output = Self; - - fn bitxor(self, other: Self) -> Self::Output { - let expr = RangeExpr::new(self, RangeOp::BitXor, other); - Self::Expr(expr) - } -} - -impl Elem { - pub fn wrapping_add(self, other: Elem) -> Self { - let expr = RangeExpr::new(self, RangeOp::Add(true), other); - Self::Expr(expr) - } - pub fn wrapping_sub(self, other: Elem) -> Self { - let expr = RangeExpr::new(self, RangeOp::Sub(true), other); - Self::Expr(expr) - } - pub fn wrapping_mul(self, other: Elem) -> Self { - let expr = RangeExpr::new(self, RangeOp::Mul(true), other); - Self::Expr(expr) - } - pub fn wrapping_div(self, other: Elem) -> Self { - let expr = RangeExpr::new(self, RangeOp::Div(true), other); - Self::Expr(expr) - } - - /// Creates a logical AND of two range elements - pub fn and(self, other: Self) -> Self { - let expr = RangeExpr::new(self, RangeOp::And, other); - Self::Expr(expr) - } - - /// Creates a logical OR of two range elements - pub fn or(self, other: Self) -> Self { - let expr = RangeExpr::new(self, RangeOp::Or, other); - Self::Expr(expr) - } - - pub fn maybe_elem_min(&self) -> Option { - match self { - Elem::Concrete(RangeConcrete { val, .. }) => Some(Elem::from(Concrete::min(val)?)), - _ => None, - } - } - - pub fn maybe_elem_max(&self) -> Option { - match self { - Elem::Concrete(RangeConcrete { val, .. 
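The `Add`/`Sub`/`Shl`/`BitAnd`/... impls above do no arithmetic: each wraps its operands in a `RangeExpr` with the matching `RangeOp`, so `a + b` builds an unevaluated expression node that is only resolved later by `exec_op`. A standalone sketch of that deferred-evaluation pattern (toy `Expr` type, not the crate's `Elem`):

    use std::ops::Add;

    enum Expr {
        Num(i64),
        Add(Box<Expr>, Box<Expr>),
    }

    impl Add for Expr {
        type Output = Expr;
        fn add(self, other: Expr) -> Expr {
            // record the operation instead of computing it, like
            // Elem + Elem producing Elem::Expr(RangeExpr { op: Add(false), .. })
            Expr::Add(Box::new(self), Box::new(other))
        }
    }

    fn eval(e: &Expr) -> i64 {
        match e {
            Expr::Num(n) => *n,
            Expr::Add(l, r) => eval(l) + eval(r),
        }
    }

    fn main() {
        let tree = Expr::Num(1) + Expr::Num(2) + Expr::Num(3);
        assert_eq!(eval(&tree), 6); // evaluation happens only when asked for
    }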
}) => Some(Elem::from(Concrete::max(val)?)), - _ => None, - } - } -} - -/// For execution of operations to be performed on range expressions -pub trait ExecOp { - /// Attempts to execute ops by evaluating expressions and applying the op for the left-hand-side - /// and right-hand-side - fn exec_op(&self, maximize: bool, analyzer: &impl GraphLike) -> Result, GraphError> { - self.exec(self.spread(analyzer)?, maximize) - } - - fn exec( - &self, - parts: (Elem, Elem, Elem, Elem), - maximize: bool, - ) -> Result, GraphError>; - /// Cache execution - fn cache_exec_op( - &mut self, - maximize: bool, - analyzer: &impl GraphLike, - ) -> Result<(), GraphError>; - - fn spread( - &self, - analyzer: &impl GraphLike, - ) -> Result<(Elem, Elem, Elem, Elem), GraphError>; - - fn simplify_spread( - &self, - analyzer: &impl GraphLike, - ) -> Result<(Elem, Elem, Elem, Elem), GraphError>; - - fn uncache_exec(&mut self); - - fn simplify_exec_op( - &self, - maximize: bool, - analyzer: &impl GraphLike, - ) -> Result, GraphError>; - - /// Attempts to simplify an expression (i.e. just apply constant folding) - fn simplify_exec( - &self, - parts: (Elem, Elem, Elem, Elem), - maximize: bool, - ) -> Result, GraphError> { - self.exec(parts, maximize) - } -} - -impl ExecOp for RangeExpr { - fn cache_exec_op( - &mut self, - maximize: bool, - analyzer: &impl GraphLike, - ) -> Result<(), GraphError> { - self.lhs.cache_minimize(analyzer)?; - self.lhs.cache_maximize(analyzer)?; - self.rhs.cache_minimize(analyzer)?; - self.rhs.cache_maximize(analyzer)?; - let res = self.exec_op(maximize, analyzer)?; - if maximize { - self.maximized = Some(MinMaxed::Maximized(Box::new(res))); - } else { - self.minimized = Some(MinMaxed::Minimized(Box::new(res))); - } - Ok(()) - } - - fn uncache_exec(&mut self) { - self.lhs.uncache(); - self.rhs.uncache(); - } - - fn simplify_exec_op( - &self, - maximize: bool, - analyzer: &impl GraphLike, - ) -> Result, GraphError> { - let parts = self.simplify_spread(analyzer)?; - self.exec(parts, maximize) - } - - fn spread( - &self, - analyzer: &impl GraphLike, - ) -> Result< - ( - Elem, - Elem, - Elem, - Elem, - ), - GraphError, - > { - let lhs_min = self.lhs.minimize(analyzer)?; - let lhs_max = self.lhs.maximize(analyzer)?; - let rhs_min = self.rhs.minimize(analyzer)?; - let rhs_max = self.rhs.maximize(analyzer)?; - Ok((lhs_min, lhs_max, rhs_min, rhs_max)) - } - - fn simplify_spread( - &self, - analyzer: &impl GraphLike, - ) -> Result< - ( - Elem, - Elem, - Elem, - Elem, - ), - GraphError, - > { - let lhs_min = self.lhs.simplify_minimize(analyzer)?; - let lhs_max = self.lhs.simplify_maximize(analyzer)?; - let rhs_min = self.rhs.simplify_minimize(analyzer)?; - let rhs_max = self.rhs.simplify_maximize(analyzer)?; - Ok((lhs_min, lhs_max, rhs_min, rhs_max)) - } - - fn exec( - &self, - (lhs_min, lhs_max, rhs_min, rhs_max): ( - Elem, - Elem, - Elem, - Elem, - ), - maximize: bool, - ) -> Result, GraphError> { - tracing::trace!( - "executing: {} {} {}, lhs_min: {}, lhs_max: {}, rhs_min: {}, rhs_max: {}", - self.lhs, - self.op.to_string(), - self.rhs, - lhs_min, - lhs_max, - rhs_min, - rhs_max - ); - - let lhs_min_neg = lhs_min.pre_evaled_is_negative(); - let lhs_max_neg = lhs_max.pre_evaled_is_negative(); - let rhs_min_neg = rhs_min.pre_evaled_is_negative(); - let rhs_max_neg = rhs_max.pre_evaled_is_negative(); - - let res = match self.op { - RangeOp::Add(unchecked) => { - if unchecked { - let candidates = vec![ - lhs_min.range_wrapping_add(&rhs_min), - lhs_min.range_wrapping_add(&rhs_max), - 
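`spread` above evaluates the four interval endpoints (`lhs_min`, `lhs_max`, `rhs_min`, `rhs_max`) once so `exec` can reuse them for every pairing. For monotone operations such as checked addition the extremes pair min with min and max with max, which is why the non-wrapping branch below simply computes `lhs_max + rhs_max` and `lhs_min + rhs_min`. A plain-`i64` sketch of that idea (illustration only):

    #[derive(Clone, Copy)]
    struct Interval { min: i64, max: i64 }

    // mirror of spread(): hand back the four endpoints in one tuple
    fn spread(lhs: Interval, rhs: Interval) -> (i64, i64, i64, i64) {
        (lhs.min, lhs.max, rhs.min, rhs.max)
    }

    fn checked_add_bounds(lhs: Interval, rhs: Interval) -> Interval {
        let (lmin, lmax, rmin, rmax) = spread(lhs, rhs);
        // addition is monotone, so no other pairing can beat these two
        Interval { min: lmin + rmin, max: lmax + rmax }
    }

    fn main() {
        let r = checked_add_bounds(Interval { min: -3, max: 5 }, Interval { min: 2, max: 7 });
        assert_eq!((r.min, r.max), (-1, 12));
    }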
lhs_max.range_wrapping_add(&rhs_min), - lhs_max.range_wrapping_add(&rhs_max), - lhs_max.range_add(&rhs_max), - lhs_min.range_add(&rhs_min), - ]; - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - } else if maximize { - // if we are maximizing, the largest value will always just be the the largest value + the largest value - lhs_max - .range_add(&rhs_max) - .unwrap_or(Elem::Expr(self.clone())) - } else { - lhs_min - .range_add(&rhs_min) - .unwrap_or(Elem::Expr(self.clone())) - } - } - RangeOp::Sub(unchecked) => { - if unchecked { - let candidates = vec![ - lhs_min.range_wrapping_sub(&rhs_min), - lhs_min.range_wrapping_sub(&rhs_max), - lhs_max.range_wrapping_sub(&rhs_min), - lhs_max.range_wrapping_sub(&rhs_max), - ]; - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - } else if maximize { - // if we are maximizing, the largest value will always just be the the largest value - the smallest value - lhs_max - .range_sub(&rhs_min) - .unwrap_or(Elem::Expr(self.clone())) - } else { - // if we are minimizing, the smallest value will always be smallest value - largest value - lhs_min - .range_sub(&rhs_max) - .unwrap_or(Elem::Expr(self.clone())) - } - } - RangeOp::Mul(unchecked) => { - if unchecked { - let candidates = vec![ - lhs_min.range_wrapping_mul(&rhs_min), - lhs_min.range_wrapping_mul(&rhs_max), - lhs_max.range_wrapping_mul(&rhs_min), - lhs_max.range_wrapping_mul(&rhs_max), - ]; - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - } else if maximize { - // if we are maximizing, and both mins are negative and both maxes are positive, - // we dont know which will be larger of the two (i.e. 
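The unchecked arms above cannot rely on monotonicity (wrapping reorders results), so they evaluate the op for every endpoint pairing, sort the candidates, and take the last when maximizing or the first when minimizing; the same pattern repeats in the Min, Max, Shl, Shr, Cast, Exp and Concat arms further down. A standalone sketch of that candidate-enumeration pattern, with 8-bit wrapping addition emulated over `i64` (not the crate's `U256` arithmetic):

    fn extreme_of_candidates(
        lhs: (i64, i64),
        rhs: (i64, i64),
        op: impl Fn(i64, i64) -> Option<i64>,
        maximize: bool,
    ) -> Option<i64> {
        // apply the op to all four min/max pairings, keep the ones that succeed
        let mut candidates: Vec<i64> = [
            op(lhs.0, rhs.0),
            op(lhs.0, rhs.1),
            op(lhs.1, rhs.0),
            op(lhs.1, rhs.1),
        ]
        .into_iter()
        .flatten()
        .collect();
        candidates.sort();
        if maximize { candidates.last().copied() } else { candidates.first().copied() }
    }

    fn main() {
        // wrapping u8-style addition: 200 + 100 wraps around to 44
        let wrapping_add = |a: i64, b: i64| Some((a + b) % 256);
        let max = extreme_of_candidates((150, 200), (50, 100), wrapping_add, true);
        // the "largest looking" pairing (200 + 100) wraps, so 150 + 100 wins
        assert_eq!(max, Some(250));
    }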
-1*2**255 * -1*2**255 > 100*100) - match (lhs_min_neg, lhs_max_neg, rhs_min_neg, rhs_max_neg) { - (true, true, true, true) => { - // all negative, will be min * min because those are furthest from 0 resulting in the - // largest positive value - lhs_min - .range_mul(&rhs_min) - .unwrap_or(Elem::Expr(self.clone())) - } - (true, false, true, false) => { - // we dont know if lhs_max * rhs_min is larger or lhs_min * rhs_max is smaller - match (lhs_min.range_mul(&rhs_min), lhs_max.range_mul(&rhs_max)) { - (Some(min_expr), Some(max_expr)) => { - match min_expr.range_ord(&max_expr) { - Some(std::cmp::Ordering::Less) => max_expr, - Some(std::cmp::Ordering::Greater) => min_expr, - _ => max_expr, - } - } - (None, Some(max_expr)) => max_expr, - (Some(min_expr), None) => min_expr, - (None, None) => Elem::Expr(self.clone()), - } - } - (_, false, _, false) => { - // rhs_max is positive, lhs_max is positive, guaranteed to be largest max value - lhs_max - .range_mul(&rhs_max) - .unwrap_or(Elem::Expr(self.clone())) - } - (false, false, true, true) => { - // since we are forced to go negative here, values closest to 0 will ensure we get the maximum - lhs_min - .range_mul(&rhs_max) - .unwrap_or(Elem::Expr(self.clone())) - } - (true, true, false, false) => { - // since we are forced to go negative here, values closest to 0 will ensure we get the maximum - lhs_max - .range_mul(&rhs_min) - .unwrap_or(Elem::Expr(self.clone())) - } - (true, _, true, _) => lhs_min - .range_mul(&rhs_min) - .unwrap_or(Elem::Expr(self.clone())), - (false, true, _, _) | (_, _, false, true) => { - panic!("unsatisfiable range") - } - } - } else { - match (lhs_min_neg, lhs_max_neg, rhs_min_neg, rhs_max_neg) { - (false, false, false, false) => { - // rhs_min is positive, lhs_min is positive, guaranteed to be smallest max value - lhs_min - .range_mul(&rhs_min) - .unwrap_or(Elem::Expr(self.clone())) - } - (true, true, true, true) => { - // all negative, will be max * max because those are closest to 0 resulting in the - // smallest positive value - lhs_max - .range_mul(&rhs_max) - .unwrap_or(Elem::Expr(self.clone())) - } - (true, false, true, false) => { - // we dont know if lhs_max * rhs_min is smaller or lhs_min * rhs_max is smaller - match (lhs_max.range_mul(&rhs_min), lhs_min.range_mul(&rhs_max)) { - (Some(min_expr), Some(max_expr)) => { - match min_expr.range_ord(&max_expr) { - Some(std::cmp::Ordering::Less) => min_expr, - Some(std::cmp::Ordering::Greater) => max_expr, - _ => min_expr, - } - } - (None, Some(max_expr)) => max_expr, - (Some(min_expr), None) => min_expr, - (None, None) => Elem::Expr(self.clone()), - } - } - (true, _, _, false) => { - // rhs_max is positive, lhs_min is negative, guaranteed to be largest min value - lhs_min - .range_mul(&rhs_max) - .unwrap_or(Elem::Expr(self.clone())) - } - (_, false, _, true) => { - // just lhs has a positive value, most negative will be lhs_max, rhs_max - lhs_max - .range_mul(&rhs_max) - .unwrap_or(Elem::Expr(self.clone())) - } - (false, false, true, false) => lhs_max - .range_mul(&rhs_min) - .unwrap_or(Elem::Expr(self.clone())), - (false, true, _, _) | (_, _, false, true) => { - panic!("unsatisfiable range") - } - } - } - } - RangeOp::Div(_unchecked) => { - let mut candidates = vec![ - lhs_min.range_div(&rhs_min), - lhs_min.range_div(&rhs_max), - lhs_max.range_div(&rhs_min), - lhs_max.range_div(&rhs_max), - ]; - - let one = Elem::from(Concrete::from(U256::from(1))); - let negative_one = Elem::from(Concrete::from(I256::from(-1i32))); - - let min_contains = matches!( - 
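The sign analysis above exists because, for signed operands, the largest product is not necessarily `max * max`: as the comment notes, two very negative minimums can multiply to a huge positive value. A worked standalone example with `i64` (not `I256`):

    fn mul_bounds(lhs: (i64, i64), rhs: (i64, i64)) -> (i64, i64) {
        // consider every endpoint product; the sign combination decides which wins
        let products = [
            lhs.0 * rhs.0, // min * min
            lhs.0 * rhs.1, // min * max
            lhs.1 * rhs.0, // max * min
            lhs.1 * rhs.1, // max * max
        ];
        (*products.iter().min().unwrap(), *products.iter().max().unwrap())
    }

    fn main() {
        // both ranges straddle zero: [-1000, 100] * [-1000, 100]
        let (min, max) = mul_bounds((-1000, 100), (-1000, 100));
        assert_eq!(max, 1_000_000); // min * min, not max * max (which is only 10_000)
        assert_eq!(min, -100_000);  // a mixed-sign pairing gives the minimum
    }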
rhs_min.range_ord(&one), - Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) - ); - - let max_contains = matches!( - rhs_max.range_ord(&one), - Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) - ); - - if min_contains && max_contains { - candidates.push(lhs_min.range_div(&one)); - candidates.push(lhs_max.range_div(&one)); - } - - let min_contains = matches!( - rhs_min.range_ord(&negative_one), - Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) - ); - - let max_contains = matches!( - rhs_max.range_ord(&negative_one), - Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) - ); - - if min_contains && max_contains { - candidates.push(lhs_min.range_div(&negative_one)); - candidates.push(lhs_max.range_div(&negative_one)); - } - - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - // if maximize { - // match (lhs_min_neg, lhs_max_neg, rhs_min_neg, rhs_max_neg) { - // (true, false, true, false) => { - // // we dont know if lhs_min / rhs_min is larger or lhs_max / rhs_max is larger - // match (lhs_min.range_div(&rhs_min), lhs_max.range_div(&rhs_max)) { - // (Some(min_expr), Some(max_expr)) => { - // match min_expr.range_ord(&max_expr) { - // Some(std::cmp::Ordering::Less) => { - // max_expr - // } - // Some(std::cmp::Ordering::Greater) => { - // min_expr - // } - // _ => { - // max_expr - // } - // } - // } - // (None, Some(max_expr)) => { - // max_expr - // } - // (Some(min_expr), None) => { - // min_expr - // } - // (None, None) => Elem::Expr(self.clone()) - // } - // } - // (false, false, true, true) => { - // // since we are forced to go negative here, values closest to 0 will ensure we get the maximum - // lhs_min.range_div(&rhs_max).unwrap_or(Elem::Expr(self.clone())) - // } - // (true, true, false, false) => { - // // since we are forced to go negative here, values closest to 0 will ensure we get the maximum - // lhs_max.range_div(&rhs_min).unwrap_or(Elem::Expr(self.clone())) - // } - // (_, false, false, _) => { - // // lhs is positive, rhs min is positive, guaranteed to give largest - // lhs_max.range_div(&rhs_min).unwrap_or(Elem::Expr(self.clone())) - // } - // (_, false, true, false) => { - // // lhs_max is positive and rhs_max is positive, guaranteed to be lhs_max and rhs_max - // lhs_max.range_div(&rhs_max).unwrap_or(Elem::Expr(self.clone())) - // } - // (true, _, true, _) => { - // // at this point, its either all trues, or a single false - // // given that, to maximize, the only way to get a positive value is to use the most negative values - // lhs_min.range_div(&rhs_min).unwrap_or(Elem::Expr(self.clone())) - // } - // (false, true, _, _) | (_, _, false, true)=> { - // panic!("unsatisfiable range") - // } - // } - // } else { - // match (lhs_min_neg, lhs_max_neg, rhs_min_neg, rhs_max_neg) { - // (false, false, false, false) => { - // // smallest number will be lhs_min / rhs_min since both are positive - // lhs_min.range_div(&rhs_max).unwrap_or(Elem::Expr(self.clone())) - // } - // (true, true, true, true) => { - // // smallest number will be lhs_max / rhs_min since both are negative - // lhs_max.range_div(&rhs_max).unwrap_or(Elem::Expr(self.clone())) - // } - // (true, true, true, false) => { - // // The way to maintain most 
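The extra `lhs / 1` and `lhs / -1` candidates above cover the case where the divisor range contains 1 or -1: quotients are largest in magnitude when the divisor is closest to zero, so the four endpoint pairings alone can miss the true extremes. A plain-`i64` sketch (divisor endpoints chosen to avoid zero; illustration only):

    fn div_bounds(lhs: (i64, i64), rhs: (i64, i64)) -> (i64, i64) {
        let mut candidates = vec![lhs.0 / rhs.0, lhs.0 / rhs.1, lhs.1 / rhs.0, lhs.1 / rhs.1];
        // divisor range contains 1: dividing by 1 preserves the dividend
        if rhs.0 <= 1 && rhs.1 >= 1 {
            candidates.push(lhs.0);
            candidates.push(lhs.1);
        }
        // divisor range contains -1: dividing by -1 negates the dividend
        if rhs.0 <= -1 && rhs.1 >= -1 {
            candidates.push(-lhs.0);
            candidates.push(-lhs.1);
        }
        candidates.sort();
        (candidates[0], candidates[candidates.len() - 1])
    }

    fn main() {
        // endpoint pairings of [-100, 100] / [-4, 4] only reach +/-25,
        // but dividing by +/-1 reaches +/-100
        assert_eq!(div_bounds((-100, 100), (-4, 4)), (-100, 100));
    }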
negative value is lhs_min / rhs_max, all others would go - // // positive or guaranteed to be closer to 0 - // lhs_min.range_div(&rhs_max).unwrap_or(Elem::Expr(self.clone())) - // } - // (true, false, true, false) => { - // // we dont know if lhs_min / rhs_max is larger or lhs_max / rhs_min is larger - // match (lhs_min.range_div(&rhs_max), lhs_max.range_div(&rhs_min)) { - // (Some(min_expr), Some(max_expr)) => { - // match min_expr.range_ord(&max_expr) { - // Some(std::cmp::Ordering::Less) => { - // min_expr - // } - // Some(std::cmp::Ordering::Greater) => { - // max_expr - // } - // _ => { - // min_expr - // } - // } - // } - // (None, Some(max_expr)) => { - // max_expr - // } - // (Some(min_expr), None) => { - // min_expr - // } - // (None, None) => Elem::Expr(self.clone()) - // } - // } - // (_, false, true, _) => { - // // We are going negative here, so it will be most positive / least negative - // lhs_max.range_div(&rhs_min).unwrap_or(Elem::Expr(self.clone())) - // } - // (true, _, false, _) => { - // // We are going negative here, so it will be most negative / least positive - // lhs_min.range_div(&rhs_min).unwrap_or(Elem::Expr(self.clone())) - // } - // (false, true, _, _) | (_, _, false, true)=> { - // panic!("unsatisfiable range") - // } - // } - // } - } - // RangeOp::Mod => { - // lhs.range_mod(&rhs).unwrap_or(Elem::Expr(self.clone())) - // } - RangeOp::Min => { - let candidates = vec![ - lhs_min.range_min(&rhs_min), - lhs_min.range_min(&rhs_max), - lhs_max.range_min(&rhs_min), - lhs_max.range_min(&rhs_max), - ]; - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - // if maximize { - // match (lhs_min_neg, lhs_max_neg, rhs_min_neg, rhs_max_neg) { - // (true, _, true, _) | (false, _, false, _) => { - // // counter-intuitively, we want the maximum value from a call to minimum - // // this is due to the symbolic nature of the evaluation. 
We are still - // // using the minimum values but getting the larger of the minimum - // lhs_min.range_max(&rhs_min).unwrap_or(Elem::Expr(self.clone())) - // } - // (true, _, false, false) => { - // rhs_min //.range_min(&rhs_min).unwrap_or(Elem::Expr(self.clone())) - // } - // (false, false, true, _) => { - // lhs_min //lhs_min.range_min(&rhs_min).unwrap_or(Elem::Expr(self.clone())) - // } - // (false, true, _, _) | (_, _, false, true)=> { - // panic!("unsatisfiable range") - // } - // } - // } else { - // match (lhs_min_neg, lhs_max_neg, rhs_min_neg, rhs_max_neg) { - // (true, _, true, _) | (false, _, false, _) => { - // lhs_min.range_min(&rhs_min).unwrap_or(Elem::Expr(self.clone())) - // } - // (true, _, false, false) => { - // lhs_min //.range_min(&rhs_min).unwrap_or(Elem::Expr(self.clone())) - // } - // (false, false, true, _) => { - // rhs_min //lhs_min.range_min(&rhs_min).unwrap_or(Elem::Expr(self.clone())) - // } - // (false, true, _, _) | (_, _, false, true)=> { - // panic!("unsatisfiable range") - // } - // } - // } - } - RangeOp::Max => { - let candidates = vec![ - lhs_min.range_max(&rhs_min), - lhs_min.range_max(&rhs_max), - lhs_max.range_max(&rhs_min), - lhs_max.range_max(&rhs_max), - ]; - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - // if maximize { - // match (lhs_min_neg, lhs_max_neg, rhs_min_neg, rhs_max_neg) { - // (true, _, true, _) | (false, _, false, _) => { - // lhs_max.range_max(&rhs_max).unwrap_or(Elem::Expr(self.clone())) - // } - // (true, _, false, false) => { - // rhs_max //.range_min(&rhs_min).unwrap_or(Elem::Expr(self.clone())) - // } - // (false, false, true, _) => { - // lhs_max //lhs_min.range_min(&rhs_min).unwrap_or(Elem::Expr(self.clone())) - // } - // (false, true, _, _) | (_, _, false, true)=> { - // panic!("unsatisfiable range") - // } - // } - // } else { - // match (lhs_min_neg, lhs_max_neg, rhs_min_neg, rhs_max_neg) { - // (_, true, _, true) | (_, false, _, false) => { - // // counter-intuitively, we want the minimum value from a call to maximum - // // this is due to the symbolic nature of the evaluation. 
We are still - // // using the maximum values but getting the smaller of the maximum - // lhs_max.range_min(&rhs_max).unwrap_or(Elem::Expr(self.clone())) - // } - // (_, false, true, true) => { - // lhs_max //.range_min(&rhs_min).unwrap_or(Elem::Expr(self.clone())) - // } - // (true, true, _, false) => { - // rhs_max //lhs_min.range_min(&rhs_min).unwrap_or(Elem::Expr(self.clone())) - // } - // (false, true, _, _) | (_, _, false, true)=> { - // panic!("unsatisfiable range") - // } - // } - // } - } - RangeOp::Gt => { - if maximize { - lhs_max - .range_gt(&rhs_min) - .unwrap_or(Elem::Expr(self.clone())) - } else { - lhs_min - .range_gt(&rhs_max) - .unwrap_or(Elem::Expr(self.clone())) - } - } - RangeOp::Lt => { - if maximize { - lhs_min - .range_lt(&rhs_max) - .unwrap_or(Elem::Expr(self.clone())) - } else { - lhs_max - .range_lt(&rhs_min) - .unwrap_or(Elem::Expr(self.clone())) - } - } - RangeOp::Gte => { - if maximize { - lhs_max - .range_gte(&rhs_min) - .unwrap_or(Elem::Expr(self.clone())) - } else { - lhs_min - .range_gte(&rhs_max) - .unwrap_or(Elem::Expr(self.clone())) - } - } - RangeOp::Lte => { - if maximize { - lhs_min - .range_lte(&rhs_max) - .unwrap_or(Elem::Expr(self.clone())) - } else { - lhs_max - .range_lte(&rhs_min) - .unwrap_or(Elem::Expr(self.clone())) - } - } - RangeOp::Eq => { - let loc = if let Some(c) = lhs_min.maybe_concrete() { - c.loc - } else if let Some(c) = lhs_max.maybe_concrete() { - c.loc - } else if let Some(c) = rhs_min.maybe_concrete() { - c.loc - } else if let Some(c) = rhs_max.maybe_concrete() { - c.loc - } else { - Loc::Implicit - }; - - if maximize { - // check for any overlap - let lhs_max_rhs_min_ord = lhs_max.range_ord(&rhs_min); - let lhs_min_rhs_max_ord = lhs_min.range_ord(&rhs_max); - - // if lhs max is less than the rhs min, it has to be false - if matches!(lhs_max_rhs_min_ord, Some(std::cmp::Ordering::Less)) { - return Ok(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(false), - loc, - })); - } - - // if lhs min is greater than the rhs max, it has to be false - if matches!(lhs_min_rhs_max_ord, Some(std::cmp::Ordering::Greater)) { - return Ok(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(false), - loc, - })); - } - - // lhs_max >= rhs_min - // lhs_min <= rhs_max - // therefore its possible to set some value to true here - if lhs_max_rhs_min_ord.is_some() && lhs_min_rhs_max_ord.is_some() { - Elem::Concrete(RangeConcrete { - val: Concrete::Bool(true), - loc, - }) - } else { - Elem::Expr(self.clone()) - } - } else { - // check if either lhs element is *not* contained by rhs - match ( - // check if lhs is constant - lhs_min.range_ord(&lhs_max), - // check if rhs is constant - rhs_min.range_ord(&rhs_max), - // check if lhs is equal to rhs - lhs_min.range_ord(&rhs_min), - ) { - ( - Some(std::cmp::Ordering::Equal), - Some(std::cmp::Ordering::Equal), - Some(std::cmp::Ordering::Equal), - ) => Elem::Concrete(RangeConcrete { - val: Concrete::Bool(true), - loc, - }), - // if any of those are not equal, we can construct - // an element that is true - _ => Elem::Concrete(RangeConcrete { - val: Concrete::Bool(false), - loc, - }), - } - } - } - RangeOp::Neq => { - let loc = if let Some(c) = lhs_min.maybe_concrete() { - c.loc - } else if let Some(c) = lhs_max.maybe_concrete() { - c.loc - } else if let Some(c) = rhs_min.maybe_concrete() { - c.loc - } else if let Some(c) = rhs_max.maybe_concrete() { - c.loc - } else { - Loc::Implicit - }; - if maximize { - // check if either lhs element is *not* contained by rhs - match ( - // check if lhs is constant - 
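The `Eq` arm above reduces to an interval overlap test: if the two ranges are disjoint the comparison is definitely false, if both sides are the same constant it is definitely true, and anything in between stays symbolic. The `Neq` arm that follows applies essentially the same tests with the boolean answer flipped. A plain-`i64` sketch of the two decidable cases:

    // can the two ranges ever be equal? (upper bound of Eq)
    fn eq_possible(lhs: (i64, i64), rhs: (i64, i64)) -> bool {
        !(lhs.1 < rhs.0 || lhs.0 > rhs.1) // some overlap exists
    }

    // must the two ranges be equal? (lower bound of Eq)
    fn eq_certain(lhs: (i64, i64), rhs: (i64, i64)) -> bool {
        lhs.0 == lhs.1 && rhs.0 == rhs.1 && lhs.0 == rhs.0 // both the same constant
    }

    fn main() {
        assert!(!eq_possible((0, 5), (6, 9))); // disjoint: can never be equal
        assert!(eq_possible((0, 5), (3, 9)));  // overlapping: equality is possible
        assert!(!eq_certain((0, 5), (3, 9)));  // but not guaranteed
        assert!(eq_certain((4, 4), (4, 4)));   // both constant and equal
    }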
lhs_min.range_ord(&lhs_max), - // check if rhs is constant - rhs_min.range_ord(&rhs_max), - // check if lhs is equal to rhs - lhs_min.range_ord(&rhs_min), - ) { - ( - Some(std::cmp::Ordering::Equal), - Some(std::cmp::Ordering::Equal), - Some(std::cmp::Ordering::Equal), - ) => Elem::Concrete(RangeConcrete { - val: Concrete::Bool(false), - loc, - }), - // if any of those are not equal, we can construct - // an element that is true - _ => Elem::Concrete(RangeConcrete { - val: Concrete::Bool(true), - loc, - }), - } - } else { - // if they are constants and equal, we can stop here - // (rhs min == rhs max) == (lhs min == lhs max ) - if let ( - Some(std::cmp::Ordering::Equal), - Some(std::cmp::Ordering::Equal), - Some(std::cmp::Ordering::Equal), - ) = ( - // check if lhs is constant - lhs_min.range_ord(&lhs_max), - // check if rhs is constant - rhs_min.range_ord(&rhs_max), - // check if lhs is equal to rhs - lhs_min.range_ord(&rhs_min), - ) { - return Ok(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(false), - loc, - })); - } - - // they aren't constants, check if there is any overlap - match ( - // check if lhs minimum is contained within the right hand side - // this means the values could be equal - // effectively: - // rhs min <= lhs min <= rhs max - lhs_min.range_ord(&rhs_min), - lhs_min.range_ord(&rhs_max), - ) { - (_, Some(std::cmp::Ordering::Equal)) - | (Some(std::cmp::Ordering::Equal), _) - | (Some(std::cmp::Ordering::Greater), Some(std::cmp::Ordering::Less)) => { - return Ok(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(false), - loc, - })) - } - _ => {} - } - - match ( - // check if the lhs maximum is contained within the right hand side - // effectively: - // rhs min <= lhs max <= rhs max - lhs_max.range_ord(&rhs_min), - lhs_max.range_ord(&rhs_max), - ) { - (_, Some(std::cmp::Ordering::Equal)) - | (Some(std::cmp::Ordering::Equal), _) - | (Some(std::cmp::Ordering::Greater), Some(std::cmp::Ordering::Less)) => { - return Ok(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(false), - loc, - })) - } - _ => {} - } - - Elem::Expr(self.clone()) - } - } - RangeOp::Shl => { - let candidates = vec![ - lhs_min.range_shl(&rhs_min), - lhs_min.range_shl(&rhs_max), - lhs_max.range_shl(&rhs_min), - lhs_max.range_shl(&rhs_max), - ]; - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - } - RangeOp::Shr => { - let candidates = vec![ - lhs_min.range_shr(&rhs_min), - lhs_min.range_shr(&rhs_max), - lhs_max.range_shr(&rhs_min), - lhs_max.range_shr(&rhs_max), - ]; - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - } - RangeOp::And => { - let candidates = vec![ - lhs_min.range_and(&rhs_min), - lhs_min.range_and(&rhs_max), - lhs_max.range_and(&rhs_min), - lhs_max.range_and(&rhs_max), - ]; - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return 
Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - } - RangeOp::Or => { - let candidates = vec![ - lhs_min.range_or(&rhs_min), - lhs_min.range_or(&rhs_max), - lhs_max.range_or(&rhs_min), - lhs_max.range_or(&rhs_max), - ]; - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - } - RangeOp::Not => { - assert!(matches!(rhs_min, Elem::Null) && matches!(rhs_max, Elem::Null)); - let candidates = vec![lhs_min.range_not(), lhs_min.range_not()]; - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - } - RangeOp::Cast => { - // the weird thing about cast is that we really dont know until after the cast due to sizing things - // so we should just try them all then compare - let candidates = vec![ - lhs_min.range_cast(&rhs_min), - lhs_min.range_cast(&rhs_max), - lhs_max.range_cast(&rhs_min), - lhs_max.range_cast(&rhs_max), - ]; - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - } - RangeOp::Exp => { - // TODO: improve with smarter stuff - let candidates = vec![ - lhs_min.range_exp(&rhs_min), - lhs_min.range_exp(&rhs_max), - lhs_max.range_exp(&rhs_min), - lhs_max.range_exp(&rhs_max), - ]; - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - } - RangeOp::BitAnd => { - let mut candidates = vec![ - lhs_min.range_bit_and(&rhs_min), - lhs_min.range_bit_and(&rhs_max), - lhs_max.range_bit_and(&rhs_min), - lhs_max.range_bit_and(&rhs_max), - ]; - - let zero = Elem::from(Concrete::from(U256::from(0))); - let negative_one = Elem::from(Concrete::from(I256::from(-1i32))); - - let min_contains = matches!( - rhs_min.range_ord(&zero), - Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) - ); - - let max_contains = matches!( - rhs_max.range_ord(&zero), - Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) - ); - - if min_contains && max_contains { - candidates.push(lhs_min.range_bit_and(&zero)); - candidates.push(lhs_max.range_bit_and(&zero)); - } - - let min_contains = matches!( - rhs_min.range_ord(&negative_one), - Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) - ); - - let max_contains = matches!( - rhs_max.range_ord(&negative_one), - Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) - ); - - if min_contains && max_contains { - candidates.push(lhs_min.range_bit_and(&negative_one)); - 
candidates.push(lhs_max.range_bit_and(&negative_one)); - } - - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - } - RangeOp::BitOr => { - let mut candidates = vec![ - lhs_min.range_bit_or(&rhs_min), - lhs_min.range_bit_or(&rhs_max), - lhs_max.range_bit_or(&rhs_min), - lhs_max.range_bit_or(&rhs_max), - ]; - - let zero = Elem::from(Concrete::from(U256::from(0))); - let negative_one = Elem::from(Concrete::from(I256::from(-1i32))); - - let min_contains = matches!( - rhs_min.range_ord(&zero), - Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) - ); - - let max_contains = matches!( - rhs_max.range_ord(&zero), - Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) - ); - - if min_contains && max_contains { - candidates.push(lhs_min.range_bit_or(&zero)); - candidates.push(lhs_max.range_bit_or(&zero)); - } - - let min_contains = matches!( - rhs_min.range_ord(&negative_one), - Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) - ); - - let max_contains = matches!( - rhs_max.range_ord(&negative_one), - Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) - ); - - if min_contains && max_contains { - candidates.push(lhs_min.range_bit_or(&negative_one)); - candidates.push(lhs_max.range_bit_or(&negative_one)); - } - - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - } - RangeOp::BitXor => { - let mut candidates = vec![ - lhs_min.range_bit_xor(&rhs_min), - lhs_min.range_bit_xor(&rhs_max), - lhs_max.range_bit_xor(&rhs_min), - lhs_max.range_bit_xor(&rhs_max), - ]; - - let zero = Elem::from(Concrete::from(U256::from(0))); - let negative_one = Elem::from(Concrete::from(I256::from(-1i32))); - - let min_contains = matches!( - rhs_min.range_ord(&zero), - Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) - ); - - let max_contains = matches!( - rhs_max.range_ord(&zero), - Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) - ); - - if min_contains && max_contains { - // if the rhs contains zero, in xor, thats just itself - candidates.push(lhs_max.range_bit_xor(&zero)); - } - - let min_contains = matches!( - rhs_min.range_ord(&negative_one), - Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) - ); - - let max_contains = matches!( - rhs_max.range_ord(&negative_one), - Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) - ); - - if min_contains && max_contains { - candidates.push(lhs_min.range_bit_xor(&negative_one)); - candidates.push(lhs_max.range_bit_xor(&negative_one)); - } - - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - } - RangeOp::BitNot => { - let mut candidates = vec![lhs_min.range_bit_not(), 
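The bitwise arms above push extra candidates whenever the rhs range covers 0 or -1 because, in two's complement, those are the absorbing and identity elements of the bit operations, and plain endpoint pairings can miss them. A quick standalone check of the identities being relied on (`i64` instead of `U256`/`I256`):

    fn main() {
        let x: i64 = 0x1234;
        assert_eq!(x & 0, 0);   // AND with zero absorbs to zero
        assert_eq!(x & -1, x);  // AND with all-ones (-1) is the identity
        assert_eq!(x | 0, x);   // OR with zero is the identity
        assert_eq!(x | -1, -1); // OR with all-ones absorbs to -1
        assert_eq!(x ^ 0, x);   // XOR with zero is the identity
    }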
lhs_max.range_bit_not()]; - - let zero = Elem::from(Concrete::from(U256::from(0))); - - let min_contains = matches!( - lhs_min.range_ord(&zero), - Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) - ); - - let max_contains = matches!( - lhs_max.range_ord(&zero), - Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) - ); - - if min_contains && max_contains { - match lhs_min { - Elem::Concrete( - r @ RangeConcrete { - val: Concrete::Uint(..), - .. - }, - ) => candidates.push(Some(Elem::from(Concrete::max(&r.val).unwrap()))), - Elem::Concrete( - r @ RangeConcrete { - val: Concrete::Int(..), - .. - }, - ) => candidates.push(Some(Elem::from(Concrete::min(&r.val).unwrap()))), - _ => {} - } - } - - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - } - RangeOp::Concat => { - // TODO: improve with smarter stuff - let candidates = vec![ - lhs_min.range_concat(&rhs_min), - lhs_min.range_concat(&rhs_max), - lhs_max.range_concat(&rhs_min), - lhs_max.range_concat(&rhs_max), - ]; - let mut candidates = candidates.into_iter().flatten().collect::>(); - candidates.sort_by(|a, b| match a.range_ord(b) { - Some(r) => r, - _ => std::cmp::Ordering::Less, - }); - - if candidates.is_empty() { - return Ok(Elem::Expr(self.clone())); - } - - if maximize { - candidates[candidates.len() - 1].clone() - } else { - candidates[0].clone() - } - } - _ => Elem::Expr(self.clone()), - }; - Ok(res) - } -} diff --git a/shared/src/range/mod.rs b/shared/src/range/mod.rs deleted file mode 100644 index 57f8526e..00000000 --- a/shared/src/range/mod.rs +++ /dev/null @@ -1,749 +0,0 @@ -use crate::analyzer::AsDotStr; -use crate::analyzer::GraphError; -use crate::analyzer::GraphLike; -use crate::context::ContextNode; -use crate::context::ContextVarNode; -use crate::range::elem::RangeElem; -use crate::range::elem::RangeOp; - -use crate::range::elem_ty::Elem; -use crate::range::elem_ty::RangeConcrete; -use crate::range::elem_ty::RangeDyn; -use crate::range::range_string::ToRangeString; -use crate::Builtin; -use crate::Concrete; - -use crate::NodeIdx; -use ethers_core::types::Address; -use ethers_core::types::H256; -use ethers_core::types::I256; -use ethers_core::types::U256; -use std::collections::BTreeMap; - -use solang_parser::pt::Loc; - -pub mod elem; -pub mod elem_ty; -pub mod range_ops; -pub mod range_string; - -#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash)] -pub struct SolcRange { - pub min: Elem, - pub min_cached: Option>, - pub max: Elem, - pub max_cached: Option>, - pub exclusions: Vec>, -} - -impl AsDotStr for SolcRange { - fn as_dot_str(&self, analyzer: &impl GraphLike) -> String { - format!( - "[{}, {}] excluding: [{}]", - self.evaled_range_min(analyzer) - .unwrap() - .to_range_string(false, analyzer) - .s, - self.evaled_range_max(analyzer) - .unwrap() - .to_range_string(true, analyzer) - .s, - self.exclusions - .iter() - .map(|excl| excl.to_range_string(false, analyzer).s) - .collect::>() - .join(", ") - ) - } -} - -impl From for SolcRange { - fn from(b: bool) -> Self { - let val = Elem::Concrete(RangeConcrete { - val: Concrete::Bool(b), - loc: Loc::Implicit, - }); - Self::new(val.clone(), val, vec![]) - } -} - -impl SolcRange { - pub fn new(min: Elem, max: Elem, exclusions: Vec>) -> Self 
{ - Self { - min, - min_cached: None, - max, - max_cached: None, - exclusions, - } - } - - pub fn is_const(&self, analyzer: &impl GraphLike) -> Result { - let min = self.evaled_range_min(analyzer)?; - let max = self.evaled_range_max(analyzer)?; - Ok(min.range_eq(&max)) - } - - pub fn min_is_negative(&self, analyzer: &impl GraphLike) -> Result { - self.min.is_negative(false, analyzer) - } - - pub fn default_bool() -> Self { - let min = Elem::Concrete(RangeConcrete { - val: Concrete::Bool(false), - loc: Loc::Implicit, - }); - let max = Elem::Concrete(RangeConcrete { - val: Concrete::Bool(true), - loc: Loc::Implicit, - }); - Self::new(min, max, vec![]) - } - pub fn from(c: Concrete) -> Option { - match c { - c @ Concrete::Uint(_, _) - | c @ Concrete::Int(_, _) - | c @ Concrete::Bool(_) - | c @ Concrete::Address(_) - | c @ Concrete::Bytes(_, _) => Some(SolcRange::new( - Elem::Concrete(RangeConcrete { - val: c.clone(), - loc: Loc::Implicit, - }), - Elem::Concrete(RangeConcrete { - val: c, - loc: Loc::Implicit, - }), - vec![], - )), - Concrete::String(s) => { - let val = s - .chars() - .enumerate() - .map(|(i, v)| { - let idx = Elem::from(Concrete::from(U256::from(i))); - let mut bytes = [0x00; 32]; - v.encode_utf8(&mut bytes[..]); - let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); - (idx, v) - }) - .collect::>(); - let r = Elem::ConcreteDyn(Box::new(RangeDyn { - minimized: None, - maximized: None, - len: Elem::from(Concrete::from(U256::from(s.len()))), - val, - loc: Loc::Implicit, - })); - Some(SolcRange::new(r.clone(), r, vec![])) - } - Concrete::DynBytes(b) => { - let val = b - .iter() - .enumerate() - .map(|(i, v)| { - let idx = Elem::from(Concrete::from(U256::from(i))); - let mut bytes = [0x00; 32]; - bytes[0] = *v; - let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); - (idx, v) - }) - .collect::>(); - let r = Elem::ConcreteDyn(Box::new(RangeDyn { - minimized: None, - maximized: None, - len: Elem::from(Concrete::from(U256::from(b.len()))), - val, - loc: Loc::Implicit, - })); - Some(SolcRange::new(r.clone(), r, vec![])) - } - _e => None, - } - } - - pub fn try_from_builtin(builtin: &Builtin) -> Option { - match builtin { - Builtin::Uint(size) => { - if *size == 256 { - Some(SolcRange::new( - Elem::Concrete(RangeConcrete { - val: Concrete::Uint(*size, 0.into()), - loc: Loc::Implicit, - }), - Elem::Concrete(RangeConcrete { - val: Concrete::Uint(*size, U256::MAX), - loc: Loc::Implicit, - }), - vec![], - )) - } else { - Some(SolcRange::new( - Elem::Concrete(RangeConcrete { - val: Concrete::Uint(*size, 0.into()), - loc: Loc::Implicit, - }), - Elem::Concrete(RangeConcrete { - val: Concrete::Uint(*size, U256::from(2).pow(U256::from(*size)) - 1), - loc: Loc::Implicit, - }), - vec![], - )) - } - } - Builtin::Int(size) => { - if *size == 256 { - Some(SolcRange::new( - Elem::Concrete(RangeConcrete { - val: Concrete::Int(*size, I256::MIN), - loc: Loc::Implicit, - }), - Elem::Concrete(RangeConcrete { - val: Concrete::Int(*size, I256::MAX), - loc: Loc::Implicit, - }), - vec![], - )) - } else { - let max: I256 = - I256::from_raw(U256::from(1u8) << U256::from(size - 1)) - I256::from(1); - let min = max * I256::from(-1i32) - I256::from(1i32); - Some(SolcRange::new( - Elem::Concrete(RangeConcrete { - val: Concrete::Int(*size, min), - loc: Loc::Implicit, - }), - Elem::Concrete(RangeConcrete { - val: Concrete::Int(*size, max), - loc: Loc::Implicit, - }), - vec![], - )) - } - } - Builtin::Bool => Some(SolcRange::new( - Elem::Concrete(RangeConcrete { - val: Concrete::Bool(false), - loc: 
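The `try_from_builtin` bounds above are the standard fixed-width formulas: `uintN` spans `[0, 2^N - 1]` and `intN` spans `[-2^(N-1), 2^(N-1) - 1]`, with the signed minimum computed as `max * -1 - 1` exactly as in the deleted code. A standalone check for small widths, with `i128` standing in for `U256`/`I256`:

    fn uint_max(bits: u32) -> i128 {
        (1i128 << bits) - 1
    }

    fn int_bounds(bits: u32) -> (i128, i128) {
        let max = (1i128 << (bits - 1)) - 1;
        let min = max * -1 - 1; // same trick as the deleted code
        (min, max)
    }

    fn main() {
        assert_eq!(uint_max(8), 255);
        assert_eq!(int_bounds(8), (-128, 127));
        assert_eq!(int_bounds(16), (-32_768, 32_767));
    }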
Loc::Implicit, - }), - Elem::Concrete(RangeConcrete { - val: Concrete::Bool(true), - loc: Loc::Implicit, - }), - vec![], - )), - Builtin::Address | Builtin::Payable | Builtin::AddressPayable => Some(SolcRange::new( - Elem::Concrete(RangeConcrete { - val: Concrete::Address(Address::from_slice(&[0x00; 20])), - loc: Loc::Implicit, - }), - Elem::Concrete(RangeConcrete { - val: Concrete::Address(Address::from_slice(&[0xff; 20])), - loc: Loc::Implicit, - }), - vec![], - )), - Builtin::Bytes(size) => { - let v: Vec<_> = (0..32u8) - .map(|i| if i < *size { 0xff } else { 0x00 }) - .collect(); - Some(SolcRange::new( - Elem::Concrete(RangeConcrete { - val: Concrete::Bytes(*size, H256::from_slice(&[0x00; 32])), - loc: Loc::Implicit, - }), - Elem::Concrete(RangeConcrete { - val: Concrete::Bytes(*size, H256::from_slice(&v[..])), - loc: Loc::Implicit, - }), - vec![], - )) - } - Builtin::DynamicBytes - | Builtin::String - | Builtin::Array(_) - | Builtin::Mapping(_, _) => Some(SolcRange::new( - Elem::ConcreteDyn(Box::new(RangeDyn { - minimized: None, - maximized: None, - len: Elem::from(Concrete::from(U256::zero())), - val: Default::default(), - loc: Loc::Implicit, - })), - Elem::ConcreteDyn(Box::new(RangeDyn { - minimized: None, - maximized: None, - len: Elem::from(Concrete::from(U256::MAX)), - val: Default::default(), - loc: Loc::Implicit, - })), - vec![], - )), - Builtin::SizedArray(s, _) => Some(SolcRange::new( - Elem::ConcreteDyn(Box::new(RangeDyn { - minimized: None, - maximized: None, - len: Elem::from(Concrete::from(*s)), - val: Default::default(), - loc: Loc::Implicit, - })), - Elem::ConcreteDyn(Box::new(RangeDyn { - minimized: None, - maximized: None, - len: Elem::from(Concrete::from(*s)), - val: Default::default(), - loc: Loc::Implicit, - })), - vec![], - )), - _ => None, - } - } - - pub fn lte_dyn(self, other: ContextVarNode) -> Self { - Self::new(self.min, self.max.min(Elem::from(other)), self.exclusions) - } - - pub fn gte_dyn(self, other: ContextVarNode) -> Self { - Self::new(self.min.max(Elem::from(other)), self.max, self.exclusions) - } - - pub fn lt_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min, - self.max.min( - Elem::from(other) - - Elem::Concrete(RangeConcrete { - val: U256::from(1).into(), - loc: Loc::Implicit, - }), - ), - self.exclusions, - ) - } - - pub fn gt_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min.max( - Elem::from(other) - + Elem::Concrete(RangeConcrete { - val: U256::from(1).into(), - loc: Loc::Implicit, - }), - ), - self.max, - self.exclusions, - ) - } - - pub fn dyn_fn_from_op(op: RangeOp) -> &'static dyn Fn(SolcRange, ContextVarNode) -> SolcRange { - match op { - RangeOp::Add(false) => &Self::add_dyn, - RangeOp::Add(true) => &Self::wrapping_add_dyn, - RangeOp::Sub(false) => &Self::sub_dyn, - RangeOp::Sub(true) => &Self::wrapping_sub_dyn, - RangeOp::Mul(false) => &Self::mul_dyn, - RangeOp::Mul(true) => &Self::wrapping_mul_dyn, - RangeOp::Div(false) => &Self::div_dyn, - RangeOp::Div(true) => &Self::wrapping_mul_dyn, - RangeOp::Shr => &Self::shr_dyn, - RangeOp::Shl => &Self::shl_dyn, - RangeOp::Mod => &Self::mod_dyn, - RangeOp::Min => &Self::min_dyn, - RangeOp::Max => &Self::max_dyn, - RangeOp::Lt => &Self::lt_dyn, - RangeOp::Lte => &Self::lte_dyn, - RangeOp::Gt => &Self::gt_dyn, - RangeOp::Gte => &Self::gte_dyn, - RangeOp::Eq => &Self::eq_dyn, - RangeOp::Neq => &Self::neq_dyn, - RangeOp::Exp => &Self::exp_dyn, - RangeOp::BitAnd => &Self::bit_and_dyn, - RangeOp::BitOr => &Self::bit_or_dyn, - RangeOp::BitXor => &Self::bit_xor_dyn, - 
// RangeOp::And => ( - // &Self::and_dyn, - // (DynSide::Min, DynSide::Max), - // ), - // RangeOp::Or => ( - // &Self::or_dyn, - // (DynSide::Min, DynSide::Max), - // ), - e => unreachable!("Comparator operations shouldn't exist in a range: {:?}", e), - } - } - - pub fn add_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min + Elem::from(other), - self.max + Elem::from(other), - self.exclusions, - ) - } - - pub fn wrapping_add_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min.wrapping_add(Elem::from(other)), - self.max.wrapping_add(Elem::from(other)), - self.exclusions, - ) - } - - pub fn sub_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min - Elem::from(other), - self.max - Elem::from(other), - self.exclusions, - ) - } - - pub fn wrapping_sub_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min.wrapping_sub(Elem::from(other)), - self.max.wrapping_sub(Elem::from(other)), - self.exclusions, - ) - } - - pub fn mul_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min * Elem::from(other), - self.max * Elem::from(other), - self.exclusions, - ) - } - - pub fn wrapping_mul_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min.wrapping_mul(Elem::from(other)), - self.max.wrapping_mul(Elem::from(other)), - self.exclusions, - ) - } - - pub fn exp_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min.pow(Elem::from(other)), - self.max.pow(Elem::from(other)), - self.exclusions, - ) - } - - pub fn bit_and_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min & Elem::from(other), - self.max & Elem::from(other), - self.exclusions, - ) - } - - pub fn bit_or_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min | Elem::from(other), - self.max | Elem::from(other), - self.exclusions, - ) - } - - pub fn bit_xor_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min ^ Elem::from(other), - self.max ^ Elem::from(other), - self.exclusions, - ) - } - - pub fn div_dyn(self, other: ContextVarNode) -> Self { - let elem = Elem::from(other); - Self::new(self.min / elem.clone(), self.max / elem, self.exclusions) - } - - pub fn wrapping_div_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min.wrapping_div(Elem::from(other)), - self.max.wrapping_div(Elem::from(other)), - self.exclusions, - ) - } - - pub fn shl_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min << Elem::from(other), - self.max << Elem::from(other), - self.exclusions, - ) - } - - pub fn shr_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min >> Elem::from(other), - self.max >> Elem::from(other), - self.exclusions, - ) - } - - pub fn mod_dyn(self, other: ContextVarNode) -> Self { - let elem = Elem::from(other); - Self::new( - Elem::from(Concrete::from(U256::zero())), - elem.clone() - Elem::from(Concrete::from(U256::from(1))).cast(elem), - self.exclusions, - ) - } - - pub fn min_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min.min(Elem::from(other)), - self.max.min(Elem::from(other)), - self.exclusions, - ) - } - - pub fn max_dyn(self, other: ContextVarNode) -> Self { - Self::new( - self.min.max(Elem::from(other)), - self.max.max(Elem::from(other)), - self.exclusions, - ) - } - - pub fn eq_dyn(self, other: ContextVarNode) -> Self { - let min = self.min.eq(Elem::from(other)); - let max = self.max.eq(Elem::from(other)); - Self::new(min.clone().max(max.clone()), min.max(max), self.exclusions) - } - - pub fn neq_dyn(self, other: ContextVarNode) -> Self { - let min 
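`dyn_fn_from_op` above is a dispatch table from a `RangeOp` to a function pointer, and each `*_dyn` helper applies that operation to both endpoints of the range. A standalone sketch of the same shape, with a two-variant op enum and plain `i64` bounds instead of `SolcRange`/`ContextVarNode`:

    #[derive(Clone, Copy)]
    enum Op { Add, Sub }

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Range { min: i64, max: i64 }

    impl Range {
        fn add_dyn(self, rhs: i64) -> Range {
            // shift both endpoints, like `self.min + Elem::from(other)` above
            Range { min: self.min + rhs, max: self.max + rhs }
        }
        fn sub_dyn(self, rhs: i64) -> Range {
            Range { min: self.min - rhs, max: self.max - rhs }
        }
    }

    // same trick as the deleted code: hand back a &'static dyn Fn per op
    fn dyn_fn_from_op(op: Op) -> &'static dyn Fn(Range, i64) -> Range {
        match op {
            Op::Add => &Range::add_dyn,
            Op::Sub => &Range::sub_dyn,
        }
    }

    fn main() {
        let f = dyn_fn_from_op(Op::Add);
        assert_eq!(f(Range { min: 0, max: 10 }, 5), Range { min: 5, max: 15 });
    }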
= self.min.neq(Elem::from(other)); - let max = self.max.neq(Elem::from(other)); - Self::new(min.clone().max(max.clone()), min.max(max), self.exclusions) - } -} - -impl Range for SolcRange { - type ElemTy = Elem; - fn range_min(&self) -> std::borrow::Cow<'_, Self::ElemTy> { - std::borrow::Cow::Borrowed(&self.min) - } - fn range_max(&self) -> std::borrow::Cow<'_, Self::ElemTy> { - std::borrow::Cow::Borrowed(&self.max) - } - fn range_min_mut(&mut self) -> &mut Self::ElemTy { - &mut self.min - } - fn range_max_mut(&mut self) -> &mut Self::ElemTy { - &mut self.max - } - - fn cache_eval(&mut self, analyzer: &impl GraphLike) -> Result<(), GraphError> { - if self.min_cached.is_none() { - let min = self.range_min_mut(); - min.cache_minimize(analyzer)?; - self.min_cached = Some(self.range_min().minimize(analyzer)?); - } - if self.max_cached.is_none() { - let max = self.range_max_mut(); - max.cache_maximize(analyzer)?; - self.max_cached = Some(self.range_max().maximize(analyzer)?); - } - Ok(()) - } - - fn evaled_range_min(&self, analyzer: &impl GraphLike) -> Result { - if let Some(cached) = &self.min_cached { - Ok(cached.clone()) - } else { - self.range_min().minimize(analyzer) - } - } - - fn evaled_range_max(&self, analyzer: &impl GraphLike) -> Result { - if let Some(cached) = &self.max_cached { - Ok(cached.clone()) - } else { - self.range_max().maximize(analyzer) - } - } - - fn simplified_range_min(&self, analyzer: &impl GraphLike) -> Result { - println!("simplified range min"); - self.range_min().simplify_minimize(analyzer) - } - fn simplified_range_max(&self, analyzer: &impl GraphLike) -> Result { - println!("simplified range max"); - self.range_max().simplify_maximize(analyzer) - } - fn range_exclusions(&self) -> Vec { - self.exclusions.clone() - } - fn set_range_min(&mut self, new: Self::ElemTy) { - self.min_cached = None; - self.min = new; - } - fn set_range_max(&mut self, new: Self::ElemTy) { - self.max_cached = None; - self.max = new; - } - - fn add_range_exclusion(&mut self, new: Self::ElemTy) { - if !self.exclusions.contains(&new) { - self.exclusions.push(new); - } - } - fn set_range_exclusions(&mut self, new: Vec) { - self.exclusions = new; - } - fn filter_min_recursion(&mut self, self_idx: NodeIdx, new_idx: NodeIdx) { - self.min.filter_recursion(self_idx, new_idx); - } - fn filter_max_recursion(&mut self, self_idx: NodeIdx, new_idx: NodeIdx) { - self.max.filter_recursion(self_idx, new_idx); - } -} - -pub trait Range { - type ElemTy: RangeElem + Clone; - fn cache_eval(&mut self, analyzer: &impl GraphLike) -> Result<(), GraphError>; - fn evaled_range_min(&self, analyzer: &impl GraphLike) -> Result; - fn evaled_range_max(&self, analyzer: &impl GraphLike) -> Result; - fn simplified_range_min(&self, analyzer: &impl GraphLike) -> Result; - fn simplified_range_max(&self, analyzer: &impl GraphLike) -> Result; - fn range_min(&self) -> std::borrow::Cow<'_, Self::ElemTy>; - fn range_max(&self) -> std::borrow::Cow<'_, Self::ElemTy>; - fn uncache_range_min(&mut self) { - self.range_min_mut().uncache(); - } - fn uncache_range_max(&mut self) { - self.range_max_mut().uncache(); - } - fn range_min_mut(&mut self) -> &mut Self::ElemTy; - fn range_max_mut(&mut self) -> &mut Self::ElemTy; - fn range_exclusions(&self) -> Vec - where - Self: std::marker::Sized; - fn set_range_min(&mut self, new: Self::ElemTy); - fn set_range_max(&mut self, new: Self::ElemTy); - fn set_range_exclusions(&mut self, new: Vec) - where - Self: std::marker::Sized; - fn add_range_exclusion(&mut self, new: Self::ElemTy) - where - Self: 
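`cache_eval`, `evaled_range_min`/`evaled_range_max` and `set_range_min`/`set_range_max` above implement a memoize-and-invalidate scheme: evaluated bounds are stored once, and assigning a new bound clears the stale cache. A compressed standalone sketch of that behaviour (one bound only, `i64` in place of `Elem`):

    struct CachedRange {
        min: i64,
        min_cached: Option<i64>,
    }

    impl CachedRange {
        fn evaled_min(&mut self) -> i64 {
            if let Some(c) = self.min_cached {
                c
            } else {
                let v = self.min; // placeholder for the real minimize() walk
                self.min_cached = Some(v);
                v
            }
        }

        fn set_range_min(&mut self, new: i64) {
            self.min_cached = None; // a stale cache must not outlive the old bound
            self.min = new;
        }
    }

    fn main() {
        let mut r = CachedRange { min: 1, min_cached: None };
        assert_eq!(r.evaled_min(), 1);
        r.set_range_min(7);
        assert_eq!(r.evaled_min(), 7); // recomputed after invalidation
    }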
std::marker::Sized; - fn filter_min_recursion(&mut self, self_idx: NodeIdx, new_idx: NodeIdx); - fn filter_max_recursion(&mut self, self_idx: NodeIdx, new_idx: NodeIdx); - fn dependent_on(&self) -> Vec { - let mut deps = self.range_min().dependent_on(); - deps.extend(self.range_max().dependent_on()); - deps - } - - fn update_deps(&mut self, node: ContextVarNode, ctx: ContextNode, analyzer: &impl GraphLike) { - let deps = self.dependent_on(); - let mapping: BTreeMap = deps - .into_iter() - .filter(|dep| !dep.is_const(analyzer).unwrap()) - .map(|dep| { - let latest = dep.latest_version_in_ctx(ctx, analyzer).unwrap(); - if latest == node { - if let Some(prev) = latest.previous_version(analyzer) { - (dep, prev) - } else { - (dep, dep) - } - } else { - (dep, latest) - } - }) - .collect(); - - let mut min = self.range_min().into_owned(); - let mut max = self.range_max().into_owned(); - min.update_deps(&mapping); - max.update_deps(&mapping); - self.set_range_min(min); - self.set_range_max(max); - } -} - -pub trait RangeEval>: Range { - fn sat(&self, analyzer: &impl GraphLike) -> bool; - fn unsat(&self, analyzer: &impl GraphLike) -> bool { - !self.sat(analyzer) - } - fn contains(&self, other: &Self, analyzer: &impl GraphLike) -> bool; - fn contains_elem(&self, other: &T, analyzer: &impl GraphLike) -> bool; - fn overlaps(&self, other: &Self, analyzer: &impl GraphLike) -> bool; -} - -impl RangeEval> for SolcRange { - fn sat(&self, analyzer: &impl GraphLike) -> bool { - matches!( - self.evaled_range_min(analyzer) - .unwrap() - .range_ord(&self.evaled_range_max(analyzer).unwrap()), - None | Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) - ) - } - - fn contains(&self, other: &Self, analyzer: &impl GraphLike) -> bool { - let min_contains = matches!( - self.evaled_range_min(analyzer) - .unwrap() - .range_ord(&other.evaled_range_min(analyzer).unwrap()), - Some(std::cmp::Ordering::Less) | Some(std::cmp::Ordering::Equal) - ); - - let max_contains = matches!( - self.evaled_range_max(analyzer) - .unwrap() - .range_ord(&other.evaled_range_max(analyzer).unwrap()), - Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) - ); - - min_contains && max_contains - } - - fn contains_elem(&self, other: &Elem, analyzer: &impl GraphLike) -> bool { - let min_contains = match self - .evaled_range_min(analyzer) - .unwrap() - .range_ord(&other.minimize(analyzer).unwrap()) - { - Some(std::cmp::Ordering::Less) => true, - Some(std::cmp::Ordering::Equal) => return true, - _ => false, - }; - - let max_contains = match self - .evaled_range_max(analyzer) - .unwrap() - .range_ord(&other.maximize(analyzer).unwrap()) - { - Some(std::cmp::Ordering::Greater) => true, - Some(std::cmp::Ordering::Equal) => return true, - _ => false, - }; - - min_contains && max_contains - } - - fn overlaps(&self, other: &Self, analyzer: &impl GraphLike) -> bool { - let lhs_min = self.evaled_range_min(analyzer).unwrap(); - let rhs_max = other.evaled_range_max(analyzer).unwrap(); - - match lhs_min.range_ord(&rhs_max) { - Some(std::cmp::Ordering::Less) => { - // we know our min is less than the other max - // check that the max is greater than or eq their min - let lhs_max = self.evaled_range_max(analyzer).unwrap(); - let rhs_min = other.evaled_range_min(analyzer).unwrap(); - matches!( - lhs_max.range_ord(&rhs_min), - Some(std::cmp::Ordering::Greater) | Some(std::cmp::Ordering::Equal) - ) - } - Some(std::cmp::Ordering::Equal) => true, - _ => false, - } - } -} diff --git a/shared/src/range/range_ops.rs 
b/shared/src/range/range_ops.rs deleted file mode 100644 index f8ae3887..00000000 --- a/shared/src/range/range_ops.rs +++ /dev/null @@ -1,1792 +0,0 @@ -use crate::range::Elem; -use crate::range::RangeConcrete; -use crate::range::RangeDyn; -use crate::Concrete; -use ethers_core::types::H256; -use ethers_core::types::I256; -use ethers_core::types::U256; -use std::collections::BTreeMap; - -pub trait RangeAdd { - /// Perform addition between two range elements - fn range_add(&self, other: &Rhs) -> Option>; - fn range_wrapping_add(&self, other: &Rhs) -> Option>; -} - -impl RangeAdd for RangeConcrete { - fn range_add(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => { - let max = Concrete::max(&self.val).unwrap(); - let max_uint = max.into_u256().unwrap(); - Some(Elem::Concrete(RangeConcrete { - val: self - .val - .u256_as_original(lhs_val.saturating_add(rhs_val).min(max_uint)), - loc: self.loc, - })) - } - _ => { - match (&self.val, &other.val) { - (Concrete::Uint(lhs_size, val), Concrete::Int(_, neg_v)) - | (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { - // neg_v guaranteed to be negative here - if neg_v.into_raw() > *val { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int( - *lhs_size, - neg_v.saturating_add(I256::from_raw(*val)), - ), - loc: self.loc, - })) - } else { - Some(Elem::Concrete(RangeConcrete { - val: self - .val - .u256_as_original(val.saturating_sub(neg_v.into_raw())), - loc: self.loc, - })) - } - } - (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { - let max = if *lhs_size == 256 { - I256::MAX - } else { - I256::from_raw(U256::from(1u8) << U256::from(*lhs_size - 1)) - - I256::from(1) - }; - let min = max * I256::from(-1i32) - I256::from(1i32); - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, l.saturating_add(*r).max(min)), - loc: self.loc, - })) - } - _ => None, - } - } - } - } - fn range_wrapping_add(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => Some(Elem::Concrete(RangeConcrete { - val: self - .val - .u256_as_original(lhs_val.overflowing_add(rhs_val).0), - loc: self.loc, - })), - _ => match (&self.val, &other.val) { - (Concrete::Uint(lhs_size, val), Concrete::Int(_, neg_v)) - | (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int( - *lhs_size, - I256::from_raw(neg_v.into_raw().overflowing_add(*val).0), - ), - loc: self.loc, - })) - } - (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, l.overflowing_add(*r).0), - loc: self.loc, - })) - } - _ => None, - }, - } - } -} - -impl RangeAdd for Elem { - fn range_add(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_add(b), - (Elem::Concrete(a), _) if a.val.into_u256() == Some(U256::zero()) => { - Some(other.clone()) - } - (_, Elem::Concrete(b)) if b.val.into_u256() == Some(U256::zero()) => Some(self.clone()), - _ => None, - } - } - fn range_wrapping_add(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_wrapping_add(b), - (Elem::Concrete(a), _) if a.val.into_u256() == Some(U256::zero()) => { - Some(other.clone()) - } - (_, Elem::Concrete(b)) if b.val.into_u256() == Some(U256::zero()) => Some(self.clone()), - _ => None, - } - } -} - -pub trait RangeSub { - 
/// Perform subtraction between two range elements - fn range_sub(&self, other: &Rhs) -> Option>; - fn range_wrapping_sub(&self, other: &Rhs) -> Option>; -} - -impl RangeSub for RangeConcrete { - fn range_sub(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => { - if lhs_val > rhs_val { - let val = lhs_val.saturating_sub(rhs_val); - Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original(val), - loc: self.loc, - })) - } else { - match self.val { - Concrete::Int(size, val) => Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(size, val.saturating_sub(I256::from_raw(rhs_val))), - loc: self.loc, - })), - _ => { - // TODO: this should cause a revert - let val = lhs_val.saturating_sub(rhs_val); - Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original(val), - loc: self.loc, - })) - } - } - } - } - _ => match (&self.val, &other.val) { - (Concrete::Uint(lhs_size, val), Concrete::Int(_, neg_v)) => { - let max = if *lhs_size == 256 { - U256::MAX - } else { - U256::from(2).pow(U256::from(*lhs_size)) - 1 - }; - Some(Elem::Concrete(RangeConcrete { - val: self - .val - .u256_as_original(val.saturating_add(neg_v.into_raw()).min(max)), - loc: self.loc, - })) - } - (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { - let max = if *lhs_size == 256 { - I256::MAX - } else { - I256::from_raw(U256::from(1u8) << U256::from(*lhs_size - 1)) - I256::from(1) - }; - - let min = max * I256::from(-1i32) - I256::from(1i32); - - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int( - *lhs_size, - neg_v.saturating_sub(I256::from_raw(*val).max(min)), - ), - loc: self.loc, - })) - } - (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, l.saturating_sub(*r)), - loc: self.loc, - })) - } - _ => None, - }, - } - } - - fn range_wrapping_sub(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => { - if lhs_val > rhs_val { - let val = lhs_val.overflowing_sub(rhs_val).0; - Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original(val), - loc: self.loc, - })) - } else { - match self.val { - Concrete::Int(size, val) => Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int( - size, - val.overflowing_sub(I256::from_raw(rhs_val)).0, - ), - loc: self.loc, - })), - _ => { - let val = lhs_val.overflowing_sub(rhs_val).0; - Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original(val), - loc: self.loc, - })) - } - } - } - } - _ => match (&self.val, &other.val) { - (Concrete::Uint(_lhs_size, val), Concrete::Int(_, neg_v)) => { - Some(Elem::Concrete(RangeConcrete { - val: self - .val - .u256_as_original(val.overflowing_add(neg_v.into_raw()).0), - loc: self.loc, - })) - } - (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int( - *lhs_size, - I256::from_raw(neg_v.into_raw().overflowing_sub(*val).0), - ), - loc: self.loc, - })) - } - (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, l.overflowing_sub(*r).0), - loc: self.loc, - })) - } - _ => None, - }, - } - } -} - -impl RangeSub for Elem { - fn range_sub(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_sub(b), - (_, Elem::Concrete(b)) if b.val.into_u256() == Some(U256::zero()) => 
Some(self.clone()), - _ => None, - } - } - - fn range_wrapping_sub(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_wrapping_sub(b), - (_, Elem::Concrete(b)) if b.val.into_u256() == Some(U256::zero()) => Some(self.clone()), - _ => None, - } - } -} - -pub trait RangeMul { - /// Perform multiplication between two range elements - fn range_mul(&self, other: &Rhs) -> Option>; - fn range_wrapping_mul(&self, other: &Rhs) -> Option>; -} - -impl RangeMul for RangeConcrete { - fn range_mul(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => { - let max = Concrete::max(&self.val).unwrap(); - let res = lhs_val - .saturating_mul(rhs_val) - .min(max.into_u256().unwrap()); - Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original(res), - loc: self.loc, - })) - } - _ => match (&self.val, &other.val) { - (Concrete::Uint(lhs_size, val), Concrete::Int(_, neg_v)) - | (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { - let max = if *lhs_size == 256 { - I256::MAX - } else { - I256::from_raw(U256::from(1u8) << U256::from(*lhs_size - 1)) - I256::from(1) - }; - let min = max * I256::from(-1i32) - I256::from(1i32); - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int( - *lhs_size, - neg_v.saturating_mul(I256::from_raw(*val)).max(min), - ), - loc: self.loc, - })) - } - (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { - let max = if *lhs_size == 256 { - I256::MAX - } else { - I256::from_raw(U256::from(1u8) << U256::from(*lhs_size - 1)) - I256::from(1) - }; - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, l.saturating_mul(*r).min(max)), - loc: self.loc, - })) - } - _ => None, - }, - } - } - - fn range_wrapping_mul(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => { - let _max = Concrete::max(&self.val).unwrap(); - let res = lhs_val.overflowing_mul(rhs_val).0; - Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original(res), - loc: self.loc, - })) - } - _ => match (&self.val, &other.val) { - (Concrete::Uint(lhs_size, val), Concrete::Int(_, neg_v)) - | (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int( - *lhs_size, - neg_v.overflowing_mul(I256::from_raw(*val)).0, - ), - loc: self.loc, - })) - } - (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, l.overflowing_mul(*r).0), - loc: self.loc, - })) - } - _ => None, - }, - } - } -} - -impl RangeMul for Elem { - fn range_mul(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_mul(b), - (Elem::Concrete(a), _) if a.val.into_u256() == Some(U256::zero()) => Some(self.clone()), - (_, Elem::Concrete(b)) if b.val.into_u256() == Some(U256::zero()) => { - Some(other.clone()) - } - _ => None, - } - } - - fn range_wrapping_mul(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_wrapping_mul(b), - (Elem::Concrete(a), _) if a.val.into_u256() == Some(U256::zero()) => Some(self.clone()), - (_, Elem::Concrete(b)) if b.val.into_u256() == Some(U256::zero()) => { - Some(other.clone()) - } - _ => None, - } - } -} - -pub trait RangeExp { - /// Perform exponentiation between two range elements - fn range_exp(&self, other: &Rhs) -> Option>; -} - -impl 
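The arithmetic removed in this hunk splits each operation into a checked and a wrapping flavor: `range_add`/`range_sub`/`range_mul` saturate at the bounds of the declared bit width, while the `range_wrapping_*` variants wrap modulo 2^bits. A standalone sketch of that split for unsigned values, using `u128` and a width below 128 bits in place of the ethers `U256` plus `u256_as_original` clamping used by the real code:

    // Saturating add within a declared bit width (assume 0 < bits < 128).
    fn saturating_add_sized(lhs: u128, rhs: u128, bits: u32) -> u128 {
        let max = (1u128 << bits) - 1;
        lhs.saturating_add(rhs).min(max)
    }

    // Wrapping add within a declared bit width: keep only the low `bits` bits.
    fn wrapping_add_sized(lhs: u128, rhs: u128, bits: u32) -> u128 {
        let mask = (1u128 << bits) - 1;
        lhs.wrapping_add(rhs) & mask
    }

    fn main() {
        // uint8: 250 + 10 saturates to 255 but wraps around to 4
        assert_eq!(saturating_add_sized(250, 10, 8), 255);
        assert_eq!(wrapping_add_sized(250, 10, 8), 4);
    }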
RangeExp for RangeConcrete { - fn range_exp(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => { - let max = Concrete::max(&self.val).unwrap(); - if let Some(num) = lhs_val.checked_pow(rhs_val) { - Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original(num.min(max.into_u256().unwrap())), - loc: self.loc, - })) - } else { - Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original(max.into_u256().unwrap()), - loc: self.loc, - })) - } - } - _ => match (&self.val, &other.val) { - (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { - let pow2 = val % U256::from(2) == 0.into(); - if val > &U256::from(u32::MAX) { - if pow2 { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::max(&self.val).unwrap(), - loc: self.loc, - })) - } else { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::min(&self.val).unwrap(), - loc: self.loc, - })) - } - } else { - let min = Concrete::min(&self.val).unwrap().int_val().unwrap(); - let max = Concrete::max(&self.val).unwrap().int_val().unwrap(); - - if let Some(num) = neg_v.checked_pow(val.as_u32()) { - if pow2 { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, num.min(max)), - loc: self.loc, - })) - } else { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, num.max(min)), - loc: self.loc, - })) - } - } else if pow2 { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::max(&self.val).unwrap(), - loc: self.loc, - })) - } else { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::min(&self.val).unwrap(), - loc: self.loc, - })) - } - } - } - _ => None, - }, - } - } -} - -impl RangeExp for Elem { - fn range_exp(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_exp(b), - (Elem::Concrete(a), _) if a.val.into_u256() == Some(U256::zero()) => { - Some(Elem::from(Concrete::from(U256::from(1)))) - } - (_, Elem::Concrete(b)) if b.val.into_u256() == Some(U256::zero()) => { - Some(other.clone()) - } - _ => None, - } - } -} -pub trait RangeDiv { - /// Perform division between two range elements - fn range_div(&self, other: &Rhs) -> Option>; - - fn range_wrapping_div(&self, other: &Rhs) -> Option>; -} - -impl RangeDiv for RangeConcrete { - fn range_div(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => { - if rhs_val == 0.into() { - None - } else { - Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original(lhs_val / rhs_val), - loc: self.loc, - })) - } - } - _ => match (&self.val, &other.val) { - (Concrete::Uint(lhs_size, val), Concrete::Int(_, neg_v)) => { - if neg_v == &I256::from(0) { - None - } else { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int( - *lhs_size, - I256::from_raw(val / neg_v.into_raw()) * I256::from(-1i32), - ), - loc: self.loc, - })) - } - } - (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { - if val == &U256::from(0) { - None - } else { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, *neg_v / I256::from_raw(*val)), - loc: self.loc, - })) - } - } - (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { - if r == &I256::from(0) { - None - } else { - let (val, overflow) = l.overflowing_div(*r); - if overflow { - None - } else { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, val), - loc: self.loc, - })) - } - } - } - _ => None, - }, - } - } - - fn range_wrapping_div(&self, other: 
&Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => { - if rhs_val == 0.into() { - Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original(U256::zero()), - loc: self.loc, - })) - } else { - Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original(lhs_val / rhs_val), - loc: self.loc, - })) - } - } - _ => match (&self.val, &other.val) { - (Concrete::Uint(lhs_size, val), Concrete::Int(_, neg_v)) => { - if neg_v == &I256::from(0) { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, I256::from(0i32)), - loc: self.loc, - })) - } else { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int( - *lhs_size, - I256::from_raw(val / neg_v.into_raw()) * I256::from(-1i32), - ), - loc: self.loc, - })) - } - } - (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { - if val == &U256::from(0) { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, I256::from(0i32)), - loc: self.loc, - })) - } else { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, *neg_v / I256::from_raw(*val)), - loc: self.loc, - })) - } - } - (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { - if r == &I256::from(0) { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, I256::from(0i32)), - loc: self.loc, - })) - } else { - let (val, overflow) = l.overflowing_div(*r); - if overflow { - None - } else { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, val), - loc: self.loc, - })) - } - } - } - _ => None, - }, - } - } -} - -impl RangeDiv for Elem { - fn range_div(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_div(b), - _ => None, - } - } - - fn range_wrapping_div(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_div(b), - _ => None, - } - } -} - -pub trait RangeMod { - /// Perform modulo between two range elements - fn range_mod(&self, other: &Rhs) -> Option>; -} - -impl RangeMod for RangeConcrete { - fn range_mod(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original(lhs_val % rhs_val), - loc: self.loc, - })), - _ => match (&self.val, &other.val) { - (Concrete::Uint(lhs_size, val), Concrete::Int(_, neg_v)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, I256::from_raw(*val) % *neg_v), - loc: self.loc, - })) - } - (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, *neg_v % I256::from_raw(*val)), - loc: self.loc, - })) - } - (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, *l % *r), - loc: self.loc, - })) - } - _ => None, - }, - } - } -} - -impl RangeMod for Elem { - fn range_mod(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_mod(b), - _ => None, - } - } -} - -pub trait RangeMin { - /// Take the minimum of two range elements - fn range_min(&self, other: &Rhs) -> Option>; -} - -impl RangeMin for RangeConcrete { - fn range_min(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => Some(Elem::Concrete(RangeConcrete { - val: 
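Note the asymmetry in the deleted division ops: `range_div` produces `None` for a concrete zero divisor, whereas `range_wrapping_div` follows the EVM convention and evaluates to zero. A small sketch of the two behaviors over `u128` (illustrative only; the real code also handles signed operands and overflow of the most negative value):

    // Checked division: a zero divisor has no defined result.
    fn checked_div_elem(lhs: u128, rhs: u128) -> Option<u128> {
        lhs.checked_div(rhs)
    }

    // Wrapping division: a zero divisor yields zero, like the EVM DIV opcode.
    fn wrapping_div_elem(lhs: u128, rhs: u128) -> u128 {
        if rhs == 0 { 0 } else { lhs / rhs }
    }

    fn main() {
        assert_eq!(checked_div_elem(10, 0), None);
        assert_eq!(wrapping_div_elem(10, 0), 0);
        assert_eq!(wrapping_div_elem(10, 3), 3);
    }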
self.val.u256_as_original(lhs_val.min(rhs_val)), - loc: self.loc, - })), - _ => match (&self.val, &other.val) { - (Concrete::Uint(lhs_size, _), Concrete::Int(_, neg_v)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, *neg_v), - loc: self.loc, - })) - } - (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, _)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, *neg_v), - loc: self.loc, - })) - } - (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, *l.min(r)), - loc: self.loc, - })) - } - _ => None, - }, - } - } -} - -impl RangeMin for Elem { - fn range_min(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_min(b), - _ => None, - } - } -} - -pub trait RangeMax { - /// Take the maximum of two range elements - fn range_max(&self, other: &Rhs) -> Option>; -} - -impl RangeMax for RangeConcrete { - fn range_max(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original(lhs_val.max(rhs_val)), - loc: self.loc, - })), - _ => match (&self.val, &other.val) { - (Concrete::Uint(lhs_size, val), Concrete::Int(_, _)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Uint(*lhs_size, *val), - loc: self.loc, - })) - } - (Concrete::Int(lhs_size, _), Concrete::Uint(_, val)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Uint(*lhs_size, *val), - loc: self.loc, - })) - } - (Concrete::Int(lhs_size, l), Concrete::Int(_rhs_size, r)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, *l.max(r)), - loc: self.loc, - })) - } - _ => None, - }, - } - } -} - -impl RangeMax for Elem { - fn range_max(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_max(b), - _ => None, - } - } -} - -pub trait RangeOrd { - /// Perform a logical equality test - fn range_ord_eq(&self, other: &Rhs) -> Option>; - /// Perform a logical inequality test - fn range_neq(&self, other: &Rhs) -> Option>; - /// Perform a logical greater than test - fn range_gt(&self, other: &Rhs) -> Option>; - /// Perform a logical less than test - fn range_lt(&self, other: &Rhs) -> Option>; - /// Perform a logical greater than or equal test - fn range_gte(&self, other: &Rhs) -> Option>; - /// Perform a logical less than or equal test - fn range_lte(&self, other: &Rhs) -> Option>; -} - -impl RangeOrd for RangeConcrete { - fn range_ord_eq(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(lhs_val == rhs_val), - loc: self.loc, - })), - _ => match (&self.val, &other.val) { - (Concrete::Uint(_, _), Concrete::Int(_, _)) - | (Concrete::Int(_, _), Concrete::Uint(_, _)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(false), - loc: self.loc, - })) - } - (Concrete::Int(_lhs_size, l), Concrete::Int(_rhs_size, r)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(l == r), - loc: self.loc, - })) - } - _ => None, - }, - } - } - - fn range_neq(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(lhs_val != rhs_val), - loc: self.loc, - })), - _ => match (&self.val, 
&other.val) { - (Concrete::Uint(_, _), Concrete::Int(_, _)) - | (Concrete::Int(_, _), Concrete::Uint(_, _)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(true), - loc: self.loc, - })) - } - (Concrete::Int(_lhs_size, l), Concrete::Int(_rhs_size, r)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(l != r), - loc: self.loc, - })) - } - _ => None, - }, - } - } - - fn range_gt(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(lhs_val > rhs_val), - loc: self.loc, - })), - _ => match (&self.val, &other.val) { - (Concrete::Uint(_lhs_size, _val), Concrete::Int(_, _)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(true), - loc: self.loc, - })) - } - (Concrete::Int(_lhs_size, _), Concrete::Uint(_, _val)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(false), - loc: self.loc, - })) - } - (Concrete::Int(_lhs_size, l), Concrete::Int(_rhs_size, r)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(l > r), - loc: self.loc, - })) - } - _ => None, - }, - } - } - - fn range_lt(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(lhs_val < rhs_val), - loc: self.loc, - })), - _ => match (&self.val, &other.val) { - (Concrete::Uint(_lhs_size, _val), Concrete::Int(_, _)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(false), - loc: self.loc, - })) - } - (Concrete::Int(_lhs_size, _), Concrete::Uint(_, _val)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(true), - loc: self.loc, - })) - } - (Concrete::Int(_lhs_size, l), Concrete::Int(_rhs_size, r)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(l < r), - loc: self.loc, - })) - } - _ => None, - }, - } - } - - fn range_gte(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(lhs_val >= rhs_val), - loc: self.loc, - })), - _ => match (&self.val, &other.val) { - (Concrete::Uint(_lhs_size, _val), Concrete::Int(_, _)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(true), - loc: self.loc, - })) - } - (Concrete::Int(_lhs_size, _), Concrete::Uint(_, _val)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(false), - loc: self.loc, - })) - } - (Concrete::Int(_lhs_size, l), Concrete::Int(_rhs_size, r)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(l >= r), - loc: self.loc, - })) - } - _ => None, - }, - } - } - - fn range_lte(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(lhs_val <= rhs_val), - loc: self.loc, - })), - _ => match (&self.val, &other.val) { - (Concrete::Uint(_lhs_size, _val), Concrete::Int(_, _)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(false), - loc: self.loc, - })) - } - (Concrete::Int(_lhs_size, _), Concrete::Uint(_, _val)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(true), - loc: self.loc, - })) - } - (Concrete::Int(_lhs_size, l), Concrete::Int(_rhs_size, r)) => { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(l <= r), - loc: self.loc, - })) - } - _ => None, - }, - } - } -} - -impl RangeOrd 
for Elem { - fn range_ord_eq(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_ord_eq(b), - _ => None, - } - } - fn range_neq(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_neq(b), - _ => None, - } - } - fn range_gt(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_gt(b), - _ => None, - } - } - - fn range_lt(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_lt(b), - _ => None, - } - } - - fn range_gte(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_gte(b), - _ => None, - } - } - - fn range_lte(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_lte(b), - _ => None, - } - } -} - -pub trait RangeShift { - /// Perform a bitwise shift left - fn range_shl(&self, other: &Rhs) -> Option>; - /// Perform a bitwise shift right - fn range_shr(&self, other: &Rhs) -> Option>; -} - -impl RangeShift for RangeConcrete { - fn range_shl(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => { - if rhs_val > 256.into() { - return Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original(U256::zero()), - loc: self.loc, - })); - } - let max = Concrete::max(&self.val).unwrap().into_u256().unwrap(); - if self.val.int_val().is_some() { - // ints get weird treatment because they can push into the negatives - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int( - self.val.int_size().unwrap(), - I256::from_raw(lhs_val << rhs_val), - ), - loc: self.loc, - })) - } else if rhs_val > lhs_val.leading_zeros().into() { - Some(Elem::Concrete(RangeConcrete { - val: max.into(), - loc: self.loc, - })) - } else { - Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original((lhs_val << rhs_val).min(max)), - loc: self.loc, - })) - } - } - _ => match (&self.val, &other.val) { - (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { - if val == &U256::zero() { - return Some(Elem::Concrete(self.clone())); - } - - let max = if *lhs_size == 256 { - I256::MAX - } else { - I256::from_raw(U256::from(1u8) << U256::from(*lhs_size - 1)) - I256::from(1) - }; - - let min = max * I256::from(-1i32) - I256::from(1i32); - let (abs, is_min) = neg_v.overflowing_abs(); - if is_min { - if val > &U256::zero() { - Some(Elem::from(self.clone())) - } else { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, I256::zero()), - loc: self.loc, - })) - } - } else if val > &U256::from(abs.leading_zeros()) { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, I256::zero()), - loc: self.loc, - })) - } else { - let raw = I256::from_raw(abs.into_raw() << val); - let as_int = if raw == I256::MIN { - raw - } else { - I256::from(-1i32) * raw - }; - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, as_int.max(min)), - loc: self.loc, - })) - } - } - _ => None, - }, - } - } - - fn range_shr(&self, other: &Self) -> Option> { - match (self.val.into_u256(), other.val.into_u256()) { - (Some(lhs_val), Some(rhs_val)) => { - if rhs_val == U256::zero() { - Some(Elem::Concrete(self.clone())) - } else if rhs_val > U256::from(256) { - Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original(U256::zero()), - loc: self.loc, - })) - } else { - 
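The shift ops deleted here clamp rather than wrap: a left shift that would push set bits out of the declared width is pinned to the type's maximum (or zeroed for shift counts beyond the word size), with extra casing for signed values. The following is a rough, unsigned-only approximation of that clamping over `u128`, not a byte-for-byte port of the deleted logic:

    // Saturating left shift within a declared width (assume 0 < bits < 128).
    fn shl_saturating_sized(lhs: u128, shift: u32, bits: u32) -> u128 {
        let max = (1u128 << bits) - 1;
        if lhs == 0 {
            0
        } else if shift >= bits || lhs > (max >> shift) {
            // at least one set bit would be shifted out: saturate at the type max
            max
        } else {
            lhs << shift
        }
    }

    // Logical right shift; anything shifted past the width becomes zero.
    fn shr_sized(lhs: u128, shift: u32, bits: u32) -> u128 {
        if shift >= bits { 0 } else { lhs >> shift }
    }

    fn main() {
        // uint8: 0x10 << 3 still fits, 0x10 << 5 loses a bit and saturates
        assert_eq!(shl_saturating_sized(0x10, 3, 8), 0x80);
        assert_eq!(shl_saturating_sized(0x10, 5, 8), 0xff);
        assert_eq!(shr_sized(0x10, 3, 8), 0x02);
    }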
Some(Elem::Concrete(RangeConcrete { - val: self.val.u256_as_original(lhs_val >> rhs_val), - loc: self.loc, - })) - } - } - _ => match (&self.val, &other.val) { - (Concrete::Int(lhs_size, neg_v), Concrete::Uint(_, val)) => { - if val == &U256::zero() { - Some(Elem::Concrete(self.clone())) - } else if val > &U256::from(*lhs_size) { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, I256::from(-1i32)), - loc: self.loc, - })) - } else { - let max = if *lhs_size == 256 { - I256::MAX - } else { - I256::from_raw(U256::from(1u8) << U256::from(*lhs_size - 1)) - - I256::from(1) - }; - let min = max * I256::from(-1i32) - I256::from(1i32); - - let (abs, is_min) = neg_v.overflowing_abs(); - let bits = if is_min { - 255 - } else { - 255 - abs.leading_zeros() - }; - - if val >= &U256::from(bits) { - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*lhs_size, I256::from(-1i32)), - loc: self.loc, - })) - } else { - let shr_val = abs.into_raw() >> val; - let as_int = I256::from_raw(shr_val); - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int( - *lhs_size, - (I256::from(-1i32) * as_int).max(min), - ), - loc: self.loc, - })) - } - } - } - _ => None, - }, - } - } -} - -impl RangeShift for Elem { - fn range_shl(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_shl(b), - _ => None, - } - } - fn range_shr(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_shr(b), - _ => None, - } - } -} - -pub trait RangeUnary { - /// Perform a logical NOT - fn range_not(&self) -> Option>; - /// Perform a logical AND - fn range_and(&self, other: &Rhs) -> Option>; - /// Perform a logical OR - fn range_or(&self, other: &Rhs) -> Option>; -} - -impl RangeUnary for RangeConcrete { - fn range_not(&self) -> Option> { - match self.val { - Concrete::Bool(b) => Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(!b), - loc: self.loc, - })), - _ => None, - } - } - - fn range_and(&self, other: &Self) -> Option> { - match (&self.val, &other.val) { - (Concrete::Bool(a), Concrete::Bool(b)) => Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(*a && *b), - loc: self.loc, - })), - _ => None, - } - } - - fn range_or(&self, other: &Self) -> Option> { - match (&self.val, &other.val) { - (Concrete::Bool(a), Concrete::Bool(b)) => Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bool(*a || *b), - loc: self.loc, - })), - _ => None, - } - } -} - -impl RangeUnary for Elem { - fn range_not(&self) -> Option> { - match self { - Elem::Concrete(a) => a.range_not(), - _ => None, - } - } - fn range_and(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_and(b), - _ => None, - } - } - fn range_or(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_or(b), - _ => None, - } - } -} - -pub trait RangeCast { - /// Perform a cast on an element to the type of the right hand side - fn range_cast(&self, other: &Rhs) -> Option>; -} - -impl RangeCast for RangeConcrete { - fn range_cast(&self, other: &Self) -> Option> { - Some(Elem::Concrete(RangeConcrete { - val: self.val.clone().cast_from(&other.val)?, - loc: self.loc, - })) - } -} - -impl RangeCast>> for RangeConcrete { - fn range_cast(&self, other: &Box>) -> Option> { - match (self.val.clone(), other.val.iter().take(1).next()) { - ( - Concrete::Bytes(size, val), - Some(( - _, - Elem::Concrete(Self { - val: Concrete::Bytes(..), - .. 
- }), - )), - ) - | (Concrete::Bytes(size, val), None) => { - let mut existing = other.val.clone(); - let new = val - .0 - .iter() - .enumerate() - .map(|(i, v)| { - let idx = Elem::from(Concrete::from(U256::from(i))); - let mut bytes = [0x00; 32]; - bytes[0] = *v; - let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); - (idx, v) - }) - .collect::>(); - existing.extend(new); - Some(Elem::ConcreteDyn(Box::new(RangeDyn { - minimized: None, - maximized: None, - len: Elem::from(Concrete::from(U256::from(size))), - val: existing, - loc: other.loc, - }))) - } - ( - Concrete::DynBytes(val), - Some(( - _, - Elem::Concrete(Self { - val: Concrete::Bytes(..), - .. - }), - )), - ) - | (Concrete::DynBytes(val), None) => { - let mut existing = other.val.clone(); - let new = val - .iter() - .enumerate() - .map(|(i, v)| { - let idx = Elem::from(Concrete::from(U256::from(i))); - let mut bytes = [0x00; 32]; - bytes[0] = *v; - let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); - (idx, v) - }) - .collect::>(); - existing.extend(new); - Some(Elem::ConcreteDyn(Box::new(RangeDyn { - minimized: None, - maximized: None, - len: Elem::from(Concrete::from(U256::from(val.len()))), - val: existing, - loc: other.loc, - }))) - } - ( - Concrete::String(val), - Some(( - _, - Elem::Concrete(Self { - val: Concrete::String(..), - .. - }), - )), - ) - | (Concrete::String(val), None) => { - let mut existing = other.val.clone(); - let new = val - .chars() - .enumerate() - .map(|(i, v)| { - let idx = Elem::from(Concrete::from(U256::from(i))); - let mut bytes = [0x00; 32]; - v.encode_utf8(&mut bytes[..]); - let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); - (idx, v) - }) - .collect::>(); - existing.extend(new); - Some(Elem::ConcreteDyn(Box::new(RangeDyn { - minimized: None, - maximized: None, - len: Elem::from(Concrete::from(U256::from(val.len()))), - val: existing, - loc: other.loc, - }))) - } - _e => None, - } - } -} - -impl RangeCast> for RangeDyn { - fn range_cast(&self, other: &Self) -> Option> { - let val: Option<(_, &Elem)> = self.val.iter().take(1).next(); - let o_val: Option<(_, &Elem)> = other.val.iter().take(1).next(); - match (val, o_val) { - ( - Some(( - _, - &Elem::Concrete(RangeConcrete { - val: Concrete::Bytes(..), - .. - }), - )), - Some(( - _, - &Elem::Concrete(RangeConcrete { - val: Concrete::Bytes(..), - .. - }), - )), - ) - | ( - Some(( - _, - &Elem::Concrete(RangeConcrete { - val: Concrete::Bytes(..), - .. - }), - )), - None, - ) => Some(Elem::ConcreteDyn(Box::new(self.clone()))), - ( - Some(( - _, - Elem::Concrete(RangeConcrete { - val: Concrete::Uint(..), - .. - }), - )), - Some(( - _, - Elem::Concrete(RangeConcrete { - val: Concrete::Uint(..), - .. - }), - )), - ) - | ( - Some(( - _, - Elem::Concrete(RangeConcrete { - val: Concrete::Uint(..), - .. - }), - )), - None, - ) => Some(Elem::ConcreteDyn(Box::new(self.clone()))), - ( - Some(( - _, - Elem::Concrete(RangeConcrete { - val: Concrete::Int(..), - .. - }), - )), - Some(( - _, - Elem::Concrete(RangeConcrete { - val: Concrete::Int(..), - .. - }), - )), - ) - | ( - Some(( - _, - Elem::Concrete(RangeConcrete { - val: Concrete::Int(..), - .. 
- }), - )), - None, - ) => Some(Elem::ConcreteDyn(Box::new(self.clone()))), - (Some((_, l @ Elem::Dynamic(_))), None) => Some(l.clone()), - (None, Some((_, r @ Elem::Dynamic(_)))) => Some(r.clone()), - (None, None) => Some(Elem::ConcreteDyn(Box::new(self.clone()))), - _e => None, - } - } -} - -impl RangeCast> for RangeDyn { - fn range_cast(&self, other: &RangeConcrete) -> Option> { - let (_k, val): (_, &Elem) = self.val.iter().take(1).next()?; - let o_val = &other.val; - // println!("HERE {:?} {:?} {:?}", k, val, o_val); - match (val, o_val) { - ( - &Elem::Concrete(RangeConcrete { - val: Concrete::Bytes(1, ..), - .. - }), - Concrete::Bytes(size, _), - ) => { - let mut h = H256::default(); - for (i, (_, val)) in self.val.iter().take(*size as usize).enumerate() { - match val { - Elem::Concrete(RangeConcrete { - val: Concrete::Bytes(1, v), - .. - }) => { - // consume as many as we can - h.0[i] = v.0[0]; - } - _ => break, - } - } - Some(Elem::Concrete(Concrete::Bytes(*size, h).into())) - } - _e => None, - } - } -} - -impl RangeCast for Elem { - fn range_cast(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_cast(b), - (Elem::ConcreteDyn(a), Elem::ConcreteDyn(b)) => { - // super dumb type stuff here that makes it so we have to specify - as RangeCast>::range_cast(a, b) - } - (Elem::ConcreteDyn(a), Elem::Concrete(b)) => a.range_cast(b), - (Elem::Concrete(a), Elem::ConcreteDyn(b)) => a.range_cast(b), - _e => None, - } - } -} - -pub trait RangeConcat { - /// Perform a cast on an element to the type of the right hand side - fn range_concat(&self, other: &Rhs) -> Option>; -} - -impl RangeConcat for RangeConcrete { - fn range_concat(&self, other: &Self) -> Option> { - Some(Elem::Concrete(RangeConcrete { - val: self.val.clone().concat(&other.val)?, - loc: self.loc, - })) - } -} - -impl RangeConcat> for RangeDyn { - fn range_concat(&self, other: &RangeConcrete) -> Option> { - match (other.val.clone(), self.val.iter().take(1).next()) { - ( - Concrete::DynBytes(val), - Some(( - _, - Elem::Concrete(RangeConcrete { - val: Concrete::Bytes(..), - .. - }), - )), - ) - | (Concrete::DynBytes(val), None) => { - let last = self.len.clone(); - let mut existing = self.val.clone(); - let new = val - .iter() - .enumerate() - .map(|(i, v)| { - let idx = Elem::from(Concrete::from(U256::from(i))); - let idx = last.clone() + idx; - let mut bytes = [0x00; 32]; - bytes[0] = *v; - let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); - (idx, v) - }) - .collect::>(); - existing.extend(new); - Some(Elem::ConcreteDyn(Box::new(RangeDyn { - minimized: None, - maximized: None, - len: Elem::from(Concrete::from(U256::from(val.len()))), - val: existing, - loc: other.loc, - }))) - } - ( - Concrete::String(val), - Some(( - _, - Elem::Concrete(RangeConcrete { - val: Concrete::String(..), - .. 
- }), - )), - ) - | (Concrete::String(val), None) => { - let last = self.len.clone(); - let mut existing = self.val.clone(); - let new = val - .chars() - .enumerate() - .map(|(i, v)| { - let idx = Elem::from(Concrete::from(U256::from(i))); - let idx = last.clone() + idx; - let mut bytes = [0x00; 32]; - v.encode_utf8(&mut bytes[..]); - let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); - (idx, v) - }) - .collect::>(); - existing.extend(new); - Some(Elem::ConcreteDyn(Box::new(RangeDyn { - minimized: None, - maximized: None, - len: Elem::from(Concrete::from(U256::from(val.len()))), - val: existing, - loc: other.loc, - }))) - } - _e => None, - } - } -} - -impl RangeConcat> for RangeDyn { - fn range_concat(&self, other: &Self) -> Option> { - let val: Option<(_, &Elem)> = self.val.iter().take(1).next(); - let o_val: Option<(_, &Elem)> = other.val.iter().take(1).next(); - match (val, o_val) { - ( - Some(( - _, - &Elem::Concrete(RangeConcrete { - val: Concrete::Bytes(..), - .. - }), - )), - Some(( - _, - &Elem::Concrete(RangeConcrete { - val: Concrete::Bytes(..), - .. - }), - )), - ) => { - let last = self.len.clone(); - let mut existing = self.val.clone(); - let other_vals = other - .val - .clone() - .into_iter() - .map(|(i, v)| (i + last.clone(), v)) - .collect::>(); - - existing.extend(other_vals); - Some(Elem::ConcreteDyn(Box::new(RangeDyn { - minimized: None, - maximized: None, - len: self.len.clone() + other.len.clone(), - val: existing, - loc: other.loc, - }))) - } - (Some((_, l @ Elem::Dynamic(_))), None) => Some(l.clone()), - (None, Some((_, r @ Elem::Dynamic(_)))) => Some(r.clone()), - (None, None) => Some(Elem::ConcreteDyn(Box::new(self.clone()))), - _e => None, - } - } -} - -impl RangeConcat for Elem { - fn range_concat(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_concat(b), - (Elem::ConcreteDyn(a), Elem::ConcreteDyn(b)) => a.range_concat(&**b), - (Elem::Concrete(c), Elem::ConcreteDyn(d)) - | (Elem::ConcreteDyn(d), Elem::Concrete(c)) => d.range_concat(c), - _e => None, - } - } -} - -pub trait RangeBitwise { - /// Perform a bitwise AND - fn range_bit_and(&self, other: &Rhs) -> Option>; - /// Perform a bitwise OR - fn range_bit_or(&self, other: &Rhs) -> Option>; - /// Perform a bitwise XOR - fn range_bit_xor(&self, other: &Rhs) -> Option>; - /// Perform a bitwise NOT - fn range_bit_not(&self) -> Option>; -} - -impl RangeBitwise for RangeConcrete { - fn range_bit_and(&self, other: &Self) -> Option> { - match (&self.val, &other.val) { - (Concrete::Uint(s, a), Concrete::Uint(s2, b)) => { - let size = if s > s2 { s } else { s2 }; - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Uint(*size, *a & *b), - loc: self.loc, - })) - } - (Concrete::Int(s, a), Concrete::Int(s2, b)) => { - let size = if s > s2 { s } else { s2 }; - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*size, *a & *b), - loc: self.loc, - })) - } - (Concrete::Uint(s, a), Concrete::Int(s2, b)) => { - let size = if s > s2 { s } else { s2 }; - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Uint(*size, *a & b.into_raw()), - loc: self.loc, - })) - } - (Concrete::Int(s, a), Concrete::Uint(s2, b)) => { - let size = if s > s2 { s } else { s2 }; - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Uint(*size, a.into_raw() & *b), - loc: self.loc, - })) - } - (Concrete::Bytes(s, a), Concrete::Bytes(s2, b)) => { - let size = if s > s2 { s } else { s2 }; - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bytes(*size, a & b), - loc: self.loc, - 
})) - } - _ => None, - } - } - - fn range_bit_or(&self, other: &Self) -> Option> { - match (&self.val, &other.val) { - (Concrete::Uint(s, a), Concrete::Uint(s2, b)) => { - let size = if s > s2 { s } else { s2 }; - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Uint(*size, *a | *b), - loc: self.loc, - })) - } - (Concrete::Int(s, a), Concrete::Int(s2, b)) => { - let size = if s > s2 { s } else { s2 }; - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*size, *a | *b), - loc: self.loc, - })) - } - (Concrete::Bytes(s, a), Concrete::Bytes(s2, b)) => { - let size = if s > s2 { s } else { s2 }; - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bytes(*size, a | b), - loc: self.loc, - })) - } - _ => None, - } - } - - fn range_bit_xor(&self, other: &Self) -> Option> { - match (&self.val, &other.val) { - (Concrete::Uint(s, a), Concrete::Uint(s2, b)) => { - let size = if s > s2 { s } else { s2 }; - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Uint(*size, *a ^ *b), - loc: self.loc, - })) - } - (Concrete::Int(s, a), Concrete::Int(s2, b)) => { - let size = if s > s2 { s } else { s2 }; - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*size, *a ^ *b), - loc: self.loc, - })) - } - (Concrete::Bytes(s, a), Concrete::Bytes(s2, b)) => { - let size = if s > s2 { s } else { s2 }; - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bytes(*size, a ^ b), - loc: self.loc, - })) - } - _ => None, - } - } - - fn range_bit_not(&self) -> Option> { - match &self.val { - Concrete::Uint(size, a) => { - let max = Concrete::max(&self.val).unwrap().uint_val().unwrap(); - let val = U256( - a.0.into_iter() - .map(|i| !i) - .collect::>() - .try_into() - .unwrap(), - ); - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Uint(*size, val & max), - loc: self.loc, - })) - } - Concrete::Int(size, a) => { - let (val, _) = a.overflowing_neg(); - let (val, _) = val.overflowing_sub(1.into()); - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Int(*size, val), - loc: self.loc, - })) - } - Concrete::Bytes(s, a) => { - let mut h = H256::default(); - (0..*s).for_each(|i| { - h.0[i as usize] = !a.0[i as usize]; - }); - Some(Elem::Concrete(RangeConcrete { - val: Concrete::Bytes(*s, h), - loc: self.loc, - })) - } - _ => None, - } - } -} - -impl RangeBitwise for Elem { - fn range_bit_and(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_bit_and(b), - _ => None, - } - } - fn range_bit_or(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_bit_or(b), - _ => None, - } - } - fn range_bit_xor(&self, other: &Self) -> Option> { - match (self, other) { - (Elem::Concrete(a), Elem::Concrete(b)) => a.range_bit_xor(b), - _ => None, - } - } - - fn range_bit_not(&self) -> Option> { - match self { - Elem::Concrete(a) => a.range_bit_not(), - _ => None, - } - } -} diff --git a/src/context/exprs/array.rs b/src/context/exprs/array.rs deleted file mode 100644 index 2acc730b..00000000 --- a/src/context/exprs/array.rs +++ /dev/null @@ -1,183 +0,0 @@ -use crate::context::ExprErr; -use crate::context::IntoExprErr; -use crate::{ - context::exprs::{member_access::MemberAccess, require::Require}, - Builtin, ContextBuilder, Edge, Node, VarType, -}; -use shared::{analyzer::AnalyzerLike, context::*, range::elem::RangeOp}; -use solang_parser::helpers::CodeLocation; -use solang_parser::pt::{Expression, Loc}; - -impl Array for T where T: AnalyzerLike + Sized {} -pub trait Array: AnalyzerLike + Sized { - /// Gets the 
array type - #[tracing::instrument(level = "trace", skip_all)] - fn array_ty(&mut self, ty_expr: &Expression, ctx: ContextNode) -> Result<(), ExprErr> { - self.parse_ctx_expr(ty_expr, ctx)?; - self.apply_to_edges(ctx, ty_expr.loc(), &|analyzer, ctx, loc| { - if let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? { - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.match_ty(ctx, ty_expr, ret) - } else { - Err(ExprErr::NoLhs( - loc, - "No array specified for getting array type".to_string(), - )) - } - }) - } - - fn match_ty( - &mut self, - ctx: ContextNode, - ty_expr: &Expression, - ret: ExprRet, - ) -> Result<(), ExprErr> { - match ret { - ExprRet::Single(inner_ty) | ExprRet::SingleLiteral(inner_ty) => { - if let Some(var_type) = VarType::try_from_idx(self, inner_ty) { - let dyn_b = Builtin::Array(var_type); - if let Some(idx) = self.builtins().get(&dyn_b) { - ctx.push_expr(ExprRet::Single(*idx), self) - .into_expr_err(ty_expr.loc())?; - } else { - let idx = self.add_node(Node::Builtin(dyn_b.clone())); - self.builtins_mut().insert(dyn_b, idx); - ctx.push_expr(ExprRet::Single(idx), self) - .into_expr_err(ty_expr.loc())?; - } - Ok(()) - } else { - Err(ExprErr::ArrayTy(ty_expr.loc(), "Expected to be able to convert to a var type from an index to determine array type. This is a bug. Please report it at github.com/nascentxyz/pyrometer.".to_string())) - } - } - ExprRet::Multi(inner) => { - inner - .into_iter() - .map(|i| self.match_ty(ctx, ty_expr, i)) - .collect::, ExprErr>>()?; - Ok(()) - } - ExprRet::CtxKilled(kind) => { - ctx.kill(self, ty_expr.loc(), kind) - .into_expr_err(ty_expr.loc())?; - Ok(()) - } - ExprRet::Null => Ok(()), - } - } - - /// Indexes into an array - #[tracing::instrument(level = "trace", skip_all)] - fn index_into_array( - &mut self, - loc: Loc, - ty_expr: &Expression, - index_expr: &Expression, - ctx: ContextNode, - ) -> Result<(), ExprErr> { - tracing::trace!("Indexing into array"); - self.parse_ctx_expr(index_expr, ctx)?; - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(index_tys) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Could not find the index variable".to_string())) - }; - if matches!(index_tys, ExprRet::CtxKilled(_)) { - ctx.push_expr(index_tys, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.parse_ctx_expr(ty_expr, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(inner_tys) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoLhs(loc, "Could not find the array".to_string())) - }; - if matches!(inner_tys, ExprRet::CtxKilled(_)) { - ctx.push_expr(inner_tys, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.index_into_array_inner( - ctx, - loc, - inner_tys.flatten(), - index_tys.clone().flatten(), - ) - }) - }) - } - - #[tracing::instrument(level = "trace", skip_all)] - fn index_into_array_inner( - &mut self, - ctx: ContextNode, - loc: Loc, - inner_paths: ExprRet, - index_paths: ExprRet, - ) -> Result<(), ExprErr> { - match (inner_paths, index_paths) { - (_, ExprRet::Null) | (ExprRet::Null, _) => Ok(()), - (_, ExprRet::CtxKilled(kind)) => { - ctx.kill(self, loc, kind).into_expr_err(loc) - } - (ExprRet::CtxKilled(kind), _) => { - ctx.kill(self, loc, kind).into_expr_err(loc) - } - (ExprRet::Single(parent), ExprRet::Single(index)) | (ExprRet::Single(parent), ExprRet::SingleLiteral(index)) => { - let index = ContextVarNode::from(index).latest_version(self); - let parent = ContextVarNode::from(parent).latest_version(self); - let idx = self.advance_var_in_ctx(index, loc, ctx)?; - if !parent.is_mapping(self).into_expr_err(loc)? && parent.is_indexable(self).into_expr_err(loc)? { - let len_var = self.tmp_length(parent, ctx, loc).latest_version(self); - self.handle_require_inner( - ctx, - loc, - &ExprRet::Single(len_var.latest_version(self).into()), - &ExprRet::Single(idx.latest_version(self).into()), - RangeOp::Gt, - RangeOp::Lt, - (RangeOp::Lte, RangeOp::Gte), - )?; - } - - let name = format!("{}[{}]", parent.name(self).into_expr_err(loc)?, index.name(self).into_expr_err(loc)?); - - if let Some(index_var) = ctx.var_by_name_or_recurse(self, &name).into_expr_err(loc)? { - let index_var = index_var.latest_version(self); - let index_var = self.advance_var_in_ctx(index_var, loc, ctx)?; - ctx.push_expr(ExprRet::Single(index_var.into()), self).into_expr_err(loc)?; - Ok(()) - } else { - let ty = parent.ty(self).into_expr_err(loc)?.clone(); - let ty = ty.get_index_dynamic_ty(index, self).into_expr_err(loc)?; - let index_var = ContextVar { - loc: Some(loc), - name: name.clone(), - display_name: format!( - "{}[{}]", - parent.display_name(self).into_expr_err(loc)?, - index.display_name(self).into_expr_err(loc)? - ), - storage: parent.storage(self).into_expr_err(loc)?.clone(), - is_tmp: false, - tmp_of: None, - is_symbolic: true, - is_return: false, - ty, - }; - - let idx_node = self.add_node(Node::ContextVar(index_var)); - self.add_edge(idx_node, parent, Edge::Context(ContextEdge::IndexAccess)); - self.add_edge(idx_node, ctx, Edge::Context(ContextEdge::Variable)); - ctx.add_var(idx_node.into(), self).into_expr_err(loc)?; - self.add_edge(index, idx_node, Edge::Context(ContextEdge::Index)); - - ctx.push_expr(ExprRet::Single(idx_node), self).into_expr_err(loc)?; - Ok(()) - } - } - e => Err(ExprErr::ArrayIndex(loc, format!("Expected single expr evaluation of index expression, but was: {e:?}. This is a bug. 
Please report it at github.com/nascentxyz/pyrometer."))), - } - } -} diff --git a/src/context/exprs/bin_op.rs b/src/context/exprs/bin_op.rs deleted file mode 100644 index 22804398..00000000 --- a/src/context/exprs/bin_op.rs +++ /dev/null @@ -1,797 +0,0 @@ -use crate::context::exprs::require::Require; -use crate::context::exprs::IntoExprErr; -use crate::context::{ContextBuilder, ExprErr}; -use ethers_core::types::{I256, U256}; - -use shared::range::elem::RangeElem; -use shared::range::elem_ty::RangeExpr; -use shared::{ - analyzer::AnalyzerLike, - context::*, - nodes::{BuiltInNode, Builtin, Concrete, VarType}, - range::{ - elem::RangeOp, - elem_ty::{Dynamic, Elem}, - Range, RangeEval, SolcRange, - }, - Edge, Node, -}; - -use solang_parser::pt::{Expression, Loc}; - -impl BinOp for T where T: AnalyzerLike + Sized {} -pub trait BinOp: AnalyzerLike + Sized { - /// Evaluate and execute a binary operation expression - #[tracing::instrument(level = "trace", skip_all)] - fn op_expr( - &mut self, - loc: Loc, - lhs_expr: &Expression, - rhs_expr: &Expression, - ctx: ContextNode, - op: RangeOp, - assign: bool, - ) -> Result<(), ExprErr> { - self.parse_ctx_expr(rhs_expr, ctx)?; - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Binary operation had no right hand side".to_string())) - }; - if matches!(rhs_paths, ExprRet::CtxKilled(_)) { - ctx.push_expr(rhs_paths, analyzer).into_expr_err(loc)?; - return Ok(()); - } - let rhs_paths = rhs_paths.flatten(); - let rhs_ctx = ctx; - analyzer.parse_ctx_expr(lhs_expr, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
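The `index_into_array_inner` code deleted above does two things for a non-mapping indexable: it installs a `length > index` requirement via `handle_require_inner`, then reuses (or creates) a context variable named `parent[index]`, wiring it up with IndexAccess/Variable/Index edges. Below is a toy, self-contained model of that flow; `ToyCtx` and its fields are hypothetical stand-ins for the analyzer's context graph, not its real API:

    use std::collections::HashMap;

    // Hypothetical miniature of a context: recorded constraints plus a map from
    // derived variable names (e.g. "arr[i]") to variable ids.
    #[derive(Default)]
    struct ToyCtx {
        constraints: Vec<String>,
        vars: HashMap<String, u32>,
        next_id: u32,
    }

    impl ToyCtx {
        fn index_into_array(&mut self, parent: &str, index: &str) -> u32 {
            // the bounds requirement is installed before the access is materialized
            self.constraints.push(format!("{}.length > {}", parent, index));
            let name = format!("{}[{}]", parent, index);
            // reuse an existing access variable for the same name if present
            if let Some(&id) = self.vars.get(&name) {
                return id;
            }
            self.next_id += 1;
            self.vars.insert(name, self.next_id);
            self.next_id
        }
    }

    fn main() {
        let mut ctx = ToyCtx::default();
        let first = ctx.index_into_array("arr", "i");
        let second = ctx.index_into_array("arr", "i");
        assert_eq!(first, second); // the same name resolves to the same variable
        assert_eq!(ctx.constraints.len(), 2); // the requirement is re-applied per access
    }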
else { - return Err(ExprErr::NoLhs(loc, format!("Binary operation had no left hand side, Expr: {lhs_expr:#?}, rhs ctx: {}, curr ctx: {}", rhs_ctx.path(analyzer), ctx.path(analyzer)))) - }; - if matches!(lhs_paths, ExprRet::CtxKilled(_)) { - ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; - return Ok(()); - } - let lhs_paths = lhs_paths.flatten(); - analyzer.op_match(ctx, loc, &lhs_paths, &rhs_paths, op, assign) - }) - }) - } - - fn op_match( - &mut self, - ctx: ContextNode, - loc: Loc, - lhs_paths: &ExprRet, - rhs_paths: &ExprRet, - op: RangeOp, - assign: bool, - ) -> Result<(), ExprErr> { - match (lhs_paths, rhs_paths) { - (ExprRet::Null, _) => Err(ExprErr::NoLhs( - loc, - "No left hand side provided for binary operation".to_string(), - )), - (_, ExprRet::Null) => Err(ExprErr::NoRhs( - loc, - "No right hand side provided for binary operation".to_string(), - )), - (ExprRet::SingleLiteral(lhs), ExprRet::SingleLiteral(rhs)) => { - let lhs_cvar = ContextVarNode::from(*lhs).latest_version(self); - let rhs_cvar = ContextVarNode::from(*rhs).latest_version(self); - lhs_cvar.try_increase_size(self).into_expr_err(loc)?; - rhs_cvar.try_increase_size(self).into_expr_err(loc)?; - ctx.push_expr(self.op(loc, lhs_cvar, rhs_cvar, ctx, op, assign)?, self) - .into_expr_err(loc)?; - Ok(()) - } - (ExprRet::SingleLiteral(lhs), ExprRet::Single(rhs)) => { - ContextVarNode::from(*lhs) - .cast_from(&ContextVarNode::from(*rhs), self) - .into_expr_err(loc)?; - let lhs_cvar = ContextVarNode::from(*lhs).latest_version(self); - let rhs_cvar = ContextVarNode::from(*rhs).latest_version(self); - ctx.push_expr(self.op(loc, lhs_cvar, rhs_cvar, ctx, op, assign)?, self) - .into_expr_err(loc)?; - Ok(()) - } - (ExprRet::Single(lhs), ExprRet::SingleLiteral(rhs)) => { - ContextVarNode::from(*rhs) - .cast_from(&ContextVarNode::from(*lhs), self) - .into_expr_err(loc)?; - let lhs_cvar = ContextVarNode::from(*lhs).latest_version(self); - let rhs_cvar = ContextVarNode::from(*rhs).latest_version(self); - ctx.push_expr(self.op(loc, lhs_cvar, rhs_cvar, ctx, op, assign)?, self) - .into_expr_err(loc)?; - Ok(()) - } - (ExprRet::Single(lhs), ExprRet::Single(rhs)) => { - let lhs_cvar = ContextVarNode::from(*lhs).latest_version(self); - let rhs_cvar = ContextVarNode::from(*rhs).latest_version(self); - ctx.push_expr(self.op(loc, lhs_cvar, rhs_cvar, ctx, op, assign)?, self) - .into_expr_err(loc)?; - Ok(()) - } - (lhs @ ExprRet::Single(..), ExprRet::Multi(rhs_sides)) => { - rhs_sides - .iter() - .map(|expr_ret| self.op_match(ctx, loc, lhs, expr_ret, op, assign)) - .collect::, ExprErr>>()?; - Ok(()) - } - (ExprRet::Multi(lhs_sides), rhs @ ExprRet::Single(..)) => { - lhs_sides - .iter() - .map(|expr_ret| self.op_match(ctx, loc, expr_ret, rhs, op, assign)) - .collect::, ExprErr>>()?; - Ok(()) - } - (_, ExprRet::CtxKilled(kind)) => ctx.kill(self, loc, *kind).into_expr_err(loc), - (ExprRet::CtxKilled(kind), _) => ctx.kill(self, loc, *kind).into_expr_err(loc), - (ExprRet::Multi(lhs_sides), ExprRet::Multi(rhs_sides)) => Err(ExprErr::UnhandledCombo( - loc, - format!("Unhandled combination in binop: {lhs_sides:?} {rhs_sides:?}"), - )), - (l, r) => Err(ExprErr::UnhandledCombo( - loc, - format!("Unhandled combination in binop: {l:?} {r:?}"), - )), - } - } - - /// Execute a binary operation after parsing the expressions - #[tracing::instrument(level = "trace", skip_all)] - fn op( - &mut self, - loc: Loc, - lhs_cvar: ContextVarNode, - rhs_cvar: ContextVarNode, - ctx: ContextNode, - op: RangeOp, - assign: bool, - ) -> Result { - tracing::trace!( - 
"binary op: {} {} {}, assign: {}", - lhs_cvar.display_name(self).into_expr_err(loc)?, - op.to_string(), - rhs_cvar.display_name(self).into_expr_err(loc)?, - assign - ); - - let unchecked = match op { - RangeOp::Add(u) | RangeOp::Sub(u) | RangeOp::Mul(u) | RangeOp::Div(u) => u, - _ => false, - }; - - let new_lhs = if assign { - self.advance_var_in_ctx(lhs_cvar, loc, ctx)? - } else { - let mut new_lhs_underlying = ContextVar { - loc: Some(loc), - name: format!( - "tmp{}({} {} {})", - ctx.new_tmp(self).into_expr_err(loc)?, - lhs_cvar.name(self).into_expr_err(loc)?, - op.to_string(), - rhs_cvar.name(self).into_expr_err(loc)? - ), - display_name: format!( - "({} {} {})", - lhs_cvar.display_name(self).into_expr_err(loc)?, - op.to_string(), - rhs_cvar.display_name(self).into_expr_err(loc)? - ), - storage: None, - is_tmp: true, - is_symbolic: lhs_cvar.is_symbolic(self).into_expr_err(loc)? - || rhs_cvar.is_symbolic(self).into_expr_err(loc)?, - is_return: false, - tmp_of: Some(TmpConstruction::new(lhs_cvar, op, Some(rhs_cvar))), - ty: lhs_cvar.underlying(self).into_expr_err(loc)?.ty.clone(), - }; - - // will potentially mutate the ty from concrete to builtin with a concrete range - new_lhs_underlying - .ty - .concrete_to_builtin(self) - .into_expr_err(loc)?; - - let new_var = self.add_node(Node::ContextVar(new_lhs_underlying)); - ctx.add_var(new_var.into(), self).into_expr_err(loc)?; - self.add_edge(new_var, ctx, Edge::Context(ContextEdge::Variable)); - ContextVarNode::from(new_var) - }; - - let mut new_rhs = rhs_cvar.latest_version(self); - - let expr = Elem::Expr(RangeExpr::::new( - Elem::from(Dynamic::new(lhs_cvar.latest_version(self).into())), - op, - Elem::from(Dynamic::new(rhs_cvar.latest_version(self).into())), - )); - - // TODO: change to only hit this path if !uncheck - - // TODO: If one of lhs_cvar OR rhs_cvar are not symbolic, - // apply the requirement on the symbolic expression side instead of - // ignoring the case where - - // if lhs_cvar.is_symbolic(self) && new_rhs.is_symbolic(self) { - if !unchecked { - match op { - RangeOp::Div(..) | RangeOp::Mod => { - if new_rhs.is_const(self).into_expr_err(loc)? { - if new_rhs - .evaled_range_min(self) - .into_expr_err(loc)? - .expect("No range?") - .range_eq(&Elem::from(Concrete::from(U256::zero()))) - { - let res = ctx.kill(self, loc, KilledKind::Revert).into_expr_err(loc); - let _ = self.add_if_err(res); - - return Ok(ExprRet::CtxKilled(KilledKind::Revert)); - } - } else if new_rhs.is_symbolic(self).into_expr_err(loc)? { - let tmp_rhs = self.advance_var_in_ctx(new_rhs, loc, ctx)?; - let zero_node = self.add_node(Node::Concrete(Concrete::from(U256::zero()))); - let var = ContextVar::new_from_concrete( - Loc::Implicit, - ctx, - zero_node.into(), - self, - ); - let zero_node = self.add_node(Node::ContextVar(var.into_expr_err(loc)?)); - - if self - .require( - tmp_rhs, - zero_node.into(), - ctx, - loc, - RangeOp::Neq, - RangeOp::Eq, - (RangeOp::Eq, RangeOp::Neq), - )? 
- .is_none() - { - return Ok(ExprRet::CtxKilled(KilledKind::Revert)); - } - - let tmp_var = ContextVar { - loc: Some(loc), - name: format!( - "tmp{}({} != 0)", - ctx.new_tmp(self).into_expr_err(loc)?, - tmp_rhs.name(self).into_expr_err(loc)?, - ), - display_name: format!( - "({} != 0)", - tmp_rhs.display_name(self).into_expr_err(loc)?, - ), - storage: None, - is_tmp: true, - tmp_of: Some(TmpConstruction::new( - new_lhs, - RangeOp::Gt, - Some(zero_node.into()), - )), - is_symbolic: true, - is_return: false, - ty: VarType::BuiltIn( - BuiltInNode::from(self.builtin_or_add(Builtin::Bool)), - SolcRange::from(Concrete::Bool(true)), - ), - }; - - let cvar = ContextVarNode::from(self.add_node(Node::ContextVar(tmp_var))); - ctx.add_ctx_dep(cvar, self).into_expr_err(loc)?; - - let range = tmp_rhs - .ref_range(self) - .into_expr_err(loc)? - .expect("No range?"); - if range.min_is_negative(self).into_expr_err(loc)? { - let mut range_excls = range.range_exclusions(); - let excl = Elem::from(Concrete::from(I256::zero())); - if !range_excls.contains(&excl) { - range_excls.push(excl); - } - tmp_rhs - .set_range_exclusions(self, range_excls) - .into_expr_err(loc)?; - } else { - // the new min is max(1, rhs.min) - let min = Elem::max( - Elem::from(Dynamic::new(new_rhs.into())), - // tmp_rhs - // .range_min(self) - // .into_expr_err(loc)? - // .unwrap_or_else(|| { - // panic!("No range minimum: {:?}", tmp_rhs.underlying(self)) - // }), - Elem::from(Concrete::from(U256::from(1))).cast( - Elem::from(Dynamic::new(tmp_rhs.into())), // .range_min(self) - // .into_expr_err(loc)? - // .expect("No range minimum?"), - ), - ); - - tmp_rhs.set_range_min(self, min).into_expr_err(loc)?; - new_rhs = tmp_rhs; - } - } - } - RangeOp::Sub(..) => { - let lhs_cvar = lhs_cvar.latest_version(self); - if lhs_cvar.is_const(self).into_expr_err(loc)? { - if !lhs_cvar.is_int(self).into_expr_err(loc)? { - if let (Some(lmax), Some(rmin)) = ( - lhs_cvar.evaled_range_max(self).into_expr_err(loc)?, - rhs_cvar.evaled_range_min(self).into_expr_err(loc)?, - ) { - if matches!( - lmax.range_ord(&rmin), - Some(std::cmp::Ordering::Less) - | Some(std::cmp::Ordering::Equal) - ) { - ctx.kill(self, loc, KilledKind::Revert).into_expr_err(loc)?; - - return Ok(ExprRet::CtxKilled(KilledKind::Revert)); - } - } - } - } else if lhs_cvar.is_symbolic(self).into_expr_err(loc)? { - let tmp_lhs = self.advance_var_in_ctx(lhs_cvar, loc, ctx)?; - if self - .require( - tmp_lhs, - new_rhs, - ctx, - loc, - RangeOp::Gte, - RangeOp::Lte, - (RangeOp::Lte, RangeOp::Gte), - )? - .is_none() - { - return Ok(ExprRet::CtxKilled(KilledKind::Revert)); - } - // the new min is max(lhs.min, rhs.min) - let min = Elem::max( - Elem::from(Dynamic::new(lhs_cvar.into())), - // .range_min(self) - // .into_expr_err(loc)? 
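// Aside: a minimal sketch of the bound encoded here, assuming ordinary checked
// unsigned subtraction. For `lhs - rhs` not to revert, `lhs >= rhs` must hold, so once
// the requirement above succeeds the analyzer may raise lhs's minimum to
// max(lhs.min, rhs.min), per the comment below. Hypothetical ranges for illustration:
//     lhs in [0, 100], rhs in [3, 7]
//     after require(lhs >= rhs): lhs in [max(0, 3), 100] == [3, 100]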
- // .unwrap_or_else(|| { - // panic!( - // "No range minimum: {:?}", - // tmp_lhs.ty(self).unwrap().as_dot_str(self) - // ) - // }), - Elem::from(rhs_cvar), - ); - tmp_lhs.set_range_min(self, min).into_expr_err(loc)?; - - let tmp_var = ContextVar { - loc: Some(loc), - name: format!( - "tmp{}({} >= {})", - ctx.new_tmp(self).into_expr_err(loc)?, - tmp_lhs.name(self).into_expr_err(loc)?, - new_rhs.name(self).into_expr_err(loc)?, - ), - display_name: format!( - "({} >= {})", - tmp_lhs.display_name(self).unwrap(), - new_rhs.display_name(self).unwrap(), - ), - storage: None, - is_tmp: true, - tmp_of: Some(TmpConstruction::new( - tmp_lhs, - RangeOp::Gte, - Some(new_rhs), - )), - is_symbolic: true, - is_return: false, - ty: VarType::BuiltIn( - BuiltInNode::from(self.builtin_or_add(Builtin::Bool)), - SolcRange::from(Concrete::Bool(true)), - ), - }; - - let cvar = ContextVarNode::from(self.add_node(Node::ContextVar(tmp_var))); - ctx.add_ctx_dep(cvar, self).into_expr_err(loc)?; - } - } - RangeOp::Add(..) => { - let lhs_cvar = lhs_cvar.latest_version(self); - if lhs_cvar.is_symbolic(self).into_expr_err(loc)? { - let tmp_lhs = self.advance_var_in_ctx(lhs_cvar, loc, ctx)?; - - // the new max is min(lhs.max, (2**256 - rhs.min)) - let max = Elem::min( - Elem::from(Dynamic::new(lhs_cvar.into())), - // .range_max(self) - // .into_expr_err(loc)? - // .expect("No range max?"), - Elem::from(Concrete::from(U256::MAX)) - Elem::from(rhs_cvar), - ); - - tmp_lhs.set_range_max(self, max).into_expr_err(loc)?; - - let max_node = self.add_node(Node::Concrete(Concrete::from(U256::MAX))); - let tmp_max = ContextVar::new_from_concrete( - Loc::Implicit, - ctx, - max_node.into(), - self, - ); - let max_node = self.add_node(Node::ContextVar(tmp_max.into_expr_err(loc)?)); - - let tmp_rhs = self.op( - loc, - max_node.into(), - new_rhs, - ctx, - RangeOp::Sub(false), - false, - )?; - - if matches!(tmp_rhs, ExprRet::CtxKilled(_)) { - return Ok(tmp_rhs); - } - - let tmp_rhs = tmp_rhs.expect_single().into_expr_err(loc)?; - - if self - .require( - tmp_lhs, - tmp_rhs.into(), - ctx, - loc, - RangeOp::Lte, - RangeOp::Gte, - (RangeOp::Gte, RangeOp::Lte), - )? - .is_none() - { - return Ok(ExprRet::CtxKilled(KilledKind::Revert)); - } - - let tmp_var = ContextVar { - loc: Some(loc), - name: format!( - "tmp{}({} <= 2**256 - 1 - {})", - ctx.new_tmp(self).into_expr_err(loc)?, - tmp_lhs.name(self).into_expr_err(loc)?, - new_rhs.name(self).into_expr_err(loc)?, - ), - display_name: format!( - "({} <= 2**256 - 1 - {})", - tmp_lhs.display_name(self).unwrap(), - new_rhs.display_name(self).unwrap(), - ), - storage: None, - is_tmp: true, - tmp_of: Some(TmpConstruction::new( - tmp_lhs, - RangeOp::Lte, - Some(tmp_rhs.into()), - )), - is_symbolic: true, - is_return: false, - ty: VarType::BuiltIn( - BuiltInNode::from(self.builtin_or_add(Builtin::Bool)), - SolcRange::from(Concrete::Bool(true)), - ), - }; - - let cvar = ContextVarNode::from(self.add_node(Node::ContextVar(tmp_var))); - ctx.add_ctx_dep(cvar, self).into_expr_err(loc)?; - } - } - RangeOp::Mul(..) => { - let lhs_cvar = lhs_cvar.latest_version(self); - if lhs_cvar.is_symbolic(self).into_expr_err(loc)? { - let tmp_lhs = self.advance_var_in_ctx(lhs_cvar, loc, ctx)?; - - // the new max is min(lhs.max, (2**256 / max(1, rhs.min))) - let max = Elem::min( - Elem::from(Dynamic::new(lhs_cvar.into())), - // .range_max(self) - // .into_expr_err(loc)? 
- // .expect("No range max?"), - Elem::from(Concrete::from(U256::MAX)) - / Elem::max( - Elem::from(Concrete::from(U256::from(1))), - Elem::from(rhs_cvar), - ), - ); - - tmp_lhs.set_range_max(self, max).into_expr_err(loc)?; - - let max_node = self.add_node(Node::Concrete(Concrete::from(U256::MAX))); - let tmp_max = ContextVar::new_from_concrete( - Loc::Implicit, - ctx, - max_node.into(), - self, - ); - let max_node = self.add_node(Node::ContextVar(tmp_max.into_expr_err(loc)?)); - - let tmp_rhs = - self.op(loc, max_node.into(), new_rhs, ctx, RangeOp::Div(true), true)?; - - if matches!(tmp_rhs, ExprRet::CtxKilled(_)) { - return Ok(tmp_rhs); - } - - let tmp_rhs = tmp_rhs.expect_single().into_expr_err(loc)?; - - if self - .require( - tmp_lhs, - tmp_rhs.into(), - ctx, - loc, - RangeOp::Lte, - RangeOp::Gte, - (RangeOp::Gte, RangeOp::Lte), - )? - .is_none() - { - return Ok(ExprRet::CtxKilled(KilledKind::Revert)); - } - - let tmp_var = ContextVar { - loc: Some(loc), - name: format!( - "tmp{}({} <= (2**256 - 1) / {})", - ctx.new_tmp(self).into_expr_err(loc)?, - tmp_lhs.name(self).into_expr_err(loc)?, - new_rhs.name(self).into_expr_err(loc)?, - ), - display_name: format!( - "({} <= (2**256 - 1) / {})", - tmp_lhs.display_name(self).unwrap(), - new_rhs.display_name(self).unwrap(), - ), - storage: None, - is_tmp: true, - tmp_of: Some(TmpConstruction::new( - tmp_lhs, - RangeOp::Lte, - Some(tmp_rhs.into()), - )), - is_symbolic: true, - is_return: false, - ty: VarType::BuiltIn( - BuiltInNode::from(self.builtin_or_add(Builtin::Bool)), - SolcRange::from(Concrete::Bool(true)), - ), - }; - - let cvar = ContextVarNode::from(self.add_node(Node::ContextVar(tmp_var))); - ctx.add_ctx_dep(cvar, self).into_expr_err(loc)?; - } - } - RangeOp::Exp => { - if new_rhs.is_const(self).into_expr_err(loc)? { - if matches!( - new_rhs - .evaled_range_min(self) - .into_expr_err(loc)? - .expect("No range") - .range_ord(&Elem::from(Concrete::from(U256::zero()))), - Some(std::cmp::Ordering::Less) - ) { - ctx.kill(self, loc, KilledKind::Revert).into_expr_err(loc)?; - return Ok(ExprRet::CtxKilled(KilledKind::Revert)); - } - } else if new_rhs.is_symbolic(self).into_expr_err(loc)? { - let tmp_rhs = self.advance_var_in_ctx(rhs_cvar, loc, ctx)?; - // the new min is max(lhs.min, rhs.min) - let min = Elem::max( - Elem::from(Dynamic::new(rhs_cvar.into())), - // .range_min(self) - // .into_expr_err(loc)? - // .expect("No range minimum?"), - Elem::from(Concrete::from(U256::zero())), - ); - - tmp_rhs.set_range_min(self, min).into_expr_err(loc)?; - - let zero_node = self.add_node(Node::Concrete(Concrete::from(U256::zero()))); - let tmp_zero = ContextVar::new_from_concrete( - Loc::Implicit, - ctx, - zero_node.into(), - self, - ); - let zero_node = - self.add_node(Node::ContextVar(tmp_zero.into_expr_err(loc)?)); - - if self - .require( - tmp_rhs, - zero_node.into(), - ctx, - loc, - RangeOp::Gte, - RangeOp::Lte, - (RangeOp::Lte, RangeOp::Gte), - )? 
- .is_none() - { - return Ok(ExprRet::CtxKilled(KilledKind::Revert)); - } - - let tmp_var = ContextVar { - loc: Some(loc), - name: format!( - "tmp{}({} >= 0)", - ctx.new_tmp(self).into_expr_err(loc)?, - tmp_rhs.name(self).into_expr_err(loc)?, - ), - display_name: format!( - "({} >= 0)", - tmp_rhs.display_name(self).into_expr_err(loc)?, - ), - storage: None, - is_tmp: true, - tmp_of: Some(TmpConstruction::new( - tmp_rhs, - RangeOp::Gte, - Some(zero_node.into()), - )), - is_symbolic: true, - is_return: false, - ty: VarType::BuiltIn( - BuiltInNode::from(self.builtin_or_add(Builtin::Bool)), - SolcRange::from(Concrete::Bool(true)), - ), - }; - - let cvar = ContextVarNode::from(self.add_node(Node::ContextVar(tmp_var))); - ctx.add_ctx_dep(cvar, self).into_expr_err(loc)?; - new_rhs = tmp_rhs; - } - } - _ => {} - } - } - - // let lhs_range = if let Some(lhs_range) = new_lhs.range(self).into_expr_err(loc)? { - // lhs_range - // } else { - // new_rhs - // .range(self) - // .into_expr_err(loc)? - // .expect("Neither lhs nor rhs had a usable range") - // }; - - // let func = SolcRange::dyn_fn_from_op(op); - // let new_range = func(lhs_range, new_rhs); - new_lhs - .set_range_min(self, expr.clone()) - .into_expr_err(loc)?; - new_lhs.set_range_max(self, expr).into_expr_err(loc)?; - - // last ditch effort to prevent exponentiation from having a minimum of 1 instead of 0. - // if the lhs is 0 check if the rhs is also 0, otherwise set minimum to 0. - if matches!(op, RangeOp::Exp) { - if let (Some(old_lhs_range), Some(rhs_range)) = ( - lhs_cvar - .latest_version(self) - .ref_range(self) - .into_expr_err(loc)?, - new_rhs.ref_range(self).into_expr_err(loc)?, - ) { - let zero = Elem::from(Concrete::from(U256::zero())); - let zero_range = SolcRange::new(zero.clone(), zero.clone(), vec![]); - // We have to check if the the lhs and the right hand side contain the zero range. - // If they both do, we have to set the minimum to zero due to 0**0 = 1, but 0**x = 0. - // This is technically a slight widening of the interval and could be improved. - if old_lhs_range.contains(&zero_range, self) - && rhs_range.contains(&zero_range, self) - { - new_lhs.set_range_min(self, zero).into_expr_err(loc)?; - } - } - } - Ok(ExprRet::Single(new_lhs.into())) - } - - #[tracing::instrument(level = "trace", skip_all)] - fn bit_not( - &mut self, - loc: Loc, - lhs_expr: &Expression, - ctx: ContextNode, - ) -> Result<(), ExprErr> { - self.parse_ctx_expr(lhs_expr, ctx)?; - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(lhs) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoRhs(loc, "Not operation had no element".to_string())) - }; - - if matches!(lhs, ExprRet::CtxKilled(_)) { - ctx.push_expr(lhs, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.bit_not_inner(ctx, loc, lhs.flatten()) - }) - } - - #[tracing::instrument(level = "trace", skip_all)] - fn bit_not_inner( - &mut self, - ctx: ContextNode, - loc: Loc, - lhs_expr: ExprRet, - ) -> Result<(), ExprErr> { - match lhs_expr { - ExprRet::CtxKilled(kind) => { - ctx.kill(self, loc, kind).into_expr_err(loc)?; - ctx.push_expr(lhs_expr, self).into_expr_err(loc)?; - Ok(()) - } - ExprRet::SingleLiteral(lhs) => { - // TODO: try to pop from the stack and if there is a single element there - // use it as a type hint, then place it back on the stack - ContextVarNode::from(lhs) - .try_increase_size(self) - .into_expr_err(loc)?; - self.bit_not_inner(ctx, loc, ExprRet::Single(lhs))?; - Ok(()) - } - ExprRet::Single(lhs) => { - let lhs_cvar = ContextVarNode::from(lhs); - tracing::trace!( - "bitwise not: {}", - lhs_cvar.display_name(self).into_expr_err(loc)? - ); - let out_var = ContextVar { - loc: Some(loc), - name: format!( - "tmp{}(~{})", - ctx.new_tmp(self).into_expr_err(loc)?, - lhs_cvar.name(self).into_expr_err(loc)?, - ), - display_name: format!("~{}", lhs_cvar.display_name(self).into_expr_err(loc)?,), - storage: None, - is_tmp: true, - tmp_of: Some(TmpConstruction::new(lhs_cvar, RangeOp::BitNot, None)), - is_symbolic: lhs_cvar.is_symbolic(self).into_expr_err(loc)?, - is_return: false, - ty: lhs_cvar.underlying(self).into_expr_err(loc)?.ty.clone(), - }; - - let expr = Elem::Expr(RangeExpr::::new( - Elem::from(Dynamic::new(lhs_cvar.latest_version(self).into())), - RangeOp::BitNot, - Elem::Null, - )); - - let out_var = ContextVarNode::from(self.add_node(Node::ContextVar(out_var))); - - out_var - .set_range_min(self, expr.clone()) - .into_expr_err(loc)?; - out_var.set_range_max(self, expr).into_expr_err(loc)?; - ctx.push_expr(ExprRet::Single(out_var.into()), self) - .into_expr_err(loc)?; - Ok(()) - } - ExprRet::Multi(f) => Err(ExprErr::MultiNot( - loc, - format!("Multiple elements in bitwise not expression: {f:?}"), - )), - ExprRet::Null => Err(ExprErr::NoRhs( - loc, - "No right hand side in `not` expression".to_string(), - )), - } - } -} diff --git a/src/context/exprs/member_access.rs b/src/context/exprs/member_access.rs deleted file mode 100644 index b3b68111..00000000 --- a/src/context/exprs/member_access.rs +++ /dev/null @@ -1,1092 +0,0 @@ -use crate::context::exprs::env::Env; -use crate::context::exprs::IntoExprErr; -use crate::context::ExprErr; -use crate::{context::exprs::variable::Variable, ContextBuilder, NodeIdx}; -use petgraph::{visit::EdgeRef, Direction}; -use shared::range::elem_ty::Elem; -use shared::range::Range; -use shared::{ - analyzer::AnalyzerLike, - context::*, - nodes::*, - range::SolcRange, - {Edge, Node}, -}; -use std::collections::BTreeSet; - -use ethers_core::types::{I256, U256}; - -use solang_parser::pt::{Expression, Identifier, Loc}; - -impl MemberAccess for T where T: AnalyzerLike + Sized {} -pub trait MemberAccess: AnalyzerLike + Sized { - fn visible_member_funcs( - &mut self, - ctx: ContextNode, - loc: Loc, - member_idx: NodeIdx, - ) -> Result, ExprErr> { - let res = match self.node(member_idx) { - Node::ContextVar(cvar) => match &cvar.ty { - VarType::User(TypeNode::Contract(con_node), _) => { - let mut funcs = con_node.linearized_functions(self); - self - .possible_library_funcs(ctx, con_node.0.into()) - .into_iter() - .for_each(|func| { - let 
name = func.name(self).unwrap(); - funcs.entry(name).or_insert(func); - }); - funcs.values().copied().collect() - }, - VarType::BuiltIn(bn, _) => self - .possible_library_funcs(ctx, bn.0.into()) - .into_iter() - .collect::>(), - VarType::Concrete(cnode) => { - let b = cnode.underlying(self).unwrap().as_builtin(); - let bn = self.builtin_or_add(b); - self.possible_library_funcs(ctx, bn) - .into_iter() - .collect::>() - } - VarType::User(TypeNode::Struct(sn), _) => self - .possible_library_funcs(ctx, sn.0.into()) - .into_iter() - .collect::>(), - VarType::User(TypeNode::Enum(en), _) => self - .possible_library_funcs(ctx, en.0.into()) - .into_iter() - .collect::>(), - VarType::User(TypeNode::Ty(ty), _) => self - .possible_library_funcs(ctx, ty.0.into()) - .into_iter() - .collect::>(), - VarType::User(TypeNode::Func(func_node), _) => self - .possible_library_funcs(ctx, func_node.0.into()) - .into_iter() - .collect::>(), - VarType::User(TypeNode::Unresolved(n), _) => { - match self.node(*n) { - Node::Unresolved(ident) => { - return Err(ExprErr::Unresolved(loc, format!("The type \"{}\" is currently unresolved but should have been resolved by now. This is a bug.", ident.name))) - } - _ => unreachable!() - } - } - }, - Node::Contract(_) => ContractNode::from(member_idx).funcs(self), - Node::Concrete(_) - | Node::Ty(_) - | Node::Struct(_) - | Node::Function(_) - | Node::Enum(_) - | Node::Builtin(_) => self - .possible_library_funcs(ctx, member_idx) - .into_iter() - .collect::>(), - e => { - return Err(ExprErr::MemberAccessNotFound( - loc, - format!("This type cannot have member functions: {:?}", e), - )) - } - }; - Ok(res) - } - - /// Gets the array type - #[tracing::instrument(level = "trace", skip_all)] - fn member_access( - &mut self, - loc: Loc, - member_expr: &Expression, - ident: &Identifier, - ctx: ContextNode, - ) -> Result<(), ExprErr> { - // TODO: this is wrong as it overwrites a function call of the form elem.length(...) i believe - if ident.name == "length" { - return self.length(loc, member_expr, ctx); - } - - self.parse_ctx_expr(member_expr, ctx)?; - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoLhs(loc, "Attempted to perform member access without a left-hand side".to_string())); - }; - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.match_member(ctx, loc, ident, ret) - }) - } - - fn match_member( - &mut self, - ctx: ContextNode, - loc: Loc, - ident: &Identifier, - ret: ExprRet, - ) -> Result<(), ExprErr> { - match ret { - ExprRet::Single(idx) | ExprRet::SingleLiteral(idx) => { - ctx.push_expr(self.member_access_inner(loc, idx, ident, ctx)?, self) - .into_expr_err(loc)?; - Ok(()) - } - ExprRet::Multi(inner) => inner - .into_iter() - .try_for_each(|ret| self.match_member(ctx, loc, ident, ret)), - ExprRet::CtxKilled(kind) => ctx.kill(self, loc, kind).into_expr_err(loc), - ExprRet::Null => Ok(()), - } - } - - fn member_access_var_ty( - &mut self, - cvar: ContextVar, - loc: Loc, - member_idx: NodeIdx, - ident: &Identifier, - ctx: ContextNode, - ) -> Result { - match &cvar.ty { - VarType::User(TypeNode::Struct(struct_node), _) => { - self.struct_member_access(member_idx, *struct_node, ident, ctx, loc, Some(cvar)) - } - VarType::User(TypeNode::Enum(enum_node), _) => { - self.enum_member_access(member_idx, *enum_node, ident, ctx, loc) - } - VarType::User(TypeNode::Ty(ty_node), _) => { - self.ty_member_access(member_idx, *ty_node, ident, ctx, loc, Some(cvar)) - } - VarType::User(TypeNode::Contract(con_node), _) => { - self.contract_member_access(member_idx, *con_node, ident, ctx, loc, Some(cvar)) - } - VarType::BuiltIn(bn, _) => self.builtin_member_access( - loc, - ctx, - *bn, - ContextVarNode::from(member_idx) - .is_storage(self) - .into_expr_err(loc)?, - ident, - ), - VarType::Concrete(cn) => { - let builtin = cn.underlying(self).into_expr_err(loc)?.as_builtin(); - let bn = self.builtin_or_add(builtin).into(); - self.builtin_member_access( - loc, - ctx, - bn, - ContextVarNode::from(member_idx) - .is_storage(self) - .into_expr_err(loc)?, - ident, - ) - } - e => Err(ExprErr::UnhandledCombo( - loc, - format!("Unhandled member access: {:?}, {:?}", e, ident), - )), - } - } - - fn struct_member_access( - &mut self, - member_idx: NodeIdx, - struct_node: StructNode, - ident: &Identifier, - ctx: ContextNode, - loc: Loc, - maybe_parent: Option, - ) -> Result { - let name = format!( - "{}.{}", - struct_node.name(self).into_expr_err(loc)?, - ident.name - ); - tracing::trace!("Struct member access: {}", name); - if let Some(attr_var) = ctx.var_by_name_or_recurse(self, &name).into_expr_err(loc)? 
{ - Ok(ExprRet::Single(attr_var.latest_version(self).into())) - } else if let Some(field) = struct_node.find_field(self, ident) { - let cvar = if let Some(parent) = maybe_parent { - parent - } else { - ContextVar::maybe_from_user_ty(self, loc, struct_node.into()).unwrap() - }; - if let Some(field_cvar) = ContextVar::maybe_new_from_field( - self, - loc, - &cvar, - field.underlying(self).unwrap().clone(), - ) { - let fc_node = self.add_node(Node::ContextVar(field_cvar)); - self.add_edge( - fc_node, - ContextVarNode::from(member_idx).first_version(self), - Edge::Context(ContextEdge::AttrAccess), - ); - ctx.add_var(fc_node.into(), self).into_expr_err(loc)?; - self.add_edge(fc_node, ctx, Edge::Context(ContextEdge::Variable)); - Ok(ExprRet::Single(fc_node)) - } else { - panic!("Couldn't create field variable"); - } - } else if let Some(func) = self.library_func_search(ctx, struct_node.0.into(), ident) { - Ok(func) - } else { - Err(ExprErr::MemberAccessNotFound( - loc, - format!( - "Unknown member access \"{}\" on struct \"{}\"", - ident.name, - struct_node.name(self).into_expr_err(loc)? - ), - )) - } - } - - fn enum_member_access( - &mut self, - _member_idx: NodeIdx, - enum_node: EnumNode, - ident: &Identifier, - ctx: ContextNode, - loc: Loc, - ) -> Result { - tracing::trace!("Enum member access: {}", ident.name); - - if let Some(variant) = enum_node - .variants(self) - .into_expr_err(loc)? - .iter() - .find(|variant| **variant == ident.name) - { - let var = - ContextVar::new_from_enum_variant(self, ctx, loc, enum_node, variant.to_string()) - .into_expr_err(loc)?; - let cvar = self.add_node(Node::ContextVar(var)); - ctx.add_var(cvar.into(), self).into_expr_err(loc)?; - self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); - Ok(ExprRet::Single(cvar)) - } else if let Some(ret) = self.library_func_search(ctx, enum_node.0.into(), ident) { - Ok(ret) - } else { - Err(ExprErr::MemberAccessNotFound( - loc, - format!( - "Unknown member access \"{}\" on enum \"{}\"", - ident.name, - enum_node.name(self).into_expr_err(loc)? - ), - )) - } - } - - fn contract_member_access( - &mut self, - member_idx: NodeIdx, - con_node: ContractNode, - ident: &Identifier, - ctx: ContextNode, - loc: Loc, - maybe_parent: Option, - ) -> Result { - tracing::trace!( - "Contract member access: {}.{}", - con_node - .maybe_name(self) - .into_expr_err(loc)? - .unwrap_or_else(|| "interface".to_string()), - ident.name - ); - - if let Some(func) = con_node - .funcs(self) - .into_iter() - .find(|func_node| func_node.name(self).unwrap() == ident.name) - { - if let Some(func_cvar) = ContextVar::maybe_from_user_ty(self, loc, func.0.into()) { - let fn_node = self.add_node(Node::ContextVar(func_cvar)); - // this prevents attaching a dummy node to the parent which could cause a cycle in the graph - if maybe_parent.is_some() { - self.add_edge(fn_node, member_idx, Edge::Context(ContextEdge::FuncAccess)); - } - Ok(ExprRet::Single(fn_node)) - } else { - Err(ExprErr::MemberAccessNotFound( - loc, - format!( - "Unable to construct the function \"{}\" in contract \"{}\"", - ident.name, - con_node.name(self).into_expr_err(loc)? 
- ), - )) - } - } else if let Some(func) = con_node - .structs(self) - .into_iter() - .find(|struct_node| struct_node.name(self).unwrap() == ident.name) - { - if let Some(struct_cvar) = ContextVar::maybe_from_user_ty(self, loc, func.0.into()) { - let struct_node = self.add_node(Node::ContextVar(struct_cvar)); - // this prevents attaching a dummy node to the parent which could cause a cycle in the graph - if maybe_parent.is_some() { - self.add_edge( - struct_node, - member_idx, - Edge::Context(ContextEdge::StructAccess), - ); - } - return Ok(ExprRet::Single(struct_node)); - } else { - return Err(ExprErr::MemberAccessNotFound( - loc, - format!( - "Unable to construct the struct \"{}\" in contract \"{}\"", - ident.name, - con_node.name(self).into_expr_err(loc)? - ), - )); - } - } else { - match &*ident.name { - "name" => { - let c = Concrete::from(con_node.name(self).unwrap()); - let cnode = self.add_node(Node::Concrete(c)); - let cvar = ContextVar::new_from_concrete(loc, ctx, cnode.into(), self) - .into_expr_err(loc)?; - let node = self.add_node(Node::ContextVar(cvar)); - ctx.add_var(node.into(), self).into_expr_err(loc)?; - self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); - return Ok(ExprRet::Single(node)); - } - "creationCode" | "runtimeCode" => { - let bn = self.builtin_or_add(Builtin::DynamicBytes); - let cvar = - ContextVar::new_from_builtin(loc, bn.into(), self).into_expr_err(loc)?; - let node = self.add_node(Node::ContextVar(cvar)); - ctx.add_var(node.into(), self).into_expr_err(loc)?; - self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); - return Ok(ExprRet::Single(node)); - } - "interfaceId" => { - // TODO: actually calculate this - let bn = self.builtin_or_add(Builtin::Bytes(4)); - let cvar = - ContextVar::new_from_builtin(loc, bn.into(), self).into_expr_err(loc)?; - let node = self.add_node(Node::ContextVar(cvar)); - ctx.add_var(node.into(), self).into_expr_err(loc)?; - self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); - return Ok(ExprRet::Single(node)); - } - _ => { - return Err(ExprErr::ContractFunctionNotFound( - loc, - format!( - "No function or struct with name {:?} in contract: {:?}. Functions: {:#?}", - ident.name, - con_node.name(self).unwrap(), - con_node - .funcs(self) - .iter() - .map(|func| func.name(self).unwrap()) - .collect::>() - ), - )) - } - } - } - } - - fn ty_member_access( - &mut self, - _member_idx: NodeIdx, - ty_node: TyNode, - ident: &Identifier, - ctx: ContextNode, - loc: Loc, - _maybe_parent: Option, - ) -> Result { - let name = ident.name.split('(').collect::>()[0]; - if let Some(func) = self.library_func_search(ctx, ty_node.0.into(), ident) { - Ok(func) - } else if let Some(func) = self.builtin_fn_or_maybe_add(name) { - Ok(ExprRet::Single(func)) - } else { - Err(ExprErr::MemberAccessNotFound( - loc, - format!( - "Unknown member access \"{}\" on struct \"{}\"", - ident.name, - ty_node.name(self).into_expr_err(loc)? 
- ), - )) - } - } - - fn member_access_inner( - &mut self, - loc: Loc, - member_idx: NodeIdx, - ident: &Identifier, - ctx: ContextNode, - ) -> Result { - match self.node(member_idx) { - Node::ContextVar(cvar) => { - self.member_access_var_ty(cvar.clone(), loc, member_idx, ident, ctx) - } - Node::Contract(_c) => self.contract_member_access( - member_idx, - ContractNode::from(member_idx), - ident, - ctx, - loc, - None, - ), - Node::Struct(_c) => self.struct_member_access( - member_idx, - StructNode::from(member_idx), - ident, - ctx, - loc, - None, - ), - Node::Enum(_c) => { - self.enum_member_access(member_idx, EnumNode::from(member_idx), ident, ctx, loc) - } - Node::Ty(_ty) => { - self.ty_member_access(member_idx, TyNode::from(member_idx), ident, ctx, loc, None) - } - Node::Msg(_msg) => self.msg_access(loc, ctx, &ident.name), - Node::Block(_b) => self.block_access(loc, ctx, &ident.name), - Node::Builtin(ref _b) => { - self.builtin_member_access(loc, ctx, BuiltInNode::from(member_idx), false, ident) - } - e => Err(ExprErr::Todo( - loc, - format!("Member access on type: {e:?} is not yet supported"), - )), - } - } - - fn builtin_member_access( - &mut self, - loc: Loc, - ctx: ContextNode, - node: BuiltInNode, - is_storage: bool, - ident: &Identifier, - ) -> Result { - tracing::trace!("Looking for builtin member function"); - if let Some(ret) = self.library_func_search(ctx, node.0.into(), ident) { - Ok(ret) - } else { - match node.underlying(self).into_expr_err(loc)?.clone() { - Builtin::Address | Builtin::AddressPayable | Builtin::Payable => { - match &*ident.name { - "delegatecall" - | "call" - | "staticcall" - | "delegatecall(address, bytes)" - | "call(address, bytes)" - | "staticcall(address, bytes)" => { - // TODO: check if the address is known to be a certain type and the function signature is known - // and call into the function - let builtin_name = ident.name.split('(').collect::>()[0]; - let func_node = self.builtin_fn_or_maybe_add(builtin_name).unwrap(); - Ok(ExprRet::Single(func_node)) - } - "code" => { - // TODO: try to be smarter based on the address input - let bn = self.builtin_or_add(Builtin::DynamicBytes); - let cvar = ContextVar::new_from_builtin(loc, bn.into(), self) - .into_expr_err(loc)?; - let node = self.add_node(Node::ContextVar(cvar)); - ctx.add_var(node.into(), self).into_expr_err(loc)?; - self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); - Ok(ExprRet::Single(node)) - } - "codehash" => { - // TODO: try to be smarter based on the address input - let bn = self.builtin_or_add(Builtin::Bytes(32)); - let cvar = ContextVar::new_from_builtin(loc, bn.into(), self) - .into_expr_err(loc)?; - let node = self.add_node(Node::ContextVar(cvar)); - ctx.add_var(node.into(), self).into_expr_err(loc)?; - self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); - Ok(ExprRet::Single(node)) - } - "balance" => { - // TODO: try to be smarter based on the address input - let bn = self.builtin_or_add(Builtin::Uint(256)); - let cvar = ContextVar::new_from_builtin(loc, bn.into(), self) - .into_expr_err(loc)?; - let node = self.add_node(Node::ContextVar(cvar)); - ctx.add_var(node.into(), self).into_expr_err(loc)?; - self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); - Ok(ExprRet::Single(node)) - } - _ if ident.name.starts_with("send") => { - let bn = self.builtin_or_add(Builtin::Bool); - let cvar = ContextVar::new_from_builtin(loc, bn.into(), self) - .into_expr_err(loc)?; - let node = self.add_node(Node::ContextVar(cvar)); - ctx.add_var(node.into(), 
self).into_expr_err(loc)?; - self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); - Ok(ExprRet::Single(node)) - } - _ if ident.name.starts_with("transfer") => Ok(ExprRet::Multi(vec![])), - _ => Err(ExprErr::MemberAccessNotFound( - loc, - format!( - "Unknown member access on address: {:?}, ctx: {}", - ident.name, - ctx.path(self) - ), - )), - } - } - Builtin::Bool => Err(ExprErr::MemberAccessNotFound( - loc, - format!( - "Unknown member access on bool: {:?}, ctx: {}", - ident.name, - ctx.path(self) - ), - )), - Builtin::String => match ident.name.split('(').collect::>()[0] { - "concat" => { - let fn_node = self.builtin_fn_or_maybe_add("concat").unwrap(); - Ok(ExprRet::Single(fn_node)) - } - _ => Err(ExprErr::MemberAccessNotFound( - loc, - format!( - "Unknown member access on string: {:?}, ctx: {}", - ident.name, - ctx.path(self) - ), - )), - }, - Builtin::Bytes(size) => Err(ExprErr::MemberAccessNotFound( - loc, - format!("Unknown member access on bytes{}: {:?}", size, ident.name), - )), - Builtin::Rational => Err(ExprErr::MemberAccessNotFound( - loc, - format!( - "Unknown member access on rational: {:?}, ctx: {}", - ident.name, - ctx.path(self) - ), - )), - Builtin::DynamicBytes => match ident.name.split('(').collect::>()[0] { - "concat" => { - let fn_node = self.builtin_fn_or_maybe_add("concat").unwrap(); - Ok(ExprRet::Single(fn_node)) - } - _ => Err(ExprErr::MemberAccessNotFound( - loc, - format!( - "Unknown member access on bytes: {:?}, ctx: {}", - ident.name, - ctx.path(self) - ), - )), - }, - Builtin::Array(_) => { - if ident.name.starts_with("push") { - if is_storage { - let fn_node = self.builtin_fn_or_maybe_add("push").unwrap(); - Ok(ExprRet::Single(fn_node)) - } else { - Err(ExprErr::NonStoragePush( - loc, - "Trying to push to nonstorage array is not supported".to_string(), - )) - } - } else if ident.name.starts_with("pop") { - if is_storage { - let fn_node = self.builtin_fn_or_maybe_add("pop").unwrap(); - Ok(ExprRet::Single(fn_node)) - } else { - Err(ExprErr::NonStoragePush( - loc, - "Trying to pop from nonstorage array is not supported".to_string(), - )) - } - } else { - Err(ExprErr::MemberAccessNotFound( - loc, - format!( - "Unknown member access on array[]: {:?}, ctx: {}", - ident.name, - ctx.path(self) - ), - )) - } - } - Builtin::SizedArray(s, _) => Err(ExprErr::MemberAccessNotFound( - loc, - format!( - "Unknown member access on array[{s}]: {:?}, ctx: {}", - ident.name, - ctx.path(self) - ), - )), - Builtin::Mapping(_, _) => Err(ExprErr::MemberAccessNotFound( - loc, - format!( - "Unknown member access on mapping: {:?}, ctx: {}", - ident.name, - ctx.path(self) - ), - )), - Builtin::Func(_, _) => Err(ExprErr::MemberAccessNotFound( - loc, - format!( - "Unknown member access on func: {:?}, ctx: {}", - ident.name, - ctx.path(self) - ), - )), - Builtin::Int(size) => { - let max = if size == 256 { - I256::MAX - } else { - I256::from_raw(U256::from(1u8) << U256::from(size - 1)) - I256::from(1) - }; - match &*ident.name { - "max" => { - let c = Concrete::Int(size, max); - let node = self.add_node(Node::Concrete(c)).into(); - let mut var = ContextVar::new_from_concrete(loc, ctx, node, self) - .into_expr_err(loc)?; - var.name = format!("int{size}.max"); - var.display_name = var.name.clone(); - var.is_tmp = true; - var.is_symbolic = false; - let cvar = self.add_node(Node::ContextVar(var)); - ctx.add_var(cvar.into(), self).into_expr_err(loc)?; - self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); - Ok(ExprRet::Single(cvar)) - } - "min" => { - let min = max * 
I256::from(-1i32) - I256::from(1i32); - let c = Concrete::Int(size, min); - let node = self.add_node(Node::Concrete(c)).into(); - let mut var = ContextVar::new_from_concrete(loc, ctx, node, self) - .into_expr_err(loc)?; - var.name = format!("int{size}.min"); - var.display_name = var.name.clone(); - var.is_tmp = true; - var.is_symbolic = false; - let cvar = self.add_node(Node::ContextVar(var)); - ctx.add_var(cvar.into(), self).into_expr_err(loc)?; - self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); - Ok(ExprRet::Single(cvar)) - } - e => Err(ExprErr::MemberAccessNotFound( - loc, - format!( - "Unknown type attribute on int{size}: {e:?}, ctx: {}", - ctx.path(self) - ), - )), - } - } - Builtin::Uint(size) => match &*ident.name { - "max" => { - let size = size; - let max = if size == 256 { - U256::MAX - } else { - U256::from(2).pow(U256::from(size)) - 1 - }; - let c = Concrete::Uint(size, max); - let node = self.add_node(Node::Concrete(c)).into(); - let mut var = ContextVar::new_from_concrete(loc, ctx, node, self) - .into_expr_err(loc)?; - var.name = format!("uint{size}.max"); - var.display_name = var.name.clone(); - var.is_tmp = true; - var.is_symbolic = false; - let cvar = self.add_node(Node::ContextVar(var)); - ctx.add_var(cvar.into(), self).into_expr_err(loc)?; - self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); - Ok(ExprRet::Single(cvar)) - } - "min" => { - let min = U256::zero(); - let c = Concrete::from(min); - let node = self.add_node(Node::Concrete(c)).into(); - let mut var = ContextVar::new_from_concrete(loc, ctx, node, self) - .into_expr_err(loc)?; - var.name = format!("int{size}.min"); - var.display_name = var.name.clone(); - var.is_tmp = true; - var.is_symbolic = false; - let cvar = self.add_node(Node::ContextVar(var)); - ctx.add_var(cvar.into(), self).into_expr_err(loc)?; - self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); - Ok(ExprRet::Single(cvar)) - } - e => Err(ExprErr::MemberAccessNotFound( - loc, - format!( - "Unknown type attribute on uint{size}: {e:?}, ctx: {}", - ctx.path(self) - ), - )), - }, - } - } - } - - fn library_func_search( - &mut self, - ctx: ContextNode, - ty: NodeIdx, - ident: &Identifier, - ) -> Option { - self.possible_library_funcs(ctx, ty) - .iter() - .filter_map(|func| { - if let Ok(name) = func.name(self) { - Some((name, func)) - } else { - None - } - }) - .find_map(|(name, func)| { - if name == ident.name { - Some(ExprRet::Single((*func).into())) - } else { - None - } - }) - } - - fn possible_library_funcs(&mut self, ctx: ContextNode, ty: NodeIdx) -> BTreeSet { - let mut funcs: BTreeSet = BTreeSet::new(); - if let Some(associated_contract) = ctx.maybe_associated_contract(self).unwrap() { - // search for contract scoped `using` statements - funcs.extend( - self.graph().edges_directed(ty, Direction::Outgoing).filter(|edge| { - matches!(*edge.weight(), Edge::LibraryFunction(scope) if scope == associated_contract.into()) - }).map(|edge| edge.target().into()).collect::>() - ); - } - - // Search for global `using` funcs - if let Some(source) = ctx.maybe_associated_source(self) { - funcs.extend( - self.graph().edges_directed(ty, Direction::Outgoing).filter(|edge| { - matches!(*edge.weight(), Edge::LibraryFunction(scope) if scope == source) - }).map(|edge| edge.target().into()).collect::>() - ); - } - - funcs - } - - #[tracing::instrument(level = "trace", skip_all)] - fn index_access( - &mut self, - loc: Loc, - parent: NodeIdx, - dyn_builtin: BuiltInNode, - ident: &Identifier, - ctx: ContextNode, - ) -> Result<(), 
ExprErr> { - self.variable(ident, ctx, None)?; - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(index_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "No index in index access".to_string())) - }; - - if matches!(index_paths, ExprRet::CtxKilled(_)) { - ctx.push_expr(index_paths, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.match_index_access(&index_paths, loc, parent.into(), dyn_builtin, ctx) - }) - } - - #[tracing::instrument(level = "trace", skip_all)] - fn match_index_access( - &mut self, - index_paths: &ExprRet, - loc: Loc, - parent: ContextVarNode, - dyn_builtin: BuiltInNode, - ctx: ContextNode, - ) -> Result<(), ExprErr> { - match index_paths { - ExprRet::CtxKilled(kind) => ctx.kill(self, loc, *kind).into_expr_err(loc), - ExprRet::Single(idx) => { - let parent = parent.first_version(self); - let parent_name = parent.name(self).into_expr_err(loc)?; - let parent_display_name = parent.display_name(self).unwrap(); - - tracing::trace!( - "Index access: {}[{}]", - parent_display_name, - ContextVarNode::from(*idx) - .display_name(self) - .into_expr_err(loc)? - ); - let parent_ty = dyn_builtin; - let parent_stor = parent - .storage(self) - .into_expr_err(loc)? - .as_ref() - .expect("parent didnt have a storage location?"); - let indexed_var = ContextVar::new_from_index( - self, - loc, - parent_name, - parent_display_name, - parent_stor.clone(), - &parent_ty, - ContextVarNode::from(*idx), - ) - .into_expr_err(loc)?; - - let idx_node = self.add_node(Node::ContextVar(indexed_var)); - self.add_edge(idx_node, parent, Edge::Context(ContextEdge::IndexAccess)); - self.add_edge(idx_node, ctx, Edge::Context(ContextEdge::Variable)); - ctx.add_var(idx_node.into(), self).into_expr_err(loc)?; - self.add_edge(*idx, idx_node, Edge::Context(ContextEdge::Index)); - ctx.push_expr(ExprRet::Single(idx_node), self) - .into_expr_err(loc)?; - Ok(()) - } - e => Err(ExprErr::UnhandledExprRet( - loc, - format!("Unhandled expression return in index access: {e:?}"), - )), - } - } - - #[tracing::instrument(level = "trace", skip_all)] - fn length( - &mut self, - loc: Loc, - input_expr: &Expression, - ctx: ContextNode, - ) -> Result<(), ExprErr> { - self.parse_ctx_expr(input_expr, ctx)?; - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoLhs(loc, "Attempted to perform member access without a left-hand side".to_string())); - }; - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.match_length(ctx, loc, ret, true) - }) - } - - #[tracing::instrument(level = "trace", skip_all)] - fn tmp_length( - &mut self, - arr: ContextVarNode, - array_ctx: ContextNode, - loc: Loc, - ) -> ContextVarNode { - let arr = arr.first_version(self); - let name = format!("{}.length", arr.name(self).unwrap()); - tracing::trace!("Length access: {}", name); - if let Some(attr_var) = array_ctx.var_by_name_or_recurse(self, &name).unwrap() { - attr_var.latest_version(self) - } else { - let range = if let Ok(Some(size)) = arr.ty(self).unwrap().maybe_array_size(self) { - SolcRange::from(Concrete::from(size)) - } else { - SolcRange::try_from_builtin(&Builtin::Uint(256)) - }; - - let len_var = ContextVar { - loc: Some(loc), - name: arr.name(self).unwrap() + ".length", - display_name: arr.display_name(self).unwrap() + ".length", - storage: None, - is_tmp: false, - tmp_of: None, - is_symbolic: true, - is_return: false, - ty: VarType::BuiltIn( - BuiltInNode::from(self.builtin_or_add(Builtin::Uint(256))), - range, - ), - }; - let len_node = self.add_node(Node::ContextVar(len_var)); - - let next_arr = self - .advance_var_in_ctx(arr.latest_version(self), loc, array_ctx) - .unwrap(); - if next_arr - .underlying(self) - .unwrap() - .ty - .is_dyn_builtin(self) - .unwrap() - { - if let Some(r) = next_arr.ref_range(self).unwrap() { - let min = r.evaled_range_min(self).unwrap(); - let max = r.evaled_range_max(self).unwrap(); - - if let Some(mut rd) = min.maybe_range_dyn() { - rd.len = Elem::from(len_node); - let res = next_arr - .set_range_min(self, Elem::ConcreteDyn(Box::new(rd))) - .into_expr_err(loc); - let _ = self.add_if_err(res); - } - - if let Some(mut rd) = max.maybe_range_dyn() { - rd.len = Elem::from(len_node); - let res = next_arr - .set_range_max(self, Elem::ConcreteDyn(Box::new(rd))) - .into_expr_err(loc); - let _ = self.add_if_err(res); - } - } - } - - self.add_edge(len_node, arr, Edge::Context(ContextEdge::AttrAccess)); - self.add_edge(len_node, array_ctx, Edge::Context(ContextEdge::Variable)); - array_ctx.add_var(len_node.into(), self).unwrap(); - len_node.into() - } - } - - #[tracing::instrument(level = "trace", skip_all)] - fn match_length( - &mut self, - ctx: ContextNode, - loc: Loc, - elem_path: ExprRet, - update_len_bound: bool, - ) -> Result<(), ExprErr> { - match elem_path { - ExprRet::Null => { - ctx.push_expr(ExprRet::Null, self).into_expr_err(loc)?; - Ok(()) - } - ExprRet::CtxKilled(kind) => ctx.kill(self, loc, kind).into_expr_err(loc), - ExprRet::Single(arr) => { - let next_arr = self.advance_var_in_ctx( - ContextVarNode::from(arr).latest_version(self), - loc, - ctx, - )?; - let arr = ContextVarNode::from(arr).first_version(self); - let name = format!("{}.length", arr.name(self).into_expr_err(loc)?); - tracing::trace!("Length access: {}", name); - if let Some(len_var) = ctx.var_by_name_or_recurse(self, &name).into_expr_err(loc)? { - let len_var = len_var.latest_version(self); - let new_len = self.advance_var_in_ctx(len_var, loc, ctx)?; - if update_len_bound - && next_arr - .underlying(self) - .into_expr_err(loc)? - .ty - .is_dyn_builtin(self) - .into_expr_err(loc)? - { - if let Some(r) = next_arr.ref_range(self).into_expr_err(loc)? 
{ - let min = r.evaled_range_min(self).into_expr_err(loc)?; - let max = r.evaled_range_max(self).into_expr_err(loc)?; - - if let Some(mut rd) = min.maybe_range_dyn() { - rd.len = Elem::from(new_len); - let res = next_arr - .set_range_min(self, Elem::ConcreteDyn(Box::new(rd))) - .into_expr_err(loc); - let _ = self.add_if_err(res); - } - - if let Some(mut rd) = max.maybe_range_dyn() { - rd.len = Elem::from(new_len); - let res = next_arr - .set_range_min(self, Elem::ConcreteDyn(Box::new(rd))) - .into_expr_err(loc); - let _ = self.add_if_err(res); - } - } - } - ctx.push_expr(ExprRet::Single(new_len.into()), self) - .into_expr_err(loc)?; - Ok(()) - } else { - let range = if let Ok(Some(size)) = - arr.ty(self).into_expr_err(loc)?.maybe_array_size(self) - { - SolcRange::from(Concrete::from(size)) - } else { - SolcRange::try_from_builtin(&Builtin::Uint(256)) - }; - - let len_var = ContextVar { - loc: Some(loc), - name, - display_name: arr.display_name(self).into_expr_err(loc)? + ".length", - storage: None, - is_tmp: false, - tmp_of: None, - is_symbolic: true, - is_return: false, - ty: VarType::BuiltIn( - BuiltInNode::from(self.builtin_or_add(Builtin::Uint(256))), - range, - ), - }; - let len_node = self.add_node(Node::ContextVar(len_var)); - - if next_arr - .underlying(self) - .into_expr_err(loc)? - .ty - .is_dyn_builtin(self) - .into_expr_err(loc)? - { - if let Some(r) = next_arr.ref_range(self).into_expr_err(loc)? { - let min = r.evaled_range_min(self).into_expr_err(loc)?; - let max = r.evaled_range_max(self).into_expr_err(loc)?; - - if let Some(mut rd) = min.maybe_range_dyn() { - rd.len = Elem::from(len_node); - let res = next_arr - .set_range_min(self, Elem::ConcreteDyn(Box::new(rd))) - .into_expr_err(loc); - let _ = self.add_if_err(res); - } - - if let Some(mut rd) = max.maybe_range_dyn() { - rd.len = Elem::from(len_node); - let res = next_arr - .set_range_max(self, Elem::ConcreteDyn(Box::new(rd))) - .into_expr_err(loc); - let _ = self.add_if_err(res); - } - } - } - - self.add_edge(len_node, arr, Edge::Context(ContextEdge::AttrAccess)); - self.add_edge(len_node, ctx, Edge::Context(ContextEdge::Variable)); - ctx.add_var(len_node.into(), self).into_expr_err(loc)?; - ctx.push_expr(ExprRet::Single(len_node), self) - .into_expr_err(loc)?; - Ok(()) - } - } - e => todo!("here: {e:?}"), - } - } -} diff --git a/src/context/exprs/variable.rs b/src/context/exprs/variable.rs deleted file mode 100644 index d77dc4a2..00000000 --- a/src/context/exprs/variable.rs +++ /dev/null @@ -1,135 +0,0 @@ -use crate::context::exprs::IntoExprErr; -use crate::context::ExprErr; -use crate::context::{exprs::env::Env, ContextBuilder}; -use shared::nodes::VarNode; -use shared::{analyzer::AnalyzerLike, context::*, Edge, Node}; -use solang_parser::pt::Expression; - -use solang_parser::pt::Identifier; - -impl Variable for T where T: AnalyzerLike + Sized {} - -pub trait Variable: AnalyzerLike + Sized { - #[tracing::instrument(level = "trace", skip_all)] - fn variable( - &mut self, - ident: &Identifier, - ctx: ContextNode, - recursion_target: Option, - ) -> Result<(), ExprErr> { - tracing::trace!( - "Getting variable: {}, loc: {:?}, ctx: {}", - &ident.name, - ident.loc, - ctx.path(self) - ); - let target_ctx = if let Some(recursion_target) = recursion_target { - recursion_target - } else { - ctx - }; - - // solang doesnt have `super` as a keyword - if let Some(cvar) = ctx.var_by_name(self, &ident.name) { - let cvar = cvar.latest_version(self); - self.apply_to_edges(target_ctx, ident.loc, &|analyzer, edge_ctx, _loc| { - let var = 
analyzer.advance_var_in_ctx(cvar, ident.loc, edge_ctx)?; - edge_ctx - .push_expr(ExprRet::Single(var.into()), analyzer) - .into_expr_err(ident.loc) - }) - } else if ident.name == "_" { - self.env_variable(ident, target_ctx)?; - Ok(()) - } else if let Some(cvar) = ctx - .var_by_name_or_recurse(self, &ident.name) - .into_expr_err(ident.loc)? - { - // check if we can inherit it - let cvar = cvar.latest_version(self); - self.apply_to_edges(target_ctx, ident.loc, &|analyzer, edge_ctx, _loc| { - let var = analyzer.advance_var_in_ctx(cvar, ident.loc, edge_ctx)?; - edge_ctx - .push_expr(ExprRet::Single(var.into()), analyzer) - .into_expr_err(ident.loc) - }) - // if let Some(recursion_target) = recursion_target { - // self.variable(ident, parent_ctx, Some(recursion_target)) - // } else { - // self.variable(ident, parent_ctx, Some(target_ctx)) - // } - } else if (self.env_variable(ident, target_ctx)?).is_some() { - Ok(()) - } else if let Some(idx) = self.user_types().get(&ident.name).cloned() { - let const_var = if let Node::Var(_v) = self.node(idx) { - VarNode::from(idx) - .const_value(ident.loc, self) - .into_expr_err(ident.loc)? - } else { - None - }; - - let var = if let Some(con) = const_var { - con - } else { - match self.node(idx) { - Node::Var(_) | Node::Enum(_) => { - match ContextVar::maybe_from_user_ty(self, ident.loc, idx) { - Some(v) => v, - None => { - return Err(ExprErr::VarBadType( - ident.loc, - format!( - "Could not create context variable from user type: {:?}", - self.node(idx) - ), - )) - } - } - } - _ => { - return target_ctx - .push_expr(ExprRet::Single(idx), self) - .into_expr_err(ident.loc) - } - } - }; - - let new_cvarnode = self.add_node(Node::ContextVar(var)); - - ctx.add_var(new_cvarnode.into(), self) - .into_expr_err(ident.loc)?; - self.add_edge( - new_cvarnode, - target_ctx, - Edge::Context(ContextEdge::Variable), - ); - target_ctx - .push_expr(ExprRet::Single(new_cvarnode), self) - .into_expr_err(ident.loc)?; - Ok(()) - } else if let Some(func_node) = self.builtin_fn_or_maybe_add(&ident.name) { - target_ctx - .push_expr(ExprRet::Single(func_node), self) - .into_expr_err(ident.loc)?; - Ok(()) - } else if let Some(_func) = target_ctx - .visible_funcs(self) - .into_expr_err(ident.loc)? 
- .iter() - .find(|func| func.name(self).unwrap() == ident.name) - { - Err(ExprErr::Todo( - ident.loc, - "Function as variables has limited support".to_string(), - )) - } else { - let node = self.add_node(Node::Unresolved(ident.clone())); - self.user_types_mut().insert(ident.name.clone(), node); - target_ctx - .push_expr(ExprRet::Single(node), self) - .into_expr_err(ident.loc)?; - Ok(()) - } - } -} diff --git a/src/context/func_call/internal_call.rs b/src/context/func_call/internal_call.rs deleted file mode 100644 index 9f32b079..00000000 --- a/src/context/func_call/internal_call.rs +++ /dev/null @@ -1,287 +0,0 @@ -use crate::context::exprs::IntoExprErr; -use crate::context::ExprErr; -use crate::{func_call::FuncCaller, ContextBuilder}; -use shared::context::ExprRet; -use shared::nodes::{Builtin, Concrete, VarType}; -use shared::{ - analyzer::{AnalyzerLike, GraphLike}, - context::{ContextEdge, ContextNode, ContextVar, ContextVarNode}, - Edge, Node, -}; -use solang_parser::pt::{Expression, Identifier, Loc, NamedArgument}; - -impl InternalFuncCaller for T where - T: AnalyzerLike + Sized + GraphLike -{ -} -pub trait InternalFuncCaller: - AnalyzerLike + Sized + GraphLike -{ - #[tracing::instrument(level = "trace", skip_all)] - fn call_internal_named_func( - &mut self, - ctx: ContextNode, - loc: &Loc, - ident: &Identifier, - // _func_expr: &Expression, - input_args: &[NamedArgument], - ) -> Result<(), ExprErr> { - // It is a function call, check if we have the ident in scope - let funcs = ctx.visible_funcs(self).into_expr_err(*loc)?; - // filter down all funcs to those that match - let possible_funcs = funcs - .iter() - .filter(|func| { - let named_correctly = func - .name(self) - .unwrap() - .starts_with(&format!("{}(", ident.name)); - if !named_correctly { - false - } else { - // filter by params - let params = func.params(self); - if params.len() != input_args.len() { - false - } else { - params.iter().all(|param| { - input_args - .iter() - .any(|input| input.name.name == param.name(self).unwrap()) - }) - } - } - }) - .copied() - .collect::>(); - - if possible_funcs.is_empty() { - // check structs - let structs = ctx.visible_structs(self); - let possible_structs = structs - .iter() - .filter(|strukt| { - let named_correctly = strukt - .name(self) - .unwrap() - .starts_with(&ident.name.to_string()); - if !named_correctly { - false - } else { - // filter by params - let fields = strukt.fields(self); - if fields.len() != input_args.len() { - false - } else { - fields.iter().all(|field| { - input_args - .iter() - .any(|input| input.name.name == field.name(self).unwrap()) - }) - } - } - }) - .copied() - .collect::>(); - if possible_structs.is_empty() { - Err(ExprErr::FunctionNotFound( - *loc, - format!( - "No functions or structs found for named function call: {:?}", - ident.name - ), - )) - } else if possible_structs.len() == 1 { - let strukt = possible_structs[0]; - let var = - ContextVar::new_from_struct(*loc, strukt, ctx, self).into_expr_err(*loc)?; - let cvar = self.add_node(Node::ContextVar(var)); - ctx.add_var(cvar.into(), self).into_expr_err(*loc)?; - self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); - - strukt.fields(self).iter().try_for_each(|field| { - let field_cvar = ContextVar::maybe_new_from_field( - self, - *loc, - ContextVarNode::from(cvar) - .underlying(self) - .into_expr_err(*loc)?, - field.underlying(self).unwrap().clone(), - ) - .expect("Invalid struct field"); - - let fc_node = self.add_node(Node::ContextVar(field_cvar)); - self.add_edge(fc_node, cvar, 
Edge::Context(ContextEdge::AttrAccess)); - self.add_edge(fc_node, ctx, Edge::Context(ContextEdge::Variable)); - ctx.add_var(fc_node.into(), self).into_expr_err(*loc)?; - let field_as_ret = ExprRet::Single(fc_node); - let input = input_args - .iter() - .find(|arg| arg.name.name == field.name(self).unwrap()) - .expect("No field in struct in struct construction"); - self.parse_ctx_expr(&input.expr, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(assignment) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Array creation failed".to_string())) - }; - - if matches!(assignment, ExprRet::CtxKilled(_)) { - ctx.push_expr(assignment, analyzer).into_expr_err(loc)?; - return Ok(()); - } - - analyzer.match_assign_sides(ctx, loc, &field_as_ret, &assignment)?; - let _ = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)?; - Ok(()) - }) - })?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, _loc| { - ctx.push_expr(ExprRet::Single(cvar), analyzer) - .into_expr_err(*loc)?; - Ok(()) - })?; - Ok(()) - } else { - Err(ExprErr::Todo( - *loc, - "Disambiguation of struct construction not currently supported".to_string(), - )) - } - } else if possible_funcs.len() == 1 { - let func = possible_funcs[0]; - let params = func.params(self); - let inputs: Vec<_> = params - .iter() - .map(|param| { - let input = input_args - .iter() - .find(|arg| arg.name.name == param.name(self).unwrap()) - .expect( - "No parameter with named provided in named parameter function call", - ); - input.expr.clone() - }) - .collect(); - self.parse_inputs(ctx, *loc, &inputs[..])?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let inputs = ctx - .pop_expr_latest(loc, analyzer) - .into_expr_err(loc)? - .unwrap_or_else(|| ExprRet::Multi(vec![])); - analyzer.setup_fn_call(&ident.loc, &inputs, func.into(), ctx, None) - }) - } else { - todo!("Disambiguate named function call"); - } - } - - #[tracing::instrument(level = "trace", skip_all)] - fn call_internal_func( - &mut self, - ctx: ContextNode, - loc: &Loc, - ident: &Identifier, - func_expr: &Expression, - input_exprs: &[Expression], - ) -> Result<(), ExprErr> { - tracing::trace!("function call: {}(..)", ident.name); - // It is a function call, check if we have the ident in scope - let funcs = ctx.visible_funcs(self).into_expr_err(*loc)?; - // println!("visible funcs: {:#?}", funcs.iter().map(|f| f.name(self).unwrap()).collect::>()); - // filter down all funcs to those that match - let possible_funcs = funcs - .iter() - .filter(|func| { - func.name(self) - .unwrap() - .starts_with(&format!("{}(", ident.name)) - }) - .copied() - .collect::>(); - - if possible_funcs.is_empty() { - // this is a builtin, cast, or unknown function? - self.parse_ctx_expr(func_expr, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let ret = ctx - .pop_expr_latest(loc, analyzer) - .into_expr_err(loc)? - .unwrap_or_else(|| ExprRet::Multi(vec![])); - let ret = ret.flatten(); - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.match_intrinsic_fallback(ctx, &loc, input_exprs, ret) - }) - } else if possible_funcs.len() == 1 { - self.parse_inputs(ctx, *loc, input_exprs)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let inputs = ctx - .pop_expr_latest(loc, analyzer) - .into_expr_err(loc)? 
- .unwrap_or_else(|| ExprRet::Multi(vec![])); - let inputs = inputs.flatten(); - if matches!(inputs, ExprRet::CtxKilled(_)) { - ctx.push_expr(inputs, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.setup_fn_call(&ident.loc, &inputs, (possible_funcs[0]).into(), ctx, None) - }) - } else { - // this is the annoying case due to function overloading & type inference on number literals - self.parse_inputs(ctx, *loc, input_exprs)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let inputs = ctx - .pop_expr_latest(loc, analyzer) - .into_expr_err(loc)? - .unwrap_or_else(|| ExprRet::Multi(vec![])); - let inputs = inputs.flatten(); - if matches!(inputs, ExprRet::CtxKilled(_)) { - ctx.push_expr(inputs, analyzer).into_expr_err(loc)?; - return Ok(()); - } - let resizeables: Vec<_> = inputs.as_flat_vec() - .iter() - .map(|idx| { - match VarType::try_from_idx(analyzer, *idx) { - Some(VarType::BuiltIn(bn, _)) => { - matches!(analyzer.node(bn), Node::Builtin(Builtin::Uint(_)) | Node::Builtin(Builtin::Int(_)) | Node::Builtin(Builtin::Bytes(_))) - // match analyzer.node(bn) { - // Node::Builtin(Builtin::Uint(s)) if s < &256 => true, - // Node::Builtin(Builtin::Int(s)) if s < &256 => true, - // Node::Builtin(Builtin::Bytes(s)) if s < &32 => true, - // _ => false - // } - } - Some(VarType::Concrete(c)) => { - matches!(analyzer.node(c), Node::Concrete(Concrete::Uint(_, _)) | Node::Concrete(Concrete::Int(_, _)) | Node::Concrete(Concrete::Bytes(_, _))) - } - _ => false - } - }) - .collect(); - if let Some(func) = analyzer.disambiguate_fn_call( - &ident.name, - resizeables, - &inputs, - &possible_funcs, - ) { - analyzer.setup_fn_call(&loc, &inputs, func.into(), ctx, None) - } else { - Err(ExprErr::FunctionNotFound( - loc, - format!( - "Could not disambiguate function, default input types: {}, possible functions: {:#?}", - inputs.try_as_func_input_str(analyzer), - possible_funcs - .iter() - .map(|i| i.name(analyzer).unwrap()) - .collect::>() - ), - )) - } - }) - } - } -} diff --git a/src/context/func_call/intrinsic_call.rs b/src/context/func_call/intrinsic_call.rs deleted file mode 100644 index 0804f6b5..00000000 --- a/src/context/func_call/intrinsic_call.rs +++ /dev/null @@ -1,1060 +0,0 @@ -use crate::context::func_call::FuncCaller; -use crate::context::{ - exprs::{Array, MemberAccess, Require}, - ContextBuilder, -}; -use crate::context::{ExprErr, IntoExprErr}; -use ethers_core::types::U256; -use shared::nodes::BuiltInNode; -use shared::nodes::StructNode; -use shared::nodes::TyNode; - -use shared::analyzer::Search; -use shared::analyzer::{AnalyzerLike, GraphLike}; -use shared::nodes::Concrete; - -use shared::{ - context::*, - nodes::{Builtin, VarType}, - range::{ - elem::RangeOp, - elem_ty::{Elem, RangeExpr}, - Range, SolcRange, - }, - Edge, Node, NodeIdx, -}; - -use solang_parser::pt::{Expression, Loc}; - -impl IntrinsicFuncCaller for T where - T: AnalyzerLike + Sized + GraphLike + Search -{ -} -pub trait IntrinsicFuncCaller: - AnalyzerLike + Sized + GraphLike + Search -{ - /// Calls an intrinsic/builtin function call (casts, require, etc.) - #[tracing::instrument(level = "trace", skip_all)] - fn intrinsic_func_call( - &mut self, - loc: &Loc, - input_exprs: &[Expression], - func_idx: NodeIdx, - ctx: ContextNode, - ) -> Result<(), ExprErr> { - match self.node(func_idx) { - Node::Function(underlying) => { - if let Some(func_name) = &underlying.name { - match &*func_name.name { - "abi.decode" => { - // we skip the first because that is what is being decoded. 
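// Aside: a rough sketch of the decode handling below, using a hypothetical call for
// illustration. For Solidity's `abi.decode(data, (uint256, address))`, the `data`
// argument is skipped and each type in the tuple produces a fresh, unconstrained
// context variable of that builtin type (here one uint256 and one address), which is
// what `match_decode` pushes onto the context's expression stack.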
- // TODO: check if we have a concrete bytes value - fn match_decode( - ctx: ContextNode, - loc: &Loc, - ret: ExprRet, - analyzer: &mut impl AnalyzerLike, - ) -> Result<(), ExprErr> { - match ret { - ExprRet::Single(expect_builtin) => { - match analyzer.node(expect_builtin) { - Node::Builtin(_) => { - let var = ContextVar::new_from_builtin( - *loc, - expect_builtin.into(), - analyzer, - ) - .into_expr_err(*loc)?; - let node = analyzer.add_node(Node::ContextVar(var)); - ctx.add_var(node.into(), analyzer) - .into_expr_err(*loc)?; - analyzer.add_edge( - node, - ctx, - Edge::Context(ContextEdge::Variable), - ); - ctx.push_expr(ExprRet::Single(node), analyzer) - .into_expr_err(*loc)?; - Ok(()) - } - Node::ContextVar(cvar) => { - let bn = analyzer - .builtin_or_add( - cvar.ty - .as_builtin(analyzer) - .into_expr_err(*loc)?, - ) - .into(); - let var = ContextVar::new_from_builtin( - *loc, bn, analyzer, - ) - .into_expr_err(*loc)?; - let node = analyzer.add_node(Node::ContextVar(var)); - ctx.add_var(node.into(), analyzer) - .into_expr_err(*loc)?; - analyzer.add_edge( - node, - ctx, - Edge::Context(ContextEdge::Variable), - ); - ctx.push_expr(ExprRet::Single(node), analyzer) - .into_expr_err(*loc)?; - Ok(()) - } - e => todo!("Unhandled type in abi.decode: {e:?}"), - } - } - ExprRet::Multi(inner) => inner.iter().try_for_each(|i| { - match_decode(ctx, loc, i.clone(), analyzer) - }), - ExprRet::CtxKilled(kind) => { - ctx.kill(analyzer, *loc, kind).into_expr_err(*loc) - } - e => panic!("This is invalid solidity: {:?}", e), - } - } - self.parse_ctx_expr(&input_exprs[1], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "abi.decode was not given the types for decoding".to_string())) - }; - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - match_decode(ctx, &loc, ret, analyzer) - }) - } - "abi.encode" - | "abi.encodePacked" - | "abi.encodeCall" - | "abi.encodeWithSignature" - | "abi.encodeWithSelector" => { - // currently we dont support concrete abi encoding, TODO - let bn = self.builtin_or_add(Builtin::DynamicBytes); - let cvar = ContextVar::new_from_builtin(*loc, bn.into(), self) - .into_expr_err(*loc)?; - let node = self.add_node(Node::ContextVar(cvar)); - ctx.add_var(node.into(), self).into_expr_err(*loc)?; - self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); - ctx.push_expr(ExprRet::Single(node), self) - .into_expr_err(*loc)?; - Ok(()) - } - "delegatecall" | "staticcall" | "call" => { - ctx.pop_expr_latest(*loc, self).into_expr_err(*loc)?; - // TODO: try to be smarter based on the address input - let booln = self.builtin_or_add(Builtin::Bool); - let bool_cvar = ContextVar::new_from_builtin(*loc, booln.into(), self) - .into_expr_err(*loc)?; - let bool_node = self.add_node(Node::ContextVar(bool_cvar)); - ctx.add_var(bool_node.into(), self).into_expr_err(*loc)?; - self.add_edge(bool_node, ctx, Edge::Context(ContextEdge::Variable)); - - let bn = self.builtin_or_add(Builtin::DynamicBytes); - let cvar = ContextVar::new_from_builtin(*loc, bn.into(), self) - .into_expr_err(*loc)?; - let node = self.add_node(Node::ContextVar(cvar)); - ctx.add_var(node.into(), self).into_expr_err(*loc)?; - self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); - ctx.push_expr( - ExprRet::Multi(vec![ - ExprRet::Single(bool_node), - ExprRet::Single(node), - ]), - self, - ) - .into_expr_err(*loc)?; - Ok(()) - } - 
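// Note on the arm above: low-level calls (`call`, `delegatecall`, `staticcall`) are
// modeled conservatively. The evaluated inputs are popped and otherwise ignored, and
// the call is treated as returning a fresh `(bool, bytes)` pair, e.g. for
//   (bool ok, bytes memory ret) = target.call(payload);
// both `ok` and `ret` become new, unconstrained context variables (see the TODOs
// above about being smarter based on the address input).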
"code" => { - // TODO: try to be smarter based on the address input - let bn = self.builtin_or_add(Builtin::DynamicBytes); - let cvar = ContextVar::new_from_builtin(*loc, bn.into(), self) - .into_expr_err(*loc)?; - let node = self.add_node(Node::ContextVar(cvar)); - ctx.add_var(node.into(), self).into_expr_err(*loc)?; - self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); - ctx.push_expr(ExprRet::Single(node), self) - .into_expr_err(*loc)?; - Ok(()) - } - "balance" => { - // TODO: try to be smarter based on the address input - let bn = self.builtin_or_add(Builtin::Uint(256)); - let cvar = ContextVar::new_from_builtin(*loc, bn.into(), self) - .into_expr_err(*loc)?; - let node = self.add_node(Node::ContextVar(cvar)); - ctx.add_var(node.into(), self).into_expr_err(*loc)?; - self.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); - ctx.push_expr(ExprRet::Single(node), self) - .into_expr_err(*loc)?; - Ok(()) - } - "require" | "assert" => { - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, _loc| { - analyzer.handle_require(input_exprs, ctx) - }) - } - "type" => self.parse_ctx_expr(&input_exprs[0], ctx), - "push" => { - assert!(input_exprs.len() == 2); - self.parse_ctx_expr(&input_exprs[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(array) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "array[].push(..) was not an array to push to".to_string())) - }; - if matches!(array, ExprRet::CtxKilled(_)) { - ctx.push_expr(array, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.parse_ctx_expr(&input_exprs[1], ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(new_elem) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "array[].push(..) was not given an element to push".to_string())) - }; - - if matches!(new_elem, ExprRet::CtxKilled(_)) { - ctx.push_expr(new_elem, analyzer).into_expr_err(loc)?; - return Ok(()); - } - - let arr = array.expect_single().into_expr_err(loc)?; - let arr = ContextVarNode::from(arr).latest_version(analyzer); - // get length - let len = analyzer.tmp_length(arr, ctx, loc); - - let len_as_idx = len.as_tmp(loc, ctx, analyzer).into_expr_err(loc)?; - // set length as index - analyzer.index_into_array_inner( - ctx, - loc, - ExprRet::Single(arr.latest_version(analyzer).into()), - ExprRet::Single(len_as_idx.latest_version(analyzer).into()), - )?; - let index = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)?.unwrap(); - if matches!(index, ExprRet::CtxKilled(_)) { - ctx.push_expr(index, analyzer).into_expr_err(loc)?; - return Ok(()); - } - // assign index to new_elem - analyzer.match_assign_sides(ctx, loc, &index, &new_elem) - }) - }) - } - "pop" => { - assert!(input_exprs.len() == 1); - self.parse_ctx_expr(&input_exprs[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(array) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "array[].pop() was not an array to pop from".to_string())) - }; - if matches!(array, ExprRet::CtxKilled(_)) { - ctx.push_expr(array, analyzer).into_expr_err(loc)?; - return Ok(()); - } - - // get the array - let arr = array.expect_single().into_expr_err(loc)?; - let arr = ContextVarNode::from(arr).latest_version(analyzer); - - // get length - analyzer.match_length(ctx, loc, array, false)?; - let Some(len) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoLhs(loc, "array[].pop() was not an array to pop from".to_string())) - }; - let len = len.expect_single().into_expr_err(loc)?; - let next_len = analyzer.advance_var_in_ctx(len.into(), loc, ctx)?; - next_len.set_range_min(analyzer, Elem::from(len) - Elem::from(Concrete::from(U256::from(1)))).into_expr_err(loc)?; - next_len.set_range_max(analyzer, Elem::from(len) - Elem::from(Concrete::from(U256::from(1)))).into_expr_err(loc)?; - - // set length as index - analyzer.index_into_array_inner( - ctx, - loc, - ExprRet::Single(arr.latest_version(analyzer).into()), - ExprRet::Single(next_len.latest_version(analyzer).into()), - ) - }) - } - "concat" => self.concat(loc, input_exprs, ctx), - "keccak256" => { - self.parse_ctx_expr(&input_exprs[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(_input) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "abi.decode was not given the types for decoding".to_string())) - }; - let var = ContextVar::new_from_builtin( - loc, - analyzer.builtin_or_add(Builtin::Bytes(32)).into(), - analyzer, - ) - .into_expr_err(loc)?; - let cvar = analyzer.add_node(Node::ContextVar(var)); - ctx.push_expr(ExprRet::Single(cvar), analyzer).into_expr_err(loc)?; - Ok(()) - }) - } - "sha256" => { - self.parse_ctx_expr(&input_exprs[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(input) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "abi.decode was not given the types for decoding".to_string())) - }; - if matches!(input, ExprRet::CtxKilled(_)) { - ctx.push_expr(input, analyzer).into_expr_err(loc)?; - return Ok(()); - } - let var = ContextVar::new_from_builtin( - loc, - analyzer.builtin_or_add(Builtin::Bytes(32)).into(), - analyzer, - ) - .into_expr_err(loc)?; - let cvar = analyzer.add_node(Node::ContextVar(var)); - ctx.push_expr(ExprRet::Single(cvar), analyzer).into_expr_err(loc)?; - Ok(()) - }) - } - "ripemd160" => { - self.parse_ctx_expr(&input_exprs[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(input) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "abi.decode was not given the types for decoding".to_string())) - }; - if matches!(input, ExprRet::CtxKilled(_)) { - ctx.push_expr(input, analyzer).into_expr_err(loc)?; - return Ok(()); - } - let var = ContextVar::new_from_builtin( - loc, - analyzer.builtin_or_add(Builtin::Bytes(32)).into(), - analyzer, - ) - .into_expr_err(loc)?; - let cvar = analyzer.add_node(Node::ContextVar(var)); - ctx.push_expr(ExprRet::Single(cvar), analyzer).into_expr_err(loc)?; - Ok(()) - }) - } - "blockhash" => { - self.parse_ctx_expr(&input_exprs[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(input) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoRhs(loc, "blockhash function was not provided a block number".to_string())) - }; - if matches!(input, ExprRet::CtxKilled(_)) { - ctx.push_expr(input, analyzer).into_expr_err(loc)?; - return Ok(()); - } - let var = ContextVar::new_from_builtin( - loc, - analyzer.builtin_or_add(Builtin::Bytes(32)).into(), - analyzer, - ) - .into_expr_err(loc)?; - let cvar = analyzer.add_node(Node::ContextVar(var)); - ctx.push_expr(ExprRet::Single(cvar), analyzer).into_expr_err(loc)?; - Ok(()) - }) - } - "gasleft" => { - let var = ContextVar::new_from_builtin( - *loc, - self.builtin_or_add(Builtin::Uint(64)).into(), - self, - ) - .into_expr_err(*loc)?; - let cvar = self.add_node(Node::ContextVar(var)); - ctx.push_expr(ExprRet::Single(cvar), self) - .into_expr_err(*loc)?; - Ok(()) - } - "ecrecover" => { - self.parse_inputs(ctx, *loc, input_exprs)?; - - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let cctx = Context::new_subctx( - ctx, - None, - loc, - None, - Some(func_idx.into()), - true, - analyzer, - None, - ) - .into_expr_err(loc)?; - let call_ctx = analyzer.add_node(Node::Context( - cctx - )); - ctx.set_child_call(call_ctx.into(), analyzer) - .into_expr_err(loc)?; - let call_node = analyzer.add_node(Node::FunctionCall); - analyzer.add_edge(call_node, func_idx, Edge::Context(ContextEdge::Call)); - analyzer.add_edge(call_node, ctx, Edge::Context(ContextEdge::Subcontext)); - analyzer.add_edge( - call_ctx, - call_node, - Edge::Context(ContextEdge::Subcontext), - ); - - let Some(input) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "ecrecover did not receive inputs".to_string())) - }; - - if matches!(input, ExprRet::CtxKilled(_)) { - ctx.push_expr(input, analyzer).into_expr_err(loc)?; - return Ok(()); - } - - let mut inner_vals = vec![]; - match input { - ExprRet::Single(var) - | ExprRet::SingleLiteral(var) => { - inner_vals.push( - ContextVarNode::from(var).display_name(analyzer).unwrap(), - ); - } - _ => inner_vals.push("".to_string()), - } - let inner_name = inner_vals.into_iter().collect::>().join(", "); - let mut var = ContextVar::new_from_builtin( - loc, - analyzer.builtin_or_add(Builtin::Address).into(), - analyzer, - ) - .into_expr_err(loc)?; - var.display_name = format!("ecrecover({})", inner_name); - var.is_symbolic = true; - var.is_return = true; - let cvar = analyzer.add_node(Node::ContextVar(var)); - ctx.add_var(cvar.into(), analyzer).into_expr_err(loc)?; - analyzer.add_edge(cvar, call_ctx, Edge::Context(ContextEdge::Variable)); - analyzer.add_edge(cvar, call_ctx, Edge::Context(ContextEdge::Return)); - ContextNode::from(call_ctx) - .add_return_node(loc, cvar.into(), analyzer) - .into_expr_err(loc)?; - - let rctx = Context::new_subctx( - call_ctx.into(), - Some(ctx), - loc, - None, - None, - true, - analyzer, - None, - ) - .into_expr_err(loc)?; - let ret_ctx = analyzer.add_node(Node::Context( - rctx - )); - ContextNode::from(call_ctx) - .set_child_call(ret_ctx.into(), analyzer) - .into_expr_err(loc)?; - analyzer.add_edge(ret_ctx, call_ctx, Edge::Context(ContextEdge::Continue)); - - let tmp_ret = ContextVarNode::from(cvar) - .as_tmp( - ContextNode::from(call_ctx).underlying(analyzer).unwrap().loc, - ret_ctx.into(), - analyzer, - ) - .unwrap(); - tmp_ret.underlying_mut(analyzer).unwrap().is_return = true; - tmp_ret.underlying_mut(analyzer).unwrap().display_name = - format!("ecrecover({}).return", inner_name); - ctx.add_var(tmp_ret, analyzer).into_expr_err(loc)?; - analyzer.add_edge(tmp_ret, ret_ctx, 
Edge::Context(ContextEdge::Variable)); - - ContextNode::from(ret_ctx).push_expr(ExprRet::Single(tmp_ret.into()), analyzer).into_expr_err(loc)?; - Ok(()) - }) - } - "addmod" => { - // TODO: actually calcuate this if possible - self.parse_inputs(ctx, *loc, input_exprs)?; - - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)?; - let var = ContextVar::new_from_builtin( - loc, - analyzer.builtin_or_add(Builtin::Uint(256)).into(), - analyzer, - ) - .into_expr_err(loc)?; - let cvar = analyzer.add_node(Node::ContextVar(var)); - ctx.push_expr(ExprRet::Single(cvar), analyzer) - .into_expr_err(loc)?; - Ok(()) - }) - } - "mulmod" => { - // TODO: actually calcuate this if possible - self.parse_inputs(ctx, *loc, input_exprs)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)?; - let var = ContextVar::new_from_builtin( - loc, - analyzer.builtin_or_add(Builtin::Uint(256)).into(), - analyzer, - ) - .into_expr_err(loc)?; - let cvar = analyzer.add_node(Node::ContextVar(var)); - ctx.push_expr(ExprRet::Single(cvar), analyzer) - .into_expr_err(loc)?; - Ok(()) - }) - } - "wrap" => { - if input_exprs.len() != 2 { - return Err(ExprErr::InvalidFunctionInput(*loc, format!("Expected a member type and an input to the wrap function, but got: {:?}", input_exprs))); - } - - self.parse_inputs(ctx, *loc, input_exprs)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(input) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "ecrecover did not receive inputs".to_string())) - }; - input.expect_length(2).into_expr_err(loc)?; - let ret = input.as_vec(); - let wrapping_ty = ret[0].expect_single().into_expr_err(loc)?; - let var = ContextVar::new_from_ty( - loc, - TyNode::from(wrapping_ty), - ctx, - analyzer, - ) - .into_expr_err(loc)?; - let to_be_wrapped = ret[1].expect_single().into_expr_err(loc)?; - let cvar = ContextVarNode::from(analyzer.add_node(Node::ContextVar(var))); - let next = analyzer.advance_var_in_ctx(cvar, loc, ctx)?; - let expr = Elem::Expr(RangeExpr::new(Elem::from(to_be_wrapped), RangeOp::Cast, Elem::from(cvar))); - next.set_range_min(analyzer, expr.clone()).into_expr_err(loc)?; - next.set_range_max(analyzer, expr).into_expr_err(loc)?; - ctx.push_expr(ExprRet::Single(cvar.into()), analyzer) - .into_expr_err(loc) - }) - } - "unwrap" => { - self.parse_inputs(ctx, *loc, input_exprs)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(input) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "ecrecover did not receive inputs".to_string())) - }; - input.expect_length(2).into_expr_err(loc)?; - let ret = input.as_vec(); - let wrapping_ty = ret[0].expect_single().into_expr_err(loc)?; - let mut var = ContextVar::new_from_builtin( - loc, - BuiltInNode::from(TyNode::from(wrapping_ty).underlying(analyzer).into_expr_err(loc)?.ty), - analyzer, - ) - .into_expr_err(loc)?; - let to_be_unwrapped = ret[1].expect_single().into_expr_err(loc)?; - var.display_name = format!("{}.unwrap({})", - TyNode::from(wrapping_ty).name(analyzer).into_expr_err(loc)?, - ContextVarNode::from(to_be_unwrapped).display_name(analyzer).into_expr_err(loc)? 
- ); - - let cvar = ContextVarNode::from(analyzer.add_node(Node::ContextVar(var))); - let next = analyzer.advance_var_in_ctx(cvar, loc, ctx)?; - let expr = Elem::Expr(RangeExpr::new(Elem::from(to_be_unwrapped), RangeOp::Cast, Elem::from(cvar))); - next.set_range_min(analyzer, expr.clone()).into_expr_err(loc)?; - next.set_range_max(analyzer, expr).into_expr_err(loc)?; - - cvar.set_range_min(analyzer, Elem::from(to_be_unwrapped)).into_expr_err(loc)?; - cvar.set_range_max(analyzer, Elem::from(to_be_unwrapped)).into_expr_err(loc)?; - ctx.push_expr(ExprRet::Single(cvar.into()), analyzer) - .into_expr_err(loc) - }) - } - e => Err(ExprErr::Todo( - *loc, - format!("builtin function: {e:?} doesn't exist or isn't implemented"), - )), - } - } else { - panic!("unnamed builtin?") - } - } - Node::Builtin(Builtin::Array(_)) => { - // create a new list - self.parse_ctx_expr(&input_exprs[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(len_var) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Array creation failed".to_string())) - }; - - if matches!(len_var, ExprRet::CtxKilled(_)) { - ctx.push_expr(len_var, analyzer).into_expr_err(loc)?; - return Ok(()); - } - let len_cvar = len_var.expect_single().into_expr_err(loc)?; - - let ty = VarType::try_from_idx(analyzer, func_idx); - - let new_arr = ContextVar { - loc: Some(loc), - name: format!("tmp_arr{}", ctx.new_tmp(analyzer).into_expr_err(loc)?), - display_name: "arr".to_string(), - storage: None, - is_tmp: true, - is_symbolic: false, - is_return: false, - tmp_of: None, - ty: ty.expect("No type for node"), - }; - - let arr = ContextVarNode::from(analyzer.add_node(Node::ContextVar(new_arr))); - - let len_var = ContextVar { - loc: Some(loc), - name: arr.name(analyzer).into_expr_err(loc)? + ".length", - display_name: arr.display_name(analyzer).unwrap() + ".length", - storage: None, - is_tmp: true, - tmp_of: None, - is_symbolic: true, - is_return: false, - ty: ContextVarNode::from(len_cvar) - .underlying(analyzer) - .into_expr_err(loc)? - .ty - .clone(), - }; - - let len_cvar = analyzer.add_node(Node::ContextVar(len_var)); - analyzer.add_edge(arr, ctx, Edge::Context(ContextEdge::Variable)); - ctx.add_var(arr, analyzer).into_expr_err(loc)?; - analyzer.add_edge(len_cvar, ctx, Edge::Context(ContextEdge::Variable)); - ctx.add_var(len_cvar.into(), analyzer).into_expr_err(loc)?; - analyzer.add_edge(len_cvar, arr, Edge::Context(ContextEdge::AttrAccess)); - - // update the length - if let Some(r) = arr.ref_range(analyzer).into_expr_err(loc)? 
{ - let min = r.evaled_range_min(analyzer).into_expr_err(loc)?; - let max = r.evaled_range_max(analyzer).into_expr_err(loc)?; - - if let Some(mut rd) = min.maybe_range_dyn() { - rd.len = Elem::from(len_cvar); - arr.set_range_min(analyzer, Elem::ConcreteDyn(Box::new(rd))) - .into_expr_err(loc)?; - } - - if let Some(mut rd) = max.maybe_range_dyn() { - rd.len = Elem::from(len_cvar); - arr.set_range_min(analyzer, Elem::ConcreteDyn(Box::new(rd))) - .into_expr_err(loc)?; - } - } - - ctx.push_expr(ExprRet::Single(arr.into()), analyzer) - .into_expr_err(loc)?; - Ok(()) - }) - } - Node::Builtin(ty) => { - // it is a cast - let ty = ty.clone(); - fn cast_match( - ctx: ContextNode, - loc: Loc, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ty: &Builtin, - ret: ExprRet, - func_idx: NodeIdx, - ) -> Result<(), ExprErr> { - match ret { - ExprRet::CtxKilled(kind) => { - ctx.kill(analyzer, loc, kind).into_expr_err(loc) - } - ExprRet::Null => Ok(()), - ExprRet::Single(cvar) | ExprRet::SingleLiteral(cvar) => { - let new_var = ContextVarNode::from(cvar) - .as_cast_tmp(loc, ctx, ty.clone(), analyzer) - .into_expr_err(loc)?; - - new_var.underlying_mut(analyzer).into_expr_err(loc)?.ty = - VarType::try_from_idx(analyzer, func_idx).expect(""); - // cast the ranges - if let Some(r) = ContextVarNode::from(cvar) - .range(analyzer) - .into_expr_err(loc)? - { - let curr_range = - SolcRange::try_from_builtin(ty).expect("No default range"); - let min = r - .range_min() - .into_owned() - .cast(curr_range.range_min().into_owned()); - let max = r - .range_max() - .into_owned() - .cast(curr_range.range_max().into_owned()); - new_var.set_range_min(analyzer, min).into_expr_err(loc)?; - new_var.set_range_max(analyzer, max).into_expr_err(loc)?; - // cast the range exclusions - TODO: verify this is correct - let mut exclusions = r.range_exclusions(); - exclusions.iter_mut().for_each(|range| { - *range = - range.clone().cast(curr_range.range_min().into_owned()); - }); - new_var - .set_range_exclusions(analyzer, exclusions) - .into_expr_err(loc)?; - } - - ctx.push_expr(ExprRet::Single(new_var.into()), analyzer) - .into_expr_err(loc)?; - Ok(()) - } - ExprRet::Multi(inner) => inner - .into_iter() - .try_for_each(|i| cast_match(ctx, loc, analyzer, ty, i, func_idx)), - } - } - - self.parse_ctx_expr(&input_exprs[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Array creation failed".to_string())) - }; - - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - - cast_match(ctx, loc, analyzer, &ty, ret, func_idx) - }) - } - Node::ContextVar(_c) => { - // its a user type - // TODO: figure out if we actually need to do anything? - // input_exprs - // .iter() - // .try_for_each(|expr| self.parse_ctx_expr(expr, ctx))?; - - // self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - // }) - - ctx.push_expr(ExprRet::Single(func_idx), self) - .into_expr_err(*loc)?; - Ok(()) - } - Node::Contract(_) => { - if input_exprs.len() != 1 { - return Err(ExprErr::InvalidFunctionInput( - *loc, - "Invalid number of inputs to a contract instantiation".to_string(), - )); - } - - self.parse_ctx_expr(&input_exprs[0], ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoRhs(loc, "Array creation failed".to_string())) - }; - - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - - let var = match ContextVar::maybe_from_user_ty(analyzer, loc, func_idx) { - Some(v) => v, - None => { - return Err(ExprErr::VarBadType( - loc, - format!( - "Could not create context variable from user type: {:?}", - analyzer.node(func_idx) - ), - )) - } - }; - let idx = ret.expect_single().into_expr_err(loc)?; - let contract_cvar = - ContextVarNode::from(analyzer.add_node(Node::ContextVar(var))); - contract_cvar - .set_range_min(analyzer, Elem::from(idx)) - .into_expr_err(loc)?; - contract_cvar - .set_range_max(analyzer, Elem::from(idx)) - .into_expr_err(loc)?; - ctx.push_expr(ExprRet::Single(contract_cvar.into()), analyzer) - .into_expr_err(loc) - }) - } - Node::Unresolved(_) => { - self.parse_inputs(ctx, *loc, input_exprs)?; - - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(inputs) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Function call failed".to_string())) - }; - - if matches!(inputs, ExprRet::CtxKilled(_)) { - ctx.push_expr(inputs, analyzer).into_expr_err(loc)?; - return Ok(()); - } - let visible_funcs = ctx.visible_funcs(analyzer).into_expr_err(loc)? - .iter() - .map(|func| func.name(analyzer).unwrap()) - .collect::>(); - - if let Node::Unresolved(ident) = analyzer.node(func_idx) { - Err(ExprErr::FunctionNotFound( - loc, - format!( - "Could not find function: \"{}{}\", context: {}, visible functions: {:#?}", - ident.name, - inputs.try_as_func_input_str(analyzer), - ctx.path(analyzer), - visible_funcs - ) - )) - } else { - unreachable!() - } - }) - } - Node::Struct(_) => { - // struct construction - let strukt = StructNode::from(func_idx); - let var = - ContextVar::new_from_struct(*loc, strukt, ctx, self).into_expr_err(*loc)?; - let cvar = self.add_node(Node::ContextVar(var)); - ctx.add_var(cvar.into(), self).into_expr_err(*loc)?; - self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); - - self.parse_inputs(ctx, *loc, input_exprs)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(inputs) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoRhs(loc, "Struct Function call failed".to_string())) - }; - - let inputs = inputs.as_vec(); - // set struct fields - strukt - .fields(analyzer) - .iter() - .zip(inputs) - .try_for_each(|(field, input)| { - let field_cvar = ContextVar::maybe_new_from_field( - analyzer, - loc, - ContextVarNode::from(cvar) - .underlying(analyzer) - .into_expr_err(loc)?, - field.underlying(analyzer).unwrap().clone(), - ) - .expect("Invalid struct field"); - - let fc_node = analyzer.add_node(Node::ContextVar(field_cvar)); - analyzer.add_edge( - fc_node, - cvar, - Edge::Context(ContextEdge::AttrAccess), - ); - analyzer.add_edge(fc_node, ctx, Edge::Context(ContextEdge::Variable)); - ctx.add_var(fc_node.into(), analyzer).into_expr_err(loc)?; - let field_as_ret = ExprRet::Single(fc_node); - analyzer.match_assign_sides(ctx, loc, &field_as_ret, &input)?; - let _ = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)?; - Ok(()) - })?; - - ctx.push_expr(ExprRet::Single(cvar), analyzer) - .into_expr_err(loc) - }) - } - e => Err(ExprErr::FunctionNotFound(*loc, format!("{e:?}"))), - } - } - - #[tracing::instrument(level = "trace", skip_all)] - fn concat( - &mut self, - loc: &Loc, - input_exprs: &[Expression], - ctx: ContextNode, - ) -> Result<(), ExprErr> { - input_exprs[1..].iter().try_for_each(|expr| { - self.parse_ctx_expr(expr, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let input = ctx - .pop_expr_latest(loc, analyzer) - .into_expr_err(loc)? - .unwrap_or(ExprRet::Null); - ctx.append_tmp_expr(input, analyzer).into_expr_err(loc) - }) - })?; - - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(inputs) = ctx.pop_tmp_expr(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Concatenation failed".to_string())) - }; - if matches!(inputs, ExprRet::CtxKilled(_)) { - ctx.push_expr(inputs, analyzer).into_expr_err(loc)?; - return Ok(()); - } - let inputs = inputs.as_vec(); - if inputs.is_empty() { - ctx.push_expr(ExprRet::Multi(vec![]), analyzer) - .into_expr_err(loc)?; - Ok(()) - } else { - let start = &inputs[0]; - if inputs.len() > 1 { - analyzer.match_concat(ctx, loc, start.clone(), &inputs[1..], None) - } else { - analyzer.match_concat(ctx, loc, start.clone(), &[], None) - } - } - }) - } - - fn match_concat( - &mut self, - ctx: ContextNode, - loc: Loc, - curr: ExprRet, - inputs: &[ExprRet], - accum_node: Option, - ) -> Result<(), ExprErr> { - if let Some(accum_node) = accum_node { - match curr.flatten() { - ExprRet::Single(var) | ExprRet::SingleLiteral(var) => { - self.concat_inner(loc, accum_node, ContextVarNode::from(var))?; - ctx.push_expr(ExprRet::Single(accum_node.into()), self) - .into_expr_err(loc)?; - Ok(()) - } - ExprRet::Null => { - ctx.push_expr(ExprRet::Single(accum_node.into()), self) - .into_expr_err(loc)?; - Ok(()) - } - ExprRet::Multi(inner) => inner - .into_iter() - .try_for_each(|i| self.match_concat(ctx, loc, i, inputs, Some(accum_node))), - ExprRet::CtxKilled(kind) => ctx.kill(self, loc, kind).into_expr_err(loc), - } - } else { - match curr.flatten() { - ExprRet::Single(var) | ExprRet::SingleLiteral(var) => { - let acc = ContextVarNode::from(var) - .as_tmp(loc, ctx, self) - .into_expr_err(loc)?; - inputs - .iter() - .map(|i| self.match_concat(ctx, loc, i.clone(), inputs, Some(acc))) - .collect::, ExprErr>>()?; - ctx.push_expr(ExprRet::Single(acc.into()), self) - .into_expr_err(loc)?; - Ok(()) - } - ExprRet::Null => Err(ExprErr::NoRhs( - loc, - "No input provided to concat function".to_string(), - )), - 
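// Note: in the no-accumulator branch above, the first single value is copied into a
// temporary accumulator and each remaining input is folded into it via `concat_inner`,
// which rewrites the accumulator's type/range to the concatenated result. Roughly:
//   concat(a, b, c)  =>  acc = tmp(a); concat_inner(acc, b); concat_inner(acc, c)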
ExprRet::Multi(inner) => inner - .into_iter() - .try_for_each(|i| self.match_concat(ctx, loc, i, inputs, None)), - ExprRet::CtxKilled(kind) => ctx.kill(self, loc, kind).into_expr_err(loc), - } - } - } - - fn concat_inner( - &mut self, - loc: Loc, - accum: ContextVarNode, - right: ContextVarNode, - ) -> Result<(), ExprErr> { - match ( - accum.ty(self).into_expr_err(loc)?, - right.ty(self).into_expr_err(loc)?, - ) { - (VarType::Concrete(accum_cnode), VarType::Concrete(right_cnode)) => { - let new_ty = match ( - accum_cnode.underlying(self).into_expr_err(loc)?, - right_cnode.underlying(self).into_expr_err(loc)?, - ) { - (accum_node @ Concrete::String(..), right_node @ Concrete::String(..)) => { - let new_val = accum_node.clone().concat(right_node).unwrap(); - let new_cnode = self.add_node(Node::Concrete(new_val)); - VarType::Concrete(new_cnode.into()) - } - (accum_node @ Concrete::DynBytes(..), right_node @ Concrete::DynBytes(..)) => { - let new_val = accum_node.clone().concat(right_node).unwrap(); - let new_cnode = self.add_node(Node::Concrete(new_val)); - VarType::Concrete(new_cnode.into()) - } - (a, b) => { - // Invalid solidity - return Err(ExprErr::InvalidFunctionInput(loc, format!("Type mismatch: {a:?} for left hand side and type: {b:?} for right hand side"))); - } - }; - accum.underlying_mut(self).into_expr_err(loc)?.ty = new_ty; - Ok(()) - } - (VarType::Concrete(accum_cnode), VarType::BuiltIn(_bn, Some(r2))) => { - let underlying = accum_cnode.underlying(self).into_expr_err(loc)?; - // let val = match underlying { - // Concrete::String(val) => { - // val - // .chars() - // .enumerate() - // .map(|(i, v)| { - // let idx = Elem::from(Concrete::from(U256::from(i))); - // let mut bytes = [0x00; 32]; - // v.encode_utf8(&mut bytes[..]); - // let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); - // (idx, v) - // }) - // .collect::>() - // } - // Concrete::DynBytes(val) => { - // val - // .iter() - // .enumerate() - // .map(|(i, v)| { - // let idx = Elem::from(Concrete::from(U256::from(i))); - // let mut bytes = [0x00; 32]; - // bytes[0] = *v; - // let v = Elem::from(Concrete::Bytes(1, H256::from(bytes))); - // (idx, v) - // }) - // .collect::>() - // } - // b => return Err(ExprErr::InvalidFunctionInput(loc, format!("Type mismatch: expected String or Bytes for concat input but found: {b:?}"))) - // }; - // TODO: Extend with bn - - let range = SolcRange::from(underlying.clone()).unwrap(); - let min = range.min.clone().concat(r2.min.clone()); - let max = range.max.clone().concat(r2.max.clone()); - accum.set_range_min(self, min).into_expr_err(loc)?; - accum.set_range_max(self, max).into_expr_err(loc)?; - - let new_ty = - VarType::BuiltIn(self.builtin_or_add(Builtin::String).into(), Some(range)); - accum.underlying_mut(self).into_expr_err(loc)?.ty = new_ty; - Ok(()) - } - (VarType::BuiltIn(_bn, Some(r)), VarType::BuiltIn(_bn2, Some(r2))) => { - // TODO: improve length calculation here - let min = r.min.clone().concat(r2.min.clone()); - let max = r.max.clone().concat(r2.max.clone()); - accum.set_range_min(self, min).into_expr_err(loc)?; - accum.set_range_max(self, max).into_expr_err(loc)?; - Ok(()) - } - (_, _) => Ok(()), - } - } -} diff --git a/src/context/func_call/mod.rs b/src/context/func_call/mod.rs deleted file mode 100644 index 897feec9..00000000 --- a/src/context/func_call/mod.rs +++ /dev/null @@ -1,1174 +0,0 @@ -use crate::context::exprs::IntoExprErr; -use crate::context::func_call::{ - internal_call::InternalFuncCaller, intrinsic_call::IntrinsicFuncCaller, - 
namespaced_call::NameSpaceFuncCaller, -}; -use crate::context::ContextBuilder; -use crate::context::ExprErr; -use std::cell::RefCell; -use std::rc::Rc; - -use shared::analyzer::GraphLike; -use shared::context::ExprRet; -use shared::context::*; -use solang_parser::helpers::CodeLocation; -use std::collections::BTreeMap; - -use shared::range::Range; -use solang_parser::pt::{Expression, Loc, NamedArgument, StorageLocation}; - -use crate::VarType; - -use shared::{analyzer::AnalyzerLike, nodes::*, Edge, Node, NodeIdx}; - -pub mod internal_call; -pub mod intrinsic_call; -pub mod modifier; -pub mod namespaced_call; - -impl FuncCaller for T where - T: AnalyzerLike + Sized + GraphLike -{ -} -pub trait FuncCaller: - GraphLike + AnalyzerLike + Sized -{ - #[tracing::instrument(level = "trace", skip_all)] - fn named_fn_call_expr( - &mut self, - ctx: ContextNode, - loc: &Loc, - func_expr: &Expression, - input_exprs: &[NamedArgument], - ) -> Result<(), ExprErr> { - use solang_parser::pt::Expression::*; - match func_expr { - MemberAccess(loc, member_expr, ident) => { - self.call_name_spaced_named_func(ctx, loc, member_expr, ident, input_exprs) - } - Variable(ident) => self.call_internal_named_func(ctx, loc, ident, input_exprs), - e => Err(ExprErr::IntrinsicNamedArgs( - *loc, - format!("Cannot call intrinsic functions with named arguments. Call: {e:?}"), - )), - } - } - #[tracing::instrument(level = "trace", skip_all)] - fn fn_call_expr( - &mut self, - ctx: ContextNode, - loc: &Loc, - func_expr: &Expression, - input_exprs: &[Expression], - ) -> Result<(), ExprErr> { - use solang_parser::pt::Expression::*; - match func_expr { - MemberAccess(loc, member_expr, ident) => { - self.call_name_spaced_func(ctx, loc, member_expr, ident, input_exprs) - } - Variable(ident) => self.call_internal_func(ctx, loc, ident, func_expr, input_exprs), - _ => { - self.parse_ctx_expr(func_expr, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Function call to nonexistent function".to_string())) - }; - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.match_intrinsic_fallback(ctx, &loc, input_exprs, ret) - }) - } - } - } - - fn match_intrinsic_fallback( - &mut self, - ctx: ContextNode, - loc: &Loc, - input_exprs: &[Expression], - ret: ExprRet, - ) -> Result<(), ExprErr> { - match ret { - ExprRet::Single(func_idx) | ExprRet::SingleLiteral(func_idx) => { - self.intrinsic_func_call(loc, input_exprs, func_idx, ctx) - } - ExprRet::Multi(inner) => inner - .into_iter() - .try_for_each(|ret| self.match_intrinsic_fallback(ctx, loc, input_exprs, ret)), - ExprRet::CtxKilled(kind) => ctx.kill(self, *loc, kind).into_expr_err(*loc), - ExprRet::Null => Ok(()), - } - } - - /// Disambiguates a function call by their inputs (length & type) - fn disambiguate_fn_call( - &mut self, - fn_name: &str, - literals: Vec, - input_paths: &ExprRet, - funcs: &[FunctionNode], - ) -> Option { - let input_paths = input_paths.clone().flatten(); - // try to find the function based on naive signature - // This doesnt do type inference on NumberLiterals (i.e. 
100 could be uintX or intX, and there could - // be a function that takes an int256 but we evaled as uint256) - let fn_sig = format!("{}{}", fn_name, input_paths.try_as_func_input_str(self)); - if let Some(func) = funcs.iter().find(|func| func.name(self).unwrap() == fn_sig) { - return Some(*func); - } - - // filter by input len - let inputs = input_paths.as_flat_vec(); - let funcs: Vec<&FunctionNode> = funcs - .iter() - .filter(|func| func.params(self).len() == inputs.len()) - .collect(); - - if funcs.len() == 1 { - return Some(*funcs[0]); - } - - if !literals.iter().any(|i| *i) { - None - } else { - let funcs = funcs - .iter() - .filter(|func| { - let params = func.params(self); - params - .iter() - .zip(&inputs) - .enumerate() - .all(|(i, (param, input))| { - let param_ty = VarType::try_from_idx(self, (*param).into()).unwrap(); - let input_ty = ContextVarNode::from(*input).ty(self).unwrap(); - if param_ty.ty_eq(input_ty, self).unwrap() { - true - } else if literals[i] { - let possibilities = ContextVarNode::from(*input) - .ty(self) - .unwrap() - .possible_builtins_from_ty_inf(self); - let param_ty = param.ty(self).unwrap(); - match self.node(param_ty) { - Node::Builtin(b) => possibilities.contains(b), - _ => false, - } - } else { - false - } - }) - }) - .collect::>(); - if funcs.len() == 1 { - Some(**funcs[0]) - } else { - // this would be invalid solidity, likely the user needs to perform a cast - None - } - } - } - - #[tracing::instrument(level = "trace", skip_all)] - fn parse_inputs( - &mut self, - ctx: ContextNode, - loc: Loc, - inputs: &[Expression], - ) -> Result<(), ExprErr> { - let append = if ctx.underlying(self).into_expr_err(loc)?.tmp_expr.is_empty() { - Rc::new(RefCell::new(true)) - } else { - Rc::new(RefCell::new(false)) - }; - - inputs - .iter() - .try_for_each(|input| { - self.parse_ctx_expr(input, ctx)?; - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Inputs did not have left hand sides".to_string())); - }; - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - if *append.borrow() { - ctx.append_tmp_expr(ret, analyzer).into_expr_err(loc) - } else { - *append.borrow_mut() = true; - ctx.push_tmp_expr(ret, analyzer).into_expr_err(loc) - } - }) - })?; - if !inputs.is_empty() { - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(ret) = ctx.pop_tmp_expr(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Inputs did not have left hand sides".to_string())); - }; - ctx.push_expr(ret, analyzer).into_expr_err(loc) - }) - } else { - Ok(()) - } - } - - /// Setups up storage variables for a function call and calls it - fn setup_fn_call( - &mut self, - loc: &Loc, - inputs: &ExprRet, - func_idx: NodeIdx, - ctx: ContextNode, - func_call_str: Option<&str>, - ) -> Result<(), ExprErr> { - // if we have a single match thats our function - let var = match ContextVar::maybe_from_user_ty(self, *loc, func_idx) { - Some(v) => v, - None => panic!( - "Could not create context variable from user type: {:?}", - self.node(func_idx) - ), - }; - - let new_cvarnode = self.add_node(Node::ContextVar(var)); - ctx.add_var(new_cvarnode.into(), self).into_expr_err(*loc)?; - self.add_edge(new_cvarnode, ctx, Edge::Context(ContextEdge::Variable)); - if let Some(func_node) = ContextVarNode::from(new_cvarnode) - .ty(self) - .into_expr_err(*loc)? 
- .func_node(self) - { - self.func_call(ctx, *loc, inputs, func_node, func_call_str, None) - } else { - unreachable!() - } - } - - /// Matches the input kinds and performs the call - fn func_call( - &mut self, - ctx: ContextNode, - loc: Loc, - input_paths: &ExprRet, - func: FunctionNode, - func_call_str: Option<&str>, - modifier_state: Option, - ) -> Result<(), ExprErr> { - let params = func.params(self); - let input_paths = input_paths.clone().flatten(); - if input_paths.has_killed() { - return ctx - .kill(self, loc, input_paths.killed_kind().unwrap()) - .into_expr_err(loc); - } - match input_paths { - ExprRet::Single(input_var) | ExprRet::SingleLiteral(input_var) => { - // if we get a single var, we expect the func to only take a single - // variable - self.func_call_inner( - false, - ctx, - func, - loc, - vec![ContextVarNode::from(input_var).latest_version(self)], - params, - func_call_str, - modifier_state, - ) - } - ExprRet::Multi(ref inputs) => { - if ExprRet::Multi(inputs.to_vec()).flatten().has_killed() { - return ctx - .kill( - self, - loc, - ExprRet::Multi(inputs.to_vec()).killed_kind().unwrap(), - ) - .into_expr_err(loc); - } - // check if the inputs length matchs func params length - // if they do, check that none are forks - if inputs.len() == params.len() { - let input_vars = inputs - .iter() - .map(|expr_ret| { - let var = expr_ret.expect_single().into_expr_err(loc)?; - Ok(ContextVarNode::from(var).latest_version(self)) - }) - .collect::, ExprErr>>()?; - self.func_call_inner( - false, - ctx, - func, - loc, - input_vars, - params, - func_call_str, - modifier_state, - ) - } else { - Err(ExprErr::InvalidFunctionInput( - loc, - format!( - "Length mismatch: {inputs:?} {params:?}, inputs as vars: {}, ctx: {}", - ExprRet::Multi(inputs.to_vec()).debug_str(self), - ctx.path(self) - ), - )) - } - } - e => todo!("here: {:?}", e), - } - } - - fn create_call_ctx( - &mut self, - curr_ctx: ContextNode, - loc: Loc, - func_node: FunctionNode, - modifier_state: Option, - ) -> Result { - let fn_ext = curr_ctx.is_fn_ext(func_node, self).into_expr_err(loc)?; - let ctx = Context::new_subctx( - curr_ctx, - None, - loc, - None, - Some(func_node), - fn_ext, - self, - modifier_state, - ) - .into_expr_err(loc)?; - let callee_ctx = ContextNode::from(self.add_node(Node::Context(ctx))); - curr_ctx - .set_child_call(callee_ctx, self) - .into_expr_err(loc)?; - let ctx_fork = self.add_node(Node::FunctionCall); - self.add_edge(ctx_fork, curr_ctx, Edge::Context(ContextEdge::Subcontext)); - self.add_edge(ctx_fork, func_node, Edge::Context(ContextEdge::Call)); - self.add_edge( - NodeIdx::from(callee_ctx.0), - ctx_fork, - Edge::Context(ContextEdge::Subcontext), - ); - Ok(callee_ctx) - } - - /// Maps inputs to function parameters such that if there is a renaming i.e. `a(uint256 x)` is called via `a(y)`, - /// we map `y -> x` for future lookups - fn map_inputs_to_params( - &mut self, - loc: Loc, - entry_call: bool, - params: Vec, - inputs: Vec, - callee_ctx: ContextNode, - ) -> Result, ExprErr> { - Ok(params - .iter() - .zip(inputs.iter()) - .filter_map(|(param, input)| { - if !entry_call { - if let Some(name) = - self.add_if_err(param.maybe_name(self).into_expr_err(loc))? 
- { - let res = input - .latest_version(self) - .underlying(self) - .into_expr_err(loc) - .cloned(); - let mut new_cvar = self.add_if_err(res)?; - new_cvar.loc = Some(param.loc(self).unwrap()); - new_cvar.name = name.clone(); - new_cvar.display_name = name; - new_cvar.is_tmp = false; - new_cvar.storage = if let Some(StorageLocation::Storage(_)) = - param.underlying(self).unwrap().storage - { - new_cvar.storage - } else { - None - }; - - if let Some(param_ty) = VarType::try_from_idx(self, param.ty(self).unwrap()) - { - let ty = new_cvar.ty.clone(); - if !ty.ty_eq(¶m_ty, self).unwrap() { - if let Some(new_ty) = ty.try_cast(¶m_ty, self).unwrap() { - new_cvar.ty = new_ty; - } - } - } - - let node = ContextVarNode::from(self.add_node(Node::ContextVar(new_cvar))); - self.add_edge( - node, - input.latest_version(self), - Edge::Context(ContextEdge::InputVariable), - ); - - if let (Some(r), Some(r2)) = - (node.range(self).unwrap(), param.range(self).unwrap()) - { - let new_min = - r.range_min().into_owned().cast(r2.range_min().into_owned()); - let new_max = - r.range_max().into_owned().cast(r2.range_max().into_owned()); - let res = node.try_set_range_min(self, new_min).into_expr_err(loc); - self.add_if_err(res); - let res = node.try_set_range_max(self, new_max).into_expr_err(loc); - self.add_if_err(res); - let res = node - .try_set_range_exclusions(self, r.exclusions) - .into_expr_err(loc); - self.add_if_err(res); - } - callee_ctx.add_var(node, self).unwrap(); - self.add_edge(node, callee_ctx, Edge::Context(ContextEdge::Variable)); - Some((*input, node)) - } else { - None - } - } else { - None - } - }) - .collect::>()) - } - - /// Checks if there are any modifiers and executes them prior to executing the function - #[tracing::instrument(level = "trace", skip_all)] - fn func_call_inner( - &mut self, - entry_call: bool, - ctx: ContextNode, - func_node: FunctionNode, - loc: Loc, - inputs: Vec, - params: Vec, - func_call_str: Option<&str>, - modifier_state: Option, - ) -> Result<(), ExprErr> { - // pseudocode: - // 1. Create context for the call - // 2. Check for modifiers - // 3. Call modifier 0, then 1, then 2, ... then N. - // 4. Call this function - // 5. Finish modifier N.. then 2, then 1, then 0 - let callee_ctx = if entry_call { - ctx - } else { - self.create_call_ctx(ctx, loc, func_node, modifier_state)? - }; - - // TODO: implement joining - // if !entry_call { - // let mapping = params - // .iter() - // .zip(inputs.iter()) - // .map(|(param, input)| (*input, *param)) - // .collect::>(); - // ctx.join(func_node, &mapping, self); - // } - - // handle remapping of variable names and bringing variables into the new context - let renamed_inputs = - self.map_inputs_to_params(loc, entry_call, params, inputs, callee_ctx)?; - - // begin modifier handling by making sure modifiers were set - if !func_node.modifiers_set(self).into_expr_err(loc)? 
{ - self.set_modifiers(func_node, ctx)?; - } - - // get modifiers - let mods = func_node.modifiers(self); - self.apply_to_edges(callee_ctx, loc, &|analyzer, callee_ctx, loc| { - if let Some(mod_state) = &ctx.underlying(analyzer).into_expr_err(loc)?.modifier_state { - // we are iterating through modifiers - if mod_state.num + 1 < mods.len() { - // use the next modifier - let mut mstate = mod_state.clone(); - mstate.num += 1; - analyzer.call_modifier_for_fn(loc, callee_ctx, func_node, mstate) - } else { - // out of modifiers, execute the actual function call - analyzer.execute_call_inner( - loc, - ctx, - callee_ctx, - func_node, - &renamed_inputs, - func_call_str, - ) - } - } else if !mods.is_empty() { - // we have modifiers and havent executed them, start the process of executing them - let state = - ModifierState::new(0, loc, func_node, callee_ctx, ctx, renamed_inputs.clone()); - analyzer.call_modifier_for_fn(loc, callee_ctx, func_node, state) - } else { - // no modifiers, just execute the function - analyzer.execute_call_inner( - loc, - ctx, - callee_ctx, - func_node, - &renamed_inputs, - func_call_str, - ) - } - }) - } - - /// Actually executes the function - #[tracing::instrument(level = "trace", skip_all)] - fn execute_call_inner( - &mut self, - loc: Loc, - caller_ctx: ContextNode, - callee_ctx: ContextNode, - func_node: FunctionNode, - _renamed_inputs: &BTreeMap, - func_call_str: Option<&str>, - ) -> Result<(), ExprErr> { - if let Some(body) = func_node.underlying(self).into_expr_err(loc)?.body.clone() { - // add return nodes into the subctx - func_node - .returns(self) - .collect::>() - .into_iter() - .for_each(|ret| { - if let Some(var) = ContextVar::maybe_new_from_func_ret( - self, - ret.underlying(self).unwrap().clone(), - ) { - let cvar = self.add_node(Node::ContextVar(var)); - callee_ctx.add_var(cvar.into(), self).unwrap(); - self.add_edge(cvar, callee_ctx, Edge::Context(ContextEdge::Variable)); - } - }); - - self.parse_ctx_statement(&body, false, Some(callee_ctx)); - self.ctx_rets(loc, caller_ctx, callee_ctx) - } else { - let ret_ctx = Context::new_subctx( - callee_ctx, - Some(caller_ctx), - loc, - None, - None, - false, - self, - caller_ctx - .underlying(self) - .into_expr_err(loc)? 
- .modifier_state - .clone(), - ) - .unwrap(); - let ret_subctx = ContextNode::from(self.add_node(Node::Context(ret_ctx))); - self.add_edge(ret_subctx, caller_ctx, Edge::Context(ContextEdge::Continue)); - - let res = callee_ctx - .set_child_call(ret_subctx, self) - .into_expr_err(loc); - let _ = self.add_if_err(res); - self.apply_to_edges(callee_ctx, loc, &|analyzer, ctx, loc| { - func_node - .returns(analyzer) - .collect::>() - .into_iter() - .try_for_each(|ret| { - let underlying = ret.underlying(analyzer).unwrap(); - let mut var = - ContextVar::new_from_func_ret(ctx, analyzer, underlying.clone()) - .unwrap() - .expect("No type for return variable?"); - if let Some(func_call) = &func_call_str { - var.name = - format!("{}_{}", func_call, callee_ctx.new_tmp(analyzer).unwrap()); - var.display_name = func_call.to_string(); - } - let node = analyzer.add_node(Node::ContextVar(var)); - ctx.add_var(node.into(), analyzer).into_expr_err(loc)?; - analyzer.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); - analyzer.add_edge(node, ctx, Edge::Context(ContextEdge::Return)); - ctx.push_expr(ExprRet::Single(node), analyzer) - .into_expr_err(loc)?; - Ok(()) - }) - }) - } - } - - fn ctx_rets( - &mut self, - loc: Loc, - caller_ctx: ContextNode, - callee_ctx: ContextNode, - ) -> Result<(), ExprErr> { - tracing::trace!( - "Handling function call return for: {}, {}, depth: {:?}, {:?}", - caller_ctx.path(self), - callee_ctx.path(self), - caller_ctx.depth(self), - callee_ctx.depth(self), - ); - match callee_ctx.underlying(self).into_expr_err(loc)?.child { - Some(CallFork::Fork(w1, w2)) => { - self.ctx_rets(loc, caller_ctx, w1)?; - self.ctx_rets(loc, caller_ctx, w2)?; - Ok(()) - } - Some(CallFork::Call(c)) - if c.underlying(self).into_expr_err(loc)?.depth - >= caller_ctx.underlying(self).into_expr_err(loc)?.depth => - { - // follow rabbit hole - self.ctx_rets(loc, caller_ctx, c)?; - Ok(()) - } - _ => { - if callee_ctx.is_killed(self).into_expr_err(loc)? { - return Ok(()); - } - let callee_depth = callee_ctx.underlying(self).into_expr_err(loc)?.depth; - let caller_depth = caller_ctx.underlying(self).into_expr_err(loc)?.depth; - if callee_depth != caller_depth { - let ctx = Context::new_subctx( - callee_ctx, - Some(caller_ctx), - loc, - None, - None, - false, - self, - caller_ctx - .underlying(self) - .into_expr_err(loc)? - .modifier_state - .clone(), - ) - .unwrap(); - let ret_subctx = ContextNode::from(self.add_node(Node::Context(ctx))); - self.add_edge(ret_subctx, caller_ctx, Edge::Context(ContextEdge::Continue)); - - let res = callee_ctx - .set_child_call(ret_subctx, self) - .into_expr_err(loc); - let _ = self.add_if_err(res); - - let mut rets = callee_ctx.underlying(self).unwrap().ret.clone(); - - if rets.is_empty() { - let func_rets: Vec = callee_ctx - .associated_fn(self) - .into_expr_err(loc)? - .returns(self) - .collect(); - func_rets - .iter() - .filter_map(|ret| { - let n: String = ret.maybe_name(self).ok()??; - let ret_loc: Loc = ret.loc(self).ok()?; - Some((n, ret_loc)) - }) - .collect::>() - .into_iter() - .try_for_each(|(name, ret_loc)| { - if let Some(cvar) = callee_ctx - .var_by_name_or_recurse(self, &name) - .into_expr_err(loc)? 
- { - let cvar = cvar.latest_version(self); - // let ret_loc = ret.loc(self).into_expr_err(loc)?; - callee_ctx - .add_return_node(ret_loc, cvar, self) - .into_expr_err(loc)?; - self.add_edge( - cvar, - callee_ctx, - Edge::Context(ContextEdge::Return), - ); - } - Ok(()) - })?; - - // add unnamed rets - func_rets - .into_iter() - .filter(|ret| ret.maybe_name(self).unwrap().is_none()) - .collect::>() - .iter() - .try_for_each(|ret| { - let ret_loc = ret.loc(self).into_expr_err(loc)?; - let cvar = ContextVar::new_from_func_ret( - callee_ctx, - self, - ret.underlying(self).into_expr_err(loc)?.clone(), - ) - .into_expr_err(loc)? - .unwrap(); - let cvar = - ContextVarNode::from(self.add_node(Node::ContextVar(cvar))); - callee_ctx.add_var(cvar, self).into_expr_err(loc)?; - self.add_edge( - cvar, - callee_ctx, - Edge::Context(ContextEdge::Variable), - ); - callee_ctx - .add_return_node(ret_loc, cvar, self) - .into_expr_err(loc)?; - self.add_edge(cvar, callee_ctx, Edge::Context(ContextEdge::Return)); - Ok(()) - })?; - rets = callee_ctx.underlying(self).unwrap().ret.clone(); - } - let ret = rets - .into_iter() - .enumerate() - .map(|(i, (_, node))| { - let tmp_ret = node - .as_tmp(callee_ctx.underlying(self).unwrap().loc, ret_subctx, self) - .unwrap(); - tmp_ret.underlying_mut(self).into_expr_err(loc)?.is_return = true; - tmp_ret - .underlying_mut(self) - .into_expr_err(loc)? - .display_name = - format!("{}.{}", callee_ctx.associated_fn_name(self).unwrap(), i); - ret_subctx.add_var(tmp_ret, self).into_expr_err(loc)?; - self.add_edge( - tmp_ret, - ret_subctx, - Edge::Context(ContextEdge::Variable), - ); - Ok(ExprRet::Single(tmp_ret.into())) - }) - .collect::>()?; - ret_subctx - .push_expr(ExprRet::Multi(ret), self) - .into_expr_err(loc)?; - Ok(()) - } else { - let mut rets = callee_ctx.underlying(self).unwrap().ret.clone(); - - if rets.is_empty() { - callee_ctx - .associated_fn(self) - .into_expr_err(loc)? - .returns(self) - .filter_map(|ret| { - let n: String = ret.maybe_name(self).ok()??; - let ret_loc: Loc = ret.loc(self).ok()?; - Some((n, ret_loc)) - }) - .collect::>() - .into_iter() - .try_for_each(|(name, ret_loc)| { - if let Some(cvar) = callee_ctx - .var_by_name_or_recurse(self, &name) - .into_expr_err(loc)? - { - let cvar = cvar.latest_version(self); - // let ret_loc = ret.loc(self).into_expr_err(loc)?; - callee_ctx - .add_return_node(ret_loc, cvar, self) - .into_expr_err(loc)?; - self.add_edge( - cvar, - callee_ctx, - Edge::Context(ContextEdge::Return), - ); - } - Ok(()) - })?; - rets = callee_ctx.underlying(self).unwrap().ret.clone(); - } - callee_ctx - .push_expr( - ExprRet::Multi( - rets.iter() - .map(|(_, node)| ExprRet::Single((*node).into())) - .collect(), - ), - self, - ) - .into_expr_err(loc) - } - } - } - } - - /// Calls a modifier for a function - #[tracing::instrument(level = "trace", skip_all)] - fn call_modifier_for_fn( - &mut self, - loc: Loc, - func_ctx: ContextNode, - func_node: FunctionNode, - mod_state: ModifierState, - ) -> Result<(), ExprErr> { - let mod_node = func_node.modifiers(self)[mod_state.num]; - tracing::trace!( - "calling modifier {} for func {}", - mod_node.name(self).into_expr_err(loc)?, - func_node.name(self).into_expr_err(loc)? 
- ); - - let input_exprs = func_node - .modifier_input_vars(mod_state.num, self) - .into_expr_err(loc)?; - - input_exprs - .iter() - .try_for_each(|expr| self.parse_ctx_expr(expr, func_ctx))?; - self.apply_to_edges(func_ctx, loc, &|analyzer, ctx, loc| { - let input_paths = if input_exprs.is_empty() { - ExprRet::Multi(vec![]) - } else { - let Some(input_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, format!("No inputs to modifier, expected: {}", input_exprs.len()))) - }; - - if matches!(input_paths, ExprRet::CtxKilled(_)) { - ctx.push_expr(input_paths, analyzer).into_expr_err(loc)?; - return Ok(()); - } - input_paths - }; - - analyzer.func_call( - ctx, - loc, - &input_paths, - mod_node, - None, - Some(mod_state.clone()), - ) - }) - } - - /// Resumes the parent function of a modifier - #[tracing::instrument(level = "trace", skip_all)] - fn resume_from_modifier( - &mut self, - ctx: ContextNode, - modifier_state: ModifierState, - ) -> Result<(), ExprErr> { - tracing::trace!( - "resuming from modifier: {}", - ctx.associated_fn_name(self) - .into_expr_err(modifier_state.loc)? - ); - - let mods = modifier_state.parent_fn.modifiers(self); - self.apply_to_edges(ctx, modifier_state.loc, &|analyzer, ctx, loc| { - if modifier_state.num + 1 < mods.len() { - // use the next modifier - let mut mstate = modifier_state.clone(); - mstate.num += 1; - - let loc = mods[mstate.num] - .underlying(analyzer) - .into_expr_err(mstate.loc)? - .loc; - - let pctx = Context::new_subctx( - ctx, - Some(modifier_state.parent_ctx), - loc, - None, - None, - false, - analyzer, - Some(modifier_state.clone()), - ) - .unwrap(); - let new_parent_subctx = ContextNode::from(analyzer.add_node(Node::Context(pctx))); - - analyzer.add_edge( - new_parent_subctx, - modifier_state.parent_ctx, - Edge::Context(ContextEdge::Continue), - ); - ctx.set_child_call(new_parent_subctx, analyzer) - .into_expr_err(modifier_state.loc)?; - - analyzer.call_modifier_for_fn( - mods[mstate.num] - .underlying(analyzer) - .into_expr_err(mstate.loc)? 
- .loc, - new_parent_subctx, - mstate.parent_fn, - mstate, - )?; - Ok(()) - } else { - let pctx = Context::new_subctx( - ctx, - Some(modifier_state.parent_ctx), - modifier_state.loc, - None, - None, - false, - analyzer, - None, - ) - .unwrap(); - let new_parent_subctx = ContextNode::from(analyzer.add_node(Node::Context(pctx))); - - analyzer.add_edge( - new_parent_subctx, - modifier_state.parent_ctx, - Edge::Context(ContextEdge::Continue), - ); - ctx.set_child_call(new_parent_subctx, analyzer) - .into_expr_err(modifier_state.loc)?; - - // actually execute the parent function - analyzer.execute_call_inner( - modifier_state.loc, - ctx, - new_parent_subctx, - modifier_state.parent_fn, - &modifier_state.renamed_inputs, - None, - )?; - - fn inherit_return_from_call( - analyzer: &mut (impl GraphLike + AnalyzerLike), - loc: Loc, - ctx: ContextNode, - ) -> Result<(), ExprErr> { - let mctx = - Context::new_subctx(ctx, Some(ctx), loc, None, None, false, analyzer, None) - .unwrap(); - let modifier_after_subctx = - ContextNode::from(analyzer.add_node(Node::Context(mctx))); - - ctx.set_child_call(modifier_after_subctx, analyzer) - .into_expr_err(loc)?; - analyzer.add_edge( - modifier_after_subctx, - ctx, - Edge::Context(ContextEdge::Continue), - ); - - let ret = ctx.underlying(analyzer).unwrap().ret.clone(); - modifier_after_subctx.underlying_mut(analyzer).unwrap().ret = ret; - Ok(()) - } - - analyzer.apply_to_edges(new_parent_subctx, loc, &|analyzer, ctx, _loc| { - inherit_return_from_call(analyzer, modifier_state.loc, ctx) - }) - - // if edges.is_empty() { - // inherit_return_from_call(analyzer, modifier_state.loc, new_parent_subctx)?; - // } else { - // edges.iter().try_for_each(|i| { - // inherit_return_from_call(analyzer, modifier_state.loc, *i)?; - // Ok(()) - // })?; - // } - // Ok(()) - } - }) - } - - /// Inherit the input changes from a function call - fn inherit_input_changes( - &mut self, - loc: Loc, - to_ctx: ContextNode, - from_ctx: ContextNode, - renamed_inputs: &BTreeMap, - ) -> Result<(), ExprErr> { - if to_ctx != from_ctx { - self.apply_to_edges(to_ctx, loc, &|analyzer, to_ctx, loc| { - renamed_inputs - .iter() - .try_for_each(|(input_var, updated_var)| { - let new_input = analyzer.advance_var_in_ctx( - input_var.latest_version(analyzer), - loc, - to_ctx, - )?; - let latest_updated = updated_var.latest_version(analyzer); - if let Some(updated_var_range) = - latest_updated.range(analyzer).into_expr_err(loc)? 
- { - let res = new_input - .set_range_min(analyzer, updated_var_range.range_min().into_owned()) - .into_expr_err(loc); - let _ = analyzer.add_if_err(res); - let res = new_input - .set_range_max(analyzer, updated_var_range.range_max().into_owned()) - .into_expr_err(loc); - let _ = analyzer.add_if_err(res); - let res = new_input - .set_range_exclusions( - analyzer, - updated_var_range.range_exclusions(), - ) - .into_expr_err(loc); - let _ = analyzer.add_if_err(res); - } - Ok(()) - }) - })?; - } - Ok(()) - } - - /// Inherit the input changes from a function call - fn modifier_inherit_return(&mut self, mod_ctx: ContextNode, fn_ctx: ContextNode) { - let ret = fn_ctx.underlying(self).unwrap().ret.clone(); - mod_ctx.underlying_mut(self).unwrap().ret = ret; - } - - /// Inherit the storage changes from a function call - fn inherit_storage_changes( - &mut self, - loc: Loc, - inheritor_ctx: ContextNode, - grantor_ctx: ContextNode, - ) -> Result<(), ExprErr> { - if inheritor_ctx != grantor_ctx { - return self.apply_to_edges(inheritor_ctx, loc, &|analyzer, inheritor_ctx, loc| { - let vars = grantor_ctx.local_vars(analyzer).clone(); - vars.iter().try_for_each(|(name, old_var)| { - let var = old_var.latest_version(analyzer); - let underlying = var.underlying(analyzer).into_expr_err(loc)?; - if var.is_storage(analyzer).into_expr_err(loc)? { - if let Some(inheritor_var) = inheritor_ctx.var_by_name(analyzer, name) { - let inheritor_var = inheritor_var.latest_version(analyzer); - if let Some(r) = underlying.ty.range(analyzer).into_expr_err(loc)? { - let new_inheritor_var = analyzer - .advance_var_in_ctx( - inheritor_var, - underlying.loc.expect("No loc for val change"), - inheritor_ctx, - ) - .unwrap(); - let _ = new_inheritor_var - .set_range_min(analyzer, r.range_min().into_owned()); - let _ = new_inheritor_var - .set_range_max(analyzer, r.range_max().into_owned()); - let _ = new_inheritor_var - .set_range_exclusions(analyzer, r.range_exclusions()); - } - } else { - let new_in_inheritor = - analyzer.add_node(Node::ContextVar(underlying.clone())); - inheritor_ctx - .add_var(new_in_inheritor.into(), analyzer) - .into_expr_err(loc)?; - analyzer.add_edge( - new_in_inheritor, - inheritor_ctx, - Edge::Context(ContextEdge::Variable), - ); - analyzer.add_edge( - new_in_inheritor, - var, - Edge::Context(ContextEdge::InheritedVariable), - ); - } - } - Ok(()) - }) - }); - } - Ok(()) - } - - fn modifiers( - &mut self, - ctx: ContextNode, - func: FunctionNode, - ) -> Result, ExprErr> { - use std::fmt::Write; - let binding = func.underlying(self).unwrap().clone(); - let modifiers = binding.modifiers_as_base(); - if modifiers.is_empty() { - Ok(vec![]) - } else { - let res = modifiers - .iter() - .map(|modifier| { - assert_eq!(modifier.name.identifiers.len(), 1); - // construct arg string for function selector - let mut mod_name = format!("{}", modifier.name.identifiers[0]); - if let Some(args) = &modifier.args { - let args_str = args - .iter() - .map(|expr| { - let mctx = Context::new_subctx( - ctx, - None, - Loc::Implicit, - None, - None, - false, - self, - None, - ) - .into_expr_err(Loc::Implicit)?; - let callee_ctx = - ContextNode::from(self.add_node(Node::Context(mctx))); - let _res = ctx.set_child_call(callee_ctx, self); - self.parse_ctx_expr(expr, callee_ctx)?; - let f: Vec = - self.take_from_edge(ctx, expr.loc(), &|analyzer, ctx, loc| { - let ret = ctx - .pop_expr_latest(loc, analyzer) - .into_expr_err(loc)? 
- .unwrap(); - Ok(ret.try_as_func_input_str(analyzer)) - })?; - - ctx.delete_child(self).into_expr_err(expr.loc())?; - Ok(f.first().unwrap().clone()) - }) - .collect::, ExprErr>>()? - .join(", "); - let _ = write!(mod_name, "{args_str}"); - } else { - let _ = write!(mod_name, "()"); - } - let _ = write!(mod_name, ""); - let found: Option = ctx - .visible_modifiers(self) - .unwrap() - .iter() - .find(|modifier| modifier.name(self).unwrap() == mod_name) - .copied(); - Ok(found) - }) - .collect::>, ExprErr>>()? - .into_iter() - .flatten() - .collect::>(); - Ok(res) - } - } - - fn set_modifiers(&mut self, func: FunctionNode, ctx: ContextNode) -> Result<(), ExprErr> { - let modifiers = self.modifiers(ctx, func)?; - modifiers - .iter() - .enumerate() - .for_each(|(i, modifier)| self.add_edge(*modifier, func, Edge::FuncModifier(i))); - func.underlying_mut(self).unwrap().modifiers_set = true; - Ok(()) - } -} diff --git a/src/context/func_call/modifier.rs b/src/context/func_call/modifier.rs deleted file mode 100644 index a62adda5..00000000 --- a/src/context/func_call/modifier.rs +++ /dev/null @@ -1,34 +0,0 @@ -use crate::context::exprs::IntoExprErr; -use crate::context::func_call::FuncCaller; - -use crate::context::ExprErr; - -use shared::analyzer::GraphLike; -use shared::context::*; - -use solang_parser::pt::{Expression, Loc}; - -use shared::{analyzer::AnalyzerLike, nodes::*}; - -impl ModifierCaller for T where - T: AnalyzerLike + Sized + GraphLike -{ -} -pub trait ModifierCaller: - GraphLike + AnalyzerLike + Sized -{ - fn handle_modifiers( - &mut self, - ctx: ContextNode, - loc: Loc, - _input_paths: &ExprRet, - func: FunctionNode, - _func_call_str: Option, - ) -> Result { - if !func.modifiers_set(self).into_expr_err(loc)? { - self.set_modifiers(func, ctx)?; - } - - todo!() - } -} diff --git a/src/context/func_call/namespaced_call.rs b/src/context/func_call/namespaced_call.rs deleted file mode 100644 index 9a4118d2..00000000 --- a/src/context/func_call/namespaced_call.rs +++ /dev/null @@ -1,372 +0,0 @@ -use crate::context::{ - exprs::{IntoExprErr, MemberAccess}, - func_call::intrinsic_call::IntrinsicFuncCaller, - func_call::FuncCaller, - ContextBuilder, ExprErr, -}; -use shared::nodes::BuiltInNode; -use shared::{ - analyzer::{AnalyzerLike, GraphLike}, - context::{ContextNode, ContextVarNode, ExprRet}, - nodes::FunctionNode, - Node, NodeIdx, -}; -use solang_parser::pt::{Expression, Identifier, Loc, NamedArgument}; - -impl NameSpaceFuncCaller for T where - T: AnalyzerLike + Sized + GraphLike -{ -} -pub trait NameSpaceFuncCaller: - AnalyzerLike + Sized + GraphLike -{ - #[tracing::instrument(level = "trace", skip_all)] - fn call_name_spaced_named_func( - &mut self, - ctx: ContextNode, - _loc: &Loc, - member_expr: &Expression, - _ident: &Identifier, - _input_args: &[NamedArgument], - ) -> Result<(), ExprErr> { - self.parse_ctx_expr(member_expr, ctx)?; - todo!("here"); - } - - #[tracing::instrument(level = "trace", skip_all)] - fn call_name_spaced_func( - &mut self, - ctx: ContextNode, - loc: &Loc, - member_expr: &Expression, - ident: &Identifier, - input_exprs: &[Expression], - ) -> Result<(), ExprErr> { - use solang_parser::pt::Expression::*; - tracing::trace!("Calling name spaced function"); - if let Variable(Identifier { name, .. 
}) = member_expr { - if name == "abi" { - let func_name = format!("abi.{}", ident.name); - let fn_node = self - .builtin_fn_or_maybe_add(&func_name) - .unwrap_or_else(|| panic!("No builtin function with name {func_name}")); - return self.intrinsic_func_call(loc, input_exprs, fn_node, ctx); - } else if name == "super" { - if let Some(contract) = ctx.maybe_associated_contract(self).into_expr_err(*loc)? { - let supers = contract.super_contracts(self); - let possible_funcs: Vec<_> = supers - .iter() - .filter_map(|con_node| { - con_node - .linearized_functions(self) - .into_iter() - .find(|(func_name, _func_node)| func_name.starts_with(&ident.name)) - .map(|(_, node)| node) - }) - .collect(); - - if possible_funcs.is_empty() { - return Err(ExprErr::FunctionNotFound( - *loc, - "Could not find function in super".to_string(), - )); - } - self.parse_inputs(ctx, *loc, input_exprs)?; - return self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let inputs = if let Some(inputs) = - ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? - { - inputs - } else { - ExprRet::Multi(vec![]) - }; - if possible_funcs.len() == 1 { - let mut inputs = inputs.as_vec(); - let func = possible_funcs[0]; - if func.params(analyzer).len() < inputs.len() { - inputs = inputs[1..].to_vec(); - } - let inputs = ExprRet::Multi(inputs); - if inputs.has_killed() { - return ctx - .kill(analyzer, loc, inputs.killed_kind().unwrap()) - .into_expr_err(loc); - } - analyzer.setup_fn_call(&ident.loc, &inputs, func.into(), ctx, None) - } else { - // this is the annoying case due to function overloading & type inference on number literals - let mut lits = vec![false]; - lits.extend( - input_exprs - .iter() - .map(|expr| { - match expr { - Negate(_, expr) => { - // negative number potentially - matches!(**expr, NumberLiteral(..) | HexLiteral(..)) - } - NumberLiteral(..) | HexLiteral(..) => true, - _ => false, - } - }) - .collect::>(), - ); - - if inputs.has_killed() { - return ctx - .kill(analyzer, loc, inputs.killed_kind().unwrap()) - .into_expr_err(loc); - } - if let Some(func) = analyzer.disambiguate_fn_call( - &ident.name, - lits, - &inputs, - &possible_funcs, - ) { - analyzer.setup_fn_call(&loc, &inputs, func.into(), ctx, None) - } else { - Err(ExprErr::FunctionNotFound( - loc, - "Could not find function in super".to_string(), - )) - } - } - }); - } - } - } - - self.parse_ctx_expr(member_expr, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoLhs(loc, "Namespace function call had no namespace".to_string())) - }; - - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - - analyzer.match_namespaced_member(ctx, loc, member_expr, ident, input_exprs, ret) - }) - } - - fn match_namespaced_member( - &mut self, - ctx: ContextNode, - loc: Loc, - member_expr: &Expression, - ident: &Identifier, - input_exprs: &[Expression], - ret: ExprRet, - ) -> Result<(), ExprErr> { - match ret { - ExprRet::Single(inner) | ExprRet::SingleLiteral(inner) => { - self.call_name_spaced_func_inner(ctx, loc, member_expr, ident, input_exprs, inner) - } - ExprRet::Multi(inner) => inner.into_iter().try_for_each(|ret| { - self.match_namespaced_member(ctx, loc, member_expr, ident, input_exprs, ret) - }), - ExprRet::CtxKilled(kind) => ctx.kill(self, loc, kind).into_expr_err(loc), - ExprRet::Null => Err(ExprErr::NoLhs( - loc, - "No function found due to null".to_string(), - )), - } - } - - #[tracing::instrument(level = "trace", skip_all)] - fn call_name_spaced_func_inner( - &mut self, - ctx: ContextNode, - loc: Loc, - member_expr: &Expression, - ident: &Identifier, - input_exprs: &[Expression], - member: NodeIdx, - ) -> Result<(), ExprErr> { - use solang_parser::pt::Expression::*; - tracing::trace!( - "namespaced function call: {:?}.{:?}(..)", - ContextVarNode::from(member).display_name(self), - ident.name - ); - - let funcs = self.visible_member_funcs(ctx, loc, member)?; - // filter down all funcs to those that match - let possible_funcs = funcs - .iter() - .filter(|func| { - func.name(self) - .unwrap() - .starts_with(&format!("{}(", ident.name)) - }) - .copied() - .collect::>(); - - ctx.push_expr(ExprRet::Single(member), self) - .into_expr_err(loc)?; - - self.parse_inputs(ctx, loc, input_exprs)?; - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(inputs) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Namespace function call had no inputs".to_string())) - }; - - if matches!(inputs, ExprRet::CtxKilled(_)) { - ctx.push_expr(inputs, analyzer).into_expr_err(loc)?; - return Ok(()); - } - if possible_funcs.is_empty() { - // TODO: this is extremely ugly. 
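The helpers above (for example `match_namespaced_member`) all walk an `ExprRet` the same way: handle `Single`/`SingleLiteral` directly, recurse element by element through `Multi`, short-circuit on `CtxKilled`, and ignore `Null`. Below is a self-contained sketch of that traversal; the `ExprRetSketch` enum and `flatten` helper are illustrative stand-ins, not the crate's real types.

    // Illustrative stand-in for the ExprRet shape used throughout the deleted
    // code; the real type lives elsewhere in the analyzer.
    enum ExprRetSketch {
        Single(usize),
        SingleLiteral(usize),
        Multi(Vec<ExprRetSketch>),
        CtxKilled,
        Null,
    }

    // Visit every single result, short-circuiting if the context was killed,
    // mirroring recursive matches such as `match_namespaced_member`.
    fn flatten(ret: &ExprRetSketch, out: &mut Vec<usize>) -> Result<(), String> {
        match ret {
            ExprRetSketch::Single(idx) | ExprRetSketch::SingleLiteral(idx) => {
                out.push(*idx);
                Ok(())
            }
            ExprRetSketch::Multi(inner) => inner.iter().try_for_each(|r| flatten(r, out)),
            ExprRetSketch::CtxKilled => Err("context killed".to_string()),
            ExprRetSketch::Null => Ok(()),
        }
    }

    fn main() {
        let ret = ExprRetSketch::Multi(vec![
            ExprRetSketch::Single(1),
            ExprRetSketch::Multi(vec![ExprRetSketch::SingleLiteral(2), ExprRetSketch::Null]),
        ]);
        let mut seen = Vec::new();
        flatten(&ret, &mut seen).unwrap();
        assert_eq!(seen, vec![1, 2]);
    }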
- if inputs.has_killed() { - return ctx.kill(analyzer, loc, inputs.killed_kind().unwrap()).into_expr_err(loc); - } - let mut inputs = inputs.as_vec(); - if let Node::ContextVar(_) = analyzer.node(member) { inputs.insert(0, ExprRet::Single(member)) } - let inputs = ExprRet::Multi(inputs); - - let as_input_str = inputs.try_as_func_input_str(analyzer); - - let lits = inputs.literals_list().into_expr_err(loc)?; - if lits.iter().any(|i| *i) { - // try to disambiguate - if lits[0] { - Err(ExprErr::Todo(loc, "First element in function call was literal".to_string())) - } else { - let ty = if let Node::ContextVar(cvar) = analyzer.node(member) { - cvar.ty.ty_idx() - } else { - member - }; - - let possible_builtins: Vec<_> = analyzer.builtin_fn_inputs().iter().filter_map(|(func_name, (inputs, _))| { - if func_name.starts_with(&ident.name) { - if let Some(input) = inputs.first() { - let Ok(implicitly_castable) = BuiltInNode::from(ty).implicitly_castable_to(&BuiltInNode::from(input.ty), analyzer) else { - return None - }; - if implicitly_castable { - Some(func_name.clone()) - } else { - None - } - } else { - None - } - } else { - None - } - }).collect::>(); - let possible_builtins: Vec<_> = possible_builtins.into_iter().filter_map(|name| { - analyzer.builtin_fn_or_maybe_add(&name).map(FunctionNode::from) - }).collect(); - if let Some(func) = - analyzer.disambiguate_fn_call(&ident.name, lits, &inputs, &possible_builtins) - { - let expr = &MemberAccess( - loc, - Box::new(member_expr.clone()), - Identifier { - loc: ident.loc, - name: func.name(analyzer).into_expr_err(loc)?.split('(').collect::>()[0].to_string(), - }, - ); - analyzer.parse_ctx_expr(expr, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Fallback function parse failure".to_string())) - }; - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - let mut modifier_input_exprs = vec![member_expr.clone()]; - modifier_input_exprs.extend(input_exprs.to_vec()); - analyzer.match_intrinsic_fallback(ctx, &loc, &modifier_input_exprs, ret) - }) - } else { - // analyzer.match_intrinsic_fallback(ctx, &loc, &modifier_input_exprs, ret) - Err(ExprErr::FunctionNotFound( - loc, - format!("Could not disambiguate function, possible functions: {:#?}", possible_builtins.iter().map(|i| i.name(analyzer).unwrap()).collect::>()) - )) - } - } - } else { - let expr = &MemberAccess( - loc, - Box::new(member_expr.clone()), - Identifier { - loc: ident.loc, - name: format!("{}{}", ident.name, as_input_str), - }, - ); - analyzer.parse_ctx_expr(expr, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoLhs(loc, "Fallback function parse failure".to_string())) - }; - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - let mut modifier_input_exprs = vec![member_expr.clone()]; - modifier_input_exprs.extend(input_exprs.to_vec()); - analyzer.match_intrinsic_fallback(ctx, &loc, &modifier_input_exprs, ret) - }) - } - } else if possible_funcs.len() == 1 { - let mut inputs = inputs.as_vec(); - let func = possible_funcs[0]; - if func.params(analyzer).len() > inputs.len() { - // Add the member back in if its a context variable - if let Node::ContextVar(_) = analyzer.node(member) { inputs.insert(0, ExprRet::Single(member)) } - } - let inputs = ExprRet::Multi(inputs); - if inputs.has_killed() { - return ctx.kill(analyzer, loc, inputs.killed_kind().unwrap()).into_expr_err(loc); - } - - - analyzer.setup_fn_call(&ident.loc, &inputs, func.into(), ctx, None) - } else { - // Add the member back in if its a context variable - let mut inputs = inputs.as_vec(); - if let Node::ContextVar(_) = analyzer.node(member) { inputs.insert(0, ExprRet::Single(member)) } - let inputs = ExprRet::Multi(inputs); - // this is the annoying case due to function overloading & type inference on number literals - let mut lits = vec![false]; - lits.extend( - input_exprs - .iter() - .map(|expr| { - match expr { - Negate(_, expr) => { - // negative number potentially - matches!(**expr, NumberLiteral(..) | HexLiteral(..)) - } - NumberLiteral(..) | HexLiteral(..) => true, - _ => false, - } - }) - .collect::>(), - ); - - if inputs.has_killed() { - return ctx.kill(analyzer, loc, inputs.killed_kind().unwrap()).into_expr_err(loc); - } - if let Some(func) = - analyzer.disambiguate_fn_call(&ident.name, lits, &inputs, &possible_funcs) - { - analyzer.setup_fn_call(&loc, &inputs, func.into(), ctx, None) - } else { - Err(ExprErr::FunctionNotFound( - loc, - format!("Could not disambiguate function, possible functions: {:#?}", possible_funcs.iter().map(|i| i.name(analyzer).unwrap()).collect::>()) - )) - } - } - }) - } -} diff --git a/src/context/mod.rs b/src/context/mod.rs deleted file mode 100644 index d2b35d2b..00000000 --- a/src/context/mod.rs +++ /dev/null @@ -1,1537 +0,0 @@ -use crate::context::yul::YulBuilder; -use ethers_core::types::I256; - -use ethers_core::types::U256; - -use shared::analyzer::GraphError; -use shared::analyzer::GraphLike; -use shared::context::*; -use solang_parser::helpers::CodeLocation; -use solang_parser::pt::YulStatement; - -use shared::range::elem_ty::Elem; -use shared::range::Range; -use solang_parser::pt::VariableDeclaration; - -use crate::VarType; -use petgraph::{visit::EdgeRef, Direction}; -use shared::{analyzer::AnalyzerLike, nodes::*, range::elem::RangeOp, Edge, Node, NodeIdx}; -use solang_parser::pt::{Expression, Loc, Statement}; - -// pub mod func; -// use func::*; -pub mod func_call; -use func_call::*; - -pub mod loops; -use loops::*; - -pub mod exprs; -use exprs::*; - -pub mod analyzers; -pub mod queries; - -pub mod yul; - -impl ContextBuilder for T where - T: AnalyzerLike + Sized + ExprParser -{ -} - -pub trait ContextBuilder: - AnalyzerLike + Sized + ExprParser -{ - fn parse_ctx_statement( - &mut self, - stmt: &Statement, - unchecked: bool, - parent_ctx: Option + Clone + Copy>, - ) where - Self: Sized, - { - if let Some(parent) = parent_ctx { - match self.node(parent) { - Node::Context(_) => { - let ctx = ContextNode::from(parent.into()); - if !ctx.killed_or_ret(self).unwrap() { - if let Some(live_edges) = - 
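The overload-resolution branches above rebuild the same literal mask each time: one flag reserved for the receiver slot, then one flag per argument marking number or hex literals (a negated literal still counts), which `disambiguate_fn_call` needs because integer literals can coerce to several widths. A minimal sketch of that mask construction over a toy expression type follows; the `Expr` enum and `literal_mask` name are assumptions for illustration, not `solang_parser` or analyzer APIs.

    // Toy expression type standing in for solang_parser's Expression.
    enum Expr {
        NumberLiteral(i64),
        HexLiteral(String),
        Negate(Box<Expr>),
        Variable(String),
    }

    /// Build the "is this slot a literal?" mask used when disambiguating an
    /// overloaded call: index 0 is reserved for the receiver, the rest mirror
    /// the argument list. A negated number/hex literal still counts.
    fn literal_mask(args: &[Expr]) -> Vec<bool> {
        let mut lits = vec![false]; // receiver slot
        lits.extend(args.iter().map(|arg| match arg {
            Expr::Negate(inner) => matches!(**inner, Expr::NumberLiteral(_) | Expr::HexLiteral(_)),
            Expr::NumberLiteral(_) | Expr::HexLiteral(_) => true,
            _ => false,
        }));
        lits
    }

    fn main() {
        let args = [
            Expr::Variable("x".into()),
            Expr::Negate(Box::new(Expr::NumberLiteral(1))),
            Expr::HexLiteral("0xff".into()),
        ];
        assert_eq!(literal_mask(&args), vec![false, false, true, true]);
    }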
self.add_if_err(ctx.live_edges(self).into_expr_err(stmt.loc())) - { - if live_edges.is_empty() { - self.parse_ctx_stmt_inner(stmt, unchecked, parent_ctx) - } else { - live_edges.iter().for_each(|fork_ctx| { - self.parse_ctx_stmt_inner(stmt, unchecked, Some(*fork_ctx)); - }); - } - } - } - } - _ => self.parse_ctx_stmt_inner(stmt, unchecked, parent_ctx), - } - } else { - self.parse_ctx_stmt_inner(stmt, unchecked, parent_ctx) - } - } - - #[tracing::instrument(level = "trace", skip_all)] - fn parse_ctx_stmt_inner( - &mut self, - stmt: &Statement, - unchecked: bool, - parent_ctx: Option + Clone + Copy>, - ) where - Self: Sized, - { - use Statement::*; - // println!("stmt: {:#?}, node: {:#?}", stmt, if let Some(node) = parent_ctx { Some(self.node(node.into())) } else { None}); - // if let Some(ctx) = parent_ctx { - // if let Node::Context(_) = self.node(ctx) { - // println!("ctx: {}, {:#?}", ContextNode::from(ctx.into()).path(self), stmt); - // } - // } - - // at the end of a statement we shouldn't have anything in the stack? - if let Some(ctx) = parent_ctx { - if let Node::Context(_) = self.node(ctx) { - let c = ContextNode::from(ctx.into()); - let _ = c.pop_expr_latest(stmt.loc(), self); - if unchecked { - let _ = c.set_unchecked(self); - } else { - let _ = c.unset_unchecked(self); - } - - if c.killed_or_ret(self).unwrap() { - return; - } - } - } - - match stmt { - Block { - loc, - unchecked, - statements, - } => { - tracing::trace!("parsing block"); - let parent = parent_ctx.expect("Free floating contexts shouldn't happen"); - let mut entry_loc = None; - let mut mods_set = false; - let ctx_node = match self.node(parent) { - Node::Function(fn_node) => { - mods_set = fn_node.modifiers_set; - entry_loc = Some(fn_node.loc); - let ctx = Context::new( - FunctionNode::from(parent.into()), - self.add_if_err( - FunctionNode::from(parent.into()) - .name(self) - .into_expr_err(stmt.loc()), - ) - .unwrap(), - *loc, - ); - let ctx_node = self.add_node(Node::Context(ctx)); - self.add_edge(ctx_node, parent, Edge::Context(ContextEdge::Context)); - - ctx_node - } - Node::Context(_) => { - // let ctx = Context::new_subctx( - // ContextNode::from(parent.into()), - // *loc, - // false, - // self, - // ); - // let ctx_node = self.add_node(Node::Context(ctx)); - // self.add_edge(ctx_node, parent, Edge::Context(ContextEdge::Subcontext)); - // ctx_node - parent.into() - } - e => todo!( - "Expected a context to be created by a function or context but got: {:?}", - e - ), - }; - - // optionally add named input and named outputs into context - let (params, inputs): (Vec<_>, Vec<_>) = self - .graph() - .edges_directed(parent.into(), Direction::Incoming) - .filter(|edge| *edge.weight() == Edge::FunctionParam) - .map(|edge| FunctionParamNode::from(edge.source())) - .collect::>() - .into_iter() - .filter_map(|param_node| { - let res = param_node - .underlying(self) - .into_expr_err(stmt.loc()) - .cloned(); - let func_param = self.add_if_err(res)?; - if let Some(cvar) = ContextVar::maybe_new_from_func_param(self, func_param) - { - let cvar_node = self.add_node(Node::ContextVar(cvar)); - ContextNode::from(ctx_node) - .add_var(cvar_node.into(), self) - .unwrap(); - self.add_edge( - cvar_node, - ctx_node, - Edge::Context(ContextEdge::Variable), - ); - - Some((param_node, ContextVarNode::from(cvar_node))) - } else { - None - } - }) - .unzip(); - - self.graph() - .edges_directed(parent.into(), Direction::Incoming) - .filter(|edge| *edge.weight() == Edge::FunctionReturn) - .map(|edge| FunctionReturnNode::from(edge.source())) - 
.collect::>() - .iter() - .for_each(|ret_node| { - let res = ret_node.underlying(self).into_expr_err(stmt.loc()).cloned(); - let func_ret = self.add_if_err(res).unwrap(); - if let Some(cvar) = ContextVar::maybe_new_from_func_ret(self, func_ret) { - let cvar_node = self.add_node(Node::ContextVar(cvar)); - ContextNode::from(ctx_node) - .add_var(cvar_node.into(), self) - .unwrap(); - self.add_edge( - cvar_node, - ctx_node, - Edge::Context(ContextEdge::Variable), - ); - } - }); - - if let Some(fn_loc) = entry_loc { - if !mods_set { - let parent = FunctionNode::from(parent.into()); - let _ = self - .set_modifiers(parent, ctx_node.into()) - .map_err(|e| self.add_expr_err(e)); - } - - let res = self.func_call_inner( - true, - ctx_node.into(), - parent.into().into(), - fn_loc, - inputs, - params, - None, - None, - ); - if self.widen_if_limit_hit(ctx_node.into(), res) { - return; - } - let res = self.apply_to_edges(ctx_node.into(), *loc, &|analyzer, ctx, loc| { - if ctx.killed_or_ret(analyzer).into_expr_err(loc)? { - tracing::trace!("killing due to bad funciton call"); - let res = ContextNode::from(ctx_node) - .kill( - analyzer, - fn_loc, - ctx.underlying(analyzer).unwrap().killed.unwrap().1, - ) - .into_expr_err(fn_loc); - let _ = analyzer.add_if_err(res); - } - Ok(()) - }); - - if self.widen_if_limit_hit(ctx_node.into(), res) { - return; - } - - return; - } - - let res = self.apply_to_edges(ctx_node.into(), *loc, &|analyzer, ctx, _loc| { - statements - .iter() - .for_each(|stmt| analyzer.parse_ctx_statement(stmt, *unchecked, Some(ctx))); - Ok(()) - }); - if self.widen_if_limit_hit(ctx_node.into(), res) {} - } - VariableDefinition(loc, var_decl, maybe_expr) => { - let ctx = ContextNode::from( - parent_ctx - .expect("No context for variable definition?") - .into(), - ); - tracing::trace!( - "parsing variable definition, {:?} {var_decl:?}", - ctx.path(self) - ); - - if let Some(rhs) = maybe_expr { - match self.parse_ctx_expr(rhs, ctx) { - Ok(()) => { - let res = self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - if !ctx.killed_or_ret(analyzer).into_expr_err(loc)? { - let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, format!("Variable definition had no right hand side, {}", ctx.path(analyzer)))) - }; - - if matches!(rhs_paths, ExprRet::CtxKilled(_)) { - ctx.push_expr(rhs_paths, analyzer).into_expr_err(loc)?; - return Ok(()); - } - - analyzer.parse_ctx_expr(&var_decl.ty, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "Variable definition had no left hand side".to_string())) - }; - - if matches!(lhs_paths, ExprRet::CtxKilled(_)) { - ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.match_var_def(ctx, var_decl, loc, &lhs_paths, Some(&rhs_paths))?; - Ok(()) - }) - } else { - Ok(()) - } - }); - let _ = self.widen_if_limit_hit(ctx, res); - } - ret => { - let _ = self.widen_if_limit_hit(ctx, ret); - } - } - } else { - let res = self.parse_ctx_expr(&var_decl.ty, ctx); - if self.widen_if_limit_hit(ctx, res) { - return; - } - let res = self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoLhs(loc, "Variable definition had no left hand side".to_string())) - }; - if matches!(lhs_paths, ExprRet::CtxKilled(_)) { - ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.match_var_def(ctx, var_decl, loc, &lhs_paths, None)?; - Ok(()) - }); - let _ = self.widen_if_limit_hit(ctx, res); - } - } - Args(_loc, _args) => { - tracing::trace!("parsing args, {_args:?}"); - } - If(loc, if_expr, true_expr, maybe_false_expr) => { - tracing::trace!("parsing if, {if_expr:?}"); - let ctx = ContextNode::from(parent_ctx.expect("Dangling if statement").into()); - let res = self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - analyzer.cond_op_stmt(loc, if_expr, true_expr, maybe_false_expr, ctx) - }); - let _ = self.widen_if_limit_hit(ctx, res); - } - While(loc, cond, body) => { - tracing::trace!("parsing while, {cond:?}"); - if let Some(parent) = parent_ctx { - let res = self.apply_to_edges( - ContextNode::from(parent.into()), - *loc, - &|analyzer, ctx, loc| analyzer.while_loop(loc, ctx, cond, body), - ); - let _ = self.widen_if_limit_hit(parent.into().into(), res); - } - } - Expression(loc, expr) => { - tracing::trace!("parsing expr, {expr:?}"); - if let Some(parent) = parent_ctx { - let ctx = parent.into().into(); - match self.parse_ctx_expr(expr, ctx) { - Ok(()) => { - let res = self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - if ctx.killed_or_ret(analyzer).into_expr_err(loc)? { - tracing::trace!("killing due to bad expr"); - ContextNode::from(parent.into()) - .kill( - analyzer, - loc, - ctx.underlying(analyzer).unwrap().killed.unwrap().1, - ) - .into_expr_err(loc)?; - } - Ok(()) - }); - let _ = self.widen_if_limit_hit(ctx, res); - } - e => { - let _ = self.widen_if_limit_hit(ctx, e); - } - } - } - } - For(loc, maybe_for_start, maybe_for_middle, maybe_for_end, maybe_for_body) => { - tracing::trace!("parsing for loop"); - if let Some(parent) = parent_ctx { - let res = - self.apply_to_edges(parent.into().into(), *loc, &|analyzer, ctx, loc| { - analyzer.for_loop( - loc, - ctx, - maybe_for_start, - maybe_for_middle, - maybe_for_end, - maybe_for_body, - ) - }); - let _ = self.widen_if_limit_hit(parent.into().into(), res); - } - } - DoWhile(loc, while_stmt, while_expr) => { - tracing::trace!("parsing `do while`, {while_expr:?}"); - if let Some(parent) = parent_ctx { - let res = self.apply_to_edges( - ContextNode::from(parent.into()), - *loc, - &|analyzer, ctx, loc| analyzer.while_loop(loc, ctx, while_expr, while_stmt), - ); - let _ = self.widen_if_limit_hit(parent.into().into(), res); - } - } - Continue(_loc) => { - tracing::trace!("parsing continue"); - // TODO: We cheat in loops by just widening so continues dont matter yet - } - Break(_loc) => { - tracing::trace!("parsing break"); - // TODO: We cheat in loops by just widening so breaks dont matter yet - } - Assembly { - loc, - dialect: _, - flags: _, - block: yul_block, - } => { - tracing::trace!("parsing assembly"); - let ctx = ContextNode::from( - parent_ctx - .expect("No context for variable definition?") - .into(), - ); - let res = self.apply_to_edges(ctx, *loc, &|analyzer, ctx, _loc| { - analyzer.parse_ctx_yul_statement(&YulStatement::Block(yul_block.clone()), ctx); - Ok(()) - }); - let _ = self.widen_if_limit_hit(ctx, res); - } - Return(loc, maybe_ret_expr) => { - tracing::trace!("parsing return"); - if let Some(ret_expr) = maybe_ret_expr { - if let Some(parent) = parent_ctx { - let res = self.parse_ctx_expr(ret_expr, parent.into().into()); - if 
self.widen_if_limit_hit(parent.into().into(), res) { - return; - } - let res = self.apply_to_edges(parent.into().into(), *loc, &|analyzer, ctx, loc| { - let Ok(Some(ret)) = ctx.pop_expr_latest(loc, analyzer) else { - return Err(ExprErr::NoLhs(loc, "Return did not have a associated expression".to_string())); - }; - - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - - let paths = ret.flatten(); - if paths.is_killed() { - tracing::trace!("killing due to bad return"); - let res = ContextNode::from(parent.into()) - .kill(analyzer, loc, paths.killed_kind().unwrap()) - .into_expr_err(loc); - let _ = analyzer.add_if_err(res); - return Ok(()); - } - analyzer.return_match(ctx, &loc, &paths); - Ok(()) - }); - let _ = self.widen_if_limit_hit(parent.into().into(), res); - } - } - } - Revert(loc, _maybe_err_path, _exprs) => { - tracing::trace!("parsing revert"); - if let Some(parent) = parent_ctx { - let parent = ContextNode::from(parent.into()); - let res = self.apply_to_edges(parent, *loc, &|analyzer, ctx, loc| { - let res = ctx - .kill(analyzer, loc, KilledKind::Revert) - .into_expr_err(loc); - let _ = analyzer.add_if_err(res); - Ok(()) - }); - let _ = self.add_if_err(res); - } - } - RevertNamedArgs(_loc, _maybe_err_path, _named_args) => { - tracing::trace!("parsing named revert"); - todo!("revert named args") - } - Emit(_loc, _emit_expr) => {} - Try(_loc, _try_expr, _maybe_returns, _clauses) => {} - Error(_loc) => {} - } - } - - fn widen_if_limit_hit(&mut self, ctx: ContextNode, maybe_err: Result<(), ExprErr>) -> bool { - match maybe_err { - Err(ExprErr::FunctionCallBlockTodo(_, _s)) => { - // dont kill for this one - false - } - Err(e @ ExprErr::GraphError(_, GraphError::MaxStackWidthReached(..), ..)) => { - // TODO: we should ideally peak at each if statement body and only widen variables referenced in there - // but for now we just delete the forks, and reset all local variables - self.add_expr_err(e); - true - } - Err(e) => { - let res = ctx - .kill(self, e.loc(), KilledKind::ParseError) - .into_expr_err(e.loc()); - let _ = self.add_if_err(res); - self.add_expr_err(e); - false - } - _ => false, - } - } - - fn return_match(&mut self, ctx: ContextNode, loc: &Loc, paths: &ExprRet) { - match paths { - ExprRet::CtxKilled(kind) => { - let _ = ctx.kill(self, *loc, *kind); - } - ExprRet::Single(expr) | ExprRet::SingleLiteral(expr) => { - let latest = ContextVarNode::from(*expr).latest_version(self); - // let ret = self.advance_var_in_ctx(latest, *loc, *ctx); - let path = ctx.path(self); - let res = latest.underlying_mut(self).into_expr_err(*loc); - match res { - Ok(var) => { - tracing::trace!("Returning: {}, {}", path, var.display_name); - var.is_return = true; - - self.add_edge(latest, ctx, Edge::Context(ContextEdge::Return)); - - let res = ctx.add_return_node(*loc, latest, self).into_expr_err(*loc); - // ctx.kill(self, *loc, KilledKind::Ended); - let _ = self.add_if_err(res); - } - Err(e) => self.add_expr_err(e), - } - } - ExprRet::Multi(rets) => { - rets.iter().for_each(|expr_ret| { - self.return_match(ctx, loc, expr_ret); - }); - } - ExprRet::Null => {} - } - } - - fn match_var_def( - &mut self, - ctx: ContextNode, - var_decl: &VariableDeclaration, - loc: Loc, - lhs_paths: &ExprRet, - rhs_paths: Option<&ExprRet>, - ) -> Result { - match (lhs_paths, rhs_paths) { - (ExprRet::CtxKilled(kind), _) | (_, Some(ExprRet::CtxKilled(kind))) => { - ctx.kill(self, loc, *kind).into_expr_err(loc)?; - Ok(true) - } - (ExprRet::Single(ty), 
Some(ExprRet::SingleLiteral(rhs))) => { - let ty = VarType::try_from_idx(self, *ty).expect("Not a known type"); - let rhs_cvar = ContextVarNode::from(*rhs).latest_version(self); - let res = rhs_cvar.literal_cast_from_ty(ty, self).into_expr_err(loc); - let _ = self.add_if_err(res); - self.match_var_def( - ctx, - var_decl, - loc, - lhs_paths, - Some(&ExprRet::Single(rhs_cvar.into())), - ) - } - (ExprRet::Single(ty), Some(ExprRet::Single(rhs))) => { - let name = var_decl.name.clone().expect("Variable wasn't named"); - let ty = VarType::try_from_idx(self, *ty).expect("Not a known type"); - let var = ContextVar { - loc: Some(loc), - name: name.to_string(), - display_name: name.to_string(), - storage: var_decl.storage.clone(), - is_tmp: false, - is_symbolic: true, - tmp_of: None, - is_return: false, - ty, - }; - let lhs = ContextVarNode::from(self.add_node(Node::ContextVar(var))); - ctx.add_var(lhs, self).into_expr_err(loc)?; - self.add_edge(lhs, ctx, Edge::Context(ContextEdge::Variable)); - let rhs = ContextVarNode::from(*rhs); - - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let _ = analyzer.assign(loc, lhs, rhs, ctx)?; - // match_assign_ret(analyzer, ctx, ret); - Ok(()) - })?; - - Ok(false) - } - (ExprRet::Single(ty), None) => { - let name = var_decl.name.clone().expect("Variable wasn't named"); - let ty = VarType::try_from_idx(self, *ty).expect("Not a known type"); - let var = ContextVar { - loc: Some(loc), - name: name.to_string(), - display_name: name.to_string(), - storage: var_decl.storage.clone(), - is_tmp: false, - is_symbolic: true, - tmp_of: None, - is_return: false, - ty, - }; - let lhs = ContextVarNode::from(self.add_node(Node::ContextVar(var))); - ctx.add_var(lhs, self).into_expr_err(loc)?; - self.add_edge(lhs, ctx, Edge::Context(ContextEdge::Variable)); - Ok(false) - } - (l @ ExprRet::Single(_lhs), Some(ExprRet::Multi(rhs_sides))) => Ok(rhs_sides - .iter() - .map(|expr_ret| self.match_var_def(ctx, var_decl, loc, l, Some(expr_ret))) - .collect::, ExprErr>>()? - .iter() - .all(|e| *e)), - (ExprRet::Multi(lhs_sides), r @ Some(ExprRet::Single(_))) => Ok(lhs_sides - .iter() - .map(|expr_ret| self.match_var_def(ctx, var_decl, loc, expr_ret, r)) - .collect::, ExprErr>>()? - .iter() - .all(|e| *e)), - (ExprRet::Multi(lhs_sides), None) => Ok(lhs_sides - .iter() - .map(|expr_ret| self.match_var_def(ctx, var_decl, loc, expr_ret, None)) - .collect::, ExprErr>>()? - .iter() - .all(|e| *e)), - (ExprRet::Multi(lhs_sides), Some(ExprRet::Multi(rhs_sides))) => { - // try to zip sides if they are the same length - if lhs_sides.len() == rhs_sides.len() { - Ok(lhs_sides - .iter() - .zip(rhs_sides.iter()) - .map(|(lhs_expr_ret, rhs_expr_ret)| { - self.match_var_def(ctx, var_decl, loc, lhs_expr_ret, Some(rhs_expr_ret)) - }) - .collect::, ExprErr>>()? - .iter() - .all(|e| *e)) - } else { - Ok(rhs_sides - .iter() - .map(|rhs_expr_ret| { - self.match_var_def(ctx, var_decl, loc, lhs_paths, Some(rhs_expr_ret)) - }) - .collect::, ExprErr>>()? 
- .iter() - .all(|e| *e)) - } - } - (_e, _f) => Err(ExprErr::Todo( - loc, - "Unhandled ExprRet combination in `match_var_def`".to_string(), - )), - } - } - - fn parse_ctx_expr(&mut self, expr: &Expression, ctx: ContextNode) -> Result<(), ExprErr> { - if !ctx.killed_or_ret(self).unwrap() { - let edges = ctx.live_edges(self).into_expr_err(expr.loc())?; - if edges.is_empty() { - self.parse_ctx_expr_inner(expr, ctx) - } else { - edges - .iter() - .try_for_each(|fork_ctx| self.parse_ctx_expr(expr, *fork_ctx))?; - Ok(()) - } - } else { - Ok(()) - } - } - - #[tracing::instrument(level = "trace", skip_all, fields(ctx = %ctx.path(self)))] - fn parse_ctx_expr_inner(&mut self, expr: &Expression, ctx: ContextNode) -> Result<(), ExprErr> { - use Expression::*; - // println!( - // "ctx: {}, current stack: {:?}, \nexpr: {:?}\n", - // ctx.underlying(self).unwrap().path, - // ctx.underlying(self) - // .unwrap() - // .expr_ret_stack - // .iter() - // .map(|i| i.debug_str(self)) - // .collect::>(), - // expr - // ); - match expr { - // literals - NumberLiteral(loc, int, exp, _unit) => self.number_literal(ctx, *loc, int, exp, false), - AddressLiteral(loc, addr) => self.address_literal(ctx, *loc, addr), - StringLiteral(lits) => lits - .iter() - .try_for_each(|lit| self.string_literal(ctx, lit.loc, &lit.string)), - BoolLiteral(loc, b) => self.bool_literal(ctx, *loc, *b), - HexNumberLiteral(loc, b, _unit) => self.hex_num_literal(ctx, *loc, b, false), - HexLiteral(hexes) => self.hex_literals(ctx, hexes), - RationalNumberLiteral(loc, integer, fraction, exp, unit) => { - self.rational_number_literal(ctx, *loc, integer, fraction, exp, unit) - } - Negate(_loc, expr) => match &**expr { - NumberLiteral(loc, int, exp, _unit) => { - self.number_literal(ctx, *loc, int, exp, true) - } - HexNumberLiteral(loc, b, _unit) => self.hex_num_literal(ctx, *loc, b, true), - e => { - self.parse_ctx_expr(e, ctx)?; - self.apply_to_edges(ctx, e.loc(), &|analyzer, ctx, loc| { - tracing::trace!("Negate variable pop"); - let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "No variable present to negate".to_string())) - }; - if matches!(rhs_paths, ExprRet::CtxKilled(_)) { - ctx.push_expr(rhs_paths, analyzer).into_expr_err(loc)?; - return Ok(()); - } - - // Solidity is dumb and used to allow negation of unsigned integers. - // That means we have to cast this as a int256. - let var = rhs_paths.expect_single().into_expr_err(loc)?; - - let zero = analyzer.add_node(Node::Concrete(Concrete::from(I256::from(0i32)))); - let zero = ContextVar::new_from_concrete( - Loc::Implicit, - ctx, - zero.into(), - analyzer, - ).into_expr_err(loc)?; - let zero = analyzer.add_node(Node::ContextVar(zero)); - let new_underlying = ContextVarNode::from(var) - .underlying(analyzer).into_expr_err(loc)? 
- .clone() - .as_cast_tmp(loc, ctx, Builtin::Int(256), analyzer).into_expr_err(loc)?; - let node = analyzer.add_node(Node::ContextVar(new_underlying)); - ctx.add_var(node.into(), analyzer).into_expr_err(loc)?; - analyzer.add_edge(node, ctx, Edge::Context(ContextEdge::Variable)); - - ContextVarNode::from(node).cast_from( - &ContextVarNode::from(zero), - analyzer - ).into_expr_err(loc)?; - - let lhs_paths = ExprRet::Single(zero); - analyzer.op_match( - ctx, - loc, - &lhs_paths, - &ExprRet::Single(ContextVarNode::from(node).latest_version(analyzer).into()), - RangeOp::Sub(true), - false, - ) - }) - } // e => todo!("UnaryMinus unexpected rhs: {e:?}"), - }, - UnaryPlus(_loc, e) => todo!("UnaryPlus unexpected rhs: {e:?}"), - - // Binary ops - Power(loc, lhs_expr, rhs_expr) => { - self.op_expr(*loc, lhs_expr, rhs_expr, ctx, RangeOp::Exp, false) - } - Add(loc, lhs_expr, rhs_expr) => self.op_expr( - *loc, - lhs_expr, - rhs_expr, - ctx, - RangeOp::Add(ctx.unchecked(self).into_expr_err(*loc)?), - false, - ), - AssignAdd(loc, lhs_expr, rhs_expr) => self.op_expr( - *loc, - lhs_expr, - rhs_expr, - ctx, - RangeOp::Add(ctx.unchecked(self).into_expr_err(*loc)?), - true, - ), - Subtract(loc, lhs_expr, rhs_expr) => self.op_expr( - *loc, - lhs_expr, - rhs_expr, - ctx, - RangeOp::Sub(ctx.unchecked(self).into_expr_err(*loc)?), - false, - ), - AssignSubtract(loc, lhs_expr, rhs_expr) => self.op_expr( - *loc, - lhs_expr, - rhs_expr, - ctx, - RangeOp::Sub(ctx.unchecked(self).into_expr_err(*loc)?), - true, - ), - Multiply(loc, lhs_expr, rhs_expr) => self.op_expr( - *loc, - lhs_expr, - rhs_expr, - ctx, - RangeOp::Mul(ctx.unchecked(self).into_expr_err(*loc)?), - false, - ), - AssignMultiply(loc, lhs_expr, rhs_expr) => self.op_expr( - *loc, - lhs_expr, - rhs_expr, - ctx, - RangeOp::Mul(ctx.unchecked(self).into_expr_err(*loc)?), - true, - ), - Divide(loc, lhs_expr, rhs_expr) => { - self.op_expr(*loc, lhs_expr, rhs_expr, ctx, RangeOp::Div(false), false) - } - AssignDivide(loc, lhs_expr, rhs_expr) => { - self.op_expr(*loc, lhs_expr, rhs_expr, ctx, RangeOp::Div(false), true) - } - Modulo(loc, lhs_expr, rhs_expr) => { - self.op_expr(*loc, lhs_expr, rhs_expr, ctx, RangeOp::Mod, false) - } - AssignModulo(loc, lhs_expr, rhs_expr) => { - self.op_expr(*loc, lhs_expr, rhs_expr, ctx, RangeOp::Mod, true) - } - ShiftLeft(loc, lhs_expr, rhs_expr) => { - self.op_expr(*loc, lhs_expr, rhs_expr, ctx, RangeOp::Shl, false) - } - AssignShiftLeft(loc, lhs_expr, rhs_expr) => { - self.op_expr(*loc, lhs_expr, rhs_expr, ctx, RangeOp::Shl, true) - } - ShiftRight(loc, lhs_expr, rhs_expr) => { - self.op_expr(*loc, lhs_expr, rhs_expr, ctx, RangeOp::Shr, false) - } - AssignShiftRight(loc, lhs_expr, rhs_expr) => { - self.op_expr(*loc, lhs_expr, rhs_expr, ctx, RangeOp::Shr, true) - } - ConditionalOperator(loc, if_expr, true_expr, false_expr) => { - self.cond_op_expr(*loc, if_expr, true_expr, false_expr, ctx) - } - - // Bitwise ops - BitwiseAnd(loc, lhs_expr, rhs_expr) => { - self.op_expr(*loc, lhs_expr, rhs_expr, ctx, RangeOp::BitAnd, false) - } - AssignAnd(loc, lhs_expr, rhs_expr) => { - self.op_expr(*loc, lhs_expr, rhs_expr, ctx, RangeOp::BitAnd, true) - } - BitwiseXor(loc, lhs_expr, rhs_expr) => { - self.op_expr(*loc, lhs_expr, rhs_expr, ctx, RangeOp::BitXor, false) - } - AssignXor(loc, lhs_expr, rhs_expr) => { - self.op_expr(*loc, lhs_expr, rhs_expr, ctx, RangeOp::BitXor, true) - } - BitwiseOr(loc, lhs_expr, rhs_expr) => { - self.op_expr(*loc, lhs_expr, rhs_expr, ctx, RangeOp::BitOr, false) - } - AssignOr(loc, lhs_expr, rhs_expr) => { - 
self.op_expr(*loc, lhs_expr, rhs_expr, ctx, RangeOp::BitOr, true) - } - BitwiseNot(loc, lhs_expr) => self.bit_not(*loc, lhs_expr, ctx), - - // assign - Assign(loc, lhs_expr, rhs_expr) => self.assign_exprs(*loc, lhs_expr, rhs_expr, ctx), - List(loc, params) => self.list(ctx, *loc, params), - // array - ArraySubscript(_loc, ty_expr, None) => self.array_ty(ty_expr, ctx), - ArraySubscript(loc, ty_expr, Some(index_expr)) => { - self.index_into_array(*loc, ty_expr, index_expr, ctx) - } - ArraySlice(loc, _lhs_expr, _maybe_middle_expr, _maybe_rhs) => Err(ExprErr::Todo( - *loc, - "Array slicing not currently supported".to_string(), - )), - ArrayLiteral(loc, _) => Err(ExprErr::Todo( - *loc, - "Array literal not currently supported".to_string(), - )), - - // Comparator - Equal(loc, lhs, rhs) => self.cmp(*loc, lhs, RangeOp::Eq, rhs, ctx), - NotEqual(loc, lhs, rhs) => self.cmp(*loc, lhs, RangeOp::Neq, rhs, ctx), - Less(loc, lhs, rhs) => self.cmp(*loc, lhs, RangeOp::Lt, rhs, ctx), - More(loc, lhs, rhs) => self.cmp(*loc, lhs, RangeOp::Gt, rhs, ctx), - LessEqual(loc, lhs, rhs) => self.cmp(*loc, lhs, RangeOp::Lte, rhs, ctx), - MoreEqual(loc, lhs, rhs) => self.cmp(*loc, lhs, RangeOp::Gte, rhs, ctx), - - // Logical - Not(loc, expr) => self.not(*loc, expr, ctx), - And(loc, lhs, rhs) => self.cmp(*loc, lhs, RangeOp::And, rhs, ctx), - Or(loc, lhs, rhs) => self.cmp(*loc, lhs, RangeOp::Or, rhs, ctx), - - // Function calls - FunctionCallBlock(loc, _func_expr, _input_exprs) => { - // TODO: update msg node - Err(ExprErr::Todo( - *loc, - "Function call block is unsupported. We shouldn't have hit this code path" - .to_string(), - )) - } - NamedFunctionCall(loc, func_expr, input_args) => { - self.named_fn_call_expr(ctx, loc, func_expr, input_args) - } - FunctionCall(loc, func_expr, input_exprs) => { - let updated_func_expr = match **func_expr { - FunctionCallBlock(_loc, ref inner_func_expr, ref call_block) => { - // we dont currently handle the `{value: .. gas: ..}` msg updating - self.add_expr_err(ExprErr::FunctionCallBlockTodo(call_block.loc(), "Function call block is currently unsupported. 
Relevant changes on `msg` will not take affect".to_string())); - inner_func_expr.clone() - } - _ => func_expr.clone(), - }; - - self.fn_call_expr(ctx, loc, &updated_func_expr, input_exprs) - } - // member - New(_loc, expr) => self.parse_ctx_expr(expr, ctx), - This(loc) => { - let var = ContextVar::new_from_contract( - *loc, - ctx.associated_contract(self).into_expr_err(*loc)?, - self, - ) - .into_expr_err(*loc)?; - let cvar = self.add_node(Node::ContextVar(var)); - ctx.add_var(cvar.into(), self).into_expr_err(*loc)?; - self.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); - ctx.push_expr(ExprRet::Single(cvar), self) - .into_expr_err(*loc)?; - Ok(()) - } - MemberAccess(loc, member_expr, ident) => { - self.member_access(*loc, member_expr, ident, ctx) - } - - Delete(loc, expr) => { - fn delete_match( - ctx: ContextNode, - loc: &Loc, - analyzer: &mut (impl GraphLike + AnalyzerLike), - ret: ExprRet, - ) { - match ret { - ExprRet::CtxKilled(kind) => { - let _ = ctx.kill(analyzer, *loc, kind); - } - ExprRet::Single(cvar) | ExprRet::SingleLiteral(cvar) => { - let mut new_var = - analyzer.advance_var_in_ctx(cvar.into(), *loc, ctx).unwrap(); - let res = new_var.sol_delete_range(analyzer).into_expr_err(*loc); - let _ = analyzer.add_if_err(res); - } - ExprRet::Multi(inner) => { - inner - .iter() - .for_each(|i| delete_match(ctx, loc, analyzer, i.clone())); - } - ExprRet::Null => {} - } - } - - self.parse_ctx_expr(expr, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - tracing::trace!("Delete variable pop"); - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Delete operation had no right hand side".to_string())) - }; - - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - - delete_match(ctx, &loc, analyzer, ret); - Ok(()) - }) - } - - // de/increment stuff - PreIncrement(loc, expr) => { - self.parse_ctx_expr(expr, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - tracing::trace!("PreIncrement variable pop"); - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "PreIncrement operation had no right hand side".to_string())) - }; - - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.match_in_de_crement(ctx, true, true, loc, &ret) - }) - } - PostIncrement(loc, expr) => { - self.parse_ctx_expr(expr, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - tracing::trace!("PostIncrement variable pop"); - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "PostIncrement operation had no right hand side".to_string())) - }; - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.match_in_de_crement(ctx, false, true, loc, &ret) - }) - } - PreDecrement(loc, expr) => { - self.parse_ctx_expr(expr, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - tracing::trace!("PreDecrement variable pop"); - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoRhs(loc, "PreDecrement operation had no right hand side".to_string())) - }; - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.match_in_de_crement(ctx, true, false, loc, &ret) - }) - } - PostDecrement(loc, expr) => { - self.parse_ctx_expr(expr, ctx)?; - self.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - tracing::trace!("PostDecrement variable pop"); - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "PostDecrement operation had no right hand side".to_string())) - }; - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.match_in_de_crement(ctx, false, false, loc, &ret) - }) - } - - // Misc. - Variable(ident) => self.variable(ident, ctx, None), - Type(loc, ty) => { - if let Some(builtin) = Builtin::try_from_ty(ty.clone(), self) { - if let Some(idx) = self.builtins().get(&builtin) { - ctx.push_expr(ExprRet::Single(*idx), self) - .into_expr_err(*loc)?; - Ok(()) - } else { - let idx = self.add_node(Node::Builtin(builtin.clone())); - self.builtins_mut().insert(builtin, idx); - ctx.push_expr(ExprRet::Single(idx), self) - .into_expr_err(*loc)?; - Ok(()) - } - } else { - ctx.push_expr(ExprRet::Null, self).into_expr_err(*loc)?; - Ok(()) - } - } - Parenthesis(_loc, expr) => self.parse_ctx_expr(expr, ctx), - } - } - - fn match_in_de_crement( - &mut self, - ctx: ContextNode, - pre: bool, - increment: bool, - loc: Loc, - rhs: &ExprRet, - ) -> Result<(), ExprErr> { - match rhs { - ExprRet::CtxKilled(kind) => { - ctx.kill(self, loc, *kind).into_expr_err(loc)?; - Ok(()) - } - ExprRet::SingleLiteral(var) => { - let res = ContextVarNode::from(*var) - .try_increase_size(self) - .into_expr_err(loc); - let _ = self.add_if_err(res); - self.match_in_de_crement(ctx, pre, increment, loc, &ExprRet::Single(*var)) - } - ExprRet::Single(var) => { - let cvar = ContextVarNode::from(*var); - let elem = Elem::from(cvar); - let one = Elem::from(Concrete::from(U256::from(1))).cast(elem.clone()); - // if let Some(r) = cvar.range(self).into_expr_err(loc)? 
{ - if increment { - if pre { - let new_cvar = self.advance_var_in_ctx(cvar, loc, ctx)?; - let res = new_cvar - .set_range_min(self, elem.clone() + one.clone()) - .into_expr_err(loc); - let _ = self.add_if_err(res); - let res = new_cvar.set_range_max(self, elem + one).into_expr_err(loc); - let _ = self.add_if_err(res); - ctx.push_expr(ExprRet::Single(new_cvar.into()), self) - .into_expr_err(loc)?; - Ok(()) - } else { - let dup = cvar.as_tmp(loc, ctx, self).into_expr_err(loc)?; - let new_cvar = self.advance_var_in_ctx(cvar, loc, ctx)?; - let res = new_cvar - .set_range_min(self, elem.clone() + one.clone()) - .into_expr_err(loc); - let _ = self.add_if_err(res); - new_cvar - .set_range_max(self, elem + one) - .into_expr_err(loc)?; - ctx.push_expr(ExprRet::Single(dup.into()), self) - .into_expr_err(loc)?; - Ok(()) - } - } else if pre { - let new_cvar = self.advance_var_in_ctx(cvar, loc, ctx)?; - let res = new_cvar - .set_range_min(self, elem.clone() - one.clone()) - .into_expr_err(loc); - let _ = self.add_if_err(res); - new_cvar - .set_range_max(self, elem - one) - .into_expr_err(loc)?; - ctx.push_expr(ExprRet::Single(new_cvar.into()), self) - .into_expr_err(loc)?; - Ok(()) - } else { - let dup = cvar.as_tmp(loc, ctx, self).into_expr_err(loc)?; - let new_cvar = self.advance_var_in_ctx(cvar, loc, ctx)?; - let res = new_cvar - .set_range_min(self, elem.clone() - one.clone()) - .into_expr_err(loc); - let _ = self.add_if_err(res); - new_cvar - .set_range_max(self, elem - one) - .into_expr_err(loc)?; - ctx.push_expr(ExprRet::Single(dup.into()), self) - .into_expr_err(loc)?; - Ok(()) - } - // } else { - // panic!("No range in post-increment") - // } - } - ExprRet::Multi(inner) => inner - .iter() - .try_for_each(|expr| self.match_in_de_crement(ctx, pre, increment, loc, expr)), - ExprRet::Null => Ok(()), - } - } - - #[tracing::instrument(level = "trace", skip_all)] - fn assign_exprs( - &mut self, - loc: Loc, - lhs_expr: &Expression, - rhs_expr: &Expression, - ctx: ContextNode, - ) -> Result<(), ExprErr> { - self.parse_ctx_expr(rhs_expr, ctx)?; - self.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(rhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "Assign operation had no right hand side".to_string())) - }; - - if matches!(rhs_paths, ExprRet::CtxKilled(_)) { - ctx.push_expr(rhs_paths, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.parse_ctx_expr(lhs_expr, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(lhs_paths) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
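The increment and decrement matcher above encodes the usual pre/post distinction: both forms shift the variable's range by one, but a pre-op pushes the advanced variable onto the expression stack while a post-op pushes a temporary holding the old value. A minimal sketch of that behavior on plain integers, with no ranges or context graph involved (names are illustrative only):

    /// Apply `++x`, `x++`, `--x`, or `x--` to a stored value and return what
    /// the expression itself evaluates to: pre-ops yield the updated value,
    /// post-ops yield a copy of the old one.
    fn in_de_crement(value: &mut i64, pre: bool, increment: bool) -> i64 {
        let old = *value;
        *value += if increment { 1 } else { -1 };
        if pre { *value } else { old }
    }

    fn main() {
        let mut x = 5;
        assert_eq!(in_de_crement(&mut x, true, true), 6); // ++x
        assert_eq!(in_de_crement(&mut x, false, true), 6); // x++ still reads 6
        assert_eq!(x, 7);
        assert_eq!(in_de_crement(&mut x, true, false), 6); // --x
        assert_eq!(in_de_crement(&mut x, false, false), 6); // x-- still reads 6
        assert_eq!(x, 5);
    }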
else { - return Err(ExprErr::NoLhs(loc, "Assign operation had no left hand side".to_string())) - }; - if matches!(lhs_paths, ExprRet::CtxKilled(_)) { - ctx.push_expr(lhs_paths, analyzer).into_expr_err(loc)?; - return Ok(()); - } - analyzer.match_assign_sides(ctx, loc, &lhs_paths.flatten(), &rhs_paths)?; - Ok(()) - }) - }) - } - - fn match_assign_sides( - &mut self, - ctx: ContextNode, - loc: Loc, - lhs_paths: &ExprRet, - rhs_paths: &ExprRet, - ) -> Result<(), ExprErr> { - match (lhs_paths, rhs_paths) { - (_, ExprRet::Null) | (ExprRet::Null, _) => Ok(()), - (ExprRet::CtxKilled(kind), _) | (_, ExprRet::CtxKilled(kind)) => { - ctx.kill(self, loc, *kind).into_expr_err(loc)?; - Ok(()) - } - (ExprRet::Single(lhs), ExprRet::SingleLiteral(rhs)) => { - let lhs_cvar = ContextVarNode::from(*lhs).latest_version(self); - let rhs_cvar = ContextVarNode::from(*rhs).latest_version(self); - let res = rhs_cvar - .literal_cast_from(&lhs_cvar, self) - .into_expr_err(loc); - let _ = self.add_if_err(res); - ctx.push_expr(self.assign(loc, lhs_cvar, rhs_cvar, ctx)?, self) - .into_expr_err(loc)?; - Ok(()) - } - (ExprRet::Single(lhs), ExprRet::Single(rhs)) => { - let lhs_cvar = ContextVarNode::from(*lhs).latest_version(self); - let rhs_cvar = ContextVarNode::from(*rhs).latest_version(self); - ctx.push_expr(self.assign(loc, lhs_cvar, rhs_cvar, ctx)?, self) - .into_expr_err(loc)?; - Ok(()) - } - (l @ ExprRet::Single(_), ExprRet::Multi(rhs_sides)) => rhs_sides - .iter() - .try_for_each(|expr_ret| self.match_assign_sides(ctx, loc, l, expr_ret)), - (ExprRet::Multi(lhs_sides), r @ ExprRet::Single(_) | r @ ExprRet::SingleLiteral(_)) => { - lhs_sides - .iter() - .try_for_each(|expr_ret| self.match_assign_sides(ctx, loc, expr_ret, r)) - } - (ExprRet::Multi(lhs_sides), ExprRet::Multi(rhs_sides)) => { - // try to zip sides if they are the same length - if lhs_sides.len() == rhs_sides.len() { - lhs_sides.iter().zip(rhs_sides.iter()).try_for_each( - |(lhs_expr_ret, rhs_expr_ret)| { - self.match_assign_sides(ctx, loc, lhs_expr_ret, rhs_expr_ret) - }, - ) - } else { - rhs_sides.iter().try_for_each(|rhs_expr_ret| { - self.match_assign_sides(ctx, loc, lhs_paths, rhs_expr_ret) - }) - } - } - (e, f) => todo!("any: {:?} {:?}", e, f), - } - } - - fn assign( - &mut self, - loc: Loc, - lhs_cvar: ContextVarNode, - rhs_cvar: ContextVarNode, - ctx: ContextNode, - ) -> Result { - tracing::trace!( - "assigning: {} to {}", - lhs_cvar.display_name(self).unwrap(), - rhs_cvar.display_name(self).unwrap() - ); - let (new_lower_bound, new_upper_bound): (Elem, Elem) = ( - Elem::from(rhs_cvar.latest_version(self)), - Elem::from(rhs_cvar.latest_version(self)), - ); - - let new_lhs = self.advance_var_in_ctx(lhs_cvar.latest_version(self), loc, ctx)?; - if rhs_cvar.underlying(self).into_expr_err(loc)?.is_return { - if let Some(rhs_ctx) = rhs_cvar.maybe_ctx(self) { - self.add_edge( - rhs_cvar, - new_lhs, - Edge::Context(ContextEdge::ReturnAssign( - rhs_ctx.underlying(self).unwrap().ext_fn_call.is_some(), - )), - ); - } else { - return Err(ExprErr::GraphError( - loc, - GraphError::DetachedVariable(format!( - "No context for variable: {}, node idx: {}, curr ctx: {}, lhs ctx: {}", - rhs_cvar.display_name(self).unwrap(), - rhs_cvar.0, - ctx.path(self), - lhs_cvar.ctx(self).path(self) - )), - )); - } - } - if !lhs_cvar.ty_eq(&rhs_cvar, self).into_expr_err(loc)? { - let cast_to_min = match lhs_cvar.range_min(self).into_expr_err(loc)? { - Some(v) => v, - None => { - return Err(ExprErr::BadRange( - loc, - format!( - "No range during cast? 
{:?}, {:?}", - lhs_cvar.underlying(self).unwrap(), - rhs_cvar.underlying(self).unwrap(), - ), - )) - } - }; - - let cast_to_max = match lhs_cvar.range_max(self).into_expr_err(loc)? { - Some(v) => v, - None => { - return Err(ExprErr::BadRange( - loc, - format!( - "No range during cast? {:?}, {:?}", - lhs_cvar.underlying(self).unwrap(), - rhs_cvar.underlying(self).unwrap(), - ), - )) - } - }; - - let _ = new_lhs.try_set_range_min(self, new_lower_bound.cast(cast_to_min)); - let _ = new_lhs.try_set_range_max(self, new_upper_bound.cast(cast_to_max)); - } else { - let _ = new_lhs.try_set_range_min(self, new_lower_bound); - let _ = new_lhs.try_set_range_max(self, new_upper_bound); - } - if let Some(rhs_range) = rhs_cvar.ref_range(self).into_expr_err(loc)? { - let res = new_lhs - .try_set_range_exclusions(self, rhs_range.exclusions.clone()) - .into_expr_err(loc); - let _ = self.add_if_err(res); - } - - if let Some(arr) = lhs_cvar.index_to_array(self) { - if let Some(index) = lhs_cvar.index_access_to_index(self) { - let next_arr = self.advance_var_in_ctx(arr, loc, ctx)?; - if next_arr - .underlying(self) - .into_expr_err(loc)? - .ty - .is_dyn_builtin(self) - .into_expr_err(loc)? - { - if let Some(r) = next_arr.ref_range(self).into_expr_err(loc)? { - let min = r.evaled_range_min(self).into_expr_err(loc)?; - let max = r.evaled_range_max(self).into_expr_err(loc)?; - - if let Some(mut rd) = min.maybe_range_dyn() { - rd.val.insert(Elem::from(index), Elem::from(rhs_cvar)); - let res = next_arr - .set_range_min(self, Elem::ConcreteDyn(Box::new(rd))) - .into_expr_err(loc); - let _ = self.add_if_err(res); - } - - if let Some(mut rd) = max.maybe_range_dyn() { - rd.val.insert(Elem::from(index), Elem::from(rhs_cvar)); - let res = next_arr - .set_range_max(self, Elem::ConcreteDyn(Box::new(rd))) - .into_expr_err(loc); - let _ = self.add_if_err(res); - } - } - } - } - } - - Ok(ExprRet::Single(new_lhs.into())) - } - - #[tracing::instrument(level = "trace", skip_all, fields(ctx = %ctx.path(self)))] - fn advance_var_in_ctx( - &mut self, - cvar_node: ContextVarNode, - loc: Loc, - ctx: ContextNode, - ) -> Result { - tracing::trace!( - "advancing variable: {}", - cvar_node.display_name(self).into_expr_err(loc)? - ); - if let Some(cvar) = cvar_node.next_version(self) { - panic!( - "Not latest version of: {}", - cvar.display_name(self).unwrap() - ); - } - if let Some(child) = ctx.underlying(self).into_expr_err(loc)?.child { - return Err(ExprErr::GraphError( - loc, - GraphError::VariableUpdateInOldContext(format!( - "Variable update of {} in old context: parent: {}, child: {:#?}", - cvar_node.display_name(self).unwrap(), - ctx.path(self), - child - )), - )); - } - let mut new_cvar = cvar_node - .latest_version(self) - .underlying(self) - .into_expr_err(loc)? 
- .clone(); - // get the old context - let new_cvarnode; - - 'a: { - if let Some(old_ctx) = cvar_node.maybe_ctx(self) { - // get the previous version to remove and prevent spurious nodes - if let Some(prev) = cvar_node.latest_version(self).previous_version(self) { - let prev_version = prev.underlying(self).into_expr_err(loc)?; - // check if there was no change between the previous version and the latest version - if prev_version.eq_ignore_loc(&new_cvar) && old_ctx == ctx { - // there was no change in the current context, just give them the current variable - new_cvarnode = cvar_node.into(); - break 'a; - } - } - - new_cvar.loc = Some(loc); - new_cvarnode = self.add_node(Node::ContextVar(new_cvar)); - if old_ctx != ctx { - ctx.add_var(new_cvarnode.into(), self).into_expr_err(loc)?; - self.add_edge(new_cvarnode, ctx, Edge::Context(ContextEdge::Variable)); - self.add_edge( - new_cvarnode, - cvar_node.0, - Edge::Context(ContextEdge::InheritedVariable), - ); - } else { - self.add_edge(new_cvarnode, cvar_node.0, Edge::Context(ContextEdge::Prev)); - } - } else { - new_cvar.loc = Some(loc); - new_cvarnode = self.add_node(Node::ContextVar(new_cvar)); - self.add_edge(new_cvarnode, cvar_node.0, Edge::Context(ContextEdge::Prev)); - } - } - - Ok(ContextVarNode::from(new_cvarnode)) - } - - fn advance_var_in_curr_ctx( - &mut self, - cvar_node: ContextVarNode, - loc: Loc, - ) -> Result { - tracing::trace!( - "advancing variable: {}", - cvar_node.display_name(self).into_expr_err(loc)? - ); - if let Some(cvar) = cvar_node.next_version(self) { - panic!( - "Not latest version of: {}", - cvar.display_name(self).unwrap() - ); - } - let mut new_cvar = cvar_node - .latest_version(self) - .underlying(self) - .into_expr_err(loc)? - .clone(); - new_cvar.loc = Some(loc); - - let new_cvarnode = self.add_node(Node::ContextVar(new_cvar)); - self.add_edge(new_cvarnode, cvar_node.0, Edge::Context(ContextEdge::Prev)); - - Ok(ContextVarNode::from(new_cvarnode)) - } - - fn advance_var_underlying(&mut self, cvar_node: ContextVarNode, loc: Loc) -> &mut ContextVar { - assert_eq!(None, cvar_node.next_version(self)); - let mut new_cvar = cvar_node - .latest_version(self) - .underlying(self) - .unwrap() - .clone(); - new_cvar.loc = Some(loc); - let new_cvarnode = self.add_node(Node::ContextVar(new_cvar)); - self.add_edge(new_cvarnode, cvar_node.0, Edge::Context(ContextEdge::Prev)); - ContextVarNode::from(new_cvarnode) - .underlying_mut(self) - .unwrap() - } - - fn apply_to_edges( - &mut self, - ctx: ContextNode, - loc: Loc, - closure: &impl Fn(&mut Self, ContextNode, Loc) -> Result<(), ExprErr>, - ) -> Result<(), ExprErr> { - let live_edges = ctx.live_edges(self).into_expr_err(loc)?; - tracing::trace!( - "Applying to live edges of: {}. edges: {:#?}", - ctx.path(self), - live_edges.iter().map(|i| i.path(self)).collect::>(), - ); - if !ctx.killed_or_ret(self).into_expr_err(loc)? { - if ctx.underlying(self).into_expr_err(loc)?.child.is_some() { - if live_edges.is_empty() { - Ok(()) - } else { - live_edges - .iter() - .try_for_each(|ctx| closure(self, *ctx, loc)) - } - } else if live_edges.is_empty() { - closure(self, ctx, loc) - } else { - live_edges - .iter() - .try_for_each(|ctx| closure(self, *ctx, loc)) - } - } else { - Ok(()) - } - } - - fn take_from_edge( - &mut self, - ctx: ContextNode, - loc: Loc, - closure: &impl Fn(&mut Self, ContextNode, Loc) -> Result, - ) -> Result, ExprErr> { - let live_edges = ctx.live_edges(self).into_expr_err(loc)?; - tracing::trace!( - "Taking from live edges of: {}. 
edges: {:#?}", - ctx.path(self), - live_edges.iter().map(|i| i.path(self)).collect::>(), - ); - - if live_edges.is_empty() { - Ok(vec![closure(self, ctx, loc)?]) - } else { - live_edges - .iter() - .map(|ctx| closure(self, *ctx, loc)) - .collect::, ExprErr>>() - } - } -} diff --git a/src/context/queries/mod.rs b/src/context/queries/mod.rs deleted file mode 100644 index 18fde10f..00000000 --- a/src/context/queries/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod storage_write; -pub mod taint; diff --git a/src/context/queries/storage_write/access.rs b/src/context/queries/storage_write/access.rs deleted file mode 100644 index 86efab45..00000000 --- a/src/context/queries/storage_write/access.rs +++ /dev/null @@ -1,93 +0,0 @@ -use crate::analyzers::{VarBoundAnalyzer, *}; -use shared::{ - analyzer::*, - context::ContextNode, - nodes::{TypeNode, VarType}, - NodeIdx, -}; - -use ariadne::{Cache, Color, Config, Label, Report, ReportKind}; -use std::collections::BTreeMap; - -#[derive(Debug, Clone)] -pub struct AccessStorageWriteReport { - pub msgs: Vec, -} - -impl AccessStorageWriteReport { - pub fn new(msgs: Vec) -> Self { - Self { msgs } - } -} - -impl ReportDisplay for AccessStorageWriteReport { - fn report_kind(&self) -> ReportKind { - ReportKind::Custom("Access Analysis", Color::Green) - } - fn msg(&self, _analyzer: &impl GraphLike) -> String { - self.msgs.join(";\n") - } - - fn labels(&self, _analyzer: &impl GraphLike) -> Vec> { - vec![] - } - - fn reports(&self, analyzer: &impl GraphLike) -> Vec> { - let report = Report::build(self.report_kind(), "".to_string(), 0) - .with_message(self.msg(analyzer)) - .with_config( - Config::default() - .with_cross_gap(false) - .with_underlines(true) - .with_tab_width(4), - ); - vec![report.finish()] - } - - fn print_reports(&self, src: &mut impl Cache, analyzer: &impl GraphLike) { - let reports = &self.reports(analyzer); - for report in reports.iter() { - report.print(&mut *src).unwrap(); - } - } - - fn eprint_reports(&self, mut src: &mut impl Cache, analyzer: &impl GraphLike) { - let reports = &self.reports(analyzer); - reports.iter().for_each(|report| { - report.eprint(&mut src).unwrap(); - }); - } -} - -impl AccessStorageWriteQuery for T where T: VarBoundAnalyzer + Search + AnalyzerLike + Sized {} -pub trait AccessStorageWriteQuery: VarBoundAnalyzer + Search + AnalyzerLike + Sized { - #[allow(clippy::too_many_arguments)] - fn access_query( - &self, - _entry: NodeIdx, - _file_mapping: &'_ BTreeMap, - _report_config: ReportConfig, - _contract_name: String, - _storage_var_name: String, - ) -> AccessStorageWriteReport { - todo!() - } - - fn recurse(&self, ctx: ContextNode, storage_var_name: String) -> Vec { - if let Some(cvar) = ctx.var_by_name(self, &storage_var_name) { - match cvar.ty(self).unwrap() { - VarType::User(TypeNode::Struct(s_node), _) => { - let fields = s_node - .fields(self) - .iter() - .map(|field| format!("{}.{}", storage_var_name, field.name(self).unwrap())) - .collect(); - fields - } - _ => vec![storage_var_name], - } - } else { - vec![storage_var_name] - } - } -} diff --git a/src/context/queries/storage_write/mod.rs b/src/context/queries/storage_write/mod.rs deleted file mode 100644 index 17f8adc3..00000000 --- a/src/context/queries/storage_write/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod access; -pub use access::*; - -mod target; -pub use target::*; diff --git a/src/context/queries/storage_write/target.rs b/src/context/queries/storage_write/target.rs deleted file mode 100644 index d244f04d..00000000 --- 
a/src/context/queries/storage_write/target.rs +++ /dev/null @@ -1,156 +0,0 @@ -use crate::analyzers::{VarBoundAnalyzer, *}; -use shared::{ - analyzer::*, - range::{range_string::ToRangeString, Range, SolcRange}, - NodeIdx, -}; - -use ariadne::{Cache, Color, Config, Fmt, Label, Report, ReportKind, Span}; -use std::collections::BTreeMap; - -#[derive(Debug, Clone)] -pub struct StorageRangeReport { - pub target: SolcRange, - pub write_loc: Option, - pub analysis: VarBoundAnalysis, -} - -impl ReportDisplay for StorageRangeReport { - fn report_kind(&self) -> ReportKind { - ReportKind::Custom("Storage Write Query", Color::Green) - } - fn msg(&self, analyzer: &impl GraphLike) -> String { - let bounds_string = self - .analysis - .ctx - .ctx_deps(analyzer) - .unwrap() - .iter() - .filter_map(|(_name, cvar)| { - let min = if self.analysis.report_config.eval_bounds { - cvar.range(analyzer) - .unwrap()? - .evaled_range_min(analyzer) - .unwrap() - .to_range_string(false, analyzer) - .s - } else if self.analysis.report_config.simplify_bounds { - cvar.range(analyzer) - .unwrap()? - .simplified_range_min(analyzer) - .unwrap() - .to_range_string(false, analyzer) - .s - } else { - cvar.range(analyzer) - .unwrap()? - .range_min() - .to_range_string(false, analyzer) - .s - }; - - let max = if self.analysis.report_config.eval_bounds { - cvar.range(analyzer) - .unwrap()? - .evaled_range_max(analyzer) - .unwrap() - .to_range_string(true, analyzer) - .s - } else if self.analysis.report_config.simplify_bounds { - cvar.range(analyzer) - .unwrap()? - .simplified_range_max(analyzer) - .unwrap() - .to_range_string(true, analyzer) - .s - } else { - cvar.range(analyzer) - .unwrap()? - .range_max() - .to_range_string(true, analyzer) - .s - }; - - Some(format!( - "\"{}\" ∈ {{{}, {}}}", - cvar.display_name(analyzer).unwrap(), - min, - max, - )) - }) - .collect::>() - .join(" ∧ "); - format!( - "Found storage write that could lead to target value in ctx {}: \"{}\" ∈ {{{}, {}}}{}{} ", - self.analysis.ctx.path(analyzer), - self.analysis.var_name, - self.target - .evaled_range_min(analyzer).unwrap() .to_range_string(false, analyzer) - .s, - self.target - .evaled_range_max(analyzer).unwrap() .to_range_string(true, analyzer) - .s, - if bounds_string.is_empty() { - "" - } else { - ", where " - }, - bounds_string.fg(Color::Yellow) - ) - } - - fn labels(&self, _analyzer: &impl GraphLike) -> Vec> { - vec![] - } - - fn reports(&self, analyzer: &impl GraphLike) -> Vec> { - let mut report = Report::build( - self.analysis.report_kind(), - self.analysis.var_def.0.source(), - self.analysis.var_def.0.start(), - ) - .with_message(self.msg(analyzer)) - .with_config( - Config::default() - .with_cross_gap(false) - .with_underlines(true) - .with_tab_width(4), - ); - - report.add_labels(self.analysis.labels(analyzer)); - - let reports = vec![report.finish()]; - reports - } - - fn print_reports(&self, src: &mut impl Cache, analyzer: &impl GraphLike) { - let reports = &self.reports(analyzer); - for report in reports.iter() { - report.print(&mut *src).unwrap(); - } - } - - fn eprint_reports(&self, mut src: &mut impl Cache, analyzer: &impl GraphLike) { - let reports = &self.reports(analyzer); - reports.iter().for_each(|report| { - report.eprint(&mut src).unwrap(); - }); - } -} - -impl StorageRangeQuery for T where T: VarBoundAnalyzer + Search + AnalyzerLike + Sized {} -pub trait StorageRangeQuery: VarBoundAnalyzer + Search + AnalyzerLike + Sized { - #[allow(clippy::too_many_arguments)] - fn func_query( - &mut self, - _entry: NodeIdx, - _file_mapping: 
&'_ BTreeMap, - _report_config: ReportConfig, - _contract_name: String, - _func_name: String, - _storage_var_name: String, - _target: SolcRange, - ) -> Option { - todo!() - } -} diff --git a/src/context/queries/taint.rs b/src/context/queries/taint.rs deleted file mode 100644 index 83d03571..00000000 --- a/src/context/queries/taint.rs +++ /dev/null @@ -1,68 +0,0 @@ -use crate::analyzers::{VarBoundAnalyzer, *}; -use shared::context::CallFork; - -use shared::{analyzer::*, NodeIdx}; - -use ariadne::{Cache, Color, Config, Label, Report, ReportKind}; - -#[derive(Debug, Clone)] -pub struct TaintReport { - pub msgs: Vec, -} - -impl TaintReport { - pub fn new(msgs: Vec) -> Self { - Self { msgs } - } -} - -impl ReportDisplay for TaintReport { - fn report_kind(&self) -> ReportKind { - ReportKind::Custom("Taint Analysis", Color::Green) - } - fn msg(&self, _analyzer: &impl GraphLike) -> String { - self.msgs.join(";\n") - } - - fn labels(&self, _analyzer: &impl GraphLike) -> Vec> { - vec![] - } - - fn reports(&self, analyzer: &impl GraphLike) -> Vec> { - let report = Report::build(self.report_kind(), "".to_string(), 0) - .with_message(self.msg(analyzer)) - .with_config( - Config::default() - .with_cross_gap(false) - .with_underlines(true) - .with_tab_width(4), - ); - vec![report.finish()] - } - - fn print_reports(&self, src: &mut impl Cache, analyzer: &impl GraphLike) { - let reports = &self.reports(analyzer); - for report in reports.iter() { - report.print(&mut *src).unwrap(); - } - } - - fn eprint_reports(&self, mut src: &mut impl Cache, analyzer: &impl GraphLike) { - let reports = &self.reports(analyzer); - reports.iter().for_each(|report| { - report.eprint(&mut src).unwrap(); - }); - } -} - -impl TaintQuery for T where T: VarBoundAnalyzer + Search + AnalyzerLike + Sized {} -pub trait TaintQuery: VarBoundAnalyzer + Search + AnalyzerLike + Sized { - #[allow(clippy::too_many_arguments)] - fn taint_query(&self, _entry: NodeIdx, _contract_name: String) { - todo!() - } - - fn recurse_children(&self, _child: CallFork) { - todo!() - } -} diff --git a/src/context/yul/mod.rs b/src/context/yul/mod.rs deleted file mode 100644 index 223167de..00000000 --- a/src/context/yul/mod.rs +++ /dev/null @@ -1,343 +0,0 @@ -use crate::context::exprs::IntoExprErr; -use crate::context::ContextBuilder; -use crate::context::ExprParser; -use crate::AnalyzerLike; -use crate::ExprErr; -use shared::context::Context; -use shared::context::ContextVar; -use shared::context::ContextVarNode; -use shared::context::ExprRet; -use shared::nodes::Builtin; -use shared::nodes::VarType; -use shared::{ - context::{ContextEdge, ContextNode}, - Edge, Node, -}; -use solang_parser::helpers::CodeLocation; -use solang_parser::pt::Expression; -use solang_parser::pt::Loc; - -use solang_parser::pt::{YulExpression, YulFor, YulStatement, YulSwitch}; - -mod yul_cond_op; -pub use yul_cond_op::*; - -mod yul_funcs; -pub use yul_funcs::*; - -impl YulBuilder for T where - T: AnalyzerLike + Sized + ExprParser -{ -} -pub trait YulBuilder: - AnalyzerLike + Sized + ExprParser -{ - #[tracing::instrument(level = "trace", skip_all, fields(ctx = %ctx.path(self)))] - fn parse_ctx_yul_statement(&mut self, stmt: &YulStatement, ctx: ContextNode) - where - Self: Sized, - { - if let Some(true) = self.add_if_err(ctx.is_ended(self).into_expr_err(stmt.loc())) { - return; - } - if let Some(live_edges) = self.add_if_err(ctx.live_edges(self).into_expr_err(stmt.loc())) { - if live_edges.is_empty() { - self.parse_ctx_yul_stmt_inner(stmt, ctx) - } else { - 
live_edges.iter().for_each(|fork_ctx| { - self.parse_ctx_yul_stmt_inner(stmt, *fork_ctx); - }); - } - } - } - - #[tracing::instrument(level = "trace", skip_all)] - fn parse_ctx_yul_stmt_inner(&mut self, stmt: &YulStatement, ctx: ContextNode) - where - Self: Sized, - { - use YulStatement::*; - // println!("ctx: {}, yul stmt: {:?}", ctx.path(self), stmt); - - let res = ctx - .pop_expr_latest(stmt.loc(), self) - .into_expr_err(stmt.loc()); - let _ = self.add_if_err(res); - - if ctx.is_killed(self).unwrap() { - return; - } - let ret = self.apply_to_edges(ctx, stmt.loc(), &|analyzer, ctx, _loc| { - match stmt { - Assign(loc, yul_exprs, yul_expr) => { - match yul_exprs - .iter() - .try_for_each(|expr| analyzer.parse_ctx_yul_expr(expr, ctx)) - { - Ok(()) => { - analyzer.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(lhs_side) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoLhs(loc, "No left hand side assignments in yul block".to_string())) - }; - if matches!(lhs_side, ExprRet::CtxKilled(_)) { - ctx.push_expr(lhs_side, analyzer).into_expr_err(loc)?; - return Ok(()); - } - - analyzer.parse_ctx_yul_expr(yul_expr, ctx)?; - analyzer.apply_to_edges(ctx, loc, &|analyzer, ctx, loc| { - let Some(rhs_side) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? else { - return Err(ExprErr::NoRhs(loc, "No right hand side assignments in yul block".to_string())) - }; - - if matches!(rhs_side, ExprRet::CtxKilled(_)) { - ctx.push_expr(rhs_side, analyzer).into_expr_err(loc)?; - return Ok(()); - } - - analyzer.match_assign_sides( - ctx, - loc, - &lhs_side, - &rhs_side, - ) - }) - }) - } - Err(e) => Err(e), - } - } - VariableDeclaration(loc, yul_idents, maybe_yul_expr) => { - let nodes = yul_idents - .iter() - .map(|ident| { - let b_ty = analyzer.builtin_or_add(Builtin::Uint(256)); - let var = ContextVar { - loc: Some(ident.loc), - name: ident.id.name.clone(), - display_name: ident.id.name.clone(), - storage: None, - is_tmp: false, - tmp_of: None, - is_symbolic: true, - is_return: false, - ty: VarType::try_from_idx(analyzer, b_ty).unwrap(), - }; - let cvar = ContextVarNode::from(analyzer.add_node(Node::ContextVar(var))); - ctx.add_var(cvar, analyzer).unwrap(); - analyzer.add_edge(cvar, ctx, Edge::Context(ContextEdge::Variable)); - analyzer.advance_var_in_ctx(cvar, *loc, ctx).unwrap() - }) - .collect::>(); - - if let Some(yul_expr) = maybe_yul_expr { - analyzer.parse_ctx_yul_expr(yul_expr, ctx)?; - analyzer.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let Some(ret) = ctx.pop_expr_latest(loc, analyzer).into_expr_err(loc)? 
else { - return Err(ExprErr::NoRhs(loc, "No right hand side assignments in yul block".to_string())) - }; - - if matches!(ret, ExprRet::CtxKilled(_)) { - ctx.push_expr(ret, analyzer).into_expr_err(loc)?; - return Ok(()); - } - - analyzer.match_assign_yul(ctx, loc, &nodes, ret) - - }) - } else { - Ok(()) - } - } - If(loc, yul_expr, yul_block) => { - analyzer.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - let ret = analyzer.yul_cond_op_stmt(loc, yul_expr, yul_block, ctx); - let _ = analyzer.add_if_err(ret); - Ok(()) - }) - } - For(YulFor { - loc, - init_block: _, - condition: _, - post_block: _, - execution_block: _, - }) => { - let sctx = Context::new_subctx(ctx, None, *loc, None, None, false, analyzer, None) - .into_expr_err(*loc)?; - let subctx = ContextNode::from(analyzer.add_node(Node::Context(sctx))); - ctx.set_child_call(subctx, analyzer).into_expr_err(*loc)?; - analyzer.apply_to_edges(subctx, *loc, &|analyzer, subctx, loc| { - let vars = subctx.local_vars(analyzer).clone(); - vars.iter().for_each(|(name, var)| { - // widen to max range - if let Some(inheritor_var) = ctx.var_by_name(analyzer, name) { - let inheritor_var = inheritor_var.latest_version(analyzer); - if let Some(r) = var - .underlying(analyzer) - .unwrap() - .ty - .default_range(analyzer) - .unwrap() - { - let new_inheritor_var = analyzer - .advance_var_in_ctx(inheritor_var, loc, ctx) - .unwrap(); - let res = new_inheritor_var - .set_range_min(analyzer, r.min) - .into_expr_err(loc); - let _ = analyzer.add_if_err(res); - let res = new_inheritor_var - .set_range_max(analyzer, r.max) - .into_expr_err(loc); - let _ = analyzer.add_if_err(res); - } - } - }); - Ok(()) - }) - } - Switch(YulSwitch { - loc, - condition, - cases, - default, - }) => { - analyzer.apply_to_edges(ctx, *loc, &|analyzer, ctx, loc| { - analyzer.yul_switch_stmt(loc, condition.clone(), cases.to_vec(), default.clone(), ctx) - }) - } - Leave(loc) => { - Err(ExprErr::Todo(*loc, "Yul `leave` statements are not currently supported".to_string())) - } - Break(loc) => { - Err(ExprErr::Todo(*loc, "Yul `break` statements are not currently supported".to_string())) - } - Continue(loc) => { - Err(ExprErr::Todo(*loc, "Yul `continue` statements are not currently supported".to_string())) - } - Block(yul_block) => { - yul_block - .statements - .iter() - .for_each(|stmt| analyzer.parse_ctx_yul_stmt_inner(stmt, ctx)); - Ok(()) - } - FunctionDefinition(yul_func_def) => { - Err(ExprErr::Todo(yul_func_def.loc(), "Yul `function` defintions are not currently supported".to_string())) - } - FunctionCall(yul_func_call) => { - analyzer.yul_func_call(yul_func_call, ctx) - } - Error(loc) => { - Err(ExprErr::ParseError(*loc, "Could not parse this yul statement".to_string())) - } - } - }); - let _ = self.add_if_err(ret); - } - - #[tracing::instrument(level = "trace", skip_all)] - fn parse_ctx_yul_expr( - &mut self, - expr: &YulExpression, - ctx: ContextNode, - ) -> Result<(), ExprErr> { - tracing::trace!("Parsing yul expression: {expr:?}"); - - let edges = ctx.live_edges(self).into_expr_err(expr.loc())?; - if edges.is_empty() { - self.parse_ctx_yul_expr_inner(expr, ctx) - } else { - edges - .iter() - .try_for_each(|fork_ctx| self.parse_ctx_yul_expr(expr, *fork_ctx))?; - Ok(()) - } - } - - fn parse_ctx_yul_expr_inner( - &mut self, - expr: &YulExpression, - ctx: ContextNode, - ) -> Result<(), ExprErr> { - use YulExpression::*; - match expr { - BoolLiteral(loc, b, _) => self.bool_literal(ctx, *loc, *b), - NumberLiteral(loc, int, expr, _unit) => { - self.number_literal(ctx, *loc, int, 
expr, false) - } - HexNumberLiteral(loc, b, _unit) => self.hex_num_literal(ctx, *loc, b, false), - HexStringLiteral(lit, _) => self.hex_literals(ctx, &[lit.clone()]), - StringLiteral(lit, _) => self.string_literal(ctx, lit.loc, &lit.string), - Variable(ident) => self.variable(ident, ctx, None), - FunctionCall(yul_func_call) => self.yul_func_call(yul_func_call, ctx), - SuffixAccess(_loc, _yul_member_expr, _ident) => Err(ExprErr::Todo( - expr.loc(), - "Yul member access not yet supported".to_string(), - )), - } - } - - fn match_assign_yul( - &mut self, - _ctx: ContextNode, - loc: Loc, - nodes: &[ContextVarNode], - ret: ExprRet, - ) -> Result<(), ExprErr> { - match ret { - s @ ExprRet::Single(_) | s @ ExprRet::SingleLiteral(_) => { - self.match_assign_yul_inner(loc, &nodes[0], s)?; - } - ExprRet::Multi(inner) => { - if inner.len() == nodes.len() { - inner - .into_iter() - .zip(nodes.iter()) - .map(|(ret, node)| self.match_assign_yul_inner(loc, node, ret)) - .collect::, ExprErr>>()?; - } else { - return Err(ExprErr::Todo( - loc, - format!("Differing number of assignees and assignors in yul expression, assignors: {}, assignees: {}", nodes.len(), inner.len()), - )); - }; - } - ExprRet::CtxKilled(_kind) => {} - ExprRet::Null => {} - } - - Ok(()) - } - - fn match_assign_yul_inner( - &mut self, - loc: Loc, - node: &ContextVarNode, - ret: ExprRet, - ) -> Result<(), ExprErr> { - match ret.flatten() { - ExprRet::Single(idx) | ExprRet::SingleLiteral(idx) => { - let assign = ContextVarNode::from(idx); - let assign_ty = assign.underlying(self).into_expr_err(loc)?.ty.clone(); - if assign_ty.is_dyn(self).into_expr_err(loc)? { - let b_ty = self.builtin_or_add(Builtin::Bytes(32)); - node.underlying_mut(self).into_expr_err(loc)?.ty = - VarType::try_from_idx(self, b_ty).unwrap(); - } else { - node.underlying_mut(self).into_expr_err(loc)?.ty = assign_ty; - } - } - ExprRet::Multi(_inner) => { - return Err(ExprErr::Todo( - loc, - "Multi in single assignment yul expression is unhandled".to_string(), - )) - } - ExprRet::CtxKilled(..) 
=> {} - ExprRet::Null => {} - } - Ok(()) - } -} diff --git a/src/lib.rs b/src/lib.rs deleted file mode 100644 index 3d67b73b..00000000 --- a/src/lib.rs +++ /dev/null @@ -1,1134 +0,0 @@ -use crate::analyzers::LocStrSpan; -use crate::context::exprs::IntoExprErr; -use crate::exprs::ExprErr; -use ariadne::Source; -use ethers_core::types::U256; -use shared::analyzer::*; -use shared::context::ContextNode; -use shared::context::ExprRet; -use shared::context::{Context, ContextEdge}; -use shared::nodes::*; -use shared::{Edge, Node, NodeIdx}; -use solang_parser::diagnostics::Diagnostic; -use solang_parser::helpers::CodeLocation; -use solang_parser::pt::Identifier; -use solang_parser::pt::Import; -use std::collections::BTreeMap; - -use std::ffi::OsString; -use std::path::Path; - -use solang_parser::pt::{ - ContractDefinition, ContractPart, EnumDefinition, ErrorDefinition, Expression, - FunctionDefinition, FunctionTy, SourceUnit, SourceUnitPart, StructDefinition, TypeDefinition, - Using, UsingList, VariableDefinition, -}; -use std::path::PathBuf; -use std::{collections::HashMap, fs}; - -use ariadne::{Cache, Color, Config, Fmt, Label, Report, ReportKind, Span}; -use petgraph::{graph::*, Directed}; - -mod builtin_fns; - -pub mod context; -// pub mod range; -use context::*; -pub use shared; - -#[derive(Debug, Clone, Default)] -pub struct FinalPassItem { - pub funcs: Vec, - pub usings: Vec<(Using, NodeIdx)>, - pub inherits: Vec<(ContractNode, Vec)>, - pub vars: Vec<(VarNode, NodeIdx)>, -} -impl FinalPassItem { - pub fn new( - funcs: Vec, - usings: Vec<(Using, NodeIdx)>, - inherits: Vec<(ContractNode, Vec)>, - vars: Vec<(VarNode, NodeIdx)>, - ) -> Self { - Self { - funcs, - usings, - inherits, - vars, - } - } -} - -#[derive(Debug, Clone)] -pub struct Analyzer { - /// The root path of the contract to be analyzed - pub root: PathBuf, - /// Solidity remappings - as would be passed into the solidity compiler - pub remappings: Vec<(String, String)>, - /// Imported sources - the canonicalized string to the entry source element index - pub imported_srcs: BTreeMap>, - /// Since we use a staged approach to analysis, we analyze all user types first then go through and patch up any missing or unresolved - /// parts of a contract (i.e. we parsed a struct which is used as an input to a function signature, we have to know about the struct) - pub final_pass_items: Vec, - /// The next file number to use when parsing a new file - pub file_no: usize, - /// The index of the current `msg` node - pub msg: MsgNode, - /// The index of the current `block` node - pub block: BlockNode, - /// The underlying graph holding all of the elements of the contracts - pub graph: Graph, - /// The entry node - this is the root of the dag, all relevant things should eventually point back to this (otherwise can be discarded) - pub entry: NodeIdx, - /// A mapping of a solidity builtin to the index in the graph - pub builtins: HashMap, - /// A mapping of a user type's name to the index in the graph (i.e. `struct A` would mapped `A` -> index) - pub user_types: HashMap, - /// A mapping of solidity builtin function to a [Function] struct, i.e. `ecrecover` -> `Function { name: "ecrecover", ..}` - pub builtin_fns: HashMap, - /// A mapping of solidity builtin functions to their indices in the graph - pub builtin_fn_nodes: HashMap, - /// A mapping of solidity builtin function names to their parameters and returns, i.e. 
`ecrecover` -> `([hash, r, s, v], [signer])` - pub builtin_fn_inputs: HashMap, Vec)>, - /// Accumulated errors that happened while analyzing - pub expr_errs: Vec, - /// The maximum depth to analyze to (i.e. call depth) - pub max_depth: usize, - /// The maximum number of forks throughout the lifetime of the analysis. - pub max_width: usize, - /// Dummy function used during parsing to attach contexts to for more complex first-pass parsing (i.e. before `final_pass`) - pub parse_fn: FunctionNode, -} - -impl Default for Analyzer { - fn default() -> Self { - let mut a = Self { - root: Default::default(), - remappings: Default::default(), - imported_srcs: Default::default(), - final_pass_items: Default::default(), - file_no: 0, - msg: MsgNode(0), - block: BlockNode(0), - graph: Default::default(), - entry: NodeIndex::from(0), - builtins: Default::default(), - user_types: Default::default(), - builtin_fns: builtin_fns::builtin_fns(), - builtin_fn_nodes: Default::default(), - builtin_fn_inputs: Default::default(), - expr_errs: Default::default(), - max_depth: 1024, - max_width: 2_i32.pow(14) as usize, - parse_fn: NodeIdx::from(0).into(), - }; - a.builtin_fn_inputs = builtin_fns::builtin_fns_inputs(&mut a); - - let msg = Msg::default(); - let block = Block::default(); - let msg = a.graph.add_node(Node::Msg(msg)).into(); - let block = a.graph.add_node(Node::Block(block)).into(); - a.msg = msg; - a.block = block; - a.entry = a.add_node(Node::Entry); - let pf = Function { - name: Some(Identifier { - loc: solang_parser::pt::Loc::Implicit, - name: "".into(), - }), - ..Default::default() - }; - let parser_fn = FunctionNode::from(a.add_node(pf)); - a.add_edge(parser_fn, a.entry, Edge::Func); - a.parse_fn = parser_fn; - a - } -} - -impl GraphLike for Analyzer { - fn graph_mut(&mut self) -> &mut Graph { - &mut self.graph - } - - fn graph(&self) -> &Graph { - &self.graph - } -} - -impl AnalyzerLike for Analyzer { - type Expr = Expression; - type ExprErr = ExprErr; - - fn builtin_fn_nodes(&self) -> &HashMap { - &self.builtin_fn_nodes - } - - fn builtin_fn_nodes_mut(&mut self) -> &mut HashMap { - &mut self.builtin_fn_nodes - } - - fn max_depth(&self) -> usize { - self.max_depth - } - - fn max_width(&self) -> usize { - self.max_width - } - - fn add_expr_err(&mut self, err: ExprErr) { - if !self.expr_errs.contains(&err) { - self.expr_errs.push(err); - } - } - - fn expr_errs(&self) -> Vec { - self.expr_errs.clone() - } - - fn entry(&self) -> NodeIdx { - self.entry - } - - fn msg(&mut self) -> MsgNode { - self.msg - } - - fn block(&mut self) -> BlockNode { - self.block - } - - fn builtin_fns(&self) -> &HashMap { - &self.builtin_fns - } - - fn builtin_fn_inputs(&self) -> &HashMap, Vec)> { - &self.builtin_fn_inputs - } - - fn builtins(&self) -> &HashMap { - &self.builtins - } - fn builtins_mut(&mut self) -> &mut HashMap { - &mut self.builtins - } - fn user_types(&self) -> &HashMap { - &self.user_types - } - fn user_types_mut(&mut self) -> &mut HashMap { - &mut self.user_types - } - - fn parse_expr(&mut self, expr: &Expression, parent: Option) -> NodeIdx { - use Expression::*; - match expr { - Type(_loc, ty) => { - if let Some(builtin) = Builtin::try_from_ty(ty.clone(), self) { - if let Some(idx) = self.builtins.get(&builtin) { - *idx - } else { - let idx = self.add_node(Node::Builtin(builtin.clone())); - self.builtins.insert(builtin, idx); - idx - } - } else if let Some(idx) = self.complicated_parse(expr, parent) { - self.add_if_err(idx.expect_single().into_expr_err(expr.loc())) - .unwrap_or(0.into()) - } else { - 
0.into() - } - } - Variable(ident) => { - if let Some(idx) = self.user_types.get(&ident.name) { - *idx - } else { - let node = self.add_node(Node::Unresolved(ident.clone())); - self.user_types.insert(ident.name.clone(), node); - node - } - } - ArraySubscript(_loc, ty_expr, None) => { - let inner_ty = self.parse_expr(ty_expr, parent); - if let Some(var_type) = VarType::try_from_idx(self, inner_ty) { - let dyn_b = Builtin::Array(var_type); - if let Some(idx) = self.builtins.get(&dyn_b) { - *idx - } else { - let idx = self.add_node(Node::Builtin(dyn_b.clone())); - self.builtins.insert(dyn_b, idx); - idx - } - } else { - inner_ty - } - } - ArraySubscript(loc, ty_expr, Some(idx_expr)) => { - let inner_ty = self.parse_expr(ty_expr, parent); - let idx = self.parse_expr(idx_expr, parent); - if let Some(var_type) = VarType::try_from_idx(self, inner_ty) { - let res = ConcreteNode::from(idx) - .underlying(self) - .into_expr_err(*loc) - .cloned(); - if let Some(concrete) = self.add_if_err(res) { - if let Some(size) = concrete.uint_val() { - let dyn_b = Builtin::SizedArray(size, var_type); - if let Some(idx) = self.builtins.get(&dyn_b) { - *idx - } else { - let idx = self.add_node(Node::Builtin(dyn_b.clone())); - self.builtins.insert(dyn_b, idx); - idx - } - } else { - inner_ty - } - } else { - inner_ty - } - } else { - inner_ty - } - } - NumberLiteral(_loc, integer, exponent, _unit) => { - let int = U256::from_dec_str(integer).unwrap(); - let val = if !exponent.is_empty() { - let exp = U256::from_dec_str(exponent).unwrap(); - int * U256::from(10).pow(exp) - } else { - int - }; - - self.add_node(Node::Concrete(Concrete::Uint(256, val))) - } - _ => { - if let Some(idx) = self.complicated_parse(expr, parent) { - self.add_if_err(idx.expect_single().into_expr_err(expr.loc())) - .unwrap_or(0.into()) - } else { - 0.into() - } - } - } - } -} - -impl Analyzer { - pub fn complicated_parse( - &mut self, - expr: &Expression, - parent: Option, - ) -> Option { - tracing::trace!("Parsing required compile-time evaluation"); - - let ctx = if let Some(parent) = parent { - let pf = Function { - name: Some(Identifier { - loc: solang_parser::pt::Loc::Implicit, - name: "".into(), - }), - ..Default::default() - }; - let parser_fn = FunctionNode::from(self.add_node(pf)); - self.add_edge(parser_fn, parent, Edge::Func); - - let dummy_ctx = Context::new(parser_fn, "".to_string(), expr.loc()); - let ctx = ContextNode::from(self.add_node(Node::Context(dummy_ctx))); - self.add_edge(ctx, parser_fn, Edge::Context(ContextEdge::Context)); - ctx - } else { - let dummy_ctx = Context::new(self.parse_fn, "".to_string(), expr.loc()); - let ctx = ContextNode::from(self.add_node(Node::Context(dummy_ctx))); - self.add_edge(ctx, self.entry(), Edge::Context(ContextEdge::Context)); - ctx - }; - - let full_stmt = solang_parser::pt::Statement::Return(expr.loc(), Some(expr.clone())); - self.parse_ctx_statement(&full_stmt, false, Some(ctx)); - let edges = self.add_if_err(ctx.all_edges(self).into_expr_err(expr.loc()))?; - if edges.len() == 1 { - let res = edges[0].return_nodes(self).into_expr_err(expr.loc()); - - let res = self.add_if_err(res); - - if let Some(res) = res { - res.last().map(|last| ExprRet::Single(last.1.into())) - } else { - None - } - } else if edges.is_empty() { - let res = ctx.return_nodes(self).into_expr_err(expr.loc()); - - let res = self.add_if_err(res); - - if let Some(res) = res { - res.last().map(|last| ExprRet::Single(last.1.into())) - } else { - None - } - } else { - self.add_expr_err(ExprErr::ParseError(expr.loc(), 
"Expected this to be compile-time evaluatable, but it was nondeterministic likely due to an external call via an interface".to_string())); - None - } - } - - pub fn set_remappings_and_root(&mut self, remappings_path: String) { - self.root = PathBuf::from(&remappings_path) - .parent() - .unwrap() - .to_path_buf(); - let remappings_file = fs::read_to_string(remappings_path) - .map_err(|err| err.to_string()) - .expect("Remappings file not found"); - - self.remappings = remappings_file - .lines() - .map(|x| x.trim()) - .filter(|x| !x.is_empty()) - .map(|x| x.split_once('=').expect("Invalid remapping")) - .map(|(name, path)| (name.to_owned(), path.to_owned())) - .collect(); - } - - pub fn print_errors( - &self, - file_mapping: &'_ BTreeMap, - mut src: &mut impl Cache, - ) { - if self.expr_errs.is_empty() { - } else { - self.expr_errs.iter().for_each(|error| { - let str_span = LocStrSpan::new(file_mapping, error.loc()); - let report = Report::build(ReportKind::Error, str_span.source(), str_span.start()) - .with_message(error.report_msg()) - .with_config( - Config::default() - .with_cross_gap(false) - .with_underlines(true) - .with_tab_width(4), - ) - .with_label( - Label::new(str_span) - .with_color(Color::Red) - .with_message(format!("{}", error.msg().fg(Color::Red))), - ); - report.finish().print(&mut src).unwrap(); - }); - } - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn parse( - &mut self, - src: &str, - current_path: &Path, - entry: bool, - ) -> ( - Option, - Vec<(Option, String, String, usize)>, - ) { - // tracing::trace!("parsing: {:?}", current_path); - let file_no = self.file_no; - let mut imported = vec![]; - match solang_parser::parse(src, file_no) { - Ok((source_unit, _comments)) => { - let parent = self.add_node(Node::SourceUnit(file_no)); - self.add_edge(parent, self.entry, Edge::Source); - let final_pass_part = self.parse_source_unit( - source_unit, - file_no, - parent, - &mut imported, - current_path, - ); - self.final_pass_items.push(final_pass_part); - if entry { - self.final_pass(); - } - - (Some(parent), imported) - } - Err(diagnostics) => { - print_diagnostics_report(src, current_path, diagnostics).unwrap(); - panic!("Failed to parse Solidity code for {current_path:?}."); - } - } - } - - pub fn final_pass(&mut self) { - let elems = self.final_pass_items.clone(); - elems.iter().for_each(|final_pass_item| { - final_pass_item - .inherits - .iter() - .for_each(|(contract, inherits)| { - contract.inherit(inherits.to_vec(), self); - }); - final_pass_item.funcs.iter().for_each(|func| { - // add params now that parsing is done - func.set_params_and_ret(self).unwrap(); - }); - - final_pass_item - .usings - .iter() - .for_each(|(using, scope_node)| { - self.parse_using(using, *scope_node); - }); - final_pass_item.vars.iter().for_each(|(var, parent)| { - let loc = var.underlying(self).unwrap().loc; - let res = var.parse_initializer(self, *parent).into_expr_err(loc); - let _ = self.add_if_err(res); - }); - }); - - elems.into_iter().for_each(|final_pass_item| { - final_pass_item.funcs.into_iter().for_each(|func| { - if let Some(body) = &func.underlying(self).unwrap().body.clone() { - self.parse_ctx_statement(body, false, Some(func)); - } - }); - }); - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn parse_source_unit( - &mut self, - source_unit: SourceUnit, - file_no: usize, - parent: NodeIdx, - imported: &mut Vec<(Option, String, String, usize)>, - current_path: &Path, - ) -> FinalPassItem { - let mut all_funcs = vec![]; - let mut all_usings = vec![]; 
- let mut all_inherits = vec![]; - let mut all_vars = vec![]; - source_unit - .0 - .iter() - .enumerate() - .for_each(|(unit_part, source_unit_part)| { - let (_sup, funcs, usings, inherits, vars) = self.parse_source_unit_part( - source_unit_part, - file_no, - unit_part, - parent, - imported, - current_path, - ); - all_funcs.extend(funcs); - all_usings.extend(usings); - all_inherits.extend(inherits); - all_vars.extend(vars); - }); - FinalPassItem::new(all_funcs, all_usings, all_inherits, all_vars) - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn parse_source_unit_part( - &mut self, - sup: &SourceUnitPart, - file_no: usize, - unit_part: usize, - parent: NodeIdx, - imported: &mut Vec<(Option, String, String, usize)>, - current_path: &Path, - ) -> ( - NodeIdx, - Vec, - Vec<(Using, NodeIdx)>, - Vec<(ContractNode, Vec)>, - Vec<(VarNode, NodeIdx)>, - ) { - use SourceUnitPart::*; - - let sup_node = self.add_node(Node::SourceUnitPart(file_no, unit_part)); - self.add_edge(sup_node, parent, Edge::Part); - - let mut func_nodes = vec![]; - let mut usings = vec![]; - let mut inherits = vec![]; - let mut vars = vec![]; - - match sup { - ContractDefinition(def) => { - let (node, funcs, con_usings, unhandled_inherits, unhandled_vars) = - self.parse_contract_def(def, parent, imported); - self.add_edge(node, sup_node, Edge::Contract); - func_nodes.extend(funcs); - usings.extend(con_usings); - inherits.push((node, unhandled_inherits)); - vars.extend(unhandled_vars); - } - StructDefinition(def) => { - let node = self.parse_struct_def(def); - self.add_edge(node, sup_node, Edge::Struct); - } - EnumDefinition(def) => { - let node = self.parse_enum_def(def); - self.add_edge(node, sup_node, Edge::Enum); - } - ErrorDefinition(def) => { - let node = self.parse_err_def(def); - self.add_edge(node, sup_node, Edge::Error); - } - VariableDefinition(def) => { - let (node, maybe_func, needs_final_pass) = self.parse_var_def(def, false); - if let Some(func) = maybe_func { - func_nodes.push(self.handle_func(func, None)); - } - - if needs_final_pass { - vars.push((node, parent)); - } - - self.add_edge(node, sup_node, Edge::Var); - } - FunctionDefinition(def) => { - let node = self.parse_func_def(def, None); - func_nodes.push(node); - self.add_edge(node, sup_node, Edge::Func); - } - TypeDefinition(def) => { - let node = self.parse_ty_def(def); - self.add_edge(node, sup_node, Edge::Ty); - } - EventDefinition(_def) => todo!(), - Annotation(_anno) => todo!(), - Using(using) => usings.push((*using.clone(), parent)), - StraySemicolon(_loc) => todo!(), - PragmaDirective(_, _, _) => {} - ImportDirective(import) => { - imported.extend(self.parse_import(import, current_path, parent)) - } - } - (sup_node, func_nodes, usings, inherits, vars) - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn parse_import( - &mut self, - import: &Import, - current_path: &Path, - parent: NodeIdx, - ) -> Vec<(Option, String, String, usize)> { - match import { - Import::Plain(import_path, _) => { - tracing::trace!("parse_import, path: {:?}", import_path); - let remapping = self - .remappings - .iter() - .find(|x| import_path.string.starts_with(&x.0)); - - let remapped = if let Some((name, path)) = remapping { - self.root.join(path).join( - import_path - .string - .replacen(name, "", 1) - .trim_start_matches('/'), - ) - } else { - current_path - .parent() - .unwrap() - .join(import_path.string.clone()) - }; - - let canonical = fs::canonicalize(&remapped) - .unwrap_or_else(|_| panic!( - "Could not find file: {remapped:?}{}", - 
if self.remappings.is_empty() { - ". It looks like you didn't pass in any remappings. Try adding the `--remappings ./path/to/remappings.txt` to the command line input" - } else { "" } - ) - ); - let canonical_str_path = canonical.as_os_str(); - if let Some(other_entry) = self.imported_srcs.get(canonical_str_path) { - if let Some(o_e) = other_entry { - self.add_edge(*o_e, parent, Edge::Import); - } - return vec![]; - } - - let sol = fs::read_to_string(&canonical).unwrap_or_else(|_| { - panic!( - "Could not find file for dependency: {canonical:?}{}", - if self.remappings.is_empty() { - ". It looks like you didn't pass in any remappings. Try adding the `--remappings ./path/to/remappings.txt` to the command line input (where `remappings.txt` is the output of `forge remappings > remappings.txt`)" - } else { "" } - ) - }); - self.file_no += 1; - let file_no = self.file_no; - // breaks recursion issues - self.imported_srcs.insert(canonical_str_path.into(), None); - let (maybe_entry, mut inner_sources) = self.parse(&sol, &remapped, false); - self.imported_srcs - .insert(canonical_str_path.into(), maybe_entry); - if let Some(other_entry) = maybe_entry { - self.add_edge(other_entry, parent, Edge::Import); - } - - inner_sources.push(( - maybe_entry, - remapped.to_str().unwrap().to_owned(), - sol.to_string(), - file_no, - )); - inner_sources - } - Import::Rename(import_path, _elems, _) => { - tracing::trace!("parse_import, path: {:?}, Rename", import_path); - let remapping = self - .remappings - .iter() - .find(|x| import_path.string.starts_with(&x.0)); - - let remapped = if let Some((name, path)) = remapping { - self.root.join(path).join( - import_path - .string - .replacen(name, "", 1) - .trim_start_matches('/'), - ) - } else { - current_path - .parent() - .unwrap() - .join(import_path.string.clone()) - }; - - let canonical = fs::canonicalize(&remapped).unwrap_or_else(|_| panic!( - "Could not find file: {remapped:?}{}", - if self.remappings.is_empty() { - ". It looks like you didn't pass in any remappings. Try adding the `--remappings ./path/to/remappings.txt` to the command line input" - } else { "" } - ) - ); - let canonical_str_path = canonical.as_os_str(); - if let Some(other_entry) = self.imported_srcs.get(canonical_str_path) { - if let Some(o_e) = other_entry { - self.add_edge(*o_e, parent, Edge::Import); - } - return vec![]; - } - - let sol = fs::read_to_string(&canonical).unwrap_or_else(|_| { - panic!( - "Could not find file for dependency: {canonical:?}{}", - if self.remappings.is_empty() { - ". It looks like you didn't pass in any remappings. 
Try adding the `--remappings ./path/to/remappings.txt` to the command line input" - } else { "" } - ) - }); - self.file_no += 1; - let file_no = self.file_no; - - // breaks recursion issues - self.imported_srcs.insert(canonical_str_path.into(), None); - let (maybe_entry, mut inner_sources) = self.parse(&sol, &remapped, false); - self.imported_srcs - .insert(canonical_str_path.into(), maybe_entry); - if let Some(other_entry) = maybe_entry { - self.add_edge(other_entry, parent, Edge::Import); - } - - inner_sources.push(( - maybe_entry, - remapped.to_str().unwrap().to_owned(), - sol.to_string(), - file_no, - )); - inner_sources - } - e => todo!("import {:?}", e), - } - } - - // #[tracing::instrument(name = "parse_contract_def", skip_all, fields(name = format!("{:?}", contract_def.name)))] - #[tracing::instrument(level = "trace", skip_all)] - pub fn parse_contract_def( - &mut self, - contract_def: &ContractDefinition, - source: NodeIdx, - imports: &[(Option, String, String, usize)], - ) -> ( - ContractNode, - Vec, - Vec<(Using, NodeIdx)>, - Vec, - Vec<(VarNode, NodeIdx)>, - ) { - tracing::trace!( - "Parsing contract {}", - if let Some(ident) = &contract_def.name { - ident.name.clone() - } else { - "interface".to_string() - } - ); - use ContractPart::*; - - let (contract, unhandled_inherits) = - Contract::from_w_imports(contract_def.clone(), source, imports, self); - - let inherits = contract.inherits.clone(); - let con_name = contract.name.clone().unwrap().name; - let con_node: ContractNode = - if let Some(user_ty_node) = self.user_types.get(&con_name).cloned() { - let unresolved = self.node_mut(user_ty_node); - *unresolved = Node::Contract(contract); - user_ty_node.into() - } else { - let node = self.add_node(Node::Contract(contract)); - self.user_types.insert(con_name, node); - node.into() - }; - - inherits.iter().for_each(|contract_node| { - self.add_edge(*contract_node, con_node, Edge::InheritedContract); - }); - let mut usings = vec![]; - let mut func_nodes = vec![]; - let mut vars = vec![]; - contract_def.parts.iter().for_each(|cpart| match cpart { - StructDefinition(def) => { - let node = self.parse_struct_def(def); - self.add_edge(node, con_node, Edge::Struct); - } - EnumDefinition(def) => { - let node = self.parse_enum_def(def); - self.add_edge(node, con_node, Edge::Enum); - } - ErrorDefinition(def) => { - let node = self.parse_err_def(def); - self.add_edge(node, con_node, Edge::Error); - } - VariableDefinition(def) => { - let (node, maybe_func, needs_final_pass) = self.parse_var_def(def, true); - if let Some(func) = maybe_func { - func_nodes.push(self.handle_func(func, Some(con_node))); - } - - if needs_final_pass { - vars.push((node, con_node.into())); - } - - self.add_edge(node, con_node, Edge::Var); - } - FunctionDefinition(def) => { - let node = self.parse_func_def(def, Some(con_node)); - func_nodes.push(node); - } - TypeDefinition(def) => { - let node = self.parse_ty_def(def); - self.add_edge(node, con_node, Edge::Ty); - } - EventDefinition(_def) => {} - Annotation(_anno) => todo!(), - Using(using) => usings.push((*using.clone(), con_node.0.into())), - StraySemicolon(_loc) => todo!(), - }); - (con_node, func_nodes, usings, unhandled_inherits, vars) - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn parse_using(&mut self, using_def: &Using, scope_node: NodeIdx) { - tracing::trace!("Parsing \"using\" {:?}", using_def); - let Some(ref using_def_ty) = using_def.ty else { - self.add_expr_err(ExprErr::Todo(using_def.loc(), "Using statements with wildcards currently 
unsupported".to_string())); - return; - }; - let maybe_cvar_idx = self.parse_expr(using_def_ty, None); - let ty_idx = match VarType::try_from_idx(self, maybe_cvar_idx) { - Some(v_ty) => v_ty.ty_idx(), - None => { - self.add_expr_err(ExprErr::Unresolved( - using_def.loc(), - "Unable to deduce the type for which to apply the `using` statement to" - .to_string(), - )); - return; - } - }; - - match &using_def.list { - UsingList::Library(ident_paths) => { - ident_paths.identifiers.iter().for_each(|ident| { - if let Some(hopefully_contract) = self.user_types.get(&ident.name) { - match self.node(*hopefully_contract) { - Node::Contract(_) => { - let funcs = ContractNode::from(*hopefully_contract).funcs(self); - let relevant_funcs: Vec<_> = funcs - .iter() - .filter_map(|func| { - let first_param: FunctionParamNode = - *func.params(self).iter().take(1).next()?; - let param_ty = first_param.ty(self).unwrap(); - if param_ty == ty_idx { - Some(func) - } else { - None - } - }) - .copied() - .collect(); - relevant_funcs.iter().for_each(|func| { - self.add_edge(ty_idx, *func, Edge::LibraryFunction(scope_node)); - }); - } - _ => self.add_expr_err(ExprErr::ParseError( - using_def.loc(), - "Tried to use a non-contract as a contract in a `using` statement" - .to_string(), - )), - } - } else { - panic!("Cannot find library contract {}", ident.name); - } - }); - } - UsingList::Functions(vec_ident_paths) => { - vec_ident_paths.iter().for_each(|ident_paths| { - if ident_paths.path.identifiers.len() == 2 { - if let Some(hopefully_contract) = - self.user_types.get(&ident_paths.path.identifiers[0].name) - { - if let Some(func) = ContractNode::from(*hopefully_contract) - .funcs(self) - .iter() - .find(|func| { - func.name(self) - .unwrap() - .starts_with(&ident_paths.path.identifiers[1].name) - }) - { - self.add_edge(ty_idx, *func, Edge::LibraryFunction(scope_node)); - } else { - panic!( - "Cannot find library function {}.{}", - ident_paths.path.identifiers[0].name, - ident_paths.path.identifiers[1].name - ); - } - } else { - panic!( - "Cannot find library contract {}", - ident_paths.path.identifiers[0].name - ); - } - } else { - // looking for free floating function - let funcs = match self.node(scope_node) { - Node::Contract(_) => self.search_children( - ContractNode::from(scope_node).associated_source(self), - &Edge::Func, - ), - Node::SourceUnit(..) 
=> self.search_children(scope_node, &Edge::Func), - _ => unreachable!(), - }; - if let Some(func) = funcs.iter().find(|func| { - FunctionNode::from(**func) - .name(self) - .unwrap() - .starts_with(&ident_paths.path.identifiers[0].name) - }) { - self.add_edge(ty_idx, *func, Edge::LibraryFunction(scope_node)); - } else { - panic!( - "Cannot find library function {}", - ident_paths.path.identifiers[0].name - ); - } - } - }); - } - UsingList::Error => todo!(), - } - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn parse_enum_def(&mut self, enum_def: &EnumDefinition) -> EnumNode { - tracing::trace!("Parsing enum {:?}", enum_def); - let enu = Enum::from(enum_def.clone()); - let name = enu.name.clone().expect("Enum was not named").name; - - // check if we have an unresolved type by the same name - let enu_node: EnumNode = if let Some(user_ty_node) = self.user_types.get(&name).cloned() { - let unresolved = self.node_mut(user_ty_node); - *unresolved = Node::Enum(enu); - user_ty_node.into() - } else { - let node = self.add_node(enu); - self.user_types.insert(name, node); - node.into() - }; - - enu_node - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn parse_struct_def(&mut self, struct_def: &StructDefinition) -> StructNode { - tracing::trace!("Parsing struct {:?}", struct_def.name); - let strukt = Struct::from(struct_def.clone()); - - let name = strukt.name.clone().expect("Struct was not named").name; - - // check if we have an unresolved type by the same name - let strukt_node: StructNode = - if let Some(user_ty_node) = self.user_types.get(&name).cloned() { - let unresolved = self.node_mut(user_ty_node); - *unresolved = Node::Struct(strukt); - user_ty_node.into() - } else { - let node = self.add_node(strukt); - self.user_types.insert(name, node); - node.into() - }; - - struct_def.fields.iter().for_each(|field| { - let f = Field::new(self, field.clone()); - let field_node = self.add_node(f); - self.add_edge(field_node, strukt_node, Edge::Field); - }); - strukt_node - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn parse_err_def(&mut self, err_def: &ErrorDefinition) -> ErrorNode { - tracing::trace!("Parsing error {:?}", err_def); - let err_node = ErrorNode(self.add_node(Error::from(err_def.clone())).index()); - err_def.fields.iter().for_each(|field| { - let param = ErrorParam::new(self, field.clone()); - let field_node = self.add_node(param); - self.add_edge(field_node, err_node, Edge::ErrorParam); - }); - err_node - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn parse_func_def( - &mut self, - func_def: &FunctionDefinition, - con_node: Option, - ) -> FunctionNode { - let func = Function::from(func_def.clone()); - tracing::trace!( - "Parsing function {:?}", - func.name - .clone() - .unwrap_or_else(|| solang_parser::pt::Identifier { - loc: solang_parser::pt::Loc::Implicit, - name: "".to_string() - }) - .name - ); - self.handle_func(func, con_node) - } - - pub fn handle_func(&mut self, func: Function, con_node: Option) -> FunctionNode { - match func.ty { - FunctionTy::Constructor => { - let node = self.add_node(func); - let func_node = node.into(); - - if let Some(con_node) = con_node { - self.add_edge(node, con_node, Edge::Constructor); - } - func_node - } - FunctionTy::Fallback => { - let node = self.add_node(func); - let func_node = node.into(); - - if let Some(con_node) = con_node { - self.add_edge(node, con_node, Edge::FallbackFunc); - } - - func_node - } - FunctionTy::Receive => { - // receive function cannot have input/output - 
let node = self.add_node(func); - if let Some(con_node) = con_node { - self.add_edge(node, con_node, Edge::ReceiveFunc); - } - FunctionNode::from(node) - } - FunctionTy::Function => { - let fn_node = self.add_node(func); - if let Some(con_node) = con_node { - self.add_edge(fn_node, con_node, Edge::Func); - } - fn_node.into() - } - FunctionTy::Modifier => { - let fn_node = self.add_node(func); - if let Some(con_node) = con_node { - self.add_edge(fn_node, con_node, Edge::Modifier); - } - fn_node.into() - } - } - } - - pub fn parse_var_def( - &mut self, - var_def: &VariableDefinition, - in_contract: bool, - ) -> (VarNode, Option, bool) { - tracing::trace!("Parsing variable definition: {:?}", var_def.name); - let var = Var::new(self, var_def.clone(), in_contract); - let mut func = None; - if var.is_public() { - func = Some(Function::from(var_def.clone())); - } - let needs_final_pass = var.initializer_expr.is_some(); - let var_node = VarNode::from(self.add_node(var)); - self.user_types - .insert(var_node.name(self).unwrap(), var_node.into()); - (var_node, func, needs_final_pass) - } - - pub fn parse_ty_def(&mut self, ty_def: &TypeDefinition) -> TyNode { - tracing::trace!("Parsing type definition"); - let ty = Ty::new(self, ty_def.clone()); - let name = ty.name.name.clone(); - let ty_node: TyNode = if let Some(user_ty_node) = self.user_types.get(&name).cloned() { - let unresolved = self.node_mut(user_ty_node); - *unresolved = Node::Ty(ty); - user_ty_node.into() - } else { - let node = self.add_node(Node::Ty(ty)); - self.user_types.insert(name, node); - node.into() - }; - ty_node - } -} - -/// Print the report of parser's diagnostics -pub fn print_diagnostics_report( - content: &str, - path: &Path, - diagnostics: Vec, -) -> std::io::Result<()> { - let filename = path.file_name().unwrap().to_string_lossy().to_string(); - for diag in diagnostics { - let (start, end) = (diag.loc.start(), diag.loc.end()); - let mut report = Report::build(ReportKind::Error, &filename, start) - .with_message(format!("{:?}", diag.ty)) - .with_label( - Label::new((&filename, start..end)) - .with_color(Color::Red) - .with_message(format!("{}", diag.message.fg(Color::Red))), - ); - - for note in diag.notes { - report = report.with_note(note.message); - } - - report.finish().print((&filename, Source::from(content)))?; - } - Ok(()) -} diff --git a/tests/helpers.rs b/tests/helpers.rs deleted file mode 100644 index bd69d17d..00000000 --- a/tests/helpers.rs +++ /dev/null @@ -1,84 +0,0 @@ -use ariadne::sources; -use pyrometer::context::analyzers::ReportConfig; -use pyrometer::context::analyzers::{FunctionVarsBoundAnalyzer, ReportDisplay}; -use pyrometer::Analyzer; -use shared::analyzer::Search; -use shared::NodeIdx; -use shared::{nodes::FunctionNode, Edge}; -use std::collections::BTreeMap; -use std::collections::HashMap; -use std::path::PathBuf; - -pub fn assert_no_ctx_killed(path_str: String, sol: &str) { - let mut analyzer = Analyzer::default(); - let (maybe_entry, mut all_sources) = - analyzer.parse(sol, &PathBuf::from(path_str.clone()), true); - all_sources.push((maybe_entry, path_str.clone(), sol.to_string(), 0)); - let entry = maybe_entry.unwrap(); - no_ctx_killed(analyzer, entry, path_str, all_sources); -} - -pub fn remapping_assert_no_ctx_killed(path_str: String, remapping_file: String, sol: &str) { - let mut analyzer = Analyzer::default(); - analyzer.set_remappings_and_root(remapping_file); - let (maybe_entry, mut all_sources) = - analyzer.parse(sol, &PathBuf::from(path_str.clone()), true); - 
all_sources.push((maybe_entry, path_str.clone(), sol.to_string(), 0)); - let entry = maybe_entry.unwrap(); - no_ctx_killed(analyzer, entry, path_str, all_sources); -} - -pub fn no_ctx_killed( - mut analyzer: Analyzer, - entry: NodeIdx, - path_str: String, - all_sources: Vec<(Option, String, String, usize)>, -) { - assert!( - analyzer.expr_errs.is_empty(), - "Analyzer encountered parse errors" - ); - - let config = ReportConfig { - eval_bounds: true, - simplify_bounds: false, - show_tmps: true, - show_consts: true, - show_symbolics: true, - show_initial_bounds: true, - show_all_lines: true, - show_reverts: true, - show_unreachables: true, - show_nonreverts: true, - }; - let file_mapping: BTreeMap<_, _> = vec![(0usize, path_str)].into_iter().collect(); - - let mut source_map = sources( - all_sources - .iter() - .map(|(_entry, name, src, _num)| (name.clone(), src)) - .collect::>(), - ); - - let funcs = analyzer.search_children(entry, &Edge::Func); - for func in funcs.into_iter() { - if let Some(ctx) = FunctionNode::from(func).maybe_body_ctx(&mut analyzer) { - if ctx.killed_loc(&analyzer).unwrap().is_some() { - analyzer - .bounds_for_all(&file_mapping, ctx, config) - .as_cli_compat(&file_mapping) - .print_reports(&mut source_map, &analyzer); - panic!("Killed context in test"); - } - ctx.all_edges(&analyzer).unwrap().iter().for_each(|subctx| { - if subctx.killed_loc(&analyzer).unwrap().is_some() { - analyzer - .bounds_for_all(&file_mapping, *subctx, config) - .as_cli_compat(&file_mapping) - .print_reports(&mut source_map, &analyzer); - panic!("Killed context in test"); - } - }); - } - } -} diff --git a/tests/test_data/dyn_types.sol b/tests/test_data/dyn_types.sol deleted file mode 100644 index 566550d6..00000000 --- a/tests/test_data/dyn_types.sol +++ /dev/null @@ -1,41 +0,0 @@ -contract DynTypes { - uint256[] storeVar; - - function bytes_dyn(bytes calldata x) public { - bytes memory y = x; - require(x.length < 10); - y[8] = 0xff; - require(y.length == 9); - } - - function array_dyn(uint256[] calldata x) public { - uint256[] memory y = x; - require(x.length < 10); - y[8] = 100; - require(y.length == 9); - } - - function nested_bytes_dyn(bytes[] calldata x) public { - bytes memory a = hex"1337"; - x[0] = a; - require(x[0][0] == hex"13"); - require(x.length == 1); - } - - function array_push(uint256 x) public { - require(x > 5); - storeVar.push(x); - // TODO: handle this better - require(storeVar[0] == x); - uint256 y = storeVar.pop(); - require(y == x); - } - - function indexInto() public returns (uint256) { - return storeVar[basicFunc()]; - } - - function basicFunc() public returns (uint256) { - return 1; - } -} diff --git a/tests/test_data/loops.sol b/tests/test_data/loops.sol deleted file mode 100644 index f644e12c..00000000 --- a/tests/test_data/loops.sol +++ /dev/null @@ -1,31 +0,0 @@ -contract For { - function const_loop() public { - uint256 x; - for (uint256 i; i < 10; i++) { - x += 1; - } - - x += 1; - require(x == 10); - return x; - } - - function const_loop_def_iter() public { - uint256 x; - for (uint256 i = 1; i < 10; i++) { - i += 1; - } - - require(x == 10); - return x; - } - - function while_loop(uint256 x) public { - while (x > 10) { - x -= 1; - } - - require(x == 10); - return x; - } -}
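
The `match_assign_sides` implementation removed earlier in this patch pairs the left- and right-hand sides of an assignment recursively: `Null` on either side is a no-op, two `Single`s are assigned directly, a `Single` against a `Multi` fans out over the multi side, and two `Multi`s of equal length are zipped element-wise. The following is a minimal, self-contained sketch of that recursion over a simplified stand-in enum; the `Ret` enum and `match_sides` function are hypothetical illustrations, not the crate's actual `ExprRet` API.

```rust
// Illustrative sketch only: a simplified stand-in for the recursive
// side-matching performed by the removed `match_assign_sides`.
#[derive(Debug)]
enum Ret {
    Null,
    Single(usize),
    Multi(Vec<Ret>),
}

/// Recursively pair left- and right-hand sides of an assignment:
/// zip `Multi` values of equal length, fan out otherwise.
fn match_sides(lhs: &Ret, rhs: &Ret, out: &mut Vec<(usize, usize)>) {
    match (lhs, rhs) {
        // nothing to assign on either side
        (Ret::Null, _) | (_, Ret::Null) => {}
        // the base case: record a single lhs/rhs pairing
        (Ret::Single(l), Ret::Single(r)) => out.push((*l, *r)),
        // one single against many: apply the single side to each element
        (l @ Ret::Single(_), Ret::Multi(rs)) => {
            rs.iter().for_each(|r| match_sides(l, r, out))
        }
        (Ret::Multi(ls), r @ Ret::Single(_)) => {
            ls.iter().for_each(|l| match_sides(l, r, out))
        }
        // same-length multis are zipped element-wise
        (Ret::Multi(ls), Ret::Multi(rs)) if ls.len() == rs.len() => ls
            .iter()
            .zip(rs.iter())
            .for_each(|(l, r)| match_sides(l, r, out)),
        // mismatched lengths: fall back to fanning the whole lhs over each rhs
        (l @ Ret::Multi(_), Ret::Multi(rs)) => {
            rs.iter().for_each(|r| match_sides(l, r, out))
        }
    }
}

fn main() {
    let lhs = Ret::Multi(vec![Ret::Single(1), Ret::Single(2)]);
    let rhs = Ret::Multi(vec![Ret::Single(10), Ret::Single(20)]);
    let mut pairs = vec![];
    match_sides(&lhs, &rhs, &mut pairs);
    assert_eq!(pairs, vec![(1, 10), (2, 20)]);
    println!("{pairs:?}");
}
```

Collecting index pairs here stands in for the real code's calls to `assign`, which advances the left-hand variable in the context and copies the right-hand side's range bounds (and exclusions) onto it.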
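Similarly, the removed `set_remappings_and_root` reads a forge-style `remappings.txt`, trims each line, skips blanks, and splits each entry on the first `=` into a `(name, path)` pair. A standalone sketch of that parsing, assuming the same `name=path` line format (the `parse_remappings` helper name is hypothetical):

```rust
/// Parse forge-style remappings, one `name=path` entry per line, mirroring the
/// line handling in the removed `set_remappings_and_root`.
fn parse_remappings(contents: &str) -> Vec<(String, String)> {
    contents
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty())
        // split on the first '=' only; a missing '=' is treated as malformed input
        .map(|line| line.split_once('=').expect("Invalid remapping"))
        .map(|(name, path)| (name.to_owned(), path.to_owned()))
        .collect()
}

fn main() {
    let remappings =
        parse_remappings("forge-std/=lib/forge-std/src/\n\n ds-test/=lib/ds-test/src/ ");
    assert_eq!(
        remappings,
        vec![
            ("forge-std/".to_string(), "lib/forge-std/src/".to_string()),
            ("ds-test/".to_string(), "lib/ds-test/src/".to_string()),
        ]
    );
}
```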