diff --git a/Nargo.toml b/Nargo.toml index 91e7ae5..bd38270 100644 --- a/Nargo.toml +++ b/Nargo.toml @@ -5,4 +5,4 @@ authors = [""] compiler_version = ">=0.37.0" [dependencies] -noir_sort = {tag = "v0.2.1", git = "https://github.com/noir-lang/noir_sort"} \ No newline at end of file +noir_sort = {tag = "v0.2.2", git = "https://github.com/noir-lang/noir_sort"} diff --git a/src/_comparison_tools/bounds_checker.nr b/src/_comparison_tools/bounds_checker.nr index cafb377..f173878 100644 --- a/src/_comparison_tools/bounds_checker.nr +++ b/src/_comparison_tools/bounds_checker.nr @@ -26,7 +26,8 @@ in this case, i == M * cost = 3 gates + 2 gates per iteration **/ pub fn get_validity_flags(boundary: u32) -> [Field; N] { - let flags: [Field; N] = __get_validity_flags(boundary); + //@Safety: The constraining is happening inside get_validity_flags_inner + let flags: [Field; N] = unsafe { __get_validity_flags(boundary) }; get_validity_flags_inner(boundary, flags) } diff --git a/src/_comparison_tools/lt.nr b/src/_comparison_tools/lt.nr index ae26de5..0d35fd6 100644 --- a/src/_comparison_tools/lt.nr +++ b/src/_comparison_tools/lt.nr @@ -14,7 +14,8 @@ pub unconstrained fn get_lte_predicate_large(x: Field, y: Field) -> bool { } pub fn lte_field_240_bit(x: Field, y: Field) -> bool { - let predicate = get_lte_predicate_large(x, y); + //@Safety: check the comments below + let predicate = unsafe { get_lte_predicate_large(x, y) }; let delta = y as Field - x as Field; // (x - y) * predicate @@ -23,6 +24,8 @@ pub fn lte_field_240_bit(x: Field, y: Field) -> bool { // (y - x) * p + (1 - p) * (x - y + 1) // (y - x) * p + x - y + 1 + p * (y - x) let lt_parameter = 2 * (predicate as Field) * delta - predicate as Field - delta + 1; + // checks that the bit length of lt_parameter is 240 + // i.e. 
checks the sign of lt_parameter lt_parameter.assert_max_bit_size::<240>(); predicate @@ -40,18 +43,24 @@ pub fn assert_lte_240_bit(x: Field, y: Field) { } pub fn lt_field_16_bit(x: Field, y: Field) -> bool { - let predicate = get_lt_predicate_f(x, y); + //@Safety: check the comments below + let predicate = unsafe { get_lt_predicate_f(x, y) }; let delta = y as Field - x as Field; let lt_parameter = 2 * (predicate as Field) * delta - predicate as Field - delta; + // checks that the bit length of lt_parameter is 16 + // i.e. checks the sign of lt_parameter lt_parameter.assert_max_bit_size::<16>(); predicate } pub fn lt_field_8_bit(x: Field, y: Field) -> bool { - let predicate = get_lt_predicate_f(x, y); + //@Safety: check the comments below + let predicate = unsafe { get_lt_predicate_f(x, y) }; let delta = y as Field - x as Field; let lt_parameter = 2 * (predicate as Field) * delta - predicate as Field - delta; + // checks that the bit length of lt_parameter is 8 + // i.e. checks the sign of lt_parameter lt_parameter.assert_max_bit_size::<8>(); predicate diff --git a/src/_string_tools/slice_field.nr b/src/_string_tools/slice_field.nr index 6b8c7e9..ef3c7d7 100644 --- a/src/_string_tools/slice_field.nr +++ b/src/_string_tools/slice_field.nr @@ -64,8 +64,11 @@ unconstrained fn __slice_200_bits_from_field(f: Field) -> (Field, Field, bool) { } pub fn slice_200_bits_from_field(f: Field) -> Field { - let (lo, hi, borrow) = __slice_200_bits_from_field(f); + //@Safety: check the comments below + let (lo, hi, borrow) = unsafe { __slice_200_bits_from_field(f) }; + // checks that lo and hi are the correct slices of f assert(hi * TWO_POW_200 + lo == f); + // checks that lo and hi are the correct bit sizes lo.assert_max_bit_size::<200>(); hi.assert_max_bit_size::<56>(); let lo_diff = PLO_200_felt - lo + (borrow as Field * TWO_POW_200); diff --git a/src/_string_tools/slice_packed_field.nr b/src/_string_tools/slice_packed_field.nr index 5751e45..04d0c85 100644 --- 
a/src/_string_tools/slice_packed_field.nr +++ b/src/_string_tools/slice_packed_field.nr @@ -607,7 +607,8 @@ unconstrained fn __divmod(numerator: Field, denominator: Field) -> (Field, Field * we know the quotient will fit into a 14 bit range check which will save us some fractional gates **/ fn divmod_31(numerator: Field) -> (Field, Field) { - let (quotient, remainder) = __divmod(numerator, 31); + //@Safety: we check the bit lengths of qf and rf and their relation to the numerator with assertions later + let (quotient, remainder) = unsafe { __divmod(numerator, 31) }; let qf = quotient as Field; let rf = remainder as Field; @@ -641,14 +642,17 @@ unconstrained fn decompose(val: Field) -> [Field; 16] { pub fn get_last_limb_path(last_limb_index: Field) -> [Field; OutputFields] { // TODO we offset by 1 explain why (0 byte length produces 0 - 1 which = invalid array index. we just add 1 and increase array length by 1 to compensate) let path = LAST_LIMB_PATH[last_limb_index + 1]; // 2 - let path_valid_bits = decompose(path); + //@Safety: check the comments below + let path_valid_bits = unsafe { decompose(path) }; let mut path_valid_sum: Field = 0; let mut path_valid_output: [Field; OutputFields] = [0; OutputFields]; for i in 0..OutputFields { + // we check that the path valid bits are binary assert(path_valid_bits[i] * path_valid_bits[i] - path_valid_bits[i] == 0); path_valid_sum += (path_valid_bits[i] * (1 << i as u8) as Field); path_valid_output[i] = path_valid_bits[i]; } + // we check that the path valid bits sum to the path assert(path_valid_sum == path); path_valid_output } @@ -659,7 +663,8 @@ pub fn get_last_limb_path(last_limb_index: Field) -> [Fie * @details cost 46 gates **/ pub fn slice_field(f: Field, num_bytes: Field) -> (Field, Field) { - let chunks = __slice_field(f, num_bytes); + //@Safety: we check the bit lengths of the chunks with assertions later + let chunks = unsafe { __slice_field(f, num_bytes) }; chunks[0].assert_max_bit_size::<8>(); // 1.25 gates 
chunks[1].assert_max_bit_size::<16>(); // 1.5 gates chunks[2].assert_max_bit_size::<32>(); // 1.75 gates @@ -863,7 +868,9 @@ mod test { // let start_byte = 26; let num_bytes = 0; let start_byte: u32 = 0; - let mut expected_slices: [Field; 3] = build_slices_for_test(text, start_byte, num_bytes); + //@Safety: this is a test + let mut expected_slices: [Field; 3] = + unsafe { build_slices_for_test(text, start_byte, num_bytes) }; let result_slices: [Field; 3] = slice_fields(slices, start_byte as Field, num_bytes as Field); assert(result_slices == expected_slices); @@ -895,16 +902,19 @@ mod test { for j in 0..18 { let start_byte: u32 = byte_positions[j]; let mut expected_slices: [Field; 3] = - build_slices_for_test(text, start_byte, num_bytes); + //@Safety: this is a test + unsafe { build_slices_for_test(text, start_byte, num_bytes) }; let result_slices: [Field; 3] = - slice_fields(slices, start_byte as Field, num_bytes as Field); + //@Safety: this is a test + unsafe { slice_fields(slices, start_byte as Field, num_bytes as Field) }; assert(result_slices == expected_slices); } for j in 0..18 { let start_byte: u32 = text.len() - num_bytes - byte_positions[j]; let mut expected_slices: [Field; 3] = - build_slices_for_test(text, start_byte, num_bytes); + //@Safety: this is a test + unsafe { build_slices_for_test(text, start_byte, num_bytes) }; let result_slices: [Field; 3] = slice_fields(slices, start_byte as Field, num_bytes as Field); assert(result_slices == expected_slices); diff --git a/src/_string_tools/sum_bytes_into_field.nr b/src/_string_tools/sum_bytes_into_field.nr index a9244b4..3f9a02b 100644 --- a/src/_string_tools/sum_bytes_into_field.nr +++ b/src/_string_tools/sum_bytes_into_field.nr @@ -185,10 +185,12 @@ fn sum_var_bytes_into_field( body_index: Field, num_bytes: Field, ) -> Field { - let path = get_path(num_bytes); // 5 gates + //@Safety: check the comments below + let path = unsafe { get_path(num_bytes) }; // 5 gates let path_f: [Field; 5] = [path[0] as 
Field, path[1] as Field, path[2] as Field, path[3] as Field, path[4] as Field]; + // checks that path_f is the binary representation of num_bytes assert( path_f[0] + path_f[1] * 2 + path_f[2] * 4 + path_f[3] * 8 + path_f[4] * 16 == num_bytes as Field, diff --git a/src/get_string.nr b/src/get_string.nr index 81cca28..6808de8 100644 --- a/src/get_string.nr +++ b/src/get_string.nr @@ -30,7 +30,8 @@ fn process_escape_sequences(input: BoundedVec) -> BoundedVec< + (1 - is_escape_sequence) * character as Field; written_byte = written_byte * (1 - skip) + cached_byte * skip; - let written_byte_u8 = to_u8(written_byte); + //@Safety: we assert that the casting is done correctly + let written_byte_u8 = unsafe { to_u8(written_byte) }; assert(written_byte_u8 as Field == written_byte); result[result_ptr] = written_byte_u8; @@ -41,7 +42,8 @@ fn process_escape_sequences(input: BoundedVec) -> BoundedVec< } let written_byte: Field = character as Field * (1 - skip) + cached_byte * skip; - let written_byte_u8 = to_u8(written_byte); + //@Safety: we assert that the casting is done correctly + let written_byte_u8 = unsafe { to_u8(written_byte) }; assert(written_byte_u8 as Field == written_byte); result[result_ptr] = written_byte_u8; result_ptr += 1; diff --git a/src/getters.nr b/src/getters.nr index faa1661..519f2c9 100644 --- a/src/getters.nr +++ b/src/getters.nr @@ -52,7 +52,8 @@ impl BoundedVec, MaxNumKeys> { let root_object: JSONEntry = JSONEntry::from(self.json_entries_packed[self.root_index_in_transcript]); - let key_indices: BoundedVec = self.__get_keys_at_root(); + //@Safety: the length of the index is constrained later. 
+ let key_indices: BoundedVec = unsafe { self.__get_keys_at_root() }; assert(key_indices.len as Field == root_object.num_children); diff --git a/src/json.nr b/src/json.nr index d412ede..4827829 100644 --- a/src/json.nr +++ b/src/json.nr @@ -251,7 +251,8 @@ impl Self { - let result = JSONContextStackEntry::__from_field(f); + //@Safety: the set of assertions done below will ensure the result is in the correct domain, and the computed result matches the input f + let result = unsafe { JSONContextStackEntry::__from_field(f) }; result.context.assert_max_bit_size::<8>(); // 1.25 result.num_entries.assert_max_bit_size::<16>(); // 1.5 @@ -119,8 +120,9 @@ impl JSONEntry { // 11.75 gates fn extract_entry_type_id_and_parent_index_from_field(f: Field) -> (Field, Field, Field) { + //@Safety: we check all the outputs are in the correct range and we can construct the input from the outputs of the decomposition let (id, parent_index, mid, entry_type) = - JSONEntry::__extract_entry_type_id_and_parent_index_from_field(f); + unsafe { JSONEntry::__extract_entry_type_id_and_parent_index_from_field(f) }; id.assert_max_bit_size::<8>(); // 1.25 parent_index.assert_max_bit_size::<16>(); // 1.5 entry_type.assert_max_bit_size::<16>(); // 1.5 @@ -136,15 +138,17 @@ impl JSONEntry { (id, parent_index, entry_type) } fn extract_entry_type_and_id_from_field(f: Field) -> (Field, Field) { - let (id, mid, entry_type) = JSONEntry::__extract_entry_type_and_id_from_field(f); + let (id, mid, entry_type) = unsafe { JSONEntry::__extract_entry_type_and_id_from_field(f) }; id.assert_max_bit_size::<8>(); // 1.25 entry_type.assert_max_bit_size::<16>(); // 1.5 mid.assert_max_bit_size::<136>(); // 5.5 assert(id + mid * 0x10000 + entry_type * 0x100000000000000000000000000000000000000 == f); (id, entry_type) } + fn extract_parent_index_from_field(f: Field) -> Field { - let (low, parent_index, hi) = JSONEntry::__extract_parent_index_from_field(f); + //@Safety: we check all the outputs are in the correct range and 
we can construct the input from the outputs of the decomposition + let (low, parent_index, hi) = unsafe { JSONEntry::__extract_parent_index_from_field(f) }; low.assert_max_bit_size::<16>(); // 1.75 hi.assert_max_bit_size::<128>(); // 5.5 @@ -188,7 +192,8 @@ impl JSONEntry { // 4 gates. oof } fn from_field(f: Field) -> Self { - let result = JSONEntry::__from_field(f); + //@Safety: we check all the outputs are in the correct range and match the input field value + let result = unsafe { JSONEntry::__from_field(f) }; result.entry_type.assert_max_bit_size::<8>(); result.json_length.assert_max_bit_size::<16>(); result.json_pointer.assert_max_bit_size::<16>(); @@ -204,7 +209,8 @@ impl JSONEntry { impl std::convert::From for JSONEntry { fn from(JSONEntryPacked { value: f }: JSONEntryPacked) -> Self { - let result = JSONEntry::__from_field(f); + //@Safety: we check all the outputs are in the correct range and match the input field value + let result = unsafe { JSONEntry::__from_field(f) }; result.entry_type.assert_max_bit_size::<8>(); result.json_length.assert_max_bit_size::<16>(); result.json_pointer.assert_max_bit_size::<16>(); diff --git a/src/keymap.nr b/src/keymap.nr index b6aac8e..07cd885 100644 --- a/src/keymap.nr +++ b/src/keymap.nr @@ -35,11 +35,17 @@ impl KeyIndexData { } fn from_field(packed: Field) -> Self { - let result = KeyIndexData::__from_field(packed); + //@Safety: check the comments below + let result = unsafe { KeyIndexData::__from_field(packed) }; + // checks that array_index is in range result.array_index.assert_max_bit_size::<16>(); + // checks that json_length is in range result.json_length.assert_max_bit_size::<16>(); + // checks that json_index is in range result.json_index.assert_max_bit_size::<16>(); + // checks that parent_id is in range result.parent_id.assert_max_bit_size::<16>(); + // checks that the input is a valid combination of the outputs of the decomposition assert(result.to_field() == packed); result } @@ -146,12 +152,15 @@ impl Self { 
// 10 gates - let r = TokenFlags::__from_field(f); + //@Safety: check the comments below + let r = unsafe { TokenFlags::__from_field(f) }; + // checks that the flags are binary assert(r.create_json_entry * r.create_json_entry == r.create_json_entry); assert( r.is_end_of_object_or_array * r.is_end_of_object_or_array @@ -49,6 +51,7 @@ impl TokenFlags { assert(r.is_value_token * r.is_value_token == r.is_value_token); assert(r.preserve_num_entries * r.preserve_num_entries == r.preserve_num_entries); + // asserts the relation of r and f assert(r.to_field() == f); r } diff --git a/src/transcript_entry.nr b/src/transcript_entry.nr index 076cd3f..2c08330 100644 --- a/src/transcript_entry.nr +++ b/src/transcript_entry.nr @@ -33,9 +33,13 @@ impl ValidationFlags { // 3 gates fn from_field(f: Field) -> Self { - let r = ValidationFlags::__from_field(f); + //@Safety: check the comments below + let r = unsafe { ValidationFlags::__from_field(f) }; + // checks pop_layer is a valid boolean assert(r.pop_layer * r.pop_layer == r.pop_layer); + // checks push_layer_type_of_root is a valid boolean assert(r.push_layer_type_of_root * r.push_layer_type_of_root == r.push_layer_type_of_root); + // checks the input field is a valid combination of the outputs of the decomposition assert(r.pop_layer * 0x10000 + r.push_layer_type_of_root * 0x100 + r.push_layer == f); r } @@ -67,7 +71,8 @@ impl RawTranscriptEntry { // 2 gates to get u16s // 5.25 gates total fn from_field(felt: Field) -> Self { - let result = RawTranscriptEntry::__from_field(felt); + //@Safety: check the comments below + let result = unsafe { RawTranscriptEntry::__from_field(felt) }; result.length.assert_max_bit_size::<16>(); result.index.assert_max_bit_size::<16>(); result.encoded_ascii.assert_max_bit_size::<14>(); @@ -83,7 +88,8 @@ impl RawTranscriptEntry { (ascii, remainder) } fn extract_ascii(f: Field) -> (Field, Field) { - let (ascii, remainder) = RawTranscriptEntry::__extract_ascii(f); + //@Safety: ascii length is checked. 
remainder size is checked. the relation to the input is asserted as well. + let (ascii, remainder) = unsafe { RawTranscriptEntry::__extract_ascii(f) }; ascii.assert_max_bit_size::<14>(); remainder.assert_max_bit_size::<32>(); assert(ascii + remainder * 0x10000 == f); @@ -119,14 +125,18 @@ impl ScanData { ScanData { scan_token, push_transcript, increase_length, is_potential_escape_sequence } } fn from_field(f: Field) -> Self { - let result = ScanData::__from_field(f); - + //@Safety: check the comments below + let result = unsafe { ScanData::__from_field(f) }; + // checks increase_length is a valid boolean assert(result.increase_length * result.increase_length == result.increase_length); + // checks push_transcript is a valid boolean assert(result.push_transcript * result.push_transcript == result.push_transcript); + // checks is_potential_escape_sequence is a valid boolean assert( result.is_potential_escape_sequence * result.is_potential_escape_sequence == result.is_potential_escape_sequence, ); + // checks the input field is a valid combination of the outputs of the decomposition assert( result.scan_token + result.push_transcript * 0x100 @@ -172,34 +182,50 @@ impl TranscriptEntry { } // 4 gates fn get_token(f: Field) -> Field { - let (token, remainder) = TranscriptEntry::__get_token(f); + //@Safety: check the comments below + let (token, remainder) = unsafe { TranscriptEntry::__get_token(f) }; + // checks that remainder is in range remainder.assert_max_bit_size::<32>(); + // checks that token is in range token.assert_max_bit_size::<8>(); + // checks that the input is a valid combination of the outputs of the decomposition assert(token + remainder * 0x100 == f); token } // 5.25 gates fn from_field(felt: Field) -> Self { - let result = TranscriptEntry::__from_field(felt); + //@Safety: check the comments below + let result = unsafe { TranscriptEntry::__from_field(felt) }; + // checks that length is in range result.length.assert_max_bit_size::<16>(); + // checks that 
index is in range result.index.assert_max_bit_size::<16>(); + // checks that token is in range result.token.assert_max_bit_size::<8>(); + // checks that the input is a valid combination of the outputs of the decomposition assert(result.token + result.index * 0x100 + result.length * 0x1000000 == felt); result } fn get_token_and_index_length_combined(f: Field) -> (Field, Field) { - let (token, remainder) = TranscriptEntry::__get_token(f); + //@Safety: check the comments below + let (token, remainder) = unsafe { TranscriptEntry::__get_token(f) }; + // checks that remainder is in range remainder.assert_max_bit_size::<32>(); + // checks that token is in range token.assert_max_bit_size::<8>(); + // checks that the input is a valid combination of the outputs of the decomposition assert(token + remainder * 0x100 == f); (token, remainder) } // 5.75 gates fn from_raw(raw_encoded: Field) -> Field { - let (ascii, remainder) = RawTranscriptEntry::__extract_ascii(raw_encoded); + //@Safety: check the comments below + let (ascii, remainder) = unsafe { RawTranscriptEntry::__extract_ascii(raw_encoded) }; + // checks that remainder is in range remainder.assert_max_bit_size::<32>(); + // checks that the input is a valid combination of the outputs of the decomposition assert(ascii + remainder * 0x10000 == raw_encoded); // this lookup enforces an implicit 10 bit range check on ascii let token = ASCII_TO_TOKEN_TABLE[ascii];