Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: add safety blocks and safety comments #31

Merged
merged 6 commits into from
Jan 14, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Nargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,4 @@ authors = [""]
compiler_version = ">=0.37.0"

[dependencies]
noir_sort = {tag = "v0.2.1", git = "https://github.com/noir-lang/noir_sort"}
noir_sort = {tag = "v0.2.2", git = "https://github.com/noir-lang/noir_sort"}
3 changes: 2 additions & 1 deletion src/_comparison_tools/bounds_checker.nr
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,8 @@ in this case, i == M
* cost = 3 gates + 2 gates per iteration
**/
pub fn get_validity_flags<let N: u32>(boundary: u32) -> [Field; N] {
let flags: [Field; N] = __get_validity_flags(boundary);
//@Safety: The constraining is happening inside get_validity_flags_inner
let flags: [Field; N] = unsafe { __get_validity_flags(boundary) };
get_validity_flags_inner(boundary, flags)
}

Expand Down
15 changes: 12 additions & 3 deletions src/_comparison_tools/lt.nr
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,8 @@ pub unconstrained fn get_lte_predicate_large(x: Field, y: Field) -> bool {
}

pub fn lte_field_240_bit(x: Field, y: Field) -> bool {
let predicate = get_lte_predicate_large(x, y);
//@Safety: check the comments below
let predicate = unsafe { get_lte_predicate_large(x, y) };
let delta = y as Field - x as Field;

// (x - y) * predicate
Expand All @@ -23,6 +24,8 @@ pub fn lte_field_240_bit(x: Field, y: Field) -> bool {
// (y - x) * p + (1 - p) * (x - y + 1)
// (y - x) * p + x - y + 1 + p * (y - x)
let lt_parameter = 2 * (predicate as Field) * delta - predicate as Field - delta + 1;
// checks that the bit length of lt_parameter is 240
// i.e. checks the sign of lt_parameter
lt_parameter.assert_max_bit_size::<240>();

predicate
Expand All @@ -40,18 +43,24 @@ pub fn assert_lte_240_bit(x: Field, y: Field) {
}

pub fn lt_field_16_bit(x: Field, y: Field) -> bool {
let predicate = get_lt_predicate_f(x, y);
//@Safety: check the comments below
let predicate = unsafe { get_lt_predicate_f(x, y) };
let delta = y as Field - x as Field;
let lt_parameter = 2 * (predicate as Field) * delta - predicate as Field - delta;
// checks that the bit length of lt_parameter is 16
// i.e. checks the sign of lt_parameter
lt_parameter.assert_max_bit_size::<16>();

predicate
}

pub fn lt_field_8_bit(x: Field, y: Field) -> bool {
let predicate = get_lt_predicate_f(x, y);
//@Safety: check the comments below
let predicate = unsafe { get_lt_predicate_f(x, y) };
let delta = y as Field - x as Field;
let lt_parameter = 2 * (predicate as Field) * delta - predicate as Field - delta;
// checks that the bit length of lt_parameter is 8
// i.e. checks the sign of lt_parameter
lt_parameter.assert_max_bit_size::<8>();

predicate
Expand Down
5 changes: 4 additions & 1 deletion src/_string_tools/slice_field.nr
Original file line number Diff line number Diff line change
Expand Up @@ -64,8 +64,11 @@ unconstrained fn __slice_200_bits_from_field(f: Field) -> (Field, Field, bool) {
}

pub fn slice_200_bits_from_field(f: Field) -> Field {
let (lo, hi, borrow) = __slice_200_bits_from_field(f);
//@Safety: check the comments below
let (lo, hi, borrow) = unsafe { __slice_200_bits_from_field(f) };
// checks that lo and hi are the correct slices of f
assert(hi * TWO_POW_200 + lo == f);
// checks that lo and hi are the correct bit sizes
lo.assert_max_bit_size::<200>();
hi.assert_max_bit_size::<56>();
let lo_diff = PLO_200_felt - lo + (borrow as Field * TWO_POW_200);
Expand Down
24 changes: 17 additions & 7 deletions src/_string_tools/slice_packed_field.nr
Original file line number Diff line number Diff line change
Expand Up @@ -607,7 +607,8 @@ unconstrained fn __divmod(numerator: Field, denominator: Field) -> (Field, Field
* we know the quotient will fit into a 14 bit range check which will save us some fractional gates
**/
fn divmod_31(numerator: Field) -> (Field, Field) {
let (quotient, remainder) = __divmod(numerator, 31);
//@Safety: we check the bit lengths of qf and rf and their relation to the numerator with assertions later
let (quotient, remainder) = unsafe { __divmod(numerator, 31) };

let qf = quotient as Field;
let rf = remainder as Field;
Expand Down Expand Up @@ -641,14 +642,17 @@ unconstrained fn decompose(val: Field) -> [Field; 16] {
pub fn get_last_limb_path<let OutputFields: u32>(last_limb_index: Field) -> [Field; OutputFields] {
// TODO we offset by 1 explain why (0 byte length produces 0 - 1 which = invalid array index. we just add 1 and increase array length by 1 to compensate)
let path = LAST_LIMB_PATH[last_limb_index + 1]; // 2
let path_valid_bits = decompose(path);
//@Safety: check the comments below
let path_valid_bits = unsafe { decompose(path) };
let mut path_valid_sum: Field = 0;
let mut path_valid_output: [Field; OutputFields] = [0; OutputFields];
for i in 0..OutputFields {
// we check that the path valid bits are binary
assert(path_valid_bits[i] * path_valid_bits[i] - path_valid_bits[i] == 0);
path_valid_sum += (path_valid_bits[i] * (1 << i as u8) as Field);
path_valid_output[i] = path_valid_bits[i];
}
// we check that the path valid bits sum to the path
assert(path_valid_sum == path);
path_valid_output
}
Expand All @@ -659,7 +663,8 @@ pub fn get_last_limb_path<let OutputFields: u32>(last_limb_index: Field) -> [Fie
* @details cost 46 gates
**/
pub fn slice_field(f: Field, num_bytes: Field) -> (Field, Field) {
let chunks = __slice_field(f, num_bytes);
//@Safety: we check the bit lengths of the chunks with assertions later
let chunks = unsafe { __slice_field(f, num_bytes) };
chunks[0].assert_max_bit_size::<8>(); // 1.25 gates
chunks[1].assert_max_bit_size::<16>(); // 1.5 gates
chunks[2].assert_max_bit_size::<32>(); // 1.75 gates
Expand Down Expand Up @@ -863,7 +868,9 @@ mod test {
// let start_byte = 26;
let num_bytes = 0;
let start_byte: u32 = 0;
let mut expected_slices: [Field; 3] = build_slices_for_test(text, start_byte, num_bytes);
//@Safety: this is a test
let mut expected_slices: [Field; 3] =
unsafe { build_slices_for_test(text, start_byte, num_bytes) };
let result_slices: [Field; 3] =
slice_fields(slices, start_byte as Field, num_bytes as Field);
assert(result_slices == expected_slices);
Expand Down Expand Up @@ -895,16 +902,19 @@ mod test {
for j in 0..18 {
let start_byte: u32 = byte_positions[j];
let mut expected_slices: [Field; 3] =
build_slices_for_test(text, start_byte, num_bytes);
//@Safety: this is a test
unsafe { build_slices_for_test(text, start_byte, num_bytes) };
let result_slices: [Field; 3] =
slice_fields(slices, start_byte as Field, num_bytes as Field);
//@Safety: this is a test
unsafe { slice_fields(slices, start_byte as Field, num_bytes as Field) };
assert(result_slices == expected_slices);
}

for j in 0..18 {
let start_byte: u32 = text.len() - num_bytes - byte_positions[j];
let mut expected_slices: [Field; 3] =
build_slices_for_test(text, start_byte, num_bytes);
//@Safety: this is a test
unsafe { build_slices_for_test(text, start_byte, num_bytes) };
let result_slices: [Field; 3] =
slice_fields(slices, start_byte as Field, num_bytes as Field);
assert(result_slices == expected_slices);
Expand Down
4 changes: 3 additions & 1 deletion src/_string_tools/sum_bytes_into_field.nr
Original file line number Diff line number Diff line change
Expand Up @@ -185,10 +185,12 @@ fn sum_var_bytes_into_field<let N: u32>(
body_index: Field,
num_bytes: Field,
) -> Field {
let path = get_path(num_bytes); // 5 gates
//@Safety: check the comments below
let path = unsafe { get_path(num_bytes) }; // 5 gates
let path_f: [Field; 5] =
[path[0] as Field, path[1] as Field, path[2] as Field, path[3] as Field, path[4] as Field];

// checks that path_f is the binary representation of num_bytes
assert(
path_f[0] + path_f[1] * 2 + path_f[2] * 4 + path_f[3] * 8 + path_f[4] * 16
== num_bytes as Field,
Expand Down
6 changes: 4 additions & 2 deletions src/get_string.nr
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,8 @@ fn process_escape_sequences<let N: u32>(input: BoundedVec<u8, N>) -> BoundedVec<
+ (1 - is_escape_sequence) * character as Field;

written_byte = written_byte * (1 - skip) + cached_byte * skip;
let written_byte_u8 = to_u8(written_byte);
//@Safety: we assert that the casting is done correctly
let written_byte_u8 = unsafe { to_u8(written_byte) };
assert(written_byte_u8 as Field == written_byte);

result[result_ptr] = written_byte_u8;
Expand All @@ -41,7 +42,8 @@ fn process_escape_sequences<let N: u32>(input: BoundedVec<u8, N>) -> BoundedVec<
}

let written_byte: Field = character as Field * (1 - skip) + cached_byte * skip;
let written_byte_u8 = to_u8(written_byte);
//@Safety: we assert that the casting is done correctly
let written_byte_u8 = unsafe { to_u8(written_byte) };
assert(written_byte_u8 as Field == written_byte);
result[result_ptr] = written_byte_u8;
result_ptr += 1;
Expand Down
21 changes: 14 additions & 7 deletions src/getters.nr
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,8 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max

let keyhash = keyhash + self.root_id * two_pow_216;

let key_index = self.find_key_in_map(keyhash);
//@Safety: The assertion below checks that the keyhash is stored in the index returned by the unconstrained function
let key_index = unsafe { self.find_key_in_map(keyhash) };

assert(self.key_hashes[key_index] == keyhash, "get_json_entry_unchecked: key not found");
let entry: JSONEntry = self.json_entries_packed[key_index].into();
Expand Down Expand Up @@ -91,7 +92,8 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max

let keyhash = keyhash + self.root_id * two_pow_216;

let key_index = self.find_key_in_map(keyhash);
//@Safety: The assertion below checks that the keyhash is stored in the index returned by the unconstrained function
let key_index = unsafe { self.find_key_in_map(keyhash) };

assert(self.key_hashes[key_index] == keyhash, "get_json_entry_unchecked: key not found");
let entry: JSONEntry = self.json_entries_packed[key_index].into();
Expand All @@ -114,7 +116,8 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max

let keyhash = keyhash + self.root_id * two_pow_216;

let key_index = self.find_key_in_map(keyhash);
//@Safety: The assertion below checks that the keyhash is stored in the index returned by the unconstrained function
let key_index = unsafe { self.find_key_in_map(keyhash) };

assert(self.key_hashes[key_index] == keyhash, "get_json_entry_unchecked: key not found");
let entry: JSONEntry = self.json_entries_packed[key_index].into();
Expand All @@ -136,7 +139,8 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max

let keyhash = keyhash + self.root_id * two_pow_216;

let key_index = self.find_key_in_map(keyhash);
//@Safety: The assertion below checks that the keyhash is stored in the index returned by the unconstrained function
let key_index = unsafe { self.find_key_in_map(keyhash) };

assert(self.key_hashes[key_index] == keyhash, "get_json_entry_unchecked: key not found");
let entry: JSONEntry = self.json_entries_packed[key_index].into();
Expand Down Expand Up @@ -319,7 +323,8 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max

let keyhash = keyhash + self.root_id * two_pow_216;

let search_result = self.search_for_key_in_map(keyhash);
//@Safety: The assertion below checks that the keyhash is stored in the index returned by the unconstrained function
let search_result = unsafe { self.search_for_key_in_map(keyhash) };
let found = search_result.found as Field;

let target_lt_smallest_entry = search_result.target_lt_smallest_entry as Field;
Expand Down Expand Up @@ -378,7 +383,8 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max

let keyhash = keyhash + self.root_id * two_pow_216;

let search_result = self.search_for_key_in_map(keyhash);
//@Safety: the assertion (search_result.lhs_index - search_result.rhs_index) * found == 0 constrains this function
let search_result = unsafe { self.search_for_key_in_map(keyhash) };
let found = search_result.found as Field;

let target_lt_smallest_entry = search_result.target_lt_smallest_entry as Field;
Expand Down Expand Up @@ -437,7 +443,8 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max
) -> BoundedVec<BoundedVec<u8, MaxKeyBytes>, MaxNumKeys> {
let root_object: JSONEntry =
JSONEntry::from(self.json_entries_packed[self.root_index_in_transcript]);
let key_indices: BoundedVec<Field, MaxNumKeys> = self.__get_keys_at_root();
//@Safety: the length of the index is constrained later.
let key_indices: BoundedVec<Field, MaxNumKeys> = unsafe { self.__get_keys_at_root() };

assert(key_indices.len as Field == root_object.num_children);

Expand Down
9 changes: 6 additions & 3 deletions src/json.nr
Original file line number Diff line number Diff line change
Expand Up @@ -251,7 +251,8 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max
let mut tokens: [Field; MaxNumTokens] = [0; MaxNumTokens];
// maybe 71.75 gates per iteration
for i in 0..MaxNumTokens {
__check_entry_ptr_bounds(entry_ptr, MaxNumValues);
//@Safety: check the comments below
unsafe { __check_entry_ptr_bounds(entry_ptr, MaxNumValues) };
// 5.25 gates
let TranscriptEntry { token, index, length } =
TranscriptEntry::from_field(self.transcript[i]);
Expand Down Expand Up @@ -462,7 +463,8 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max
let mut scan_mode = GRAMMAR_SCAN;
let mut length: Field = 0;

let raw_transcript = self.__build_transcript();
//@Safety: check the comments below
let raw_transcript = unsafe { self.__build_transcript() };

// 14 gates per iteration, plus fixed cost for initing 2,048 size lookup table (4,096 gates)
let mut previous_was_potential_escape_sequence = 0;
Expand Down Expand Up @@ -570,7 +572,8 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max
fn capture_missing_tokens(&mut self) {
let mut transcript_ptr: Field = 0;
// hmm probably need a null transcript value?!?!
let updated_transcript = self.__capture_missing_tokens();
//@Safety: check the comments below
let updated_transcript = unsafe { self.__capture_missing_tokens() };
// 26? gates per iteration
let range_valid: [Field; MaxNumTokens] = get_validity_flags(self.transcript_length);
for i in 0..MaxNumTokens {
Expand Down
18 changes: 12 additions & 6 deletions src/json_entry.nr
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,8 @@ impl JSONContextStackEntry {
* @note method cost = 9.5 gates
**/
fn from_field(f: Field) -> Self {
let result = JSONContextStackEntry::__from_field(f);
//@Safety: the set of assertions done below will ensure the result is in the correct domain, and the computed result matches the input f
let result = unsafe { JSONContextStackEntry::__from_field(f) };

result.context.assert_max_bit_size::<8>(); // 1.25
result.num_entries.assert_max_bit_size::<16>(); // 1.5
Expand Down Expand Up @@ -119,8 +120,9 @@ impl JSONEntry {

// 11.75 gates
fn extract_entry_type_id_and_parent_index_from_field(f: Field) -> (Field, Field, Field) {
//@Safety: we check all the outputs are in the correct range and we can construct the input from the outputs of the decomposition
let (id, parent_index, mid, entry_type) =
JSONEntry::__extract_entry_type_id_and_parent_index_from_field(f);
unsafe { JSONEntry::__extract_entry_type_id_and_parent_index_from_field(f) };
id.assert_max_bit_size::<8>(); // 1.25
parent_index.assert_max_bit_size::<16>(); // 1.5
entry_type.assert_max_bit_size::<16>(); // 1.5
Expand All @@ -136,15 +138,17 @@ impl JSONEntry {
(id, parent_index, entry_type)
}
fn extract_entry_type_and_id_from_field(f: Field) -> (Field, Field) {
let (id, mid, entry_type) = JSONEntry::__extract_entry_type_and_id_from_field(f);
let (id, mid, entry_type) = unsafe { JSONEntry::__extract_entry_type_and_id_from_field(f) };
id.assert_max_bit_size::<8>(); // 1.25
entry_type.assert_max_bit_size::<16>(); // 1.5
mid.assert_max_bit_size::<136>(); // 5.5
assert(id + mid * 0x10000 + entry_type * 0x100000000000000000000000000000000000000 == f);
(id, entry_type)
}

fn extract_parent_index_from_field(f: Field) -> Field {
let (low, parent_index, hi) = JSONEntry::__extract_parent_index_from_field(f);
//@Safety: we check all the outputs are in the correct range and we can construct the input from the outputs of the decomposition
let (low, parent_index, hi) = unsafe { JSONEntry::__extract_parent_index_from_field(f) };

low.assert_max_bit_size::<16>(); // 1.75
hi.assert_max_bit_size::<128>(); // 5.5
Expand Down Expand Up @@ -188,7 +192,8 @@ impl JSONEntry {
// 4 gates. oof
}
fn from_field(f: Field) -> Self {
let result = JSONEntry::__from_field(f);
//@Safety: we check all the outputs are in the correct range and match the input field value
let result = unsafe { JSONEntry::__from_field(f) };
result.entry_type.assert_max_bit_size::<8>();
result.json_length.assert_max_bit_size::<16>();
result.json_pointer.assert_max_bit_size::<16>();
Expand All @@ -204,7 +209,8 @@ impl JSONEntry {

impl std::convert::From<JSONEntryPacked> for JSONEntry {
fn from(JSONEntryPacked { value: f }: JSONEntryPacked) -> Self {
let result = JSONEntry::__from_field(f);
//@Safety: we check all the outputs are in the correct range and match the input field value
let result = unsafe { JSONEntry::__from_field(f) };
result.entry_type.assert_max_bit_size::<8>();
result.json_length.assert_max_bit_size::<16>();
result.json_pointer.assert_max_bit_size::<16>();
Expand Down
13 changes: 11 additions & 2 deletions src/keymap.nr
Original file line number Diff line number Diff line change
Expand Up @@ -35,11 +35,17 @@ impl KeyIndexData {
}

fn from_field(packed: Field) -> Self {
let result = KeyIndexData::__from_field(packed);
//@Safety: check the comments below
let result = unsafe { KeyIndexData::__from_field(packed) };
// checks that array_index is in range
result.array_index.assert_max_bit_size::<16>();
// checks that json_length is in range
result.json_length.assert_max_bit_size::<16>();
// checks that json_index is in range
result.json_index.assert_max_bit_size::<16>();
// checks that parent_id is in range
result.parent_id.assert_max_bit_size::<16>();
// checks that the input is a valid combination of the outputs of the decomposition
assert(result.to_field() == packed);
result
}
Expand Down Expand Up @@ -146,12 +152,15 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max
}

fn set_root_entry(&mut self) {
let root_index = self.__find_root_entry();
//@Safety: check the comments below
let root_index = unsafe { self.__find_root_entry() };

let packed_entry = self.json_entries_packed[root_index];
let entry: JSONEntry = packed_entry.into();

// checks that the entry is not empty
assert(packed_entry.value != 0);
// checks that the parent index is 0
assert(entry.parent_index == 0);
self.root_index_in_transcript = root_index;
}
Expand Down
5 changes: 4 additions & 1 deletion src/token_flags.nr
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,10 @@ impl TokenFlags {

fn from_field(f: Field) -> Self {
// 10 gates
let r = TokenFlags::__from_field(f);
//@Safety: check the comments below
let r = unsafe { TokenFlags::__from_field(f) };

// checks that the flags are binary
assert(r.create_json_entry * r.create_json_entry == r.create_json_entry);
assert(
r.is_end_of_object_or_array * r.is_end_of_object_or_array
Expand All @@ -49,6 +51,7 @@ impl TokenFlags {
assert(r.is_value_token * r.is_value_token == r.is_value_token);
assert(r.preserve_num_entries * r.preserve_num_entries == r.preserve_num_entries);

// asserts the relation of r and f
assert(r.to_field() == f);
r
}
Expand Down
Loading
Loading