forked from tezedge/tezedge
Showing 6 changed files with 28 additions and 81 deletions.
@@ -1,6 +1,7 @@
 // Copyright (c) SimpleStaking, Viable Systems and Tezedge Contributors
+// SPDX-FileCopyrightText: 2023 Nomadic Labs <[email protected]>
-// SPDX-FileCopyrightText: 2023 TriliTech <[email protected]>
+// SPDX-FileCopyrightText: 2023-2024 TriliTech <[email protected]>
 //
 // SPDX-License-Identifier: MIT

 use once_cell::sync::Lazy as SyncLazy;
@@ -127,82 +128,39 @@ fn generate_struct_one_field_nom_read(encoding: &StructEncoding) -> TokenStream

 fn generate_struct_many_fields_nom_read(encoding: &StructEncoding) -> TokenStream {
     let name = encoding.name;
-    let (fields, hash) = encoding
-        .fields
-        .iter()
-        .partition::<Vec<_>, _>(|f| !matches!(f.kind, FieldKind::Hash));
-    let field1 = fields.iter().map(|field| field.name);
+    let field1 = encoding.fields.iter().map(|field| field.name);
     let field2 = field1.clone();
-    let field_name = fields
+    let field_name = encoding
+        .fields
         .iter()
         .map(|field| format!("{}::{}", name, field.name));
     let field_nom_read = encoding.fields.iter().map(generate_struct_field_nom_read);
-    if let Some(hash_field) = hash.first() {
-        let field3 = field1.clone();
-        let hash_name = hash_field.name;
-        quote_spanned! {
-            hash_field.name.span()=>
-            nom::combinator::map(
-                tezos_data_encoding::nom::hashed(
-                    nom::sequence::tuple((
-                        #(tezos_data_encoding::nom::field(#field_name, #field_nom_read)),*
-                    ))
-                ),
-                |((#(#field2),*), #hash_name)| {
-                    #name { #(#field3),*, #hash_name: #hash_name.into() }
-                })
-        }
-    } else {
-        quote_spanned! {
-            encoding.name.span()=>
-            nom::combinator::map(
-                nom::sequence::tuple((
-                    #(tezos_data_encoding::nom::field(#field_name, #field_nom_read)),*
-                )),
-                |(#(#field1),*)| #name { #(#field2),* }
-            )
-        }
+    quote_spanned! {
+        encoding.name.span()=>
+        nom::combinator::map(
+            nom::sequence::tuple((
+                #(tezos_data_encoding::nom::field(#field_name, #field_nom_read)),*
+            )),
+            |(#(#field1),*)| #name { #(#field2),* }
+        )
     }
 }

 fn generate_struct_multi_fields_nom_read(encoding: &StructEncoding) -> TokenStream {
     let name = encoding.name;
-    let (fields, hash) = encoding
-        .fields
-        .iter()
-        .partition::<Vec<_>, _>(|f| !matches!(f.kind, FieldKind::Hash));
-    let field1 = fields.iter().map(|field| field.name);
+    let field1 = encoding.fields.iter().map(|field| field.name);
     let field2 = field1.clone();
-    let field_name = fields
+    let field_name = encoding
+        .fields
         .iter()
         .map(|field| format!("{}::{}", name, field.name));
     let field_nom_read = encoding.fields.iter().map(generate_struct_field_nom_read);
-    if let Some(hash_field) = hash.first() {
-        let field3 = field1.clone();
-        let field4 = field1.clone();
-        let hash_name = hash_field.name;
-        quote_spanned! {
-            hash_field.name.span()=>
-            nom::combinator::map(
-                tezos_data_encoding::nom::hashed(
-                    (|input| {
-                        #(let (input, #field1) = tezos_data_encoding::nom::field(#field_name, #field_nom_read)(input)?;)*
-                        Ok((input, (#(#field2),* )))
-                    })
-                ),
-                |((#(#field3),*), #hash_name)| {
-                    #name { #(#field4),*, #hash_name: #hash_name.into() }
-                }
-            )
-        }
-    } else {
-        quote_spanned! {
-            encoding.name.span()=>
-            (|input| {
-                #(let (input, #field1) = tezos_data_encoding::nom::field(#field_name, #field_nom_read)(input)?;)*
-                Ok((input, #name { #(#field2),* }))
-            })
-        }
+    quote_spanned! {
+        encoding.name.span()=>
+        (|input| {
+            #(let (input, #field1) = tezos_data_encoding::nom::field(#field_name, #field_nom_read)(input)?;)*
+            Ok((input, #name { #(#field2),* }))
+        })
     }
 }

@@ -222,7 +180,6 @@ fn generate_struct_field_nom_read(field: &FieldEncoding) -> TokenStream
             }
         }
         FieldKind::Skip => quote!(|input| Ok((input, Default::default()))),
-        FieldKind::Hash => unreachable!(),
     }
 }
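For context, the simplified code path above always emits a parser of the same shape: a `nom::combinator::map` over a `nom::sequence::tuple` of per-field parsers, with no separate branch for a hash field. The sketch below is not part of the commit; it is a hand-written analogue of that generated shape, where `Point` and the plain `nom` number parsers stand in for the derive's per-field readers.

```rust
// Illustrative analogue of the generated parser (not from this commit).
use nom::combinator::map;
use nom::number::complete::{be_i32, be_u8};
use nom::sequence::tuple;
use nom::IResult;

struct Point {
    tag: u8,
    value: i32,
}

fn point_nom_read(input: &[u8]) -> IResult<&[u8], Point> {
    // The removed branch additionally wrapped the tuple parser in
    // `tezos_data_encoding::nom::hashed` and filled a hash field from the
    // digest of the consumed bytes; after this commit only this plain form
    // is generated.
    map(tuple((be_u8, be_i32)), |(tag, value)| Point { tag, value })(input)
}
```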
@@ -1,4 +1,6 @@
 // Copyright (c) SimpleStaking, Viable Systems and Tezedge Contributors
+// SPDX-FileCopyrightText: 2024 TriliTech <[email protected]>
+//
 // SPDX-License-Identifier: MIT

 use std::{
@@ -42,7 +44,6 @@ pub const COMPOSITE: Symbol = Symbol("composite");

 /// Attribute name used to mark field/variant as ignored.
 pub const SKIP: Symbol = Symbol("skip");
-pub const HASH: Symbol = Symbol("hash");

 /// Attribute used to specify maximal size/lengh.
 pub const MAX: Symbol = Symbol("max");
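These `Symbol` constants name the options the derive recognises inside its `#[encoding(...)]` attribute, so dropping `HASH` means a `hash` field attribute is no longer accepted. A rough, illustrative sketch follows; the struct, field names, and derive import paths are assumptions, not taken from this commit.

```rust
// Illustrative only: `skip` (SKIP above) remains a recognised field
// attribute, while `#[encoding(hash)]` would no longer be understood
// after this commit. Derive paths are assumed.
use tezos_data_encoding::encoding::HasEncoding;
use tezos_data_encoding::nom::NomReader;

#[derive(HasEncoding, NomReader)]
struct Example {
    level: i32,
    #[encoding(skip)]
    cached: u64,
}
```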
@@ -1,5 +1,6 @@
 // Copyright (c) SimpleStaking, Viable Systems, Nomadic Labs and Tezedge Contributors
-// SPDX-CopyrightText: 2022-2023 TriliTech <[email protected]>
+// SPDX-CopyrightText: 2022-2024 TriliTech <[email protected]>
+//
 // SPDX-License-Identifier: MIT

 use bitvec::slice::BitSlice;
@@ -506,7 +507,7 @@ where
     move |input| parser(input).map_err(|e| e.map(|e| e.add_field(name)))
 }

-/// Applies the `parser` to the input, addin enum variant context to the error.
+/// Applies the `parser` to the input, adding enum variant context to the error.
 #[inline(always)]
 pub fn variant<'a, O, F>(
     name: &'static str,
@@ -566,17 +567,6 @@ pub fn n_bignum(mut input: NomInput) -> NomResult<BigUint> {
     Ok((input, BigUint::from_bytes_be(&bitvec.into_vec())))
 }

-pub fn hashed<'a, O, F>(mut parser: F) -> impl FnMut(NomInput<'a>) -> NomResult<'a, (O, Vec<u8>)>
-where
-    F: FnMut(NomInput<'a>) -> NomResult<'a, O>,
-{
-    move |input| {
-        let (rest, result) = parser(input)?;
-        let hash = crypto::blake2b::digest_256(&input[..input.len() - rest.len()]);
-        Ok((rest, (result, hash)))
-    }
-}
-
 #[cfg(test)]
 mod test {
     use num_bigint::BigInt;
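The deleted `hashed` combinator is the runtime half of the removed feature: it ran an inner parser and paired its output with a Blake2b digest of exactly the bytes that parser consumed, computed from the difference between the input slice and the remaining slice. A minimal self-contained sketch of the same technique, with a plain closure standing in for `crypto::blake2b::digest_256`:

```rust
// Illustrative sketch, not part of the crate: pair a parser's output with a
// digest of the bytes it consumed. `digest` is a stand-in for
// `crypto::blake2b::digest_256` used by the deleted code.
use nom::IResult;

fn hashed<'a, O, F, D>(
    mut parser: F,
    digest: D,
) -> impl FnMut(&'a [u8]) -> IResult<&'a [u8], (O, Vec<u8>)>
where
    F: FnMut(&'a [u8]) -> IResult<&'a [u8], O>,
    D: Fn(&[u8]) -> Vec<u8>,
{
    move |input| {
        let (rest, result) = parser(input)?;
        // The consumed prefix is everything up to where `rest` begins.
        let hash = digest(&input[..input.len() - rest.len()]);
        Ok((rest, (result, hash)))
    }
}
```

The same effect can also be obtained with `nom::combinator::consumed`, which yields the consumed slice alongside the inner parser's output.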