Metadata json (#278)
* Move over metadata JSON from old CML

* Cardano Node Metadata JSON

Ports over the cardano-node format for tx metadata JSON (see the sketch below)

* metadata json tests + more metadatum map functionality
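
A rough sketch of how the ported conversion might be driven. The function names (`encode_json_str_to_metadatum`, `decode_metadatum_to_json_str`, `MetadataJsonSchema`) follow old CML; the module path and exact signatures below are assumptions, not something shown in this commit:

```rust
// Hypothetical usage sketch: the module path, function names, and signatures
// are assumed from old CML and may differ in this crate.
use cml_chain::json::metadatums::{
    decode_metadatum_to_json_str, encode_json_str_to_metadatum, MetadataJsonSchema,
};

fn main() {
    // cardano-node "no schema" style: JSON objects/arrays/strings/ints map
    // directly onto metadatum maps/lists/text (<= 64 bytes)/ints.
    let json = r#"{"name": "token-1", "tags": ["nft", 42]}"#;

    // JSON -> TransactionMetadatum
    let metadatum = encode_json_str_to_metadatum(json, MetadataJsonSchema::NoConversions)
        .expect("valid metadata JSON");

    // TransactionMetadatum -> JSON round trip
    let back = decode_metadatum_to_json_str(&metadatum, MetadataJsonSchema::NoConversions)
        .expect("metadatum representable without a schema");
    println!("{}", back);
}
```
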
rooooooooob authored Dec 5, 2023
1 parent 98c9f14 commit ec7cc48
Showing 7 changed files with 587 additions and 7 deletions.
chain/rust/src/json/json_serialize.rs (12 changes: 6 additions & 6 deletions)
```diff
@@ -78,7 +78,7 @@ impl<'de> Deserialize<'de> for Value {
         D: Deserializer<'de>,
     {
         let s = <String as serde::de::Deserialize>::deserialize(deserializer)?;
-        Value::from_string(s).map_err(|err| serde::de::Error::custom(format!("{:?}", err)))
+        Value::from_string(&s).map_err(|err| serde::de::Error::custom(format!("{:?}", err)))
     }
 }
 
@@ -122,7 +122,7 @@ impl Display for JsonParseError {
 
 impl std::error::Error for JsonParseError {}
 
-fn tokenize_string(string: String) -> Vec<JsonToken> {
+fn tokenize_string(string: &str) -> Vec<JsonToken> {
     fn are_we_inside_string(tokens: &Vec<JsonToken>) -> bool {
         if tokens.is_empty() {
             return false;
@@ -533,7 +533,7 @@ impl Value {
         }
     }
 
-    pub fn from_string(from: String) -> Result<Self, JsonParseError> {
+    pub fn from_string(from: &str) -> Result<Self, JsonParseError> {
         let tokens = tokenize_string(from);
         parse_json(tokens)
     }
@@ -987,7 +987,7 @@ mod tests {
 
     fn run_cases(cases: Vec<(String, Vec<JsonToken>, Value)>) {
         for (case, correct_tokens, correct) in cases {
-            let computed_tokens = tokenize_string(case.clone());
+            let computed_tokens = tokenize_string(&case);
             assert_eq!(
                 computed_tokens,
                 correct_tokens,
@@ -1217,7 +1217,7 @@ mod tests {
             \\\"",
         ];
         for case in cases.into_iter() {
-            let computed_tokens = tokenize_string(case.to_string());
+            let computed_tokens = tokenize_string(case);
             let parsed = parse_json(computed_tokens.clone());
             assert!(
                 parsed.is_err(),
@@ -1411,7 +1411,7 @@ mod tests {
         ];
 
         for (case, correct) in cases {
-            let computed_tokens = tokenize_string(case.to_string());
+            let computed_tokens = tokenize_string(case);
             let parsed = parse_json(computed_tokens);
             assert!(
                 parsed.is_ok(),
```
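
The change to this file is a borrowing cleanup: `tokenize_string` and `Value::from_string` now take `&str`, so callers no longer hand over an owned `String` just to parse it. A minimal sketch of the new signature in use, assuming `Value` is reachable at `cml_chain::json::json_serialize` (the path is inferred from the file location, not confirmed by this diff):

```rust
// Sketch of calling the new borrowed-string API; the module path below is an
// assumption based on the file location (chain/rust/src/json/json_serialize.rs).
use cml_chain::json::json_serialize::Value;

fn parse_examples() {
    // A string literal works directly now that from_string takes &str.
    let from_literal = Value::from_string(r#"{"key": [1, "two", {"nested": 3}]}"#);
    assert!(from_literal.is_ok());

    // An owned String only needs to be borrowed, not moved or cloned.
    let owned = String::from("[1, 2, 3]");
    let from_owned = Value::from_string(&owned);
    assert!(from_owned.is_ok());
    // `owned` is still usable here because from_string no longer takes ownership.
    println!("parsed {} bytes of JSON", owned.len());
}
```

The test hunks above show the payoff: the `case.clone()` and `case.to_string()` calls disappear once the tokenizer borrows its input.
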
