diff --git a/core/src/schema/scenario.rs b/core/src/schema/scenario.rs
index efb87a53..40e5a2ea 100644
--- a/core/src/schema/scenario.rs
+++ b/core/src/schema/scenario.rs
@@ -1,14 +1,29 @@
 use anyhow::{Context, Result};
-use std::path::PathBuf;
+use serde::Deserialize;
+use std::{collections::BTreeMap, path::PathBuf};
 
-use crate::Namespace;
+use crate::{Content, Namespace};
 
 pub struct Scenario {
     namespace: Namespace,
-    scenario: Namespace,
+    scenario: ScenarioNamespace,
     name: String,
 }
 
+#[derive(Deserialize)]
+#[cfg_attr(test, derive(Debug, PartialEq))]
+struct ScenarioNamespace {
+    #[serde(flatten)]
+    collections: BTreeMap<String, ScenarioCollection>,
+}
+
+#[derive(Deserialize)]
+#[cfg_attr(test, derive(Debug, PartialEq))]
+struct ScenarioCollection {
+    #[serde(flatten)]
+    fields: BTreeMap<String, serde_json::Value>,
+}
+
 impl Scenario {
     pub fn new(namespace: Namespace, namespace_path: PathBuf, scenario: &str) -> Result<Self> {
         let scenario_path = namespace_path
@@ -41,6 +56,7 @@ impl Scenario {
         self.has_extra_collections()
             .context(anyhow!("failed to build scenario '{}'", self.name))?;
         self.trim_namespace_collections();
+        self.trim_fields()?;
 
         Ok(self.namespace)
     }
@@ -50,8 +66,9 @@
 
         let extra_collections: Vec<_> = self
             .scenario
+            .collections
            .keys()
-            .filter(|c| !collections.contains(c))
+            .filter(|c| !collections.contains(&c.as_str()))
             .collect();
 
         if !extra_collections.is_empty() {
@@ -71,14 +88,14 @@
     }
 
     fn trim_namespace_collections(&mut self) {
-        let scenario_collections: Vec<_> = self.scenario.keys().collect();
+        let scenario_collections: Vec<_> = self.scenario.collections.keys().collect();
 
         let trim_collections: Vec<_> = self
             .namespace
             .keys()
             .map(ToOwned::to_owned)
             .into_iter()
-            .filter(|c| !scenario_collections.contains(&c.as_str()))
+            .filter(|c| !scenario_collections.contains(&c))
             .collect();
 
         for trim_collection in trim_collections {
@@ -86,6 +103,58 @@
             self.namespace.remove_collection(&trim_collection);
         }
     }
+
+    fn trim_fields(&mut self) -> Result<()> {
+        for (name, collection) in self.scenario.collections.iter() {
+            // Nothing to trim
+            if collection.fields.is_empty() {
+                continue;
+            }
+
+            let namespace_collection = self.namespace.get_collection_mut(name)?;
+
+            Self::trim_collection_fields(namespace_collection, &collection.fields)
+                .context(anyhow!("failed to trim collection '{}'", name))?;
+        }
+
+        Ok(())
+    }
+
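+    /// Keeps only the fields listed in `fields` on an object collection,
+    /// recursing through arrays; errors if a listed field does not exist on
+    /// the object or if the collection is not an object.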
+    fn trim_collection_fields(
+        collection: &mut Content,
+        fields: &BTreeMap<String, serde_json::Value>,
+    ) -> Result<()> {
+        match collection {
+            Content::Object(map) => {
+                let map_keys: Vec<_> = map.fields.keys().collect();
+
+                for field in fields.keys() {
+                    if !map_keys.contains(&field) {
+                        return Err(anyhow!(
+                            "'{}' is not a field on the object, therefore it cannot be included",
+                            field
+                        ));
+                    }
+                }
+                let trim_fields: Vec<_> = map_keys
+                    .into_iter()
+                    .filter(|c| !fields.contains_key(c.as_str()))
+                    .map(ToOwned::to_owned)
+                    .collect();
+
+                for trim_field in trim_fields {
+                    debug!("removing field '{}'", trim_field);
+                    map.fields.remove(trim_field.as_str());
+                }
+            }
+            Content::Array(arr) => {
+                Self::trim_collection_fields(&mut arr.content, fields)?;
+            }
+            _ => return Err(anyhow!("cannot select fields to include from a non-object")),
+        };
+
+        Ok(())
+    }
 }
 
 #[cfg(test)]
@@ -101,12 +170,21 @@ mod tests {
 
     use super::Scenario;
 
-    macro_rules! scenario {
+    macro_rules! namespace {
         {
             $($inner:tt)*
         } => {
             serde_json::from_value::<Namespace>(serde_json::json!($($inner)*))
-                .expect("could not deserialize scenario into a namespace")
+                .expect("could not deserialize into a namespace")
+        }
+    }
+
+    macro_rules! scenario {
+        {
+            $($inner:tt)*
+        } => {
+            serde_json::from_value::<ScenarioNamespace>(serde_json::json!($($inner)*))
+                .expect("could not deserialize into a scenario namespace")
         }
     }
 
@@ -154,14 +232,109 @@ mod tests {
     #[test]
     fn build_filter_collections() {
         let scenario = Scenario {
-            namespace: scenario!({"collection1": {}, "collection2": {}}),
+            namespace: namespace!({"collection1": {}, "collection2": {}}),
             scenario: scenario!({"collection1": {}}),
             name: "test".to_string(),
         };
 
         let actual = scenario.build().unwrap();
-        let expected = scenario!({"collection1": {}});
+        let expected = namespace!({"collection1": {}});
+
+        assert_eq!(actual, expected);
+    }
+
+    #[test]
+    fn build_filter_fields() {
+        let scenario = Scenario {
+            namespace: namespace!({
+                "collection1": {
+                    "type": "object",
+                    "nully": {"type": "null"},
+                    "stringy": {"type": "string", "pattern": "test"}
+                },
+                "collection2": {}
+            }),
+            scenario: scenario!({"collection1": {"nully": {}}}),
+            name: "test".to_string(),
+        };
+
+        let actual = scenario.build().unwrap();
+        let expected = namespace!({
+            "collection1": {
+                "type": "object",
+                "nully": {"type": "null"}
+            }
+        });
 
         assert_eq!(actual, expected);
     }
+
+    #[test]
+    fn build_filter_fields_array() {
+        let scenario = Scenario {
+            namespace: namespace!({
+                "collection1": {
+                    "type": "array",
+                    "length": 5,
+                    "content": {
+                        "type": "object",
+                        "nully": {"type": "null"},
+                        "stringy": {"type": "string", "pattern": "test"}
+                    }
+                },
+                "collection2": {}
+            }),
+            scenario: scenario!({"collection1": {"nully": {}}}),
+            name: "test".to_string(),
+        };
+
+        let actual = scenario.build().unwrap();
+        let expected = namespace!({
+            "collection1": {
+                "type": "array",
+                "length": 5,
+                "content": {
+                    "type": "object",
+                    "nully": {"type": "null"},
+                }
+            }
+        });
+
+        assert_eq!(actual, expected);
+    }
+
+    #[test]
+    #[should_panic(expected = "'null' is not a field on the object")]
+    fn build_filter_extra_field() {
+        let scenario = Scenario {
+            namespace: namespace!({
+                "collection1": {
+                    "type": "object",
+                    "nully": {"type": "null"},
+                    "stringy": {"type": "string", "pattern": "test"}
+                },
+                "collection2": {}
+            }),
+            scenario: scenario!({"collection1": {"null": {}}}),
+            name: "test".to_string(),
+        };
+
+        scenario.build().unwrap();
+    }
+
+    #[test]
+    #[should_panic(expected = "cannot select fields to include from a non-object")]
+    fn build_filter_field_scalar() {
+        let scenario = Scenario {
+            namespace: namespace!({
+                "collection1": {
+                    "type": "null"
+                },
+            }),
+            scenario: scenario!({"collection1": {"nully": {}}}),
+            name: "test".to_string(),
+        };
+
+        scenario.build().unwrap();
+    }
 }
diff --git a/docs/docs/getting_started/core-concepts.md b/docs/docs/getting_started/core-concepts.md
index ed80689f..d16063f9 100644
--- a/docs/docs/getting_started/core-concepts.md
+++ b/docs/docs/getting_started/core-concepts.md
@@ -128,32 +128,38 @@ is a little involved so there is a section devoted to just the
 [schema][schema].
 
 ## Scenarios
 
 Since [collections](#collections) correspond to closely to a database
 collection, we will have numerous use cases which only uses a subset of the
-collections in a namespace. This is were we will use scenarios.
+collections in a namespace or even only a subset of the fields in the
+collections. This is where we will use scenarios.
 
 Scenarios allow us to define a specific use case for the data in a namespace.
 So expanding from our `bank` example, we can create a scenario which only
-generates data for users by having the following directory structure:
+generates data for a user's `search-by-name` feature by having the following
+directory structure:
 
 ```
 └── bank/
     ├── scenarios
-    │   └── users-only.json
+    │   └── search-by-name.json
     ├── transactions.json
     └── users.json
 ```
 
-This creates a scenario called `users-only` by having a `[scenario-name].json`
-inside the `scenarios/` directory inside our [namespace](#namespaces).
-The definition for this scenario will look as follow:
+This creates a scenario called `search-by-name` by having a
+`[scenario-name].json` inside the `scenarios/` directory inside our
+[namespace](#namespaces). The definition for this scenario will look as
+follows:
 
-```json synth-scenario[users-only.json]
+```json synth-scenario[search-by-name.json]
 {
-  "users": {}
+  "users": {
+    "username": {},
+    "id": {}
+  }
 }
 ```
 
-This definition explicitly marks the `users` collection for inclusion inside
-this scenario.
+This definition explicitly marks the `username` and `id` fields from the
+`users` collection for inclusion inside this scenario.
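+
+As before, leaving a collection as an empty object keeps all of its fields;
+for instance, a scenario that includes the whole `users` collection unchanged
+(say, a hypothetical `users-all-fields.json`) would simply be:
+
+```json
+{
+  "users": {}
+}
+```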
 
 ## Importing datasets
diff --git a/examples/bank/bank_db/scenarios/search-by-email.json b/examples/bank/bank_db/scenarios/search-by-email.json
new file mode 100644
index 00000000..508778fb
--- /dev/null
+++ b/examples/bank/bank_db/scenarios/search-by-email.json
@@ -0,0 +1,6 @@
+{
+  "users": {
+    "email": {},
+    "id": {}
+  }
+}
diff --git a/synth/tests/examples.rs b/synth/tests/examples.rs
index 65fb7f08..98c33915 100644
--- a/synth/tests/examples.rs
+++ b/synth/tests/examples.rs
@@ -1,5 +1,5 @@
 use anyhow::Result;
-use paste::paste;
+use test_macros::{file_stem, parent, parent2, tmpl_ignore};
 
 // Skipping fmt is needed until this fix is released
 // https://github.com/rust-lang/rustfmt/pull/5142
@@ -21,7 +21,7 @@
                 ))
                 .await?;
 
-                let expected = include_str!(concat!("examples/", stringify!($name), "/output.json"))
+                let expected = include_str!(concat!("examples/", stringify!($name), "/", stringify!($ns), "/output.json"))
                     .replace("\r\n", "\n");
 
                 assert_eq!(actual, expected);
@@ -38,30 +38,29 @@ test_examples!(
     random_variants / random,
 );
 
-macro_rules! test_scenarios {
-    ($($name:ident / $ns:ident,)*) => {
-        $(
-            paste!{
-                #[async_std::test]
-                async fn [<$name _scenario>]() -> Result<()> {
-                    let actual = generate_scenario(concat!(
-                        "../examples/",
-                        stringify!($name),
-                        "/",
-                        stringify!($ns)
-                    ), Some("users-only".to_string()))
-                    .await?;
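+// `tmpl_ignore` (from the local `test_macros` crate) expands this item once
+// per matching file under the given directory, substituting the
+// `PATH`/`PATH_IDENT` placeholders for each file.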
+#[tmpl_ignore(
+    "examples/bank/bank_db/scenarios",
+    exclude_dir = true,
+    filter_extension = "json"
+)]
+#[async_std::test]
+async fn PATH_IDENT() -> Result<()> {
+    let actual = generate_scenario(
+        concat!("../", parent2!(PATH)),
+        Some(file_stem!(PATH).to_string()),
+    )
+    .await;
 
-                    let expected = include_str!(concat!("examples/", stringify!($name), "/scenarios/users-only.json"))
-                        .replace("\r\n", "\n");
+    assert!(
+        actual.is_ok(),
+        "did not expect error: {}",
+        actual.unwrap_err()
+    );
 
-                    assert_eq!(actual, expected);
+    let expected =
+        include_str!(concat!(parent!(PATH), "/", file_stem!(PATH), ".json")).replace("\r\n", "\n");
 
-                    Ok(())
-                }
-            }
-        )*
-    };
-}
+    assert_eq!(actual.unwrap(), expected);
 
-test_scenarios!(bank / bank_db,);
+    Ok(())
+}
diff --git a/synth/tests/examples/bank/output.json b/synth/tests/examples/bank/bank_db/output.json
similarity index 100%
rename from synth/tests/examples/bank/output.json
rename to synth/tests/examples/bank/bank_db/output.json
diff --git a/synth/tests/examples/bank/bank_db/scenarios/search-by-email.json b/synth/tests/examples/bank/bank_db/scenarios/search-by-email.json
new file mode 100644
index 00000000..1635d06b
--- /dev/null
+++ b/synth/tests/examples/bank/bank_db/scenarios/search-by-email.json
@@ -0,0 +1 @@
+{"users":[{"email":"eveline@example.com","id":1},{"email":"darlene@example.com","id":2},{"email":"oswaldo@example.net","id":3},{"email":"anissa@example.net","id":4},{"email":"stefanie@example.org","id":5},{"email":"adam@example.com","id":6},{"email":"mertie@example.org","id":7},{"email":"lura@example.org","id":8},{"email":"marielle@example.org","id":9},{"email":"eliane@example.org","id":10},{"email":"reed@example.com","id":11}]}
diff --git a/synth/tests/examples/bank/scenarios/users-only.json b/synth/tests/examples/bank/bank_db/scenarios/users-only.json
similarity index 100%
rename from synth/tests/examples/bank/scenarios/users-only.json
rename to synth/tests/examples/bank/bank_db/scenarios/users-only.json
diff --git a/synth/tests/examples/message_board/output.json b/synth/tests/examples/message_board/synth/output.json
similarity index 100%
rename from synth/tests/examples/message_board/output.json
rename to synth/tests/examples/message_board/synth/output.json
diff --git a/synth/tests/examples/random_variants/output.json b/synth/tests/examples/random_variants/random/output.json
similarity index 100%
rename from synth/tests/examples/random_variants/output.json
rename to synth/tests/examples/random_variants/random/output.json
diff --git a/test_macros/src/lib.rs b/test_macros/src/lib.rs
index 6a95b7ec..e93c475a 100644
--- a/test_macros/src/lib.rs
+++ b/test_macros/src/lib.rs
@@ -1,5 +1,6 @@
 extern crate proc_macro;
-use std::{collections::HashMap, ffi::OsStr};
+use quote::quote;
+use std::{collections::HashMap, ffi::OsStr, path::Path};
 
 use ignore::{DirEntry, WalkBuilder};
 use proc_macro2::{Ident, Span, TokenStream};
@@ -166,6 +167,60 @@ pub fn tmpl_ignore(
     input.interpolate(template.into()).into()
 }
 
+/// Gets the parent of a path string
+/// # Examples
+/// ```
+/// use test_macros::parent;
+///
+/// assert_eq!(parent!("some/nested/path"), "some/nested");
+/// ```
+#[proc_macro]
+pub fn parent(path: proc_macro::TokenStream) -> proc_macro::TokenStream {
+    let path_str = parse_macro_input!(path as LitStr).value();
+
+    let path = Path::new(&path_str);
+    let parent = path.parent().unwrap();
+    let parent_str = LitStr::new(&parent.display().to_string(), Span::call_site());
+
+    quote! {#parent_str}.into()
+}
+
+/// Gets the parent of the parent of a path string - i.e. the parent two levels up
+/// # Examples
+/// ```
+/// use test_macros::parent2;
+///
+/// assert_eq!(parent2!("some/deeply/nested/path"), "some/deeply");
+/// ```
+#[proc_macro]
+pub fn parent2(path: proc_macro::TokenStream) -> proc_macro::TokenStream {
+    let path_str = parse_macro_input!(path as LitStr).value();
+
+    let path = Path::new(&path_str);
+    let parent = path.parent().unwrap().parent().unwrap();
+    let parent_str = LitStr::new(&parent.display().to_string(), Span::call_site());
+
+    quote! {#parent_str}.into()
+}
+
+/// Gets the file stem of a path string
+/// # Examples
+/// ```
+/// use test_macros::file_stem;
+///
+/// assert_eq!(file_stem!("path/to/file.json"), "file");
+/// ```
+#[proc_macro]
+pub fn file_stem(path: proc_macro::TokenStream) -> proc_macro::TokenStream {
+    let path_str = parse_macro_input!(path as LitStr).value();
+
+    let path = Path::new(&path_str);
+    let stem = path.file_stem().unwrap().to_str().unwrap();
+    let stem_str = LitStr::new(stem, Span::call_site());
+
+    quote! {#stem_str}.into()
+}
+
 #[cfg(test)]
 mod tests {
     use super::{File, IgnoreInput};