diff --git a/Cargo.lock b/Cargo.lock index 293afe6b..ba92a7c1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1338,7 +1338,6 @@ dependencies = [ "Inflector", "assert_matches", "cynic-parser", - "graphql-parser", "insta", "once_cell", "rstest", diff --git a/cynic-querygen/Cargo.toml b/cynic-querygen/Cargo.toml index 1255841a..25814013 100644 --- a/cynic-querygen/Cargo.toml +++ b/cynic-querygen/Cargo.toml @@ -14,7 +14,6 @@ rust-version = { workspace = true } [dependencies] Inflector = { version = "0.11.4", default-features = false } -graphql-parser = "0.4" once_cell = "1.9" rust_decimal = "1.22" thiserror = "1.0.30" diff --git a/cynic-querygen/src/lib.rs b/cynic-querygen/src/lib.rs index c4dce535..82f37580 100644 --- a/cynic-querygen/src/lib.rs +++ b/cynic-querygen/src/lib.rs @@ -15,7 +15,7 @@ pub enum Error { UnsupportedQueryDocument(String), #[error("could not parse query document: {0}")] - QueryParseError(graphql_parser::query::ParseError), + QueryParseError(cynic_parser::Error), #[error("could not parse schema document: {0}")] SchemaParseError(cynic_parser::Error), @@ -116,7 +116,7 @@ pub fn document_to_fragment_structs( .map_err(Error::SchemaParseError)?; let query = - graphql_parser::parse_query::<&str>(query.as_ref()).map_err(Error::QueryParseError)?; + cynic_parser::parse_executable_document(query.as_ref()).map_err(Error::QueryParseError)?; let (schema, typename_id) = add_builtins(schema); diff --git a/cynic-querygen/src/query_parsing/inputs.rs b/cynic-querygen/src/query_parsing/inputs.rs index 8ab4b9dc..c53e7442 100644 --- a/cynic-querygen/src/query_parsing/inputs.rs +++ b/cynic-querygen/src/query_parsing/inputs.rs @@ -159,7 +159,7 @@ mod tests { fn deduplicates_input_types_if_same() { let (schema, typename_id) = &*GITHUB_SCHEMA; let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id)); - let query = graphql_parser::parse_query::<&str>( + let query = cynic_parser::parse_executable_document( r#" query ($filterOne: IssueFilters!, $filterTwo: 
IssueFilters!) { cynic: repository(owner: "obmarg", name: "cynic") { @@ -191,7 +191,7 @@ mod tests { fn finds_variable_input_types() { let (schema, typename_id) = &*GITHUB_SCHEMA; let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id)); - let query = graphql_parser::parse_query::<&str>( + let query = cynic_parser::parse_executable_document( r#" query MyQuery($input: IssueFilters!) { cynic: repository(owner: "obmarg", name: "cynic") { @@ -224,7 +224,7 @@ mod tests { let (schema, typename_id) = &*TEST_CASE_SCHEMA; let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id)); - let query = graphql_parser::parse_query::<&str>( + let query = cynic_parser::parse_executable_document( r#" query MyQuery($input: SelfRecursiveInput!, $input2: RecursiveInputParent!) { recursiveInputField(recursive: $input, recursive2: $input2) diff --git a/cynic-querygen/src/query_parsing/mod.rs b/cynic-querygen/src/query_parsing/mod.rs index 292c2fa6..c5fbce1a 100644 --- a/cynic-querygen/src/query_parsing/mod.rs +++ b/cynic-querygen/src/query_parsing/mod.rs @@ -3,17 +3,17 @@ use std::rc::Rc; mod inputs; mod leaf_types; mod normalisation; -mod parser; mod sorting; mod value; mod variables; -use parser::Document; use variables::VariableStructDetails; pub use normalisation::Variable; pub use value::{LiteralContext, TypedValue}; +use cynic_parser::{executable as parser, ExecutableDocument}; + use crate::{ casings::CasingExt, naming::Namer, @@ -21,10 +21,10 @@ use crate::{ Error, TypeIndex, }; -pub fn parse_query_document<'text>( - doc: &Document<'text>, - type_index: &Rc>, -) -> Result, Error> { +pub fn parse_query_document<'a>( + doc: &'a ExecutableDocument, + type_index: &Rc>, +) -> Result, Error> { let normalised = normalisation::normalise(doc, type_index)?; let input_objects = inputs::extract_input_objects(&normalised)?; @@ -42,7 +42,7 @@ pub fn parse_query_document<'text>( namers .selection_sets - .force_name(&operation.root, &operation_name); + 
.force_name(&operation.root, operation_name.clone()); variable_struct_details .force_name_variables_for(&operation.root, format!("{operation_name}Variables")); diff --git a/cynic-querygen/src/query_parsing/normalisation.rs b/cynic-querygen/src/query_parsing/normalisation.rs index 9deb8e12..8ec6198c 100644 --- a/cynic-querygen/src/query_parsing/normalisation.rs +++ b/cynic-querygen/src/query_parsing/normalisation.rs @@ -1,17 +1,14 @@ use std::{ collections::{hash_map::DefaultHasher, BTreeSet, HashMap, HashSet}, - convert::TryInto, hash::{Hash, Hasher}, rc::Rc, }; +use cynic_parser::{executable::Iter, ExecutableDocument}; use inflector::Inflector; use super::{ - parser::{ - self, Definition, Document, FragmentDefinition, OperationDefinition, TypeCondition, - VariableDefinition, - }, + parser::{self, FragmentDefinition, OperationDefinition, VariableDefinition}, sorting::Vertex, value::TypedValue, }; @@ -123,26 +120,24 @@ pub struct NormalisedDocument<'query, 'schema> { pub operations: Vec>, } -pub fn normalise<'query, 'doc, 'schema>( - document: &'doc Document<'query>, - type_index: &'doc Rc>, -) -> Result, Error> { +pub fn normalise<'docs>( + document: &'docs ExecutableDocument, + type_index: &Rc>, +) -> Result, Error> { let fragment_map = extract_fragments(document); - let mut selection_sets: SelectionSetSet<'query, 'schema> = BTreeSet::new(); - let mut inline_fragments: InlineFragmentsSet<'query, 'schema> = BTreeSet::new(); + let mut selection_sets: SelectionSetSet<'docs, 'docs> = BTreeSet::new(); + let mut inline_fragments: InlineFragmentsSet<'docs, 'docs> = BTreeSet::new(); let mut operations = Vec::new(); - for definition in &document.definitions { - if let Definition::Operation(operation) = definition { - operations.push(normalise_operation( - operation, - &fragment_map, - type_index, - &mut selection_sets, - &mut inline_fragments, - )?); - } + for operation in document.operations() { + operations.push(normalise_operation( + operation, + &fragment_map, + 
type_index, + &mut selection_sets, + &mut inline_fragments, + )?); } Ok(NormalisedDocument { @@ -152,111 +147,59 @@ pub fn normalise<'query, 'doc, 'schema>( }) } -fn normalise_operation<'query, 'doc, 'schema>( - operation: &'doc OperationDefinition<'query>, - fragment_map: &FragmentMap<'query, 'doc>, - type_index: &'doc Rc>, - selection_sets_out: &mut SelectionSetSet<'query, 'schema>, - inline_fragments_out: &mut InlineFragmentsSet<'query, 'schema>, -) -> Result, Error> { - match operation { - OperationDefinition::SelectionSet(selection_set) => { - let mut normaliser = Normaliser::new( - type_index, - fragment_map, - selection_sets_out, - inline_fragments_out, - &[], - ); - let root = - normaliser.normalise_object_selection_set(selection_set, GraphPath::for_query())?; - - Ok(NormalisedOperation { - root, - name: None, - kind: OperationKind::Query, - variables: normaliser.variables, - }) +fn normalise_operation<'docs>( + operation: OperationDefinition<'docs>, + fragment_map: &FragmentMap<'docs>, + type_index: &Rc>, + selection_sets_out: &mut SelectionSetSet<'docs, 'docs>, + inline_fragments_out: &mut InlineFragmentsSet<'docs, 'docs>, +) -> Result, Error> { + let mut normaliser = Normaliser::new( + type_index, + fragment_map, + selection_sets_out, + inline_fragments_out, + operation.variable_definitions(), + ); + + let (kind, starting_path) = match operation.operation_type() { + cynic_parser::common::OperationType::Query => { + (OperationKind::Query, GraphPath::for_query()) } - OperationDefinition::Query(query) => { - let mut normaliser = Normaliser::new( - type_index, - fragment_map, - selection_sets_out, - inline_fragments_out, - &query.variable_definitions, - ); - - let root = normaliser - .normalise_object_selection_set(&query.selection_set, GraphPath::for_query())?; - - Ok(NormalisedOperation { - root, - name: query.name.map(Inflector::to_pascal_case), - kind: OperationKind::Query, - variables: normaliser.variables, - }) + 
cynic_parser::common::OperationType::Mutation => { + (OperationKind::Mutation, GraphPath::for_mutation()) } - OperationDefinition::Mutation(mutation) => { - let mut normaliser = Normaliser::new( - type_index, - fragment_map, - selection_sets_out, - inline_fragments_out, - &mutation.variable_definitions, - ); - - let root = normaliser.normalise_object_selection_set( - &mutation.selection_set, - GraphPath::for_mutation(), - )?; - - Ok(NormalisedOperation { - root, - name: mutation.name.map(Inflector::to_pascal_case), - kind: OperationKind::Mutation, - variables: normaliser.variables, - }) - } - OperationDefinition::Subscription(subscription) => { - let mut normaliser = Normaliser::new( - type_index, - fragment_map, - selection_sets_out, - inline_fragments_out, - &subscription.variable_definitions, - ); - - let root = normaliser.normalise_object_selection_set( - &subscription.selection_set, - GraphPath::for_subscription(), - )?; - - Ok(NormalisedOperation { - root, - name: subscription.name.map(Inflector::to_pascal_case), - kind: OperationKind::Subscription, - variables: normaliser.variables, - }) + cynic_parser::common::OperationType::Subscription => { + (OperationKind::Subscription, GraphPath::for_subscription()) } - } + }; + + let root = + normaliser.normalise_object_selection_set(operation.selection_set(), starting_path)?; + + Ok(NormalisedOperation { + root, + name: operation.name().map(Inflector::to_pascal_case), + kind, + variables: normaliser.variables, + }) } -struct Normaliser<'a, 'query, 'schema, 'doc> { - type_index: &'a Rc>, - fragment_map: &'a FragmentMap<'query, 'doc>, - selection_sets_out: &'a mut SelectionSetSet<'query, 'schema>, - inline_fragments_out: &'a mut InlineFragmentsSet<'query, 'schema>, - variables: Vec>, +struct Normaliser<'a, 'docs> { + type_index: &'a Rc>, + fragment_map: &'a FragmentMap<'docs>, + selection_sets_out: &'a mut SelectionSetSet<'docs, 'docs>, + inline_fragments_out: &'a mut InlineFragmentsSet<'docs, 'docs>, + variables: 
Vec>, } -impl<'a, 'query, 'schema, 'doc> Normaliser<'a, 'query, 'schema, 'doc> { +impl<'a, 'docs> Normaliser<'a, 'docs> { fn new( - type_index: &'a Rc>, - fragment_map: &'a FragmentMap<'query, 'doc>, - selection_sets_out: &'a mut SelectionSetSet<'query, 'schema>, - inline_fragments_out: &'a mut InlineFragmentsSet<'query, 'schema>, - variable_definitions: &'a [parser::VariableDefinition<'query>], + type_index: &'a Rc>, + fragment_map: &'a FragmentMap<'docs>, + selection_sets_out: &'a mut SelectionSetSet<'docs, 'docs>, + inline_fragments_out: &'a mut InlineFragmentsSet<'docs, 'docs>, + variable_definitions: Iter<'docs, VariableDefinition<'docs>>, ) -> Self { Normaliser { type_index, @@ -264,7 +207,6 @@ impl<'a, 'query, 'schema, 'doc> Normaliser<'a, 'query, 'schema, 'doc> { selection_sets_out, inline_fragments_out, variables: variable_definitions - .iter() .map(|var| Variable::from(var, type_index)) .collect(), } @@ -272,9 +214,9 @@ impl<'a, 'query, 'schema, 'doc> Normaliser<'a, 'query, 'schema, 'doc> { fn normalise_object_selection_set( &mut self, - selection_set: &parser::SelectionSet<'query>, - current_path: GraphPath<'query>, - ) -> Result>, Error> { + selection_sets: impl Iterator>, + current_path: GraphPath<'docs>, + ) -> Result>, Error> { let current_type = self.type_index.type_for_path(¤t_path)?; // Awkwardly using a set of hashes & to dedup so we don't fuck @@ -282,7 +224,7 @@ impl<'a, 'query, 'schema, 'doc> Normaliser<'a, 'query, 'schema, 'doc> { let mut seen_selections = HashSet::new(); let mut selections = Vec::new(); - for item in &selection_set.items { + for item in selection_sets { let new_selections = self.convert_selection(item, ¤t_path)?; for selection in new_selections { let mut hasher = DefaultHasher::new(); @@ -312,35 +254,38 @@ impl<'a, 'query, 'schema, 'doc> Normaliser<'a, 'query, 'schema, 'doc> { fn convert_selection( &mut self, - selection: &parser::Selection<'query>, - current_path: &GraphPath<'query>, - ) -> Result>, Error> { + selection: 
parser::Selection<'docs>, + current_path: &GraphPath<'docs>, + ) -> Result<Vec<Selection<'docs, 'docs>>, Error> { match selection { parser::Selection::Field(field) => { - let new_path = current_path.push(field.name); + let new_path = current_path.push(field.name()); let schema_field = self.type_index.field_for_path(&new_path)?; let inner_field = match schema_field.value_type.inner_ref().lookup()? { - OutputType::Object(_) if field.selection_set.items.is_empty() => { + OutputType::Object(_) if field.selection_set().len() == 0 => { return Err(Error::NoFieldSelected(schema_field.name.into())); } OutputType::Object(_) => Field::Composite( - self.normalise_object_selection_set(&field.selection_set, new_path)?, + self.normalise_object_selection_set(field.selection_set(), new_path)?, ), OutputType::Interface(_) | OutputType::Union(_) => { - self.normalise_abstract_selection_set(&field.selection_set, new_path)? + self.normalise_abstract_selection_set(field.selection_set(), new_path)? } OutputType::Enum(_) | OutputType::Scalar(_) => Field::Leaf, }; let mut arguments = Vec::new(); - for (name, value) in &field.arguments { + for argument in field.arguments() { + let name = argument.name(); + let value = argument.value(); + let schema_arg = schema_field .arguments .iter() - .find(|arg| arg.name == *name) - .ok_or_else(|| Error::UnknownArgument(name.to_string()))?; + .find(|arg| arg.name == name) + .ok_or_else(|| Error::UnknownArgument(name.to_string()))?; arguments.push(( schema_arg.name, @@ -353,8 +298,8 @@ impl<'a, 'query, 'schema, 'doc> Normaliser<'a, 'query, 'schema, 'doc> { } Ok(vec![Selection::Field(FieldSelection::new( - field.name, - field.alias, + field.name(), + field.alias(), arguments, schema_field, inner_field, @@ -363,19 +308,16 @@ impl<'a, 'query, 'schema, 'doc> Normaliser<'a, 'query, 'schema, 'doc> { parser::Selection::FragmentSpread(spread) => { let fragment = self .fragment_map - .get(spread.fragment_name) - .ok_or_else(|| 
Error::UnknownFragment(spread.fragment_name.to_string()))?; + .get(spread.fragment_name()) + .ok_or_else(|| Error::UnknownFragment(spread.fragment_name().to_string()))?; - let TypeCondition::On(target_type_name) = fragment.type_condition; let current_type = self.type_index.type_for_path(current_path)?; - let target_type = self.type_index.lookup_type(target_type_name)?; + let target_type = self.type_index.lookup_type(fragment.type_condition())?; current_type.allows_fragment_target_of(&target_type)?; Ok(fragment - .selection_set - .items - .iter() + .selection_set() .map(|item| self.convert_selection(item, current_path)) .collect::, _>>()? .into_iter() @@ -383,7 +325,7 @@ impl<'a, 'query, 'schema, 'doc> Normaliser<'a, 'query, 'schema, 'doc> { .collect()) } parser::Selection::InlineFragment(fragment) => { - if let Some(TypeCondition::On(target_type_name)) = fragment.type_condition { + if let Some(target_type_name) = fragment.type_condition() { let current_type = self.type_index.type_for_path(current_path)?; let target_type = self.type_index.lookup_type(target_type_name)?; @@ -391,9 +333,7 @@ impl<'a, 'query, 'schema, 'doc> Normaliser<'a, 'query, 'schema, 'doc> { } Ok(fragment - .selection_set - .items - .iter() + .selection_set() .map(|item| self.convert_selection(item, current_path)) .collect::, _>>()? 
.into_iter() @@ -405,14 +345,13 @@ impl<'a, 'query, 'schema, 'doc> Normaliser<'a, 'query, 'schema, 'doc> { fn normalise_abstract_selection_set( &mut self, - selection_set: &parser::SelectionSet<'query>, - current_path: GraphPath<'query>, - ) -> Result, Error> { + selection_set: Iter<'docs, parser::Selection<'docs>>, + current_path: GraphPath<'docs>, + ) -> Result, Error> { let schema_field = self.type_index.field_for_path(¤t_path)?; let spread_selections = selection_set - .items - .iter() + .clone() .filter(|s| !matches!(s, parser::Selection::Field(_))) .collect::>(); @@ -424,10 +363,7 @@ impl<'a, 'query, 'schema, 'doc> Normaliser<'a, 'query, 'schema, 'doc> { } let non_spread_selections = selection_set - .items - .iter() .filter(|s| matches!(s, parser::Selection::Field(_))) - .cloned() .collect::>(); let mut fragment_selections = vec![]; @@ -438,40 +374,41 @@ impl<'a, 'query, 'schema, 'doc> Normaliser<'a, 'query, 'schema, 'doc> { for selection in spread_selections { match selection { parser::Selection::FragmentSpread(spread) => { - let fragment = self - .fragment_map - .get(spread.fragment_name) - .ok_or_else(|| Error::UnknownFragment(spread.fragment_name.to_string()))?; + let fragment = + self.fragment_map + .get(spread.fragment_name()) + .ok_or_else(|| { + Error::UnknownFragment(spread.fragment_name().to_string()) + })?; - let parser::TypeCondition::On(target_type) = fragment.type_condition; + schema_field_type.allows_fragment_target_of( + &self.type_index.lookup_type(fragment.type_condition())?, + )?; - schema_field_type - .allows_fragment_target_of(&self.type_index.lookup_type(target_type)?)?; - - let mut selection_set = fragment.selection_set.clone(); - - selection_set.items.extend(non_spread_selections.clone()); + let selections = fragment + .selection_set() + .chain(non_spread_selections.iter().copied()); fragment_selections.push(self.normalise_object_selection_set( - &selection_set, - GraphPath::for_named_type(target_type), + selections, + 
GraphPath::for_named_type(fragment.type_condition()), )?) } parser::Selection::InlineFragment(inline_fragment) => { - let target_type = match inline_fragment.type_condition { + let target_type = match inline_fragment.type_condition() { None => return Err(Error::MissingTypeCondition), - Some(parser::TypeCondition::On(target_type)) => target_type, + Some(target_type) => target_type, }; schema_field_type .allows_fragment_target_of(&self.type_index.lookup_type(target_type)?)?; - let mut selection_set = inline_fragment.selection_set.clone(); - - selection_set.items.extend(non_spread_selections.clone()); + let selections = inline_fragment + .selection_set() + .chain(non_spread_selections.iter().copied()); fragment_selections.push(self.normalise_object_selection_set( - &selection_set, + selections, GraphPath::for_named_type(target_type), )?) } @@ -495,28 +432,21 @@ impl<'a, 'query, 'schema, 'doc> Normaliser<'a, 'query, 'schema, 'doc> { } } -impl<'query, 'schema> Variable<'query, 'schema> { - fn from(def: &VariableDefinition<'query>, type_index: &Rc>) -> Self { +impl<'a> Variable<'a, 'a> { + fn from(def: VariableDefinition<'a>, type_index: &Rc>) -> Self { Variable { - name: def.name, + name: def.name(), value_type: InputFieldType::from_variable_definition(def, type_index), } } } -type FragmentMap<'query, 'doc> = HashMap<&'query str, &'doc FragmentDefinition<'query>>; +type FragmentMap<'query> = HashMap<&'query str, FragmentDefinition<'query>>; -fn extract_fragments<'query, 'doc>(document: &'doc Document<'query>) -> FragmentMap<'query, 'doc> { +fn extract_fragments(document: &ExecutableDocument) -> FragmentMap<'_> { document - .definitions - .iter() - .flat_map(|definition| { - if let Definition::Fragment(fragment) = definition { - Some((fragment.name, fragment)) - } else { - None - } - }) + .fragments() + .map(|fragment| (fragment.name(), fragment)) + .collect() } @@ -593,7 +523,7 @@ mod tests { fn normalise_deduplicates_identical_selections() { let (schema, 
typename_id) = &*SCHEMA; let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id)); - let query = graphql_parser::parse_query::<&str>( + let query = cynic_parser::parse_executable_document( r#" { allFilms { @@ -627,7 +557,7 @@ mod tests { fn normalise_does_not_deduplicate_differing_selections() { let (schema, typename_id) = &*SCHEMA; let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id)); - let query = graphql_parser::parse_query::<&str>( + let query = cynic_parser::parse_executable_document( r#" { allFilms { @@ -660,7 +590,7 @@ mod tests { fn check_output_makes_sense() { let (schema, typename_id) = &*SCHEMA; let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id)); - let query = graphql_parser::parse_query::<&str>( + let query = cynic_parser::parse_executable_document( r#" { allFilms { @@ -686,7 +616,7 @@ mod tests { fn check_fragment_spread_output() { let (schema, typename_id) = &*SCHEMA; let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id)); - let query = graphql_parser::parse_query::<&str>( + let query = cynic_parser::parse_executable_document( r#" fragment FilmFields on Film { id @@ -723,7 +653,7 @@ mod tests { fn check_fragment_type_mismatches() { let (schema, typename_id) = &*SCHEMA; let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id)); - let query = graphql_parser::parse_query::<&str>( + let query = cynic_parser::parse_executable_document( r#" fragment FilmFields on Film { id @@ -749,7 +679,7 @@ mod tests { fn check_field_selected() { let (schema, typename_id) = &*SCHEMA; let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id)); - let query = graphql_parser::parse_query::<&str>( + let query = cynic_parser::parse_executable_document( r#" query MyQuery { allFilms(after: "") { @@ -780,7 +710,7 @@ mod tests { fn check_no_field_selected() { let (schema, typename_id) = &*SCHEMA; let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id)); - let query = 
graphql_parser::parse_query::<&str>( + let query = cynic_parser::parse_executable_document( r#" query MyQuery { allFilms(after: "") { @@ -803,7 +733,7 @@ mod tests { fn check_inline_fragment_output() { let (schema, typename_id) = &*SCHEMA; let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id)); - let query = graphql_parser::parse_query::<&str>( + let query = cynic_parser::parse_executable_document( r#" query AllFilms { allFilms { @@ -838,7 +768,7 @@ mod tests { fn check_inline_fragment_type_mismatches() { let (schema, typename_id) = &*SCHEMA; let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id)); - let query = graphql_parser::parse_query::<&str>( + let query = cynic_parser::parse_executable_document( r#" query AllFilms { allFilms { diff --git a/cynic-querygen/src/query_parsing/parser.rs b/cynic-querygen/src/query_parsing/parser.rs deleted file mode 100644 index b0bacd7f..00000000 --- a/cynic-querygen/src/query_parsing/parser.rs +++ /dev/null @@ -1,11 +0,0 @@ -// Alias all the graphql_parser query types so we don't have to specify generic -// parameters everywhere -pub type Document<'a> = graphql_parser::query::Document<'a, &'a str>; -pub type Definition<'a> = graphql_parser::query::Definition<'a, &'a str>; -pub type FragmentDefinition<'a> = graphql_parser::query::FragmentDefinition<'a, &'a str>; -pub type OperationDefinition<'a> = graphql_parser::query::OperationDefinition<'a, &'a str>; -pub type SelectionSet<'a> = graphql_parser::query::SelectionSet<'a, &'a str>; -pub type Selection<'a> = graphql_parser::query::Selection<'a, &'a str>; -pub type TypeCondition<'a> = graphql_parser::query::TypeCondition<'a, &'a str>; -pub type Value<'a> = graphql_parser::query::Value<'a, &'a str>; -pub type VariableDefinition<'a> = graphql_parser::query::VariableDefinition<'a, &'a str>; diff --git a/cynic-querygen/src/query_parsing/value.rs b/cynic-querygen/src/query_parsing/value.rs index d8f532e2..c462caf7 100644 --- 
a/cynic-querygen/src/query_parsing/value.rs +++ b/cynic-querygen/src/query_parsing/value.rs @@ -6,7 +6,7 @@ use crate::{ Error, }; -use super::{normalisation::Variable, parser}; +use super::normalisation::Variable; /// A literal value from a GraphQL query, along with it's type #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -31,17 +31,19 @@ pub enum TypedValue<'query, 'schema> { impl<'query, 'schema> TypedValue<'query, 'schema> { pub fn from_query_value( - value: &parser::Value<'query>, + value: cynic_parser::Value<'query>, field_type: InputFieldType<'schema>, variable_definitions: &[Variable<'query, 'schema>], ) -> Result { Ok(match value { - parser::Value::Variable(name) => { + cynic_parser::Value::Variable(variable) => { + let name = variable.name(); + + // If this is just a variable then we'll take it's type as our value type. let value_type = variable_definitions .iter() - .find(|var| var.name == *name) - .ok_or_else(|| Error::UnknownArgument(name.to_string()))? + .find(|var| var.name == name) + .ok_or_else(|| Error::UnknownArgument(name.to_string()))? 
.value_type .clone(); @@ -51,17 +55,19 @@ impl<'query, 'schema> TypedValue<'query, 'schema> { field_type, } } - parser::Value::Int(num) => TypedValue::Int(num.as_i64().unwrap(), field_type), - parser::Value::Float(num) => TypedValue::Float(Decimal::from_f64(*num), field_type), - parser::Value::String(s) => TypedValue::String(s.clone(), field_type), - parser::Value::Boolean(b) => TypedValue::Boolean(*b, field_type), - parser::Value::Null => TypedValue::Null(field_type), - parser::Value::Enum(e) => TypedValue::Enum(e, field_type), - parser::Value::List(values) => { + cynic_parser::Value::Int(num) => TypedValue::Int(num.as_i64(), field_type), + cynic_parser::Value::Float(num) => { + TypedValue::Float(Decimal::from_f64(num.as_f64()), field_type) + } + cynic_parser::Value::String(s) => TypedValue::String(s.to_string(), field_type), + cynic_parser::Value::Boolean(b) => TypedValue::Boolean(b.value(), field_type), + cynic_parser::Value::Null(_) => TypedValue::Null(field_type), + cynic_parser::Value::Enum(e) => TypedValue::Enum(e.name(), field_type), + cynic_parser::Value::List(values) => { let inner_type = field_type.list_inner_type()?; TypedValue::List( values - .iter() + .items() .map(|val| { TypedValue::from_query_value( val, @@ -73,22 +79,24 @@ impl<'query, 'schema> TypedValue<'query, 'schema> { field_type, ) } - parser::Value::Object(obj) => { + cynic_parser::Value::Object(obj) => { if let InputType::InputObject(obj_type) = field_type.inner_ref().lookup()? 
{ TypedValue::Object( - obj.iter() - .map(|(k, v)| { - let field = obj_type + obj.fields() + .map(|query_field| { + let field_name = query_field.name(); + + let schema_field = obj_type .fields .iter() - .find(|field| field.name == *k) - .ok_or_else(|| Error::UnknownType(k.to_string()))?; + .find(|schema_field| schema_field.name == field_name) + .ok_or_else(|| Error::UnknownType(field_name.to_string()))?; Ok(( - *k, + field_name, TypedValue::from_query_value( - v, - field.value_type.clone(), + query_field.value(), + schema_field.value_type.clone(), variable_definitions, )?, )) diff --git a/cynic-querygen/src/schema/fields.rs b/cynic-querygen/src/schema/fields.rs index 0ae1fbd4..3e8bb193 100644 --- a/cynic-querygen/src/schema/fields.rs +++ b/cynic-querygen/src/schema/fields.rs @@ -76,31 +76,27 @@ impl<'schema> InputField<'schema> { } impl<'schema> InputFieldType<'schema> { - pub fn from_variable_definition<'query>( - def: &graphql_parser::query::VariableDefinition<'query, &'query str>, + pub fn from_variable_definition( + def: cynic_parser::executable::VariableDefinition<'schema>, type_index: &Rc>, ) -> Self { - InputFieldType::from_query_type(&def.var_type, type_index) + InputFieldType::from_query_type(&def.ty(), type_index) } - fn from_query_type<'query>( - query_type: &graphql_parser::query::Type<'query, &'query str>, + fn from_query_type( + query_type: &cynic_parser::executable::Type<'schema>, type_index: &Rc>, ) -> Self { - use graphql_parser::schema::Type; + use cynic_parser::common::WrappingType; - match query_type { - Type::NamedType(name) => { - InputFieldType::NamedType(InputTypeRef::new_owned(name.to_string(), type_index)) + let mut ty = InputFieldType::NamedType(InputTypeRef::new(query_type.name(), type_index)); + for wrapping in query_type.wrappers().collect::>().into_iter().rev() { + match wrapping { + WrappingType::NonNull => ty = InputFieldType::NonNullType(Box::new(ty)), + WrappingType::List => ty = InputFieldType::ListType(Box::new(ty)), } - 
Type::ListType(inner) => InputFieldType::ListType(Box::new(Self::from_query_type( - inner.as_ref(), - type_index, - ))), - Type::NonNullType(inner) => InputFieldType::NonNullType(Box::new( - Self::from_query_type(inner.as_ref(), type_index), - )), } + ty } pub fn inner_name(&self) -> Cow<'schema, str> {