chore: parse schema with cynic_parser in querygen #1124

Merged
merged 2 commits on Jan 16, 2025
Changes from 1 commit
finish updating to use type_system from parser
obmarg committed Jan 16, 2025
commit 1e41f5170bdbd9feaceef478f4362dc337f2d722
7 changes: 0 additions & 7 deletions cynic-parser/ast-generator/src/file.rs
@@ -46,13 +46,6 @@ pub struct EntityOutput {
pub requires: BTreeSet<EntityRef>,
pub id: EntityRef,
pub contents: String,
- pub kind: EntityKind,
}

- #[derive(Clone, Copy, PartialEq)]
- pub enum EntityKind {
- Union,
- Object,
- }

pub fn imports(
2 changes: 1 addition & 1 deletion cynic-parser/ast-generator/src/main.rs
@@ -106,7 +106,7 @@ fn format_code(text: impl ToString) -> anyhow::Result<String> {
use xshell::{cmd, Shell};
let sh = Shell::new()?;

let stdout = cmd!(sh, "rustfmt").stdin(&text.to_string()).read()?;
let stdout = cmd!(sh, "rustfmt").stdin(text.to_string()).read()?;

Ok(stdout)
}
3 changes: 1 addition & 2 deletions cynic-parser/ast-generator/src/object.rs
@@ -6,7 +6,7 @@ use cynic_parser::type_system::{FieldDefinition, ObjectDefinition, TypeDefinitio

use crate::{
exts::{FieldExt, ScalarExt},
- file::{EntityKind, EntityOutput, EntityRef},
+ file::{EntityOutput, EntityRef},
format_code,
idents::IdIdent,
};
@@ -122,7 +122,6 @@ pub fn object_output(
.collect(),
id: EntityRef::new(TypeDefinition::Object(object)).unwrap(),
contents,
- kind: EntityKind::Object,
})
}

3 changes: 1 addition & 2 deletions cynic-parser/ast-generator/src/union.rs
@@ -6,7 +6,7 @@ use cynic_parser::type_system::{TypeDefinition, UnionDefinition};

use crate::{
exts::UnionExt,
- file::{EntityKind, EntityOutput, EntityRef},
+ file::{EntityOutput, EntityRef},
format_code,
idents::IdIdent,
};
@@ -106,7 +106,6 @@ pub fn union_output(
.collect(),
id: EntityRef::new(TypeDefinition::Union(union_definition)).unwrap(),
contents,
- kind: EntityKind::Union,
})
}

3 changes: 2 additions & 1 deletion cynic-querygen/Cargo.toml
@@ -14,13 +14,14 @@ rust-version = { workspace = true }

[dependencies]
Inflector = { version = "0.11.4", default-features = false }
- cynic-parser.path = "../cynic-parser"
graphql-parser = "0.4"
once_cell = "1.9"
rust_decimal = "1.22"
thiserror = "1.0.30"
uuid = { version = "1", features = ["v4"] }

+ cynic-parser.workspace = true

[dev-dependencies]
assert_matches = "1.4"
insta = "1.17.1"
6 changes: 4 additions & 2 deletions cynic-querygen/src/lib.rs
@@ -7,7 +7,7 @@ mod query_parsing;
mod schema;

use output::Output;
- use schema::{GraphPath, TypeIndex};
+ use schema::{add_builtins, GraphPath, TypeIndex};

#[derive(thiserror::Error, Debug)]
pub enum Error {
@@ -118,7 +118,9 @@ pub fn document_to_fragment_structs(
let query =
graphql_parser::parse_query::<&str>(query.as_ref()).map_err(Error::QueryParseError)?;

- let type_index = Rc::new(TypeIndex::from_schema(schema));
+ let (schema, typename_id) = add_builtins(schema);
+
+ let type_index = Rc::new(TypeIndex::from_schema(&schema, typename_id));
let mut parsed_output = query_parsing::parse_query_document(&query, &type_index)?;

add_schema_name(&mut parsed_output, options.schema_name.as_deref());
40 changes: 24 additions & 16 deletions cynic-querygen/src/query_parsing/inputs.rs
@@ -150,12 +150,15 @@ mod tests {
use {
super::*,
crate::{query_parsing::normalisation::normalise, TypeIndex},
+ cynic_parser::{type_system::ids::FieldDefinitionId, TypeSystemDocument},
+ schema::add_builtins,
+ std::sync::LazyLock,
};

#[test]
fn deduplicates_input_types_if_same() {
- let schema = load_graphql_schema();
- let type_index = Rc::new(TypeIndex::from_schema(schema));
+ let (schema, typename_id) = &*GITHUB_SCHEMA;
+ let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id));
let query = graphql_parser::parse_query::<&str>(
r#"
query ($filterOne: IssueFilters!, $filterTwo: IssueFilters!) {
@@ -186,8 +189,8 @@

#[test]
fn finds_variable_input_types() {
- let schema = load_graphql_schema();
- let type_index = Rc::new(TypeIndex::from_schema(schema));
+ let (schema, typename_id) = &*GITHUB_SCHEMA;
+ let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id));
let query = graphql_parser::parse_query::<&str>(
r#"
query MyQuery($input: IssueFilters!) {
@@ -218,8 +221,8 @@

#[test]
fn test_extracting_recursive_types() {
- let schema = load_test_schema();
- let type_index = Rc::new(TypeIndex::from_schema(schema));
+ let (schema, typename_id) = &*TEST_CASE_SCHEMA;
+ let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id));

let query = graphql_parser::parse_query::<&str>(
r#"
@@ -236,15 +239,20 @@
assert_eq!(input_objects.len(), 3);
}

- fn load_graphql_schema() -> cynic_parser::TypeSystemDocument {
- cynic_parser::parse_type_system_document(include_str!("../../../schemas/github.graphql"))
- .unwrap()
- }
-
- fn load_test_schema() -> cynic_parser::TypeSystemDocument {
- cynic_parser::parse_type_system_document(include_str!(
- "../../../schemas/test_cases.graphql"
+ static GITHUB_SCHEMA: LazyLock<(TypeSystemDocument, FieldDefinitionId)> = LazyLock::new(|| {
+ let schema = cynic_parser::parse_type_system_document(include_str!(
+ "../../../schemas/github.graphql"
))
- .unwrap()
- }
+ .unwrap();
+ add_builtins(schema)
+ });
+
+ static TEST_CASE_SCHEMA: LazyLock<(TypeSystemDocument, FieldDefinitionId)> =
+ LazyLock::new(|| {
+ let schema = cynic_parser::parse_type_system_document(include_str!(
+ "../../../schemas/test_cases.graphql"
+ ))
+ .unwrap();
+ add_builtins(schema)
+ });
}
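The tests above (and the matching ones in normalisation.rs below) stop re-parsing the schema inside every test and instead share the parsed document through a LazyLock static. A minimal, self-contained sketch of that pattern follows; the fixture name and contents are hypothetical, not the crate's:

use std::sync::LazyLock;

// Hypothetical fixture: built once on first access, then borrowed by every
// test in the module (std::sync::LazyLock is available from Rust 1.80).
static FIXTURE: LazyLock<String> =
    LazyLock::new(|| "type Query { field: String }".to_owned());

#[test]
fn reads_shared_fixture() {
    let schema = &*FIXTURE;
    assert!(schema.contains("Query"));
}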
52 changes: 29 additions & 23 deletions cynic-querygen/src/query_parsing/normalisation.rs
@@ -580,14 +580,19 @@ impl crate::naming::Nameable for Rc<InlineFragments<'_, '_>> {

#[cfg(test)]
mod tests {
- use {super::*, crate::schema, cynic_parser::TypeSystemDocument};
+ use {
+ super::*,
+ crate::schema::add_builtins,
+ cynic_parser::{type_system::ids::FieldDefinitionId, TypeSystemDocument},
+ std::sync::LazyLock,
+ };

use assert_matches::assert_matches;

#[test]
fn normalise_deduplicates_identical_selections() {
- let schema = load_schema();
- let type_index = Rc::new(TypeIndex::from_schema(schema));
+ let (schema, typename_id) = &*SCHEMA;
+ let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id));
let query = graphql_parser::parse_query::<&str>(
r#"
{
@@ -620,8 +625,8 @@

#[test]
fn normalise_does_not_deduplicate_differing_selections() {
- let schema = load_schema();
- let type_index = Rc::new(TypeIndex::from_schema(schema));
+ let (schema, typename_id) = &*SCHEMA;
+ let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id));
let query = graphql_parser::parse_query::<&str>(
r#"
{
@@ -653,8 +658,8 @@

#[test]
fn check_output_makes_sense() {
- let schema = load_schema();
- let type_index = Rc::new(TypeIndex::from_schema(schema));
+ let (schema, typename_id) = &*SCHEMA;
+ let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id));
let query = graphql_parser::parse_query::<&str>(
r#"
{
@@ -679,8 +684,8 @@

#[test]
fn check_fragment_spread_output() {
- let schema = load_schema();
- let type_index = Rc::new(TypeIndex::from_schema(schema));
+ let (schema, typename_id) = &*SCHEMA;
+ let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id));
let query = graphql_parser::parse_query::<&str>(
r#"
fragment FilmFields on Film {
@@ -716,8 +721,8 @@

#[test]
fn check_fragment_type_mismatches() {
- let schema = load_schema();
- let type_index = Rc::new(TypeIndex::from_schema(schema));
+ let (schema, typename_id) = &*SCHEMA;
+ let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id));
let query = graphql_parser::parse_query::<&str>(
r#"
fragment FilmFields on Film {
@@ -742,8 +747,8 @@

#[test]
fn check_field_selected() {
- let schema = load_schema();
- let type_index = Rc::new(TypeIndex::from_schema(schema));
+ let (schema, typename_id) = &*SCHEMA;
+ let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id));
let query = graphql_parser::parse_query::<&str>(
r#"
query MyQuery {
@@ -773,8 +778,8 @@

#[test]
fn check_no_field_selected() {
- let schema = load_schema();
- let type_index = Rc::new(TypeIndex::from_schema(schema));
+ let (schema, typename_id) = &*SCHEMA;
+ let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id));
let query = graphql_parser::parse_query::<&str>(
r#"
query MyQuery {
@@ -796,8 +801,8 @@

#[test]
fn check_inline_fragment_output() {
- let schema = load_schema();
- let type_index = Rc::new(TypeIndex::from_schema(schema));
+ let (schema, typename_id) = &*SCHEMA;
+ let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id));
let query = graphql_parser::parse_query::<&str>(
r#"
query AllFilms {
@@ -831,8 +836,8 @@

#[test]
fn check_inline_fragment_type_mismatches() {
- let schema = load_schema();
- let type_index = Rc::new(TypeIndex::from_schema(schema));
+ let (schema, typename_id) = &*SCHEMA;
+ let type_index = Rc::new(TypeIndex::from_schema(schema, *typename_id));
let query = graphql_parser::parse_query::<&str>(
r#"
query AllFilms {
@@ -852,10 +857,11 @@
)
}

- fn load_schema() -> TypeSystemDocument {
- cynic_parser::parse_type_system_document(include_str!(
+ static SCHEMA: LazyLock<(TypeSystemDocument, FieldDefinitionId)> = LazyLock::new(|| {
+ let schema = cynic_parser::parse_type_system_document(include_str!(
"../../../schemas/starwars.schema.graphql"
))
- .unwrap()
- }
+ .unwrap();
+ add_builtins(schema)
+ });
}
49 changes: 49 additions & 0 deletions cynic-querygen/src/schema/builtins.rs
@@ -0,0 +1,49 @@
use cynic_parser::{
common::TypeWrappers,
type_system::{
ids::FieldDefinitionId,
storage::{FieldDefinitionRecord, ScalarDefinitionRecord, TypeRecord},
writer,
},
TypeSystemDocument,
};

pub(crate) fn add_builtins(schema: TypeSystemDocument) -> (TypeSystemDocument, FieldDefinitionId) {
let mut writer = writer::TypeSystemAstWriter::update(schema);

// Add the builtins
for name in ["String", "Int", "Float", "Boolean", "ID"] {
let name = writer.intern_string(name);
writer.scalar_definition(ScalarDefinitionRecord {
name,
description: None,
directives: Default::default(),
span: Default::default(),
name_span: Default::default(),
});
}

let typename_id = {
let ty = {
let name = writer.intern_string("String");
writer.type_reference(TypeRecord {
name,
name_start: Default::default(),
wrappers: TypeWrappers::none().wrap_non_null(),
span: Default::default(),
})
};
let name = writer.intern_string("__typename");
writer.field_definition(FieldDefinitionRecord {
name,
name_span: Default::default(),
ty,
arguments: Default::default(),
description: Default::default(),
directives: Default::default(),
span: Default::default(),
})
};

(writer.finish(), typename_id)
}
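For orientation, the pair returned here is what the lib.rs change above threads through: the augmented document plus the id of the synthetic __typename: String! field (presumably so __typename selections can be resolved against a real field definition). A condensed sketch of the call site, mirroring the changed lines in cynic-querygen/src/lib.rs; sdl is a placeholder and error handling is elided:

fn sketch(sdl: &str) {
    // Parse the SDL, append the built-in scalars and the __typename field,
    // then build the TypeIndex from the augmented document plus that field's id.
    let schema = cynic_parser::parse_type_system_document(sdl).unwrap();
    let (schema, typename_id) = add_builtins(schema);
    let type_index = std::rc::Rc::new(TypeIndex::from_schema(&schema, typename_id));

    // ...the parsed query document is then resolved against `type_index`.
    let _ = type_index;
}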
2 changes: 1 addition & 1 deletion cynic-querygen/src/schema/fields.rs
@@ -255,7 +255,7 @@ macro_rules! impl_field_type_from_parser_type {
use cynic_parser::common::WrappingType;

let mut ty = $target::NamedType($ref_type::new(parser_type.name(), type_index));
- for wrapping in parser_type.wrappers() {
+ for wrapping in parser_type.wrappers().collect::<Vec<_>>().into_iter().rev() {
match wrapping {
WrappingType::NonNull => ty = $target::NonNullType(Box::new(ty)),
WrappingType::List => ty = $target::ListType(Box::new(ty)),
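The fields.rs change above reverses the order in which wrappers are folded onto the named type. This diff alone doesn't show which direction wrappers() yields, so the sketch below only illustrates why the fold order matters, using hypothetical stand-in types rather than cynic-querygen's own:

// Hypothetical mirror of the wrapping fold; not cynic-querygen's real types.
#[derive(Debug, PartialEq)]
enum Ty {
    Named(&'static str),
    NonNull(Box<Ty>),
    List(Box<Ty>),
}

#[derive(Clone, Copy)]
enum Wrapping {
    NonNull,
    List,
}

fn apply(wrappers: impl Iterator<Item = Wrapping>) -> Ty {
    // Each wrapper encountered later ends up as the outermost layer.
    let mut ty = Ty::Named("Int");
    for w in wrappers {
        ty = match w {
            Wrapping::NonNull => Ty::NonNull(Box::new(ty)),
            Wrapping::List => Ty::List(Box::new(ty)),
        };
    }
    ty
}

fn main() {
    let ws = [Wrapping::NonNull, Wrapping::List];
    // Forward order leaves List outermost, i.e. [Int!]
    assert_eq!(
        apply(ws.iter().copied()),
        Ty::List(Box::new(Ty::NonNull(Box::new(Ty::Named("Int")))))
    );
    // Reversed order leaves NonNull outermost, i.e. [Int]!
    assert_eq!(
        apply(ws.iter().copied().rev()),
        Ty::NonNull(Box::new(Ty::List(Box::new(Ty::Named("Int")))))
    );
}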
2 changes: 2 additions & 0 deletions cynic-querygen/src/schema/mod.rs
@@ -1,7 +1,9 @@
+ mod builtins;
mod fields;
mod type_index;
mod type_refs;

+ pub(crate) use builtins::add_builtins;
pub use fields::*;

pub use type_index::{GraphPath, TypeIndex};