From c7fc4935d154c7e250f0ec48aa787c996f28e353 Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Wed, 1 Jun 2016 02:34:20 +0800 Subject: [PATCH 01/34] Updated `graphql.Float` coercion - if value type is explicitly `float32`, return `float32` - if value type is explicitly `float64`, return `float64` - if value type is `string`, return `float64` - for everything else, use system-default Addresses issue #130 --- scalars.go | 18 ++++++------- scalars_serialization_test.go | 48 +++++++++++++++++++++++++---------- 2 files changed, 44 insertions(+), 22 deletions(-) diff --git a/scalars.go b/scalars.go index 77842a1f..c72da612 100644 --- a/scalars.go +++ b/scalars.go @@ -96,27 +96,27 @@ var Int = NewScalar(ScalarConfig{ }, }) -func coerceFloat32(value interface{}) interface{} { +func coerceFloat(value interface{}) interface{} { switch value := value.(type) { case bool: if value == true { - return float32(1) + return 1.0 } - return float32(0) + return 0.0 case int: - return float32(value) + return float64(value) case float32: return value case float64: - return float32(value) + return value case string: val, err := strconv.ParseFloat(value, 0) if err != nil { return nil } - return coerceFloat32(val) + return val } - return float32(0) + return 0.0 } // Float is the GraphQL float type definition. @@ -125,8 +125,8 @@ var Float = NewScalar(ScalarConfig{ Description: "The `Float` scalar type represents signed double-precision fractional " + "values as specified by " + "[IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point). ", - Serialize: coerceFloat32, - ParseValue: coerceFloat32, + Serialize: coerceFloat, + ParseValue: coerceFloat, ParseLiteral: func(valueAST ast.Value) interface{} { switch valueAST := valueAST.(type) { case *ast.FloatValue: diff --git a/scalars_serialization_test.go b/scalars_serialization_test.go index 4dbe2488..24d4d1da 100644 --- a/scalars_serialization_test.go +++ b/scalars_serialization_test.go @@ -12,7 +12,7 @@ type intSerializationTest struct { Value interface{} Expected interface{} } -type float32SerializationTest struct { +type float64SerializationTest struct { Value interface{} Expected interface{} } @@ -37,6 +37,12 @@ func TestTypeSystem_Scalar_SerializesOutputInt(t *testing.T) { {float32(-1.1), -1}, {float32(1e5), 100000}, {float32(math.MaxFloat32), nil}, + {float64(0.1), 0}, + {float64(1.1), 1}, + {float64(-1.1), -1}, + {float64(1e5), 100000}, + {float64(math.MaxFloat32), nil}, + {float64(math.MaxFloat64), nil}, // Maybe a safe Go/Javascript `int`, but bigger than 2^32, so not // representable as a GraphQL Int {9876504321, nil}, @@ -71,34 +77,49 @@ func TestTypeSystem_Scalar_SerializesOutputInt(t *testing.T) { {[]int{}, nil}, } - for _, test := range tests { + for i, test := range tests { val := graphql.Int.Serialize(test.Value) if val != test.Expected { - reflectedValue := reflect.ValueOf(test.Value) - t.Fatalf("Failed Int.Serialize(%v(%v)), expected: %v, got %v", reflectedValue.Type(), test.Value, test.Expected, val) + reflectedTestValue := reflect.ValueOf(test.Value) + reflectedExpectedValue := reflect.ValueOf(test.Expected) + reflectedValue := reflect.ValueOf(val) + t.Fatalf("Failed test #%d - Int.Serialize(%v(%v)), expected: %v(%v), got %v(%v)", + i, reflectedTestValue.Type(), test.Value, + reflectedExpectedValue.Type(), test.Expected, + reflectedValue.Type(), val, + ) } } } func TestTypeSystem_Scalar_SerializesOutputFloat(t *testing.T) { - tests := []float32SerializationTest{ - {int(1), float32(1.0)}, - {int(0), float32(0.0)}, - {int(-1), float32(-1.0)}, + 
tests := []float64SerializationTest{ + {int(1), 1.0}, + {int(0), 0.0}, + {int(-1), -1.0}, {float32(0.1), float32(0.1)}, {float32(1.1), float32(1.1)}, {float32(-1.1), float32(-1.1)}, - {"-1.1", float32(-1.1)}, + {float64(0.1), float64(0.1)}, + {float64(1.1), float64(1.1)}, + {float64(-1.1), float64(-1.1)}, + {"-1.1", -1.1}, {"one", nil}, - {false, float32(0.0)}, - {true, float32(1.0)}, + {false, 0.0}, + {true, 1.0}, } for i, test := range tests { val := graphql.Float.Serialize(test.Value) if val != test.Expected { - reflectedValue := reflect.ValueOf(test.Value) - t.Fatalf("Failed test #%d - Float.Serialize(%v(%v)), expected: %v, got %v", i, reflectedValue.Type(), test.Value, test.Expected, val) + reflectedTestValue := reflect.ValueOf(test.Value) + reflectedExpectedValue := reflect.ValueOf(test.Expected) + reflectedValue := reflect.ValueOf(val) + t.Fatalf("Failed test #%d - Float.Serialize(%v(%v)), expected: %v(%v), got %v(%v)", + i, reflectedTestValue.Type(), test.Value, + reflectedExpectedValue.Type(), test.Expected, + reflectedValue.Type(), val, + ) } } } @@ -108,6 +129,7 @@ func TestTypeSystem_Scalar_SerializesOutputStrings(t *testing.T) { {"string", "string"}, {int(1), "1"}, {float32(-1.1), "-1.1"}, + {float64(-1.1), "-1.1"}, {true, "true"}, {false, "false"}, } From 453e0b52552b1d5400b6b4e40bc0b86a1dab9fe3 Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Wed, 1 Jun 2016 14:03:04 +0800 Subject: [PATCH 02/34] Combine invariants into AST-aware error. Commit: 6e736bf4733ada6aa15943c0529ed126e8dd8ef1 [6e736bf] Parents: 86db7340be Author: Lee Byron Date: 8 April 2016 at 9:51:04 AM SGT Labels: HEAD --- executor.go | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/executor.go b/executor.go index b03c8a01..53efdbc1 100644 --- a/executor.go +++ b/executor.go @@ -664,7 +664,9 @@ func completeAbstractValue(eCtx *ExecutionContext, returnType Abstract, fieldAST } err := invariant(runtimeType != nil, - fmt.Sprintf(`Could not determine runtime type of value "%v" for field %v.%v.`, result, info.ParentType, info.FieldName), + fmt.Sprintf(`Abstract type %v must resolve to an Object type at runtime `+ + `for field %v.%v with value "%v", received "%v".`, + returnType, info.ParentType, info.FieldName, result, runtimeType), ) if err != nil { panic(err) From 66e6c34ac0993ca218758f5eedf07c6d770b9404 Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Wed, 1 Jun 2016 14:05:18 +0800 Subject: [PATCH 03/34] Remove unused fragment in queries in unit tests Commit: cbded829525a68b9e0dab61621992cbf01348981 [cbded82] Parents: dd02973028 Author: Zhaojun Zhang Date: 26 April 2016 at 2:04:47 AM SGT Committer: Lee Byron --- directives_test.go | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/directives_test.go b/directives_test.go index 164ecd7b..3443ccb4 100644 --- a/directives_test.go +++ b/directives_test.go @@ -342,9 +342,6 @@ func TestDirectivesWorksOnInlineFragmentIfFalseOmitsInlineFragment(t *testing.T) b } } - fragment Frag on TestType { - b - } ` expected := &graphql.Result{ Data: map[string]interface{}{ @@ -368,9 +365,6 @@ func TestDirectivesWorksOnInlineFragmentIfTrueIncludesInlineFragment(t *testing. 
b } } - fragment Frag on TestType { - b - } ` expected := &graphql.Result{ Data: map[string]interface{}{ @@ -395,9 +389,6 @@ func TestDirectivesWorksOnInlineFragmentUnlessFalseIncludesInlineFragment(t *tes b } } - fragment Frag on TestType { - b - } ` expected := &graphql.Result{ Data: map[string]interface{}{ @@ -422,9 +413,6 @@ func TestDirectivesWorksOnInlineFragmentUnlessTrueIncludesInlineFragment(t *test b } } - fragment Frag on TestType { - b - } ` expected := &graphql.Result{ Data: map[string]interface{}{ From f1164a67d88e15a841f05b88f3f34b9ed0652836 Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Wed, 1 Jun 2016 14:08:12 +0800 Subject: [PATCH 04/34] Deepen introspection query https://github.com/graphql/graphql-js/pull/364 Commit: 0b72e7038761bec9fb5319cc08ea93ff8b071a9c [0b72e70] Parents: cbded82952 Author: Mike Solomon Date: 26 April 2016 at 2:37:25 AM SGT Committer: Lee Byron --- testutil/introspection_query.go | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/testutil/introspection_query.go b/testutil/introspection_query.go index d92b81e2..a8914154 100644 --- a/testutil/introspection_query.go +++ b/testutil/introspection_query.go @@ -76,6 +76,22 @@ var IntrospectionQuery = ` ofType { kind name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + } + } + } + } } } } From 237fab47918359a4e16994397c514323858196fd Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Wed, 1 Jun 2016 14:19:26 +0800 Subject: [PATCH 05/34] Improve validation error message when field names conflict https://github.com/graphql/graphql-js/pull/363 * Improve validation error message when field names conflict * Remove extra hint and add unit test --- rules.go | 4 +- ...s_overlapping_fields_can_be_merged_test.go | 91 +++++++++++-------- 2 files changed, 57 insertions(+), 38 deletions(-) diff --git a/rules.go b/rules.go index c5be3f03..02e28ea2 100644 --- a/rules.go +++ b/rules.go @@ -1510,8 +1510,8 @@ func OverlappingFieldsCanBeMergedRule(context *ValidationContext) *ValidationRul reason := c.Reason reportError( context, - fmt.Sprintf( - `Fields "%v" conflict because %v.`, + fmt.Sprintf(`Fields "%v" conflict because %v. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, responseName, reasonMessage(reason), ), diff --git a/rules_overlapping_fields_can_be_merged_test.go b/rules_overlapping_fields_can_be_merged_test.go index b38b13a1..311d5c7f 100644 --- a/rules_overlapping_fields_can_be_merged_test.go +++ b/rules_overlapping_fields_can_be_merged_test.go @@ -74,7 +74,9 @@ func TestValidate_OverlappingFieldsCanBeMerged_SameAliasesWithDifferentFieldTarg fido: nickname } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Fields "fido" conflict because name and nickname are different fields.`, 3, 9, 4, 9), + testutil.RuleError(`Fields "fido" conflict because name and nickname are different fields. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, + 3, 9, 4, 9), }) } func TestValidate_OverlappingFieldsCanBeMerged_SameAliasesAllowedOnNonOverlappingFields(t *testing.T) { @@ -96,7 +98,9 @@ func TestValidate_OverlappingFieldsCanBeMerged_AliasMaskingDirectFieldAccess(t * name } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Fields "name" conflict because nickname and name are different fields.`, 3, 9, 4, 9), + testutil.RuleError(`Fields "name" conflict because nickname and name are different fields. 
`+ + `Use different aliases on the fields to fetch both if this was intentional.`, + 3, 9, 4, 9), }) } func TestValidate_OverlappingFieldsCanBeMerged_DifferentArgs_SecondAddsAnArgument(t *testing.T) { @@ -106,7 +110,9 @@ func TestValidate_OverlappingFieldsCanBeMerged_DifferentArgs_SecondAddsAnArgumen doesKnowCommand(dogCommand: HEEL) } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Fields "doesKnowCommand" conflict because they have differing arguments.`, 3, 9, 4, 9), + testutil.RuleError(`Fields "doesKnowCommand" conflict because they have differing arguments. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, + 3, 9, 4, 9), }) } func TestValidate_OverlappingFieldsCanBeMerged_DifferentArgs_SecondMissingAnArgument(t *testing.T) { @@ -116,7 +122,9 @@ func TestValidate_OverlappingFieldsCanBeMerged_DifferentArgs_SecondMissingAnArgu doesKnowCommand } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Fields "doesKnowCommand" conflict because they have differing arguments.`, 3, 9, 4, 9), + testutil.RuleError(`Fields "doesKnowCommand" conflict because they have differing arguments. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, + 3, 9, 4, 9), }) } func TestValidate_OverlappingFieldsCanBeMerged_ConflictingArgs(t *testing.T) { @@ -126,7 +134,9 @@ func TestValidate_OverlappingFieldsCanBeMerged_ConflictingArgs(t *testing.T) { doesKnowCommand(dogCommand: HEEL) } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Fields "doesKnowCommand" conflict because they have differing arguments.`, 3, 9, 4, 9), + testutil.RuleError(`Fields "doesKnowCommand" conflict because they have differing arguments. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, + 3, 9, 4, 9), }) } func TestValidate_OverlappingFieldsCanBeMerged_AllowDifferentArgsWhereNoConflictIsPossible(t *testing.T) { @@ -156,7 +166,9 @@ func TestValidate_OverlappingFieldsCanBeMerged_EncountersConflictInFragments(t * x: b } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Fields "x" conflict because a and b are different fields.`, 7, 9, 10, 9), + testutil.RuleError(`Fields "x" conflict because a and b are different fields. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, + 7, 9, 10, 9), }) } func TestValidate_OverlappingFieldsCanBeMerged_ReportsEachConflictOnce(t *testing.T) { @@ -183,9 +195,15 @@ func TestValidate_OverlappingFieldsCanBeMerged_ReportsEachConflictOnce(t *testin x: b } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Fields "x" conflict because a and b are different fields.`, 18, 9, 21, 9), - testutil.RuleError(`Fields "x" conflict because a and c are different fields.`, 18, 9, 14, 11), - testutil.RuleError(`Fields "x" conflict because b and c are different fields.`, 21, 9, 14, 11), + testutil.RuleError(`Fields "x" conflict because a and b are different fields. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, + 18, 9, 21, 9), + testutil.RuleError(`Fields "x" conflict because a and c are different fields. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, + 18, 9, 14, 11), + testutil.RuleError(`Fields "x" conflict because b and c are different fields. 
`+ + `Use different aliases on the fields to fetch both if this was intentional.`, + 21, 9, 14, 11), }) } func TestValidate_OverlappingFieldsCanBeMerged_DeepConflict(t *testing.T) { @@ -199,7 +217,8 @@ func TestValidate_OverlappingFieldsCanBeMerged_DeepConflict(t *testing.T) { } } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Fields "field" conflict because subfields "x" conflict because a and b are different fields.`, + testutil.RuleError(`Fields "field" conflict because subfields "x" conflict because a and b are different fields. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, 3, 9, 4, 11, 6, 9, @@ -219,9 +238,9 @@ func TestValidate_OverlappingFieldsCanBeMerged_DeepConflictWithMultipleIssues(t } } `, []gqlerrors.FormattedError{ - testutil.RuleError( - `Fields "field" conflict because subfields "x" conflict because a and b are different fields and `+ - `subfields "y" conflict because c and d are different fields.`, + testutil.RuleError(`Fields "field" conflict because subfields "x" conflict because a and b are different fields and `+ + `subfields "y" conflict because c and d are different fields. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, 3, 9, 4, 11, 5, 11, @@ -245,9 +264,9 @@ func TestValidate_OverlappingFieldsCanBeMerged_VeryDeepConflict(t *testing.T) { } } `, []gqlerrors.FormattedError{ - testutil.RuleError( - `Fields "field" conflict because subfields "deepField" conflict because subfields "x" conflict because `+ - `a and b are different fields.`, + testutil.RuleError(`Fields "field" conflict because subfields "deepField" conflict because subfields "x" conflict because `+ + `a and b are different fields. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, 3, 9, 4, 11, 5, 13, @@ -274,9 +293,9 @@ func TestValidate_OverlappingFieldsCanBeMerged_ReportsDeepConflictToNearestCommo } } `, []gqlerrors.FormattedError{ - testutil.RuleError( - `Fields "deepField" conflict because subfields "x" conflict because `+ - `a and b are different fields.`, + testutil.RuleError(`Fields "deepField" conflict because subfields "x" conflict because `+ + `a and b are different fields. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, 4, 11, 5, 13, 7, 11, @@ -486,8 +505,8 @@ func TestValidate_OverlappingFieldsCanBeMerged_ReturnTypesMustBeUnambiguous_Conf } } `, []gqlerrors.FormattedError{ - testutil.RuleError( - `Fields "scalar" conflict because they return conflicting types Int and String!.`, + testutil.RuleError(`Fields "scalar" conflict because they return conflicting types Int and String!. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, 5, 15, 8, 15), }) @@ -526,8 +545,8 @@ func TestValidate_OverlappingFieldsCanBeMerged_ReturnTypesMustBeUnambiguous_Disa } } `, []gqlerrors.FormattedError{ - testutil.RuleError( - `Fields "scalar" conflict because they return conflicting types Int and String.`, + testutil.RuleError(`Fields "scalar" conflict because they return conflicting types Int and String. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, 5, 15, 8, 15), }) @@ -545,8 +564,8 @@ func TestValidate_OverlappingFieldsCanBeMerged_ReturnTypesMustBeUnambiguous_Disa } } `, []gqlerrors.FormattedError{ - testutil.RuleError( - `Fields "scalar" conflict because they return conflicting types String! and String.`, + testutil.RuleError(`Fields "scalar" conflict because they return conflicting types String! 
and String. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, 5, 15, 8, 15), }) @@ -568,8 +587,8 @@ func TestValidate_OverlappingFieldsCanBeMerged_ReturnTypesMustBeUnambiguous_Disa } } `, []gqlerrors.FormattedError{ - testutil.RuleError( - `Fields "box" conflict because they return conflicting types [StringBox] and StringBox.`, + testutil.RuleError(`Fields "box" conflict because they return conflicting types [StringBox] and StringBox. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, 5, 15, 10, 15), }) @@ -590,8 +609,8 @@ func TestValidate_OverlappingFieldsCanBeMerged_ReturnTypesMustBeUnambiguous_Disa } } `, []gqlerrors.FormattedError{ - testutil.RuleError( - `Fields "box" conflict because they return conflicting types StringBox and [StringBox].`, + testutil.RuleError(`Fields "box" conflict because they return conflicting types StringBox and [StringBox]. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, 5, 15, 10, 15), }) @@ -614,8 +633,8 @@ func TestValidate_OverlappingFieldsCanBeMerged_ReturnTypesMustBeUnambiguous_Disa } } `, []gqlerrors.FormattedError{ - testutil.RuleError( - `Fields "val" conflict because scalar and unrelatedField are different fields.`, + testutil.RuleError(`Fields "val" conflict because scalar and unrelatedField are different fields. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, 6, 17, 7, 17), }) @@ -637,8 +656,8 @@ func TestValidate_OverlappingFieldsCanBeMerged_ReturnTypesMustBeUnambiguous_Disa } } `, []gqlerrors.FormattedError{ - testutil.RuleError( - `Fields "box" conflict because subfields "scalar" conflict because they return conflicting types String and Int.`, + testutil.RuleError(`Fields "box" conflict because subfields "scalar" conflict because they return conflicting types String and Int. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, 5, 15, 6, 17, 10, 15, @@ -704,9 +723,9 @@ func TestValidate_OverlappingFieldsCanBeMerged_ReturnTypesMustBeUnambiguous_Comp } } `, []gqlerrors.FormattedError{ - testutil.RuleError( - `Fields "edges" conflict because subfields "node" conflict because subfields "id" conflict because `+ - `id and name are different fields.`, + testutil.RuleError(`Fields "edges" conflict because subfields "node" conflict because subfields "id" conflict because `+ + `id and name are different fields. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, 14, 11, 15, 13, 16, 15, From 46c5850c75445ab1f1005625ca26022074b6d8b3 Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Wed, 1 Jun 2016 14:49:40 +0800 Subject: [PATCH 06/34] Rename `type_comparator` test to mark as internal test. 
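In Go, a test file that declares the library's own package (`package graphql`
rather than `package graphql_test`) is an "internal" test: it is compiled into
the package itself and can exercise unexported identifiers directly. The
`_internal_test.go` suffix only makes that intent visible in the file name.
A minimal sketch of the shape of such a file, assuming an unexported
comparator like `isEqualType` (the helper the renamed file's name suggests):

    package graphql

    import "testing"

    // Internal test: same package as the library, so unexported helpers
    // are reachable without exporting them. isEqualType is assumed here,
    // based on the renamed type_comparators file.
    func TestIsEqualType_TypeEqualsItself(t *testing.T) {
        if !isEqualType(Int, Int) {
            t.Fatal("expected a type to compare equal to itself")
        }
    }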
--- type_comparators_test.go => type_comparators_internal_test.go | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename type_comparators_test.go => type_comparators_internal_test.go (100%) diff --git a/type_comparators_test.go b/type_comparators_internal_test.go similarity index 100% rename from type_comparators_test.go rename to type_comparators_internal_test.go From 91a3aa2ea11af3094e56df2b6f93fd42ce24963e Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Wed, 1 Jun 2016 23:16:47 +0800 Subject: [PATCH 07/34] Include possible field, argument, type names when validation fails (#355) * Add suggestionList to return strings based on how simular they are to the input * Suggests valid fields in `FieldsOnCorrectType` * Suggest argument names * Suggested valid type names * Fix flow and unit test * addressed comments in PR: move file, update comment, filter out more options, remove redundant warning * fix typos * fix lint Commit: 5bc1b2541d1b5767de4016e10ae77021f81310fc [5bc1b25] Parents: 4b08c36e86 Author: Yuzhi Date: 27 April 2016 at 2:48:45 AM SGT Committer: Lee Byron --- rules.go | 188 +++++++++++++++++++++++++-- rules_fields_on_correct_type_test.go | 32 +++-- rules_known_type_names_test.go | 2 +- suggested_list_internal_test.go | 28 ++++ testutil/rules_test_harness.go | 4 - validator_test.go | 8 +- 6 files changed, 229 insertions(+), 33 deletions(-) create mode 100644 suggested_list_internal_test.go diff --git a/rules.go b/rules.go index 02e28ea2..7a11c344 100644 --- a/rules.go +++ b/rules.go @@ -7,6 +7,7 @@ import ( "github.com/graphql-go/graphql/language/kinds" "github.com/graphql-go/graphql/language/printer" "github.com/graphql-go/graphql/language/visitor" + "math" "sort" "strings" ) @@ -162,8 +163,14 @@ func DefaultValuesOfCorrectTypeRule(context *ValidationContext) *ValidationRuleI VisitorOpts: visitorOpts, } } - -func UndefinedFieldMessage(fieldName string, ttypeName string, suggestedTypes []string) string { +func quoteStrings(slice []string) []string { + quoted := []string{} + for _, s := range slice { + quoted = append(quoted, fmt.Sprintf(`"%v"`, s)) + } + return quoted +} +func UndefinedFieldMessage(fieldName string, ttypeName string, suggestedTypes []string, suggestedFields []string) string { quoteStrings := func(slice []string) []string { quoted := []string{} @@ -175,15 +182,27 @@ func UndefinedFieldMessage(fieldName string, ttypeName string, suggestedTypes [] // construct helpful (but long) message message := fmt.Sprintf(`Cannot query field "%v" on type "%v".`, fieldName, ttypeName) - suggestions := strings.Join(quoteStrings(suggestedTypes), ", ") const MaxLength = 5 if len(suggestedTypes) > 0 { + suggestions := "" if len(suggestedTypes) > MaxLength { suggestions = strings.Join(quoteStrings(suggestedTypes[0:MaxLength]), ", ") + fmt.Sprintf(`, and %v other types`, len(suggestedTypes)-MaxLength) + } else { + suggestions = strings.Join(quoteStrings(suggestedTypes), ", ") + } + message = fmt.Sprintf(`%v However, this field exists on %v. 
`+ + `Perhaps you meant to use an inline fragment?`, message, suggestions) + } + if len(suggestedFields) > 0 { + suggestions := "" + if len(suggestedFields) > MaxLength { + suggestions = strings.Join(quoteStrings(suggestedFields[0:MaxLength]), ", ") + + fmt.Sprintf(`, or %v other field`, len(suggestedFields)-MaxLength) + } else { + suggestions = strings.Join(quoteStrings(suggestedFields), ", ") } - message = message + fmt.Sprintf(` However, this field exists on %v.`, suggestions) - message = message + ` Perhaps you meant to use an inline fragment?` + message = fmt.Sprintf(`%v Did you mean to query %v?`, message, suggestions) } return message @@ -232,11 +251,29 @@ func FieldsOnCorrectTypeRule(context *ValidationContext) *ValidationRuleInstance } } - message := UndefinedFieldMessage(nodeName, ttype.Name(), suggestedTypes) + suggestedFieldNames := []string{} + suggestedFields := []string{} + switch ttype := ttype.(type) { + case *Object: + for name := range ttype.Fields() { + suggestedFieldNames = append(suggestedFieldNames, name) + } + suggestedFields = suggestionList(nodeName, suggestedFieldNames) + case *Interface: + for name := range ttype.Fields() { + suggestedFieldNames = append(suggestedFieldNames, name) + } + suggestedFields = suggestionList(nodeName, suggestedFieldNames) + case *InputObject: + for name := range ttype.Fields() { + suggestedFieldNames = append(suggestedFieldNames, name) + } + suggestedFields = suggestionList(nodeName, suggestedFieldNames) + } reportError( context, - message, + UndefinedFieldMessage(nodeName, ttype.Name(), suggestedTypes, suggestedFields), []ast.Node{node}, ) } @@ -380,6 +417,28 @@ func FragmentsOnCompositeTypesRule(context *ValidationContext) *ValidationRuleIn } } +func unknownArgMessage(argName string, fieldName string, parentTypeName string, suggestedArgs []string) string { + message := fmt.Sprintf(`Unknown argument "%v" on field "%v" of type "%v".`, argName, fieldName, parentTypeName) + + if len(suggestedArgs) > 0 { + suggestions := strings.Join(quoteStrings(suggestedArgs), ", ") + message = fmt.Sprintf(`%v Perhaps you meant %v?`, message, suggestions) + } + + return message +} + +func unknownDirectiveArgMessage(argName string, directiveName string, suggestedArgs []string) string { + message := fmt.Sprintf(`Unknown argument "%v" on directive "@%v".`, argName, directiveName) + + if len(suggestedArgs) > 0 { + suggestions := strings.Join(quoteStrings(suggestedArgs), ", ") + message = fmt.Sprintf(`%v Perhaps you meant %v?`, message, suggestions) + } + + return message +} + // KnownArgumentNamesRule Known argument names // // A GraphQL field is only valid if all supplied arguments are defined by @@ -399,6 +458,7 @@ func KnownArgumentNamesRule(context *ValidationContext) *ValidationRuleInstance if argumentOf == nil { return action, result } + var fieldArgDef *Argument if argumentOf.GetKind() == kinds.Field { fieldDef := context.FieldDef() if fieldDef == nil { @@ -408,8 +468,9 @@ func KnownArgumentNamesRule(context *ValidationContext) *ValidationRuleInstance if node.Name != nil { nodeName = node.Name.Value } - var fieldArgDef *Argument + argNames := []string{} for _, arg := range fieldDef.Args { + argNames = append(argNames, arg.Name()) if arg.Name() == nodeName { fieldArgDef = arg } @@ -422,7 +483,7 @@ func KnownArgumentNamesRule(context *ValidationContext) *ValidationRuleInstance } reportError( context, - fmt.Sprintf(`Unknown argument "%v" on field "%v" of type "%v".`, nodeName, fieldDef.Name, parentTypeName), + unknownArgMessage(nodeName, fieldDef.Name, 
parentTypeName, suggestionList(nodeName, argNames)), []ast.Node{node}, ) } @@ -435,8 +496,10 @@ func KnownArgumentNamesRule(context *ValidationContext) *ValidationRuleInstance if node.Name != nil { nodeName = node.Name.Value } + argNames := []string{} var directiveArgDef *Argument for _, arg := range directive.Args { + argNames = append(argNames, arg.Name()) if arg.Name() == nodeName { directiveArgDef = arg } @@ -444,7 +507,7 @@ func KnownArgumentNamesRule(context *ValidationContext) *ValidationRuleInstance if directiveArgDef == nil { reportError( context, - fmt.Sprintf(`Unknown argument "%v" on directive "@%v".`, nodeName, directive.Name), + unknownDirectiveArgMessage(nodeName, directive.Name, suggestionList(nodeName, argNames)), []ast.Node{node}, ) } @@ -606,6 +669,23 @@ func KnownFragmentNamesRule(context *ValidationContext) *ValidationRuleInstance } } +func unknownTypeMessage(typeName string, suggestedTypes []string) string { + message := fmt.Sprintf(`Unknown type "%v".`, typeName) + + const MaxLength = 5 + if len(suggestedTypes) > 0 { + suggestions := "" + if len(suggestedTypes) < MaxLength { + suggestions = strings.Join(quoteStrings(suggestedTypes), ", ") + } else { + suggestions = strings.Join(quoteStrings(suggestedTypes[0:MaxLength]), ", ") + } + message = fmt.Sprintf(`%v Perhaps you meant one of the following: %v?`, message, suggestions) + } + + return message +} + // KnownTypeNamesRule Known type names // // A GraphQL document is only valid if referenced types (specifically @@ -643,9 +723,13 @@ func KnownTypeNamesRule(context *ValidationContext) *ValidationRuleInstance { } ttype := context.Schema().Type(typeNameValue) if ttype == nil { + suggestedTypes := []string{} + for key := range context.Schema().TypeMap() { + suggestedTypes = append(suggestedTypes, key) + } reportError( context, - fmt.Sprintf(`Unknown type "%v".`, typeNameValue), + unknownTypeMessage(typeNameValue, suggestionList(typeNameValue, suggestedTypes)), []ast.Node{node}, ) } @@ -2210,3 +2294,85 @@ func isValidLiteralValue(ttype Input, valueAST ast.Value) (bool, []string) { return true, nil } + +// Internal struct to sort results from suggestionList() +type suggestionListResult struct { + Options []string + Distances []float64 +} + +func (s suggestionListResult) Len() int { + return len(s.Options) +} +func (s suggestionListResult) Swap(i, j int) { + s.Options[i], s.Options[j] = s.Options[j], s.Options[i] +} +func (s suggestionListResult) Less(i, j int) bool { + return s.Distances[i] < s.Distances[j] +} + +// suggestionList Given an invalid input string and a list of valid options, returns a filtered +// list of valid options sorted based on their similarity with the input. +func suggestionList(input string, options []string) []string { + dists := []float64{} + filteredOpts := []string{} + inputThreshold := float64(len(input) / 2) + + for _, opt := range options { + dist := lexicalDistance(input, opt) + threshold := math.Max(inputThreshold, float64(len(opt)/2)) + threshold = math.Max(threshold, 1) + if dist <= threshold { + filteredOpts = append(filteredOpts, opt) + dists = append(dists, dist) + } + } + //sort results + suggested := suggestionListResult{filteredOpts, dists} + sort.Sort(suggested) + return suggested.Options +} + +// lexicalDistance Computes the lexical distance between strings A and B. +// The "distance" between two strings is given by counting the minimum number +// of edits needed to transform string A into string B. 
An edit can be an +// insertion, deletion, or substitution of a single character, or a swap of two +// adjacent characters. +// This distance can be useful for detecting typos in input or sorting +func lexicalDistance(a, b string) float64 { + d := [][]float64{} + aLen := len(a) + bLen := len(b) + for i := 0; i <= aLen; i++ { + d = append(d, []float64{float64(i)}) + } + for k := 1; k <= bLen; k++ { + d[0] = append(d[0], float64(k)) + } + + for i := 1; i <= aLen; i++ { + for k := 1; k <= bLen; k++ { + cost := 1.0 + if a[i-1] == b[k-1] { + cost = 0.0 + } + minCostFloat := math.Min( + d[i-1][k]+1.0, + d[i][k-1]+1.0, + ) + minCostFloat = math.Min( + minCostFloat, + d[i-1][k-1]+cost, + ) + d[i] = append(d[i], minCostFloat) + + if i > 1 && k < 1 && + a[i-1] == b[k-2] && + a[i-2] == b[k-1] { + d[i][k] = math.Min(d[i][k], d[i-2][k-2]+cost) + } + } + } + + return d[aLen][bLen] +} diff --git a/rules_fields_on_correct_type_test.go b/rules_fields_on_correct_type_test.go index 294a0682..e9d00b6a 100644 --- a/rules_fields_on_correct_type_test.go +++ b/rules_fields_on_correct_type_test.go @@ -73,7 +73,7 @@ func TestValidate_FieldsOnCorrectType_FieldNotDefinedOnFragment(t *testing.T) { meowVolume } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Cannot query field "meowVolume" on type "Dog".`, 3, 9), + testutil.RuleError(`Cannot query field "meowVolume" on type "Dog". Did you mean to query "barkVolume"?`, 3, 9), }) } func TestValidate_FieldsOnCorrectType_IgnoreDeeplyUnknownField(t *testing.T) { @@ -106,7 +106,7 @@ func TestValidate_FieldsOnCorrectType_FieldNotDefinedOnInlineFragment(t *testing } } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Cannot query field "meowVolume" on type "Dog".`, 4, 11), + testutil.RuleError(`Cannot query field "meowVolume" on type "Dog". Did you mean to query "barkVolume"?`, 4, 11), }) } func TestValidate_FieldsOnCorrectType_AliasedFieldTargetNotDefined(t *testing.T) { @@ -115,7 +115,7 @@ func TestValidate_FieldsOnCorrectType_AliasedFieldTargetNotDefined(t *testing.T) volume : mooVolume } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Cannot query field "mooVolume" on type "Dog".`, 3, 9), + testutil.RuleError(`Cannot query field "mooVolume" on type "Dog". Did you mean to query "barkVolume"?`, 3, 9), }) } func TestValidate_FieldsOnCorrectType_AliasedLyingFieldTargetNotDefined(t *testing.T) { @@ -124,7 +124,7 @@ func TestValidate_FieldsOnCorrectType_AliasedLyingFieldTargetNotDefined(t *testi barkVolume : kawVolume } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Cannot query field "kawVolume" on type "Dog".`, 3, 9), + testutil.RuleError(`Cannot query field "kawVolume" on type "Dog". Did you mean to query "barkVolume"?`, 3, 9), }) } func TestValidate_FieldsOnCorrectType_NotDefinedOnInterface(t *testing.T) { @@ -142,7 +142,7 @@ func TestValidate_FieldsOnCorrectType_DefinedOnImplementorsButNotOnInterface(t * nickname } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Cannot query field "nickname" on type "Pet". However, this field exists on "Cat", "Dog". Perhaps you meant to use an inline fragment?`, 3, 9), + testutil.RuleError(`Cannot query field "nickname" on type "Pet". However, this field exists on "Cat", "Dog". Perhaps you meant to use an inline fragment? 
Did you mean to query "name"?`, 3, 9), }) } func TestValidate_FieldsOnCorrectType_MetaFieldSelectionOnUnion(t *testing.T) { @@ -184,27 +184,33 @@ func TestValidate_FieldsOnCorrectType_ValidFieldInInlineFragment(t *testing.T) { } func TestValidate_FieldsOnCorrectTypeErrorMessage_WorksWithNoSuggestions(t *testing.T) { - message := graphql.UndefinedFieldMessage("T", "f", []string{}) - expected := `Cannot query field "T" on type "f".` + message := graphql.UndefinedFieldMessage("f", "T", []string{}, []string{}) + expected := `Cannot query field "f" on type "T".` if message != expected { t.Fatalf("Unexpected message, expected: %v, got %v", expected, message) } } func TestValidate_FieldsOnCorrectTypeErrorMessage_WorksWithNoSmallNumbersOfSuggestions(t *testing.T) { - message := graphql.UndefinedFieldMessage("T", "f", []string{"A", "B"}) - expected := `Cannot query field "T" on type "f". ` + + message := graphql.UndefinedFieldMessage("f", "T", []string{"A", "B"}, []string{"z", "y"}) + expected := `Cannot query field "f" on type "T". ` + `However, this field exists on "A", "B". ` + - `Perhaps you meant to use an inline fragment?` + `Perhaps you meant to use an inline fragment? ` + + `Did you mean to query "z", "y"?` if message != expected { t.Fatalf("Unexpected message, expected: %v, got %v", expected, message) } } func TestValidate_FieldsOnCorrectTypeErrorMessage_WorksWithLotsOfSuggestions(t *testing.T) { - message := graphql.UndefinedFieldMessage("T", "f", []string{"A", "B", "C", "D", "E", "F"}) - expected := `Cannot query field "T" on type "f". ` + + message := graphql.UndefinedFieldMessage( + "f", "T", + []string{"A", "B", "C", "D", "E", "F"}, + []string{"z", "y", "x", "w", "v", "u"}, + ) + expected := `Cannot query field "f" on type "T". ` + `However, this field exists on "A", "B", "C", "D", "E", and 1 other types. ` + - `Perhaps you meant to use an inline fragment?` + `Perhaps you meant to use an inline fragment? ` + + `Did you mean to query "z", "y", "x", "w", "v", or 1 other field?` if message != expected { t.Fatalf("Unexpected message, expected: %v, got %v", expected, message) } diff --git a/rules_known_type_names_test.go b/rules_known_type_names_test.go index eec9a0ae..e984f932 100644 --- a/rules_known_type_names_test.go +++ b/rules_known_type_names_test.go @@ -34,7 +34,7 @@ func TestValidate_KnownTypeNames_UnknownTypeNamesAreInValid(t *testing.T) { `, []gqlerrors.FormattedError{ testutil.RuleError(`Unknown type "JumbledUpLetters".`, 2, 23), testutil.RuleError(`Unknown type "Badger".`, 5, 25), - testutil.RuleError(`Unknown type "Peettt".`, 8, 29), + testutil.RuleError(`Unknown type "Peettt". 
Perhaps you meant one of the following: "Pet"?`, 8, 29), }) } diff --git a/suggested_list_internal_test.go b/suggested_list_internal_test.go new file mode 100644 index 00000000..52e04ae3 --- /dev/null +++ b/suggested_list_internal_test.go @@ -0,0 +1,28 @@ +package graphql + +import ( + "reflect" + "testing" +) + +func TestSuggestionList_ReturnsResultsWhenInputIsEmpty(t *testing.T) { + expected := []string{"a"} + result := suggestionList("", []string{"a"}) + if !reflect.DeepEqual(expected, result) { + t.Fatalf("Expected %v, got: %v", expected, result) + } +} +func TestSuggestionList_ReturnsEmptyArrayWhenThereAreNoOptions(t *testing.T) { + expected := []string{} + result := suggestionList("input", []string{}) + if !reflect.DeepEqual(expected, result) { + t.Fatalf("Expected %v, got: %v", expected, result) + } +} +func TestSuggestionList_ReturnsOptionsSortedBasedOnSimilarity(t *testing.T) { + expected := []string{"abc", "ab"} + result := suggestionList("abc", []string{"a", "ab", "abc"}) + if !reflect.DeepEqual(expected, result) { + t.Fatalf("Expected %v, got: %v", expected, result) + } +} diff --git a/testutil/rules_test_harness.go b/testutil/rules_test_harness.go index 035d8a61..db8ed7e8 100644 --- a/testutil/rules_test_harness.go +++ b/testutil/rules_test_harness.go @@ -182,10 +182,6 @@ func init() { dogType, catType, }, - ResolveType: func(p graphql.ResolveTypeParams) *graphql.Object { - // not used for validation - return nil - }, }) var intelligentInterface = graphql.NewInterface(graphql.InterfaceConfig{ Name: "Intelligent", diff --git a/validator_test.go b/validator_test.go index f7f19e57..770c2f35 100644 --- a/validator_test.go +++ b/validator_test.go @@ -1,6 +1,7 @@ package graphql_test import ( + "reflect" "testing" "github.com/graphql-go/graphql" @@ -10,7 +11,6 @@ import ( "github.com/graphql-go/graphql/language/parser" "github.com/graphql-go/graphql/language/source" "github.com/graphql-go/graphql/testutil" - "reflect" ) func expectValid(t *testing.T, schema *graphql.Schema, queryString string) { @@ -74,19 +74,19 @@ func TestValidator_SupportsFullValidation_ValidatesUsingACustomTypeInfo(t *testi expectedErrors := []gqlerrors.FormattedError{ { - Message: "Cannot query field \"catOrDog\" on type \"QueryRoot\".", + Message: `Cannot query field "catOrDog" on type "QueryRoot". Did you mean to query "catOrDog"?`, Locations: []location.SourceLocation{ {Line: 3, Column: 9}, }, }, { - Message: "Cannot query field \"furColor\" on type \"Cat\".", + Message: `Cannot query field "furColor" on type "Cat". Did you mean to query "furColor"?`, Locations: []location.SourceLocation{ {Line: 5, Column: 13}, }, }, { - Message: "Cannot query field \"isHousetrained\" on type \"Dog\".", + Message: `Cannot query field "isHousetrained" on type "Dog". 
Did you mean to query "isHousetrained"?`, Locations: []location.SourceLocation{ {Line: 8, Column: 13}, }, From 599c57728612eda52b37719b72a9604f5efc8ba1 Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Fri, 3 Jun 2016 15:23:33 +0800 Subject: [PATCH 08/34] Minor clean up - `quoteStrings` already defined outside on package scope --- rules.go | 9 --------- 1 file changed, 9 deletions(-) diff --git a/rules.go b/rules.go index 7a11c344..1d99beba 100644 --- a/rules.go +++ b/rules.go @@ -172,15 +172,6 @@ func quoteStrings(slice []string) []string { } func UndefinedFieldMessage(fieldName string, ttypeName string, suggestedTypes []string, suggestedFields []string) string { - quoteStrings := func(slice []string) []string { - quoted := []string{} - for _, s := range slice { - quoted = append(quoted, fmt.Sprintf(`"%v"`, s)) - } - return quoted - } - - // construct helpful (but long) message message := fmt.Sprintf(`Cannot query field "%v" on type "%v".`, fieldName, ttypeName) const MaxLength = 5 if len(suggestedTypes) > 0 { From 96fc0ad1a282a05d8de5226289fed06952525647 Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Tue, 7 Jun 2016 00:25:58 +0800 Subject: [PATCH 09/34] Minor refactoring of error messages for unknown fields Commit: 30a783a2a4f100cad0f31085284895bf51aa565a [30a783a] Parents: 5bc1b2541d Author: Lee Byron Date: 27 April 2016 at 8:49:33 AM SGT --- definition.go | 3 + rules.go | 206 +++++++++++++-------------- rules_fields_on_correct_type_test.go | 55 ++++--- validator_test.go | 6 +- 4 files changed, 147 insertions(+), 123 deletions(-) diff --git a/definition.go b/definition.go index a3b03d22..b203ad6c 100644 --- a/definition.go +++ b/definition.go @@ -120,6 +120,9 @@ var _ Output = (*NonNull)(nil) // Composite interface for types that may describe the parent context of a selection set. type Composite interface { Name() string + Description() string + String() string + Error() error } var _ Composite = (*Object)(nil) diff --git a/rules.go b/rules.go index 1d99beba..c544a0ff 100644 --- a/rules.go +++ b/rules.go @@ -2,14 +2,15 @@ package graphql import ( "fmt" + "math" + "sort" + "strings" + "github.com/graphql-go/graphql/gqlerrors" "github.com/graphql-go/graphql/language/ast" "github.com/graphql-go/graphql/language/kinds" "github.com/graphql-go/graphql/language/printer" "github.com/graphql-go/graphql/language/visitor" - "math" - "sort" - "strings" ) // SpecifiedRules set includes all validation rules defined by the GraphQL spec. 
@@ -170,32 +171,33 @@ func quoteStrings(slice []string) []string { } return quoted } -func UndefinedFieldMessage(fieldName string, ttypeName string, suggestedTypes []string, suggestedFields []string) string { - +func quotedOrList(slice []string) string { + quoted := quoteStrings(slice) + if len(quoted) > 1 { + return fmt.Sprintf("%v or %v", strings.Join(quoted[0:len(quoted)-1], ", "), quoted[len(quoted)-1]) + } + return quoted[0] +} +func UndefinedFieldMessage(fieldName string, ttypeName string, suggestedTypeNames []string, suggestedFieldNames []string) string { message := fmt.Sprintf(`Cannot query field "%v" on type "%v".`, fieldName, ttypeName) const MaxLength = 5 - if len(suggestedTypes) > 0 { + if len(suggestedTypeNames) > 0 { suggestions := "" - if len(suggestedTypes) > MaxLength { - suggestions = strings.Join(quoteStrings(suggestedTypes[0:MaxLength]), ", ") + - fmt.Sprintf(`, and %v other types`, len(suggestedTypes)-MaxLength) + if len(suggestedTypeNames) > MaxLength { + suggestions = quotedOrList(suggestedTypeNames[0:MaxLength]) } else { - suggestions = strings.Join(quoteStrings(suggestedTypes), ", ") + suggestions = quotedOrList(suggestedTypeNames) } - message = fmt.Sprintf(`%v However, this field exists on %v. `+ - `Perhaps you meant to use an inline fragment?`, message, suggestions) - } - if len(suggestedFields) > 0 { + message = fmt.Sprintf(`%v Did you mean to use an inline fragment on %v?`, message, suggestions) + } else if len(suggestedFieldNames) > 0 { suggestions := "" - if len(suggestedFields) > MaxLength { - suggestions = strings.Join(quoteStrings(suggestedFields[0:MaxLength]), ", ") + - fmt.Sprintf(`, or %v other field`, len(suggestedFields)-MaxLength) + if len(suggestedFieldNames) > MaxLength { + suggestions = quotedOrList(suggestedFieldNames[0:MaxLength]) } else { - suggestions = strings.Join(quoteStrings(suggestedFields), ", ") + suggestions = quotedOrList(suggestedFieldNames) } - message = fmt.Sprintf(`%v Did you mean to query %v?`, message, suggestions) + message = fmt.Sprintf(`%v Did you mean %v?`, message, suggestions) } - return message } @@ -216,55 +218,23 @@ func FieldsOnCorrectTypeRule(context *ValidationContext) *ValidationRuleInstance if ttype != nil { fieldDef := context.FieldDef() if fieldDef == nil { - // This isn't valid. Let's find suggestions, if any. - suggestedTypes := []string{} - + // This field doesn't exist, lets look for suggestions. nodeName := "" if node.Name != nil { nodeName = node.Name.Value } + // First determine if there are any suggested types to condition on. + suggestedTypeNames := getSuggestedTypeNames(context.Schema(), ttype, nodeName) - if ttype, ok := ttype.(Abstract); ok && IsAbstractType(ttype) { - siblingInterfaces := getSiblingInterfacesIncludingField(context.Schema(), ttype, nodeName) - implementations := getImplementationsIncludingField(context.Schema(), ttype, nodeName) - suggestedMaps := map[string]bool{} - for _, s := range siblingInterfaces { - if _, ok := suggestedMaps[s]; !ok { - suggestedMaps[s] = true - suggestedTypes = append(suggestedTypes, s) - } - } - for _, s := range implementations { - if _, ok := suggestedMaps[s]; !ok { - suggestedMaps[s] = true - suggestedTypes = append(suggestedTypes, s) - } - } - } - + // If there are no suggested types, then perhaps this was a typo? 
suggestedFieldNames := []string{} - suggestedFields := []string{} - switch ttype := ttype.(type) { - case *Object: - for name := range ttype.Fields() { - suggestedFieldNames = append(suggestedFieldNames, name) - } - suggestedFields = suggestionList(nodeName, suggestedFieldNames) - case *Interface: - for name := range ttype.Fields() { - suggestedFieldNames = append(suggestedFieldNames, name) - } - suggestedFields = suggestionList(nodeName, suggestedFieldNames) - case *InputObject: - for name := range ttype.Fields() { - suggestedFieldNames = append(suggestedFieldNames, name) - } - suggestedFields = suggestionList(nodeName, suggestedFieldNames) + if len(suggestedTypeNames) == 0 { + suggestedFieldNames = getSuggestedFieldNames(context.Schema(), ttype, nodeName) } reportError( context, - UndefinedFieldMessage(nodeName, ttype.Name(), suggestedTypes, suggestedFields), + UndefinedFieldMessage(nodeName, ttype.Name(), suggestedTypeNames, suggestedFieldNames), []ast.Node{node}, ) } @@ -280,73 +250,100 @@ func FieldsOnCorrectTypeRule(context *ValidationContext) *ValidationRuleInstance } } -// Return implementations of `type` that include `fieldName` as a valid field. -func getImplementationsIncludingField(schema *Schema, ttype Abstract, fieldName string) []string { +// getSuggestedTypeNames Go through all of the implementations of type, as well as the interfaces +// that they implement. If any of those types include the provided field, +// suggest them, sorted by how often the type is referenced, starting +// with Interfaces. +func getSuggestedTypeNames(schema *Schema, ttype Output, fieldName string) []string { - result := []string{} - for _, t := range schema.PossibleTypes(ttype) { - fields := t.Fields() - if _, ok := fields[fieldName]; ok { - result = append(result, fmt.Sprintf(`%v`, t.Name())) - } - } - - sort.Strings(result) - return result -} - -// Go through all of the implementations of type, and find other interaces -// that they implement. If those interfaces include `field` as a valid field, -// return them, sorted by how often the implementations include the other -// interface. -func getSiblingInterfacesIncludingField(schema *Schema, ttype Abstract, fieldName string) []string { - implementingObjects := schema.PossibleTypes(ttype) - - result := []string{} - suggestedInterfaceSlice := []*suggestedInterface{} + possibleTypes := schema.PossibleTypes(ttype) - // stores a map of interface name => index in suggestedInterfaceSlice + suggestedObjectTypes := []string{} + suggestedInterfaces := []*suggestedInterface{} + // stores a map of interface name => index in suggestedInterfaces suggestedInterfaceMap := map[string]int{} + // stores a maps of object name => true to remove duplicates from results + suggestedObjectMap := map[string]bool{} - for _, t := range implementingObjects { - for _, i := range t.Interfaces() { - if i == nil { - continue - } - fields := i.Fields() - if _, ok := fields[fieldName]; !ok { + for _, possibleType := range possibleTypes { + if field, ok := possibleType.Fields()[fieldName]; !ok || field == nil { + continue + } + // This object type defines this field. + suggestedObjectTypes = append(suggestedObjectTypes, possibleType.Name()) + suggestedObjectMap[possibleType.Name()] = true + + for _, possibleInterface := range possibleType.Interfaces() { + if field, ok := possibleInterface.Fields()[fieldName]; !ok || field == nil { continue } - index, ok := suggestedInterfaceMap[i.Name()] + + // This interface type defines this field. 
+ + // - find the index of the suggestedInterface and retrieving the interface + // - increase count + index, ok := suggestedInterfaceMap[possibleInterface.Name()] if !ok { - suggestedInterfaceSlice = append(suggestedInterfaceSlice, &suggestedInterface{ - name: i.Name(), + suggestedInterfaces = append(suggestedInterfaces, &suggestedInterface{ + name: possibleInterface.Name(), count: 0, }) - index = len(suggestedInterfaceSlice) - 1 + index = len(suggestedInterfaces) - 1 + suggestedInterfaceMap[possibleInterface.Name()] = index } - if index < len(suggestedInterfaceSlice) { - s := suggestedInterfaceSlice[index] - if s.name == i.Name() { + if index < len(suggestedInterfaces) { + s := suggestedInterfaces[index] + if s.name == possibleInterface.Name() { s.count = s.count + 1 } } } } - sort.Sort(suggestedInterfaceSortedSlice(suggestedInterfaceSlice)) - for _, s := range suggestedInterfaceSlice { - result = append(result, fmt.Sprintf(`%v`, s.name)) + // sort results (by count usage for interfaces, alphabetical order for objects) + sort.Sort(suggestedInterfaceSortedSlice(suggestedInterfaces)) + sort.Sort(sort.StringSlice(suggestedObjectTypes)) + + // return concatenated slices of both interface and object type names + // and removing duplicates + // ordered by: interface (sorted) and object (sorted) + results := []string{} + for _, s := range suggestedInterfaces { + if _, ok := suggestedObjectMap[s.name]; !ok { + results = append(results, s.name) + + } } - return result + results = append(results, suggestedObjectTypes...) + return results +} + +// getSuggestedFieldNames For the field name provided, determine if there are any similar field names +// that may be the result of a typo. +func getSuggestedFieldNames(schema *Schema, ttype Output, fieldName string) []string { + fields := FieldDefinitionMap{} + switch ttype := ttype.(type) { + case *Object: + fields = ttype.Fields() + case *Interface: + fields = ttype.Fields() + default: + return []string{} + } + + possibleFieldNames := []string{} + for possibleFieldName := range fields { + possibleFieldNames = append(possibleFieldNames, possibleFieldName) + } + return suggestionList(fieldName, possibleFieldNames) } +// suggestedInterface an internal struct to sort interface by usage count type suggestedInterface struct { name string count int } - type suggestedInterfaceSortedSlice []*suggestedInterface func (s suggestedInterfaceSortedSlice) Len() int { @@ -356,7 +353,10 @@ func (s suggestedInterfaceSortedSlice) Swap(i, j int) { s[i], s[j] = s[j], s[i] } func (s suggestedInterfaceSortedSlice) Less(i, j int) bool { - return s[i].count < s[j].count + if s[i].count == s[j].count { + return s[i].name < s[j].name + } + return s[i].count > s[j].count } // FragmentsOnCompositeTypesRule Fragments on composite type diff --git a/rules_fields_on_correct_type_test.go b/rules_fields_on_correct_type_test.go index e9d00b6a..cb0cf871 100644 --- a/rules_fields_on_correct_type_test.go +++ b/rules_fields_on_correct_type_test.go @@ -73,7 +73,7 @@ func TestValidate_FieldsOnCorrectType_FieldNotDefinedOnFragment(t *testing.T) { meowVolume } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Cannot query field "meowVolume" on type "Dog". Did you mean to query "barkVolume"?`, 3, 9), + testutil.RuleError(`Cannot query field "meowVolume" on type "Dog". 
Did you mean "barkVolume"?`, 3, 9), }) } func TestValidate_FieldsOnCorrectType_IgnoreDeeplyUnknownField(t *testing.T) { @@ -106,7 +106,7 @@ func TestValidate_FieldsOnCorrectType_FieldNotDefinedOnInlineFragment(t *testing } } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Cannot query field "meowVolume" on type "Dog". Did you mean to query "barkVolume"?`, 4, 11), + testutil.RuleError(`Cannot query field "meowVolume" on type "Dog". Did you mean "barkVolume"?`, 4, 11), }) } func TestValidate_FieldsOnCorrectType_AliasedFieldTargetNotDefined(t *testing.T) { @@ -115,7 +115,7 @@ func TestValidate_FieldsOnCorrectType_AliasedFieldTargetNotDefined(t *testing.T) volume : mooVolume } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Cannot query field "mooVolume" on type "Dog". Did you mean to query "barkVolume"?`, 3, 9), + testutil.RuleError(`Cannot query field "mooVolume" on type "Dog". Did you mean "barkVolume"?`, 3, 9), }) } func TestValidate_FieldsOnCorrectType_AliasedLyingFieldTargetNotDefined(t *testing.T) { @@ -124,7 +124,7 @@ func TestValidate_FieldsOnCorrectType_AliasedLyingFieldTargetNotDefined(t *testi barkVolume : kawVolume } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Cannot query field "kawVolume" on type "Dog". Did you mean to query "barkVolume"?`, 3, 9), + testutil.RuleError(`Cannot query field "kawVolume" on type "Dog". Did you mean "barkVolume"?`, 3, 9), }) } func TestValidate_FieldsOnCorrectType_NotDefinedOnInterface(t *testing.T) { @@ -142,7 +142,7 @@ func TestValidate_FieldsOnCorrectType_DefinedOnImplementorsButNotOnInterface(t * nickname } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Cannot query field "nickname" on type "Pet". However, this field exists on "Cat", "Dog". Perhaps you meant to use an inline fragment? Did you mean to query "name"?`, 3, 9), + testutil.RuleError(`Cannot query field "nickname" on type "Pet". Did you mean to use an inline fragment on "Cat" or "Dog"?`, 3, 9), }) } func TestValidate_FieldsOnCorrectType_MetaFieldSelectionOnUnion(t *testing.T) { @@ -167,7 +167,7 @@ func TestValidate_FieldsOnCorrectType_DefinedImplementorsQueriedOnUnion(t *testi name } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Cannot query field "name" on type "CatOrDog". However, this field exists on "Being", "Pet", "Canine", "Cat", "Dog". Perhaps you meant to use an inline fragment?`, 3, 9), + testutil.RuleError(`Cannot query field "name" on type "CatOrDog". Did you mean to use an inline fragment on "Being", "Pet", "Canine", "Cat" or "Dog"?`, 3, 9), }) } func TestValidate_FieldsOnCorrectType_ValidFieldInInlineFragment(t *testing.T) { @@ -191,26 +191,47 @@ func TestValidate_FieldsOnCorrectTypeErrorMessage_WorksWithNoSuggestions(t *test } } -func TestValidate_FieldsOnCorrectTypeErrorMessage_WorksWithNoSmallNumbersOfSuggestions(t *testing.T) { +func TestValidate_FieldsOnCorrectTypeErrorMessage_WorksWithNoSmallNumbersOfTypeSuggestions(t *testing.T) { + message := graphql.UndefinedFieldMessage("f", "T", []string{"A", "B"}, []string{}) + expected := `Cannot query field "f" on type "T". ` + + `Did you mean to use an inline fragment on "A" or "B"?` + if message != expected { + t.Fatalf("Unexpected message, expected: %v, got %v", expected, message) + } +} + +func TestValidate_FieldsOnCorrectTypeErrorMessage_WorksWithNoSmallNumbersOfFieldSuggestions(t *testing.T) { + message := graphql.UndefinedFieldMessage("f", "T", []string{}, []string{"z", "y"}) + expected := `Cannot query field "f" on type "T". 
` + + `Did you mean "z" or "y"?` + if message != expected { + t.Fatalf("Unexpected message, expected: %v, got %v", expected, message) + } +} +func TestValidate_FieldsOnCorrectTypeErrorMessage_OnlyShowsOneSetOfSuggestionsAtATimePreferringTypes(t *testing.T) { message := graphql.UndefinedFieldMessage("f", "T", []string{"A", "B"}, []string{"z", "y"}) expected := `Cannot query field "f" on type "T". ` + - `However, this field exists on "A", "B". ` + - `Perhaps you meant to use an inline fragment? ` + - `Did you mean to query "z", "y"?` + `Did you mean to use an inline fragment on "A" or "B"?` if message != expected { t.Fatalf("Unexpected message, expected: %v, got %v", expected, message) } } -func TestValidate_FieldsOnCorrectTypeErrorMessage_WorksWithLotsOfSuggestions(t *testing.T) { + +func TestValidate_FieldsOnCorrectTypeErrorMessage_LimitLotsOfTypeSuggestions(t *testing.T) { + message := graphql.UndefinedFieldMessage("f", "T", []string{"A", "B", "C", "D", "E", "F"}, []string{}) + expected := `Cannot query field "f" on type "T". ` + + `Did you mean to use an inline fragment on "A", "B", "C", "D" or "E"?` + if message != expected { + t.Fatalf("Unexpected message, expected: %v, got %v", expected, message) + } +} + +func TestValidate_FieldsOnCorrectTypeErrorMessage_LimitLotsOfFieldSuggestions(t *testing.T) { message := graphql.UndefinedFieldMessage( - "f", "T", - []string{"A", "B", "C", "D", "E", "F"}, - []string{"z", "y", "x", "w", "v", "u"}, + "f", "T", []string{}, []string{"z", "y", "x", "w", "v", "u"}, ) expected := `Cannot query field "f" on type "T". ` + - `However, this field exists on "A", "B", "C", "D", "E", and 1 other types. ` + - `Perhaps you meant to use an inline fragment? ` + - `Did you mean to query "z", "y", "x", "w", "v", or 1 other field?` + `Did you mean "z", "y", "x", "w" or "v"?` if message != expected { t.Fatalf("Unexpected message, expected: %v, got %v", expected, message) } diff --git a/validator_test.go b/validator_test.go index 770c2f35..deb838b7 100644 --- a/validator_test.go +++ b/validator_test.go @@ -74,19 +74,19 @@ func TestValidator_SupportsFullValidation_ValidatesUsingACustomTypeInfo(t *testi expectedErrors := []gqlerrors.FormattedError{ { - Message: `Cannot query field "catOrDog" on type "QueryRoot". Did you mean to query "catOrDog"?`, + Message: `Cannot query field "catOrDog" on type "QueryRoot". Did you mean "catOrDog"?`, Locations: []location.SourceLocation{ {Line: 3, Column: 9}, }, }, { - Message: `Cannot query field "furColor" on type "Cat". Did you mean to query "furColor"?`, + Message: `Cannot query field "furColor" on type "Cat". Did you mean "furColor"?`, Locations: []location.SourceLocation{ {Line: 5, Column: 13}, }, }, { - Message: `Cannot query field "isHousetrained" on type "Dog". Did you mean to query "isHousetrained"?`, + Message: `Cannot query field "isHousetrained" on type "Dog". 
Did you mean "isHousetrained"?`, Locations: []location.SourceLocation{ {Line: 8, Column: 13}, }, From 2b3b1036fa5c4281bc9d7dba023ea34dbc26087a Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Tue, 7 Jun 2016 05:55:46 +0800 Subject: [PATCH 10/34] Unit test quotedOrList (suggestion quoting utility) --- quoted_or_list_internal_test.go | 35 +++++++++++++++++++ rules.go | 50 +++++++++++----------------- rules_fields_on_correct_type_test.go | 6 ++-- rules_known_type_names_test.go | 2 +- 4 files changed, 59 insertions(+), 34 deletions(-) create mode 100644 quoted_or_list_internal_test.go diff --git a/quoted_or_list_internal_test.go b/quoted_or_list_internal_test.go new file mode 100644 index 00000000..a2caccbe --- /dev/null +++ b/quoted_or_list_internal_test.go @@ -0,0 +1,35 @@ +package graphql + +import ( + "reflect" + "testing" +) + +func TestQuotedOrList_DoesNoAcceptAnEmptyList(t *testing.T) { + expected := "" + result := quotedOrList([]string{}) + if !reflect.DeepEqual(expected, result) { + t.Fatalf("Expected %v, got: %v", expected, result) + } +} +func TestQuotedOrList_ReturnsSingleQuotedItem(t *testing.T) { + expected := `"A"` + result := quotedOrList([]string{"A"}) + if !reflect.DeepEqual(expected, result) { + t.Fatalf("Expected %v, got: %v", expected, result) + } +} +func TestQuotedOrList_ReturnsTwoItems(t *testing.T) { + expected := `"A" or "B"` + result := quotedOrList([]string{"A", "B"}) + if !reflect.DeepEqual(expected, result) { + t.Fatalf("Expected %v, got: %v", expected, result) + } +} +func TestQuotedOrList_ReturnsCommaSeparatedManyItemList(t *testing.T) { + expected := `"A", "B", "C", "D", or "E"` + result := quotedOrList([]string{"A", "B", "C", "D", "E", "F"}) + if !reflect.DeepEqual(expected, result) { + t.Fatalf("Expected %v, got: %v", expected, result) + } +} diff --git a/rules.go b/rules.go index c544a0ff..664d8e80 100644 --- a/rules.go +++ b/rules.go @@ -171,32 +171,32 @@ func quoteStrings(slice []string) []string { } return quoted } + +// quotedOrList Given [ A, B, C ] return '"A", "B", or "C"'. 
+// Notice oxford comma func quotedOrList(slice []string) string { + maxLength := 5 + if len(slice) == 0 { + return "" + } quoted := quoteStrings(slice) - if len(quoted) > 1 { - return fmt.Sprintf("%v or %v", strings.Join(quoted[0:len(quoted)-1], ", "), quoted[len(quoted)-1]) + if maxLength > len(quoted) { + maxLength = len(quoted) + } + if maxLength > 2 { + return fmt.Sprintf("%v, or %v", strings.Join(quoted[0:maxLength-1], ", "), quoted[maxLength-1]) + } + if maxLength > 1 { + return fmt.Sprintf("%v or %v", strings.Join(quoted[0:maxLength-1], ", "), quoted[maxLength-1]) } return quoted[0] } func UndefinedFieldMessage(fieldName string, ttypeName string, suggestedTypeNames []string, suggestedFieldNames []string) string { message := fmt.Sprintf(`Cannot query field "%v" on type "%v".`, fieldName, ttypeName) - const MaxLength = 5 if len(suggestedTypeNames) > 0 { - suggestions := "" - if len(suggestedTypeNames) > MaxLength { - suggestions = quotedOrList(suggestedTypeNames[0:MaxLength]) - } else { - suggestions = quotedOrList(suggestedTypeNames) - } - message = fmt.Sprintf(`%v Did you mean to use an inline fragment on %v?`, message, suggestions) + message = fmt.Sprintf(`%v Did you mean to use an inline fragment on %v?`, message, quotedOrList(suggestedTypeNames)) } else if len(suggestedFieldNames) > 0 { - suggestions := "" - if len(suggestedFieldNames) > MaxLength { - suggestions = quotedOrList(suggestedFieldNames[0:MaxLength]) - } else { - suggestions = quotedOrList(suggestedFieldNames) - } - message = fmt.Sprintf(`%v Did you mean %v?`, message, suggestions) + message = fmt.Sprintf(`%v Did you mean %v?`, message, quotedOrList(suggestedFieldNames)) } return message } @@ -412,8 +412,7 @@ func unknownArgMessage(argName string, fieldName string, parentTypeName string, message := fmt.Sprintf(`Unknown argument "%v" on field "%v" of type "%v".`, argName, fieldName, parentTypeName) if len(suggestedArgs) > 0 { - suggestions := strings.Join(quoteStrings(suggestedArgs), ", ") - message = fmt.Sprintf(`%v Perhaps you meant %v?`, message, suggestions) + message = fmt.Sprintf(`%v Did you mean %v?`, message, quotedOrList(suggestedArgs)) } return message @@ -423,8 +422,7 @@ func unknownDirectiveArgMessage(argName string, directiveName string, suggestedA message := fmt.Sprintf(`Unknown argument "%v" on directive "@%v".`, argName, directiveName) if len(suggestedArgs) > 0 { - suggestions := strings.Join(quoteStrings(suggestedArgs), ", ") - message = fmt.Sprintf(`%v Perhaps you meant %v?`, message, suggestions) + message = fmt.Sprintf(`%v Did you mean %v?`, message, quotedOrList(suggestedArgs)) } return message @@ -662,16 +660,8 @@ func KnownFragmentNamesRule(context *ValidationContext) *ValidationRuleInstance func unknownTypeMessage(typeName string, suggestedTypes []string) string { message := fmt.Sprintf(`Unknown type "%v".`, typeName) - - const MaxLength = 5 if len(suggestedTypes) > 0 { - suggestions := "" - if len(suggestedTypes) < MaxLength { - suggestions = strings.Join(quoteStrings(suggestedTypes), ", ") - } else { - suggestions = strings.Join(quoteStrings(suggestedTypes[0:MaxLength]), ", ") - } - message = fmt.Sprintf(`%v Perhaps you meant one of the following: %v?`, message, suggestions) + message = fmt.Sprintf(`%v Did you mean %v?`, message, quotedOrList(suggestedTypes)) } return message diff --git a/rules_fields_on_correct_type_test.go b/rules_fields_on_correct_type_test.go index cb0cf871..833a8349 100644 --- a/rules_fields_on_correct_type_test.go +++ b/rules_fields_on_correct_type_test.go @@ -167,7 
+167,7 @@ func TestValidate_FieldsOnCorrectType_DefinedImplementorsQueriedOnUnion(t *testi name } `, []gqlerrors.FormattedError{ - testutil.RuleError(`Cannot query field "name" on type "CatOrDog". Did you mean to use an inline fragment on "Being", "Pet", "Canine", "Cat" or "Dog"?`, 3, 9), + testutil.RuleError(`Cannot query field "name" on type "CatOrDog". Did you mean to use an inline fragment on "Being", "Pet", "Canine", "Cat", or "Dog"?`, 3, 9), }) } func TestValidate_FieldsOnCorrectType_ValidFieldInInlineFragment(t *testing.T) { @@ -220,7 +220,7 @@ func TestValidate_FieldsOnCorrectTypeErrorMessage_OnlyShowsOneSetOfSuggestionsAt func TestValidate_FieldsOnCorrectTypeErrorMessage_LimitLotsOfTypeSuggestions(t *testing.T) { message := graphql.UndefinedFieldMessage("f", "T", []string{"A", "B", "C", "D", "E", "F"}, []string{}) expected := `Cannot query field "f" on type "T". ` + - `Did you mean to use an inline fragment on "A", "B", "C", "D" or "E"?` + `Did you mean to use an inline fragment on "A", "B", "C", "D", or "E"?` if message != expected { t.Fatalf("Unexpected message, expected: %v, got %v", expected, message) } @@ -231,7 +231,7 @@ func TestValidate_FieldsOnCorrectTypeErrorMessage_LimitLotsOfFieldSuggestions(t "f", "T", []string{}, []string{"z", "y", "x", "w", "v", "u"}, ) expected := `Cannot query field "f" on type "T". ` + - `Did you mean "z", "y", "x", "w" or "v"?` + `Did you mean "z", "y", "x", "w", or "v"?` if message != expected { t.Fatalf("Unexpected message, expected: %v, got %v", expected, message) } diff --git a/rules_known_type_names_test.go b/rules_known_type_names_test.go index e984f932..611a8037 100644 --- a/rules_known_type_names_test.go +++ b/rules_known_type_names_test.go @@ -34,7 +34,7 @@ func TestValidate_KnownTypeNames_UnknownTypeNamesAreInValid(t *testing.T) { `, []gqlerrors.FormattedError{ testutil.RuleError(`Unknown type "JumbledUpLetters".`, 2, 23), testutil.RuleError(`Unknown type "Badger".`, 5, 25), - testutil.RuleError(`Unknown type "Peettt". Perhaps you meant one of the following: "Pet"?`, 8, 29), + testutil.RuleError(`Unknown type "Peettt". Did you mean "Pet"?`, 8, 29), }) } From b71c9064356c3cfd7036884aead7f4744ae3b31d Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Tue, 7 Jun 2016 06:02:49 +0800 Subject: [PATCH 11/34] Bug: printer can print non-parsable value This fixes a bug where an empty "block" list could be skipped by the printer. Commit: ea5b241487c884edb561b12e0a92e947107bbfc1 [ea5b241] Parents: ec05b5404b Author: Lee Byron Date: 5 May 2016 at 7:26:11 AM SGT Commit Date: 5 May 2016 at 7:26:13 AM SGT --- language/printer/printer.go | 8 +++++--- language/printer/schema_printer_test.go | 2 ++ schema-kitchen-sink.graphql | 2 ++ 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/language/printer/printer.go b/language/printer/printer.go index 47f9b2a4..82ff1305 100644 --- a/language/printer/printer.go +++ b/language/printer/printer.go @@ -92,11 +92,13 @@ func wrap(start, maybeString, end string) string { } return start + maybeString + end } + +// Given array, print each item on its own line, wrapped in an indented "{ }" block. 
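+// An empty list now prints as "{}" (e.g. "type NoFields {}") instead of being skipped, which previously produced non-parsable output.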
func block(maybeArray interface{}) string { - if maybeArray == nil { - return "" - } s := toSliceString(maybeArray) + if len(s) == 0 { + return "{}" + } return indent("{\n"+join(s, "\n")) + "\n}" } diff --git a/language/printer/schema_printer_test.go b/language/printer/schema_printer_test.go index 3e0f6f69..73bc8992 100644 --- a/language/printer/schema_printer_test.go +++ b/language/printer/schema_printer_test.go @@ -90,6 +90,8 @@ extend type Foo { seven(argument: [String]): Type } +type NoFields {} + directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT directive @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT diff --git a/schema-kitchen-sink.graphql b/schema-kitchen-sink.graphql index efc1b469..c7e940ba 100644 --- a/schema-kitchen-sink.graphql +++ b/schema-kitchen-sink.graphql @@ -37,6 +37,8 @@ extend type Foo { seven(argument: [String]): Type } +type NoFields {} + directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT directive @include(if: Boolean!) From 16704019ced7dbc5c54a77064d1d692ffb26cc0f Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Tue, 7 Jun 2016 06:13:57 +0800 Subject: [PATCH 12/34] documentation of schema constructor Commit: 88cf354cd65e37fa0793600f6f2a3e6d1b29d21f [88cf354] Parents: 2ac41f6f19 Author: Lee Byron Date: 7 May 2016 at 3:09:16 AM SGT --- schema.go | 34 ++++++++++++++++++++++------------ 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/schema.go b/schema.go index f3a4eaff..00d4c40b 100644 --- a/schema.go +++ b/schema.go @@ -14,16 +14,26 @@ type SchemaConfig struct { type TypeMap map[string]Type -//Schema Definition -//A Schema is created by supplying the root types of each type of operation, -//query, mutation (optional) and subscription (optional). A schema definition is then supplied to the -//validator and executor. -//Example: -// myAppSchema, err := NewSchema(SchemaConfig({ -// Query: MyAppQueryRootType, -// Mutation: MyAppMutationRootType, -// Subscription: MyAppSubscriptionRootType, -// }); +// Schema Definition +// A Schema is created by supplying the root types of each type of operation, +// query, mutation (optional) and subscription (optional). A schema definition is then supplied to the +// validator and executor. +// Example: +// myAppSchema, err := NewSchema(SchemaConfig({ +// Query: MyAppQueryRootType, +// Mutation: MyAppMutationRootType, +// Subscription: MyAppSubscriptionRootType, +// }); +// Note: If an array of `directives` are provided to GraphQLSchema, that will be +// the exact list of directives represented and allowed. If `directives` is not +// provided then a default set of the built-in `[ @include, @skip ]` directives +// will be used. If you wish to provide *additional// directives to these +// built-ins, you must explicitly declare them. Example: +// directives: [ +// myCustomDirective, +// GraphQLIncludeDirective, +// GraphQLSkipDirective +// ] type Schema struct { typeMap TypeMap directives []*Directive @@ -454,8 +464,8 @@ func isEqualType(typeA Type, typeB Type) bool { } /** - * Provided a type and a super type, return true if the first type is either - * equal or a subset of the second super type (covariant). + // Provided a type and a super type, return true if the first type is either + // equal or a subset of the second super type (covariant). 
*/ func isTypeSubTypeOf(schema *Schema, maybeSubType Type, superType Type) bool { // Equivalent type is a valid subtype From 73f83b424851451d448e1482bfb1fef492ede5d9 Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Tue, 7 Jun 2016 06:14:42 +0800 Subject: [PATCH 13/34] isTypeSubTypeOf documentation --- schema.go | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/schema.go b/schema.go index 00d4c40b..7d5b5ede 100644 --- a/schema.go +++ b/schema.go @@ -463,10 +463,8 @@ func isEqualType(typeA Type, typeB Type) bool { return false } -/** - // Provided a type and a super type, return true if the first type is either - // equal or a subset of the second super type (covariant). - */ +// isTypeSubTypeOf Provided a type and a super type, return true if the first type is either +// equal or a subset of the second super type (covariant). func isTypeSubTypeOf(schema *Schema, maybeSubType Type, superType Type) bool { // Equivalent type is a valid subtype if maybeSubType == superType { From 8c1f318bb9f6dc13ac44c9a331e9d882fadf537b Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Tue, 7 Jun 2016 11:41:51 +0800 Subject: [PATCH 14/34] Improved `KnownArgumentNames` tests for coverage --- rules_known_argument_names_test.go | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/rules_known_argument_names_test.go b/rules_known_argument_names_test.go index 7536161d..574a0037 100644 --- a/rules_known_argument_names_test.go +++ b/rules_known_argument_names_test.go @@ -75,6 +75,16 @@ func TestValidate_KnownArgumentNames_UndirectiveArgsAreInvalid(t *testing.T) { testutil.RuleError(`Unknown argument "unless" on directive "@skip".`, 3, 19), }) } +func TestValidate_KnownArgumentNames_UndirectiveArgsAreInvalidWithSuggestion(t *testing.T) { + testutil.ExpectFailsRule(t, graphql.KnownArgumentNamesRule, ` + { + dog @skip(of: true) + } + `, []gqlerrors.FormattedError{ + testutil.RuleError(`Unknown argument "of" on directive "@skip". `+ + `Did you mean "if"?`, 3, 19), + }) +} func TestValidate_KnownArgumentNames_InvalidArgName(t *testing.T) { testutil.ExpectFailsRule(t, graphql.KnownArgumentNamesRule, ` fragment invalidArgName on Dog { @@ -94,6 +104,16 @@ func TestValidate_KnownArgumentNames_UnknownArgsAmongstKnownArgs(t *testing.T) { testutil.RuleError(`Unknown argument "unknown" on field "doesKnowCommand" of type "Dog".`, 3, 55), }) } +func TestValidate_KnownArgumentNames_UnknownArgsAmongstKnownArgsWithSuggestions(t *testing.T) { + testutil.ExpectFailsRule(t, graphql.KnownArgumentNamesRule, ` + fragment oneGoodArgOneInvalidArg on Dog { + doesKnowCommand(ddogCommand: SIT,) + } + `, []gqlerrors.FormattedError{ + testutil.RuleError(`Unknown argument "ddogCommand" on field "doesKnowCommand" of type "Dog". 
`+ + `Did you mean "dogCommand"?`, 3, 25), + }) +} func TestValidate_KnownArgumentNames_UnknownArgsDeeply(t *testing.T) { testutil.ExpectFailsRule(t, graphql.KnownArgumentNamesRule, ` { From a19ac6c9058aa4c3edc1eb539352ed8c0e4981a2 Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Tue, 7 Jun 2016 12:19:22 +0800 Subject: [PATCH 15/34] Export introspection in public API Commit: 71b6a4aaecf064c7095bca81cc4285fa74ba175e [71b6a4a] Parents: 980bdf403f Author: Lee Byron Date: 7 May 2016 at 4:26:20 AM SGT --- introspection.go | 105 ++++++++++++++++++++++++++++------------------- 1 file changed, 63 insertions(+), 42 deletions(-) diff --git a/introspection.go b/introspection.go index e837be2e..d8d4242f 100644 --- a/introspection.go +++ b/introspection.go @@ -19,23 +19,44 @@ const ( TypeKindNonNull = "NON_NULL" ) -var directiveType *Object -var schemaType *Object -var typeType *Object -var fieldType *Object -var inputValueType *Object -var enumValueType *Object +// SchemaType is type definition for __Schema +var SchemaType *Object -var typeKindEnum *Enum -var directiveLocationEnum *Enum +// DirectiveType is type definition for __Directive +var DirectiveType *Object +// TypeType is type definition for __Type +var TypeType *Object + +// FieldType is type definition for __Field +var FieldType *Object + +// InputValueType is type definition for __InputValue +var InputValueType *Object + +// EnumValueType is type definition for __EnumValue +var EnumValueType *Object + +// TypeKindEnumType is type definition for __TypeKind +var TypeKindEnumType *Enum + +// DirectiveLocationEnumType is type definition for __DirectiveLocation +var DirectiveLocationEnumType *Enum + +// Meta-field definitions. + +// SchemaMetaFieldDef Meta field definition for Schema var SchemaMetaFieldDef *FieldDefinition + +// TypeMetaFieldDef Meta field definition for types var TypeMetaFieldDef *FieldDefinition + +// TypeNameMetaFieldDef Meta field definition for type names var TypeNameMetaFieldDef *FieldDefinition func init() { - typeKindEnum = NewEnum(EnumConfig{ + TypeKindEnumType = NewEnum(EnumConfig{ Name: "__TypeKind", Description: "An enum describing what kind of type a given `__Type` is", Values: EnumValueConfigMap{ @@ -81,7 +102,7 @@ func init() { }, }) - directiveLocationEnum = NewEnum(EnumConfig{ + DirectiveLocationEnumType = NewEnum(EnumConfig{ Name: "__DirectiveLocation", Description: "A Directive can be adjacent to many parts of the GraphQL language, a " + "__DirectiveLocation describes one such possible adjacencies.", @@ -118,7 +139,7 @@ func init() { }) // Note: some fields (for e.g "fields", "interfaces") are defined later due to cyclic reference - typeType = NewObject(ObjectConfig{ + TypeType = NewObject(ObjectConfig{ Name: "__Type", Description: "The fundamental unit of any GraphQL Schema is the type. There are " + "many kinds of types in GraphQL as represented by the `__TypeKind` enum." 
+ @@ -131,7 +152,7 @@ func init() { Fields: Fields{ "kind": &Field{ - Type: NewNonNull(typeKindEnum), + Type: NewNonNull(TypeKindEnumType), Resolve: func(p ResolveParams) (interface{}, error) { switch p.Source.(type) { case *Scalar: @@ -169,7 +190,7 @@ func init() { }, }) - inputValueType = NewObject(ObjectConfig{ + InputValueType = NewObject(ObjectConfig{ Name: "__InputValue", Description: "Arguments provided to Fields or Directives and the input fields of an " + "InputObject are represented as Input Values which describe their type " + @@ -182,7 +203,7 @@ func init() { Type: String, }, "type": &Field{ - Type: NewNonNull(typeType), + Type: NewNonNull(TypeType), }, "defaultValue": &Field{ Type: String, @@ -212,7 +233,7 @@ func init() { }, }) - fieldType = NewObject(ObjectConfig{ + FieldType = NewObject(ObjectConfig{ Name: "__Field", Description: "Object and Interface types are described by a list of Fields, each of " + "which has a name, potentially a list of arguments, and a return type.", @@ -224,7 +245,7 @@ func init() { Type: String, }, "args": &Field{ - Type: NewNonNull(NewList(NewNonNull(inputValueType))), + Type: NewNonNull(NewList(NewNonNull(InputValueType))), Resolve: func(p ResolveParams) (interface{}, error) { if field, ok := p.Source.(*FieldDefinition); ok { return field.Args, nil @@ -233,7 +254,7 @@ func init() { }, }, "type": &Field{ - Type: NewNonNull(typeType), + Type: NewNonNull(TypeType), }, "isDeprecated": &Field{ Type: NewNonNull(Boolean), @@ -250,7 +271,7 @@ func init() { }, }) - directiveType = NewObject(ObjectConfig{ + DirectiveType = NewObject(ObjectConfig{ Name: "__Directive", Description: "A Directive provides a way to describe alternate runtime execution and " + "type validation behavior in a GraphQL document. " + @@ -267,12 +288,12 @@ func init() { }, "locations": &Field{ Type: NewNonNull(NewList( - NewNonNull(directiveLocationEnum), + NewNonNull(DirectiveLocationEnumType), )), }, "args": &Field{ Type: NewNonNull(NewList( - NewNonNull(inputValueType), + NewNonNull(InputValueType), )), }, // NOTE: the following three fields are deprecated and are no longer part @@ -335,7 +356,7 @@ func init() { }, }) - schemaType = NewObject(ObjectConfig{ + SchemaType = NewObject(ObjectConfig{ Name: "__Schema", Description: `A GraphQL Schema defines the capabilities of a GraphQL server. 
` + `It exposes all available types and directives on the server, as well as ` + @@ -344,7 +365,7 @@ func init() { "types": &Field{ Description: "A list of all types supported by this server.", Type: NewNonNull(NewList( - NewNonNull(typeType), + NewNonNull(TypeType), )), Resolve: func(p ResolveParams) (interface{}, error) { if schema, ok := p.Source.(Schema); ok { @@ -359,7 +380,7 @@ func init() { }, "queryType": &Field{ Description: "The type that query operations will be rooted at.", - Type: NewNonNull(typeType), + Type: NewNonNull(TypeType), Resolve: func(p ResolveParams) (interface{}, error) { if schema, ok := p.Source.(Schema); ok { return schema.QueryType(), nil @@ -370,7 +391,7 @@ func init() { "mutationType": &Field{ Description: `If this server supports mutation, the type that ` + `mutation operations will be rooted at.`, - Type: typeType, + Type: TypeType, Resolve: func(p ResolveParams) (interface{}, error) { if schema, ok := p.Source.(Schema); ok { if schema.MutationType() != nil { @@ -383,7 +404,7 @@ func init() { "subscriptionType": &Field{ Description: `If this server supports subscription, the type that ` + `subscription operations will be rooted at.`, - Type: typeType, + Type: TypeType, Resolve: func(p ResolveParams) (interface{}, error) { if schema, ok := p.Source.(Schema); ok { if schema.SubscriptionType() != nil { @@ -396,7 +417,7 @@ func init() { "directives": &Field{ Description: `A list of all directives supported by this server.`, Type: NewNonNull(NewList( - NewNonNull(directiveType), + NewNonNull(DirectiveType), )), Resolve: func(p ResolveParams) (interface{}, error) { if schema, ok := p.Source.(Schema); ok { @@ -408,7 +429,7 @@ func init() { }, }) - enumValueType = NewObject(ObjectConfig{ + EnumValueType = NewObject(ObjectConfig{ Name: "__EnumValue", Description: "One possible value for a given Enum. Enum values are unique values, not " + "a placeholder for a string or numeric value. 
However an Enum value is " + @@ -437,8 +458,8 @@ func init() { // Again, adding field configs to __Type that have cyclic reference here // because golang don't like them too much during init/compile-time - typeType.AddFieldConfig("fields", &Field{ - Type: NewList(NewNonNull(fieldType)), + TypeType.AddFieldConfig("fields", &Field{ + Type: NewList(NewNonNull(FieldType)), Args: FieldConfigArgument{ "includeDeprecated": &ArgumentConfig{ Type: Boolean, @@ -476,8 +497,8 @@ func init() { return nil, nil }, }) - typeType.AddFieldConfig("interfaces", &Field{ - Type: NewList(NewNonNull(typeType)), + TypeType.AddFieldConfig("interfaces", &Field{ + Type: NewList(NewNonNull(TypeType)), Resolve: func(p ResolveParams) (interface{}, error) { switch ttype := p.Source.(type) { case *Object: @@ -486,8 +507,8 @@ func init() { return nil, nil }, }) - typeType.AddFieldConfig("possibleTypes", &Field{ - Type: NewList(NewNonNull(typeType)), + TypeType.AddFieldConfig("possibleTypes", &Field{ + Type: NewList(NewNonNull(TypeType)), Resolve: func(p ResolveParams) (interface{}, error) { switch ttype := p.Source.(type) { case *Interface: @@ -498,8 +519,8 @@ func init() { return nil, nil }, }) - typeType.AddFieldConfig("enumValues", &Field{ - Type: NewList(NewNonNull(enumValueType)), + TypeType.AddFieldConfig("enumValues", &Field{ + Type: NewList(NewNonNull(EnumValueType)), Args: FieldConfigArgument{ "includeDeprecated": &ArgumentConfig{ Type: Boolean, @@ -525,8 +546,8 @@ func init() { return nil, nil }, }) - typeType.AddFieldConfig("inputFields", &Field{ - Type: NewList(NewNonNull(inputValueType)), + TypeType.AddFieldConfig("inputFields", &Field{ + Type: NewList(NewNonNull(InputValueType)), Resolve: func(p ResolveParams) (interface{}, error) { switch ttype := p.Source.(type) { case *InputObject: @@ -539,15 +560,15 @@ func init() { return nil, nil }, }) - typeType.AddFieldConfig("ofType", &Field{ - Type: typeType, + TypeType.AddFieldConfig("ofType", &Field{ + Type: TypeType, }) // Note that these are FieldDefinition and not FieldConfig, - // so the format for args is different. d + // so the format for args is different. 
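+	// (FieldDefinition takes Args as a []*Argument slice rather than a FieldConfigArgument map.)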
SchemaMetaFieldDef = &FieldDefinition{ Name: "__schema", - Type: NewNonNull(schemaType), + Type: NewNonNull(SchemaType), Description: "Access the current type schema of this server.", Args: []*Argument{}, Resolve: func(p ResolveParams) (interface{}, error) { @@ -556,7 +577,7 @@ func init() { } TypeMetaFieldDef = &FieldDefinition{ Name: "__type", - Type: typeType, + Type: TypeType, Description: "Request the type information of a single type.", Args: []*Argument{ { From e324096ce0dd36fd5e141d7131ef7c7cc834e90d Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Tue, 7 Jun 2016 17:06:01 +0800 Subject: [PATCH 16/34] Update `NewSchema()` to use new exported introspective types --- schema.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schema.go b/schema.go index 7d5b5ede..4863e4e4 100644 --- a/schema.go +++ b/schema.go @@ -94,8 +94,8 @@ func NewSchema(config SchemaConfig) (Schema, error) { if schema.SubscriptionType() != nil { initialTypes = append(initialTypes, schema.SubscriptionType()) } - if schemaType != nil { - initialTypes = append(initialTypes, schemaType) + if SchemaType != nil { + initialTypes = append(initialTypes, SchemaType) } for _, ttype := range config.Types { From 9cbbd654ac105e916dd5247d318617e53483eaa6 Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Tue, 7 Jun 2016 17:13:21 +0800 Subject: [PATCH 17/34] RFC: Schema Language Directives (#376) This implements adding directives to the experimental schema language by extending the *locations* a directive can be used. Notice that this provides no semantic meaning to these directives - they are purely a mechanism for annotating an AST - however future directives which contain semantic meaning may be introduced in the future (the first will be `@deprecated`). Commit: 1b6824bc5df15f8edb259d535aa41a81e2a07234 [1b6824b] Parents: 71b6a4aaec Author: Lee Byron Date: 7 May 2016 at 5:56:25 AM SGT Labels: HEAD --- directives.go | 13 ++ introspection.go | 40 +++++ language/ast/type_definitions.go | 126 ++++++++------- language/parser/parser.go | 105 ++++++++---- language/parser/schema_parser_test.go | 48 +++++- language/printer/printer.go | 205 +++++++++++++++++++++--- language/printer/schema_printer_test.go | 23 +++ language/visitor/visitor.go | 20 ++- rules.go | 54 +++++-- rules_known_directives_rule_test.go | 80 ++++++++- schema-kitchen-sink.graphql | 23 +++ testutil/rules_test_harness.go | 70 +++++++- 12 files changed, 677 insertions(+), 130 deletions(-) diff --git a/directives.go b/directives.go index 676eab75..81d2ddac 100644 --- a/directives.go +++ b/directives.go @@ -1,6 +1,7 @@ package graphql const ( + // Operations DirectiveLocationQuery = "QUERY" DirectiveLocationMutation = "MUTATION" DirectiveLocationSubscription = "SUBSCRIPTION" @@ -8,6 +9,18 @@ const ( DirectiveLocationFragmentDefinition = "FRAGMENT_DEFINITION" DirectiveLocationFragmentSpread = "FRAGMENT_SPREAD" DirectiveLocationInlineFragment = "INLINE_FRAGMENT" + + // Schema Definitions + DirectiveLocationScalar = "SCALAR" + DirectiveLocationObject = "OBJECT" + DirectiveLocationFieldDefinition = "FIELD_DEFINITION" + DirectiveLocationArgumentDefinition = "ARGUMENT_DEFINITION" + DirectiveLocationInterface = "INTERFACE" + DirectiveLocationUnion = "UNION" + DirectiveLocationEnum = "ENUM" + DirectiveLocationEnumValue = "ENUM_VALUE" + DirectiveLocationInputObject = "INPUT_OBJECT" + DirectiveLocationInputFieldDefinition = "INPUT_FIELD_DEFINITION" ) // Directive structs are used by the GraphQL runtime as a way of modifying execution diff --git 
a/introspection.go b/introspection.go index d8d4242f..b4b58c54 100644 --- a/introspection.go +++ b/introspection.go @@ -135,6 +135,46 @@ func init() { Value: DirectiveLocationInlineFragment, Description: "Location adjacent to an inline fragment.", }, + "SCALAR": &EnumValueConfig{ + Value: DirectiveLocationScalar, + Description: "Location adjacent to a scalar definition.", + }, + "OBJECT": &EnumValueConfig{ + Value: DirectiveLocationObject, + Description: "Location adjacent to a object definition.", + }, + "FIELD_DEFINITION": &EnumValueConfig{ + Value: DirectiveLocationFieldDefinition, + Description: "Location adjacent to a field definition.", + }, + "ARGUMENT_DEFINITION": &EnumValueConfig{ + Value: DirectiveLocationArgumentDefinition, + Description: "Location adjacent to an argument definition.", + }, + "INTERFACE": &EnumValueConfig{ + Value: DirectiveLocationInterface, + Description: "Location adjacent to an interface definition.", + }, + "UNION": &EnumValueConfig{ + Value: DirectiveLocationUnion, + Description: "Location adjacent to a union definition.", + }, + "ENUM": &EnumValueConfig{ + Value: DirectiveLocationEnum, + Description: "Location adjacent to an enum definition.", + }, + "ENUM_VALUE": &EnumValueConfig{ + Value: DirectiveLocationEnumValue, + Description: "Location adjacent to an enum value definition.", + }, + "INPUT_OBJECT": &EnumValueConfig{ + Value: DirectiveLocationInputObject, + Description: "Location adjacent to an input object type definition.", + }, + "INPUT_FIELD_DEFINITION": &EnumValueConfig{ + Value: DirectiveLocationInputFieldDefinition, + Description: "Location adjacent to an input object field definition.", + }, }, }) diff --git a/language/ast/type_definitions.go b/language/ast/type_definitions.go index dd7940c9..9679f41f 100644 --- a/language/ast/type_definitions.go +++ b/language/ast/type_definitions.go @@ -100,9 +100,10 @@ func (def *OperationTypeDefinition) GetLoc() *Location { // ScalarDefinition implements Node, Definition type ScalarDefinition struct { - Kind string - Loc *Location - Name *Name + Kind string + Loc *Location + Name *Name + Directives []*Directive } func NewScalarDefinition(def *ScalarDefinition) *ScalarDefinition { @@ -110,9 +111,10 @@ func NewScalarDefinition(def *ScalarDefinition) *ScalarDefinition { def = &ScalarDefinition{} } return &ScalarDefinition{ - Kind: kinds.ScalarDefinition, - Loc: def.Loc, - Name: def.Name, + Kind: kinds.ScalarDefinition, + Loc: def.Loc, + Name: def.Name, + Directives: def.Directives, } } @@ -146,6 +148,7 @@ type ObjectDefinition struct { Loc *Location Name *Name Interfaces []*Named + Directives []*Directive Fields []*FieldDefinition } @@ -158,6 +161,7 @@ func NewObjectDefinition(def *ObjectDefinition) *ObjectDefinition { Loc: def.Loc, Name: def.Name, Interfaces: def.Interfaces, + Directives: def.Directives, Fields: def.Fields, } } @@ -188,11 +192,12 @@ func (def *ObjectDefinition) GetOperation() string { // FieldDefinition implements Node type FieldDefinition struct { - Kind string - Loc *Location - Name *Name - Arguments []*InputValueDefinition - Type Type + Kind string + Loc *Location + Name *Name + Arguments []*InputValueDefinition + Type Type + Directives []*Directive } func NewFieldDefinition(def *FieldDefinition) *FieldDefinition { @@ -200,11 +205,12 @@ func NewFieldDefinition(def *FieldDefinition) *FieldDefinition { def = &FieldDefinition{} } return &FieldDefinition{ - Kind: kinds.FieldDefinition, - Loc: def.Loc, - Name: def.Name, - Arguments: def.Arguments, - Type: def.Type, + Kind: 
kinds.FieldDefinition, + Loc: def.Loc, + Name: def.Name, + Arguments: def.Arguments, + Type: def.Type, + Directives: def.Directives, } } @@ -223,6 +229,7 @@ type InputValueDefinition struct { Name *Name Type Type DefaultValue Value + Directives []*Directive } func NewInputValueDefinition(def *InputValueDefinition) *InputValueDefinition { @@ -235,6 +242,7 @@ func NewInputValueDefinition(def *InputValueDefinition) *InputValueDefinition { Name: def.Name, Type: def.Type, DefaultValue: def.DefaultValue, + Directives: def.Directives, } } @@ -248,10 +256,11 @@ func (def *InputValueDefinition) GetLoc() *Location { // InterfaceDefinition implements Node, Definition type InterfaceDefinition struct { - Kind string - Loc *Location - Name *Name - Fields []*FieldDefinition + Kind string + Loc *Location + Name *Name + Directives []*Directive + Fields []*FieldDefinition } func NewInterfaceDefinition(def *InterfaceDefinition) *InterfaceDefinition { @@ -259,10 +268,11 @@ func NewInterfaceDefinition(def *InterfaceDefinition) *InterfaceDefinition { def = &InterfaceDefinition{} } return &InterfaceDefinition{ - Kind: kinds.InterfaceDefinition, - Loc: def.Loc, - Name: def.Name, - Fields: def.Fields, + Kind: kinds.InterfaceDefinition, + Loc: def.Loc, + Name: def.Name, + Directives: def.Directives, + Fields: def.Fields, } } @@ -292,10 +302,11 @@ func (def *InterfaceDefinition) GetOperation() string { // UnionDefinition implements Node, Definition type UnionDefinition struct { - Kind string - Loc *Location - Name *Name - Types []*Named + Kind string + Loc *Location + Name *Name + Directives []*Directive + Types []*Named } func NewUnionDefinition(def *UnionDefinition) *UnionDefinition { @@ -303,10 +314,11 @@ func NewUnionDefinition(def *UnionDefinition) *UnionDefinition { def = &UnionDefinition{} } return &UnionDefinition{ - Kind: kinds.UnionDefinition, - Loc: def.Loc, - Name: def.Name, - Types: def.Types, + Kind: kinds.UnionDefinition, + Loc: def.Loc, + Name: def.Name, + Directives: def.Directives, + Types: def.Types, } } @@ -336,10 +348,11 @@ func (def *UnionDefinition) GetOperation() string { // EnumDefinition implements Node, Definition type EnumDefinition struct { - Kind string - Loc *Location - Name *Name - Values []*EnumValueDefinition + Kind string + Loc *Location + Name *Name + Directives []*Directive + Values []*EnumValueDefinition } func NewEnumDefinition(def *EnumDefinition) *EnumDefinition { @@ -347,10 +360,11 @@ func NewEnumDefinition(def *EnumDefinition) *EnumDefinition { def = &EnumDefinition{} } return &EnumDefinition{ - Kind: kinds.EnumDefinition, - Loc: def.Loc, - Name: def.Name, - Values: def.Values, + Kind: kinds.EnumDefinition, + Loc: def.Loc, + Name: def.Name, + Directives: def.Directives, + Values: def.Values, } } @@ -380,9 +394,10 @@ func (def *EnumDefinition) GetOperation() string { // EnumValueDefinition implements Node, Definition type EnumValueDefinition struct { - Kind string - Loc *Location - Name *Name + Kind string + Loc *Location + Name *Name + Directives []*Directive } func NewEnumValueDefinition(def *EnumValueDefinition) *EnumValueDefinition { @@ -390,9 +405,10 @@ func NewEnumValueDefinition(def *EnumValueDefinition) *EnumValueDefinition { def = &EnumValueDefinition{} } return &EnumValueDefinition{ - Kind: kinds.EnumValueDefinition, - Loc: def.Loc, - Name: def.Name, + Kind: kinds.EnumValueDefinition, + Loc: def.Loc, + Name: def.Name, + Directives: def.Directives, } } @@ -406,10 +422,11 @@ func (def *EnumValueDefinition) GetLoc() *Location { // InputObjectDefinition implements 
Node, Definition type InputObjectDefinition struct { - Kind string - Loc *Location - Name *Name - Fields []*InputValueDefinition + Kind string + Loc *Location + Name *Name + Directives []*Directive + Fields []*InputValueDefinition } func NewInputObjectDefinition(def *InputObjectDefinition) *InputObjectDefinition { @@ -417,10 +434,11 @@ func NewInputObjectDefinition(def *InputObjectDefinition) *InputObjectDefinition def = &InputObjectDefinition{} } return &InputObjectDefinition{ - Kind: kinds.InputObjectDefinition, - Loc: def.Loc, - Name: def.Name, - Fields: def.Fields, + Kind: kinds.InputObjectDefinition, + Loc: def.Loc, + Name: def.Name, + Directives: def.Directives, + Fields: def.Fields, } } diff --git a/language/parser/parser.go b/language/parser/parser.go index d1e01c50..5519f057 100644 --- a/language/parser/parser.go +++ b/language/parser/parser.go @@ -931,7 +931,7 @@ func parseOperationTypeDefinition(parser *Parser) (interface{}, error) { } /** - * ScalarTypeDefinition : scalar Name + * ScalarTypeDefinition : scalar Name Directives? */ func parseScalarTypeDefinition(parser *Parser) (*ast.ScalarDefinition, error) { start := parser.Token.Start @@ -943,15 +943,20 @@ func parseScalarTypeDefinition(parser *Parser) (*ast.ScalarDefinition, error) { if err != nil { return nil, err } + directives, err := parseDirectives(parser) + if err != nil { + return nil, err + } def := ast.NewScalarDefinition(&ast.ScalarDefinition{ - Name: name, - Loc: loc(parser, start), + Name: name, + Directives: directives, + Loc: loc(parser, start), }) return def, nil } /** - * ObjectTypeDefinition : type Name ImplementsInterfaces? { FieldDefinition+ } + * ObjectTypeDefinition : type Name ImplementsInterfaces? Directives? { FieldDefinition+ } */ func parseObjectTypeDefinition(parser *Parser) (*ast.ObjectDefinition, error) { start := parser.Token.Start @@ -967,6 +972,10 @@ func parseObjectTypeDefinition(parser *Parser) (*ast.ObjectDefinition, error) { if err != nil { return nil, err } + directives, err := parseDirectives(parser) + if err != nil { + return nil, err + } iFields, err := any(parser, lexer.TokenKind[lexer.BRACE_L], parseFieldDefinition, lexer.TokenKind[lexer.BRACE_R]) if err != nil { return nil, err @@ -981,6 +990,7 @@ func parseObjectTypeDefinition(parser *Parser) (*ast.ObjectDefinition, error) { Name: name, Loc: loc(parser, start), Interfaces: interfaces, + Directives: directives, Fields: fields, }), nil } @@ -1000,7 +1010,7 @@ func parseImplementsInterfaces(parser *Parser) ([]*ast.Named, error) { return types, err } types = append(types, ttype) - if peek(parser, lexer.TokenKind[lexer.BRACE_L]) { + if !peek(parser, lexer.TokenKind[lexer.NAME]) { break } } @@ -1009,7 +1019,7 @@ func parseImplementsInterfaces(parser *Parser) ([]*ast.Named, error) { } /** - * FieldDefinition : Name ArgumentsDefinition? : Type + * FieldDefinition : Name ArgumentsDefinition? : Type Directives? 
*/ func parseFieldDefinition(parser *Parser) (interface{}, error) { start := parser.Token.Start @@ -1029,11 +1039,16 @@ func parseFieldDefinition(parser *Parser) (interface{}, error) { if err != nil { return nil, err } + directives, err := parseDirectives(parser) + if err != nil { + return nil, err + } return ast.NewFieldDefinition(&ast.FieldDefinition{ - Name: name, - Arguments: args, - Type: ttype, - Loc: loc(parser, start), + Name: name, + Arguments: args, + Type: ttype, + Directives: directives, + Loc: loc(parser, start), }), nil } @@ -1059,7 +1074,7 @@ func parseArgumentDefs(parser *Parser) ([]*ast.InputValueDefinition, error) { } /** - * InputValueDefinition : Name : Type DefaultValue? + * InputValueDefinition : Name : Type DefaultValue? Directives? */ func parseInputValueDef(parser *Parser) (interface{}, error) { start := parser.Token.Start @@ -1087,16 +1102,21 @@ func parseInputValueDef(parser *Parser) (interface{}, error) { defaultValue = val } } + directives, err := parseDirectives(parser) + if err != nil { + return nil, err + } return ast.NewInputValueDefinition(&ast.InputValueDefinition{ Name: name, Type: ttype, DefaultValue: defaultValue, + Directives: directives, Loc: loc(parser, start), }), nil } /** - * InterfaceTypeDefinition : interface Name { FieldDefinition+ } + * InterfaceTypeDefinition : interface Name Directives? { FieldDefinition+ } */ func parseInterfaceTypeDefinition(parser *Parser) (*ast.InterfaceDefinition, error) { start := parser.Token.Start @@ -1108,6 +1128,10 @@ func parseInterfaceTypeDefinition(parser *Parser) (*ast.InterfaceDefinition, err if err != nil { return nil, err } + directives, err := parseDirectives(parser) + if err != nil { + return nil, err + } iFields, err := any(parser, lexer.TokenKind[lexer.BRACE_L], parseFieldDefinition, lexer.TokenKind[lexer.BRACE_R]) if err != nil { return nil, err @@ -1119,14 +1143,15 @@ func parseInterfaceTypeDefinition(parser *Parser) (*ast.InterfaceDefinition, err } } return ast.NewInterfaceDefinition(&ast.InterfaceDefinition{ - Name: name, - Loc: loc(parser, start), - Fields: fields, + Name: name, + Directives: directives, + Loc: loc(parser, start), + Fields: fields, }), nil } /** - * UnionTypeDefinition : union Name = UnionMembers + * UnionTypeDefinition : union Name Directives? = UnionMembers */ func parseUnionTypeDefinition(parser *Parser) (*ast.UnionDefinition, error) { start := parser.Token.Start @@ -1138,6 +1163,10 @@ func parseUnionTypeDefinition(parser *Parser) (*ast.UnionDefinition, error) { if err != nil { return nil, err } + directives, err := parseDirectives(parser) + if err != nil { + return nil, err + } _, err = expect(parser, lexer.TokenKind[lexer.EQUALS]) if err != nil { return nil, err @@ -1147,9 +1176,10 @@ func parseUnionTypeDefinition(parser *Parser) (*ast.UnionDefinition, error) { return nil, err } return ast.NewUnionDefinition(&ast.UnionDefinition{ - Name: name, - Loc: loc(parser, start), - Types: types, + Name: name, + Directives: directives, + Loc: loc(parser, start), + Types: types, }), nil } @@ -1176,7 +1206,7 @@ func parseUnionMembers(parser *Parser) ([]*ast.Named, error) { } /** - * EnumTypeDefinition : enum Name { EnumValueDefinition+ } + * EnumTypeDefinition : enum Name Directives? 
{ EnumValueDefinition+ } */ func parseEnumTypeDefinition(parser *Parser) (*ast.EnumDefinition, error) { start := parser.Token.Start @@ -1188,6 +1218,10 @@ func parseEnumTypeDefinition(parser *Parser) (*ast.EnumDefinition, error) { if err != nil { return nil, err } + directives, err := parseDirectives(parser) + if err != nil { + return nil, err + } iEnumValueDefs, err := any(parser, lexer.TokenKind[lexer.BRACE_L], parseEnumValueDefinition, lexer.TokenKind[lexer.BRACE_R]) if err != nil { return nil, err @@ -1199,14 +1233,15 @@ func parseEnumTypeDefinition(parser *Parser) (*ast.EnumDefinition, error) { } } return ast.NewEnumDefinition(&ast.EnumDefinition{ - Name: name, - Loc: loc(parser, start), - Values: values, + Name: name, + Directives: directives, + Loc: loc(parser, start), + Values: values, }), nil } /** - * EnumValueDefinition : EnumValue + * EnumValueDefinition : EnumValue Directives? * * EnumValue : Name */ @@ -1216,14 +1251,19 @@ func parseEnumValueDefinition(parser *Parser) (interface{}, error) { if err != nil { return nil, err } + directives, err := parseDirectives(parser) + if err != nil { + return nil, err + } return ast.NewEnumValueDefinition(&ast.EnumValueDefinition{ - Name: name, - Loc: loc(parser, start), + Name: name, + Directives: directives, + Loc: loc(parser, start), }), nil } /** - * InputObjectTypeDefinition : input Name { InputValueDefinition+ } + * InputObjectTypeDefinition : input Name Directives? { InputValueDefinition+ } */ func parseInputObjectTypeDefinition(parser *Parser) (*ast.InputObjectDefinition, error) { start := parser.Token.Start @@ -1235,6 +1275,10 @@ func parseInputObjectTypeDefinition(parser *Parser) (*ast.InputObjectDefinition, if err != nil { return nil, err } + directives, err := parseDirectives(parser) + if err != nil { + return nil, err + } iInputValueDefinitions, err := any(parser, lexer.TokenKind[lexer.BRACE_L], parseInputValueDef, lexer.TokenKind[lexer.BRACE_R]) if err != nil { return nil, err @@ -1246,9 +1290,10 @@ func parseInputObjectTypeDefinition(parser *Parser) (*ast.InputObjectDefinition, } } return ast.NewInputObjectDefinition(&ast.InputObjectDefinition{ - Name: name, - Loc: loc(parser, start), - Fields: fields, + Name: name, + Directives: directives, + Loc: loc(parser, start), + Fields: fields, }), nil } diff --git a/language/parser/schema_parser_test.go b/language/parser/schema_parser_test.go index ce6e552c..8387567a 100644 --- a/language/parser/schema_parser_test.go +++ b/language/parser/schema_parser_test.go @@ -29,6 +29,7 @@ func testLoc(start int, end int) *ast.Location { Start: start, End: end, } } + func TestSchemaParser_SimpleType(t *testing.T) { body := ` @@ -45,6 +46,7 @@ type Hello { Value: "Hello", Loc: testLoc(6, 11), }), + Directives: []*ast.Directive{}, Interfaces: []*ast.Named{}, Fields: []*ast.FieldDefinition{ ast.NewFieldDefinition(&ast.FieldDefinition{ @@ -53,7 +55,8 @@ type Hello { Value: "world", Loc: testLoc(16, 21), }), - Arguments: []*ast.InputValueDefinition{}, + Arguments: []*ast.InputValueDefinition{}, + Directives: []*ast.Directive{}, Type: ast.NewNamed(&ast.Named{ Loc: testLoc(23, 29), Name: ast.NewName(&ast.Name{ @@ -89,6 +92,7 @@ extend type Hello { Value: "Hello", Loc: testLoc(13, 18), }), + Directives: []*ast.Directive{}, Interfaces: []*ast.Named{}, Fields: []*ast.FieldDefinition{ ast.NewFieldDefinition(&ast.FieldDefinition{ @@ -97,7 +101,8 @@ extend type Hello { Value: "world", Loc: testLoc(23, 28), }), - Arguments: []*ast.InputValueDefinition{}, + Directives: []*ast.Directive{}, + Arguments: 
[]*ast.InputValueDefinition{}, Type: ast.NewNamed(&ast.Named{ Loc: testLoc(30, 36), Name: ast.NewName(&ast.Name{ @@ -132,6 +137,7 @@ type Hello { Value: "Hello", Loc: testLoc(6, 11), }), + Directives: []*ast.Directive{}, Interfaces: []*ast.Named{}, Fields: []*ast.FieldDefinition{ ast.NewFieldDefinition(&ast.FieldDefinition{ @@ -140,7 +146,8 @@ type Hello { Value: "world", Loc: testLoc(16, 21), }), - Arguments: []*ast.InputValueDefinition{}, + Directives: []*ast.Directive{}, + Arguments: []*ast.InputValueDefinition{}, Type: ast.NewNonNull(&ast.NonNull{ Kind: "NonNullType", Loc: testLoc(23, 30), @@ -174,6 +181,7 @@ func TestSchemaParser_SimpleTypeInheritingInterface(t *testing.T) { Value: "Hello", Loc: testLoc(5, 10), }), + Directives: []*ast.Directive{}, Interfaces: []*ast.Named{ ast.NewNamed(&ast.Named{ Name: ast.NewName(&ast.Name{ @@ -204,6 +212,7 @@ func TestSchemaParser_SimpleTypeInheritingMultipleInterfaces(t *testing.T) { Value: "Hello", Loc: testLoc(5, 10), }), + Directives: []*ast.Directive{}, Interfaces: []*ast.Named{ ast.NewNamed(&ast.Named{ Name: ast.NewName(&ast.Name{ @@ -241,13 +250,15 @@ func TestSchemaParser_SingleValueEnum(t *testing.T) { Value: "Hello", Loc: testLoc(5, 10), }), + Directives: []*ast.Directive{}, Values: []*ast.EnumValueDefinition{ ast.NewEnumValueDefinition(&ast.EnumValueDefinition{ Name: ast.NewName(&ast.Name{ Value: "WORLD", Loc: testLoc(13, 18), }), - Loc: testLoc(13, 18), + Directives: []*ast.Directive{}, + Loc: testLoc(13, 18), }), }, }), @@ -270,20 +281,23 @@ func TestSchemaParser_DoubleValueEnum(t *testing.T) { Value: "Hello", Loc: testLoc(5, 10), }), + Directives: []*ast.Directive{}, Values: []*ast.EnumValueDefinition{ ast.NewEnumValueDefinition(&ast.EnumValueDefinition{ Name: ast.NewName(&ast.Name{ Value: "WO", Loc: testLoc(13, 15), }), - Loc: testLoc(13, 15), + Directives: []*ast.Directive{}, + Loc: testLoc(13, 15), }), ast.NewEnumValueDefinition(&ast.EnumValueDefinition{ Name: ast.NewName(&ast.Name{ Value: "RLD", Loc: testLoc(17, 20), }), - Loc: testLoc(17, 20), + Directives: []*ast.Directive{}, + Loc: testLoc(17, 20), }), }, }), @@ -309,6 +323,7 @@ interface Hello { Value: "Hello", Loc: testLoc(11, 16), }), + Directives: []*ast.Directive{}, Fields: []*ast.FieldDefinition{ ast.NewFieldDefinition(&ast.FieldDefinition{ Loc: testLoc(21, 34), @@ -316,7 +331,8 @@ interface Hello { Value: "world", Loc: testLoc(21, 26), }), - Arguments: []*ast.InputValueDefinition{}, + Directives: []*ast.Directive{}, + Arguments: []*ast.InputValueDefinition{}, Type: ast.NewNamed(&ast.Named{ Loc: testLoc(28, 34), Name: ast.NewName(&ast.Name{ @@ -349,6 +365,7 @@ type Hello { Value: "Hello", Loc: testLoc(6, 11), }), + Directives: []*ast.Directive{}, Interfaces: []*ast.Named{}, Fields: []*ast.FieldDefinition{ ast.NewFieldDefinition(&ast.FieldDefinition{ @@ -357,6 +374,7 @@ type Hello { Value: "world", Loc: testLoc(16, 21), }), + Directives: []*ast.Directive{}, Arguments: []*ast.InputValueDefinition{ ast.NewInputValueDefinition(&ast.InputValueDefinition{ Loc: testLoc(22, 35), @@ -372,6 +390,7 @@ type Hello { }), }), DefaultValue: nil, + Directives: []*ast.Directive{}, }), }, Type: ast.NewNamed(&ast.Named{ @@ -406,6 +425,7 @@ type Hello { Value: "Hello", Loc: testLoc(6, 11), }), + Directives: []*ast.Directive{}, Interfaces: []*ast.Named{}, Fields: []*ast.FieldDefinition{ ast.NewFieldDefinition(&ast.FieldDefinition{ @@ -432,8 +452,10 @@ type Hello { Value: true, Loc: testLoc(38, 42), }), + Directives: []*ast.Directive{}, }), }, + Directives: []*ast.Directive{}, Type: 
ast.NewNamed(&ast.Named{ Loc: testLoc(45, 51), Name: ast.NewName(&ast.Name{ @@ -466,6 +488,7 @@ type Hello { Value: "Hello", Loc: testLoc(6, 11), }), + Directives: []*ast.Directive{}, Interfaces: []*ast.Named{}, Fields: []*ast.FieldDefinition{ ast.NewFieldDefinition(&ast.FieldDefinition{ @@ -474,6 +497,7 @@ type Hello { Value: "world", Loc: testLoc(16, 21), }), + Directives: []*ast.Directive{}, Arguments: []*ast.InputValueDefinition{ ast.NewInputValueDefinition(&ast.InputValueDefinition{ Loc: testLoc(22, 38), @@ -492,6 +516,7 @@ type Hello { }), }), DefaultValue: nil, + Directives: []*ast.Directive{}, }), }, Type: ast.NewNamed(&ast.Named{ @@ -526,6 +551,7 @@ type Hello { Value: "Hello", Loc: testLoc(6, 11), }), + Directives: []*ast.Directive{}, Interfaces: []*ast.Named{}, Fields: []*ast.FieldDefinition{ ast.NewFieldDefinition(&ast.FieldDefinition{ @@ -534,6 +560,7 @@ type Hello { Value: "world", Loc: testLoc(16, 21), }), + Directives: []*ast.Directive{}, Arguments: []*ast.InputValueDefinition{ ast.NewInputValueDefinition(&ast.InputValueDefinition{ Loc: testLoc(22, 37), @@ -549,6 +576,7 @@ type Hello { }), }), DefaultValue: nil, + Directives: []*ast.Directive{}, }), ast.NewInputValueDefinition(&ast.InputValueDefinition{ Loc: testLoc(39, 50), @@ -564,6 +592,7 @@ type Hello { }), }), DefaultValue: nil, + Directives: []*ast.Directive{}, }), }, Type: ast.NewNamed(&ast.Named{ @@ -595,6 +624,7 @@ func TestSchemaParser_SimpleUnion(t *testing.T) { Value: "Hello", Loc: testLoc(6, 11), }), + Directives: []*ast.Directive{}, Types: []*ast.Named{ ast.NewNamed(&ast.Named{ Loc: testLoc(14, 19), @@ -624,6 +654,7 @@ func TestSchemaParser_UnionWithTwoTypes(t *testing.T) { Value: "Hello", Loc: testLoc(6, 11), }), + Directives: []*ast.Directive{}, Types: []*ast.Named{ ast.NewNamed(&ast.Named{ Loc: testLoc(14, 16), @@ -660,6 +691,7 @@ func TestSchemaParser_Scalar(t *testing.T) { Value: "Hello", Loc: testLoc(7, 12), }), + Directives: []*ast.Directive{}, }), }, }) @@ -683,6 +715,7 @@ input Hello { Value: "Hello", Loc: testLoc(7, 12), }), + Directives: []*ast.Directive{}, Fields: []*ast.InputValueDefinition{ ast.NewInputValueDefinition(&ast.InputValueDefinition{ Loc: testLoc(17, 30), @@ -698,6 +731,7 @@ input Hello { }), }), DefaultValue: nil, + Directives: []*ast.Directive{}, }), }, }), diff --git a/language/printer/printer.go b/language/printer/printer.go index 82ff1305..95f51c55 100644 --- a/language/printer/printer.go +++ b/language/printer/printer.go @@ -29,6 +29,23 @@ func getMapValue(m map[string]interface{}, key string) interface{} { } return valMap } +func getMapSliceValue(m map[string]interface{}, key string) []interface{} { + tokens := strings.Split(key, ".") + valMap := m + for _, token := range tokens { + v, ok := valMap[token] + if !ok { + return []interface{}{} + } + switch v := v.(type) { + case []interface{}: + return v + default: + return []interface{}{} + } + } + return []interface{}{} +} func getMapValueString(m map[string]interface{}, key string) string { tokens := strings.Split(key, ".") valMap := m @@ -465,12 +482,27 @@ var printDocASTReducer = map[string]visitor.VisitFunc{ "ScalarDefinition": func(p visitor.VisitFuncParams) (string, interface{}) { switch node := p.Node.(type) { case *ast.ScalarDefinition: - name := fmt.Sprintf("%v", node.Name) - str := "scalar " + name + directives := []string{} + for _, directive := range node.Directives { + directives = append(directives, fmt.Sprintf("%v", directive.Name)) + } + str := join([]string{ + "scalar", + fmt.Sprintf("%v", node.Name), + 
join(directives, " "), + }, " ") return visitor.ActionUpdate, str case map[string]interface{}: name := getMapValueString(node, "Name") - str := "scalar " + name + directives := []string{} + for _, directive := range getMapSliceValue(node, "Directives") { + directives = append(directives, fmt.Sprintf("%v", directive)) + } + str := join([]string{ + "scalar", + name, + join(directives, " "), + }, " ") return visitor.ActionUpdate, str } return visitor.ActionNoChange, nil @@ -481,13 +513,33 @@ var printDocASTReducer = map[string]visitor.VisitFunc{ name := fmt.Sprintf("%v", node.Name) interfaces := toSliceString(node.Interfaces) fields := node.Fields - str := "type " + name + " " + wrap("implements ", join(interfaces, ", "), " ") + block(fields) + directives := []string{} + for _, directive := range node.Directives { + directives = append(directives, fmt.Sprintf("%v", directive.Name)) + } + str := join([]string{ + "type", + name, + wrap("implements ", join(interfaces, ", "), ""), + join(directives, " "), + block(fields), + }, " ") return visitor.ActionUpdate, str case map[string]interface{}: name := getMapValueString(node, "Name") interfaces := toSliceString(getMapValue(node, "Interfaces")) fields := getMapValue(node, "Fields") - str := "type " + name + " " + wrap("implements ", join(interfaces, ", "), " ") + block(fields) + directives := []string{} + for _, directive := range getMapSliceValue(node, "Directives") { + directives = append(directives, fmt.Sprintf("%v", directive)) + } + str := join([]string{ + "type", + name, + wrap("implements ", join(interfaces, ", "), ""), + join(directives, " "), + block(fields), + }, " ") return visitor.ActionUpdate, str } return visitor.ActionNoChange, nil @@ -498,13 +550,21 @@ var printDocASTReducer = map[string]visitor.VisitFunc{ name := fmt.Sprintf("%v", node.Name) ttype := fmt.Sprintf("%v", node.Type) args := toSliceString(node.Arguments) - str := name + wrap("(", join(args, ", "), ")") + ": " + ttype + directives := []string{} + for _, directive := range node.Directives { + directives = append(directives, fmt.Sprintf("%v", directive.Name)) + } + str := name + wrap("(", join(args, ", "), ")") + ": " + ttype + wrap(" ", join(directives, " "), "") return visitor.ActionUpdate, str case map[string]interface{}: name := getMapValueString(node, "Name") ttype := getMapValueString(node, "Type") args := toSliceString(getMapValue(node, "Arguments")) - str := name + wrap("(", join(args, ", "), ")") + ": " + ttype + directives := []string{} + for _, directive := range getMapSliceValue(node, "Directives") { + directives = append(directives, fmt.Sprintf("%v", directive)) + } + str := name + wrap("(", join(args, ", "), ")") + ": " + ttype + wrap(" ", join(directives, " "), "") return visitor.ActionUpdate, str } return visitor.ActionNoChange, nil @@ -515,13 +575,30 @@ var printDocASTReducer = map[string]visitor.VisitFunc{ name := fmt.Sprintf("%v", node.Name) ttype := fmt.Sprintf("%v", node.Type) defaultValue := fmt.Sprintf("%v", node.DefaultValue) - str := name + ": " + ttype + wrap(" = ", defaultValue, "") + directives := []string{} + for _, directive := range node.Directives { + directives = append(directives, fmt.Sprintf("%v", directive.Name)) + } + str := join([]string{ + name + ": " + ttype, + wrap("= ", defaultValue, ""), + join(directives, " "), + }, " ") + return visitor.ActionUpdate, str case map[string]interface{}: name := getMapValueString(node, "Name") ttype := getMapValueString(node, "Type") defaultValue := getMapValueString(node, "DefaultValue") - str := 
name + ": " + ttype + wrap(" = ", defaultValue, "") + directives := []string{} + for _, directive := range getMapSliceValue(node, "Directives") { + directives = append(directives, fmt.Sprintf("%v", directive)) + } + str := join([]string{ + name + ": " + ttype, + wrap("= ", defaultValue, ""), + join(directives, " "), + }, " ") return visitor.ActionUpdate, str } return visitor.ActionNoChange, nil @@ -531,12 +608,30 @@ var printDocASTReducer = map[string]visitor.VisitFunc{ case *ast.InterfaceDefinition: name := fmt.Sprintf("%v", node.Name) fields := node.Fields - str := "interface " + name + " " + block(fields) + directives := []string{} + for _, directive := range node.Directives { + directives = append(directives, fmt.Sprintf("%v", directive.Name)) + } + str := join([]string{ + "interface", + name, + join(directives, " "), + block(fields), + }, " ") return visitor.ActionUpdate, str case map[string]interface{}: name := getMapValueString(node, "Name") fields := getMapValue(node, "Fields") - str := "interface " + name + " " + block(fields) + directives := []string{} + for _, directive := range getMapSliceValue(node, "Directives") { + directives = append(directives, fmt.Sprintf("%v", directive)) + } + str := join([]string{ + "interface", + name, + join(directives, " "), + block(fields), + }, " ") return visitor.ActionUpdate, str } return visitor.ActionNoChange, nil @@ -546,12 +641,30 @@ var printDocASTReducer = map[string]visitor.VisitFunc{ case *ast.UnionDefinition: name := fmt.Sprintf("%v", node.Name) types := toSliceString(node.Types) - str := "union " + name + " = " + join(types, " | ") + directives := []string{} + for _, directive := range node.Directives { + directives = append(directives, fmt.Sprintf("%v", directive.Name)) + } + str := join([]string{ + "union", + name, + join(directives, " "), + "= " + join(types, " | "), + }, " ") return visitor.ActionUpdate, str case map[string]interface{}: name := getMapValueString(node, "Name") types := toSliceString(getMapValue(node, "Types")) - str := "union " + name + " = " + join(types, " | ") + directives := []string{} + for _, directive := range getMapSliceValue(node, "Directives") { + directives = append(directives, fmt.Sprintf("%v", directive)) + } + str := join([]string{ + "union", + name, + join(directives, " "), + "= " + join(types, " | "), + }, " ") return visitor.ActionUpdate, str } return visitor.ActionNoChange, nil @@ -561,12 +674,30 @@ var printDocASTReducer = map[string]visitor.VisitFunc{ case *ast.EnumDefinition: name := fmt.Sprintf("%v", node.Name) values := node.Values - str := "enum " + name + " " + block(values) + directives := []string{} + for _, directive := range node.Directives { + directives = append(directives, fmt.Sprintf("%v", directive.Name)) + } + str := join([]string{ + "enum", + name, + join(directives, " "), + block(values), + }, " ") return visitor.ActionUpdate, str case map[string]interface{}: name := getMapValueString(node, "Name") values := getMapValue(node, "Values") - str := "enum " + name + " " + block(values) + directives := []string{} + for _, directive := range getMapSliceValue(node, "Directives") { + directives = append(directives, fmt.Sprintf("%v", directive)) + } + str := join([]string{ + "enum", + name, + join(directives, " "), + block(values), + }, " ") return visitor.ActionUpdate, str } return visitor.ActionNoChange, nil @@ -575,10 +706,26 @@ var printDocASTReducer = map[string]visitor.VisitFunc{ switch node := p.Node.(type) { case *ast.EnumValueDefinition: name := fmt.Sprintf("%v", node.Name) - 
return visitor.ActionUpdate, name + directives := []string{} + for _, directive := range node.Directives { + directives = append(directives, fmt.Sprintf("%v", directive.Name)) + } + str := join([]string{ + name, + join(directives, " "), + }, " ") + return visitor.ActionUpdate, str case map[string]interface{}: name := getMapValueString(node, "Name") - return visitor.ActionUpdate, name + directives := []string{} + for _, directive := range getMapSliceValue(node, "Directives") { + directives = append(directives, fmt.Sprintf("%v", directive)) + } + str := join([]string{ + name, + join(directives, " "), + }, " ") + return visitor.ActionUpdate, str } return visitor.ActionNoChange, nil }, @@ -587,11 +734,31 @@ var printDocASTReducer = map[string]visitor.VisitFunc{ case *ast.InputObjectDefinition: name := fmt.Sprintf("%v", node.Name) fields := node.Fields - return visitor.ActionUpdate, "input " + name + " " + block(fields) + directives := []string{} + for _, directive := range node.Directives { + directives = append(directives, fmt.Sprintf("%v", directive.Name)) + } + str := join([]string{ + "input", + name, + join(directives, " "), + block(fields), + }, " ") + return visitor.ActionUpdate, str case map[string]interface{}: name := getMapValueString(node, "Name") fields := getMapValue(node, "Fields") - return visitor.ActionUpdate, "input " + name + " " + block(fields) + directives := []string{} + for _, directive := range getMapSliceValue(node, "Directives") { + directives = append(directives, fmt.Sprintf("%v", directive)) + } + str := join([]string{ + "input", + name, + join(directives, " "), + block(fields), + }, " ") + return visitor.ActionUpdate, str } return visitor.ActionNoChange, nil }, diff --git a/language/printer/schema_printer_test.go b/language/printer/schema_printer_test.go index 73bc8992..2ce5ed00 100644 --- a/language/printer/schema_printer_test.go +++ b/language/printer/schema_printer_test.go @@ -67,29 +67,52 @@ type Foo implements Bar { six(argument: InputType = {key: "value"}): Type } +type AnnotatedObject @onObject(arg: "value") { + annotatedField(arg: Type = "default" @onArg): Type @onField +} + interface Bar { one: Type four(argument: String = "string"): String } +interface AnnotatedInterface @onInterface { + annotatedField(arg: Type @onArg): Type @onField +} + union Feed = Story | Article | Advert +union AnnotatedUnion @onUnion = A | B + scalar CustomScalar +scalar AnnotatedScalar @onScalar + enum Site { DESKTOP MOBILE } +enum AnnotatedEnum @onEnum { + ANNOTATED_VALUE @onEnumValue + OTHER_VALUE +} + input InputType { key: String! answer: Int = 42 } +input AnnotatedInput @onInputObjectType { + annotatedField: Type @onField +} + extend type Foo { seven(argument: [String]): Type } +extend type Foo @onType {} + type NoFields {} directive @skip(if: Boolean!) 
on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT diff --git a/language/visitor/visitor.go b/language/visitor/visitor.go index b59df235..15d6791e 100644 --- a/language/visitor/visitor.go +++ b/language/visitor/visitor.go @@ -86,37 +86,50 @@ var QueryDocumentKeys = KeyMap{ "SchemaDefinition": []string{"OperationTypes"}, "OperationTypeDefinition": []string{"Type"}, - "ScalarDefinition": []string{"Name"}, + "ScalarDefinition": []string{ + "Name", + "Directives", + }, "ObjectDefinition": []string{ "Name", "Interfaces", + "Directives", "Fields", }, "FieldDefinition": []string{ "Name", "Arguments", "Type", + "Directives", }, "InputValueDefinition": []string{ "Name", "Type", "DefaultValue", + "Directives", }, "InterfaceDefinition": []string{ "Name", + "Directives", "Fields", }, "UnionDefinition": []string{ "Name", + "Directives", "Types", }, "EnumDefinition": []string{ "Name", + "Directives", "Values", }, - "EnumValueDefinition": []string{"Name"}, + "EnumValueDefinition": []string{ + "Name", + "Directives", + }, "InputObjectDefinition": []string{ "Name", + "Directives", "Fields", }, @@ -372,10 +385,13 @@ Loop: nodeIn = node } parentConcrete, _ := parent.(ast.Node) + // ancestorsConcrete slice may contain nil values ancestorsConcrete := []ast.Node{} for _, ancestor := range ancestors { if ancestorConcrete, ok := ancestor.(ast.Node); ok { ancestorsConcrete = append(ancestorsConcrete, ancestorConcrete) + } else { + ancestorsConcrete = append(ancestorsConcrete, nil) } } diff --git a/rules.go b/rules.go index 664d8e80..467d1af7 100644 --- a/rules.go +++ b/rules.go @@ -549,15 +549,7 @@ func KnownDirectivesRule(context *ValidationContext) *ValidationRuleInstance { ) } - var appliedTo ast.Node - if len(p.Ancestors) > 0 { - appliedTo = p.Ancestors[len(p.Ancestors)-1] - } - if appliedTo == nil { - return action, result - } - - candidateLocation := getLocationForAppliedNode(appliedTo) + candidateLocation := getDirectiveLocationForASTPath(p.Ancestors) directiveHasLocation := false for _, loc := range directiveDef.Locations { @@ -592,7 +584,14 @@ func KnownDirectivesRule(context *ValidationContext) *ValidationRuleInstance { } } -func getLocationForAppliedNode(appliedTo ast.Node) string { +func getDirectiveLocationForASTPath(ancestors []ast.Node) string { + var appliedTo ast.Node + if len(ancestors) > 0 { + appliedTo = ancestors[len(ancestors)-1] + } + if appliedTo == nil { + return "" + } kind := appliedTo.GetKind() if kind == kinds.OperationDefinition { appliedTo, _ := appliedTo.(*ast.OperationDefinition) @@ -618,6 +617,41 @@ func getLocationForAppliedNode(appliedTo ast.Node) string { if kind == kinds.FragmentDefinition { return DirectiveLocationFragmentDefinition } + if kind == kinds.ScalarDefinition { + return DirectiveLocationScalar + } + if kind == kinds.ObjectDefinition { + return DirectiveLocationObject + } + if kind == kinds.FieldDefinition { + return DirectiveLocationFieldDefinition + } + if kind == kinds.InterfaceDefinition { + return DirectiveLocationInterface + } + if kind == kinds.UnionDefinition { + return DirectiveLocationUnion + } + if kind == kinds.EnumDefinition { + return DirectiveLocationEnum + } + if kind == kinds.EnumValueDefinition { + return DirectiveLocationEnumValue + } + if kind == kinds.InputObjectDefinition { + return DirectiveLocationInputObject + } + if kind == kinds.InputValueDefinition { + var parentNode ast.Node + if len(ancestors) >= 3 { + parentNode = ancestors[len(ancestors)-3] + } + if parentNode.GetKind() == kinds.InputObjectDefinition { + return 
DirectiveLocationInputFieldDefinition + } else { + return DirectiveLocationArgumentDefinition + } + } return "" } diff --git a/rules_known_directives_rule_test.go b/rules_known_directives_rule_test.go index ea6c07e9..85ec0f04 100644 --- a/rules_known_directives_rule_test.go +++ b/rules_known_directives_rule_test.go @@ -64,23 +64,93 @@ func TestValidate_KnownDirectives_WithManyUnknownDirectives(t *testing.T) { } func TestValidate_KnownDirectives_WithWellPlacedDirectives(t *testing.T) { testutil.ExpectPassesRule(t, graphql.KnownDirectivesRule, ` - query Foo { + query Foo @onQuery { name @include(if: true) ...Frag @include(if: true) skippedField @skip(if: true) ...SkippedFrag @skip(if: true) } + + mutation Bar @onMutation { + someField + } `) } func TestValidate_KnownDirectives_WithMisplacedDirectives(t *testing.T) { testutil.ExpectFailsRule(t, graphql.KnownDirectivesRule, ` query Foo @include(if: true) { - name @operationOnly - ...Frag @operationOnly + name @onQuery + ...Frag @onQuery + } + + mutation Bar @onQuery { + someField } `, []gqlerrors.FormattedError{ testutil.RuleError(`Directive "include" may not be used on QUERY.`, 2, 17), - testutil.RuleError(`Directive "operationOnly" may not be used on FIELD.`, 3, 14), - testutil.RuleError(`Directive "operationOnly" may not be used on FRAGMENT_SPREAD.`, 4, 17), + testutil.RuleError(`Directive "onQuery" may not be used on FIELD.`, 3, 14), + testutil.RuleError(`Directive "onQuery" may not be used on FRAGMENT_SPREAD.`, 4, 17), + testutil.RuleError(`Directive "onQuery" may not be used on MUTATION.`, 7, 20), + }) +} + +func TestValidate_KnownDirectives_WithinSchemaLanguage_WithWellPlacedDirectives(t *testing.T) { + testutil.ExpectPassesRule(t, graphql.KnownDirectivesRule, ` + type MyObj implements MyInterface @onObject { + myField(myArg: Int @onArgumentDefinition): String @onFieldDefinition + } + + scalar MyScalar @onScalar + + interface MyInterface @onInterface { + myField(myArg: Int @onArgumentDefinition): String @onFieldDefinition + } + + union MyUnion @onUnion = MyObj | Other + + enum MyEnum @onEnum { + MY_VALUE @onEnumValue + } + + input MyInput @onInputObject { + myField: Int @onInputFieldDefinition + } + `) +} + +func TestValidate_KnownDirectives_WithinSchemaLanguage_WithMisplacedDirectives(t *testing.T) { + testutil.ExpectFailsRule(t, graphql.KnownDirectivesRule, ` + type MyObj implements MyInterface @onInterface { + myField(myArg: Int @onInputFieldDefinition): String @onInputFieldDefinition + } + + scalar MyScalar @onEnum + + interface MyInterface @onObject { + myField(myArg: Int @onInputFieldDefinition): String @onInputFieldDefinition + } + + union MyUnion @onEnumValue = MyObj | Other + + enum MyEnum @onScalar { + MY_VALUE @onUnion + } + + input MyInput @onEnum { + myField: Int @onArgumentDefinition + } + `, []gqlerrors.FormattedError{ + testutil.RuleError(`Directive "onInterface" may not be used on OBJECT.`, 2, 43), + testutil.RuleError(`Directive "onInputFieldDefinition" may not be used on ARGUMENT_DEFINITION.`, 3, 30), + testutil.RuleError(`Directive "onInputFieldDefinition" may not be used on FIELD_DEFINITION.`, 3, 63), + testutil.RuleError(`Directive "onEnum" may not be used on SCALAR.`, 6, 25), + testutil.RuleError(`Directive "onObject" may not be used on INTERFACE.`, 8, 31), + testutil.RuleError(`Directive "onInputFieldDefinition" may not be used on ARGUMENT_DEFINITION.`, 9, 30), + testutil.RuleError(`Directive "onInputFieldDefinition" may not be used on FIELD_DEFINITION.`, 9, 63), + testutil.RuleError(`Directive "onEnumValue" may 
not be used on UNION.`, 12, 23), + testutil.RuleError(`Directive "onScalar" may not be used on ENUM.`, 14, 21), + testutil.RuleError(`Directive "onUnion" may not be used on ENUM_VALUE.`, 15, 20), + testutil.RuleError(`Directive "onEnum" may not be used on INPUT_OBJECT.`, 18, 23), + testutil.RuleError(`Directive "onArgumentDefinition" may not be used on INPUT_FIELD_DEFINITION.`, 19, 24), }) } diff --git a/schema-kitchen-sink.graphql b/schema-kitchen-sink.graphql index c7e940ba..582d94ae 100644 --- a/schema-kitchen-sink.graphql +++ b/schema-kitchen-sink.graphql @@ -14,29 +14,52 @@ type Foo implements Bar { six(argument: InputType = {key: "value"}): Type } +type AnnotatedObject @onObject(arg: "value") { + annotatedField(arg: Type = "default" @onArg): Type @onField +} + interface Bar { one: Type four(argument: String = "string"): String } +interface AnnotatedInterface @onInterface { + annotatedField(arg: Type @onArg): Type @onField +} + union Feed = Story | Article | Advert +union AnnotatedUnion @onUnion = A | B + scalar CustomScalar +scalar AnnotatedScalar @onScalar + enum Site { DESKTOP MOBILE } +enum AnnotatedEnum @onEnum { + ANNOTATED_VALUE @onEnumValue + OTHER_VALUE +} + input InputType { key: String! answer: Int = 42 } +input AnnotatedInput @onInputObjectType { + annotatedField: Type @onField +} + extend type Foo { seven(argument: [String]): Type } +extend type Foo @onType {} + type NoFields {} directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT diff --git a/testutil/rules_test_harness.go b/testutil/rules_test_harness.go index db8ed7e8..495c1750 100644 --- a/testutil/rules_test_harness.go +++ b/testutil/rules_test_harness.go @@ -455,12 +455,76 @@ func init() { schema, err := graphql.NewSchema(graphql.SchemaConfig{ Query: queryRoot, Directives: []*graphql.Directive{ + graphql.IncludeDirective, + graphql.SkipDirective, graphql.NewDirective(graphql.DirectiveConfig{ - Name: "operationOnly", + Name: "onQuery", Locations: []string{graphql.DirectiveLocationQuery}, }), - graphql.IncludeDirective, - graphql.SkipDirective, + graphql.NewDirective(graphql.DirectiveConfig{ + Name: "onMutation", + Locations: []string{graphql.DirectiveLocationMutation}, + }), + graphql.NewDirective(graphql.DirectiveConfig{ + Name: "onSubscription", + Locations: []string{graphql.DirectiveLocationSubscription}, + }), + graphql.NewDirective(graphql.DirectiveConfig{ + Name: "onField", + Locations: []string{graphql.DirectiveLocationField}, + }), + graphql.NewDirective(graphql.DirectiveConfig{ + Name: "onFragmentDefinition", + Locations: []string{graphql.DirectiveLocationFragmentDefinition}, + }), + graphql.NewDirective(graphql.DirectiveConfig{ + Name: "onFragmentSpread", + Locations: []string{graphql.DirectiveLocationFragmentSpread}, + }), + graphql.NewDirective(graphql.DirectiveConfig{ + Name: "onInlineFragment", + Locations: []string{graphql.DirectiveLocationInlineFragment}, + }), + graphql.NewDirective(graphql.DirectiveConfig{ + Name: "onScalar", + Locations: []string{graphql.DirectiveLocationScalar}, + }), + graphql.NewDirective(graphql.DirectiveConfig{ + Name: "onObject", + Locations: []string{graphql.DirectiveLocationObject}, + }), + graphql.NewDirective(graphql.DirectiveConfig{ + Name: "onFieldDefinition", + Locations: []string{graphql.DirectiveLocationFieldDefinition}, + }), + graphql.NewDirective(graphql.DirectiveConfig{ + Name: "onArgumentDefinition", + Locations: []string{graphql.DirectiveLocationArgumentDefinition}, + }), + graphql.NewDirective(graphql.DirectiveConfig{ + Name: 
"onInterface", + Locations: []string{graphql.DirectiveLocationInterface}, + }), + graphql.NewDirective(graphql.DirectiveConfig{ + Name: "onUnion", + Locations: []string{graphql.DirectiveLocationUnion}, + }), + graphql.NewDirective(graphql.DirectiveConfig{ + Name: "onEnum", + Locations: []string{graphql.DirectiveLocationEnum}, + }), + graphql.NewDirective(graphql.DirectiveConfig{ + Name: "onEnumValue", + Locations: []string{graphql.DirectiveLocationEnumValue}, + }), + graphql.NewDirective(graphql.DirectiveConfig{ + Name: "onInputObject", + Locations: []string{graphql.DirectiveLocationInputObject}, + }), + graphql.NewDirective(graphql.DirectiveConfig{ + Name: "onInputFieldDefinition", + Locations: []string{graphql.DirectiveLocationInputFieldDefinition}, + }), }, Types: []graphql.Type{ catType, From 988ab2e8a65a3a09d15a3418ac18b83cfaa754ba Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Tue, 7 Jun 2016 18:35:53 +0800 Subject: [PATCH 18/34] RFC: Directive location: schema definition (#382) This allows directives to be defined on schema definitions. Commit: 0aa78f61a2dc150b5ea9ee4f50b68a736796f068 [0aa78f6] Parents: 1b6824bc5d Author: Lee Byron Date: 7 May 2016 at 7:05:27 AM SGT Labels: HEAD --- directives.go | 1 + introspection.go | 4 ++++ language/ast/type_definitions.go | 2 ++ language/parser/parser.go | 12 ++++++++---- language/printer/printer.go | 22 ++++++++++++++++++---- language/visitor/visitor.go | 5 ++++- rules.go | 3 +++ rules_known_directives_rule_test.go | 9 +++++++++ testutil/rules_test_harness.go | 4 ++++ 9 files changed, 53 insertions(+), 9 deletions(-) diff --git a/directives.go b/directives.go index 81d2ddac..078d7fa1 100644 --- a/directives.go +++ b/directives.go @@ -11,6 +11,7 @@ const ( DirectiveLocationInlineFragment = "INLINE_FRAGMENT" // Schema Definitions + DirectiveLocationSchema = "SCHEMA" DirectiveLocationScalar = "SCALAR" DirectiveLocationObject = "OBJECT" DirectiveLocationFieldDefinition = "FIELD_DEFINITION" diff --git a/introspection.go b/introspection.go index b4b58c54..c87ed300 100644 --- a/introspection.go +++ b/introspection.go @@ -135,6 +135,10 @@ func init() { Value: DirectiveLocationInlineFragment, Description: "Location adjacent to an inline fragment.", }, + "SCHEMA": &EnumValueConfig{ + Value: DirectiveLocationSchema, + Description: "Location adjacent to a schema definition.", + }, "SCALAR": &EnumValueConfig{ Value: DirectiveLocationScalar, Description: "Location adjacent to a scalar definition.", diff --git a/language/ast/type_definitions.go b/language/ast/type_definitions.go index 9679f41f..a5132dd4 100644 --- a/language/ast/type_definitions.go +++ b/language/ast/type_definitions.go @@ -36,6 +36,7 @@ var _ TypeSystemDefinition = (*DirectiveDefinition)(nil) type SchemaDefinition struct { Kind string Loc *Location + Directives []*Directive OperationTypes []*OperationTypeDefinition } @@ -46,6 +47,7 @@ func NewSchemaDefinition(def *SchemaDefinition) *SchemaDefinition { return &SchemaDefinition{ Kind: kinds.SchemaDefinition, Loc: def.Loc, + Directives: def.Directives, OperationTypes: def.OperationTypes, } } diff --git a/language/parser/parser.go b/language/parser/parser.go index 5519f057..2e51939c 100644 --- a/language/parser/parser.go +++ b/language/parser/parser.go @@ -877,7 +877,7 @@ func parseNamed(parser *Parser) (*ast.Named, error) { /* Implements the parsing rules in the Type Definition section. */ /** - * SchemaDefinition : schema { OperationTypeDefinition+ } + * SchemaDefinition : schema Directives? 
{ OperationTypeDefinition+ } * * OperationTypeDefinition : OperationType : NamedType */ @@ -887,6 +887,10 @@ func parseSchemaDefinition(parser *Parser) (*ast.SchemaDefinition, error) { if err != nil { return nil, err } + directives, err := parseDirectives(parser) + if err != nil { + return nil, err + } operationTypesI, err := many( parser, lexer.TokenKind[lexer.BRACE_L], @@ -902,11 +906,11 @@ func parseSchemaDefinition(parser *Parser) (*ast.SchemaDefinition, error) { operationTypes = append(operationTypes, op) } } - def := ast.NewSchemaDefinition(&ast.SchemaDefinition{ + return ast.NewSchemaDefinition(&ast.SchemaDefinition{ OperationTypes: operationTypes, + Directives: directives, Loc: loc(parser, start), - }) - return def, nil + }), nil } func parseOperationTypeDefinition(parser *Parser) (interface{}, error) { diff --git a/language/printer/printer.go b/language/printer/printer.go index 95f51c55..3add3852 100644 --- a/language/printer/printer.go +++ b/language/printer/printer.go @@ -455,13 +455,27 @@ var printDocASTReducer = map[string]visitor.VisitFunc{ "SchemaDefinition": func(p visitor.VisitFuncParams) (string, interface{}) { switch node := p.Node.(type) { case *ast.SchemaDefinition: - operationTypesBlock := block(node.OperationTypes) - str := fmt.Sprintf("schema %v", operationTypesBlock) + directives := []string{} + for _, directive := range node.Directives { + directives = append(directives, fmt.Sprintf("%v", directive.Name)) + } + str := join([]string{ + "schema", + join(directives, " "), + block(node.OperationTypes), + }, " ") return visitor.ActionUpdate, str case map[string]interface{}: operationTypes := toSliceString(getMapValue(node, "OperationTypes")) - operationTypesBlock := block(operationTypes) - str := fmt.Sprintf("schema %v", operationTypesBlock) + directives := []string{} + for _, directive := range getMapSliceValue(node, "Directives") { + directives = append(directives, fmt.Sprintf("%v", directive)) + } + str := join([]string{ + "schema", + join(directives, " "), + block(operationTypes), + }, " ") return visitor.ActionUpdate, str } return visitor.ActionNoChange, nil diff --git a/language/visitor/visitor.go b/language/visitor/visitor.go index 15d6791e..9a1c2ac2 100644 --- a/language/visitor/visitor.go +++ b/language/visitor/visitor.go @@ -83,7 +83,10 @@ var QueryDocumentKeys = KeyMap{ "List": []string{"Type"}, "NonNull": []string{"Type"}, - "SchemaDefinition": []string{"OperationTypes"}, + "SchemaDefinition": []string{ + "Directives", + "OperationTypes", + }, "OperationTypeDefinition": []string{"Type"}, "ScalarDefinition": []string{ diff --git a/rules.go b/rules.go index 467d1af7..a03e24a5 100644 --- a/rules.go +++ b/rules.go @@ -617,6 +617,9 @@ func getDirectiveLocationForASTPath(ancestors []ast.Node) string { if kind == kinds.FragmentDefinition { return DirectiveLocationFragmentDefinition } + if kind == kinds.SchemaDefinition { + return DirectiveLocationSchema + } if kind == kinds.ScalarDefinition { return DirectiveLocationScalar } diff --git a/rules_known_directives_rule_test.go b/rules_known_directives_rule_test.go index 85ec0f04..f3d8231c 100644 --- a/rules_known_directives_rule_test.go +++ b/rules_known_directives_rule_test.go @@ -115,6 +115,10 @@ func TestValidate_KnownDirectives_WithinSchemaLanguage_WithWellPlacedDirectives( input MyInput @onInputObject { myField: Int @onInputFieldDefinition } + + schema @onSchema { + query: MyQuery + } `) } @@ -139,6 +143,10 @@ func TestValidate_KnownDirectives_WithinSchemaLanguage_WithMisplacedDirectives(t input MyInput @onEnum { 
myField: Int @onArgumentDefinition } + + schema @onObject { + query: MyQuery + } `, []gqlerrors.FormattedError{ testutil.RuleError(`Directive "onInterface" may not be used on OBJECT.`, 2, 43), testutil.RuleError(`Directive "onInputFieldDefinition" may not be used on ARGUMENT_DEFINITION.`, 3, 30), @@ -152,5 +160,6 @@ func TestValidate_KnownDirectives_WithinSchemaLanguage_WithMisplacedDirectives(t testutil.RuleError(`Directive "onUnion" may not be used on ENUM_VALUE.`, 15, 20), testutil.RuleError(`Directive "onEnum" may not be used on INPUT_OBJECT.`, 18, 23), testutil.RuleError(`Directive "onArgumentDefinition" may not be used on INPUT_FIELD_DEFINITION.`, 19, 24), + testutil.RuleError(`Directive "onObject" may not be used on SCHEMA.`, 22, 16), }) } diff --git a/testutil/rules_test_harness.go b/testutil/rules_test_harness.go index 495c1750..0dc8df63 100644 --- a/testutil/rules_test_harness.go +++ b/testutil/rules_test_harness.go @@ -485,6 +485,10 @@ func init() { Name: "onInlineFragment", Locations: []string{graphql.DirectiveLocationInlineFragment}, }), + graphql.NewDirective(graphql.DirectiveConfig{ + Name: "onSchema", + Locations: []string{graphql.DirectiveLocationSchema}, + }), graphql.NewDirective(graphql.DirectiveConfig{ Name: "onScalar", Locations: []string{graphql.DirectiveLocationScalar}, From 47b81e324c82a84f0373d0c4bae432f9d16f8f01 Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Tue, 7 Jun 2016 19:22:16 +0800 Subject: [PATCH 19/34] Deprecated directive (#384) This adds a new directive as part of the experimental schema language: ``` directive @deprecated(reason: String = "No longer supported") on FIELD_DEFINITION | ENUM_VALUE ``` It also adds support for this directive in the schemaPrinter and buildASTSchema. Additionally exports a new helper `specifiedDirectives` which is encoured to be used when addressing the collection of all directives defined by the spec. The `@deprecated` directive is optimistically added to this collection. While it's currently experimental, it will become part of the schema definition language RFC. Commit: 5375c9b20452801b69dba208cac15d32e02ac608 [5375c9b] Parents: 0aa78f61a2 Author: Lee Byron Date: 9 May 2016 at 5:56:16 AM SGT Labels: HEAD --- directives.go | 37 +++++++++++++++++++++++++++++++++---- introspection.go | 2 +- schema.go | 23 ++++++++++------------- 3 files changed, 44 insertions(+), 18 deletions(-) diff --git a/directives.go b/directives.go index 078d7fa1..371e968d 100644 --- a/directives.go +++ b/directives.go @@ -24,6 +24,16 @@ const ( DirectiveLocationInputFieldDefinition = "INPUT_FIELD_DEFINITION" ) +// DefaultDeprecationReason Constant string used for default reason for a deprecation. +const DefaultDeprecationReason = "No longer supported" + +// SpecifiedRules The full list of specified directives. +const SpecifiedDirectives = []*Directive{ + IncludeDirective, + SkipDirective, + DeprecatedDirective, +} + // Directive structs are used by the GraphQL runtime as a way of modifying execution // behavior. Type system creators will usually not create these directly. type Directive struct { @@ -90,8 +100,8 @@ func NewDirective(config DirectiveConfig) *Directive { return dir } -// IncludeDirective is used to conditionally include fields or fragments -var IncludeDirective = NewDirective(DirectiveConfig{ +// IncludeDirective is used to conditionally include fields or fragments. 
+const IncludeDirective = NewDirective(DirectiveConfig{ Name: "include", Description: "Directs the executor to include this field or fragment only when " + "the `if` argument is true.", @@ -108,8 +118,8 @@ var IncludeDirective = NewDirective(DirectiveConfig{ }, }) -// SkipDirective Used to conditionally skip (exclude) fields or fragments -var SkipDirective = NewDirective(DirectiveConfig{ +// SkipDirective Used to conditionally skip (exclude) fields or fragments. +const SkipDirective = NewDirective(DirectiveConfig{ Name: "skip", Description: "Directs the executor to skip this field or fragment when the `if` " + "argument is true.", @@ -125,3 +135,22 @@ var SkipDirective = NewDirective(DirectiveConfig{ DirectiveLocationInlineFragment, }, }) + +// DeprecatedDirective Used to declare element of a GraphQL schema as deprecated. +const DeprecatedDirective = NewDirective(DirectiveConfig{ + Name: "deprecated", + Description: "Marks an element of a GraphQL schema as no longer supported.", + Args: FieldConfigArgument{ + "reason": &ArgumentConfig{ + Type: String, + Description: "Explains why this element was deprecated, usually also including a " + + "suggestion for how to access supported similar data. Formatted" + + "in [Markdown](https://daringfireball.net/projects/markdown/).", + DefaultValue: DefaultDeprecationReason, + }, + }, + Locations: []string{ + DirectiveLocationFieldDefinition, + DirectiveLocationEnumValue, + }, +}) diff --git a/introspection.go b/introspection.go index c87ed300..8415bd07 100644 --- a/introspection.go +++ b/introspection.go @@ -136,7 +136,7 @@ func init() { Description: "Location adjacent to an inline fragment.", }, "SCHEMA": &EnumValueConfig{ - Value: DirectiveLocationSchema, + Value: DirectiveLocationSchema, Description: "Location adjacent to a schema definition.", }, "SCALAR": &EnumValueConfig{ diff --git a/schema.go b/schema.go index 4863e4e4..420a8e60 100644 --- a/schema.go +++ b/schema.go @@ -26,14 +26,14 @@ type TypeMap map[string]Type // }); // Note: If an array of `directives` are provided to GraphQLSchema, that will be // the exact list of directives represented and allowed. If `directives` is not -// provided then a default set of the built-in `[ @include, @skip ]` directives -// will be used. If you wish to provide *additional// directives to these -// built-ins, you must explicitly declare them. Example: -// directives: [ -// myCustomDirective, -// GraphQLIncludeDirective, -// GraphQLSkipDirective -// ] +// provided then a default set of the specified directives (e.g. @include and +// @skip) will be used. If you wish to provide *additional* directives to these +// specified directives, you must explicitly declare them. Example: +// +// const MyAppSchema = new GraphQLSchema({ +// ... +// directives: specifiedDirectives.concat([ myCustomDirective ]), +// }) type Schema struct { typeMap TypeMap directives []*Directive @@ -67,13 +67,10 @@ func NewSchema(config SchemaConfig) (Schema, error) { schema.mutationType = config.Mutation schema.subscriptionType = config.Subscription - // Provide `@include() and `@skip()` directives by default. + // Provide specified directives (e.g. @include and @skip) by default. 
schema.directives = config.Directives if len(schema.directives) == 0 { - schema.directives = []*Directive{ - IncludeDirective, - SkipDirective, - } + schema.directives = SpecifiedDirectives } // Ensure directive definitions are error-free for _, dir := range schema.directives { From 0dcbbd2cc3483a9f087f8727974f47af99f75bbb Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Tue, 7 Jun 2016 19:25:28 +0800 Subject: [PATCH 20/34] Revert back directives to use `var`; `const` not applicable here --- directives.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/directives.go b/directives.go index 371e968d..7ed839f7 100644 --- a/directives.go +++ b/directives.go @@ -28,7 +28,7 @@ const ( const DefaultDeprecationReason = "No longer supported" // SpecifiedRules The full list of specified directives. -const SpecifiedDirectives = []*Directive{ +var SpecifiedDirectives = []*Directive{ IncludeDirective, SkipDirective, DeprecatedDirective, @@ -101,7 +101,7 @@ func NewDirective(config DirectiveConfig) *Directive { } // IncludeDirective is used to conditionally include fields or fragments. -const IncludeDirective = NewDirective(DirectiveConfig{ +var IncludeDirective = NewDirective(DirectiveConfig{ Name: "include", Description: "Directs the executor to include this field or fragment only when " + "the `if` argument is true.", @@ -119,7 +119,7 @@ const IncludeDirective = NewDirective(DirectiveConfig{ }) // SkipDirective Used to conditionally skip (exclude) fields or fragments. -const SkipDirective = NewDirective(DirectiveConfig{ +var SkipDirective = NewDirective(DirectiveConfig{ Name: "skip", Description: "Directs the executor to skip this field or fragment when the `if` " + "argument is true.", @@ -137,7 +137,7 @@ const SkipDirective = NewDirective(DirectiveConfig{ }) // DeprecatedDirective Used to declare element of a GraphQL schema as deprecated. -const DeprecatedDirective = NewDirective(DirectiveConfig{ +var DeprecatedDirective = NewDirective(DirectiveConfig{ Name: "deprecated", Description: "Marks an element of a GraphQL schema as no longer supported.", Args: FieldConfigArgument{ From 1225ab0ab3ab7c683322b60605ea60f348facf69 Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Tue, 7 Jun 2016 19:22:16 +0800 Subject: [PATCH 21/34] Deprecated directive (#384) This adds a new directive as part of the experimental schema language: ``` directive @deprecated(reason: String = "No longer supported") on FIELD_DEFINITION | ENUM_VALUE ``` It also adds support for this directive in the schemaPrinter and buildASTSchema. Additionally exports a new helper `specifiedDirectives` which is encoured to be used when addressing the collection of all directives defined by the spec. The `@deprecated` directive is optimistically added to this collection. While it's currently experimental, it will become part of the schema definition language RFC. Commit: 5375c9b20452801b69dba208cac15d32e02ac608 [5375c9b] Parents: 0aa78f61a2 Author: Lee Byron Date: 9 May 2016 at 5:56:16 AM SGT Labels: HEAD --- directives.go | 33 +++++++++++++++++++++++++++++++-- introspection.go | 2 +- schema.go | 23 ++++++++++------------- 3 files changed, 42 insertions(+), 16 deletions(-) diff --git a/directives.go b/directives.go index 078d7fa1..7ed839f7 100644 --- a/directives.go +++ b/directives.go @@ -24,6 +24,16 @@ const ( DirectiveLocationInputFieldDefinition = "INPUT_FIELD_DEFINITION" ) +// DefaultDeprecationReason Constant string used for default reason for a deprecation. 
+const DefaultDeprecationReason = "No longer supported" + +// SpecifiedRules The full list of specified directives. +var SpecifiedDirectives = []*Directive{ + IncludeDirective, + SkipDirective, + DeprecatedDirective, +} + // Directive structs are used by the GraphQL runtime as a way of modifying execution // behavior. Type system creators will usually not create these directly. type Directive struct { @@ -90,7 +100,7 @@ func NewDirective(config DirectiveConfig) *Directive { return dir } -// IncludeDirective is used to conditionally include fields or fragments +// IncludeDirective is used to conditionally include fields or fragments. var IncludeDirective = NewDirective(DirectiveConfig{ Name: "include", Description: "Directs the executor to include this field or fragment only when " + @@ -108,7 +118,7 @@ var IncludeDirective = NewDirective(DirectiveConfig{ }, }) -// SkipDirective Used to conditionally skip (exclude) fields or fragments +// SkipDirective Used to conditionally skip (exclude) fields or fragments. var SkipDirective = NewDirective(DirectiveConfig{ Name: "skip", Description: "Directs the executor to skip this field or fragment when the `if` " + @@ -125,3 +135,22 @@ var SkipDirective = NewDirective(DirectiveConfig{ DirectiveLocationInlineFragment, }, }) + +// DeprecatedDirective Used to declare element of a GraphQL schema as deprecated. +var DeprecatedDirective = NewDirective(DirectiveConfig{ + Name: "deprecated", + Description: "Marks an element of a GraphQL schema as no longer supported.", + Args: FieldConfigArgument{ + "reason": &ArgumentConfig{ + Type: String, + Description: "Explains why this element was deprecated, usually also including a " + + "suggestion for how to access supported similar data. Formatted" + + "in [Markdown](https://daringfireball.net/projects/markdown/).", + DefaultValue: DefaultDeprecationReason, + }, + }, + Locations: []string{ + DirectiveLocationFieldDefinition, + DirectiveLocationEnumValue, + }, +}) diff --git a/introspection.go b/introspection.go index c87ed300..8415bd07 100644 --- a/introspection.go +++ b/introspection.go @@ -136,7 +136,7 @@ func init() { Description: "Location adjacent to an inline fragment.", }, "SCHEMA": &EnumValueConfig{ - Value: DirectiveLocationSchema, + Value: DirectiveLocationSchema, Description: "Location adjacent to a schema definition.", }, "SCALAR": &EnumValueConfig{ diff --git a/schema.go b/schema.go index 4863e4e4..420a8e60 100644 --- a/schema.go +++ b/schema.go @@ -26,14 +26,14 @@ type TypeMap map[string]Type // }); // Note: If an array of `directives` are provided to GraphQLSchema, that will be // the exact list of directives represented and allowed. If `directives` is not -// provided then a default set of the built-in `[ @include, @skip ]` directives -// will be used. If you wish to provide *additional// directives to these -// built-ins, you must explicitly declare them. Example: -// directives: [ -// myCustomDirective, -// GraphQLIncludeDirective, -// GraphQLSkipDirective -// ] +// provided then a default set of the specified directives (e.g. @include and +// @skip) will be used. If you wish to provide *additional* directives to these +// specified directives, you must explicitly declare them. Example: +// +// const MyAppSchema = new GraphQLSchema({ +// ... 
+// directives: specifiedDirectives.concat([ myCustomDirective ]), +// }) type Schema struct { typeMap TypeMap directives []*Directive @@ -67,13 +67,10 @@ func NewSchema(config SchemaConfig) (Schema, error) { schema.mutationType = config.Mutation schema.subscriptionType = config.Subscription - // Provide `@include() and `@skip()` directives by default. + // Provide specified directives (e.g. @include and @skip) by default. schema.directives = config.Directives if len(schema.directives) == 0 { - schema.directives = []*Directive{ - IncludeDirective, - SkipDirective, - } + schema.directives = SpecifiedDirectives } // Ensure directive definitions are error-free for _, dir := range schema.directives { From d92356847defa39601d9f3a8dd7376f4eed85de8 Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Tue, 7 Jun 2016 20:57:36 +0800 Subject: [PATCH 22/34] Factor out closure functions to normal functions The functions within this validator did not need to close over any state, which allows them to be pure functions. Commit: 3a01fac4623b37eb7efcdece7a2c33ef74750aeb [3a01fac] Parents: 5375c9b204 Author: Lee Byron Date: 10 May 2016 at 6:12:08 AM SGT Commit Date: 10 May 2016 at 6:12:11 AM SGT Labels: HEAD --- rules.go | 99 ++++++++++++++++++++++++++++---------------------------- 1 file changed, 50 insertions(+), 49 deletions(-) diff --git a/rules.go b/rules.go index a03e24a5..72385bb2 100644 --- a/rules.go +++ b/rules.go @@ -1381,6 +1381,55 @@ func doTypesConflict(type1 Output, type2 Output) bool { return false } +// getSubfieldMap Given two overlapping fields, produce the combined collection of subfields. +func getSubfieldMap(context *ValidationContext, ast1 *ast.Field, type1 Output, ast2 *ast.Field, type2 Output) map[string][]*fieldDefPair { + selectionSet1 := ast1.SelectionSet + selectionSet2 := ast2.SelectionSet + if selectionSet1 != nil && selectionSet2 != nil { + visitedFragmentNames := map[string]bool{} + subfieldMap := collectFieldASTsAndDefs( + context, + GetNamed(type1), + selectionSet1, + visitedFragmentNames, + nil, + ) + subfieldMap = collectFieldASTsAndDefs( + context, + GetNamed(type2), + selectionSet2, + visitedFragmentNames, + subfieldMap, + ) + return subfieldMap + } + return nil +} + +// subfieldConflicts Given a series of Conflicts which occurred between two sub-fields, generate a single Conflict. +func subfieldConflicts(conflicts []*conflict, responseName string, ast1 *ast.Field, ast2 *ast.Field) *conflict { + if len(conflicts) > 0 { + conflictReasons := []conflictReason{} + conflictFieldsLeft := []ast.Node{ast1} + conflictFieldsRight := []ast.Node{ast2} + for _, c := range conflicts { + conflictReasons = append(conflictReasons, c.Reason) + conflictFieldsLeft = append(conflictFieldsLeft, c.FieldsLeft...) + conflictFieldsRight = append(conflictFieldsRight, c.FieldsRight...) + } + + return &conflict{ + Reason: conflictReason{ + Name: responseName, + Message: conflictReasons, + }, + FieldsLeft: conflictFieldsLeft, + FieldsRight: conflictFieldsRight, + } + } + return nil +} + // OverlappingFieldsCanBeMergedRule Overlapping fields can be merged // // A selection set is only valid if all fields (including spreading any @@ -1388,8 +1437,6 @@ func doTypesConflict(type1 Output, type2 Output) bool { // without ambiguity. 
func OverlappingFieldsCanBeMergedRule(context *ValidationContext) *ValidationRuleInstance { - var getSubfieldMap func(ast1 *ast.Field, type1 Output, ast2 *ast.Field, type2 Output) map[string][]*fieldDefPair - var subfieldConflicts func(conflicts []*conflict, responseName string, ast1 *ast.Field, ast2 *ast.Field) *conflict var findConflicts func(parentFieldsAreMutuallyExclusive bool, fieldMap map[string][]*fieldDefPair) (conflicts []*conflict) comparedSet := newPairSet() @@ -1490,7 +1537,7 @@ func OverlappingFieldsCanBeMergedRule(context *ValidationContext) *ValidationRul } } - subFieldMap := getSubfieldMap(ast1, type1, ast2, type2) + subFieldMap := getSubfieldMap(context, ast1, type1, ast2, type2) if subFieldMap != nil { conflicts := findConflicts(fieldsAreMutuallyExclusive, subFieldMap) return subfieldConflicts(conflicts, responseName, ast1, ast2) @@ -1499,52 +1546,6 @@ func OverlappingFieldsCanBeMergedRule(context *ValidationContext) *ValidationRul return nil } - getSubfieldMap = func(ast1 *ast.Field, type1 Output, ast2 *ast.Field, type2 Output) map[string][]*fieldDefPair { - selectionSet1 := ast1.SelectionSet - selectionSet2 := ast2.SelectionSet - if selectionSet1 != nil && selectionSet2 != nil { - visitedFragmentNames := map[string]bool{} - subfieldMap := collectFieldASTsAndDefs( - context, - GetNamed(type1), - selectionSet1, - visitedFragmentNames, - nil, - ) - subfieldMap = collectFieldASTsAndDefs( - context, - GetNamed(type2), - selectionSet2, - visitedFragmentNames, - subfieldMap, - ) - return subfieldMap - } - return nil - } - - subfieldConflicts = func(conflicts []*conflict, responseName string, ast1 *ast.Field, ast2 *ast.Field) *conflict { - if len(conflicts) > 0 { - conflictReasons := []conflictReason{} - conflictFieldsLeft := []ast.Node{ast1} - conflictFieldsRight := []ast.Node{ast2} - for _, c := range conflicts { - conflictReasons = append(conflictReasons, c.Reason) - conflictFieldsLeft = append(conflictFieldsLeft, c.FieldsLeft...) - conflictFieldsRight = append(conflictFieldsRight, c.FieldsRight...) - } - - return &conflict{ - Reason: conflictReason{ - Name: responseName, - Message: conflictReasons, - }, - FieldsLeft: conflictFieldsLeft, - FieldsRight: conflictFieldsRight, - } - } - return nil - } findConflicts = func(parentFieldsAreMutuallyExclusive bool, fieldMap map[string][]*fieldDefPair) (conflicts []*conflict) { // ensure field traversal From 6b9ac5edcca7ea363f5bbb8d7c02a93c27ba7c29 Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Tue, 7 Jun 2016 21:06:48 +0800 Subject: [PATCH 23/34] Factor out more closure functions Two more functions from the overlapping-fields validator which now accept arguments rather than closing over them locally. Commit: cf9be874228f4e16762b481d0abed5575f5587db [cf9be87] Parents: 3a01fac462 Author: Lee Byron Date: 10 May 2016 at 6:22:53 AM SGT Commit Date: 10 May 2016 at 6:24:50 AM SGT --- rules.go | 230 +++++++++++++++++++++++++++---------------------------- 1 file changed, 115 insertions(+), 115 deletions(-) diff --git a/rules.go b/rules.go index 72385bb2..7e9d26ce 100644 --- a/rules.go +++ b/rules.go @@ -1430,144 +1430,144 @@ func subfieldConflicts(conflicts []*conflict, responseName string, ast1 *ast.Fie return nil } -// OverlappingFieldsCanBeMergedRule Overlapping fields can be merged -// -// A selection set is only valid if all fields (including spreading any -// fragments) either correspond to distinct response names or can be merged -// without ambiguity. 
-func OverlappingFieldsCanBeMergedRule(context *ValidationContext) *ValidationRuleInstance { - - var findConflicts func(parentFieldsAreMutuallyExclusive bool, fieldMap map[string][]*fieldDefPair) (conflicts []*conflict) - - comparedSet := newPairSet() - findConflict := func(parentFieldsAreMutuallyExclusive bool, responseName string, field *fieldDefPair, field2 *fieldDefPair) *conflict { +// findConflicts Find all Conflicts within a collection of fields. +func findConflicts(context *ValidationContext, parentFieldsAreMutuallyExclusive bool, fieldMap map[string][]*fieldDefPair, comparedSet *pairSet) (conflicts []*conflict) { + + // ensure field traversal + orderedName := sort.StringSlice{} + for responseName := range fieldMap { + orderedName = append(orderedName, responseName) + } + orderedName.Sort() + + for _, responseName := range orderedName { + fields, _ := fieldMap[responseName] + for _, fieldA := range fields { + for _, fieldB := range fields { + c := findConflict(context, parentFieldsAreMutuallyExclusive, responseName, fieldA, fieldB, comparedSet) + if c != nil { + conflicts = append(conflicts, c) + } + } + } + } + return conflicts +} - parentType1 := field.ParentType - ast1 := field.Field - def1 := field.FieldDef +// findConflict Determines if there is a conflict between two particular fields. +func findConflict(context *ValidationContext, parentFieldsAreMutuallyExclusive bool, responseName string, field *fieldDefPair, field2 *fieldDefPair, comparedSet *pairSet) *conflict { - parentType2 := field2.ParentType - ast2 := field2.Field - def2 := field2.FieldDef + parentType1 := field.ParentType + ast1 := field.Field + def1 := field.FieldDef - // Not a pair. - if ast1 == ast2 { - return nil - } + parentType2 := field2.ParentType + ast2 := field2.Field + def2 := field2.FieldDef - // Memoize, do not report the same issue twice. - // Note: Two overlapping ASTs could be encountered both when - // `parentFieldsAreMutuallyExclusive` is true and is false, which could - // produce different results (when `true` being a subset of `false`). - // However we do not need to include this piece of information when - // memoizing since this rule visits leaf fields before their parent fields, - // ensuring that `parentFieldsAreMutuallyExclusive` is `false` the first - // time two overlapping fields are encountered, ensuring that the full - // set of validation rules are always checked when necessary. - if comparedSet.Has(ast1, ast2) { - return nil - } - comparedSet.Add(ast1, ast2) + // Not a pair. + if ast1 == ast2 { + return nil + } - // The return type for each field. - var type1 Type - var type2 Type - if def1 != nil { - type1 = def1.Type + // Memoize, do not report the same issue twice. + // Note: Two overlapping ASTs could be encountered both when + // `parentFieldsAreMutuallyExclusive` is true and is false, which could + // produce different results (when `true` being a subset of `false`). + // However we do not need to include this piece of information when + // memoizing since this rule visits leaf fields before their parent fields, + // ensuring that `parentFieldsAreMutuallyExclusive` is `false` the first + // time two overlapping fields are encountered, ensuring that the full + // set of validation rules are always checked when necessary. + if comparedSet.Has(ast1, ast2) { + return nil + } + comparedSet.Add(ast1, ast2) + + // The return type for each field. 
+ var type1 Type + var type2 Type + if def1 != nil { + type1 = def1.Type + } + if def2 != nil { + type2 = def2.Type + } + + // If it is known that two fields could not possibly apply at the same + // time, due to the parent types, then it is safe to permit them to diverge + // in aliased field or arguments used as they will not present any ambiguity + // by differing. + // It is known that two parent types could never overlap if they are + // different Object types. Interface or Union types might overlap - if not + // in the current state of the schema, then perhaps in some future version, + // thus may not safely diverge. + _, isParentType1Object := parentType1.(*Object) + _, isParentType2Object := parentType2.(*Object) + fieldsAreMutuallyExclusive := parentFieldsAreMutuallyExclusive || parentType1 != parentType2 && isParentType1Object && isParentType2Object + + if !fieldsAreMutuallyExclusive { + // Two aliases must refer to the same field. + name1 := "" + name2 := "" + + if ast1.Name != nil { + name1 = ast1.Name.Value } - if def2 != nil { - type2 = def2.Type + if ast2.Name != nil { + name2 = ast2.Name.Value } - - // If it is known that two fields could not possibly apply at the same - // time, due to the parent types, then it is safe to permit them to diverge - // in aliased field or arguments used as they will not present any ambiguity - // by differing. - // It is known that two parent types could never overlap if they are - // different Object types. Interface or Union types might overlap - if not - // in the current state of the schema, then perhaps in some future version, - // thus may not safely diverge. - _, isParentType1Object := parentType1.(*Object) - _, isParentType2Object := parentType2.(*Object) - fieldsAreMutuallyExclusive := parentFieldsAreMutuallyExclusive || parentType1 != parentType2 && isParentType1Object && isParentType2Object - - if !fieldsAreMutuallyExclusive { - // Two aliases must refer to the same field. - name1 := "" - name2 := "" - - if ast1.Name != nil { - name1 = ast1.Name.Value - } - if ast2.Name != nil { - name2 = ast2.Name.Value - } - if name1 != name2 { - return &conflict{ - Reason: conflictReason{ - Name: responseName, - Message: fmt.Sprintf(`%v and %v are different fields`, name1, name2), - }, - FieldsLeft: []ast.Node{ast1}, - FieldsRight: []ast.Node{ast2}, - } - } - - // Two field calls must have the same arguments. - if !sameArguments(ast1.Arguments, ast2.Arguments) { - return &conflict{ - Reason: conflictReason{ - Name: responseName, - Message: `they have differing arguments`, - }, - FieldsLeft: []ast.Node{ast1}, - FieldsRight: []ast.Node{ast2}, - } + if name1 != name2 { + return &conflict{ + Reason: conflictReason{ + Name: responseName, + Message: fmt.Sprintf(`%v and %v are different fields`, name1, name2), + }, + FieldsLeft: []ast.Node{ast1}, + FieldsRight: []ast.Node{ast2}, } } - if type1 != nil && type2 != nil && doTypesConflict(type1, type2) { + // Two field calls must have the same arguments. 
+ if !sameArguments(ast1.Arguments, ast2.Arguments) { return &conflict{ Reason: conflictReason{ Name: responseName, - Message: fmt.Sprintf(`they return conflicting types %v and %v`, type1, type2), + Message: `they have differing arguments`, }, FieldsLeft: []ast.Node{ast1}, FieldsRight: []ast.Node{ast2}, } } + } - subFieldMap := getSubfieldMap(context, ast1, type1, ast2, type2) - if subFieldMap != nil { - conflicts := findConflicts(fieldsAreMutuallyExclusive, subFieldMap) - return subfieldConflicts(conflicts, responseName, ast1, ast2) + if type1 != nil && type2 != nil && doTypesConflict(type1, type2) { + return &conflict{ + Reason: conflictReason{ + Name: responseName, + Message: fmt.Sprintf(`they return conflicting types %v and %v`, type1, type2), + }, + FieldsLeft: []ast.Node{ast1}, + FieldsRight: []ast.Node{ast2}, } + } - return nil + subFieldMap := getSubfieldMap(context, ast1, type1, ast2, type2) + if subFieldMap != nil { + conflicts := findConflicts(context, fieldsAreMutuallyExclusive, subFieldMap, comparedSet) + return subfieldConflicts(conflicts, responseName, ast1, ast2) } - findConflicts = func(parentFieldsAreMutuallyExclusive bool, fieldMap map[string][]*fieldDefPair) (conflicts []*conflict) { + return nil +} - // ensure field traversal - orderedName := sort.StringSlice{} - for responseName := range fieldMap { - orderedName = append(orderedName, responseName) - } - orderedName.Sort() - - for _, responseName := range orderedName { - fields, _ := fieldMap[responseName] - for _, fieldA := range fields { - for _, fieldB := range fields { - c := findConflict(parentFieldsAreMutuallyExclusive, responseName, fieldA, fieldB) - if c != nil { - conflicts = append(conflicts, c) - } - } - } - } - return conflicts - } +// OverlappingFieldsCanBeMergedRule Overlapping fields can be merged +// +// A selection set is only valid if all fields (including spreading any +// fragments) either correspond to distinct response names or can be merged +// without ambiguity. 
+func OverlappingFieldsCanBeMergedRule(context *ValidationContext) *ValidationRuleInstance { + comparedSet := newPairSet() var reasonMessage func(message interface{}) string reasonMessage = func(message interface{}) string { @@ -1606,7 +1606,7 @@ func OverlappingFieldsCanBeMergedRule(context *ValidationContext) *ValidationRul nil, nil, ) - conflicts := findConflicts(false, fieldMap) + conflicts := findConflicts(context, false, fieldMap, comparedSet) if len(conflicts) > 0 { for _, c := range conflicts { responseName := c.Reason.Name From bb278945350158f44ed14b1b89cbb38decdf4c60 Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Wed, 8 Jun 2016 14:42:47 +0800 Subject: [PATCH 24/34] Validation: context.getFragmentSpreads now accepts selectionSet rather than fragment AST node Commit: 688a1ee20db01ca80fdec2189298078380d81ed6 [688a1ee] Parents: cf9be87422 Author: Lee Byron Date: 10 May 2016 at 9:54:10 AM SGT --- rules.go | 2 +- validator.go | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/rules.go b/rules.go index 7e9d26ce..49ea6842 100644 --- a/rules.go +++ b/rules.go @@ -858,7 +858,7 @@ func NoFragmentCyclesRule(context *ValidationContext) *ValidationRuleInstance { } visitedFrags[fragmentName] = true - spreadNodes := context.FragmentSpreads(fragment) + spreadNodes := context.FragmentSpreads(fragment.SelectionSet) if len(spreadNodes) == 0 { return } diff --git a/validator.go b/validator.go index 1e057935..491fc667 100644 --- a/validator.go +++ b/validator.go @@ -95,7 +95,7 @@ type ValidationContext struct { variableUsages map[HasSelectionSet][]*VariableUsage recursiveVariableUsages map[*ast.OperationDefinition][]*VariableUsage recursivelyReferencedFragments map[*ast.OperationDefinition][]*ast.FragmentDefinition - fragmentSpreads map[HasSelectionSet][]*ast.FragmentSpread + fragmentSpreads map[*ast.SelectionSet][]*ast.FragmentSpread } func NewValidationContext(schema *Schema, astDoc *ast.Document, typeInfo *TypeInfo) *ValidationContext { @@ -107,7 +107,7 @@ func NewValidationContext(schema *Schema, astDoc *ast.Document, typeInfo *TypeIn variableUsages: map[HasSelectionSet][]*VariableUsage{}, recursiveVariableUsages: map[*ast.OperationDefinition][]*VariableUsage{}, recursivelyReferencedFragments: map[*ast.OperationDefinition][]*ast.FragmentDefinition{}, - fragmentSpreads: map[HasSelectionSet][]*ast.FragmentSpread{}, + fragmentSpreads: map[*ast.SelectionSet][]*ast.FragmentSpread{}, } } @@ -146,13 +146,13 @@ func (ctx *ValidationContext) Fragment(name string) *ast.FragmentDefinition { f, _ := ctx.fragments[name] return f } -func (ctx *ValidationContext) FragmentSpreads(node HasSelectionSet) []*ast.FragmentSpread { +func (ctx *ValidationContext) FragmentSpreads(node *ast.SelectionSet) []*ast.FragmentSpread { if spreads, ok := ctx.fragmentSpreads[node]; ok && spreads != nil { return spreads } spreads := []*ast.FragmentSpread{} - setsToVisit := []*ast.SelectionSet{node.GetSelectionSet()} + setsToVisit := []*ast.SelectionSet{node} for { if len(setsToVisit) == 0 { @@ -189,14 +189,14 @@ func (ctx *ValidationContext) RecursivelyReferencedFragments(operation *ast.Oper fragments := []*ast.FragmentDefinition{} collectedNames := map[string]bool{} - nodesToVisit := []HasSelectionSet{operation} + nodesToVisit := []*ast.SelectionSet{operation.SelectionSet} for { if len(nodesToVisit) == 0 { break } - var node HasSelectionSet + var node *ast.SelectionSet node, nodesToVisit = nodesToVisit[len(nodesToVisit)-1], nodesToVisit[:len(nodesToVisit)-1] spreads := ctx.FragmentSpreads(node) @@ 
-210,7 +210,7 @@ func (ctx *ValidationContext) RecursivelyReferencedFragments(operation *ast.Oper fragment := ctx.Fragment(fragName) if fragment != nil { fragments = append(fragments, fragment) - nodesToVisit = append(nodesToVisit, fragment) + nodesToVisit = append(nodesToVisit, fragment.SelectionSet) } } From 6fecefe32f24ab6b8ce7d2cbeccadb0bb0ab863d Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Thu, 9 Jun 2016 14:51:52 +0800 Subject: [PATCH 25/34] Validation: improving overlapping fields quality (#386) This improves the overlapping fields validation performance and improves error reporting quality by separating the concepts of checking fields "within" a single collection of fields from checking fields "between" two different collections of fields. This ensures for deeply overlapping fields that nested fields are not checked against each other repeatedly. Extending this concept further, fragment spreads are no longer expanded inline before looking for conflicts, instead the fields within a fragment are compared to the fields with the selection set which contained the referencing fragment spread. e.g. ```graphql { same: a same: b ...X } fragment X on T { same: c same: d } ``` In the above example, the initial query body is checked "within" so `a` is compared to `b`. Also, the fragment `X` is checked "within" so `c` is compared to `d`. Because of the fragment spread, the query body and fragment `X` are checked "between" so that `a` and `b` are each compared to `c` and `d`. In this trivial example, no fewer checks are performed, but in the case where fragments are referenced multiple times, this reduces the overall number of checks (regardless of memoization). **BREAKING**: This can change the order of fields reported when a conflict arises when fragment spreads are involved. If you are checking the precise output of errors (e.g. for unit tests), you may find existing errors change from `"a" and "c" are different fields` to `"c" and "a" are different fields`. From a perf point of view, this is fairly minor as the memoization "PairSet" was already keeping these repeated checks from consuming time, however this will reduce the number of memoized hits because of the algorithm improvement. From an error reporting point of view, this reports nearest-common-ancestor issues when found in a fragment that comes later in the validation process. I've added a test which fails with the existing impl and now passes, as well as changed a comment. This also fixes an error where validation issues could be missed because of an over-eager memoization. I've also modified the `PairSet` to be aware of both forms of memoization, also represented by a previously failing test. 
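As a rough sketch of that last point (illustrative only, not code from this patch; the names below are hypothetical and the actual PairSet in rules_overlapping_fields_can_be_merged.go may differ), the memoization can key each compared pair on the mutual-exclusivity flag as well, so a comparison made under one assumption never suppresses the check made under the other:

```go
// Illustrative sketch only. interface{} stands in for the AST field nodes;
// keying on the exclusivity flag keeps the two forms of memoization separate.
type pairKey struct {
	a, b              interface{}
	mutuallyExclusive bool
}

type exclusivityAwarePairSet struct {
	data map[pairKey]bool
}

func newExclusivityAwarePairSet() *exclusivityAwarePairSet {
	return &exclusivityAwarePairSet{data: map[pairKey]bool{}}
}

// Has reports whether the unordered pair (a, b) was already compared under
// the given exclusivity assumption.
func (ps *exclusivityAwarePairSet) Has(a, b interface{}, mutuallyExclusive bool) bool {
	return ps.data[pairKey{a, b, mutuallyExclusive}] ||
		ps.data[pairKey{b, a, mutuallyExclusive}]
}

// Add records the pair in both orders so lookups are order-independent.
func (ps *exclusivityAwarePairSet) Add(a, b interface{}, mutuallyExclusive bool) {
	ps.data[pairKey{a, b, mutuallyExclusive}] = true
	ps.data[pairKey{b, a, mutuallyExclusive}] = true
}
```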
Commit: 4afb263289485897fdfec37aaf6d5f1e5451dcb3 [4afb263] Parents: 688a1ee20d Author: Lee Byron Date: 11 May 2016 at 5:50:26 AM SGT Labels: HEAD --- rules.go | 505 ------------- rules_overlapping_fields_can_be_merged.go | 706 ++++++++++++++++++ ...s_overlapping_fields_can_be_merged_test.go | 158 +++- 3 files changed, 855 insertions(+), 514 deletions(-) create mode 100644 rules_overlapping_fields_can_be_merged.go diff --git a/rules.go b/rules.go index 49ea6842..8effa7b4 100644 --- a/rules.go +++ b/rules.go @@ -804,27 +804,6 @@ func LoneAnonymousOperationRule(context *ValidationContext) *ValidationRuleInsta } } -type nodeSet struct { - set map[ast.Node]bool -} - -func newNodeSet() *nodeSet { - return &nodeSet{ - set: map[ast.Node]bool{}, - } -} -func (set *nodeSet) Has(node ast.Node) bool { - _, ok := set.set[node] - return ok -} -func (set *nodeSet) Add(node ast.Node) bool { - if set.Has(node) { - return false - } - set.set[node] = true - return true -} - func CycleErrorMessage(fragName string, spreadNames []string) string { via := "" if len(spreadNames) > 0 { @@ -1150,490 +1129,6 @@ func NoUnusedVariablesRule(context *ValidationContext) *ValidationRuleInstance { } } -type fieldDefPair struct { - ParentType Composite - Field *ast.Field - FieldDef *FieldDefinition -} - -func collectFieldASTsAndDefs(context *ValidationContext, parentType Named, selectionSet *ast.SelectionSet, visitedFragmentNames map[string]bool, astAndDefs map[string][]*fieldDefPair) map[string][]*fieldDefPair { - - if astAndDefs == nil { - astAndDefs = map[string][]*fieldDefPair{} - } - if visitedFragmentNames == nil { - visitedFragmentNames = map[string]bool{} - } - if selectionSet == nil { - return astAndDefs - } - for _, selection := range selectionSet.Selections { - switch selection := selection.(type) { - case *ast.Field: - fieldName := "" - if selection.Name != nil { - fieldName = selection.Name.Value - } - var fieldDef *FieldDefinition - if parentType, ok := parentType.(*Object); ok { - fieldDef, _ = parentType.Fields()[fieldName] - } - if parentType, ok := parentType.(*Interface); ok { - fieldDef, _ = parentType.Fields()[fieldName] - } - - responseName := fieldName - if selection.Alias != nil { - responseName = selection.Alias.Value - } - _, ok := astAndDefs[responseName] - if !ok { - astAndDefs[responseName] = []*fieldDefPair{} - } - if parentType, ok := parentType.(Composite); ok { - astAndDefs[responseName] = append(astAndDefs[responseName], &fieldDefPair{ - ParentType: parentType, - Field: selection, - FieldDef: fieldDef, - }) - } else { - astAndDefs[responseName] = append(astAndDefs[responseName], &fieldDefPair{ - Field: selection, - FieldDef: fieldDef, - }) - } - case *ast.InlineFragment: - inlineFragmentType := parentType - if selection.TypeCondition != nil { - parentType, _ := typeFromAST(*context.Schema(), selection.TypeCondition) - inlineFragmentType = parentType - } - astAndDefs = collectFieldASTsAndDefs( - context, - inlineFragmentType, - selection.SelectionSet, - visitedFragmentNames, - astAndDefs, - ) - case *ast.FragmentSpread: - fragName := "" - if selection.Name != nil { - fragName = selection.Name.Value - } - if _, ok := visitedFragmentNames[fragName]; ok { - continue - } - visitedFragmentNames[fragName] = true - fragment := context.Fragment(fragName) - if fragment == nil { - continue - } - parentType, _ := typeFromAST(*context.Schema(), fragment.TypeCondition) - astAndDefs = collectFieldASTsAndDefs( - context, - parentType, - fragment.SelectionSet, - visitedFragmentNames, - astAndDefs, - ) - } - 
} - return astAndDefs -} - -// pairSet A way to keep track of pairs of things when the ordering of the pair does -// not matter. We do this by maintaining a sort of double adjacency sets. -type pairSet struct { - data map[ast.Node]*nodeSet -} - -func newPairSet() *pairSet { - return &pairSet{ - data: map[ast.Node]*nodeSet{}, - } -} -func (pair *pairSet) Has(a ast.Node, b ast.Node) bool { - first, ok := pair.data[a] - if !ok || first == nil { - return false - } - res := first.Has(b) - return res -} -func (pair *pairSet) Add(a ast.Node, b ast.Node) bool { - pair.data = pairSetAdd(pair.data, a, b) - pair.data = pairSetAdd(pair.data, b, a) - return true -} - -func pairSetAdd(data map[ast.Node]*nodeSet, a, b ast.Node) map[ast.Node]*nodeSet { - set, ok := data[a] - if !ok || set == nil { - set = newNodeSet() - data[a] = set - } - set.Add(b) - return data -} - -type conflictReason struct { - Name string - Message interface{} // conflictReason || []conflictReason -} -type conflict struct { - Reason conflictReason - FieldsLeft []ast.Node - FieldsRight []ast.Node -} - -func sameArguments(args1 []*ast.Argument, args2 []*ast.Argument) bool { - if len(args1) != len(args2) { - return false - } - - for _, arg1 := range args1 { - arg1Name := "" - if arg1.Name != nil { - arg1Name = arg1.Name.Value - } - - var foundArgs2 *ast.Argument - for _, arg2 := range args2 { - arg2Name := "" - if arg2.Name != nil { - arg2Name = arg2.Name.Value - } - if arg1Name == arg2Name { - foundArgs2 = arg2 - } - break - } - if foundArgs2 == nil { - return false - } - if sameValue(arg1.Value, foundArgs2.Value) == false { - return false - } - } - - return true -} -func sameValue(value1 ast.Value, value2 ast.Value) bool { - if value1 == nil && value2 == nil { - return true - } - val1 := printer.Print(value1) - val2 := printer.Print(value2) - - return val1 == val2 -} - -func sameType(typeA, typeB Type) bool { - if typeA == typeB { - return true - } - - if typeA, ok := typeA.(*List); ok { - if typeB, ok := typeB.(*List); ok { - return sameType(typeA.OfType, typeB.OfType) - } - } - if typeA, ok := typeA.(*NonNull); ok { - if typeB, ok := typeB.(*NonNull); ok { - return sameType(typeA.OfType, typeB.OfType) - } - } - - return false -} - -// Two types conflict if both types could not apply to a value simultaneously. -// Composite types are ignored as their individual field types will be compared -// later recursively. However List and Non-Null types must match. -func doTypesConflict(type1 Output, type2 Output) bool { - if type1, ok := type1.(*List); ok { - if type2, ok := type2.(*List); ok { - return doTypesConflict(type1.OfType, type2.OfType) - } - return true - } - if type2, ok := type2.(*List); ok { - if type1, ok := type1.(*List); ok { - return doTypesConflict(type1.OfType, type2.OfType) - } - return true - } - if type1, ok := type1.(*NonNull); ok { - if type2, ok := type2.(*NonNull); ok { - return doTypesConflict(type1.OfType, type2.OfType) - } - return true - } - if type2, ok := type2.(*NonNull); ok { - if type1, ok := type1.(*NonNull); ok { - return doTypesConflict(type1.OfType, type2.OfType) - } - return true - } - if IsLeafType(type1) || IsLeafType(type2) { - return type1 != type2 - } - return false -} - -// getSubfieldMap Given two overlapping fields, produce the combined collection of subfields. 
-func getSubfieldMap(context *ValidationContext, ast1 *ast.Field, type1 Output, ast2 *ast.Field, type2 Output) map[string][]*fieldDefPair { - selectionSet1 := ast1.SelectionSet - selectionSet2 := ast2.SelectionSet - if selectionSet1 != nil && selectionSet2 != nil { - visitedFragmentNames := map[string]bool{} - subfieldMap := collectFieldASTsAndDefs( - context, - GetNamed(type1), - selectionSet1, - visitedFragmentNames, - nil, - ) - subfieldMap = collectFieldASTsAndDefs( - context, - GetNamed(type2), - selectionSet2, - visitedFragmentNames, - subfieldMap, - ) - return subfieldMap - } - return nil -} - -// subfieldConflicts Given a series of Conflicts which occurred between two sub-fields, generate a single Conflict. -func subfieldConflicts(conflicts []*conflict, responseName string, ast1 *ast.Field, ast2 *ast.Field) *conflict { - if len(conflicts) > 0 { - conflictReasons := []conflictReason{} - conflictFieldsLeft := []ast.Node{ast1} - conflictFieldsRight := []ast.Node{ast2} - for _, c := range conflicts { - conflictReasons = append(conflictReasons, c.Reason) - conflictFieldsLeft = append(conflictFieldsLeft, c.FieldsLeft...) - conflictFieldsRight = append(conflictFieldsRight, c.FieldsRight...) - } - - return &conflict{ - Reason: conflictReason{ - Name: responseName, - Message: conflictReasons, - }, - FieldsLeft: conflictFieldsLeft, - FieldsRight: conflictFieldsRight, - } - } - return nil -} - -// findConflicts Find all Conflicts within a collection of fields. -func findConflicts(context *ValidationContext, parentFieldsAreMutuallyExclusive bool, fieldMap map[string][]*fieldDefPair, comparedSet *pairSet) (conflicts []*conflict) { - - // ensure field traversal - orderedName := sort.StringSlice{} - for responseName := range fieldMap { - orderedName = append(orderedName, responseName) - } - orderedName.Sort() - - for _, responseName := range orderedName { - fields, _ := fieldMap[responseName] - for _, fieldA := range fields { - for _, fieldB := range fields { - c := findConflict(context, parentFieldsAreMutuallyExclusive, responseName, fieldA, fieldB, comparedSet) - if c != nil { - conflicts = append(conflicts, c) - } - } - } - } - return conflicts -} - -// findConflict Determines if there is a conflict between two particular fields. -func findConflict(context *ValidationContext, parentFieldsAreMutuallyExclusive bool, responseName string, field *fieldDefPair, field2 *fieldDefPair, comparedSet *pairSet) *conflict { - - parentType1 := field.ParentType - ast1 := field.Field - def1 := field.FieldDef - - parentType2 := field2.ParentType - ast2 := field2.Field - def2 := field2.FieldDef - - // Not a pair. - if ast1 == ast2 { - return nil - } - - // Memoize, do not report the same issue twice. - // Note: Two overlapping ASTs could be encountered both when - // `parentFieldsAreMutuallyExclusive` is true and is false, which could - // produce different results (when `true` being a subset of `false`). - // However we do not need to include this piece of information when - // memoizing since this rule visits leaf fields before their parent fields, - // ensuring that `parentFieldsAreMutuallyExclusive` is `false` the first - // time two overlapping fields are encountered, ensuring that the full - // set of validation rules are always checked when necessary. - if comparedSet.Has(ast1, ast2) { - return nil - } - comparedSet.Add(ast1, ast2) - - // The return type for each field. 
- var type1 Type - var type2 Type - if def1 != nil { - type1 = def1.Type - } - if def2 != nil { - type2 = def2.Type - } - - // If it is known that two fields could not possibly apply at the same - // time, due to the parent types, then it is safe to permit them to diverge - // in aliased field or arguments used as they will not present any ambiguity - // by differing. - // It is known that two parent types could never overlap if they are - // different Object types. Interface or Union types might overlap - if not - // in the current state of the schema, then perhaps in some future version, - // thus may not safely diverge. - _, isParentType1Object := parentType1.(*Object) - _, isParentType2Object := parentType2.(*Object) - fieldsAreMutuallyExclusive := parentFieldsAreMutuallyExclusive || parentType1 != parentType2 && isParentType1Object && isParentType2Object - - if !fieldsAreMutuallyExclusive { - // Two aliases must refer to the same field. - name1 := "" - name2 := "" - - if ast1.Name != nil { - name1 = ast1.Name.Value - } - if ast2.Name != nil { - name2 = ast2.Name.Value - } - if name1 != name2 { - return &conflict{ - Reason: conflictReason{ - Name: responseName, - Message: fmt.Sprintf(`%v and %v are different fields`, name1, name2), - }, - FieldsLeft: []ast.Node{ast1}, - FieldsRight: []ast.Node{ast2}, - } - } - - // Two field calls must have the same arguments. - if !sameArguments(ast1.Arguments, ast2.Arguments) { - return &conflict{ - Reason: conflictReason{ - Name: responseName, - Message: `they have differing arguments`, - }, - FieldsLeft: []ast.Node{ast1}, - FieldsRight: []ast.Node{ast2}, - } - } - } - - if type1 != nil && type2 != nil && doTypesConflict(type1, type2) { - return &conflict{ - Reason: conflictReason{ - Name: responseName, - Message: fmt.Sprintf(`they return conflicting types %v and %v`, type1, type2), - }, - FieldsLeft: []ast.Node{ast1}, - FieldsRight: []ast.Node{ast2}, - } - } - - subFieldMap := getSubfieldMap(context, ast1, type1, ast2, type2) - if subFieldMap != nil { - conflicts := findConflicts(context, fieldsAreMutuallyExclusive, subFieldMap, comparedSet) - return subfieldConflicts(conflicts, responseName, ast1, ast2) - } - - return nil -} - -// OverlappingFieldsCanBeMergedRule Overlapping fields can be merged -// -// A selection set is only valid if all fields (including spreading any -// fragments) either correspond to distinct response names or can be merged -// without ambiguity. -func OverlappingFieldsCanBeMergedRule(context *ValidationContext) *ValidationRuleInstance { - comparedSet := newPairSet() - - var reasonMessage func(message interface{}) string - reasonMessage = func(message interface{}) string { - switch reason := message.(type) { - case string: - return reason - case conflictReason: - return reasonMessage(reason.Message) - case []conflictReason: - messages := []string{} - for _, r := range reason { - messages = append(messages, fmt.Sprintf( - `subfields "%v" conflict because %v`, - r.Name, - reasonMessage(r.Message), - )) - } - return strings.Join(messages, " and ") - } - return "" - } - - visitorOpts := &visitor.VisitorOptions{ - KindFuncMap: map[string]visitor.NamedVisitFuncs{ - kinds.SelectionSet: { - // Note: we validate on the reverse traversal so deeper conflicts will be - // caught first, for correct calculation of mutual exclusivity and for - // clearer error messages. 
- Leave: func(p visitor.VisitFuncParams) (string, interface{}) { - if selectionSet, ok := p.Node.(*ast.SelectionSet); ok && selectionSet != nil { - parentType, _ := context.ParentType().(Named) - fieldMap := collectFieldASTsAndDefs( - context, - parentType, - selectionSet, - nil, - nil, - ) - conflicts := findConflicts(context, false, fieldMap, comparedSet) - if len(conflicts) > 0 { - for _, c := range conflicts { - responseName := c.Reason.Name - reason := c.Reason - reportError( - context, - fmt.Sprintf(`Fields "%v" conflict because %v. `+ - `Use different aliases on the fields to fetch both if this was intentional.`, - responseName, - reasonMessage(reason), - ), - append(c.FieldsLeft, c.FieldsRight...), - ) - } - return visitor.ActionNoChange, nil - } - } - return visitor.ActionNoChange, nil - }, - }, - }, - } - return &ValidationRuleInstance{ - VisitorOpts: visitorOpts, - } -} - func getFragmentType(context *ValidationContext, name string) Type { frag := context.Fragment(name) if frag == nil { diff --git a/rules_overlapping_fields_can_be_merged.go b/rules_overlapping_fields_can_be_merged.go new file mode 100644 index 00000000..f44849b5 --- /dev/null +++ b/rules_overlapping_fields_can_be_merged.go @@ -0,0 +1,706 @@ +package graphql + +import ( + "fmt" + "strings" + + "github.com/graphql-go/graphql/language/ast" + "github.com/graphql-go/graphql/language/kinds" + "github.com/graphql-go/graphql/language/printer" + "github.com/graphql-go/graphql/language/visitor" +) + +func fieldsConflictMessage(responseName string, reason conflictReason) string { + return fmt.Sprintf(`Fields "%v" conflict because %v. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, + responseName, + fieldsConflictReasonMessage(reason), + ) +} + +func fieldsConflictReasonMessage(message interface{}) string { + switch reason := message.(type) { + case string: + return reason + case conflictReason: + return fieldsConflictReasonMessage(reason.Message) + case []conflictReason: + messages := []string{} + for _, r := range reason { + messages = append(messages, fmt.Sprintf( + `subfields "%v" conflict because %v`, + r.Name, + fieldsConflictReasonMessage(r.Message), + )) + } + return strings.Join(messages, " and ") + } + return "" +} + +// OverlappingFieldsCanBeMergedRule Overlapping fields can be merged +// +// A selection set is only valid if all fields (including spreading any +// fragments) either correspond to distinct response names or can be merged +// without ambiguity. +func OverlappingFieldsCanBeMergedRule(context *ValidationContext) *ValidationRuleInstance { + + // A memoization for when two fragments are compared "between" each other for + // conflicts. Two fragments may be compared many times, so memoizing this can + // dramatically improve the performance of this validator. + comparedSet := newPairSet() + + // A cache for the "field map" and list of fragment names found in any given + // selection set. Selection sets may be asked for this information multiple + // times, so this improves the performance of this validator. 
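+	// The cache is keyed by the *ast.SelectionSet pointer, so a selection set
+	// reached through several fragment spreads is only collected once per
+	// validation run.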
+ cacheMap := map[*ast.SelectionSet]*fieldsAndFragmentNames{} + + visitorOpts := &visitor.VisitorOptions{ + KindFuncMap: map[string]visitor.NamedVisitFuncs{ + kinds.SelectionSet: { + Kind: func(p visitor.VisitFuncParams) (string, interface{}) { + if selectionSet, ok := p.Node.(*ast.SelectionSet); ok && selectionSet != nil { + parentType, _ := context.ParentType().(Named) + + rule := &overlappingFieldsCanBeMergedRule{ + context: context, + comparedSet: comparedSet, + cacheMap: cacheMap, + } + conflicts := rule.findConflictsWithinSelectionSet(parentType, selectionSet) + if len(conflicts) > 0 { + for _, c := range conflicts { + responseName := c.Reason.Name + reason := c.Reason + reportError( + context, + fieldsConflictMessage(responseName, reason), + append(c.FieldsLeft, c.FieldsRight...), + ) + } + return visitor.ActionNoChange, nil + } + } + return visitor.ActionNoChange, nil + }, + }, + }, + } + return &ValidationRuleInstance{ + VisitorOpts: visitorOpts, + } +} + +/** + * Algorithm: + * + * Conflicts occur when two fields exist in a query which will produce the same + * response name, but represent differing values, thus creating a conflict. + * The algorithm below finds all conflicts via making a series of comparisons + * between fields. In order to compare as few fields as possible, this makes + * a series of comparisons "within" sets of fields and "between" sets of fields. + * + * Given any selection set, a collection produces both a set of fields by + * also including all inline fragments, as well as a list of fragments + * referenced by fragment spreads. + * + * A) Each selection set represented in the document first compares "within" its + * collected set of fields, finding any conflicts between every pair of + * overlapping fields. + * Note: This is the *only time* that a the fields "within" a set are compared + * to each other. After this only fields "between" sets are compared. + * + * B) Also, if any fragment is referenced in a selection set, then a + * comparison is made "between" the original set of fields and the + * referenced fragment. + * + * C) Also, if multiple fragments are referenced, then comparisons + * are made "between" each referenced fragment. + * + * D) When comparing "between" a set of fields and a referenced fragment, first + * a comparison is made between each field in the original set of fields and + * each field in the the referenced set of fields. + * + * E) Also, if any fragment is referenced in the referenced selection set, + * then a comparison is made "between" the original set of fields and the + * referenced fragment (recursively referring to step D). + * + * F) When comparing "between" two fragments, first a comparison is made between + * each field in the first referenced set of fields and each field in the the + * second referenced set of fields. + * + * G) Also, any fragments referenced by the first must be compared to the + * second, and any fragments referenced by the second must be compared to the + * first (recursively referring to step F). + * + * H) When comparing two fields, if both have selection sets, then a comparison + * is made "between" both selection sets, first comparing the set of fields in + * the first selection set with the set of fields in the second. + * + * I) Also, if any fragment is referenced in either selection set, then a + * comparison is made "between" the other set of fields and the + * referenced fragment. 
+ * + * J) Also, if two fragments are referenced in both selection sets, then a + * comparison is made "between" the two fragments. + * + */ + +type overlappingFieldsCanBeMergedRule struct { + context *ValidationContext + + // A memoization for when two fragments are compared "between" each other for + // conflicts. Two fragments may be compared many times, so memoizing this can + // dramatically improve the performance of this validator. + comparedSet *pairSet + + // A cache for the "field map" and list of fragment names found in any given + // selection set. Selection sets may be asked for this information multiple + // times, so this improves the performance of this validator. + cacheMap map[*ast.SelectionSet]*fieldsAndFragmentNames +} + +// Find all conflicts found "within" a selection set, including those found +// via spreading in fragments. Called when visiting each SelectionSet in the +// GraphQL Document. +func (rule *overlappingFieldsCanBeMergedRule) findConflictsWithinSelectionSet(parentType Named, selectionSet *ast.SelectionSet) []conflict { + conflicts := []conflict{} + + fieldsInfo := rule.getFieldsAndFragmentNames(parentType, selectionSet) + + // (A) Find find all conflicts "within" the fields of this selection set. + // Note: this is the *only place* `collectConflictsWithin` is called. + conflicts = rule.collectConflictsWithin(conflicts, fieldsInfo) + + // (B) Then collect conflicts between these fields and those represented by + // each spread fragment name found. + for i := 0; i < len(fieldsInfo.fragmentNames); i++ { + + conflicts = rule.collectConflictsBetweenFieldsAndFragment(conflicts, false, fieldsInfo, fieldsInfo.fragmentNames[i]) + + // (C) Then compare this fragment with all other fragments found in this + // selection set to collect conflicts between fragments spread together. + // This compares each item in the list of fragment names to every other item + // in that same list (except for itself). + for k := i + 1; k < len(fieldsInfo.fragmentNames); k++ { + conflicts = rule.collectConflictsBetweenFragments(conflicts, false, fieldsInfo.fragmentNames[i], fieldsInfo.fragmentNames[k]) + } + } + return conflicts +} + +// Collect all conflicts found between a set of fields and a fragment reference +// including via spreading in any nested fragments. +func (rule *overlappingFieldsCanBeMergedRule) collectConflictsBetweenFieldsAndFragment(conflicts []conflict, areMutuallyExclusive bool, fieldsInfo *fieldsAndFragmentNames, fragmentName string) []conflict { + fragment := rule.context.Fragment(fragmentName) + if fragment == nil { + return conflicts + } + + fieldsInfo2 := rule.getReferencedFieldsAndFragmentNames(fragment) + + // (D) First collect any conflicts between the provided collection of fields + // and the collection of fields represented by the given fragment. + conflicts = rule.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsInfo, fieldsInfo2) + + // (E) Then collect any conflicts between the provided collection of fields + // and any fragment names found in the given fragment. + for _, fragmentName2 := range fieldsInfo2.fragmentNames { + conflicts = rule.collectConflictsBetweenFieldsAndFragment(conflicts, areMutuallyExclusive, fieldsInfo2, fragmentName2) + } + + return conflicts + +} + +// Collect all conflicts found between two fragments, including via spreading in +// any nested fragments. 
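+// Comparisons are memoized in comparedSet together with the
+// areMutuallyExclusive flag: a pair first compared as mutually exclusive is
+// compared again if it is later met in a non-exclusive context, because the
+// non-exclusive checks are a superset of the exclusive ones.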
+func (rule *overlappingFieldsCanBeMergedRule) collectConflictsBetweenFragments(conflicts []conflict, areMutuallyExclusive bool, fragmentName1 string, fragmentName2 string) []conflict { + fragment1 := rule.context.Fragment(fragmentName1) + fragment2 := rule.context.Fragment(fragmentName2) + + if fragment1 == nil || fragment2 == nil { + return conflicts + } + + // No need to compare a fragment to itself. + if fragment1 == fragment2 { + return conflicts + } + + // Memoize so two fragments are not compared for conflicts more than once. + if rule.comparedSet.Has(fragmentName1, fragmentName2, areMutuallyExclusive) { + return conflicts + } + rule.comparedSet.Add(fragmentName1, fragmentName2, areMutuallyExclusive) + + fieldsInfo1 := rule.getReferencedFieldsAndFragmentNames(fragment1) + fieldsInfo2 := rule.getReferencedFieldsAndFragmentNames(fragment2) + + // (F) First, collect all conflicts between these two collections of fields + // (not including any nested fragments). + conflicts = rule.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsInfo1, fieldsInfo2) + + // (G) Then collect conflicts between the first fragment and any nested + // fragments spread in the second fragment. + for _, innerFragmentName2 := range fieldsInfo2.fragmentNames { + conflicts = rule.collectConflictsBetweenFragments(conflicts, areMutuallyExclusive, fragmentName1, innerFragmentName2) + } + + // (G) Then collect conflicts between the second fragment and any nested + // fragments spread in the first fragment. + for _, innerFragmentName1 := range fieldsInfo1.fragmentNames { + conflicts = rule.collectConflictsBetweenFragments(conflicts, areMutuallyExclusive, innerFragmentName1, fragmentName2) + } + + return conflicts +} + +// Find all conflicts found between two selection sets, including those found +// via spreading in fragments. Called when determining if conflicts exist +// between the sub-fields of two overlapping fields. +func (rule *overlappingFieldsCanBeMergedRule) findConflictsBetweenSubSelectionSets(areMutuallyExclusive bool, parentType1 Named, selectionSet1 *ast.SelectionSet, parentType2 Named, selectionSet2 *ast.SelectionSet) []conflict { + conflicts := []conflict{} + + fieldsInfo1 := rule.getFieldsAndFragmentNames(parentType1, selectionSet1) + fieldsInfo2 := rule.getFieldsAndFragmentNames(parentType2, selectionSet2) + + // (H) First, collect all conflicts between these two collections of field. + conflicts = rule.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsInfo1, fieldsInfo2) + + // (I) Then collect conflicts between the first collection of fields and + // those referenced by each fragment name associated with the second. + for _, fragmentName2 := range fieldsInfo2.fragmentNames { + conflicts = rule.collectConflictsBetweenFieldsAndFragment(conflicts, areMutuallyExclusive, fieldsInfo1, fragmentName2) + } + + // (I) Then collect conflicts between the second collection of fields and + // those referenced by each fragment name associated with the first. + for _, fragmentName1 := range fieldsInfo1.fragmentNames { + conflicts = rule.collectConflictsBetweenFieldsAndFragment(conflicts, areMutuallyExclusive, fieldsInfo2, fragmentName1) + } + + // (J) Also collect conflicts between any fragment names by the first and + // fragment names by the second. This compares each item in the first set of + // names to each item in the second set of names. 
+ for _, fragmentName1 := range fieldsInfo1.fragmentNames { + for _, fragmentName2 := range fieldsInfo2.fragmentNames { + conflicts = rule.collectConflictsBetweenFragments(conflicts, areMutuallyExclusive, fragmentName1, fragmentName2) + } + } + return conflicts +} + +// Collect all Conflicts "within" one collection of fields. +func (rule *overlappingFieldsCanBeMergedRule) collectConflictsWithin(conflicts []conflict, fieldsInfo *fieldsAndFragmentNames) []conflict { + // A field map is a keyed collection, where each key represents a response + // name and the value at that key is a list of all fields which provide that + // response name. For every response name, if there are multiple fields, they + // must be compared to find a potential conflict. + for _, responseName := range fieldsInfo.fieldsOrder { + fields, ok := fieldsInfo.fieldMap[responseName] + if !ok { + continue + } + // This compares every field in the list to every other field in this list + // (except to itself). If the list only has one item, nothing needs to + // be compared. + if len(fields) <= 1 { + continue + } + for i := 0; i < len(fields); i++ { + for k := i + 1; k < len(fields); k++ { + // within one collection is never mutually exclusive + isMutuallyExclusive := false + conflict := rule.findConflict(isMutuallyExclusive, responseName, fields[i], fields[k]) + if conflict != nil { + conflicts = append(conflicts, *conflict) + } + } + } + } + return conflicts +} + +// Collect all Conflicts between two collections of fields. This is similar to, +// but different from the `collectConflictsWithin` function above. This check +// assumes that `collectConflictsWithin` has already been called on each +// provided collection of fields. This is true because this validator traverses +// each individual selection set. +func (rule *overlappingFieldsCanBeMergedRule) collectConflictsBetween(conflicts []conflict, parentFieldsAreMutuallyExclusive bool, + fieldsInfo1 *fieldsAndFragmentNames, + fieldsInfo2 *fieldsAndFragmentNames) []conflict { + // A field map is a keyed collection, where each key represents a response + // name and the value at that key is a list of all fields which provide that + // response name. For any response name which appears in both provided field + // maps, each field from the first field map must be compared to every field + // in the second field map to find potential conflicts. + for _, responseName := range fieldsInfo1.fieldsOrder { + fields1, ok1 := fieldsInfo1.fieldMap[responseName] + fields2, ok2 := fieldsInfo2.fieldMap[responseName] + if !ok1 || !ok2 { + continue + } + for i := 0; i < len(fields1); i++ { + for k := 0; k < len(fields2); k++ { + conflict := rule.findConflict(parentFieldsAreMutuallyExclusive, responseName, fields1[i], fields2[k]) + if conflict != nil { + conflicts = append(conflicts, *conflict) + } + } + } + } + return conflicts +} + +// findConflict Determines if there is a conflict between two particular fields. 
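+// Two fields sharing a response name conflict when they name different
+// underlying fields or pass differing arguments (only checked if the parent
+// types could overlap), when their return types conflict, or when any of
+// their sub-fields conflict. For example, with illustrative field names not
+// taken from this codebase, the following selection aliases two different
+// fields to the same response name and would be reported:
+//
+//	{
+//	  name: firstName
+//	  name: lastName
+//	}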
+func (rule *overlappingFieldsCanBeMergedRule) findConflict(parentFieldsAreMutuallyExclusive bool, responseName string, field *fieldDefPair, field2 *fieldDefPair) *conflict { + + parentType1 := field.ParentType + ast1 := field.Field + def1 := field.FieldDef + + parentType2 := field2.ParentType + ast2 := field2.Field + def2 := field2.FieldDef + + // If it is known that two fields could not possibly apply at the same + // time, due to the parent types, then it is safe to permit them to diverge + // in aliased field or arguments used as they will not present any ambiguity + // by differing. + // It is known that two parent types could never overlap if they are + // different Object types. Interface or Union types might overlap - if not + // in the current state of the schema, then perhaps in some future version, + // thus may not safely diverge. + _, isParentType1Object := parentType1.(*Object) + _, isParentType2Object := parentType2.(*Object) + areMutuallyExclusive := parentFieldsAreMutuallyExclusive || parentType1 != parentType2 && isParentType1Object && isParentType2Object + + // The return type for each field. + var type1 Type + var type2 Type + if def1 != nil { + type1 = def1.Type + } + if def2 != nil { + type2 = def2.Type + } + + if !areMutuallyExclusive { + // Two aliases must refer to the same field. + name1 := "" + name2 := "" + + if ast1.Name != nil { + name1 = ast1.Name.Value + } + if ast2.Name != nil { + name2 = ast2.Name.Value + } + if name1 != name2 { + return &conflict{ + Reason: conflictReason{ + Name: responseName, + Message: fmt.Sprintf(`%v and %v are different fields`, name1, name2), + }, + FieldsLeft: []ast.Node{ast1}, + FieldsRight: []ast.Node{ast2}, + } + } + + // Two field calls must have the same arguments. + if !sameArguments(ast1.Arguments, ast2.Arguments) { + return &conflict{ + Reason: conflictReason{ + Name: responseName, + Message: `they have differing arguments`, + }, + FieldsLeft: []ast.Node{ast1}, + FieldsRight: []ast.Node{ast2}, + } + } + } + + if type1 != nil && type2 != nil && doTypesConflict(type1, type2) { + return &conflict{ + Reason: conflictReason{ + Name: responseName, + Message: fmt.Sprintf(`they return conflicting types %v and %v`, type1, type2), + }, + FieldsLeft: []ast.Node{ast1}, + FieldsRight: []ast.Node{ast2}, + } + } + + // Collect and compare sub-fields. Use the same "visited fragment names" list + // for both collections so fields in a fragment reference are never + // compared to themselves. + selectionSet1 := ast1.SelectionSet + selectionSet2 := ast2.SelectionSet + if selectionSet1 != nil && selectionSet2 != nil { + conflicts := rule.findConflictsBetweenSubSelectionSets(areMutuallyExclusive, GetNamed(type1), selectionSet1, GetNamed(type2), selectionSet2) + return subfieldConflicts(conflicts, responseName, ast1, ast2) + } + return nil +} + +// Given a selection set, return the collection of fields (a mapping of response +// name to field ASTs and definitions) as well as a list of fragment names +// referenced via fragment spreads. 
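+// Results are stored in rule.cacheMap, and fieldsOrder records the order in
+// which response names were first seen, keeping later conflict iteration
+// deterministic despite Go's randomized map iteration.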
+func (rule *overlappingFieldsCanBeMergedRule) getFieldsAndFragmentNames(parentType Named, selectionSet *ast.SelectionSet) *fieldsAndFragmentNames { + if cached, ok := rule.cacheMap[selectionSet]; ok && cached != nil { + return cached + } + + astAndDefs := astAndDefCollection{} + fieldsOrder := []string{} + fragmentNames := []string{} + fragmentNamesMap := map[string]bool{} + + var collectFieldsAndFragmentNames func(parentType Named, selectionSet *ast.SelectionSet) + collectFieldsAndFragmentNames = func(parentType Named, selectionSet *ast.SelectionSet) { + for _, selection := range selectionSet.Selections { + switch selection := selection.(type) { + case *ast.Field: + fieldName := "" + if selection.Name != nil { + fieldName = selection.Name.Value + } + var fieldDef *FieldDefinition + if parentType, ok := parentType.(*Object); ok { + fieldDef, _ = parentType.Fields()[fieldName] + } + if parentType, ok := parentType.(*Interface); ok { + fieldDef, _ = parentType.Fields()[fieldName] + } + + responseName := fieldName + if selection.Alias != nil { + responseName = selection.Alias.Value + } + + fieldDefPairs, ok := astAndDefs[responseName] + if !ok || fieldDefPairs == nil { + fieldDefPairs = []*fieldDefPair{} + fieldsOrder = append(fieldsOrder, responseName) + } + + fieldDefPairs = append(fieldDefPairs, &fieldDefPair{ + ParentType: parentType, + Field: selection, + FieldDef: fieldDef, + }) + astAndDefs[responseName] = fieldDefPairs + case *ast.FragmentSpread: + fieldName := "" + if selection.Name != nil { + fieldName = selection.Name.Value + } + if val, ok := fragmentNamesMap[fieldName]; !ok || !val { + fragmentNamesMap[fieldName] = true + fragmentNames = append(fragmentNames, fieldName) + } + case *ast.InlineFragment: + typeCondition := selection.TypeCondition + inlineFragmentType := parentType + if typeCondition != nil { + ttype, err := typeFromAST(*(rule.context.Schema()), typeCondition) + if err == nil { + inlineFragmentType, _ = ttype.(Named) + } + } + collectFieldsAndFragmentNames(inlineFragmentType, selection.SelectionSet) + } + } + } + collectFieldsAndFragmentNames(parentType, selectionSet) + + cached := &fieldsAndFragmentNames{ + fieldMap: astAndDefs, + fieldsOrder: fieldsOrder, + fragmentNames: fragmentNames, + } + + rule.cacheMap[selectionSet] = cached + return cached +} + +func (rule *overlappingFieldsCanBeMergedRule) getReferencedFieldsAndFragmentNames(fragment *ast.FragmentDefinition) *fieldsAndFragmentNames { + // Short-circuit building a type from the AST if possible. + if cached, ok := rule.cacheMap[fragment.SelectionSet]; ok && cached != nil { + return cached + } + fragmentType, err := typeFromAST(*(rule.context.Schema()), fragment.TypeCondition) + if err != nil { + return nil + } + return rule.getFieldsAndFragmentNames(fragmentType, fragment.SelectionSet) +} + +type conflictReason struct { + Name string + Message interface{} // conflictReason || []conflictReason +} +type conflict struct { + Reason conflictReason + FieldsLeft []ast.Node + FieldsRight []ast.Node +} + +// a.k.a AstAndDef +type fieldDefPair struct { + ParentType Named + Field *ast.Field + FieldDef *FieldDefinition +} +type astAndDefCollection map[string][]*fieldDefPair + +// cache struct for fields, its order and fragments names +type fieldsAndFragmentNames struct { + fieldMap astAndDefCollection + fieldsOrder []string // stores the order of field names in fieldMap + fragmentNames []string +} + +// pairSet A way to keep track of pairs of things when the ordering of the pair does +// not matter. 
We do this by maintaining a sort of double adjacency sets. +type pairSet struct { + data map[string]map[string]bool +} + +func newPairSet() *pairSet { + return &pairSet{ + data: map[string]map[string]bool{}, + } +} +func (pair *pairSet) Has(a string, b string, areMutuallyExclusive bool) bool { + first, ok := pair.data[a] + if !ok || first == nil { + return false + } + res, ok := first[b] + if !ok { + return false + } + // areMutuallyExclusive being false is a superset of being true, + // hence if we want to know if this PairSet "has" these two with no + // exclusivity, we have to ensure it was added as such. + if !areMutuallyExclusive { + return res == false + } + return true +} +func (pair *pairSet) Add(a string, b string, areMutuallyExclusive bool) { + pair.data = pairSetAdd(pair.data, a, b, areMutuallyExclusive) + pair.data = pairSetAdd(pair.data, b, a, areMutuallyExclusive) +} +func pairSetAdd(data map[string]map[string]bool, a, b string, areMutuallyExclusive bool) map[string]map[string]bool { + set, ok := data[a] + if !ok || set == nil { + set = map[string]bool{} + } + set[b] = areMutuallyExclusive + data[a] = set + return data +} + +func sameArguments(args1 []*ast.Argument, args2 []*ast.Argument) bool { + if len(args1) != len(args2) { + return false + } + + for _, arg1 := range args1 { + arg1Name := "" + if arg1.Name != nil { + arg1Name = arg1.Name.Value + } + + var foundArgs2 *ast.Argument + for _, arg2 := range args2 { + arg2Name := "" + if arg2.Name != nil { + arg2Name = arg2.Name.Value + } + if arg1Name == arg2Name { + foundArgs2 = arg2 + } + break + } + if foundArgs2 == nil { + return false + } + if sameValue(arg1.Value, foundArgs2.Value) == false { + return false + } + } + + return true +} + +func sameValue(value1 ast.Value, value2 ast.Value) bool { + if value1 == nil && value2 == nil { + return true + } + val1 := printer.Print(value1) + val2 := printer.Print(value2) + + return val1 == val2 +} + +// Two types conflict if both types could not apply to a value simultaneously. +// Composite types are ignored as their individual field types will be compared +// later recursively. However List and Non-Null types must match. +func doTypesConflict(type1 Output, type2 Output) bool { + if type1, ok := type1.(*List); ok { + if type2, ok := type2.(*List); ok { + return doTypesConflict(type1.OfType, type2.OfType) + } + return true + } + if type2, ok := type2.(*List); ok { + if type1, ok := type1.(*List); ok { + return doTypesConflict(type1.OfType, type2.OfType) + } + return true + } + if type1, ok := type1.(*NonNull); ok { + if type2, ok := type2.(*NonNull); ok { + return doTypesConflict(type1.OfType, type2.OfType) + } + return true + } + if type2, ok := type2.(*NonNull); ok { + if type1, ok := type1.(*NonNull); ok { + return doTypesConflict(type1.OfType, type2.OfType) + } + return true + } + if IsLeafType(type1) || IsLeafType(type2) { + return type1 != type2 + } + return false +} + +// subfieldConflicts Given a series of Conflicts which occurred between two sub-fields, generate a single Conflict. +func subfieldConflicts(conflicts []conflict, responseName string, ast1 *ast.Field, ast2 *ast.Field) *conflict { + if len(conflicts) > 0 { + conflictReasons := []conflictReason{} + conflictFieldsLeft := []ast.Node{ast1} + conflictFieldsRight := []ast.Node{ast2} + for _, c := range conflicts { + conflictReasons = append(conflictReasons, c.Reason) + conflictFieldsLeft = append(conflictFieldsLeft, c.FieldsLeft...) + conflictFieldsRight = append(conflictFieldsRight, c.FieldsRight...) 
+ } + + return &conflict{ + Reason: conflictReason{ + Name: responseName, + Message: conflictReasons, + }, + FieldsLeft: conflictFieldsLeft, + FieldsRight: conflictFieldsRight, + } + } + return nil +} diff --git a/rules_overlapping_fields_can_be_merged_test.go b/rules_overlapping_fields_can_be_merged_test.go index 311d5c7f..7034a858 100644 --- a/rules_overlapping_fields_can_be_merged_test.go +++ b/rules_overlapping_fields_can_be_merged_test.go @@ -198,12 +198,12 @@ func TestValidate_OverlappingFieldsCanBeMerged_ReportsEachConflictOnce(t *testin testutil.RuleError(`Fields "x" conflict because a and b are different fields. `+ `Use different aliases on the fields to fetch both if this was intentional.`, 18, 9, 21, 9), - testutil.RuleError(`Fields "x" conflict because a and c are different fields. `+ + testutil.RuleError(`Fields "x" conflict because c and a are different fields. `+ `Use different aliases on the fields to fetch both if this was intentional.`, - 18, 9, 14, 11), - testutil.RuleError(`Fields "x" conflict because b and c are different fields. `+ + 14, 11, 18, 9), + testutil.RuleError(`Fields "x" conflict because c and b are different fields. `+ `Use different aliases on the fields to fetch both if this was intentional.`, - 21, 9, 14, 11), + 14, 11, 21, 9), }) } func TestValidate_OverlappingFieldsCanBeMerged_DeepConflict(t *testing.T) { @@ -302,6 +302,92 @@ func TestValidate_OverlappingFieldsCanBeMerged_ReportsDeepConflictToNearestCommo 8, 13), }) } +func TestValidate_OverlappingFieldsCanBeMerged_ReportsDeepConflictToNearestCommonAncestorInFragments(t *testing.T) { + testutil.ExpectFailsRule(t, graphql.OverlappingFieldsCanBeMergedRule, ` + { + field { + ...F + } + field { + ...F + } + } + fragment F on T { + deepField { + deeperField { + x: a + } + deeperField { + x: b + } + }, + deepField { + deeperField { + y + } + } + } + `, []gqlerrors.FormattedError{ + testutil.RuleError(`Fields "deeperField" conflict because subfields "x" conflict because `+ + `a and b are different fields. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, + 12, 11, + 13, 13, + 15, 11, + 16, 13), + }) +} +func TestValidate_OverlappingFieldsCanBeMerged_ReportsDeepConflictInNestedFragments(t *testing.T) { + testutil.ExpectFailsRule(t, graphql.OverlappingFieldsCanBeMergedRule, ` + { + field { + ...F + } + field { + ...I + } + } + fragment F on T { + x: a + ...G + } + fragment G on T { + y: c + } + fragment I on T { + y: d + ...J + } + fragment J on T { + x: b + } + `, []gqlerrors.FormattedError{ + testutil.RuleError(`Fields "field" conflict because `+ + `subfields "x" conflict because a and b are different fields and `+ + `subfields "y" conflict because c and d are different fields. 
`+ + `Use different aliases on the fields to fetch both if this was intentional.`, + 3, 9, + 11, 9, + 15, 9, + 6, 9, + 22, 9, + 18, 9), + }) +} +func TestValidate_OverlappingFieldsCanBeMerged_IgnoresUnknownFragments(t *testing.T) { + testutil.ExpectPassesRule(t, graphql.OverlappingFieldsCanBeMergedRule, ` + { + field + ...Unknown + ...Known + } + + fragment Known on T { + field + ...OtherUnknown + } + `) +} var someBoxInterface *graphql.Interface var stringBoxObject *graphql.Object @@ -551,6 +637,60 @@ func TestValidate_OverlappingFieldsCanBeMerged_ReturnTypesMustBeUnambiguous_Disa 8, 15), }) } +func TestValidate_OverlappingFieldsCanBeMerged_ReturnTypesMustBeUnambiguous_ReportsCorrectlyWhenANonExclusiveFollosAnExclusive(t *testing.T) { + testutil.ExpectFailsRuleWithSchema(t, &schema, graphql.OverlappingFieldsCanBeMergedRule, ` + { + someBox { + ... on IntBox { + deepBox { + ...X + } + } + } + someBox { + ... on StringBox { + deepBox { + ...Y + } + } + } + memoed: someBox { + ... on IntBox { + deepBox { + ...X + } + } + } + memoed: someBox { + ... on StringBox { + deepBox { + ...Y + } + } + } + other: someBox { + ...X + } + other: someBox { + ...Y + } + } + fragment X on SomeBox { + scalar + } + fragment Y on SomeBox { + scalar: unrelatedField + } + `, []gqlerrors.FormattedError{ + testutil.RuleError(`Fields "other" conflict because subfields "scalar" conflict `+ + `because scalar and unrelatedField are different fields. `+ + `Use different aliases on the fields to fetch both if this was intentional.`, + 31, 11, + 39, 11, + 34, 11, + 42, 11), + }) +} func TestValidate_OverlappingFieldsCanBeMerged_ReturnTypesMustBeUnambiguous_DisallowsDifferingReturnTypeNullabilityDespiteNoOverlap(t *testing.T) { testutil.ExpectFailsRuleWithSchema(t, &schema, graphql.OverlappingFieldsCanBeMergedRule, ` { @@ -724,14 +864,14 @@ func TestValidate_OverlappingFieldsCanBeMerged_ReturnTypesMustBeUnambiguous_Comp } `, []gqlerrors.FormattedError{ testutil.RuleError(`Fields "edges" conflict because subfields "node" conflict because subfields "id" conflict because `+ - `id and name are different fields. `+ + `name and id are different fields. 
`+ `Use different aliases on the fields to fetch both if this was intentional.`, - 14, 11, - 15, 13, - 16, 15, 5, 13, 6, 15, - 7, 17), + 7, 17, + 14, 11, + 15, 13, + 16, 15), }) } func TestValidate_OverlappingFieldsCanBeMerged_ReturnTypesMustBeUnambiguous_IgnoresUnknownTypes(t *testing.T) { From d060075331f52565f089b6f06a6964e8a43d1357 Mon Sep 17 00:00:00 2001 From: Matthias Einwag Date: Fri, 10 Jun 2016 15:08:25 +0200 Subject: [PATCH 26/34] Improve lexer performance by using a byte array as source --- gqlerrors/syntax.go | 5 +- graphql.go | 2 +- language/lexer/lexer.go | 227 +++++++++++++++++---------------- language/location/location.go | 4 +- language/parser/parser.go | 4 +- language/source/source.go | 2 +- testutil/rules_test_harness.go | 4 +- validator_test.go | 2 +- 8 files changed, 132 insertions(+), 118 deletions(-) diff --git a/gqlerrors/syntax.go b/gqlerrors/syntax.go index 4235a040..e3a3cc5a 100644 --- a/gqlerrors/syntax.go +++ b/gqlerrors/syntax.go @@ -4,10 +4,11 @@ import ( "fmt" "regexp" + "strings" + "github.com/graphql-go/graphql/language/ast" "github.com/graphql-go/graphql/language/location" "github.com/graphql-go/graphql/language/source" - "strings" ) func NewSyntaxError(s *source.Source, position int, description string) *Error { @@ -44,7 +45,7 @@ func highlightSourceAtLocation(s *source.Source, l location.SourceLocation) stri lineNum := fmt.Sprintf("%d", line) nextLineNum := fmt.Sprintf("%d", (line + 1)) padLen := len(nextLineNum) - lines := regexp.MustCompile("\r\n|[\n\r]").Split(s.Body, -1) + lines := regexp.MustCompile("\r\n|[\n\r]").Split(string(s.Body), -1) var highlight string if line >= 2 { highlight += fmt.Sprintf("%s: %s\n", lpad(padLen, prevLineNum), printLine(lines[line-2])) diff --git a/graphql.go b/graphql.go index 047535b4..af9dd65a 100644 --- a/graphql.go +++ b/graphql.go @@ -34,7 +34,7 @@ type Params struct { func Do(p Params) *Result { source := source.NewSource(&source.Source{ - Body: p.RequestString, + Body: []byte(p.RequestString), Name: "GraphQL request", }) AST, err := parser.Parse(parser.ParseParams{Source: source}) diff --git a/language/lexer/lexer.go b/language/lexer/lexer.go index 4c149c34..806f3546 100644 --- a/language/lexer/lexer.go +++ b/language/lexer/lexer.go @@ -1,7 +1,9 @@ package lexer import ( + "bytes" "fmt" + "unicode/utf8" "github.com/graphql-go/graphql/gqlerrors" "github.com/graphql-go/graphql/language/source" @@ -102,12 +104,6 @@ func Lex(s *source.Source) Lexer { } } -func runeStringValueAt(body string, start, end int) string { - // convert body string to runes, to handle unicode - bodyRunes := []rune(body) - return string(bodyRunes[start:end]) -} - // Reads an alphanumeric + underscore name from the source. 
// [_A-Za-z][_0-9A-Za-z]* func readName(source *source.Source, position int) Token { @@ -115,69 +111,69 @@ func readName(source *source.Source, position int) Token { bodyLength := len(body) end := position + 1 for { - code := charCodeAt(body, end) - if (end != bodyLength) && code != 0 && - (code == 95 || // _ - code >= 48 && code <= 57 || // 0-9 - code >= 65 && code <= 90 || // A-Z - code >= 97 && code <= 122) { // a-z - end++ + code, n := runeAt(body, end) + if (end != bodyLength) && + (code == '_' || // _ + code >= '0' && code <= '9' || // 0-9 + code >= 'A' && code <= 'Z' || // A-Z + code >= 'a' && code <= 'z') { // a-z + end += n continue } else { break } } - return makeToken(TokenKind[NAME], position, end, runeStringValueAt(body, position, end)) + return makeToken(TokenKind[NAME], position, end, string(body[position:end])) } // Reads a number token from the source file, either a float // or an int depending on whether a decimal point appears. // Int: -?(0|[1-9][0-9]*) // Float: -?(0|[1-9][0-9]*)(\.[0-9]+)?((E|e)(+|-)?[0-9]+)? -func readNumber(s *source.Source, start int, firstCode rune) (Token, error) { +func readNumber(s *source.Source, start int, firstCode rune, codeLength int) (Token, error) { code := firstCode body := s.Body position := start isFloat := false - if code == 45 { // - - position++ - code = charCodeAt(body, position) + if code == '-' { // - + position += codeLength + code, codeLength = runeAt(body, position) } - if code == 48 { // 0 - position++ - code = charCodeAt(body, position) - if code >= 48 && code <= 57 { + if code == '0' { // 0 + position += codeLength + code, codeLength = runeAt(body, position) + if code >= '0' && code <= '9' { description := fmt.Sprintf("Invalid number, unexpected digit after 0: %v.", printCharCode(code)) return Token{}, gqlerrors.NewSyntaxError(s, position, description) } } else { - p, err := readDigits(s, position, code) + p, err := readDigits(s, position, code, codeLength) if err != nil { return Token{}, err } position = p - code = charCodeAt(body, position) + code, codeLength = runeAt(body, position) } - if code == 46 { // . + if code == '.' { // . isFloat = true - position++ - code = charCodeAt(body, position) - p, err := readDigits(s, position, code) + position += codeLength + code, codeLength = runeAt(body, position) + p, err := readDigits(s, position, code, codeLength) if err != nil { return Token{}, err } position = p - code = charCodeAt(body, position) + code, codeLength = runeAt(body, position) } - if code == 69 || code == 101 { // E e + if code == 'E' || code == 'e' { // E e isFloat = true - position++ - code = charCodeAt(body, position) - if code == 43 || code == 45 { // + - - position++ - code = charCodeAt(body, position) + position += codeLength + code, codeLength = runeAt(body, position) + if code == '+' || code == '-' { // + - + position += codeLength + code, codeLength = runeAt(body, position) } - p, err := readDigits(s, position, code) + p, err := readDigits(s, position, code, codeLength) if err != nil { return Token{}, err } @@ -187,19 +183,20 @@ func readNumber(s *source.Source, start int, firstCode rune) (Token, error) { if isFloat { kind = TokenKind[FLOAT] } - return makeToken(kind, start, position, runeStringValueAt(body, start, position)), nil + + return makeToken(kind, start, position, string(body[start:position])), nil } // Returns the new position in the source after reading digits. 
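+// readDigits takes codeLength, the byte width of firstCode, so the scan
+// advances in bytes over the []byte body rather than by rune index.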
-func readDigits(s *source.Source, start int, firstCode rune) (int, error) { +func readDigits(s *source.Source, start int, firstCode rune, codeLength int) (int, error) { body := s.Body position := start code := firstCode - if code >= 48 && code <= 57 { // 0 - 9 + if code >= '0' && code <= '9' { // 0 - 9 for { - if code >= 48 && code <= 57 { // 0 - 9 - position++ - code = charCodeAt(body, position) + if code >= '0' && code <= '9' { // 0 - 9 + position += codeLength + code, codeLength = runeAt(body, position) continue } else { break @@ -217,68 +214,75 @@ func readString(s *source.Source, start int) (Token, error) { position := start + 1 chunkStart := position var code rune - var value string + var n int + var valueBuffer bytes.Buffer for { - code = charCodeAt(body, position) + code, n = runeAt(body, position) if position < len(body) && // not LineTerminator code != 0x000A && code != 0x000D && // not Quote (") - code != 34 { + code != '"' { // SourceCharacter if code < 0x0020 && code != 0x0009 { return Token{}, gqlerrors.NewSyntaxError(s, position, fmt.Sprintf(`Invalid character within String: %v.`, printCharCode(code))) } - position++ - if code == 92 { // \ - value += body[chunkStart : position-1] - code = charCodeAt(body, position) + position += n + if code == '\\' { // \ + valueBuffer.Write(body[chunkStart : position-1]) + code, n = runeAt(body, position) switch code { - case 34: - value += "\"" + case '"': + valueBuffer.WriteRune('"') break - case 47: - value += "\\/" + case '/': + valueBuffer.WriteRune('/') break - case 92: - value += "\\" + case '\\': + valueBuffer.WriteRune('\\') break - case 98: - value += "\b" + case 'b': + valueBuffer.WriteRune('\b') break - case 102: - value += "\f" + case 'f': + valueBuffer.WriteRune('\f') break - case 110: - value += "\n" + case 'n': + valueBuffer.WriteRune('\n') break - case 114: - value += "\r" + case 'r': + valueBuffer.WriteRune('\r') break - case 116: - value += "\t" + case 't': + valueBuffer.WriteRune('\t') break - case 117: // u + case 'u': + // Check if there are at least 4 bytes available + if len(body) <= position+4 { + return Token{}, gqlerrors.NewSyntaxError(s, position, + fmt.Sprintf("Invalid character escape sequence: "+ + "\\u%v", body[position+1:])) + } charCode := uniCharCode( - charCodeAt(body, position+1), - charCodeAt(body, position+2), - charCodeAt(body, position+3), - charCodeAt(body, position+4), + rune(body[position+1]), + rune(body[position+2]), + rune(body[position+3]), + rune(body[position+4]), ) if charCode < 0 { return Token{}, gqlerrors.NewSyntaxError(s, position, fmt.Sprintf("Invalid character escape sequence: "+ "\\u%v", body[position+1:position+5])) } - value += fmt.Sprintf("%c", charCode) + valueBuffer.WriteRune(charCode) position += 4 break default: return Token{}, gqlerrors.NewSyntaxError(s, position, fmt.Sprintf(`Invalid character escape sequence: \\%c.`, code)) } - position++ + position += n chunkStart = position } continue @@ -286,10 +290,12 @@ func readString(s *source.Source, start int) (Token, error) { break } } - if code != 34 { // quote (") + if code != '"' { // quote (") return Token{}, gqlerrors.NewSyntaxError(s, position, "Unterminated string.") } - value += runeStringValueAt(body, chunkStart, position) + stringContent := body[chunkStart:position] + valueBuffer.Write(stringContent) + value := valueBuffer.String() return makeToken(TokenKind[STRING], start, position+1, value), nil } @@ -344,7 +350,7 @@ func readToken(s *source.Source, fromPosition int) (Token, error) { if position >= bodyLength { return 
makeToken(TokenKind[EOF], position, position, ""), nil } - code := charCodeAt(body, position) + code, codeLength := runeAt(body, position) // SourceCharacter if code < 0x0020 && code != 0x0009 && code != 0x000A && code != 0x000D { @@ -353,66 +359,68 @@ func readToken(s *source.Source, fromPosition int) (Token, error) { switch code { // ! - case 33: + case '!': return makeToken(TokenKind[BANG], position, position+1, ""), nil // $ - case 36: + case '$': return makeToken(TokenKind[DOLLAR], position, position+1, ""), nil // ( - case 40: + case '(': return makeToken(TokenKind[PAREN_L], position, position+1, ""), nil // ) - case 41: + case ')': return makeToken(TokenKind[PAREN_R], position, position+1, ""), nil // . - case 46: - if charCodeAt(body, position+1) == 46 && charCodeAt(body, position+2) == 46 { + case '.': + next1, _ := runeAt(body, position+1) + next2, _ := runeAt(body, position+2) + if next1 == '.' && next2 == '.' { return makeToken(TokenKind[SPREAD], position, position+3, ""), nil } break // : - case 58: + case ':': return makeToken(TokenKind[COLON], position, position+1, ""), nil // = - case 61: + case '=': return makeToken(TokenKind[EQUALS], position, position+1, ""), nil // @ - case 64: + case '@': return makeToken(TokenKind[AT], position, position+1, ""), nil // [ - case 91: + case '[': return makeToken(TokenKind[BRACKET_L], position, position+1, ""), nil // ] - case 93: + case ']': return makeToken(TokenKind[BRACKET_R], position, position+1, ""), nil // { - case 123: + case '{': return makeToken(TokenKind[BRACE_L], position, position+1, ""), nil // | - case 124: + case '|': return makeToken(TokenKind[PIPE], position, position+1, ""), nil // } - case 125: + case '}': return makeToken(TokenKind[BRACE_R], position, position+1, ""), nil // A-Z - case 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, - 82, 83, 84, 85, 86, 87, 88, 89, 90: + case 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', + 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z': return readName(s, position), nil // _ // a-z - case 95, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, - 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122: + case '_', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', + 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z': return readName(s, position), nil // - // 0-9 - case 45, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57: - token, err := readNumber(s, position, code) + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + token, err := readNumber(s, position, code, codeLength) if err != nil { return token, err } return token, nil // " - case 34: + case '"': token, err := readString(s, position) if err != nil { return token, err @@ -423,24 +431,29 @@ func readToken(s *source.Source, fromPosition int) (Token, error) { return Token{}, gqlerrors.NewSyntaxError(s, position, description) } -func charCodeAt(body string, position int) rune { - r := []rune(body) - if len(r) > position { - return r[position] +func runeAt(body []byte, position int) (code rune, charWidth int) { + if len(body) <= position { + return 0, utf8.RuneError + } + + c := body[position] + if c < utf8.RuneSelf { + return rune(c), 1 } - return -1 + r, n := utf8.DecodeRune(body[position:]) + return r, n } // Reads from body starting at startPosition until it finds a non-whitespace // or commented character, then returns the position of that character for lexing. // lexing. 
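+// The loop below advances position by the decoded rune width n, so multi-byte
+// UTF-8 sequences in the []byte body are skipped whole rather than byte by byte.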
-func positionAfterWhitespace(body string, startPosition int) int { +func positionAfterWhitespace(body []byte, startPosition int) int { bodyLength := len(body) position := startPosition for { if position < bodyLength { - code := charCodeAt(body, position) + code, n := runeAt(body, position) // Skip Ignored if code == 0xFEFF || // BOM @@ -452,16 +465,16 @@ func positionAfterWhitespace(body string, startPosition int) int { code == 0x000D || // carriage return // Comma code == 0x002C { - position++ + position += n } else if code == 35 { // # - position++ + position += n for { - code := charCodeAt(body, position) + code, n := runeAt(body, position) if position < bodyLength && code != 0 && // SourceCharacter but not LineTerminator (code > 0x001F || code == 0x0009) && code != 0x000A && code != 0x000D { - position++ + position += n continue } else { break diff --git a/language/location/location.go b/language/location/location.go index ec667caa..04bbde6e 100644 --- a/language/location/location.go +++ b/language/location/location.go @@ -12,14 +12,14 @@ type SourceLocation struct { } func GetLocation(s *source.Source, position int) SourceLocation { - body := "" + body := []byte{} if s != nil { body = s.Body } line := 1 column := position + 1 lineRegexp := regexp.MustCompile("\r\n|[\n\r]") - matches := lineRegexp.FindAllStringIndex(body, -1) + matches := lineRegexp.FindAllIndex(body, -1) for _, match := range matches { matchIndex := match[0] if matchIndex < position { diff --git a/language/parser/parser.go b/language/parser/parser.go index d1e01c50..46ddf751 100644 --- a/language/parser/parser.go +++ b/language/parser/parser.go @@ -36,7 +36,7 @@ func Parse(p ParseParams) (*ast.Document, error) { sourceObj = p.Source.(*source.Source) default: body, _ := p.Source.(string) - sourceObj = source.NewSource(&source.Source{Body: body}) + sourceObj = source.NewSource(&source.Source{Body: []byte(body)}) } parser, err := makeParser(sourceObj, p.Options) if err != nil { @@ -58,7 +58,7 @@ func parseValue(p ParseParams) (ast.Value, error) { sourceObj = p.Source.(*source.Source) default: body, _ := p.Source.(string) - sourceObj = source.NewSource(&source.Source{Body: body}) + sourceObj = source.NewSource(&source.Source{Body: []byte(body)}) } parser, err := makeParser(sourceObj, p.Options) if err != nil { diff --git a/language/source/source.go b/language/source/source.go index c75192d4..f14af003 100644 --- a/language/source/source.go +++ b/language/source/source.go @@ -5,7 +5,7 @@ const ( ) type Source struct { - Body string + Body []byte Name string } diff --git a/testutil/rules_test_harness.go b/testutil/rules_test_harness.go index 035d8a61..809a1eff 100644 --- a/testutil/rules_test_harness.go +++ b/testutil/rules_test_harness.go @@ -481,7 +481,7 @@ func init() { } func expectValidRule(t *testing.T, schema *graphql.Schema, rules []graphql.ValidationRuleFn, queryString string) { source := source.NewSource(&source.Source{ - Body: queryString, + Body: []byte(queryString), }) AST, err := parser.Parse(parser.ParseParams{Source: source}) if err != nil { @@ -498,7 +498,7 @@ func expectValidRule(t *testing.T, schema *graphql.Schema, rules []graphql.Valid } func expectInvalidRule(t *testing.T, schema *graphql.Schema, rules []graphql.ValidationRuleFn, queryString string, expectedErrors []gqlerrors.FormattedError) { source := source.NewSource(&source.Source{ - Body: queryString, + Body: []byte(queryString), }) AST, err := parser.Parse(parser.ParseParams{Source: source}) if err != nil { diff --git a/validator_test.go 
b/validator_test.go index f7f19e57..67b7a3dd 100644 --- a/validator_test.go +++ b/validator_test.go @@ -15,7 +15,7 @@ import ( func expectValid(t *testing.T, schema *graphql.Schema, queryString string) { source := source.NewSource(&source.Source{ - Body: queryString, + Body: []byte(queryString), Name: "GraphQL request", }) AST, err := parser.Parse(parser.ParseParams{Source: source}) From db630ca86238de555fc62a808b76b986c3466f81 Mon Sep 17 00:00:00 2001 From: Hafiz Ismail Date: Sun, 12 Jun 2016 22:58:30 +0800 Subject: [PATCH 27/34] Fixes tests that was broken with enhancements made to the `lexer` with commit #137 - Increased coverage for `lexer` package to 100% - Added more tests to cover parsing unicode strings (#135) as well. - Fixed invalid test for `lexer` properly escaping slashes for TokenString type --- gqlerrors/syntax.go | 1 - language/lexer/lexer.go | 63 ++++---- language/lexer/lexer_test.go | 210 +++++++++++++++++++++----- language/parser/parser_test.go | 113 ++++++++++++-- language/parser/schema_parser_test.go | 4 +- 5 files changed, 312 insertions(+), 79 deletions(-) diff --git a/gqlerrors/syntax.go b/gqlerrors/syntax.go index e3a3cc5a..abad6ade 100644 --- a/gqlerrors/syntax.go +++ b/gqlerrors/syntax.go @@ -3,7 +3,6 @@ package gqlerrors import ( "fmt" "regexp" - "strings" "github.com/graphql-go/graphql/language/ast" diff --git a/language/lexer/lexer.go b/language/lexer/lexer.go index 806f3546..865c9d6e 100644 --- a/language/lexer/lexer.go +++ b/language/lexer/lexer.go @@ -83,10 +83,6 @@ type Token struct { Value string } -func (t *Token) String() string { - return fmt.Sprintf("%s", tokenDescription[t.Kind]) -} - type Lexer func(resetPosition int) (Token, error) func Lex(s *source.Source) Lexer { @@ -106,24 +102,28 @@ func Lex(s *source.Source) Lexer { // Reads an alphanumeric + underscore name from the source. 
// [_A-Za-z][_0-9A-Za-z]* -func readName(source *source.Source, position int) Token { +// position: Points to the byte position in the byte array +// runePosition: Points to the rune position in the byte array +func readName(source *source.Source, position, runePosition int) Token { body := source.Body bodyLength := len(body) - end := position + 1 + endByte := position + 1 + endRune := runePosition + 1 for { - code, n := runeAt(body, end) - if (end != bodyLength) && + code, _ := runeAt(body, endByte) + if (endByte != bodyLength) && (code == '_' || // _ code >= '0' && code <= '9' || // 0-9 code >= 'A' && code <= 'Z' || // A-Z code >= 'a' && code <= 'z') { // a-z - end += n + endByte++ + endRune++ continue } else { break } } - return makeToken(TokenKind[NAME], position, end, string(body[position:end])) + return makeToken(TokenKind[NAME], runePosition, endRune, string(body[position:endByte])) } // Reads a number token from the source file, either a float @@ -212,6 +212,7 @@ func readDigits(s *source.Source, start int, firstCode rune, codeLength int) (in func readString(s *source.Source, start int) (Token, error) { body := s.Body position := start + 1 + runePosition := start + 1 chunkStart := position var code rune var n int @@ -226,9 +227,10 @@ func readString(s *source.Source, start int) (Token, error) { // SourceCharacter if code < 0x0020 && code != 0x0009 { - return Token{}, gqlerrors.NewSyntaxError(s, position, fmt.Sprintf(`Invalid character within String: %v.`, printCharCode(code))) + return Token{}, gqlerrors.NewSyntaxError(s, runePosition, fmt.Sprintf(`Invalid character within String: %v.`, printCharCode(code))) } position += n + runePosition++ if code == '\\' { // \ valueBuffer.Write(body[chunkStart : position-1]) code, n = runeAt(body, position) @@ -260,9 +262,9 @@ func readString(s *source.Source, start int) (Token, error) { case 'u': // Check if there are at least 4 bytes available if len(body) <= position+4 { - return Token{}, gqlerrors.NewSyntaxError(s, position, + return Token{}, gqlerrors.NewSyntaxError(s, runePosition, fmt.Sprintf("Invalid character escape sequence: "+ - "\\u%v", body[position+1:])) + "\\u%v", string(body[position+1:]))) } charCode := uniCharCode( rune(body[position+1]), @@ -271,18 +273,20 @@ func readString(s *source.Source, start int) (Token, error) { rune(body[position+4]), ) if charCode < 0 { - return Token{}, gqlerrors.NewSyntaxError(s, position, + return Token{}, gqlerrors.NewSyntaxError(s, runePosition, fmt.Sprintf("Invalid character escape sequence: "+ - "\\u%v", body[position+1:position+5])) + "\\u%v", string(body[position+1:position+5]))) } valueBuffer.WriteRune(charCode) position += 4 + runePosition += 4 break default: - return Token{}, gqlerrors.NewSyntaxError(s, position, + return Token{}, gqlerrors.NewSyntaxError(s, runePosition, fmt.Sprintf(`Invalid character escape sequence: \\%c.`, code)) } position += n + runePosition++ chunkStart = position } continue @@ -291,7 +295,7 @@ func readString(s *source.Source, start int) (Token, error) { } } if code != '"' { // quote (") - return Token{}, gqlerrors.NewSyntaxError(s, position, "Unterminated string.") + return Token{}, gqlerrors.NewSyntaxError(s, runePosition, "Unterminated string.") } stringContent := body[chunkStart:position] valueBuffer.Write(stringContent) @@ -346,7 +350,7 @@ func printCharCode(code rune) string { func readToken(s *source.Source, fromPosition int) (Token, error) { body := s.Body bodyLength := len(body) - position := positionAfterWhitespace(body, fromPosition) + position, 
runePosition := positionAfterWhitespace(body, fromPosition) if position >= bodyLength { return makeToken(TokenKind[EOF], position, position, ""), nil } @@ -354,7 +358,7 @@ func readToken(s *source.Source, fromPosition int) (Token, error) { // SourceCharacter if code < 0x0020 && code != 0x0009 && code != 0x000A && code != 0x000D { - return Token{}, gqlerrors.NewSyntaxError(s, position, fmt.Sprintf(`Invalid character %v`, printCharCode(code))) + return Token{}, gqlerrors.NewSyntaxError(s, runePosition, fmt.Sprintf(`Invalid character %v`, printCharCode(code))) } switch code { @@ -405,12 +409,12 @@ func readToken(s *source.Source, fromPosition int) (Token, error) { // A-Z case 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z': - return readName(s, position), nil + return readName(s, position, runePosition), nil // _ // a-z case '_', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z': - return readName(s, position), nil + return readName(s, position, runePosition), nil // - // 0-9 case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': @@ -428,12 +432,14 @@ func readToken(s *source.Source, fromPosition int) (Token, error) { return token, nil } description := fmt.Sprintf("Unexpected character %v.", printCharCode(code)) - return Token{}, gqlerrors.NewSyntaxError(s, position, description) + return Token{}, gqlerrors.NewSyntaxError(s, runePosition, description) } +// Gets the rune from the byte array at given byte position and it's width in bytes func runeAt(body []byte, position int) (code rune, charWidth int) { if len(body) <= position { - return 0, utf8.RuneError + // + return -1, utf8.RuneError } c := body[position] @@ -448,9 +454,11 @@ func runeAt(body []byte, position int) (code rune, charWidth int) { // Reads from body starting at startPosition until it finds a non-whitespace // or commented character, then returns the position of that character for lexing. // lexing. 
-func positionAfterWhitespace(body []byte, startPosition int) int { +// Returns both byte positions and rune position +func positionAfterWhitespace(body []byte, startPosition int) (position int, runePosition int) { bodyLength := len(body) - position := startPosition + position = startPosition + runePosition = startPosition for { if position < bodyLength { code, n := runeAt(body, position) @@ -466,8 +474,10 @@ func positionAfterWhitespace(body []byte, startPosition int) int { // Comma code == 0x002C { position += n + runePosition++ } else if code == 35 { // # position += n + runePosition++ for { code, n := runeAt(body, position) if position < bodyLength && @@ -475,6 +485,7 @@ func positionAfterWhitespace(body []byte, startPosition int) int { // SourceCharacter but not LineTerminator (code > 0x001F || code == 0x0009) && code != 0x000A && code != 0x000D { position += n + runePosition++ continue } else { break @@ -488,7 +499,7 @@ func positionAfterWhitespace(body []byte, startPosition int) int { break } } - return position + return position, runePosition } func GetTokenDesc(token Token) string { diff --git a/language/lexer/lexer_test.go b/language/lexer/lexer_test.go index f18a8b66..ac59c846 100644 --- a/language/lexer/lexer_test.go +++ b/language/lexer/lexer_test.go @@ -13,10 +13,57 @@ type Test struct { } func createSource(body string) *source.Source { - return source.NewSource(&source.Source{Body: body}) + return source.NewSource(&source.Source{Body: []byte(body)}) } -func TestDisallowsUncommonControlCharacters(t *testing.T) { +func TestLexer_GetTokenDesc(t *testing.T) { + expected := `Name "foo"` + tokenDescription := GetTokenDesc(Token{ + Kind: TokenKind[NAME], + Start: 2, + End: 5, + Value: "foo", + }) + if expected != tokenDescription { + t.Errorf("Expected %v, got %v", expected, tokenDescription) + } + + expected = `Name` + tokenDescription = GetTokenDesc(Token{ + Kind: TokenKind[NAME], + Start: 0, + End: 0, + Value: "", + }) + if expected != tokenDescription { + t.Errorf("Expected %v, got %v", expected, tokenDescription) + } + + expected = `String "foo"` + tokenDescription = GetTokenDesc(Token{ + Kind: TokenKind[STRING], + Start: 2, + End: 5, + Value: "foo", + }) + if expected != tokenDescription { + t.Errorf("Expected %v, got %v", expected, tokenDescription) + } + + expected = `String` + tokenDescription = GetTokenDesc(Token{ + Kind: TokenKind[STRING], + Start: 0, + End: 0, + Value: "", + }) + if expected != tokenDescription { + t.Errorf("Expected %v, got %v", expected, tokenDescription) + } + +} + +func TestLexer_DisallowsUncommonControlCharacters(t *testing.T) { tests := []Test{ { Body: "\u0007", @@ -30,15 +77,15 @@ func TestDisallowsUncommonControlCharacters(t *testing.T) { for _, test := range tests { _, err := Lex(createSource(test.Body))(0) if err == nil { - t.Fatalf("unexpected nil error\nexpected:\n%v\n\ngot:\n%v", test.Expected, err) + t.Errorf("unexpected nil error\nexpected:\n%v\n\ngot:\n%v", test.Expected, err) } if err.Error() != test.Expected { - t.Fatalf("unexpected error.\nexpected:\n%v\n\ngot:\n%v", test.Expected, err.Error()) + t.Errorf("unexpected error.\nexpected:\n%v\n\ngot:\n%v", test.Expected, err.Error()) } } } -func TestAcceptsBOMHeader(t *testing.T) { +func TestLexer_AcceptsBOMHeader(t *testing.T) { tests := []Test{ { Body: "\uFEFF foo", @@ -51,17 +98,17 @@ func TestAcceptsBOMHeader(t *testing.T) { }, } for _, test := range tests { - token, err := Lex(&source.Source{Body: test.Body})(0) + token, err := Lex(&source.Source{Body: []byte(test.Body)})(0) if err 
!= nil { - t.Fatalf("unexpected error: %v", err) + t.Errorf("unexpected error: %v", err) } if !reflect.DeepEqual(token, test.Expected) { - t.Fatalf("unexpected token, expected: %v, got: %v", test.Expected, token) + t.Errorf("unexpected token, expected: %v, got: %v", test.Expected, token) } } } -func TestSkipsWhiteSpace(t *testing.T) { +func TestLexer_SkipsWhiteSpace(t *testing.T) { tests := []Test{ { Body: ` @@ -97,19 +144,28 @@ func TestSkipsWhiteSpace(t *testing.T) { Value: "foo", }, }, + { + Body: ``, + Expected: Token{ + Kind: TokenKind[EOF], + Start: 0, + End: 0, + Value: "", + }, + }, } for _, test := range tests { - token, err := Lex(&source.Source{Body: test.Body})(0) + token, err := Lex(&source.Source{Body: []byte(test.Body)})(0) if err != nil { - t.Fatalf("unexpected error: %v", err) + t.Errorf("unexpected error: %v", err) } if !reflect.DeepEqual(token, test.Expected) { - t.Fatalf("unexpected token, expected: %v, got: %v, body: %s", test.Expected, token, test.Body) + t.Errorf("unexpected token, expected: %v, got: %v, body: %s", test.Expected, token, test.Body) } } } -func TestErrorsRespectWhitespace(t *testing.T) { +func TestLexer_ErrorsRespectWhitespace(t *testing.T) { body := ` ? @@ -125,7 +181,39 @@ func TestErrorsRespectWhitespace(t *testing.T) { } } -func TestLexesStrings(t *testing.T) { +func TestLexer_LexesNames(t *testing.T) { + tests := []Test{ + { + Body: "simple", + Expected: Token{ + Kind: TokenKind[NAME], + Start: 0, + End: 6, + Value: "simple", + }, + }, + { + Body: "Capital", + Expected: Token{ + Kind: TokenKind[NAME], + Start: 0, + End: 7, + Value: "Capital", + }, + }, + } + for _, test := range tests { + token, err := Lex(&source.Source{Body: []byte(test.Body)})(0) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + if !reflect.DeepEqual(token, test.Expected) { + t.Errorf("unexpected token, expected: %v, got: %v", test.Expected, token) + } + } +} + +func TestLexer_LexesStrings(t *testing.T) { tests := []Test{ { Body: "\"simple\"", @@ -169,7 +257,7 @@ func TestLexesStrings(t *testing.T) { Kind: TokenKind[STRING], Start: 0, End: 15, - Value: "slashes \\ \\/", + Value: "slashes \\ /", }, }, { @@ -181,19 +269,46 @@ func TestLexesStrings(t *testing.T) { Value: "unicode \u1234\u5678\u90AB\uCDEF", }, }, + { + Body: "\"unicode фы世界\"", + Expected: Token{ + Kind: TokenKind[STRING], + Start: 0, + End: 20, + Value: "unicode фы世界", + }, + }, + { + Body: "\"фы世界\"", + Expected: Token{ + Kind: TokenKind[STRING], + Start: 0, + End: 12, + Value: "фы世界", + }, + }, + { + Body: "\"Has a фы世界 multi-byte character.\"", + Expected: Token{ + Kind: TokenKind[STRING], + Start: 0, + End: 40, + Value: "Has a фы世界 multi-byte character.", + }, + }, } for _, test := range tests { - token, err := Lex(&source.Source{Body: test.Body})(0) + token, err := Lex(&source.Source{Body: []byte(test.Body)})(0) if err != nil { - t.Fatalf("unexpected error: %v", err) + t.Errorf("unexpected error: %v", err) } if !reflect.DeepEqual(token, test.Expected) { - t.Fatalf("unexpected token, expected: %v, got: %v", test.Expected, token) + t.Errorf("unexpected token, expected: %v, got: %v", test.Expected, token) } } } -func TestLexReportsUsefulStringErrors(t *testing.T) { +func TestLexer_ReportsUsefulStringErrors(t *testing.T) { tests := []Test{ { Body: "\"", @@ -299,21 +414,40 @@ func TestLexReportsUsefulStringErrors(t *testing.T) { 1: "bad \uXXXF esc" ^ +`, + }, + { + Body: "\"bad \\u123", + Expected: `Syntax Error GraphQL (1:7) Invalid character escape sequence: \u123 + +1: "bad \u123 + ^ +`, + }, + { 
+ // some unicode chars take more than one column of text + // current implementation does not handle this + Body: "\"bфы世ыы𠱸d \\uXXXF esc\"", + Expected: `Syntax Error GraphQL (1:12) Invalid character escape sequence: \uXXXF + +1: "bфы世ыы𠱸d \uXXXF esc" + ^ `, }, } for _, test := range tests { _, err := Lex(createSource(test.Body))(0) if err == nil { - t.Fatalf("unexpected nil error\nexpected:\n%v\n\ngot:\n%v", test.Expected, err) + t.Errorf("unexpected nil error\nexpected:\n%v\n\ngot:\n%v", test.Expected, err) } + if err.Error() != test.Expected { - t.Fatalf("unexpected error.\nexpected:\n%v\n\ngot:\n%v", test.Expected, err.Error()) + t.Errorf("unexpected error.\nexpected:\n%v\n\ngot:\n%v", test.Expected, err.Error()) } } } -func TestLexesNumbers(t *testing.T) { +func TestLexer_LexesNumbers(t *testing.T) { tests := []Test{ { Body: "4", @@ -463,15 +597,15 @@ func TestLexesNumbers(t *testing.T) { for _, test := range tests { token, err := Lex(createSource(test.Body))(0) if err != nil { - t.Fatalf("unexpected error: %v, test: %s", err, test) + t.Errorf("unexpected error: %v, test: %s", err, test) } if !reflect.DeepEqual(token, test.Expected) { - t.Fatalf("unexpected token, expected: %v, got: %v, test: %v", test.Expected, token, test) + t.Errorf("unexpected token, expected: %v, got: %v, test: %v", test.Expected, token, test) } } } -func TestLexReportsUsefulNumbeErrors(t *testing.T) { +func TestLexer_ReportsUsefulNumberErrors(t *testing.T) { tests := []Test{ { Body: "00", @@ -542,15 +676,15 @@ func TestLexReportsUsefulNumbeErrors(t *testing.T) { for _, test := range tests { _, err := Lex(createSource(test.Body))(0) if err == nil { - t.Fatalf("unexpected nil error\nexpected:\n%v\n\ngot:\n%v", test.Expected, err) + t.Errorf("unexpected nil error\nexpected:\n%v\n\ngot:\n%v", test.Expected, err) } if err.Error() != test.Expected { - t.Fatalf("unexpected error.\nexpected:\n%v\n\ngot:\n%v", test.Expected, err.Error()) + t.Errorf("unexpected error.\nexpected:\n%v\n\ngot:\n%v", test.Expected, err.Error()) } } } -func TestLexesPunctuation(t *testing.T) { +func TestLexer_LexesPunctuation(t *testing.T) { tests := []Test{ { Body: "!", @@ -673,15 +807,15 @@ func TestLexesPunctuation(t *testing.T) { for _, test := range tests { token, err := Lex(createSource(test.Body))(0) if err != nil { - t.Fatalf("unexpected error :%v, test: %v", err, test) + t.Errorf("unexpected error :%v, test: %v", err, test) } if !reflect.DeepEqual(token, test.Expected) { - t.Fatalf("unexpected token, expected: %v, got: %v, test: %v", test.Expected, token, test) + t.Errorf("unexpected token, expected: %v, got: %v, test: %v", test.Expected, token, test) } } } -func TestLexReportsUsefulUnknownCharacterError(t *testing.T) { +func TestLexer_ReportsUsefulUnknownCharacterError(t *testing.T) { tests := []Test{ { Body: "..", @@ -713,21 +847,29 @@ func TestLexReportsUsefulUnknownCharacterError(t *testing.T) { 1: ※ ^ +`, + }, + { + Body: "ф", + Expected: `Syntax Error GraphQL (1:1) Unexpected character "\\u0444". 
+ +1: ф + ^ `, }, } for _, test := range tests { _, err := Lex(createSource(test.Body))(0) if err == nil { - t.Fatalf("unexpected nil error\nexpected:\n%v\n\ngot:\n%v", test.Expected, err) + t.Errorf("unexpected nil error\nexpected:\n%v\n\ngot:\n%v", test.Expected, err) } if err.Error() != test.Expected { - t.Fatalf("unexpected error.\nexpected:\n%v\n\ngot:\n%v", test.Expected, err.Error()) + t.Errorf("unexpected error.\nexpected:\n%v\n\ngot:\n%v", test.Expected, err.Error()) } } } -func TestLexRerportsUsefulInformationForDashesInNames(t *testing.T) { +func TestLexer_ReportsUsefulInformationForDashesInNames(t *testing.T) { q := "a-b" lexer := Lex(createSource(q)) firstToken, err := lexer(0) diff --git a/language/parser/parser_test.go b/language/parser/parser_test.go index f8697310..e2198f22 100644 --- a/language/parser/parser_test.go +++ b/language/parser/parser_test.go @@ -17,7 +17,7 @@ import ( func TestBadToken(t *testing.T) { _, err := Parse(ParseParams{ Source: &source.Source{ - Body: "query _ {\n me {\n id`\n }\n}", + Body: []byte("query _ {\n me {\n id`\n }\n}"), Name: "GraphQL", }, }) @@ -137,7 +137,10 @@ fragment MissingOn Type func TestParseProvidesUsefulErrorsWhenUsingSource(t *testing.T) { test := errorMessageTest{ - source.NewSource(&source.Source{Body: "query", Name: "MyQuery.graphql"}), + source.NewSource(&source.Source{ + Body: []byte("query"), + Name: "MyQuery.graphql", + }), `Syntax Error MyQuery.graphql (1:6) Expected {, found EOF`, false, } @@ -189,7 +192,7 @@ func TestDoesNotAllowNullAsValue(t *testing.T) { testErrorMessage(t, test) } -func TestParsesMultiByteCharacters(t *testing.T) { +func TestParsesMultiByteCharacters_Unicode(t *testing.T) { doc := ` # This comment has a \u0A0A multi-byte character. @@ -266,6 +269,83 @@ func TestParsesMultiByteCharacters(t *testing.T) { } } +func TestParsesMultiByteCharacters_UnicodeText(t *testing.T) { + + doc := ` + # This comment has a фы世界 multi-byte character. 
+ { field(arg: "Has a фы世界 multi-byte character.") } + ` + astDoc := parse(t, doc) + + expectedASTDoc := ast.NewDocument(&ast.Document{ + Loc: ast.NewLocation(&ast.Location{ + Start: 67, + End: 121, + }), + Definitions: []ast.Node{ + ast.NewOperationDefinition(&ast.OperationDefinition{ + Loc: ast.NewLocation(&ast.Location{ + Start: 67, + End: 119, + }), + Operation: "query", + SelectionSet: ast.NewSelectionSet(&ast.SelectionSet{ + Loc: ast.NewLocation(&ast.Location{ + Start: 67, + End: 119, + }), + Selections: []ast.Selection{ + ast.NewField(&ast.Field{ + Loc: ast.NewLocation(&ast.Location{ + Start: 67, + End: 117, + }), + Name: ast.NewName(&ast.Name{ + Loc: ast.NewLocation(&ast.Location{ + Start: 69, + End: 74, + }), + Value: "field", + }), + Arguments: []*ast.Argument{ + ast.NewArgument(&ast.Argument{ + Loc: ast.NewLocation(&ast.Location{ + Start: 75, + End: 116, + }), + Name: ast.NewName(&ast.Name{ + + Loc: ast.NewLocation(&ast.Location{ + Start: 75, + End: 78, + }), + Value: "arg", + }), + Value: ast.NewStringValue(&ast.StringValue{ + + Loc: ast.NewLocation(&ast.Location{ + Start: 80, + End: 116, + }), + Value: "Has a фы世界 multi-byte character.", + }), + }), + }, + }), + }, + }), + }), + }, + }) + + astDocQuery := printer.Print(astDoc) + expectedASTDocQuery := printer.Print(expectedASTDoc) + + if !reflect.DeepEqual(astDocQuery, expectedASTDocQuery) { + t.Fatalf("unexpected document, expected: %v, got: %v", astDocQuery, expectedASTDocQuery) + } +} + func TestParsesKitchenSink(t *testing.T) { b, err := ioutil.ReadFile("../../kitchen-sink.graphql") if err != nil { @@ -309,18 +389,17 @@ func TestAllowsNonKeywordsAnywhereNameIsAllowed(t *testing.T) { } } -// -//func TestParsesExperimentalSubscriptionFeature(t *testing.T) { -// source := ` -// subscription Foo { -// subscriptionField -// } -// ` -// _, err := Parse(ParseParams{Source: source}) -// if err != nil { -// t.Fatalf("unexpected error: %v", err) -// } -//} +func TestParsesExperimentalSubscriptionFeature(t *testing.T) { + source := ` + subscription Foo { + subscriptionField + } + ` + _, err := Parse(ParseParams{Source: source}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } +} func TestParsesAnonymousMutationOperations(t *testing.T) { source := ` @@ -378,7 +457,9 @@ func TestParseCreatesAst(t *testing.T) { } } ` - source := source.NewSource(&source.Source{Body: body}) + source := source.NewSource(&source.Source{ + Body: []byte(body), + }) document, err := Parse( ParseParams{ Source: source, diff --git a/language/parser/schema_parser_test.go b/language/parser/schema_parser_test.go index ce6e552c..510c7d26 100644 --- a/language/parser/schema_parser_test.go +++ b/language/parser/schema_parser_test.go @@ -739,10 +739,10 @@ input Hello { `, Nodes: []ast.Node{}, Source: &source.Source{ - Body: ` + Body: []byte(` input Hello { world(foo: Int): String -}`, +}`), Name: "GraphQL", }, Positions: []int{22}, From 5315cd2881badd526ce61eda1f115ff3dc6897a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Chris=20Ram=C3=B3n?= Date: Wed, 28 Sep 2016 08:52:58 -0500 Subject: [PATCH 28/34] README: add Documentation section This changes adds new `Documentation` section within the README.md and adds the first doc link to lib's godoc. --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index 981bf30b..2aade5da 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,10 @@ A *work-in-progress* implementation of GraphQL for Go. 
+### Documentation + +godoc: https://godoc.org/github.com/graphql-go/graphql + ### Getting Started To install the library, run: From bd9f6e8e0f843423b8634e27331ef9bc84bc2da7 Mon Sep 17 00:00:00 2001 From: irfan sharif Date: Sun, 30 Oct 2016 21:04:20 -0400 Subject: [PATCH 29/34] Minor fix to stop `go vet` from complaining --- language/parser/parser_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/language/parser/parser_test.go b/language/parser/parser_test.go index e2198f22..86116db1 100644 --- a/language/parser/parser_test.go +++ b/language/parser/parser_test.go @@ -97,7 +97,7 @@ func TestParseProvidesUsefulErrors(t *testing.T) { ^ `, Positions: []int{1}, - Locations: []location.SourceLocation{{1, 2}}, + Locations: []location.SourceLocation{{Line: 1, Column: 2}}, } checkError(t, err, expectedError) From 3a2eb83733c96c98113e3e9bb5eab11043f2abc6 Mon Sep 17 00:00:00 2001 From: Leo Correa Date: Sun, 13 Nov 2016 09:38:55 +0700 Subject: [PATCH 30/34] Add 1.7 and tip to build matrix for Go --- .travis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index 5fcef0e5..88ef0874 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,6 +2,8 @@ language: go go: - 1.4 + - 1.7 + - tip before_install: - go get github.com/axw/gocov/gocov From 77541969c9851ae38a7612e776db9af61e73c05a Mon Sep 17 00:00:00 2001 From: irfan sharif Date: Sun, 30 Oct 2016 21:04:20 -0400 Subject: [PATCH 31/34] Minor fix to stop `go vet` from complaining --- language/parser/parser_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/language/parser/parser_test.go b/language/parser/parser_test.go index e2198f22..86116db1 100644 --- a/language/parser/parser_test.go +++ b/language/parser/parser_test.go @@ -97,7 +97,7 @@ func TestParseProvidesUsefulErrors(t *testing.T) { ^ `, Positions: []int{1}, - Locations: []location.SourceLocation{{1, 2}}, + Locations: []location.SourceLocation{{Line: 1, Column: 2}}, } checkError(t, err, expectedError) From 2ed0252b1f5f2707507a6bfbf0d98ebf3b1e1594 Mon Sep 17 00:00:00 2001 From: Leo Correa Date: Sun, 13 Nov 2016 10:11:19 +0700 Subject: [PATCH 32/34] Fix ObjectConfig duplicate json tag. Name had a 'description' json tag which was duplicate --- definition.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/definition.go b/definition.go index a3b03d22..f26f3002 100644 --- a/definition.go +++ b/definition.go @@ -389,7 +389,7 @@ type IsTypeOfFn func(p IsTypeOfParams) bool type InterfacesThunk func() []*Interface type ObjectConfig struct { - Name string `json:"description"` + Name string `json:"name"` Interfaces interface{} `json:"interfaces"` Fields interface{} `json:"fields"` IsTypeOf IsTypeOfFn `json:"isTypeOf"` From 547c31fd2f58c48ae6a23428369987721c11199a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Chris=20Ram=C3=B3n?= Date: Tue, 29 Nov 2016 19:46:52 -0500 Subject: [PATCH 33/34] executor: add graphql tag This changes adds support for `graphql` tags on structs. Very useful when struct fields name does not match the fields of an object types. 
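
For illustration, with this change a resolver can return a struct whose exported
field name differs from the GraphQL field name and rely on a `graphql` tag
instead of (or alongside) a `json` tag. A minimal sketch follows; the "User"
type and field names here are hypothetical and not part of this patch (the new
TestGraphqlTag below exercises the real behaviour end to end):

    // Object type whose field name ("fullName") does not match the Go
    // struct field name ("FullName").
    userType := graphql.NewObject(graphql.ObjectConfig{
        Name: "User",
        Fields: graphql.Fields{
            "fullName": &graphql.Field{Type: graphql.String},
        },
    })
    userField := &graphql.Field{
        Type: userType,
        Resolve: func(p graphql.ResolveParams) (interface{}, error) {
            // defaultResolveFn matches p.Info.FieldName ("fullName")
            // against the graphql tag, even though the Go field is
            // FullName and the json tag names it differently.
            return struct {
                FullName string `graphql:"fullName" json:"full_name"`
            }{FullName: "Ada Lovelace"}, nil
        },
    }
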
--- executor.go | 23 +++++++++++++---------- executor_test.go | 47 +++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 60 insertions(+), 10 deletions(-) diff --git a/executor.go b/executor.go index b03c8a01..bf093f94 100644 --- a/executor.go +++ b/executor.go @@ -800,10 +800,6 @@ func defaultResolveFn(p ResolveParams) (interface{}, error) { return nil, nil } if sourceVal.Type().Kind() == reflect.Struct { - // find field based on struct's json tag - // we could potentially create a custom `graphql` tag, but its unnecessary at this point - // since graphql speaks to client in a json-like way anyway - // so json tags are a good way to start with for i := 0; i < sourceVal.NumField(); i++ { valueField := sourceVal.Field(i) typeField := sourceVal.Type().Field(i) @@ -812,15 +808,22 @@ func defaultResolveFn(p ResolveParams) (interface{}, error) { return valueField.Interface(), nil } tag := typeField.Tag - jsonTag := tag.Get("json") - jsonOptions := strings.Split(jsonTag, ",") - if len(jsonOptions) == 0 { - continue + checkTag := func(tagName string) bool { + t := tag.Get(tagName) + tOptions := strings.Split(t, ",") + if len(tOptions) == 0 { + return false + } + if tOptions[0] != p.Info.FieldName { + return false + } + return true } - if jsonOptions[0] != p.Info.FieldName { + if checkTag("json") || checkTag("graphql") { + return valueField.Interface(), nil + } else { continue } - return valueField.Interface(), nil } return nil, nil } diff --git a/executor_test.go b/executor_test.go index a8cbecf3..09cd80de 100644 --- a/executor_test.go +++ b/executor_test.go @@ -1592,3 +1592,50 @@ func TestMutation_ExecutionDoesNotAddErrorsFromFieldResolveFn(t *testing.T) { t.Fatalf("wrong result, unexpected errors: %+v", result.Errors) } } + +func TestGraphqlTag(t *testing.T) { + typeObjectType := graphql.NewObject(graphql.ObjectConfig{ + Name: "Type", + Fields: graphql.Fields{ + "fooBar": &graphql.Field{Type: graphql.String}, + }, + }) + var baz = &graphql.Field{ + Type: typeObjectType, + Description: "typeObjectType", + Resolve: func(p graphql.ResolveParams) (interface{}, error) { + t := struct { + FooBar string `graphql:"fooBar"` + }{"foo bar value"} + return t, nil + }, + } + q := graphql.NewObject(graphql.ObjectConfig{ + Name: "Query", + Fields: graphql.Fields{ + "baz": baz, + }, + }) + schema, err := graphql.NewSchema(graphql.SchemaConfig{ + Query: q, + }) + if err != nil { + t.Fatalf("unexpected error, got: %v", err) + } + query := "{ baz { fooBar } }" + result := graphql.Do(graphql.Params{ + Schema: schema, + RequestString: query, + }) + if len(result.Errors) != 0 { + t.Fatalf("wrong result, unexpected errors: %+v", result.Errors) + } + expectedData := map[string]interface{}{ + "baz": map[string]interface{}{ + "fooBar": "foo bar value", + }, + } + if !reflect.DeepEqual(result.Data, expectedData) { + t.Fatalf("unexpected result, got: %+v, expected: %+v", expectedData, result.Data) + } +} From f8ec35c54158eff2c31c30f2371c53f55963a883 Mon Sep 17 00:00:00 2001 From: Shawn Smith Date: Thu, 15 Sep 2016 13:59:33 +0900 Subject: [PATCH 34/34] gofmt -s --- language/visitor/visitor_test.go | 8 ++++---- rules.go | 14 +++++++------- validator.go | 4 ++-- variables_test.go | 2 +- 4 files changed, 14 insertions(+), 14 deletions(-) diff --git a/language/visitor/visitor_test.go b/language/visitor/visitor_test.go index 436560d8..898c89ae 100644 --- a/language/visitor/visitor_test.go +++ b/language/visitor/visitor_test.go @@ -51,7 +51,7 @@ func TestVisitor_AllowsEditingANodeBothOnEnterAndOnLeave(t *testing.T) { 
v := &visitor.VisitorOptions{ KindFuncMap: map[string]visitor.NamedVisitFuncs{ - kinds.OperationDefinition: visitor.NamedVisitFuncs{ + kinds.OperationDefinition: { Enter: func(p visitor.VisitFuncParams) (string, interface{}) { if node, ok := p.Node.(*ast.OperationDefinition); ok { selectionSet = node.SelectionSet @@ -120,7 +120,7 @@ func TestVisitor_AllowsEditingTheRootNodeOnEnterAndOnLeave(t *testing.T) { v := &visitor.VisitorOptions{ KindFuncMap: map[string]visitor.NamedVisitFuncs{ - kinds.Document: visitor.NamedVisitFuncs{ + kinds.Document: { Enter: func(p visitor.VisitFuncParams) (string, interface{}) { if node, ok := p.Node.(*ast.Document); ok { visited["didEnter"] = true @@ -931,7 +931,7 @@ func TestVisitor_VisitInParallel_AllowsSkippingDifferentSubTrees(t *testing.T) { } v := []*visitor.VisitorOptions{ - &visitor.VisitorOptions{ + { Enter: func(p visitor.VisitFuncParams) (string, interface{}) { switch node := p.Node.(type) { case *ast.Name: @@ -960,7 +960,7 @@ func TestVisitor_VisitInParallel_AllowsSkippingDifferentSubTrees(t *testing.T) { return visitor.ActionNoChange, nil }, }, - &visitor.VisitorOptions{ + { Enter: func(p visitor.VisitFuncParams) (string, interface{}) { switch node := p.Node.(type) { case *ast.Name: diff --git a/rules.go b/rules.go index c5be3f03..3b3c4456 100644 --- a/rules.go +++ b/rules.go @@ -146,12 +146,12 @@ func DefaultValuesOfCorrectTypeRule(context *ValidationContext) *ValidationRuleI return visitor.ActionSkip, nil }, }, - kinds.SelectionSet: visitor.NamedVisitFuncs{ + kinds.SelectionSet: { Kind: func(p visitor.VisitFuncParams) (string, interface{}) { return visitor.ActionSkip, nil }, }, - kinds.FragmentDefinition: visitor.NamedVisitFuncs{ + kinds.FragmentDefinition: { Kind: func(p visitor.VisitFuncParams) (string, interface{}) { return visitor.ActionSkip, nil }, @@ -1875,7 +1875,7 @@ func UniqueInputFieldNamesRule(context *ValidationContext) *ValidationRuleInstan visitorOpts := &visitor.VisitorOptions{ KindFuncMap: map[string]visitor.NamedVisitFuncs{ - kinds.ObjectValue: visitor.NamedVisitFuncs{ + kinds.ObjectValue: { Enter: func(p visitor.VisitFuncParams) (string, interface{}) { knownNameStack = append(knownNameStack, knownNames) knownNames = map[string]*ast.Name{} @@ -1887,7 +1887,7 @@ func UniqueInputFieldNamesRule(context *ValidationContext) *ValidationRuleInstan return visitor.ActionNoChange, nil }, }, - kinds.ObjectField: visitor.NamedVisitFuncs{ + kinds.ObjectField: { Kind: func(p visitor.VisitFuncParams) (string, interface{}) { if node, ok := p.Node.(*ast.ObjectField); ok { fieldName := "" @@ -1943,7 +1943,7 @@ func UniqueOperationNamesRule(context *ValidationContext) *ValidationRuleInstanc return visitor.ActionSkip, nil }, }, - kinds.FragmentDefinition: visitor.NamedVisitFuncs{ + kinds.FragmentDefinition: { Kind: func(p visitor.VisitFuncParams) (string, interface{}) { return visitor.ActionSkip, nil }, @@ -1963,7 +1963,7 @@ func UniqueVariableNamesRule(context *ValidationContext) *ValidationRuleInstance visitorOpts := &visitor.VisitorOptions{ KindFuncMap: map[string]visitor.NamedVisitFuncs{ - kinds.OperationDefinition: visitor.NamedVisitFuncs{ + kinds.OperationDefinition: { Kind: func(p visitor.VisitFuncParams) (string, interface{}) { if node, ok := p.Node.(*ast.OperationDefinition); ok && node != nil { knownVariableNames = map[string]*ast.Name{} @@ -1971,7 +1971,7 @@ func UniqueVariableNamesRule(context *ValidationContext) *ValidationRuleInstance return visitor.ActionNoChange, nil }, }, - kinds.VariableDefinition: visitor.NamedVisitFuncs{ + 
kinds.VariableDefinition: { Kind: func(p visitor.VisitFuncParams) (string, interface{}) { if node, ok := p.Node.(*ast.VariableDefinition); ok && node != nil { variableName := "" diff --git a/validator.go b/validator.go index 1e057935..7baf109f 100644 --- a/validator.go +++ b/validator.go @@ -231,12 +231,12 @@ func (ctx *ValidationContext) VariableUsages(node HasSelectionSet) []*VariableUs visitor.Visit(node, visitor.VisitWithTypeInfo(typeInfo, &visitor.VisitorOptions{ KindFuncMap: map[string]visitor.NamedVisitFuncs{ - kinds.VariableDefinition: visitor.NamedVisitFuncs{ + kinds.VariableDefinition: { Kind: func(p visitor.VisitFuncParams) (string, interface{}) { return visitor.ActionSkip, nil }, }, - kinds.Variable: visitor.NamedVisitFuncs{ + kinds.Variable: { Kind: func(p visitor.VisitFuncParams) (string, interface{}) { if node, ok := p.Node.(*ast.Variable); ok && node != nil { usages = append(usages, &VariableUsage{ diff --git a/variables_test.go b/variables_test.go index 545f3c7d..ff815f0c 100644 --- a/variables_test.go +++ b/variables_test.go @@ -533,7 +533,7 @@ func TestVariables_ObjectsAndNullability_UsingVariables_ErrorsOnDeepNestedErrors "\nIn field \"na\": In field \"c\": Expected \"String!\", found null." + "\nIn field \"nb\": Expected \"String!\", found null.", Locations: []location.SourceLocation{ - location.SourceLocation{ + { Line: 2, Column: 19, }, },