diff --git a/pkg/document/arguments.go b/pkg/document/arguments.go index a937416e67..91ebbb1c37 100644 --- a/pkg/document/arguments.go +++ b/pkg/document/arguments.go @@ -3,7 +3,7 @@ package document // Argument as specified in // http://facebook.github.io/graphql/draft/#Argument type Argument struct { - Name ByteSlice + Name string Value Value } diff --git a/pkg/document/argumentsdefinition.go b/pkg/document/argumentsdefinition.go index 7dfae65a01..2ccc75015c 100644 --- a/pkg/document/argumentsdefinition.go +++ b/pkg/document/argumentsdefinition.go @@ -1,16 +1,14 @@ package document -import "bytes" - // ArgumentsDefinition as specified in: // http://facebook.github.io/graphql/draft/#ArgumentsDefinition type ArgumentsDefinition []InputValueDefinition // GetByName returns InputValueDefinition by $name or nil if not found -func (a ArgumentsDefinition) GetByName(name []byte) *InputValueDefinition { +func (a ArgumentsDefinition) GetByName(name string) *InputValueDefinition { for _, definition := range a { - if bytes.Equal(definition.Name, name) { + if definition.Name == name { return &definition } } diff --git a/pkg/document/byteslice.go b/pkg/document/byteslice.go deleted file mode 100644 index 4d868c4601..0000000000 --- a/pkg/document/byteslice.go +++ /dev/null @@ -1,12 +0,0 @@ -package document - -import "bytes" - -// ByteSlice is an alias for []byte which implements MarshalJSON to pretty print string byte slices -type ByteSlice []byte - -// MarshalJSON is implemented to make the default json encoder work -func (b ByteSlice) MarshalJSON() ([]byte, error) { - b = bytes.Replace(b, []byte("\n"), []byte("\\n"), -1) - return append([]byte(`"`), append(b, 
[]byte(`"`)...)...), nil -} diff --git a/pkg/document/directivedefinition.go b/pkg/document/directivedefinition.go index 367449f2ad..7e8edc6d40 100644 --- a/pkg/document/directivedefinition.go +++ b/pkg/document/directivedefinition.go @@ -1,12 +1,10 @@ package document -import "bytes" - // DirectiveDefinition as specified in // http://facebook.github.io/graphql/draft/#DirectiveDefinition type DirectiveDefinition struct { - Description ByteSlice - Name ByteSlice + Description string + Name string ArgumentsDefinition ArgumentsDefinition DirectiveLocations DirectiveLocations } @@ -26,9 +24,9 @@ func (d DirectiveDefinition) ContainsLocation(location DirectiveLocation) bool { type DirectiveDefinitions []DirectiveDefinition // GetByName returns the DirectiveDefinition via $name -func (d DirectiveDefinitions) GetByName(name []byte) *DirectiveDefinition { +func (d DirectiveDefinitions) GetByName(name string) *DirectiveDefinition { for _, directive := range d { - if bytes.Equal(directive.Name, name) { + if directive.Name == name { return &directive } } diff --git a/pkg/document/directives.go b/pkg/document/directives.go index ed5f305659..a9e989ccc3 100644 --- a/pkg/document/directives.go +++ b/pkg/document/directives.go @@ -3,7 +3,7 @@ package document // Directive as specified in: // http://facebook.github.io/graphql/draft/#Directive type Directive struct { - Name ByteSlice + Name string Arguments Arguments } diff --git a/pkg/document/enumtypedefinition.go b/pkg/document/enumtypedefinition.go index 8b63d68996..b2b81f2c0f 100644 --- a/pkg/document/enumtypedefinition.go +++ b/pkg/document/enumtypedefinition.go @@ -1,14 +1,14 @@ package document import ( - "bytes" + "strings" ) // EnumTypeDefinition as specified in: // http://facebook.github.io/graphql/draft/#EnumTypeDefinition type EnumTypeDefinition struct { - Description ByteSlice - Name ByteSlice + Description string + Name string EnumValuesDefinition EnumValuesDefinition Directives Directives } @@ -16,17 +16,17 @@ type 
EnumTypeDefinition struct { // TitleCaseName returns the EnumTypeDefinition's Name // as title case string. example: // episode => Episode -func (e EnumTypeDefinition) TitleCaseName() []byte { - return bytes.Title(e.Name) +func (e EnumTypeDefinition) TitleCaseName() string { + return strings.Title(e.Name) } // EnumTypeDefinitions is the plural of EnumTypeDefinition type EnumTypeDefinitions []EnumTypeDefinition // HasDefinition returns true if a EnumTypeDefinition with $name is contained -func (e EnumTypeDefinitions) HasDefinition(name []byte) bool { +func (e EnumTypeDefinitions) HasDefinition(name string) bool { for _, definition := range e { - if bytes.Equal(definition.Name, name) { + if definition.Name == name { return true } } diff --git a/pkg/document/enumvaluedefinition.go b/pkg/document/enumvaluedefinition.go index 8dc81259b2..9e73a6fd43 100644 --- a/pkg/document/enumvaluedefinition.go +++ b/pkg/document/enumvaluedefinition.go @@ -1,20 +1,20 @@ package document import ( - "bytes" + "strings" ) // EnumValueDefinition as specified in: // http://facebook.github.io/graphql/draft/#EnumValueDefinition type EnumValueDefinition struct { - Description ByteSlice - EnumValue ByteSlice + Description string + EnumValue string Directives Directives } // ProperCaseVal returns the EnumValueDefinition's EnumValue // as proper case string. 
example: // NORTH => North -func (e EnumValueDefinition) ProperCaseVal() []byte { - return bytes.Title(bytes.ToLower(e.EnumValue)) +func (e EnumValueDefinition) ProperCaseVal() string { + return strings.Title(strings.ToLower(e.EnumValue)) } diff --git a/pkg/document/field.go b/pkg/document/field.go index 518f81dad6..8e6ba25236 100644 --- a/pkg/document/field.go +++ b/pkg/document/field.go @@ -3,8 +3,8 @@ package document // Field as specified in: // http://facebook.github.io/graphql/draft/#Field type Field struct { - Alias ByteSlice - Name ByteSlice + Alias string + Name string Arguments Arguments Directives Directives SelectionSet SelectionSet diff --git a/pkg/document/fielddefinition.go b/pkg/document/fielddefinition.go index 50c4641e17..5dd6ad267f 100644 --- a/pkg/document/fielddefinition.go +++ b/pkg/document/fielddefinition.go @@ -1,32 +1,32 @@ package document import ( - "bytes" "github.com/jensneuse/graphql-go-tools/pkg/lexing/literal" + "strings" ) // FieldDefinition as specified in: // http://facebook.github.io/graphql/draft/#FieldDefinition type FieldDefinition struct { - Description ByteSlice - Name ByteSlice + Description string + Name string ArgumentsDefinition ArgumentsDefinition Type Type Directives Directives } // NameAsTitle trims all prefixed __ and formats the name with strings.Title -func (f FieldDefinition) NameAsTitle() []byte { - return bytes.Title(bytes.TrimPrefix(f.Name, []byte("__"))) +func (f FieldDefinition) NameAsTitle() string { + return strings.Title(strings.TrimPrefix(f.Name, "__")) } // NameAsGoTypeName returns the field definition name as a go type name -func (f FieldDefinition) NameAsGoTypeName() []byte { +func (f FieldDefinition) NameAsGoTypeName() string { name := f.NameAsTitle() - name = append(bytes.ToLower(name[:1]), name[1:]...) 
+ name = strings.ToLower(name[:1]) + name[1:] - if bytes.Equal(name, literal.TYPE) { + if name == literal.TYPE { name = literal.GRAPHQLTYPE } diff --git a/pkg/document/fragmentdefinition.go b/pkg/document/fragmentdefinition.go index d654066214..258f544ef6 100644 --- a/pkg/document/fragmentdefinition.go +++ b/pkg/document/fragmentdefinition.go @@ -1,11 +1,9 @@ package document -import "bytes" - // FragmentDefinition as specified in // http://facebook.github.io/graphql/draft/#FragmentDefinition type FragmentDefinition struct { - FragmentName ByteSlice // but not on + FragmentName string // but not on TypeCondition NamedType Directives Directives SelectionSet SelectionSet @@ -15,9 +13,9 @@ type FragmentDefinition struct { type FragmentDefinitions []FragmentDefinition // GetByName returns the fragment definition with the given name if contained -func (f FragmentDefinitions) GetByName(name []byte) (FragmentDefinition, bool) { +func (f FragmentDefinitions) GetByName(name string) (FragmentDefinition, bool) { for _, fragment := range f { - if bytes.Equal(fragment.FragmentName, name) { + if fragment.FragmentName == name { return fragment, true } } diff --git a/pkg/document/fragmentspread.go b/pkg/document/fragmentspread.go index 5017b6aa45..fa385495bc 100644 --- a/pkg/document/fragmentspread.go +++ b/pkg/document/fragmentspread.go @@ -3,7 +3,7 @@ package document // FragmentSpread as specified in: // http://facebook.github.io/graphql/draft/#FragmentSpread type FragmentSpread struct { - FragmentName ByteSlice + FragmentName string Directives Directives } diff --git a/pkg/document/implementsinterfaces.go b/pkg/document/implementsinterfaces.go index 263f4f84ab..c4d412221e 100644 --- a/pkg/document/implementsinterfaces.go +++ b/pkg/document/implementsinterfaces.go @@ -2,4 +2,4 @@ package document // ImplementsInterfaces as specified in: // http://facebook.github.io/graphql/draft/#ImplementsInterfaces -type ImplementsInterfaces []ByteSlice +type ImplementsInterfaces []string 
diff --git a/pkg/document/inputfieldsdefinition.go b/pkg/document/inputfieldsdefinition.go index b566746df4..f776e86da8 100644 --- a/pkg/document/inputfieldsdefinition.go +++ b/pkg/document/inputfieldsdefinition.go @@ -1,15 +1,13 @@ package document -import "bytes" - // InputFieldsDefinition as specified in: // http://facebook.github.io/graphql/draft/#InputFieldsDefinition type InputFieldsDefinition []InputValueDefinition // GetByName returns a InputValueDefinition by $name or nil if not found -func (i InputFieldsDefinition) GetByName(name []byte) *InputValueDefinition { +func (i InputFieldsDefinition) GetByName(name string) *InputValueDefinition { for _, definition := range i { - if bytes.Equal(definition.Name, name) { + if definition.Name == name { return &definition } } diff --git a/pkg/document/inputobjecttypedefinition.go b/pkg/document/inputobjecttypedefinition.go index a257d4c9f3..57d445631f 100644 --- a/pkg/document/inputobjecttypedefinition.go +++ b/pkg/document/inputobjecttypedefinition.go @@ -1,12 +1,10 @@ package document -import "bytes" - // InputObjectTypeDefinition as specified in: // http://facebook.github.io/graphql/draft/#InputObjectTypeDefinition type InputObjectTypeDefinition struct { - Description ByteSlice - Name ByteSlice + Description string + Name string InputFieldsDefinition InputFieldsDefinition Directives Directives } @@ -15,10 +13,10 @@ type InputObjectTypeDefinition struct { type InputObjectTypeDefinitions []InputObjectTypeDefinition // HasDefinition returns true if an InputObjectTypeDefinition with $name is contained -func (i InputObjectTypeDefinitions) HasDefinition(name []byte) bool { +func (i InputObjectTypeDefinitions) HasDefinition(name string) bool { for _, definition := range i { - if bytes.Equal(definition.Name, name) { + if definition.Name == name { return true } } @@ -27,9 +25,9 @@ func (i InputObjectTypeDefinitions) HasDefinition(name []byte) bool { } // GetByName returns a InputObjectTypeDefinition by $name or nil if not 
found -func (i InputObjectTypeDefinitions) GetByName(name []byte) *InputObjectTypeDefinition { +func (i InputObjectTypeDefinitions) GetByName(name string) *InputObjectTypeDefinition { for _, definition := range i { - if bytes.Equal(definition.Name, name) { + if definition.Name == name { return &definition } } diff --git a/pkg/document/inputvaluedefinition.go b/pkg/document/inputvaluedefinition.go index 578dd06c69..5950be77b0 100644 --- a/pkg/document/inputvaluedefinition.go +++ b/pkg/document/inputvaluedefinition.go @@ -3,8 +3,8 @@ package document // InputValueDefinition as specified in: // http://facebook.github.io/graphql/draft/#InputValueDefinition type InputValueDefinition struct { - Description ByteSlice - Name ByteSlice + Description string + Name string Type Type DefaultValue Value Directives Directives diff --git a/pkg/document/interfacetypedefinition.go b/pkg/document/interfacetypedefinition.go index 17faf162be..9eb2c9b6ac 100644 --- a/pkg/document/interfacetypedefinition.go +++ b/pkg/document/interfacetypedefinition.go @@ -1,12 +1,10 @@ package document -import "bytes" - // InterfaceTypeDefinition as specified in: // http://facebook.github.io/graphql/draft/#InterfaceTypeDefinition type InterfaceTypeDefinition struct { - Description ByteSlice - Name ByteSlice + Description string + Name string FieldsDefinition FieldsDefinition Directives Directives } @@ -15,9 +13,9 @@ type InterfaceTypeDefinition struct { type InterfaceTypeDefinitions []InterfaceTypeDefinition // GetByName returns the interface type definition by name if contained -func (i InterfaceTypeDefinitions) GetByName(name []byte) *InterfaceTypeDefinition { +func (i InterfaceTypeDefinitions) GetByName(name string) *InterfaceTypeDefinition { for _, iFace := range i { - if bytes.Equal(iFace.Name, name) { + if iFace.Name == name { return &iFace } } diff --git a/pkg/document/listtype.go b/pkg/document/listtype.go index 67a70340e3..790ae24f21 100644 --- a/pkg/document/listtype.go +++ 
b/pkg/document/listtype.go @@ -1,7 +1,5 @@ package document -import "bytes" - // ListType as specified in: // https://facebook.github.io/graphql/draft/#ListType type ListType struct { @@ -10,8 +8,8 @@ type ListType struct { } // TypeName returns the unwrapped (in case of list type) type name -func (l ListType) TypeName() []byte { - for bytes.Equal(l.Type.GetTypeKind(), ListTypeKind) { +func (l ListType) TypeName() string { + for l.Type.GetTypeKind() == ListTypeKind { l = l.Type.(ListType) } return l.Type.(NamedType).Name @@ -19,7 +17,7 @@ func (l ListType) TypeName() []byte { // IsBaseType returns if the unwrapped (in case of list type) type name is a base type func (l ListType) IsBaseType() bool { - for bytes.Equal(l.Type.GetTypeKind(), ListTypeKind) { + for l.Type.GetTypeKind() == ListTypeKind { l = l.Type.(ListType) } return l.Type.(NamedType).IsBaseType() @@ -31,9 +29,9 @@ func (l ListType) GetTypeKind() TypeKind { } // AsGoType returns the GraphQL List Type Name as valid go type -func (l ListType) AsGoType() []byte { - return append([]byte("[]"), l.Type.AsGoType()...) 
+func (l ListType) AsGoType() string { + return "[]" + l.Type.AsGoType() } // ListTypeKind marks a Type as ListType -var ListTypeKind TypeKind = []byte("ListType") +var ListTypeKind TypeKind = "ListType" diff --git a/pkg/document/namedtype.go b/pkg/document/namedtype.go index cf64ca5e54..dd2c8fba21 100644 --- a/pkg/document/namedtype.go +++ b/pkg/document/namedtype.go @@ -1,34 +1,31 @@ package document import ( - "bytes" "github.com/jensneuse/graphql-go-tools/pkg/lexing/literal" + "strings" ) // NamedType as specified in: // https://facebook.github.io/graphql/draft/#NamedType type NamedType struct { - Name ByteSlice + Name string NonNull bool } // TypeName returns the name of the type and makes NamedType implement the Type interface -func (n NamedType) TypeName() []byte { +func (n NamedType) TypeName() string { return n.Name } // IsBaseType returns if the type is a base scalar (ID,String,Float,Boolean,Int) or a custom type func (n NamedType) IsBaseType() bool { - if bytes.Equal(n.Name, literal.ID) || - bytes.Equal(n.Name, literal.STRING) || - bytes.Equal(n.Name, literal.FLOAT) || - bytes.Equal(n.Name, literal.BOOLEAN) || - bytes.Equal(n.Name, literal.INT) { + switch n.Name { + case literal.ID, literal.STRING, literal.FLOAT, literal.BOOLEAN, literal.INT: return true + default: + return false } - - return false } // GetTypeKind returns the NamedTypeKind @@ -37,23 +34,24 @@ func (n NamedType) GetTypeKind() TypeKind { } // AsGoType returns the GraphQL Named Type Name as valid go type -func (n NamedType) AsGoType() []byte { +func (n NamedType) AsGoType() string { - if bytes.Equal(n.Name, literal.INT) { + switch n.Name { + case literal.INT: return literal.GOINT32 - } else if bytes.Equal(n.Name, literal.FLOAT) { + case literal.FLOAT: return literal.GOFLOAT32 - } else if bytes.Equal(n.Name, literal.STRING) { + case literal.STRING: return literal.GOSTRING - } else if bytes.Equal(n.Name, literal.BOOLEAN) { + case literal.BOOLEAN: return literal.GOBOOL - } else if 
bytes.Equal(n.Name, literal.NULL) { + case literal.NULL: return literal.GONIL + default: + return strings.Title(strings.TrimPrefix(n.Name, "__")) } - return bytes.Title(bytes.TrimPrefix(n.Name, []byte("__"))) - } // NamedTypeKind marks a Type as NamedType -var NamedTypeKind TypeKind = []byte("NamedType") +var NamedTypeKind TypeKind = "NamedType" diff --git a/pkg/document/objectfield.go b/pkg/document/objectfield.go index 5cef735631..876303915e 100644 --- a/pkg/document/objectfield.go +++ b/pkg/document/objectfield.go @@ -3,7 +3,7 @@ package document // ObjectField as specified in: // http://facebook.github.io/graphql/draft/#ObjectField type ObjectField struct { - Name ByteSlice + Name string Value Value } diff --git a/pkg/document/objecttypedefinition.go b/pkg/document/objecttypedefinition.go index a3a28cb050..41ed7c5420 100644 --- a/pkg/document/objecttypedefinition.go +++ b/pkg/document/objecttypedefinition.go @@ -1,12 +1,10 @@ package document -import "bytes" - // ObjectTypeDefinition as specified in: // http://facebook.github.io/graphql/draft/#ObjectTypeDefinition type ObjectTypeDefinition struct { - Description ByteSlice - Name ByteSlice + Description string + Name string FieldsDefinition FieldsDefinition ImplementsInterfaces ImplementsInterfaces Directives Directives @@ -16,9 +14,9 @@ type ObjectTypeDefinition struct { type ObjectTypeDefinitions []ObjectTypeDefinition // HasType returns if a type with $name is contained -func (o ObjectTypeDefinitions) HasType(name []byte) bool { +func (o ObjectTypeDefinitions) HasType(name string) bool { for _, objectType := range o { - if bytes.Equal(objectType.Name, name) { + if objectType.Name == name { return true } } @@ -27,9 +25,9 @@ func (o ObjectTypeDefinitions) HasType(name []byte) bool { } // ObjectTypeDefinitionByName returns ObjectTypeDefinition,true if it is contained -func (o *ObjectTypeDefinitions) ObjectTypeDefinitionByName(name []byte) *ObjectTypeDefinition { +func (o *ObjectTypeDefinitions) 
ObjectTypeDefinitionByName(name string) *ObjectTypeDefinition { for _, objectType := range *o { - if bytes.Equal(objectType.Name, name) { + if objectType.Name == name { return &objectType } } diff --git a/pkg/document/operationdefinition.go b/pkg/document/operationdefinition.go index 84ee90bcc7..0568f4fc29 100644 --- a/pkg/document/operationdefinition.go +++ b/pkg/document/operationdefinition.go @@ -4,7 +4,7 @@ package document // http://facebook.github.io/graphql/draft/#OperationDefinition type OperationDefinition struct { OperationType OperationType - Name ByteSlice + Name string VariableDefinitions VariableDefinitions Directives Directives SelectionSet SelectionSet diff --git a/pkg/document/scalartypedefinition.go b/pkg/document/scalartypedefinition.go index 87fe9ac40e..53588f7902 100644 --- a/pkg/document/scalartypedefinition.go +++ b/pkg/document/scalartypedefinition.go @@ -3,8 +3,8 @@ package document // ScalarTypeDefinition as specified in: // http://facebook.github.io/graphql/draft/#sec-Scalars type ScalarTypeDefinition struct { - Description ByteSlice - Name ByteSlice + Description string + Name string Directives Directives } diff --git a/pkg/document/schemadefinition.go b/pkg/document/schemadefinition.go index b3600587aa..c6f65a4547 100644 --- a/pkg/document/schemadefinition.go +++ b/pkg/document/schemadefinition.go @@ -1,7 +1,6 @@ package document import ( - "bytes" "fmt" "github.com/jensneuse/graphql-go-tools/pkg/lexing/literal" ) @@ -9,9 +8,9 @@ import ( // SchemaDefinition as specified in: // http://facebook.github.io/graphql/draft/#SchemaDefinition type SchemaDefinition struct { - Query ByteSlice - Mutation ByteSlice - Subscription ByteSlice + Query string + Mutation string + Subscription string Directives Directives } @@ -37,19 +36,19 @@ func (s SchemaDefinition) IsDefined() bool { } // SetOperationType sets the operationType and operationName and will return an error in case of setting one value multiple times -func (s *SchemaDefinition) 
SetOperationType(operationType, operationName []byte) error { +func (s *SchemaDefinition) SetOperationType(operationType, operationName string) error { - if bytes.Equal(operationType, literal.QUERY) { + if operationType == literal.QUERY { if len(s.Query) == 0 { s.Query = operationName return nil } - } else if bytes.Equal(operationType, literal.MUTATION) { + } else if operationType == literal.MUTATION { if len(s.Mutation) == 0 { s.Mutation = operationName return nil } - } else if bytes.Equal(operationType, literal.SUBSCRIPTION) { + } else if operationType == literal.SUBSCRIPTION { if len(s.Subscription) == 0 { s.Subscription = operationName return nil @@ -63,4 +62,4 @@ func (s *SchemaDefinition) SetOperationType(operationType, operationName []byte) // RootOperationTypeDefinition as specified in // http://facebook.github.io/graphql/draft/#RootOperationTypeDefinition -type RootOperationTypeDefinition []byte +type RootOperationTypeDefinition string diff --git a/pkg/document/schemadefinition_test.go b/pkg/document/schemadefinition_test.go index d7f246e79d..2584df32f2 100644 --- a/pkg/document/schemadefinition_test.go +++ b/pkg/document/schemadefinition_test.go @@ -29,16 +29,16 @@ var _ = Describe("SchemaDefinition", func() { Expect(err).To(c.expectErr) } - Expect(actualOut).To(c.expectOut) + Expect(string(actualOut)).To(c.expectOut) }, Entry("should marshal simple SchemaDefinition", Case{ input: SchemaDefinition{ - Query: []byte("Query"), - Mutation: []byte("Mutation"), - Subscription: []byte("Subscription"), + Query: "Query", + Mutation: "Mutation", + Subscription: "Subscription", }, expectErr: Not(HaveOccurred()), - expectOut: Equal([]byte(`{"Query":"Query","Mutation":"Mutation","Subscription":"Subscription","Directives":null}`)), + expectOut: Equal(`{"Query":"Query","Mutation":"Mutation","Subscription":"Subscription","Directives":null}`), }), ) }) diff --git a/pkg/document/type.go b/pkg/document/type.go index 2c40f49927..b36a52b77d 100644 --- a/pkg/document/type.go 
+++ b/pkg/document/type.go @@ -1,13 +1,13 @@ package document // TypeKind marks Types to identify them -type TypeKind []byte +type TypeKind string // Type as specified in: // http://facebook.github.io/graphql/draft/#Type type Type interface { GetTypeKind() TypeKind - AsGoType() []byte + AsGoType() string IsBaseType() bool - TypeName() []byte + TypeName() string } diff --git a/pkg/document/type_test.go b/pkg/document/type_test.go index 1d3e89d50a..67a5e409ec 100644 --- a/pkg/document/type_test.go +++ b/pkg/document/type_test.go @@ -26,7 +26,7 @@ func TestAsGoType(t *testing.T) { Name: literal.INT, }, expectErr: BeNil(), - expectValues: Equal([]byte("int32")), + expectValues: Equal("int32"), }, { it: "should convert gql 'Float' to go 'float32'", @@ -34,7 +34,7 @@ func TestAsGoType(t *testing.T) { Name: literal.FLOAT, }, expectErr: BeNil(), - expectValues: Equal([]byte("float32")), + expectValues: Equal("float32"), }, { it: "should convert gql 'String' to go 'string'", @@ -42,7 +42,7 @@ func TestAsGoType(t *testing.T) { Name: literal.STRING, }, expectErr: BeNil(), - expectValues: Equal([]byte("string")), + expectValues: Equal("string"), }, { it: "should convert gql 'Boolean' to go 'bool'", @@ -51,7 +51,7 @@ func TestAsGoType(t *testing.T) { NonNull: false, }, expectErr: BeNil(), - expectValues: Equal([]byte("bool")), + expectValues: Equal("bool"), }, { it: "should convert gql '[Int]' to go '[]int32'", @@ -62,7 +62,7 @@ func TestAsGoType(t *testing.T) { }}, expectErr: BeNil(), - expectValues: Equal([]byte("[]int32")), + expectValues: Equal("[]int32"), }, { it: "should convert gql '[[Int]]' to go '[][]int32'", @@ -75,7 +75,7 @@ func TestAsGoType(t *testing.T) { }}, expectErr: BeNil(), - expectValues: Equal([]byte("[][]int32")), + expectValues: Equal("[][]int32"), }, } diff --git a/pkg/document/typedefinition.go b/pkg/document/typedefinition.go index 0ab40a8644..ef82328192 100644 --- a/pkg/document/typedefinition.go +++ b/pkg/document/typedefinition.go @@ -3,7 +3,7 @@ 
package document // TypeDefinition as specified in: // http://facebook.github.io/graphql/draft/#TypeDefinition type TypeDefinition struct { - Description ByteSlice - Name ByteSlice + Description string + Name string FieldsDefinition FieldsDefinition } diff --git a/pkg/document/uniontypedefinition.go b/pkg/document/uniontypedefinition.go index abb0b3c58c..01f0af19cb 100644 --- a/pkg/document/uniontypedefinition.go +++ b/pkg/document/uniontypedefinition.go @@ -1,26 +1,24 @@ package document -import "bytes" - // UnionTypeDefinition as specified in: // http://facebook.github.io/graphql/draft/#UnionTypeDefinition type UnionTypeDefinition struct { - Description ByteSlice - Name ByteSlice + Description string + Name string UnionMemberTypes UnionMemberTypes Directives Directives } // GroupingFuncName returns a name to name a function after. Example: -// "Direction" => "isDirection" -func (u UnionTypeDefinition) GroupingFuncName() []byte { - return append([]byte("Is"), u.Name...) +// "Direction" => "IsDirection" +func (u UnionTypeDefinition) GroupingFuncName() string { + return "Is" + u.Name } // HasMemberType returns true if a member with the given name is contained -func (u UnionTypeDefinition) HasMemberType(name []byte) bool { +func (u UnionTypeDefinition) HasMemberType(name string) bool { for _, unionMemberType := range u.UnionMemberTypes { - if bytes.Equal(unionMemberType, name) { + if unionMemberType == name { return true } } @@ -30,15 +28,15 @@ func (u UnionTypeDefinition) HasMemberType(name []byte) bool { // UnionMemberTypes as specified in: // http://facebook.github.io/graphql/draft/#UnionMemberTypes -type UnionMemberTypes []ByteSlice +type UnionMemberTypes []string // UnionTypeDefinitions is the plural of UnionTypeDefinition type UnionTypeDefinitions []UnionTypeDefinition // GetByName returns the UnionTypeDefinition by $name if it is contained -func (u UnionTypeDefinitions) GetByName(name []byte) *UnionTypeDefinition { +func (u UnionTypeDefinitions) GetByName(name 
string) *UnionTypeDefinition { for _, definition := range u { - if bytes.Equal(definition.Name, name) { + if definition.Name == name { return &definition } } diff --git a/pkg/document/value.go b/pkg/document/value.go index f2268588a4..630ccd6485 100644 --- a/pkg/document/value.go +++ b/pkg/document/value.go @@ -9,7 +9,7 @@ type Value interface { // VariableValue as specified in: // http://facebook.github.io/graphql/draft/#Variable type VariableValue struct { - Name []byte + Name string } func (VariableValue) isValue() {} @@ -49,7 +49,7 @@ func (FloatValue) ValueType() ValueType { // StringValue as specified in: // http://facebook.github.io/graphql/draft/#StringValue type StringValue struct { - Val ByteSlice + Val string } func (StringValue) isValue() {} @@ -86,7 +86,7 @@ func (NullValue) ValueType() ValueType { // EnumValue as specified in: // http://facebook.github.io/graphql/draft/#EnumValue type EnumValue struct { - Name ByteSlice // but not true or false or null + Name string // but not true or false or null } func (EnumValue) isValue() {} diff --git a/pkg/document/variabledefinitions.go b/pkg/document/variabledefinitions.go index 1775172ebe..9d128fc75e 100644 --- a/pkg/document/variabledefinitions.go +++ b/pkg/document/variabledefinitions.go @@ -3,7 +3,7 @@ package document // VariableDefinition as specified in: // http://facebook.github.io/graphql/draft/#VariableDefinition type VariableDefinition struct { - Variable ByteSlice + Variable string Type Type DefaultValue Value } diff --git a/pkg/lexer/fixtures/introspection_lexed.golden b/pkg/lexer/fixtures/introspection_lexed.golden index b76d7d7772..3a26d859b1 100644 --- a/pkg/lexer/fixtures/introspection_lexed.golden +++ b/pkg/lexer/fixtures/introspection_lexed.golden @@ -6,7 +6,7 @@ "Line": 1, "Char": 1 }, - "Description": "query" + "Description": "" }, { "Keyword": 1, @@ -15,7 +15,7 @@ "Line": 1, "Char": 7 }, - "Description": "IntrospectionQuery" + "Description": "" }, { "Keyword": 43, @@ -24,7 +24,7 @@ 
"Line": 1, "Char": 26 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -33,7 +33,7 @@ "Line": 2, "Char": 3 }, - "Description": "__schema" + "Description": "" }, { "Keyword": 43, @@ -42,7 +42,7 @@ "Line": 2, "Char": 12 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -51,7 +51,7 @@ "Line": 3, "Char": 5 }, - "Description": "queryType" + "Description": "" }, { "Keyword": 43, @@ -60,7 +60,7 @@ "Line": 3, "Char": 15 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -69,7 +69,7 @@ "Line": 4, "Char": 7 }, - "Description": "name" + "Description": "" }, { "Keyword": 44, @@ -78,7 +78,7 @@ "Line": 5, "Char": 5 }, - "Description": "}" + "Description": "" }, { "Keyword": 1, @@ -87,7 +87,7 @@ "Line": 6, "Char": 5 }, - "Description": "mutationType" + "Description": "" }, { "Keyword": 43, @@ -96,7 +96,7 @@ "Line": 6, "Char": 18 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -105,7 +105,7 @@ "Line": 7, "Char": 7 }, - "Description": "name" + "Description": "" }, { "Keyword": 44, @@ -114,7 +114,7 @@ "Line": 8, "Char": 5 }, - "Description": "}" + "Description": "" }, { "Keyword": 1, @@ -123,7 +123,7 @@ "Line": 9, "Char": 5 }, - "Description": "subscriptionType" + "Description": "" }, { "Keyword": 43, @@ -132,7 +132,7 @@ "Line": 9, "Char": 22 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -141,7 +141,7 @@ "Line": 10, "Char": 7 }, - "Description": "name" + "Description": "" }, { "Keyword": 44, @@ -150,7 +150,7 @@ "Line": 11, "Char": 5 }, - "Description": "}" + "Description": "" }, { "Keyword": 1, @@ -159,7 +159,7 @@ "Line": 12, "Char": 5 }, - "Description": "types" + "Description": "" }, { "Keyword": 43, @@ -168,7 +168,7 @@ "Line": 12, "Char": 11 }, - "Description": "{" + "Description": "" }, { "Keyword": 12, @@ -177,7 +177,7 @@ "Line": 13, "Char": 7 }, - "Description": "..." 
+ "Description": "" }, { "Keyword": 1, @@ -186,7 +186,7 @@ "Line": 13, "Char": 10 }, - "Description": "FullType" + "Description": "" }, { "Keyword": 44, @@ -195,7 +195,7 @@ "Line": 14, "Char": 5 }, - "Description": "}" + "Description": "" }, { "Keyword": 1, @@ -204,7 +204,7 @@ "Line": 15, "Char": 5 }, - "Description": "directives" + "Description": "" }, { "Keyword": 43, @@ -213,7 +213,7 @@ "Line": 15, "Char": 16 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -222,7 +222,7 @@ "Line": 16, "Char": 7 }, - "Description": "name" + "Description": "" }, { "Keyword": 1, @@ -231,7 +231,7 @@ "Line": 17, "Char": 7 }, - "Description": "description" + "Description": "" }, { "Keyword": 1, @@ -240,7 +240,7 @@ "Line": 18, "Char": 7 }, - "Description": "locations" + "Description": "" }, { "Keyword": 1, @@ -249,7 +249,7 @@ "Line": 19, "Char": 7 }, - "Description": "args" + "Description": "" }, { "Keyword": 43, @@ -258,7 +258,7 @@ "Line": 19, "Char": 12 }, - "Description": "{" + "Description": "" }, { "Keyword": 12, @@ -267,7 +267,7 @@ "Line": 20, "Char": 9 }, - "Description": "..." 
+ "Description": "" }, { "Keyword": 1, @@ -276,7 +276,7 @@ "Line": 20, "Char": 12 }, - "Description": "InputValue" + "Description": "" }, { "Keyword": 44, @@ -285,7 +285,7 @@ "Line": 21, "Char": 7 }, - "Description": "}" + "Description": "" }, { "Keyword": 44, @@ -294,7 +294,7 @@ "Line": 22, "Char": 5 }, - "Description": "}" + "Description": "" }, { "Keyword": 44, @@ -303,7 +303,7 @@ "Line": 23, "Char": 3 }, - "Description": "}" + "Description": "" }, { "Keyword": 44, @@ -312,7 +312,7 @@ "Line": 24, "Char": 1 }, - "Description": "}" + "Description": "" }, { "Keyword": 38, @@ -321,7 +321,7 @@ "Line": 26, "Char": 1 }, - "Description": "fragment" + "Description": "" }, { "Keyword": 1, @@ -330,7 +330,7 @@ "Line": 26, "Char": 10 }, - "Description": "FullType" + "Description": "" }, { "Keyword": 18, @@ -339,7 +339,7 @@ "Line": 26, "Char": 19 }, - "Description": "on" + "Description": "" }, { "Keyword": 1, @@ -348,7 +348,7 @@ "Line": 26, "Char": 22 }, - "Description": "__Type" + "Description": "" }, { "Keyword": 43, @@ -357,7 +357,7 @@ "Line": 26, "Char": 29 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -366,7 +366,7 @@ "Line": 27, "Char": 3 }, - "Description": "kind" + "Description": "" }, { "Keyword": 1, @@ -375,7 +375,7 @@ "Line": 28, "Char": 3 }, - "Description": "name" + "Description": "" }, { "Keyword": 1, @@ -384,7 +384,7 @@ "Line": 29, "Char": 3 }, - "Description": "description" + "Description": "" }, { "Keyword": 1, @@ -393,7 +393,7 @@ "Line": 30, "Char": 3 }, - "Description": "fields" + "Description": "" }, { "Keyword": 39, @@ -402,7 +402,7 @@ "Line": 30, "Char": 9 }, - "Description": "(" + "Description": "" }, { "Keyword": 1, @@ -411,7 +411,7 @@ "Line": 30, "Char": 10 }, - "Description": "includeDeprecated" + "Description": "" }, { "Keyword": 4, @@ -420,7 +420,7 @@ "Line": 30, "Char": 27 }, - "Description": ":" + "Description": "" }, { "Keyword": 32, @@ -429,7 +429,7 @@ "Line": 30, "Char": 29 }, - "Description": "true" + "Description": "" }, 
{ "Keyword": 40, @@ -438,7 +438,7 @@ "Line": 30, "Char": 33 }, - "Description": ")" + "Description": "" }, { "Keyword": 43, @@ -447,7 +447,7 @@ "Line": 30, "Char": 35 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -456,7 +456,7 @@ "Line": 31, "Char": 5 }, - "Description": "name" + "Description": "" }, { "Keyword": 1, @@ -465,7 +465,7 @@ "Line": 32, "Char": 5 }, - "Description": "description" + "Description": "" }, { "Keyword": 1, @@ -474,7 +474,7 @@ "Line": 33, "Char": 5 }, - "Description": "args" + "Description": "" }, { "Keyword": 43, @@ -483,7 +483,7 @@ "Line": 33, "Char": 10 }, - "Description": "{" + "Description": "" }, { "Keyword": 12, @@ -492,7 +492,7 @@ "Line": 34, "Char": 7 }, - "Description": "..." + "Description": "" }, { "Keyword": 1, @@ -501,7 +501,7 @@ "Line": 34, "Char": 10 }, - "Description": "InputValue" + "Description": "" }, { "Keyword": 44, @@ -510,7 +510,7 @@ "Line": 35, "Char": 5 }, - "Description": "}" + "Description": "" }, { "Keyword": 22, @@ -519,7 +519,7 @@ "Line": 36, "Char": 5 }, - "Description": "type" + "Description": "" }, { "Keyword": 43, @@ -528,7 +528,7 @@ "Line": 36, "Char": 10 }, - "Description": "{" + "Description": "" }, { "Keyword": 12, @@ -537,7 +537,7 @@ "Line": 37, "Char": 7 }, - "Description": "..." 
+ "Description": "" }, { "Keyword": 1, @@ -546,7 +546,7 @@ "Line": 37, "Char": 10 }, - "Description": "TypeRef" + "Description": "" }, { "Keyword": 44, @@ -555,7 +555,7 @@ "Line": 38, "Char": 5 }, - "Description": "}" + "Description": "" }, { "Keyword": 1, @@ -564,7 +564,7 @@ "Line": 39, "Char": 5 }, - "Description": "isDeprecated" + "Description": "" }, { "Keyword": 1, @@ -573,7 +573,7 @@ "Line": 40, "Char": 5 }, - "Description": "deprecationReason" + "Description": "" }, { "Keyword": 44, @@ -582,7 +582,7 @@ "Line": 41, "Char": 3 }, - "Description": "}" + "Description": "" }, { "Keyword": 1, @@ -591,7 +591,7 @@ "Line": 42, "Char": 3 }, - "Description": "inputFields" + "Description": "" }, { "Keyword": 43, @@ -600,7 +600,7 @@ "Line": 42, "Char": 15 }, - "Description": "{" + "Description": "" }, { "Keyword": 12, @@ -609,7 +609,7 @@ "Line": 43, "Char": 5 }, - "Description": "..." + "Description": "" }, { "Keyword": 1, @@ -618,7 +618,7 @@ "Line": 43, "Char": 8 }, - "Description": "InputValue" + "Description": "" }, { "Keyword": 44, @@ -627,7 +627,7 @@ "Line": 44, "Char": 3 }, - "Description": "}" + "Description": "" }, { "Keyword": 1, @@ -636,7 +636,7 @@ "Line": 45, "Char": 3 }, - "Description": "interfaces" + "Description": "" }, { "Keyword": 43, @@ -645,7 +645,7 @@ "Line": 45, "Char": 14 }, - "Description": "{" + "Description": "" }, { "Keyword": 12, @@ -654,7 +654,7 @@ "Line": 46, "Char": 5 }, - "Description": "..." 
+ "Description": "" }, { "Keyword": 1, @@ -663,7 +663,7 @@ "Line": 46, "Char": 8 }, - "Description": "TypeRef" + "Description": "" }, { "Keyword": 44, @@ -672,7 +672,7 @@ "Line": 47, "Char": 3 }, - "Description": "}" + "Description": "" }, { "Keyword": 1, @@ -681,7 +681,7 @@ "Line": 48, "Char": 3 }, - "Description": "enumValues" + "Description": "" }, { "Keyword": 39, @@ -690,7 +690,7 @@ "Line": 48, "Char": 13 }, - "Description": "(" + "Description": "" }, { "Keyword": 1, @@ -699,7 +699,7 @@ "Line": 48, "Char": 14 }, - "Description": "includeDeprecated" + "Description": "" }, { "Keyword": 4, @@ -708,7 +708,7 @@ "Line": 48, "Char": 31 }, - "Description": ":" + "Description": "" }, { "Keyword": 32, @@ -717,7 +717,7 @@ "Line": 48, "Char": 33 }, - "Description": "true" + "Description": "" }, { "Keyword": 40, @@ -726,7 +726,7 @@ "Line": 48, "Char": 37 }, - "Description": ")" + "Description": "" }, { "Keyword": 43, @@ -735,7 +735,7 @@ "Line": 48, "Char": 39 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -744,7 +744,7 @@ "Line": 49, "Char": 5 }, - "Description": "name" + "Description": "" }, { "Keyword": 1, @@ -753,7 +753,7 @@ "Line": 50, "Char": 5 }, - "Description": "description" + "Description": "" }, { "Keyword": 1, @@ -762,7 +762,7 @@ "Line": 51, "Char": 5 }, - "Description": "isDeprecated" + "Description": "" }, { "Keyword": 1, @@ -771,7 +771,7 @@ "Line": 52, "Char": 5 }, - "Description": "deprecationReason" + "Description": "" }, { "Keyword": 44, @@ -780,7 +780,7 @@ "Line": 53, "Char": 3 }, - "Description": "}" + "Description": "" }, { "Keyword": 1, @@ -789,7 +789,7 @@ "Line": 54, "Char": 3 }, - "Description": "possibleTypes" + "Description": "" }, { "Keyword": 43, @@ -798,7 +798,7 @@ "Line": 54, "Char": 17 }, - "Description": "{" + "Description": "" }, { "Keyword": 12, @@ -807,7 +807,7 @@ "Line": 55, "Char": 5 }, - "Description": "..." 
+ "Description": "" }, { "Keyword": 1, @@ -816,7 +816,7 @@ "Line": 55, "Char": 8 }, - "Description": "TypeRef" + "Description": "" }, { "Keyword": 44, @@ -825,7 +825,7 @@ "Line": 56, "Char": 3 }, - "Description": "}" + "Description": "" }, { "Keyword": 44, @@ -834,7 +834,7 @@ "Line": 57, "Char": 1 }, - "Description": "}" + "Description": "" }, { "Keyword": 38, @@ -843,7 +843,7 @@ "Line": 59, "Char": 1 }, - "Description": "fragment" + "Description": "" }, { "Keyword": 1, @@ -852,7 +852,7 @@ "Line": 59, "Char": 10 }, - "Description": "InputValue" + "Description": "" }, { "Keyword": 18, @@ -861,7 +861,7 @@ "Line": 59, "Char": 21 }, - "Description": "on" + "Description": "" }, { "Keyword": 1, @@ -870,7 +870,7 @@ "Line": 59, "Char": 24 }, - "Description": "__InputValue" + "Description": "" }, { "Keyword": 43, @@ -879,7 +879,7 @@ "Line": 59, "Char": 37 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -888,7 +888,7 @@ "Line": 60, "Char": 3 }, - "Description": "name" + "Description": "" }, { "Keyword": 1, @@ -897,7 +897,7 @@ "Line": 61, "Char": 3 }, - "Description": "description" + "Description": "" }, { "Keyword": 22, @@ -906,7 +906,7 @@ "Line": 62, "Char": 3 }, - "Description": "type" + "Description": "" }, { "Keyword": 43, @@ -915,7 +915,7 @@ "Line": 62, "Char": 8 }, - "Description": "{" + "Description": "" }, { "Keyword": 12, @@ -924,7 +924,7 @@ "Line": 63, "Char": 5 }, - "Description": "..." 
+ "Description": "" }, { "Keyword": 1, @@ -933,7 +933,7 @@ "Line": 63, "Char": 8 }, - "Description": "TypeRef" + "Description": "" }, { "Keyword": 44, @@ -942,7 +942,7 @@ "Line": 64, "Char": 3 }, - "Description": "}" + "Description": "" }, { "Keyword": 1, @@ -951,7 +951,7 @@ "Line": 65, "Char": 3 }, - "Description": "defaultValue" + "Description": "" }, { "Keyword": 44, @@ -960,7 +960,7 @@ "Line": 66, "Char": 1 }, - "Description": "}" + "Description": "" }, { "Keyword": 38, @@ -969,7 +969,7 @@ "Line": 68, "Char": 1 }, - "Description": "fragment" + "Description": "" }, { "Keyword": 1, @@ -978,7 +978,7 @@ "Line": 68, "Char": 10 }, - "Description": "TypeRef" + "Description": "" }, { "Keyword": 18, @@ -987,7 +987,7 @@ "Line": 68, "Char": 18 }, - "Description": "on" + "Description": "" }, { "Keyword": 1, @@ -996,7 +996,7 @@ "Line": 68, "Char": 21 }, - "Description": "__Type" + "Description": "" }, { "Keyword": 43, @@ -1005,7 +1005,7 @@ "Line": 68, "Char": 28 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -1014,7 +1014,7 @@ "Line": 69, "Char": 3 }, - "Description": "kind" + "Description": "" }, { "Keyword": 1, @@ -1023,7 +1023,7 @@ "Line": 70, "Char": 3 }, - "Description": "name" + "Description": "" }, { "Keyword": 1, @@ -1032,7 +1032,7 @@ "Line": 71, "Char": 3 }, - "Description": "ofType" + "Description": "" }, { "Keyword": 43, @@ -1041,7 +1041,7 @@ "Line": 71, "Char": 10 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -1050,7 +1050,7 @@ "Line": 72, "Char": 5 }, - "Description": "kind" + "Description": "" }, { "Keyword": 1, @@ -1059,7 +1059,7 @@ "Line": 73, "Char": 5 }, - "Description": "name" + "Description": "" }, { "Keyword": 1, @@ -1068,7 +1068,7 @@ "Line": 74, "Char": 5 }, - "Description": "ofType" + "Description": "" }, { "Keyword": 43, @@ -1077,7 +1077,7 @@ "Line": 74, "Char": 12 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -1086,7 +1086,7 @@ "Line": 75, "Char": 7 }, - "Description": "kind" + 
"Description": "" }, { "Keyword": 1, @@ -1095,7 +1095,7 @@ "Line": 76, "Char": 7 }, - "Description": "name" + "Description": "" }, { "Keyword": 1, @@ -1104,7 +1104,7 @@ "Line": 77, "Char": 7 }, - "Description": "ofType" + "Description": "" }, { "Keyword": 43, @@ -1113,7 +1113,7 @@ "Line": 77, "Char": 14 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -1122,7 +1122,7 @@ "Line": 78, "Char": 9 }, - "Description": "kind" + "Description": "" }, { "Keyword": 1, @@ -1131,7 +1131,7 @@ "Line": 79, "Char": 9 }, - "Description": "name" + "Description": "" }, { "Keyword": 1, @@ -1140,7 +1140,7 @@ "Line": 80, "Char": 9 }, - "Description": "ofType" + "Description": "" }, { "Keyword": 43, @@ -1149,7 +1149,7 @@ "Line": 80, "Char": 16 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -1158,7 +1158,7 @@ "Line": 81, "Char": 11 }, - "Description": "kind" + "Description": "" }, { "Keyword": 1, @@ -1167,7 +1167,7 @@ "Line": 82, "Char": 11 }, - "Description": "name" + "Description": "" }, { "Keyword": 1, @@ -1176,7 +1176,7 @@ "Line": 83, "Char": 11 }, - "Description": "ofType" + "Description": "" }, { "Keyword": 43, @@ -1185,7 +1185,7 @@ "Line": 83, "Char": 18 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -1194,7 +1194,7 @@ "Line": 84, "Char": 13 }, - "Description": "kind" + "Description": "" }, { "Keyword": 1, @@ -1203,7 +1203,7 @@ "Line": 85, "Char": 13 }, - "Description": "name" + "Description": "" }, { "Keyword": 1, @@ -1212,7 +1212,7 @@ "Line": 86, "Char": 13 }, - "Description": "ofType" + "Description": "" }, { "Keyword": 43, @@ -1221,7 +1221,7 @@ "Line": 86, "Char": 20 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -1230,7 +1230,7 @@ "Line": 87, "Char": 15 }, - "Description": "kind" + "Description": "" }, { "Keyword": 1, @@ -1239,7 +1239,7 @@ "Line": 88, "Char": 15 }, - "Description": "name" + "Description": "" }, { "Keyword": 1, @@ -1248,7 +1248,7 @@ "Line": 89, "Char": 15 }, - "Description": "ofType" + 
"Description": "" }, { "Keyword": 43, @@ -1257,7 +1257,7 @@ "Line": 89, "Char": 22 }, - "Description": "{" + "Description": "" }, { "Keyword": 1, @@ -1266,7 +1266,7 @@ "Line": 90, "Char": 17 }, - "Description": "kind" + "Description": "" }, { "Keyword": 1, @@ -1275,7 +1275,7 @@ "Line": 91, "Char": 17 }, - "Description": "name" + "Description": "" }, { "Keyword": 44, @@ -1284,7 +1284,7 @@ "Line": 92, "Char": 15 }, - "Description": "}" + "Description": "" }, { "Keyword": 44, @@ -1293,7 +1293,7 @@ "Line": 93, "Char": 13 }, - "Description": "}" + "Description": "" }, { "Keyword": 44, @@ -1302,7 +1302,7 @@ "Line": 94, "Char": 11 }, - "Description": "}" + "Description": "" }, { "Keyword": 44, @@ -1311,7 +1311,7 @@ "Line": 95, "Char": 9 }, - "Description": "}" + "Description": "" }, { "Keyword": 44, @@ -1320,7 +1320,7 @@ "Line": 96, "Char": 7 }, - "Description": "}" + "Description": "" }, { "Keyword": 44, @@ -1329,7 +1329,7 @@ "Line": 97, "Char": 5 }, - "Description": "}" + "Description": "" }, { "Keyword": 44, @@ -1338,7 +1338,7 @@ "Line": 98, "Char": 3 }, - "Description": "}" + "Description": "" }, { "Keyword": 44, @@ -1347,6 +1347,6 @@ "Line": 99, "Char": 1 }, - "Description": "}" + "Description": "" } ] \ No newline at end of file diff --git a/pkg/lexer/lexer.go b/pkg/lexer/lexer.go index d845de3c0d..04358230e8 100644 --- a/pkg/lexer/lexer.go +++ b/pkg/lexer/lexer.go @@ -1,112 +1,94 @@ package lexer import ( - "bufio" - "bytes" "fmt" "github.com/jensneuse/graphql-go-tools/pkg/lexing/keyword" "github.com/jensneuse/graphql-go-tools/pkg/lexing/literal" "github.com/jensneuse/graphql-go-tools/pkg/lexing/position" "github.com/jensneuse/graphql-go-tools/pkg/lexing/runes" "github.com/jensneuse/graphql-go-tools/pkg/lexing/token" - "io" + "strings" "unicode" - "unicode/utf8" ) // Lexer emits tokens from a input reader type Lexer struct { - reader *bufio.Reader - buffer *bytes.Buffer - line int - char int - charPositionBeforeLineTerminator int + input string + inputPosition int 
+ textPosition position.Position + beforeLastLineTerminatorTextPosition position.Position +} + +type parsedRune struct { + r rune + pos position.Position } // NewLexer initializes a new lexer func NewLexer() *Lexer { - return &Lexer{ - buffer: &bytes.Buffer{}, - } + return &Lexer{} } // SetInput sets the new reader as input and resets all position stats -func (l *Lexer) SetInput(reader io.Reader) { - if l.reader == nil { - l.reader = bufio.NewReader(reader) - } else { - l.reader.Reset(reader) - } - - l.line = 1 - l.char = 1 +func (l *Lexer) SetInput(input string) { + l.input = input + l.inputPosition = 0 + l.textPosition.Line = 1 + l.textPosition.Char = 1 } // Read emits the next token, this cannot be undone func (l *Lexer) Read() (tok token.Token, err error) { - var r rune - var pos position.Position + var next parsedRune for { - r, pos, err = l.readRune() - if err == io.EOF { - tok = token.EOF - tok.Position = pos - return tok, nil - } else if err != nil { - return tok, err - } - - if !l.runeIsWhitespace(r) { + next = l.readRune() + if !l.runeIsWhitespace(next.r) { break } } - if tok, matched := l.matchSingleRuneToken(pos, r); matched { + var matched bool + tok, matched = l.matchSingleRuneToken(next) + if matched { return tok, nil } - switch r { + switch next.r { case runes.QUOTE: - return l.readString(pos) + return l.readString(next) case runes.DOT: - return l.readSpread(pos) + return l.readSpread(next) case runes.DOLLAR: - return l.readVariable(pos) + return l.readVariable(next) } - if unicode.IsDigit(r) { - return l.readDigit(pos, r) + if runeIsDigit(next.r) { + return l.readDigit(next) } - return l.readIdent(pos, r) + return l.readIdent(next) } func (l *Lexer) swallowWhitespace() (err error) { - var peeked []byte + var next parsedRune for { - peeked, err = l.reader.Peek(1) - if err == io.EOF { + next = l.readRune() + + if next.r == runes.EOF { return nil - } else if err != nil { - return err } - if l.bytesIsWhitespace(peeked) { - _, _, err = l.readRune() - if 
err != nil { - return err - } - } else { - return nil + if !l.runeIsWhitespace(next.r) { + return l.unreadRune() } } } -// Peek will emit the next token without advancing the reader position +// Peek will emit the next keyword without advancing the reader position func (l *Lexer) Peek(ignoreWhitespace bool) (key keyword.Keyword, err error) { if ignoreWhitespace { @@ -116,21 +98,19 @@ func (l *Lexer) Peek(ignoreWhitespace bool) (key keyword.Keyword, err error) { } } - peeked, err := l.reader.Peek(1) - if err == io.EOF { - return keyword.EOF, nil - } else if err != nil { + next := l.peekRune() + if err != nil { return key, err } - return l.keywordFromBytes(peeked) + return l.keywordFromRune(next) } -func (l *Lexer) keywordFromBytes(b []byte) (key keyword.Keyword, err error) { - - r, _ := utf8.DecodeRune(b) +func (l *Lexer) keywordFromRune(r rune) (key keyword.Keyword, err error) { switch r { + case runes.EOF: + return keyword.EOF, nil case runes.SPACE: return keyword.SPACE, nil case runes.TAB: @@ -168,66 +148,54 @@ func (l *Lexer) keywordFromBytes(b []byte) (key keyword.Keyword, err error) { case runes.AND: return keyword.AND, nil case runes.DOT: - return l.peekSpread() + if l.peekEquals("...") { + return keyword.SPREAD, nil + } + return key, fmt.Errorf("keywordFromRune: must be '...'") } if unicode.IsDigit(r) { - isFloat, err := l.peekIsFloat() - if err != nil { - return key, err - } else if isFloat { + if l.peekIsFloat() { return keyword.FLOAT, nil - } else { - return keyword.INTEGER, nil } + return keyword.INTEGER, nil } - return l.peekIdent() + return l.peekIdent(), nil } -func (l *Lexer) peekSpread() (key keyword.Keyword, err error) { +func (l *Lexer) peekIsFloat() (isFloat bool) { - actual, err := l.reader.Peek(len(literal.SPREAD)) - if err != nil { - return key, err - } + var hasDot bool + var peeked rune - if bytes.Equal(actual, literal.SPREAD) { - return keyword.SPREAD, nil - } + for i := l.inputPosition; i < len(l.input); i++ { - return keyword.UNDEFINED, 
nil -} + peeked = rune(l.input[i]) -func (l *Lexer) peekIsFloat() (isFloat bool, err error) { - - peeked, err := l.reader.Peek(32) - if err == io.EOF { - err = nil - } else if err != nil { - return false, err - } - - for pos := range peeked { - r, _ := utf8.DecodeRune(peeked[pos : pos+1]) - - if !isFloat && r == runes.DOT { - isFloat = true - } else if isFloat && r == runes.DOT { - return false, fmt.Errorf("peekIsFloat: invalid input") - } else if !unicode.IsDigit(r) { - break + if peeked == runes.EOF { + return hasDot + } else if l.runeIsWhitespace(peeked) { + return hasDot + } else if peeked == runes.DOT && !hasDot { + hasDot = true + } else if peeked == runes.DOT && hasDot { + return false + } else if !unicode.IsDigit(peeked) { + return false } } - return isFloat, err + return hasDot } -func (l *Lexer) matchSingleRuneToken(position position.Position, run rune) (tok token.Token, matched bool) { +func (l *Lexer) matchSingleRuneToken(r parsedRune) (tok token.Token, matched bool) { matched = true - switch run { + switch r.r { + case runes.EOF: + tok = token.EOF case runes.PIPE: tok = token.Pipe case runes.EQUALS: @@ -256,174 +224,106 @@ func (l *Lexer) matchSingleRuneToken(position position.Position, run rune) (tok matched = false } - tok.Position = position + tok.Position = r.pos return } -func (l *Lexer) readIdent(position position.Position, beginWith rune) (tok token.Token, err error) { - - tok.Position = position - - _, err = l.buffer.WriteRune(beginWith) - if err != nil { - return tok, err - } +func (l *Lexer) readIdent(startRune parsedRune) (tok token.Token, err error) { - var peeked []byte - var r rune + tok.Position = startRune.pos + start := l.inputPosition - 1 + var r parsedRune for { - peeked, err = l.reader.Peek(1) - if err == io.EOF { - err = nil + r = l.readRune() + if !runeIsIdent(r.r) { break - } else if err != nil { - return } + } - if l.bytesIsIdent(peeked) { - r, _, err = l.readRune() - if err != nil { - return tok, err - } - - _, err = 
l.buffer.WriteRune(r) - if err != nil { - return tok, err - } - } else { - break + if r.r != runes.EOF && l.inputPosition > start+1 { + err = l.unreadRune() + if err != nil { + return tok, err } } - tok.Literal = make([]byte, l.buffer.Len()) - copy(tok.Literal, l.buffer.Bytes()) - l.buffer.Reset() + end := l.inputPosition - tok.Keyword = l.identKeywordFromBytes(tok.Literal) + tok.Literal = l.input[start:end] + tok.Keyword = l.keywordFromIdentString(tok.Literal) return } -const identWantBytes = 13 +const identWantRunes = 13 -func (l *Lexer) peekIdent() (k keyword.Keyword, err error) { +func (l *Lexer) peekIdent() (k keyword.Keyword) { - peeked, err := l.peekEOFSafe(identWantBytes) - if err != nil { - return k, err - } - - nonIdentPosition := bytes.IndexFunc(peeked, func(r rune) bool { - return !l.runeIsIdent(r) - }) + start := l.inputPosition - if l.isUnterminatedIdent(identWantBytes, len(peeked), nonIdentPosition) { - return keyword.IDENT, nil + end := l.inputPosition + identWantRunes + if end > len(l.input) { + end = len(l.input) } - if !l.isIndexFuncResultUnsatisfied(nonIdentPosition) { - peeked = peeked[:nonIdentPosition] - } - - return l.identKeywordFromBytes(peeked), nil -} + peeked := l.input[start:end] -func (l *Lexer) isUnterminatedIdent(nWantBytes, nGotBytes, nonIdentPosition int) bool { - return l.isIndexFuncResultUnsatisfied(nonIdentPosition) && nWantBytes == nGotBytes -} - -func (l *Lexer) isIndexFuncResultUnsatisfied(result int) bool { - return result == -1 -} - -func (l *Lexer) peekEOFSafe(n int) ([]byte, error) { - peeked, err := l.reader.Peek(n) - if err == nil || err == io.EOF { - return peeked, nil - } - - return nil, err -} - -func (l *Lexer) identKeywordFromBytes(ident []byte) (k keyword.Keyword) { - switch len(ident) { - case 2: - if bytes.Equal(ident, literal.ON) { - k = keyword.ON - return - } - case 4: - if bytes.Equal(ident, literal.TRUE) { - k = keyword.TRUE - return - } else if bytes.Equal(ident, literal.NULL) { - k = keyword.NULL - 
return - } else if bytes.Equal(ident, literal.TYPE) { - k = keyword.TYPE - return - } else if bytes.Equal(ident, literal.ENUM) { - k = keyword.ENUM - return - } - case 5: - if bytes.Equal(ident, literal.FALSE) { - k = keyword.FALSE - return - } else if bytes.Equal(ident, literal.UNION) { - k = keyword.UNION - return - } else if bytes.Equal(ident, literal.INPUT) { - k = keyword.INPUT - return - } else if bytes.Equal(ident, literal.QUERY) { - k = keyword.QUERY - return - } - case 6: - if bytes.Equal(ident, literal.SCHEMA) { - k = keyword.SCHEMA - return - } else if bytes.Equal(ident, literal.SCALAR) { - k = keyword.SCALAR - return - } - case 8: - if bytes.Equal(ident, literal.MUTATION) { - k = keyword.MUTATION - return - } else if bytes.Equal(ident, literal.FRAGMENT) { - k = keyword.FRAGMENT - return - } - case 9: - if bytes.Equal(ident, literal.INTERFACE) { - k = keyword.INTERFACE - return - } else if bytes.Equal(ident, literal.DIRECTIVE) { - k = keyword.DIRECTIVE - return - } - case 10: - if bytes.Equal(ident, literal.IMPLEMENTS) { - k = keyword.IMPLEMENTS - return - } - case 12: - if bytes.Equal(ident, literal.SUBSCRIPTION) { - k = keyword.SUBSCRIPTION - return + for i, r := range peeked { + if !runeIsIdent(r) { + peeked = peeked[:i] + break } } - return keyword.IDENT + return l.keywordFromIdentString(peeked) +} + +func (l *Lexer) keywordFromIdentString(ident string) (k keyword.Keyword) { + switch ident { + case "on": + return keyword.ON + case "true": + return keyword.TRUE + case "type": + return keyword.TYPE + case "null": + return keyword.NULL + case "enum": + return keyword.ENUM + case "false": + return keyword.FALSE + case "union": + return keyword.UNION + case "query": + return keyword.QUERY + case "input": + return keyword.INPUT + case "schema": + return keyword.SCHEMA + case "scalar": + return keyword.SCALAR + case "mutation": + return keyword.MUTATION + case "fragment": + return keyword.FRAGMENT + case "interface": + return keyword.INTERFACE + case 
"directive": + return keyword.DIRECTIVE + case "implements": + return keyword.IMPLEMENTS + case "subscription": + return keyword.SUBSCRIPTION + default: + return keyword.IDENT + } } -func (l *Lexer) readVariable(position position.Position) (tok token.Token, err error) { +func (l *Lexer) readVariable(startRune parsedRune) (tok token.Token, err error) { - tok.Position = position + tok.Position = startRune.pos tok.Keyword = keyword.VARIABLE peeked, err := l.Peek(false) @@ -438,7 +338,7 @@ func (l *Lexer) readVariable(position position.Position) (tok token.Token, err e return tok, fmt.Errorf("readVariable: must not have whitespace after $") } - ident, err := l.readIdent(position, runes.DOLLAR) + ident, err := l.readIdent(startRune) if err != nil { return tok, err } @@ -447,326 +347,278 @@ func (l *Lexer) readVariable(position position.Position) (tok token.Token, err e return } -func (l *Lexer) readSpread(position position.Position) (tok token.Token, err error) { +func (l *Lexer) readSpread(startRune parsedRune) (tok token.Token, err error) { - tok.Position = position - - isSpread, err := l.peekEquals([]byte(".."), true, false) - if err != nil { - return tok, err - } + isSpread := l.peekEquals("..") if !isSpread { - return tok, fmt.Errorf("readSpread: invalid '.' at position %s", position.String()) + tok.Position = startRune.pos + return tok, fmt.Errorf("readSpread: invalid '.' 
at position %s", startRune.pos.String()) } + l.swallowAmount(2) + tok = token.Spread - tok.Position = position + tok.Position = startRune.pos return } -func (l *Lexer) readString(pos position.Position) (tok token.Token, err error) { +func (l *Lexer) readString(startRune parsedRune) (tok token.Token, err error) { - isMultiLineString, err := l.peekEquals([]byte(`""`), true, true) - if err != nil { - return tok, err - } + isMultiLineString := l.peekEquals("\"\"") if isMultiLineString { - return l.readMultiLineString(pos) + l.swallowAmount(2) + return l.readMultiLineString(startRune) } - return l.readSingleLineString(pos) + return l.readSingleLineString(startRune) } -func (l *Lexer) swallowAmount(amount int) error { +func (l *Lexer) swallowAmount(amount int) { for i := 0; i < amount; i++ { - _, _, err := l.readRune() - if err != nil { - return err - } + l.readRune() } - - return nil } -func (l *Lexer) peekEquals(equals []byte, swallow, returnErrorOnEOF bool) (bool, error) { - - var matches bool - peeked, err := l.reader.Peek(len(equals)) - if !returnErrorOnEOF && err == io.EOF { - return false, nil - } +func (l *Lexer) peekEquals(equals string) bool { - if err != nil { - return matches, err - } + start := l.inputPosition + end := l.inputPosition + len(equals) - matches = bytes.Equal(equals, peeked) - if swallow && matches { - err = l.swallowAmount(len(equals)) + if end > len(l.input) { + return false } - return matches, err + return l.input[start:end] == equals } -func (l *Lexer) readDigit(position position.Position, beginWith rune) (tok token.Token, err error) { +func (l *Lexer) readDigit(startRune parsedRune) (tok token.Token, err error) { - tok.Position = position + tok.Position = startRune.pos - _, err = l.buffer.WriteRune(beginWith) - if err != nil { - return tok, err - } + start := l.inputPosition - 1 - _, err = l.writeNextDigitsToBuffer() - if err != nil { - l.buffer.Reset() - return tok, err + var r parsedRune + for { + r = l.readRune() + if !runeIsDigit(r.r) { 
+ break + } } - isFloat, err := l.peekEquals([]byte("."), true, false) - if err != nil { - return tok, err - } + isFloat := r.r == runes.DOT if isFloat { - return l.readFloat(position, tok.Literal) + l.swallowAmount(1) + return l.readFloat(startRune.pos, start) } - tok.Keyword = keyword.INTEGER + if r.r != runes.EOF { + err = l.unreadRune() + if err != nil { + return tok, err + } + } + + end := l.inputPosition - tok.Literal = make([]byte, l.buffer.Len()) - copy(tok.Literal, l.buffer.Bytes()) - l.buffer.Reset() + tok.Keyword = keyword.INTEGER + tok.Literal = l.input[start:end] return } -func (l *Lexer) readFloat(position position.Position, integerPart []byte) (tok token.Token, err error) { +func (l *Lexer) readFloat(position position.Position, start int) (tok token.Token, err error) { tok.Position = position - _, err = l.buffer.WriteRune(runes.DOT) - if err != nil { - l.buffer.Reset() - return tok, err + var valid bool + + var r parsedRune + for { + r = l.readRune() + if !runeIsDigit(r.r) { + break + } else if !valid { + valid = true + } } - totalMatches, err := l.writeNextDigitsToBuffer() - if err != nil { - l.buffer.Reset() - return tok, err + if !valid { + return tok, fmt.Errorf("readFloat: incomplete float, must have digits after dot") } - if totalMatches == 0 { - l.buffer.Reset() - return tok, fmt.Errorf("readFloat: expected float part after '.'") + if r.r != runes.EOF { + err = l.unreadRune() + if err != nil { + return tok, err + } } + end := l.inputPosition + tok.Keyword = keyword.FLOAT - tok.Literal = make([]byte, l.buffer.Len()) - copy(tok.Literal, l.buffer.Bytes()) - l.buffer.Reset() + tok.Literal = l.input[start:end] return } -func (l *Lexer) writeNextDigitsToBuffer() (totalMatches int, err error) { - - var r rune +func (l *Lexer) trimStartEnd(input, trim string) string { + return strings.TrimSuffix(strings.TrimPrefix(input, trim), trim) +} - for { - r, _, err = l.readRune() - if err == io.EOF { - err = nil - break - } else if err != nil { - return 
totalMatches, err - } +func (l *Lexer) readRune() (r parsedRune) { - if unicode.IsDigit(r) { - _, err = l.buffer.WriteRune(r) - if err != nil { - return totalMatches, err - } + r.pos.Line = l.textPosition.Line + r.pos.Char = l.textPosition.Char - totalMatches++ + if l.inputPosition < len(l.input) { + r.r = rune(l.input[l.inputPosition]) + if r.r == runes.LINETERMINATOR { + l.beforeLastLineTerminatorTextPosition = l.textPosition + l.textPosition.Line++ + l.textPosition.Char = 1 } else { - err = l.unreadRune() - if err != nil { - return totalMatches, err - } - break + l.textPosition.Char++ } + + l.inputPosition++ + } else { + r.r = runes.EOF } return } -func (l *Lexer) trimStartEnd(input, trim []byte) []byte { - return bytes.TrimSuffix(bytes.TrimPrefix(input, trim), trim) -} - -func (l *Lexer) readRune() (r rune, position position.Position, err error) { +func (l *Lexer) unreadRune() error { - if l.reader == nil { - return r, position, fmt.Errorf("readRune: reader must not be nil") + if l.inputPosition == 0 { + return fmt.Errorf("unreadRune: cannot unread from inputPosition 0") } - position.Line = l.line - position.Char = l.char - - r, size, err := l.reader.ReadRune() - if err != nil { - return r, position, err - } + l.inputPosition-- + r := rune(l.input[l.inputPosition]) if r == runes.LINETERMINATOR { - l.charPositionBeforeLineTerminator = l.char - l.line++ - l.char = 1 + l.textPosition = l.beforeLastLineTerminatorTextPosition } else { - l.char += size + l.textPosition.Char-- } - return r, position, err + return nil } -func (l *Lexer) unreadRune() error { - - err := l.reader.UnreadRune() - if err != nil { - return err - } - - isLineTerminator, err := l.peekEquals([]byte("\n"), false, false) - if err != nil { - return err - } +func (l *Lexer) peekRune() (r rune) { - if isLineTerminator { - l.line = l.line - 1 - l.char = l.charPositionBeforeLineTerminator - } else { - l.char = l.char - 1 + if l.inputPosition < len(l.input) { + return rune(l.input[l.inputPosition]) } - 
return nil + return runes.EOF } -func (l *Lexer) runeIsIdent(r rune) bool { - return unicode.IsLetter(r) || - unicode.IsDigit(r) || - r == runes.NEGATIVESIGN || - r == runes.UNDERSCORE +func runeIsIdent(r rune) bool { + switch r { + case 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', runes.NEGATIVESIGN, runes.UNDERSCORE: + return true + default: + return false + } } -func (l *Lexer) bytesIsIdent(b []byte) bool { - r, _ := utf8.DecodeRune(b) - return l.runeIsIdent(r) +func runeIsDigit(r rune) bool { + switch r { + case '1', '2', '3', '4', '5', '6', '7', '8', '9', '0': + return true + default: + return false + } } func (l *Lexer) runeIsWhitespace(r rune) bool { - return r == runes.SPACE || - r == runes.TAB || - r == runes.LINETERMINATOR || - r == runes.COMMA -} - -func (l *Lexer) bytesIsWhitespace(b []byte) bool { - return bytes.Equal(b, literal.SPACE) || - bytes.Equal(b, literal.TAB) || - bytes.Equal(b, literal.LINETERMINATOR) || - bytes.Equal(b, literal.COMMA) + switch r { + case runes.SPACE, runes.TAB, runes.LINETERMINATOR, runes.COMMA: + return true + default: + return false + } } -func (l *Lexer) readMultiLineString(pos position.Position) (tok token.Token, err error) { +func (l *Lexer) readMultiLineString(startRune parsedRune) (tok token.Token, err error) { tok.Keyword = keyword.STRING - tok.Position = pos + tok.Position = startRune.pos + + start := l.inputPosition var escaped bool for { - nextRune, _, err := l.readRune() - if err != nil { - return tok, err - } + nextRune := l.readRune() - switch nextRune { + switch nextRune.r { case runes.QUOTE: if escaped { - l.buffer.WriteRune(nextRune) escaped = false } else { - isMultiLineStringEnd, err := l.peekEquals([]byte(`""`), true, true) - if err != nil { - 
return tok, err - } + isMultiLineStringEnd := l.peekEquals("\"\"") if !isMultiLineStringEnd { - l.buffer.WriteRune(nextRune) escaped = false } else { - tok.Literal = make([]byte, l.buffer.Len()) - copy(tok.Literal, l.buffer.Bytes()) - l.buffer.Reset() - tok.Literal = l.trimStartEnd(tok.Literal, literal.LINETERMINATOR) + + end := l.inputPosition - 1 + l.swallowAmount(2) + + tok.Literal = l.trimStartEnd(l.input[start:end], literal.LINETERMINATOR) return tok, nil } } case runes.BACKSLASH: if escaped { - l.buffer.WriteRune(nextRune) escaped = false } else { escaped = true } default: - l.buffer.WriteRune(nextRune) escaped = false } } } -func (l *Lexer) readSingleLineString(pos position.Position) (tok token.Token, err error) { +func (l *Lexer) readSingleLineString(startRune parsedRune) (tok token.Token, err error) { tok.Keyword = keyword.STRING - tok.Position = pos + tok.Position = startRune.pos + + start := l.inputPosition var escaped bool for { - nextRune, _, err := l.readRune() - if err != nil { - return tok, err - } + nextRune := l.readRune() - switch nextRune { + switch nextRune.r { case runes.QUOTE: if escaped { - l.buffer.WriteRune(nextRune) escaped = false } else { - tok.Literal = make([]byte, l.buffer.Len()) - copy(tok.Literal, l.buffer.Bytes()) - l.buffer.Reset() + end := l.inputPosition - 1 + tok.Literal = l.input[start:end] return tok, nil } case runes.BACKSLASH: if escaped { - l.buffer.WriteRune(nextRune) escaped = false } else { escaped = true } default: - l.buffer.WriteRune(nextRune) escaped = false } } diff --git a/pkg/lexer/lexer.test b/pkg/lexer/lexer.test deleted file mode 100755 index 4ea5025486..0000000000 Binary files a/pkg/lexer/lexer.test and /dev/null differ diff --git a/pkg/lexer/lexer_test.go b/pkg/lexer/lexer_test.go index 604e2d10bd..9d02bad2b2 100644 --- a/pkg/lexer/lexer_test.go +++ b/pkg/lexer/lexer_test.go @@ -1,7 +1,6 @@ package lexer import ( - "bytes" "encoding/json" "fmt" "github.com/jensneuse/diffview" @@ -14,7 +13,6 @@ import ( . 
"github.com/onsi/gomega" "github.com/onsi/gomega/types" "github.com/sebdah/goldie" - "io" "io/ioutil" "testing" ) @@ -27,8 +25,7 @@ func TestLexer(t *testing.T) { func TestLexerRegressions(t *testing.T) { lexer := NewLexer() - reader := bytes.NewReader(introspectionQuery) - lexer.SetInput(reader) + lexer.SetInput(introspectionQuery) var total []token.Token for { @@ -40,8 +37,6 @@ func TestLexerRegressions(t *testing.T) { break } - tok.Description = string(tok.Literal) - total = append(total, tok) } @@ -63,27 +58,18 @@ func TestLexerRegressions(t *testing.T) { } var _ = Describe("Lexer.Read", func() { - It("should not panic if reader is nil", func() { - lexer := NewLexer() - f := func() { - _, err := lexer.Read() - Expect(err).To(HaveOccurred()) - } - - Expect(f).ShouldNot(Panic()) - }) It("should read correctly from reader when re-setting input", func() { lexer := NewLexer() - lexer.SetInput(bytes.NewReader([]byte("x"))) + lexer.SetInput("x") _, err := lexer.Read() Expect(err).NotTo(HaveOccurred()) - lexer.SetInput(bytes.NewReader([]byte("x"))) + lexer.SetInput("x") x, err := lexer.Read() Expect(err).NotTo(HaveOccurred()) Expect(x).To(Equal(token.Token{ Keyword: keyword.IDENT, - Literal: []byte("x"), + Literal: "x", Position: position.Position{ Line: 1, Char: 1, @@ -92,13 +78,13 @@ var _ = Describe("Lexer.Read", func() { }) It("should read eof multiple times correctly", func() { lexer := NewLexer() - lexer.SetInput(bytes.NewReader([]byte("x"))) + lexer.SetInput("x") x, err := lexer.Read() Expect(err).NotTo(HaveOccurred()) Expect(x).To(Equal(token.Token{ Keyword: keyword.IDENT, - Literal: []byte("x"), + Literal: "x", Position: position.Position{ Line: 1, Char: 1, @@ -109,18 +95,7 @@ var _ = Describe("Lexer.Read", func() { Expect(err).NotTo(HaveOccurred()) Expect(eof1).To(Equal(token.Token{ Keyword: keyword.EOF, - Literal: []byte("eof"), - Position: position.Position{ - Line: 1, - Char: 2, - }, - })) - - eof2, err := lexer.Read() - Expect(err).NotTo(HaveOccurred()) - 
Expect(eof2).To(Equal(token.Token{ - Keyword: keyword.EOF, - Literal: []byte("eof"), + Literal: "eof", Position: position.Position{ Line: 1, Char: 2, @@ -132,7 +107,7 @@ var _ = Describe("Lexer.Read", func() { var _ = Describe("Lexer.Read", func() { type Case struct { - in []byte + in string out token.Token expectErr types.GomegaMatcher } @@ -145,7 +120,7 @@ var _ = Describe("Lexer.Read", func() { DescribeTable("Read Single Token", func(c Case) { - lexer.SetInput(bytes.NewReader(c.in)) + lexer.SetInput(c.in) tok, err := lexer.Read() if c.expectErr != nil { Expect(err).To(c.expectErr) @@ -156,10 +131,10 @@ var _ = Describe("Lexer.Read", func() { }, Entry("should read integer", Case{ - in: []byte("1337"), + in: "1337", out: token.Token{ Keyword: keyword.INTEGER, - Literal: []byte("1337"), + Literal: "1337", Position: position.Position{ Line: 1, Char: 1, @@ -167,10 +142,10 @@ var _ = Describe("Lexer.Read", func() { }, }), Entry("should read integer with comma at the end", Case{ - in: []byte("1337,"), + in: "1337,", out: token.Token{ Keyword: keyword.INTEGER, - Literal: []byte("1337"), + Literal: "1337", Position: position.Position{ Line: 1, Char: 1, @@ -178,10 +153,10 @@ var _ = Describe("Lexer.Read", func() { }, }), Entry("should read float", Case{ - in: []byte("13.37"), + in: "13.37", out: token.Token{ Keyword: keyword.FLOAT, - Literal: []byte("13.37"), + Literal: "13.37", Position: position.Position{ Line: 1, Char: 1, @@ -189,8 +164,8 @@ var _ = Describe("Lexer.Read", func() { }, }), Entry("should fail reading incomplete float", Case{ - in: []byte("13."), - expectErr: Not(BeNil()), + in: "13.", + expectErr: HaveOccurred(), out: token.Token{ Position: position.Position{ Line: 1, @@ -199,10 +174,10 @@ var _ = Describe("Lexer.Read", func() { }, }), Entry("should read single line string", Case{ - in: []byte(`"foo bar"`), + in: `"foo bar"`, out: token.Token{ Keyword: keyword.STRING, - Literal: []byte(`foo bar`), + Literal: `foo bar`, Position: position.Position{ Line: 
1, Char: 1, @@ -210,10 +185,10 @@ var _ = Describe("Lexer.Read", func() { }, }), Entry("should read single line string with escaped quote", Case{ - in: []byte(`"foo bar \" baz"`), + in: "\"foo bar \\\" baz\"", out: token.Token{ Keyword: keyword.STRING, - Literal: []byte(`foo bar " baz`), + Literal: "foo bar \\\" baz", Position: position.Position{ Line: 1, Char: 1, @@ -221,10 +196,10 @@ var _ = Describe("Lexer.Read", func() { }, }), Entry("should read multi line string with escaped quote", Case{ - in: []byte(`"""foo bar \""" baz"""`), + in: "\"\"\"foo bar \\\"\\\"\\\" baz\"\"\"", out: token.Token{ Keyword: keyword.STRING, - Literal: []byte(`foo bar """ baz`), + Literal: "foo bar \\\"\\\"\\\" baz", Position: position.Position{ Line: 1, Char: 1, @@ -232,13 +207,13 @@ var _ = Describe("Lexer.Read", func() { }, }), Entry("should read multi single line string", Case{ - in: []byte(`""" + in: `""" foo -bar"""`), +bar"""`, out: token.Token{ Keyword: keyword.STRING, - Literal: []byte(`foo -bar`), + Literal: `foo +bar`, Position: position.Position{ Line: 1, Char: 1, @@ -246,12 +221,12 @@ bar`), }, }), Entry("should read multi single line string with correct whitespace trimming", Case{ - in: []byte(`""" + in: `""" foo -"""`), +"""`, out: token.Token{ Keyword: keyword.STRING, - Literal: []byte(`foo`), + Literal: `foo`, Position: position.Position{ Line: 1, Char: 1, @@ -259,7 +234,7 @@ foo }, }), Entry("should read pipe", Case{ - in: []byte("|"), + in: "|", out: token.Token{ Keyword: keyword.PIPE, Literal: literal.PIPE, @@ -270,17 +245,17 @@ foo }, }), Entry("should not read dot", Case{ - in: []byte("."), + in: ".", out: token.Token{ Position: position.Position{ Line: 1, Char: 1, }, }, - expectErr: Not(BeNil()), + expectErr: HaveOccurred(), }), Entry("should read spread (...)", Case{ - in: []byte("..."), + in: "...", out: token.Token{ Keyword: keyword.SPREAD, Literal: literal.SPREAD, @@ -291,10 +266,10 @@ foo }, }), Entry("should read $123", Case{ - in: []byte("$123"), + in: 
"$123", out: token.Token{ Keyword: keyword.VARIABLE, - Literal: []byte("123"), + Literal: "123", Position: position.Position{ Line: 1, Char: 1, @@ -302,10 +277,10 @@ foo }, }), Entry("should read $foo", Case{ - in: []byte("$foo"), + in: "$foo", out: token.Token{ Keyword: keyword.VARIABLE, - Literal: []byte("foo"), + Literal: "foo", Position: position.Position{ Line: 1, Char: 1, @@ -313,10 +288,10 @@ foo }, }), Entry("should read $_foo", Case{ - in: []byte("$_foo"), + in: "$_foo", out: token.Token{ Keyword: keyword.VARIABLE, - Literal: []byte("_foo"), + Literal: "_foo", Position: position.Position{ Line: 1, Char: 1, @@ -324,10 +299,10 @@ foo }, }), Entry("should read $123 ", Case{ - in: []byte("$123 "), + in: "$123 ", out: token.Token{ Keyword: keyword.VARIABLE, - Literal: []byte("123"), + Literal: "123", Position: position.Position{ Line: 1, Char: 1, @@ -335,10 +310,10 @@ foo }, }), Entry("should read $123\n", Case{ - in: []byte("$123\n"), + in: "$123\n", out: token.Token{ Keyword: keyword.VARIABLE, - Literal: []byte("123"), + Literal: "123", Position: position.Position{ Line: 1, Char: 1, @@ -346,10 +321,10 @@ foo }, }), Entry("should read @", Case{ - in: []byte("@"), + in: "@", out: token.Token{ Keyword: keyword.AT, - Literal: []byte("@"), + Literal: "@", Position: position.Position{ Line: 1, Char: 1, @@ -357,10 +332,10 @@ foo }, }), Entry("should read =", Case{ - in: []byte("="), + in: "=", out: token.Token{ Keyword: keyword.EQUALS, - Literal: []byte("="), + Literal: "=", Position: position.Position{ Line: 1, Char: 1, @@ -368,10 +343,10 @@ foo }, }), Entry("should read :", Case{ - in: []byte(":"), + in: ":", out: token.Token{ Keyword: keyword.COLON, - Literal: []byte(":"), + Literal: ":", Position: position.Position{ Line: 1, Char: 1, @@ -379,10 +354,10 @@ foo }, }), Entry("should read !", Case{ - in: []byte("!"), + in: "!", out: token.Token{ Keyword: keyword.BANG, - Literal: []byte("!"), + Literal: "!", Position: position.Position{ Line: 1, Char: 1, @@ -390,10 
+365,10 @@ foo }, }), Entry("should read (", Case{ - in: []byte("("), + in: "(", out: token.Token{ Keyword: keyword.BRACKETOPEN, - Literal: []byte("("), + Literal: "(", Position: position.Position{ Line: 1, Char: 1, @@ -401,10 +376,10 @@ foo }, }), Entry("should read )", Case{ - in: []byte(")"), + in: ")", out: token.Token{ Keyword: keyword.BRACKETCLOSE, - Literal: []byte(")"), + Literal: ")", Position: position.Position{ Line: 1, Char: 1, @@ -412,10 +387,10 @@ foo }, }), Entry("should read {", Case{ - in: []byte("{"), + in: "{", out: token.Token{ Keyword: keyword.CURLYBRACKETOPEN, - Literal: []byte("{"), + Literal: "{", Position: position.Position{ Line: 1, Char: 1, @@ -423,10 +398,10 @@ foo }, }), Entry("should read }", Case{ - in: []byte("}"), + in: "}", out: token.Token{ Keyword: keyword.CURLYBRACKETCLOSE, - Literal: []byte("}"), + Literal: "}", Position: position.Position{ Line: 1, Char: 1, @@ -434,10 +409,10 @@ foo }, }), Entry("should read [", Case{ - in: []byte("["), + in: "[", out: token.Token{ Keyword: keyword.SQUAREBRACKETOPEN, - Literal: []byte("["), + Literal: "[", Position: position.Position{ Line: 1, Char: 1, @@ -445,10 +420,10 @@ foo }, }), Entry("should read ]", Case{ - in: []byte("]"), + in: "]", out: token.Token{ Keyword: keyword.SQUAREBRACKETCLOSE, - Literal: []byte("]"), + Literal: "]", Position: position.Position{ Line: 1, Char: 1, @@ -456,10 +431,10 @@ foo }, }), Entry("should read &", Case{ - in: []byte("&"), + in: "&", out: token.Token{ Keyword: keyword.AND, - Literal: []byte("&"), + Literal: "&", Position: position.Position{ Line: 1, Char: 1, @@ -467,7 +442,7 @@ foo }, }), Entry("should read EOF", Case{ - in: []byte(""), + in: "", out: token.Token{ Keyword: keyword.EOF, Literal: literal.EOF, @@ -478,10 +453,10 @@ foo }, }), Entry("should read ident 'foo'", Case{ - in: []byte("foo"), + in: "foo", out: token.Token{ Keyword: keyword.IDENT, - Literal: []byte("foo"), + Literal: "foo", Position: position.Position{ Line: 1, Char: 1, @@ -489,10 
+464,10 @@ foo }, }), Entry("should read ident 'foo' from 'foo:'", Case{ - in: []byte("foo:"), + in: "foo:", out: token.Token{ Keyword: keyword.IDENT, - Literal: []byte("foo"), + Literal: "foo", Position: position.Position{ Line: 1, Char: 1, @@ -500,10 +475,10 @@ foo }, }), Entry("should read ident true", Case{ - in: []byte("true"), + in: "true", out: token.Token{ Keyword: keyword.TRUE, - Literal: []byte("true"), + Literal: "true", Position: position.Position{ Line: 1, Char: 1, @@ -511,10 +486,10 @@ foo }, }), Entry("should read ident false", Case{ - in: []byte("false"), + in: "false", out: token.Token{ Keyword: keyword.FALSE, - Literal: []byte("false"), + Literal: "false", Position: position.Position{ Line: 1, Char: 1, @@ -526,7 +501,7 @@ foo var _ = Describe("Lexer.Peek()", func() { type Case struct { - input []byte + input string expectErr types.GomegaMatcher expectKey types.GomegaMatcher expectNextToken types.GomegaMatcher @@ -540,7 +515,7 @@ var _ = Describe("Lexer.Peek()", func() { }) DescribeTable("Peek Tests", func(c Case) { - lexer.SetInput(bytes.NewReader(c.input)) + lexer.SetInput(c.input) key, err := lexer.Peek(true) if c.expectErr != nil { Expect(err).To(c.expectErr) @@ -558,13 +533,13 @@ var _ = Describe("Lexer.Peek()", func() { } }, Entry("should peek EOF", Case{ - input: []byte(""), + input: "", expectKey: Equal(keyword.EOF), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.EOF, - Literal: []byte("eof"), + Literal: "eof", Position: position.Position{ Line: 1, Char: 1, @@ -572,13 +547,13 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek query", Case{ - input: []byte("query"), + input: "query ", expectKey: Equal(keyword.QUERY), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.QUERY, - Literal: []byte("query"), + Literal: "query", Position: position.Position{ Line: 1, Char: 1, @@ -586,13 +561,13 @@ var _ = 
Describe("Lexer.Peek()", func() { }), }), Entry("should peek mutation", Case{ - input: []byte("mutation"), + input: "mutation", expectKey: Equal(keyword.MUTATION), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.MUTATION, - Literal: []byte("mutation"), + Literal: "mutation", Position: position.Position{ Line: 1, Char: 1, @@ -600,13 +575,13 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek subscription", Case{ - input: []byte("subscription"), + input: "subscription", expectKey: Equal(keyword.SUBSCRIPTION), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.SUBSCRIPTION, - Literal: []byte("subscription"), + Literal: "subscription", Position: position.Position{ Line: 1, Char: 1, @@ -614,13 +589,13 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek fragment", Case{ - input: []byte("fragment"), + input: "fragment", expectKey: Equal(keyword.FRAGMENT), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.FRAGMENT, - Literal: []byte("fragment"), + Literal: "fragment", Position: position.Position{ Line: 1, Char: 1, @@ -628,13 +603,13 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek spread (...)", Case{ - input: []byte("..."), + input: "...", expectKey: Equal(keyword.SPREAD), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.SPREAD, - Literal: []byte("..."), + Literal: "...", Position: position.Position{ Line: 1, Char: 1, @@ -642,13 +617,13 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek 'implements'", Case{ - input: []byte("implements"), + input: "implements", expectKey: Equal(keyword.IMPLEMENTS), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.IMPLEMENTS, - Literal: []byte("implements"), + Literal: "implements", Position: 
position.Position{ Line: 1, Char: 1, @@ -656,13 +631,13 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek 'schema'", Case{ - input: []byte("schema"), + input: "schema", expectKey: Equal(keyword.SCHEMA), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.SCHEMA, - Literal: []byte("schema"), + Literal: "schema", Position: position.Position{ Line: 1, Char: 1, @@ -670,13 +645,13 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek 'scalar'", Case{ - input: []byte("scalar"), + input: "scalar", expectKey: Equal(keyword.SCALAR), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.SCALAR, - Literal: []byte("scalar"), + Literal: "scalar", Position: position.Position{ Line: 1, Char: 1, @@ -684,13 +659,13 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek 'type'", Case{ - input: []byte("type"), + input: "type", expectKey: Equal(keyword.TYPE), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.TYPE, - Literal: []byte("type"), + Literal: "type", Position: position.Position{ Line: 1, Char: 1, @@ -698,13 +673,13 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek 'interface'", Case{ - input: []byte("interface"), + input: "interface", expectKey: Equal(keyword.INTERFACE), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.INTERFACE, - Literal: []byte("interface"), + Literal: "interface", Position: position.Position{ Line: 1, Char: 1, @@ -712,13 +687,13 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek 'union'", Case{ - input: []byte("union"), + input: "union", expectKey: Equal(keyword.UNION), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.UNION, - Literal: []byte("union"), + Literal: "union", Position: position.Position{ Line: 
1, Char: 1, @@ -726,13 +701,13 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek 'enum'", Case{ - input: []byte("enum"), + input: "enum", expectKey: Equal(keyword.ENUM), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.ENUM, - Literal: []byte("enum"), + Literal: "enum", Position: position.Position{ Line: 1, Char: 1, @@ -740,13 +715,13 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek 'input'", Case{ - input: []byte("input"), + input: "input", expectKey: Equal(keyword.INPUT), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.INPUT, - Literal: []byte("input"), + Literal: "input", Position: position.Position{ Line: 1, Char: 1, @@ -754,13 +729,13 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek 'directive'", Case{ - input: []byte("directive"), + input: "directive", expectKey: Equal(keyword.DIRECTIVE), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.DIRECTIVE, - Literal: []byte("directive"), + Literal: "directive", Position: position.Position{ Line: 1, Char: 1, @@ -768,13 +743,13 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek 'inputValue' as ident", Case{ - input: []byte("inputValue"), + input: "inputValue", expectKey: Equal(keyword.IDENT), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.IDENT, - Literal: []byte("inputValue"), + Literal: "inputValue", Position: position.Position{ Line: 1, Char: 1, @@ -782,13 +757,13 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek ON", Case{ - input: []byte("on"), + input: "on", expectKey: Equal(keyword.ON), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.ON, - Literal: []byte("on"), + Literal: "on", Position: position.Position{ Line: 1, Char: 1, @@ -796,13 
+771,13 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek ON with whitespace behind", Case{ - input: []byte("on "), + input: "on ", expectKey: Equal(keyword.ON), expectErr: BeNil(), expectNextTokenErr: BeNil(), expectNextToken: Equal(token.Token{ Keyword: keyword.ON, - Literal: []byte("on"), + Literal: "on", Position: position.Position{ Line: 1, Char: 1, @@ -810,11 +785,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek ignore comma", Case{ - input: []byte(","), + input: ",", expectKey: Equal(keyword.EOF), expectNextToken: Equal(token.Token{ Keyword: keyword.EOF, - Literal: []byte("eof"), + Literal: "eof", Position: position.Position{ Line: 1, Char: 2, @@ -822,11 +797,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek '$color:' as variable color", Case{ - input: []byte("$color:"), + input: "$color:", expectKey: Equal(keyword.VARIABLE), expectNextToken: Equal(token.Token{ Keyword: keyword.VARIABLE, - Literal: []byte("color"), + Literal: "color", Position: position.Position{ Line: 1, Char: 1, @@ -834,16 +809,16 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek '$ color:' as invalid", Case{ - input: []byte("$ color:"), + input: "$ color:", expectErr: BeNil(), expectNextTokenErr: HaveOccurred(), }), Entry("should peek ignore space", Case{ - input: []byte(" "), + input: " ", expectKey: Equal(keyword.EOF), expectNextToken: Equal(token.Token{ Keyword: keyword.EOF, - Literal: []byte("eof"), + Literal: "eof", Position: position.Position{ Line: 1, Char: 2, @@ -851,11 +826,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek ignore tab", Case{ - input: []byte(" "), + input: " ", expectKey: Equal(keyword.EOF), expectNextToken: Equal(token.Token{ Keyword: keyword.EOF, - Literal: []byte("eof"), + Literal: "eof", Position: position.Position{ Line: 1, Char: 2, @@ -863,11 +838,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek ignore line 
terminator", Case{ - input: []byte("\n"), + input: "\n", expectKey: Equal(keyword.EOF), expectNextToken: Equal(token.Token{ Keyword: keyword.EOF, - Literal: []byte("eof"), + Literal: "eof", Position: position.Position{ Line: 2, Char: 1, @@ -875,11 +850,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek single line string", Case{ - input: []byte(`"foo"`), + input: `"foo"`, expectKey: Equal(keyword.STRING), expectNextToken: Equal(token.Token{ Keyword: keyword.STRING, - Literal: []byte("foo"), + Literal: "foo", Position: position.Position{ Line: 1, Char: 1, @@ -887,11 +862,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek multi line string", Case{ - input: []byte(`"""foo"""`), + input: `"""foo"""`, expectKey: Equal(keyword.STRING), expectNextToken: Equal(token.Token{ Keyword: keyword.STRING, - Literal: []byte("foo"), + Literal: "foo", Position: position.Position{ Line: 1, Char: 1, @@ -899,11 +874,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek variable", Case{ - input: []byte("$foo"), + input: "$foo", expectKey: Equal(keyword.VARIABLE), expectNextToken: Equal(token.Token{ Keyword: keyword.VARIABLE, - Literal: []byte("foo"), + Literal: "foo", Position: position.Position{ Line: 1, Char: 1, @@ -911,15 +886,15 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should throw error when reading invalid variable", Case{ - input: []byte("$ foo"), + input: "$ foo", expectNextTokenErr: HaveOccurred(), }), Entry("should peek pipe", Case{ - input: []byte("|"), + input: "|", expectKey: Equal(keyword.PIPE), expectNextToken: Equal(token.Token{ Keyword: keyword.PIPE, - Literal: []byte("|"), + Literal: "|", Position: position.Position{ Line: 1, Char: 1, @@ -927,11 +902,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek equals", Case{ - input: []byte("="), + input: "=", expectKey: Equal(keyword.EQUALS), expectNextToken: Equal(token.Token{ Keyword: keyword.EQUALS, - Literal: 
[]byte("="), + Literal: "=", Position: position.Position{ Line: 1, Char: 1, @@ -939,11 +914,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek at", Case{ - input: []byte("@"), + input: "@", expectKey: Equal(keyword.AT), expectNextToken: Equal(token.Token{ Keyword: keyword.AT, - Literal: []byte("@"), + Literal: "@", Position: position.Position{ Line: 1, Char: 1, @@ -951,11 +926,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek null", Case{ - input: []byte("null"), + input: "null", expectKey: Equal(keyword.NULL), expectNextToken: Equal(token.Token{ Keyword: keyword.NULL, - Literal: []byte("null"), + Literal: "null", Position: position.Position{ Line: 1, Char: 1, @@ -963,11 +938,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek colon", Case{ - input: []byte(":"), + input: ":", expectKey: Equal(keyword.COLON), expectNextToken: Equal(token.Token{ Keyword: keyword.COLON, - Literal: []byte(":"), + Literal: ":", Position: position.Position{ Line: 1, Char: 1, @@ -975,11 +950,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek bang", Case{ - input: []byte("!"), + input: "!", expectKey: Equal(keyword.BANG), expectNextToken: Equal(token.Token{ Keyword: keyword.BANG, - Literal: []byte("!"), + Literal: "!", Position: position.Position{ Line: 1, Char: 1, @@ -987,11 +962,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek bracket open", Case{ - input: []byte("("), + input: "(", expectKey: Equal(keyword.BRACKETOPEN), expectNextToken: Equal(token.Token{ Keyword: keyword.BRACKETOPEN, - Literal: []byte("("), + Literal: "(", Position: position.Position{ Line: 1, Char: 1, @@ -999,11 +974,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek bracket close", Case{ - input: []byte(")"), + input: ")", expectKey: Equal(keyword.BRACKETCLOSE), expectNextToken: Equal(token.Token{ Keyword: keyword.BRACKETCLOSE, - Literal: []byte(")"), + Literal: ")", 
Position: position.Position{ Line: 1, Char: 1, @@ -1011,11 +986,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek squared bracket open", Case{ - input: []byte("["), + input: "[", expectKey: Equal(keyword.SQUAREBRACKETOPEN), expectNextToken: Equal(token.Token{ Keyword: keyword.SQUAREBRACKETOPEN, - Literal: []byte("["), + Literal: "[", Position: position.Position{ Line: 1, Char: 1, @@ -1023,11 +998,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek squared bracket close", Case{ - input: []byte("]"), + input: "]", expectKey: Equal(keyword.SQUAREBRACKETCLOSE), expectNextToken: Equal(token.Token{ Keyword: keyword.SQUAREBRACKETCLOSE, - Literal: []byte("]"), + Literal: "]", Position: position.Position{ Line: 1, Char: 1, @@ -1035,11 +1010,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek curly bracket open", Case{ - input: []byte("{"), + input: "{", expectKey: Equal(keyword.CURLYBRACKETOPEN), expectNextToken: Equal(token.Token{ Keyword: keyword.CURLYBRACKETOPEN, - Literal: []byte("{"), + Literal: "{", Position: position.Position{ Line: 1, Char: 1, @@ -1047,11 +1022,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek curly bracket close", Case{ - input: []byte("}"), + input: "}", expectKey: Equal(keyword.CURLYBRACKETCLOSE), expectNextToken: Equal(token.Token{ Keyword: keyword.CURLYBRACKETCLOSE, - Literal: []byte("}"), + Literal: "}", Position: position.Position{ Line: 1, Char: 1, @@ -1059,11 +1034,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek and", Case{ - input: []byte("&"), + input: "&", expectKey: Equal(keyword.AND), expectNextToken: Equal(token.Token{ Keyword: keyword.AND, - Literal: []byte("&"), + Literal: "&", Position: position.Position{ Line: 1, Char: 1, @@ -1071,11 +1046,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek ident", Case{ - input: []byte("foo"), + input: "foo", expectKey: Equal(keyword.IDENT), 
expectNextToken: Equal(token.Token{ Keyword: keyword.IDENT, - Literal: []byte("foo"), + Literal: "foo", Position: position.Position{ Line: 1, Char: 1, @@ -1083,11 +1058,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek integer", Case{ - input: []byte("1337"), + input: "1337", expectKey: Equal(keyword.INTEGER), expectNextToken: Equal(token.Token{ Keyword: keyword.INTEGER, - Literal: []byte("1337"), + Literal: "1337", Position: position.Position{ Line: 1, Char: 1, @@ -1095,11 +1070,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek float", Case{ - input: []byte("13.37"), + input: "13.37", expectKey: Equal(keyword.FLOAT), expectNextToken: Equal(token.Token{ Keyword: keyword.FLOAT, - Literal: []byte("13.37"), + Literal: "13.37", Position: position.Position{ Line: 1, Char: 1, @@ -1107,11 +1082,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek true", Case{ - input: []byte("true "), + input: "true ", expectKey: Equal(keyword.TRUE), expectNextToken: Equal(token.Token{ Keyword: keyword.TRUE, - Literal: []byte("true"), + Literal: "true", Position: position.Position{ Line: 1, Char: 1, @@ -1119,11 +1094,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek true with space in front", Case{ - input: []byte(" true "), + input: " true ", expectKey: Equal(keyword.TRUE), expectNextToken: Equal(token.Token{ Keyword: keyword.TRUE, - Literal: []byte("true"), + Literal: "true", Position: position.Position{ Line: 1, Char: 2, @@ -1131,11 +1106,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek true with multiple spaces in front", Case{ - input: []byte(" true"), + input: " true", expectKey: Equal(keyword.TRUE), expectNextToken: Equal(token.Token{ Keyword: keyword.TRUE, - Literal: []byte("true"), + Literal: "true", Position: position.Position{ Line: 1, Char: 4, @@ -1143,11 +1118,11 @@ var _ = Describe("Lexer.Peek()", func() { }), }), Entry("should peek false", Case{ - 
input: []byte("false "), + input: "false ", expectKey: Equal(keyword.FALSE), expectNextToken: Equal(token.Token{ Keyword: keyword.FALSE, - Literal: []byte("false"), + Literal: "false", Position: position.Position{ Line: 1, Char: 1, @@ -1159,9 +1134,8 @@ var _ = Describe("Lexer.Peek()", func() { var _ = Describe("Lexer.peekIsFloat", func() { type Case struct { - in []byte - isFloat bool - expectErr types.GomegaMatcher + in string + isFloat bool } var lexer *Lexer @@ -1172,59 +1146,93 @@ var _ = Describe("Lexer.peekIsFloat", func() { DescribeTable("peekIsFloat cases", func(c Case) { - lexer.SetInput(bytes.NewReader(c.in)) - actualIsFloat, err := lexer.peekIsFloat() + lexer.SetInput(c.in) + actualIsFloat := lexer.peekIsFloat() Expect(actualIsFloat).To(Equal(c.isFloat)) - if c.expectErr != nil { - Expect(err).To(c.expectErr) - } }, Entry("should identify 13.37 as float", Case{ - in: []byte("13.37"), - expectErr: BeNil(), - isFloat: true, + in: "13.37", + isFloat: true, }), Entry("should identify 13.37 as float (with space suffix)", Case{ - in: []byte("13.37 "), - expectErr: BeNil(), - isFloat: true, + in: "13.37 ", + isFloat: true, }), Entry("should identify 13.37 as float (with tab suffix)", Case{ - in: []byte("13.37 "), - expectErr: BeNil(), - isFloat: true, + in: "13.37 ", + isFloat: true, }), Entry("should identify 13.37 as float (with line terminator suffix)", Case{ - in: []byte("13.37\n"), - expectErr: BeNil(), - isFloat: true, + in: "13.37\n", + isFloat: true, }), Entry("should identify 13.37 as float (with comma suffix)", Case{ - in: []byte("13.37,"), - expectErr: BeNil(), - isFloat: true, + in: "13.37,", + isFloat: true, }), Entry("should identify 1337 as non float", Case{ - in: []byte("1337"), - expectErr: BeNil(), - isFloat: false, + in: "1337", + isFloat: false, }), ) }) func BenchmarkPeekIsFloat(b *testing.B) { - input := bytes.NewReader([]byte("13373737.37")) + input := "13373737.37" lexer := NewLexer() b.ReportAllocs() b.ResetTimer() for i := 0; i < b.N; 
i++ { - input.Seek(0, io.SeekStart) lexer.SetInput(input) lexer.peekIsFloat() } } +var _ = Describe("Lexer.readMultiLineString", func() { + lexer := NewLexer() + lexer.SetInput("\"\"\"foo\"\"\" x") + + It("should read foo", func() { + foo, err := lexer.Read() + Expect(err).To(BeNil()) + Expect(foo).To(Equal(token.Token{ + Literal: "foo", + Keyword: keyword.STRING, + Position: position.Position{ + Line: 1, + Char: 1, + }, + })) + }) + + It("should read x", func() { + foo, err := lexer.Read() + Expect(err).To(BeNil()) + Expect(foo).To(Equal(token.Token{ + Literal: "x", + Keyword: keyword.IDENT, + Position: position.Position{ + Line: 1, + Char: 11, + }, + })) + }) + + It("should read eof", func() { + foo, err := lexer.Read() + Expect(err).To(BeNil()) + Expect(foo).To(Equal(token.Token{ + Literal: "eof", + Keyword: keyword.EOF, + Position: position.Position{ + Line: 1, + Char: 12, + }, + })) + }) +}) + var _ = Describe("Lexer.Read", func() { type Case struct { - in []byte + in string out []token.Token } @@ -1236,7 +1244,7 @@ var _ = Describe("Lexer.Read", func() { DescribeTable("Read Multiple Tokens", func(c Case) { - lexer.SetInput(bytes.NewReader(c.in)) + lexer.SetInput(c.in) for i := 0; i < len(c.out); i++ { peeked, _ := lexer.Peek(true) Expect(peeked).To(Equal(c.out[i].Keyword), fmt.Sprintf("Token: %d", i+1)) @@ -1245,125 +1253,126 @@ var _ = Describe("Lexer.Read", func() { Expect(tok).To(Equal(c.out[i])) } - }, Entry("should read ident followed by colon", Case{ - in: []byte("foo:"), - out: []token.Token{ - { - Keyword: keyword.IDENT, - Literal: []byte("foo"), - Position: position.Position{ - Line: 1, - Char: 1, + }, + Entry("should read ident followed by colon", Case{ + in: "foo:", + out: []token.Token{ + { + Keyword: keyword.IDENT, + Literal: "foo", + Position: position.Position{ + Line: 1, + Char: 1, + }, }, - }, - { - Keyword: keyword.COLON, - Literal: []byte(":"), - Position: position.Position{ - Line: 1, - Char: 4, + { + Keyword: keyword.COLON, + Literal: 
":", + Position: position.Position{ + Line: 1, + Char: 4, + }, }, }, - }, - }), + }), Entry("should read complex nested structure", Case{ - in: []byte(`Goland { + in: `Goland { ... on GoWater { ... on GoAir { go } } } - `), + `, out: []token.Token{ { Keyword: keyword.IDENT, - Literal: []byte("Goland"), + Literal: "Goland", Position: position.Position{1, 1}, }, { Keyword: keyword.CURLYBRACKETOPEN, - Literal: []byte("{"), + Literal: "{", Position: position.Position{1, 8}, }, { Keyword: keyword.SPREAD, - Literal: []byte("..."), + Literal: "...", Position: position.Position{2, 6}, }, { Keyword: keyword.ON, - Literal: []byte("on"), + Literal: "on", Position: position.Position{2, 10}, }, { Keyword: keyword.IDENT, - Literal: []byte("GoWater"), + Literal: "GoWater", Position: position.Position{2, 13}, }, { Keyword: keyword.CURLYBRACKETOPEN, - Literal: []byte("{"), + Literal: "{", Position: position.Position{2, 21}, }, { Keyword: keyword.SPREAD, - Literal: []byte("..."), + Literal: "...", Position: position.Position{3, 7}, }, { Keyword: keyword.ON, - Literal: []byte("on"), + Literal: "on", Position: position.Position{3, 11}, }, { Keyword: keyword.IDENT, - Literal: []byte("GoAir"), + Literal: "GoAir", Position: position.Position{3, 14}, }, { Keyword: keyword.CURLYBRACKETOPEN, - Literal: []byte("{"), + Literal: "{", Position: position.Position{3, 20}, }, { Keyword: keyword.IDENT, - Literal: []byte("go"), + Literal: "go", Position: position.Position{4, 8}, }, { Keyword: keyword.CURLYBRACKETCLOSE, - Literal: []byte("}"), + Literal: "}", Position: position.Position{5, 7}, }, { Keyword: keyword.CURLYBRACKETCLOSE, - Literal: []byte("}"), + Literal: "}", Position: position.Position{6, 6}, }, { Keyword: keyword.CURLYBRACKETCLOSE, - Literal: []byte("}"), + Literal: "}", Position: position.Position{7, 5}, }, { Keyword: keyword.EOF, - Literal: []byte("eof"), + Literal: "eof", Position: position.Position{8, 5}, }, }, }), Entry("should read multiple keywords", Case{ - in: []byte(`1337 
1338 1339 "foo" "bar" """foo bar""" """foo + in: `1337 1338 1339 "foo" "bar" """foo bar""" """foo bar""" """foo bar baz """ -13.37`), +13.37`, out: []token.Token{ { Keyword: keyword.INTEGER, - Literal: []byte("1337"), + Literal: "1337", Position: position.Position{ Line: 1, Char: 1, @@ -1371,7 +1380,7 @@ baz }, { Keyword: keyword.INTEGER, - Literal: []byte("1338"), + Literal: "1338", Position: position.Position{ Line: 1, Char: 6, @@ -1379,7 +1388,7 @@ baz }, { Keyword: keyword.INTEGER, - Literal: []byte("1339"), + Literal: "1339", Position: position.Position{ Line: 1, Char: 11, @@ -1387,7 +1396,7 @@ baz }, { Keyword: keyword.STRING, - Literal: []byte(`foo`), + Literal: `foo`, Position: position.Position{ Line: 1, Char: 16, @@ -1395,7 +1404,7 @@ baz }, { Keyword: keyword.STRING, - Literal: []byte(`bar`), + Literal: `bar`, Position: position.Position{ Line: 1, Char: 22, @@ -1403,7 +1412,7 @@ baz }, { Keyword: keyword.STRING, - Literal: []byte(`foo bar`), + Literal: `foo bar`, Position: position.Position{ Line: 1, Char: 28, @@ -1411,8 +1420,8 @@ baz }, { Keyword: keyword.STRING, - Literal: []byte(`foo -bar`), + Literal: `foo +bar`, Position: position.Position{ Line: 1, Char: 42, @@ -1420,9 +1429,9 @@ bar`), }, { Keyword: keyword.STRING, - Literal: []byte(`foo + Literal: `foo bar -baz`), +baz`, Position: position.Position{ Line: 2, Char: 8, @@ -1430,7 +1439,7 @@ baz`), }, { Keyword: keyword.FLOAT, - Literal: []byte("13.37"), + Literal: "13.37", Position: position.Position{ Line: 6, Char: 1, @@ -1439,12 +1448,12 @@ baz`), }, }), Entry("should read the introspection query", Case{ - in: []byte(`query IntrospectionQuery { - __schema {`), + in: `query IntrospectionQuery { + __schema {`, out: []token.Token{ { Keyword: keyword.QUERY, - Literal: []byte("query"), + Literal: "query", Position: position.Position{ Line: 1, Char: 1, @@ -1452,7 +1461,7 @@ baz`), }, { Keyword: keyword.IDENT, - Literal: []byte("IntrospectionQuery"), + Literal: "IntrospectionQuery", Position: 
position.Position{ Line: 1, Char: 7, @@ -1468,7 +1477,7 @@ baz`), }, { Keyword: keyword.IDENT, - Literal: []byte("__schema"), + Literal: "__schema", Position: position.Position{ Line: 2, Char: 3, @@ -1485,11 +1494,11 @@ baz`), }, }), Entry("should read '1,2,3' as three integers", Case{ - in: []byte("1,2,3"), + in: "1,2,3", out: []token.Token{ { Keyword: keyword.INTEGER, - Literal: []byte("1"), + Literal: "1", Position: position.Position{ Line: 1, Char: 1, @@ -1497,7 +1506,7 @@ baz`), }, { Keyword: keyword.INTEGER, - Literal: []byte("2"), + Literal: "2", Position: position.Position{ Line: 1, Char: 3, @@ -1505,7 +1514,7 @@ baz`), }, { Keyword: keyword.INTEGER, - Literal: []byte("3"), + Literal: "3", Position: position.Position{ Line: 1, Char: 5, @@ -1519,21 +1528,18 @@ baz`), func BenchmarkLexer(b *testing.B) { lexer := NewLexer() - reader := bytes.NewReader(introspectionQuery) b.ReportAllocs() b.ResetTimer() for i := 0; i < b.N; i++ { + b.StopTimer() + lexer.SetInput(introspectionQuery) + b.StartTimer() + var tok token.Token var key keyword.Keyword - - _, err := reader.Seek(0, io.SeekStart) - if err != nil { - b.Fatal(err) - } - - lexer.SetInput(reader) + var err error for err == nil && tok.Keyword != keyword.EOF && key != keyword.EOF { key, err = lexer.Peek(true) @@ -1549,7 +1555,7 @@ func BenchmarkLexer(b *testing.B) { } } -var introspectionQuery = []byte(`query IntrospectionQuery { +var introspectionQuery = `query IntrospectionQuery { __schema { queryType { name @@ -1647,4 +1653,4 @@ fragment TypeRef on __Type { } } } -}`) +}` diff --git a/pkg/lexer/memprofile.out b/pkg/lexer/memprofile.out deleted file mode 100644 index 982463e78b..0000000000 Binary files a/pkg/lexer/memprofile.out and /dev/null differ diff --git a/pkg/lexer/profile.out b/pkg/lexer/profile.out deleted file mode 100644 index f53487f3d9..0000000000 Binary files a/pkg/lexer/profile.out and /dev/null differ diff --git a/pkg/lexing/literal/literal.go b/pkg/lexing/literal/literal.go index 
ab26930b3b..06a9f1ba37 100644 --- a/pkg/lexing/literal/literal.go +++ b/pkg/lexing/literal/literal.go @@ -3,62 +3,62 @@ package literal import "bytes" var ( - COLON = []byte(":") - BANG = []byte("!") - LINETERMINATOR = []byte("\n") - TAB = []byte(" ") - SPACE = []byte(" ") - QUOTE = []byte(`"`) - COMMA = []byte(",") - AT = []byte("@") - DOLLAR = []byte("$") - DOT = []byte(".") - SPREAD = []byte("...") - PIPE = []byte("|") - SLASH = []byte("/") - BACKSLASH = []byte("\\") - EQUALS = []byte("=") - NEGATIVESIGN = []byte("-") - AND = []byte("&") + COLON = ":" + BANG = "!" + LINETERMINATOR = "\n" + TAB = " " + SPACE = " " + QUOTE = `"` + COMMA = "," + AT = "@" + DOLLAR = "$" + DOT = "." + SPREAD = "..." + PIPE = "|" + SLASH = "/" + BACKSLASH = "\\" + EQUALS = "=" + NEGATIVESIGN = "-" + AND = "&" - BRACKETOPEN = []byte("(") - BRACKETCLOSE = []byte(")") - SQUAREBRACKETOPEN = []byte("[") - SQUAREBRACKETCLOSE = []byte("]") - CURLYBRACKETOPEN = []byte("{") - CURLYBRACKETCLOSE = []byte("}") + BRACKETOPEN = "(" + BRACKETCLOSE = ")" + SQUAREBRACKETOPEN = "[" + SQUAREBRACKETCLOSE = "]" + CURLYBRACKETOPEN = "{" + CURLYBRACKETCLOSE = "}" - GOBOOL = []byte("bool") - GOINT32 = []byte("int32") - GOFLOAT32 = []byte("float32") - GOSTRING = []byte("string") - GONIL = []byte("nil") + GOBOOL = "bool" + GOINT32 = "int32" + GOFLOAT32 = "float32" + GOSTRING = "string" + GONIL = "nil" - EOF = []byte("eof") - ID = []byte("ID") - BOOLEAN = []byte("Boolean") - STRING = []byte("String") - INT = []byte("Int") - FLOAT = []byte("Float") - TYPE = []byte("type") - GRAPHQLTYPE = []byte("graphqlType") - INTERFACE = []byte("interface") - INPUT = []byte("input") - SCHEMA = []byte("schema") - SCALAR = []byte("scalar") - UNION = []byte("union") - ENUM = []byte("enum") - DIRECTIVE = []byte("directive") - QUERY = []byte("query") - MUTATION = []byte("mutation") - SUBSCRIPTION = []byte("subscription") - IMPLEMENTS = []byte("implements") - ON = []byte("on") - FRAGMENT = []byte("fragment") - NULL = []byte("null") 
+ EOF = "eof" + ID = "ID" + BOOLEAN = "Boolean" + STRING = "String" + INT = "Int" + FLOAT = "Float" + TYPE = "type" + GRAPHQLTYPE = "graphqlType" + INTERFACE = "interface" + INPUT = "input" + SCHEMA = "schema" + SCALAR = "scalar" + UNION = "union" + ENUM = "enum" + DIRECTIVE = "directive" + QUERY = "query" + MUTATION = "mutation" + SUBSCRIPTION = "subscription" + IMPLEMENTS = "implements" + ON = "on" + FRAGMENT = "fragment" + NULL = "null" - TRUE = []byte("true") - FALSE = []byte("false") + TRUE = "true" + FALSE = "false" ) type Literal []byte diff --git a/pkg/lexing/token/token.go b/pkg/lexing/token/token.go index e954781695..029367b5c5 100644 --- a/pkg/lexing/token/token.go +++ b/pkg/lexing/token/token.go @@ -2,7 +2,6 @@ package token import ( "fmt" - "github.com/jensneuse/graphql-go-tools/pkg/document" "github.com/jensneuse/graphql-go-tools/pkg/lexing/keyword" "github.com/jensneuse/graphql-go-tools/pkg/lexing/literal" "github.com/jensneuse/graphql-go-tools/pkg/lexing/position" @@ -10,7 +9,7 @@ import ( type Token struct { Keyword keyword.Keyword - Literal document.ByteSlice + Literal string Position position.Position Description string } diff --git a/pkg/parser/arguments_parser.go b/pkg/parser/arguments_parser.go index efd831d7dc..4bd2628439 100644 --- a/pkg/parser/arguments_parser.go +++ b/pkg/parser/arguments_parser.go @@ -22,7 +22,7 @@ func (p *Parser) parseArguments() (arguments document.Arguments, err error) { return } - var valueName []byte + var valueName string for { key, err = p.l.Peek(true) @@ -64,9 +64,11 @@ func (p *Parser) parseArguments() (arguments document.Arguments, err error) { return nil, err } - arguments = append(arguments, document.Argument{ + argument := document.Argument{ Name: valueName, Value: value, - }) + } + + arguments = append(arguments, argument) } } diff --git a/pkg/parser/arguments_parser_test.go b/pkg/parser/arguments_parser_test.go index b4526129c7..c4a30994bb 100644 --- a/pkg/parser/arguments_parser_test.go +++ 
b/pkg/parser/arguments_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" @@ -28,9 +27,9 @@ func TestArgumentsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.Arguments{ document.Argument{ - Name: []byte("name"), + Name: "name", Value: document.StringValue{ - Val: []byte("Gophus"), + Val: "Gophus", }, }, }), @@ -41,14 +40,14 @@ func TestArgumentsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.Arguments{ document.Argument{ - Name: []byte("fooBars"), + Name: "fooBars", Value: document.ListValue{ Values: []document.Value{ document.StringValue{ - Val: []byte("foo"), + Val: "foo", }, document.StringValue{ - Val: []byte("bar"), + Val: "bar", }, }, }, @@ -61,7 +60,7 @@ func TestArgumentsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.Arguments{ document.Argument{ - Name: []byte("integers"), + Name: "integers", Value: document.ListValue{ Values: []document.Value{ document.IntValue{ @@ -84,15 +83,15 @@ func TestArgumentsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.Arguments{ document.Argument{ - Name: []byte("name"), + Name: "name", Value: document.StringValue{ - Val: []byte("Gophus"), + Val: "Gophus", }, }, document.Argument{ - Name: []byte("surname"), + Name: "surname", Value: document.StringValue{ - Val: []byte("Gophersson"), + Val: "Gophersson", }, }, }), @@ -122,9 +121,8 @@ func TestArgumentsParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseArguments() Expect(err).To(test.expectErr) diff --git a/pkg/parser/argumentsdefinition_parser_test.go b/pkg/parser/argumentsdefinition_parser_test.go index c2de72e00c..5f90c1dd47 100644 --- a/pkg/parser/argumentsdefinition_parser_test.go +++ 
b/pkg/parser/argumentsdefinition_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" @@ -28,9 +27,9 @@ func TestArgumentsDefinitionParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.ArgumentsDefinition{ document.InputValueDefinition{ - Name: []byte("inputValue"), + Name: "inputValue", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, }), @@ -47,15 +46,15 @@ func TestArgumentsDefinitionParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.ArgumentsDefinition{ document.InputValueDefinition{ - Name: []byte("inputValue"), + Name: "inputValue", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, document.InputValueDefinition{ - Name: []byte("outputValue"), + Name: "outputValue", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }), @@ -78,9 +77,9 @@ func TestArgumentsDefinitionParser(t *testing.T) { expectErr: Not(BeNil()), expectValues: Equal(document.ArgumentsDefinition(document.ArgumentsDefinition{ document.InputValueDefinition{ - Name: []byte("inputValue"), + Name: "inputValue", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, })), @@ -92,9 +91,8 @@ func TestArgumentsDefinitionParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseArgumentsDefinition() Expect(err).To(test.expectErr) diff --git a/pkg/parser/bool_value_parser_test.go b/pkg/parser/bool_value_parser_test.go index 5fa9506a23..98ad4a7a24 100644 --- a/pkg/parser/bool_value_parser_test.go +++ b/pkg/parser/bool_value_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" @@ -45,9 +44,8 @@ func TestBoolValueParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parsePeekedBoolValue() Expect(err).To(test.expectErr) diff --git a/pkg/parser/defaultvalue_parser_test.go b/pkg/parser/defaultvalue_parser_test.go index 761672db3a..6a06c6e2a8 100644 --- a/pkg/parser/defaultvalue_parser_test.go +++ b/pkg/parser/defaultvalue_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" @@ -49,9 +48,8 @@ func TestDefaultValueParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseDefaultValue() Expect(err).To(test.expectErr) diff --git a/pkg/parser/directivedefinition_parser_test.go b/pkg/parser/directivedefinition_parser_test.go index c5095982f5..c8c97b507e 100644 --- a/pkg/parser/directivedefinition_parser_test.go +++ b/pkg/parser/directivedefinition_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" @@ -27,7 +26,7 @@ func TestDirectiveDefinitionParser(t *testing.T) { input: "@ somewhere on QUERY", expectErr: BeNil(), expectValues: Equal(document.DirectiveDefinition{ - Name: []byte("somewhere"), + Name: "somewhere", DirectiveLocations: document.DirectiveLocations{ document.DirectiveLocationQUERY, }, @@ -38,7 +37,7 @@ func TestDirectiveDefinitionParser(t *testing.T) { input: "@ somewhere on | QUERY", expectErr: BeNil(), expectValues: Equal(document.DirectiveDefinition{ - Name: []byte("somewhere"), + Name: "somewhere", DirectiveLocations: document.DirectiveLocations{ document.DirectiveLocationQUERY, }, @@ -49,12 +48,12 @@ func TestDirectiveDefinitionParser(t *testing.T) { input: "@ somewhere(inputValue: Int) on QUERY", expectErr: BeNil(), expectValues: Equal(document.DirectiveDefinition{ - Name: []byte("somewhere"), + Name: "somewhere", ArgumentsDefinition: document.ArgumentsDefinition{ document.InputValueDefinition{ - Name: []byte("inputValue"), + Name: "inputValue", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, }, @@ -68,7 +67,7 @@ func TestDirectiveDefinitionParser(t *testing.T) { input: "@ somewhere QUERY", expectErr: Not(BeNil()), expectValues: Equal(document.DirectiveDefinition{ - Name: []byte("somewhere"), + Name: "somewhere", }), }, { @@ -76,7 +75,7 @@ func TestDirectiveDefinitionParser(t *testing.T) { input: "@ somewhere off QUERY", expectErr: Not(BeNil()), expectValues: Equal(document.DirectiveDefinition{ - Name: []byte("somewhere"), + Name: "somewhere", }), }, { @@ -84,7 +83,7 @@ func TestDirectiveDefinitionParser(t *testing.T) { input: "@ somewhere on QUERY | thisshouldntwork", expectErr: Not(BeNil()), expectValues: Equal(document.DirectiveDefinition{ - Name: []byte("somewhere"), + Name: "somewhere", DirectiveLocations: document.DirectiveLocations{ document.DirectiveLocationQUERY, }, @@ -97,9 +96,8 @@ func TestDirectiveDefinitionParser(t *testing.T) { g.It(test.it, func() { - reader := 
bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseDirectiveDefinition() Expect(err).To(test.expectErr) diff --git a/pkg/parser/directives_parser_test.go b/pkg/parser/directives_parser_test.go index 76866cef72..d14d5880f9 100644 --- a/pkg/parser/directives_parser_test.go +++ b/pkg/parser/directives_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" @@ -28,10 +27,10 @@ func TestDirectivesParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, @@ -46,10 +45,10 @@ func TestDirectivesParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, @@ -57,10 +56,10 @@ func TestDirectivesParser(t *testing.T) { }, }, document.Directive{ - Name: []byte("moveto"), + Name: "moveto", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 4, }, @@ -75,16 +74,16 @@ func TestDirectivesParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, }, document.Argument{ - Name: []byte("count"), + Name: "count", Value: document.IntValue{ Val: 10, }, @@ -100,9 +99,8 @@ func TestDirectivesParser(t *testing.T) { g.It(test.it, func() { - reader := 
bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseDirectives() Expect(err).To(test.expectErr) diff --git a/pkg/parser/enum_value_parser_test.go b/pkg/parser/enum_value_parser_test.go index 99eacb8244..0668eb76ef 100644 --- a/pkg/parser/enum_value_parser_test.go +++ b/pkg/parser/enum_value_parser_test.go @@ -1,9 +1,7 @@ package parser import ( - "bytes" . "github.com/franela/goblin" - "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" "github.com/onsi/gomega/types" "testing" @@ -26,7 +24,7 @@ func TestEnumValueParser(t *testing.T) { it: "should parse MY_ENUM", input: "MY_ENUM", expectErr: BeNil(), - expectName: Equal(document.ByteSlice("MY_ENUM")), + expectName: Equal("MY_ENUM"), }, } @@ -35,9 +33,8 @@ func TestEnumValueParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parsePeekedEnumValue() Expect(err).To(test.expectErr) diff --git a/pkg/parser/enumtypedefinition_parser_test.go b/pkg/parser/enumtypedefinition_parser_test.go index c29a2ffd85..149fc2cae9 100644 --- a/pkg/parser/enumtypedefinition_parser_test.go +++ b/pkg/parser/enumtypedefinition_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" @@ -32,19 +31,19 @@ func TestParseEnumTypeDefinition(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.EnumTypeDefinition{ - Name: []byte("Direction"), + Name: "Direction", EnumValuesDefinition: document.EnumValuesDefinition{ { - EnumValue: []byte("NORTH"), + EnumValue: "NORTH", }, { - EnumValue: []byte("EAST"), + EnumValue: "EAST", }, { - EnumValue: []byte("SOUTH"), + EnumValue: "SOUTH", }, { - EnumValue: []byte("WEST"), + EnumValue: "WEST", }, }, }), @@ -63,23 +62,23 @@ func TestParseEnumTypeDefinition(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.EnumTypeDefinition{ - Name: []byte("Direction"), + Name: "Direction", EnumValuesDefinition: document.EnumValuesDefinition{ { - Description: []byte("describes north"), - EnumValue: []byte("NORTH"), + Description: "describes north", + EnumValue: "NORTH", }, { - Description: []byte("describes east"), - EnumValue: []byte("EAST"), + Description: "describes east", + EnumValue: "EAST", }, { - Description: []byte("describes south"), - EnumValue: []byte("SOUTH"), + Description: "describes south", + EnumValue: "SOUTH", }, { - Description: []byte("describes west"), - EnumValue: []byte("WEST"), + Description: "describes west", + EnumValue: "WEST", }, }, }), @@ -101,23 +100,23 @@ func TestParseEnumTypeDefinition(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.EnumTypeDefinition{ - Name: []byte("Direction"), + Name: "Direction", EnumValuesDefinition: document.EnumValuesDefinition{ { - Description: []byte("describes north"), - EnumValue: []byte("NORTH"), + Description: "describes north", + EnumValue: "NORTH", }, { - Description: []byte("describes east"), - EnumValue: []byte("EAST"), + Description: "describes east", + EnumValue: "EAST", }, { - Description: []byte("describes south"), - EnumValue: []byte("SOUTH"), + Description: "describes south", + EnumValue: "SOUTH", }, { - Description: []byte("describes west"), - EnumValue: []byte("WEST"), + 
Description: "describes west", + EnumValue: "WEST", }, }, }), @@ -129,26 +128,26 @@ func TestParseEnumTypeDefinition(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.EnumTypeDefinition{ - Name: []byte("Direction"), + Name: "Direction", Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, }, document.Directive{ - Name: []byte("fromBottom"), + Name: "fromBottom", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("top"), + Val: "top", }, }, }, @@ -156,7 +155,7 @@ func TestParseEnumTypeDefinition(t *testing.T) { }, EnumValuesDefinition: document.EnumValuesDefinition{ { - EnumValue: []byte("NORTH"), + EnumValue: "NORTH", }, }, }), @@ -166,7 +165,7 @@ func TestParseEnumTypeDefinition(t *testing.T) { input: ` Direction`, expectErr: BeNil(), expectValues: Equal(document.EnumTypeDefinition{ - Name: []byte("Direction"), + Name: "Direction", }), }, } @@ -176,9 +175,8 @@ func TestParseEnumTypeDefinition(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseEnumTypeDefinition() Expect(err).To(test.expectErr) diff --git a/pkg/parser/enumvaluesdefinition_parser.go b/pkg/parser/enumvaluesdefinition_parser.go index 31214b1f4e..61abd6c164 100644 --- a/pkg/parser/enumvaluesdefinition_parser.go +++ b/pkg/parser/enumvaluesdefinition_parser.go @@ -16,7 +16,7 @@ func (p *Parser) parseEnumValuesDefinition() (values document.EnumValuesDefiniti return } - var description []byte + var description string for { next, err := p.l.Peek(true) @@ -45,7 +45,7 @@ func (p *Parser) parseEnumValuesDefinition() (values document.EnumValuesDefiniti Description: 
description, } - description = nil + description = "" enumValueDefinition.Directives, err = p.parseDirectives() if err != nil { diff --git a/pkg/parser/enumvaluesdefinition_parser_test.go b/pkg/parser/enumvaluesdefinition_parser_test.go index c7a9c30a0c..61b3218440 100644 --- a/pkg/parser/enumvaluesdefinition_parser_test.go +++ b/pkg/parser/enumvaluesdefinition_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" @@ -34,16 +33,16 @@ func TestParseEnumValuesDefinition(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.EnumValuesDefinition{ { - EnumValue: []byte("NORTH"), + EnumValue: "NORTH", }, { - EnumValue: []byte("EAST"), + EnumValue: "EAST", }, { - EnumValue: []byte("SOUTH"), + EnumValue: "SOUTH", }, { - EnumValue: []byte("WEST"), + EnumValue: "WEST", }, }, ), @@ -63,20 +62,20 @@ func TestParseEnumValuesDefinition(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.EnumValuesDefinition{ { - Description: []byte("describes north"), - EnumValue: []byte("NORTH"), + Description: "describes north", + EnumValue: "NORTH", }, { - Description: []byte("describes east"), - EnumValue: []byte("EAST"), + Description: "describes east", + EnumValue: "EAST", }, { - Description: []byte("describes south"), - EnumValue: []byte("SOUTH"), + Description: "describes south", + EnumValue: "SOUTH", }, { - Description: []byte("describes west"), - EnumValue: []byte("WEST"), + Description: "describes west", + EnumValue: "WEST", }, }, ), @@ -106,20 +105,20 @@ describes west expectErr: BeNil(), expectValues: Equal(document.EnumValuesDefinition{ { - Description: []byte("describes north"), - EnumValue: []byte("NORTH"), + Description: "describes north", + EnumValue: "NORTH", }, { - Description: []byte("describes east"), - EnumValue: []byte("EAST"), + Description: "describes east", + EnumValue: "EAST", }, { - Description: []byte("describes south"), - 
EnumValue: []byte("SOUTH"), + Description: "describes south", + EnumValue: "SOUTH", }, { - Description: []byte("describes west"), - EnumValue: []byte("WEST"), + Description: "describes west", + EnumValue: "WEST", }, }, ), @@ -132,26 +131,26 @@ describes west expectErr: BeNil(), expectValues: Equal(document.EnumValuesDefinition{ { - EnumValue: []byte("NORTH"), + EnumValue: "NORTH", Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, }, document.Directive{ - Name: []byte("fromBottom"), + Name: "fromBottom", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("top"), + Val: "top", }, }, }, @@ -170,26 +169,26 @@ describes west expectErr: BeNil(), expectValues: Equal(document.EnumValuesDefinition{ { - EnumValue: []byte("NORTH"), + EnumValue: "NORTH", Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, }, document.Directive{ - Name: []byte("fromBottom"), + Name: "fromBottom", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("top"), + Val: "top", }, }, }, @@ -197,26 +196,26 @@ describes west }, }, { - EnumValue: []byte("EAST"), + EnumValue: "EAST", Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, }, document.Directive{ - Name: []byte("fromBottom"), + Name: "fromBottom", Arguments: 
document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("top"), + Val: "top", }, }, }, @@ -233,9 +232,8 @@ describes west g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseEnumValuesDefinition() Expect(err).To(test.expectErr) diff --git a/pkg/parser/executabledefinition_parser_test.go b/pkg/parser/executabledefinition_parser_test.go index 6733dc5fbd..2c9e8bb314 100644 --- a/pkg/parser/executabledefinition_parser_test.go +++ b/pkg/parser/executabledefinition_parser_test.go @@ -5,7 +5,6 @@ import ( "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" "github.com/onsi/gomega/types" - "strings" "testing" ) @@ -33,21 +32,21 @@ func TestExecutableDefinitionParser(t *testing.T) { OperationDefinitions: document.OperationDefinitions{ { OperationType: document.OperationTypeQuery, - Name: []byte("allGophers"), + Name: "allGophers", VariableDefinitions: document.VariableDefinitions{ { - Variable: []byte("color"), + Variable: "color", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, Directives: document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, @@ -57,7 +56,7 @@ func TestExecutableDefinitionParser(t *testing.T) { }, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("name"), + Name: "name", }, }, }, @@ -78,27 +77,27 @@ func TestExecutableDefinitionParser(t *testing.T) { expectValues: Equal(document.ExecutableDefinition{ OperationDefinitions: []document.OperationDefinition{ { - Name: []byte("Q1"), + Name: "Q1", OperationType: document.OperationTypeQuery, SelectionSet: []document.Selection{ document.Field{ - Name: []byte("foo"), + Name: "foo", }, }, }, }, 
FragmentDefinitions: document.FragmentDefinitions{ { - FragmentName: []byte("MyFragment"), + FragmentName: "MyFragment", TypeCondition: document.NamedType{ - Name: []byte("SomeType"), + Name: "SomeType", }, Directives: document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, @@ -108,7 +107,7 @@ func TestExecutableDefinitionParser(t *testing.T) { }, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("name"), + Name: "name", }, }, }, @@ -132,35 +131,35 @@ func TestExecutableDefinitionParser(t *testing.T) { OperationDefinitions: document.OperationDefinitions{ { OperationType: document.OperationTypeQuery, - Name: []byte("allGophers"), + Name: "allGophers", VariableDefinitions: document.VariableDefinitions{ { - Variable: []byte("color"), + Variable: "color", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("name"), + Name: "name", }, }, }, { OperationType: document.OperationTypeQuery, - Name: []byte("allGophinas"), + Name: "allGophinas", VariableDefinitions: document.VariableDefinitions{ { - Variable: []byte("color"), + Variable: "color", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("name"), + Name: "name", }, }, }, @@ -188,51 +187,51 @@ func TestExecutableDefinitionParser(t *testing.T) { OperationDefinitions: document.OperationDefinitions{ { OperationType: document.OperationTypeQuery, - Name: []byte("allGophers"), + Name: "allGophers", VariableDefinitions: document.VariableDefinitions{ { - Variable: []byte("color"), + Variable: "color", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("name"), + Name: 
"name", }, }, }, { OperationType: document.OperationTypeQuery, - Name: []byte("allGophinas"), + Name: "allGophinas", VariableDefinitions: document.VariableDefinitions{ { - Variable: []byte("color"), + Variable: "color", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("name"), + Name: "name", }, }, }, }, FragmentDefinitions: document.FragmentDefinitions{ { - FragmentName: []byte("MyFragment"), + FragmentName: "MyFragment", TypeCondition: document.NamedType{ - Name: []byte("SomeType"), + Name: "SomeType", }, Directives: document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, @@ -242,7 +241,7 @@ func TestExecutableDefinitionParser(t *testing.T) { }, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("name"), + Name: "name", }, }, }, @@ -286,14 +285,14 @@ func TestExecutableDefinitionParser(t *testing.T) { expectValues: Equal(document.ExecutableDefinition{ OperationDefinitions: document.OperationDefinitions{ { - Name: []byte("QueryWithFragments"), + Name: "QueryWithFragments", OperationType: document.OperationTypeQuery, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("hero"), + Name: "hero", SelectionSet: document.SelectionSet{ document.FragmentSpread{ - FragmentName: []byte("heroFields"), + FragmentName: "heroFields", }, }, }, @@ -302,27 +301,27 @@ func TestExecutableDefinitionParser(t *testing.T) { }, FragmentDefinitions: document.FragmentDefinitions{ { - FragmentName: []byte("heroFields"), + FragmentName: "heroFields", TypeCondition: document.NamedType{ - Name: []byte("SuperHero"), + Name: "SuperHero", }, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("name"), + Name: "name", }, document.Field{ - Name: []byte("skill"), + Name: "skill", }, 
document.InlineFragment{ TypeCondition: document.NamedType{ - Name: []byte("DrivingSuperHero"), + Name: "DrivingSuperHero", }, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("vehicles"), + Name: "vehicles", SelectionSet: document.SelectionSet{ document.FragmentSpread{ - FragmentName: []byte("vehicleFields"), + FragmentName: "vehicleFields", }, }, }, @@ -331,16 +330,16 @@ func TestExecutableDefinitionParser(t *testing.T) { }, }, { - FragmentName: []byte("vehicleFields"), + FragmentName: "vehicleFields", TypeCondition: document.NamedType{ - Name: []byte("Vehicle"), + Name: "Vehicle", }, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("name"), + Name: "name", }, document.Field{ - Name: []byte("weapon"), + Name: "weapon", }, }, }, @@ -357,13 +356,13 @@ func TestExecutableDefinitionParser(t *testing.T) { OperationType: document.OperationTypeQuery, SelectionSet: []document.Selection{ document.Field{ - Name: []byte("hero"), + Name: "hero", SelectionSet: []document.Selection{ document.Field{ - Name: []byte("id"), + Name: "id", }, document.Field{ - Name: []byte("name"), + Name: "name", }, }, }, @@ -380,7 +379,7 @@ func TestExecutableDefinitionParser(t *testing.T) { g.It(test.it, func() { parser := NewParser() - parser.l.SetInput(strings.NewReader(test.input)) + parser.l.SetInput(test.input) val, err := parser.parseExecutableDefinition() Expect(err).To(test.expectErr) diff --git a/pkg/parser/field_parser_test.go b/pkg/parser/field_parser_test.go index d9e9a5e25a..32fbcee6f8 100644 --- a/pkg/parser/field_parser_test.go +++ b/pkg/parser/field_parser_test.go @@ -1,12 +1,10 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" "github.com/onsi/gomega/types" - "io" "testing" ) @@ -28,11 +26,11 @@ func TestFieldParser(t *testing.T) { input: "preferredName: originalName(isSet: true) @rename(index: 3)", expectErr: BeNil(), expectValues: Equal(document.Field{ - Alias: []byte("preferredName"), - Name: []byte("originalName"), + Alias: "preferredName", + Name: "originalName", Arguments: document.Arguments{ document.Argument{ - Name: []byte("isSet"), + Name: "isSet", Value: document.BooleanValue{ Val: true, }, @@ -40,10 +38,10 @@ func TestFieldParser(t *testing.T) { }, Directives: document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, @@ -58,10 +56,10 @@ func TestFieldParser(t *testing.T) { input: "originalName(isSet: true) @rename(index: 3)", expectErr: BeNil(), expectValues: Equal(document.Field{ - Name: []byte("originalName"), + Name: "originalName", Arguments: document.Arguments{ document.Argument{ - Name: []byte("isSet"), + Name: "isSet", Value: document.BooleanValue{ Val: true, }, @@ -69,10 +67,10 @@ func TestFieldParser(t *testing.T) { }, Directives: document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, @@ -87,14 +85,14 @@ func TestFieldParser(t *testing.T) { input: "preferredName: originalName @rename(index: 3)", expectErr: BeNil(), expectValues: Equal(document.Field{ - Alias: []byte("preferredName"), - Name: []byte("originalName"), + Alias: "preferredName", + Name: "originalName", Directives: document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, @@ -109,11 +107,11 @@ func TestFieldParser(t 
*testing.T) { input: "preferredName: originalName(isSet: true)", expectErr: BeNil(), expectValues: Equal(document.Field{ - Alias: []byte("preferredName"), - Name: []byte("originalName"), + Alias: "preferredName", + Name: "originalName", Arguments: document.Arguments{ document.Argument{ - Name: []byte("isSet"), + Name: "isSet", Value: document.BooleanValue{ Val: true, }, @@ -132,13 +130,13 @@ func TestFieldParser(t *testing.T) { `, expectErr: BeNil(), expectValues: Equal(document.Field{ - Name: []byte("originalName"), + Name: "originalName", SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("unoriginalName"), + Name: "unoriginalName", SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("worstNamePossible"), + Name: "worstNamePossible", }, }, }, @@ -152,9 +150,8 @@ func TestFieldParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseField() Expect(err).To(test.expectErr) @@ -164,11 +161,10 @@ func TestFieldParser(t *testing.T) { }) } -var parseFieldBenchmarkInput = []byte(`t { kind name ofType { kind name ofType { kind name } } }`) +var parseFieldBenchmarkInput = `t { kind name ofType { kind name ofType { kind name } } }` func BenchmarkParseField(b *testing.B) { - reader := bytes.NewReader(parseFieldBenchmarkInput) var err error parser := NewParser() @@ -177,12 +173,7 @@ func BenchmarkParseField(b *testing.B) { for i := 0; i < b.N; i++ { - _, err = reader.Seek(0, io.SeekStart) - if err != nil { - b.Fatal(err) - } - - parser.l.SetInput(reader) + parser.l.SetInput(parseFieldBenchmarkInput) _, err = parser.parseField() if err != nil { b.Fatal(err) diff --git a/pkg/parser/fieldsdefinition_parser.go b/pkg/parser/fieldsdefinition_parser.go index 20016cbae5..dd5dcdc79a 100644 --- a/pkg/parser/fieldsdefinition_parser.go +++ b/pkg/parser/fieldsdefinition_parser.go @@ -16,7 +16,7 @@ func (p *Parser) 
parseFieldsDefinition() (fieldsDefinition document.FieldsDefini return } - var description []byte + var description string for { next, err := p.l.Peek(true) @@ -48,7 +48,7 @@ func (p *Parser) parseFieldsDefinition() (fieldsDefinition document.FieldsDefini Name: fieldIdent.Literal, } - description = nil + description = "" fieldDefinition.ArgumentsDefinition, err = p.parseArgumentsDefinition() if err != nil { @@ -76,39 +76,4 @@ func (p *Parser) parseFieldsDefinition() (fieldsDefinition document.FieldsDefini return fieldsDefinition, newErrInvalidType(invalid.Position, "parseFieldsDefinition", "string/curly bracket close/ident", invalid.Keyword.String()) } } - - /* _, err = p.readAllUntil(keyword.CURLYBRACKETCLOSE, WithReadRepeat(), WithDescription()). - foreachMatchedPattern(Pattern(keyword.IDENT), - func(tokens []token.Token) error { - description := string(tokens[0].Description) - name := string(tokens[0].Literal) - argumentsDefinition, err := p.parseArgumentsDefinition() - if err != nil { - return err - } - _, err = p.read(WithWhitelist(keyword.COLON)) - if err != nil { - return err - } - fieldType, err := p.parseType() - if err != nil { - return err - } - directives, err := p.parseDirectives() - fieldsDefinition = append(fieldsDefinition, document.FieldDefinition{ - Description: description, - Name: name, - Type: fieldType, - ArgumentsDefinition: argumentsDefinition, - Directives: directives, - }) - return err - }) - - _, err = p.read(WithWhitelist(keyword.CURLYBRACKETCLOSE)) - if err != nil { - return - } - - return*/ } diff --git a/pkg/parser/fieldsdefinition_parser_test.go b/pkg/parser/fieldsdefinition_parser_test.go index 45c08b95e5..63539bd95e 100644 --- a/pkg/parser/fieldsdefinition_parser_test.go +++ b/pkg/parser/fieldsdefinition_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" @@ -30,9 +29,9 @@ func TestFieldsDefinitionParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.FieldsDefinition{ { - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }), @@ -46,15 +45,15 @@ func TestFieldsDefinitionParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.FieldsDefinition{ { - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, { - Name: []byte("age"), + Name: "age", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, }), @@ -68,10 +67,10 @@ func TestFieldsDefinitionParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.FieldsDefinition{ { - Description: []byte("describes the name"), - Name: []byte("name"), + Description: "describes the name", + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }), @@ -85,18 +84,18 @@ func TestFieldsDefinitionParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.FieldsDefinition{ { - Name: []byte("name"), + Name: "name", Type: document.ListType{ Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, NonNull: true, }, }, { - Name: []byte("age"), + Name: "age", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", NonNull: true, }, }, @@ -126,9 +125,9 @@ func TestFieldsDefinitionParser(t *testing.T) { expectErr: Not(BeNil()), expectValues: Equal(document.FieldsDefinition{ { - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }), @@ -141,15 +140,15 @@ func TestFieldsDefinitionParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.FieldsDefinition{ { - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, ArgumentsDefinition: document.ArgumentsDefinition{ 
document.InputValueDefinition{ - Name: []byte("isSet"), + Name: "isSet", Type: document.NamedType{ - Name: []byte("boolean"), + Name: "boolean", NonNull: true, }, }, @@ -165,33 +164,33 @@ func TestFieldsDefinitionParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.FieldsDefinition{ { - Name: []byte("name"), + Name: "name", Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, }, document.Directive{ - Name: []byte("fromBottom"), + Name: "fromBottom", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("top"), + Val: "top", }, }, }, }, }, Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }), @@ -203,9 +202,8 @@ func TestFieldsDefinitionParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseFieldsDefinition() Expect(err).To(test.expectErr) diff --git a/pkg/parser/fixtures/type_system_definition_parsed_starwars.golden b/pkg/parser/fixtures/type_system_definition_parsed_starwars.golden index b280b04734..54bf246aa2 100644 --- a/pkg/parser/fixtures/type_system_definition_parsed_starwars.golden +++ b/pkg/parser/fixtures/type_system_definition_parsed_starwars.golden @@ -49,37 +49,6 @@ "Description": "The query type, represents all of the entry points into our object graph", "Name": "Query", "FieldsDefinition": [ - { - "Description": "", - "Name": "__schema", - "ArgumentsDefinition": null, - "Type": { - "Name": "__Schema", - "NonNull": true - }, - "Directives": null - }, - { - "Description": "", - "Name": "__type", - "ArgumentsDefinition": [ - { - "Description": "", - "Name": "name", - "Type": { - "Name": 
"String", - "NonNull": true - }, - "DefaultValue": null, - "Directives": null - } - ], - "Type": { - "Name": "__Type", - "NonNull": false - }, - "Directives": null - }, { "Description": "", "Name": "hero", diff --git a/pkg/parser/float_value_parser.go b/pkg/parser/float_value_parser.go index 18455a5983..90610a6ed1 100644 --- a/pkg/parser/float_value_parser.go +++ b/pkg/parser/float_value_parser.go @@ -12,7 +12,7 @@ func (p *Parser) parsePeekedFloatValue() (val document.FloatValue, err error) { return val, err } - val.Val, err = transform.StringSliceToFloat32(floatToken.Literal) + val.Val, err = transform.StringToFloat32(floatToken.Literal) return } diff --git a/pkg/parser/float_value_parser_test.go b/pkg/parser/float_value_parser_test.go index f9eeff6575..185feb7a47 100644 --- a/pkg/parser/float_value_parser_test.go +++ b/pkg/parser/float_value_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" @@ -37,9 +36,8 @@ func TestFloatValueParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parsePeekedFloatValue() Expect(err).To(test.expectErr) diff --git a/pkg/parser/fragmentdefinition_parser_test.go b/pkg/parser/fragmentdefinition_parser_test.go index 3eb1c84c88..1eca978df7 100644 --- a/pkg/parser/fragmentdefinition_parser_test.go +++ b/pkg/parser/fragmentdefinition_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" @@ -30,16 +29,16 @@ func TestFragmentDefinitionParser(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.FragmentDefinition{ - FragmentName: []byte("MyFragment"), + FragmentName: "MyFragment", TypeCondition: document.NamedType{ - Name: []byte("SomeType"), + Name: "SomeType", }, Directives: document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, @@ -49,7 +48,7 @@ func TestFragmentDefinitionParser(t *testing.T) { }, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("name"), + Name: "name", }, }, }), @@ -62,13 +61,13 @@ func TestFragmentDefinitionParser(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.FragmentDefinition{ - FragmentName: []byte("MyFragment"), + FragmentName: "MyFragment", TypeCondition: document.NamedType{ - Name: []byte("SomeType"), + Name: "SomeType", }, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("name"), + Name: "name", }, }, }), @@ -81,7 +80,7 @@ func TestFragmentDefinitionParser(t *testing.T) { }`, expectErr: Not(BeNil()), expectValues: Equal(document.FragmentDefinition{ - FragmentName: []byte("MyFragment"), + FragmentName: "MyFragment", }), }, { @@ -92,7 +91,7 @@ func TestFragmentDefinitionParser(t *testing.T) { }`, expectErr: Not(BeNil()), expectValues: Equal(document.FragmentDefinition{ - FragmentName: []byte("MyFragment"), + FragmentName: "MyFragment", }), }, } @@ -102,9 +101,8 @@ func TestFragmentDefinitionParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseFragmentDefinition() Expect(err).To(test.expectErr) diff --git a/pkg/parser/fragmentspread_parser_test.go b/pkg/parser/fragmentspread_parser_test.go index f4a9d04e4e..1fb361af2e 100644 
--- a/pkg/parser/fragmentspread_parser_test.go +++ b/pkg/parser/fragmentspread_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" @@ -27,13 +26,13 @@ func TestFragmentSpreadParser(t *testing.T) { input: "firstFragment @rename(index: 3)", expectErr: BeNil(), expectValues: Equal(document.FragmentSpread{ - FragmentName: []byte("firstFragment"), + FragmentName: "firstFragment", Directives: document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, @@ -48,7 +47,7 @@ func TestFragmentSpreadParser(t *testing.T) { input: "firstFragment ", expectErr: BeNil(), expectValues: Equal(document.FragmentSpread{ - FragmentName: []byte("firstFragment"), + FragmentName: "firstFragment", }), }, { @@ -64,9 +63,8 @@ func TestFragmentSpreadParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseFragmentSpread() Expect(err).To(test.expectErr) diff --git a/pkg/parser/implementsinterfaces_parser_test.go b/pkg/parser/implementsinterfaces_parser_test.go index 5711b60cf2..c46f8c2bd2 100644 --- a/pkg/parser/implementsinterfaces_parser_test.go +++ b/pkg/parser/implementsinterfaces_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" @@ -27,7 +26,7 @@ func TestImplementsInterfacesParser(t *testing.T) { input: "implements Dogs", expectErr: BeNil(), expectValues: Equal(document.ImplementsInterfaces{ - []byte("Dogs"), + "Dogs", }), }, { @@ -35,9 +34,9 @@ func TestImplementsInterfacesParser(t *testing.T) { input: "implements Dogs & Cats & Mice", expectErr: BeNil(), expectValues: Equal(document.ImplementsInterfaces{ - []byte("Dogs"), - []byte("Cats"), - []byte("Mice"), + "Dogs", + "Cats", + "Mice", }), }, { @@ -45,8 +44,8 @@ func TestImplementsInterfacesParser(t *testing.T) { input: "implements Dogs & Cats Mice", expectErr: BeNil(), expectValues: Equal(document.ImplementsInterfaces{ - []byte("Dogs"), - []byte("Cats"), + "Dogs", + "Cats", }), }, { @@ -62,9 +61,8 @@ func TestImplementsInterfacesParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseImplementsInterfaces() Expect(err).To(test.expectErr) diff --git a/pkg/parser/inlinefragment_parser_test.go b/pkg/parser/inlinefragment_parser_test.go index 979dfbebfc..b324079057 100644 --- a/pkg/parser/inlinefragment_parser_test.go +++ b/pkg/parser/inlinefragment_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" @@ -35,21 +34,21 @@ func TestInlineFragmentParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.InlineFragment{ TypeCondition: document.NamedType{ - Name: []byte("Goland"), + Name: "Goland", }, SelectionSet: document.SelectionSet{ document.InlineFragment{ TypeCondition: document.NamedType{ - Name: []byte("GoWater"), + Name: "GoWater", }, SelectionSet: document.SelectionSet{ document.InlineFragment{ TypeCondition: document.NamedType{ - Name: []byte("GoAir"), + Name: "GoAir", }, SelectionSet: []document.Selection{ document.Field{ - Name: []byte("go"), + Name: "go", }, }, }, @@ -65,9 +64,8 @@ func TestInlineFragmentParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseInlineFragment() Expect(err).To(test.expectErr) diff --git a/pkg/parser/inputfieldsdefinition_parser_test.go b/pkg/parser/inputfieldsdefinition_parser_test.go index 900ffb8db4..b4c0123847 100644 --- a/pkg/parser/inputfieldsdefinition_parser_test.go +++ b/pkg/parser/inputfieldsdefinition_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" @@ -28,9 +27,9 @@ func TestInputFieldsDefinitionParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.InputFieldsDefinition{ document.InputValueDefinition{ - Name: []byte("inputValue"), + Name: "inputValue", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, }), @@ -47,15 +46,15 @@ func TestInputFieldsDefinitionParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.InputFieldsDefinition{ document.InputValueDefinition{ - Name: []byte("inputValue"), + Name: "inputValue", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, document.InputValueDefinition{ - Name: []byte("outputValue"), + Name: "outputValue", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }), @@ -78,9 +77,9 @@ func TestInputFieldsDefinitionParser(t *testing.T) { expectErr: Not(BeNil()), expectValues: Equal(document.InputFieldsDefinition(document.InputFieldsDefinition{ document.InputValueDefinition{ - Name: []byte("inputValue"), + Name: "inputValue", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, })), @@ -92,9 +91,8 @@ func TestInputFieldsDefinitionParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseInputFieldsDefinition() Expect(err).To(test.expectErr) diff --git a/pkg/parser/inputobjecttypedefinition_parser_test.go b/pkg/parser/inputobjecttypedefinition_parser_test.go index b84085e089..f708bcf6f0 100644 --- a/pkg/parser/inputobjecttypedefinition_parser_test.go +++ b/pkg/parser/inputobjecttypedefinition_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" @@ -29,12 +28,12 @@ func TestInputObjectTypeDefinitionParser(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.InputObjectTypeDefinition{ - Name: []byte("Person"), + Name: "Person", InputFieldsDefinition: document.InputFieldsDefinition{ document.InputValueDefinition{ - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, @@ -48,22 +47,22 @@ func TestInputObjectTypeDefinitionParser(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.InputObjectTypeDefinition{ - Name: []byte("Person"), + Name: "Person", InputFieldsDefinition: document.InputFieldsDefinition{ document.InputValueDefinition{ - Name: []byte("name"), + Name: "name", Type: document.ListType{ Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, NonNull: true, }, }, document.InputValueDefinition{ - Name: []byte("age"), + Name: "age", Type: document.ListType{ Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, }, @@ -77,15 +76,15 @@ func TestInputObjectTypeDefinitionParser(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.InputObjectTypeDefinition{ - Name: []byte("Person"), + Name: "Person", InputFieldsDefinition: document.InputFieldsDefinition{ document.InputValueDefinition{ - Name: []byte("name"), + Name: "name", DefaultValue: document.StringValue{ - Val: []byte("Gophina"), + Val: "Gophina", }, Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, @@ -96,7 +95,7 @@ func TestInputObjectTypeDefinitionParser(t *testing.T) { input: `Person `, expectErr: BeNil(), expectValues: Equal(document.InputObjectTypeDefinition{ - Name: []byte("Person"), + Name: "Person", }), }, { @@ -106,26 +105,26 @@ func TestInputObjectTypeDefinitionParser(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.InputObjectTypeDefinition{ - Name: []byte("Person"), + Name: "Person", Directives: document.Directives{ 
document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, }, document.Directive{ - Name: []byte("fromBottom"), + Name: "fromBottom", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("top"), + Val: "top", }, }, }, @@ -133,9 +132,9 @@ func TestInputObjectTypeDefinitionParser(t *testing.T) { }, InputFieldsDefinition: document.InputFieldsDefinition{ document.InputValueDefinition{ - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, @@ -148,9 +147,8 @@ func TestInputObjectTypeDefinitionParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseInputObjectTypeDefinition() Expect(err).To(test.expectErr) diff --git a/pkg/parser/inputvaluedefinitions_parser.go b/pkg/parser/inputvaluedefinitions_parser.go index 6370265ced..bdf003b23f 100644 --- a/pkg/parser/inputvaluedefinitions_parser.go +++ b/pkg/parser/inputvaluedefinitions_parser.go @@ -14,7 +14,7 @@ import ( func (p *Parser) parseInputValueDefinitions() (inputValueDefinitions []document.InputValueDefinition, err error) { - var description []byte + var description string for { next, err := p.l.Peek(true) @@ -43,7 +43,7 @@ func (p *Parser) parseInputValueDefinitions() (inputValueDefinitions []document. 
Name: ident.Literal, } - description = nil + description = "" _, err = p.readExpect(keyword.COLON, "parseInputValueDefinitions") if err != nil { diff --git a/pkg/parser/inputvaluedefinitions_parser_test.go b/pkg/parser/inputvaluedefinitions_parser_test.go index 852a79b29b..2d8fee24a2 100644 --- a/pkg/parser/inputvaluedefinitions_parser_test.go +++ b/pkg/parser/inputvaluedefinitions_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" @@ -27,9 +26,9 @@ func TestInputValueDefinitionsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal([]document.InputValueDefinition{ { - Name: []byte("inputValue"), + Name: "inputValue", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, }), @@ -40,9 +39,9 @@ func TestInputValueDefinitionsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal([]document.InputValueDefinition{ { - Name: []byte("inputValue"), + Name: "inputValue", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, DefaultValue: document.IntValue{ Val: 2, @@ -56,10 +55,10 @@ func TestInputValueDefinitionsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal([]document.InputValueDefinition{ { - Description: []byte("useful description"), - Name: []byte("inputValue"), + Description: "useful description", + Name: "inputValue", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, DefaultValue: document.IntValue{ Val: 2, @@ -73,15 +72,15 @@ func TestInputValueDefinitionsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal([]document.InputValueDefinition{ { - Name: []byte("inputValue"), + Name: "inputValue", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, { - Name: []byte("outputValue"), + Name: "outputValue", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }), @@ -92,17 +91,17 @@ func TestInputValueDefinitionsParser(t 
*testing.T) { expectErr: BeNil(), expectValues: Equal([]document.InputValueDefinition{ { - Description: []byte("this is a inputValue"), - Name: []byte("inputValue"), + Description: "this is a inputValue", + Name: "inputValue", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, { - Description: []byte("this is a outputValue"), - Name: []byte("outputValue"), + Description: "this is a outputValue", + Name: "outputValue", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }), @@ -113,23 +112,23 @@ func TestInputValueDefinitionsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal([]document.InputValueDefinition{ { - Description: []byte("this is a inputValue"), - Name: []byte("inputValue"), + Description: "this is a inputValue", + Name: "inputValue", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, DefaultValue: document.IntValue{ Val: 2, }, }, { - Description: []byte("this is a outputValue"), - Name: []byte("outputValue"), + Description: "this is a outputValue", + Name: "outputValue", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, DefaultValue: document.StringValue{ - Val: []byte("test"), + Val: "test", }, }, }), @@ -140,9 +139,9 @@ func TestInputValueDefinitionsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal([]document.InputValueDefinition{ { - Name: []byte("inputValue"), + Name: "inputValue", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", NonNull: true, }, }, @@ -154,10 +153,10 @@ func TestInputValueDefinitionsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal([]document.InputValueDefinition{ { - Name: []byte("inputValue"), + Name: "inputValue", Type: document.ListType{ Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, }, @@ -169,29 +168,29 @@ func TestInputValueDefinitionsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal([]document.InputValueDefinition{ { - Name: []byte("inputValue"), + Name: 
"inputValue", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, }, document.Directive{ - Name: []byte("fromBottom"), + Name: "fromBottom", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("top"), + Val: "top", }, }, }, @@ -207,9 +206,8 @@ func TestInputValueDefinitionsParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseInputValueDefinitions() Expect(err).To(test.expectErr) diff --git a/pkg/parser/int_value_parser.go b/pkg/parser/int_value_parser.go index 59b5fe8451..a952802c81 100644 --- a/pkg/parser/int_value_parser.go +++ b/pkg/parser/int_value_parser.go @@ -12,7 +12,7 @@ func (p *Parser) parsePeekedIntValue() (val document.IntValue, err error) { return val, err } - val.Val, err = transform.StringSliceToInt32(integerToken.Literal) + val.Val, err = transform.StringToInt32(integerToken.Literal) return } diff --git a/pkg/parser/int_value_parser_test.go b/pkg/parser/int_value_parser_test.go index 7a839e547f..c976315ab1 100644 --- a/pkg/parser/int_value_parser_test.go +++ b/pkg/parser/int_value_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" @@ -37,9 +36,8 @@ func TestIntValueParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parsePeekedIntValue() Expect(err).To(test.expectErr) diff --git a/pkg/parser/interfacetypedefinition_parser_test.go b/pkg/parser/interfacetypedefinition_parser_test.go index 7982cc7768..6a5c8b0277 100644 --- a/pkg/parser/interfacetypedefinition_parser_test.go +++ b/pkg/parser/interfacetypedefinition_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" @@ -29,12 +28,12 @@ func TestInterfaceTypeDefinitionParser(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.InterfaceTypeDefinition{ - Name: []byte("NameEntity"), + Name: "NameEntity", FieldsDefinition: document.FieldsDefinition{ document.FieldDefinition{ - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, @@ -48,22 +47,22 @@ func TestInterfaceTypeDefinitionParser(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.InterfaceTypeDefinition{ - Name: []byte("Person"), + Name: "Person", FieldsDefinition: document.FieldsDefinition{ document.FieldDefinition{ - Name: []byte("name"), + Name: "name", Type: document.ListType{ Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, NonNull: true, }, }, document.FieldDefinition{ - Name: []byte("age"), + Name: "age", Type: document.ListType{ Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, }, @@ -75,7 +74,7 @@ func TestInterfaceTypeDefinitionParser(t *testing.T) { input: `Person `, expectErr: BeNil(), expectValues: Equal(document.InterfaceTypeDefinition{ - Name: []byte("Person"), + Name: "Person", }), }, { @@ -85,26 +84,26 @@ func TestInterfaceTypeDefinitionParser(t 
*testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.InterfaceTypeDefinition{ - Name: []byte("NameEntity"), + Name: "NameEntity", Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, }, document.Directive{ - Name: []byte("fromBottom"), + Name: "fromBottom", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("top"), + Val: "top", }, }, }, @@ -112,9 +111,9 @@ func TestInterfaceTypeDefinitionParser(t *testing.T) { }, FieldsDefinition: document.FieldsDefinition{ document.FieldDefinition{ - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, @@ -127,9 +126,8 @@ func TestInterfaceTypeDefinitionParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseInterfaceTypeDefinition() Expect(err).To(test.expectErr) diff --git a/pkg/parser/list_value_parser_test.go b/pkg/parser/list_value_parser_test.go index 5302766011..a82b62530f 100644 --- a/pkg/parser/list_value_parser_test.go +++ b/pkg/parser/list_value_parser_test.go @@ -1,9 +1,8 @@ package parser import ( - "bytes" . "github.com/franela/goblin" - document "github.com/jensneuse/graphql-go-tools/pkg/document" + "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" "github.com/onsi/gomega/types" "testing" @@ -47,7 +46,7 @@ func TestListValueParser(t *testing.T) { Val: 1, }, document.StringValue{ - Val: []byte("2"), + Val: "2", }, document.IntValue{ Val: 3, @@ -68,9 +67,8 @@ func TestListValueParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parsePeekedListValue() Expect(err).To(test.expectErr) diff --git a/pkg/parser/listtype_parser_test.go b/pkg/parser/listtype_parser_test.go index 8f8229e86d..1968e85aaa 100644 --- a/pkg/parser/listtype_parser_test.go +++ b/pkg/parser/listtype_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" @@ -28,7 +27,7 @@ func TestListTypeParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.ListType{ Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }), }, @@ -38,7 +37,7 @@ func TestListTypeParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.ListType{ Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, NonNull: true, }), @@ -49,7 +48,7 @@ func TestListTypeParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.ListType{ Type: document.NamedType{ - Name: []byte("String"), + Name: "String", NonNull: true, }, NonNull: true, @@ -63,7 +62,7 @@ func TestListTypeParser(t *testing.T) { Type: document.ListType{ Type: document.ListType{ Type: document.NamedType{ - Name: []byte("String"), + Name: "String", NonNull: true, }, }, @@ -83,7 +82,7 @@ func TestListTypeParser(t *testing.T) { expectErr: Not(BeNil()), expectValues: Equal(document.ListType{ Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }), }, @@ -94,9 +93,8 @@ func TestListTypeParser(t *testing.T) { g.It(test.it, func() { - reader := 
bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseListType() Expect(err).To(test.expectErr) diff --git a/pkg/parser/memprofile.out b/pkg/parser/memprofile.out deleted file mode 100644 index c6f1b6e96b..0000000000 Binary files a/pkg/parser/memprofile.out and /dev/null differ diff --git a/pkg/parser/namedtype_parser_test.go b/pkg/parser/namedtype_parser_test.go index c314f51ec6..39345d5884 100644 --- a/pkg/parser/namedtype_parser_test.go +++ b/pkg/parser/namedtype_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" @@ -27,7 +26,7 @@ func TestNamedTypeParser(t *testing.T) { input: "String", expectErr: BeNil(), expectValues: Equal(document.NamedType{ - Name: []byte("String"), + Name: "String", }), }, { @@ -35,7 +34,7 @@ func TestNamedTypeParser(t *testing.T) { input: "String!", expectErr: BeNil(), expectValues: Equal(document.NamedType{ - Name: []byte("String"), + Name: "String", NonNull: true, }), }, @@ -52,9 +51,8 @@ func TestNamedTypeParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseNamedType() Expect(err).To(test.expectErr) diff --git a/pkg/parser/object_field_parser_test.go b/pkg/parser/object_field_parser_test.go index db21d0c16f..0949c29b34 100644 --- a/pkg/parser/object_field_parser_test.go +++ b/pkg/parser/object_field_parser_test.go @@ -1,9 +1,8 @@ package parser import ( - "bytes" . "github.com/franela/goblin" - document "github.com/jensneuse/graphql-go-tools/pkg/document" + "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" "github.com/onsi/gomega/types" "testing" @@ -27,9 +26,9 @@ func TestObjectFieldParser(t *testing.T) { it: "should parse simple object field", input: `foo: "bar"`, expectErr: BeNil(), - expectFieldName: Equal(document.ByteSlice("foo")), + expectFieldName: Equal("foo"), expectFieldValue: Equal(document.StringValue{ - Val: document.ByteSlice("bar"), + Val: "bar", }), }, } @@ -39,9 +38,8 @@ func TestObjectFieldParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader(document.ByteSlice(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) field, err := parser.parseObjectField() Expect(err).To(test.expectErr) diff --git a/pkg/parser/object_value_parser.go b/pkg/parser/object_value_parser.go index 332d22137c..8b7572911f 100644 --- a/pkg/parser/object_value_parser.go +++ b/pkg/parser/object_value_parser.go @@ -59,22 +59,4 @@ func (p *Parser) parsePeekedObjectValue() (objectValue document.ObjectValue, err return objectValue, fmt.Errorf("parsePeekedObjectValue: expected }/ident, got: %s", peeked) } } - - /* err = p.readAllUntil(keyword.CURLYBRACKETCLOSE, - WithWhitelist(keyword.IDENT), - WithReadRepeat()). - foreach(func(tok token.Token) bool { - - var field document.ObjectField - field, err = p.parseObjectField() - if err != nil { - return false - } - - objectValue.Val = append(objectValue.Val, field) - - return true - }) - - return*/ } diff --git a/pkg/parser/object_value_parser_test.go b/pkg/parser/object_value_parser_test.go index 7ab576b561..449518cb71 100644 --- a/pkg/parser/object_value_parser_test.go +++ b/pkg/parser/object_value_parser_test.go @@ -1,9 +1,8 @@ package parser import ( - "bytes" . "github.com/franela/goblin" - document "github.com/jensneuse/graphql-go-tools/pkg/document" + "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" "github.com/onsi/gomega/types" "testing" @@ -29,9 +28,9 @@ func TestObjectValueParser(t *testing.T) { expectVal: Equal(document.ObjectValue{ Val: []document.ObjectField{ { - Name: []byte("foo"), + Name: "foo", Value: document.StringValue{ - Val: []byte("bar"), + Val: "bar", }, }, }, @@ -44,33 +43,33 @@ func TestObjectValueParser(t *testing.T) { expectVal: Equal(document.ObjectValue{ Val: []document.ObjectField{ { - Name: []byte("foo"), + Name: "foo", Value: document.StringValue{ - Val: []byte("bar"), + Val: "bar", }, }, { - Name: []byte("baz"), + Name: "baz", Value: document.StringValue{ - Val: []byte("bat"), + Val: "bat", }, }, { - Name: []byte("bas"), + Name: "bas", Value: document.StringValue{ - Val: []byte("bal"), + Val: "bal", }, }, { - Name: []byte("anEnum"), + Name: "anEnum", Value: document.EnumValue{ - Name: []byte("NUM"), + Name: "NUM", }, }, { - Name: []byte("smallEnum"), + Name: "smallEnum", Value: document.EnumValue{ - Name: []byte("numnum"), + Name: "numnum", }, }, }, @@ -83,13 +82,13 @@ func TestObjectValueParser(t *testing.T) { expectVal: Equal(document.ObjectValue{ Val: []document.ObjectField{ { - Name: []byte("foo"), + Name: "foo", Value: document.ObjectValue{ Val: []document.ObjectField{ { - Name: []byte("bar"), + Name: "bar", Value: document.StringValue{ - Val: []byte("baz"), + Val: "baz", }, }, }, @@ -109,13 +108,13 @@ func TestObjectValueParser(t *testing.T) { expectVal: Equal(document.ObjectValue{ Val: []document.ObjectField{ { - Name: []byte("foo"), + Name: "foo", Value: document.ObjectValue{ Val: []document.ObjectField{ { - Name: []byte("bar"), + Name: "bar", Value: document.StringValue{ - Val: []byte("baz"), + Val: "baz", }, }, }, @@ -131,9 +130,8 @@ func TestObjectValueParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parsePeekedObjectValue() 
Expect(err).To(test.expectErr) diff --git a/pkg/parser/objecttypedefinition_parser.go b/pkg/parser/objecttypedefinition_parser.go index 532d7471d6..9c0ba64f27 100644 --- a/pkg/parser/objecttypedefinition_parser.go +++ b/pkg/parser/objecttypedefinition_parser.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" "github.com/jensneuse/graphql-go-tools/pkg/document" "github.com/jensneuse/graphql-go-tools/pkg/lexing/keyword" ) @@ -30,26 +29,26 @@ func (p *Parser) parseObjectTypeDefinition() (objectTypeDefinition document.Obje return objectTypeDefinition, err } - if bytes.Equal(objectTypeDefinition.Name, []byte("Query")) { + /* if objectTypeDefinition.Name == "Query" { introspectionFields := document.FieldsDefinition{ { - Name: []byte("__schema"), + Name: "__schema", Type: document.NamedType{ - Name: []byte("__Schema"), + Name: "__Schema", NonNull: true, }, }, { - Name: []byte("__type"), + Name: "__type", Type: document.NamedType{ - Name: []byte("__Type"), + Name: "__Type", NonNull: false, }, ArgumentsDefinition: []document.InputValueDefinition{ { - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", NonNull: true, }, }, @@ -58,7 +57,7 @@ func (p *Parser) parseObjectTypeDefinition() (objectTypeDefinition document.Obje } objectTypeDefinition.FieldsDefinition = append(introspectionFields, objectTypeDefinition.FieldsDefinition...) - } + }*/ return } diff --git a/pkg/parser/objecttypedefinition_parser_test.go b/pkg/parser/objecttypedefinition_parser_test.go index 15d1abe334..8d662c92ca 100644 --- a/pkg/parser/objecttypedefinition_parser_test.go +++ b/pkg/parser/objecttypedefinition_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" @@ -29,12 +28,12 @@ func TestObjectTypeDefinitionParser(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.ObjectTypeDefinition{ - Name: []byte("Person"), + Name: "Person", FieldsDefinition: document.FieldsDefinition{ document.FieldDefinition{ - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, @@ -48,22 +47,22 @@ func TestObjectTypeDefinitionParser(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.ObjectTypeDefinition{ - Name: []byte("Person"), + Name: "Person", FieldsDefinition: document.FieldsDefinition{ document.FieldDefinition{ - Name: []byte("name"), + Name: "name", Type: document.ListType{ Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, NonNull: true, }, }, document.FieldDefinition{ - Name: []byte("age"), + Name: "age", Type: document.ListType{ Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, }, @@ -75,7 +74,7 @@ func TestObjectTypeDefinitionParser(t *testing.T) { input: `Person `, expectErr: BeNil(), expectValues: Equal(document.ObjectTypeDefinition{ - Name: []byte("Person"), + Name: "Person", }), }, { @@ -85,13 +84,13 @@ func TestObjectTypeDefinitionParser(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.ObjectTypeDefinition{ - Name: []byte("Person"), - ImplementsInterfaces: document.ImplementsInterfaces{[]byte("Human")}, + Name: "Person", + ImplementsInterfaces: document.ImplementsInterfaces{"Human"}, FieldsDefinition: document.FieldsDefinition{ document.FieldDefinition{ - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, @@ -104,13 +103,13 @@ func TestObjectTypeDefinitionParser(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.ObjectTypeDefinition{ - Name: []byte("Person"), - ImplementsInterfaces: document.ImplementsInterfaces{[]byte("Human"), []byte("Mammal")}, + Name: 
"Person", + ImplementsInterfaces: document.ImplementsInterfaces{"Human", "Mammal"}, FieldsDefinition: document.FieldsDefinition{ document.FieldDefinition{ - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, @@ -123,26 +122,26 @@ func TestObjectTypeDefinitionParser(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.ObjectTypeDefinition{ - Name: []byte("Person"), + Name: "Person", Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, }, document.Directive{ - Name: []byte("fromBottom"), + Name: "fromBottom", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("top"), + Val: "top", }, }, }, @@ -150,9 +149,9 @@ func TestObjectTypeDefinitionParser(t *testing.T) { }, FieldsDefinition: document.FieldsDefinition{ document.FieldDefinition{ - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, @@ -165,9 +164,8 @@ func TestObjectTypeDefinitionParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseObjectTypeDefinition() Expect(err).To(test.expectErr) diff --git a/pkg/parser/operationdefinition_parser_test.go b/pkg/parser/operationdefinition_parser_test.go index 5d47147c91..cf629136d9 100644 --- a/pkg/parser/operationdefinition_parser_test.go +++ b/pkg/parser/operationdefinition_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" @@ -32,21 +31,21 @@ func TestOperationDefinitionParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.OperationDefinition{ OperationType: document.OperationTypeQuery, - Name: []byte("allGophers"), + Name: "allGophers", VariableDefinitions: document.VariableDefinitions{ { - Variable: []byte("color"), + Variable: "color", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, Directives: document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, @@ -56,7 +55,7 @@ func TestOperationDefinitionParser(t *testing.T) { }, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("name"), + Name: "name", }, }, }), @@ -71,18 +70,18 @@ func TestOperationDefinitionParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.OperationDefinition{ OperationType: document.OperationTypeQuery, - Name: []byte("allGophers"), + Name: "allGophers", VariableDefinitions: document.VariableDefinitions{ { - Variable: []byte("color"), + Variable: "color", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("name"), + Name: "name", }, }, }), @@ -97,13 +96,13 @@ func TestOperationDefinitionParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.OperationDefinition{ OperationType: document.OperationTypeQuery, - Name: []byte("allGophers"), + Name: "allGophers", Directives: document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, @@ -113,7 +112,7 @@ func TestOperationDefinitionParser(t *testing.T) { }, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("name"), + Name: "name", }, 
}, }), @@ -130,18 +129,18 @@ func TestOperationDefinitionParser(t *testing.T) { OperationType: document.OperationTypeQuery, VariableDefinitions: document.VariableDefinitions{ { - Variable: []byte("color"), + Variable: "color", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, Directives: document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, @@ -151,7 +150,7 @@ func TestOperationDefinitionParser(t *testing.T) { }, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("name"), + Name: "name", }, }, }), @@ -168,7 +167,7 @@ func TestOperationDefinitionParser(t *testing.T) { OperationType: document.OperationTypeQuery, SelectionSet: document.SelectionSet{ document.Field{ - Name: []byte("name"), + Name: "name", }, }, }), @@ -180,21 +179,21 @@ func TestOperationDefinitionParser(t *testing.T) { expectErr: Not(BeNil()), expectValues: Equal(document.OperationDefinition{ OperationType: document.OperationTypeQuery, - Name: []byte("allGophers"), + Name: "allGophers", VariableDefinitions: document.VariableDefinitions{ { - Variable: []byte("color"), + Variable: "color", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, Directives: document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, @@ -211,9 +210,8 @@ func TestOperationDefinitionParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseOperationDefinition() Expect(err).To(test.expectErr) diff --git a/pkg/parser/parser.go b/pkg/parser/parser.go index ad4c2dff52..ecb4d09871 100644 --- a/pkg/parser/parser.go 
+++ b/pkg/parser/parser.go @@ -7,7 +7,6 @@ import ( "github.com/jensneuse/graphql-go-tools/pkg/lexing/keyword" "github.com/jensneuse/graphql-go-tools/pkg/lexing/position" "github.com/jensneuse/graphql-go-tools/pkg/lexing/token" - "io" ) type errInvalidType struct { @@ -32,13 +31,12 @@ func (e errInvalidType) Error() string { // Parser holds the lexer and a buffer for writing literals type Parser struct { - l Lexer - selectionSetBuffers []document.SelectionSet + l Lexer } // Lexer is the interface used by the Parser to lex tokens type Lexer interface { - SetInput(reader io.Reader) + SetInput(input string) Read() (tok token.Token, err error) Peek(ignoreWhitespace bool) (key keyword.Keyword, err error) } @@ -46,20 +44,20 @@ type Lexer interface { // NewParser returns a new parser using a buffered runestringer func NewParser() *Parser { return &Parser{ - l: lexer.NewLexer(), - selectionSetBuffers: make([]document.SelectionSet, 10), + l: lexer.NewLexer(), } } // ParseTypeSystemDefinition parses a TypeSystemDefinition from an io.Reader -func (p *Parser) ParseTypeSystemDefinition(reader io.Reader) (def document.TypeSystemDefinition, err error) { - p.l.SetInput(reader) +func (p *Parser) ParseTypeSystemDefinition(input string) (def document.TypeSystemDefinition, err error) { + p.l.SetInput(input) return p.parseTypeSystemDefinition() } // ParseExecutableDefinition parses an ExecutableDefinition from an io.Reader -func (p *Parser) ParseExecutableDefinition(reader io.Reader) (def document.ExecutableDefinition, err error) { - p.l.SetInput(reader) +func (p *Parser) ParseExecutableDefinition(input string) (def document.ExecutableDefinition, err error) { + + p.l.SetInput(input) return p.parseExecutableDefinition() } @@ -90,21 +88,3 @@ func (p *Parser) peekExpect(expected keyword.Keyword, swallow bool) (matched boo return } - -func (p *Parser) getSelectionSetBuffer() *document.SelectionSet { - - var s document.SelectionSet - - if len(p.selectionSetBuffers) == 0 { - s = 
make(document.SelectionSet, 10) - } else { - s, p.selectionSetBuffers = p.selectionSetBuffers[0], p.selectionSetBuffers[1:] - s = s[:0] - } - - return &s -} - -func (p *Parser) putSelectionSet(set *document.SelectionSet) { - p.selectionSetBuffers = append(p.selectionSetBuffers, *set) -} diff --git a/pkg/parser/parser.test b/pkg/parser/parser.test deleted file mode 100755 index f347a08606..0000000000 Binary files a/pkg/parser/parser.test and /dev/null differ diff --git a/pkg/parser/parser_test.go b/pkg/parser/parser_test.go index 8ae39cc582..db355cc964 100644 --- a/pkg/parser/parser_test.go +++ b/pkg/parser/parser_test.go @@ -6,10 +6,9 @@ import ( "github.com/jensneuse/diffview" . "github.com/onsi/gomega" "github.com/sebdah/goldie" - "io" "io/ioutil" "log" - "os" + "strings" "testing" ) @@ -20,14 +19,15 @@ func TestParser_Starwars(t *testing.T) { parser := NewParser() - starwarsSchema, err := os.Open(inputFileName) + starwarsSchema, err := ioutil.ReadFile(inputFileName) if err != nil { t.Fatal(err) } - defer starwarsSchema.Close() + builder := &strings.Builder{} + builder.Write(starwarsSchema) - def, err := parser.ParseTypeSystemDefinition(starwarsSchema) + def, err := parser.ParseTypeSystemDefinition(builder.String()) if err != nil { t.Fatal(err) } @@ -54,15 +54,16 @@ func TestParser_IntrospectionQuery(t *testing.T) { inputFileName := "./testdata/introspectionquery.graphql" fixtureFileName := "type_system_definition_parsed_introspection" - inputFile, err := os.Open(inputFileName) + inputFileData, err := ioutil.ReadFile(inputFileName) if err != nil { t.Fatal(err) } - defer inputFile.Close() + builder := &strings.Builder{} + builder.Write(inputFileData) parser := NewParser() - executableDefinition, err := parser.ParseExecutableDefinition(inputFile) + executableDefinition, err := parser.ParseExecutableDefinition(builder.String()) Expect(err).To(BeNil()) jsonBytes, err := json.MarshalIndent(executableDefinition, "", " ") @@ -88,27 +89,21 @@ func BenchmarkParser(b 
*testing.B) { parser := NewParser() - introspectionQueryFile, err := os.Open("./testdata/introspectionquery.graphql") + testData, err := ioutil.ReadFile("./testdata/introspectionquery.graphql") if err != nil { b.Fatal(err) } - parser.ParseExecutableDefinition(introspectionQueryFile) + builder := &strings.Builder{} + builder.Write(testData) - defer introspectionQueryFile.Close() + inputString := builder.String() b.ResetTimer() for i := 0; i < b.N; i++ { - b.StopTimer() - _, err = introspectionQueryFile.Seek(0, io.SeekStart) - if err != nil { - b.Fatal(err) - } - b.StartTimer() - - executableDefinition, err := parser.ParseExecutableDefinition(introspectionQueryFile) + executableDefinition, err := parser.ParseExecutableDefinition(inputString) if err != nil { b.Fatal(err) } diff --git a/pkg/parser/profile.out b/pkg/parser/profile.out deleted file mode 100644 index f1d17202c7..0000000000 Binary files a/pkg/parser/profile.out and /dev/null differ diff --git a/pkg/parser/scalartypedefinition_parser_test.go b/pkg/parser/scalartypedefinition_parser_test.go index aa8141829b..05ecb1ec1f 100644 --- a/pkg/parser/scalartypedefinition_parser_test.go +++ b/pkg/parser/scalartypedefinition_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" @@ -26,7 +25,7 @@ func TestParseScalar(t *testing.T) { input: ` JSON`, expectErr: BeNil(), expectValues: Equal(document.ScalarTypeDefinition{ - Name: []byte("JSON"), + Name: "JSON", }), }, { @@ -34,26 +33,26 @@ func TestParseScalar(t *testing.T) { input: ` JSON @fromTop(to: "bottom") @fromBottom(to: "top") `, expectErr: BeNil(), expectValues: Equal(document.ScalarTypeDefinition{ - Name: []byte("JSON"), + Name: "JSON", Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, }, document.Directive{ - Name: []byte("fromBottom"), + Name: "fromBottom", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("top"), + Val: "top", }, }, }, @@ -68,9 +67,8 @@ func TestParseScalar(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseScalarTypeDefinition() Expect(err).To(test.expectErr) diff --git a/pkg/parser/schemadefinition_parser_test.go b/pkg/parser/schemadefinition_parser_test.go index 403ac5aa88..60fd464aff 100644 --- a/pkg/parser/schemadefinition_parser_test.go +++ b/pkg/parser/schemadefinition_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" @@ -30,9 +29,9 @@ func TestParseSchemaDefinition(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.SchemaDefinition{ - Query: []byte("Query"), - Mutation: []byte("Mutation"), - Subscription: []byte("Subscription"), + Query: "Query", + Mutation: "Mutation", + Subscription: "Subscription", }), }, { @@ -48,9 +47,9 @@ func TestParseSchemaDefinition(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.SchemaDefinition{ - Query: []byte("Query"), - Mutation: []byte("Mutation"), - Subscription: []byte("Subscription"), + Query: "Query", + Mutation: "Mutation", + Subscription: "Subscription", }), }, { @@ -68,9 +67,9 @@ func TestParseSchemaDefinition(t *testing.T) { }`, expectErr: Not(BeNil()), expectValues: Equal(document.SchemaDefinition{ - Query: []byte("Query"), - Mutation: []byte("Mutation"), - Subscription: []byte("Subscription"), + Query: "Query", + Mutation: "Mutation", + Subscription: "Subscription", }), }, { @@ -82,28 +81,28 @@ func TestParseSchemaDefinition(t *testing.T) { }`, expectErr: BeNil(), expectValues: Equal(document.SchemaDefinition{ - Query: []byte("Query"), - Mutation: []byte("Mutation"), - Subscription: []byte("Subscription"), + Query: "Query", + Mutation: "Mutation", + Subscription: "Subscription", Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, }, document.Directive{ - Name: []byte("fromBottom"), + Name: "fromBottom", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("top"), + Val: "top", }, }, }, @@ -118,9 +117,8 @@ func TestParseSchemaDefinition(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + 
parser.l.SetInput(test.input) val, err := parser.parseSchemaDefinition() Expect(err).To(test.expectErr) diff --git a/pkg/parser/selection_parser.go b/pkg/parser/selection_parser.go index dc7630c03f..3653c49fc4 100644 --- a/pkg/parser/selection_parser.go +++ b/pkg/parser/selection_parser.go @@ -13,7 +13,8 @@ func (p *Parser) parseSelection() (selection document.Selection, err error) { } if !isFragmentSelection { - return p.parseField() + selection, err = p.parseField() + return } isInlineFragment, err := p.peekExpect(keyword.ON, true) @@ -22,8 +23,10 @@ func (p *Parser) parseSelection() (selection document.Selection, err error) { } if isInlineFragment { - return p.parseInlineFragment() + selection, err = p.parseInlineFragment() + return } - return p.parseFragmentSpread() + selection, err = p.parseFragmentSpread() + return } diff --git a/pkg/parser/selection_parser_test.go b/pkg/parser/selection_parser_test.go index d4f63e36b8..e9099bb912 100644 --- a/pkg/parser/selection_parser_test.go +++ b/pkg/parser/selection_parser_test.go @@ -1,12 +1,10 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" "github.com/onsi/gomega/types" - "io" "testing" ) @@ -29,7 +27,7 @@ func TestSelectionParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.InlineFragment{ TypeCondition: document.NamedType{ - Name: []byte("Land"), + Name: "Land", }, }), }, @@ -38,7 +36,7 @@ func TestSelectionParser(t *testing.T) { input: "originalName", expectErr: BeNil(), expectValues: Equal(document.Field{ - Name: []byte("originalName"), + Name: "originalName", }), }, { @@ -46,31 +44,31 @@ func TestSelectionParser(t *testing.T) { input: `t { kind name ofType { kind name ofType { kind name } } }`, expectErr: BeNil(), expectValues: Equal(document.Field{ - Name: []byte("t"), + Name: "t", SelectionSet: []document.Selection{ document.Field{ - Name: []byte(`kind`), + Name: "kind", }, document.Field{ - Name: []byte(`name`), + Name: "name", }, document.Field{ - Name: []byte(`ofType`), + Name: "ofType", SelectionSet: []document.Selection{ document.Field{ - Name: []byte(`kind`), + Name: "kind", }, document.Field{ - Name: []byte(`name`), + Name: "name", }, document.Field{ - Name: []byte(`ofType`), + Name: "ofType", SelectionSet: []document.Selection{ document.Field{ - Name: []byte(`kind`), + Name: "kind", }, document.Field{ - Name: []byte(`name`), + Name: "name", }, }, }, @@ -84,10 +82,10 @@ func TestSelectionParser(t *testing.T) { input: "originalName(isSet: true)", expectErr: BeNil(), expectValues: Equal(document.Field{ - Name: []byte("originalName"), + Name: "originalName", Arguments: document.Arguments{ document.Argument{ - Name: []byte("isSet"), + Name: "isSet", Value: document.BooleanValue{ Val: true, }, @@ -100,7 +98,7 @@ func TestSelectionParser(t *testing.T) { input: "...Land", expectErr: BeNil(), expectValues: Equal(document.FragmentSpread{ - FragmentName: []byte("Land"), + FragmentName: "Land", }), }, } @@ -110,9 +108,8 @@ func TestSelectionParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := 
NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseSelection() Expect(err).To(test.expectErr) @@ -122,11 +119,9 @@ func TestSelectionParser(t *testing.T) { }) } -var parseSelectionBenchmarkInput = []byte(`t { kind name ofType { kind name ofType { kind name } } }`) +var parseSelectionBenchmarkInput = `t { kind name ofType { kind name ofType { kind name } } }` func BenchmarkParseSelection(b *testing.B) { - reader := bytes.NewReader(parseSelectionBenchmarkInput) - var err error parser := NewParser() @@ -134,13 +129,8 @@ func BenchmarkParseSelection(b *testing.B) { for i := 0; i < b.N; i++ { - _, err = reader.Seek(0, io.SeekStart) - if err != nil { - b.Fatal(err) - } - - parser.l.SetInput(reader) - _, err = parser.parseSelection() + parser.l.SetInput(parseSelectionBenchmarkInput) + _, err := parser.parseSelection() if err != nil { b.Fatal(err) } diff --git a/pkg/parser/selectionset_parser.go b/pkg/parser/selectionset_parser.go index 4435d0564e..99b81268cd 100644 --- a/pkg/parser/selectionset_parser.go +++ b/pkg/parser/selectionset_parser.go @@ -16,34 +16,24 @@ func (p *Parser) parseSelectionSet() (selectionSet document.SelectionSet, err er return } - buffer := p.getSelectionSetBuffer() - for { next, err := p.l.Peek(true) if err != nil { - p.putSelectionSet(buffer) return selectionSet, err } if next == keyword.CURLYBRACKETCLOSE { _, err = p.l.Read() - - selectionSet = make(document.SelectionSet, len(*buffer)) - copy(selectionSet, *buffer) - - p.putSelectionSet(buffer) - return selectionSet, err } selection, err := p.parseSelection() if err != nil { - p.putSelectionSet(buffer) return selectionSet, err } - *buffer = append(*buffer, selection) + selectionSet = append(selectionSet, selection) } } diff --git a/pkg/parser/selectionset_parser_test.go b/pkg/parser/selectionset_parser_test.go index 7187384145..077c8357bd 100644 --- a/pkg/parser/selectionset_parser_test.go +++ b/pkg/parser/selectionset_parser_test.go @@ -1,12 +1,10 @@ 
package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" "github.com/onsi/gomega/types" - "io" "testing" ) @@ -31,7 +29,7 @@ func TestSelectionSetParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.SelectionSet{ document.Field{ - Name: []byte("foo"), + Name: "foo", }, }), }, @@ -46,15 +44,15 @@ func TestSelectionSetParser(t *testing.T) { expectValues: Equal(document.SelectionSet{ document.InlineFragment{ TypeCondition: document.NamedType{ - Name: []byte("Goland"), + Name: "Goland", }, }, document.FragmentSpread{ - FragmentName: []byte("Air"), + FragmentName: "Air", }, document.InlineFragment{ TypeCondition: document.NamedType{ - Name: []byte("Water"), + Name: "Water", }, }, }), @@ -70,15 +68,15 @@ func TestSelectionSetParser(t *testing.T) { expectValues: Equal(document.SelectionSet{ document.InlineFragment{ TypeCondition: document.NamedType{ - Name: []byte("Goland"), + Name: "Goland", }, }, document.Field{ - Alias: []byte("preferredName"), - Name: []byte("originalName"), + Alias: "preferredName", + Name: "originalName", Arguments: document.Arguments{ document.Argument{ - Name: []byte("isSet"), + Name: "isSet", Value: document.BooleanValue{ Val: true, }, @@ -87,7 +85,7 @@ func TestSelectionSetParser(t *testing.T) { }, document.InlineFragment{ TypeCondition: document.NamedType{ - Name: []byte("Water"), + Name: "Water", }, }, }), @@ -103,15 +101,15 @@ func TestSelectionSetParser(t *testing.T) { expectValues: Equal(document.SelectionSet{ document.InlineFragment{ TypeCondition: document.NamedType{ - Name: []byte("Goland"), + Name: "Goland", }, }, document.Field{ - Alias: []byte("preferredName"), - Name: []byte("originalName"), + Alias: "preferredName", + Name: "originalName", Arguments: document.Arguments{ document.Argument{ - Name: []byte("isSet"), + Name: "isSet", Value: document.BooleanValue{ Val: true, }, @@ -119,10 +117,10 @@ func TestSelectionSetParser(t 
*testing.T) { }, Directives: document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, @@ -133,7 +131,7 @@ func TestSelectionSetParser(t *testing.T) { }, document.InlineFragment{ TypeCondition: document.NamedType{ - Name: []byte("Water"), + Name: "Water", }, }, }), @@ -149,17 +147,17 @@ func TestSelectionSetParser(t *testing.T) { expectValues: Equal(document.SelectionSet{ document.InlineFragment{ TypeCondition: document.NamedType{ - Name: []byte("Goland"), + Name: "Goland", }, }, document.FragmentSpread{ - FragmentName: []byte("firstFragment"), + FragmentName: "firstFragment", Directives: document.Directives{ document.Directive{ - Name: []byte("rename"), + Name: "rename", Arguments: document.Arguments{ document.Argument{ - Name: []byte("index"), + Name: "index", Value: document.IntValue{ Val: 3, }, @@ -170,7 +168,7 @@ func TestSelectionSetParser(t *testing.T) { }, document.InlineFragment{ TypeCondition: document.NamedType{ - Name: []byte("Water"), + Name: "Water", }, }, }), @@ -182,9 +180,8 @@ func TestSelectionSetParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseSelectionSet() Expect(err).To(test.expectErr) @@ -194,7 +191,7 @@ func TestSelectionSetParser(t *testing.T) { }) } -var selectionSetBenchmarkInput = []byte(`{ +var selectionSetBenchmarkInput = `{ kind name ofType { @@ -225,23 +222,16 @@ var selectionSetBenchmarkInput = []byte(`{ } } } -}`) +}` func BenchmarkParseSelectionSet(b *testing.B) { - reader := bytes.NewReader(selectionSetBenchmarkInput) - var err error - parser := NewParser() + var err error parse := func() { - _, err = reader.Seek(0, io.SeekStart) - if err != nil { - b.Fatal(err) - } - - parser.l.SetInput(reader) + 
parser.l.SetInput(selectionSetBenchmarkInput) _, err = parser.parseSelectionSet() if err != nil { b.Fatal(err) diff --git a/pkg/parser/string_value_parser_test.go b/pkg/parser/string_value_parser_test.go index 50832249bc..39ca478548 100644 --- a/pkg/parser/string_value_parser_test.go +++ b/pkg/parser/string_value_parser_test.go @@ -1,9 +1,7 @@ package parser import ( - "bytes" . "github.com/franela/goblin" - "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" "github.com/onsi/gomega/types" "testing" @@ -26,7 +24,7 @@ func TestStringValueParser(t *testing.T) { it: "should parse single line string value", input: `"lorem ipsum"`, expectErr: BeNil(), - expectVal: Equal(document.ByteSlice("lorem ipsum")), + expectVal: Equal("lorem ipsum"), }, { it: "should parse multi line string value", @@ -34,21 +32,21 @@ func TestStringValueParser(t *testing.T) { lorem ipsum """`, expectErr: BeNil(), - expectVal: Equal(document.ByteSlice("lorem ipsum")), + expectVal: Equal("lorem ipsum"), }, { - it: "should parse multi line string value", + it: "should parse multi line string value with escaped quote", input: `""" foo \" bar """`, expectErr: BeNil(), - expectVal: Equal(document.ByteSlice(`foo " bar`)), + expectVal: Equal(`foo \" bar`), }, { it: "should parse single line string with escaped\"", input: `"foo bar \" baz"`, expectErr: BeNil(), - expectVal: Equal(document.ByteSlice("foo bar \" baz")), + expectVal: Equal("foo bar \\\" baz"), }, } @@ -56,9 +54,9 @@ foo \" bar test := test g.It(test.it, func() { - reader := bytes.NewReader(document.ByteSlice(test.input)) + parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parsePeekedStringValue() Expect(err).To(test.expectErr) diff --git a/pkg/parser/type_parser_test.go b/pkg/parser/type_parser_test.go index 689eba8b89..821c687e93 100644 --- a/pkg/parser/type_parser_test.go +++ b/pkg/parser/type_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" 
"testing" . "github.com/franela/goblin" @@ -27,7 +26,7 @@ func TestTypeParser(t *testing.T) { input: "String", expectErr: BeNil(), expectValues: Equal(document.NamedType{ - Name: []byte("String"), + Name: "String", }), }, { @@ -35,7 +34,7 @@ func TestTypeParser(t *testing.T) { input: "String!", expectErr: BeNil(), expectValues: Equal(document.NamedType{ - Name: []byte("String"), + Name: "String", NonNull: true, }), }, @@ -51,7 +50,7 @@ func TestTypeParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.ListType{ Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }), }, @@ -61,7 +60,7 @@ func TestTypeParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.ListType{ Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, NonNull: true, }), @@ -72,7 +71,7 @@ func TestTypeParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.ListType{ Type: document.NamedType{ - Name: []byte("String"), + Name: "String", NonNull: true, }, NonNull: true, @@ -86,7 +85,7 @@ func TestTypeParser(t *testing.T) { Type: document.ListType{ Type: document.ListType{ Type: document.NamedType{ - Name: []byte("String"), + Name: "String", NonNull: true, }, }, @@ -100,7 +99,7 @@ func TestTypeParser(t *testing.T) { expectErr: Not(BeNil()), expectValues: Equal(document.ListType{ Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }), }, @@ -111,9 +110,8 @@ func TestTypeParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseType() Expect(err).To(test.expectErr) diff --git a/pkg/parser/typesystemdefinition_parser.go b/pkg/parser/typesystemdefinition_parser.go index bd6e82f109..d363b244f6 100644 --- a/pkg/parser/typesystemdefinition_parser.go +++ b/pkg/parser/typesystemdefinition_parser.go @@ -7,7 +7,7 @@ import ( func (p *Parser) 
parseTypeSystemDefinition() (typeSystemDefinition document.TypeSystemDefinition, err error) { - var description []byte + var description string for { next, err := p.l.Read() @@ -109,6 +109,6 @@ func (p *Parser) parseTypeSystemDefinition() (typeSystemDefinition document.Type return typeSystemDefinition, newErrInvalidType(invalid.Position, "parseTypeSystemDefinition", "eof/string/schema/scalar/type/interface/union/directive/input/enum", invalid.Keyword.String()) } - description = nil + description = "" } } diff --git a/pkg/parser/typesystemdefinition_parser_test.go b/pkg/parser/typesystemdefinition_parser_test.go index e78dd56c44..0b32d3937a 100644 --- a/pkg/parser/typesystemdefinition_parser_test.go +++ b/pkg/parser/typesystemdefinition_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" @@ -32,8 +31,8 @@ func TestTypeSystemDefinition(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.TypeSystemDefinition{ SchemaDefinition: document.SchemaDefinition{ - Query: []byte("Query"), - Mutation: []byte("Mutation"), + Query: "Query", + Mutation: "Mutation", }}), }, { @@ -47,19 +46,19 @@ func TestTypeSystemDefinition(t *testing.T) { expectValues: Equal(document.TypeSystemDefinition{ ScalarTypeDefinitions: []document.ScalarTypeDefinition{ { - Name: []byte("JSON"), - Description: []byte("this is a scalar"), + Name: "JSON", + Description: "this is a scalar", }, { - Name: []byte("testName"), + Name: "testName", Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, @@ -67,8 +66,8 @@ func TestTypeSystemDefinition(t *testing.T) { }, }, { - Name: []byte("XML"), - Description: []byte("this is another scalar"), + Name: "XML", + Description: 
"this is another scalar", }, }}), }, @@ -91,34 +90,34 @@ func TestTypeSystemDefinition(t *testing.T) { expectValues: Equal(document.TypeSystemDefinition{ ObjectTypeDefinitions: []document.ObjectTypeDefinition{ { - Name: []byte("Person"), - Description: []byte("this is a Person"), + Name: "Person", + Description: "this is a Person", FieldsDefinition: document.FieldsDefinition{ document.FieldDefinition{ - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, }, { - Name: []byte("testType"), + Name: "testType", }, { - Name: []byte("secondType"), - Description: []byte("second Type"), + Name: "secondType", + Description: "second Type", }, { - Name: []byte("thirdType"), + Name: "thirdType", Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, @@ -126,13 +125,13 @@ func TestTypeSystemDefinition(t *testing.T) { }, }, { - Name: []byte("Animal"), - Description: []byte("this is an Animal"), + Name: "Animal", + Description: "this is an Animal", FieldsDefinition: document.FieldsDefinition{ document.FieldDefinition{ - Name: []byte("age"), + Name: "age", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, }, @@ -159,34 +158,34 @@ func TestTypeSystemDefinition(t *testing.T) { expectValues: Equal(document.TypeSystemDefinition{ InterfaceTypeDefinitions: []document.InterfaceTypeDefinition{ { - Name: []byte("firstEntity"), - Description: []byte("describes firstEntity"), + Name: "firstEntity", + Description: "describes firstEntity", FieldsDefinition: document.FieldsDefinition{ document.FieldDefinition{ - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, }, { - Name: []byte("firstInterface"), + Name: "firstInterface", }, { - Name: 
[]byte("secondInterface"), - Description: []byte("second interface"), + Name: "secondInterface", + Description: "second interface", }, { - Name: []byte("thirdInterface"), + Name: "thirdInterface", Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, @@ -194,13 +193,13 @@ func TestTypeSystemDefinition(t *testing.T) { }, }, { - Name: []byte("secondEntity"), - Description: []byte("describes secondEntity"), + Name: "secondEntity", + Description: "describes secondEntity", FieldsDefinition: document.FieldsDefinition{ document.FieldDefinition{ - Name: []byte("age"), + Name: "age", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, }, @@ -224,30 +223,30 @@ func TestTypeSystemDefinition(t *testing.T) { expectValues: Equal(document.TypeSystemDefinition{ UnionTypeDefinitions: []document.UnionTypeDefinition{ { - Name: []byte("SearchResult"), - Description: []byte("unifies SearchResult"), + Name: "SearchResult", + Description: "unifies SearchResult", UnionMemberTypes: document.UnionMemberTypes{ - []byte("Photo"), - []byte("Person"), + "Photo", + "Person", }, }, { - Name: []byte("thirdUnion"), + Name: "thirdUnion", }, { - Name: []byte("secondUnion"), - Description: []byte("second union"), + Name: "secondUnion", + Description: "second union", }, { - Name: []byte("firstUnion"), + Name: "firstUnion", Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, @@ -255,11 +254,11 @@ func TestTypeSystemDefinition(t *testing.T) { }, }, { - Name: []byte("UnionExample"), - Description: []byte("unifies UnionExample"), + Name: "UnionExample", + Description: 
"unifies UnionExample", UnionMemberTypes: document.UnionMemberTypes{ - []byte("First"), - []byte("Second"), + "First", + "Second", }, }, }}), @@ -284,31 +283,31 @@ func TestTypeSystemDefinition(t *testing.T) { expectValues: Equal(document.TypeSystemDefinition{ EnumTypeDefinitions: []document.EnumTypeDefinition{ { - Name: []byte("Direction"), - Description: []byte("describes direction"), + Name: "Direction", + Description: "describes direction", EnumValuesDefinition: document.EnumValuesDefinition{ { - EnumValue: []byte("NORTH"), + EnumValue: "NORTH", }, }, }, { - Name: []byte("thirdEnum"), + Name: "thirdEnum", }, { - Name: []byte("secondEnum"), - Description: []byte("second enum"), + Name: "secondEnum", + Description: "second enum", }, { - Name: []byte("firstEnum"), + Name: "firstEnum", Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, @@ -316,11 +315,11 @@ func TestTypeSystemDefinition(t *testing.T) { }, }, { - Name: []byte("EnumExample"), - Description: []byte("enumerates EnumExample"), + Name: "EnumExample", + Description: "enumerates EnumExample", EnumValuesDefinition: document.EnumValuesDefinition{ { - EnumValue: []byte("NORTH"), + EnumValue: "NORTH", }, }, }, @@ -346,34 +345,34 @@ func TestTypeSystemDefinition(t *testing.T) { expectValues: Equal(document.TypeSystemDefinition{ InputObjectTypeDefinitions: []document.InputObjectTypeDefinition{ { - Name: []byte("Person"), - Description: []byte("describes Person"), + Name: "Person", + Description: "describes Person", InputFieldsDefinition: document.InputFieldsDefinition{ document.InputValueDefinition{ - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, }, { - Name: []byte("thirdInput"), + Name: "thirdInput", }, { - Name: 
[]byte("secondInput"), - Description: []byte("second input"), + Name: "secondInput", + Description: "second input", }, { - Name: []byte("firstInput"), + Name: "firstInput", Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, @@ -381,13 +380,13 @@ func TestTypeSystemDefinition(t *testing.T) { }, }, { - Name: []byte("InputExample"), - Description: []byte("inputs InputExample"), + Name: "InputExample", + Description: "inputs InputExample", InputFieldsDefinition: document.InputFieldsDefinition{ document.InputValueDefinition{ - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, @@ -408,21 +407,21 @@ func TestTypeSystemDefinition(t *testing.T) { expectValues: Equal(document.TypeSystemDefinition{ DirectiveDefinitions: []document.DirectiveDefinition{ { - Name: []byte("somewhere"), - Description: []byte("describes somewhere"), + Name: "somewhere", + Description: "describes somewhere", DirectiveLocations: document.DirectiveLocations{ document.DirectiveLocationQUERY, }, }, { - Name: []byte("somehow"), + Name: "somehow", DirectiveLocations: document.DirectiveLocations{ document.DirectiveLocationMUTATION, }, }, { - Name: []byte("someway"), - Description: []byte("describes someway"), + Name: "someway", + Description: "describes someway", DirectiveLocations: document.DirectiveLocations{ document.DirectiveLocationSUBSCRIPTION, }, @@ -454,8 +453,8 @@ func TestTypeSystemDefinition(t *testing.T) { expectErr: Not(BeNil()), expectValues: Equal(document.TypeSystemDefinition{ SchemaDefinition: document.SchemaDefinition{ - Query: []byte("Query"), - Mutation: []byte("Mutation"), + Query: "Query", + Mutation: "Mutation", }}), }, } @@ -464,9 +463,8 @@ func TestTypeSystemDefinition(t *testing.T) { g.It(test.it, func() 
{ - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseTypeSystemDefinition() Expect(val).To(test.expectValues) @@ -475,7 +473,7 @@ func TestTypeSystemDefinition(t *testing.T) { } }) - starWarsSchema := []byte(` + starWarsSchema := ` schema { query: Query mutation: Mutation @@ -638,364 +636,364 @@ func TestTypeSystemDefinition(t *testing.T) { } union SearchResult = Human | Droid | Starship - `) + ` g.Describe("StarWars Schema", func() { - g.It("should parse", func() { - reader := bytes.NewReader(starWarsSchema) + g.It("should parse the starwars schema", func() { + parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(starWarsSchema) val, err := parser.parseTypeSystemDefinition() Expect(val).To(Equal(document.TypeSystemDefinition{ SchemaDefinition: document.SchemaDefinition{ - Query: []byte("Query"), - Mutation: []byte("Mutation"), - Subscription: []byte("Subscription"), + Query: "Query", + Mutation: "Mutation", + Subscription: "Subscription", }, ScalarTypeDefinitions: nil, ObjectTypeDefinitions: []document.ObjectTypeDefinition{ { - Description: []byte("The query type, represents all of the entry points into our object graph"), - Name: []byte("Query"), + Description: "The query type, represents all of the entry points into our object graph", + Name: "Query", FieldsDefinition: []document.FieldDefinition{ - { - Name: []byte("__schema"), + /*{ + Name: "__schema", Type: document.NamedType{ - Name: []byte("__Schema"), + Name: "__Schema", NonNull: true, }, }, { - Name: []byte("__type"), + Name: "__type", Type: document.NamedType{ - Name: []byte("__Type"), + Name: "__Type", NonNull: false, }, ArgumentsDefinition: []document.InputValueDefinition{ { - Name: []byte("name"), + Name: "name", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", NonNull: true, }, }, }, - }, + },*/ { - Name: []byte("hero"), + Name: "hero", ArgumentsDefinition: 
document.ArgumentsDefinition{ { - Name: []byte("episode"), - Type: document.NamedType{Name: []byte("Episode")}, + Name: "episode", + Type: document.NamedType{Name: "Episode"}, }, }, - Type: document.NamedType{Name: []byte("Character")}, + Type: document.NamedType{Name: "Character"}, }, { - Name: []byte("reviews"), + Name: "reviews", ArgumentsDefinition: document.ArgumentsDefinition{ { - Name: []byte("episode"), - Type: document.NamedType{Name: []byte("Episode"), NonNull: true}, + Name: "episode", + Type: document.NamedType{Name: "Episode", NonNull: true}, }, }, - Type: document.ListType{Type: document.NamedType{Name: []byte("Review")}}, + Type: document.ListType{Type: document.NamedType{Name: "Review"}}, }, { - Name: []byte("search"), + Name: "search", ArgumentsDefinition: document.ArgumentsDefinition{ { - Name: []byte("text"), - Type: document.NamedType{Name: []byte("String")}, + Name: "text", + Type: document.NamedType{Name: "String"}, }, }, - Type: document.ListType{Type: document.NamedType{Name: []byte("SearchResult")}}, + Type: document.ListType{Type: document.NamedType{Name: "SearchResult"}}, }, { - Name: []byte("character"), + Name: "character", ArgumentsDefinition: document.ArgumentsDefinition{ { - Name: []byte("id"), - Type: document.NamedType{Name: []byte("ID"), NonNull: true}, + Name: "id", + Type: document.NamedType{Name: "ID", NonNull: true}, }, }, - Type: document.NamedType{Name: []byte("Character")}, + Type: document.NamedType{Name: "Character"}, }, { - Name: []byte("droid"), + Name: "droid", ArgumentsDefinition: document.ArgumentsDefinition{ { - Name: []byte("id"), - Type: document.NamedType{Name: []byte("ID"), NonNull: true}, + Name: "id", + Type: document.NamedType{Name: "ID", NonNull: true}, }, }, - Type: document.NamedType{Name: []byte("Droid")}, + Type: document.NamedType{Name: "Droid"}, }, { - Name: []byte("human"), + Name: "human", ArgumentsDefinition: document.ArgumentsDefinition{ { - Name: []byte("id"), - Type: document.NamedType{Name: 
[]byte("ID"), NonNull: true}, + Name: "id", + Type: document.NamedType{Name: "ID", NonNull: true}, }, }, - Type: document.NamedType{Name: []byte("Human")}, + Type: document.NamedType{Name: "Human"}, }, { - Name: []byte("starship"), + Name: "starship", ArgumentsDefinition: document.ArgumentsDefinition{ { - Name: []byte("id"), - Type: document.NamedType{Name: []byte("ID"), NonNull: true}, + Name: "id", + Type: document.NamedType{Name: "ID", NonNull: true}, }, }, - Type: document.NamedType{Name: []byte("Starship")}, + Type: document.NamedType{Name: "Starship"}, }, }, ImplementsInterfaces: nil, Directives: nil, }, { - Description: []byte("The mutation type, represents all updates we can make to our data"), - Name: []byte("Mutation"), + Description: "The mutation type, represents all updates we can make to our data", + Name: "Mutation", FieldsDefinition: document.FieldsDefinition{ { - Name: []byte("createReview"), + Name: "createReview", ArgumentsDefinition: document.ArgumentsDefinition{ { - Name: []byte("episode"), - Type: document.NamedType{Name: []byte("Episode")}, + Name: "episode", + Type: document.NamedType{Name: "Episode"}, }, { - Name: []byte("review"), - Type: document.NamedType{Name: []byte("ReviewInput"), NonNull: true}, + Name: "review", + Type: document.NamedType{Name: "ReviewInput", NonNull: true}, }, }, - Type: document.NamedType{Name: []byte("Review")}, + Type: document.NamedType{Name: "Review"}, }, }, ImplementsInterfaces: nil, Directives: nil, }, { - Description: []byte("The subscription type, represents all subscriptions we can make to our data"), - Name: []byte("Subscription"), + Description: "The subscription type, represents all subscriptions we can make to our data", + Name: "Subscription", FieldsDefinition: document.FieldsDefinition{ { - Name: []byte("reviewAdded"), + Name: "reviewAdded", ArgumentsDefinition: document.ArgumentsDefinition{ { - Name: []byte("episode"), - Type: document.NamedType{Name: []byte("Episode")}, + Name: "episode", + Type: 
document.NamedType{Name: "Episode"}, }, }, - Type: document.NamedType{Name: []byte("Review")}, + Type: document.NamedType{Name: "Review"}, }, }, ImplementsInterfaces: nil, Directives: nil, }, { - Description: []byte("A humanoid creature from the Star Wars universe"), - Name: []byte("Human"), + Description: "A humanoid creature from the Star Wars universe", + Name: "Human", FieldsDefinition: document.FieldsDefinition{ { - Description: []byte("The ID of the human"), - Name: []byte("id"), + Description: "The ID of the human", + Name: "id", ArgumentsDefinition: nil, - Type: document.NamedType{Name: []byte("ID"), NonNull: true}, + Type: document.NamedType{Name: "ID", NonNull: true}, Directives: nil, }, { - Description: []byte("What this human calls themselves"), - Name: []byte("name"), + Description: "What this human calls themselves", + Name: "name", ArgumentsDefinition: nil, - Type: document.NamedType{Name: []byte("String"), NonNull: true}, + Type: document.NamedType{Name: "String", NonNull: true}, Directives: nil, }, { - Description: []byte("The home planet of the human, or null if unknown"), - Name: []byte("homePlanet"), + Description: "The home planet of the human, or null if unknown", + Name: "homePlanet", ArgumentsDefinition: nil, - Type: document.NamedType{Name: []byte("String")}, + Type: document.NamedType{Name: "String"}, Directives: nil, }, { - Description: []byte("Height in the preferred unit, default is meters"), - Name: []byte("height"), + Description: "Height in the preferred unit, default is meters", + Name: "height", ArgumentsDefinition: document.ArgumentsDefinition{ { - Name: []byte("unit"), - Type: document.NamedType{Name: []byte("LengthUnit")}, - DefaultValue: document.EnumValue{Name: []byte("METER")}, + Name: "unit", + Type: document.NamedType{Name: "LengthUnit"}, + DefaultValue: document.EnumValue{Name: "METER"}, }, }, - Type: document.NamedType{Name: []byte("Float")}, + Type: document.NamedType{Name: "Float"}, }, { - Description: []byte("Mass in 
kilograms, or null if unknown"), - Name: []byte("mass"), + Description: "Mass in kilograms, or null if unknown", + Name: "mass", ArgumentsDefinition: nil, - Type: document.NamedType{Name: []byte("Float")}, + Type: document.NamedType{Name: "Float"}, Directives: nil, }, { - Description: []byte("This human's friends, or an empty list if they have none"), - Name: []byte("friends"), + Description: "This human's friends, or an empty list if they have none", + Name: "friends", ArgumentsDefinition: nil, - Type: document.ListType{Type: document.NamedType{Name: []byte("Character")}}, + Type: document.ListType{Type: document.NamedType{Name: "Character"}}, Directives: nil, }, { - Description: []byte("The friends of the human exposed as a connection with edges"), - Name: []byte("friendsConnection"), + Description: "The friends of the human exposed as a connection with edges", + Name: "friendsConnection", ArgumentsDefinition: document.ArgumentsDefinition{ { - Name: []byte("first"), - Type: document.NamedType{Name: []byte("Int")}, + Name: "first", + Type: document.NamedType{Name: "Int"}, }, { - Name: []byte("after"), - Type: document.NamedType{Name: []byte("ID")}, + Name: "after", + Type: document.NamedType{Name: "ID"}, }, }, Type: document.NamedType{ - Name: []byte("FriendsConnection"), + Name: "FriendsConnection", NonNull: true, }, }, { - Description: []byte("The movies this human appears in"), - Name: []byte("appearsIn"), + Description: "The movies this human appears in", + Name: "appearsIn", ArgumentsDefinition: nil, Type: document.ListType{Type: document.NamedType{ - Name: []byte("Episode"), + Name: "Episode", }, NonNull: true, }, }, { - Description: []byte("A list of starships this person has piloted, or an empty list if none"), - Name: []byte("starships"), + Description: "A list of starships this person has piloted, or an empty list if none", + Name: "starships", ArgumentsDefinition: nil, Type: document.ListType{Type: document.NamedType{ - Name: []byte("Starship"), + Name: 
"Starship", }, }, }, }, - ImplementsInterfaces: document.ImplementsInterfaces{[]byte("Character")}, + ImplementsInterfaces: document.ImplementsInterfaces{"Character"}, Directives: nil, }, { - Description: []byte("An autonomous mechanical character in the Star Wars universe"), - Name: []byte("Droid"), + Description: "An autonomous mechanical character in the Star Wars universe", + Name: "Droid", FieldsDefinition: document.FieldsDefinition{ { - Description: []byte("The ID of the droid"), - Name: []byte("id"), + Description: "The ID of the droid", + Name: "id", ArgumentsDefinition: nil, Type: document.NamedType{ - Name: []byte("ID"), + Name: "ID", NonNull: true, }, }, { - Description: []byte("What others call this droid"), - Name: []byte("name"), + Description: "What others call this droid", + Name: "name", ArgumentsDefinition: nil, Type: document.NamedType{ - Name: []byte("String"), + Name: "String", NonNull: true, }, }, { - Description: []byte("This droid's friends, or an empty list if they have none"), - Name: []byte("friends"), + Description: "This droid's friends, or an empty list if they have none", + Name: "friends", ArgumentsDefinition: nil, Type: document.ListType{Type: document.NamedType{ - Name: []byte("Character"), + Name: "Character", }}, }, { - Description: []byte("The friends of the droid exposed as a connection with edges"), - Name: []byte("friendsConnection"), + Description: "The friends of the droid exposed as a connection with edges", + Name: "friendsConnection", ArgumentsDefinition: document.ArgumentsDefinition{ { - Name: []byte("first"), + Name: "first", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, { - Name: []byte("after"), + Name: "after", Type: document.NamedType{ - Name: []byte("ID"), + Name: "ID", }, }, }, Type: document.NamedType{ - Name: []byte("FriendsConnection"), + Name: "FriendsConnection", NonNull: true, }, }, { - Description: []byte("The movies this droid appears in"), - Name: []byte("appearsIn"), + 
Description: "The movies this droid appears in", + Name: "appearsIn", ArgumentsDefinition: nil, Type: document.ListType{Type: document.NamedType{ - Name: []byte("Episode"), + Name: "Episode", }, NonNull: true, }, }, { - Description: []byte("This droid's primary function"), - Name: []byte("primaryFunction"), + Description: "This droid's primary function", + Name: "primaryFunction", ArgumentsDefinition: nil, Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, - ImplementsInterfaces: document.ImplementsInterfaces{[]byte("Character")}, + ImplementsInterfaces: document.ImplementsInterfaces{"Character"}, Directives: nil, }, { - Description: []byte("A connection object for a character's friends"), - Name: []byte("FriendsConnection"), + Description: "A connection object for a character's friends", + Name: "FriendsConnection", FieldsDefinition: document.FieldsDefinition{ { - Description: []byte("The total number of friends"), - Name: []byte("totalCount"), + Description: "The total number of friends", + Name: "totalCount", ArgumentsDefinition: nil, Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", }, }, { - Description: []byte("The edges for each of the character's friends."), - Name: []byte("edges"), + Description: "The edges for each of the character's friends.", + Name: "edges", ArgumentsDefinition: nil, Type: document.ListType{Type: document.NamedType{ - Name: []byte("FriendsEdge"), + Name: "FriendsEdge", }}, }, { - Description: []byte("A list of the friends, as a convenience when edges are not needed."), - Name: []byte("friends"), + Description: "A list of the friends, as a convenience when edges are not needed.", + Name: "friends", ArgumentsDefinition: nil, Type: document.ListType{Type: document.NamedType{ - Name: []byte("Character"), + Name: "Character", }}, }, { - Description: []byte("Information for paginating this connection"), - Name: []byte("pageInfo"), + Description: "Information for paginating this connection", + Name: 
"pageInfo", ArgumentsDefinition: nil, Type: document.NamedType{ - Name: []byte("PageInfo"), + Name: "PageInfo", NonNull: true, }, }, @@ -1004,24 +1002,24 @@ func TestTypeSystemDefinition(t *testing.T) { Directives: nil, }, { - Description: []byte("An edge object for a character's friends"), - Name: []byte("FriendsEdge"), + Description: "An edge object for a character's friends", + Name: "FriendsEdge", FieldsDefinition: document.FieldsDefinition{ { - Description: []byte("A cursor used for pagination"), - Name: []byte("cursor"), + Description: "A cursor used for pagination", + Name: "cursor", ArgumentsDefinition: nil, Type: document.NamedType{ - Name: []byte("ID"), + Name: "ID", NonNull: true, }, }, { - Description: []byte("The character represented by this friendship edge"), - Name: []byte("node"), + Description: "The character represented by this friendship edge", + Name: "node", ArgumentsDefinition: nil, Type: document.NamedType{ - Name: []byte("Character"), + Name: "Character", }, }, }, @@ -1029,28 +1027,28 @@ func TestTypeSystemDefinition(t *testing.T) { Directives: nil, }, { - Description: []byte("Information for paginating this connection"), - Name: []byte("PageInfo"), + Description: "Information for paginating this connection", + Name: "PageInfo", FieldsDefinition: document.FieldsDefinition{ { - Name: []byte("startCursor"), + Name: "startCursor", ArgumentsDefinition: nil, Type: document.NamedType{ - Name: []byte("ID"), + Name: "ID", }, }, { - Name: []byte("endCursor"), + Name: "endCursor", ArgumentsDefinition: nil, Type: document.NamedType{ - Name: []byte("ID"), + Name: "ID", }, }, { - Name: []byte("hasNextPage"), + Name: "hasNextPage", ArgumentsDefinition: nil, Type: document.NamedType{ - Name: []byte("Boolean"), + Name: "Boolean", NonNull: true, }, }, @@ -1059,32 +1057,32 @@ func TestTypeSystemDefinition(t *testing.T) { Directives: nil, }, { - Description: []byte("Represents a review for a movie"), - Name: []byte("Review"), + Description: "Represents a 
review for a movie", + Name: "Review", FieldsDefinition: document.FieldsDefinition{ { - Description: []byte("The movie"), - Name: []byte("episode"), + Description: "The movie", + Name: "episode", ArgumentsDefinition: nil, Type: document.NamedType{ - Name: []byte("Episode"), + Name: "Episode", }, }, { - Description: []byte("The number of stars this review gave, 1-5"), - Name: []byte("stars"), + Description: "The number of stars this review gave, 1-5", + Name: "stars", ArgumentsDefinition: nil, Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", NonNull: true, }, }, { - Description: []byte("Comment about the movie"), - Name: []byte("commentary"), + Description: "Comment about the movie", + Name: "commentary", ArgumentsDefinition: nil, Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, }, @@ -1092,48 +1090,48 @@ func TestTypeSystemDefinition(t *testing.T) { Directives: nil, }, { - Name: []byte("Starship"), + Name: "Starship", FieldsDefinition: document.FieldsDefinition{ { - Description: []byte("The ID of the starship"), - Name: []byte("id"), + Description: "The ID of the starship", + Name: "id", ArgumentsDefinition: nil, Type: document.NamedType{ - Name: []byte("ID"), + Name: "ID", NonNull: true, }, }, { - Description: []byte("The name of the starship"), - Name: []byte("name"), + Description: "The name of the starship", + Name: "name", ArgumentsDefinition: nil, Type: document.NamedType{ - Name: []byte("String"), + Name: "String", NonNull: true, }, }, { - Description: []byte("Length of the starship, along the longest axis"), - Name: []byte("length"), + Description: "Length of the starship, along the longest axis", + Name: "length", ArgumentsDefinition: document.ArgumentsDefinition{ { - Name: []byte("unit"), + Name: "unit", Type: document.NamedType{ - Name: []byte("LengthUnit"), + Name: "LengthUnit", }, - DefaultValue: document.EnumValue{Name: []byte("METER")}, + DefaultValue: document.EnumValue{Name: "METER"}, }, }, Type: 
document.NamedType{ - Name: []byte("Float"), + Name: "Float", }, }, { - Name: []byte("coordinates"), + Name: "coordinates", Type: document.ListType{ Type: document.ListType{ Type: document.NamedType{ - Name: []byte("Float"), + Name: "Float", NonNull: true, }, NonNull: true, @@ -1147,53 +1145,53 @@ func TestTypeSystemDefinition(t *testing.T) { }, InterfaceTypeDefinitions: []document.InterfaceTypeDefinition{ { - Description: []byte("A character from the Star Wars universe"), - Name: []byte("Character"), + Description: "A character from the Star Wars universe", + Name: "Character", FieldsDefinition: document.FieldsDefinition{ { - Description: []byte("The ID of the character"), - Name: []byte("id"), + Description: "The ID of the character", + Name: "id", ArgumentsDefinition: nil, - Type: document.NamedType{Name: []byte("ID"), NonNull: true}, + Type: document.NamedType{Name: "ID", NonNull: true}, Directives: nil, }, { - Description: []byte("The name of the character"), - Name: []byte("name"), + Description: "The name of the character", + Name: "name", ArgumentsDefinition: nil, - Type: document.NamedType{Name: []byte("String"), NonNull: true}, + Type: document.NamedType{Name: "String", NonNull: true}, Directives: nil, }, { - Description: []byte("The friends of the character, or an empty list if they have none"), - Name: []byte("friends"), + Description: "The friends of the character, or an empty list if they have none", + Name: "friends", ArgumentsDefinition: nil, - Type: document.ListType{Type: document.NamedType{Name: []byte("Character")}}, + Type: document.ListType{Type: document.NamedType{Name: "Character"}}, Directives: nil, }, { - Description: []byte("The friends of the character exposed as a connection with edges"), - Name: []byte("friendsConnection"), + Description: "The friends of the character exposed as a connection with edges", + Name: "friendsConnection", ArgumentsDefinition: document.ArgumentsDefinition{ { - Name: []byte("first"), - Type: 
document.NamedType{Name: []byte("Int")}, + Name: "first", + Type: document.NamedType{Name: "Int"}, }, { - Name: []byte("after"), - Type: document.NamedType{Name: []byte("ID")}, + Name: "after", + Type: document.NamedType{Name: "ID"}, }, }, Type: document.NamedType{ - Name: []byte("FriendsConnection"), + Name: "FriendsConnection", NonNull: true, }, }, { - Description: []byte("The movies this character appears in"), - Name: []byte("appearsIn"), + Description: "The movies this character appears in", + Name: "appearsIn", ArgumentsDefinition: nil, - Type: document.ListType{Type: document.NamedType{Name: []byte("Episode")}, NonNull: true}, + Type: document.ListType{Type: document.NamedType{Name: "Episode"}, NonNull: true}, Directives: nil, }, }, @@ -1201,96 +1199,96 @@ func TestTypeSystemDefinition(t *testing.T) { }, UnionTypeDefinitions: []document.UnionTypeDefinition{ { - Name: []byte("SearchResult"), - UnionMemberTypes: document.UnionMemberTypes{[]byte("Human"), []byte("Droid"), []byte("Starship")}, + Name: "SearchResult", + UnionMemberTypes: document.UnionMemberTypes{"Human", "Droid", "Starship"}, Directives: nil, }, }, EnumTypeDefinitions: []document.EnumTypeDefinition{ { - Description: []byte("The episodes in the Star Wars trilogy"), - Name: []byte("Episode"), + Description: "The episodes in the Star Wars trilogy", + Name: "Episode", EnumValuesDefinition: document.EnumValuesDefinition{ { - Description: []byte("Star Wars Episode IV: A New Hope, released in 1977."), - EnumValue: []byte("NEWHOPE"), + Description: "Star Wars Episode IV: A New Hope, released in 1977.", + EnumValue: "NEWHOPE", }, { - Description: []byte("Star Wars Episode V: The Empire Strikes Back, released in 1980."), - EnumValue: []byte("EMPIRE"), + Description: "Star Wars Episode V: The Empire Strikes Back, released in 1980.", + EnumValue: "EMPIRE", }, { - Description: []byte("Star Wars Episode VI: Return of the Jedi, released in 1983."), - EnumValue: []byte("JEDI"), + Description: "Star Wars Episode 
VI: Return of the Jedi, released in 1983.", + EnumValue: "JEDI", }, }, }, { - Description: []byte("Units of height"), - Name: []byte("LengthUnit"), + Description: "Units of height", + Name: "LengthUnit", EnumValuesDefinition: document.EnumValuesDefinition{ { - Description: []byte("The standard unit around the world"), - EnumValue: []byte("METER"), + Description: "The standard unit around the world", + EnumValue: "METER", }, { - Description: []byte("Primarily used in the United States"), - EnumValue: []byte("FOOT"), + Description: "Primarily used in the United States", + EnumValue: "FOOT", }, }, }, }, InputObjectTypeDefinitions: document.InputObjectTypeDefinitions{ { - Description: []byte("The input object sent when someone is creating a new review"), - Name: []byte("ReviewInput"), + Description: "The input object sent when someone is creating a new review", + Name: "ReviewInput", InputFieldsDefinition: document.InputFieldsDefinition{ { - Description: []byte("0-5 stars"), - Name: []byte("stars"), + Description: "0-5 stars", + Name: "stars", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", NonNull: true, }, }, { - Description: []byte("Comment about the movie, optional"), - Name: []byte("commentary"), + Description: "Comment about the movie, optional", + Name: "commentary", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", }, }, { - Description: []byte("Favorite color, optional"), - Name: []byte("favorite_color"), + Description: "Favorite color, optional", + Name: "favorite_color", Type: document.NamedType{ - Name: []byte("ColorInput"), + Name: "ColorInput", }, }, }, }, { - Description: []byte("The input object sent when passing in a color"), - Name: []byte("ColorInput"), + Description: "The input object sent when passing in a color", + Name: "ColorInput", InputFieldsDefinition: document.InputFieldsDefinition{ { - Name: []byte("red"), + Name: "red", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", NonNull: true, }, }, 
{ - Name: []byte("green"), + Name: "green", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", NonNull: true, }, }, { - Name: []byte("blue"), + Name: "blue", Type: document.NamedType{ - Name: []byte("Int"), + Name: "Int", NonNull: true, }, }, diff --git a/pkg/parser/uniontypedefinition_parser_test.go b/pkg/parser/uniontypedefinition_parser_test.go index 1c273ac90b..63eff2ea09 100644 --- a/pkg/parser/uniontypedefinition_parser_test.go +++ b/pkg/parser/uniontypedefinition_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" @@ -27,10 +26,10 @@ func TestParseUnionTypeDefinition(t *testing.T) { input: ` SearchResult = Photo | Person`, expectErr: BeNil(), expectValues: Equal(document.UnionTypeDefinition{ - Name: []byte("SearchResult"), + Name: "SearchResult", UnionMemberTypes: document.UnionMemberTypes{ - []byte("Photo"), - []byte("Person"), + "Photo", + "Person", }, }), }, @@ -39,12 +38,12 @@ func TestParseUnionTypeDefinition(t *testing.T) { input: ` SearchResult = Photo | Person | Car | Planet`, expectErr: BeNil(), expectValues: Equal(document.UnionTypeDefinition{ - Name: []byte("SearchResult"), + Name: "SearchResult", UnionMemberTypes: document.UnionMemberTypes{ - []byte("Photo"), - []byte("Person"), - []byte("Car"), - []byte("Planet"), + "Photo", + "Person", + "Car", + "Planet", }, }), }, @@ -56,12 +55,12 @@ func TestParseUnionTypeDefinition(t *testing.T) { | Planet`, expectErr: BeNil(), expectValues: Equal(document.UnionTypeDefinition{ - Name: []byte("SearchResult"), + Name: "SearchResult", UnionMemberTypes: document.UnionMemberTypes{ - []byte("Photo"), - []byte("Person"), - []byte("Car"), - []byte("Planet"), + "Photo", + "Person", + "Car", + "Planet", }, }), }, @@ -70,34 +69,34 @@ func TestParseUnionTypeDefinition(t *testing.T) { input: ` SearchResult @fromTop(to: "bottom") @fromBottom(to: "top") = Photo | Person`, expectErr: 
BeNil(), expectValues: Equal(document.UnionTypeDefinition{ - Name: []byte("SearchResult"), + Name: "SearchResult", Directives: document.Directives{ document.Directive{ - Name: []byte("fromTop"), + Name: "fromTop", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("bottom"), + Val: "bottom", }, }, }, }, document.Directive{ - Name: []byte("fromBottom"), + Name: "fromBottom", Arguments: document.Arguments{ document.Argument{ - Name: []byte("to"), + Name: "to", Value: document.StringValue{ - Val: []byte("top"), + Val: "top", }, }, }, }, }, UnionMemberTypes: document.UnionMemberTypes{ - []byte("Photo"), - []byte("Person"), + "Photo", + "Person", }, }), }, @@ -106,7 +105,7 @@ func TestParseUnionTypeDefinition(t *testing.T) { input: ` SearchResult`, expectErr: BeNil(), expectValues: Equal(document.UnionTypeDefinition{ - Name: []byte("SearchResult"), + Name: "SearchResult", }), }, } @@ -116,9 +115,8 @@ func TestParseUnionTypeDefinition(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseUnionTypeDefinition() Expect(err).To(test.expectErr) diff --git a/pkg/parser/value_parser_test.go b/pkg/parser/value_parser_test.go index 7ebfef15fb..9aa9f6cb95 100644 --- a/pkg/parser/value_parser_test.go +++ b/pkg/parser/value_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" "testing" . 
"github.com/franela/goblin" @@ -29,7 +28,7 @@ func TestValueParser(t *testing.T) { input: "$foo", expectErr: BeNil(), expectValues: Equal(document.VariableValue{ - Name: []byte("foo"), + Name: "foo", }), }, { @@ -74,7 +73,7 @@ func TestValueParser(t *testing.T) { input: `"this is a string value"`, expectErr: BeNil(), expectValues: Equal(document.StringValue{ - Val: []byte("this is a string value"), + Val: "this is a string value", }), }, { @@ -82,7 +81,7 @@ func TestValueParser(t *testing.T) { input: `"""this is a string value"""`, expectErr: BeNil(), expectValues: Equal(document.StringValue{ - Val: []byte("this is a string value"), + Val: "this is a string value", }), }, { @@ -110,7 +109,7 @@ func TestValueParser(t *testing.T) { expectValues: Equal(document.ObjectValue{ Val: []document.ObjectField{ { - Name: []byte("isTrue"), + Name: "isTrue", Value: document.BooleanValue{ Val: true, }, @@ -139,9 +138,8 @@ func TestValueParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseValue() Expect(err).To(test.expectErr) diff --git a/pkg/parser/variable_value_parser_test.go b/pkg/parser/variable_value_parser_test.go index 912548c033..2acd903077 100644 --- a/pkg/parser/variable_value_parser_test.go +++ b/pkg/parser/variable_value_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . 
"github.com/onsi/gomega" @@ -27,7 +26,7 @@ func TestVariableValueParser(t *testing.T) { input: `$anyIdent`, expectErr: BeNil(), expectValues: Equal(document.VariableValue{ - Name: []byte("anyIdent"), + Name: "anyIdent", }), }, { @@ -43,9 +42,8 @@ func TestVariableValueParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parsePeekedVariableValue() Expect(err).To(test.expectErr) diff --git a/pkg/parser/variabledefinitions_parser_test.go b/pkg/parser/variabledefinitions_parser_test.go index cf6b7347d8..21623e93e1 100644 --- a/pkg/parser/variabledefinitions_parser_test.go +++ b/pkg/parser/variabledefinitions_parser_test.go @@ -1,7 +1,6 @@ package parser import ( - "bytes" . "github.com/franela/goblin" "github.com/jensneuse/graphql-go-tools/pkg/document" . "github.com/onsi/gomega" @@ -28,9 +27,9 @@ func TestVariableDefinitionsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.VariableDefinitions{ document.VariableDefinition{ - Variable: []byte("foo"), + Variable: "foo", Type: document.NamedType{ - Name: []byte("bar"), + Name: "bar", NonNull: true, }, }, @@ -42,9 +41,9 @@ func TestVariableDefinitionsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.VariableDefinitions{ document.VariableDefinition{ - Variable: []byte("color"), + Variable: "color", Type: document.NamedType{ - Name: []byte("String"), + Name: "String", NonNull: false, }, }, @@ -56,15 +55,15 @@ func TestVariableDefinitionsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.VariableDefinitions{ document.VariableDefinition{ - Variable: []byte("foo"), + Variable: "foo", Type: document.NamedType{ - Name: []byte("bar"), + Name: "bar", }, }, document.VariableDefinition{ - Variable: []byte("baz"), + Variable: "baz", Type: document.NamedType{ - Name: []byte("bax"), + Name: "bax", }, }, }), @@ -75,15 +74,15 @@ func 
TestVariableDefinitionsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.VariableDefinitions{ document.VariableDefinition{ - Variable: []byte("foo"), + Variable: "foo", Type: document.ListType{Type: document.NamedType{ - Name: []byte("bar"), + Name: "bar", }}, }, document.VariableDefinition{ - Variable: []byte("baz"), + Variable: "baz", Type: document.NamedType{ - Name: []byte("bax"), + Name: "bax", }, }, }), @@ -94,16 +93,16 @@ func TestVariableDefinitionsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.VariableDefinitions{ document.VariableDefinition{ - Variable: []byte("foo"), + Variable: "foo", Type: document.NamedType{ - Name: []byte("bar"), + Name: "bar", NonNull: true, }, }, document.VariableDefinition{ - Variable: []byte("baz"), + Variable: "baz", Type: document.NamedType{ - Name: []byte("bax"), + Name: "bax", }, }, }), @@ -114,19 +113,19 @@ func TestVariableDefinitionsParser(t *testing.T) { expectErr: BeNil(), expectValues: Equal(document.VariableDefinitions{ document.VariableDefinition{ - Variable: []byte("foo"), + Variable: "foo", Type: document.NamedType{ - Name: []byte("bar"), + Name: "bar", NonNull: true, }, DefaultValue: document.StringValue{ - Val: []byte("me"), + Val: "me", }, }, document.VariableDefinition{ - Variable: []byte("baz"), + Variable: "baz", Type: document.NamedType{ - Name: []byte("bax"), + Name: "bax", }, }, }), @@ -137,9 +136,9 @@ func TestVariableDefinitionsParser(t *testing.T) { expectErr: Not(BeNil()), expectValues: Equal(document.VariableDefinitions{ document.VariableDefinition{ - Variable: []byte("foo"), + Variable: "foo", Type: document.NamedType{ - Name: []byte("bar"), + Name: "bar", NonNull: true, }, }, @@ -158,9 +157,8 @@ func TestVariableDefinitionsParser(t *testing.T) { g.It(test.it, func() { - reader := bytes.NewReader([]byte(test.input)) parser := NewParser() - parser.l.SetInput(reader) + parser.l.SetInput(test.input) val, err := parser.parseVariableDefinitions() 
Expect(err).To(test.expectErr) diff --git a/pkg/transform/stringslice.go b/pkg/transform/stringslice.go index 8bafd4fb43..caebfffac1 100644 --- a/pkg/transform/stringslice.go +++ b/pkg/transform/stringslice.go @@ -4,14 +4,14 @@ import ( "strconv" ) -// StringSliceToFloat32 converts a string slice to a float32 -func StringSliceToFloat32(input []byte) (float32, error) { - f64, err := strconv.ParseFloat(string(input), 32) +// StringToFloat32 converts a string slice to a float32 +func StringToFloat32(input string) (float32, error) { + f64, err := strconv.ParseFloat(input, 32) return float32(f64), err } -// StringSliceToInt32 converts a string slice to a int32 -func StringSliceToInt32(input []byte) (int32, error) { - i64, err := strconv.ParseInt(string(input), 10, 32) +// StringToInt32 converts a string slice to a int32 +func StringToInt32(input string) (int32, error) { + i64, err := strconv.ParseInt(input, 10, 32) return int32(i64), err } diff --git a/pkg/transform/stringslice_test.go b/pkg/transform/stringslice_test.go index a08d04ea63..2002da9aed 100644 --- a/pkg/transform/stringslice_test.go +++ b/pkg/transform/stringslice_test.go @@ -11,67 +11,67 @@ func TestBytesTransform(t *testing.T) { g := Goblin(t) RegisterFailHandler(func(m string, _ ...int) { g.Fail(m) }) - g.Describe("StringSliceToFloat32", func() { + g.Describe("StringToFloat32", func() { g.It("should transform byte[\"13.37\"] to float32(13.37)", func() { - input := []byte("13.37") - out, err := StringSliceToFloat32(input) + input := "13.37" + out, err := StringToFloat32(input) Expect(err).To(BeNil()) Expect(out).To(Equal(float32(13.37))) }) g.It("should transform byte[\"-13.37\"] to float32(-13.37)", func() { - input := []byte("-13.37") - out, err := StringSliceToFloat32(input) + input := "-13.37" + out, err := StringToFloat32(input) Expect(err).To(BeNil()) Expect(out).To(Equal(float32(-13.37))) }) g.It("should transform byte[\"1337\"] to float32(1337)", func() { - input := []byte("1337") - out, err := 
StringSliceToFloat32(input) + input := "1337" + out, err := StringToFloat32(input) Expect(err).To(BeNil()) Expect(out).To(Equal(float32(1337))) }) g.It("should transform byte[\"invalidInput\"] to err(invalidInput)", func() { - input := []byte("invalidInput") - out, err := StringSliceToFloat32(input) + input := "invalidInput" + out, err := StringToFloat32(input) Expect(err).NotTo(BeNil()) Expect(out).To(Equal(float32(0))) }) }) - g.Describe("StringSliceToInt32", func() { + g.Describe("StringToInt32", func() { g.It("should transform byte[\"1337\"] into int32(1337)", func() { - input := []byte("1337") - out, err := StringSliceToInt32(input) + input := "1337" + out, err := StringToInt32(input) Expect(err).To(BeNil()) Expect(out).To(Equal(int32(1337))) }) g.It("should transform byte[\"-1337\"] into int32(1337)", func() { - input := []byte("-1337") - out, err := StringSliceToInt32(input) + input := "-1337" + out, err := StringToInt32(input) Expect(err).To(BeNil()) Expect(out).To(Equal(int32(-1337))) }) g.It("should transform byte[\"13.37\"] into err(invalid syntax)", func() { - input := []byte("13.37") - out, err := StringSliceToInt32(input) + input := "13.37" + out, err := StringToInt32(input) Expect(err).NotTo(BeNil()) Expect(out).To(Equal(int32(0))) }) g.It("should transform byte[\"invalidInput\"] into err(invalid syntax)", func() { - input := []byte("invalidInput") - out, err := StringSliceToInt32(input) + input := "invalidInput" + out, err := StringToInt32(input) Expect(err).NotTo(BeNil()) Expect(out).To(Equal(int32(0))) }) diff --git a/pkg/transform/trim_whitespace.go b/pkg/transform/trim_whitespace.go index a738c14ce1..9e4d72489f 100644 --- a/pkg/transform/trim_whitespace.go +++ b/pkg/transform/trim_whitespace.go @@ -1,27 +1,27 @@ package transform import ( - "bytes" + "strings" "github.com/jensneuse/graphql-go-tools/pkg/lexing/literal" ) // TrimWhitespace removes all spaces,tabs,lineterminators before and after a literal -func TrimWhitespace(lit []byte) []byte { 
+func TrimWhitespace(lit string) string { for { - if bytes.HasPrefix(lit, literal.SPACE) { - lit = bytes.TrimPrefix(lit, literal.SPACE) + if strings.HasPrefix(lit, literal.SPACE) { + lit = strings.TrimPrefix(lit, literal.SPACE) continue } - if bytes.HasPrefix(lit, literal.TAB) { - lit = bytes.TrimPrefix(lit, literal.TAB) + if strings.HasPrefix(lit, literal.TAB) { + lit = strings.TrimPrefix(lit, literal.TAB) continue } - if bytes.HasPrefix(lit, literal.LINETERMINATOR) { - lit = bytes.TrimPrefix(lit, literal.LINETERMINATOR) + if strings.HasPrefix(lit, literal.LINETERMINATOR) { + lit = strings.TrimPrefix(lit, literal.LINETERMINATOR) continue } @@ -29,18 +29,18 @@ func TrimWhitespace(lit []byte) []byte { } for { - if bytes.HasSuffix(lit, literal.SPACE) { - lit = bytes.TrimSuffix(lit, literal.SPACE) + if strings.HasSuffix(lit, literal.SPACE) { + lit = strings.TrimSuffix(lit, literal.SPACE) continue } - if bytes.HasSuffix(lit, literal.TAB) { - lit = bytes.TrimSuffix(lit, literal.TAB) + if strings.HasSuffix(lit, literal.TAB) { + lit = strings.TrimSuffix(lit, literal.TAB) continue } - if bytes.HasSuffix(lit, literal.LINETERMINATOR) { - lit = bytes.TrimSuffix(lit, literal.LINETERMINATOR) + if strings.HasSuffix(lit, literal.LINETERMINATOR) { + lit = strings.TrimSuffix(lit, literal.LINETERMINATOR) continue } diff --git a/pkg/transform/trim_whitespace_test.go b/pkg/transform/trim_whitespace_test.go index 3995892b44..e930b15917 100644 --- a/pkg/transform/trim_whitespace_test.go +++ b/pkg/transform/trim_whitespace_test.go @@ -16,32 +16,32 @@ func TestTemplate(t *testing.T) { tests := []struct { it string - input []byte + input string expect types.GomegaMatcher }{ { it: "should trim space", - input: []byte(` lorem ipsum `), - expect: Equal([]byte(`lorem ipsum`)), + input: ` lorem ipsum `, + expect: Equal(`lorem ipsum`), }, { it: "should trim tabs", - input: []byte(` lorem ipsum `), - expect: Equal([]byte(`lorem ipsum`)), + input: ` lorem ipsum `, + expect: Equal(`lorem ipsum`), 
}, { it: "should trim lineterminators", - input: []byte(` + input: ` lorem ipsum -`), - expect: Equal([]byte(`lorem ipsum`)), +`, + expect: Equal(`lorem ipsum`), }, { it: "should trim all kinds of whitespace", - input: []byte(` + input: ` lorem ipsum - `), - expect: Equal([]byte(`lorem ipsum`)), + `, + expect: Equal(`lorem ipsum`), }, }