From d7d7e8183a82e03e8ca7d95ff885f4723c12e199 Mon Sep 17 00:00:00 2001 From: jnsone11 Date: Thu, 17 Jan 2019 22:17:02 +0100 Subject: [PATCH 1/7] switch lexer impl to byte slice references and update tests accordingly --- pkg/lexer/fixtures/introspection_lexed.golden | 1050 ++++++++++++----- pkg/lexer/lexer.go | 445 ++++--- pkg/lexer/lexer_test.go | 60 +- 3 files changed, 1008 insertions(+), 547 deletions(-) diff --git a/pkg/lexer/fixtures/introspection_lexed.golden b/pkg/lexer/fixtures/introspection_lexed.golden index 7b559f0f6d..e7a7ff9a6e 100644 --- a/pkg/lexer/fixtures/introspection_lexed.golden +++ b/pkg/lexer/fixtures/introspection_lexed.golden @@ -1,8 +1,11 @@ [ { "Keyword": 35, - "Literal": "query", - "Position": { + "Literal": { + "Start": 0, + "End": 5 + }, + "TextPosition": { "LineStart": 1, "LineEnd": 1, "CharStart": 1, @@ -12,8 +15,11 @@ }, { "Keyword": 1, - "Literal": "IntrospectionQuery", - "Position": { + "Literal": { + "Start": 6, + "End": 24 + }, + "TextPosition": { "LineStart": 1, "LineEnd": 1, "CharStart": 7, @@ -23,8 +29,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 25, + "End": 26 + }, + "TextPosition": { "LineStart": 1, "LineEnd": 1, "CharStart": 26, @@ -34,8 +43,11 @@ }, { "Keyword": 1, - "Literal": "__schema", - "Position": { + "Literal": { + "Start": 29, + "End": 37 + }, + "TextPosition": { "LineStart": 2, "LineEnd": 2, "CharStart": 3, @@ -45,8 +57,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 38, + "End": 39 + }, + "TextPosition": { "LineStart": 2, "LineEnd": 2, "CharStart": 12, @@ -56,8 +71,11 @@ }, { "Keyword": 1, - "Literal": "queryType", - "Position": { + "Literal": { + "Start": 44, + "End": 53 + }, + "TextPosition": { "LineStart": 3, "LineEnd": 3, "CharStart": 5, @@ -67,8 +85,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 54, + "End": 55 + }, + "TextPosition": { "LineStart": 3, "LineEnd": 3, "CharStart": 15, @@ -78,8 
+99,11 @@ }, { "Keyword": 1, - "Literal": "name", - "Position": { + "Literal": { + "Start": 62, + "End": 66 + }, + "TextPosition": { "LineStart": 4, "LineEnd": 4, "CharStart": 7, @@ -89,8 +113,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 71, + "End": 72 + }, + "TextPosition": { "LineStart": 5, "LineEnd": 5, "CharStart": 5, @@ -100,8 +127,11 @@ }, { "Keyword": 1, - "Literal": "mutationType", - "Position": { + "Literal": { + "Start": 77, + "End": 89 + }, + "TextPosition": { "LineStart": 6, "LineEnd": 6, "CharStart": 5, @@ -111,8 +141,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 90, + "End": 91 + }, + "TextPosition": { "LineStart": 6, "LineEnd": 6, "CharStart": 18, @@ -122,8 +155,11 @@ }, { "Keyword": 1, - "Literal": "name", - "Position": { + "Literal": { + "Start": 98, + "End": 102 + }, + "TextPosition": { "LineStart": 7, "LineEnd": 7, "CharStart": 7, @@ -133,8 +169,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 107, + "End": 108 + }, + "TextPosition": { "LineStart": 8, "LineEnd": 8, "CharStart": 5, @@ -144,8 +183,11 @@ }, { "Keyword": 1, - "Literal": "subscriptionType", - "Position": { + "Literal": { + "Start": 113, + "End": 129 + }, + "TextPosition": { "LineStart": 9, "LineEnd": 9, "CharStart": 5, @@ -155,8 +197,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 130, + "End": 131 + }, + "TextPosition": { "LineStart": 9, "LineEnd": 9, "CharStart": 22, @@ -166,8 +211,11 @@ }, { "Keyword": 1, - "Literal": "name", - "Position": { + "Literal": { + "Start": 138, + "End": 142 + }, + "TextPosition": { "LineStart": 10, "LineEnd": 10, "CharStart": 7, @@ -177,8 +225,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 147, + "End": 148 + }, + "TextPosition": { "LineStart": 11, "LineEnd": 11, "CharStart": 5, @@ -188,8 +239,11 @@ }, { "Keyword": 1, - "Literal": "types", - "Position": { + 
"Literal": { + "Start": 153, + "End": 158 + }, + "TextPosition": { "LineStart": 12, "LineEnd": 12, "CharStart": 5, @@ -199,8 +253,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 159, + "End": 160 + }, + "TextPosition": { "LineStart": 12, "LineEnd": 12, "CharStart": 11, @@ -210,8 +267,11 @@ }, { "Keyword": 12, - "Literal": "...", - "Position": { + "Literal": { + "Start": 167, + "End": 170 + }, + "TextPosition": { "LineStart": 13, "LineEnd": 13, "CharStart": 7, @@ -221,8 +281,11 @@ }, { "Keyword": 1, - "Literal": "FullType", - "Position": { + "Literal": { + "Start": 170, + "End": 178 + }, + "TextPosition": { "LineStart": 13, "LineEnd": 13, "CharStart": 10, @@ -232,8 +295,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 183, + "End": 184 + }, + "TextPosition": { "LineStart": 14, "LineEnd": 14, "CharStart": 5, @@ -243,8 +309,11 @@ }, { "Keyword": 1, - "Literal": "directives", - "Position": { + "Literal": { + "Start": 189, + "End": 199 + }, + "TextPosition": { "LineStart": 15, "LineEnd": 15, "CharStart": 5, @@ -254,8 +323,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 200, + "End": 201 + }, + "TextPosition": { "LineStart": 15, "LineEnd": 15, "CharStart": 16, @@ -265,8 +337,11 @@ }, { "Keyword": 1, - "Literal": "name", - "Position": { + "Literal": { + "Start": 208, + "End": 212 + }, + "TextPosition": { "LineStart": 16, "LineEnd": 16, "CharStart": 7, @@ -276,8 +351,11 @@ }, { "Keyword": 1, - "Literal": "description", - "Position": { + "Literal": { + "Start": 219, + "End": 230 + }, + "TextPosition": { "LineStart": 17, "LineEnd": 17, "CharStart": 7, @@ -287,8 +365,11 @@ }, { "Keyword": 1, - "Literal": "locations", - "Position": { + "Literal": { + "Start": 237, + "End": 246 + }, + "TextPosition": { "LineStart": 18, "LineEnd": 18, "CharStart": 7, @@ -298,8 +379,11 @@ }, { "Keyword": 1, - "Literal": "args", - "Position": { + "Literal": { + "Start": 253, + "End": 
257 + }, + "TextPosition": { "LineStart": 19, "LineEnd": 19, "CharStart": 7, @@ -309,8 +393,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 258, + "End": 259 + }, + "TextPosition": { "LineStart": 19, "LineEnd": 19, "CharStart": 12, @@ -320,8 +407,11 @@ }, { "Keyword": 12, - "Literal": "...", - "Position": { + "Literal": { + "Start": 268, + "End": 271 + }, + "TextPosition": { "LineStart": 20, "LineEnd": 20, "CharStart": 9, @@ -331,8 +421,11 @@ }, { "Keyword": 1, - "Literal": "InputValue", - "Position": { + "Literal": { + "Start": 271, + "End": 281 + }, + "TextPosition": { "LineStart": 20, "LineEnd": 20, "CharStart": 12, @@ -342,8 +435,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 288, + "End": 289 + }, + "TextPosition": { "LineStart": 21, "LineEnd": 21, "CharStart": 7, @@ -353,8 +449,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 294, + "End": 295 + }, + "TextPosition": { "LineStart": 22, "LineEnd": 22, "CharStart": 5, @@ -364,8 +463,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 298, + "End": 299 + }, + "TextPosition": { "LineStart": 23, "LineEnd": 23, "CharStart": 3, @@ -375,8 +477,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 300, + "End": 301 + }, + "TextPosition": { "LineStart": 24, "LineEnd": 24, "CharStart": 1, @@ -386,8 +491,11 @@ }, { "Keyword": 38, - "Literal": "fragment", - "Position": { + "Literal": { + "Start": 303, + "End": 311 + }, + "TextPosition": { "LineStart": 26, "LineEnd": 26, "CharStart": 1, @@ -397,8 +505,11 @@ }, { "Keyword": 1, - "Literal": "FullType", - "Position": { + "Literal": { + "Start": 312, + "End": 320 + }, + "TextPosition": { "LineStart": 26, "LineEnd": 26, "CharStart": 10, @@ -408,8 +519,11 @@ }, { "Keyword": 18, - "Literal": "on", - "Position": { + "Literal": { + "Start": 321, + "End": 323 + }, + "TextPosition": { "LineStart": 26, 
"LineEnd": 26, "CharStart": 19, @@ -419,8 +533,11 @@ }, { "Keyword": 1, - "Literal": "__Type", - "Position": { + "Literal": { + "Start": 324, + "End": 330 + }, + "TextPosition": { "LineStart": 26, "LineEnd": 26, "CharStart": 22, @@ -430,8 +547,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 331, + "End": 332 + }, + "TextPosition": { "LineStart": 26, "LineEnd": 26, "CharStart": 29, @@ -441,8 +561,11 @@ }, { "Keyword": 1, - "Literal": "kind", - "Position": { + "Literal": { + "Start": 335, + "End": 339 + }, + "TextPosition": { "LineStart": 27, "LineEnd": 27, "CharStart": 3, @@ -452,8 +575,11 @@ }, { "Keyword": 1, - "Literal": "name", - "Position": { + "Literal": { + "Start": 342, + "End": 346 + }, + "TextPosition": { "LineStart": 28, "LineEnd": 28, "CharStart": 3, @@ -463,8 +589,11 @@ }, { "Keyword": 1, - "Literal": "description", - "Position": { + "Literal": { + "Start": 349, + "End": 360 + }, + "TextPosition": { "LineStart": 29, "LineEnd": 29, "CharStart": 3, @@ -474,8 +603,11 @@ }, { "Keyword": 1, - "Literal": "fields", - "Position": { + "Literal": { + "Start": 363, + "End": 369 + }, + "TextPosition": { "LineStart": 30, "LineEnd": 30, "CharStart": 3, @@ -485,8 +617,11 @@ }, { "Keyword": 39, - "Literal": "(", - "Position": { + "Literal": { + "Start": 369, + "End": 370 + }, + "TextPosition": { "LineStart": 30, "LineEnd": 30, "CharStart": 9, @@ -496,8 +631,11 @@ }, { "Keyword": 1, - "Literal": "includeDeprecated", - "Position": { + "Literal": { + "Start": 370, + "End": 387 + }, + "TextPosition": { "LineStart": 30, "LineEnd": 30, "CharStart": 10, @@ -507,8 +645,11 @@ }, { "Keyword": 4, - "Literal": ":", - "Position": { + "Literal": { + "Start": 387, + "End": 388 + }, + "TextPosition": { "LineStart": 30, "LineEnd": 30, "CharStart": 27, @@ -518,8 +659,11 @@ }, { "Keyword": 32, - "Literal": "true", - "Position": { + "Literal": { + "Start": 389, + "End": 393 + }, + "TextPosition": { "LineStart": 30, "LineEnd": 30, "CharStart": 29, @@ 
-529,8 +673,11 @@ }, { "Keyword": 40, - "Literal": ")", - "Position": { + "Literal": { + "Start": 393, + "End": 394 + }, + "TextPosition": { "LineStart": 30, "LineEnd": 30, "CharStart": 33, @@ -540,8 +687,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 395, + "End": 396 + }, + "TextPosition": { "LineStart": 30, "LineEnd": 30, "CharStart": 35, @@ -551,8 +701,11 @@ }, { "Keyword": 1, - "Literal": "name", - "Position": { + "Literal": { + "Start": 401, + "End": 405 + }, + "TextPosition": { "LineStart": 31, "LineEnd": 31, "CharStart": 5, @@ -562,8 +715,11 @@ }, { "Keyword": 1, - "Literal": "description", - "Position": { + "Literal": { + "Start": 410, + "End": 421 + }, + "TextPosition": { "LineStart": 32, "LineEnd": 32, "CharStart": 5, @@ -573,8 +729,11 @@ }, { "Keyword": 1, - "Literal": "args", - "Position": { + "Literal": { + "Start": 426, + "End": 430 + }, + "TextPosition": { "LineStart": 33, "LineEnd": 33, "CharStart": 5, @@ -584,8 +743,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 431, + "End": 432 + }, + "TextPosition": { "LineStart": 33, "LineEnd": 33, "CharStart": 10, @@ -595,8 +757,11 @@ }, { "Keyword": 12, - "Literal": "...", - "Position": { + "Literal": { + "Start": 439, + "End": 442 + }, + "TextPosition": { "LineStart": 34, "LineEnd": 34, "CharStart": 7, @@ -606,8 +771,11 @@ }, { "Keyword": 1, - "Literal": "InputValue", - "Position": { + "Literal": { + "Start": 442, + "End": 452 + }, + "TextPosition": { "LineStart": 34, "LineEnd": 34, "CharStart": 10, @@ -617,8 +785,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 457, + "End": 458 + }, + "TextPosition": { "LineStart": 35, "LineEnd": 35, "CharStart": 5, @@ -628,8 +799,11 @@ }, { "Keyword": 22, - "Literal": "type", - "Position": { + "Literal": { + "Start": 463, + "End": 467 + }, + "TextPosition": { "LineStart": 36, "LineEnd": 36, "CharStart": 5, @@ -639,8 +813,11 @@ }, { "Keyword": 43, - 
"Literal": "{", - "Position": { + "Literal": { + "Start": 468, + "End": 469 + }, + "TextPosition": { "LineStart": 36, "LineEnd": 36, "CharStart": 10, @@ -650,8 +827,11 @@ }, { "Keyword": 12, - "Literal": "...", - "Position": { + "Literal": { + "Start": 476, + "End": 479 + }, + "TextPosition": { "LineStart": 37, "LineEnd": 37, "CharStart": 7, @@ -661,8 +841,11 @@ }, { "Keyword": 1, - "Literal": "TypeRef", - "Position": { + "Literal": { + "Start": 479, + "End": 486 + }, + "TextPosition": { "LineStart": 37, "LineEnd": 37, "CharStart": 10, @@ -672,8 +855,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 491, + "End": 492 + }, + "TextPosition": { "LineStart": 38, "LineEnd": 38, "CharStart": 5, @@ -683,8 +869,11 @@ }, { "Keyword": 1, - "Literal": "isDeprecated", - "Position": { + "Literal": { + "Start": 497, + "End": 509 + }, + "TextPosition": { "LineStart": 39, "LineEnd": 39, "CharStart": 5, @@ -694,8 +883,11 @@ }, { "Keyword": 1, - "Literal": "deprecationReason", - "Position": { + "Literal": { + "Start": 514, + "End": 531 + }, + "TextPosition": { "LineStart": 40, "LineEnd": 40, "CharStart": 5, @@ -705,8 +897,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 534, + "End": 535 + }, + "TextPosition": { "LineStart": 41, "LineEnd": 41, "CharStart": 3, @@ -716,8 +911,11 @@ }, { "Keyword": 1, - "Literal": "inputFields", - "Position": { + "Literal": { + "Start": 538, + "End": 549 + }, + "TextPosition": { "LineStart": 42, "LineEnd": 42, "CharStart": 3, @@ -727,8 +925,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 550, + "End": 551 + }, + "TextPosition": { "LineStart": 42, "LineEnd": 42, "CharStart": 15, @@ -738,8 +939,11 @@ }, { "Keyword": 12, - "Literal": "...", - "Position": { + "Literal": { + "Start": 556, + "End": 559 + }, + "TextPosition": { "LineStart": 43, "LineEnd": 43, "CharStart": 5, @@ -749,8 +953,11 @@ }, { "Keyword": 1, - "Literal": "InputValue", - 
"Position": { + "Literal": { + "Start": 559, + "End": 569 + }, + "TextPosition": { "LineStart": 43, "LineEnd": 43, "CharStart": 8, @@ -760,8 +967,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 572, + "End": 573 + }, + "TextPosition": { "LineStart": 44, "LineEnd": 44, "CharStart": 3, @@ -771,8 +981,11 @@ }, { "Keyword": 1, - "Literal": "interfaces", - "Position": { + "Literal": { + "Start": 576, + "End": 586 + }, + "TextPosition": { "LineStart": 45, "LineEnd": 45, "CharStart": 3, @@ -782,8 +995,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 587, + "End": 588 + }, + "TextPosition": { "LineStart": 45, "LineEnd": 45, "CharStart": 14, @@ -793,8 +1009,11 @@ }, { "Keyword": 12, - "Literal": "...", - "Position": { + "Literal": { + "Start": 593, + "End": 596 + }, + "TextPosition": { "LineStart": 46, "LineEnd": 46, "CharStart": 5, @@ -804,8 +1023,11 @@ }, { "Keyword": 1, - "Literal": "TypeRef", - "Position": { + "Literal": { + "Start": 596, + "End": 603 + }, + "TextPosition": { "LineStart": 46, "LineEnd": 46, "CharStart": 8, @@ -815,8 +1037,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 606, + "End": 607 + }, + "TextPosition": { "LineStart": 47, "LineEnd": 47, "CharStart": 3, @@ -826,8 +1051,11 @@ }, { "Keyword": 1, - "Literal": "enumValues", - "Position": { + "Literal": { + "Start": 610, + "End": 620 + }, + "TextPosition": { "LineStart": 48, "LineEnd": 48, "CharStart": 3, @@ -837,8 +1065,11 @@ }, { "Keyword": 39, - "Literal": "(", - "Position": { + "Literal": { + "Start": 620, + "End": 621 + }, + "TextPosition": { "LineStart": 48, "LineEnd": 48, "CharStart": 13, @@ -848,8 +1079,11 @@ }, { "Keyword": 1, - "Literal": "includeDeprecated", - "Position": { + "Literal": { + "Start": 621, + "End": 638 + }, + "TextPosition": { "LineStart": 48, "LineEnd": 48, "CharStart": 14, @@ -859,8 +1093,11 @@ }, { "Keyword": 4, - "Literal": ":", - "Position": { + "Literal": { + 
"Start": 638, + "End": 639 + }, + "TextPosition": { "LineStart": 48, "LineEnd": 48, "CharStart": 31, @@ -870,8 +1107,11 @@ }, { "Keyword": 32, - "Literal": "true", - "Position": { + "Literal": { + "Start": 640, + "End": 644 + }, + "TextPosition": { "LineStart": 48, "LineEnd": 48, "CharStart": 33, @@ -881,8 +1121,11 @@ }, { "Keyword": 40, - "Literal": ")", - "Position": { + "Literal": { + "Start": 644, + "End": 645 + }, + "TextPosition": { "LineStart": 48, "LineEnd": 48, "CharStart": 37, @@ -892,8 +1135,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 646, + "End": 647 + }, + "TextPosition": { "LineStart": 48, "LineEnd": 48, "CharStart": 39, @@ -903,8 +1149,11 @@ }, { "Keyword": 1, - "Literal": "name", - "Position": { + "Literal": { + "Start": 652, + "End": 656 + }, + "TextPosition": { "LineStart": 49, "LineEnd": 49, "CharStart": 5, @@ -914,8 +1163,11 @@ }, { "Keyword": 1, - "Literal": "description", - "Position": { + "Literal": { + "Start": 661, + "End": 672 + }, + "TextPosition": { "LineStart": 50, "LineEnd": 50, "CharStart": 5, @@ -925,8 +1177,11 @@ }, { "Keyword": 1, - "Literal": "isDeprecated", - "Position": { + "Literal": { + "Start": 677, + "End": 689 + }, + "TextPosition": { "LineStart": 51, "LineEnd": 51, "CharStart": 5, @@ -936,8 +1191,11 @@ }, { "Keyword": 1, - "Literal": "deprecationReason", - "Position": { + "Literal": { + "Start": 694, + "End": 711 + }, + "TextPosition": { "LineStart": 52, "LineEnd": 52, "CharStart": 5, @@ -947,8 +1205,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 714, + "End": 715 + }, + "TextPosition": { "LineStart": 53, "LineEnd": 53, "CharStart": 3, @@ -958,8 +1219,11 @@ }, { "Keyword": 1, - "Literal": "possibleTypes", - "Position": { + "Literal": { + "Start": 718, + "End": 731 + }, + "TextPosition": { "LineStart": 54, "LineEnd": 54, "CharStart": 3, @@ -969,8 +1233,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 732, + 
"End": 733 + }, + "TextPosition": { "LineStart": 54, "LineEnd": 54, "CharStart": 17, @@ -980,8 +1247,11 @@ }, { "Keyword": 12, - "Literal": "...", - "Position": { + "Literal": { + "Start": 738, + "End": 741 + }, + "TextPosition": { "LineStart": 55, "LineEnd": 55, "CharStart": 5, @@ -991,8 +1261,11 @@ }, { "Keyword": 1, - "Literal": "TypeRef", - "Position": { + "Literal": { + "Start": 741, + "End": 748 + }, + "TextPosition": { "LineStart": 55, "LineEnd": 55, "CharStart": 8, @@ -1002,8 +1275,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 751, + "End": 752 + }, + "TextPosition": { "LineStart": 56, "LineEnd": 56, "CharStart": 3, @@ -1013,8 +1289,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 753, + "End": 754 + }, + "TextPosition": { "LineStart": 57, "LineEnd": 57, "CharStart": 1, @@ -1024,8 +1303,11 @@ }, { "Keyword": 38, - "Literal": "fragment", - "Position": { + "Literal": { + "Start": 756, + "End": 764 + }, + "TextPosition": { "LineStart": 59, "LineEnd": 59, "CharStart": 1, @@ -1035,8 +1317,11 @@ }, { "Keyword": 1, - "Literal": "InputValue", - "Position": { + "Literal": { + "Start": 765, + "End": 775 + }, + "TextPosition": { "LineStart": 59, "LineEnd": 59, "CharStart": 10, @@ -1046,8 +1331,11 @@ }, { "Keyword": 18, - "Literal": "on", - "Position": { + "Literal": { + "Start": 776, + "End": 778 + }, + "TextPosition": { "LineStart": 59, "LineEnd": 59, "CharStart": 21, @@ -1057,8 +1345,11 @@ }, { "Keyword": 1, - "Literal": "__InputValue", - "Position": { + "Literal": { + "Start": 779, + "End": 791 + }, + "TextPosition": { "LineStart": 59, "LineEnd": 59, "CharStart": 24, @@ -1068,8 +1359,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 792, + "End": 793 + }, + "TextPosition": { "LineStart": 59, "LineEnd": 59, "CharStart": 37, @@ -1079,8 +1373,11 @@ }, { "Keyword": 1, - "Literal": "name", - "Position": { + "Literal": { + "Start": 796, + "End": 800 + }, + 
"TextPosition": { "LineStart": 60, "LineEnd": 60, "CharStart": 3, @@ -1090,8 +1387,11 @@ }, { "Keyword": 1, - "Literal": "description", - "Position": { + "Literal": { + "Start": 803, + "End": 814 + }, + "TextPosition": { "LineStart": 61, "LineEnd": 61, "CharStart": 3, @@ -1101,8 +1401,11 @@ }, { "Keyword": 22, - "Literal": "type", - "Position": { + "Literal": { + "Start": 817, + "End": 821 + }, + "TextPosition": { "LineStart": 62, "LineEnd": 62, "CharStart": 3, @@ -1112,8 +1415,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 822, + "End": 823 + }, + "TextPosition": { "LineStart": 62, "LineEnd": 62, "CharStart": 8, @@ -1123,8 +1429,11 @@ }, { "Keyword": 12, - "Literal": "...", - "Position": { + "Literal": { + "Start": 828, + "End": 831 + }, + "TextPosition": { "LineStart": 63, "LineEnd": 63, "CharStart": 5, @@ -1134,8 +1443,11 @@ }, { "Keyword": 1, - "Literal": "TypeRef", - "Position": { + "Literal": { + "Start": 831, + "End": 838 + }, + "TextPosition": { "LineStart": 63, "LineEnd": 63, "CharStart": 8, @@ -1145,8 +1457,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 841, + "End": 842 + }, + "TextPosition": { "LineStart": 64, "LineEnd": 64, "CharStart": 3, @@ -1156,8 +1471,11 @@ }, { "Keyword": 1, - "Literal": "defaultValue", - "Position": { + "Literal": { + "Start": 845, + "End": 857 + }, + "TextPosition": { "LineStart": 65, "LineEnd": 65, "CharStart": 3, @@ -1167,8 +1485,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 858, + "End": 859 + }, + "TextPosition": { "LineStart": 66, "LineEnd": 66, "CharStart": 1, @@ -1178,8 +1499,11 @@ }, { "Keyword": 38, - "Literal": "fragment", - "Position": { + "Literal": { + "Start": 861, + "End": 869 + }, + "TextPosition": { "LineStart": 68, "LineEnd": 68, "CharStart": 1, @@ -1189,8 +1513,11 @@ }, { "Keyword": 1, - "Literal": "TypeRef", - "Position": { + "Literal": { + "Start": 870, + "End": 877 + }, + "TextPosition": { 
"LineStart": 68, "LineEnd": 68, "CharStart": 10, @@ -1200,8 +1527,11 @@ }, { "Keyword": 18, - "Literal": "on", - "Position": { + "Literal": { + "Start": 878, + "End": 880 + }, + "TextPosition": { "LineStart": 68, "LineEnd": 68, "CharStart": 18, @@ -1211,8 +1541,11 @@ }, { "Keyword": 1, - "Literal": "__Type", - "Position": { + "Literal": { + "Start": 881, + "End": 887 + }, + "TextPosition": { "LineStart": 68, "LineEnd": 68, "CharStart": 21, @@ -1222,8 +1555,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 888, + "End": 889 + }, + "TextPosition": { "LineStart": 68, "LineEnd": 68, "CharStart": 28, @@ -1233,8 +1569,11 @@ }, { "Keyword": 1, - "Literal": "kind", - "Position": { + "Literal": { + "Start": 892, + "End": 896 + }, + "TextPosition": { "LineStart": 69, "LineEnd": 69, "CharStart": 3, @@ -1244,8 +1583,11 @@ }, { "Keyword": 1, - "Literal": "name", - "Position": { + "Literal": { + "Start": 899, + "End": 903 + }, + "TextPosition": { "LineStart": 70, "LineEnd": 70, "CharStart": 3, @@ -1255,8 +1597,11 @@ }, { "Keyword": 1, - "Literal": "ofType", - "Position": { + "Literal": { + "Start": 906, + "End": 912 + }, + "TextPosition": { "LineStart": 71, "LineEnd": 71, "CharStart": 3, @@ -1266,8 +1611,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 913, + "End": 914 + }, + "TextPosition": { "LineStart": 71, "LineEnd": 71, "CharStart": 10, @@ -1277,8 +1625,11 @@ }, { "Keyword": 1, - "Literal": "kind", - "Position": { + "Literal": { + "Start": 919, + "End": 923 + }, + "TextPosition": { "LineStart": 72, "LineEnd": 72, "CharStart": 5, @@ -1288,8 +1639,11 @@ }, { "Keyword": 1, - "Literal": "name", - "Position": { + "Literal": { + "Start": 928, + "End": 932 + }, + "TextPosition": { "LineStart": 73, "LineEnd": 73, "CharStart": 5, @@ -1299,8 +1653,11 @@ }, { "Keyword": 1, - "Literal": "ofType", - "Position": { + "Literal": { + "Start": 937, + "End": 943 + }, + "TextPosition": { "LineStart": 74, "LineEnd": 74, 
"CharStart": 5, @@ -1310,8 +1667,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 944, + "End": 945 + }, + "TextPosition": { "LineStart": 74, "LineEnd": 74, "CharStart": 12, @@ -1321,8 +1681,11 @@ }, { "Keyword": 1, - "Literal": "kind", - "Position": { + "Literal": { + "Start": 952, + "End": 956 + }, + "TextPosition": { "LineStart": 75, "LineEnd": 75, "CharStart": 7, @@ -1332,8 +1695,11 @@ }, { "Keyword": 1, - "Literal": "name", - "Position": { + "Literal": { + "Start": 963, + "End": 967 + }, + "TextPosition": { "LineStart": 76, "LineEnd": 76, "CharStart": 7, @@ -1343,8 +1709,11 @@ }, { "Keyword": 1, - "Literal": "ofType", - "Position": { + "Literal": { + "Start": 974, + "End": 980 + }, + "TextPosition": { "LineStart": 77, "LineEnd": 77, "CharStart": 7, @@ -1354,8 +1723,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 981, + "End": 982 + }, + "TextPosition": { "LineStart": 77, "LineEnd": 77, "CharStart": 14, @@ -1365,8 +1737,11 @@ }, { "Keyword": 1, - "Literal": "kind", - "Position": { + "Literal": { + "Start": 991, + "End": 995 + }, + "TextPosition": { "LineStart": 78, "LineEnd": 78, "CharStart": 9, @@ -1376,8 +1751,11 @@ }, { "Keyword": 1, - "Literal": "name", - "Position": { + "Literal": { + "Start": 1004, + "End": 1008 + }, + "TextPosition": { "LineStart": 79, "LineEnd": 79, "CharStart": 9, @@ -1387,8 +1765,11 @@ }, { "Keyword": 1, - "Literal": "ofType", - "Position": { + "Literal": { + "Start": 1017, + "End": 1023 + }, + "TextPosition": { "LineStart": 80, "LineEnd": 80, "CharStart": 9, @@ -1398,8 +1779,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 1024, + "End": 1025 + }, + "TextPosition": { "LineStart": 80, "LineEnd": 80, "CharStart": 16, @@ -1409,8 +1793,11 @@ }, { "Keyword": 1, - "Literal": "kind", - "Position": { + "Literal": { + "Start": 1036, + "End": 1040 + }, + "TextPosition": { "LineStart": 81, "LineEnd": 81, "CharStart": 11, @@ -1420,8 
+1807,11 @@ }, { "Keyword": 1, - "Literal": "name", - "Position": { + "Literal": { + "Start": 1051, + "End": 1055 + }, + "TextPosition": { "LineStart": 82, "LineEnd": 82, "CharStart": 11, @@ -1431,8 +1821,11 @@ }, { "Keyword": 1, - "Literal": "ofType", - "Position": { + "Literal": { + "Start": 1066, + "End": 1072 + }, + "TextPosition": { "LineStart": 83, "LineEnd": 83, "CharStart": 11, @@ -1442,8 +1835,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 1073, + "End": 1074 + }, + "TextPosition": { "LineStart": 83, "LineEnd": 83, "CharStart": 18, @@ -1453,8 +1849,11 @@ }, { "Keyword": 1, - "Literal": "kind", - "Position": { + "Literal": { + "Start": 1087, + "End": 1091 + }, + "TextPosition": { "LineStart": 84, "LineEnd": 84, "CharStart": 13, @@ -1464,8 +1863,11 @@ }, { "Keyword": 1, - "Literal": "name", - "Position": { + "Literal": { + "Start": 1104, + "End": 1108 + }, + "TextPosition": { "LineStart": 85, "LineEnd": 85, "CharStart": 13, @@ -1475,8 +1877,11 @@ }, { "Keyword": 1, - "Literal": "ofType", - "Position": { + "Literal": { + "Start": 1121, + "End": 1127 + }, + "TextPosition": { "LineStart": 86, "LineEnd": 86, "CharStart": 13, @@ -1486,8 +1891,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 1128, + "End": 1129 + }, + "TextPosition": { "LineStart": 86, "LineEnd": 86, "CharStart": 20, @@ -1497,8 +1905,11 @@ }, { "Keyword": 1, - "Literal": "kind", - "Position": { + "Literal": { + "Start": 1144, + "End": 1148 + }, + "TextPosition": { "LineStart": 87, "LineEnd": 87, "CharStart": 15, @@ -1508,8 +1919,11 @@ }, { "Keyword": 1, - "Literal": "name", - "Position": { + "Literal": { + "Start": 1163, + "End": 1167 + }, + "TextPosition": { "LineStart": 88, "LineEnd": 88, "CharStart": 15, @@ -1519,8 +1933,11 @@ }, { "Keyword": 1, - "Literal": "ofType", - "Position": { + "Literal": { + "Start": 1182, + "End": 1188 + }, + "TextPosition": { "LineStart": 89, "LineEnd": 89, "CharStart": 15, @@ -1530,8 
+1947,11 @@ }, { "Keyword": 43, - "Literal": "{", - "Position": { + "Literal": { + "Start": 1189, + "End": 1190 + }, + "TextPosition": { "LineStart": 89, "LineEnd": 89, "CharStart": 22, @@ -1541,8 +1961,11 @@ }, { "Keyword": 1, - "Literal": "kind", - "Position": { + "Literal": { + "Start": 1207, + "End": 1211 + }, + "TextPosition": { "LineStart": 90, "LineEnd": 90, "CharStart": 17, @@ -1552,8 +1975,11 @@ }, { "Keyword": 1, - "Literal": "name", - "Position": { + "Literal": { + "Start": 1228, + "End": 1232 + }, + "TextPosition": { "LineStart": 91, "LineEnd": 91, "CharStart": 17, @@ -1563,8 +1989,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 1247, + "End": 1248 + }, + "TextPosition": { "LineStart": 92, "LineEnd": 92, "CharStart": 15, @@ -1574,8 +2003,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 1261, + "End": 1262 + }, + "TextPosition": { "LineStart": 93, "LineEnd": 93, "CharStart": 13, @@ -1585,8 +2017,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 1273, + "End": 1274 + }, + "TextPosition": { "LineStart": 94, "LineEnd": 94, "CharStart": 11, @@ -1596,8 +2031,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 1283, + "End": 1284 + }, + "TextPosition": { "LineStart": 95, "LineEnd": 95, "CharStart": 9, @@ -1607,8 +2045,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 1291, + "End": 1292 + }, + "TextPosition": { "LineStart": 96, "LineEnd": 96, "CharStart": 7, @@ -1618,8 +2059,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 1297, + "End": 1298 + }, + "TextPosition": { "LineStart": 97, "LineEnd": 97, "CharStart": 5, @@ -1629,8 +2073,11 @@ }, { "Keyword": 44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 1301, + "End": 1302 + }, + "TextPosition": { "LineStart": 98, "LineEnd": 98, "CharStart": 3, @@ -1640,8 +2087,11 @@ }, { "Keyword": 
44, - "Literal": "}", - "Position": { + "Literal": { + "Start": 1303, + "End": 1304 + }, + "TextPosition": { "LineStart": 99, "LineEnd": 99, "CharStart": 1, diff --git a/pkg/lexer/lexer.go b/pkg/lexer/lexer.go index f9d17357ab..064b070ea5 100644 --- a/pkg/lexer/lexer.go +++ b/pkg/lexer/lexer.go @@ -1,14 +1,12 @@ package lexer import ( - "bytes" "fmt" + "github.com/jensneuse/graphql-go-tools/pkg/document" "github.com/jensneuse/graphql-go-tools/pkg/lexing/keyword" - "github.com/jensneuse/graphql-go-tools/pkg/lexing/literal" "github.com/jensneuse/graphql-go-tools/pkg/lexing/position" "github.com/jensneuse/graphql-go-tools/pkg/lexing/runes" "github.com/jensneuse/graphql-go-tools/pkg/lexing/token" - "unicode" ) // Lexer emits tokens from a input reader @@ -19,61 +17,79 @@ type Lexer struct { beforeLastLineTerminatorTextPosition position.Position } -type parsedRune struct { - r byte - pos position.Position -} - // NewLexer initializes a new lexer func NewLexer() *Lexer { return &Lexer{} } +const ( + uint16Max = 65535 +) + // SetInput sets the new reader as input and resets all position stats -func (l *Lexer) SetInput(input []byte) { +func (l *Lexer) SetInput(input []byte) error { + + if len(input) > uint16Max { + return fmt.Errorf("SetInput: input size must not be > %d, got: %d", uint16Max, len(input)) + } + l.input = input l.inputPosition = 0 l.textPosition.LineStart = 1 l.textPosition.CharStart = 1 + + return nil +} + +func (l *Lexer) ByteSlice(reference document.ByteSliceReference) document.ByteSlice { + return l.input[reference.Start:reference.End] } // Read emits the next token, this cannot be undone func (l *Lexer) Read() (tok token.Token, err error) { - var next parsedRune + var next byte + var inputPositionStart int for { + inputPositionStart = l.inputPosition + tok.SetStart(l.inputPosition, l.textPosition) next = l.readRune() - if !l.byteIsWhitespace(next.r) { + if !l.byteIsWhitespace(next) { break } } - var matched bool - tok, matched = 
l.matchSingleRuneToken(next) - if matched { - return tok, nil + if l.matchSingleRuneToken(next, &tok) { + return } - switch next.r { + switch next { case runes.QUOTE: - return l.readString(next) + err = l.readString(&tok) + return case runes.DOT: - return l.readSpread(next) + err = l.readSpread(&tok) + return case runes.DOLLAR: - return l.readVariable(next) + err = l.readVariable(&tok) + return } - if runeIsDigit(next.r) { - return l.readDigit(next) + if runeIsDigit(next) { + err = l.readDigit(&tok) + return } - return l.readIdent(next) + err = l.readIdent() + tok.Keyword = l.keywordFromIdent(inputPositionStart, l.inputPosition) + tok.SetEnd(l.inputPosition, l.textPosition) + return } func (l *Lexer) swallowWhitespace() (err error) { - var next rune + var next byte for { next = l.peekRune() @@ -82,7 +98,7 @@ func (l *Lexer) swallowWhitespace() (err error) { return nil } - if !l.runeIsWhitespace(next) { + if !l.byteIsWhitespace(next) { return nil } @@ -108,7 +124,7 @@ func (l *Lexer) Peek(ignoreWhitespace bool) (key keyword.Keyword, err error) { return l.keywordFromRune(next) } -func (l *Lexer) keywordFromRune(r rune) (key keyword.Keyword, err error) { +func (l *Lexer) keywordFromRune(r byte) (key keyword.Keyword, err error) { switch r { case runes.EOF: @@ -150,13 +166,13 @@ func (l *Lexer) keywordFromRune(r rune) (key keyword.Keyword, err error) { case runes.AND: return keyword.AND, nil case runes.DOT: - if l.peekEquals([]byte("...")) { + if l.peekEquals(runes.DOT, runes.DOT, runes.DOT) { return keyword.SPREAD, nil } return key, fmt.Errorf("keywordFromRune: must be '...'") } - if unicode.IsDigit(r) { + if runeIsDigit(r) { if l.peekIsFloat() { return keyword.FLOAT, nil } @@ -191,76 +207,57 @@ func (l *Lexer) peekIsFloat() (isFloat bool) { return hasDot } -func (l *Lexer) matchSingleRuneToken(r parsedRune) (tok token.Token, matched bool) { +func (l *Lexer) matchSingleRuneToken(r byte, tok *token.Token) bool { - matched = true - - switch r.r { + switch r { case 
runes.EOF: - tok = token.EOF + tok.Keyword = keyword.EOF case runes.PIPE: - tok = token.Pipe + tok.Keyword = keyword.PIPE case runes.EQUALS: - tok = token.Equals + tok.Keyword = keyword.EQUALS case runes.AT: - tok = token.At + tok.Keyword = keyword.AT case runes.COLON: - tok = token.Colon + tok.Keyword = keyword.COLON case runes.BANG: - tok = token.Bang + tok.Keyword = keyword.BANG case runes.BRACKETOPEN: - tok = token.BracketOpen + tok.Keyword = keyword.BRACKETOPEN case runes.BRACKETCLOSE: - tok = token.BracketClose + tok.Keyword = keyword.BRACKETCLOSE case runes.CURLYBRACKETOPEN: - tok = token.CurlyBracketOpen + tok.Keyword = keyword.CURLYBRACKETOPEN case runes.CURLYBRACKETCLOSE: - tok = token.CurlyBracketClose + tok.Keyword = keyword.CURLYBRACKETCLOSE case runes.SQUAREBRACKETOPEN: - tok = token.SquaredBracketOpen + tok.Keyword = keyword.SQUAREBRACKETOPEN case runes.SQUAREBRACKETCLOSE: - tok = token.SquaredBracketClose + tok.Keyword = keyword.SQUAREBRACKETCLOSE case runes.AND: - tok = token.And + tok.Keyword = keyword.AND default: - matched = false + return false } - tok.Position = r.pos + tok.SetEnd(l.inputPosition, l.textPosition) - return + return true } -func (l *Lexer) readIdent(startRune parsedRune) (tok token.Token, err error) { +func (l *Lexer) readIdent() error { - tok.Position = startRune.pos - start := l.inputPosition - 1 - var lastValidRune parsedRune - var r parsedRune + var r byte for { r = l.readRune() - if !runeIsIdent(r.r) { - break - } - - lastValidRune = r - } - - if r.r != runes.EOF && l.inputPosition > start+1 { - err = l.unreadRune() - if err != nil { - return tok, err + if !runeIsIdent(r) { + if r != runes.EOF { + return l.unreadRune() + } + return nil } } - - end := l.inputPosition - - tok.Literal = l.input[start:end] - tok.Keyword = l.keywordFromIdentString(tok.Literal) - tok.Position.SetEnd(lastValidRune.pos) - - return } const identWantRunes = 13 @@ -274,113 +271,144 @@ func (l *Lexer) peekIdent() (k keyword.Keyword) { end = 
len(l.input) } - peeked := l.input[start:end] - - for i, r := range peeked { - if !runeIsIdent(r) { - peeked = peeked[:i] + for i := start; i < end; { + if !runeIsIdent(l.input[i]) { + end = i break } + + i++ } - return l.keywordFromIdentString(peeked) + return l.keywordFromIdent(start, end) } -func (l *Lexer) keywordFromIdentString(ident []byte) (k keyword.Keyword) { - switch string(ident) { - case "on": - return keyword.ON - case "true": - return keyword.TRUE - case "type": - return keyword.TYPE - case "null": - return keyword.NULL - case "enum": - return keyword.ENUM - case "false": - return keyword.FALSE - case "union": - return keyword.UNION - case "query": - return keyword.QUERY - case "input": - return keyword.INPUT - case "schema": - return keyword.SCHEMA - case "scalar": - return keyword.SCALAR - case "mutation": - return keyword.MUTATION - case "fragment": - return keyword.FRAGMENT - case "interface": - return keyword.INTERFACE - case "directive": - return keyword.DIRECTIVE - case "implements": - return keyword.IMPLEMENTS - case "subscription": - return keyword.SUBSCRIPTION - default: - return keyword.IDENT +func (l *Lexer) keywordFromIdent(start, end int) (k keyword.Keyword) { + + switch end - start { + case 2: + if l.input[start] == 'o' && l.input[start+1] == 'n' { + return keyword.ON + } + case 4: + if l.input[start] == 'n' && l.input[start+1] == 'u' && l.input[start+2] == 'l' && l.input[start+3] == 'l' { + return keyword.NULL + } + if l.input[start] == 'e' && l.input[start+1] == 'n' && l.input[start+2] == 'u' && l.input[start+3] == 'm' { + return keyword.ENUM + } + if l.input[start] == 't' { + if l.input[start+1] == 'r' && l.input[start+2] == 'u' && l.input[start+3] == 'e' { + return keyword.TRUE + } + if l.input[start+1] == 'y' && l.input[start+2] == 'p' && l.input[start+3] == 'e' { + return keyword.TYPE + } + } + case 5: + if l.input[start] == 'f' && l.input[start+1] == 'a' && l.input[start+2] == 'l' && l.input[start+3] == 's' && l.input[start+4] == 
'e' { + return keyword.FALSE + } + if l.input[start] == 'u' && l.input[start+1] == 'n' && l.input[start+2] == 'i' && l.input[start+3] == 'o' && l.input[start+4] == 'n' { + return keyword.UNION + } + if l.input[start] == 'q' && l.input[start+1] == 'u' && l.input[start+2] == 'e' && l.input[start+3] == 'r' && l.input[start+4] == 'y' { + return keyword.QUERY + } + if l.input[start] == 'i' && l.input[start+1] == 'n' && l.input[start+2] == 'p' && l.input[start+3] == 'u' && l.input[start+4] == 't' { + return keyword.INPUT + } + case 6: + if l.input[start] == 's' { + if l.input[start+1] == 'c' && l.input[start+2] == 'h' && l.input[start+3] == 'e' && l.input[start+4] == 'm' && l.input[start+5] == 'a' { + return keyword.SCHEMA + } + if l.input[start+1] == 'c' && l.input[start+2] == 'a' && l.input[start+3] == 'l' && l.input[start+4] == 'a' && l.input[start+5] == 'r' { + return keyword.SCALAR + } + } + case 8: + if l.input[start] == 'm' && l.input[start+1] == 'u' && l.input[start+2] == 't' && l.input[start+3] == 'a' && l.input[start+4] == 't' && l.input[start+5] == 'i' && l.input[start+6] == 'o' && l.input[start+7] == 'n' { + return keyword.MUTATION + } + if l.input[start] == 'f' && l.input[start+1] == 'r' && l.input[start+2] == 'a' && l.input[start+3] == 'g' && l.input[start+4] == 'm' && l.input[start+5] == 'e' && l.input[start+6] == 'n' && l.input[start+7] == 't' { + return keyword.FRAGMENT + } + case 9: + if l.input[start] == 'i' && l.input[start+1] == 'n' && l.input[start+2] == 't' && l.input[start+3] == 'e' && l.input[start+4] == 'r' && l.input[start+5] == 'f' && l.input[start+6] == 'a' && l.input[start+7] == 'c' && l.input[start+8] == 'e' { + return keyword.INTERFACE + } + if l.input[start] == 'd' && l.input[start+1] == 'i' && l.input[start+2] == 'r' && l.input[start+3] == 'e' && l.input[start+4] == 'c' && l.input[start+5] == 't' && l.input[start+6] == 'i' && l.input[start+7] == 'v' && l.input[start+8] == 'e' { + return keyword.DIRECTIVE + } + case 10: + if 
l.input[start] == 'i' && l.input[start+1] == 'm' && l.input[start+2] == 'p' && l.input[start+3] == 'l' && l.input[start+4] == 'e' && l.input[start+5] == 'm' && l.input[start+6] == 'e' && l.input[start+7] == 'n' && l.input[start+8] == 't' && l.input[start+9] == 's' { + return keyword.IMPLEMENTS + } + case 12: + if l.input[start] == 's' && l.input[start+1] == 'u' && l.input[start+2] == 'b' && l.input[start+3] == 's' && l.input[start+4] == 'c' && l.input[start+5] == 'r' && l.input[start+6] == 'i' && l.input[start+7] == 'p' && l.input[start+8] == 't' && l.input[start+9] == 'i' && l.input[start+10] == 'o' && l.input[start+11] == 'n' { + return keyword.SUBSCRIPTION + } } + + return keyword.IDENT } -func (l *Lexer) readVariable(startRune parsedRune) (tok token.Token, err error) { +func (l *Lexer) readVariable(tok *token.Token) error { + + tok.SetStart(l.inputPosition, l.textPosition) - tok.Position = startRune.pos tok.Keyword = keyword.VARIABLE peeked, err := l.Peek(false) if err != nil { - return tok, err + return err } if peeked == keyword.SPACE || peeked == keyword.TAB || peeked == keyword.COMMA || peeked == keyword.LINETERMINATOR { - return tok, fmt.Errorf("readVariable: must not have whitespace after $") + return fmt.Errorf("readVariable: must not have whitespace after $") } - ident, err := l.readIdent(startRune) + err = l.readIdent() if err != nil { - return tok, err + return err } - tok.Literal = ident.Literal[1:] - tok.Position.SetEnd(ident.Position) - return + tok.SetEnd(l.inputPosition, l.textPosition) + tok.TextPosition.CharStart -= 1 + return nil } -func (l *Lexer) readSpread(startRune parsedRune) (tok token.Token, err error) { +func (l *Lexer) readSpread(tok *token.Token) error { - isSpread := l.peekEquals([]byte("..")) + isSpread := l.peekEquals(runes.DOT, runes.DOT) if !isSpread { - tok.Position = startRune.pos - return tok, fmt.Errorf("readSpread: invalid '.' at position %s", startRune.pos.String()) + return fmt.Errorf("readSpread: invalid '.' 
at position %s", l.textPosition) } l.swallowAmount(2) - tok = token.Spread - tok.Position = startRune.pos - tok.Position.CharEnd = tok.Position.CharStart + 3 - return + tok.Keyword = keyword.SPREAD + tok.SetEnd(l.inputPosition, l.textPosition) + return nil } -func (l *Lexer) readString(startRune parsedRune) (tok token.Token, err error) { +func (l *Lexer) readString(tok *token.Token) error { + + tok.Keyword = keyword.STRING - isMultiLineString := l.peekEquals([]byte("\"\"")) + isMultiLineString := l.peekEquals(runes.QUOTE, runes.QUOTE) if isMultiLineString { l.swallowAmount(2) - return l.readMultiLineString(startRune) + return l.readMultiLineString(tok) } - return l.readSingleLineString(startRune) + return l.readSingleLineString(tok) } func (l *Lexer) swallowAmount(amount int) { @@ -389,7 +417,7 @@ func (l *Lexer) swallowAmount(amount int) { } } -func (l *Lexer) peekEquals(equals []byte) bool { +func (l *Lexer) peekEquals(equals ...byte) bool { start := l.inputPosition end := l.inputPosition + len(equals) @@ -398,58 +426,52 @@ func (l *Lexer) peekEquals(equals []byte) bool { return false } - return bytes.Equal(l.input[start:end], equals) -} - -func (l *Lexer) readDigit(startRune parsedRune) (tok token.Token, err error) { + for i := 0; i < len(equals); i++ { + if l.input[start+i] != equals[i] { + return false + } + } - tok.Position = startRune.pos + return true +} - start := l.inputPosition - 1 +func (l *Lexer) readDigit(tok *token.Token) error { - var lastValidRune parsedRune - var r parsedRune + var r byte for { r = l.readRune() - if !runeIsDigit(r.r) { + if !runeIsDigit(r) { break } - lastValidRune = r } - isFloat := r.r == runes.DOT + isFloat := r == runes.DOT if isFloat { l.swallowAmount(1) - return l.readFloat(startRune.pos, start) + return l.readFloat(tok) } - if r.r != runes.EOF { - err = l.unreadRune() + if r != runes.EOF { + err := l.unreadRune() if err != nil { - return tok, err + return err } } - end := l.inputPosition - tok.Keyword = keyword.INTEGER - 
tok.Literal = l.input[start:end] - tok.Position.SetEnd(lastValidRune.pos) - - return + tok.SetEnd(l.inputPosition, l.textPosition) + return nil } -func (l *Lexer) readFloat(position position.Position, start int) (tok token.Token, err error) { - - tok.Position = position +func (l *Lexer) readFloat(tok *token.Token) error { var valid bool - var r parsedRune + var r byte for { r = l.readRune() - if !runeIsDigit(r.r) { + if !runeIsDigit(r) { break } else if !valid { valid = true @@ -457,39 +479,27 @@ func (l *Lexer) readFloat(position position.Position, start int) (tok token.Toke } if !valid { - return tok, fmt.Errorf("readFloat: incomplete float, must have digits after dot") + return fmt.Errorf("readFloat: incomplete float, must have digits after dot") } - if r.r != runes.EOF { - err = l.unreadRune() + if r != runes.EOF { + err := l.unreadRune() if err != nil { - return tok, err + return err } } - end := l.inputPosition - tok.Keyword = keyword.FLOAT - tok.Literal = l.input[start:end] - - return -} - -func (l *Lexer) trimStartEnd(input, trim []byte) []byte { - return bytes.TrimSuffix(bytes.TrimPrefix(input, trim), trim) + tok.SetEnd(l.inputPosition, l.textPosition) + return nil } -func (l *Lexer) readRune() (r parsedRune) { - - r.pos.LineStart = l.textPosition.LineStart - r.pos.CharStart = l.textPosition.CharStart - r.pos.LineEnd = l.textPosition.LineStart - r.pos.CharEnd = l.textPosition.CharStart + 1 +func (l *Lexer) readRune() (r byte) { if l.inputPosition < len(l.input) { - r.r = l.input[l.inputPosition] + r = l.input[l.inputPosition] - if r.r == runes.LINETERMINATOR { + if r == runes.LINETERMINATOR { l.beforeLastLineTerminatorTextPosition = l.textPosition l.textPosition.LineStart++ l.textPosition.CharStart = 1 @@ -499,7 +509,7 @@ func (l *Lexer) readRune() (r parsedRune) { l.inputPosition++ } else { - r.r = runes.EOF + r = runes.EOF } return @@ -523,10 +533,10 @@ func (l *Lexer) unreadRune() error { return nil } -func (l *Lexer) peekRune() (r rune) { +func (l 
*Lexer) peekRune() (r byte) { if l.inputPosition < len(l.input) { - return rune(l.input[l.inputPosition]) + return l.input[l.inputPosition] } return runes.EOF @@ -559,15 +569,6 @@ func runeIsDigit(r byte) bool { } } -func (l *Lexer) runeIsWhitespace(r rune) bool { - switch r { - case runes.SPACE, runes.TAB, runes.LINETERMINATOR, runes.COMMA: - return true - default: - return false - } -} - func (l *Lexer) byteIsWhitespace(r byte) bool { switch r { case runes.SPACE, runes.TAB, runes.LINETERMINATOR, runes.COMMA: @@ -577,82 +578,80 @@ func (l *Lexer) byteIsWhitespace(r byte) bool { } } -func (l *Lexer) readMultiLineString(startRune parsedRune) (tok token.Token, err error) { +func (l *Lexer) readMultiLineString(tok *token.Token) error { - tok.Keyword = keyword.STRING - tok.Position = startRune.pos - - start := l.inputPosition + tok.SetStart(l.inputPosition, l.textPosition) var escaped bool for { - nextRune := l.readRune() + nextRune := l.peekRune() - switch nextRune.r { + switch nextRune { case runes.QUOTE: if escaped { escaped = false + l.readRune() } else { - isMultiLineStringEnd := l.peekEquals([]byte("\"\"")) + isMultiLineStringEnd := l.peekEquals(runes.QUOTE, runes.QUOTE, runes.QUOTE) if !isMultiLineStringEnd { escaped = false } else { - - end := l.inputPosition - 1 - l.swallowAmount(2) - - tok.Literal = l.trimStartEnd(l.input[start:end], literal.LINETERMINATOR) - tok.Position.SetEnd(nextRune.pos) - tok.Position.CharEnd += 2 - return tok, nil + tok.SetEnd(l.inputPosition, l.textPosition) + tok.TextPosition.CharStart -= 3 + tok.TextPosition.CharEnd += 3 + l.swallowAmount(3) + return nil } } case runes.BACKSLASH: + l.readRune() if escaped { escaped = false } else { escaped = true } default: + l.readRune() escaped = false } } } -func (l *Lexer) readSingleLineString(startRune parsedRune) (tok token.Token, err error) { +func (l *Lexer) readSingleLineString(tok *token.Token) error { - tok.Keyword = keyword.STRING - tok.Position = startRune.pos - - start := 
l.inputPosition + tok.SetStart(l.inputPosition, l.textPosition) var escaped bool for { - nextRune := l.readRune() + nextRune := l.peekRune() - switch nextRune.r { + switch nextRune { case runes.QUOTE: if escaped { escaped = false + l.readRune() } else { - end := l.inputPosition - 1 - tok.Literal = l.input[start:end] - tok.Position.SetEnd(nextRune.pos) - return tok, nil + tok.SetEnd(l.inputPosition, l.textPosition) + tok.TextPosition.CharStart -= 1 + tok.TextPosition.CharEnd += 1 + l.swallowAmount(1) + return nil } case runes.BACKSLASH: + l.readRune() if escaped { escaped = false } else { escaped = true } default: + l.readRune() escaped = false } } diff --git a/pkg/lexer/lexer_test.go b/pkg/lexer/lexer_test.go index 1b3f685339..99ba0aca96 100644 --- a/pkg/lexer/lexer_test.go +++ b/pkg/lexer/lexer_test.go @@ -17,7 +17,9 @@ func TestLexer_Peek_Read(t *testing.T) { run := func(input string, checks ...checkFunc) { lex := NewLexer() - lex.SetInput([]byte(input)) + if err := lex.SetInput([]byte(input)); err != nil { + panic(err) + } for i := range checks { checks[i](lex, i+1) } @@ -35,7 +37,7 @@ func TestLexer_Peek_Read(t *testing.T) { } } - mustRead := func(k keyword.Keyword, literal string) checkFunc { + mustRead := func(k keyword.Keyword, wantLiteral string) checkFunc { return func(lex *Lexer, i int) { tok, err := lex.Read() if err != nil { @@ -44,8 +46,9 @@ func TestLexer_Peek_Read(t *testing.T) { if k != tok.Keyword { panic(fmt.Errorf("mustRead: want(keyword): %s, got: %s [check: %d]", k.String(), tok.String(), i)) } - if literal != string(tok.Literal) { - panic(fmt.Errorf("mustRead: want(literal): %s, got: %s [check: %d]", literal, tok.Literal, i)) + gotLiteral := string(lex.ByteSlice(tok.Literal)) + if wantLiteral != gotLiteral { + panic(fmt.Errorf("mustRead: want(literal): %s, got: %s [check: %d]", wantLiteral, gotLiteral, i)) } } } @@ -68,7 +71,9 @@ func TestLexer_Peek_Read(t *testing.T) { resetInput := func(input string) checkFunc { return func(lex *Lexer, i 
int) { - lex.SetInput([]byte(input)) + if err := lex.SetInput([]byte(input)); err != nil { + panic(err) + } } } @@ -79,17 +84,17 @@ func TestLexer_Peek_Read(t *testing.T) { panic(err) } - if lineStart != tok.Position.LineStart { - panic(fmt.Errorf("mustReadPosition: want(lineStart): %d, got: %d [check: %d]", lineStart, tok.Position.LineStart, i)) + if lineStart != tok.TextPosition.LineStart { + panic(fmt.Errorf("mustReadPosition: want(lineStart): %d, got: %d [check: %d]", lineStart, tok.TextPosition.LineStart, i)) } - if charStart != tok.Position.CharStart { - panic(fmt.Errorf("mustReadPosition: want(charStart): %d, got: %d [check: %d]", charStart, tok.Position.CharStart, i)) + if charStart != tok.TextPosition.CharStart { + panic(fmt.Errorf("mustReadPosition: want(charStart): %d, got: %d [check: %d]", charStart, tok.TextPosition.CharStart, i)) } - if lineEnd != tok.Position.LineEnd { - panic(fmt.Errorf("mustReadPosition: want(lineEnd): %d, got: %d [check: %d]", lineEnd, tok.Position.LineEnd, i)) + if lineEnd != tok.TextPosition.LineEnd { + panic(fmt.Errorf("mustReadPosition: want(lineEnd): %d, got: %d [check: %d]", lineEnd, tok.TextPosition.LineEnd, i)) } - if charEnd != tok.Position.CharEnd { - panic(fmt.Errorf("mustReadPosition: want(charEnd): %d, got: %d [check: %d]", charEnd, tok.Position.CharEnd, i)) + if charEnd != tok.TextPosition.CharEnd { + panic(fmt.Errorf("mustReadPosition: want(charEnd): %d, got: %d [check: %d]", charEnd, tok.TextPosition.CharEnd, i)) } } } @@ -104,8 +109,8 @@ func TestLexer_Peek_Read(t *testing.T) { t.Run("read eof multiple times", func(t *testing.T) { run("x", mustRead(keyword.IDENT, "x"), - mustRead(keyword.EOF, "eof"), - mustRead(keyword.EOF, "eof"), + mustRead(keyword.EOF, ""), + mustRead(keyword.EOF, ""), ) }) t.Run("read integer", func(t *testing.T) { @@ -142,7 +147,7 @@ func TestLexer_Peek_Read(t *testing.T) { run("\"\"\"foo \\\" bar\"\"\"", mustPeekAndRead(keyword.STRING, "foo \\\" bar")) }) t.Run("read multi line string", 
func(t *testing.T) { - run("\"\"\"\nfoo\nbar\"\"\"", mustPeekAndRead(keyword.STRING, "foo\nbar")) + run("\"\"\"\nfoo\nbar\"\"\"", mustPeekAndRead(keyword.STRING, "\nfoo\nbar")) }) t.Run("read pipe", func(t *testing.T) { run("|", mustPeekAndRead(keyword.PIPE, "|")) @@ -153,6 +158,9 @@ func TestLexer_Peek_Read(t *testing.T) { t.Run("read fragment spread", func(t *testing.T) { run("...", mustPeekAndRead(keyword.SPREAD, "...")) }) + t.Run("must not read invalid fragment spread", func(t *testing.T) { + run("..", mustErrRead()) + }) t.Run("read variable", func(t *testing.T) { run("$123", mustPeekAndRead(keyword.VARIABLE, "123")) }) @@ -205,7 +213,7 @@ func TestLexer_Peek_Read(t *testing.T) { run("&", mustPeekAndRead(keyword.AND, "&")) }) t.Run("read EOF", func(t *testing.T) { - run("", mustPeekAndRead(keyword.EOF, "eof")) + run("", mustPeekAndRead(keyword.EOF, "")) }) t.Run("read ident", func(t *testing.T) { run("foo", mustPeekAndRead(keyword.IDENT, "foo")) @@ -271,16 +279,16 @@ func TestLexer_Peek_Read(t *testing.T) { run("on ", mustPeekAndRead(keyword.ON, "on")) }) t.Run("read ignore comma", func(t *testing.T) { - run(",", mustPeekAndRead(keyword.EOF, "eof")) + run(",", mustPeekAndRead(keyword.EOF, "")) }) t.Run("read ignore space", func(t *testing.T) { - run(" ", mustPeekAndRead(keyword.EOF, "eof")) + run(" ", mustPeekAndRead(keyword.EOF, "")) }) t.Run("read ignore tab", func(t *testing.T) { - run(" ", mustPeekAndRead(keyword.EOF, "eof")) + run(" ", mustPeekAndRead(keyword.EOF, "")) }) t.Run("read ignore lineTerminator", func(t *testing.T) { - run("\n", mustPeekAndRead(keyword.EOF, "eof")) + run("\n", mustPeekAndRead(keyword.EOF, "")) }) t.Run("read null", func(t *testing.T) { run("null", mustPeekAndRead(keyword.NULL, "null")) @@ -346,7 +354,7 @@ baz mustPeekAndRead(keyword.INTEGER, "1337"), mustPeekAndRead(keyword.INTEGER, "1338"), mustPeekAndRead(keyword.INTEGER, "1339"), mustPeekAndRead(keyword.STRING, "foo"), mustPeekAndRead(keyword.STRING, "bar"), 
mustPeekAndRead(keyword.STRING, "foo bar"), mustPeekAndRead(keyword.STRING, "foo\nbar"), - mustPeekAndRead(keyword.STRING, "foo\nbar\nbaz"), + mustPeekAndRead(keyword.STRING, "foo\nbar\nbaz\n"), mustPeekAndRead(keyword.FLOAT, "13.37"), ) }) @@ -455,7 +463,9 @@ fragment TypeRef on __Type { func TestLexerRegressions(t *testing.T) { lexer := NewLexer() - lexer.SetInput([]byte(introspectionQuery)) + if err := lexer.SetInput([]byte(introspectionQuery)); err != nil { + t.Fatal(err) + } var total []token.Token for { @@ -497,7 +507,9 @@ func BenchmarkLexer(b *testing.B) { for i := 0; i < b.N; i++ { - lexer.SetInput(inputBytes) + if err := lexer.SetInput(inputBytes); err != nil { + b.Fatal(err) + } var key keyword.Keyword var err error From 86e348bbbd18180e8fb0e13bdd20fc5fe23d1ee0 Mon Sep 17 00:00:00 2001 From: jnsone11 Date: Thu, 17 Jan 2019 22:17:18 +0100 Subject: [PATCH 2/7] add ByteSliceReference --- pkg/document/byteslice.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pkg/document/byteslice.go b/pkg/document/byteslice.go index c2486a79f6..55d5d502bc 100644 --- a/pkg/document/byteslice.go +++ b/pkg/document/byteslice.go @@ -7,3 +7,8 @@ type ByteSlice []byte func (b ByteSlice) MarshalJSON() ([]byte, error) { return append(append(literal.QUOTE, b...), literal.QUOTE...), nil } + +type ByteSliceReference struct { + Start uint16 + End uint16 +} From f02cdf8235548b30788d0f50886de95f71ae138e Mon Sep 17 00:00:00 2001 From: jnsone11 Date: Thu, 17 Jan 2019 22:18:22 +0100 Subject: [PATCH 3/7] remove SetPosition from Position, this method moved to token.Token --- pkg/lexing/position/position.go.go | 5 ----- 1 file changed, 5 deletions(-) diff --git a/pkg/lexing/position/position.go.go b/pkg/lexing/position/position.go.go index f272f6b513..16b138a2ed 100644 --- a/pkg/lexing/position/position.go.go +++ b/pkg/lexing/position/position.go.go @@ -12,8 +12,3 @@ type Position struct { func (p Position) String() string { return fmt.Sprintf("%d:%d-%d:%d", p.LineStart, p.CharStart, 
p.LineEnd, p.CharEnd) } - -func (p *Position) SetEnd(position Position) { - p.LineEnd = position.LineEnd - p.CharEnd = position.CharEnd -} From 006af33c59ab486e8a686a009565930354013d97 Mon Sep 17 00:00:00 2001 From: jnsone11 Date: Thu, 17 Jan 2019 22:19:32 +0100 Subject: [PATCH 4/7] add SetStart and SetEnd methods to token.Token --- pkg/lexing/token/token.go | 26 +++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/pkg/lexing/token/token.go b/pkg/lexing/token/token.go index e954781695..820dacdd6a 100644 --- a/pkg/lexing/token/token.go +++ b/pkg/lexing/token/token.go @@ -4,22 +4,33 @@ import ( "fmt" "github.com/jensneuse/graphql-go-tools/pkg/document" "github.com/jensneuse/graphql-go-tools/pkg/lexing/keyword" - "github.com/jensneuse/graphql-go-tools/pkg/lexing/literal" "github.com/jensneuse/graphql-go-tools/pkg/lexing/position" ) type Token struct { - Keyword keyword.Keyword - Literal document.ByteSlice - Position position.Position - Description string + Keyword keyword.Keyword + Literal document.ByteSliceReference + TextPosition position.Position + Description string } func (t Token) String() string { - return fmt.Sprintf("Token:: Keyword: %s, Literal: %s Pos: %s", t.Keyword, t.Literal, t.Position) + return fmt.Sprintf("Token:: Keyword: %s, Pos: %s", t.Keyword, t.TextPosition) } -var ( +func (t *Token) SetStart(inputPosition int, textPosition position.Position) { + t.Literal.Start = uint16(inputPosition) + t.TextPosition.LineStart = textPosition.LineStart + t.TextPosition.CharStart = textPosition.CharStart +} + +func (t *Token) SetEnd(inputPosition int, textPosition position.Position) { + t.Literal.End = uint16(inputPosition) + t.TextPosition.LineEnd = textPosition.LineStart + t.TextPosition.CharEnd = textPosition.CharStart +} + +/*var ( EOF = Token{ Keyword: keyword.EOF, Literal: literal.EOF, @@ -81,3 +92,4 @@ var ( Literal: literal.AND, } ) +*/ From f99d9e69bbcdfa7a8adee1fdc697aab054f24d3c Mon Sep 17 00:00:00 2001 From: 
jnsone11 Date: Thu, 17 Jan 2019 22:20:27 +0100 Subject: [PATCH 5/7] update document structs to reflect the switch from ByteSlice to ByteSliceReference --- pkg/document/arguments.go | 14 +++---- pkg/document/argumentsdefinition.go | 14 ------- pkg/document/directivedefinition.go | 31 ++++---------- pkg/document/directives.go | 14 +++---- pkg/document/enumtypedefinition.go | 40 ++++-------------- pkg/document/enumvaluedefinition.go | 29 ++++--------- pkg/document/field.go | 18 ++++---- pkg/document/fielddefinition.go | 41 ++++--------------- pkg/document/fragmentdefinition.go | 27 ++++-------- pkg/document/fragmentspread.go | 14 +++---- pkg/document/implementsinterfaces.go | 2 +- pkg/document/inlinefragment.go | 10 ++--- pkg/document/inputfieldsdefinition.go | 13 ------ pkg/document/inputobjecttypedefinition.go | 43 ++++--------------- pkg/document/inputvaluedefinition.go | 18 ++++---- pkg/document/interfacetypedefinition.go | 31 ++++---------- pkg/document/node.go | 10 ++--- pkg/document/objectfield.go | 14 +++---- pkg/document/objecttypedefinition.go | 42 ++++--------------- pkg/document/operationdefinition.go | 14 +++---- pkg/document/scalartypedefinition.go | 18 ++++---- pkg/document/selectionset.go | 10 ++--- pkg/document/type.go | 14 +++---- pkg/document/typedefinition.go | 4 +- pkg/document/typesystemdefinition.go | 10 ++--- pkg/document/uniontypedefinition.go | 50 +++++------------------ pkg/document/value.go | 10 ++--- pkg/document/variabledefinitions.go | 14 +++---- 28 files changed, 184 insertions(+), 385 deletions(-) diff --git a/pkg/document/arguments.go b/pkg/document/arguments.go index 53d31e78bf..66e153a515 100644 --- a/pkg/document/arguments.go +++ b/pkg/document/arguments.go @@ -3,7 +3,7 @@ package document // Argument as specified in // http://facebook.github.io/graphql/draft/#Argument type Argument struct { - Name ByteSlice + Name ByteSliceReference Value int } @@ -15,7 +15,7 @@ func (a Argument) NodeValueReference() int { panic("implement me") } 
-func (a Argument) NodeUnionMemberTypes() []ByteSlice { +func (a Argument) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -51,7 +51,7 @@ func (a Argument) NodeDirectiveDefinitions() []int { panic("implement me") } -func (a Argument) NodeImplementsInterfaces() []ByteSlice { +func (a Argument) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -71,15 +71,15 @@ func (a Argument) NodeArgumentsDefinition() []int { panic("implement me") } -func (a Argument) NodeName() string { - return string(a.Name) +func (a Argument) NodeName() ByteSliceReference { + return a.Name } -func (a Argument) NodeAlias() string { +func (a Argument) NodeAlias() ByteSliceReference { panic("implement me") } -func (a Argument) NodeDescription() string { +func (a Argument) NodeDescription() ByteSliceReference { panic("implement me") } diff --git a/pkg/document/argumentsdefinition.go b/pkg/document/argumentsdefinition.go index 26b576d73b..249e2f06b2 100644 --- a/pkg/document/argumentsdefinition.go +++ b/pkg/document/argumentsdefinition.go @@ -1,19 +1,5 @@ package document -import "bytes" - // ArgumentsDefinition as specified in: // http://facebook.github.io/graphql/draft/#ArgumentsDefinition type ArgumentsDefinition []InputValueDefinition - -// GetByName returns InputValueDefinition by $name or nil if not found -func (a ArgumentsDefinition) GetByName(name ByteSlice) *InputValueDefinition { - - for _, definition := range a { - if bytes.Equal(definition.Name, name) { - return &definition - } - } - - return nil -} diff --git a/pkg/document/directivedefinition.go b/pkg/document/directivedefinition.go index 48fbcd907d..17b41fc564 100644 --- a/pkg/document/directivedefinition.go +++ b/pkg/document/directivedefinition.go @@ -1,12 +1,10 @@ package document -import "bytes" - // DirectiveDefinition as specified in // http://facebook.github.io/graphql/draft/#DirectiveDefinition type DirectiveDefinition struct { - Description ByteSlice - Name ByteSlice + 
Description ByteSliceReference + Name ByteSliceReference ArgumentsDefinition []int DirectiveLocations DirectiveLocations } @@ -19,16 +17,16 @@ func (d DirectiveDefinition) NodeValueReference() int { panic("implement me") } -func (d DirectiveDefinition) NodeName() string { - return string(d.Name) +func (d DirectiveDefinition) NodeName() ByteSliceReference { + return d.Name } -func (d DirectiveDefinition) NodeAlias() string { +func (d DirectiveDefinition) NodeAlias() ByteSliceReference { panic("implement me") } -func (d DirectiveDefinition) NodeDescription() string { - return string(d.Description) +func (d DirectiveDefinition) NodeDescription() ByteSliceReference { + return d.Description } func (d DirectiveDefinition) NodeArguments() []int { @@ -83,7 +81,7 @@ func (d DirectiveDefinition) NodeDefaultValue() int { panic("implement me") } -func (d DirectiveDefinition) NodeImplementsInterfaces() []ByteSlice { +func (d DirectiveDefinition) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -119,7 +117,7 @@ func (d DirectiveDefinition) NodeDirectiveDefinitions() []int { panic("implement me") } -func (d DirectiveDefinition) NodeUnionMemberTypes() []ByteSlice { +func (d DirectiveDefinition) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -136,14 +134,3 @@ func (d DirectiveDefinition) ContainsLocation(location DirectiveLocation) bool { // DirectiveDefinitions is the plural of DirectiveDefinition type DirectiveDefinitions []DirectiveDefinition - -// GetByName returns the DirectiveDefinition via $name -func (d DirectiveDefinitions) GetByName(name ByteSlice) *DirectiveDefinition { - for _, directive := range d { - if bytes.Equal(directive.Name, name) { - return &directive - } - } - - return nil -} diff --git a/pkg/document/directives.go b/pkg/document/directives.go index af844970e8..414c1df65e 100644 --- a/pkg/document/directives.go +++ b/pkg/document/directives.go @@ -3,7 +3,7 @@ package document // Directive as specified in: 
// http://facebook.github.io/graphql/draft/#Directive type Directive struct { - Name ByteSlice + Name ByteSliceReference Arguments []int } @@ -15,7 +15,7 @@ func (d Directive) NodeValueReference() int { panic("implement me") } -func (d Directive) NodeUnionMemberTypes() []ByteSlice { +func (d Directive) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -51,7 +51,7 @@ func (d Directive) NodeDirectiveDefinitions() []int { panic("implement me") } -func (d Directive) NodeImplementsInterfaces() []ByteSlice { +func (d Directive) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -71,7 +71,7 @@ func (d Directive) NodeArgumentsDefinition() []int { panic("implement me") } -func (d Directive) NodeAlias() string { +func (d Directive) NodeAlias() ByteSliceReference { panic("implement me") } @@ -99,11 +99,11 @@ func (d Directive) NodeInlineFragments() []int { panic("implement me") } -func (d Directive) NodeName() string { - return string(d.Name) +func (d Directive) NodeName() ByteSliceReference { + return d.Name } -func (d Directive) NodeDescription() string { +func (d Directive) NodeDescription() ByteSliceReference { panic("implement me") } diff --git a/pkg/document/enumtypedefinition.go b/pkg/document/enumtypedefinition.go index 752d279ebd..6d0b008607 100644 --- a/pkg/document/enumtypedefinition.go +++ b/pkg/document/enumtypedefinition.go @@ -1,14 +1,10 @@ package document -import ( - "bytes" -) - // EnumTypeDefinition as specified in: // http://facebook.github.io/graphql/draft/#EnumTypeDefinition type EnumTypeDefinition struct { - Description ByteSlice - Name ByteSlice + Description ByteSliceReference + Name ByteSliceReference EnumValuesDefinition []int Directives []int } @@ -21,7 +17,7 @@ func (e EnumTypeDefinition) NodeValueReference() int { panic("implement me") } -func (e EnumTypeDefinition) NodeUnionMemberTypes() []ByteSlice { +func (e EnumTypeDefinition) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") 
} @@ -57,7 +53,7 @@ func (e EnumTypeDefinition) NodeDirectiveDefinitions() []int { panic("implement me") } -func (e EnumTypeDefinition) NodeImplementsInterfaces() []ByteSlice { +func (e EnumTypeDefinition) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -77,7 +73,7 @@ func (e EnumTypeDefinition) NodeArgumentsDefinition() []int { panic("implement me") } -func (e EnumTypeDefinition) NodeAlias() string { +func (e EnumTypeDefinition) NodeAlias() ByteSliceReference { panic("implement me") } @@ -109,12 +105,12 @@ func (e EnumTypeDefinition) NodeEnumValuesDefinition() []int { return e.EnumValuesDefinition } -func (e EnumTypeDefinition) NodeName() string { - return string(e.Name) +func (e EnumTypeDefinition) NodeName() ByteSliceReference { + return e.Name } -func (e EnumTypeDefinition) NodeDescription() string { - return string(e.Description) +func (e EnumTypeDefinition) NodeDescription() ByteSliceReference { + return e.Description } func (e EnumTypeDefinition) NodeArguments() []int { @@ -125,23 +121,5 @@ func (e EnumTypeDefinition) NodeDirectives() []int { return e.Directives } -// TitleCaseName returns the EnumTypeDefinition's Name -// as title case string. 
example: -// episode => Episode -func (e EnumTypeDefinition) TitleCaseName() ByteSlice { - return bytes.Title(e.Name) -} - // EnumTypeDefinitions is the plural of EnumTypeDefinition type EnumTypeDefinitions []EnumTypeDefinition - -// HasDefinition returns true if a EnumTypeDefinition with $name is contained -func (e EnumTypeDefinitions) HasDefinition(name ByteSlice) bool { - for _, definition := range e { - if bytes.Equal(definition.Name, name) { - return true - } - } - - return false -} diff --git a/pkg/document/enumvaluedefinition.go b/pkg/document/enumvaluedefinition.go index e9d4673b5b..d2c5b9aaab 100644 --- a/pkg/document/enumvaluedefinition.go +++ b/pkg/document/enumvaluedefinition.go @@ -1,14 +1,10 @@ package document -import ( - "bytes" -) - // EnumValueDefinition as specified in: // http://facebook.github.io/graphql/draft/#EnumValueDefinition type EnumValueDefinition struct { - Description ByteSlice - EnumValue ByteSlice + Description ByteSliceReference + EnumValue ByteSliceReference Directives []int } @@ -20,7 +16,7 @@ func (e EnumValueDefinition) NodeValueReference() int { panic("implement me") } -func (e EnumValueDefinition) NodeUnionMemberTypes() []ByteSlice { +func (e EnumValueDefinition) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -56,7 +52,7 @@ func (e EnumValueDefinition) NodeDirectiveDefinitions() []int { panic("implement me") } -func (e EnumValueDefinition) NodeImplementsInterfaces() []ByteSlice { +func (e EnumValueDefinition) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -76,7 +72,7 @@ func (e EnumValueDefinition) NodeArgumentsDefinition() []int { panic("implement me") } -func (e EnumValueDefinition) NodeAlias() string { +func (e EnumValueDefinition) NodeAlias() ByteSliceReference { panic("implement me") } @@ -104,12 +100,12 @@ func (e EnumValueDefinition) NodeInlineFragments() []int { return nil } -func (e EnumValueDefinition) NodeName() string { - return string(e.EnumValue) +func (e 
EnumValueDefinition) NodeName() ByteSliceReference { + return e.EnumValue } -func (e EnumValueDefinition) NodeDescription() string { - return string(e.Description) +func (e EnumValueDefinition) NodeDescription() ByteSliceReference { + return e.Description } func (e EnumValueDefinition) NodeArguments() []int { @@ -123,10 +119,3 @@ func (e EnumValueDefinition) NodeDirectives() []int { func (e EnumValueDefinition) NodeEnumValuesDefinition() []int { return nil } - -// ProperCaseVal returns the EnumValueDefinition's EnumValue -// as proper case string. example: -// NORTH => North -func (e EnumValueDefinition) ProperCaseVal() ByteSlice { - return bytes.Title(bytes.ToLower(e.EnumValue)) -} diff --git a/pkg/document/field.go b/pkg/document/field.go index 52a895b8b7..85f3840544 100644 --- a/pkg/document/field.go +++ b/pkg/document/field.go @@ -3,8 +3,8 @@ package document // Field as specified in: // http://facebook.github.io/graphql/draft/#Field type Field struct { - Alias ByteSlice - Name ByteSlice + Alias ByteSliceReference + Name ByteSliceReference Arguments []int Directives []int SelectionSet SelectionSet @@ -18,7 +18,7 @@ func (f Field) NodeValueReference() int { panic("implement me") } -func (f Field) NodeUnionMemberTypes() []ByteSlice { +func (f Field) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -54,7 +54,7 @@ func (f Field) NodeDirectiveDefinitions() []int { panic("implement me") } -func (f Field) NodeImplementsInterfaces() []ByteSlice { +func (f Field) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -74,19 +74,19 @@ func (f Field) NodeArgumentsDefinition() []int { panic("implement me") } -func (f Field) NodeAlias() string { - return string(f.Alias) +func (f Field) NodeAlias() ByteSliceReference { + return f.Alias } func (f Field) NodeOperationType() OperationType { panic("implement me") } -func (f Field) NodeName() string { - return string(f.Name) +func (f Field) NodeName() ByteSliceReference { + return 
f.Name } -func (f Field) NodeDescription() string { +func (f Field) NodeDescription() ByteSliceReference { panic("implement me") } diff --git a/pkg/document/fielddefinition.go b/pkg/document/fielddefinition.go index 633f8b62e3..42e2f815ca 100644 --- a/pkg/document/fielddefinition.go +++ b/pkg/document/fielddefinition.go @@ -1,15 +1,10 @@ package document -import ( - "bytes" - "github.com/jensneuse/graphql-go-tools/pkg/lexing/literal" -) - // FieldDefinition as specified in: // http://facebook.github.io/graphql/draft/#FieldDefinition type FieldDefinition struct { - Description ByteSlice - Name ByteSlice + Description ByteSliceReference + Name ByteSliceReference ArgumentsDefinition []int Type int Directives []int @@ -23,7 +18,7 @@ func (f FieldDefinition) NodeValueReference() int { panic("implement me") } -func (f FieldDefinition) NodeUnionMemberTypes() []ByteSlice { +func (f FieldDefinition) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -59,7 +54,7 @@ func (f FieldDefinition) NodeDirectiveDefinitions() []int { panic("implement me") } -func (f FieldDefinition) NodeImplementsInterfaces() []ByteSlice { +func (f FieldDefinition) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -79,16 +74,16 @@ func (f FieldDefinition) NodeArgumentsDefinition() []int { return f.ArgumentsDefinition } -func (f FieldDefinition) NodeName() string { - return string(f.Name) +func (f FieldDefinition) NodeName() ByteSliceReference { + return f.Name } -func (f FieldDefinition) NodeAlias() string { +func (f FieldDefinition) NodeAlias() ByteSliceReference { panic("implement me") } -func (f FieldDefinition) NodeDescription() string { - return string(f.Description) +func (f FieldDefinition) NodeDescription() ByteSliceReference { + return f.Description } func (f FieldDefinition) NodeArguments() []int { @@ -126,21 +121,3 @@ func (f FieldDefinition) NodeType() int { func (f FieldDefinition) NodeOperationType() OperationType { panic("implement me") } 
- -// NameAsTitle trims all prefixed __ and formats the name with strings.Title -func (f FieldDefinition) NameAsTitle() ByteSlice { - return bytes.Title(bytes.TrimPrefix(f.Name, []byte("__"))) -} - -// NameAsGoTypeName returns the field definition name as a go type name -func (f FieldDefinition) NameAsGoTypeName() ByteSlice { - - name := f.NameAsTitle() - name = append(bytes.ToLower(name[:1]), name[1:]...) - - if bytes.Equal(name, literal.TYPE) { - name = literal.GRAPHQLTYPE - } - - return name -} diff --git a/pkg/document/fragmentdefinition.go b/pkg/document/fragmentdefinition.go index 3cca58b53c..774425421a 100644 --- a/pkg/document/fragmentdefinition.go +++ b/pkg/document/fragmentdefinition.go @@ -1,11 +1,9 @@ package document -import "bytes" - // FragmentDefinition as specified in // http://facebook.github.io/graphql/draft/#FragmentDefinition type FragmentDefinition struct { - FragmentName ByteSlice // but not on + FragmentName ByteSliceReference // but not on TypeCondition int Directives []int SelectionSet SelectionSet @@ -19,7 +17,7 @@ func (f FragmentDefinition) NodeValueReference() int { panic("implement me") } -func (f FragmentDefinition) NodeUnionMemberTypes() []ByteSlice { +func (f FragmentDefinition) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -55,7 +53,7 @@ func (f FragmentDefinition) NodeDirectiveDefinitions() []int { panic("implement me") } -func (f FragmentDefinition) NodeImplementsInterfaces() []ByteSlice { +func (f FragmentDefinition) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -75,7 +73,7 @@ func (f FragmentDefinition) NodeArgumentsDefinition() []int { panic("implement me") } -func (f FragmentDefinition) NodeAlias() string { +func (f FragmentDefinition) NodeAlias() ByteSliceReference { panic("implement me") } @@ -103,11 +101,11 @@ func (f FragmentDefinition) NodeInlineFragments() []int { return f.SelectionSet.InlineFragments } -func (f FragmentDefinition) NodeName() string { - return 
string(f.FragmentName) +func (f FragmentDefinition) NodeName() ByteSliceReference { + return f.FragmentName } -func (f FragmentDefinition) NodeDescription() string { +func (f FragmentDefinition) NodeDescription() ByteSliceReference { panic("implement me") } @@ -125,14 +123,3 @@ func (f FragmentDefinition) NodeEnumValuesDefinition() []int { // FragmentDefinitions is the plural of FragmentDefinition type FragmentDefinitions []FragmentDefinition - -// GetByName returns the fragment definition with the given name if contained -func (f FragmentDefinitions) GetByName(name ByteSlice) (FragmentDefinition, bool) { - for _, fragment := range f { - if bytes.Equal(fragment.FragmentName, name) { - return fragment, true - } - } - - return FragmentDefinition{}, false -} diff --git a/pkg/document/fragmentspread.go b/pkg/document/fragmentspread.go index f5c7052c35..24b40269b9 100644 --- a/pkg/document/fragmentspread.go +++ b/pkg/document/fragmentspread.go @@ -3,7 +3,7 @@ package document // FragmentSpread as specified in: // http://facebook.github.io/graphql/draft/#FragmentSpread type FragmentSpread struct { - FragmentName ByteSlice + FragmentName ByteSliceReference Directives []int } @@ -15,7 +15,7 @@ func (f FragmentSpread) NodeValueReference() int { panic("implement me") } -func (f FragmentSpread) NodeUnionMemberTypes() []ByteSlice { +func (f FragmentSpread) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -51,7 +51,7 @@ func (f FragmentSpread) NodeDirectiveDefinitions() []int { panic("implement me") } -func (f FragmentSpread) NodeImplementsInterfaces() []ByteSlice { +func (f FragmentSpread) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -67,7 +67,7 @@ func (f FragmentSpread) NodeFieldsDefinition() []int { panic("implement me") } -func (f FragmentSpread) NodeAlias() string { +func (f FragmentSpread) NodeAlias() ByteSliceReference { panic("implement me") } @@ -79,11 +79,11 @@ func (f FragmentSpread) NodeOperationType() 
OperationType { panic("implement me") } -func (f FragmentSpread) NodeName() string { - return string(f.FragmentName) +func (f FragmentSpread) NodeName() ByteSliceReference { + return f.FragmentName } -func (f FragmentSpread) NodeDescription() string { +func (f FragmentSpread) NodeDescription() ByteSliceReference { panic("implement me") } diff --git a/pkg/document/implementsinterfaces.go b/pkg/document/implementsinterfaces.go index 263f4f84ab..91a38a3d62 100644 --- a/pkg/document/implementsinterfaces.go +++ b/pkg/document/implementsinterfaces.go @@ -2,4 +2,4 @@ package document // ImplementsInterfaces as specified in: // http://facebook.github.io/graphql/draft/#ImplementsInterfaces -type ImplementsInterfaces []ByteSlice +type ImplementsInterfaces []ByteSliceReference diff --git a/pkg/document/inlinefragment.go b/pkg/document/inlinefragment.go index ce5299636b..4814d90ebd 100644 --- a/pkg/document/inlinefragment.go +++ b/pkg/document/inlinefragment.go @@ -16,7 +16,7 @@ func (i InlineFragment) NodeValueReference() int { panic("implement me") } -func (i InlineFragment) NodeUnionMemberTypes() []ByteSlice { +func (i InlineFragment) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -52,7 +52,7 @@ func (i InlineFragment) NodeDirectiveDefinitions() []int { panic("implement me") } -func (i InlineFragment) NodeImplementsInterfaces() []ByteSlice { +func (i InlineFragment) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -64,7 +64,7 @@ func (i InlineFragment) NodeDefaultValue() int { panic("implement me") } -func (i InlineFragment) NodeAlias() string { +func (i InlineFragment) NodeAlias() ByteSliceReference { panic("implement me") } @@ -80,11 +80,11 @@ func (i InlineFragment) NodeOperationType() OperationType { panic("implement me") } -func (i InlineFragment) NodeName() string { +func (i InlineFragment) NodeName() ByteSliceReference { panic("implement me") } -func (i InlineFragment) NodeDescription() string { +func (i 
InlineFragment) NodeDescription() ByteSliceReference { panic("implement me") } diff --git a/pkg/document/inputfieldsdefinition.go b/pkg/document/inputfieldsdefinition.go index 1c8b5e13af..b4bccaec87 100644 --- a/pkg/document/inputfieldsdefinition.go +++ b/pkg/document/inputfieldsdefinition.go @@ -1,18 +1,5 @@ package document -import "bytes" - // InputValueDefinitions as specified in: // http://facebook.github.io/graphql/draft/#InputFieldsDefinition type InputValueDefinitions []InputValueDefinition - -// GetByName returns a InputValueDefinition by $name or nil if not found -func (i InputValueDefinitions) GetByName(name ByteSlice) *InputValueDefinition { - for _, definition := range i { - if bytes.Equal(definition.Name, name) { - return &definition - } - } - - return nil -} diff --git a/pkg/document/inputobjecttypedefinition.go b/pkg/document/inputobjecttypedefinition.go index a6af6fbc03..a48eefea73 100644 --- a/pkg/document/inputobjecttypedefinition.go +++ b/pkg/document/inputobjecttypedefinition.go @@ -1,12 +1,10 @@ package document -import "bytes" - // InputObjectTypeDefinition as specified in: // http://facebook.github.io/graphql/draft/#InputObjectTypeDefinition type InputObjectTypeDefinition struct { - Description ByteSlice - Name ByteSlice + Description ByteSliceReference + Name ByteSliceReference InputFieldsDefinition []int Directives []int } @@ -19,7 +17,7 @@ func (i InputObjectTypeDefinition) NodeValueReference() int { panic("implement me") } -func (i InputObjectTypeDefinition) NodeUnionMemberTypes() []ByteSlice { +func (i InputObjectTypeDefinition) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -55,20 +53,20 @@ func (i InputObjectTypeDefinition) NodeDirectiveDefinitions() []int { panic("implement me") } -func (i InputObjectTypeDefinition) NodeImplementsInterfaces() []ByteSlice { +func (i InputObjectTypeDefinition) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } -func (i InputObjectTypeDefinition) 
NodeName() string { - return string(i.Name) +func (i InputObjectTypeDefinition) NodeName() ByteSliceReference { + return i.Name } -func (i InputObjectTypeDefinition) NodeAlias() string { +func (i InputObjectTypeDefinition) NodeAlias() ByteSliceReference { panic("implement me") } -func (i InputObjectTypeDefinition) NodeDescription() string { - return string(i.Description) +func (i InputObjectTypeDefinition) NodeDescription() ByteSliceReference { + return i.Description } func (i InputObjectTypeDefinition) NodeArguments() []int { @@ -125,26 +123,3 @@ func (i InputObjectTypeDefinition) NodeDefaultValue() int { // InputObjectTypeDefinitions is the plural of InputObjectTypeDefinition type InputObjectTypeDefinitions []InputObjectTypeDefinition - -// HasDefinition returns true if an InputObjectTypeDefinition with $name is contained -func (i InputObjectTypeDefinitions) HasDefinition(name ByteSlice) bool { - - for _, definition := range i { - if bytes.Equal(definition.Name, name) { - return true - } - } - - return false -} - -// GetByName returns a InputObjectTypeDefinition by $name or nil if not found -func (i InputObjectTypeDefinitions) GetByName(name ByteSlice) *InputObjectTypeDefinition { - for _, definition := range i { - if bytes.Equal(definition.Name, name) { - return &definition - } - } - - return nil -} diff --git a/pkg/document/inputvaluedefinition.go b/pkg/document/inputvaluedefinition.go index 0de6eb88da..dfd4e68c12 100644 --- a/pkg/document/inputvaluedefinition.go +++ b/pkg/document/inputvaluedefinition.go @@ -3,8 +3,8 @@ package document // InputValueDefinition as specified in: // http://facebook.github.io/graphql/draft/#InputValueDefinition type InputValueDefinition struct { - Description ByteSlice - Name ByteSlice + Description ByteSliceReference + Name ByteSliceReference Type int DefaultValue int Directives []int @@ -18,7 +18,7 @@ func (i InputValueDefinition) NodeValueReference() int { panic("implement me") } -func (i InputValueDefinition) 
NodeUnionMemberTypes() []ByteSlice { +func (i InputValueDefinition) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -54,7 +54,7 @@ func (i InputValueDefinition) NodeDirectiveDefinitions() []int { panic("implement me") } -func (i InputValueDefinition) NodeImplementsInterfaces() []ByteSlice { +func (i InputValueDefinition) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -66,16 +66,16 @@ func (i InputValueDefinition) NodeDefaultValue() int { return i.DefaultValue } -func (i InputValueDefinition) NodeName() string { - return string(i.Name) +func (i InputValueDefinition) NodeName() ByteSliceReference { + return i.Name } -func (i InputValueDefinition) NodeAlias() string { +func (i InputValueDefinition) NodeAlias() ByteSliceReference { panic("implement me") } -func (i InputValueDefinition) NodeDescription() string { - return string(i.Description) +func (i InputValueDefinition) NodeDescription() ByteSliceReference { + return i.Description } func (i InputValueDefinition) NodeArguments() []int { diff --git a/pkg/document/interfacetypedefinition.go b/pkg/document/interfacetypedefinition.go index ffc4b1fc29..9541647e22 100644 --- a/pkg/document/interfacetypedefinition.go +++ b/pkg/document/interfacetypedefinition.go @@ -1,12 +1,10 @@ package document -import "bytes" - // InterfaceTypeDefinition as specified in: // http://facebook.github.io/graphql/draft/#InterfaceTypeDefinition type InterfaceTypeDefinition struct { - Description ByteSlice - Name ByteSlice + Description ByteSliceReference + Name ByteSliceReference FieldsDefinition []int Directives []int } @@ -19,7 +17,7 @@ func (i InterfaceTypeDefinition) NodeValueReference() int { panic("implement me") } -func (i InterfaceTypeDefinition) NodeUnionMemberTypes() []ByteSlice { +func (i InterfaceTypeDefinition) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -55,7 +53,7 @@ func (i InterfaceTypeDefinition) NodeDirectiveDefinitions() []int { 
panic("implement me") } -func (i InterfaceTypeDefinition) NodeImplementsInterfaces() []ByteSlice { +func (i InterfaceTypeDefinition) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -71,16 +69,16 @@ func (i InterfaceTypeDefinition) NodeFieldsDefinition() []int { return i.FieldsDefinition } -func (i InterfaceTypeDefinition) NodeName() string { - return string(i.Name) +func (i InterfaceTypeDefinition) NodeName() ByteSliceReference { + return i.Name } -func (i InterfaceTypeDefinition) NodeAlias() string { +func (i InterfaceTypeDefinition) NodeAlias() ByteSliceReference { panic("implement me") } -func (i InterfaceTypeDefinition) NodeDescription() string { - return string(i.Description) +func (i InterfaceTypeDefinition) NodeDescription() ByteSliceReference { + return i.Description } func (i InterfaceTypeDefinition) NodeArguments() []int { @@ -125,14 +123,3 @@ func (i InterfaceTypeDefinition) NodeOperationType() OperationType { // InterfaceTypeDefinitions is the plural of InterfaceTypeDefinition type InterfaceTypeDefinitions []InterfaceTypeDefinition - -// GetByName returns the interface type definition by name if contained -func (i InterfaceTypeDefinitions) GetByName(name ByteSlice) *InterfaceTypeDefinition { - for _, iFace := range i { - if bytes.Equal(iFace.Name, name) { - return &iFace - } - } - - return nil -} diff --git a/pkg/document/node.go b/pkg/document/node.go index 7c27ede494..1b2abe5fe5 100644 --- a/pkg/document/node.go +++ b/pkg/document/node.go @@ -1,9 +1,9 @@ package document type Node interface { - NodeName() string - NodeAlias() string - NodeDescription() string + NodeName() ByteSliceReference + NodeAlias() ByteSliceReference + NodeDescription() ByteSliceReference NodeArguments() []int NodeArgumentsDefinition() []int NodeDirectives() []int @@ -17,7 +17,7 @@ type Node interface { NodeOperationType() OperationType NodeValue() int NodeDefaultValue() int - NodeImplementsInterfaces() []ByteSlice + NodeImplementsInterfaces() 
[]ByteSliceReference TypeSystemDefinitionNode UnionTypeSystemDefinitionNode @@ -36,7 +36,7 @@ type TypeSystemDefinitionNode interface { } type UnionTypeSystemDefinitionNode interface { - NodeUnionMemberTypes() []ByteSlice + NodeUnionMemberTypes() []ByteSliceReference } type ValueNode interface { diff --git a/pkg/document/objectfield.go b/pkg/document/objectfield.go index 204d12efbe..1880c2c4f7 100644 --- a/pkg/document/objectfield.go +++ b/pkg/document/objectfield.go @@ -3,7 +3,7 @@ package document // ObjectField as specified in: // http://facebook.github.io/graphql/draft/#ObjectField type ObjectField struct { - Name ByteSlice + Name ByteSliceReference Value int } @@ -11,15 +11,15 @@ func (o ObjectField) NodeType() int { panic("implement me") } -func (o ObjectField) NodeName() string { - return string(o.Name) +func (o ObjectField) NodeName() ByteSliceReference { + return o.Name } -func (o ObjectField) NodeAlias() string { +func (o ObjectField) NodeAlias() ByteSliceReference { panic("implement me") } -func (o ObjectField) NodeDescription() string { +func (o ObjectField) NodeDescription() ByteSliceReference { panic("implement me") } @@ -71,7 +71,7 @@ func (o ObjectField) NodeDefaultValue() int { panic("implement me") } -func (o ObjectField) NodeImplementsInterfaces() []ByteSlice { +func (o ObjectField) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -107,7 +107,7 @@ func (o ObjectField) NodeDirectiveDefinitions() []int { panic("implement me") } -func (o ObjectField) NodeUnionMemberTypes() []ByteSlice { +func (o ObjectField) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } diff --git a/pkg/document/objecttypedefinition.go b/pkg/document/objecttypedefinition.go index 2714bf5fa1..283c49c19a 100644 --- a/pkg/document/objecttypedefinition.go +++ b/pkg/document/objecttypedefinition.go @@ -1,12 +1,10 @@ package document -import "bytes" - // ObjectTypeDefinition as specified in: // 
http://facebook.github.io/graphql/draft/#ObjectTypeDefinition type ObjectTypeDefinition struct { - Description ByteSlice - Name ByteSlice + Description ByteSliceReference + Name ByteSliceReference FieldsDefinition []int ImplementsInterfaces ImplementsInterfaces Directives []int @@ -20,7 +18,7 @@ func (o ObjectTypeDefinition) NodeValueReference() int { panic("implement me") } -func (o ObjectTypeDefinition) NodeUnionMemberTypes() []ByteSlice { +func (o ObjectTypeDefinition) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -56,20 +54,20 @@ func (o ObjectTypeDefinition) NodeDirectiveDefinitions() []int { panic("implement me") } -func (o ObjectTypeDefinition) NodeImplementsInterfaces() []ByteSlice { +func (o ObjectTypeDefinition) NodeImplementsInterfaces() []ByteSliceReference { return o.ImplementsInterfaces } -func (o ObjectTypeDefinition) NodeName() string { - return string(o.Name) +func (o ObjectTypeDefinition) NodeName() ByteSliceReference { + return o.Name } -func (o ObjectTypeDefinition) NodeAlias() string { +func (o ObjectTypeDefinition) NodeAlias() ByteSliceReference { panic("implement me") } -func (o ObjectTypeDefinition) NodeDescription() string { - return string(o.Description) +func (o ObjectTypeDefinition) NodeDescription() ByteSliceReference { + return o.Description } func (o ObjectTypeDefinition) NodeArguments() []int { @@ -126,25 +124,3 @@ func (o ObjectTypeDefinition) NodeDefaultValue() int { // ObjectTypeDefinitions is the plural of ObjectTypeDefinition type ObjectTypeDefinitions []ObjectTypeDefinition - -// HasType returns if a type with $name is contained -func (o ObjectTypeDefinitions) HasType(name ByteSlice) bool { - for _, objectType := range o { - if bytes.Equal(objectType.Name, name) { - return true - } - } - - return false -} - -// ObjectTypeDefinitionByName returns ObjectTypeDefinition,true if it is contained -func (o *ObjectTypeDefinitions) ObjectTypeDefinitionByName(name ByteSlice) *ObjectTypeDefinition { - for _, 
objectType := range *o { - if bytes.Equal(objectType.Name, name) { - return &objectType - } - } - - return nil -} diff --git a/pkg/document/operationdefinition.go b/pkg/document/operationdefinition.go index 72f8b92084..4e6faa5761 100644 --- a/pkg/document/operationdefinition.go +++ b/pkg/document/operationdefinition.go @@ -4,7 +4,7 @@ package document // http://facebook.github.io/graphql/draft/#OperationDefinition type OperationDefinition struct { OperationType OperationType - Name ByteSlice + Name ByteSliceReference VariableDefinitions []int Directives []int SelectionSet SelectionSet @@ -18,7 +18,7 @@ func (o OperationDefinition) NodeValueReference() int { panic("implement me") } -func (o OperationDefinition) NodeUnionMemberTypes() []ByteSlice { +func (o OperationDefinition) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -54,7 +54,7 @@ func (o OperationDefinition) NodeDirectiveDefinitions() []int { panic("implement me") } -func (o OperationDefinition) NodeImplementsInterfaces() []ByteSlice { +func (o OperationDefinition) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -74,7 +74,7 @@ func (o OperationDefinition) NodeArgumentsDefinition() []int { panic("implement me") } -func (o OperationDefinition) NodeAlias() string { +func (o OperationDefinition) NodeAlias() ByteSliceReference { panic("implement me") } @@ -102,11 +102,11 @@ func (o OperationDefinition) NodeInlineFragments() []int { return o.SelectionSet.InlineFragments } -func (o OperationDefinition) NodeName() string { - return string(o.Name) +func (o OperationDefinition) NodeName() ByteSliceReference { + return o.Name } -func (o OperationDefinition) NodeDescription() string { +func (o OperationDefinition) NodeDescription() ByteSliceReference { panic("implement me") } diff --git a/pkg/document/scalartypedefinition.go b/pkg/document/scalartypedefinition.go index 564ffb9fb0..a636de596e 100644 --- a/pkg/document/scalartypedefinition.go +++ 
b/pkg/document/scalartypedefinition.go @@ -3,8 +3,8 @@ package document // ScalarTypeDefinition as specified in: // http://facebook.github.io/graphql/draft/#sec-Scalars type ScalarTypeDefinition struct { - Description ByteSlice - Name ByteSlice + Description ByteSliceReference + Name ByteSliceReference Directives []int } @@ -16,7 +16,7 @@ func (s ScalarTypeDefinition) NodeValueReference() int { panic("implement me") } -func (s ScalarTypeDefinition) NodeUnionMemberTypes() []ByteSlice { +func (s ScalarTypeDefinition) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -52,16 +52,16 @@ func (s ScalarTypeDefinition) NodeDirectiveDefinitions() []int { panic("implement me") } -func (s ScalarTypeDefinition) NodeName() string { - return string(s.Name) +func (s ScalarTypeDefinition) NodeName() ByteSliceReference { + return s.Name } -func (s ScalarTypeDefinition) NodeAlias() string { +func (s ScalarTypeDefinition) NodeAlias() ByteSliceReference { panic("implement me") } -func (s ScalarTypeDefinition) NodeDescription() string { - return string(s.Description) +func (s ScalarTypeDefinition) NodeDescription() ByteSliceReference { + return s.Description } func (s ScalarTypeDefinition) NodeArguments() []int { @@ -116,7 +116,7 @@ func (s ScalarTypeDefinition) NodeDefaultValue() int { panic("implement me") } -func (s ScalarTypeDefinition) NodeImplementsInterfaces() []ByteSlice { +func (s ScalarTypeDefinition) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } diff --git a/pkg/document/selectionset.go b/pkg/document/selectionset.go index db265f1ebd..1c5372ea03 100644 --- a/pkg/document/selectionset.go +++ b/pkg/document/selectionset.go @@ -16,7 +16,7 @@ func (s SelectionSet) NodeValueReference() int { panic("implement me") } -func (s SelectionSet) NodeUnionMemberTypes() []ByteSlice { +func (s SelectionSet) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -52,15 +52,15 @@ func (s SelectionSet) 
NodeDirectiveDefinitions() []int { panic("implement me") } -func (s SelectionSet) NodeName() string { +func (s SelectionSet) NodeName() ByteSliceReference { panic("implement me") } -func (s SelectionSet) NodeAlias() string { +func (s SelectionSet) NodeAlias() ByteSliceReference { panic("implement me") } -func (s SelectionSet) NodeDescription() string { +func (s SelectionSet) NodeDescription() ByteSliceReference { panic("implement me") } @@ -116,7 +116,7 @@ func (s SelectionSet) NodeDefaultValue() int { panic("implement me") } -func (s SelectionSet) NodeImplementsInterfaces() []ByteSlice { +func (s SelectionSet) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } diff --git a/pkg/document/type.go b/pkg/document/type.go index 0004e0f641..966d199510 100644 --- a/pkg/document/type.go +++ b/pkg/document/type.go @@ -16,19 +16,19 @@ type TypeKind int // http://facebook.github.io/graphql/draft/#Type type Type struct { Kind TypeKind - Name ByteSlice + Name ByteSliceReference OfType int } -func (t Type) NodeName() string { - return string(t.Name) +func (t Type) NodeName() ByteSliceReference { + return t.Name } -func (t Type) NodeAlias() string { +func (t Type) NodeAlias() ByteSliceReference { panic("implement me") } -func (t Type) NodeDescription() string { +func (t Type) NodeDescription() ByteSliceReference { panic("implement me") } @@ -84,7 +84,7 @@ func (t Type) NodeDefaultValue() int { panic("implement me") } -func (t Type) NodeImplementsInterfaces() []ByteSlice { +func (t Type) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -120,7 +120,7 @@ func (t Type) NodeDirectiveDefinitions() []int { panic("implement me") } -func (t Type) NodeUnionMemberTypes() []ByteSlice { +func (t Type) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } diff --git a/pkg/document/typedefinition.go b/pkg/document/typedefinition.go index 5a9e33e088..622951fd1a 100644 --- a/pkg/document/typedefinition.go +++ 
b/pkg/document/typedefinition.go @@ -3,7 +3,7 @@ package document // TypeDefinition as specified in: // http://facebook.github.io/graphql/draft/#TypeDefinition type TypeDefinition struct { - Description ByteSlice - Name ByteSlice + Description ByteSliceReference + Name ByteSliceReference FieldsDefinition FieldDefinitions } diff --git a/pkg/document/typesystemdefinition.go b/pkg/document/typesystemdefinition.go index d10ca7e82c..183ecc9f16 100644 --- a/pkg/document/typesystemdefinition.go +++ b/pkg/document/typesystemdefinition.go @@ -21,19 +21,19 @@ func (t TypeSystemDefinition) NodeValueReference() int { panic("implement me") } -func (t TypeSystemDefinition) NodeUnionMemberTypes() []ByteSlice { +func (t TypeSystemDefinition) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } -func (t TypeSystemDefinition) NodeName() string { +func (t TypeSystemDefinition) NodeName() ByteSliceReference { panic("implement me") } -func (t TypeSystemDefinition) NodeAlias() string { +func (t TypeSystemDefinition) NodeAlias() ByteSliceReference { panic("implement me") } -func (t TypeSystemDefinition) NodeDescription() string { +func (t TypeSystemDefinition) NodeDescription() ByteSliceReference { panic("implement me") } @@ -89,7 +89,7 @@ func (t TypeSystemDefinition) NodeDefaultValue() int { panic("implement me") } -func (t TypeSystemDefinition) NodeImplementsInterfaces() []ByteSlice { +func (t TypeSystemDefinition) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } diff --git a/pkg/document/uniontypedefinition.go b/pkg/document/uniontypedefinition.go index 4ab4f64b5e..035e856014 100644 --- a/pkg/document/uniontypedefinition.go +++ b/pkg/document/uniontypedefinition.go @@ -1,12 +1,10 @@ package document -import "bytes" - // UnionTypeDefinition as specified in: // http://facebook.github.io/graphql/draft/#UnionTypeDefinition type UnionTypeDefinition struct { - Description ByteSlice - Name ByteSlice + Description ByteSliceReference + Name 
ByteSliceReference UnionMemberTypes UnionMemberTypes Directives []int } @@ -19,20 +17,20 @@ func (u UnionTypeDefinition) NodeValueReference() int { panic("implement me") } -func (u UnionTypeDefinition) NodeUnionMemberTypes() []ByteSlice { +func (u UnionTypeDefinition) NodeUnionMemberTypes() []ByteSliceReference { return u.UnionMemberTypes } -func (u UnionTypeDefinition) NodeName() string { - return string(u.Name) +func (u UnionTypeDefinition) NodeName() ByteSliceReference { + return u.Name } -func (u UnionTypeDefinition) NodeAlias() string { +func (u UnionTypeDefinition) NodeAlias() ByteSliceReference { panic("implement me") } -func (u UnionTypeDefinition) NodeDescription() string { - return string(u.Description) +func (u UnionTypeDefinition) NodeDescription() ByteSliceReference { + return u.Description } func (u UnionTypeDefinition) NodeArguments() []int { @@ -87,7 +85,7 @@ func (u UnionTypeDefinition) NodeDefaultValue() int { panic("implement me") } -func (u UnionTypeDefinition) NodeImplementsInterfaces() []ByteSlice { +func (u UnionTypeDefinition) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -123,37 +121,9 @@ func (u UnionTypeDefinition) NodeDirectiveDefinitions() []int { panic("implement me") } -// GroupingFuncName returns a name to name a function after. Example: -// "Direction" => "IsDirection" -func (u UnionTypeDefinition) GroupingFuncName() ByteSlice { - return append([]byte("Is"), u.Name...) 
-} - -// HasMemberType returns true if a member with the given name is contained -func (u UnionTypeDefinition) HasMemberType(name ByteSlice) bool { - for _, unionMemberType := range u.UnionMemberTypes { - if bytes.Equal(unionMemberType, name) { - return true - } - } - - return false -} - // UnionMemberTypes as specified in: // http://facebook.github.io/graphql/draft/#UnionMemberTypes -type UnionMemberTypes []ByteSlice +type UnionMemberTypes []ByteSliceReference // UnionTypeDefinitions is the plural of UnionTypeDefinition type UnionTypeDefinitions []UnionTypeDefinition - -// GetByName returns the UnionTypeDefinition by $name if it is contained -func (u UnionTypeDefinitions) GetByName(name ByteSlice) *UnionTypeDefinition { - for _, definition := range u { - if bytes.Equal(definition.Name, name) { - return &definition - } - } - - return nil -} diff --git a/pkg/document/value.go b/pkg/document/value.go index ecca254168..a9ebc9f362 100644 --- a/pkg/document/value.go +++ b/pkg/document/value.go @@ -14,15 +14,15 @@ func (v Value) NodeValueReference() int { return v.Reference } -func (v Value) NodeName() string { +func (v Value) NodeName() ByteSliceReference { panic("implement me") } -func (v Value) NodeAlias() string { +func (v Value) NodeAlias() ByteSliceReference { panic("implement me") } -func (v Value) NodeDescription() string { +func (v Value) NodeDescription() ByteSliceReference { panic("implement me") } @@ -78,7 +78,7 @@ func (v Value) NodeDefaultValue() int { panic("implement me") } -func (v Value) NodeImplementsInterfaces() []ByteSlice { +func (v Value) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -114,7 +114,7 @@ func (v Value) NodeDirectiveDefinitions() []int { panic("implement me") } -func (v Value) NodeUnionMemberTypes() []ByteSlice { +func (v Value) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } diff --git a/pkg/document/variabledefinitions.go b/pkg/document/variabledefinitions.go index 
b6514ff2e5..be291b0e08 100644 --- a/pkg/document/variabledefinitions.go +++ b/pkg/document/variabledefinitions.go @@ -3,7 +3,7 @@ package document // VariableDefinition as specified in: // http://facebook.github.io/graphql/draft/#VariableDefinition type VariableDefinition struct { - Variable ByteSlice + Variable ByteSliceReference Type int DefaultValue int } @@ -16,7 +16,7 @@ func (v VariableDefinition) NodeValueReference() int { panic("implement me") } -func (v VariableDefinition) NodeUnionMemberTypes() []ByteSlice { +func (v VariableDefinition) NodeUnionMemberTypes() []ByteSliceReference { panic("implement me") } @@ -52,7 +52,7 @@ func (v VariableDefinition) NodeDirectiveDefinitions() []int { panic("implement me") } -func (v VariableDefinition) NodeImplementsInterfaces() []ByteSlice { +func (v VariableDefinition) NodeImplementsInterfaces() []ByteSliceReference { panic("implement me") } @@ -72,7 +72,7 @@ func (v VariableDefinition) NodeArgumentsDefinition() []int { panic("implement me") } -func (v VariableDefinition) NodeAlias() string { +func (v VariableDefinition) NodeAlias() ByteSliceReference { panic("implement me") } @@ -84,11 +84,11 @@ func (v VariableDefinition) NodeType() int { return v.Type } -func (v VariableDefinition) NodeName() string { - return string(v.Variable) +func (v VariableDefinition) NodeName() ByteSliceReference { + return v.Variable } -func (v VariableDefinition) NodeDescription() string { +func (v VariableDefinition) NodeDescription() ByteSliceReference { panic("implement me") } From 0c84d11375077d1afffd232207e847c4d47e753e Mon Sep 17 00:00:00 2001 From: jnsone11 Date: Thu, 17 Jan 2019 22:21:09 +0100 Subject: [PATCH 6/7] update the parser to reflect the switch from ByteSlices in Tokens to ByteSliceReferences --- pkg/parser/arguments_parser.go | 2 +- pkg/parser/byteslice_parser.go | 2 +- pkg/parser/directivedefinition_parser.go | 2 +- pkg/parser/enumvaluesdefinition_parser.go | 10 +- pkg/parser/executabledefinition_parser.go | 2 +- 
pkg/parser/fieldsdefinition_parser.go | 10 +- ...tem_definition_parsed_introspection.golden | 667 ++++++++++++++---- pkg/parser/float_value_parser.go | 2 +- pkg/parser/inputvaluedefinitions_parser.go | 12 +- pkg/parser/int_value_parser.go | 2 +- pkg/parser/parser.go | 45 +- pkg/parser/parser_test.go | 284 ++++---- pkg/parser/schemadefinition_parser.go | 4 +- pkg/parser/type_parser.go | 2 +- pkg/parser/typesystemdefinition_parser.go | 40 +- pkg/parser/value_parser.go | 2 +- pkg/parser/variabledefinitions_parser.go | 2 +- 17 files changed, 763 insertions(+), 327 deletions(-) diff --git a/pkg/parser/arguments_parser.go b/pkg/parser/arguments_parser.go index bc344c158f..bbe7b58dde 100644 --- a/pkg/parser/arguments_parser.go +++ b/pkg/parser/arguments_parser.go @@ -22,7 +22,7 @@ func (p *Parser) parseArguments(index *[]int) error { return err } - var valueName document.ByteSlice + var valueName document.ByteSliceReference for { key, err = p.l.Peek(true) diff --git a/pkg/parser/byteslice_parser.go b/pkg/parser/byteslice_parser.go index 4d1a38fbb3..8c0141f593 100644 --- a/pkg/parser/byteslice_parser.go +++ b/pkg/parser/byteslice_parser.go @@ -7,6 +7,6 @@ func (p *Parser) parsePeekedByteSlice(index *int) error { return err } - *index = p.putByteSlice(variableToken.Literal) + *index = p.putByteSliceReference(variableToken.Literal) return nil } diff --git a/pkg/parser/directivedefinition_parser.go b/pkg/parser/directivedefinition_parser.go index 709feba3e3..647cb1e354 100644 --- a/pkg/parser/directivedefinition_parser.go +++ b/pkg/parser/directivedefinition_parser.go @@ -48,7 +48,7 @@ func (p *Parser) parseDirectiveDefinition(index *[]int) error { return err } - parsedLocation, err := document.ParseDirectiveLocation(location.Literal) + parsedLocation, err := document.ParseDirectiveLocation(p.ByteSlice(location.Literal)) if err != nil { return err } diff --git a/pkg/parser/enumvaluesdefinition_parser.go b/pkg/parser/enumvaluesdefinition_parser.go index 1eff478ee2..e629488b7b 
100644 --- a/pkg/parser/enumvaluesdefinition_parser.go +++ b/pkg/parser/enumvaluesdefinition_parser.go @@ -16,7 +16,7 @@ func (p *Parser) parseEnumValuesDefinition(index *[]int) error { return nil } - var description document.ByteSlice + var description *document.ByteSliceReference for { next, err := p.l.Peek(true) @@ -31,7 +31,7 @@ func (p *Parser) parseEnumValuesDefinition(index *[]int) error { return err } - description = stringToken.Literal + description = &stringToken.Literal continue } else if next == keyword.IDENT { @@ -42,7 +42,9 @@ func (p *Parser) parseEnumValuesDefinition(index *[]int) error { definition := p.makeEnumValueDefinition() definition.EnumValue = ident.Literal - definition.Description = description + if description != nil { + definition.Description = *description + } description = nil @@ -60,6 +62,6 @@ func (p *Parser) parseEnumValuesDefinition(index *[]int) error { } invalid, _ := p.l.Read() - return newErrInvalidType(invalid.Position, "parseEnumValuesDefinition", "string/ident/curlyBracketClose", invalid.Keyword.String()) + return newErrInvalidType(invalid.TextPosition, "parseEnumValuesDefinition", "string/ident/curlyBracketClose", invalid.Keyword.String()) } } diff --git a/pkg/parser/executabledefinition_parser.go b/pkg/parser/executabledefinition_parser.go index 2ea8c8d4d8..3d23019018 100644 --- a/pkg/parser/executabledefinition_parser.go +++ b/pkg/parser/executabledefinition_parser.go @@ -72,7 +72,7 @@ func (p *Parser) parseComplexExecutableDefinition() (executableDefinition docume if len(executableDefinition.OperationDefinitions) == 0 { invalid, _ := p.l.Read() - err = newErrInvalidType(invalid.Position, "parseComplexExecutableDefinition", "fragment/query/mutation/subscription", next.String()) + err = newErrInvalidType(invalid.TextPosition, "parseComplexExecutableDefinition", "fragment/query/mutation/subscription", next.String()) } return executableDefinition, err diff --git a/pkg/parser/fieldsdefinition_parser.go 
b/pkg/parser/fieldsdefinition_parser.go index cc6847fb5e..c593340d12 100644 --- a/pkg/parser/fieldsdefinition_parser.go +++ b/pkg/parser/fieldsdefinition_parser.go @@ -16,7 +16,7 @@ func (p *Parser) parseFieldsDefinition(index *[]int) (err error) { return } - var description document.ByteSlice + var description *document.ByteSliceReference for { next, err := p.l.Peek(true) @@ -31,7 +31,7 @@ func (p *Parser) parseFieldsDefinition(index *[]int) (err error) { return err } - description = stringToken.Literal + description = &stringToken.Literal case keyword.CURLYBRACKETCLOSE: _, err = p.l.Read() @@ -44,7 +44,9 @@ func (p *Parser) parseFieldsDefinition(index *[]int) (err error) { } definition := p.makeFieldDefinition() - definition.Description = description + if description != nil { + definition.Description = *description + } definition.Name = fieldIdent.Literal description = nil @@ -72,7 +74,7 @@ func (p *Parser) parseFieldsDefinition(index *[]int) (err error) { *index = append(*index, p.putFieldDefinition(definition)) default: invalid, _ := p.l.Read() - return newErrInvalidType(invalid.Position, "parseFieldsDefinition", "string/curly bracket close/ident", invalid.Keyword.String()) + return newErrInvalidType(invalid.TextPosition, "parseFieldsDefinition", "string/curly bracket close/ident", invalid.Keyword.String()) } } } diff --git a/pkg/parser/fixtures/type_system_definition_parsed_introspection.golden b/pkg/parser/fixtures/type_system_definition_parsed_introspection.golden index bc1609cdf7..8b2cb88b82 100644 --- a/pkg/parser/fixtures/type_system_definition_parsed_introspection.golden +++ b/pkg/parser/fixtures/type_system_definition_parsed_introspection.golden @@ -14,7 +14,10 @@ "OperationDefinitions": [ { "OperationType": 0, - "Name": "IntrospectionQuery", + "Name": { + "Start": 6, + "End": 24 + }, "VariableDefinitions": [], "Directives": [], "SelectionSet": { @@ -28,7 +31,10 @@ ], "FragmentDefinitions": [ { - "FragmentName": "FullType", + "FragmentName": { + 
"Start": 312, + "End": 320 + }, "TypeCondition": 0, "Directives": [], "SelectionSet": { @@ -47,7 +53,10 @@ } }, { - "FragmentName": "InputValue", + "FragmentName": { + "Start": 765, + "End": 775 + }, "TypeCondition": 1, "Directives": [], "SelectionSet": { @@ -62,7 +71,10 @@ } }, { - "FragmentName": "TypeRef", + "FragmentName": { + "Start": 870, + "End": 877 + }, "TypeCondition": 2, "Directives": [], "SelectionSet": { @@ -79,8 +91,14 @@ "VariableDefinitions": [], "Fields": [ { - "Alias": "", - "Name": "name", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 62, + "End": 66 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -90,8 +108,14 @@ } }, { - "Alias": "", - "Name": "queryType", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 44, + "End": 53 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -103,8 +127,14 @@ } }, { - "Alias": "", - "Name": "name", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 98, + "End": 102 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -114,8 +144,14 @@ } }, { - "Alias": "", - "Name": "mutationType", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 77, + "End": 89 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -127,8 +163,14 @@ } }, { - "Alias": "", - "Name": "name", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 138, + "End": 142 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -138,8 +180,14 @@ } }, { - "Alias": "", - "Name": "subscriptionType", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 113, + "End": 129 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -151,8 +199,14 @@ } }, { - "Alias": "", - "Name": "types", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 153, + "End": 158 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -164,8 +218,14 @@ } }, { - "Alias": "", - "Name": "name", + "Alias": { + "Start": 0, + "End": 0 + }, + 
"Name": { + "Start": 208, + "End": 212 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -175,8 +235,14 @@ } }, { - "Alias": "", - "Name": "description", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 219, + "End": 230 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -186,8 +252,14 @@ } }, { - "Alias": "", - "Name": "locations", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 237, + "End": 246 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -197,8 +269,14 @@ } }, { - "Alias": "", - "Name": "args", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 253, + "End": 257 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -210,8 +288,14 @@ } }, { - "Alias": "", - "Name": "directives", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 189, + "End": 199 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -226,8 +310,14 @@ } }, { - "Alias": "", - "Name": "__schema", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 29, + "End": 37 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -243,8 +333,14 @@ } }, { - "Alias": "", - "Name": "kind", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 335, + "End": 339 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -254,8 +350,14 @@ } }, { - "Alias": "", - "Name": "name", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 342, + "End": 346 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -265,8 +367,14 @@ } }, { - "Alias": "", - "Name": "description", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 349, + "End": 360 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -276,8 +384,14 @@ } }, { - "Alias": "", - "Name": "name", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 401, + "End": 405 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -287,8 +401,14 @@ } }, { - "Alias": "", - 
"Name": "description", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 410, + "End": 421 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -298,8 +418,14 @@ } }, { - "Alias": "", - "Name": "args", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 426, + "End": 430 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -311,8 +437,14 @@ } }, { - "Alias": "", - "Name": "type", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 463, + "End": 467 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -324,8 +456,14 @@ } }, { - "Alias": "", - "Name": "isDeprecated", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 497, + "End": 509 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -335,8 +473,14 @@ } }, { - "Alias": "", - "Name": "deprecationReason", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 514, + "End": 531 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -346,8 +490,14 @@ } }, { - "Alias": "", - "Name": "fields", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 363, + "End": 369 + }, "Arguments": [ 0 ], @@ -366,8 +516,14 @@ } }, { - "Alias": "", - "Name": "inputFields", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 538, + "End": 549 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -379,8 +535,14 @@ } }, { - "Alias": "", - "Name": "interfaces", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 576, + "End": 586 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -392,8 +554,14 @@ } }, { - "Alias": "", - "Name": "name", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 652, + "End": 656 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -403,8 +571,14 @@ } }, { - "Alias": "", - "Name": "description", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 661, + "End": 672 + }, "Arguments": [], "Directives": [], "SelectionSet": 
{ @@ -414,8 +588,14 @@ } }, { - "Alias": "", - "Name": "isDeprecated", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 677, + "End": 689 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -425,8 +605,14 @@ } }, { - "Alias": "", - "Name": "deprecationReason", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 694, + "End": 711 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -436,8 +622,14 @@ } }, { - "Alias": "", - "Name": "enumValues", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 610, + "End": 620 + }, "Arguments": [ 1 ], @@ -454,8 +646,14 @@ } }, { - "Alias": "", - "Name": "possibleTypes", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 718, + "End": 731 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -467,8 +665,14 @@ } }, { - "Alias": "", - "Name": "name", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 796, + "End": 800 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -478,8 +682,14 @@ } }, { - "Alias": "", - "Name": "description", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 803, + "End": 814 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -489,8 +699,14 @@ } }, { - "Alias": "", - "Name": "type", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 817, + "End": 821 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -502,8 +718,14 @@ } }, { - "Alias": "", - "Name": "defaultValue", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 845, + "End": 857 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -513,8 +735,14 @@ } }, { - "Alias": "", - "Name": "kind", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 892, + "End": 896 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -524,8 +752,14 @@ } }, { - "Alias": "", - "Name": "name", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 899, + "End": 903 + }, 
"Arguments": [], "Directives": [], "SelectionSet": { @@ -535,8 +769,14 @@ } }, { - "Alias": "", - "Name": "kind", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 919, + "End": 923 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -546,8 +786,14 @@ } }, { - "Alias": "", - "Name": "name", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 928, + "End": 932 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -557,8 +803,14 @@ } }, { - "Alias": "", - "Name": "kind", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 952, + "End": 956 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -568,8 +820,14 @@ } }, { - "Alias": "", - "Name": "name", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 963, + "End": 967 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -579,8 +837,14 @@ } }, { - "Alias": "", - "Name": "kind", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 991, + "End": 995 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -590,8 +854,14 @@ } }, { - "Alias": "", - "Name": "name", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 1004, + "End": 1008 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -601,8 +871,14 @@ } }, { - "Alias": "", - "Name": "kind", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 1036, + "End": 1040 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -612,8 +888,14 @@ } }, { - "Alias": "", - "Name": "name", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 1051, + "End": 1055 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -623,8 +905,14 @@ } }, { - "Alias": "", - "Name": "kind", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 1087, + "End": 1091 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -634,8 +922,14 @@ } }, { - "Alias": "", - "Name": "name", + "Alias": { + "Start": 0, + "End": 0 + }, + 
"Name": { + "Start": 1104, + "End": 1108 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -645,8 +939,14 @@ } }, { - "Alias": "", - "Name": "kind", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 1144, + "End": 1148 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -656,8 +956,14 @@ } }, { - "Alias": "", - "Name": "name", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 1163, + "End": 1167 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -667,8 +973,14 @@ } }, { - "Alias": "", - "Name": "kind", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 1207, + "End": 1211 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -678,8 +990,14 @@ } }, { - "Alias": "", - "Name": "name", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 1228, + "End": 1232 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -689,8 +1007,14 @@ } }, { - "Alias": "", - "Name": "ofType", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 1182, + "End": 1188 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -703,8 +1027,14 @@ } }, { - "Alias": "", - "Name": "ofType", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 1121, + "End": 1127 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -718,8 +1048,14 @@ } }, { - "Alias": "", - "Name": "ofType", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 1066, + "End": 1072 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -733,8 +1069,14 @@ } }, { - "Alias": "", - "Name": "ofType", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 1017, + "End": 1023 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -748,8 +1090,14 @@ } }, { - "Alias": "", - "Name": "ofType", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 974, + "End": 980 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -763,8 +1111,14 @@ } }, { - "Alias": 
"", - "Name": "ofType", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 937, + "End": 943 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -778,8 +1132,14 @@ } }, { - "Alias": "", - "Name": "ofType", + "Alias": { + "Start": 0, + "End": 0 + }, + "Name": { + "Start": 906, + "End": 912 + }, "Arguments": [], "Directives": [], "SelectionSet": { @@ -796,45 +1156,75 @@ "InlineFragments": [], "FragmentSpreads": [ { - "FragmentName": "FullType", + "FragmentName": { + "Start": 170, + "End": 178 + }, "Directives": [] }, { - "FragmentName": "InputValue", + "FragmentName": { + "Start": 271, + "End": 281 + }, "Directives": [] }, { - "FragmentName": "InputValue", + "FragmentName": { + "Start": 442, + "End": 452 + }, "Directives": [] }, { - "FragmentName": "TypeRef", + "FragmentName": { + "Start": 479, + "End": 486 + }, "Directives": [] }, { - "FragmentName": "InputValue", + "FragmentName": { + "Start": 559, + "End": 569 + }, "Directives": [] }, { - "FragmentName": "TypeRef", + "FragmentName": { + "Start": 596, + "End": 603 + }, "Directives": [] }, { - "FragmentName": "TypeRef", + "FragmentName": { + "Start": 741, + "End": 748 + }, "Directives": [] }, { - "FragmentName": "TypeRef", + "FragmentName": { + "Start": 831, + "End": 838 + }, "Directives": [] } ], "Arguments": [ { - "Name": "includeDeprecated", + "Name": { + "Start": 370, + "End": 387 + }, "Value": 0 }, { - "Name": "includeDeprecated", + "Name": { + "Start": 621, + "End": 638 + }, "Value": 1 } ], @@ -865,21 +1255,30 @@ "Types": [ { "Kind": 2, - "Name": "__Type", + "Name": { + "Start": 324, + "End": 330 + }, "OfType": -1 }, { "Kind": 2, - "Name": "__InputValue", + "Name": { + "Start": 779, + "End": 791 + }, "OfType": -1 }, { "Kind": 2, - "Name": "__Type", + "Name": { + "Start": 881, + "End": 887 + }, "OfType": -1 } ], - "ByteSlices": [], + "ByteSliceReferences": [], "Integers": [], "Floats": [], "Booleans": [ diff --git a/pkg/parser/float_value_parser.go b/pkg/parser/float_value_parser.go 
index edf906c4c5..fd16d9d970 100644 --- a/pkg/parser/float_value_parser.go +++ b/pkg/parser/float_value_parser.go @@ -11,7 +11,7 @@ func (p *Parser) parsePeekedFloatValue(index *int) error { return err } - float, err := transform.StringToFloat32(floatToken.Literal) + float, err := transform.StringToFloat32(p.ByteSlice(floatToken.Literal)) if err != nil { return err } diff --git a/pkg/parser/inputvaluedefinitions_parser.go b/pkg/parser/inputvaluedefinitions_parser.go index 6ec8ae89a5..9e6a28ac5d 100644 --- a/pkg/parser/inputvaluedefinitions_parser.go +++ b/pkg/parser/inputvaluedefinitions_parser.go @@ -3,7 +3,6 @@ package parser import ( "github.com/jensneuse/graphql-go-tools/pkg/document" "github.com/jensneuse/graphql-go-tools/pkg/lexing/keyword" - "github.com/jensneuse/graphql-go-tools/pkg/transform" ) // InputValueDefinitions cannot be found in the graphQL spec. @@ -14,7 +13,7 @@ import ( func (p *Parser) parseInputValueDefinitions(index *[]int, closeKeyword keyword.Keyword) error { - var description document.ByteSlice + var description *document.ByteSliceReference for { next, err := p.l.Peek(true) @@ -29,7 +28,8 @@ func (p *Parser) parseInputValueDefinitions(index *[]int, closeKeyword keyword.K return err } - description = transform.TrimWhitespace(quote.Literal) + //*description = transform.TrimWhitespace(p.ByteSlice(quote.Literal)) TODO: fix trimming + description = &quote.Literal } else if next == keyword.IDENT { @@ -39,7 +39,9 @@ func (p *Parser) parseInputValueDefinitions(index *[]int, closeKeyword keyword.K } definition := p.makeInputValueDefinition() - definition.Description = description + if description != nil { + definition.Description = *description + } definition.Name = ident.Literal description = nil @@ -68,7 +70,7 @@ func (p *Parser) parseInputValueDefinitions(index *[]int, closeKeyword keyword.K } else if next != closeKeyword && closeKeyword != keyword.UNDEFINED { invalid, _ := p.l.Read() - return newErrInvalidType(invalid.Position, 
"parseInputValueDefinitions", "string/ident/"+closeKeyword.String(), invalid.String()) + return newErrInvalidType(invalid.TextPosition, "parseInputValueDefinitions", "string/ident/"+closeKeyword.String(), invalid.String()) } else { return nil } diff --git a/pkg/parser/int_value_parser.go b/pkg/parser/int_value_parser.go index b719f7ad99..de9901db9c 100644 --- a/pkg/parser/int_value_parser.go +++ b/pkg/parser/int_value_parser.go @@ -11,7 +11,7 @@ func (p *Parser) parsePeekedIntValue(index *int) error { return err } - integer, err := transform.StringToInt32(integerToken.Literal) + integer, err := transform.StringToInt32(p.ByteSlice(integerToken.Literal)) if err != nil { return err } diff --git a/pkg/parser/parser.go b/pkg/parser/parser.go index 84bd140a60..0b189818f5 100644 --- a/pkg/parser/parser.go +++ b/pkg/parser/parser.go @@ -77,17 +77,18 @@ type ParsedDefinitions struct { ObjectFields document.ObjectFields Types document.Types - ByteSlices []document.ByteSlice - Integers []int32 - Floats []float32 - Booleans [2]bool + ByteSliceReferences []document.ByteSliceReference + Integers []int32 + Floats []float32 + Booleans [2]bool } // Lexer is the interface used by the Parser to lex tokens type Lexer interface { - SetInput(input []byte) + SetInput(input []byte) error Read() (tok token.Token, err error) Peek(ignoreWhitespace bool) (key keyword.Keyword, err error) + ByteSlice(reference document.ByteSliceReference) document.ByteSlice } // NewParser returns a new parser using a buffered runestringer @@ -124,9 +125,9 @@ func NewParser() *Parser { ObjectFields: make(document.ObjectFields, 0, 8), Types: make(document.Types, 0, 8), - Integers: make([]int32, 0, 8), - Floats: make([]float32, 0, 8), - ByteSlices: make([]document.ByteSlice, 0, 8), + Integers: make([]int32, 0, 8), + Floats: make([]float32, 0, 8), + ByteSliceReferences: make([]document.ByteSliceReference, 0, 8), } definitions.Booleans[0] = false @@ -139,17 +140,27 @@ func NewParser() *Parser { } } +func (p *Parser) 
ByteSlice(reference document.ByteSliceReference) document.ByteSlice { + return p.l.ByteSlice(reference) +} + // ParseTypeSystemDefinition parses a TypeSystemDefinition from an io.Reader -func (p *Parser) ParseTypeSystemDefinition(input []byte) (document.TypeSystemDefinition, error) { +func (p *Parser) ParseTypeSystemDefinition(input []byte) (definition document.TypeSystemDefinition, err error) { p.resetObjects() - p.l.SetInput(input) + err = p.l.SetInput(input) + if err != nil { + return + } return p.parseTypeSystemDefinition() } // ParseExecutableDefinition parses an ExecutableDefinition from an io.Reader -func (p *Parser) ParseExecutableDefinition(input []byte) (def document.ExecutableDefinition, err error) { +func (p *Parser) ParseExecutableDefinition(input []byte) (definition document.ExecutableDefinition, err error) { p.resetObjects() - p.l.SetInput(input) + err = p.l.SetInput(input) + if err != nil { + return + } return p.parseExecutableDefinition() } @@ -160,7 +171,7 @@ func (p *Parser) readExpect(expected keyword.Keyword, enclosingFunctionName stri } if t.Keyword != expected { - return t, newErrInvalidType(t.Position, enclosingFunctionName, expected.String(), t.Keyword.String()+" lit: "+string(t.Literal)) + return t, newErrInvalidType(t.TextPosition, enclosingFunctionName, expected.String(), t.Keyword.String()+" lit: "+string(p.ByteSlice(t.Literal))) } return @@ -361,7 +372,7 @@ func (p *Parser) resetObjects() { p.ParsedDefinitions.ObjectTypeDefinitions = p.ParsedDefinitions.ObjectTypeDefinitions[:0] p.ParsedDefinitions.ScalarTypeDefinitions = p.ParsedDefinitions.ScalarTypeDefinitions[:0] p.ParsedDefinitions.UnionTypeDefinitions = p.ParsedDefinitions.UnionTypeDefinitions[:0] - p.ParsedDefinitions.ByteSlices = p.ParsedDefinitions.ByteSlices[:0] + p.ParsedDefinitions.ByteSliceReferences = p.ParsedDefinitions.ByteSliceReferences[:0] p.ParsedDefinitions.Values = p.ParsedDefinitions.Values[:0] p.ParsedDefinitions.Integers = p.ParsedDefinitions.Integers[:0] 
p.ParsedDefinitions.Floats = p.ParsedDefinitions.Floats[:0] @@ -461,9 +472,9 @@ func (p *Parser) putUnionTypeDefinition(definition document.UnionTypeDefinition) return len(p.ParsedDefinitions.UnionTypeDefinitions) - 1 } -func (p *Parser) putByteSlice(slice document.ByteSlice) int { - p.ParsedDefinitions.ByteSlices = append(p.ParsedDefinitions.ByteSlices, slice) - return len(p.ParsedDefinitions.ByteSlices) - 1 +func (p *Parser) putByteSliceReference(slice document.ByteSliceReference) int { + p.ParsedDefinitions.ByteSliceReferences = append(p.ParsedDefinitions.ByteSliceReferences, slice) + return len(p.ParsedDefinitions.ByteSliceReferences) - 1 } func (p *Parser) putValue(value document.Value, index int) { diff --git a/pkg/parser/parser_test.go b/pkg/parser/parser_test.go index 9661a44062..f9f8af6d8a 100644 --- a/pkg/parser/parser_test.go +++ b/pkg/parser/parser_test.go @@ -12,12 +12,12 @@ import ( "testing" ) -type rule func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) +type rule func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) type ruleSet []rule -func (r ruleSet) eval(node document.Node, definitions ParsedDefinitions, ruleIndex int) { +func (r ruleSet) eval(node document.Node, parser *Parser, ruleIndex int) { for i, rule := range r { - rule(node, definitions, ruleIndex, i) + rule(node, parser, ruleIndex, i) } } @@ -27,7 +27,9 @@ func TestParser(t *testing.T) { run := func(input string, checks ...checkFunc) { parser := NewParser() - parser.l.SetInput([]byte(input)) + if err := parser.l.SetInput([]byte(input)); err != nil { + panic(err) + } for i, checkFunc := range checks { checkFunc(parser, i) } @@ -41,46 +43,49 @@ func TestParser(t *testing.T) { return sets } - evalRules := func(node document.Node, definitions ParsedDefinitions, rules ruleSet, ruleIndex int) { - rules.eval(node, definitions, ruleIndex) + evalRules := func(node document.Node, parser *Parser, rules ruleSet, ruleIndex int) { + rules.eval(node, 
parser, ruleIndex) } - hasName := func(name string) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { - if name != node.NodeName() { - panic(fmt.Errorf("hasName: want: %s, got: %s [rule: %d, node: %d]", name, node.NodeName(), ruleIndex, ruleSetIndex)) + hasName := func(wantName string) rule { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { + gotName := string(parser.ByteSlice(node.NodeName())) + if wantName != gotName { + panic(fmt.Errorf("hasName: want: %s, got: %s [rule: %d, node: %d]", wantName, gotName, ruleIndex, ruleSetIndex)) } } } - hasAlias := func(alias string) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { - if alias != node.NodeAlias() { - panic(fmt.Errorf("hasAlias: want: %s, got: %s [rule: %d, node: %d]", alias, node.NodeAlias(), ruleIndex, ruleSetIndex)) + hasAlias := func(wantAlias string) rule { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { + gotAlias := string(parser.ByteSlice(node.NodeAlias())) + if wantAlias != gotAlias { + panic(fmt.Errorf("hasAlias: want: %s, got: %s [rule: %d, node: %d]", wantAlias, gotAlias, ruleIndex, ruleSetIndex)) } } } - hasDescription := func(description string) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { - if description != node.NodeDescription() { - panic(fmt.Errorf("hasName: want: %s, got: %s [rule: %d, node: %d]", description, node.NodeDescription(), ruleIndex, ruleSetIndex)) + hasDescription := func(wantDescription string) rule { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { + gotDescription := string(parser.ByteSlice(node.NodeDescription())) + if wantDescription != gotDescription { + panic(fmt.Errorf("hasName: want: %s, got: %s [rule: %d, node: %d]", wantDescription, gotDescription, ruleIndex, ruleSetIndex)) } } } - unwrapObjectField := func(node 
document.Node, definitions ParsedDefinitions) document.Node { + unwrapObjectField := func(node document.Node, parser *Parser) document.Node { objectField, ok := node.(document.ObjectField) if ok { - node = definitions.Values[objectField.Value] + node = parser.ParsedDefinitions.Values[objectField.Value] } return node } expectIntegerValue := func(want int32) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { - node = unwrapObjectField(node, definitions) - got := definitions.Integers[node.NodeValueReference()] + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { + node = unwrapObjectField(node, parser) + got := parser.ParsedDefinitions.Integers[node.NodeValueReference()] if want != got { panic(fmt.Errorf("expectIntegerValue: want: %d, got: %d [rule: %d, node: %d]", want, got, ruleIndex, ruleSetIndex)) } @@ -88,9 +93,9 @@ func TestParser(t *testing.T) { } expectFloatValue := func(want float32) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { - node = unwrapObjectField(node, definitions) - got := definitions.Floats[node.NodeValueReference()] + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { + node = unwrapObjectField(node, parser) + got := parser.ParsedDefinitions.Floats[node.NodeValueReference()] if want != got { panic(fmt.Errorf("expectIntegerValue: want: %.2f, got: %.2f [rule: %d, node: %d]", want, got, ruleIndex, ruleSetIndex)) } @@ -98,9 +103,9 @@ func TestParser(t *testing.T) { } expectBooleanValue := func(want bool) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { - node = unwrapObjectField(node, definitions) - got := definitions.Booleans[node.NodeValueReference()] + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { + node = unwrapObjectField(node, parser) + got := 
parser.ParsedDefinitions.Booleans[node.NodeValueReference()] if want != got { panic(fmt.Errorf("expectIntegerValue: want: %v, got: %v [rule: %d, node: %d]", want, got, ruleIndex, ruleSetIndex)) } @@ -108,9 +113,9 @@ func TestParser(t *testing.T) { } expectByteSliceValue := func(want string) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { - node = unwrapObjectField(node, definitions) - got := string(definitions.ByteSlices[node.NodeValueReference()]) + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { + node = unwrapObjectField(node, parser) + got := string(parser.ByteSlice(parser.ParsedDefinitions.ByteSliceReferences[node.NodeValueReference()])) if want != got { panic(fmt.Errorf("expectByteSliceValue: want: %s, got: %s [rule: %d, node: %d]", want, got, ruleIndex, ruleSetIndex)) } @@ -118,30 +123,30 @@ func TestParser(t *testing.T) { } expectListValue := func(rules ...rule) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { - list := definitions.ListValues[node.NodeValueReference()] + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { + list := parser.ParsedDefinitions.ListValues[node.NodeValueReference()] for j, rule := range rules { valueIndex := list[j] - value := definitions.Values[valueIndex] - rule(value, definitions, j, ruleSetIndex) + value := parser.ParsedDefinitions.Values[valueIndex] + rule(value, parser, j, ruleSetIndex) } } } expectObjectValue := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { - node = unwrapObjectField(node, definitions) - list := definitions.ObjectValues[node.NodeValueReference()] + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { + node = unwrapObjectField(node, parser) + list := parser.ParsedDefinitions.ObjectValues[node.NodeValueReference()] for j, rule := range 
rules { valueIndex := list[j] - value := definitions.ObjectFields[valueIndex] - rule.eval(value, definitions, j) + value := parser.ParsedDefinitions.ObjectFields[valueIndex] + rule.eval(value, parser, j) } } } hasOperationType := func(operationType document.OperationType) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { gotOperationType := node.NodeOperationType().String() wantOperationType := operationType.String() if wantOperationType != gotOperationType { @@ -151,7 +156,7 @@ func TestParser(t *testing.T) { } hasTypeKind := func(wantTypeKind document.TypeKind) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { gotTypeKind := node.(document.Type).Kind if wantTypeKind != gotTypeKind { panic(fmt.Errorf("hasTypeKind: want(typeKind): %s, got: %s [rule: %d, node: %d]", wantTypeKind, gotTypeKind, ruleIndex, ruleSetIndex)) @@ -160,35 +165,35 @@ func TestParser(t *testing.T) { } nodeType := func(rules ...rule) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { - nodeType := definitions.Types[node.NodeType()] + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { + nodeType := parser.ParsedDefinitions.Types[node.NodeType()] for j, rule := range rules { - rule(nodeType, definitions, j, ruleSetIndex) + rule(nodeType, parser, j, ruleSetIndex) } } } ofType := func(rules ...rule) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { - ofType := definitions.Types[node.(document.Type).OfType] + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { + ofType := parser.ParsedDefinitions.Types[node.(document.Type).OfType] for j, rule := range rules { - rule(ofType, 
definitions, j, ruleSetIndex) + rule(ofType, parser, j, ruleSetIndex) } } } hasTypeName := func(wantName string) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { if fragment, ok := node.(document.FragmentDefinition); ok { - node = definitions.Types[fragment.TypeCondition] + node = parser.ParsedDefinitions.Types[fragment.TypeCondition] } if inlineFragment, ok := node.(document.InlineFragment); ok { - node = definitions.Types[inlineFragment.TypeCondition] + node = parser.ParsedDefinitions.Types[inlineFragment.TypeCondition] } - gotName := string(node.(document.Type).Name) + gotName := string(parser.ByteSlice(node.(document.Type).Name)) if wantName != gotName { panic(fmt.Errorf("hasTypeName: want: %s, got: %s [rule: %d, node: %d]", wantName, gotName, ruleIndex, ruleSetIndex)) } @@ -196,7 +201,7 @@ func TestParser(t *testing.T) { } /* hasDefaultValue := func(want document.Value) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { got := node.NodeDefaultValue() @@ -207,116 +212,116 @@ func TestParser(t *testing.T) { }*/ hasEnumValuesDefinitions := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { index := node.NodeEnumValuesDefinition() for j, k := range index { ruleSet := rules[j] - subNode := definitions.EnumValuesDefinitions[k] - ruleSet.eval(subNode, definitions, k) + subNode := parser.ParsedDefinitions.EnumValuesDefinitions[k] + ruleSet.eval(subNode, parser, k) } } } hasUnionTypeSystemDefinitions := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node 
document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { typeDefinitionIndex := node.NodeUnionTypeDefinitions() for j, ruleSet := range rules { definitionIndex := typeDefinitionIndex[j] - subNode := definitions.UnionTypeDefinitions[definitionIndex] - ruleSet.eval(subNode, definitions, j) + subNode := parser.ParsedDefinitions.UnionTypeDefinitions[definitionIndex] + ruleSet.eval(subNode, parser, j) } } } hasScalarTypeSystemDefinitions := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { typeDefinitionIndex := node.NodeScalarTypeDefinitions() for j, ruleSet := range rules { definitionIndex := typeDefinitionIndex[j] - subNode := definitions.ScalarTypeDefinitions[definitionIndex] - ruleSet.eval(subNode, definitions, j) + subNode := parser.ParsedDefinitions.ScalarTypeDefinitions[definitionIndex] + ruleSet.eval(subNode, parser, j) } } } hasObjectTypeSystemDefinitions := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { typeDefinitionIndex := node.NodeObjectTypeDefinitions() for j, ruleSet := range rules { definitionIndex := typeDefinitionIndex[j] - subNode := definitions.ObjectTypeDefinitions[definitionIndex] - ruleSet.eval(subNode, definitions, j) + subNode := parser.ParsedDefinitions.ObjectTypeDefinitions[definitionIndex] + ruleSet.eval(subNode, parser, j) } } } hasInterfaceTypeSystemDefinitions := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { typeDefinitionIndex := node.NodeInterfaceTypeDefinitions() for j, ruleSet := range rules { definitionIndex := typeDefinitionIndex[j] - subNode := 
definitions.InterfaceTypeDefinitions[definitionIndex] - ruleSet.eval(subNode, definitions, j) + subNode := parser.ParsedDefinitions.InterfaceTypeDefinitions[definitionIndex] + ruleSet.eval(subNode, parser, j) } } } hasEnumTypeSystemDefinitions := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { typeDefinitionIndex := node.NodeEnumTypeDefinitions() for j, ruleSet := range rules { definitionIndex := typeDefinitionIndex[j] - subNode := definitions.EnumTypeDefinitions[definitionIndex] - ruleSet.eval(subNode, definitions, j) + subNode := parser.ParsedDefinitions.EnumTypeDefinitions[definitionIndex] + ruleSet.eval(subNode, parser, j) } } } hasInputObjectTypeSystemDefinitions := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { typeDefinitionIndex := node.NodeInputObjectTypeDefinitions() for j, ruleSet := range rules { definitionIndex := typeDefinitionIndex[j] - subNode := definitions.InputObjectTypeDefinitions[definitionIndex] - ruleSet.eval(subNode, definitions, j) + subNode := parser.ParsedDefinitions.InputObjectTypeDefinitions[definitionIndex] + ruleSet.eval(subNode, parser, j) } } } hasDirectiveDefinitions := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { typeDefinitionIndex := node.NodeDirectiveDefinitions() for j, ruleSet := range rules { definitionIndex := typeDefinitionIndex[j] - subNode := definitions.DirectiveDefinitions[definitionIndex] - ruleSet.eval(subNode, definitions, j) + subNode := parser.ParsedDefinitions.DirectiveDefinitions[definitionIndex] + ruleSet.eval(subNode, parser, j) } } } 
hasUnionMemberTypes := func(members ...string) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { typeDefinitionIndex := node.NodeUnionMemberTypes() for j, want := range members { - got := string(typeDefinitionIndex[j]) + got := string(parser.ByteSlice(typeDefinitionIndex[j])) if want != got { panic(fmt.Errorf("hasUnionMemberTypes: want: %s, got: %s [check: %d]", want, got, ruleSetIndex)) } @@ -325,7 +330,7 @@ func TestParser(t *testing.T) { } hasSchemaDefinition := func() rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { schemaDefinition := node.NodeSchemaDefinition() if !schemaDefinition.IsDefined() { @@ -335,37 +340,37 @@ func TestParser(t *testing.T) { } hasVariableDefinitions := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { index := node.NodeVariableDefinitions() for j, k := range index { ruleSet := rules[j] - subNode := definitions.VariableDefinitions[k] - ruleSet.eval(subNode, definitions, k) + subNode := parser.ParsedDefinitions.VariableDefinitions[k] + ruleSet.eval(subNode, parser, k) } } } hasDirectives := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { index := node.NodeDirectives() for i := range rules { ruleSet := rules[i] - subNode := definitions.Directives[index[i]] - ruleSet.eval(subNode, definitions, index[i]) + subNode := parser.ParsedDefinitions.Directives[index[i]] + ruleSet.eval(subNode, parser, index[i]) } } } hasImplementsInterfaces := func(interfaces ...string) rule 
{ - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { actual := node.NodeImplementsInterfaces() for i, want := range interfaces { - got := string(actual[i]) + got := string(parser.ByteSlice(actual[i])) if want != got { panic(fmt.Errorf("hasImplementsInterfaces: want(at: %d): %s, got: %s [check: %d]", i, want, got, ruleSetIndex)) @@ -375,79 +380,79 @@ func TestParser(t *testing.T) { } hasFields := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { index := node.NodeFields() for i := range rules { ruleSet := rules[i] - subNode := definitions.Fields[index[i]] - ruleSet.eval(subNode, definitions, index[i]) + subNode := parser.ParsedDefinitions.Fields[index[i]] + ruleSet.eval(subNode, parser, index[i]) } } } hasFieldsDefinitions := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { index := node.NodeFieldsDefinition() for i := range rules { ruleSet := rules[i] - field := definitions.FieldDefinitions[index[i]] - ruleSet.eval(field, definitions, index[i]) + field := parser.ParsedDefinitions.FieldDefinitions[index[i]] + ruleSet.eval(field, parser, index[i]) } } } hasInputFields := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { index := node.NodeFields() for i := range rules { ruleSet := rules[i] - subNode := definitions.InputValueDefinitions[index[i]] - ruleSet.eval(subNode, definitions, index[i]) + subNode := parser.ParsedDefinitions.InputValueDefinitions[index[i]] + 
ruleSet.eval(subNode, parser, index[i]) } } } hasArguments := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { index := node.NodeArguments() for i := range rules { ruleSet := rules[i] - subNode := definitions.Arguments[index[i]] - ruleSet.eval(subNode, definitions, index[i]) + subNode := parser.ParsedDefinitions.Arguments[index[i]] + ruleSet.eval(subNode, parser, index[i]) } } } hasInlineFragments := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { index := node.NodeInlineFragments() for i := range rules { ruleSet := rules[i] - subNode := definitions.InlineFragments[index[i]] - ruleSet.eval(subNode, definitions, index[i]) + subNode := parser.ParsedDefinitions.InlineFragments[index[i]] + ruleSet.eval(subNode, parser, index[i]) } } } hasFragmentSpreads := func(rules ...ruleSet) rule { - return func(node document.Node, definitions ParsedDefinitions, ruleIndex, ruleSetIndex int) { + return func(node document.Node, parser *Parser, ruleIndex, ruleSetIndex int) { index := node.NodeFragmentSpreads() for i := range rules { ruleSet := rules[i] - subNode := definitions.FragmentSpreads[index[i]] - ruleSet.eval(subNode, definitions, index[i]) + subNode := parser.ParsedDefinitions.FragmentSpreads[index[i]] + ruleSet.eval(subNode, parser, index[i]) } } } @@ -473,7 +478,7 @@ func TestParser(t *testing.T) { } for k, want := range argumentNames { - got := string(parser.ParsedDefinitions.Arguments[k].Name) + got := string(parser.ByteSlice(parser.ParsedDefinitions.Arguments[k].Name)) if want != got { panic(fmt.Errorf("mustParseArguments: want(i: %d): %s, got: %s [check: %d]", k, want, got, i)) } @@ -489,7 +494,7 @@ func TestParser(t *testing.T) { } for k, want := range 
argumentNames { - got := string(parser.ParsedDefinitions.InputValueDefinitions[k].Name) + got := string(parser.ByteSlice(parser.ParsedDefinitions.InputValueDefinitions[k].Name)) if want != got { panic(fmt.Errorf("mustParseArguments: want(i: %d): %s, got: %s [check: %d]", k, want, got, i)) } @@ -513,7 +518,7 @@ func TestParser(t *testing.T) { } } - mustParseDirectiveDefinition := func(name string, locations ...document.DirectiveLocation) checkFunc { + mustParseDirectiveDefinition := func(wantName string, locations ...document.DirectiveLocation) checkFunc { return func(parser *Parser, i int) { var index []int if err := parser.parseDirectiveDefinition(&index); err != nil { @@ -521,8 +526,9 @@ func TestParser(t *testing.T) { } got := parser.ParsedDefinitions.DirectiveDefinitions[0] - if string(got.Name) != name { - panic(fmt.Errorf("mustParseDirectiveDefinition: want(name): %s, got: %s", name, got.Name)) + gotName := string(parser.ByteSlice(got.Name)) + if wantName != gotName { + panic(fmt.Errorf("mustParseDirectiveDefinition: want(name): %s, got: %s", wantName, gotName)) } for k, wantLocation := range locations { @@ -536,7 +542,7 @@ func TestParser(t *testing.T) { mustContainInputValueDefinition := func(index int, wantName string) checkFunc { return func(parser *Parser, i int) { - gotName := string(parser.ParsedDefinitions.InputValueDefinitions[index].Name) + gotName := string(parser.ByteSlice(parser.ParsedDefinitions.InputValueDefinitions[index].Name)) if wantName != gotName { panic(fmt.Errorf("mustContainInputValueDefinition: want for index %d: %s,got: %s", index, wantName, gotName)) } @@ -547,7 +553,7 @@ func TestParser(t *testing.T) { return func(parser *Parser, i int) { for k, wantName := range name { - gotName := string(parser.ParsedDefinitions.Arguments[k].Name) + gotName := string(parser.ByteSlice(parser.ParsedDefinitions.Arguments[k].Name)) if wantName != gotName { panic(fmt.Errorf("mustContainArguments: want for index %d: %s,got: %s", k, wantName, gotName)) 
} @@ -564,7 +570,7 @@ func TestParser(t *testing.T) { for i, k := range index { wantName := name[i] - gotName := string(parser.ParsedDefinitions.Directives[k].Name) + gotName := string(parser.ByteSlice(parser.ParsedDefinitions.Directives[k].Name)) if gotName != wantName { panic(fmt.Errorf("mustParseDirectives: want: %s,got: %s [check: %d]", wantName, gotName, i)) } @@ -580,7 +586,7 @@ func TestParser(t *testing.T) { } enum := parser.ParsedDefinitions.EnumTypeDefinitions[0] - evalRules(enum, parser.ParsedDefinitions, rules, i) + evalRules(enum, parser, rules, i) } } @@ -595,13 +601,13 @@ func TestParser(t *testing.T) { for i, set := range fragments { fragmentIndex := definition.FragmentDefinitions[i] fragment := parser.ParsedDefinitions.FragmentDefinitions[fragmentIndex] - set.eval(fragment, parser.ParsedDefinitions, i) + set.eval(fragment, parser, i) } for i, set := range operations { opIndex := definition.OperationDefinitions[i] operation := parser.ParsedDefinitions.OperationDefinitions[opIndex] - set.eval(operation, parser.ParsedDefinitions, i) + set.eval(operation, parser, i) } } } @@ -616,7 +622,7 @@ func TestParser(t *testing.T) { for j, rule := range rules { reverseIndex := len(parser.ParsedDefinitions.Fields) - 1 - j field := parser.ParsedDefinitions.Fields[reverseIndex] - evalRules(field, parser.ParsedDefinitions, rule, i) + evalRules(field, parser, rule, i) } } } @@ -630,7 +636,7 @@ func TestParser(t *testing.T) { for j, rule := range rules { field := parser.ParsedDefinitions.FieldDefinitions[j] - evalRules(field, parser.ParsedDefinitions, rule, i) + evalRules(field, parser, rule, i) } } } @@ -645,7 +651,7 @@ func TestParser(t *testing.T) { for j, rule := range rules { fragmentDefinition := parser.ParsedDefinitions.FragmentDefinitions[j] - evalRules(fragmentDefinition, parser.ParsedDefinitions, rule, i) + evalRules(fragmentDefinition, parser, rule, i) } } } @@ -659,7 +665,7 @@ func TestParser(t *testing.T) { for j, rule := range rules { spread := 
parser.ParsedDefinitions.FragmentSpreads[j] - evalRules(spread, parser.ParsedDefinitions, rule, i) + evalRules(spread, parser, rule, i) } } } @@ -673,7 +679,7 @@ func TestParser(t *testing.T) { } for j, want := range implements { - got := string(interfaces[j]) + got := string(parser.ByteSlice(interfaces[j])) if want != got { panic(fmt.Errorf("mustParseImplementsInterfaces: want: %s, got: %s [check: %d]", want, got, i)) } @@ -689,7 +695,7 @@ func TestParser(t *testing.T) { } gotKeyword := next.Keyword - gotLiteral := string(next.Literal) + gotLiteral := string(parser.ByteSlice(next.Literal)) if wantKeyword != gotKeyword { panic(fmt.Errorf("mustParseLiteral: want(keyword): %s, got: %s, [check: %d]", wantKeyword.String(), gotKeyword.String(), i)) @@ -711,7 +717,7 @@ func TestParser(t *testing.T) { for j, rule := range rules { reverseIndex := len(parser.ParsedDefinitions.InlineFragments) - 1 - j inlineFragment := parser.ParsedDefinitions.InlineFragments[reverseIndex] - evalRules(inlineFragment, parser.ParsedDefinitions, rule, i) + evalRules(inlineFragment, parser, rule, i) } } } @@ -725,7 +731,7 @@ func TestParser(t *testing.T) { for j, rule := range rules { inputValueDefinition := parser.ParsedDefinitions.InputValueDefinitions[j] - evalRules(inputValueDefinition, parser.ParsedDefinitions, rule, i) + evalRules(inputValueDefinition, parser, rule, i) } } } @@ -739,7 +745,7 @@ func TestParser(t *testing.T) { for j, rule := range rules { inputObjectDefinition := parser.ParsedDefinitions.InputObjectTypeDefinitions[j] - evalRules(inputObjectDefinition, parser.ParsedDefinitions, rule, i) + evalRules(inputObjectDefinition, parser, rule, i) } } } @@ -753,7 +759,7 @@ func TestParser(t *testing.T) { for j, rule := range rules { inputValueDefinition := parser.ParsedDefinitions.InputValueDefinitions[j] - evalRules(inputValueDefinition, parser.ParsedDefinitions, rule, i) + evalRules(inputValueDefinition, parser, rule, i) } } } @@ -767,7 +773,7 @@ func TestParser(t *testing.T) { for 
j, rule := range rules { interfaceTypeDefinition := parser.ParsedDefinitions.InterfaceTypeDefinitions[j] - evalRules(interfaceTypeDefinition, parser.ParsedDefinitions, rule, i) + evalRules(interfaceTypeDefinition, parser, rule, i) } } } @@ -781,7 +787,7 @@ func TestParser(t *testing.T) { for j, rule := range rules { objectTypeDefinition := parser.ParsedDefinitions.ObjectTypeDefinitions[j] - evalRules(objectTypeDefinition, parser.ParsedDefinitions, rule, i) + evalRules(objectTypeDefinition, parser, rule, i) } } } @@ -795,7 +801,7 @@ func TestParser(t *testing.T) { for j, rule := range rules { operationDefinition := parser.ParsedDefinitions.OperationDefinitions[j] - evalRules(operationDefinition, parser.ParsedDefinitions, rule, i) + evalRules(operationDefinition, parser, rule, i) } } } @@ -809,7 +815,7 @@ func TestParser(t *testing.T) { for j, rule := range rules { scalarTypeDefinition := parser.ParsedDefinitions.ScalarTypeDefinitions[j] - evalRules(scalarTypeDefinition, parser.ParsedDefinitions, rule, i) + evalRules(scalarTypeDefinition, parser, rule, i) } } } @@ -822,7 +828,7 @@ func TestParser(t *testing.T) { panic(err) } - evalRules(definition, parser.ParsedDefinitions, rules, i) + evalRules(definition, parser, rules, i) } } @@ -849,7 +855,7 @@ func TestParser(t *testing.T) { for j, rule := range directives { directive := parser.ParsedDefinitions.Directives[j] - evalRules(directive, parser.ParsedDefinitions, rule, i) + evalRules(directive, parser, rule, i) } } } @@ -861,7 +867,7 @@ func TestParser(t *testing.T) { panic(err) } - rules.eval(selectionSet, parser.ParsedDefinitions, i) + rules.eval(selectionSet, parser, i) } } @@ -874,7 +880,7 @@ func TestParser(t *testing.T) { for j, rule := range rules { scalarTypeDefinition := parser.ParsedDefinitions.UnionTypeDefinitions[j] - evalRules(scalarTypeDefinition, parser.ParsedDefinitions, rule, i) + evalRules(scalarTypeDefinition, parser, rule, i) } } } @@ -888,7 +894,7 @@ func TestParser(t *testing.T) { for j, rule := 
range rules { scalarTypeDefinition := parser.ParsedDefinitions.VariableDefinitions[j] - evalRules(scalarTypeDefinition, parser.ParsedDefinitions, rule, i) + evalRules(scalarTypeDefinition, parser, rule, i) } } } @@ -907,7 +913,7 @@ func TestParser(t *testing.T) { } for _, rule := range rules { - rule(value, parser.ParsedDefinitions, i, i) + rule(value, parser, i, i) } } } @@ -922,7 +928,7 @@ func TestParser(t *testing.T) { node := parser.ParsedDefinitions.Types[index] for j, rule := range rules { - rule(node, parser.ParsedDefinitions, j, i) + rule(node, parser, j, i) } } } diff --git a/pkg/parser/schemadefinition_parser.go b/pkg/parser/schemadefinition_parser.go index 8e4abe721c..e841d5feda 100644 --- a/pkg/parser/schemadefinition_parser.go +++ b/pkg/parser/schemadefinition_parser.go @@ -40,14 +40,14 @@ func (p *Parser) parseSchemaDefinition() (definition document.SchemaDefinition, return definition, err } - err = definition.SetOperationType(next.Literal, operationNameToken.Literal) + err = definition.SetOperationType(p.ByteSlice(next.Literal), p.ByteSlice(operationNameToken.Literal)) if err != nil { return definition, err } default: - return definition, newErrInvalidType(next.Position, "parseSchemaDefinition", "curlyBracketClose/query/subscription/mutation", next.String()) + return definition, newErrInvalidType(next.TextPosition, "parseSchemaDefinition", "curlyBracketClose/query/subscription/mutation", next.String()) } } } diff --git a/pkg/parser/type_parser.go b/pkg/parser/type_parser.go index 0ecd6938f9..21aa0dc2c6 100644 --- a/pkg/parser/type_parser.go +++ b/pkg/parser/type_parser.go @@ -29,7 +29,7 @@ func (p *Parser) parseType(index *int) error { firstType := p.makeType(index) var ofType int - var name document.ByteSlice + var name document.ByteSliceReference if isListType { diff --git a/pkg/parser/typesystemdefinition_parser.go b/pkg/parser/typesystemdefinition_parser.go index a2a6f6548d..a9a9546c50 100644 --- a/pkg/parser/typesystemdefinition_parser.go +++ 
b/pkg/parser/typesystemdefinition_parser.go @@ -9,7 +9,7 @@ func (p *Parser) parseTypeSystemDefinition() (definition document.TypeSystemDefi definition = p.makeTypeSystemDefinition() - var description document.ByteSlice + var description *document.ByteSliceReference for { next, err := p.l.Read() @@ -22,13 +22,13 @@ func (p *Parser) parseTypeSystemDefinition() (definition document.TypeSystemDefi return definition, err case keyword.STRING: - description = next.Literal + description = &next.Literal continue case keyword.SCHEMA: if definition.SchemaDefinition.IsDefined() { - return definition, newErrInvalidType(next.Position, "parseTypeSystemDefinition", "not a re-assignment of SchemaDefinition", "multiple SchemaDefinition assignments") + return definition, newErrInvalidType(next.TextPosition, "parseTypeSystemDefinition", "not a re-assignment of SchemaDefinition", "multiple SchemaDefinition assignments") } definition.SchemaDefinition, err = p.parseSchemaDefinition() @@ -43,7 +43,9 @@ func (p *Parser) parseTypeSystemDefinition() (definition document.TypeSystemDefi return definition, err } - p.ParsedDefinitions.ScalarTypeDefinitions[len(p.ParsedDefinitions.ScalarTypeDefinitions)-1].Description = description + if description != nil { + p.ParsedDefinitions.ScalarTypeDefinitions[len(p.ParsedDefinitions.ScalarTypeDefinitions)-1].Description = *description + } case keyword.TYPE: @@ -52,7 +54,9 @@ func (p *Parser) parseTypeSystemDefinition() (definition document.TypeSystemDefi return definition, err } - p.ParsedDefinitions.ObjectTypeDefinitions[len(p.ParsedDefinitions.ObjectTypeDefinitions)-1].Description = description + if description != nil { + p.ParsedDefinitions.ObjectTypeDefinitions[len(p.ParsedDefinitions.ObjectTypeDefinitions)-1].Description = *description + } case keyword.INTERFACE: @@ -61,7 +65,9 @@ func (p *Parser) parseTypeSystemDefinition() (definition document.TypeSystemDefi return definition, err } - 
p.ParsedDefinitions.InterfaceTypeDefinitions[len(p.ParsedDefinitions.InterfaceTypeDefinitions)-1].Description = description + if description != nil { + p.ParsedDefinitions.InterfaceTypeDefinitions[len(p.ParsedDefinitions.InterfaceTypeDefinitions)-1].Description = *description + } case keyword.UNION: @@ -70,7 +76,9 @@ func (p *Parser) parseTypeSystemDefinition() (definition document.TypeSystemDefi return definition, err } - p.ParsedDefinitions.UnionTypeDefinitions[len(p.ParsedDefinitions.UnionTypeDefinitions)-1].Description = description + if description != nil { + p.ParsedDefinitions.UnionTypeDefinitions[len(p.ParsedDefinitions.UnionTypeDefinitions)-1].Description = *description + } case keyword.ENUM: @@ -79,8 +87,10 @@ func (p *Parser) parseTypeSystemDefinition() (definition document.TypeSystemDefi return definition, err } - p.ParsedDefinitions.EnumTypeDefinitions[len(p.ParsedDefinitions.EnumTypeDefinitions)-1].Description = - description + if description != nil { + p.ParsedDefinitions.EnumTypeDefinitions[len(p.ParsedDefinitions.EnumTypeDefinitions)-1].Description = + *description + } case keyword.INPUT: @@ -89,7 +99,9 @@ func (p *Parser) parseTypeSystemDefinition() (definition document.TypeSystemDefi return definition, err } - p.ParsedDefinitions.InputObjectTypeDefinitions[len(p.ParsedDefinitions.InputObjectTypeDefinitions)-1].Description = description + if description != nil { + p.ParsedDefinitions.InputObjectTypeDefinitions[len(p.ParsedDefinitions.InputObjectTypeDefinitions)-1].Description = *description + } case keyword.DIRECTIVE: @@ -98,12 +110,14 @@ func (p *Parser) parseTypeSystemDefinition() (definition document.TypeSystemDefi return definition, err } - p.ParsedDefinitions.DirectiveDefinitions[len(p.ParsedDefinitions.DirectiveDefinitions)-1].Description = - description + if description != nil { + p.ParsedDefinitions.DirectiveDefinitions[len(p.ParsedDefinitions.DirectiveDefinitions)-1].Description = + *description + } default: invalid, _ := p.l.Read() - 
return definition, newErrInvalidType(invalid.Position, "parseTypeSystemDefinition", "eof/string/schema/scalar/type/interface/union/directive/input/enum", invalid.Keyword.String()) + return definition, newErrInvalidType(invalid.TextPosition, "parseTypeSystemDefinition", "eof/string/schema/scalar/type/interface/union/directive/input/enum", invalid.Keyword.String()) } description = nil diff --git a/pkg/parser/value_parser.go b/pkg/parser/value_parser.go index be6c5e7cd5..3015e5bab2 100644 --- a/pkg/parser/value_parser.go +++ b/pkg/parser/value_parser.go @@ -49,7 +49,7 @@ func (p *Parser) parseValue(index *int) error { err = p.parsePeekedObjectValue(&value.Reference) default: invalidToken, _ := p.l.Read() - return newErrInvalidType(invalidToken.Position, "parseValue", fmt.Sprintf("%v", parseValuePossibleKeywords), string(invalidToken.Keyword)) + return newErrInvalidType(invalidToken.TextPosition, "parseValue", fmt.Sprintf("%v", parseValuePossibleKeywords), string(invalidToken.Keyword)) } p.putValue(value, *index) diff --git a/pkg/parser/variabledefinitions_parser.go b/pkg/parser/variabledefinitions_parser.go index bacfb4def9..2e950bf6b7 100644 --- a/pkg/parser/variabledefinitions_parser.go +++ b/pkg/parser/variabledefinitions_parser.go @@ -56,7 +56,7 @@ func (p *Parser) parseVariableDefinitions(index *[]int) (err error) { return err default: invalid, _ := p.l.Read() - return newErrInvalidType(invalid.Position, "parseVariableDefinitions", "variable/bracket close", invalid.Keyword.String()) + return newErrInvalidType(invalid.TextPosition, "parseVariableDefinitions", "variable/bracket close", invalid.Keyword.String()) } } } From 0dc4326a4b5010cb40637f55774f9e231a090e90 Mon Sep 17 00:00:00 2001 From: jnsone11 Date: Thu, 17 Jan 2019 22:21:35 +0100 Subject: [PATCH 7/7] update README with recent benchmarks --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 037aa253cd..546ebbb920 100644 --- a/README.md +++ 
b/README.md @@ -35,10 +35,10 @@ See pkg/parser/parser_test.go ``` pkg: github.com/jensneuse/graphql-go-tools/pkg/parser -BenchmarkParser-4 50000 24778 ns/op 0 B/op 0 allocs/op -BenchmarkParser-4 50000 24950 ns/op 1 B/op 0 allocs/op -BenchmarkParser-4 50000 25724 ns/op 0 B/op 0 allocs/op -BenchmarkParser-4 50000 25537 ns/op 0 B/op 0 allocs/op +BenchmarkParser-4 100000 21264 ns/op 0 B/op 0 allocs/op +BenchmarkParser-4 100000 21531 ns/op 0 B/op 0 allocs/op +BenchmarkParser-4 100000 21150 ns/op 0 B/op 0 allocs/op +BenchmarkParser-4 100000 21234 ns/op 0 B/op 0 allocs/op ``` In a previous release I found that nested slice structs accounted for huge amounts of gc and decreased performance.