Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
748 changes: 295 additions & 453 deletions pkg/lexer/lexer.go

Large diffs are not rendered by default.

Binary file added pkg/lexer/lexer.test
Binary file not shown.
135 changes: 135 additions & 0 deletions pkg/lexer/lexer_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,73 @@ func TestLexerRegressions(t *testing.T) {
}
}

// Lexer.Read lifecycle tests: nil-reader safety, re-setting the input,
// and reading past the end of input multiple times.
var _ = Describe("Lexer.Read", func() {
	It("should not panic if reader is nil", func() {
		l := NewLexer()
		read := func() {
			_, err := l.Read()
			Expect(err).To(HaveOccurred())
		}

		Expect(read).ShouldNot(Panic())
	})
	It("should read correctly from reader when re-setting input", func() {
		l := NewLexer()
		l.SetInput(bytes.NewReader([]byte("x")))
		_, err := l.Read()
		Expect(err).NotTo(HaveOccurred())

		// Re-set the input and verify the lexer state (position, buffers)
		// was fully reset before the second read.
		l.SetInput(bytes.NewReader([]byte("x")))
		tok, err := l.Read()
		Expect(err).NotTo(HaveOccurred())
		Expect(tok).To(Equal(token.Token{
			Keyword: keyword.IDENT,
			Literal: []byte("x"),
			Position: position.Position{
				Line: 1,
				Char: 1,
			},
		}))
	})
	It("should read eof multiple times correctly", func() {
		l := NewLexer()
		l.SetInput(bytes.NewReader([]byte("x")))

		tok, err := l.Read()
		Expect(err).NotTo(HaveOccurred())
		Expect(tok).To(Equal(token.Token{
			Keyword: keyword.IDENT,
			Literal: []byte("x"),
			Position: position.Position{
				Line: 1,
				Char: 1,
			},
		}))

		// Reading past the end must keep returning the identical EOF token,
		// however many times Read is called.
		wantEOF := token.Token{
			Keyword: keyword.EOF,
			Literal: []byte("eof"),
			Position: position.Position{
				Line: 1,
				Char: 2,
			},
		}

		for i := 0; i < 2; i++ {
			eof, err := l.Read()
			Expect(err).NotTo(HaveOccurred())
			Expect(eof).To(Equal(wantEOF))
		}
	})
})

var _ = Describe("Lexer.Read", func() {

type Case struct {
Expand Down Expand Up @@ -99,6 +166,17 @@ var _ = Describe("Lexer.Read", func() {
},
},
}),
Entry("should read integer with comma at the end", Case{
in: []byte("1337,"),
out: token.Token{
Keyword: keyword.INTEGER,
Literal: []byte("1337"),
Position: position.Position{
Line: 1,
Char: 1,
},
},
}),
Entry("should read float", Case{
in: []byte("13.37"),
out: token.Token{
Expand Down Expand Up @@ -717,6 +795,20 @@ var _ = Describe("Lexer.Peek()", func() {
},
}),
}),
Entry("should peek ON with whitespace behind", Case{
input: []byte("on "),
expectKey: Equal(keyword.ON),
expectErr: BeNil(),
expectNextTokenErr: BeNil(),
expectNextToken: Equal(token.Token{
Keyword: keyword.ON,
Literal: []byte("on"),
Position: position.Position{
Line: 1,
Char: 1,
},
}),
}),
Entry("should peek ignore comma", Case{
input: []byte(","),
expectKey: Equal(keyword.EOF),
Expand Down Expand Up @@ -1115,6 +1207,20 @@ var _ = Describe("Lexer.peekIsFloat", func() {
)
})

// BenchmarkPeekIsFloat measures the CPU and allocation cost of
// Lexer.peekIsFloat on a representative float literal.
func BenchmarkPeekIsFloat(b *testing.B) {
	input := bytes.NewReader([]byte("13373737.37"))
	lexer := NewLexer()

	b.ReportAllocs()
	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		// Rewind so every iteration lexes the same bytes; a silently
		// failed Seek would make the benchmark measure EOF handling.
		if _, err := input.Seek(0, io.SeekStart); err != nil {
			b.Fatal(err)
		}
		lexer.SetInput(input)
		lexer.peekIsFloat()
	}
}

var _ = Describe("Lexer.Read", func() {

type Case struct {
Expand Down Expand Up @@ -1378,6 +1484,35 @@ baz`),
},
},
}),
Entry("should read '1,2,3' as three integers", Case{
in: []byte("1,2,3"),
out: []token.Token{
{
Keyword: keyword.INTEGER,
Literal: []byte("1"),
Position: position.Position{
Line: 1,
Char: 1,
},
},
{
Keyword: keyword.INTEGER,
Literal: []byte("2"),
Position: position.Position{
Line: 1,
Char: 3,
},
},
{
Keyword: keyword.INTEGER,
Literal: []byte("3"),
Position: position.Position{
Line: 1,
Char: 5,
},
},
},
}),
)
})

Expand Down
Binary file added pkg/lexer/memprofile.out
Binary file not shown.
Binary file added pkg/lexer/profile.out
Binary file not shown.
Binary file modified pkg/parser/memprofile.out
Binary file not shown.
24 changes: 22 additions & 2 deletions pkg/parser/parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,8 @@ func (e errInvalidType) Error() string {

// Parser holds the lexer and a pool of reusable selection set buffers
// that reduce allocations while parsing.
type Parser struct {
	l                   Lexer
	selectionSetBuffers []document.SelectionSet // free list reused by getSelectionSetBuffer/putSelectionSet
}

// Lexer is the interface used by the Parser to lex tokens
Expand All @@ -45,7 +46,8 @@ type Lexer interface {
// NewParser returns a new parser using a buffered runestringer.
func NewParser() *Parser {
	// Pre-warm the buffer pool with empty selection sets. Each entry is
	// given an initial capacity so early parses can append without
	// reallocating (a pool of nil slices would give no preallocation win).
	buffers := make([]document.SelectionSet, 10)
	for i := range buffers {
		buffers[i] = make(document.SelectionSet, 0, 10)
	}

	return &Parser{
		l:                   lexer.NewLexer(),
		selectionSetBuffers: buffers,
	}
}

Expand Down Expand Up @@ -88,3 +90,21 @@ func (p *Parser) peekExpect(expected keyword.Keyword, swallow bool) (matched boo

return
}

// getSelectionSetBuffer pops a reusable selection set from the pool, or
// allocates a fresh one when the pool is empty. The returned set always
// has length zero so callers can append to it directly; hand it back via
// putSelectionSet when done.
func (p *Parser) getSelectionSetBuffer() *document.SelectionSet {

	if n := len(p.selectionSetBuffers); n != 0 {
		// Pop from the tail (LIFO): O(1) and it keeps the pool slice's
		// backing array stable instead of re-slicing it from the front,
		// which would permanently strand the popped-over elements.
		s := p.selectionSetBuffers[n-1]
		p.selectionSetBuffers = p.selectionSetBuffers[:n-1]
		s = s[:0]
		return &s
	}

	// Pool exhausted: allocate with zero LENGTH and capacity 10. The
	// original used make(..., 10) (length 10), so callers appending to a
	// fresh buffer got 10 zero-value selections prefixed to their set.
	s := make(document.SelectionSet, 0, 10)
	return &s
}

// putSelectionSet returns a selection set buffer to the pool so a later
// getSelectionSetBuffer call can reuse its backing array.
// The caller must not use *set after handing it back.
func (p *Parser) putSelectionSet(set *document.SelectionSet) {
	p.selectionSetBuffers = append(p.selectionSetBuffers, *set)
}
Binary file modified pkg/parser/parser.test
Binary file not shown.
Binary file modified pkg/parser/profile.out
Binary file not shown.
12 changes: 11 additions & 1 deletion pkg/parser/selectionset_parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,24 +16,34 @@ func (p *Parser) parseSelectionSet() (selectionSet document.SelectionSet, err er
return
}

buffer := p.getSelectionSetBuffer()

for {

next, err := p.l.Peek(true)
if err != nil {
p.putSelectionSet(buffer)
return selectionSet, err
}

if next == keyword.CURLYBRACKETCLOSE {
_, err = p.l.Read()

selectionSet = make(document.SelectionSet, len(*buffer))
copy(selectionSet, *buffer)

p.putSelectionSet(buffer)

return selectionSet, err
}

selection, err := p.parseSelection()
if err != nil {
p.putSelectionSet(buffer)
return selectionSet, err
}

selectionSet = append(selectionSet, selection)
*buffer = append(*buffer, selection)
}

}