Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion go/vt/sqlparser/comments.go
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,11 @@ func hasCommentPrefix(sql string) bool {
func ExtractMysqlComment(sql string) (version string, innerSQL string) {
sql = sql[3 : len(sql)-2]

endOfVersionIndex := strings.IndexFunc(sql, func(c rune) bool { return !unicode.IsDigit(c) })
digitCount := 0
endOfVersionIndex := strings.IndexFunc(sql, func(c rune) bool {
digitCount++
return !unicode.IsDigit(c) || digitCount == 6
})
version = sql[0:endOfVersionIndex]
innerSQL = strings.TrimFunc(sql[endOfVersionIndex:], unicode.IsSpace)

Expand Down
4 changes: 4 additions & 0 deletions go/vt/sqlparser/comments_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -189,6 +189,10 @@ func TestExtractMysqlComment(t *testing.T) {
input: "/*!50708 SET max_execution_time=5000*/",
outSQL: "SET max_execution_time=5000",
outVersion: "50708",
}, {
input: "/*!50708* from*/",
outSQL: "* from",
outVersion: "50708",
}, {
input: "/*! SET max_execution_time=5000*/",
outSQL: "SET max_execution_time=5000",
Expand Down
15 changes: 14 additions & 1 deletion go/vt/sqlparser/parse_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -549,7 +549,20 @@ var (
}, {
input: "select /* string in case statement */ if(max(case a when 'foo' then 1 else 0 end) = 1, 'foo', 'bar') as foobar from t",
}, {
input: "select /* dual */ 1 from dual",
input: "/*!show databases*/",
output: "show databases",
}, {
input: "select /*!40101 * from*/ t",
output: "select * from t",
}, {
input: "select /*! * from*/ t",
output: "select * from t",
}, {
input: "select /*!* from*/ t",
output: "select * from t",
}, {
input: "select /*!401011 from*/ t",
output: "select 1 from t",
}, {
input: "select /* dual */ 1 from dual",
}, {
Expand Down
68 changes: 55 additions & 13 deletions go/vt/sqlparser/token.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,18 +34,19 @@ const (
// Tokenizer is the struct used to generate SQL
// tokens for the parser.
type Tokenizer struct {
InStream io.Reader
AllowComments bool
ForceEOF bool
lastChar uint16
Position int
lastToken []byte
LastError error
posVarIndex int
ParseTree Statement
partialDDL *DDL
nesting int
multi bool
InStream io.Reader
AllowComments bool
ForceEOF bool
lastChar uint16
Position int
lastToken []byte
LastError error
posVarIndex int
ParseTree Statement
partialDDL *DDL
nesting int
multi bool
specialComment *Tokenizer

buf []byte
bufPos int
Expand Down Expand Up @@ -427,6 +428,18 @@ func (tkn *Tokenizer) Error(err string) {
// Scan scans the tokenizer for the next token and returns
// the token type and an optional value.
func (tkn *Tokenizer) Scan() (int, []byte) {
if tkn.specialComment != nil {
// Enter specialComment scan mode.
// for scanning such kind of comment: /*! MySQL-specific code */
specialComment := tkn.specialComment
tok, val := specialComment.Scan()
if tok != 0 {
// return the specialComment scan result as the result
return tok, val
}
// leave specialComment scan mode after all stream consumed.
tkn.specialComment = nil
}
if tkn.lastChar == 0 {
tkn.next()
}
Expand Down Expand Up @@ -495,7 +508,12 @@ func (tkn *Tokenizer) Scan() (int, []byte) {
return tkn.scanCommentType1("//")
case '*':
tkn.next()
return tkn.scanCommentType2()
switch tkn.lastChar {
case '!':
return tkn.scanMySQLSpecificComment()
default:
return tkn.scanCommentType2()
}
default:
return int(ch), nil
}
Expand Down Expand Up @@ -818,6 +836,29 @@ func (tkn *Tokenizer) scanCommentType2() (int, []byte) {
return COMMENT, buffer.Bytes()
}

// scanMySQLSpecificComment consumes a MySQL version-gated comment of the
// form /*!NNNNN sql */ from the input stream. The leading "/*!" has already
// been recognized by the caller (Scan), so this method accumulates the full
// comment text, strips the delimiters and optional version number via
// ExtractMysqlComment, and installs a nested tokenizer over the inner SQL
// (tkn.specialComment) before delegating back to Scan, which will drain it.
// An unterminated comment (EOF before "*/") yields LEX_ERROR.
func (tkn *Tokenizer) scanMySQLSpecificComment() (int, []byte) {
	commentBuf := &bytes2.Buffer{}
	// Re-materialize the opener the caller already consumed, so the
	// buffer holds the complete /*! ... */ text ExtractMysqlComment expects.
	commentBuf.WriteString("/*!")
	tkn.next()
	for {
		switch tkn.lastChar {
		case '*':
			tkn.consumeNext(commentBuf)
			if tkn.lastChar == '/' {
				// Closing "*/" found: hand the inner SQL to a
				// nested tokenizer and resume normal scanning.
				tkn.consumeNext(commentBuf)
				_, innerSQL := ExtractMysqlComment(commentBuf.String())
				tkn.specialComment = NewStringTokenizer(innerSQL)
				return tkn.Scan()
			}
			// Lone '*': keep scanning; the next char may start "*/" again.
		case eofChar:
			// Input ended inside the comment.
			return LEX_ERROR, commentBuf.Bytes()
		default:
			tkn.consumeNext(commentBuf)
		}
	}
}

func (tkn *Tokenizer) consumeNext(buffer *bytes2.Buffer) {
if tkn.lastChar == eofChar {
// This should never happen.
Expand Down Expand Up @@ -853,6 +894,7 @@ func (tkn *Tokenizer) next() {
func (tkn *Tokenizer) reset() {
tkn.ParseTree = nil
tkn.partialDDL = nil
tkn.specialComment = nil
tkn.posVarIndex = 0
tkn.nesting = 0
tkn.ForceEOF = false
Expand Down