/*
* Copyright 2016-2018 Dgraph Labs, Inc. and Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tok

import (
	"encoding/binary"
	"plugin"
	"time"

	"github.com/golang/glog"
	geom "github.com/twpayne/go-geom"

	"github.com/dgraph-io/dgraph/types"
	"github.com/dgraph-io/dgraph/x"
)

// Tokenizer identifiers are unique and can't be reused.
// The range 0x00 - 0x79 is system reserved.
// The range 0x80 - 0xff is for custom tokenizers.
// TODO: use these everywhere where we must ensure a system tokenizer.
const (
	IdentNone     = 0x0
	IdentTerm     = 0x1
	IdentExact    = 0x2
	IdentYear     = 0x4
	IdentGeo      = 0x5
	IdentInt      = 0x6
	IdentFloat    = 0x7
	IdentFullText = 0x8
	IdentBool     = 0x9
	IdentTrigram  = 0xA
	IdentHash     = 0xB
	IdentMonth    = 0x41
	IdentDay      = 0x42
	IdentHour     = 0x43
)
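
// Illustrative sketch, not part of the original file: an index token is the
// tokenizer's identifier byte followed by the raw token bytes, so tokens from
// different tokenizers on the same predicate never collide. This hypothetical
// helper shows "dgraph" under the term tokenizer becoming "\x01dgraph"
// (see encodeToken below).
func exampleIdentifierPrefix() string {
	return encodeToken("dgraph", IdentTerm) // "\x01" + "dgraph"
}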

// Tokenizer defines what a tokenizer must provide.
type Tokenizer interface {
	// Name returns the name of the tokenizer. It should be unique.
	Name() string

	// Type returns the string representation of the typeID that we care about.
	Type() string

	// Tokens returns tokens for the given value. The tokens shouldn't be
	// encoded with the byte identifier.
	Tokens(interface{}) ([]string, error)

	// Identifier returns the prefix byte for this token type. It should be
	// unique. The range 0x80 to 0xff (inclusive) is reserved for user-provided
	// custom tokenizers.
	Identifier() byte

	// IsSortable returns true if the tokenizer can be used for sorting/ordering.
	IsSortable() bool

	// IsLossy returns true if we don't store the values directly as index keys
	// during tokenization. If a predicate is tokenized using a lossy tokenizer,
	// we need to fetch the actual value and compare.
	IsLossy() bool
}

var tokenizers = make(map[string]Tokenizer)

func init() {
	registerTokenizer(GeoTokenizer{})
	registerTokenizer(IntTokenizer{})
	registerTokenizer(FloatTokenizer{})
	registerTokenizer(YearTokenizer{})
	registerTokenizer(HourTokenizer{})
	registerTokenizer(MonthTokenizer{})
	registerTokenizer(DayTokenizer{})
	registerTokenizer(ExactTokenizer{})
	registerTokenizer(BoolTokenizer{})
	registerTokenizer(TrigramTokenizer{})
	registerTokenizer(HashTokenizer{})
	registerTokenizer(TermTokenizer{})
	registerTokenizer(FullTextTokenizer{})
	setupBleve()
}

// BuildTokens tokenizes a value, creating strings that can be used to create
// index keys.
func BuildTokens(val interface{}, t Tokenizer) ([]string, error) {
	tokens, err := t.Tokens(val)
	if err != nil {
		return nil, err
	}
	id := t.Identifier()
	for i := range tokens {
		tokens[i] = encodeToken(tokens[i], id)
	}
	return tokens, nil
}
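
// Illustrative sketch, not part of the original file: the usual flow is to
// look up a registered tokenizer by name and hand it to BuildTokens, which
// returns tokens already carrying their identifier-byte prefix.
func exampleBuildTermTokens(value string) ([]string, error) {
	t, ok := GetTokenizer("term")
	if !ok {
		return nil, x.Errorf("tokenizer %q not registered", "term")
	}
	return BuildTokens(value, t)
}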

// LoadCustomTokenizer loads a custom tokenizer from the given shared object
// file and registers it.
func LoadCustomTokenizer(soFile string) {
	glog.Infof("Loading custom tokenizer from %q", soFile)
	pl, err := plugin.Open(soFile)
	x.Checkf(err, "could not open custom tokenizer plugin file")
	symb, err := pl.Lookup("Tokenizer")
	x.Checkf(err, `could not find symbol "Tokenizer" while loading custom tokenizer: %v`, err)

	// Let any type assertion panics occur, since they will contain a message
	// telling the user what went wrong. Otherwise it's hard to capture this
	// information to pass on to the user.
	tokenizer := symb.(func() interface{})().(PluginTokenizer)

	id := tokenizer.Identifier()
	x.AssertTruef(id >= 0x80,
		"custom tokenizer identifier byte must be >= 0x80, but was %#x", id)
	registerTokenizer(CustomTokenizer{PluginTokenizer: tokenizer})
}
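
// Illustrative sketch, not part of the original file: to match the Lookup and
// type assertions above, a plugin is a main package built with
// `go build -buildmode=plugin` that exports a function named Tokenizer
// returning a value satisfying PluginTokenizer. A hypothetical plugin:
//
//	package main
//
//	type CIDRTokenizer struct{}
//
//	func (CIDRTokenizer) Name() string     { return "cidr" }
//	func (CIDRTokenizer) Type() string     { return "string" }
//	func (CIDRTokenizer) Identifier() byte { return 0xff } // must be >= 0x80
//	func (CIDRTokenizer) Tokens(v interface{}) ([]string, error) {
//		// Tokenize v.(string) however the plugin sees fit.
//		return []string{v.(string)}, nil
//	}
//
//	func Tokenizer() interface{} { return CIDRTokenizer{} }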

// GetTokenizer returns the tokenizer registered under the given unique name,
// and whether it was found.
func GetTokenizer(name string) (Tokenizer, bool) {
	t, found := tokenizers[name]
	return t, found
}

func registerTokenizer(t Tokenizer) {
	_, ok := tokenizers[t.Name()]
	x.AssertTruef(!ok, "Duplicate tokenizer: %s", t.Name())
	_, ok = types.TypeForName(t.Type())
	x.AssertTruef(ok, "Invalid type %q for tokenizer %s", t.Type(), t.Name())
	tokenizers[t.Name()] = t
}

type GeoTokenizer struct{}

func (t GeoTokenizer) Name() string { return "geo" }
func (t GeoTokenizer) Type() string { return "geo" }
func (t GeoTokenizer) Tokens(v interface{}) ([]string, error) {
	return types.IndexGeoTokens(v.(geom.T))
}
func (t GeoTokenizer) Identifier() byte { return 0x5 }
func (t GeoTokenizer) IsSortable() bool { return false }
func (t GeoTokenizer) IsLossy() bool    { return true }

type IntTokenizer struct{}

func (t IntTokenizer) Name() string { return "int" }
func (t IntTokenizer) Type() string { return "int" }
func (t IntTokenizer) Tokens(v interface{}) ([]string, error) {
	return []string{encodeInt(v.(int64))}, nil
}
func (t IntTokenizer) Identifier() byte { return 0x6 }
func (t IntTokenizer) IsSortable() bool { return true }
func (t IntTokenizer) IsLossy() bool    { return false }

type FloatTokenizer struct{}

func (t FloatTokenizer) Name() string { return "float" }
func (t FloatTokenizer) Type() string { return "float" }
func (t FloatTokenizer) Tokens(v interface{}) ([]string, error) {
	// Floats are indexed by their truncated int64 value, which is why this
	// tokenizer is lossy while the int tokenizer is not.
	return []string{encodeInt(int64(v.(float64)))}, nil
}
func (t FloatTokenizer) Identifier() byte { return 0x7 }
func (t FloatTokenizer) IsSortable() bool { return true }
func (t FloatTokenizer) IsLossy() bool    { return true }

type YearTokenizer struct{}

func (t YearTokenizer) Name() string { return "year" }
func (t YearTokenizer) Type() string { return "datetime" }
func (t YearTokenizer) Tokens(v interface{}) ([]string, error) {
	tval := v.(time.Time)
	buf := make([]byte, 2)
	binary.BigEndian.PutUint16(buf[0:2], uint16(tval.Year()))
	return []string{string(buf)}, nil
}
func (t YearTokenizer) Identifier() byte { return 0x4 }
func (t YearTokenizer) IsSortable() bool { return true }
func (t YearTokenizer) IsLossy() bool    { return true }

type MonthTokenizer struct{}

func (t MonthTokenizer) Name() string { return "month" }
func (t MonthTokenizer) Type() string { return "datetime" }
func (t MonthTokenizer) Tokens(v interface{}) ([]string, error) {
	tval := v.(time.Time)
	buf := make([]byte, 4)
	binary.BigEndian.PutUint16(buf[0:2], uint16(tval.Year()))
	binary.BigEndian.PutUint16(buf[2:4], uint16(tval.Month()))
	return []string{string(buf)}, nil
}
func (t MonthTokenizer) Identifier() byte { return 0x41 }
func (t MonthTokenizer) IsSortable() bool { return true }
func (t MonthTokenizer) IsLossy() bool    { return true }

type DayTokenizer struct{}

func (t DayTokenizer) Name() string { return "day" }
func (t DayTokenizer) Type() string { return "datetime" }
func (t DayTokenizer) Tokens(v interface{}) ([]string, error) {
	tval := v.(time.Time)
	buf := make([]byte, 6)
	binary.BigEndian.PutUint16(buf[0:2], uint16(tval.Year()))
	binary.BigEndian.PutUint16(buf[2:4], uint16(tval.Month()))
	binary.BigEndian.PutUint16(buf[4:6], uint16(tval.Day()))
	return []string{string(buf)}, nil
}
func (t DayTokenizer) Identifier() byte { return 0x42 }
func (t DayTokenizer) IsSortable() bool { return true }
func (t DayTokenizer) IsLossy() bool    { return true }

type HourTokenizer struct{}

func (t HourTokenizer) Name() string { return "hour" }
func (t HourTokenizer) Type() string { return "datetime" }
func (t HourTokenizer) Tokens(v interface{}) ([]string, error) {
	tval := v.(time.Time)
	buf := make([]byte, 8)
	binary.BigEndian.PutUint16(buf[0:2], uint16(tval.Year()))
	binary.BigEndian.PutUint16(buf[2:4], uint16(tval.Month()))
	binary.BigEndian.PutUint16(buf[4:6], uint16(tval.Day()))
	binary.BigEndian.PutUint16(buf[6:8], uint16(tval.Hour()))
	return []string{string(buf)}, nil
}
func (t HourTokenizer) Identifier() byte { return 0x43 }
func (t HourTokenizer) IsSortable() bool { return true }
func (t HourTokenizer) IsLossy() bool    { return true }
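
// Illustrative sketch, not part of the original file: the datetime tokenizers
// above concatenate big-endian uint16 fields, so byte-wise comparison of
// tokens matches chronological order. An hour token for 2006-01-02T15:00Z
// would consist of the eight bytes built below.
func exampleHourTokenBytes() []byte {
	buf := make([]byte, 8)
	binary.BigEndian.PutUint16(buf[0:2], 2006) // year
	binary.BigEndian.PutUint16(buf[2:4], 1)    // month
	binary.BigEndian.PutUint16(buf[4:6], 2)    // day
	binary.BigEndian.PutUint16(buf[6:8], 15)   // hour
	return buf
}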

type TermTokenizer struct{}

func (t TermTokenizer) Name() string { return "term" }
func (t TermTokenizer) Type() string { return "string" }
func (t TermTokenizer) Tokens(v interface{}) ([]string, error) {
	str, ok := v.(string)
	if !ok || str == "" {
		return []string{str}, nil
	}
	tokens := termAnalyzer.Analyze([]byte(str))
	return uniqueTerms(tokens), nil
}
func (t TermTokenizer) Identifier() byte { return 0x1 }
func (t TermTokenizer) IsSortable() bool { return false }
func (t TermTokenizer) IsLossy() bool    { return true }

type ExactTokenizer struct{}

func (t ExactTokenizer) Name() string { return "exact" }
func (t ExactTokenizer) Type() string { return "string" }
func (t ExactTokenizer) Tokens(v interface{}) ([]string, error) {
	if term, ok := v.(string); ok {
		return []string{term}, nil
	}
	return nil, x.Errorf("Exact indices only supported for string types")
}
func (t ExactTokenizer) Identifier() byte { return 0x2 }
func (t ExactTokenizer) IsSortable() bool { return true }
func (t ExactTokenizer) IsLossy() bool    { return false }

type FullTextTokenizer struct{ lang string }

func (t FullTextTokenizer) Name() string { return "fulltext" }
func (t FullTextTokenizer) Type() string { return "string" }
func (t FullTextTokenizer) Tokens(v interface{}) ([]string, error) {
	str, ok := v.(string)
	if !ok || str == "" {
		return []string{}, nil
	}
	lang := langBase(t.lang)
	// pass 1 - lowercase and normalize input
	tokens := fulltextAnalyzer.Analyze([]byte(str))
	// pass 2 - filter stop words
	tokens = filterStopwords(lang, tokens)
	// pass 3 - filter stems
	tokens = filterStemmers(lang, tokens)
	// finally, return the terms.
	return uniqueTerms(tokens), nil
}
func (t FullTextTokenizer) Identifier() byte { return 0x8 }
func (t FullTextTokenizer) IsSortable() bool { return false }
func (t FullTextTokenizer) IsLossy() bool    { return true }

func encodeInt(val int64) string {
	// A leading sign byte (0 for negative, 1 for non-negative) before the
	// big-endian two's-complement bytes keeps byte-wise comparison of encoded
	// tokens consistent with numeric order.
	buf := make([]byte, 9)
	binary.BigEndian.PutUint64(buf[1:], uint64(val))
	if val < 0 {
		buf[0] = 0
	} else {
		buf[0] = 1
	}
	return string(buf)
}
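
// Illustrative sketch, not part of the original file: because of the sign
// byte, plain string comparison of encoded tokens agrees with int64 order,
// including across the sign boundary.
func exampleEncodeIntOrder() bool {
	return encodeInt(-2) < encodeInt(-1) &&
		encodeInt(-1) < encodeInt(0) &&
		encodeInt(0) < encodeInt(1)
}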

func encodeToken(tok string, typ byte) string {
	return string(typ) + tok
}

// EncodeGeoTokens prefixes each token, in place, with the geo tokenizer's
// identifier byte.
func EncodeGeoTokens(tokens []string) {
	for i := 0; i < len(tokens); i++ {
		tokens[i] = encodeToken(tokens[i], GeoTokenizer{}.Identifier())
	}
}

// EncodeRegexTokens prefixes each token, in place, with the trigram
// tokenizer's identifier byte.
func EncodeRegexTokens(tokens []string) {
	for i := 0; i < len(tokens); i++ {
		tokens[i] = encodeToken(tokens[i], TrigramTokenizer{}.Identifier())
	}
}

type BoolTokenizer struct{}

func (t BoolTokenizer) Name() string { return "bool" }
func (t BoolTokenizer) Type() string { return "bool" }
func (t BoolTokenizer) Tokens(v interface{}) ([]string, error) {
	var b int64
	if v.(bool) {
		b = 1
	}
	return []string{encodeInt(b)}, nil
}
func (t BoolTokenizer) Identifier() byte { return 0x9 }
func (t BoolTokenizer) IsSortable() bool { return false }
func (t BoolTokenizer) IsLossy() bool    { return false }

type TrigramTokenizer struct{}

func (t TrigramTokenizer) Name() string { return "trigram" }
func (t TrigramTokenizer) Type() string { return "string" }
func (t TrigramTokenizer) Tokens(v interface{}) ([]string, error) {
	value, ok := v.(string)
	if !ok {
		return nil, x.Errorf("Trigram indices only supported for string types")
	}
	l := len(value) - 2
	if l > 0 {
		tokens := make([]string, l)
		for i := 0; i < l; i++ {
			tokens[i] = value[i : i+3]
		}
		tokens = x.RemoveDuplicates(tokens)
		return tokens, nil
	}
	return nil, nil
}
func (t TrigramTokenizer) Identifier() byte { return 0xA }
func (t TrigramTokenizer) IsSortable() bool { return false }
func (t TrigramTokenizer) IsLossy() bool    { return true }
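
// Illustrative sketch, not part of the original file: the trigram tokenizer
// emits every overlapping 3-byte window of the input, deduplicated; inputs
// shorter than three bytes yield no tokens.
func exampleTrigrams() ([]string, error) {
	// "dgraph" yields "dgr", "gra", "rap", "aph".
	return TrigramTokenizer{}.Tokens("dgraph")
}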

type HashTokenizer struct{}

func (t HashTokenizer) Name() string { return "hash" }
func (t HashTokenizer) Type() string { return "string" }
func (t HashTokenizer) Tokens(v interface{}) ([]string, error) {
	term, ok := v.(string)
	if !ok {
		return nil, x.Errorf("Hash tokenizer only supported for string types")
	}
	hash := x.Hash256([]byte(term))
	if len(hash) == 0 {
		return nil, x.Errorf("Hash tokenizer failed to create hash")
	}
	return []string{string(hash)}, nil
}
func (t HashTokenizer) Identifier() byte { return 0xB }
func (t HashTokenizer) IsSortable() bool { return false }
func (t HashTokenizer) IsLossy() bool    { return true }

// PluginTokenizer is implemented by external plugins loaded dynamically via
// *.so files. It follows the implementation semantics of the Tokenizer
// interface.
//
// Think carefully before modifying this interface, as it would break users' plugins.
type PluginTokenizer interface {
	Name() string
	Type() string
	Tokens(interface{}) ([]string, error)
	Identifier() byte
}

type CustomTokenizer struct{ PluginTokenizer }

// It doesn't make sense for plugins to implement the following methods, so
// they're hardcoded.
func (t CustomTokenizer) IsSortable() bool { return false }
func (t CustomTokenizer) IsLossy() bool    { return true }