fix(DQL): ignore ordering of indexes in schema with eq function (DGRAPH-2601) (#6996)

Fixes DGRAPH-2601

Previously, the following schema:
```
name: string @index(trigram, term) .
```
with some added data, and the following query:
```
query {
	q(func: eq(name, "Alice", "Bob")) {
		uid
		name
	}
}
```
would error out, saying the predicate doesn't have a valid tokenizer:
```
{
  "errors": [
    {
      "message": ": Attribute name does not have a valid tokenizer.",
      "extensions": {
        "code": "ErrorInvalidRequest"
      }
    }
  ],
  "data": null
}
```
even though the `term` index is present on the predicate.
On the other hand, if you reversed the order of indexes:
```
name: string @index(term, trigram) .
```
the query would return the correct results:
```
{
  "data": {
    "q": [
      {
        "uid": "0x2",
        "name": "Alice",
        "age": 20
      },
      {
        "uid": "0x3",
        "name": "Bob",
        "age": 25
      }
    ]
  }
}
```

This PR fixes the above issue: for `eq` on string predicates, `pickTokenizer` now prefers any non-trigram tokenizer before falling back to the first index listed in the schema, so index order no longer matters.
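For context, both `term` and `trigram` are lossy tokenizers (neither stores the full value as a single token), so the `eq` path found no non-lossy tokenizer and previously fell through to `tokenizers[0]`, whichever index the schema happened to list first. Here is a self-contained toy model of the selection logic, with hypothetical names; the actual code lives in `worker/tokens.go` and appears in the diff below:

```go
package main

import "fmt"

type tokenizer struct {
	name  string
	lossy bool
}

// pick prefers a non-lossy tokenizer for eq; failing that, the fix prefers
// any non-trigram tokenizer before falling back to the first declared index.
func pick(tokenizers []tokenizer) tokenizer {
	for _, t := range tokenizers {
		if !t.lossy { // e.g. exact or hash
			return t
		}
	}
	for _, t := range tokenizers {
		if t.name != "trigram" { // the fix: skip trigram for string eq
			return t
		}
	}
	return tokenizers[0] // old behavior: schema order decided the outcome
}

func main() {
	term := tokenizer{name: "term", lossy: true}
	trigram := tokenizer{name: "trigram", lossy: true}
	// Index order in the schema no longer matters:
	fmt.Println(pick([]tokenizer{trigram, term}).name) // term
	fmt.Println(pick([]tokenizer{term, trigram}).name) // term
}
```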

(cherry picked from commit 0b11439)
abhimanyusinghgaur authored and OmarAyo committed Dec 1, 2020
1 parent dd00c99 commit 497220a
Showing 3 changed files with 49 additions and 3 deletions.
36 changes: 36 additions & 0 deletions systest/queries_test.go
@@ -53,6 +53,7 @@ func TestQuery(t *testing.T) {
 	t.Run("hash index queries", wrap(QueryHashIndex))
 	t.Run("fuzzy matching", wrap(FuzzyMatch))
 	t.Run("regexp with toggled trigram index", wrap(RegexpToggleTrigramIndex))
+	t.Run("eq with altering order of trigram and term index", wrap(EqWithAlteredIndexOrder))
 	t.Run("groupby uid that works", wrap(GroupByUidWorks))
 	t.Run("cleanup", wrap(SchemaQueryCleanup))
 }
@@ -868,6 +869,41 @@ func RegexpToggleTrigramIndex(t *testing.T, c *dgo.Dgraph) {
 	require.Contains(t, err.Error(), "Attribute name does not have trigram index for regex matching.")
 }
 
+func EqWithAlteredIndexOrder(t *testing.T, c *dgo.Dgraph) {
+	ctx := context.Background()
+
+	// first, let's set the schema with term before trigram
+	op := &api.Operation{Schema: `name: string @index(term, trigram) .`}
+	require.NoError(t, c.Alter(ctx, op))
+
+	// fill up some data
+	txn := c.NewTxn()
+	_, err := txn.Mutate(ctx, &api.Mutation{
+		SetNquads: []byte(`
+		_:x1 <name> "Alice" .
+		_:x2 <name> "Bob" .
+		`),
+	})
+	require.NoError(t, err)
+	require.NoError(t, txn.Commit(ctx))
+
+	// querying with eq should work
+	q := `{q(func: eq(name, "Alice")) {name}}`
+	expectedResult := `{"q":[{"name":"Alice"}]}`
+	resp, err := c.NewReadOnlyTxn().Query(ctx, q)
+	require.NoError(t, err)
+	testutil.CompareJSON(t, expectedResult, string(resp.Json))
+
+	// now, let's set the schema with trigram before term
+	op = &api.Operation{Schema: `name: string @index(trigram, term) .`}
+	require.NoError(t, c.Alter(ctx, op))
+
+	// querying with eq should still work
+	resp, err = c.NewReadOnlyTxn().Query(ctx, q)
+	require.NoError(t, err)
+	testutil.CompareJSON(t, expectedResult, string(resp.Json))
+}
+
 func GroupByUidWorks(t *testing.T, c *dgo.Dgraph) {
 	ctx := context.Background()
 
2 changes: 1 addition & 1 deletion wiki/content/query-language/functions.md
@@ -265,7 +265,7 @@ Index Required: An index is required for the `eq(predicate, ...)` forms (see tab
 | `int` | `int` |
 | `float` | `float` |
 | `bool` | `bool` |
-| `string` | `exact`, `hash` |
+| `string` | `exact`, `hash`, `term`, `fulltext` |
 | `dateTime` | `dateTime` |
 
 Test for equality of a predicate or variable to a value or find in a list of values.
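To put the docs change in context, a minimal client-side sketch of an `eq` query that now works when the string predicate carries only a `term` (or `fulltext`) index. This is illustrative only, not part of the commit; it assumes a dgo v200 client and a local Alpha on the default gRPC port:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/dgraph-io/dgo/v200"
	"github.com/dgraph-io/dgo/v200/protos/api"
	"google.golang.org/grpc"
)

func main() {
	// Hypothetical connection details: a local Dgraph Alpha at localhost:9080.
	conn, err := grpc.Dial("localhost:9080", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	dg := dgo.NewDgraphClient(api.NewDgraphClient(conn))

	// eq can take a list of values; with this change, exact, hash, term, or
	// fulltext on the string predicate all satisfy its index requirement.
	q := `{q(func: eq(name, "Alice", "Bob")) { uid name }}`
	resp, err := dg.NewReadOnlyTxn().Query(context.Background(), q)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(resp.Json))
}
```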
14 changes: 12 additions & 2 deletions worker/tokens.go
@@ -85,7 +85,7 @@ func pickTokenizer(ctx context.Context, attr string, f string) (tok.Tokenizer, e
 
 	tokenizers := schema.State().Tokenizer(ctx, attr)
 	for _, t := range tokenizers {
-		// If function is eq and we found a tokenizer thats !Lossy(), lets return it
+		// If function is eq and we found a tokenizer that's !Lossy(), lets return it
 		switch f {
 		case "eq":
 			// For equality, find a non-lossy tokenizer.
@@ -105,7 +105,17 @@ func pickTokenizer(ctx context.Context, attr string, f string) (tok.Tokenizer, e
 		return nil, errors.Errorf("Attribute:%s does not have proper index for comparison", attr)
 	}
 
-	// We didn't find a sortable or !isLossy() tokenizer, lets return the first one.
+	// If we didn't find a !isLossy() tokenizer for eq function on string type predicates,
+	// then let's see if we can find a non-trigram tokenizer
+	if typ, err := schema.State().TypeOf(attr); err == nil && typ == types.StringID {
+		for _, t := range tokenizers {
+			if t.Identifier() != tok.IdentTrigram {
+				return t, nil
+			}
+		}
+	}
+
+	// otherwise, lets return the first one.
 	return tokenizers[0], nil
 }
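Why trigram specifically is skipped: a trigram index stores 3-character grams of the value, so none of its tokens can ever equal the full string that `eq` looks up, whereas a term token for a single-word value can. A toy illustration of this, not Dgraph's actual tokenizer implementation:

```go
package main

import "fmt"

// trigrams returns the 3-grams of s, roughly what a trigram index stores.
func trigrams(s string) []string {
	var grams []string
	for i := 0; i+3 <= len(s); i++ {
		grams = append(grams, s[i:i+3])
	}
	return grams
}

func main() {
	// None of these tokens equals "Alice", so an eq lookup keyed on the full
	// value can never be served by the trigram index directly.
	fmt.Println(trigrams("Alice")) // [Ali lic ice]
}
```

With only lossy tokenizers available, preferring `term` over `trigram` is the only choice that can serve the lookup, which is what the new fallback encodes.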

