Skip to content

Commit

Permalink
Invalid UID checks (#5243) (#5251)
Browse files Browse the repository at this point in the history
Fixes #DGRAPH-1272
Fixes #5238

(cherry-picked from commit 09b2def)
  • Loading branch information
parasssh authored Apr 20, 2020
1 parent fd389b0 commit 072f33e
Show file tree
Hide file tree
Showing 8 changed files with 37 additions and 8 deletions.
6 changes: 6 additions & 0 deletions dgraph/cmd/bulk/mapper.go
Original file line number Diff line number Diff line change
Expand Up @@ -231,10 +231,16 @@ func (m *mapper) addMapEntry(key []byte, p *pb.Posting, shard int) {

func (m *mapper) processNQuad(nq gql.NQuad) {
sid := m.uid(nq.GetSubject())
if sid == 0 {
panic(fmt.Sprintf("invalid UID with value 0 for %v", nq.GetSubject()))
}
var oid uint64
var de *pb.DirectedEdge
if nq.GetObjectValue() == nil {
oid = m.uid(nq.GetObjectId())
if oid == 0 {
panic(fmt.Sprintf("invalid UID with value 0 for %v", nq.GetObjectId()))
}
de = nq.CreateUidEdge(sid, oid)
} else {
var err error
Expand Down
2 changes: 1 addition & 1 deletion gql/mutation.go
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ import (
)

var (
errInvalidUID = errors.New("UID has to be greater than one")
errInvalidUID = errors.New("UID must be greater than 0")
)

// Mutation stores the strings corresponding to set and delete operations.
Expand Down
4 changes: 3 additions & 1 deletion posting/index.go
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,9 @@ func (txn *Txn) addIndexMutations(ctx context.Context, info *indexMutationInfo)

attr := info.edge.Attr
uid := info.edge.Entity
x.AssertTrue(uid != 0)
if uid == 0 {
return errors.New("invalid UID with value 0")
}
tokens, err := indexTokens(ctx, info)
if err != nil {
// This data is not indexable
Expand Down
1 change: 1 addition & 0 deletions worker/backup_processor.go
Original file line number Diff line number Diff line change
Expand Up @@ -141,6 +141,7 @@ func (pr *BackupProcessor) WriteBackup(ctx context.Context) (*pb.Status, error)
stream.ChooseKey = func(item *badger.Item) bool {
parsedKey, err := x.Parse(item.Key())
if err != nil {
glog.Errorf("error %v while parsing key %v during backup. Skip.", err, hex.EncodeToString(item.Key()))
return false
}

Expand Down
2 changes: 2 additions & 0 deletions worker/draft.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ package worker
import (
"bytes"
"context"
"encoding/hex"
"fmt"
"sort"
"sync"
Expand Down Expand Up @@ -240,6 +241,7 @@ func detectPendingTxns(attr string) error {
tctxs := posting.Oracle().IterateTxns(func(key []byte) bool {
pk, err := x.Parse(key)
if err != nil {
glog.Errorf("error %v while parsing key %v", err, hex.EncodeToString(key))
return false
}
return pk.Attr == attr
Expand Down
3 changes: 3 additions & 0 deletions worker/export.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ import (
"bytes"
"compress/gzip"
"context"
"encoding/hex"
"encoding/json"
"fmt"
"os"
Expand Down Expand Up @@ -439,6 +440,7 @@ func export(ctx context.Context, in *pb.ExportRequest) error {
stream.ChooseKey = func(item *badger.Item) bool {
pk, err := x.Parse(item.Key())
if err != nil {
glog.Errorf("error %v while parsing key %v during export. Skip.", err, hex.EncodeToString(item.Key()))
return false
}

Expand Down Expand Up @@ -469,6 +471,7 @@ func export(ctx context.Context, in *pb.ExportRequest) error {
item := itr.Item()
pk, err := x.Parse(item.Key())
if err != nil {
glog.Errorf("error %v while parsing key %v during export. Skip.", err, hex.EncodeToString(item.Key()))
return nil, err
}
e := &exporter{
Expand Down
6 changes: 4 additions & 2 deletions x/keys.go
Original file line number Diff line number Diff line change
Expand Up @@ -439,7 +439,7 @@ func Parse(key []byte) (ParsedKey, error) {
var p ParsedKey

if len(key) == 0 {
return p, errors.Errorf("0 length key")
return p, errors.New("0 length key")
}

p.bytePrefix = key[0]
Expand Down Expand Up @@ -476,7 +476,9 @@ func Parse(key []byte) (ParsedKey, error) {
return p, errors.Errorf("uid length < 8 for key: %q, parsed key: %+v", key, p)
}
p.Uid = binary.BigEndian.Uint64(k)

if p.Uid == 0 {
return p, errors.Errorf("Invalid UID with value 0 for key: %v", key)
}
if !p.HasStartUid {
break
}
Expand Down
21 changes: 17 additions & 4 deletions x/keys_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,14 @@ import (

func TestDataKey(t *testing.T) {
var uid uint64
for uid = 0; uid < 1001; uid++ {

// key with uid = 0 is invalid
uid = 0
key := DataKey("bad uid", uid)
_, err := Parse(key)
require.Error(t, err)

for uid = 1; uid < 1001; uid++ {
// Use the uid to derive the attribute so it has variable length and the test
// can verify that multiple sizes of attr work correctly.
sattr := fmt.Sprintf("attr:%d", uid)
Expand Down Expand Up @@ -58,7 +65,7 @@ func TestDataKey(t *testing.T) {
func TestParseDataKeyWithStartUid(t *testing.T) {
var uid uint64
startUid := uint64(math.MaxUint64)
for uid = 0; uid < 1001; uid++ {
for uid = 1; uid < 1001; uid++ {
sattr := fmt.Sprintf("attr:%d", uid)
key := DataKey(sattr, uid)
key, err := SplitKey(key, startUid)
Expand Down Expand Up @@ -113,7 +120,7 @@ func TestIndexKeyWithStartUid(t *testing.T) {

func TestReverseKey(t *testing.T) {
var uid uint64
for uid = 0; uid < 1001; uid++ {
for uid = 1; uid < 1001; uid++ {
sattr := fmt.Sprintf("attr:%d", uid)

key := ReverseKey(sattr, uid)
Expand All @@ -129,7 +136,7 @@ func TestReverseKey(t *testing.T) {
func TestReverseKeyWithStartUid(t *testing.T) {
var uid uint64
startUid := uint64(math.MaxUint64)
for uid = 0; uid < 1001; uid++ {
for uid = 1; uid < 1001; uid++ {
sattr := fmt.Sprintf("attr:%d", uid)

key := ReverseKey(sattr, uid)
Expand Down Expand Up @@ -252,4 +259,10 @@ func TestBadKeys(t *testing.T) {
key = []byte{1, 0x00, 0x04, 1, 2}
_, err = Parse(key)
require.Error(t, err)

// key with uid = 0 is invalid
uid := 0
key = DataKey("bad uid", uint64(uid))
_, err = Parse(key)
require.Error(t, err)
}

0 comments on commit 072f33e

Please sign in to comment.