Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Support @normalize directive for subqueries #4042

Merged
merged 26 commits into from
Oct 11, 2019
Merged
Show file tree
Hide file tree
Changes from 25 commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
fcf5cdb
Support @normalize directive for subqueries
animesh2049 Sep 23, 2019
5a6b831
Address PR comments
animesh2049 Sep 23, 2019
e59dc97
Change flattenResult signature
animesh2049 Sep 23, 2019
da985b1
Add tests for @normalize at subqueries
animesh2049 Sep 24, 2019
4e511d2
Fix TestReflexive3
animesh2049 Sep 24, 2019
a924a78
Change function name
animesh2049 Sep 24, 2019
e42dcd5
Set isChild parameters while creating new nodes
animesh2049 Sep 24, 2019
3d99352
Minor style changes
animesh2049 Sep 24, 2019
f155e41
Add documentation for extended normalize
animesh2049 Sep 24, 2019
9e77e2d
Copy isChild attribute and change json formats
animesh2049 Sep 24, 2019
a1f1250
Style changes
animesh2049 Sep 25, 2019
a4a5ab3
More tests
animesh2049 Sep 25, 2019
bc52111
Add benchmark for normalizeResult
animesh2049 Sep 27, 2019
d6bbe1c
Minor changes
animesh2049 Sep 27, 2019
089bdd8
Normalize while preTraverse
animesh2049 Sep 30, 2019
5a8f16b
Add comment
animesh2049 Sep 30, 2019
c503fd8
Fix output for non list uid types
animesh2049 Oct 3, 2019
7929bd8
Address PR comments
animesh2049 Oct 3, 2019
dbf7191
Add test case for non list uid type
animesh2049 Oct 7, 2019
f43f29b
Normalize non list type with list type child
animesh2049 Oct 7, 2019
4035201
Style changes
animesh2049 Oct 7, 2019
e9ce5c7
Add everything as list for normalize
animesh2049 Oct 9, 2019
7b623d8
Add documentation about behavior
animesh2049 Oct 9, 2019
bbf6b69
Remove left over conflict characters
animesh2049 Oct 9, 2019
3d3a989
Address PR comments
animesh2049 Oct 10, 2019
913a7e1
Merge branch 'master' into animesh2049/normalize_subqueries
animesh2049 Oct 11, 2019
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions gql/parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -2239,6 +2239,8 @@ func parseDirective(it *lex.ItemIterator, curp *GraphQuery) error {
}
} else if item.Val == "cascade" {
curp.Cascade = true
} else if item.Val == "normalize" {
curp.Normalize = true
} else if peek[0].Typ == itemLeftRound {
// this is directive
switch item.Val {
Expand Down
43 changes: 43 additions & 0 deletions query/common_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -270,6 +270,10 @@ state : [uid] .
county : [uid] .
firstName : string .
lastName : string .
newname : string @index(exact, term) .
newage : int .
boss : uid .
newfriend : [uid] .
`

func populateCluster() {
Expand Down Expand Up @@ -566,6 +570,45 @@ func populateCluster() {
_:har <lastName> "Ford" .
_:ss <firstName> "Steven" .
_:ss <lastName> "Spielberg" .

<501> <newname> "P1" .
<502> <newname> "P2" .
<503> <newname> "P3" .
<504> <newname> "P4" .
<505> <newname> "P5" .
<506> <newname> "P6" .
<507> <newname> "P7" .
<508> <newname> "P8" .
<509> <newname> "P9" .
<510> <newname> "P10" .
<511> <newname> "P11" .
<512> <newname> "P12" .

<501> <newage> "21" .
<502> <newage> "22" .
<503> <newage> "23" .
<504> <newage> "24" .
<505> <newage> "25" .
<506> <newage> "26" .
<507> <newage> "27" .
<508> <newage> "28" .
<509> <newage> "29" .
<510> <newage> "30" .
<511> <newage> "31" .
<512> <newage> "32" .

<501> <newfriend> <502> .
<501> <newfriend> <503> .
<501> <boss> <504> .
<502> <newfriend> <505> .
<502> <newfriend> <506> .
<503> <newfriend> <507> .
<503> <newfriend> <508> .
<504> <newfriend> <509> .
<504> <newfriend> <510> .
<502> <boss> <510> .
<510> <newfriend> <511> .
<510> <newfriend> <512> .
`)

addGeoPointToCluster(1, "loc", []float64{1.1, 2.0})
Expand Down
67 changes: 59 additions & 8 deletions query/outputnode.go
Original file line number Diff line number Diff line change
Expand Up @@ -295,10 +295,19 @@ func merge(parent [][]*fastJsonNode, child [][]*fastJsonNode) ([][]*fastJsonNode
return mergedList, nil
}

// normalize returns all attributes of fj and its children (if any).
func (fj *fastJsonNode) normalize() ([][]*fastJsonNode, error) {
cnt := 0
for _, a := range fj.attrs {
if a.isChild {
// Here we are counting all non-scalar attributes of fj. If there are any such
// attributes, we will flatten them; otherwise we will return all attributes.

// When we call addMapChild it tries to find whether there is already an attribute
// with attr field same as attribute argument of addMapChild. If it doesn't find any
// such attribute, it creates an attribute with isChild = false. In those cases
// sometimes cnt remains zero and normalize returns attributes without flattening.
// So we are using len(a.attrs) > 0 instead of a.isChild
if len(a.attrs) > 0 {
cnt++
}
}
Expand All @@ -314,25 +323,23 @@ func (fj *fastJsonNode) normalize() ([][]*fastJsonNode, error) {
// merged with children later.
attrs := make([]*fastJsonNode, 0, len(fj.attrs)-cnt)
for _, a := range fj.attrs {
if !a.isChild {
// Check comment at previous occurrence of len(a.attrs) > 0
if len(a.attrs) == 0 {
attrs = append(attrs, a)
}
}
parentSlice = append(parentSlice, attrs)

for ci := 0; ci < len(fj.attrs); {
childNode := fj.attrs[ci]
if !childNode.isChild {
// Check comment at previous occurrence of len(a.attrs) > 0
if len(childNode.attrs) == 0 {
ci++
continue
}
childSlice := make([][]*fastJsonNode, 0, 5)
for ci < len(fj.attrs) && childNode.attr == fj.attrs[ci].attr {
normalized, err := fj.attrs[ci].normalize()
if err != nil {
return nil, err
}
childSlice = append(childSlice, normalized...)
childSlice = append(childSlice, fj.attrs[ci].attrs)
ci++
}
// Merging with parent.
Expand Down Expand Up @@ -670,6 +677,7 @@ func (sg *SubGraph) preTraverse(uid uint64, dst outputNode) error {
if sg.Params.IgnoreReflex {
pc.Params.ParentIds = sg.Params.ParentIds
}

// We create as many predicate entity children as the length of uids for
// this predicate.
ul := pc.uidMatrix[idx]
Expand Down Expand Up @@ -708,13 +716,56 @@ func (sg *SubGraph) preTraverse(uid uint64, dst outputNode) error {
if sg.Params.GetUid {
uc.SetUID(childUID, "uid")
}
if pc.Params.Normalize {
// We will normalize at each level instead of
// calling normalize after pretraverse.
// Now normalize() only flattens one level,
// the expectation is that its children have
// already been normalized.
normAttrs, err := uc.(*fastJsonNode).normalize()
if err != nil {
return err
}

for _, c := range normAttrs {
// Adding as list child irrespective of the type of pc
// (list or non-list), otherwise result might be inconsistent or might
// depend on children and grandchildren of pc. Consider the case:
// boss: uid .
// friend: [uid] .
// name: string .
// For query like:
// {
// me(func: uid(0x1)) {
// boss @normalize {
// name
// }
// }
// }
// boss will be non list type in response, but for query like:
// {
// me(func: uid(0x1)) {
// boss @normalize {
// friend {
// name
// }
// }
// }
// }
// boss should be of list type because there can be multiple friends of
// boss.
dst.AddListChild(fieldName, &fastJsonNode{attrs: c})
}
continue
}
if pc.List {
dst.AddListChild(fieldName, uc)
} else {
dst.AddMapChild(fieldName, uc, false)
}
}
}

if pc.Params.UidCount && !(pc.Params.UidCountAlias == "" && pc.Params.Normalize) {
uc := dst.New(fieldName)
c := types.ValueForType(types.IntID)
Expand Down
65 changes: 0 additions & 65 deletions query/outputnode_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -94,68 +94,3 @@ func TestNormalizeJSONLimit(t *testing.T) {
_, err := n.(*fastJsonNode).normalize()
require.Error(t, err, "Couldn't evaluate @normalize directive - too many results")
}

func TestNormalizeJSONUid1(t *testing.T) {
// Set default normalize limit.
x.Config.NormalizeNodeLimit = 1e4

n := (&fastJsonNode{}).New("root")
require.NotNil(t, n)
child1 := n.New("child1")
child1.SetUID(uint64(1), "uid")
child1.AddValue("attr1", types.ValueForType(types.StringID))
n.AddListChild("child1", child1)

child2 := n.New("child2")
child2.SetUID(uint64(2), "uid")
child2.AddValue("attr2", types.ValueForType(types.StringID))
child1.AddListChild("child2", child2)

child3 := n.New("child3")
child3.SetUID(uint64(3), "uid")
child3.AddValue("attr3", types.ValueForType(types.StringID))
child2.AddListChild("child3", child3)

normalized, err := n.(*fastJsonNode).normalize()
require.NoError(t, err)
require.NotNil(t, normalized)
nn := (&fastJsonNode{}).New("root")
for _, c := range normalized {
nn.AddListChild("alias", &fastJsonNode{attrs: c})
}

var b bytes.Buffer
nn.(*fastJsonNode).encode(&b)
require.JSONEq(t, `{"alias":[{"uid":"0x3","attr1":"","attr2":"","attr3":""}]}`, b.String())
}

func TestNormalizeJSONUid2(t *testing.T) {
n := (&fastJsonNode{}).New("root")
require.NotNil(t, n)
child1 := n.New("child1")
child1.SetUID(uint64(1), "uid")
child1.AddValue("___attr1", types.ValueForType(types.StringID))
n.AddListChild("child1", child1)

child2 := n.New("child2")
child2.SetUID(uint64(2), "uid")
child2.AddValue("___attr2", types.ValueForType(types.StringID))
child1.AddListChild("child2", child2)

child3 := n.New("child3")
child3.SetUID(uint64(3), "uid")
child3.AddValue(fmt.Sprintf("attr3"), types.ValueForType(types.StringID))
child2.AddListChild("child3", child3)

normalized, err := n.(*fastJsonNode).normalize()
require.NoError(t, err)
require.NotNil(t, normalized)
nn := (&fastJsonNode{}).New("root")
for _, c := range normalized {
nn.AddListChild("alias", &fastJsonNode{attrs: c})
}

var b bytes.Buffer
nn.(*fastJsonNode).encode(&b)
require.JSONEq(t, `{"alias":[{"___attr1":"","___attr2":"","uid":"0x3","attr3":""}]}`, b.String())
}
2 changes: 1 addition & 1 deletion query/query.go
Original file line number Diff line number Diff line change
Expand Up @@ -526,7 +526,7 @@ func treeCopy(gq *gql.GraphQuery, sg *SubGraph) error {
IgnoreReflex: sg.Params.IgnoreReflex,
Langs: gchild.Langs,
NeedsVar: append(gchild.NeedsVar[:0:0], gchild.NeedsVar...),
Normalize: sg.Params.Normalize,
Normalize: gchild.Normalize || sg.Params.Normalize,
Order: gchild.Order,
Var: gchild.Var,
GroupbyAttrs: gchild.GroupbyAttrs,
Expand Down
Loading