diff --git a/client/request/filter.go b/client/request/filter.go index bf32713844..67a80b58e7 100644 --- a/client/request/filter.go +++ b/client/request/filter.go @@ -10,6 +10,12 @@ package request +const ( + FilterOpOr = "_or" + FilterOpAnd = "_and" + FilterOpNot = "_not" +) + // Filter contains the parsed condition map to be // run by the Filter Evaluator. // @todo: Cache filter structure for faster condition diff --git a/planner/filter/complex.go b/planner/filter/complex.go new file mode 100644 index 0000000000..098caefc9c --- /dev/null +++ b/planner/filter/complex.go @@ -0,0 +1,64 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/connor" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +// IsComplex returns true if the provided filter is complex. +// A filter is considered complex if it contains a relation +// object within an _or operator that is not necessarily +// its direct child. +func IsComplex(filter *mapper.Filter) bool { + if filter == nil { + return false + } + return isComplex(filter.Conditions, false) +} + +func isComplex(conditions any, seekRelation bool) bool { + switch typedCond := conditions.(type) { + case map[connor.FilterKey]any: + for k, v := range typedCond { + if op, ok := k.(*mapper.Operator); ok { + if (op.Operation == request.FilterOpOr && len(v.([]any)) > 1) || + op.Operation == request.FilterOpNot { + if isComplex(v, true) { + return true + } + continue + } + } + if _, isProp := k.(*mapper.PropertyIndex); isProp && seekRelation { + objMap := v.(map[connor.FilterKey]any) + for objK := range objMap { + if _, isRelation := objK.(*mapper.PropertyIndex); isRelation { + return true + } + } + } + if isComplex(v, seekRelation) { + return true + } + } + case []any: + for _, v := range typedCond { + if isComplex(v, seekRelation) { + return true + } + } + default: + return false + } + return false +} diff --git a/planner/filter/complex_test.go b/planner/filter/complex_test.go new file mode 100644 index 0000000000..f16055df74 --- /dev/null +++ b/planner/filter/complex_test.go @@ -0,0 +1,175 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt.
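As an illustration of the rule above, here is a minimal test-style sketch of IsComplex. It is an assumption for illustration only, not part of the patch, and it reuses the m, r and getDocMapping helpers defined in planner/filter/util_test.go further down in this diff, where published is the only relation field in the test mapping:

package filter

import (
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/sourcenetwork/defradb/client/request"
	"github.com/sourcenetwork/defradb/planner/mapper"
)

func TestIsComplexSketch(t *testing.T) {
	mapping := getDocMapping()

	// A relation that is the only child of an _or is not considered complex.
	single := r("_or", m("published", m("rating", m("_gt", 4.0))))
	assert.False(t, IsComplex(mapper.ToFilter(request.Filter{Conditions: single}, mapping)))

	// The same relation next to another _or child makes the filter complex.
	multi := r("_or",
		m("published", m("rating", m("_gt", 4.0))),
		m("age", m("_gt", 30)),
	)
	assert.True(t, IsComplex(mapper.ToFilter(request.Filter{Conditions: multi}, mapping)))
}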
+package filter + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +func TestIsComplex(t *testing.T) { + tests := []struct { + name string + inputFilter map[string]any + isComplex bool + }{ + { + name: "flat structure", + inputFilter: map[string]any{ + "name": m("_eq", "John"), + "age": m("_gt", 55), + }, + isComplex: false, + }, + { + name: "fields within _and", + inputFilter: r("_and", + m("name", m("_eq", "John")), + m("age", m("_gt", 55)), + ), + isComplex: false, + }, + { + name: "fields within _not", + inputFilter: r("_and", + m("_not", m("name", m("_eq", "John"))), + m("age", m("_gt", 55)), + ), + isComplex: false, + }, + { + name: "fields within _or and _and (with _and root)", + inputFilter: r("_and", + r("_or", + r("_and", + m("name", m("_eq", "John")), + m("age", m("_gt", 30)), + ), + ), + r("_or", + m("name", m("_eq", "Islam")), + m("age", m("_lt", 55)), + ), + ), + isComplex: false, + }, + { + name: "fields within _or and _and (with _or root)", + inputFilter: r("_or", + r("_and", + m("name", m("_eq", "John")), + m("age", m("_gt", 30)), + ), + m("verified", m("_eq", true)), + ), + isComplex: false, + }, + { + name: "only 1 relation within _or", + inputFilter: r("_or", + m("published", m("rating", m("_gt", 4.0))), + ), + isComplex: false, + }, + { + name: "relation inside _or", + inputFilter: r("_or", + m("published", m("rating", m("_gt", 4.0))), + m("age", m("_gt", 30)), + m("verified", m("_eq", true)), + ), + isComplex: true, + }, + { + name: "relation not inside _or", + inputFilter: r("_and", + r("_or", + m("age", m("_lt", 30)), + m("verified", m("_eq", false)), + ), + r("_or", + r("_and", + m("age", m("_gt", 30)), + ), + m("name", m("_eq", "John")), + ), + r("_and", + m("name", m("_eq", "Islam")), + m("published", m("rating", m("_gt", 4.0))), + ), + ), + isComplex: false, + }, + { + name: "relation inside _and and _or", + inputFilter: r("_and", + r("_or", + m("age", m("_lt", 30)), + m("verified", m("_eq", false)), + ), + r("_or", + r("_and", + m("published", m("rating", m("_gt", 4.0))), + m("age", m("_gt", 30)), + ), + m("name", m("_eq", "John")), + ), + ), + isComplex: true, + }, + { + name: "relation within _not", + inputFilter: m("_not", + m("published", m("rating", m("_gt", 4.0))), + ), + isComplex: true, + }, + { + name: "field inside long _or/_and/_not chain", + inputFilter: m("_not", r("_and", r("_or", m("_not", r("_or", r("_and", + m("name", m("_eq", "John")))), + )))), + isComplex: false, + }, + { + name: "relation inside _and/_or and _not", + inputFilter: r("_and", + r("_or", + m("age", m("_lt", 30)), + m("verified", m("_eq", false)), + ), + r("_or", + m("_not", + m("published", m("rating", m("_gt", 4.0))), + ), + m("name", m("_eq", "John")), + ), + ), + isComplex: true, + }, + } + + mapping := getDocMapping() + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + inputFilter := mapper.ToFilter(request.Filter{Conditions: test.inputFilter}, mapping) + actual := IsComplex(inputFilter) + assert.Equal(t, test.isComplex, actual) + }) + } +} + +func TestIsComplexNullFilter(t *testing.T) { + assert.False(t, IsComplex(nil)) +} diff --git a/planner/filter/copy.go b/planner/filter/copy.go new file mode 100644 index 0000000000..fec591f5ab --- /dev/null +++ b/planner/filter/copy.go @@ -0,0 +1,38 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the 
file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "github.com/sourcenetwork/defradb/connor" +) + +// Copy performs a deep copy of the provided filter. +func Copy(filter map[connor.FilterKey]any) map[connor.FilterKey]any { + return copyFilterConditions(filter).(map[connor.FilterKey]any) +} + +func copyFilterConditions(conditions any) any { + switch typedCond := conditions.(type) { + case map[connor.FilterKey]any: + result := make(map[connor.FilterKey]any) + for key, clause := range typedCond { + result[key] = copyFilterConditions(clause) + } + return result + case []any: + resultArr := make([]any, len(typedCond)) + for i, elementClause := range typedCond { + resultArr[i] = copyFilterConditions(elementClause) + } + return resultArr + default: + return conditions + } +} diff --git a/planner/filter/copy_field.go b/planner/filter/copy_field.go new file mode 100644 index 0000000000..59f7db3471 --- /dev/null +++ b/planner/filter/copy_field.go @@ -0,0 +1,73 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "github.com/sourcenetwork/defradb/connor" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +// copyField copies the given field from the provided filter. +// The result filter preserves the structure of the original filter. 
+func copyField(filter *mapper.Filter, field mapper.Field) *mapper.Filter { + if filter == nil { + return nil + } + conditionKey := &mapper.PropertyIndex{ + Index: field.Index, + } + + resultFilter := &mapper.Filter{} + conditionMap := traverseFilterByProperty(conditionKey, filter.Conditions, false) + if len(conditionMap) > 0 { + resultFilter.Conditions = conditionMap + return resultFilter + } + return nil +} + +func traverseFilterByProperty( + key *mapper.PropertyIndex, + conditions map[connor.FilterKey]any, + shouldDelete bool, +) map[connor.FilterKey]any { + result := conditions + if !shouldDelete { + result = make(map[connor.FilterKey]any) + } + for targetKey, clause := range conditions { + if targetKey.Equal(key) { + if shouldDelete { + delete(result, targetKey) + } else { + result[key] = clause + } + } else if opKey, isOpKey := targetKey.(*mapper.Operator); isOpKey { + clauseArr, isArr := clause.([]any) + if isArr { + resultArr := make([]any, 0) + for _, elementClause := range clauseArr { + elementMap, ok := elementClause.(map[connor.FilterKey]any) + if !ok { + continue + } + compoundCond := traverseFilterByProperty(key, elementMap, shouldDelete) + if len(compoundCond) > 0 { + resultArr = append(resultArr, compoundCond) + } + } + if len(resultArr) > 0 { + result[opKey] = resultArr + } + } + } + } + return result +} diff --git a/planner/filter/copy_field_test.go b/planner/filter/copy_field_test.go new file mode 100644 index 0000000000..d3ec10cf62 --- /dev/null +++ b/planner/filter/copy_field_test.go @@ -0,0 +1,90 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
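A short sketch of what copyField produces (same assumptions as above: the m, r, getDocMapping and AssertEqualFilterMap helpers from util_test.go, where index 1 maps to age). Only the requested field's conditions are kept, but the surrounding compound operators are preserved:

package filter

import (
	"testing"

	"github.com/sourcenetwork/defradb/client/request"
	"github.com/sourcenetwork/defradb/planner/mapper"
)

func TestCopyFieldSketch(t *testing.T) {
	mapping := getDocMapping()
	conditions := r("_and",
		m("name", m("_eq", "John")),
		m("age", m("_gt", 55)),
	)
	f := mapper.ToFilter(request.Filter{Conditions: conditions}, mapping)

	// Only the "age" conditions are copied, but the wrapping _and is kept.
	ageOnly := copyField(f, mapper.Field{Index: 1})

	expected := mapper.ToFilter(request.Filter{Conditions: r("_and", m("age", m("_gt", 55)))}, mapping)
	AssertEqualFilterMap(t, expected.Conditions, ageOnly.Conditions)
}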
+package filter + +import ( + "testing" + + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/planner/mapper" + + "github.com/stretchr/testify/assert" +) + +func TestCopyField(t *testing.T) { + tests := []struct { + name string + inputField mapper.Field + inputFilter map[string]any + expectedFilter map[string]any + }{ + { + name: "flat structure", + inputFilter: map[string]any{ + "name": m("_eq", "John"), + "age": m("_gt", 55), + }, + inputField: mapper.Field{Index: 1}, // age + expectedFilter: m("age", m("_gt", 55)), + }, + { + name: "within _and", + inputFilter: r("_and", + m("name", m("_eq", "John")), + m("age", m("_gt", 55)), + ), + inputField: mapper.Field{Index: 1}, // age + expectedFilter: r("_and", + m("age", m("_gt", 55)), + ), + }, + { + name: "within _or and _and", + inputFilter: r("_and", + r("_or", + r("_and", + m("name", m("_eq", "John")), + m("age", m("_gt", 30)), + ), + ), + r("_or", + m("name", m("_eq", "Islam")), + m("age", m("_lt", 55)), + ), + ), + inputField: mapper.Field{Index: 1}, // age + expectedFilter: r("_and", + r("_or", + r("_and", + m("age", m("_gt", 30)), + ), + ), + r("_or", + m("age", m("_lt", 55)), + ), + ), + }, + } + + mapping := getDocMapping() + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + inputFilter := mapper.ToFilter(request.Filter{Conditions: test.inputFilter}, mapping) + actualFilter := copyField(inputFilter, test.inputField) + expectedFilter := mapper.ToFilter(request.Filter{Conditions: test.expectedFilter}, mapping) + AssertEqualFilterMap(t, expectedFilter.Conditions, actualFilter.Conditions) + }) + } +} + +func TestCopyFieldOfNullFilter(t *testing.T) { + actualFilter := copyField(nil, mapper.Field{Index: 1}) + assert.Nil(t, actualFilter) +} diff --git a/planner/filter/copy_test.go b/planner/filter/copy_test.go new file mode 100644 index 0000000000..ccb471c2b6 --- /dev/null +++ b/planner/filter/copy_test.go @@ -0,0 +1,144 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
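The point of Copy (planner/filter/copy.go above) is isolation from later mutation. A minimal sketch of that guarantee, assuming the testify assert package used elsewhere in these tests:

package filter

import (
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/sourcenetwork/defradb/connor"
	"github.com/sourcenetwork/defradb/planner/mapper"
)

func TestCopySketch(t *testing.T) {
	original := map[connor.FilterKey]any{
		&mapper.PropertyIndex{Index: 0}: map[connor.FilterKey]any{
			&mapper.Operator{Operation: "_eq"}: "John",
		},
	}
	copied := Copy(original)

	// Nested maps and slices are cloned, so mutating the copy leaves the
	// original untouched; only leaf values are shared.
	copied[&mapper.Operator{Operation: "_and"}] = []any{}
	assert.Len(t, original, 1)
	assert.Len(t, copied, 2)
}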
+package filter + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/sourcenetwork/defradb/connor" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +func TestCopyFilter(t *testing.T) { + getFilter := func() map[connor.FilterKey]any { + return map[connor.FilterKey]any{ + &mapper.Operator{Operation: "_or"}: []any{ + map[connor.FilterKey]any{ + &mapper.PropertyIndex{Index: 0}: map[connor.FilterKey]any{ + &mapper.Operator{Operation: "_eq"}: "Some name", + }, + }, + map[connor.FilterKey]any{ + &mapper.Operator{Operation: "_and"}: []any{ + map[connor.FilterKey]any{ + &mapper.PropertyIndex{Index: 1}: map[connor.FilterKey]any{ + &mapper.Operator{Operation: "_gt"}: 64, + }, + }, + map[connor.FilterKey]any{ + &mapper.PropertyIndex{Index: 2}: map[connor.FilterKey]any{ + &mapper.PropertyIndex{Index: 1}: map[connor.FilterKey]any{ + &mapper.Operator{Operation: "_gt"}: 4.8, + }, + }, + }, + }, + }, + map[connor.FilterKey]any{ + &mapper.Operator{Operation: "_and"}: []any{ + map[connor.FilterKey]any{ + &mapper.PropertyIndex{Index: 1}: map[connor.FilterKey]any{ + &mapper.Operator{Operation: "_lt"}: 64, + }, + }, + map[connor.FilterKey]any{ + &mapper.PropertyIndex{Index: 2}: map[connor.FilterKey]any{ + &mapper.PropertyIndex{Index: 1}: map[connor.FilterKey]any{ + &mapper.Operator{Operation: "_lt"}: 4.8, + }, + }, + }, + }, + }, + }, + } + } + + getFirstArrContent := func(f map[connor.FilterKey]any) []any { + for _, val := range f { + arr, isArr := val.([]any) + if isArr { + return arr + } + } + return nil + } + + hasGtOperator := func(f map[connor.FilterKey]any) bool { + orContent := getFirstArrContent(f) + for _, val := range orContent { + andContent := getFirstArrContent(val.(map[connor.FilterKey]any)) + for _, andEl := range andContent { + elMap := andEl.(map[connor.FilterKey]any) + for _, v := range elMap { + vMap := v.(map[connor.FilterKey]any) + for k := range vMap { + if op, ok := k.(*mapper.Operator); ok && op.Operation == "_gt" { + return true + } + } + } + } + } + return false + } + + tests := []struct { + name string + act func(t *testing.T, original, copyFilter map[connor.FilterKey]any) + }{ + { + name: "add new value to top level", + act: func(t *testing.T, original, copyFilter map[connor.FilterKey]any) { + assert.Len(t, original, 1) + + original[&mapper.Operator{Operation: "_and"}] = []any{} + assert.Len(t, original, 2) + assert.Len(t, copyFilter, 1) + }, + }, + { + name: "change array value", + act: func(t *testing.T, original, copyFilter map[connor.FilterKey]any) { + orContent := getFirstArrContent(original) + assert.True(t, hasGtOperator(original)) + + copy(orContent[1:], orContent[2:]) + assert.False(t, hasGtOperator(original)) + assert.True(t, hasGtOperator(copyFilter)) + }, + }, + { + name: "change nested map value", + act: func(t *testing.T, original, copyFilter map[connor.FilterKey]any) { + getFirstOrEl := func(f map[connor.FilterKey]any) map[connor.FilterKey]any { + orContent := getFirstArrContent(f) + return orContent[0].(map[connor.FilterKey]any) + } + elMap := getFirstOrEl(original) + assert.Len(t, elMap, 1) + + elMap[&mapper.Operator{Operation: "_and"}] = []any{} + + assert.Len(t, getFirstOrEl(original), 2) + assert.Len(t, getFirstOrEl(copyFilter), 1) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + f := getFilter() + test.act(t, f, Copy(f)) + }) + } +} diff --git a/planner/filter/merge.go b/planner/filter/merge.go new file mode 100644 index 0000000000..3bc38f4ba3 --- /dev/null +++ b/planner/filter/merge.go 
@@ -0,0 +1,39 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/connor" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +// Merge merges two filters into one. +// It applies _and to both filters and normalizes the result. +func Merge(c1 map[connor.FilterKey]any, c2 map[connor.FilterKey]any) map[connor.FilterKey]any { + if len(c1) == 0 { + return c2 + } + if len(c2) == 0 { + return c1 + } + + result := map[connor.FilterKey]any{ + &mapper.Operator{Operation: request.FilterOpAnd}: []any{ + c1, c2, + }, + } + // We don't merge the two filters using any intelligent field-by-field analysis. + // The way we merge filters is rather artificial: create a root _and operator + // and put both filters as its children. This makes the resulting filter + // more complex, which is why we simplify it by normalizing it. + return normalize(result) +} diff --git a/planner/filter/merge_test.go b/planner/filter/merge_test.go new file mode 100644 index 0000000000..153c850e80 --- /dev/null +++ b/planner/filter/merge_test.go @@ -0,0 +1,75 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt.
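A minimal sketch of Merge's behaviour (again assuming the util_test.go helpers): because the artificial root _and is normalized away, merging two single-field filters yields a flat condition map.

package filter

import (
	"testing"

	"github.com/sourcenetwork/defradb/client/request"
	"github.com/sourcenetwork/defradb/planner/mapper"
)

func TestMergeSketch(t *testing.T) {
	mapping := getDocMapping()
	left := mapper.ToFilter(request.Filter{Conditions: m("name", m("_eq", "John"))}, mapping)
	right := mapper.ToFilter(request.Filter{Conditions: m("age", m("_gt", 55))}, mapping)

	// Both filters are wrapped in a root _and, which normalize then flattens.
	merged := Merge(left.Conditions, right.Conditions)

	expected := mapper.ToFilter(request.Filter{Conditions: map[string]any{
		"name": m("_eq", "John"),
		"age":  m("_gt", 55),
	}}, mapping)
	AssertEqualFilterMap(t, expected.Conditions, merged)
}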
+package filter + +import ( + "testing" + + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/connor" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +func TestMergeFilterConditions(t *testing.T) { + tests := []struct { + name string + left map[string]any + right map[string]any + expected map[string]any + }{ + { + name: "basic merge", + left: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + }, + right: map[string]any{ + "age": m("_gt", 55), + }, + expected: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + "age": m("_gt", 55), + }, + }, + { + name: "basic _and merge", + left: m("_and", []any{ + m("name", m("_eq", "John")), + }), + right: m("_and", []any{ + m("age", m("_gt", 55)), + }), + expected: map[string]any{ + "name": m("_eq", "John"), + "age": m("_gt", 55), + }, + }, + } + + mapping := getDocMapping() + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + leftFilter := mapper.ToFilter(request.Filter{Conditions: tt.left}, mapping) + rightFilter := mapper.ToFilter(request.Filter{Conditions: tt.right}, mapping) + actualFilter := Merge(leftFilter.Conditions, rightFilter.Conditions) + expectedFilter := mapper.ToFilter(request.Filter{Conditions: tt.expected}, mapping) + AssertEqualFilterMap(t, expectedFilter.Conditions, actualFilter) + }) + } +} + +func TestMergeNullFilter(t *testing.T) { + f := map[connor.FilterKey]any{ + &mapper.PropertyIndex{Index: 0}: "value1", + } + AssertEqualFilterMap(t, f, Merge(f, nil)) + AssertEqualFilterMap(t, f, Merge(nil, f)) +} diff --git a/planner/filter/normalize.go b/planner/filter/normalize.go new file mode 100644 index 0000000000..5f7d275418 --- /dev/null +++ b/planner/filter/normalize.go @@ -0,0 +1,149 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/connor" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +// normalize normalizes the provided filter conditions. +// The following cases are subject to normalization: +// - an _and or _or with a single element is flattened (the operator is removed) +// - double _not is removed +// - any number of consecutive _ands with any number of elements is flattened +// As the result object is a map with unique keys (a.k.a.
properties), +// while performing flattening of compound operators if the same property +// is present in the result map, both conditions will be moved into an _and +func normalize(conditions map[connor.FilterKey]any) map[connor.FilterKey]any { + return normalizeConditions(conditions, false).(map[connor.FilterKey]any) +} + +func conditionsArrToMap(conditions []any) map[connor.FilterKey]any { + result := make(map[connor.FilterKey]any) + for _, clause := range conditions { + if clauseMap, ok := clause.(map[connor.FilterKey]any); ok { + for k, v := range clauseMap { + result[k] = v + } + } + } + return result +} + +func addNormalizedCondition(key connor.FilterKey, val any, m map[connor.FilterKey]any) { + if _, isProp := key.(*mapper.PropertyIndex); isProp { + var andOp *mapper.Operator + var andContent []any + for existingKey := range m { + if op, isOp := existingKey.(*mapper.Operator); isOp && op.Operation == request.FilterOpAnd { + andOp = op + andContent = m[existingKey].([]any) + break + } + } + for existingKey := range m { + if existingKey.Equal(key) { + existingVal := m[existingKey] + delete(m, existingKey) + if andOp == nil { + andOp = &mapper.Operator{Operation: request.FilterOpAnd} + } + m[andOp] = append( + andContent, + map[connor.FilterKey]any{existingKey: existingVal}, + map[connor.FilterKey]any{key: val}, + ) + return + } + } + for _, andElement := range andContent { + elementMap := andElement.(map[connor.FilterKey]any) + for andElementKey := range elementMap { + if andElementKey.Equal(key) { + m[andOp] = append(andContent, map[connor.FilterKey]any{key: val}) + return + } + } + } + } + m[key] = val +} + +func normalizeConditions(conditions any, skipRoot bool) any { + result := make(map[connor.FilterKey]any) + switch typedConditions := conditions.(type) { + case map[connor.FilterKey]any: + for rootKey, rootVal := range typedConditions { + rootOpKey, isRootOp := rootKey.(*mapper.Operator) + if isRootOp { + if rootOpKey.Operation == request.FilterOpAnd || rootOpKey.Operation == request.FilterOpOr { + rootValArr := rootVal.([]any) + if len(rootValArr) == 1 || rootOpKey.Operation == request.FilterOpAnd && !skipRoot { + flat := normalizeConditions(conditionsArrToMap(rootValArr), false) + flatMap := flat.(map[connor.FilterKey]any) + for k, v := range flatMap { + addNormalizedCondition(k, v, result) + } + } else { + resultArr := []any{} + for i := range rootValArr { + norm := normalizeConditions(rootValArr[i], !skipRoot) + normMap, ok := norm.(map[connor.FilterKey]any) + if ok { + for k, v := range normMap { + resultArr = append(resultArr, map[connor.FilterKey]any{k: v}) + } + } else { + resultArr = append(resultArr, norm) + } + } + addNormalizedCondition(rootKey, resultArr, result) + } + } else if rootOpKey.Operation == request.FilterOpNot { + notMap := rootVal.(map[connor.FilterKey]any) + if len(notMap) == 1 { + var k connor.FilterKey + for k = range notMap { + break + } + norm := normalizeConditions(notMap, true).(map[connor.FilterKey]any) + delete(notMap, k) + var v any + for k, v = range norm { + break + } + if opKey, ok := k.(*mapper.Operator); ok && opKey.Operation == request.FilterOpNot { + notNotMap := normalizeConditions(v, false).(map[connor.FilterKey]any) + for notNotKey, notNotVal := range notNotMap { + addNormalizedCondition(notNotKey, notNotVal, result) + } + } else { + notMap[k] = v + addNormalizedCondition(rootOpKey, notMap, result) + } + } else { + addNormalizedCondition(rootKey, rootVal, result) + } + } else { + addNormalizedCondition(rootKey, rootVal, result) + } + } 
else { + addNormalizedCondition(rootKey, normalizeConditions(rootVal, false), result) + } + } + return result + case []any: + return conditionsArrToMap(typedConditions) + default: + return conditions + } +} diff --git a/planner/filter/normalize_test.go b/planner/filter/normalize_test.go new file mode 100644 index 0000000000..22e4f69ed0 --- /dev/null +++ b/planner/filter/normalize_test.go @@ -0,0 +1,302 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "testing" + + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +func TestNormalizeConditions(t *testing.T) { + tests := []struct { + name string + input map[string]any + expected map[string]any + }{ + { + name: "don't normalize already normalized conditions", + input: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + }, + expected: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + }, + }, + { + name: "flatten single _and condition", + input: r("_and", + m("name", m("_eq", "John")), + m("verified", m("_eq", true)), + ), + expected: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + }, + }, + { + name: "don't touch single _or condition", + input: r("_or", + m("name", m("_eq", "John")), + m("verified", m("_eq", true)), + ), + expected: r("_or", + m("name", m("_eq", "John")), + m("verified", m("_eq", true)), + ), + }, + { + name: "flatten _and with single condition", + input: map[string]any{ + "_and": []any{ + m("name", m("_eq", "John")), + }, + "verified": m("_eq", true), + }, + expected: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + }, + }, + { + name: "flatten _or with single condition", + input: map[string]any{ + "_or": []any{ + m("name", m("_eq", "John")), + }, + "verified": m("_eq", true), + }, + expected: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + }, + }, + { + name: "flatten long _and/_or chain", + input: r("_or", r("_and", r("_or", r("_or", r("_and", r("_and", r("_and", + m("name", m("_eq", "John")), + m("verified", m("_eq", true)), + ))))))), + expected: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + }, + }, + { + name: "normalize sibling _and with few conditions", + input: map[string]any{ + "_and": []any{ + r("_and", + m("age", m("_gt", 30)), + m("published", m("rating", m("_lt", 4.8))), + ), + r("_and", m("verified", m("_eq", true))), + }, + "name": m("_eq", "John"), + }, + expected: map[string]any{ + "name": m("_eq", "John"), + "published": m("rating", m("_lt", 4.8)), + "age": m("_gt", 30), + "verified": m("_eq", true), + }, + }, + { + name: "don't touch single _not", + input: m("_not", m("name", m("_eq", "John"))), + expected: m("_not", m("name", m("_eq", "John"))), + }, + { + name: "remove double _not", + input: m("_not", m("_not", m("name", m("_eq", "John")))), + expected: m("name", m("_eq", "John")), + }, + { + name: "remove double _not (sibling)", + input: map[string]any{ + "_not": m("_not", m("name", m("_eq", "John"))), + "age": m("_eq", 65), + }, + expected: map[string]any{ + "name": m("_eq", "John"), + "age": m("_eq", 65), + }, + }, + { 
+ name: "don't touch double _not if first has few elements", + input: m("_not", map[string]any{ + "_not": m("name", m("_eq", "John")), + "verified": m("_eq", true), + }), + expected: m("_not", map[string]any{ + "_not": m("name", m("_eq", "John")), + "verified": m("_eq", true), + }), + }, + { + name: "normalize long _not chain", + input: m("_not", m("_not", m("_not", m("_not", m("_not", m("name", m("_eq", "John"))))))), + expected: m("_not", m("name", m("_eq", "John"))), + }, + { + name: "normalize _not content", + input: m("_not", r("_and", + m("name", m("_eq", "John")), + r("_and", + m("age", m("_eq", 30)), + m("verified", m("_eq", true)), + ), + )), + expected: m("_not", r("_and", + m("name", m("_eq", "John")), + m("age", m("_eq", 30)), + m("verified", m("_eq", true)), + )), + }, + { + name: "normalize long _not,_and,_or chain", + input: m("_not", r("_and", m("_not", r("_or", m("_not", m("name", m("_eq", "John"))))))), + expected: m("_not", m("name", m("_eq", "John"))), + }, + { + name: "normalize nested arr elements", + input: r("_and", + r("_and", r("_and", m("name", m("_eq", "John")))), + r("_and", m("verified", m("_eq", true))), + r("_and", r("_and", + r("_and", m("age", m("_lt", 55))), + m("published", m("rating", m("_gt", 4.4))), + )), + ), + expected: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + "age": m("_lt", 55), + "published": m("rating", m("_gt", 4.4)), + }, + }, + { + name: "do not flatten _and, child of _or", + input: r("_or", + r("_and", + m("name", m("_eq", "John")), + m("verified", m("_eq", true)), + ), + r("_and", + m("name", m("_eq", "Islam")), + m("verified", m("_eq", false)), + ), + ), + expected: r("_or", + r("_and", + m("name", m("_eq", "John")), + m("verified", m("_eq", true)), + ), + r("_and", + m("name", m("_eq", "Islam")), + m("verified", m("_eq", false)), + ), + ), + }, + { + name: "flatten _and, grand children of _or", + input: r("_or", + r("_and", + r("_and", + m("name", m("_eq", "Islam")), + m("age", m("_eq", "30")), + ), + m("verified", m("_eq", false)), + ), + r("_and", + m("name", m("_eq", "John")), + m("verified", m("_eq", true)), + ), + ), + expected: r("_or", + r("_and", + m("name", m("_eq", "Islam")), + m("age", m("_eq", "30")), + m("verified", m("_eq", false)), + ), + r("_and", + m("name", m("_eq", "John")), + m("verified", m("_eq", true)), + ), + ), + }, + { + name: "squash same keys into _and", + input: map[string]any{ + "_and": []any{ + r("_and", + m("age", m("_gt", 30)), + m("published", m("rating", m("_lt", 4.8))), + ), + r("_and", m("age", m("_lt", 55))), + m("age", m("_ne", 33)), + }, + "name": m("_eq", "John"), + }, + expected: map[string]any{ + "name": m("_eq", "John"), + "published": m("rating", m("_lt", 4.8)), + "_and": []any{ + m("age", m("_gt", 30)), + m("age", m("_lt", 55)), + m("age", m("_ne", 33)), + }, + }, + }, + { + name: "squash same keys into _and (with more matching keys)", + input: map[string]any{ + "_and": []any{ + m("published", m("rating", m("_lt", 4.8))), + r("_and", m("name", m("_ne", "Islam"))), + r("_and", + m("age", m("_gt", 30)), + m("published", m("genre", m("_eq", "Thriller"))), + m("verified", m("_eq", true)), + ), + r("_and", + m("age", m("_lt", 55)), + m("published", m("rating", m("_gt", 4.4)))), + }, + "name": m("_eq", "John"), + }, + expected: map[string]any{ + "_and": []any{ + m("name", m("_eq", "John")), + m("name", m("_ne", "Islam")), + m("published", m("rating", m("_gt", 4.4))), + m("published", m("rating", m("_lt", 4.8))), + m("published", m("genre", m("_eq", "Thriller"))), + 
m("age", m("_gt", 30)), + m("age", m("_lt", 55)), + }, + "verified": m("_eq", true), + }, + }, + } + + mapping := getDocMapping() + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + inputFilter := mapper.ToFilter(request.Filter{Conditions: tt.input}, mapping) + actualFilter := normalize(inputFilter.Conditions) + expectedFilter := mapper.ToFilter(request.Filter{Conditions: tt.expected}, mapping) + AssertEqualFilterMap(t, expectedFilter.Conditions, actualFilter) + }) + } +} diff --git a/planner/filter/remove_field.go b/planner/filter/remove_field.go new file mode 100644 index 0000000000..5c80ffc96c --- /dev/null +++ b/planner/filter/remove_field.go @@ -0,0 +1,26 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "github.com/sourcenetwork/defradb/planner/mapper" +) + +// RemoveField removes the given field from the provided filter. +func RemoveField(filter *mapper.Filter, field mapper.Field) { + if filter == nil { + return + } + conditionKey := &mapper.PropertyIndex{ + Index: field.Index, + } + + traverseFilterByProperty(conditionKey, filter.Conditions, true) +} diff --git a/planner/filter/remove_field_test.go b/planner/filter/remove_field_test.go new file mode 100644 index 0000000000..2b6e8cdd3a --- /dev/null +++ b/planner/filter/remove_field_test.go @@ -0,0 +1,87 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+package filter + +import ( + "testing" + + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +func TestRemoveFieldFromFilter(t *testing.T) { + tests := []struct { + name string + inputField mapper.Field + inputFilter map[string]any + expectedFilter map[string]any + }{ + { + name: "flat structure", + inputFilter: map[string]any{ + "name": m("_eq", "John"), + "age": m("_gt", 55), + }, + inputField: mapper.Field{Index: 1}, // age + expectedFilter: m("name", m("_eq", "John")), + }, + { + name: "within _and", + inputFilter: r("_and", + m("name", m("_eq", "John")), + m("age", m("_gt", 55)), + ), + inputField: mapper.Field{Index: 1}, // age + expectedFilter: r("_and", + m("name", m("_eq", "John")), + ), + }, + { + name: "within _or and _and", + inputFilter: r("_and", + r("_or", + r("_and", + m("name", m("_eq", "John")), + m("age", m("_gt", 30)), + ), + ), + r("_or", + m("name", m("_eq", "Islam")), + m("age", m("_lt", 55)), + ), + ), + inputField: mapper.Field{Index: 1}, // age + expectedFilter: r("_and", + r("_or", + r("_and", + m("name", m("_eq", "John")), + ), + ), + r("_or", + m("name", m("_eq", "Islam")), + ), + ), + }, + } + + mapping := getDocMapping() + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + inputFilter := mapper.ToFilter(request.Filter{Conditions: test.inputFilter}, mapping) + RemoveField(inputFilter, test.inputField) + expectedFilter := mapper.ToFilter(request.Filter{Conditions: test.expectedFilter}, mapping) + AssertEqualFilterMap(t, expectedFilter.Conditions, inputFilter.Conditions) + }) + } +} + +func TestRemoveFieldFromNullFilter(t *testing.T) { + RemoveField(nil, mapper.Field{Index: 1}) +} diff --git a/planner/filter/split.go b/planner/filter/split.go new file mode 100644 index 0000000000..bba822145a --- /dev/null +++ b/planner/filter/split.go @@ -0,0 +1,34 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "github.com/sourcenetwork/defradb/planner/mapper" +) + +// SplitByField splits the provided filter into two filters based on the given field. +// It can be used for extracting a subType filter. +// Eg. (filter: {age: 10, name: "bob", author: {birthday: "June 26, 1990", ...}, ...}) + +// +// In this case the root filter is the conditions that apply to the main type +// ie: {age: 10, name: "bob", ...}. +// +// And the subType filter is the conditions that apply to the queried sub type +// ie: {birthday: "June 26, 1990", ...}. +func SplitByField(filter *mapper.Filter, field mapper.Field) (*mapper.Filter, *mapper.Filter) { + if filter == nil { + return nil, nil + } + + splitF := copyField(filter, field) + RemoveField(filter, field) + + return filter, splitF +} diff --git a/planner/filter/split_test.go b/planner/filter/split_test.go new file mode 100644 index 0000000000..1bcbecffb7 --- /dev/null +++ b/planner/filter/split_test.go @@ -0,0 +1,58 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt.
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "testing" + + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/planner/mapper" + + "github.com/stretchr/testify/assert" +) + +func TestSplitFilter(t *testing.T) { + tests := []struct { + name string + inputField mapper.Field + inputFilter map[string]any + expectedFilter1 map[string]any + expectedFilter2 map[string]any + }{ + { + name: "flat structure", + inputFilter: map[string]any{ + "name": m("_eq", "John"), + "age": m("_gt", 55), + }, + inputField: mapper.Field{Index: 1}, // age + expectedFilter1: m("name", m("_eq", "John")), + expectedFilter2: m("age", m("_gt", 55)), + }, + } + + mapping := getDocMapping() + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + inputFilter := mapper.ToFilter(request.Filter{Conditions: test.inputFilter}, mapping) + actualFilter1, actualFilter2 := SplitByField(inputFilter, test.inputField) + expectedFilter1 := mapper.ToFilter(request.Filter{Conditions: test.expectedFilter1}, mapping) + expectedFilter2 := mapper.ToFilter(request.Filter{Conditions: test.expectedFilter2}, mapping) + AssertEqualFilterMap(t, expectedFilter1.Conditions, actualFilter1.Conditions) + AssertEqualFilterMap(t, expectedFilter2.Conditions, actualFilter2.Conditions) + }) + } +} + +func TestSplitNullFilter(t *testing.T) { + actualFilter1, actualFilter2 := SplitByField(nil, mapper.Field{Index: 1}) + assert.Nil(t, actualFilter1) + assert.Nil(t, actualFilter2) +} diff --git a/planner/filter/util_test.go b/planner/filter/util_test.go new file mode 100644 index 0000000000..e8860081c8 --- /dev/null +++ b/planner/filter/util_test.go @@ -0,0 +1,140 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
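One more sketch, for SplitByField, showing the sub-type extraction it is used for in the planner (assumptions: the util_test.go helpers; index 2 is the published relation in the test mapping). Note that the input filter is mutated: the first return value is the same *mapper.Filter with the field removed.

package filter

import (
	"testing"

	"github.com/sourcenetwork/defradb/client/request"
	"github.com/sourcenetwork/defradb/planner/mapper"
)

func TestSplitByFieldSketch(t *testing.T) {
	mapping := getDocMapping()
	conditions := map[string]any{
		"name":      m("_eq", "John"),
		"published": m("rating", m("_gt", 4.0)),
	}
	f := mapper.ToFilter(request.Filter{Conditions: conditions}, mapping)

	// The relation's conditions are split out; the root filter keeps the rest.
	root, sub := SplitByField(f, mapper.Field{Index: 2})

	expectedRoot := mapper.ToFilter(request.Filter{Conditions: m("name", m("_eq", "John"))}, mapping)
	expectedSub := mapper.ToFilter(request.Filter{Conditions: m("published", m("rating", m("_gt", 4.0)))}, mapping)
	AssertEqualFilterMap(t, expectedRoot.Conditions, root.Conditions)
	AssertEqualFilterMap(t, expectedSub.Conditions, sub.Conditions)
}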
+package filter + +import ( + "fmt" + "reflect" + "testing" + + "github.com/sourcenetwork/defradb/connor" + "github.com/sourcenetwork/defradb/core" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +func assertEqualFilterMap(expected, actual map[connor.FilterKey]any, prefix string) string { + if len(expected) != len(actual) { + return fmt.Sprintf("Mismatch at %s: Expected map length: %d, but got: %d", prefix, len(expected), len(actual)) + } + + findMatchingKey := func(key connor.FilterKey, m map[connor.FilterKey]any) connor.FilterKey { + for k := range m { + if k.Equal(key) { + return k + } + } + return nil + } + + for expKey, expVal := range expected { + actKey := findMatchingKey(expKey, actual) + if actKey == nil { + return fmt.Sprintf("Mismatch at %s: Expected key %v not found in actual map", prefix, expKey) + } + actVal := actual[actKey] + + newPrefix := fmt.Sprintf("%s.%v", prefix, expKey) + switch expTypedVal := expVal.(type) { + case map[connor.FilterKey]any: + actTypedVal, ok := actVal.(map[connor.FilterKey]any) + if !ok { + return fmt.Sprintf("Mismatch at %s: Expected a nested map[FilterKey]any for key %v, but got: %v", prefix, expKey, actVal) + } + errMsg := assertEqualFilterMap(expTypedVal, actTypedVal, newPrefix) + if errMsg != "" { + return errMsg + } + case []any: + actTypedVal, ok := actVal.([]any) + if !ok { + return fmt.Sprintf("Mismatch at %s: Expected a nested []any for key %v, but got: %v", newPrefix, expKey, actVal) + } + if len(expTypedVal) != len(actTypedVal) { + return fmt.Sprintf("Mismatch at %s: Expected slice length: %d, but got: %d", newPrefix, len(expTypedVal), len(actTypedVal)) + } + numElements := len(expTypedVal) + for i := 0; i < numElements; i++ { + for j := 0; j < numElements; j++ { + errMsg := compareElements(expTypedVal[i], actTypedVal[j], expKey, newPrefix) + if errMsg == "" { + actTypedVal = append(actTypedVal[:j], actTypedVal[j+1:]...) 
+ break + } + } + if len(actTypedVal) != numElements-i-1 { + return fmt.Sprintf("Mismatch at %s: Expected element not found: %d", newPrefix, expTypedVal[i]) + } + } + default: + if !reflect.DeepEqual(expVal, actVal) { + return fmt.Sprintf("Mismatch at %s: Expected value %v for key %v, but got %v", prefix, expVal, expKey, actVal) + } + } + } + return "" +} + +func compareElements(expected, actual any, key connor.FilterKey, prefix string) string { + switch expElem := expected.(type) { + case map[connor.FilterKey]any: + actElem, ok := actual.(map[connor.FilterKey]any) + if !ok { + return fmt.Sprintf("Mismatch at %s: Expected a nested map[FilterKey]any for key %v, but got: %v", prefix, key, actual) + } + return assertEqualFilterMap(expElem, actElem, prefix) + default: + if !reflect.DeepEqual(expElem, actual) { + return fmt.Sprintf("Mismatch at %s: Expected value %v for key %v, but got %v", prefix, expElem, key, actual) + } + } + return "" +} + +func AssertEqualFilterMap(t *testing.T, expected, actual map[connor.FilterKey]any) { + errMsg := assertEqualFilterMap(expected, actual, "root") + if errMsg != "" { + t.Fatal(errMsg) + } +} + +func AssertEqualFilter(t *testing.T, expected, actual *mapper.Filter) { + if expected == nil && actual == nil { + return + } + + if expected == nil || actual == nil { + t.Fatalf("Expected %v, but got %v", expected, actual) + return + } + + AssertEqualFilterMap(t, expected.Conditions, actual.Conditions) + + if !reflect.DeepEqual(expected.ExternalConditions, actual.ExternalConditions) { + t.Errorf("Expected external conditions \n\t%v\n, but got \n\t%v", + expected.ExternalConditions, actual.ExternalConditions) + } +} + +func m(op string, val any) map[string]any { + return map[string]any{op: val} +} + +func r(op string, vals ...any) map[string]any { + return m(op, vals) +} + +func getDocMapping() *core.DocumentMapping { + return &core.DocumentMapping{ + IndexesByName: map[string][]int{"name": {0}, "age": {1}, "published": {2}, "verified": {3}}, + ChildMappings: []*core.DocumentMapping{nil, nil, { + IndexesByName: map[string][]int{"rating": {11}, "genre": {12}}, + }}, + } +} diff --git a/planner/mapper/mapper.go b/planner/mapper/mapper.go index 336b3eae40..b6f80a55a2 100644 --- a/planner/mapper/mapper.go +++ b/planner/mapper/mapper.go @@ -16,7 +16,6 @@ import ( "strings" "github.com/sourcenetwork/immutable" - "github.com/sourcenetwork/immutable/enumerable" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" @@ -788,6 +787,7 @@ func resolveFilterDependencies( source.Value().Conditions, mapping, existingFields, + nil, ) } @@ -797,50 +797,83 @@ func resolveInnerFilterDependencies( source map[string]any, mapping *core.DocumentMapping, existingFields []Requestable, + resolvedFields []Requestable, ) ([]Requestable, error) { newFields := []Requestable{} -sourceLoop: for key := range source { - if strings.HasPrefix(key, "_") && key != request.KeyFieldName { - continue - } - - propertyMapped := len(mapping.IndexesByName[key]) != 0 + if key == request.FilterOpAnd || key == request.FilterOpOr { + compoundFilter := source[key].([]any) + for _, innerFilter := range compoundFilter { + innerFields, err := resolveInnerFilterDependencies( + descriptionsRepo, + parentCollectionName, + innerFilter.(map[string]any), + mapping, + existingFields, + resolvedFields, + ) + if err != nil { + return nil, err + } - if !propertyMapped { - join, err := constructEmptyJoin(descriptionsRepo, parentCollectionName, mapping, key) + resolvedFields = 
append(resolvedFields, innerFields...) + newFields = append(newFields, innerFields...) + } + continue + } else if key == request.FilterOpNot { + notFilter := source[key].(map[string]any) + innerFields, err := resolveInnerFilterDependencies( + descriptionsRepo, + parentCollectionName, + notFilter, + mapping, + existingFields, + resolvedFields, + ) if err != nil { return nil, err } - newFields = append(newFields, join) + resolvedFields = append(resolvedFields, innerFields...) + newFields = append(newFields, innerFields...) + continue } - keyIndex := mapping.FirstIndexOfName(key) + propertyMapped := len(mapping.IndexesByName[key]) != 0 - if keyIndex >= len(mapping.ChildMappings) { - // If the key index is outside the bounds of the child mapping array, then - // this is not a relation/join and we can add it to the fields and - // continue (no child props to process) - for _, field := range existingFields { - if field.GetIndex() == keyIndex { - continue sourceLoop + var childSelect *Select + if propertyMapped { + var field Requestable + for _, f := range existingFields { + if f.GetIndex() == mapping.FirstIndexOfName(key) { + field = f + break } } - newFields = append(existingFields, &Field{ - Index: keyIndex, - Name: key, - }) - - continue - } + for _, f := range resolvedFields { + if f.GetIndex() == mapping.FirstIndexOfName(key) { + field = f + break + } + } + if field == nil { + newFields = append(newFields, &Field{Index: mapping.FirstIndexOfName(key), Name: key}) + continue + } + var isSelect bool + childSelect, isSelect = field.(*Select) + if !isSelect { + continue + } + } else { + var err error + childSelect, err = constructEmptyJoin(descriptionsRepo, parentCollectionName, mapping, key) + if err != nil { + return nil, err + } - childMap := mapping.ChildMappings[keyIndex] - if childMap == nil { - // If childMap is nil, then this is not a relation/join and we can continue - // (no child props to process) - continue + newFields = append(newFields, childSelect) } childSource := source[key] @@ -851,56 +884,25 @@ sourceLoop: continue } - dummyParsed := &request.Select{ - Field: request.Field{ - Name: key, - }, - } - + dummyParsed := &request.Select{Field: request.Field{Name: key}} childCollectionName, err := getCollectionName(descriptionsRepo, dummyParsed, parentCollectionName) if err != nil { return nil, err } - allFields := enumerable.Concat( - enumerable.New(newFields), - enumerable.New(existingFields), - ) - - matchingFields := enumerable.Where[Requestable](allFields, func(existingField Requestable) (bool, error) { - return existingField.GetIndex() == keyIndex, nil - }) - - matchingHosts := enumerable.Select(matchingFields, func(existingField Requestable) (*Select, error) { - host, isSelect := existingField.AsSelect() - if !isSelect { - // This should never be possible - return nil, client.NewErrUnhandledType("host", existingField) - } - return host, nil - }) - - host, hasHost, err := enumerable.TryGetFirst(matchingHosts) - if err != nil { - return nil, err - } - if !hasHost { - // This should never be possible - return nil, ErrFailedToFindHostField - } - childFields, err := resolveInnerFilterDependencies( descriptionsRepo, childCollectionName, childFilter, - childMap, - host.Fields, + childSelect.DocumentMapping, + childSelect.Fields, + nil, ) if err != nil { return nil, err } - host.Fields = append(host.Fields, childFields...) + childSelect.Fields = append(childSelect.Fields, childFields...) 
} return newFields, nil @@ -946,7 +948,7 @@ func constructEmptyJoin( }, nil } -// resolveSecondaryRelationIDs contructs the required stuff needed to resolve secondary relation ids. +// resolveSecondaryRelationIDs constructs the required stuff needed to resolve secondary relation ids. // // They are handled by joining (if not already done so) the related object and copying its key into the // secondary relation id field. diff --git a/planner/mapper/targetable.go b/planner/mapper/targetable.go index 49190b911f..bcfdb02ef8 100644 --- a/planner/mapper/targetable.go +++ b/planner/mapper/targetable.go @@ -13,6 +13,7 @@ package mapper import ( "github.com/sourcenetwork/immutable" + "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/connor" "github.com/sourcenetwork/defradb/core" ) @@ -109,7 +110,7 @@ func filterObjectToMap(mapping *core.DocumentMapping, obj map[connor.FilterKey]a case *Operator: switch keyType.Operation { - case "_and", "_or": + case request.FilterOpAnd, request.FilterOpOr: v := v.([]any) logicMapEntries := make([]any, len(v)) for i, item := range v { @@ -117,7 +118,7 @@ func filterObjectToMap(mapping *core.DocumentMapping, obj map[connor.FilterKey]a logicMapEntries[i] = filterObjectToMap(mapping, itemMap) } outmap[keyType.Operation] = logicMapEntries - case "_not": + case request.FilterOpNot: itemMap := v.(map[connor.FilterKey]any) outmap[keyType.Operation] = filterObjectToMap(mapping, itemMap) default: diff --git a/planner/type_join.go b/planner/type_join.go index f0dd2c6d19..f37437089e 100644 --- a/planner/type_join.go +++ b/planner/type_join.go @@ -18,6 +18,7 @@ import ( "github.com/sourcenetwork/defradb/connor" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/db/base" + "github.com/sourcenetwork/defradb/planner/filter" "github.com/sourcenetwork/defradb/planner/mapper" "github.com/sourcenetwork/defradb/request/graphql/schema" ) @@ -210,39 +211,6 @@ func (n *typeIndexJoin) Explain(explainType request.ExplainType) (map[string]any // Merge implements mergeNode func (n *typeIndexJoin) Merge() bool { return true } -// split the provided filter -// into the root and subType components. -// Eg. (filter: {age: 10, name: "bob", author: {birthday: "June 26, 1990", ...}, ...}) -// -// The root filter is the conditions that apply to the main -// type ie: {age: 10, name: "bob", ...}. -// -// The subType filter is the conditions that apply to the -// queried sub type ie: {birthday: "June 26, 1990", ...}. -func splitFilterByType(filter *mapper.Filter, subType int) (*mapper.Filter, *mapper.Filter) { - if filter == nil { - return nil, nil - } - conditionKey := &mapper.PropertyIndex{ - Index: subType, - } - - keyFound, sub := removeConditionIndex(conditionKey, filter.Conditions) - if !keyFound { - return filter, nil - } - - // create new splitup filter - // our schema ensures that if sub exists, its of type map[string]any - splitF := &mapper.Filter{Conditions: map[connor.FilterKey]any{conditionKey: sub}} - - // check if we have any remaining filters - if len(filter.Conditions) == 0 { - return nil, splitF - } - return filter, splitF -} - // typeJoinOne is the plan node for a type index join // where the root type is the primary in a one-to-one relation request. 
type typeJoinOne struct { @@ -269,19 +237,7 @@ func (p *Planner) makeTypeJoinOne( source planNode, subType *mapper.Select, ) (*typeJoinOne, error) { - // split filter - if scan, ok := source.(*scanNode); ok { - var parentfilter *mapper.Filter - scan.filter, parentfilter = splitFilterByType(scan.filter, subType.Index) - if parentfilter != nil { - if parent.filter == nil { - parent.filter = new(mapper.Filter) - } - parent.filter.Conditions = mergeFilterConditions( - parent.filter.Conditions, parentfilter.Conditions) - } - subType.ShowDeleted = parent.selectReq.ShowDeleted - } + prepareScanNodeFilterForTypeJoin(parent, source, subType) selectPlan, err := p.SubSelect(subType) if err != nil { @@ -372,20 +328,9 @@ func (n *typeJoinOne) Next() (bool, error) { } func (n *typeJoinOne) valuesSecondary(doc core.Doc) (core.Doc, error) { - fkIndex := &mapper.PropertyIndex{ - Index: n.subType.DocumentMap().FirstIndexOfName(n.subTypeFieldName + request.RelatedObjectID), - } - filter := map[connor.FilterKey]any{ - fkIndex: map[connor.FilterKey]any{ - mapper.FilterEqOp: doc.GetKey(), - }, - } - + propIndex := n.subType.DocumentMap().FirstIndexOfName(n.subTypeFieldName + request.RelatedObjectID) // using the doc._key as a filter - err := appendFilterToScanNode(n.subType, filter) - if err != nil { - return core.Doc{}, err - } + setSubTypeFilterToScanNode(n.subType, propIndex, doc.GetKey()) // We have to reset the scan node after appending the new key-filter if err := n.subType.Init(); err != nil { @@ -397,11 +342,11 @@ func (n *typeJoinOne) valuesSecondary(doc core.Doc) (core.Doc, error) { return doc, err } - subdoc := n.subType.Value() - doc.Fields[n.subSelect.Index] = subdoc + subDoc := n.subType.Value() + doc.Fields[n.subSelect.Index] = subDoc if n.secondaryFieldIndex.HasValue() { - doc.Fields[n.secondaryFieldIndex.Value()] = subdoc.GetKey() + doc.Fields[n.secondaryFieldIndex.Value()] = subDoc.GetKey() } return doc, nil @@ -434,7 +379,7 @@ func (n *typeJoinOne) valuesPrimary(doc core.Doc) (core.Doc, error) { // if we don't find any docs from our point span lookup // or if we encounter an error just return the base doc, - // with an empty map for the subdoc + // with an empty map for the subDoc next, err := n.subType.Next() if err != nil { @@ -479,24 +424,47 @@ type typeJoinMany struct { subSelect *mapper.Select } -func (p *Planner) makeTypeJoinMany( +func prepareScanNodeFilterForTypeJoin( parent *selectNode, source planNode, subType *mapper.Select, -) (*typeJoinMany, error) { - // split filter - if scan, ok := source.(*scanNode); ok { - var parentfilter *mapper.Filter - scan.filter, parentfilter = splitFilterByType(scan.filter, subType.Index) - if parentfilter != nil { +) { + subType.ShowDeleted = parent.selectReq.ShowDeleted + + scan, ok := source.(*scanNode) + if !ok || scan.filter == nil { + return + } + + if filter.IsComplex(scan.filter) { + if parent.filter == nil { + parent.filter = mapper.NewFilter() + parent.filter.Conditions = filter.Copy(scan.filter.Conditions) + } else { + parent.filter.Conditions = filter.Merge( + parent.filter.Conditions, scan.filter.Conditions) + } + filter.RemoveField(scan.filter, subType.Field) + } else { + var parentFilter *mapper.Filter + scan.filter, parentFilter = filter.SplitByField(scan.filter, subType.Field) + if parentFilter != nil { if parent.filter == nil { - parent.filter = new(mapper.Filter) + parent.filter = parentFilter + } else { + parent.filter.Conditions = filter.Merge( + parent.filter.Conditions, parentFilter.Conditions) } - parent.filter.Conditions = 
mergeFilterConditions( - parent.filter.Conditions, parentfilter.Conditions) } - subType.ShowDeleted = parent.selectReq.ShowDeleted } +} + +func (p *Planner) makeTypeJoinMany( + parent *selectNode, + source planNode, + subType *mapper.Select, +) (*typeJoinMany, error) { + prepareScanNodeFilterForTypeJoin(parent, source, subType) selectPlan, err := p.SubSelect(subType) if err != nil { @@ -560,26 +528,15 @@ func (n *typeJoinMany) Next() (bool, error) { n.currentValue = n.root.Value() // check if theres an index - // if there is, scan and aggregate resuts + // if there is, scan and aggregate results // if not, then manually scan the subtype table - subdocs := make([]core.Doc, 0) + subDocs := make([]core.Doc, 0) if n.index != nil { // @todo: handle index for one-to-many setup } else { - fkIndex := &mapper.PropertyIndex{ - Index: n.subSelect.FirstIndexOfName(n.rootName + request.RelatedObjectID), - } - filter := map[connor.FilterKey]any{ - fkIndex: map[connor.FilterKey]any{ - mapper.FilterEqOp: n.currentValue.GetKey(), - }, - } - + propIndex := n.subSelect.FirstIndexOfName(n.rootName + request.RelatedObjectID) // using the doc._key as a filter - err := appendFilterToScanNode(n.subType, filter) - if err != nil { - return false, err - } + setSubTypeFilterToScanNode(n.subType, propIndex, n.currentValue.GetKey()) // reset scan node if err := n.subType.Init(); err != nil { @@ -595,12 +552,12 @@ func (n *typeJoinMany) Next() (bool, error) { break } - subdoc := n.subType.Value() - subdocs = append(subdocs, subdoc) + subDoc := n.subType.Value() + subDocs = append(subDocs, subDoc) } } - n.currentValue.Fields[n.subSelect.Index] = subdocs + n.currentValue.Fields[n.subSelect.Index] = subDocs return true, nil } @@ -614,53 +571,35 @@ func (n *typeJoinMany) Close() error { func (n *typeJoinMany) Source() planNode { return n.root } -func appendFilterToScanNode(plan planNode, filterCondition map[connor.FilterKey]any) error { - switch node := plan.(type) { - case *scanNode: - filter := node.filter - if filter == nil && len(filterCondition) > 0 { - filter = mapper.NewFilter() - } - - filter.Conditions = mergeFilterConditions(filter.Conditions, filterCondition) - - node.filter = filter - case nil: - return nil - default: - return appendFilterToScanNode(node.Source(), filterCondition) +func setSubTypeFilterToScanNode(plan planNode, propIndex int, key string) { + scan := getScanNode(plan) + if scan == nil { + return } - return nil -} -// merge into dest with src, return dest -func mergeFilterConditions(dest map[connor.FilterKey]any, src map[connor.FilterKey]any) map[connor.FilterKey]any { - if dest == nil { - dest = make(map[connor.FilterKey]any) + if scan.filter == nil { + scan.filter = mapper.NewFilter() } - // merge filter conditions - for k, v := range src { - indexKey, isIndexKey := k.(*mapper.PropertyIndex) - if !isIndexKey { - continue - } - removeConditionIndex(indexKey, dest) - dest[k] = v + + propertyIndex := &mapper.PropertyIndex{Index: propIndex} + filterConditions := map[connor.FilterKey]any{ + propertyIndex: map[connor.FilterKey]any{ + mapper.FilterEqOp: key, + }, } - return dest + + filter.RemoveField(scan.filter, mapper.Field{Index: propIndex}) + scan.filter.Conditions = filter.Merge(scan.filter.Conditions, filterConditions) } -func removeConditionIndex( - key *mapper.PropertyIndex, - filterConditions map[connor.FilterKey]any, -) (bool, any) { - for targetKey, clause := range filterConditions { - if indexKey, isIndexKey := targetKey.(*mapper.PropertyIndex); isIndexKey { - if key.Index == 
indexKey.Index { - delete(filterConditions, targetKey) - return true, clause - } +func getScanNode(plan planNode) *scanNode { + node := plan + for node != nil { + scanNode, ok := node.(*scanNode) + if ok { + return scanNode } + node = node.Source() } - return false, nil + return nil } diff --git a/tests/integration/query/one_to_many/with_filter_test.go b/tests/integration/query/one_to_many/with_filter_test.go index 72f62d6abd..322f1581bc 100644 --- a/tests/integration/query/one_to_many/with_filter_test.go +++ b/tests/integration/query/one_to_many/with_filter_test.go @@ -17,274 +17,442 @@ import ( ) func TestQueryOneToManyWithNumericGreaterThanFilterOnParent(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-many relation query from the many side, simple filter", - Request: `query { - Author(filter: {age: {_gt: 63}}) { - name - age - published { - name - rating - } - } - }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 4.9, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, - `{ + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ "name": "A Time for Mercy", "rating": 4.5, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, - `{ + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ "name": "Theif Lord", "rating": 4.8, "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" }`, }, - //authors - 1: { + testUtils.CreateDoc{ + CollectionID: 1, // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + Doc: `{ "name": "John Grisham", "age": 65, "verified": true }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 - `{ + Doc: `{ "name": "Cornelia Funke", "age": 62, "verified": false }`, }, - }, - Results: []map[string]any{ - { - "name": "John Grisham", - "age": uint64(65), - "published": []map[string]any{ - { - "name": "Painted House", - "rating": 4.9, - }, + testUtils.Request{ + Request: `query { + Author(filter: {age: {_gt: 63}}) { + name + age + published { + name + rating + } + } + }`, + Results: []map[string]any{ { - "name": "A Time for Mercy", - "rating": 4.5, + "name": "John Grisham", + "age": uint64(65), + "published": []map[string]any{ + { + "name": "Painted House", + "rating": 4.9, + }, + { + "name": "A Time for Mercy", + "rating": 4.5, + }, + }, }, }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } func TestQueryOneToManyWithNumericGreaterThanChildFilterOnParentWithUnrenderedChild(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-many relation query from the many side, simple filter", - Request: `query { - Author(filter: {published: {rating: {_gt: 4.8}}}) { - name - } - }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 4.9, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, - `{ + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ "name": "A Time for Mercy", "rating": 4.5, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, - `{ + }, + testUtils.CreateDoc{ + 
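// --- Editor's sketch (annotation, not part of the patch) ---------------------
// The test rewrites in this file move to the action-based testUtils.TestCase
// layout: schema setup, document creation and the request/assertion become
// explicit actions instead of the RequestTestCase Docs/Request/Results fields.
// A minimal case in that shape, reusing the surrounding schema name and author
// document; treat it as a sketch of the pattern rather than an added test.
func TestMinimalActionBasedCase(t *testing.T) {
	test := testUtils.TestCase{
		Description: "Minimal action-based test case",
		Actions: []any{
			testUtils.SchemaUpdate{
				Schema: bookAuthorGQLSchema,
			},
			testUtils.CreateDoc{
				CollectionID: 1,
				Doc: `{
					"name": "John Grisham",
					"age": 65,
					"verified": true
				}`,
			},
			testUtils.Request{
				Request: `query {
					Author {
						name
					}
				}`,
				Results: []map[string]any{
					{"name": "John Grisham"},
				},
			},
		},
	}
	testUtils.ExecuteTestCase(t, test)
}
// ------------------------------------------------------------------------------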
CollectionID: 0, + Doc: `{ "name": "Theif Lord", "rating": 4.8, "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" }`, }, - //authors - 1: { + testUtils.CreateDoc{ + CollectionID: 1, // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + Doc: `{ "name": "John Grisham", "age": 65, "verified": true }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 - `{ + Doc: `{ "name": "Cornelia Funke", "age": 62, "verified": false }`, }, - }, - Results: []map[string]any{ - { - "name": "John Grisham", + testUtils.Request{ + Request: `query { + Author(filter: {published: {rating: {_gt: 4.8}}, age: {_gt: 63}}) { + name + } + }`, + Results: []map[string]any{ + { + "name": "John Grisham", + }, + }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } func TestQueryOneToManyWithNumericGreaterThanFilterOnParentAndChild(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-many relation query from the many side, simple filter on root and sub type", - Request: `query { - Author(filter: {age: {_gt: 63}}) { - name - age - published(filter: {rating: {_gt: 4.6}}) { - name - rating - } - } - }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 4.9, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, - `{ + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ "name": "A Time for Mercy", "rating": 4.5, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, - `{ + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ "name": "Theif Lord", "rating": 4.8, "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" }`, }, - //authors - 1: { + testUtils.CreateDoc{ + CollectionID: 1, // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + Doc: `{ "name": "John Grisham", "age": 65, "verified": true }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 - `{ + Doc: `{ "name": "Cornelia Funke", "age": 62, "verified": false }`, }, - }, - Results: []map[string]any{ - { - "name": "John Grisham", - "age": uint64(65), - "published": []map[string]any{ + testUtils.Request{ + Request: `query { + Author(filter: {age: {_gt: 63}}) { + name + age + published(filter: {rating: {_gt: 4.6}}) { + name + rating + } + } + }`, + Results: []map[string]any{ { - "name": "Painted House", - "rating": 4.9, + "name": "John Grisham", + "age": uint64(65), + "published": []map[string]any{ + { + "name": "Painted House", + "rating": 4.9, + }, + }, }, }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } func TestQueryOneToManyWithMultipleAliasedFilteredChildren(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-many relation query from the many side, simple filter on root and sub type", - Request: `query { - Author { - name - age - p1: published(filter: {rating: {_gt: 4.6}}) { - name - rating - } - p2: published(filter: {rating: {_lt: 4.6}}) { - name - rating - } - } - }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 
4.9, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, - `{ + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ "name": "A Time for Mercy", "rating": 4.5, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, - `{ + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ "name": "Theif Lord", "rating": 4.8, "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" }`, }, - //authors - 1: { + testUtils.CreateDoc{ + CollectionID: 1, // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + Doc: `{ "name": "John Grisham", "age": 65, "verified": true }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 - `{ + Doc: `{ "name": "Cornelia Funke", "age": 62, "verified": false }`, }, - }, - Results: []map[string]any{ - { - "name": "John Grisham", - "age": uint64(65), - "p1": []map[string]any{ + testUtils.Request{ + Request: `query { + Author { + name + age + p1: published(filter: {rating: {_gt: 4.6}}) { + name + rating + } + p2: published(filter: {rating: {_lt: 4.6}}) { + name + rating + } + } + }`, + Results: []map[string]any{ { - "name": "Painted House", - "rating": 4.9, + "name": "John Grisham", + "age": uint64(65), + "p1": []map[string]any{ + { + "name": "Painted House", + "rating": 4.9, + }, + }, + "p2": []map[string]any{ + { + "name": "A Time for Mercy", + "rating": 4.5, + }, + }, }, - }, - "p2": []map[string]any{ { - "name": "A Time for Mercy", - "rating": 4.5, + "name": "Cornelia Funke", + "age": uint64(62), + "p1": []map[string]any{ + { + "name": "Theif Lord", + "rating": 4.8, + }, + }, + "p2": []map[string]any{}, }, }, }, - { - "name": "Cornelia Funke", - "age": uint64(62), - "p1": []map[string]any{ + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryOneToManyWithCompoundOperatorInFilterAndRelation(t *testing.T) { + test := testUtils.TestCase{ + Description: "One-to-many relation query filter with compound operator and relation", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Painted House", + "rating": 4.9, + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "A Time for Mercy", + "rating": 4.5, + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Theif Lord", + "rating": 4.8, + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "The Lord of the Rings", + "rating": 5.0, + "author_id": "bae-61d279c1-eab9-56ec-8654-dce0324ebfda" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ + "name": "John Grisham", + "age": 65, + "verified": true + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 + Doc: `{ + "name": "Cornelia Funke", + "age": 62, + "verified": false + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-61d279c1-eab9-56ec-8654-dce0324ebfda + Doc: `{ + "name": "John Tolkien", + "age": 70, + "verified": true + }`, + }, + testUtils.Request{ + Request: `query { + Author(filter: {_or: [ + {_and: [ + {published: {rating: {_lt: 5.0}}}, + {published: {rating: {_gt: 4.8}}} + ]}, + {_and: [ + {age: {_le: 65}}, + {published: {name: {_like: "%Lord%"}}} + ]}, + ]}) { + name + } + }`, + Results: []map[string]any{ + { + "name": "John Grisham", + }, { - "name": "Theif Lord", - "rating": 4.8, + 
"name": "Cornelia Funke", }, }, - "p2": []map[string]any{}, + }, + testUtils.Request{ + Request: `query { + Author(filter: {_and: [ + { _not: {published: {rating: {_gt: 4.8}}}}, + { _not: {published: {rating: {_lt: 4.8}}}} + ]}) { + name + } + }`, + Results: []map[string]any{{ + "name": "Cornelia Funke", + }}, }, }, } - - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } diff --git a/tests/integration/query/one_to_many_to_one/with_filter_test.go b/tests/integration/query/one_to_many_to_one/with_filter_test.go index 7f15fe58a0..99890196bb 100644 --- a/tests/integration/query/one_to_many_to_one/with_filter_test.go +++ b/tests/integration/query/one_to_many_to_one/with_filter_test.go @@ -284,3 +284,122 @@ func TestOneToManyToOneWithTwoLevelDeepFilter(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestOneToManyToOneWithCompoundOperatorInFilterAndRelation(t *testing.T) { + test := testUtils.TestCase{ + Description: "1-N-1 two level deep filter with compound operator and relation", + Actions: []any{ + gqlSchemaOneToManyToOne(), + createDocsWith6BooksAnd5Publishers(), + testUtils.CreateDoc{ + CollectionID: 0, + // bae-61d279c1-eab9-56ec-8654-dce0324ebfda + Doc: `{ + "name": "John Tolkien", + "age": 70, + "verified": true + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-0718e995-e7b5-55b1-874a-8f7d956be53c + Doc: `{ + "name": "The Lord of the Rings", + "rating": 5.0, + "author_id": "bae-61d279c1-eab9-56ec-8654-dce0324ebfda" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 2, + Doc: `{ + "name": "Allen & Unwin", + "address": "1 Allen Ave., Sydney, Australia", + "yearOpened": 1954, + "book_id": "bae-0718e995-e7b5-55b1-874a-8f7d956be53c" + }`, + }, + testUtils.Request{ + Request: `query { + Author (filter: {_and: [ + {age: {_gt: 50}}, + {_or: [ + {book: {publisher: {yearOpened: {_gt: 2020}}}}, + {book: {publisher: {yearOpened: {_lt: 1960}}}} + ]} + ]}){ + name + } + }`, + Results: []map[string]any{ + { + "name": "John Tolkien", + }, + { + "name": "Cornelia Funke", + }, + }, + }, + testUtils.Request{ + Request: `query { + Author (filter: {_and: [ + {_not: {age: {_ge: 70}}}, + {book: {rating: {_gt: 2.5}}}, + {_or: [ + {book: {publisher: {yearOpened: {_le: 2020}}}}, + {_not: {book: {rating: {_le: 4.0}}}} + ]} + ]}){ + name + } + }`, + Results: []map[string]any{ + { + "name": "John Grisham", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestOneToManyToOneWithCompoundOperatorInSubFilterAndRelation(t *testing.T) { + test := testUtils.TestCase{ + Description: "1-N-1 with sub filter with compound operator and relation", + Actions: []any{ + gqlSchemaOneToManyToOne(), + createDocsWith6BooksAnd5Publishers(), + testUtils.Request{ + Request: `query { + Author (filter: {_and: [ + {age: {_gt: 20}}, + {_or: [ + {book: {publisher: {yearOpened: {_lt: 2020}}}}, + {book: {rating: { _lt: 1}}} + ]} + ]}){ + name + book (filter: {_and: [ + {publisher: {yearOpened: {_lt: 2020}}}, + {_or: [ + {rating: { _lt: 3.4}}, + {publisher: {name: {_eq: "Not existing publisher"}}} + ]} + ]}){ + name + } + } + }`, + Results: []map[string]any{{ + "name": "John Grisham", + "book": []map[string]any{{ + "name": "Sooley", + }}, + }}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/one_to_one/with_filter_test.go b/tests/integration/query/one_to_one/with_filter_test.go index 88bc48a03e..a4b6abf6de 100644 --- a/tests/integration/query/one_to_one/with_filter_test.go +++ b/tests/integration/query/one_to_one/with_filter_test.go @@ 
-17,235 +17,478 @@ import ( ) func TestQueryOneToOneWithNumericFilterOnParent(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-one relation query with simple filter on sub type", - Request: `query { - Book { - name - rating - author(filter: {age: {_eq: 65}}) { - name - age - } - } - }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 4.9 }`, }, - //authors - 1: { // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ "name": "John Grisham", "age": 65, "verified": true, "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, }, - }, - Results: []map[string]any{ - { - "name": "Painted House", - "rating": 4.9, - "author": map[string]any{ - "name": "John Grisham", - "age": uint64(65), + testUtils.Request{ + Request: `query { + Book { + name + rating + author(filter: {age: {_eq: 65}}) { + name + age + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "rating": 4.9, + "author": map[string]any{ + "name": "John Grisham", + "age": uint64(65), + }, + }, }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } func TestQueryOneToOneWithStringFilterOnChild(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-one relation query with simple filter on parent", - Request: `query { - Book(filter: {name: {_eq: "Painted House"}}) { - name - rating - author { - name - age - } - } - }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 4.9 }`, }, - //authors - 1: { // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ "name": "John Grisham", "age": 65, "verified": true, "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, }, - }, - Results: []map[string]any{ - { - "name": "Painted House", - "rating": 4.9, - "author": map[string]any{ - "name": "John Grisham", - "age": uint64(65), + testUtils.Request{ + Request: `query { + Book(filter: {name: {_eq: "Painted House"}}) { + name + rating + author { + name + age + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "rating": 4.9, + "author": map[string]any{ + "name": "John Grisham", + "age": uint64(65), + }, + }, }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } func TestQueryOneToOneWithBooleanFilterOnChild(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-one relation query with simple sub filter on child", - Request: `query { - Book(filter: {author: {verified: {_eq: true}}}) { - name - rating - author { - name - age - } - } - }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted 
House", "rating": 4.9 }`, }, - //authors - 1: { // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ "name": "John Grisham", "age": 65, "verified": true, "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, }, - }, - Results: []map[string]any{ - { - "name": "Painted House", - "rating": 4.9, - "author": map[string]any{ - "name": "John Grisham", - "age": uint64(65), + testUtils.Request{ + Request: `query { + Book(filter: {author: {verified: {_eq: true}}}) { + name + rating + author { + name + age + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "rating": 4.9, + "author": map[string]any{ + "name": "John Grisham", + "age": uint64(65), + }, + }, }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } func TestQueryOneToOneWithFilterThroughChildBackToParent(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-one relation query with filter on parent referencing parent through child", - Request: `query { - Book(filter: {author: {published: {rating: {_eq: 4.9}}}}) { - name - rating - author { - name - age - } - } - }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 4.9 }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, // bae-d432bdfb-787d-5a1c-ac29-dc025ab80095 - `{ + Doc: `{ "name": "Theif Lord", "rating": 4.8 }`, }, - //authors - 1: { // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ "name": "John Grisham", "age": 65, "verified": true, "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 - `{ + Doc: `{ "name": "Cornelia Funke", "age": 62, "verified": false, "published_id": "bae-d432bdfb-787d-5a1c-ac29-dc025ab80095" }`, }, - }, - Results: []map[string]any{ - { - "name": "Painted House", - "rating": 4.9, - "author": map[string]any{ - "name": "John Grisham", - "age": uint64(65), + testUtils.Request{ + Request: `query { + Book(filter: {author: {published: {rating: {_eq: 4.9}}}}) { + name + rating + author { + name + age + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "rating": 4.9, + "author": map[string]any{ + "name": "John Grisham", + "age": uint64(65), + }, + }, }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } func TestQueryOneToOneWithBooleanFilterOnChildWithNoSubTypeSelection(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-one relation with simple sub filter on child, but not child selections", - Request: `query { + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ + "name": "Painted House", + "rating": 4.9 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ + "name": "John Grisham", + "age": 65, + "verified": true, + "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" + }`, + }, + testUtils.Request{ + Request: `query { Book(filter: {author: 
{verified: {_eq: true}}}) { name rating } }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Results: []map[string]any{{ + "name": "Painted House", + "rating": 4.9, + }}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryOneToOneWithCompoundAndFilterThatIncludesRelation(t *testing.T) { + test := testUtils.TestCase{ + Description: "One-to-one relation with _and filter that includes relation", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 4.9 }`, }, - //authors - 1: { // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + testUtils.CreateDoc{ + CollectionID: 0, + // bae-f60d6af6-92f7-5f11-9182-1d7273a5a9e8 + Doc: `{ + "name": "Some Book", + "rating": 4.0 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-500a9445-bd90-580e-9191-d2d0ec1a5cf5 + Doc: `{ + "name": "Some Other Book", + "rating": 3.0 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ "name": "John Grisham", "age": 65, "verified": true, "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Some Writer", + "age": 45, + "verified": false, + "published_id": "bae-f60d6af6-92f7-5f11-9182-1d7273a5a9e8" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Some Other Writer", + "age": 30, + "verified": true, + "published_id": "bae-500a9445-bd90-580e-9191-d2d0ec1a5cf5" + }`, + }, + testUtils.Request{ + Request: `query { + Book(filter: {_and: [ + {rating: {_ge: 4.0}}, + {author: {verified: {_eq: true}}} + ]}) { + name + rating + } + }`, + Results: []map[string]any{{ + "name": "Painted House", + "rating": 4.9, + }}, + }, }, - Results: []map[string]any{ - { - "name": "Painted House", - "rating": 4.9, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryOneToOneWithCompoundOrFilterThatIncludesRelation(t *testing.T) { + test := testUtils.TestCase{ + Description: "One-to-one relation with _or filter that includes relation", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ + "name": "Painted House", + "rating": 4.9 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-f60d6af6-92f7-5f11-9182-1d7273a5a9e8 + Doc: `{ + "name": "Some Book", + "rating": 4.0 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-1c890922-ddf9-5820-a888-c7f977848934 + Doc: `{ + "name": "Some Other Book", + "rating": 3.5 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-e8642720-08cb-5f5b-a8d6-7187c444a78d + Doc: `{ + "name": "Yet Another Book", + "rating": 3.0 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ + "name": "John Grisham", + "age": 65, + "verified": true, + "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Some Writer", + "age": 45, + "verified": false, + "published_id": "bae-f60d6af6-92f7-5f11-9182-1d7273a5a9e8" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Some Other Writer", + "age": 35, + "verified": false, + "published_id": "bae-1c890922-ddf9-5820-a888-c7f977848934" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + 
"name": "Yet Another Writer", + "age": 30, + "verified": false, + "published_id": "bae-e8642720-08cb-5f5b-a8d6-7187c444a78d" + }`, + }, + testUtils.Request{ + Request: `query { + Book(filter: {_or: [ + {_and: [ + {rating: {_ge: 4.0}}, + {author: {age: {_le: 45}}} + ]}, + {_and: [ + {rating: {_le: 3.5}}, + {author: {age: {_ge: 35}}} + ]} + ]}) { + name + } + }`, + Results: []map[string]any{ + { + "name": "Some Other Book", + }, + { + "name": "Some Book", + }, + }, + }, + testUtils.Request{ + Request: `query { + Book(filter: {_or: [ + {_not: {author: {age: {_lt: 65}}} }, + {_not: {author: {age: {_gt: 30}}} } + ]}) { + name + } + }`, + Results: []map[string]any{ + { + "name": "Yet Another Book", + }, + { + "name": "Painted House", + }, + }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } diff --git a/tests/integration/query/simple/with_filter/with_not_test.go b/tests/integration/query/simple/with_filter/with_not_test.go index 8ec86c15dd..3b5832bcdb 100644 --- a/tests/integration/query/simple/with_filter/with_not_test.go +++ b/tests/integration/query/simple/with_filter/with_not_test.go @@ -64,6 +64,46 @@ func TestQuerySimple_WithNotEqualToXFilter_NoError(t *testing.T) { executeTestCase(t, test) } +func TestQuerySimple_WithNotAndComparisonXFilter_NoError(t *testing.T) { + test := testUtils.RequestTestCase{ + Description: "Simple query with _not filter with _gt condition)", + Request: `query { + Users(filter: {_not: {Age: {_gt: 20}}}) { + Name + Age + } + }`, + Docs: map[int][]string{ + 0: { + `{ + "Name": "John", + "Age": 21 + }`, + `{ + "Name": "Bob", + "Age": 32 + }`, + `{ + "Name": "Carlo", + "Age": 55 + }`, + `{ + "Name": "Alice", + "Age": 19 + }`, + }, + }, + Results: []map[string]any{ + { + "Name": "Alice", + "Age": uint64(19), + }, + }, + } + + executeTestCase(t, test) +} + func TestQuerySimple_WithNotEqualToXorYFilter_NoError(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with logical compound filter (not)",