Add failing test
trzysiek committed May 22, 2024
1 parent 1e23ae0 commit 6676224
Showing 2 changed files with 28 additions and 5 deletions.
10 changes: 7 additions & 3 deletions quesma/queryparser/aggregation_parser_test.go
@@ -4,6 +4,7 @@ import (
"cmp"
"context"
"github.com/jinzhu/copier"
"github.com/k0kubun/pp"
"github.com/stretchr/testify/assert"
"mitmproxy/quesma/clickhouse"
"mitmproxy/quesma/concurrent"
@@ -577,7 +578,10 @@ func Test2AggregationParserExternalTestcases(t *testing.T) {
allTests = append(allTests, opensearch_visualize.PipelineAggregationTests...)
for i, test := range allTests {
t.Run(test.TestName+"("+strconv.Itoa(i)+")", func(t *testing.T) {
if test.TestName == "Max bucket with some null buckets. Reproduce: Visualize -> Vertical Bar: Metrics: Max Bucket (Aggregation: Date Histogram, Metric: Min)" {
if i != 58 {
t.Skip()
}
if test.TestName == "Max/Sum bucket with some null buckets. Reproduce: Visualize -> Vertical Bar: Metrics: Max (Sum) Bucket (Aggregation: Date Histogram, Metric: Min)" {
t.Skip("Needs to be fixed by keeping last key for every aggregation. Now we sometimes don't know it. Hard to reproduce, leaving it for separate PR")
}
if test.TestName == "complex sum_bucket. Reproduce: Visualize -> Vertical Bar: Metrics: Sum Bucket (Bucket: Date Histogram, Metric: Average), Buckets: X-Asis: Histogram" {
@@ -639,8 +643,8 @@ func Test2AggregationParserExternalTestcases(t *testing.T) {

// probability and seed are present in the random_sampler aggregation. I assume they are not needed, so let's not care about them for now.
acceptableDifference := []string{"doc_count_error_upper_bound", "sum_other_doc_count", "probability", "seed", "bg_count", "doc_count"}
// pp.Println("ACTUAL", actualMinusExpected)
// pp.Println("EXPECTED", expectedMinusActual)
pp.Println("ACTUAL", actualMinusExpected)
pp.Println("EXPECTED", expectedMinusActual)
assert.True(t, util.AlmostEmpty(actualMinusExpected, acceptableDifference))
assert.True(t, util.AlmostEmpty(expectedMinusActual, acceptableDifference))
assert.Contains(t, string(fullResponse), `"value":`+strconv.FormatUint(test.ExpectedResults[0][0].Cols[0].Value.(uint64), 10)) // checks if hits nr is OK
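The assertions above compare the parsed response with the expected one in both directions and accept leftovers only for a fixed list of keys. Here is a hedged sketch of the idea behind util.AlmostEmpty; the real helper lives in the project's util package and may operate on JSON maps rather than flattened key paths, so the names and types below are illustrative only.

package example

import "strings"

// almostEmpty reports whether every entry left over after diffing actual and
// expected responses only involves keys we deliberately ignore, such as
// doc_count_error_upper_bound or random_sampler's "probability" and "seed".
// Illustrative sketch; not the project's implementation.
func almostEmpty(diffPaths []string, acceptableDifference []string) bool {
	for _, path := range diffPaths {
		ignorable := false
		for _, key := range acceptableDifference {
			if strings.Contains(path, key) {
				ignorable = true
				break
			}
		}
		if !ignorable {
			return false
		}
	}
	return true
}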
@@ -2677,7 +2677,7 @@ var PipelineAggregationTests = []testdata.AggregationTestCase{
},
},
{ // [15]
TestName: "Max bucket with some null buckets. Reproduce: Visualize -> Vertical Bar: Metrics: Max Bucket (Aggregation: Date Histogram, Metric: Min)",
TestName: "Max/Sum bucket with some null buckets. Reproduce: Visualize -> Vertical Bar: Metrics: Max (Sum) Bucket (Aggregation: Date Histogram, Metric: Min)",
QueryRequestJson: `
{
"_source": {
@@ -2689,6 +2689,11 @@ var PipelineAggregationTests = []testdata.AggregationTestCase{
"buckets_path": "1-bucket>1-metric"
}
},
"2":{
"sum_bucket": {
"buckets_path": "1-bucket>1-metric"
}
},
"1-bucket": {
"aggs": {
"1-metric": {
@@ -2759,6 +2764,9 @@ var PipelineAggregationTests = []testdata.AggregationTestCase{
],
"value": 121360.0
},
"2": {
"dunno": "check in opensearch and add this"
},
"1-bucket": {
"buckets": [
{
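The new "2" aggregation added to the request is a sum_bucket pipeline aggregation with the same buckets_path (1-bucket>1-metric) as the existing max_bucket, so its value is the sum of that metric across the sibling buckets, with null buckets skipped the same way max_bucket skips them; the "dunno" placeholder above still needs the value OpenSearch actually returns. A minimal sketch of the expected computation follows (illustrative, not the project's implementation).

package example

// sumBucket mirrors what the "2": sum_bucket aggregation should produce: the
// sum of the metric pointed at by buckets_path across all sibling buckets,
// skipping buckets whose metric is null. Illustrative sketch only.
func sumBucket(metricPerBucket []*float64) float64 {
	var sum float64
	for _, v := range metricPerBucket {
		if v == nil { // null bucket: no metric value, skip it
			continue
		}
		sum += *v
	}
	return sum
}

Test [16] below records the analogous sum over its histogram buckets as 212292.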
@@ -2848,7 +2856,7 @@ var PipelineAggregationTests = []testdata.AggregationTestCase{
},
},
{ // [16]
TestName: "Max bucket with some null buckets. Reproduce: Visualize -> Vertical Bar: Metrics: Max Bucket (Aggregation: Histogram, Metric: Max)",
TestName: "Max/Sum bucket with some null buckets. Reproduce: Visualize -> Vertical Bar: Metrics: Max/Sum Bucket (Aggregation: Histogram, Metric: Max)",
QueryRequestJson: `
{
"_source": {
@@ -2860,6 +2868,11 @@ var PipelineAggregationTests = []testdata.AggregationTestCase{
"buckets_path": "1-bucket>1-metric"
}
},
"2":{
"sum_bucket": {
"buckets_path": "1-bucket>1-metric"
}
},
"1-bucket": {
"aggs": {
"1-metric": {
@@ -2930,6 +2943,10 @@ var PipelineAggregationTests = []testdata.AggregationTestCase{
],
"value": 211840
},
"2":
{
"value": 212292
},
"1-bucket": {
"buckets": [
{
@@ -2998,6 +3015,7 @@ var PipelineAggregationTests = []testdata.AggregationTestCase{
model.NewQueryResultCol("count()", 1),
}},
},
{}, // NoDBQuery
},
ExpectedSQLs: []string{
`SELECT count() FROM ` + testdata.QuotedTableName,
Expand All @@ -3010,6 +3028,7 @@ var PipelineAggregationTests = []testdata.AggregationTestCase{
`FROM ` + testdata.QuotedTableName + ` ` +
`GROUP BY ("bytes") ` +
`ORDER BY ("bytes")`,
`NoDBQuery`,
},
},
/* waits for probably a simple filters fix
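The empty {} entry commented NoDBQuery in ExpectedResults, together with the NoDBQuery string in ExpectedSQLs, marks that the sum_bucket pipeline aggregation sends no query of its own to ClickHouse: its value is derived from its sibling aggregation's buckets. A hedged sketch of that convention follows; the struct and helper are illustrative stand-ins, not the project's testdata types.

package example

// aggregationExpectation is an illustrative stand-in for the fixture fields
// above: one expected SQL string and one expected result set per aggregation,
// kept in the same order so the test can zip them together.
type aggregationExpectation struct {
	ExpectedSQL  string
	ExpectedRows []map[string]any
}

// noDBQuery marks a pipeline aggregation (like sum_bucket) that is computed
// from its sibling's buckets and therefore issues no database query: the SQL
// slot holds the "NoDBQuery" sentinel and the result slot stays empty.
func noDBQuery() aggregationExpectation {
	return aggregationExpectation{
		ExpectedSQL:  "NoDBQuery",
		ExpectedRows: nil,
	}
}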
