From 51a740be52926da87e641484cb98e89bc7c309ce Mon Sep 17 00:00:00 2001 From: Jacek Migdal Date: Thu, 12 Sep 2024 13:41:19 +0200 Subject: [PATCH] Unify counts (#758) Two simple changes: - Unify `count()` to `count(*)` - Don't generate order count, reuse count column This optimizes and simplifies queries. Need it, so I can migrate facets to pancakes. The change is mostly mundane updates of tests. --- quesma/model/bucket_aggregations/dateRange.go | 21 -- quesma/model/bucket_aggregations/range.go | 23 -- quesma/model/expr.go | 3 + quesma/optimize/pipeline_test.go | 6 +- .../queryparser/pancake_aggregation_parser.go | 2 +- .../pancake_aggregation_parser_metrics.go | 2 +- .../pancake_sql_query_generation.go | 20 +- .../pancake_sql_query_generation_test.go | 8 +- quesma/quesma/schema_transformer_test.go | 32 +-- quesma/quesma/search_test.go | 6 +- quesma/testdata/aggregation_requests.go | 227 ++++++------------ quesma/testdata/aggregation_requests_2.go | 212 +++++----------- quesma/testdata/clients/ophelia.go | 133 ++++------ quesma/testdata/full_search_requests.go | 4 +- .../kibana-visualize/aggregation_requests.go | 50 ++-- .../aggregation_requests.go | 38 +-- .../pipeline_aggregation_requests.go | 3 +- quesma/testdata/requests.go | 73 +++--- quesma/util/sql_pretty_fmt_test.go | 42 ++-- quesma/util/utils_test.go | 8 +- 20 files changed, 306 insertions(+), 607 deletions(-) diff --git a/quesma/model/bucket_aggregations/dateRange.go b/quesma/model/bucket_aggregations/dateRange.go index d9ce69e1c..87afb2062 100644 --- a/quesma/model/bucket_aggregations/dateRange.go +++ b/quesma/model/bucket_aggregations/dateRange.go @@ -27,27 +27,6 @@ func NewDateTimeInterval(begin, end string) DateTimeInterval { } } -// ToSQLSelectQuery returns count(...) where ... is a condition for the interval, just like we want it in SQL's SELECT -// from elastic docs: Note that this aggregation includes the from value and excludes the to value for each range. -func (interval DateTimeInterval) ToSQLSelectQuery(fieldName string) model.Expr { - if interval.Begin != UnboundedInterval && interval.End != UnboundedInterval { - return model.NewCountFunc(model.NewFunction("if", - model.NewInfixExpr( - model.NewInfixExpr(model.NewColumnRef(fieldName), " >= ", model.NewLiteral(interval.Begin)), - "AND", - model.NewInfixExpr(model.NewColumnRef(fieldName), " < ", model.NewLiteral(interval.End)), - ), - model.NewLiteral(1), model.NewLiteral("NULL"))) - } else if interval.Begin != UnboundedInterval { - return model.NewCountFunc(model.NewFunction("if", - model.NewInfixExpr(model.NewColumnRef(fieldName), " >= ", model.NewLiteral(interval.Begin)), model.NewLiteral(1), model.NewLiteral("NULL"))) - } else if interval.End != UnboundedInterval { - return model.NewCountFunc(model.NewFunction("if", - model.NewInfixExpr(model.NewColumnRef(fieldName), " < ", model.NewLiteral(interval.End)), model.NewLiteral(1), model.NewLiteral("NULL"))) - } - return model.NewCountFunc() -} - // BeginTimestampToSQL returns SQL select for the begin timestamp, and a boolean indicating if the select is needed // We query Clickhouse for this timestamp, as it's defined in Clickhouse's format, e.g. now()-1d. // It's only 1 more field to our SELECT query, so it shouldn't be a performance issue. 
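Note on the two changes before the remaining, mostly mechanical test updates: the first is implemented in quesma/model/expr.go, where NewCountFunc with no arguments now falls back to a wildcard argument, so generated queries render count(*) instead of count(); the second drops the duplicate "aggr__*__order_1" select and orders by the existing "aggr__*__count" column, as the updated expected SQL in the test files below shows. A minimal, self-contained sketch of the first idea follows; the expr/literal/funcExpr types and their render method are hypothetical stand-ins for illustration only, not the real quesma/model API, and only the zero-argument fallback mirrors the patch.

package main

import (
	"fmt"
	"strings"
)

// Simplified stand-ins for the real expression types in quesma/model.
type expr interface{ render() string }

type literal string

func (l literal) render() string { return string(l) }

type funcExpr struct {
	name string
	args []expr
}

func (f funcExpr) render() string {
	parts := make([]string, len(f.args))
	for i, a := range f.args {
		parts[i] = a.render()
	}
	return f.name + "(" + strings.Join(parts, ", ") + ")"
}

// newCountFunc mirrors the patched model.NewCountFunc: with no arguments it
// defaults to a wildcard, so the rendered SQL is count(*) rather than count().
func newCountFunc(args ...expr) funcExpr {
	if len(args) == 0 {
		args = []expr{literal("*")}
	}
	return funcExpr{name: "count", args: args}
}

func main() {
	fmt.Println(newCountFunc().render())                // count(*)
	fmt.Println(newCountFunc(literal(`"a"`)).render())  // count("a")
}

With the helper defaulting to the wildcard, callers that previously built model.NewFunction("count", model.NewLiteral("*")) or a bare model.NewFunction("count") can all use model.NewCountFunc(), which is what the diffs below do.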
diff --git a/quesma/model/bucket_aggregations/range.go b/quesma/model/bucket_aggregations/range.go index 210a8afce..b3b9d5e34 100644 --- a/quesma/model/bucket_aggregations/range.go +++ b/quesma/model/bucket_aggregations/range.go @@ -30,29 +30,6 @@ func (interval Interval) String() string { return interval.floatToString(interval.Begin) + "-" + interval.floatToString(interval.End) } -// ToSQLSelectQuery returns count(...) where ... is a condition for the interval, just like we want it in SQL's SELECT -func (interval Interval) ToSQLSelectQuery(columnExpr model.Expr) model.Expr { - var sqlLeft, sqlRight, sql model.Expr - if !interval.IsOpeningBoundInfinite() { - sqlLeft = model.NewInfixExpr(columnExpr, ">=", model.NewLiteral(interval.Begin)) - } - if !interval.IsClosingBoundInfinite() { - sqlRight = model.NewInfixExpr(columnExpr, "<", model.NewLiteral(interval.End)) - } - switch { - case sqlLeft != nil && sqlRight != nil: - sql = model.NewInfixExpr(sqlLeft, "AND", sqlRight) - case sqlLeft != nil: - sql = sqlLeft - case sqlRight != nil: - sql = sqlRight - default: - return model.NewFunction("count") - } - // count(if(sql, 1, NULL)) - return model.NewFunction("count", model.NewFunction("if", sql, model.NewLiteral(1), model.NewLiteral("NULL"))) -} - func (interval Interval) ToWhereClause(field model.Expr) model.Expr { // returns a condition for the interval, just like we want it in SQL's WHERE var sqlLeft, sqlRight model.Expr if !interval.IsOpeningBoundInfinite() { diff --git a/quesma/model/expr.go b/quesma/model/expr.go index 9aae1352c..564cedc7b 100644 --- a/quesma/model/expr.go +++ b/quesma/model/expr.go @@ -98,6 +98,9 @@ func NewFunction(name string, args ...Expr) FunctionExpr { } func NewCountFunc(args ...Expr) FunctionExpr { + if len(args) == 0 { + args = []Expr{NewWildcardExpr} + } return NewFunction("count", args...) 
} diff --git a/quesma/optimize/pipeline_test.go b/quesma/optimize/pipeline_test.go index 1892888ef..3083640f5 100644 --- a/quesma/optimize/pipeline_test.go +++ b/quesma/optimize/pipeline_test.go @@ -33,7 +33,7 @@ func Test_cacheQueries(t *testing.T) { true, "foo", model.SelectCommand{ - Columns: []model.Expr{model.NewColumnRef("a"), model.NewFunction("count", model.NewColumnRef("*"))}, + Columns: []model.Expr{model.NewColumnRef("a"), model.NewCountFunc()}, FromClause: model.NewTableRef("foo"), GroupBy: []model.Expr{model.NewLiteral(1)}, }, @@ -167,12 +167,12 @@ func Test_dateTrunc(t *testing.T) { "select a, count() from foo group by 1", "foo", model.SelectCommand{ - Columns: []model.Expr{model.NewColumnRef("a"), model.NewFunction("count", model.NewColumnRef("*"))}, + Columns: []model.Expr{model.NewColumnRef("a"), model.NewCountFunc()}, FromClause: model.NewTableRef("foo"), GroupBy: []model.Expr{model.NewLiteral(1)}, }, model.SelectCommand{ - Columns: []model.Expr{model.NewColumnRef("a"), model.NewFunction("count", model.NewColumnRef("*"))}, + Columns: []model.Expr{model.NewColumnRef("a"), model.NewCountFunc()}, FromClause: model.NewTableRef("foo"), GroupBy: []model.Expr{model.NewLiteral(1)}, }, diff --git a/quesma/queryparser/pancake_aggregation_parser.go b/quesma/queryparser/pancake_aggregation_parser.go index b2f30515f..d395f2f70 100644 --- a/quesma/queryparser/pancake_aggregation_parser.go +++ b/quesma/queryparser/pancake_aggregation_parser.go @@ -70,7 +70,7 @@ func (cw *ClickhouseQueryTranslator) PancakeParseAggregationJson(body types.JSON name: PancakeTotalCountMetricName, internalName: "metric__" + PancakeTotalCountMetricName, queryType: typical_queries.Count{}, - selectedColumns: []model.Expr{model.NewFunction("count", model.NewLiteral("*"))}, + selectedColumns: []model.Expr{model.NewCountFunc()}, } pancakeQueries[0].layers[0].currentMetricAggregations = append(pancakeQueries[0].layers[0].currentMetricAggregations, augmentedCountAggregation) diff --git a/quesma/queryparser/pancake_aggregation_parser_metrics.go b/quesma/queryparser/pancake_aggregation_parser_metrics.go index 4dc28e9ad..80a32419c 100644 --- a/quesma/queryparser/pancake_aggregation_parser_metrics.go +++ b/quesma/queryparser/pancake_aggregation_parser_metrics.go @@ -114,7 +114,7 @@ func generateMetricSelectedColumns(ctx context.Context, metricsAggr metricsAggre castLon := model.NewFunction("CAST", lonColumn, model.NewLiteral(fmt.Sprintf("'%s'", "Float"))) result = append(result, model.NewFunction("avgOrNull", castLat)) result = append(result, model.NewFunction("avgOrNull", castLon)) - result = append(result, model.NewFunction("count")) + result = append(result, model.NewCountFunc()) } default: logger.WarnWithCtx(ctx).Msgf("unknown metrics aggregation: %s", metricsAggr.AggrType) diff --git a/quesma/queryparser/pancake_sql_query_generation.go b/quesma/queryparser/pancake_sql_query_generation.go index 6e0140e59..51281268e 100644 --- a/quesma/queryparser/pancake_sql_query_generation.go +++ b/quesma/queryparser/pancake_sql_query_generation.go @@ -80,10 +80,10 @@ func (p *pancakeSqlQueryGenerator) generateMetricSelects(metric *pancakeModelMet return } -func (p *pancakeSqlQueryGenerator) isPartOfGroupBy(column model.Expr, groupByColumns []model.AliasedExpr) *model.AliasedExpr { - for _, groupByColumn := range groupByColumns { - if model.PartlyImplementedIsEqual(column, groupByColumn) { - return &groupByColumn +func (p *pancakeSqlQueryGenerator) isPartOf(column model.Expr, aliasedColumns []model.AliasedExpr) *model.AliasedExpr 
{ + for _, aliasedColumn := range aliasedColumns { + if model.PartlyImplementedIsEqual(column, aliasedColumn) { + return &aliasedColumn } } return nil @@ -103,7 +103,7 @@ func (p *pancakeSqlQueryGenerator) isPartOfOrderBy(alias model.AliasedExpr, orde func (p *pancakeSqlQueryGenerator) addPotentialParentCount(bucketAggregation *pancakeModelBucketAggregation, groupByColumns []model.AliasedExpr) []model.AliasedExpr { if query_util.IsAnyKindOfTerms(bucketAggregation.queryType) { parentCountColumn := model.NewWindowFunction("sum", - []model.Expr{model.NewFunction("count", model.NewLiteral("*"))}, + []model.Expr{model.NewCountFunc()}, p.generatePartitionBy(groupByColumns), []model.OrderByExpr{}) parentCountAliasedColumn := model.NewAliasedExpr(parentCountColumn, bucketAggregation.InternalNameForParentCount()) return []model.AliasedExpr{parentCountAliasedColumn} @@ -126,12 +126,12 @@ func (p *pancakeSqlQueryGenerator) generateBucketSqlParts(bucketAggregation *pan // build count for aggr var countColumn model.Expr if hasMoreBucketAggregations { - partCountColumn := model.NewFunction("count", model.NewLiteral("*")) + partCountColumn := model.NewCountFunc() countColumn = model.NewWindowFunction("sum", []model.Expr{partCountColumn}, p.generatePartitionBy(append(groupByColumns, addGroupBys...)), []model.OrderByExpr{}) } else { - countColumn = model.NewFunction("count", model.NewLiteral("*")) + countColumn = model.NewCountFunc() } countAliasedColumn := model.NewAliasedExpr(countColumn, bucketAggregation.InternalNameForCount()) addSelectColumns = append(addSelectColumns, countAliasedColumn) @@ -143,7 +143,9 @@ func (p *pancakeSqlQueryGenerator) generateBucketSqlParts(bucketAggregation *pan columnId := len(bucketAggregation.selectedColumns) + i direction := orderBy.Direction - rankColumn := p.isPartOfGroupBy(orderBy.Expr, append(groupByColumns, addGroupBys...)) + rankColumn := p.isPartOf(orderBy.Expr, append(append(groupByColumns, addGroupBys...), + // We need count before window functions + model.NewAliasedExpr(model.NewCountFunc(), bucketAggregation.InternalNameForCount()))) if rankColumn != nil { // rank is part of group by if direction == model.DefaultOrder { direction = model.AscOrder // primarily needed for tests @@ -323,7 +325,7 @@ func (p *pancakeSqlQueryGenerator) generateSelectCommand(aggregation *pancakeMod combinatorWhere = append(combinatorWhere, subGroup.WhereClause) for _, selectAfter := range selectsAfter { var withCombinator model.Expr - if p.isPartOfGroupBy(selectAfter.Expr, groupBys) != nil { + if p.isPartOf(selectAfter.Expr, groupBys) != nil { withCombinator = selectAfter.Expr } else { withIfCombinator, err := p.addIfCombinator(selectAfter.Expr, subGroup.WhereClause) diff --git a/quesma/queryparser/pancake_sql_query_generation_test.go b/quesma/queryparser/pancake_sql_query_generation_test.go index b40a9acef..84b5c0fff 100644 --- a/quesma/queryparser/pancake_sql_query_generation_test.go +++ b/quesma/queryparser/pancake_sql_query_generation_test.go @@ -237,11 +237,10 @@ func TestPancakeQueryGeneration_halfpancake(t *testing.T) { `, sql: ` SELECT sum(count(*)) OVER () AS "aggr__0__parent_count", - "host.name" AS "aggr__0__key_0", count(*) AS "aggr__0__count", - count() AS "aggr__0__order_1" + "host.name" AS "aggr__0__key_0", count(*) AS "aggr__0__count" FROM ` + TableName + ` GROUP BY "host.name" AS "aggr__0__key_0" -ORDER BY "aggr__0__order_1" DESC, "aggr__0__key_0" ASC +ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC LIMIT 4`, // -- we added one more as filtering nulls happens 
during rendering }, @@ -268,11 +267,10 @@ LIMIT 4`, // -- we added one more as filtering nulls happens during rendering ` SELECT sum(count(*)) OVER () AS "aggr__0__parent_count", "host.name" AS "aggr__0__key_0", count(*) AS "aggr__0__count", - count() AS "aggr__0__order_1", avgOrNull("bytes_gauge") AS "metric__0__2_col_0" FROM ` + TableName + ` GROUP BY "host.name" AS "aggr__0__key_0" -ORDER BY "aggr__0__order_1" DESC, "aggr__0__key_0" ASC +ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC LIMIT 4`, // we increased limit by 1 to allow filtering of nulls druing json rendering }, } diff --git a/quesma/quesma/schema_transformer_test.go b/quesma/quesma/schema_transformer_test.go index 026a4f5d3..1fb517ee2 100644 --- a/quesma/quesma/schema_transformer_test.go +++ b/quesma/quesma/schema_transformer_test.go @@ -473,7 +473,7 @@ func Test_arrayType(t *testing.T) { FromClause: model.NewTableRef("kibana_sample_data_ecommerce"), Columns: []model.Expr{ model.NewColumnRef("order_date"), - model.NewFunction("count"), + model.NewCountFunc(), }, WhereClause: model.NewInfixExpr( model.NewColumnRef("products::name"), @@ -489,7 +489,7 @@ func Test_arrayType(t *testing.T) { FromClause: model.NewTableRef("kibana_sample_data_ecommerce"), Columns: []model.Expr{ model.NewColumnRef("order_date"), - model.NewFunction("count"), + model.NewCountFunc(), }, WhereClause: model.NewFunction( "arrayExists", @@ -511,7 +511,7 @@ func Test_arrayType(t *testing.T) { FromClause: model.NewTableRef("kibana_sample_data_ecommerce"), Columns: []model.Expr{ model.NewColumnRef("order_date"), - model.NewFunction("count"), + model.NewCountFunc(), }, WhereClause: model.NewInfixExpr( model.NewColumnRef("products::sku"), @@ -527,7 +527,7 @@ func Test_arrayType(t *testing.T) { FromClause: model.NewTableRef("kibana_sample_data_ecommerce"), Columns: []model.Expr{ model.NewColumnRef("order_date"), - model.NewFunction("count"), + model.NewCountFunc(), }, WhereClause: model.NewFunction( "has", @@ -687,14 +687,14 @@ func TestApplyPhysicalFromExpression(t *testing.T) { FromClause: model.NewTableRef(model.SingleTableNamePlaceHolder), Columns: []model.Expr{ model.NewColumnRef("a"), - model.NewFunction("count"), + model.NewCountFunc(), }, }, model.SelectCommand{ FromClause: model.NewTableRef("test"), Columns: []model.Expr{ model.NewColumnRef("a"), - model.NewFunction("count"), + model.NewCountFunc(), }, }, }, @@ -705,7 +705,7 @@ func TestApplyPhysicalFromExpression(t *testing.T) { FromClause: model.NewTableRef(model.SingleTableNamePlaceHolder), Columns: []model.Expr{ model.NewColumnRef("a"), - model.NewFunction("count"), + model.NewCountFunc(), }, NamedCTEs: []*model.CTE{ { @@ -723,7 +723,7 @@ func TestApplyPhysicalFromExpression(t *testing.T) { FromClause: model.NewTableRef("test"), Columns: []model.Expr{ model.NewColumnRef("a"), - model.NewFunction("count"), + model.NewCountFunc(), }, NamedCTEs: []*model.CTE{ { @@ -745,7 +745,7 @@ func TestApplyPhysicalFromExpression(t *testing.T) { FromClause: model.NewTableRef(model.SingleTableNamePlaceHolder), Columns: []model.Expr{ model.NewColumnRef("order_date"), - model.NewFunction("count"), + model.NewCountFunc(), }, NamedCTEs: []*model.CTE{ { @@ -763,7 +763,7 @@ func TestApplyPhysicalFromExpression(t *testing.T) { FromClause: model.NewTableRef("test"), Columns: []model.Expr{ model.NewColumnRef("order_date"), - model.NewFunction("count"), + model.NewCountFunc(), }, NamedCTEs: []*model.CTE{ { @@ -822,7 +822,7 @@ func TestFullTextFields(t *testing.T) { FromClause: model.NewTableRef("test"), Columns: 
[]model.Expr{ model.NewColumnRef("a"), - model.NewFunction("count"), + model.NewCountFunc(), }, WhereClause: model.NewInfixExpr(model.NewColumnRef(model.FullTextFieldNamePlaceHolder), "=", model.NewLiteral("foo")), }, @@ -830,7 +830,7 @@ func TestFullTextFields(t *testing.T) { FromClause: model.NewTableRef("test"), Columns: []model.Expr{ model.NewColumnRef("a"), - model.NewFunction("count"), + model.NewCountFunc(), }, WhereClause: model.NewLiteral(false), }, @@ -843,7 +843,7 @@ func TestFullTextFields(t *testing.T) { FromClause: model.NewTableRef("test"), Columns: []model.Expr{ model.NewColumnRef("a"), - model.NewFunction("count"), + model.NewCountFunc(), }, WhereClause: model.NewInfixExpr(model.NewColumnRef(model.FullTextFieldNamePlaceHolder), "=", model.NewLiteral("foo")), }, @@ -851,7 +851,7 @@ func TestFullTextFields(t *testing.T) { FromClause: model.NewTableRef("test"), Columns: []model.Expr{ model.NewColumnRef("a"), - model.NewFunction("count"), + model.NewCountFunc(), }, WhereClause: model.NewInfixExpr(model.NewColumnRef("b"), "=", model.NewLiteral("foo")), }, @@ -864,7 +864,7 @@ func TestFullTextFields(t *testing.T) { FromClause: model.NewTableRef("test"), Columns: []model.Expr{ model.NewColumnRef("a"), - model.NewFunction("count"), + model.NewCountFunc(), }, WhereClause: model.NewInfixExpr(model.NewColumnRef(model.FullTextFieldNamePlaceHolder), "=", model.NewLiteral("foo")), }, @@ -872,7 +872,7 @@ func TestFullTextFields(t *testing.T) { FromClause: model.NewTableRef("test"), Columns: []model.Expr{ model.NewColumnRef("a"), - model.NewFunction("count"), + model.NewCountFunc(), }, WhereClause: model.Or([]model.Expr{ model.NewInfixExpr(model.NewColumnRef("a"), "=", model.NewLiteral("foo")), diff --git a/quesma/quesma/search_test.go b/quesma/quesma/search_test.go index fec1e91fe..a695c5a59 100644 --- a/quesma/quesma/search_test.go +++ b/quesma/quesma/search_test.go @@ -464,7 +464,7 @@ func TestNumericFacetsQueries(t *testing.T) { } // count, present in all tests - mock.ExpectQuery(`SELECT count\(\) FROM ` + tableName).WillReturnRows(sqlmock.NewRows([]string{"count"})) + mock.ExpectQuery(`SELECT count\(\*\) FROM ` + tableName).WillReturnRows(sqlmock.NewRows([]string{"count"})) // Don't care about the query's SQL in this test, it's thoroughly tested in different tests, thus "" mock.ExpectQuery("").WillReturnRows(returnedBuckets) @@ -530,7 +530,7 @@ func TestSearchTrackTotalCount(t *testing.T) { }) test := func(t *testing.T, handlerName string, testcase testdata.FullSearchTestCase) { - db, mock := util.InitSqlMockWithPrettyPrint(t, false) + db, mock := util.InitSqlMockWithPrettySqlAndPrint(t, false) defer db.Close() lm := clickhouse.NewLogManagerWithConnection(db, table) managementConsole := ui.NewQuesmaManagementConsole(&DefaultConfig, nil, nil, make(<-chan logger.LogWithLevel, 50000), telemetry.NewPhoneHomeEmptyAgent(), nil) @@ -540,7 +540,7 @@ func TestSearchTrackTotalCount(t *testing.T) { for _, row := range testcase.ExpectedSQLResults[i] { rows.AddRow(row.Cols[0].Value) } - mock.ExpectQuery(testdata.EscapeBrackets(sql)).WillReturnRows(rows) + mock.ExpectQuery(sql).WillReturnRows(rows) } queryRunner := NewQueryRunner(lm, &DefaultConfig, nil, managementConsole, s, ab_testing.NewEmptySender()) diff --git a/quesma/testdata/aggregation_requests.go b/quesma/testdata/aggregation_requests.go index 1554f0c79..a405a551c 100644 --- a/quesma/testdata/aggregation_requests.go +++ b/quesma/testdata/aggregation_requests.go @@ -470,7 +470,6 @@ var AggregationTests = []AggregationTestCase{ 
model.NewQueryResultCol("aggr__0__parent_count", uint64(2200)), model.NewQueryResultCol("aggr__0__key_0", "No Delay"), model.NewQueryResultCol("aggr__0__count", uint64(1647)), - model.NewQueryResultCol("aggr__0__order_1", uint64(1647)), model.NewQueryResultCol("aggr__0__1__key_0", int64(1706875200000/1000/60/60/3)), model.NewQueryResultCol("aggr__0__1__count", uint64(2)), }}, @@ -478,7 +477,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__0__parent_count", uint64(2200)), model.NewQueryResultCol("aggr__0__key_0", "No Delay"), model.NewQueryResultCol("aggr__0__count", uint64(1647)), - model.NewQueryResultCol("aggr__0__order_1", uint64(1647)), model.NewQueryResultCol("aggr__0__1__key_0", int64(1706886000000/1000/60/60/3)), model.NewQueryResultCol("aggr__0__1__count", uint64(27)), }}, @@ -486,7 +484,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__0__parent_count", uint64(2200)), model.NewQueryResultCol("aggr__0__key_0", "No Delay"), model.NewQueryResultCol("aggr__0__count", uint64(1647)), - model.NewQueryResultCol("aggr__0__order_1", uint64(1647)), model.NewQueryResultCol("aggr__0__1__key_0", int64(1706896800000/1000/60/60/3)), model.NewQueryResultCol("aggr__0__1__count", uint64(34)), }}, @@ -494,7 +491,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__0__parent_count", uint64(2200)), model.NewQueryResultCol("aggr__0__key_0", "Security Delay"), model.NewQueryResultCol("aggr__0__count", uint64(45)), - model.NewQueryResultCol("aggr__0__order_1", uint64(45)), model.NewQueryResultCol("aggr__0__1__key_0", int64(1706875200000/1000/60/60/3)), model.NewQueryResultCol("aggr__0__1__count", uint64(0)), }}, @@ -502,18 +498,17 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__0__parent_count", uint64(2200)), model.NewQueryResultCol("aggr__0__key_0", "Security Delay"), model.NewQueryResultCol("aggr__0__count", uint64(45)), - model.NewQueryResultCol("aggr__0__order_1", uint64(45)), model.NewQueryResultCol("aggr__0__1__key_0", int64(1706886000000/1000/60/60/3)), model.NewQueryResultCol("aggr__0__1__count", uint64(2)), }}, }, ExpectedPancakeSQL: ` SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__count", - "aggr__0__order_1", "aggr__0__1__key_0", "aggr__0__1__count" + "aggr__0__1__key_0", "aggr__0__1__count" FROM ( SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__count", - "aggr__0__order_1", "aggr__0__1__key_0", "aggr__0__1__count", - dense_rank() OVER (ORDER BY "aggr__0__order_1" DESC, "aggr__0__key_0" ASC) + "aggr__0__1__key_0", "aggr__0__1__count", + dense_rank() OVER (ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC) AS "aggr__0__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__0__key_0" ORDER BY "aggr__0__1__key_0" ASC) AS "aggr__0__1__order_1_rank" @@ -521,7 +516,6 @@ var AggregationTests = []AggregationTestCase{ SELECT sum(count(*)) OVER () AS "aggr__0__parent_count", "FlightDelayType" AS "aggr__0__key_0", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__count", - sum(count()) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__order_1", toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__0__1__key_0", count(*) AS "aggr__0__1__count" @@ -739,21 +733,18 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__suggestions__parent_count", uint64(2200)), model.NewQueryResultCol("aggr__suggestions__key_0", "Rome"), 
model.NewQueryResultCol("aggr__suggestions__count", uint64(73)), - model.NewQueryResultCol("aggr__suggestions__order_1", uint64(73)), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("metric__unique_terms_col_0", 143), model.NewQueryResultCol("aggr__suggestions__parent_count", uint64(2200)), model.NewQueryResultCol("aggr__suggestions__key_0", "Bogota"), model.NewQueryResultCol("aggr__suggestions__count", uint64(44)), - model.NewQueryResultCol("aggr__suggestions__order_1", uint64(44)), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("metric__unique_terms_col_0", 143), model.NewQueryResultCol("aggr__suggestions__parent_count", uint64(2200)), model.NewQueryResultCol("aggr__suggestions__key_0", "Milan"), model.NewQueryResultCol("aggr__suggestions__count", uint64(32)), - model.NewQueryResultCol("aggr__suggestions__order_1", uint64(32)), }}, }, ExpectedPancakeSQL: ` @@ -761,13 +752,12 @@ var AggregationTests = []AggregationTestCase{ "metric__unique_terms_col_0", sum(count(*)) OVER () AS "aggr__suggestions__parent_count", "OriginCityName" AS "aggr__suggestions__key_0", - count(*) AS "aggr__suggestions__count", - count() AS "aggr__suggestions__order_1" + count(*) AS "aggr__suggestions__count" FROM ` + TableName + ` WHERE ("timestamp">=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z') AND "timestamp"<=parseDateTime64BestEffort('2024-02-09T13:47:16.029Z')) GROUP BY "OriginCityName" AS "aggr__suggestions__key_0" - ORDER BY "aggr__suggestions__order_1" DESC, "aggr__suggestions__key_0" ASC + ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC LIMIT 11`, }, { // [5] @@ -1321,7 +1311,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__origins__parent_count", uint64(13014)), model.NewQueryResultCol("aggr__origins__key_0", "UIO"), model.NewQueryResultCol("aggr__origins__count", int64(283)), - model.NewQueryResultCol("aggr__origins__order_1", int64(283)), model.NewQueryResultCol("aggr__origins__distinations__key_0", "EZE"), model.NewQueryResultCol("aggr__origins__distinations__count", int64(21)), model.NewQueryResultCol("aggr__origins__distinations__order_1", int64(21)), @@ -1337,7 +1326,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__origins__parent_count", uint64(13014)), model.NewQueryResultCol("aggr__origins__key_0", "UIO"), model.NewQueryResultCol("aggr__origins__count", int64(283)), - model.NewQueryResultCol("aggr__origins__order_1", int64(283)), model.NewQueryResultCol("aggr__origins__distinations__key_0", "UIO"), model.NewQueryResultCol("aggr__origins__distinations__count", int64(12)), model.NewQueryResultCol("aggr__origins__distinations__order_1", int64(12)), @@ -1353,7 +1341,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__origins__parent_count", uint64(13014)), model.NewQueryResultCol("aggr__origins__key_0", "DLH"), model.NewQueryResultCol("aggr__origins__count", int64(15)), - model.NewQueryResultCol("aggr__origins__order_1", int64(15)), model.NewQueryResultCol("aggr__origins__distinations__key_0", "YUL"), model.NewQueryResultCol("aggr__origins__distinations__count", int64(11)), model.NewQueryResultCol("aggr__origins__distinations__order_1", int64(11)), @@ -1369,7 +1356,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__origins__parent_count", uint64(13014)), model.NewQueryResultCol("aggr__origins__key_0", "DLH"), model.NewQueryResultCol("aggr__origins__count", int64(15)), - model.NewQueryResultCol("aggr__origins__order_1", 
int64(15)), model.NewQueryResultCol("aggr__origins__distinations__key_0", "EZE"), model.NewQueryResultCol("aggr__origins__distinations__count", int64(10)), model.NewQueryResultCol("aggr__origins__distinations__order_1", int64(10)), @@ -1390,7 +1376,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__origins__parent_count", uint64(13014)), model.NewQueryResultCol("aggr__origins__key_0", "UIO"), model.NewQueryResultCol("aggr__origins__count", int64(283)), - model.NewQueryResultCol("aggr__origins__order_1", int64(283)), model.NewQueryResultCol("top_hits__origins__originLocation_col_0", `{ "lat": "-0.129166667", "lon": "-78.3575" @@ -1404,7 +1389,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__origins__parent_count", uint64(13014)), model.NewQueryResultCol("aggr__origins__key_0", "DLH"), model.NewQueryResultCol("aggr__origins__count", int64(15)), - model.NewQueryResultCol("aggr__origins__order_1", int64(15)), model.NewQueryResultCol("top_hits__origins__originLocation_col_0", `{ "lat": "46.84209824", "lon": "-92.19360352" @@ -1418,22 +1402,18 @@ var AggregationTests = []AggregationTestCase{ ExpectedPancakeSQL: ` WITH quesma_top_hits_group_table AS ( SELECT "aggr__origins__parent_count", "aggr__origins__key_0", - "aggr__origins__count", "aggr__origins__order_1", - "aggr__origins__distinations__parent_count", + "aggr__origins__count", "aggr__origins__distinations__parent_count", "aggr__origins__distinations__key_0", "aggr__origins__distinations__count", - "aggr__origins__distinations__order_1", "aggr__origins__order_1_rank", - "aggr__origins__distinations__order_1_rank" + "aggr__origins__order_1_rank", "aggr__origins__distinations__order_1_rank" FROM ( SELECT "aggr__origins__parent_count", "aggr__origins__key_0", - "aggr__origins__count", "aggr__origins__order_1", - "aggr__origins__distinations__parent_count", + "aggr__origins__count", "aggr__origins__distinations__parent_count", "aggr__origins__distinations__key_0", "aggr__origins__distinations__count", - "aggr__origins__distinations__order_1", - dense_rank() OVER (ORDER BY "aggr__origins__order_1" DESC, + dense_rank() OVER (ORDER BY "aggr__origins__count" DESC, "aggr__origins__key_0" ASC) AS "aggr__origins__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__origins__key_0" ORDER BY - "aggr__origins__distinations__order_1" DESC, + "aggr__origins__distinations__count" DESC, "aggr__origins__distinations__key_0" ASC) AS "aggr__origins__distinations__order_1_rank" FROM ( @@ -1441,13 +1421,10 @@ var AggregationTests = []AggregationTestCase{ "OriginAirportID" AS "aggr__origins__key_0", sum(count(*)) OVER (PARTITION BY "aggr__origins__key_0") AS "aggr__origins__count", - sum(count()) OVER (PARTITION BY "aggr__origins__key_0") AS - "aggr__origins__order_1", sum(count(*)) OVER (PARTITION BY "aggr__origins__key_0") AS "aggr__origins__distinations__parent_count", "DestAirportID" AS "aggr__origins__distinations__key_0", - count(*) AS "aggr__origins__distinations__count", - count() AS "aggr__origins__distinations__order_1" + count(*) AS "aggr__origins__distinations__count" FROM __quesma_table_name GROUP BY "OriginAirportID" AS "aggr__origins__key_0", "DestAirportID" AS "aggr__origins__distinations__key_0")) @@ -1460,15 +1437,12 @@ var AggregationTests = []AggregationTestCase{ "aggr__origins__parent_count", "group_table"."aggr__origins__key_0" AS "aggr__origins__key_0", "group_table"."aggr__origins__count" AS "aggr__origins__count", - "group_table"."aggr__origins__order_1" AS 
"aggr__origins__order_1", "group_table"."aggr__origins__distinations__parent_count" AS "aggr__origins__distinations__parent_count", "group_table"."aggr__origins__distinations__key_0" AS "aggr__origins__distinations__key_0", "group_table"."aggr__origins__distinations__count" AS "aggr__origins__distinations__count", - "group_table"."aggr__origins__distinations__order_1" AS - "aggr__origins__distinations__order_1", map('lat', "hit_table"."DestLocation::lat", 'lon', "hit_table"."DestLocation::lon") AS "top_hits__origins__distinations__destLocation_col_0", @@ -1484,10 +1458,8 @@ var AggregationTests = []AggregationTestCase{ "group_table"."aggr__origins__distinations__key_0"= "hit_table"."DestAirportID"))) SELECT "aggr__origins__parent_count", "aggr__origins__key_0", - "aggr__origins__count", "aggr__origins__order_1", - "aggr__origins__distinations__parent_count", + "aggr__origins__count", "aggr__origins__distinations__parent_count", "aggr__origins__distinations__key_0", "aggr__origins__distinations__count", - "aggr__origins__distinations__order_1", "top_hits__origins__distinations__destLocation_col_0", "top_hits_rank" FROM "quesma_top_hits_join" WHERE "top_hits_rank"<=1 @@ -1497,17 +1469,16 @@ var AggregationTests = []AggregationTestCase{ WITH quesma_top_hits_group_table AS ( SELECT sum(count(*)) OVER () AS "aggr__origins__parent_count", "OriginAirportID" AS "aggr__origins__key_0", - count(*) AS "aggr__origins__count", count() AS "aggr__origins__order_1" + count(*) AS "aggr__origins__count" FROM __quesma_table_name GROUP BY "OriginAirportID" AS "aggr__origins__key_0" - ORDER BY "aggr__origins__order_1" DESC, "aggr__origins__key_0" ASC + ORDER BY "aggr__origins__count" DESC, "aggr__origins__key_0" ASC LIMIT 10001) , quesma_top_hits_join AS ( SELECT "group_table"."aggr__origins__parent_count" AS "aggr__origins__parent_count", "group_table"."aggr__origins__key_0" AS "aggr__origins__key_0", "group_table"."aggr__origins__count" AS "aggr__origins__count", - "group_table"."aggr__origins__order_1" AS "aggr__origins__order_1", map('lat', "hit_table"."OriginLocation::lat", 'lon', "hit_table"."OriginLocation::lon") AS "top_hits__origins__originLocation_col_0", @@ -1518,12 +1489,11 @@ var AggregationTests = []AggregationTestCase{ __quesma_table_name AS "hit_table" ON ("group_table"."aggr__origins__key_0"= "hit_table"."OriginAirportID")) SELECT "aggr__origins__parent_count", "aggr__origins__key_0", - "aggr__origins__count", "aggr__origins__order_1", - "top_hits__origins__originLocation_col_0", + "aggr__origins__count", "top_hits__origins__originLocation_col_0", "top_hits__origins__originLocation_col_1", "top_hits_rank" FROM "quesma_top_hits_join" WHERE "top_hits_rank"<=1 - ORDER BY "aggr__origins__order_1" DESC, "aggr__origins__key_0" ASC, + ORDER BY "aggr__origins__count" DESC, "aggr__origins__key_0" ASC, "top_hits_rank" ASC`}, }, { // [8] @@ -1825,7 +1795,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__0__parent_count", 167), model.NewQueryResultCol("aggr__0__key_0", "info"), model.NewQueryResultCol("aggr__0__count", int64(102)), - model.NewQueryResultCol("aggr__0__order_1", 102), model.NewQueryResultCol("aggr__0__1__key_0", int64(1707480000000/1000/60/60/3)), model.NewQueryResultCol("aggr__0__1__count", 22), }}, @@ -1833,7 +1802,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__0__parent_count", 167), model.NewQueryResultCol("aggr__0__key_0", "info"), model.NewQueryResultCol("aggr__0__count", int64(102)), - 
model.NewQueryResultCol("aggr__0__order_1", 102), model.NewQueryResultCol("aggr__0__1__key_0", int64(1707490800000/1000/60/60/3)), model.NewQueryResultCol("aggr__0__1__count", 80), }}, @@ -1841,7 +1809,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__0__parent_count", 167), model.NewQueryResultCol("aggr__0__key_0", "debug"), model.NewQueryResultCol("aggr__0__count", int64(49)), - model.NewQueryResultCol("aggr__0__order_1", 49), model.NewQueryResultCol("aggr__0__1__key_0", int64(1707480000000/1000/60/60/3)), model.NewQueryResultCol("aggr__0__1__count", 17), }}, @@ -1849,7 +1816,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__0__parent_count", 167), model.NewQueryResultCol("aggr__0__key_0", "debug"), model.NewQueryResultCol("aggr__0__count", int64(49)), - model.NewQueryResultCol("aggr__0__order_1", 49), model.NewQueryResultCol("aggr__0__1__key_0", int64(1707490800000/1000/60/60/3)), model.NewQueryResultCol("aggr__0__1__count", 32), }}, @@ -1865,30 +1831,28 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__0__parent_count", 167), model.NewQueryResultCol("aggr__0__key_0", "critical"), model.NewQueryResultCol("aggr__0__count", int64(16)), - model.NewQueryResultCol("aggr__0__order_1", 16), model.NewQueryResultCol("aggr__0__1__key_0", int64(1707490800000/1000/60/60/3)), model.NewQueryResultCol("aggr__0__1__count", 11), }}, }, ExpectedPancakeSQL: ` SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__count", - "aggr__0__order_1", "aggr__0__1__key_0", "aggr__0__1__count" + "aggr__0__1__key_0", "aggr__0__1__count" FROM ( SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__count", - "aggr__0__order_1", "aggr__0__1__key_0", "aggr__0__1__count", - dense_rank() OVER (ORDER BY "aggr__0__order_1" DESC, "aggr__0__key_0" ASC) - AS "aggr__0__order_1_rank", + "aggr__0__1__key_0", "aggr__0__1__count", + dense_rank() OVER (ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC) AS + "aggr__0__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__0__key_0" ORDER BY "aggr__0__1__key_0" ASC) AS "aggr__0__1__order_1_rank" FROM ( SELECT sum(count(*)) OVER () AS "aggr__0__parent_count", "severity" AS "aggr__0__key_0", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__count", - sum(count()) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__order_1", toInt64((toUnixTimestamp64Milli("@timestamp")+timeZoneOffset(toTimezone( - "@timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__0__1__key_0", + "@timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__0__1__key_0", count(*) AS "aggr__0__1__count" - FROM ` + TableName + ` + FROM __quesma_table_name WHERE ("host.name" iLIKE '%prometheus%' AND ("@timestamp">= parseDateTime64BestEffort('2024-02-02T16:36:49.940Z') AND "@timestamp"<= parseDateTime64BestEffort('2024-02-09T16:36:49.940Z'))) @@ -2429,7 +2393,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__sample__top_values__parent_count", 262), model.NewQueryResultCol("aggr__sample__top_values__key_0", "hephaestus"), model.NewQueryResultCol("aggr__sample__top_values__count", int64(30)), - model.NewQueryResultCol("aggr__sample__top_values__order_1", 30), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__sample__count", 262), @@ -2437,7 +2400,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__sample__top_values__parent_count", 262), model.NewQueryResultCol("aggr__sample__top_values__key_0", "poseidon"), 
model.NewQueryResultCol("aggr__sample__top_values__count", int64(29)), - model.NewQueryResultCol("aggr__sample__top_values__order_1", 29), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__sample__count", 262), @@ -2445,7 +2407,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__sample__top_values__parent_count", 262), model.NewQueryResultCol("aggr__sample__top_values__key_0", "jupiter"), model.NewQueryResultCol("aggr__sample__top_values__count", int64(28)), - model.NewQueryResultCol("aggr__sample__top_values__order_1", 28), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__sample__count", 262), @@ -2453,7 +2414,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__sample__top_values__parent_count", 262), model.NewQueryResultCol("aggr__sample__top_values__key_0", "selen"), model.NewQueryResultCol("aggr__sample__top_values__count", int64(26)), - model.NewQueryResultCol("aggr__sample__top_values__order_1", 26), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__sample__count", 262), @@ -2461,7 +2421,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__sample__top_values__parent_count", 262), model.NewQueryResultCol("aggr__sample__top_values__key_0", "demeter"), model.NewQueryResultCol("aggr__sample__top_values__count", int64(24)), - model.NewQueryResultCol("aggr__sample__top_values__order_1", 24), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__sample__count", 262), @@ -2469,7 +2428,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__sample__top_values__parent_count", 262), model.NewQueryResultCol("aggr__sample__top_values__key_0", "iris"), model.NewQueryResultCol("aggr__sample__top_values__count", int64(24)), - model.NewQueryResultCol("aggr__sample__top_values__order_1", 24), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__sample__count", 262), @@ -2477,7 +2435,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__sample__top_values__parent_count", 262), model.NewQueryResultCol("aggr__sample__top_values__key_0", "pan"), model.NewQueryResultCol("aggr__sample__top_values__count", int64(24)), - model.NewQueryResultCol("aggr__sample__top_values__order_1", 24), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__sample__count", 262), @@ -2485,7 +2442,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__sample__top_values__parent_count", 262), model.NewQueryResultCol("aggr__sample__top_values__key_0", "hades"), model.NewQueryResultCol("aggr__sample__top_values__count", int64(22)), - model.NewQueryResultCol("aggr__sample__top_values__order_1", 22), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__sample__count", 262), @@ -2493,7 +2449,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__sample__top_values__parent_count", 262), model.NewQueryResultCol("aggr__sample__top_values__key_0", "hermes"), model.NewQueryResultCol("aggr__sample__top_values__count", int64(22)), - model.NewQueryResultCol("aggr__sample__top_values__order_1", 22), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__sample__count", 262), @@ -2501,7 +2456,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__sample__top_values__parent_count", 262), model.NewQueryResultCol("aggr__sample__top_values__key_0", "persephone"), model.NewQueryResultCol("aggr__sample__top_values__count", 
int64(21)), - model.NewQueryResultCol("aggr__sample__top_values__order_1", 21), }}, }, ExpectedPancakeSQL: ` @@ -2509,8 +2463,7 @@ var AggregationTests = []AggregationTestCase{ sum(count("host.name")) OVER () AS "metric__sample__sample_count_col_0", sum(count(*)) OVER () AS "aggr__sample__top_values__parent_count", "host.name" AS "aggr__sample__top_values__key_0", - count(*) AS "aggr__sample__top_values__count", - count() AS "aggr__sample__top_values__order_1" + count(*) AS "aggr__sample__top_values__count" FROM ( SELECT "host.name" FROM ` + TableName + ` @@ -2519,7 +2472,7 @@ var AggregationTests = []AggregationTestCase{ ` + fullTextFieldName + ` iLIKE '%user%') LIMIT 8000) GROUP BY "host.name" AS "aggr__sample__top_values__key_0" - ORDER BY "aggr__sample__top_values__order_1" DESC, + ORDER BY "aggr__sample__top_values__count" DESC, "aggr__sample__top_values__key_0" ASC LIMIT 11`, }, @@ -2763,7 +2716,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__stats__parent_count", int64(4675)), model.NewQueryResultCol("aggr__stats__key_0", "27"), model.NewQueryResultCol("aggr__stats__count", int64(348)), - model.NewQueryResultCol("aggr__stats__order_1", 348), model.NewQueryResultCol("aggr__stats__series__key_0", int64(1713398400000/60000)), model.NewQueryResultCol("aggr__stats__series__count", 85), }}, @@ -2771,7 +2723,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__stats__parent_count", int64(4675)), model.NewQueryResultCol("aggr__stats__key_0", "27"), model.NewQueryResultCol("aggr__stats__count", int64(348)), - model.NewQueryResultCol("aggr__stats__order_1", 348), model.NewQueryResultCol("aggr__stats__series__key_0", int64(1714003200000/60000)), model.NewQueryResultCol("aggr__stats__series__count", 79), }}, @@ -2779,20 +2730,18 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__stats__parent_count", int64(4675)), model.NewQueryResultCol("aggr__stats__key_0", "52"), model.NewQueryResultCol("aggr__stats__count", int64(188)), - model.NewQueryResultCol("aggr__stats__order_1", 188), model.NewQueryResultCol("aggr__stats__series__key_0", int64(1713398400000/60000)), model.NewQueryResultCol("aggr__stats__series__count", 35), }}, }, ExpectedPancakeSQL: ` SELECT "aggr__stats__parent_count", "aggr__stats__key_0", "aggr__stats__count", - "aggr__stats__order_1", "aggr__stats__series__key_0", - "aggr__stats__series__count" + "aggr__stats__series__key_0", "aggr__stats__series__count" FROM ( SELECT "aggr__stats__parent_count", "aggr__stats__key_0", - "aggr__stats__count", "aggr__stats__order_1", "aggr__stats__series__key_0", + "aggr__stats__count", "aggr__stats__series__key_0", "aggr__stats__series__count", - dense_rank() OVER (ORDER BY "aggr__stats__order_1" DESC, + dense_rank() OVER (ORDER BY "aggr__stats__count" DESC, "aggr__stats__key_0" ASC) AS "aggr__stats__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__stats__key_0" ORDER BY "aggr__stats__series__key_0" ASC) AS "aggr__stats__series__order_1_rank" @@ -2801,8 +2750,6 @@ var AggregationTests = []AggregationTestCase{ COALESCE("event.dataset", 'unknown') AS "aggr__stats__key_0", sum(count(*)) OVER (PARTITION BY "aggr__stats__key_0") AS "aggr__stats__count", - sum(count()) OVER (PARTITION BY "aggr__stats__key_0") AS - "aggr__stats__order_1", toInt64(toUnixTimestamp64Milli("@timestamp") / 60000) AS "aggr__stats__series__key_0", count(*) AS "aggr__stats__series__count" FROM ` + TableName + ` @@ -3141,30 +3088,26 @@ var AggregationTests = []AggregationTestCase{ 
model.NewQueryResultCol("aggr__0__parent_count", uint64(15750)), model.NewQueryResultCol("aggr__0__key_0", "User created"), model.NewQueryResultCol("aggr__0__count", int64(1700)), - model.NewQueryResultCol("aggr__0__order_1", 1700), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__0__parent_count", uint64(15750)), model.NewQueryResultCol("aggr__0__key_0", "User deleted"), model.NewQueryResultCol("aggr__0__count", int64(1781)), - model.NewQueryResultCol("aggr__0__order_1", 1781), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__0__parent_count", uint64(15750)), model.NewQueryResultCol("aggr__0__key_0", "User logged in"), model.NewQueryResultCol("aggr__0__count", int64(1757)), - model.NewQueryResultCol("aggr__0__order_1", 1757), }}, }, ExpectedPancakeSQL: ` SELECT sum(count(*)) OVER () AS "aggr__0__parent_count", - "message" AS "aggr__0__key_0", count(*) AS "aggr__0__count", - count() AS "aggr__0__order_1" + "message" AS "aggr__0__key_0", count(*) AS "aggr__0__count" FROM ` + TableName + ` WHERE ("timestamp">=parseDateTime64BestEffort('2024-02-20T19:13:33.795Z') AND "timestamp"<=parseDateTime64BestEffort('2024-02-21T04:01:14.920Z')) GROUP BY "message" AS "aggr__0__key_0" - ORDER BY "aggr__0__order_1" DESC, "aggr__0__key_0" ASC + ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC LIMIT 4`, }, { // [17] @@ -3989,21 +3932,18 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__sample__bytes_gauge_top__parent_count", int64(1634)), model.NewQueryResultCol("aggr__sample__bytes_gauge_top__key_0", int64(0)), model.NewQueryResultCol("aggr__sample__bytes_gauge_top__count", int64(53)), - model.NewQueryResultCol("aggr__sample__bytes_gauge_top__order_1", int64(53)), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__sample__count", int64(1634)), model.NewQueryResultCol("aggr__sample__bytes_gauge_top__parent_count", int64(1634)), model.NewQueryResultCol("aggr__sample__bytes_gauge_top__key_0", int64(15035)), model.NewQueryResultCol("aggr__sample__bytes_gauge_top__count", int64(7)), - model.NewQueryResultCol("aggr__sample__bytes_gauge_top__order_1", int64(7)), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__sample__count", int64(1634)), model.NewQueryResultCol("aggr__sample__bytes_gauge_top__parent_count", int64(1634)), model.NewQueryResultCol("aggr__sample__bytes_gauge_top__key_0", int64(3350)), model.NewQueryResultCol("aggr__sample__bytes_gauge_top__count", int64(4)), - model.NewQueryResultCol("aggr__sample__bytes_gauge_top__order_1", int64(4)), }}, }, }, @@ -4111,8 +4051,7 @@ var AggregationTests = []AggregationTestCase{ `SELECT sum(count(*)) OVER () AS "aggr__sample__count", sum(count(*)) OVER () AS "aggr__sample__bytes_gauge_top__parent_count", "bytes_gauge" AS "aggr__sample__bytes_gauge_top__key_0", - count(*) AS "aggr__sample__bytes_gauge_top__count", - count() AS "aggr__sample__bytes_gauge_top__order_1" + count(*) AS "aggr__sample__bytes_gauge_top__count" FROM ( SELECT "bytes_gauge" FROM __quesma_table_name @@ -4120,7 +4059,7 @@ var AggregationTests = []AggregationTestCase{ toUnixTimestamp64Milli("timestamp")<=1.711228426749e+12) LIMIT 20000) GROUP BY "bytes_gauge" AS "aggr__sample__bytes_gauge_top__key_0" - ORDER BY "aggr__sample__bytes_gauge_top__order_1" DESC, + ORDER BY "aggr__sample__bytes_gauge_top__count" DESC, "aggr__sample__bytes_gauge_top__key_0" ASC LIMIT 11`, }, @@ -4579,23 +4518,20 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__2__parent_count", uint64(825)), 
model.NewQueryResultCol("aggr__2__key_0", "a"), model.NewQueryResultCol("aggr__2__count", uint64(619)), - model.NewQueryResultCol("aggr__2__order_1", uint64(619)), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", uint64(825)), model.NewQueryResultCol("aggr__2__key_0", "zip"), model.NewQueryResultCol("aggr__2__count", uint64(206)), - model.NewQueryResultCol("aggr__2__order_1", uint64(206)), }}, }, ExpectedPancakeSQL: ` SELECT sum(count(*)) OVER () AS "aggr__2__parent_count", "message" AS "aggr__2__key_0", - count(*) AS "aggr__2__count", - count() AS "aggr__2__order_1" + count(*) AS "aggr__2__count" FROM ` + TableName + ` GROUP BY "message" AS "aggr__2__key_0" - ORDER BY "aggr__2__order_1" DESC, "aggr__2__key_0" ASC + ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC LIMIT 5`, }, { // [24] @@ -5099,7 +5035,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__0__2__parent_count", 3), model.NewQueryResultCol("aggr__0__2__key_0", "a"), model.NewQueryResultCol("aggr__0__2__count", int64(2)), - model.NewQueryResultCol("aggr__0__2__order_1", 2), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__0__key_0", 0.0), @@ -5107,7 +5042,6 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__0__2__parent_count", 3), model.NewQueryResultCol("aggr__0__2__key_0", "b"), model.NewQueryResultCol("aggr__0__2__count", int64(1)), - model.NewQueryResultCol("aggr__0__2__order_1", 1), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__0__key_0", 4000.0), @@ -5115,26 +5049,25 @@ var AggregationTests = []AggregationTestCase{ model.NewQueryResultCol("aggr__0__2__parent_count", 1), model.NewQueryResultCol("aggr__0__2__key_0", "c"), model.NewQueryResultCol("aggr__0__2__count", int64(1)), - model.NewQueryResultCol("aggr__0__2__order_1", 1), }}, }, ExpectedPancakeSQL: ` SELECT "aggr__0__key_0", "aggr__0__count", "aggr__0__2__parent_count", - "aggr__0__2__key_0", "aggr__0__2__count", "aggr__0__2__order_1" + "aggr__0__2__key_0", "aggr__0__2__count" FROM ( SELECT "aggr__0__key_0", "aggr__0__count", "aggr__0__2__parent_count", - "aggr__0__2__key_0", "aggr__0__2__count", "aggr__0__2__order_1", + "aggr__0__2__key_0", "aggr__0__2__count", dense_rank() OVER (ORDER BY "aggr__0__key_0" ASC) AS "aggr__0__order_1_rank" , dense_rank() OVER (PARTITION BY "aggr__0__key_0" ORDER BY - "aggr__0__2__order_1" DESC, "aggr__0__2__key_0" ASC) AS + "aggr__0__2__count" DESC, "aggr__0__2__key_0" ASC) AS "aggr__0__2__order_1_rank" FROM ( SELECT floor("rspContentLen"/2000)*2000 AS "aggr__0__key_0", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__count", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__2__parent_count", "message" AS "aggr__0__2__key_0", - count(*) AS "aggr__0__2__count", count() AS "aggr__0__2__order_1" + count(*) AS "aggr__0__2__count" FROM ` + TableName + ` GROUP BY floor("rspContentLen"/2000)*2000 AS "aggr__0__key_0", "message" AS "aggr__0__2__key_0")) @@ -6274,29 +6207,25 @@ var AggregationTests = []AggregationTestCase{ ExpectedPancakeResults: make([]model.QueryResultRow, 0), ExpectedPancakeSQL: ` SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__count", - "aggr__0__order_1", "aggr__0__1__parent_count", "aggr__0__1__key_0", - "aggr__0__1__count", "aggr__0__1__order_1" + "aggr__0__1__parent_count", "aggr__0__1__key_0", "aggr__0__1__count" FROM ( SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__count", - "aggr__0__order_1", "aggr__0__1__parent_count", 
"aggr__0__1__key_0", - "aggr__0__1__count", "aggr__0__1__order_1", - dense_rank() OVER (ORDER BY "aggr__0__order_1" DESC, "aggr__0__key_0" ASC) - AS "aggr__0__order_1_rank", + "aggr__0__1__parent_count", "aggr__0__1__key_0", "aggr__0__1__count", + dense_rank() OVER (ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC) AS + "aggr__0__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__0__key_0" ORDER BY - "aggr__0__1__order_1" DESC, "aggr__0__1__key_0" ASC) AS + "aggr__0__1__count" DESC, "aggr__0__1__key_0" ASC) AS "aggr__0__1__order_1_rank" FROM ( SELECT sum(count(*)) OVER () AS "aggr__0__parent_count", "host.name" AS "aggr__0__key_0", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__count", - sum(count()) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__order_1", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__1__parent_count", "message" AS "aggr__0__1__key_0", - count(*) AS "aggr__0__1__count", count() AS "aggr__0__1__order_1" - FROM ` + TableName + ` + count(*) AS "aggr__0__1__count" + FROM __quesma_table_name WHERE ("message" IS NOT NULL AND NOT ("message" iLIKE '%US%')) GROUP BY "host.name" AS "aggr__0__key_0", "message" AS "aggr__0__1__key_0")) - WHERE ("aggr__0__order_1_rank"<=11 AND "aggr__0__1__order_1_rank"<=4) ORDER BY "aggr__0__order_1_rank" ASC, "aggr__0__1__order_1_rank" ASC`, }, @@ -6385,37 +6314,33 @@ var AggregationTests = []AggregationTestCase{ ExpectedPancakeResults: make([]model.QueryResultRow, 0), ExpectedPancakeSQL: ` SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__count", - "aggr__0__order_1", "aggr__0__1__parent_count", "aggr__0__1__key_0", - "aggr__0__1__count", "aggr__0__1__order_1", "aggr__0__1__2__parent_count", - "aggr__0__1__2__key_0", "aggr__0__1__2__count", "aggr__0__1__2__order_1" + "aggr__0__1__parent_count", "aggr__0__1__key_0", "aggr__0__1__count", + "aggr__0__1__2__parent_count", "aggr__0__1__2__key_0", "aggr__0__1__2__count" FROM ( SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__count", - "aggr__0__order_1", "aggr__0__1__parent_count", "aggr__0__1__key_0", - "aggr__0__1__count", "aggr__0__1__order_1", "aggr__0__1__2__parent_count", - "aggr__0__1__2__key_0", "aggr__0__1__2__count", "aggr__0__1__2__order_1", - dense_rank() OVER (ORDER BY "aggr__0__order_1" DESC, "aggr__0__key_0" ASC) - AS "aggr__0__order_1_rank", + "aggr__0__1__parent_count", "aggr__0__1__key_0", "aggr__0__1__count", + "aggr__0__1__2__parent_count", "aggr__0__1__2__key_0", + "aggr__0__1__2__count", + dense_rank() OVER (ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC) AS + "aggr__0__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__0__key_0" ORDER BY - "aggr__0__1__order_1" DESC, "aggr__0__1__key_0" ASC) AS + "aggr__0__1__count" DESC, "aggr__0__1__key_0" ASC) AS "aggr__0__1__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__0__key_0", "aggr__0__1__key_0" ORDER - BY "aggr__0__1__2__order_1" DESC, "aggr__0__1__key_0" ASC, + BY "aggr__0__1__2__count" DESC, "aggr__0__1__key_0" ASC, "aggr__0__1__2__key_0" ASC) AS "aggr__0__1__2__order_1_rank" FROM ( SELECT sum(count(*)) OVER () AS "aggr__0__parent_count", "host.name" AS "aggr__0__key_0", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__count", - sum(count()) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__order_1", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__1__parent_count", "message" AS "aggr__0__1__key_0", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0", "aggr__0__1__key_0") AS "aggr__0__1__count", - sum(count()) OVER (PARTITION BY 
"aggr__0__key_0", "aggr__0__1__key_0") AS - "aggr__0__1__order_1", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0", "aggr__0__1__key_0") AS "aggr__0__1__2__parent_count", "message" AS "aggr__0__1__2__key_0", - count(*) AS "aggr__0__1__2__count", count() AS "aggr__0__1__2__order_1" - FROM ` + TableName + ` + count(*) AS "aggr__0__1__2__count" + FROM __quesma_table_name WHERE ("message" IS NOT NULL AND NOT ("message" iLIKE '%US%')) GROUP BY "host.name" AS "aggr__0__key_0", "message" AS "aggr__0__1__key_0", "message" AS "aggr__0__1__2__key_0")) @@ -6494,11 +6419,11 @@ var AggregationTests = []AggregationTestCase{ ExpectedPancakeResults: make([]model.QueryResultRow, 0), ExpectedPancakeSQL: ` SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__count", - "aggr__0__order_1", "aggr__0__1__key_0", "aggr__0__1__count" + "aggr__0__1__key_0", "aggr__0__1__count" FROM ( SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__count", - "aggr__0__order_1", "aggr__0__1__key_0", "aggr__0__1__count", - dense_rank() OVER (ORDER BY "aggr__0__order_1" DESC, "aggr__0__key_0" ASC) + "aggr__0__1__key_0", "aggr__0__1__count", + dense_rank() OVER (ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC) AS "aggr__0__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__0__key_0" ORDER BY "aggr__0__1__key_0" ASC) AS "aggr__0__1__order_1_rank" @@ -6506,7 +6431,6 @@ var AggregationTests = []AggregationTestCase{ SELECT sum(count(*)) OVER () AS "aggr__0__parent_count", "host.name" AS "aggr__0__key_0", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__count", - sum(count()) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__order_1", "FlightDelayMin" AS "aggr__0__1__key_0", count(*) AS "aggr__0__1__count" FROM ` + TableName + ` WHERE ("message" IS NOT NULL AND NOT ("message" iLIKE '%US%')) @@ -6605,11 +6529,11 @@ var AggregationTests = []AggregationTestCase{ ExpectedPancakeResults: make([]model.QueryResultRow, 0), ExpectedPancakeSQL: ` SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__count", - "aggr__0__order_1", "aggr__0__1__key_0", "aggr__0__1__count" + "aggr__0__1__key_0", "aggr__0__1__count" FROM ( SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__count", - "aggr__0__order_1", "aggr__0__1__key_0", "aggr__0__1__count", - dense_rank() OVER (ORDER BY "aggr__0__order_1" DESC, "aggr__0__key_0" ASC) + "aggr__0__1__key_0", "aggr__0__1__count", + dense_rank() OVER (ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC) AS "aggr__0__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__0__key_0" ORDER BY "aggr__0__1__key_0" ASC) AS "aggr__0__1__order_1_rank" @@ -6617,7 +6541,6 @@ var AggregationTests = []AggregationTestCase{ SELECT sum(count(*)) OVER () AS "aggr__0__parent_count", "host.name" AS "aggr__0__key_0", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__count", - sum(count()) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__order_1", "FlightDelayMin" AS "aggr__0__1__key_0", count(*) AS "aggr__0__1__count" FROM ` + TableName + ` WHERE ("message" IS NOT NULL AND NOT ("message" iLIKE '%US%')) @@ -6703,21 +6626,20 @@ var AggregationTests = []AggregationTestCase{ ExpectedPancakeResults: make([]model.QueryResultRow, 0), ExpectedPancakeSQL: ` SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__count", - "aggr__0__order_1", "aggr__0__1__key_0", "aggr__0__1__count" + "aggr__0__1__key_0", "aggr__0__1__count" FROM ( SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__count", - "aggr__0__order_1", "aggr__0__1__key_0", "aggr__0__1__count", - dense_rank() 
OVER (ORDER BY "aggr__0__order_1" DESC, "aggr__0__key_0" ASC) - AS "aggr__0__order_1_rank", + "aggr__0__1__key_0", "aggr__0__1__count", + dense_rank() OVER (ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC) AS + "aggr__0__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__0__key_0" ORDER BY "aggr__0__1__key_0" ASC) AS "aggr__0__1__order_1_rank" FROM ( SELECT sum(count(*)) OVER () AS "aggr__0__parent_count", "host.name" AS "aggr__0__key_0", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__count", - sum(count()) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__order_1", "FlightDelayMin" AS "aggr__0__1__key_0", count(*) AS "aggr__0__1__count" - FROM ` + TableName + ` + FROM __quesma_table_name WHERE ("message" IS NOT NULL AND NOT ("message" iLIKE '%US%')) GROUP BY "host.name" AS "aggr__0__key_0", "FlightDelayMin" AS "aggr__0__1__key_0")) @@ -6993,26 +6915,23 @@ var AggregationTests = []AggregationTestCase{ ExpectedPancakeResults: make([]model.QueryResultRow, 0), ExpectedPancakeSQL: ` SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__count", - "aggr__0__order_1", "aggr__0__1__parent_count", "aggr__0__1__key_0", - "aggr__0__1__count", "aggr__0__1__order_1" + "aggr__0__1__parent_count", "aggr__0__1__key_0", "aggr__0__1__count" FROM ( SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__count", - "aggr__0__order_1", "aggr__0__1__parent_count", "aggr__0__1__key_0", - "aggr__0__1__count", "aggr__0__1__order_1", - dense_rank() OVER (ORDER BY "aggr__0__order_1" DESC, "aggr__0__key_0" ASC) - AS "aggr__0__order_1_rank", + "aggr__0__1__parent_count", "aggr__0__1__key_0", "aggr__0__1__count", + dense_rank() OVER (ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC) AS + "aggr__0__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__0__key_0" ORDER BY - "aggr__0__1__order_1" DESC, "aggr__0__1__key_0" ASC) AS + "aggr__0__1__count" DESC, "aggr__0__1__key_0" ASC) AS "aggr__0__1__order_1_rank" FROM ( SELECT sum(count(*)) OVER () AS "aggr__0__parent_count", "OriginAirportID" AS "aggr__0__key_0", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__count", - sum(count()) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__order_1", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__1__parent_count", "DestAirportID" AS "aggr__0__1__key_0", - count(*) AS "aggr__0__1__count", count() AS "aggr__0__1__order_1" - FROM ` + TableName + ` + count(*) AS "aggr__0__1__count" + FROM __quesma_table_name GROUP BY "OriginAirportID" AS "aggr__0__key_0", "DestAirportID" AS "aggr__0__1__key_0")) WHERE ("aggr__0__order_1_rank"<=11 AND "aggr__0__1__order_1_rank"<=4) diff --git a/quesma/testdata/aggregation_requests_2.go b/quesma/testdata/aggregation_requests_2.go index f0c8ede55..aaab513d0 100644 --- a/quesma/testdata/aggregation_requests_2.go +++ b/quesma/testdata/aggregation_requests_2.go @@ -593,26 +593,22 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__2__parent_count", 2786), model.NewQueryResultCol("aggr__2__key_0", "200"), model.NewQueryResultCol("aggr__2__count", 2570), - model.NewQueryResultCol("aggr__2__order_1", 2570), model.NewQueryResultCol("metric__2__1_col_0", []time.Time{util.ParseTime("2024-04-21T06:11:13.619Z")}), model.NewQueryResultCol("metric__2__1_col_1", []time.Time{util.ParseTime("2024-04-21T12:21:13.414Z")}), model.NewQueryResultCol("metric__2__2_col_0", 10), }}, }, ExpectedPancakeSQL: ` - SELECT - sum(count(*)) OVER () AS "aggr__2__parent_count", - "response" AS "aggr__2__key_0", - count(*) AS "aggr__2__count", 
- count() AS "aggr__2__order_1", + SELECT sum(count(*)) OVER () AS "aggr__2__parent_count", + "response" AS "aggr__2__key_0", count(*) AS "aggr__2__count", quantiles(0.010000)("timestamp") AS "metric__2__1_col_0", quantiles(0.020000)("timestamp") AS "metric__2__1_col_1", sumOrNull("count") AS "metric__2__2_col_0" - FROM ` + TableName + ` + FROM __quesma_table_name WHERE ("timestamp">=parseDateTime64BestEffort('2024-04-18T00:51:15.845Z') AND "timestamp"<=parseDateTime64BestEffort('2024-05-03T00:51:15.845Z')) GROUP BY "response" AS "aggr__2__key_0" - ORDER BY "aggr__2__order_1" DESC, "aggr__2__key_0" ASC + ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC LIMIT 4`, }, { // [44] @@ -725,66 +721,55 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a1"), model.NewQueryResultCol("aggr__2__count", int64(1036)), - model.NewQueryResultCol("aggr__2__order_1", 1036), model.NewQueryResultCol("aggr__2__8__parent_count", 1036), model.NewQueryResultCol("aggr__2__8__key_0", "__missing__"), model.NewQueryResultCol("aggr__2__8__count", int64(21)), - model.NewQueryResultCol("aggr__2__8__order_1", 21), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a1"), model.NewQueryResultCol("aggr__2__count", int64(1036)), - model.NewQueryResultCol("aggr__2__order_1", 1036), model.NewQueryResultCol("aggr__2__8__parent_count", 1036), model.NewQueryResultCol("aggr__2__8__key_0", "b12"), model.NewQueryResultCol("aggr__2__8__count", int64(24)), - model.NewQueryResultCol("aggr__2__8__order_1", 24), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", int64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", "b21"), model.NewQueryResultCol("aggr__2__8__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", int64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", "__missing__"), model.NewQueryResultCol("aggr__2__8__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), }}, }, ExpectedPancakeSQL: ` SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", - "aggr__2__order_1", "aggr__2__8__parent_count", "aggr__2__8__key_0", - "aggr__2__8__count", "aggr__2__8__order_1" + "aggr__2__8__parent_count", "aggr__2__8__key_0", "aggr__2__8__count" FROM ( SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", - "aggr__2__order_1", "aggr__2__8__parent_count", "aggr__2__8__key_0", - "aggr__2__8__count", "aggr__2__8__order_1", - dense_rank() OVER (ORDER BY "aggr__2__order_1" DESC, "aggr__2__key_0" ASC) - AS "aggr__2__order_1_rank", + "aggr__2__8__parent_count", "aggr__2__8__key_0", "aggr__2__8__count", + dense_rank() OVER (ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC) AS + "aggr__2__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__2__key_0" ORDER BY - "aggr__2__8__order_1" DESC, "aggr__2__8__key_0" ASC) AS + "aggr__2__8__count" DESC, "aggr__2__8__key_0" ASC) AS 
"aggr__2__8__order_1_rank" FROM ( SELECT sum(count(*)) OVER () AS "aggr__2__parent_count", "surname" AS "aggr__2__key_0", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__count", - sum(count()) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__order_1", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__8__parent_count", COALESCE("limbName", '__missing__') AS "aggr__2__8__key_0", - count(*) AS "aggr__2__8__count", count() AS "aggr__2__8__order_1" - FROM ` + TableName + ` + count(*) AS "aggr__2__8__count" + FROM __quesma_table_name GROUP BY "surname" AS "aggr__2__key_0", COALESCE("limbName", '__missing__') AS "aggr__2__8__key_0")) WHERE ("aggr__2__order_1_rank"<=201 AND "aggr__2__8__order_1_rank"<=20) @@ -918,117 +903,96 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__2__parent_count", 34324), model.NewQueryResultCol("aggr__2__key_0", "a1"), model.NewQueryResultCol("aggr__2__count", int64(1036)), - model.NewQueryResultCol("aggr__2__order_1", 1036), model.NewQueryResultCol("aggr__2__8__parent_count", 1036), model.NewQueryResultCol("aggr__2__8__key_0", "b11"), model.NewQueryResultCol("aggr__2__8__count", int64(21)), - model.NewQueryResultCol("aggr__2__8__order_1", 21), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34324), model.NewQueryResultCol("aggr__2__key_0", "a1"), model.NewQueryResultCol("aggr__2__count", int64(1036)), - model.NewQueryResultCol("aggr__2__order_1", 1036), model.NewQueryResultCol("aggr__2__8__parent_count", 1036), model.NewQueryResultCol("aggr__2__8__key_0", nil), model.NewQueryResultCol("aggr__2__8__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34324), model.NewQueryResultCol("aggr__2__key_0", "a1"), model.NewQueryResultCol("aggr__2__count", int64(1036)), - model.NewQueryResultCol("aggr__2__order_1", 1036), model.NewQueryResultCol("aggr__2__8__parent_count", 1036), model.NewQueryResultCol("aggr__2__8__key_0", "b12"), model.NewQueryResultCol("aggr__2__8__count", int64(24)), - model.NewQueryResultCol("aggr__2__8__order_1", 24), }}, // nil at the beginningÄ… {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", int64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", nil), model.NewQueryResultCol("aggr__2__8__count", int64(57)), - model.NewQueryResultCol("aggr__2__8__order_1", 57), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", int64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", "b21"), model.NewQueryResultCol("aggr__2__8__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", int64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", "b22"), model.NewQueryResultCol("aggr__2__8__count", 
int64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), }}, // nil at the end {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a3"), model.NewQueryResultCol("aggr__2__count_1", int64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", "b31"), model.NewQueryResultCol("aggr__2__8__count_1", int64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a3"), model.NewQueryResultCol("aggr__2__count_1", int64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", "b32"), model.NewQueryResultCol("aggr__2__8__count_1", int64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a3"), model.NewQueryResultCol("aggr__2__count_1", int64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", nil), model.NewQueryResultCol("aggr__2__8__count_1", int64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), }}, }, ExpectedPancakeSQL: ` SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", - "aggr__2__order_1", "aggr__2__8__parent_count", "aggr__2__8__key_0", - "aggr__2__8__count", "aggr__2__8__order_1" + "aggr__2__8__parent_count", "aggr__2__8__key_0", "aggr__2__8__count" FROM ( SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", - "aggr__2__order_1", "aggr__2__8__parent_count", "aggr__2__8__key_0", - "aggr__2__8__count", "aggr__2__8__order_1", - dense_rank() OVER (ORDER BY "aggr__2__order_1" DESC, "aggr__2__key_0" ASC) - AS "aggr__2__order_1_rank", + "aggr__2__8__parent_count", "aggr__2__8__key_0", "aggr__2__8__count", + dense_rank() OVER (ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC) AS + "aggr__2__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__2__key_0" ORDER BY - "aggr__2__8__order_1" DESC, "aggr__2__8__key_0" ASC) AS + "aggr__2__8__count" DESC, "aggr__2__8__key_0" ASC) AS "aggr__2__8__order_1_rank" FROM ( SELECT sum(count(*)) OVER () AS "aggr__2__parent_count", "surname" AS "aggr__2__key_0", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__count", - sum(count()) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__order_1", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__8__parent_count", "limbName" AS "aggr__2__8__key_0", - count(*) AS "aggr__2__8__count", count() AS "aggr__2__8__order_1" - FROM ` + TableName + ` + count(*) AS "aggr__2__8__count" + FROM __quesma_table_name GROUP BY "surname" AS "aggr__2__key_0", "limbName" AS "aggr__2__8__key_0")) WHERE ("aggr__2__order_1_rank"<=201 AND "aggr__2__8__order_1_rank"<=21) ORDER BY "aggr__2__order_1_rank" ASC, "aggr__2__8__order_1_rank" ASC`, @@ -1144,66 +1108,55 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "miss"), model.NewQueryResultCol("aggr__2__count", int64(1036)), - model.NewQueryResultCol("aggr__2__order_1", 1036), model.NewQueryResultCol("aggr__2__8__parent_count", 1036), 
model.NewQueryResultCol("aggr__2__8__key_0", "__missing__"), model.NewQueryResultCol("aggr__2__8__count", int64(21)), - model.NewQueryResultCol("aggr__2__8__order_1", 21), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "miss"), model.NewQueryResultCol("aggr__2__count", int64(1036)), - model.NewQueryResultCol("aggr__2__order_1", 1036), model.NewQueryResultCol("aggr__2__8__parent_count", 1036), model.NewQueryResultCol("aggr__2__8__key_0", "b12"), model.NewQueryResultCol("aggr__2__8__count", int64(24)), - model.NewQueryResultCol("aggr__2__8__order_1", 24), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", int64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", "b21"), model.NewQueryResultCol("aggr__2__8__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", int64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", "__missing__"), model.NewQueryResultCol("aggr__2__8__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), }}, }, ExpectedPancakeSQL: ` SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", - "aggr__2__order_1", "aggr__2__8__parent_count", "aggr__2__8__key_0", - "aggr__2__8__count", "aggr__2__8__order_1" + "aggr__2__8__parent_count", "aggr__2__8__key_0", "aggr__2__8__count" FROM ( SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", - "aggr__2__order_1", "aggr__2__8__parent_count", "aggr__2__8__key_0", - "aggr__2__8__count", "aggr__2__8__order_1", - dense_rank() OVER (ORDER BY "aggr__2__order_1" DESC, "aggr__2__key_0" ASC) - AS "aggr__2__order_1_rank", + "aggr__2__8__parent_count", "aggr__2__8__key_0", "aggr__2__8__count", + dense_rank() OVER (ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC) AS + "aggr__2__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__2__key_0" ORDER BY - "aggr__2__8__order_1" DESC, "aggr__2__8__key_0" ASC) AS + "aggr__2__8__count" DESC, "aggr__2__8__key_0" ASC) AS "aggr__2__8__order_1_rank" FROM ( SELECT sum(count(*)) OVER () AS "aggr__2__parent_count", COALESCE("surname", 'miss') AS "aggr__2__key_0", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__count", - sum(count()) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__order_1", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__8__parent_count", COALESCE("limbName", '__missing__') AS "aggr__2__8__key_0", - count(*) AS "aggr__2__8__count", count() AS "aggr__2__8__order_1" - FROM ` + TableName + ` + count(*) AS "aggr__2__8__count" + FROM __quesma_table_name GROUP BY COALESCE("surname", 'miss') AS "aggr__2__key_0", COALESCE("limbName", '__missing__') AS "aggr__2__8__key_0")) WHERE ("aggr__2__order_1_rank"<=200 AND "aggr__2__8__order_1_rank"<=20) @@ -1318,95 +1271,78 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a1"), model.NewQueryResultCol("aggr__2__count", int64(1036)), - 
model.NewQueryResultCol("aggr__2__order_1", 1036), model.NewQueryResultCol("aggr__2__8__parent_count", 1036), model.NewQueryResultCol("aggr__2__8__key_0", "b11"), model.NewQueryResultCol("aggr__2__8__count", int64(21)), - model.NewQueryResultCol("aggr__2__8__order_1", 21), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a1"), model.NewQueryResultCol("aggr__2__count", int64(1036)), - model.NewQueryResultCol("aggr__2__order_1", 1036), model.NewQueryResultCol("aggr__2__8__parent_count", 1036), model.NewQueryResultCol("aggr__2__8__key_0", "b12"), model.NewQueryResultCol("aggr__2__8__count", int64(24)), - model.NewQueryResultCol("aggr__2__8__order_1", 24), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", nil), model.NewQueryResultCol("aggr__2__count", int64(55)), - model.NewQueryResultCol("aggr__2__order_1", 55), model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", "__missing__"), model.NewQueryResultCol("aggr__2__8__count", int64(21)), - model.NewQueryResultCol("aggr__2__8__order_1", 21), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", nil), model.NewQueryResultCol("aggr__2__count", int64(55)), - model.NewQueryResultCol("aggr__2__order_1", 55), model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", "lala"), model.NewQueryResultCol("aggr__2__8__count", int64(21)), - model.NewQueryResultCol("aggr__2__8__order_1", 21), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", int64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", "b21"), model.NewQueryResultCol("aggr__2__8__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", int64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__parent_count", 34290), model.NewQueryResultCol("aggr__2__8__key_0", nil), model.NewQueryResultCol("aggr__2__8__count", uint64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", uint64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", "b22"), model.NewQueryResultCol("aggr__2__8__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), }}, }, ExpectedPancakeSQL: ` SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", - "aggr__2__order_1", "aggr__2__8__parent_count", "aggr__2__8__key_0", - "aggr__2__8__count", "aggr__2__8__order_1" + "aggr__2__8__parent_count", "aggr__2__8__key_0", "aggr__2__8__count" FROM ( SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", - "aggr__2__order_1", "aggr__2__8__parent_count", "aggr__2__8__key_0", - "aggr__2__8__count", 
"aggr__2__8__order_1", - dense_rank() OVER (ORDER BY "aggr__2__order_1" DESC, "aggr__2__key_0" ASC) - AS "aggr__2__order_1_rank", + "aggr__2__8__parent_count", "aggr__2__8__key_0", "aggr__2__8__count", + dense_rank() OVER (ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC) AS + "aggr__2__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__2__key_0" ORDER BY - "aggr__2__8__order_1" DESC, "aggr__2__8__key_0" ASC) AS + "aggr__2__8__count" DESC, "aggr__2__8__key_0" ASC) AS "aggr__2__8__order_1_rank" FROM ( SELECT sum(count(*)) OVER () AS "aggr__2__parent_count", "surname" AS "aggr__2__key_0", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__count", - sum(count()) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__order_1", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__8__parent_count", "limbName" AS "aggr__2__8__key_0", - count(*) AS "aggr__2__8__count", count() AS "aggr__2__8__order_1" - FROM ` + TableName + ` + count(*) AS "aggr__2__8__count" + FROM __quesma_table_name GROUP BY "surname" AS "aggr__2__key_0", "limbName" AS "aggr__2__8__key_0")) WHERE ("aggr__2__order_1_rank"<=201 AND "aggr__2__8__order_1_rank"<=21) ORDER BY "aggr__2__order_1_rank" ASC, "aggr__2__8__order_1_rank" ASC`, @@ -1544,7 +1480,7 @@ var AggregationTests2 = []AggregationTestCase{ }, /* ExpectedSQLs: []string{ - `SELECT count() ` + + `SELECT count(*) ` + `FROM ` + QuotedTableName + ` ` + `WHERE ("message" iLIKE '%user%' ` + `AND ("@timestamp">=parseDateTime64BestEffort('2024-01-23T14:43:19.481Z') ` + @@ -1554,7 +1490,7 @@ var AggregationTests2 = []AggregationTestCase{ `WHERE ("message" iLIKE '%user%' AND ("@timestamp">=parseDateTime64BestEffort('2024-01-23T14:43:19.481Z') ` + `AND "@timestamp"<=parseDateTime64BestEffort('2024-01-23T14:58:19.481Z'))) ` + `LIMIT 5`, - `SELECT ` + timestampGroupByClause + `, count() ` + + `SELECT ` + timestampGroupByClause + `, count(*) ` + `FROM ` + QuotedTableName + ` ` + `WHERE ("message" iLIKE '%user%' ` + `AND ("@timestamp">=parseDateTime64BestEffort('2024-01-23T14:43:19.481Z') ` + @@ -2049,71 +1985,62 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a1"), model.NewQueryResultCol("aggr__2__count", int64(1036)), - model.NewQueryResultCol("aggr__2__order_1", 1036), model.NewQueryResultCol("aggr__2__8__count", 1036), model.NewQueryResultCol("aggr__2__8__5__parent_count", int64(1036)), model.NewQueryResultCol("aggr__2__8__5__key_0", "__missing__"), model.NewQueryResultCol("aggr__2__8__5__count", int64(21)), - model.NewQueryResultCol("aggr__2__8__5__order_1", 21), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a1"), model.NewQueryResultCol("aggr__2__count", int64(1036)), - model.NewQueryResultCol("aggr__2__order_1", 1036), model.NewQueryResultCol("aggr__2__8__count", 1036), model.NewQueryResultCol("aggr__2__8__5__parent_count", int64(1036)), model.NewQueryResultCol("aggr__2__8__5__key_0", "b12"), model.NewQueryResultCol("aggr__2__8__5__count", int64(24)), - model.NewQueryResultCol("aggr__2__8__5__order_1", 24), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", int64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__count", 34), model.NewQueryResultCol("aggr__2__8__5__parent_count", 34), 
model.NewQueryResultCol("aggr__2__8__5__key_0", "b21"), model.NewQueryResultCol("aggr__2__8__5__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__5__order_1", 17), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", int64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__count", 34), model.NewQueryResultCol("aggr__2__8__5__parent_count", 34), model.NewQueryResultCol("aggr__2__8__5__key_0", "__missing__"), model.NewQueryResultCol("aggr__2__8__5__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__5__order_1", 17), }}, }, ExpectedPancakeSQL: ` SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", - "aggr__2__order_1", "aggr__2__8__count", "aggr__2__8__5__parent_count", - "aggr__2__8__5__key_0", "aggr__2__8__5__count", "aggr__2__8__5__order_1" + "aggr__2__8__count", "aggr__2__8__5__parent_count", "aggr__2__8__5__key_0", + "aggr__2__8__5__count" FROM ( SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", - "aggr__2__order_1", "aggr__2__8__count", "aggr__2__8__5__parent_count", - "aggr__2__8__5__key_0", "aggr__2__8__5__count", "aggr__2__8__5__order_1", - dense_rank() OVER (ORDER BY "aggr__2__order_1" DESC, "aggr__2__key_0" ASC) - AS "aggr__2__order_1_rank", + "aggr__2__8__count", "aggr__2__8__5__parent_count", "aggr__2__8__5__key_0", + "aggr__2__8__5__count", + dense_rank() OVER (ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC) AS + "aggr__2__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__2__key_0" ORDER BY - "aggr__2__8__5__order_1" DESC, "aggr__2__8__5__key_0" ASC) AS + "aggr__2__8__5__count" DESC, "aggr__2__8__5__key_0" ASC) AS "aggr__2__8__5__order_1_rank" FROM ( SELECT sum(count(*)) OVER () AS "aggr__2__parent_count", "surname" AS "aggr__2__key_0", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__count", - sum(count()) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__order_1", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__8__count", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__8__5__parent_count", COALESCE("limbName", '__missing__') AS "aggr__2__8__5__key_0", - count(*) AS "aggr__2__8__5__count", count() AS "aggr__2__8__5__order_1" - FROM ` + TableName + ` + count(*) AS "aggr__2__8__5__count" + FROM __quesma_table_name GROUP BY "surname" AS "aggr__2__key_0", COALESCE("limbName", '__missing__') AS "aggr__2__8__5__key_0")) WHERE ("aggr__2__order_1_rank"<=201 AND "aggr__2__8__5__order_1_rank"<=20) @@ -2247,71 +2174,62 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a1"), model.NewQueryResultCol("aggr__2__count", int64(1036)), - model.NewQueryResultCol("aggr__2__order_1", 1036), model.NewQueryResultCol("aggr__2__8__count", 1036), model.NewQueryResultCol("aggr__2__8__5__parent_count", int64(1036)), model.NewQueryResultCol("aggr__2__8__5__key_0", "__missing__"), model.NewQueryResultCol("aggr__2__8__5__count", int64(21)), - model.NewQueryResultCol("aggr__2__8__5__order_1", 21), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a1"), model.NewQueryResultCol("aggr__2__count", int64(1036)), - model.NewQueryResultCol("aggr__2__order_1", 1036), model.NewQueryResultCol("aggr__2__8__count", 1036), 
model.NewQueryResultCol("aggr__2__8__5__parent_count", int64(1036)), model.NewQueryResultCol("aggr__2__8__5__key_0", "b12"), model.NewQueryResultCol("aggr__2__8__5__count", int64(24)), - model.NewQueryResultCol("aggr__2__8__5__order_1", 24), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", int64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__count", 34), model.NewQueryResultCol("aggr__2__8__5__parent_count", 34), model.NewQueryResultCol("aggr__2__8__5__key_0", "b21"), model.NewQueryResultCol("aggr__2__8__5__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__5__order_1", 17), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", int64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__count", 34), model.NewQueryResultCol("aggr__2__8__5__parent_count", 34), model.NewQueryResultCol("aggr__2__8__5__key_0", "__missing__"), model.NewQueryResultCol("aggr__2__8__5__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__5__order_1", 17), }}, }, ExpectedPancakeSQL: ` SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", - "aggr__2__order_1", "aggr__2__8__count", "aggr__2__8__5__parent_count", - "aggr__2__8__5__key_0", "aggr__2__8__5__count", "aggr__2__8__5__order_1" + "aggr__2__8__count", "aggr__2__8__5__parent_count", "aggr__2__8__5__key_0", + "aggr__2__8__5__count" FROM ( SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", - "aggr__2__order_1", "aggr__2__8__count", "aggr__2__8__5__parent_count", - "aggr__2__8__5__key_0", "aggr__2__8__5__count", "aggr__2__8__5__order_1", - dense_rank() OVER (ORDER BY "aggr__2__order_1" DESC, "aggr__2__key_0" ASC) - AS "aggr__2__order_1_rank", + "aggr__2__8__count", "aggr__2__8__5__parent_count", "aggr__2__8__5__key_0", + "aggr__2__8__5__count", + dense_rank() OVER (ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC) AS + "aggr__2__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__2__key_0" ORDER BY - "aggr__2__8__5__order_1" DESC, "aggr__2__8__5__key_0" ASC) AS + "aggr__2__8__5__count" DESC, "aggr__2__8__5__key_0" ASC) AS "aggr__2__8__5__order_1_rank" FROM ( SELECT sum(count(*)) OVER () AS "aggr__2__parent_count", "surname" AS "aggr__2__key_0", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__count", - sum(count()) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__order_1", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__8__count", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__8__5__parent_count", COALESCE("limbName", '__missing__') AS "aggr__2__8__5__key_0", - count(*) AS "aggr__2__8__5__count", count() AS "aggr__2__8__5__order_1" - FROM ` + TableName + ` + count(*) AS "aggr__2__8__5__count" + FROM __quesma_table_name GROUP BY "surname" AS "aggr__2__key_0", COALESCE("limbName", '__missing__') AS "aggr__2__8__5__key_0")) WHERE ("aggr__2__order_1_rank"<=201 AND "aggr__2__8__5__order_1_rank"<=20) @@ -2970,7 +2888,6 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__histo__0__parent_count", 1960), model.NewQueryResultCol("aggr__histo__0__key_0", "order"), model.NewQueryResultCol("aggr__histo__0__count", int64(42)), - model.NewQueryResultCol("aggr__histo__0__order_1", int64(42)), }}, {Cols: 
[]model.QueryResultCol{ model.NewQueryResultCol("aggr__histo__key_0", 0), @@ -2978,7 +2895,6 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__histo__0__parent_count", 1960), model.NewQueryResultCol("aggr__histo__0__key_0", "disorder"), model.NewQueryResultCol("aggr__histo__0__count", int64(1)), - model.NewQueryResultCol("aggr__histo__0__order_1", int64(1)), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__histo__key_0", 224.19300000000004), @@ -2986,7 +2902,6 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__histo__0__parent_count", 17), model.NewQueryResultCol("aggr__histo__0__key_0", nil), model.NewQueryResultCol("aggr__histo__0__count", int64(1)), - model.NewQueryResultCol("aggr__histo__0__order_1", int64(1)), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__histo__key_0", nil), @@ -2994,7 +2909,6 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__histo__0__parent_count", 15), model.NewQueryResultCol("aggr__histo__0__key_0", "a"), model.NewQueryResultCol("aggr__histo__0__count", int64(1)), - model.NewQueryResultCol("aggr__histo__0__order_1", int64(1)), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__histo__key_0", nil), @@ -3002,21 +2916,20 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__histo__0__parent_count", 15), model.NewQueryResultCol("aggr__histo__0__key_0", "b"), model.NewQueryResultCol("aggr__histo__0__count", int64(1)), - model.NewQueryResultCol("aggr__histo__0__order_1", int64(1)), }}, }, ExpectedPancakeSQL: ` SELECT "aggr__histo__key_0", "aggr__histo__count", "aggr__histo__0__parent_count", "aggr__histo__0__key_0", - "aggr__histo__0__count", "aggr__histo__0__order_1" + "aggr__histo__0__count" FROM ( SELECT "aggr__histo__key_0", "aggr__histo__count", "aggr__histo__0__parent_count", "aggr__histo__0__key_0", - "aggr__histo__0__count", "aggr__histo__0__order_1", + "aggr__histo__0__count", dense_rank() OVER (ORDER BY "aggr__histo__key_0" ASC) AS "aggr__histo__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__histo__key_0" ORDER BY - "aggr__histo__0__order_1" DESC, "aggr__histo__0__key_0" ASC) AS + "aggr__histo__0__count" DESC, "aggr__histo__0__key_0" ASC) AS "aggr__histo__0__order_1_rank" FROM ( SELECT floor("taxful_total_price"/224.19300000000004)*224.19300000000004 AS @@ -3025,7 +2938,7 @@ var AggregationTests2 = []AggregationTestCase{ "aggr__histo__count", sum(count(*)) OVER (PARTITION BY "aggr__histo__key_0") AS "aggr__histo__0__parent_count", "type" AS "aggr__histo__0__key_0", - count(*) AS "aggr__histo__0__count", count() AS "aggr__histo__0__order_1" + count(*) AS "aggr__histo__0__count" FROM __quesma_table_name GROUP BY floor("taxful_total_price"/224.19300000000004)*224.19300000000004 AS "aggr__histo__key_0", "type" AS "aggr__histo__0__key_0")) @@ -3240,7 +3153,6 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__histo__0__parent_count", 1960), model.NewQueryResultCol("aggr__histo__0__key_0", "order"), model.NewQueryResultCol("aggr__histo__0__count", int64(42)), - model.NewQueryResultCol("aggr__histo__0__order_1", int64(42)), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__histo__key_0", 0), @@ -3248,7 +3160,6 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__histo__0__parent_count", 1960), model.NewQueryResultCol("aggr__histo__0__key_0", "disorder"), model.NewQueryResultCol("aggr__histo__0__count", int64(1)), - 
model.NewQueryResultCol("aggr__histo__0__order_1", int64(1)), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__histo__key_0", 224.19300000000004), @@ -3256,7 +3167,6 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__histo__0__parent_count", 17), model.NewQueryResultCol("aggr__histo__0__key_0", nil), model.NewQueryResultCol("aggr__histo__0__count", int64(1)), - model.NewQueryResultCol("aggr__histo__0__order_1", int64(1)), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__histo__key_0", 800), @@ -3264,7 +3174,6 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__histo__0__parent_count", 15), model.NewQueryResultCol("aggr__histo__0__key_0", "a"), model.NewQueryResultCol("aggr__histo__0__count", int64(1)), - model.NewQueryResultCol("aggr__histo__0__order_1", int64(1)), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__histo__key_0", 800), @@ -3272,21 +3181,20 @@ var AggregationTests2 = []AggregationTestCase{ model.NewQueryResultCol("aggr__histo__0__parent_count", 15), model.NewQueryResultCol("aggr__histo__0__key_0", "b"), model.NewQueryResultCol("aggr__histo__0__count", int64(1)), - model.NewQueryResultCol("aggr__histo__0__order_1", int64(1)), }}, }, ExpectedPancakeSQL: ` SELECT "aggr__histo__key_0", "aggr__histo__count", "aggr__histo__0__parent_count", "aggr__histo__0__key_0", - "aggr__histo__0__count", "aggr__histo__0__order_1" + "aggr__histo__0__count" FROM ( SELECT "aggr__histo__key_0", "aggr__histo__count", "aggr__histo__0__parent_count", "aggr__histo__0__key_0", - "aggr__histo__0__count", "aggr__histo__0__order_1", + "aggr__histo__0__count", dense_rank() OVER (ORDER BY "aggr__histo__key_0" ASC) AS "aggr__histo__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__histo__key_0" ORDER BY - "aggr__histo__0__order_1" DESC, "aggr__histo__0__key_0" ASC) AS + "aggr__histo__0__count" DESC, "aggr__histo__0__key_0" ASC) AS "aggr__histo__0__order_1_rank" FROM ( SELECT floor(COALESCE("taxful_total_price", 800)/224.19300000000004)* @@ -3295,7 +3203,7 @@ var AggregationTests2 = []AggregationTestCase{ "aggr__histo__count", sum(count(*)) OVER (PARTITION BY "aggr__histo__key_0") AS "aggr__histo__0__parent_count", "type" AS "aggr__histo__0__key_0", - count(*) AS "aggr__histo__0__count", count() AS "aggr__histo__0__order_1" + count(*) AS "aggr__histo__0__count" FROM __quesma_table_name GROUP BY floor(COALESCE("taxful_total_price", 800)/224.19300000000004)* 224.19300000000004 AS "aggr__histo__key_0", diff --git a/quesma/testdata/clients/ophelia.go b/quesma/testdata/clients/ophelia.go index 3b671e6ce..14b34934c 100644 --- a/quesma/testdata/clients/ophelia.go +++ b/quesma/testdata/clients/ophelia.go @@ -188,93 +188,77 @@ var OpheliaTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a1"), model.NewQueryResultCol("aggr__2__count", uint64(1036)), - model.NewQueryResultCol("aggr__2__order_1", uint64(1036)), model.NewQueryResultCol("aggr__2__8__parent_count", 1036), model.NewQueryResultCol("aggr__2__8__key_0", "b11"), model.NewQueryResultCol("aggr__2__8__count", int64(21)), - model.NewQueryResultCol("aggr__2__8__order_1", int64(21)), model.NewQueryResultCol("aggr__2__8__4__parent_count", 21), model.NewQueryResultCol("aggr__2__8__4__key_0", "c11"), model.NewQueryResultCol("aggr__2__8__4__count", int64(21)), - model.NewQueryResultCol("aggr__2__8__4__order_1", int64(21)), }}, {Cols: []model.QueryResultCol{ 
model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a1"), model.NewQueryResultCol("aggr__2__count", uint64(1036)), - model.NewQueryResultCol("aggr__2__order_1", 1036), model.NewQueryResultCol("aggr__2__8__parent_count", 1036), model.NewQueryResultCol("aggr__2__8__key_0", "b12"), model.NewQueryResultCol("aggr__2__8__count", int64(24)), - model.NewQueryResultCol("aggr__2__8__order_1", 24), model.NewQueryResultCol("aggr__2__8__4__parent_count", 24), model.NewQueryResultCol("aggr__2__8__4__key_0", "c12"), model.NewQueryResultCol("aggr__2__8__4__count", int64(24)), - model.NewQueryResultCol("aggr__2__8__4__order_1", 24), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", uint64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", "b21"), model.NewQueryResultCol("aggr__2__8__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), model.NewQueryResultCol("aggr__2__8__4__parent_count", 17), model.NewQueryResultCol("aggr__2__8__4__key_0", "c21"), model.NewQueryResultCol("aggr__2__8__4__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__4__order_1", 17), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", uint64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__8__key_0", "b22"), model.NewQueryResultCol("aggr__2__8__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), model.NewQueryResultCol("aggr__2__8__4__parent_count", 17), model.NewQueryResultCol("aggr__2__8__4__key_0", "c22"), model.NewQueryResultCol("aggr__2__8__4__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__4__order_1", 17), }}, }, ExpectedPancakeSQL: ` SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", - "aggr__2__order_1", "aggr__2__8__parent_count", "aggr__2__8__key_0", - "aggr__2__8__count", "aggr__2__8__order_1", "aggr__2__8__4__parent_count", - "aggr__2__8__4__key_0", "aggr__2__8__4__count", "aggr__2__8__4__order_1" + "aggr__2__8__parent_count", "aggr__2__8__key_0", "aggr__2__8__count", + "aggr__2__8__4__parent_count", "aggr__2__8__4__key_0", "aggr__2__8__4__count" FROM ( SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", - "aggr__2__order_1", "aggr__2__8__parent_count", "aggr__2__8__key_0", - "aggr__2__8__count", "aggr__2__8__order_1", "aggr__2__8__4__parent_count", - "aggr__2__8__4__key_0", "aggr__2__8__4__count", "aggr__2__8__4__order_1", - dense_rank() OVER (ORDER BY "aggr__2__order_1" DESC, "aggr__2__key_0" ASC) - AS "aggr__2__order_1_rank", + "aggr__2__8__parent_count", "aggr__2__8__key_0", "aggr__2__8__count", + "aggr__2__8__4__parent_count", "aggr__2__8__4__key_0", + "aggr__2__8__4__count", + dense_rank() OVER (ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC) AS + "aggr__2__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__2__key_0" ORDER BY - "aggr__2__8__order_1" DESC, "aggr__2__8__key_0" ASC) AS + "aggr__2__8__count" DESC, "aggr__2__8__key_0" ASC) AS "aggr__2__8__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__2__key_0", "aggr__2__8__key_0" ORDER - BY "aggr__2__8__4__order_1" DESC, "aggr__2__8__4__key_0" ASC) AS 
+ BY "aggr__2__8__4__count" DESC, "aggr__2__8__4__key_0" ASC) AS "aggr__2__8__4__order_1_rank" FROM ( SELECT sum(count(*)) OVER () AS "aggr__2__parent_count", "surname" AS "aggr__2__key_0", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__count", - sum(count()) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__order_1", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__8__parent_count", COALESCE("limbName", '__missing__') AS "aggr__2__8__key_0", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0", "aggr__2__8__key_0") AS "aggr__2__8__count", - sum(count()) OVER (PARTITION BY "aggr__2__key_0", "aggr__2__8__key_0") AS - "aggr__2__8__order_1", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0", "aggr__2__8__key_0") AS "aggr__2__8__4__parent_count", "organName" AS "aggr__2__8__4__key_0", - count(*) AS "aggr__2__8__4__count", count() AS "aggr__2__8__4__order_1" - FROM ` + TableName + ` + count(*) AS "aggr__2__8__4__count" + FROM __quesma_table_name GROUP BY "surname" AS "aggr__2__key_0", COALESCE("limbName", '__missing__') AS "aggr__2__8__key_0", "organName" AS "aggr__2__8__4__key_0")) @@ -527,17 +511,14 @@ var OpheliaTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a1"), model.NewQueryResultCol("aggr__2__count", uint64(1036)), - model.NewQueryResultCol("aggr__2__order_1", 1036), model.NewQueryResultCol("metric__2__1_col_0", 1091661.760867), model.NewQueryResultCol("aggr__2__8__parent_count", 1036), model.NewQueryResultCol("aggr__2__8__key_0", "b12"), model.NewQueryResultCol("aggr__2__8__count", int64(24)), - model.NewQueryResultCol("aggr__2__8__order_1", 24), model.NewQueryResultCol("metric__2__8__1_col_0", 45774.291766666654), model.NewQueryResultCol("aggr__2__8__4__parent_count", 24), model.NewQueryResultCol("aggr__2__8__4__key_0", "c12"), model.NewQueryResultCol("aggr__2__8__4__count", int64(24)), - model.NewQueryResultCol("aggr__2__8__4__order_1", 24), model.NewQueryResultCol("metric__2__8__4__1_col_0", 45774.291766666654), model.NewQueryResultCol("metric__2__8__4__5_col_0", 36577.89516666666), }}, @@ -545,17 +526,14 @@ var OpheliaTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a1"), model.NewQueryResultCol("aggr__2__count", uint64(1036)), - model.NewQueryResultCol("aggr__2__order_1", 1036), model.NewQueryResultCol("metric__2__1_col_0", 1091661.760867), model.NewQueryResultCol("aggr__2__8__parent_count", 1036), model.NewQueryResultCol("aggr__2__8__key_0", "b11"), model.NewQueryResultCol("aggr__2__8__count", int64(21)), - model.NewQueryResultCol("aggr__2__8__order_1", 21), model.NewQueryResultCol("metric__2__8__1_col_0", 51891.94613333333), model.NewQueryResultCol("aggr__2__8__4__parent_count", 21), model.NewQueryResultCol("aggr__2__8__4__key_0", "c11"), model.NewQueryResultCol("aggr__2__8__4__count", int64(21)), - model.NewQueryResultCol("aggr__2__8__4__order_1", 21), model.NewQueryResultCol("metric__2__8__4__1_col_0", 51891.94613333333), model.NewQueryResultCol("metric__2__8__4__5_col_0", 37988.09523333333), }}, @@ -563,17 +541,14 @@ var OpheliaTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", uint64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("metric__2__1_col_0", 630270.07765), 
model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", "b21"), model.NewQueryResultCol("aggr__2__8__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), model.NewQueryResultCol("metric__2__8__1_col_0", 399126.7496833334), model.NewQueryResultCol("aggr__2__8__4__parent_count", 17), model.NewQueryResultCol("aggr__2__8__4__key_0", "c21"), model.NewQueryResultCol("aggr__2__8__4__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__4__order_1", 17), model.NewQueryResultCol("metric__2__8__4__1_col_0", 399126.7496833334), model.NewQueryResultCol("metric__2__8__4__5_col_0", 337246.82201666664), }}, @@ -581,48 +556,42 @@ var OpheliaTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__2__parent_count", 34290), model.NewQueryResultCol("aggr__2__key_0", "a2"), model.NewQueryResultCol("aggr__2__count", uint64(34)), - model.NewQueryResultCol("aggr__2__order_1", 34), model.NewQueryResultCol("metric__2__1_col_0", 231143.3279666666), model.NewQueryResultCol("aggr__2__8__parent_count", 34), model.NewQueryResultCol("aggr__2__8__key_0", "b22"), model.NewQueryResultCol("aggr__2__8__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__order_1", 17), model.NewQueryResultCol("metric__2__8__1_col_0", 231143.3279666666), model.NewQueryResultCol("aggr__2__8__4__parent_count", 17), model.NewQueryResultCol("aggr__2__8__4__key_0", "c22"), model.NewQueryResultCol("aggr__2__8__4__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__4__order_1", 17), model.NewQueryResultCol("metric__2__8__4__1_col_0", 231143.3279666666), model.NewQueryResultCol("metric__2__8__4__5_col_0", 205408.48849999998), }}, }, ExpectedPancakeSQL: ` SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", - "aggr__2__order_1", "metric__2__1_col_0", "aggr__2__8__parent_count", - "aggr__2__8__key_0", "aggr__2__8__count", "aggr__2__8__order_1", - "metric__2__8__1_col_0", "aggr__2__8__4__parent_count", - "aggr__2__8__4__key_0", "aggr__2__8__4__count", "aggr__2__8__4__order_1", - "metric__2__8__4__1_col_0", "metric__2__8__4__5_col_0" + "metric__2__1_col_0", "aggr__2__8__parent_count", "aggr__2__8__key_0", + "aggr__2__8__count", "metric__2__8__1_col_0", "aggr__2__8__4__parent_count", + "aggr__2__8__4__key_0", "aggr__2__8__4__count", "metric__2__8__4__1_col_0", + "metric__2__8__4__5_col_0" FROM ( SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", - "aggr__2__order_1", "metric__2__1_col_0", "aggr__2__8__parent_count", - "aggr__2__8__key_0", "aggr__2__8__count", "aggr__2__8__order_1", - "metric__2__8__1_col_0", "aggr__2__8__4__parent_count", - "aggr__2__8__4__key_0", "aggr__2__8__4__count", "aggr__2__8__4__order_1", - "metric__2__8__4__1_col_0", "metric__2__8__4__5_col_0", - dense_rank() OVER (ORDER BY "aggr__2__order_1" DESC, "aggr__2__key_0" ASC) - AS "aggr__2__order_1_rank", + "metric__2__1_col_0", "aggr__2__8__parent_count", "aggr__2__8__key_0", + "aggr__2__8__count", "metric__2__8__1_col_0", "aggr__2__8__4__parent_count", + "aggr__2__8__4__key_0", "aggr__2__8__4__count", "metric__2__8__4__1_col_0", + "metric__2__8__4__5_col_0", + dense_rank() OVER (ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC) AS + "aggr__2__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__2__key_0" ORDER BY - "aggr__2__8__order_1" DESC, "aggr__2__8__key_0" ASC) AS + "aggr__2__8__count" DESC, "aggr__2__8__key_0" ASC) AS "aggr__2__8__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__2__key_0", "aggr__2__8__key_0" ORDER - BY 
"aggr__2__8__4__order_1" DESC, "aggr__2__8__4__key_0" ASC) AS + BY "aggr__2__8__4__count" DESC, "aggr__2__8__4__key_0" ASC) AS "aggr__2__8__4__order_1_rank" FROM ( SELECT sum(count(*)) OVER () AS "aggr__2__parent_count", "surname" AS "aggr__2__key_0", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__count", - sum(count()) OVER (PARTITION BY "aggr__2__key_0") AS "aggr__2__order_1", sumOrNull(sumOrNull("total")) OVER (PARTITION BY "aggr__2__key_0") AS "metric__2__1_col_0", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0") AS @@ -630,16 +599,14 @@ var OpheliaTests = []testdata.AggregationTestCase{ COALESCE("limbName", '__missing__') AS "aggr__2__8__key_0", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0", "aggr__2__8__key_0") AS "aggr__2__8__count", - sum(count()) OVER (PARTITION BY "aggr__2__key_0", "aggr__2__8__key_0") AS - "aggr__2__8__order_1", sumOrNull(sumOrNull("total")) OVER (PARTITION BY "aggr__2__key_0", "aggr__2__8__key_0") AS "metric__2__8__1_col_0", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0", "aggr__2__8__key_0") AS "aggr__2__8__4__parent_count", "organName" AS "aggr__2__8__4__key_0", - count(*) AS "aggr__2__8__4__count", count() AS "aggr__2__8__4__order_1", + count(*) AS "aggr__2__8__4__count", sumOrNull("total") AS "metric__2__8__4__1_col_0", sumOrNull("some") AS "metric__2__8__4__5_col_0" - FROM ` + TableName + ` + FROM __quesma_table_name GROUP BY "surname" AS "aggr__2__key_0", COALESCE("limbName", '__missing__') AS "aggr__2__8__key_0", "organName" AS "aggr__2__8__4__key_0")) @@ -1736,7 +1703,6 @@ var OpheliaTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__2__8__4__5__parent_count", 17), model.NewQueryResultCol("aggr__2__8__4__5__key_0", "d22"), model.NewQueryResultCol("aggr__2__8__4__5__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__4__5__order_1", 17), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), @@ -1753,7 +1719,6 @@ var OpheliaTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__2__8__4__5__parent_count", 17), model.NewQueryResultCol("aggr__2__8__4__5__key_0", "d21"), model.NewQueryResultCol("aggr__2__8__4__5__count", int64(17)), - model.NewQueryResultCol("aggr__2__8__4__5__order_1", 17), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), @@ -1770,7 +1735,6 @@ var OpheliaTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__2__8__4__5__parent_count", 24), model.NewQueryResultCol("aggr__2__8__4__5__key_0", "d12"), model.NewQueryResultCol("aggr__2__8__4__5__count", int64(24)), - model.NewQueryResultCol("aggr__2__8__4__5__order_1", 24), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 34290), @@ -1788,7 +1752,6 @@ var OpheliaTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__2__8__4__5__parent_count", 21), model.NewQueryResultCol("aggr__2__8__4__5__key_0", "d11"), model.NewQueryResultCol("aggr__2__8__4__5__count", int64(21)), - model.NewQueryResultCol("aggr__2__8__4__5__order_1", 21), }}, }, ExpectedPancakeSQL: ` @@ -1797,7 +1760,7 @@ var OpheliaTests = []testdata.AggregationTestCase{ "aggr__2__8__order_1", "metric__2__8__1_col_0", "aggr__2__8__4__parent_count", "aggr__2__8__4__key_0", "aggr__2__8__4__count", "aggr__2__8__4__5__parent_count", "aggr__2__8__4__5__key_0", - "aggr__2__8__4__5__count", "aggr__2__8__4__5__order_1" + "aggr__2__8__4__5__count" FROM ( SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", 
"aggr__2__8__parent_count", "aggr__2__8__key_0", "aggr__2__8__count", @@ -1805,7 +1768,6 @@ var OpheliaTests = []testdata.AggregationTestCase{ "aggr__2__8__4__parent_count", "aggr__2__8__4__key_0", "aggr__2__8__4__count", "aggr__2__8__4__5__parent_count", "aggr__2__8__4__5__key_0", "aggr__2__8__4__5__count", - "aggr__2__8__4__5__order_1", dense_rank() OVER (ORDER BY "aggr__2__key_0" DESC) AS "aggr__2__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__2__key_0" ORDER BY @@ -1814,7 +1776,7 @@ var OpheliaTests = []testdata.AggregationTestCase{ dense_rank() OVER (PARTITION BY "aggr__2__key_0", "aggr__2__8__key_0" ORDER BY "aggr__2__8__4__key_0" DESC) AS "aggr__2__8__4__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__2__key_0", "aggr__2__8__key_0", - "aggr__2__8__4__key_0" ORDER BY "aggr__2__8__4__5__order_1" DESC, + "aggr__2__8__4__key_0" ORDER BY "aggr__2__8__4__5__count" DESC, "aggr__2__8__4__key_0" ASC, "aggr__2__8__4__5__key_0" ASC) AS "aggr__2__8__4__5__order_1_rank" FROM ( @@ -1837,9 +1799,8 @@ var OpheliaTests = []testdata.AggregationTestCase{ sum(count(*)) OVER (PARTITION BY "aggr__2__key_0", "aggr__2__8__key_0", "aggr__2__8__4__key_0") AS "aggr__2__8__4__5__parent_count", "organName" AS "aggr__2__8__4__5__key_0", - count(*) AS "aggr__2__8__4__5__count", - count() AS "aggr__2__8__4__5__order_1" - FROM ` + TableName + ` + count(*) AS "aggr__2__8__4__5__count" + FROM __quesma_table_name GROUP BY "surname" AS "aggr__2__key_0", COALESCE("limbName", '__missing__') AS "aggr__2__8__key_0", "organName" AS "aggr__2__8__4__key_0", @@ -2554,7 +2515,6 @@ var OpheliaTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__2__7__8__parent_count", 21), model.NewQueryResultCol("aggr__2__7__8__key_0", "c1"), model.NewQueryResultCol("aggr__2__7__8__count", int64(21)), - model.NewQueryResultCol("aggr__2__7__8__order_1", int64(21)), model.NewQueryResultCol("metric__2__7__8__1_col_0", 51891.94613333333), model.NewQueryResultCol("aggr__2__7__8__4__parent_count", 21), model.NewQueryResultCol("aggr__2__7__8__4__key_0", "d11"), @@ -2583,7 +2543,6 @@ var OpheliaTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__2__7__8__parent_count", 21), model.NewQueryResultCol("aggr__2__7__8__key_0", "c1"), model.NewQueryResultCol("aggr__2__7__8__count", int64(21)), - model.NewQueryResultCol("aggr__2__7__8__order_1", int64(21)), model.NewQueryResultCol("metric__2__7__8__1_col_0", 51891.94613333333), model.NewQueryResultCol("aggr__2__7__8__4__parent_count", 21), model.NewQueryResultCol("aggr__2__7__8__4__key_0", "d12"), @@ -2612,7 +2571,6 @@ var OpheliaTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__2__7__8__parent_count", 4), model.NewQueryResultCol("aggr__2__7__8__key_0", "c2"), model.NewQueryResultCol("aggr__2__7__8__count", int64(3)), - model.NewQueryResultCol("aggr__2__7__8__order_1", int64(3)), model.NewQueryResultCol("metric__2__7__8__1_col_0", 0.2), model.NewQueryResultCol("aggr__2__7__8__4__parent_count", 3), model.NewQueryResultCol("aggr__2__7__8__4__key_0", "d2"), @@ -2633,32 +2591,31 @@ var OpheliaTests = []testdata.AggregationTestCase{ "aggr__2__order_1", "metric__2__1_col_0", "aggr__2__7__parent_count", "aggr__2__7__key_0", "aggr__2__7__count", "metric__2__7__1_col_0", "aggr__2__7__8__parent_count", "aggr__2__7__8__key_0", "aggr__2__7__8__count", - "aggr__2__7__8__order_1", "metric__2__7__8__1_col_0", - "aggr__2__7__8__4__parent_count", "aggr__2__7__8__4__key_0", - "aggr__2__7__8__4__count", "aggr__2__7__8__4__order_1", - 
"metric__2__7__8__4__1_col_0", "aggr__2__7__8__4__3__parent_count", - "aggr__2__7__8__4__3__key_0", "aggr__2__7__8__4__3__count", - "aggr__2__7__8__4__3__order_1", "metric__2__7__8__4__3__1_col_0", - "metric__2__7__8__4__3__5_col_0", "metric__2__7__8__4__3__6_col_0" + "metric__2__7__8__1_col_0", "aggr__2__7__8__4__parent_count", + "aggr__2__7__8__4__key_0", "aggr__2__7__8__4__count", + "aggr__2__7__8__4__order_1", "metric__2__7__8__4__1_col_0", + "aggr__2__7__8__4__3__parent_count", "aggr__2__7__8__4__3__key_0", + "aggr__2__7__8__4__3__count", "aggr__2__7__8__4__3__order_1", + "metric__2__7__8__4__3__1_col_0", "metric__2__7__8__4__3__5_col_0", + "metric__2__7__8__4__3__6_col_0" FROM ( SELECT "aggr__2__parent_count", "aggr__2__key_0", "aggr__2__count", "aggr__2__order_1", "metric__2__1_col_0", "aggr__2__7__parent_count", "aggr__2__7__key_0", "aggr__2__7__count", "metric__2__7__1_col_0", "aggr__2__7__8__parent_count", "aggr__2__7__8__key_0", - "aggr__2__7__8__count", "aggr__2__7__8__order_1", - "metric__2__7__8__1_col_0", "aggr__2__7__8__4__parent_count", - "aggr__2__7__8__4__key_0", "aggr__2__7__8__4__count", - "aggr__2__7__8__4__order_1", "metric__2__7__8__4__1_col_0", - "aggr__2__7__8__4__3__parent_count", "aggr__2__7__8__4__3__key_0", - "aggr__2__7__8__4__3__count", "aggr__2__7__8__4__3__order_1", - "metric__2__7__8__4__3__1_col_0", "metric__2__7__8__4__3__5_col_0", - "metric__2__7__8__4__3__6_col_0", + "aggr__2__7__8__count", "metric__2__7__8__1_col_0", + "aggr__2__7__8__4__parent_count", "aggr__2__7__8__4__key_0", + "aggr__2__7__8__4__count", "aggr__2__7__8__4__order_1", + "metric__2__7__8__4__1_col_0", "aggr__2__7__8__4__3__parent_count", + "aggr__2__7__8__4__3__key_0", "aggr__2__7__8__4__3__count", + "aggr__2__7__8__4__3__order_1", "metric__2__7__8__4__3__1_col_0", + "metric__2__7__8__4__3__5_col_0", "metric__2__7__8__4__3__6_col_0", dense_rank() OVER (ORDER BY "aggr__2__order_1" DESC, "aggr__2__key_0" ASC) AS "aggr__2__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__2__key_0" ORDER BY "aggr__2__7__key_0" ASC) AS "aggr__2__7__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__2__key_0", "aggr__2__7__key_0" ORDER - BY "aggr__2__7__8__order_1" DESC, "aggr__2__7__8__key_0" ASC) AS + BY "aggr__2__7__8__count" DESC, "aggr__2__7__8__key_0" ASC) AS "aggr__2__7__8__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__2__key_0", "aggr__2__7__key_0", "aggr__2__7__8__key_0" ORDER BY "aggr__2__7__8__4__order_1" DESC, @@ -2687,8 +2644,6 @@ var OpheliaTests = []testdata.AggregationTestCase{ COALESCE("organName", '__missing__') AS "aggr__2__7__8__key_0", sum(count(*)) OVER (PARTITION BY "aggr__2__key_0", "aggr__2__7__key_0", "aggr__2__7__8__key_0") AS "aggr__2__7__8__count", - sum(count()) OVER (PARTITION BY "aggr__2__key_0", "aggr__2__7__key_0", - "aggr__2__7__8__key_0") AS "aggr__2__7__8__order_1", sumOrNull(sumOrNull("total")) OVER (PARTITION BY "aggr__2__key_0", "aggr__2__7__key_0", "aggr__2__7__8__key_0") AS "metric__2__7__8__1_col_0" , diff --git a/quesma/testdata/full_search_requests.go b/quesma/testdata/full_search_requests.go index 92d0d8d09..46509eaf2 100644 --- a/quesma/testdata/full_search_requests.go +++ b/quesma/testdata/full_search_requests.go @@ -8,10 +8,10 @@ import ( ) func selectCnt(limit int) string { - return fmt.Sprintf(`SELECT count() FROM (SELECT 1 FROM %s LIMIT %d)`, TableName, limit) + return fmt.Sprintf(`SELECT count(*) FROM (SELECT 1 FROM %s LIMIT %d)`, TableName, limit) } func selectTotalCnt() string { - return fmt.Sprintf("SELECT count() FROM %s", TableName) 
+ return fmt.Sprintf("SELECT count(*) FROM %s", TableName) } func selectStar(limit int) string { return fmt.Sprintf("SELECT \"message\" FROM %s LIMIT %d", TableName, limit) diff --git a/quesma/testdata/kibana-visualize/aggregation_requests.go b/quesma/testdata/kibana-visualize/aggregation_requests.go index 5e5975499..75251bf90 100644 --- a/quesma/testdata/kibana-visualize/aggregation_requests.go +++ b/quesma/testdata/kibana-visualize/aggregation_requests.go @@ -184,7 +184,6 @@ var AggregationTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__0__1__key_0", "artemis"), model.NewQueryResultCol("aggr__0__1__key_1", "error"), model.NewQueryResultCol("aggr__0__1__count", 1), - model.NewQueryResultCol("aggr__0__1__order_2", 1), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__0__key_0", int64(1716834210000/30000)), @@ -193,7 +192,6 @@ var AggregationTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__0__1__key_0", "artemis"), model.NewQueryResultCol("aggr__0__1__key_1", "info"), model.NewQueryResultCol("aggr__0__1__count", 1), - model.NewQueryResultCol("aggr__0__1__order_2", 1), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__0__key_0", int64(1716834210000/30000)), @@ -202,7 +200,6 @@ var AggregationTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__0__1__key_0", "jupiter"), model.NewQueryResultCol("aggr__0__1__key_1", "info"), model.NewQueryResultCol("aggr__0__1__count", 1), - model.NewQueryResultCol("aggr__0__1__order_2", 1), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__0__key_0", int64(1716834270000/30000)), @@ -211,7 +208,6 @@ var AggregationTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__0__1__key_0", "apollo"), model.NewQueryResultCol("aggr__0__1__key_1", "info"), model.NewQueryResultCol("aggr__0__1__count", 2), - model.NewQueryResultCol("aggr__0__1__order_2", 2), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__0__key_0", int64(1716834270000/30000)), @@ -220,36 +216,34 @@ var AggregationTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__0__1__key_0", "cassandra"), model.NewQueryResultCol("aggr__0__1__key_1", "debug"), model.NewQueryResultCol("aggr__0__1__count", 1), - model.NewQueryResultCol("aggr__0__1__order_2", 1), }}, }, ExpectedPancakeSQL: ` SELECT "aggr__0__key_0", "aggr__0__count", "aggr__0__1__parent_count", - "aggr__0__1__key_0", "aggr__0__1__key_1", "aggr__0__1__count", - "aggr__0__1__order_2" + "aggr__0__1__key_0", "aggr__0__1__key_1", "aggr__0__1__count" FROM ( SELECT "aggr__0__key_0", "aggr__0__count", "aggr__0__1__parent_count", "aggr__0__1__key_0", "aggr__0__1__key_1", "aggr__0__1__count", - "aggr__0__1__order_2", dense_rank() OVER (ORDER BY "aggr__0__key_0" ASC) AS "aggr__0__order_1_rank" , dense_rank() OVER (PARTITION BY "aggr__0__key_0" ORDER BY - "aggr__0__1__order_2" DESC, "aggr__0__1__key_0" ASC, "aggr__0__1__key_1" ASC - ) AS "aggr__0__1__order_1_rank" + "aggr__0__1__count" DESC, "aggr__0__1__key_0" ASC, "aggr__0__1__key_1" ASC) + AS "aggr__0__1__order_1_rank" FROM ( SELECT toInt64((toUnixTimestamp64Milli("@timestamp")+timeZoneOffset( - toTimezone("@timestamp", 'Europe/Warsaw'))*1000) / 30000) AS "aggr__0__key_0", + toTimezone("@timestamp", 'Europe/Warsaw'))*1000) / 30000) AS + "aggr__0__key_0", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__count", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0") AS "aggr__0__1__parent_count", "severity" AS "aggr__0__1__key_0", - "source" AS 
"aggr__0__1__key_1", count(*) AS "aggr__0__1__count", - count() AS "aggr__0__1__order_2" - FROM ` + TableName + ` + "source" AS "aggr__0__1__key_1", count(*) AS "aggr__0__1__count" + FROM __quesma_table_name WHERE ("@timestamp">=parseDateTime64BestEffort('2024-05-27T11:59:56.627Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-05-27T12:14:56.627Z')) GROUP BY toInt64((toUnixTimestamp64Milli("@timestamp")+timeZoneOffset( - toTimezone("@timestamp", 'Europe/Warsaw'))*1000) / 30000) AS "aggr__0__key_0", - "severity" AS "aggr__0__1__key_0", "source" AS "aggr__0__1__key_1")) + toTimezone("@timestamp", 'Europe/Warsaw'))*1000) / 30000) AS + "aggr__0__key_0", "severity" AS "aggr__0__1__key_0", + "source" AS "aggr__0__1__key_1")) WHERE "aggr__0__1__order_1_rank"<=3 ORDER BY "aggr__0__order_1_rank" ASC, "aggr__0__1__order_1_rank" ASC`, }, @@ -381,7 +375,6 @@ var AggregationTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__0__key_0", "info"), model.NewQueryResultCol("aggr__0__key_1", "redhat"), model.NewQueryResultCol("aggr__0__count", 13), - model.NewQueryResultCol("aggr__0__order_2", 13), model.NewQueryResultCol("aggr__0__1__key_0", int64(1716834420000/30000)), model.NewQueryResultCol("aggr__0__1__count", 1), model.NewQueryResultCol("aggr__0__1__order_1", int64(1716834420000/30000)), @@ -391,7 +384,6 @@ var AggregationTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__0__key_0", "info"), model.NewQueryResultCol("aggr__0__key_1", "redhat"), model.NewQueryResultCol("aggr__0__count", 13), - model.NewQueryResultCol("aggr__0__order_2", 13), model.NewQueryResultCol("aggr__0__1__key_0", int64(1716834450000/30000)), model.NewQueryResultCol("aggr__0__1__count", 1), model.NewQueryResultCol("aggr__0__1__order_1", int64(1716834450000/30000)), @@ -401,7 +393,6 @@ var AggregationTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__0__key_0", "info"), model.NewQueryResultCol("aggr__0__key_1", "redhat"), model.NewQueryResultCol("aggr__0__count", 13), - model.NewQueryResultCol("aggr__0__order_2", 13), model.NewQueryResultCol("aggr__0__1__key_0", int64(1716834510000/30000)), model.NewQueryResultCol("aggr__0__1__count", 2), model.NewQueryResultCol("aggr__0__1__order_1", int64(1716834510000/30000)), @@ -409,13 +400,12 @@ var AggregationTests = []testdata.AggregationTestCase{ }, ExpectedPancakeSQL: ` SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__key_1", - "aggr__0__count", "aggr__0__order_2", "aggr__0__1__key_0", - "aggr__0__1__count" + "aggr__0__count", "aggr__0__1__key_0", "aggr__0__1__count" FROM ( SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__key_1", - "aggr__0__count", "aggr__0__order_2", "aggr__0__1__key_0", + "aggr__0__count", "aggr__0__1__key_0", "aggr__0__1__count", - dense_rank() OVER (ORDER BY "aggr__0__order_2" DESC, "aggr__0__key_0" ASC, + dense_rank() OVER (ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC, "aggr__0__key_1" ASC) AS "aggr__0__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__0__key_0", "aggr__0__key_1" ORDER BY "aggr__0__1__key_0" ASC) AS "aggr__0__1__order_1_rank" @@ -424,8 +414,6 @@ var AggregationTests = []testdata.AggregationTestCase{ "message" AS "aggr__0__key_0", "host.name" AS "aggr__0__key_1", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0", "aggr__0__key_1") AS "aggr__0__count", - sum(count()) OVER (PARTITION BY "aggr__0__key_0", "aggr__0__key_1") AS - "aggr__0__order_2", toInt64(toUnixTimestamp64Milli("@timestamp") / 30000) AS "aggr__0__1__key_0", count(*) AS "aggr__0__1__count" FROM 
` + TableName + ` @@ -813,7 +801,6 @@ var AggregationTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__0__key_0", false), model.NewQueryResultCol("aggr__0__key_1", 167.05126953125), model.NewQueryResultCol("aggr__0__count", 1), - model.NewQueryResultCol("aggr__0__order_2", 1), model.NewQueryResultCol("aggr__0__1__key_0", int64(1716839040000/30000)), model.NewQueryResultCol("aggr__0__1__count", 1), model.NewQueryResultCol("aggr__0__1__order_1", int64(1716839040000/30000)), @@ -823,7 +810,6 @@ var AggregationTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__0__key_0", false), model.NewQueryResultCol("aggr__0__key_1", 331.336181640625), model.NewQueryResultCol("aggr__0__count", 1), - model.NewQueryResultCol("aggr__0__order_2", 1), model.NewQueryResultCol("aggr__0__1__key_0", int64(1716838530000/30000)), model.NewQueryResultCol("aggr__0__1__count", 1), model.NewQueryResultCol("aggr__0__1__order_1", int64(1716838530000/30000)), @@ -833,7 +819,6 @@ var AggregationTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__0__key_0", false), model.NewQueryResultCol("aggr__0__key_1", 714.4038696289062), model.NewQueryResultCol("aggr__0__count", 1), - model.NewQueryResultCol("aggr__0__order_2", 1), model.NewQueryResultCol("aggr__0__1__key_0", int64(1716838500000/30000)), model.NewQueryResultCol("aggr__0__1__count", 1), model.NewQueryResultCol("aggr__0__1__order_1", int64(1716838500000/30000)), @@ -841,13 +826,12 @@ var AggregationTests = []testdata.AggregationTestCase{ }, ExpectedPancakeSQL: ` SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__key_1", - "aggr__0__count", "aggr__0__order_2", "aggr__0__1__key_0", - "aggr__0__1__count" + "aggr__0__count", "aggr__0__1__key_0", "aggr__0__1__count" FROM ( SELECT "aggr__0__parent_count", "aggr__0__key_0", "aggr__0__key_1", - "aggr__0__count", "aggr__0__order_2", "aggr__0__1__key_0", + "aggr__0__count", "aggr__0__1__key_0", "aggr__0__1__count", - dense_rank() OVER (ORDER BY "aggr__0__order_2" DESC, "aggr__0__key_0" ASC, + dense_rank() OVER (ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC, "aggr__0__key_1" ASC) AS "aggr__0__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__0__key_0", "aggr__0__key_1" ORDER BY "aggr__0__1__key_0" ASC) AS "aggr__0__1__order_1_rank" @@ -856,8 +840,6 @@ var AggregationTests = []testdata.AggregationTestCase{ "Cancelled" AS "aggr__0__key_0", "AvgTicketPrice" AS "aggr__0__key_1", sum(count(*)) OVER (PARTITION BY "aggr__0__key_0", "aggr__0__key_1") AS "aggr__0__count", - sum(count()) OVER (PARTITION BY "aggr__0__key_0", "aggr__0__key_1") AS - "aggr__0__order_2", toInt64(toUnixTimestamp64Milli("@timestamp") / 30000) AS "aggr__0__1__key_0", count(*) AS "aggr__0__1__count" FROM ` + TableName + ` diff --git a/quesma/testdata/opensearch-visualize/aggregation_requests.go b/quesma/testdata/opensearch-visualize/aggregation_requests.go index ab36b0248..c778c3b34 100644 --- a/quesma/testdata/opensearch-visualize/aggregation_requests.go +++ b/quesma/testdata/opensearch-visualize/aggregation_requests.go @@ -739,29 +739,24 @@ var AggregationTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__2__parent_count", 5000), model.NewQueryResultCol("aggr__2__key_0", "200"), model.NewQueryResultCol("aggr__2__count", int64(2570)), - model.NewQueryResultCol("aggr__2__order_1", 2570), model.NewQueryResultCol("metric__2__1_col_0", util.ParseTime("2024-05-02T21:58:16.297Z")), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 5000), 
model.NewQueryResultCol("aggr__2__key_0", "503"), model.NewQueryResultCol("aggr__2__count", int64(94)), - model.NewQueryResultCol("aggr__2__order_1", 94), model.NewQueryResultCol("metric__2__1_col_0", util.ParseTime("2024-05-02T15:59:12.949Z")), }}, }, ExpectedPancakeSQL: ` - SELECT - sum(count(*)) OVER () AS "aggr__2__parent_count", - "response" AS "aggr__2__key_0", - count(*) AS "aggr__2__count", - count() AS "aggr__2__order_1", + SELECT sum(count(*)) OVER () AS "aggr__2__parent_count", + "response" AS "aggr__2__key_0", count(*) AS "aggr__2__count", maxOrNull("timestamp") AS "metric__2__1_col_0" - FROM ` + TableName + ` + FROM __quesma_table_name WHERE ("timestamp">=parseDateTime64BestEffort('2024-04-18T00:49:59.517Z') AND "timestamp"<=parseDateTime64BestEffort('2024-05-03T00:49:59.517Z')) GROUP BY "response" AS "aggr__2__key_0" - ORDER BY "aggr__2__order_1" DESC, "aggr__2__key_0" ASC + ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC LIMIT 4`, }, { // [5] @@ -888,29 +883,24 @@ var AggregationTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__2__parent_count", 5300), model.NewQueryResultCol("aggr__2__key_0", "200"), model.NewQueryResultCol("aggr__2__count", uint64(2570)), - model.NewQueryResultCol("aggr__2__order_1", 2570), model.NewQueryResultCol("metric__2__1_col_0", util.ParseTime("2024-04-21T00:39:02.912Z")), }}, {Cols: []model.QueryResultCol{ model.NewQueryResultCol("aggr__2__parent_count", 5300), model.NewQueryResultCol("aggr__2__key_0", "503"), model.NewQueryResultCol("aggr__2__count", uint64(94)), - model.NewQueryResultCol("aggr__2__order_1", 94), model.NewQueryResultCol("metric__2__1_col_0", util.ParseTime("2024-04-21T03:30:25.131Z")), }}, }, ExpectedPancakeSQL: ` - SELECT - sum(count(*)) OVER () AS "aggr__2__parent_count", - "response" AS "aggr__2__key_0", - count(*) AS "aggr__2__count", - count() AS "aggr__2__order_1", + SELECT sum(count(*)) OVER () AS "aggr__2__parent_count", + "response" AS "aggr__2__key_0", count(*) AS "aggr__2__count", minOrNull("timestamp") AS "metric__2__1_col_0" - FROM ` + TableName + ` + FROM __quesma_table_name WHERE ("timestamp">=parseDateTime64BestEffort('2024-04-18T00:51:00.471Z') AND "timestamp"<=parseDateTime64BestEffort('2024-05-03T00:51:00.471Z')) GROUP BY "response" AS "aggr__2__key_0" - ORDER BY "aggr__2__order_1" DESC, "aggr__2__key_0" ASC + ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC LIMIT 4`, }, { // [6] @@ -1064,7 +1054,6 @@ var AggregationTests = []testdata.AggregationTestCase{ model.NewQueryResultCol("aggr__2__parent_count", int64(2786)), model.NewQueryResultCol("aggr__2__key_0", "200"), model.NewQueryResultCol("aggr__2__count", int64(2570)), - model.NewQueryResultCol("aggr__2__order_1", 2570), model.NewQueryResultCol("metric__2__1_col_0", []time.Time{util.ParseTime("2024-04-21T06:11:13.619Z")}), model.NewQueryResultCol("metric__2__1_col_1", []time.Time{util.ParseTime("2024-04-21T12:21:13.414Z")}), model.NewQueryResultCol("metric__2__1_col_2", []time.Time{util.ParseTime("2024-04-23T18:47:45.613Z")}), @@ -1075,11 +1064,8 @@ var AggregationTests = []testdata.AggregationTestCase{ }}, }, ExpectedPancakeSQL: ` - SELECT - sum(count(*)) OVER () AS "aggr__2__parent_count", - "response" AS "aggr__2__key_0", - count(*) AS "aggr__2__count", - count() AS "aggr__2__order_1", + SELECT sum(count(*)) OVER () AS "aggr__2__parent_count", + "response" AS "aggr__2__key_0", count(*) AS "aggr__2__count", quantiles(0.010000)("timestamp") AS "metric__2__1_col_0", quantiles(0.020000)("timestamp") AS "metric__2__1_col_1", 
quantiles(0.250000)("timestamp") AS "metric__2__1_col_2", @@ -1087,11 +1073,11 @@ var AggregationTests = []testdata.AggregationTestCase{ quantiles(0.750000)("timestamp") AS "metric__2__1_col_4", quantiles(0.950000)("timestamp") AS "metric__2__1_col_5", quantiles(0.990000)("timestamp") AS "metric__2__1_col_6" - FROM ` + TableName + ` + FROM __quesma_table_name WHERE ("timestamp">=parseDateTime64BestEffort('2024-04-18T00:51:15.845Z') AND "timestamp"<=parseDateTime64BestEffort('2024-05-03T00:51:15.845Z')) GROUP BY "response" AS "aggr__2__key_0" - ORDER BY "aggr__2__order_1" DESC, "aggr__2__key_0" ASC + ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC LIMIT 4`, }, { // [7] diff --git a/quesma/testdata/opensearch-visualize/pipeline_aggregation_requests.go b/quesma/testdata/opensearch-visualize/pipeline_aggregation_requests.go index c0caee41f..f5481fff1 100644 --- a/quesma/testdata/opensearch-visualize/pipeline_aggregation_requests.go +++ b/quesma/testdata/opensearch-visualize/pipeline_aggregation_requests.go @@ -4347,11 +4347,10 @@ var PipelineAggregationTests = []testdata.AggregationTestCase{ ExpectedPancakeSQL: ` SELECT sum(count(*)) OVER () AS "aggr__1-bucket__parent_count", "extension" AS "aggr__1-bucket__key_0", count(*) AS "aggr__1-bucket__count", - count() AS "aggr__1-bucket__order_1", avgOrNull("machine.ram") AS "metric__1-bucket__1-metric_col_0" FROM __quesma_table_name GROUP BY "extension" AS "aggr__1-bucket__key_0" - ORDER BY "aggr__1-bucket__order_1" DESC, "aggr__1-bucket__key_0" ASC + ORDER BY "aggr__1-bucket__count" DESC, "aggr__1-bucket__key_0" ASC LIMIT 6`, }, { // [25] diff --git a/quesma/testdata/requests.go b/quesma/testdata/requests.go index cdcd79a80..4bf8720f6 100644 --- a/quesma/testdata/requests.go +++ b/quesma/testdata/requests.go @@ -148,7 +148,7 @@ var TestsAsyncSearch = []AsyncSearchTestCase{ "no comment yet", model.SearchQueryInfo{Typ: model.Facets, FieldName: "host.name", I1: 10, I2: 5000}, []string{ - `SELECT "host.name" AS "key", count() AS "doc_count" + `SELECT "host.name" AS "key", count(*) AS "doc_count" FROM ( SELECT "host.name" FROM __quesma_table_name @@ -157,7 +157,7 @@ var TestsAsyncSearch = []AsyncSearchTestCase{ "message" iLIKE '%user%') LIMIT 20000) GROUP BY "host.name" - ORDER BY count() DESC`, + ORDER BY count(*) DESC`, }, true, }, @@ -306,7 +306,7 @@ var TestsAsyncSearch = []AsyncSearchTestCase{ "message" iLIKE '%user%') AND "message" IS NOT NULL) ORDER BY "@timestamp" DESC LIMIT 100`, - `SELECT count() + `SELECT count(*) FROM __quesma_table_name WHERE ((("@timestamp">=parseDateTime64BestEffort('2024-01-23T14:43:19.481Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-23T14:58:19.481Z')) @@ -748,14 +748,13 @@ var TestsAsyncSearch = []AsyncSearchTestCase{ model.SearchQueryInfo{Typ: model.Normal}, []string{ `SELECT "aggr__stats__parent_count", "aggr__stats__key_0", "aggr__stats__count", - "aggr__stats__order_1", "aggr__stats__series__key_0", - "aggr__stats__series__count" + "aggr__stats__series__key_0", "aggr__stats__series__count" FROM ( SELECT "aggr__stats__parent_count", "aggr__stats__key_0", - "aggr__stats__count", "aggr__stats__order_1", "aggr__stats__series__key_0", + "aggr__stats__count", "aggr__stats__series__key_0", "aggr__stats__series__count", - dense_rank() OVER (ORDER BY "aggr__stats__order_1" DESC, - "aggr__stats__key_0" ASC) AS "aggr__stats__order_1_rank", + dense_rank() OVER (ORDER BY "aggr__stats__count" DESC, "aggr__stats__key_0" + ASC) AS "aggr__stats__order_1_rank", dense_rank() OVER (PARTITION BY "aggr__stats__key_0" 
ORDER BY "aggr__stats__series__key_0" ASC) AS "aggr__stats__series__order_1_rank" FROM ( @@ -763,8 +762,6 @@ var TestsAsyncSearch = []AsyncSearchTestCase{ COALESCE("event.dataset", 'unknown') AS "aggr__stats__key_0", sum(count(*)) OVER (PARTITION BY "aggr__stats__key_0") AS "aggr__stats__count", - sum(count()) OVER (PARTITION BY "aggr__stats__key_0") AS - "aggr__stats__order_1", toInt64(toUnixTimestamp64Milli("@timestamp") / 60000) AS "aggr__stats__series__key_0", count(*) AS "aggr__stats__series__count" FROM __quesma_table_name @@ -999,7 +996,7 @@ var TestsSearch = []SearchTestCase{ ////[]model.Query{justSimplestWhere(`"type"='task'`)}, []string{ `SELECT "message" FROM ` + TableName + ` WHERE "type"='task' LIMIT 10`, - `SELECT count() FROM ` + TableName, + `SELECT count(*) FROM ` + TableName, }, []string{}, }, @@ -1032,7 +1029,7 @@ var TestsSearch = []SearchTestCase{ //}, []string{ `SELECT "message" FROM ` + TableName + ` WHERE ("type"='task' AND "task.enabled" IN (true,54)) LIMIT 10`, - `SELECT count() FROM ` + TableName, + `SELECT count(*) FROM ` + TableName, }, []string{}, }, @@ -1078,7 +1075,7 @@ var TestsSearch = []SearchTestCase{ `AND ("@timestamp".=parseDateTime64BestEffort('2024-01-17T10:..:18.815Z') ` + `AND "@timestamp".=parseDateTime64BestEffort('2024-01-17T10:..:18.815Z'))) ` + `LIMIT 10`, - `SELECT count() FROM ` + TableName, + `SELECT count(*) FROM ` + TableName, }, []string{}, }, @@ -1121,7 +1118,7 @@ var TestsSearch = []SearchTestCase{ `SELECT "message" FROM ` + TableName + ` WHERE ((("user.id"='kimchy' AND "tags"='production') ` + `AND ("tags"='env1' OR "tags"='deployed')) AND NOT (("age".=.0 AND "age".=.0))) ` + `LIMIT 10`, - `SELECT count() FROM ` + TableName + ` ` + + `SELECT count(*) FROM ` + TableName + ` ` + `WHERE ((("user.id"='kimchy' AND "tags"='production') ` + `AND ("tags"='env1' OR "tags"='deployed')) AND NOT (("age".=.0 AND "age".=.0)))`, }, @@ -1425,7 +1422,7 @@ var TestsSearch = []SearchTestCase{ model.ListAllFields, //[]model.Query{newSimplestQuery()}, []string{ - `SELECT count() FROM ` + TableName, + `SELECT count(*) FROM ` + TableName, `SELECT "message" FROM ` + TableName, }, []string{}, @@ -1576,14 +1573,13 @@ var TestsSearch = []SearchTestCase{ sum(count(*)) OVER () AS "metric____quesma_total_count_col_0", sum(count(*)) OVER () AS "aggr__suggestions__parent_count", "stream.namespace" AS "aggr__suggestions__key_0", - count(*) AS "aggr__suggestions__count", - count() AS "aggr__suggestions__order_1" + count(*) AS "aggr__suggestions__count" FROM __quesma_table_name WHERE ("message" iLIKE '%user%' AND ("@timestamp">=parseDateTime64BestEffort( '2024-01-22T09:26:10.299Z') AND "@timestamp"<=parseDateTime64BestEffort( '2024-01-22T09:41:10.299Z'))) GROUP BY "stream.namespace" AS "aggr__suggestions__key_0" - ORDER BY "aggr__suggestions__order_1" DESC, "aggr__suggestions__key_0" ASC + ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC LIMIT 11`, }, }, @@ -1669,19 +1665,18 @@ var TestsSearch = []SearchTestCase{ `SELECT uniqMerge(uniqState("namespace")) OVER () AS "metric__unique_terms_col_0" , sum(count(*)) OVER () AS "aggr__suggestions__parent_count", "namespace" AS "aggr__suggestions__key_0", - count(*) AS "aggr__suggestions__count", - count() AS "aggr__suggestions__order_1" + count(*) AS "aggr__suggestions__count" FROM __quesma_table_name WHERE ("service.name"='admin' AND ("@timestamp">=parseDateTime64BestEffort( '2024-01-22T14:34:35.873Z') AND "@timestamp"<=parseDateTime64BestEffort( '2024-01-22T14:49:35.873Z'))) GROUP BY "namespace" AS 
"aggr__suggestions__key_0" - ORDER BY "aggr__suggestions__order_1" DESC, "aggr__suggestions__key_0" ASC + ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC LIMIT 11`, }, }, { // [21] - "count() as /_search query. With filter", // response should be just ["hits"]["total"]["value"] == result of count() + "count(*) as /_search query. With filter", // response should be just ["hits"]["total"]["value"] == result of count(*) `{ "aggs": { "suggestions": { @@ -1760,19 +1755,18 @@ var TestsSearch = []SearchTestCase{ sum(count(*)) OVER () AS "metric____quesma_total_count_col_0", sum(count(*)) OVER () AS "aggr__suggestions__parent_count", "stream.namespace" AS "aggr__suggestions__key_0", - count(*) AS "aggr__suggestions__count", - count() AS "aggr__suggestions__order_1" + count(*) AS "aggr__suggestions__count" FROM __quesma_table_name WHERE (("message" iLIKE '%User logged out%' AND "host.name" iLIKE '%poseidon%') AND ("@timestamp">=parseDateTime64BestEffort('2024-01-29T15:36:36.491Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-29T18:11:36.491Z'))) GROUP BY "stream.namespace" AS "aggr__suggestions__key_0" - ORDER BY "aggr__suggestions__order_1" DESC, "aggr__suggestions__key_0" ASC + ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC LIMIT 11`, }, }, { // [22] - "count() as /_search or /logs-*-/_search query. Without filter", // response should be just ["hits"]["total"]["value"] == result of count() + "count(*) as /_search or /logs-*-/_search query. Without filter", // response should be just ["hits"]["total"]["value"] == result of count(*) `{ "aggs": { "suggestions": { @@ -1844,19 +1838,18 @@ var TestsSearch = []SearchTestCase{ `SELECT uniqMerge(uniqState("namespace")) OVER () AS "metric__unique_terms_col_0" , sum(count(*)) OVER () AS "aggr__suggestions__parent_count", "namespace" AS "aggr__suggestions__key_0", - count(*) AS "aggr__suggestions__count", - count() AS "aggr__suggestions__order_1" + count(*) AS "aggr__suggestions__count" FROM __quesma_table_name WHERE ("message" iLIKE '%user%' AND ("@timestamp">=parseDateTime64BestEffort( '2024-01-22T09:26:10.299Z') AND "@timestamp"<=parseDateTime64BestEffort( '2024-01-22T09:41:10.299Z'))) GROUP BY "namespace" AS "aggr__suggestions__key_0" - ORDER BY "aggr__suggestions__order_1" DESC, "aggr__suggestions__key_0" ASC + ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC LIMIT 11`, }, }, { // [23] - "count() as /_search query. With filter", // response should be just ["hits"]["total"]["value"] == result of count() + "count(*) as /_search query. 
With filter", // response should be just ["hits"]["total"]["value"] == result of count(*) `{ "aggs": { "suggestions": { @@ -1933,19 +1926,18 @@ var TestsSearch = []SearchTestCase{ `SELECT uniqMerge(uniqState("namespace")) OVER () AS "metric__unique_terms_col_0" , sum(count(*)) OVER () AS "aggr__suggestions__parent_count", "namespace" AS "aggr__suggestions__key_0", - count(*) AS "aggr__suggestions__count", - count() AS "aggr__suggestions__order_1" + count(*) AS "aggr__suggestions__count" FROM __quesma_table_name WHERE (("message" iLIKE '%User logged out%' AND "host.name" iLIKE '%poseidon%') AND ("@timestamp">=parseDateTime64BestEffort('2024-01-29T15:36:36.491Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-29T18:11:36.491Z'))) GROUP BY "namespace" AS "aggr__suggestions__key_0" - ORDER BY "aggr__suggestions__order_1" DESC, "aggr__suggestions__key_0" ASC + ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC LIMIT 11`, }, }, { // [24] - "count() as /_search or /logs-*-/_search query. Without filter", // response should be just ["hits"]["total"]["value"] == result of count() + "count(*) as /_search or /logs-*-/_search query. Without filter", // response should be just ["hits"]["total"]["value"] == result of count(*) `{ "aggs": { "suggestions": { @@ -2017,14 +2009,13 @@ var TestsSearch = []SearchTestCase{ `SELECT uniqMerge(uniqState("namespace")) OVER () AS "metric__unique_terms_col_0" , sum(count(*)) OVER () AS "aggr__suggestions__parent_count", "namespace" AS "aggr__suggestions__key_0", - count(*) AS "aggr__suggestions__count", - count() AS "aggr__suggestions__order_1" + count(*) AS "aggr__suggestions__count" FROM __quesma_table_name WHERE ("message" iLIKE '%user%' AND ("@timestamp">=parseDateTime64BestEffort( '2024-01-22T09:26:10.299Z') AND "@timestamp"<=parseDateTime64BestEffort( '2024-01-22T09:41:10.299Z'))) GROUP BY "namespace" AS "aggr__suggestions__key_0" - ORDER BY "aggr__suggestions__order_1" DESC, "aggr__suggestions__key_0" ASC + ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC LIMIT 11`, }, }, @@ -2066,7 +2057,7 @@ var TestsSearch = []SearchTestCase{ model.ListByField, //[]model.Query{withLimit(newSimplestQuery(), 500)}, []string{ - `SELECT count() FROM ` + TableName, + `SELECT count(*) FROM ` + TableName, `SELECT "message" FROM ` + TableName + ` LIMIT 500`, }, []string{}, @@ -2086,7 +2077,7 @@ var TestsSearch = []SearchTestCase{ model.ListAllFields, //[]model.Query{justSimplestWhere(``)}, []string{ - `SELECT count() FROM ` + TableName, + `SELECT count(*) FROM ` + TableName, `SELECT "message" FROM ` + TableName + ` LIMIT 10`, }, []string{}, @@ -2145,7 +2136,7 @@ var TestsSearch = []SearchTestCase{ model.ListAllFields, //[]model.Query{justSimplestWhere(``)}, []string{ - `SELECT count() FROM ` + TableName, + `SELECT count(*) FROM ` + TableName, `SELECT "message" FROM ` + TableName, }, []string{}, @@ -2192,7 +2183,7 @@ var TestsSearch = []SearchTestCase{ model.ListAllFields, //[]model.Query{newSimplestQuery()}, []string{ - `SELECT count() FROM (SELECT 1 FROM ` + TableName + ` LIMIT 10000)`, + `SELECT count(*) FROM (SELECT 1 FROM ` + TableName + ` LIMIT 10000)`, `SELECT "message" FROM __quesma_table_name LIMIT 10`, }, []string{}, diff --git a/quesma/util/sql_pretty_fmt_test.go b/quesma/util/sql_pretty_fmt_test.go index 822f10dfc..0b37ae3f2 100644 --- a/quesma/util/sql_pretty_fmt_test.go +++ b/quesma/util/sql_pretty_fmt_test.go @@ -23,12 +23,12 @@ WHERE (message LIKE '%user%' AND (timestamp>=parseDateTime64BestEffort( } func 
TestSqlPrettyPrint_multipleSqls(t *testing.T) { - sql := `SELECT '', '', count() FROM "kibana_sample_data_ecommerce" WHERE (("order_date">=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND "order_date"<=parseDateTime64BestEffort('2024-02-26T12:59:40.626Z')) OR ("order_date"<=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND "order_date">=parseDateTime64BestEffort('2024-02-12T12:59:40.626Z'))) AND ("order_date">=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND "order_date"<=parseDateTime64BestEffort('2024-02-26T12:59:40.626Z')) -SELECT '', '', count() FROM "kibana_sample_data_ecommerce" WHERE (("order_date">=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND "order_date"<=parseDateTime64BestEffort('2024-02-26T12:59:40.626Z')) OR ("order_date"<=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND "order_date">=parseDateTime64BestEffort('2024-02-12T12:59:40.626Z'))) AND ("order_date">=parseDateTime64BestEffort('2024-02-12T12:59:40.626Z') AND "order_date"<=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z')) + sql := `SELECT '', '', count(*) FROM "kibana_sample_data_ecommerce" WHERE (("order_date">=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND "order_date"<=parseDateTime64BestEffort('2024-02-26T12:59:40.626Z')) OR ("order_date"<=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND "order_date">=parseDateTime64BestEffort('2024-02-12T12:59:40.626Z'))) AND ("order_date">=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND "order_date"<=parseDateTime64BestEffort('2024-02-26T12:59:40.626Z')) +SELECT '', '', count(*) FROM "kibana_sample_data_ecommerce" WHERE (("order_date">=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND "order_date"<=parseDateTime64BestEffort('2024-02-26T12:59:40.626Z')) OR ("order_date"<=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND "order_date">=parseDateTime64BestEffort('2024-02-12T12:59:40.626Z'))) AND ("order_date">=parseDateTime64BestEffort('2024-02-12T12:59:40.626Z') AND "order_date"<=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z')) SELECT '', '', '', sum("taxful_total_price") FROM "kibana_sample_data_ecommerce" WHERE ("order_date">=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND "order_date"<=parseDateTime64BestEffort('2024-02-26T12:59:40.626Z')) OR ("order_date"<=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND "order_date">=parseDateTime64BestEffort('2024-02-12T12:59:40.626Z'))` sqlFormatted := SqlPrettyPrint([]byte(sql)) - sqlExpected := `SELECT '', '', count() + sqlExpected := `SELECT '', '', count(*) FROM "kibana_sample_data_ecommerce" WHERE (("order_date">=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND "order_date"<=parseDateTime64BestEffort('2024-02-26T12:59:40.626Z')) OR ( @@ -37,7 +37,7 @@ WHERE (("order_date">=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND "order_date">=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND "order_date"<=parseDateTime64BestEffort('2024-02-26T12:59:40.626Z')) -SELECT '', '', count() +SELECT '', '', count(*) FROM "kibana_sample_data_ecommerce" WHERE (("order_date">=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND "order_date"<=parseDateTime64BestEffort('2024-02-26T12:59:40.626Z')) OR ( @@ -59,7 +59,7 @@ WHERE ("order_date">=parseDateTime64BestEffort('2024-02-19T12:59:40.626Z') AND // Test checking if SqlPrettyPrint deals well with backticks. // If you don't process backticks accordingly, SqlPrettyPrint throws an error. 
func TestSqlPrettPrintBackticks(t *testing.T) { - sql := "SELECT toInt64(toUnixTimestamp64Milli(`@timestamp`)/30000), count() FROM " + `"logs-generic-default" WHERE ("@timestamp">=parseDateTime64BestEffort('2024-02-04T11:11:29.735Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-02-04T11:26:29.735Z')) AND ("@timestamp">=timestamp_sub(SECOND,900, now64())) GROUP BY toInt64(toUnixTimestamp64Milli(` + "`@timestamp`)/30000)" + sql := "SELECT toInt64(toUnixTimestamp64Milli(`@timestamp`)/30000), count(*) FROM " + `"logs-generic-default" WHERE ("@timestamp">=parseDateTime64BestEffort('2024-02-04T11:11:29.735Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-02-04T11:26:29.735Z')) AND ("@timestamp">=timestamp_sub(SECOND,900, now64())) GROUP BY toInt64(toUnixTimestamp64Milli(` + "`@timestamp`)/30000)" sqlFormatted := SqlPrettyPrint([]byte(sql)) assert.Greater(t, len(strings.Split(sqlFormatted, "\n")), 1) // if error, SqlPrettyPrint returns input string with len == 1 } @@ -71,8 +71,8 @@ func TestInvalidSql(t *testing.T) { } func TestGroupBySql(t *testing.T) { - sql := "SELECT toInt64(toUnixTimestamp64Milli(`@timestamp`)/30000), count() FROM \"logs-generic-default\" WHERE \"@timestamp\">=parseDateTime64BestEffort('2024-04-08T14:42:43.243Z') AND \"@timestamp\"<=parseDateTime64BestEffort('2024-04-08T14:57:43.243Z') GROUP BY (toInt64(toUnixTimestamp64Milli(`@timestamp`)/30000)) ORDER BY (toInt64(toUnixTimestamp64Milli(`@timestamp`)/30000))" - expect := "SELECT toInt64(toUnixTimestamp64Milli(`@timestamp`)/30000), count()\n" + + sql := "SELECT toInt64(toUnixTimestamp64Milli(`@timestamp`)/30000), count(*) FROM \"logs-generic-default\" WHERE \"@timestamp\">=parseDateTime64BestEffort('2024-04-08T14:42:43.243Z') AND \"@timestamp\"<=parseDateTime64BestEffort('2024-04-08T14:57:43.243Z') GROUP BY (toInt64(toUnixTimestamp64Milli(`@timestamp`)/30000)) ORDER BY (toInt64(toUnixTimestamp64Milli(`@timestamp`)/30000))" + expect := "SELECT toInt64(toUnixTimestamp64Milli(`@timestamp`)/30000), count(*)\n" + "FROM \"logs-generic-default\"\n" + "WHERE \"@timestamp\">=parseDateTime64BestEffort('2024-04-08T14:42:43.243Z') AND\n" + " \"@timestamp\"<=parseDateTime64BestEffort('2024-04-08T14:57:43.243Z')\n" + @@ -83,8 +83,8 @@ func TestGroupBySql(t *testing.T) { } func TestPrettySubQuery(t *testing.T) { - sql := `SELECT "clientip", count() FROM ( SELECT "clientip" FROM "kibana_sample_data_logs" WHERE "@timestamp">=parseDateTime64BestEffort('2024-04-08T08:38:14.246Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-04-09T09:38:14.246Z') LIMIT 20000) GROUP BY "clientip" ORDER BY count() DESC` - expect := `SELECT "clientip", count() + sql := `SELECT "clientip", count(*) FROM ( SELECT "clientip" FROM "kibana_sample_data_logs" WHERE "@timestamp">=parseDateTime64BestEffort('2024-04-08T08:38:14.246Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-04-09T09:38:14.246Z') LIMIT 20000) GROUP BY "clientip" ORDER BY count(*) DESC` + expect := `SELECT "clientip", count(*) FROM ( SELECT "clientip" FROM "kibana_sample_data_logs" @@ -92,43 +92,43 @@ FROM ( "@timestamp"<=parseDateTime64BestEffort('2024-04-09T09:38:14.246Z') LIMIT 20000) GROUP BY "clientip" -ORDER BY count() DESC` +ORDER BY count(*) DESC` sqlFormatted := SqlPrettyPrint([]byte(sql)) assert.Equal(t, expect, sqlFormatted) } func TestDontExpand(t *testing.T) { - expect := `SELECT "clientip", count() + expect := `SELECT "clientip", count(*) FROM "kibana_sample_data_logs" WHERE "@timestamp">=parseDateTime64BestEffort('2024-04-08T08:38:14.246Z') AND 
"@timestamp"<=parseDateTime64BestEffort('2024-04-09T09:38:14.246Z') GROUP BY "clientip" -ORDER BY count() DESC` +ORDER BY count(*) DESC` sqlFormatted := SqlPrettyPrint([]byte(expect)) assert.Equal(t, expect, sqlFormatted) } func TestSqlWith(t *testing.T) { - sql := `SELECT count() FROM "kibana_sample_data_ecommerce" -WITH subQuery_1 AS (SELECT "animalType" AS "subQuery_1_1", count() AS "subQuery_1_cnt" FROM "default"."animal_index" WHERE ("date">=parseDateTime64BestEffort('2024-04-17T08:53:18.456Z') AND "date"<=parseDateTime64BestEffort('2024-07-10T08:53:18.456Z')) GROUP BY "animalType" ORDER BY count() DESC, "animalType" LIMIT 5) SELECT "animalType", "zooName", count() FROM "default"."animal_index" INNER JOIN "subQuery_1" ON "animalType" = "subQuery_1_1" WHERE ("date">=parseDateTime64BestEffort('2024-04-17T08:53:18.456Z') AND "date"<=parseDateTime64BestEffort('2024-07-10T08:53:18.456Z')) GROUP BY "animalType", "zooName", subQuery_1_cnt ORDER BY subQuery_1_cnt DESC, "animalType", count() DESC, "zooName" LIMIT 6` - expect := `SELECT count() + sql := `SELECT count(*) FROM "kibana_sample_data_ecommerce" +WITH subQuery_1 AS (SELECT "animalType" AS "subQuery_1_1", count(*) AS "subQuery_1_cnt" FROM "default"."animal_index" WHERE ("date">=parseDateTime64BestEffort('2024-04-17T08:53:18.456Z') AND "date"<=parseDateTime64BestEffort('2024-07-10T08:53:18.456Z')) GROUP BY "animalType" ORDER BY count(*) DESC, "animalType" LIMIT 5) SELECT "animalType", "zooName", count(*) FROM "default"."animal_index" INNER JOIN "subQuery_1" ON "animalType" = "subQuery_1_1" WHERE ("date">=parseDateTime64BestEffort('2024-04-17T08:53:18.456Z') AND "date"<=parseDateTime64BestEffort('2024-07-10T08:53:18.456Z')) GROUP BY "animalType", "zooName", subQuery_1_cnt ORDER BY subQuery_1_cnt DESC, "animalType", count(*) DESC, "zooName" LIMIT 6` + expect := `SELECT count(*) FROM "kibana_sample_data_ecommerce" WITH subQuery_1 AS ( - SELECT "animalType" AS "subQuery_1_1", count() AS "subQuery_1_cnt" + SELECT "animalType" AS "subQuery_1_1", count(*) AS "subQuery_1_cnt" FROM "default"."animal_index" WHERE ("date">=parseDateTime64BestEffort('2024-04-17T08:53:18.456Z') AND "date"<=parseDateTime64BestEffort('2024-07-10T08:53:18.456Z')) GROUP BY "animalType" - ORDER BY count() DESC, "animalType" + ORDER BY count(*) DESC, "animalType" LIMIT 5) -SELECT "animalType", "zooName", count() +SELECT "animalType", "zooName", count(*) FROM "default"."animal_index" INNER JOIN "subQuery_1" ON "animalType" = "subQuery_1_1" WHERE ("date">=parseDateTime64BestEffort('2024-04-17T08:53:18.456Z') AND "date" <=parseDateTime64BestEffort('2024-07-10T08:53:18.456Z')) GROUP BY "animalType", "zooName", subQuery_1_cnt -ORDER BY subQuery_1_cnt DESC, "animalType", count() DESC, "zooName" +ORDER BY subQuery_1_cnt DESC, "animalType", count(*) DESC, "zooName" LIMIT 6` sqlFormatted := SqlPrettyPrint([]byte(sql)) assert.Equal(t, expect, sqlFormatted) @@ -251,13 +251,13 @@ WITH quesma_top_hits_group_table AS ( "OriginAirportID" AS "aggr__origins__key_0", sum(count(*)) OVER (PARTITION BY "aggr__origins__key_0") AS "aggr__origins__count", - sum(count()) OVER (PARTITION BY "aggr__origins__key_0") AS + sum(count(*)) OVER (PARTITION BY "aggr__origins__key_0") AS "aggr__origins__order_1", sum(count(*)) OVER (PARTITION BY "aggr__origins__key_0") AS "aggr__origins__distinations__parent_count", "DestAirportID" AS "aggr__origins__distinations__key_0", count(*) AS "aggr__origins__distinations__count", - count() AS "aggr__origins__distinations__order_1" + count(*) AS 
"aggr__origins__distinations__order_1" FROM __quesma_table_name GROUP BY "OriginAirportID" AS "aggr__origins__key_0", "DestAirportID" AS "aggr__origins__distinations__key_0")) diff --git a/quesma/util/utils_test.go b/quesma/util/utils_test.go index 0a6410cfd..ddf6f433c 100644 --- a/quesma/util/utils_test.go +++ b/quesma/util/utils_test.go @@ -681,13 +681,13 @@ func TestIsSqlEqual(t *testing.T) { {"a OR (b AND c)", "a OR (c AND b)", true}, {"a OR (b AND c)", "a OR (c OR b)", false}, { - `SELECT count() FROM add-this WHERE \"timestamp\"<=parseDateTime64BestEffort('2024-02-09T13:47:16.029Z') AND \"timestamp\">=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z')`, - `SELECT count() FROM add-this WHERE \"timestamp\">=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z') AND \"timestamp\"<=parseDateTime64BestEffort('2024-02-09T13:47:16.029Z')`, + `SELECT count(*) FROM add-this WHERE \"timestamp\"<=parseDateTime64BestEffort('2024-02-09T13:47:16.029Z') AND \"timestamp\">=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z')`, + `SELECT count(*) FROM add-this WHERE \"timestamp\">=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z') AND \"timestamp\"<=parseDateTime64BestEffort('2024-02-09T13:47:16.029Z')`, true, }, { - `SELECT count() FROM "logs-generic-default" WHERE ("FlightDelay" == true AND (("timestamp">=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z') AND "timestamp"<=parseDateTime64BestEffort('2024-02-09T13:47:16.029Z')) OR ("timestamp">=parseDateTime64BestEffort('2024-01-26T13:47:16.029Z') AND "timestamp"<=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z')))) AND ("timestamp">=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z') AND "timestamp"<=parseDateTime64BestEffort('2024-02-09T13:47:16.029Z'))`, - `SELECT count() FROM "logs-generic-default" WHERE ("FlightDelay" == true AND (("timestamp"<=parseDateTime64BestEffort('2024-02-09T13:47:16.029Z') AND "timestamp">=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z')) OR ("timestamp"<=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z') AND "timestamp">=parseDateTime64BestEffort('2024-01-26T13:47:16.029Z')))) AND ("timestamp">=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z') AND "timestamp"<=parseDateTime64BestEffort('2024-02-09T13:47:16.029Z'))`, + `SELECT count(*) FROM "logs-generic-default" WHERE ("FlightDelay" == true AND (("timestamp">=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z') AND "timestamp"<=parseDateTime64BestEffort('2024-02-09T13:47:16.029Z')) OR ("timestamp">=parseDateTime64BestEffort('2024-01-26T13:47:16.029Z') AND "timestamp"<=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z')))) AND ("timestamp">=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z') AND "timestamp"<=parseDateTime64BestEffort('2024-02-09T13:47:16.029Z'))`, + `SELECT count(*) FROM "logs-generic-default" WHERE ("FlightDelay" == true AND (("timestamp"<=parseDateTime64BestEffort('2024-02-09T13:47:16.029Z') AND "timestamp">=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z')) OR ("timestamp"<=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z') AND "timestamp">=parseDateTime64BestEffort('2024-01-26T13:47:16.029Z')))) AND ("timestamp">=parseDateTime64BestEffort('2024-02-02T13:47:16.029Z') AND "timestamp"<=parseDateTime64BestEffort('2024-02-09T13:47:16.029Z'))`, true, }, }