From 225c3d0aae717a6b650b8566303399dd77807932 Mon Sep 17 00:00:00 2001 From: Przemyslaw Delewski <102958445+pdelewski@users.noreply.github.com> Date: Wed, 8 May 2024 15:23:07 +0200 Subject: [PATCH 01/14] BuildHistogramQuery is unused (#62) --- quesma/queryparser/query_translator.go | 25 ------------------------- 1 file changed, 25 deletions(-) diff --git a/quesma/queryparser/query_translator.go b/quesma/queryparser/query_translator.go index ab24602ee..61cec3e15 100644 --- a/quesma/queryparser/query_translator.go +++ b/quesma/queryparser/query_translator.go @@ -11,7 +11,6 @@ import ( "mitmproxy/quesma/util" "strconv" "strings" - "time" ) const facetsSampleSize = "20000" @@ -588,30 +587,6 @@ func (cw *ClickhouseQueryTranslator) BuildAutocompleteQuery(fieldName, whereClau } } -func (cw *ClickhouseQueryTranslator) BuildHistogramQuery(timestampFieldName, whereClauseOriginal, fixedInterval string) (*model.Query, time.Duration) { - var defaultInterval = 30 * time.Second - histogramOneBar, err := kibana.ParseInterval(fixedInterval) - if err != nil { - logger.ErrorWithCtx(cw.Ctx).Msg(err.Error()) - histogramOneBar = defaultInterval - } - groupByClause := clickhouse.TimestampGroupBy(timestampFieldName, cw.Table.GetDateTimeType(cw.Ctx, timestampFieldName), histogramOneBar) - // [WARNING] This is a little oversimplified, but it seems to be good enough for now (==satisfies Kibana's histogram) - // - // In Elasticsearch's `date_histogram` aggregation implementation, the timestamps for the intervals are generated independently of the document data. - // The aggregation divides the specified time range into intervals based on the interval unit (e.g., minute, hour, day) and generates timestamps for each interval, - // irrespective of the actual timestamps of the documents. 
- query := model.Query{ - Fields: []string{}, - NonSchemaFields: []string{groupByClause, "count()"}, - WhereClause: whereClauseOriginal, - SuffixClauses: []string{"GROUP BY " + groupByClause}, - FromClause: cw.Table.FullTableName(), - CanParse: true, - } - return &query, histogramOneBar -} - //lint:ignore U1000 Not used yet func (cw *ClickhouseQueryTranslator) BuildAutocompleteSuggestionsQuery(fieldName string, prefix string, limit int) *model.Query { whereClause := "" From 2d76649b4f2198fff0b18ce886ab830f138a4215 Mon Sep 17 00:00:00 2001 From: Przemyslaw Delewski <102958445+pdelewski@users.noreply.github.com> Date: Wed, 8 May 2024 15:38:07 +0200 Subject: [PATCH 02/14] Fixing resource explosion by limiting returned batch size (#45) Quesma explosion with one async query `SELECT * FROM "default"."kibana_sample_data_flights"` produced as a result of any async query that follows `BuildSimpleSelectQuery` path image --------- Co-authored-by: Krzysztof Kiewicz --- quesma/eql/query_translator.go | 2 +- quesma/model/query.go | 2 + quesma/queryparser/query_parser.go | 39 +++---- quesma/queryparser/query_parser_test.go | 10 +- quesma/queryparser/query_translator.go | 11 +- quesma/queryparser/query_translator_test.go | 3 +- quesma/quesma/query_translator.go | 2 +- quesma/quesma/search.go | 2 +- quesma/quesma/search_test.go | 6 +- quesma/testdata/requests.go | 106 +++++++++--------- .../testdata/requests_no_full_text_fields.go | 4 +- quesma/testdata/util.go | 10 +- 12 files changed, 102 insertions(+), 95 deletions(-) diff --git a/quesma/eql/query_translator.go b/quesma/eql/query_translator.go index 8192f69fd..d9458ace2 100644 --- a/quesma/eql/query_translator.go +++ b/quesma/eql/query_translator.go @@ -137,7 +137,7 @@ func (cw *ClickhouseEQLQueryTranslator) BuildSimpleCountQuery(whereClause string panic("EQL does not support count") } -func (cw *ClickhouseEQLQueryTranslator) BuildSimpleSelectQuery(whereClause string) *model.Query { +func (cw *ClickhouseEQLQueryTranslator) 
BuildSimpleSelectQuery(whereClause string, size int) *model.Query { panic("EQL does not support this method") } diff --git a/quesma/model/query.go b/quesma/model/query.go index 44fe04ae4..9797da988 100644 --- a/quesma/model/query.go +++ b/quesma/model/query.go @@ -194,6 +194,8 @@ const ( None ) +const DefaultSizeListQuery = 1000 // we use LIMIT 1000 in some simple list queries (SELECT ...) + func (queryType SearchQueryType) String() string { return []string{"Facets", "FacetsNumeric", "ListByField", "ListAllFields", "CountAsync", "Normal", "None"}[queryType] } diff --git a/quesma/queryparser/query_parser.go b/quesma/queryparser/query_parser.go index 616aad3a0..23475986d 100644 --- a/quesma/queryparser/query_parser.go +++ b/quesma/queryparser/query_parser.go @@ -1068,17 +1068,18 @@ func (cw *ClickhouseQueryTranslator) tryProcessSearchMetadata(queryMap QueryMap) // case 3: maybe it's a normal request var queryMapNested QueryMap var ok bool + size, _ := cw.parseSize(metadata) if queryMapNested, ok = queryMap["aggs"].(QueryMap); !ok { - return model.SearchQueryInfo{Typ: model.Normal} + return model.SearchQueryInfo{Typ: model.Normal, I2: size} } if queryMapNested, ok = queryMapNested["suggestions"].(QueryMap); !ok { - return model.SearchQueryInfo{Typ: model.Normal} + return model.SearchQueryInfo{Typ: model.Normal, I2: size} } if queryMapNested, ok = queryMapNested["terms"].(QueryMap); !ok { - return model.SearchQueryInfo{Typ: model.Normal} + return model.SearchQueryInfo{Typ: model.Normal, I2: size} } if _, ok = queryMapNested["field"]; !ok { - return model.SearchQueryInfo{Typ: model.Normal} + return model.SearchQueryInfo{Typ: model.Normal, I2: size} } // otherwise: None @@ -1120,15 +1121,9 @@ func (cw *ClickhouseQueryTranslator) isItFacetsRequest(queryMap QueryMap) (model return model.NewSearchQueryInfoNone(), false } - var size int - sizeRaw, ok := firstNestingMap["size"] + size, ok := cw.parseSize(firstNestingMap) if !ok { return model.NewSearchQueryInfoNone(), false - 
} else if sizeAsFloat, ok := sizeRaw.(float64); ok { - size = int(sizeAsFloat) - } else { - logger.WarnWithCtx(cw.Ctx).Msgf("invalid size type: %T, value: %v. Expected float64", sizeRaw, sizeRaw) - return model.NewSearchQueryInfoNone(), false } fieldNameRaw, ok := firstNestingMap["field"] if !ok { @@ -1170,14 +1165,8 @@ func (cw *ClickhouseQueryTranslator) isItFacetsRequest(queryMap QueryMap) (model // returns (model.NewSearchQueryInfoNone, false) if it's not ListAllFields/ListByField request func (cw *ClickhouseQueryTranslator) isItListRequest(queryMap QueryMap) (model.SearchQueryInfo, bool) { // 1) case: very simple SELECT * kind of request - var size int - sizeRaw, okSize := queryMap["size"] - if !okSize { - return model.NewSearchQueryInfoNone(), false - } else if sizeAsFloat, ok := sizeRaw.(float64); ok { - size = int(sizeAsFloat) - } else { - logger.WarnWithCtx(cw.Ctx).Msgf("invalid size type: %T, value: %v. Expected float64", sizeRaw, sizeRaw) + size, ok := cw.parseSize(queryMap) + if !ok { return model.NewSearchQueryInfoNone(), false } @@ -1306,3 +1295,15 @@ func (cw *ClickhouseQueryTranslator) parseSortFields(sortMaps []any) []string { } return sortFields } + +func (cw *ClickhouseQueryTranslator) parseSize(queryMap QueryMap) (size int, ok bool) { + sizeRaw, exists := queryMap["size"] + if !exists { + return model.DefaultSizeListQuery, false + } else if sizeAsFloat, ok := sizeRaw.(float64); ok { + return int(sizeAsFloat), true + } else { + logger.WarnWithCtx(cw.Ctx).Msgf("invalid size type: %T, value: %v. 
Expected float64", sizeRaw, sizeRaw) + return model.DefaultSizeListQuery, false + } +} diff --git a/quesma/queryparser/query_parser_test.go b/quesma/queryparser/query_parser_test.go index 0335b945d..711d6230e 100644 --- a/quesma/queryparser/query_parser_test.go +++ b/quesma/queryparser/query_parser_test.go @@ -5,6 +5,7 @@ import ( "github.com/stretchr/testify/require" "mitmproxy/quesma/clickhouse" "mitmproxy/quesma/concurrent" + "mitmproxy/quesma/model" "mitmproxy/quesma/quesma/config" "mitmproxy/quesma/telemetry" "mitmproxy/quesma/testdata" @@ -43,14 +44,14 @@ func TestQueryParserStringAttrConfig(t *testing.T) { lm.AddTableIfDoesntExist(table) cw := ClickhouseQueryTranslator{ClickhouseLM: lm, Table: table, Ctx: context.Background()} + for _, tt := range testdata.TestsSearch { t.Run(tt.Name, func(t *testing.T) { simpleQuery, queryInfo, _ := cw.ParseQuery(tt.QueryJson) assert.True(t, simpleQuery.CanParse, "can parse") assert.Contains(t, tt.WantedSql, simpleQuery.Sql.Stmt, "contains wanted sql") assert.Equal(t, tt.WantedQueryType, queryInfo.Typ, "equals to wanted query type") - - query := cw.BuildSimpleSelectQuery(simpleQuery.Sql.Stmt) + query := cw.BuildSimpleSelectQuery(simpleQuery.Sql.Stmt, model.DefaultSizeListQuery) assert.Contains(t, tt.WantedQuery, *query) }) } @@ -77,8 +78,7 @@ func TestQueryParserNoFullTextFields(t *testing.T) { assert.True(t, simpleQuery.CanParse, "can parse") assert.Contains(t, tt.WantedSql, simpleQuery.Sql.Stmt, "contains wanted sql") assert.Equal(t, tt.WantedQueryType, queryInfo.Typ, "equals to wanted query type") - - query := cw.BuildSimpleSelectQuery(simpleQuery.Sql.Stmt) + query := cw.BuildSimpleSelectQuery(simpleQuery.Sql.Stmt, model.DefaultSizeListQuery) assert.Contains(t, tt.WantedQuery, *query) }) } @@ -104,7 +104,7 @@ func TestQueryParserNoAttrsConfig(t *testing.T) { assert.Contains(t, tt.WantedSql, simpleQuery.Sql.Stmt) assert.Equal(t, tt.WantedQueryType, queryInfo.Typ) - query := cw.BuildSimpleSelectQuery(simpleQuery.Sql.Stmt) 
+ query := cw.BuildSimpleSelectQuery(simpleQuery.Sql.Stmt, model.DefaultSizeListQuery) assert.Contains(t, tt.WantedQuery, *query) }) } diff --git a/quesma/queryparser/query_translator.go b/quesma/queryparser/query_translator.go index 61cec3e15..f0fe66f4f 100644 --- a/quesma/queryparser/query_translator.go +++ b/quesma/queryparser/query_translator.go @@ -523,12 +523,13 @@ func (cw *ClickhouseQueryTranslator) BuildSelectQuery(fields []string, whereClau } } -func (cw *ClickhouseQueryTranslator) BuildSimpleSelectQuery(whereClause string) *model.Query { +func (cw *ClickhouseQueryTranslator) BuildSimpleSelectQuery(whereClause string, limit int) *model.Query { return &model.Query{ - Fields: []string{"*"}, - WhereClause: whereClause, - FromClause: cw.Table.FullTableName(), - CanParse: true, + Fields: []string{"*"}, + WhereClause: whereClause, + FromClause: cw.Table.FullTableName(), + SuffixClauses: []string{"LIMIT " + strconv.Itoa(cw.applySizeLimit(limit))}, + CanParse: true, } } diff --git a/quesma/queryparser/query_translator_test.go b/quesma/queryparser/query_translator_test.go index 802036589..fa3074a73 100644 --- a/quesma/queryparser/query_translator_test.go +++ b/quesma/queryparser/query_translator_test.go @@ -462,8 +462,9 @@ func TestMakeResponseAsyncSearchQuery(t *testing.T) { // used to fail before we fixed field quoting. 
func TestMakeResponseSearchQueryIsProperJson(t *testing.T) { cw := ClickhouseQueryTranslator{ClickhouseLM: nil, Table: clickhouse.NewEmptyTable("@"), Ctx: context.Background()} + const limit = 1000 queries := []*model.Query{ - cw.BuildSimpleSelectQuery(""), + cw.BuildSimpleSelectQuery("", limit), cw.BuildNRowsQuery("@", SimpleQuery{}, 0), } for _, query := range queries { diff --git a/quesma/quesma/query_translator.go b/quesma/quesma/query_translator.go index 22c5cfe3f..ec1b50238 100644 --- a/quesma/quesma/query_translator.go +++ b/quesma/quesma/query_translator.go @@ -21,7 +21,7 @@ type IQueryTranslator interface { ParseAggregationJson(aggregationJson string) ([]model.QueryWithAggregation, error) BuildSimpleCountQuery(whereClause string) *model.Query - BuildSimpleSelectQuery(whereClause string) *model.Query + BuildSimpleSelectQuery(whereClause string, size int) *model.Query BuildNRowsQuery(fieldName string, simpleQuery queryparser.SimpleQuery, limit int) *model.Query BuildFacetsQuery(fieldName string, simpleQuery queryparser.SimpleQuery, limit int) *model.Query diff --git a/quesma/quesma/search.go b/quesma/quesma/search.go index aec96ebec..7dd3767c5 100644 --- a/quesma/quesma/search.go +++ b/quesma/quesma/search.go @@ -479,7 +479,7 @@ func (q *QueryRunner) searchWorkerCommon(ctx context.Context, queryTranslator IQ hits, err = q.logManager.ProcessSelectQuery(dbQueryCtx, table, fullQuery) case model.Normal: - fullQuery = queryTranslator.BuildSimpleSelectQuery(simpleQuery.Sql.Stmt) + fullQuery = queryTranslator.BuildSimpleSelectQuery(simpleQuery.Sql.Stmt, queryInfo.I2) hits, err = q.logManager.ProcessSelectQuery(dbQueryCtx, table, fullQuery) default: diff --git a/quesma/quesma/search_test.go b/quesma/quesma/search_test.go index 04a5a3a10..300b9c2ac 100644 --- a/quesma/quesma/search_test.go +++ b/quesma/quesma/search_test.go @@ -38,10 +38,10 @@ func TestNoAsciiTableName(t *testing.T) { assert.True(t, simpleQuery.CanParse) assert.Equal(t, "", simpleQuery.Sql.Stmt) 
assert.Equal(t, model.Normal, queryInfo.Typ) - - query := queryTranslator.BuildSimpleSelectQuery(simpleQuery.Sql.Stmt) + const Limit = 1000 + query := queryTranslator.BuildSimpleSelectQuery(simpleQuery.Sql.Stmt, Limit) assert.True(t, query.CanParse) - assert.Equal(t, fmt.Sprintf(`SELECT * FROM "%s" `, tableName), query.String()) + assert.Equal(t, fmt.Sprintf(`SELECT * FROM "%s" LIMIT %d`, tableName, Limit), query.String()) } var ctx = context.WithValue(context.TODO(), tracing.RequestIdCtxKey, tracing.GetRequestId()) diff --git a/quesma/testdata/requests.go b/quesma/testdata/requests.go index 54c7e8d07..331a6c36f 100644 --- a/quesma/testdata/requests.go +++ b/quesma/testdata/requests.go @@ -904,8 +904,8 @@ var TestsSearch = []SearchTestCase{ }`, []string{`"type"='task'`}, model.Normal, - []model.Query{justWhere(`"type"='task'`)}, - []string{qToStr(justWhere(`"type"='task'`))}, + []model.Query{justSimplestWhere(`"type"='task'`)}, + []string{qToStr(justSimplestWhere(`"type"='task'`))}, }, { // [2] "Term as array", @@ -931,9 +931,9 @@ var TestsSearch = []SearchTestCase{ []string{`"type"='task' AND ("task.enabled"=true OR "task.enabled"=54)`}, model.Normal, []model.Query{ - justWhere(`"type"='task' AND ("task.enabled"=true OR "task.enabled"=54)`), + justSimplestWhere(`"type"='task' AND ("task.enabled"=true OR "task.enabled"=54)`), }, - []string{qToStr(justWhere(`"type"='task' AND ("task.enabled"=true OR "task.enabled"=54)`))}, + []string{qToStr(justSimplestWhere(`"type"='task' AND ("task.enabled"=true OR "task.enabled"=54)`))}, }, { // [3] "Sample log query", @@ -970,8 +970,8 @@ var TestsSearch = []SearchTestCase{ }, model.Normal, []model.Query{ - justWhere(`"message" iLIKE '%user%' AND ("@timestamp">=parseDateTime64BestEffort('2024-01-17T10:28:18.815Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-17T10:43:18.815Z'))`), - justWhere(`"message" iLIKE '%user%' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-17T10:43:18.815Z') AND 
"@timestamp">=parseDateTime64BestEffort('2024-01-17T10:28:18.815Z'))`), + justSimplestWhere(`"message" iLIKE '%user%' AND ("@timestamp">=parseDateTime64BestEffort('2024-01-17T10:28:18.815Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-17T10:43:18.815Z'))`), + justSimplestWhere(`"message" iLIKE '%user%' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-17T10:43:18.815Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-17T10:28:18.815Z'))`), }, []string{`SELECT "message" FROM "logs-generic-default" WHERE "message" iLIKE '%user%' AND ("@timestamp".=parseDateTime64BestEffort('2024-01-17T10:..:18.815Z') AND "@timestamp".=parseDateTime64BestEffort('2024-01-17T10:..:18.815Z'))`}, }, @@ -1007,8 +1007,8 @@ var TestsSearch = []SearchTestCase{ }, model.Normal, []model.Query{ - justWhere(`(("user.id"='kimchy' AND "tags"='production') AND ("tags"='env1' OR "tags"='deployed')) AND NOT ("age"<=20 AND "age">=10)`), - justWhere(`(("user.id"='kimchy' AND "tags"='production') AND ("tags"='env1' OR "tags"='deployed')) AND NOT ("age">=10 AND "age"<=20)`), + justSimplestWhere(`(("user.id"='kimchy' AND "tags"='production') AND ("tags"='env1' OR "tags"='deployed')) AND NOT ("age"<=20 AND "age">=10)`), + justSimplestWhere(`(("user.id"='kimchy' AND "tags"='production') AND ("tags"='env1' OR "tags"='deployed')) AND NOT ("age">=10 AND "age"<=20)`), }, []string{`SELECT "message" FROM "logs-generic-default" WHERE (("user.id"='kimchy' AND "tags"='production') AND ("tags"='env1' OR "tags"='deployed')) AND NOT ("age".=.0 AND "age".=.0)`}, }, @@ -1039,8 +1039,8 @@ var TestsSearch = []SearchTestCase{ }`, []string{`"host_name.keyword" iLIKE '%prometheus%'`}, model.Normal, - []model.Query{justWhere(`"host_name.keyword" iLIKE '%prometheus%'`)}, - []string{qToStr(justWhere(`"host_name.keyword" iLIKE '%prometheus%'`))}, + []model.Query{justSimplestWhere(`"host_name.keyword" iLIKE '%prometheus%'`)}, + []string{qToStr(justSimplestWhere(`"host_name.keyword" iLIKE '%prometheus%'`))}, }, { 
// [6] "Match", @@ -1054,8 +1054,8 @@ var TestsSearch = []SearchTestCase{ }`, []string{`"message" iLIKE '%this%' OR "message" iLIKE '%is%' OR "message" iLIKE '%a%' OR "message" iLIKE '%test%'`}, model.Normal, - []model.Query{justWhere(`"message" iLIKE '%this%' OR "message" iLIKE '%is%' OR "message" iLIKE '%a%' OR "message" iLIKE '%test%'`)}, - []string{qToStr(justWhere(`"message" iLIKE '%this%' OR "message" iLIKE '%is%' OR "message" iLIKE '%a%' OR "message" iLIKE '%test%'`))}, + []model.Query{justSimplestWhere(`"message" iLIKE '%this%' OR "message" iLIKE '%is%' OR "message" iLIKE '%a%' OR "message" iLIKE '%test%'`)}, + []string{qToStr(justSimplestWhere(`"message" iLIKE '%this%' OR "message" iLIKE '%is%' OR "message" iLIKE '%a%' OR "message" iLIKE '%test%'`))}, }, { // [7] "Terms", @@ -1075,8 +1075,8 @@ var TestsSearch = []SearchTestCase{ }`, []string{`"status"='pending'`}, model.Normal, - []model.Query{justWhere(`"status"='pending'`)}, - []string{qToStr(justWhere(`"status"='pending'`))}, + []model.Query{justSimplestWhere(`"status"='pending'`)}, + []string{qToStr(justSimplestWhere(`"status"='pending'`))}, }, { // [8] "Exists", @@ -1122,9 +1122,9 @@ var TestsSearch = []SearchTestCase{ []string{`"type"='upgrade-assistant-reindex-operation' AND NOT ((has("attributes_string_key","namespace") AND "attributes_string_value"[indexOf("attributes_string_key","namespace")] IS NOT NULL) OR (has("attributes_string_key","namespaces") AND "attributes_string_value"[indexOf("attributes_string_key","namespaces")] IS NOT NULL))`}, model.Normal, []model.Query{ - justWhere(`"type"='upgrade-assistant-reindex-operation' AND NOT ((has("attributes_string_key","namespace") AND "attributes_string_value"[indexOf("attributes_string_key","namespace")] IS NOT NULL) OR (has("attributes_string_key","namespaces") AND "attributes_string_value"[indexOf("attributes_string_key","namespaces")] IS NOT NULL))`), + justSimplestWhere(`"type"='upgrade-assistant-reindex-operation' AND NOT 
((has("attributes_string_key","namespace") AND "attributes_string_value"[indexOf("attributes_string_key","namespace")] IS NOT NULL) OR (has("attributes_string_key","namespaces") AND "attributes_string_value"[indexOf("attributes_string_key","namespaces")] IS NOT NULL))`), }, - []string{qToStr(justWhere(`"type"='upgrade-assistant-reindex-operation' AND NOT ((has("attributes_string_key","namespace") AND "attributes_string_value"[indexOf("attributes_string_key","namespace")] IS NOT NULL) OR (has("attributes_string_key","namespaces") AND "attributes_string_value"[indexOf("attributes_string_key","namespaces")] IS NOT NULL))`))}, + []string{qToStr(justSimplestWhere(`"type"='upgrade-assistant-reindex-operation' AND NOT ((has("attributes_string_key","namespace") AND "attributes_string_value"[indexOf("attributes_string_key","namespace")] IS NOT NULL) OR (has("attributes_string_key","namespaces") AND "attributes_string_value"[indexOf("attributes_string_key","namespaces")] IS NOT NULL))`))}, }, { // [9] "Simple query string", @@ -1148,8 +1148,8 @@ var TestsSearch = []SearchTestCase{ }`, []string{`"exception-list-agnostic.list_id" = 'endpoint_event_filters'`}, model.Normal, - []model.Query{justWhere(`"exception-list-agnostic.list_id" = 'endpoint_event_filters'`)}, - []string{qToStr(justWhere(`"exception-list-agnostic.list_id" = 'endpoint_event_filters'`))}, + []model.Query{justSimplestWhere(`"exception-list-agnostic.list_id" = 'endpoint_event_filters'`)}, + []string{qToStr(justSimplestWhere(`"exception-list-agnostic.list_id" = 'endpoint_event_filters'`))}, }, { // [10] "Simple query string wildcard", @@ -1174,8 +1174,8 @@ var TestsSearch = []SearchTestCase{ }`, []string{`"message" = 'ingest-agent-policies'`}, model.Normal, - []model.Query{justWhere(`"message" = 'ingest-agent-policies'`)}, - []string{qToStr(justWhere(`"message" = 'ingest-agent-policies'`))}, + []model.Query{justSimplestWhere(`"message" = 'ingest-agent-policies'`)}, + []string{qToStr(justSimplestWhere(`"message" 
= 'ingest-agent-policies'`))}, }, { // [11] "Simple wildcard", @@ -1197,8 +1197,8 @@ var TestsSearch = []SearchTestCase{ }`, []string{`"task.taskType" iLIKE 'alerting:%'`}, model.Normal, - []model.Query{justWhere(`"task.taskType" iLIKE 'alerting:%'`)}, - []string{qToStr(justWhere(`"task.taskType" iLIKE 'alerting:%'`))}, + []model.Query{justSimplestWhere(`"task.taskType" iLIKE 'alerting:%'`)}, + []string{qToStr(justSimplestWhere(`"task.taskType" iLIKE 'alerting:%'`))}, }, { // [12] "Simple prefix ver1", @@ -1220,8 +1220,8 @@ var TestsSearch = []SearchTestCase{ }`, []string{`"alert.actions.actionRef" iLIKE 'preconfigured:%'`}, model.Normal, - []model.Query{justWhere(`"alert.actions.actionRef" iLIKE 'preconfigured:%'`)}, - []string{qToStr(justWhere(`"alert.actions.actionRef" iLIKE 'preconfigured:%'`))}, + []model.Query{justSimplestWhere(`"alert.actions.actionRef" iLIKE 'preconfigured:%'`)}, + []string{qToStr(justSimplestWhere(`"alert.actions.actionRef" iLIKE 'preconfigured:%'`))}, }, { // [13] "Simple prefix ver2", @@ -1233,8 +1233,8 @@ var TestsSearch = []SearchTestCase{ }`, []string{`"user" iLIKE 'ki%'`}, model.Normal, - []model.Query{justWhere(`"user" iLIKE 'ki%'`)}, - []string{qToStr(justWhere(`"user" iLIKE 'ki%'`))}, + []model.Query{justSimplestWhere(`"user" iLIKE 'ki%'`)}, + []string{qToStr(justSimplestWhere(`"user" iLIKE 'ki%'`))}, }, { // [14] "Query string, wildcards don't work properly", @@ -1251,8 +1251,8 @@ var TestsSearch = []SearchTestCase{ }`, []string{`"message" ILIKE '% logged'`}, model.Normal, - []model.Query{justWhere(`"message" ILIKE '% logged'`)}, - []string{qToStr(justWhere(`"message" ILIKE '% logged'`))}, + []model.Query{justSimplestWhere(`"message" ILIKE '% logged'`)}, + []string{qToStr(justSimplestWhere(`"message" ILIKE '% logged'`))}, }, { // [15] "Empty bool", @@ -1283,8 +1283,8 @@ var TestsSearch = []SearchTestCase{ }`, []string{`"message" iLIKE '%this is a test%'`}, model.Normal, - []model.Query{justWhere(`"message" iLIKE '%this is a 
test%'`)}, - []string{qToStr(justWhere(`"message" iLIKE '%this is a test%'`))}, + []model.Query{justSimplestWhere(`"message" iLIKE '%this is a test%'`)}, + []string{qToStr(justSimplestWhere(`"message" iLIKE '%this is a test%'`))}, }, { // [17] "More nested 'match_phrase'", @@ -1300,8 +1300,8 @@ var TestsSearch = []SearchTestCase{ }`, []string{`"message" iLIKE '%this is a test%'`}, model.Normal, - []model.Query{justWhere(`"message" iLIKE '%this is a test%'`)}, - []string{qToStr(justWhere(`"message" iLIKE '%this is a test%'`))}, + []model.Query{justSimplestWhere(`"message" iLIKE '%this is a test%'`)}, + []string{qToStr(justSimplestWhere(`"message" iLIKE '%this is a test%'`))}, }, { // [18] "Simple nested", @@ -1332,11 +1332,11 @@ var TestsSearch = []SearchTestCase{ }`, []string{`"references.type"='tag'`}, model.Normal, - []model.Query{justWhere(`"references.type"='tag'`)}, - []string{qToStr(justWhere(`"references.type"='tag'`))}, + []model.Query{justSimplestWhere(`"references.type"='tag'`)}, + []string{qToStr(justSimplestWhere(`"references.type"='tag'`))}, }, { // [19] - "TODO bad answer?", + "random simple test", ` { "size": 0, @@ -1399,8 +1399,8 @@ var TestsSearch = []SearchTestCase{ }, model.Normal, []model.Query{ - justWhere(`"message" iLIKE '%user%' AND ("@timestamp">=parseDateTime64BestEffort('2024-01-22T09:26:10.299Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-22T09:41:10.299Z'))`), - justWhere(`"message" iLIKE '%user%' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-22T09:41:10.299Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-22T09:26:10.299Z'))`), + justSimplestWhere(`"message" iLIKE '%user%' AND ("@timestamp">=parseDateTime64BestEffort('2024-01-22T09:26:10.299Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-22T09:41:10.299Z'))`), + justSimplestWhere(`"message" iLIKE '%user%' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-22T09:41:10.299Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-22T09:26:10.299Z'))`), 
}, []string{`SELECT count() FROM "logs-generic-default" WHERE "message" iLIKE '%user%' AND ("@timestamp".=parseDateTime64BestEffort('2024-01-22T09:..:10.299Z') AND "@timestamp".=parseDateTime64BestEffort('2024-01-22T09:..:10.299Z'))`}, }, @@ -1475,8 +1475,8 @@ var TestsSearch = []SearchTestCase{ }, model.Normal, []model.Query{ - justWhere(`"service.name"='admin' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-22T14:49:35.873Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-22T14:34:35.873Z'))`), - justWhere(`"service.name"='admin' AND ("@timestamp">=parseDateTime64BestEffort('2024-01-22T14:34:35.873Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-22T14:49:35.873Z'))`), + justSimplestWhere(`"service.name"='admin' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-22T14:49:35.873Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-22T14:34:35.873Z'))`), + justSimplestWhere(`"service.name"='admin' AND ("@timestamp">=parseDateTime64BestEffort('2024-01-22T14:34:35.873Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-22T14:49:35.873Z'))`), }, []string{`SELECT count() FROM "logs-generic-default" WHERE "service.name"='admin' AND ("@timestamp".=parseDateTime64BestEffort('2024-01-22T14:..:35.873Z') AND "@timestamp".=parseDateTime64BestEffort('2024-01-22T14:..:35.873Z'))`}, }, @@ -1543,8 +1543,8 @@ var TestsSearch = []SearchTestCase{ `"message" iLIKE '%User logged out%' AND "host.name" iLIKE '%poseidon%' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-29T18:11:36.491Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-29T15:36:36.491Z'))`}, model.Normal, []model.Query{ - justWhere(`"message" iLIKE '%User logged out%' AND "host.name" iLIKE '%poseidon%' AND ("@timestamp">=parseDateTime64BestEffort('2024-01-29T15:36:36.491Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-29T18:11:36.491Z'))`), - justWhere(`"message" iLIKE '%User logged out%' AND "host.name" iLIKE '%poseidon%' AND 
("@timestamp"<=parseDateTime64BestEffort('2024-01-29T18:11:36.491Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-29T15:36:36.491Z'))`), + justSimplestWhere(`"message" iLIKE '%User logged out%' AND "host.name" iLIKE '%poseidon%' AND ("@timestamp">=parseDateTime64BestEffort('2024-01-29T15:36:36.491Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-29T18:11:36.491Z'))`), + justSimplestWhere(`"message" iLIKE '%User logged out%' AND "host.name" iLIKE '%poseidon%' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-29T18:11:36.491Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-29T15:36:36.491Z'))`), }, []string{`SELECT count() FROM "logs-generic-default" WHERE "message" iLIKE '%User logged out%' AND "host.name" iLIKE '%poseidon%' AND ("@timestamp".=parseDateTime64BestEffort('2024-01-29T1.:..:36.491Z') AND "@timestamp".=parseDateTime64BestEffort('2024-01-29T1.:..:36.491Z'))`}, }, @@ -1608,8 +1608,8 @@ var TestsSearch = []SearchTestCase{ `"message" iLIKE '%user%' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-22T09:41:10.299Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-22T09:26:10.299Z'))`}, model.Normal, []model.Query{ - justWhere(`"message" iLIKE '%user%' AND ("@timestamp">=parseDateTime64BestEffort('2024-01-22T09:26:10.299Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-22T09:41:10.299Z'))`), - justWhere(`"message" iLIKE '%user%' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-22T09:41:10.299Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-22T09:26:10.299Z'))`), + justSimplestWhere(`"message" iLIKE '%user%' AND ("@timestamp">=parseDateTime64BestEffort('2024-01-22T09:26:10.299Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-22T09:41:10.299Z'))`), + justSimplestWhere(`"message" iLIKE '%user%' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-22T09:41:10.299Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-22T09:26:10.299Z'))`), }, []string{`SELECT count() FROM "logs-generic-default" WHERE 
"message" iLIKE '%user%' AND ("@timestamp".=parseDateTime64BestEffort('2024-01-22T09:..:10.299Z') AND "@timestamp".=parseDateTime64BestEffort('2024-01-22T09:..:10.299Z'))`}, }, @@ -1676,8 +1676,8 @@ var TestsSearch = []SearchTestCase{ `"message" iLIKE '%User logged out%' AND "host.name" iLIKE '%poseidon%' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-29T18:11:36.491Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-29T15:36:36.491Z'))`}, model.Normal, []model.Query{ - justWhere(`"message" iLIKE '%User logged out%' AND "host.name" iLIKE '%poseidon%' AND ("@timestamp">=parseDateTime64BestEffort('2024-01-29T15:36:36.491Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-29T18:11:36.491Z'))`), - justWhere(`"message" iLIKE '%User logged out%' AND "host.name" iLIKE '%poseidon%' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-29T18:11:36.491Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-29T15:36:36.491Z'))`), + justSimplestWhere(`"message" iLIKE '%User logged out%' AND "host.name" iLIKE '%poseidon%' AND ("@timestamp">=parseDateTime64BestEffort('2024-01-29T15:36:36.491Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-29T18:11:36.491Z'))`), + justSimplestWhere(`"message" iLIKE '%User logged out%' AND "host.name" iLIKE '%poseidon%' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-29T18:11:36.491Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-29T15:36:36.491Z'))`), }, []string{`SELECT count() FROM "logs-generic-default" WHERE "message" iLIKE '%User logged out%' AND "host.name" iLIKE '%poseidon%' AND ("@timestamp".=parseDateTime64BestEffort('2024-01-29T1.:..:36.491Z') AND "@timestamp".=parseDateTime64BestEffort('2024-01-29T1.:..:36.491Z'))`}, }, @@ -1741,8 +1741,8 @@ var TestsSearch = []SearchTestCase{ `"message" iLIKE '%user%' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-22T09:41:10.299Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-22T09:26:10.299Z'))`}, model.Normal, []model.Query{ - 
justWhere(`"message" iLIKE '%user%' AND ("@timestamp">=parseDateTime64BestEffort('2024-01-22T09:26:10.299Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-22T09:41:10.299Z'))`), - justWhere(`"message" iLIKE '%user%' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-22T09:41:10.299Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-22T09:26:10.299Z'))`), + justSimplestWhere(`"message" iLIKE '%user%' AND ("@timestamp">=parseDateTime64BestEffort('2024-01-22T09:26:10.299Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-22T09:41:10.299Z'))`), + justSimplestWhere(`"message" iLIKE '%user%' AND ("@timestamp"<=parseDateTime64BestEffort('2024-01-22T09:41:10.299Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-22T09:26:10.299Z'))`), }, []string{`SELECT count() FROM "logs-generic-default" WHERE "message" iLIKE '%user%' AND ("@timestamp".=parseDateTime64BestEffort('2024-01-22T09:..:10.299Z') AND "@timestamp".=parseDateTime64BestEffort('2024-01-22T09:..:10.299Z'))`}, }, @@ -1826,8 +1826,8 @@ var TestsSearchNoAttrs = []SearchTestCase{ }, model.Normal, []model.Query{ - justWhere(`"@timestamp">=parseDateTime64BestEffort('2024-01-25T13:22:45.968Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-25T13:37:45.968Z')`), - justWhere(`"@timestamp"<=parseDateTime64BestEffort('2024-01-25T13:37:45.968Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-25T13:22:45.968Z')`), + justSimplestWhere(`"@timestamp">=parseDateTime64BestEffort('2024-01-25T13:22:45.968Z') AND "@timestamp"<=parseDateTime64BestEffort('2024-01-25T13:37:45.968Z')`), + justSimplestWhere(`"@timestamp"<=parseDateTime64BestEffort('2024-01-25T13:37:45.968Z') AND "@timestamp">=parseDateTime64BestEffort('2024-01-25T13:22:45.968Z')`), }, []string{`SELECT "message" FROM "logs-generic-default" WHERE ("@timestamp".=parseDateTime64BestEffort('2024-01-25T13:..:45.968Z') AND "@timestamp".=parseDateTime64BestEffort('2024-01-25T13:..:45.968Z')) AND (has("attributes_string_key","summary") AND 
"attributes_string_value"[indexOf("attributes_string_key","summary")] IS NOT NULL) AND NOT (has("attributes_string_key","run_once") AND "attributes_string_value"[indexOf("attributes_string_key","run_once")] IS NOT NULL)`}, }, @@ -1879,8 +1879,8 @@ var TestSearchFilter = []SearchTestCase{ }, model.Normal, []model.Query{ - justWhere(``), - justWhere(``), + justSimplestWhere(``), + justSimplestWhere(``), }, []string{ "SELECT count() FROM " + QuotedTableName, @@ -1939,8 +1939,8 @@ var TestSearchFilter = []SearchTestCase{ }, model.Normal, []model.Query{ - justWhere(``), - justWhere(``), + justSimplestWhere(``), + justSimplestWhere(``), }, []string{ "SELECT count() FROM " + QuotedTableName + ` WHERE "@timestamp">subDate(now(), INTERVAL 15 minute)`, diff --git a/quesma/testdata/requests_no_full_text_fields.go b/quesma/testdata/requests_no_full_text_fields.go index 2f8260dcf..01f04d995 100644 --- a/quesma/testdata/requests_no_full_text_fields.go +++ b/quesma/testdata/requests_no_full_text_fields.go @@ -104,8 +104,8 @@ var TestsSearchNoFullTextFields = []SearchTestCase{ }, WantedQueryType: model.Normal, WantedQuery: []model.Query{ - justWhere(`(((false AND false) OR (false AND false) OR false) AND NOT false) AND ("timestamp">='2024-03-26T09:56:02.241Z' AND "timestamp"<='2024-04-10T08:56:02.241Z')`), - justWhere(`(((false AND false) OR (false AND false) OR false) AND NOT false) AND ("timestamp"<='2024-04-10T08:56:02.241Z' AND "timestamp">='2024-03-26T09:56:02.241Z')`), + justSimplestWhere(`(((false AND false) OR (false AND false) OR false) AND NOT false) AND ("timestamp">='2024-03-26T09:56:02.241Z' AND "timestamp"<='2024-04-10T08:56:02.241Z')`), + justSimplestWhere(`(((false AND false) OR (false AND false) OR false) AND NOT false) AND ("timestamp"<='2024-04-10T08:56:02.241Z' AND "timestamp">='2024-03-26T09:56:02.241Z')`), }, WantedRegexes: []string{}, // empty, as not important so far. 
Can be filled later if needed }, diff --git a/quesma/testdata/util.go b/quesma/testdata/util.go index 0e36c1844..fe651c793 100644 --- a/quesma/testdata/util.go +++ b/quesma/testdata/util.go @@ -39,14 +39,16 @@ func selectFieldsInAnyOrderAsRegex(fields []string) string { const TableName = "logs-generic-default" const QuotedTableName = `"` + TableName + `"` const queryparserFacetsSampleSize = "20000" // should be same value as queryparser.facetsSampleSize +const defaultLimit = model.DefaultSizeListQuery const oneMinute = 60 * time.Second func newSimplestQuery() model.Query { return model.Query{ - Fields: []string{"*"}, - FromClause: strconv.Quote(TableName), - CanParse: true, + Fields: []string{"*"}, + FromClause: strconv.Quote(TableName), + SuffixClauses: []string{"LIMIT " + strconv.Itoa(defaultLimit)}, + CanParse: true, } } @@ -56,7 +58,7 @@ func qToStr(query model.Query) string { } // justWhere is a simple helper function to help fill out test cases -func justWhere(whereClause string) model.Query { +func justSimplestWhere(whereClause string) model.Query { query := newSimplestQuery() query.WhereClause = whereClause return query From 92aebdf810a7b2d412f2cfa70cd76e9c5723e8bf Mon Sep 17 00:00:00 2001 From: Krzysztof Kiewicz Date: Wed, 8 May 2024 19:02:09 +0200 Subject: [PATCH 03/14] Fix test flakiness - bump timeouts (#64) Those new tests for errors in UI per query type passed 100/100 times on my PC, also they've been here for some time, and noone reported flakiness. But it happened to me now, so I guess I need to increase timeouts a bit. I also remove 1 print from tests, I guess it shouldn't be there. 
--- quesma/quesma/field_caps_test.go | 2 -- quesma/quesma/search_test.go | 4 ++-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/quesma/quesma/field_caps_test.go b/quesma/quesma/field_caps_test.go index f47706b6b..ad6ca835e 100644 --- a/quesma/quesma/field_caps_test.go +++ b/quesma/quesma/field_caps_test.go @@ -2,7 +2,6 @@ package quesma import ( "encoding/json" - "fmt" "github.com/stretchr/testify/assert" "mitmproxy/quesma/clickhouse" "mitmproxy/quesma/concurrent" @@ -190,7 +189,6 @@ func TestFieldCapsMultipleIndexesConflictingEntries(t *testing.T) { }, }) resp, err := handleFieldCapsIndex(ctx, []string{"logs-1", "logs-2", "logs-3"}, *tableMap) - fmt.Printf("string(resp): %+v\n", string(resp)) assert.NoError(t, err) expectedResp, err := json.MarshalIndent([]byte(`{ "fields": { diff --git a/quesma/quesma/search_test.go b/quesma/quesma/search_test.go index 300b9c2ac..8fe625eab 100644 --- a/quesma/quesma/search_test.go +++ b/quesma/quesma/search_test.go @@ -437,7 +437,7 @@ func TestAllUnsupportedQueryTypesAreProperlyRecorded(t *testing.T) { // (go managementConsole.RunOnlyChannelProcessor() above), so we might need to wait a bit assert.Eventually(t, func() bool { return len(managementConsole.QueriesWithUnsupportedType(tt.QueryType)) == 1 - }, 150*time.Millisecond, 1*time.Millisecond) + }, 250*time.Millisecond, 1*time.Millisecond) assert.Equal(t, 1, managementConsole.GetTotalUnsupportedQueries()) assert.Equal(t, 1, managementConsole.GetSavedUnsupportedQueries()) assert.Equal(t, 1, len(managementConsole.GetUnsupportedTypesWithCount())) @@ -489,7 +489,7 @@ func TestDifferentUnsupportedQueries(t *testing.T) { // (go managementConsole.RunOnlyChannelProcessor() above), so we might need to wait a bit assert.Eventually(t, func() bool { return len(managementConsole.QueriesWithUnsupportedType(tt.QueryType)) == min(testCounts[i], maxSavedQueriesPerQueryType) - }, 500*time.Millisecond, 1*time.Millisecond, + }, 600*time.Millisecond, 1*time.Millisecond, 
tt.TestName+": wanted: %d, got: %d", min(testCounts[i], maxSavedQueriesPerQueryType), len(managementConsole.QueriesWithUnsupportedType(tt.QueryType)), ) From 62e6b307707bd2378fb234152b93c75337443270 Mon Sep 17 00:00:00 2001 From: Krzysztof Kiewicz Date: Wed, 8 May 2024 21:29:28 +0200 Subject: [PATCH 04/14] Respect `keyed` parameter in `percentile_ranks` aggr (#57) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Before: dashboard is broken. We don't respect `keyed`, and return response in incorrect format. I've already fixed it for 2 other aggregations. Not sure why, but I didn't add a test for this aggregation, so we had none. Now we have 1 😆 Screenshot 2024-05-08 at 00 15 39 Screenshot 2024-05-08 at 00 15 14 After: Screenshot 2024-05-08 at 18 52 39 --- .../metrics_aggregations/percentile_ranks.go | 97 ++++++++--- quesma/queryparser/aggregation_parser.go | 14 +- .../aggregation_requests.go | 152 ++++++++++++++++++ 3 files changed, 236 insertions(+), 27 deletions(-) diff --git a/quesma/model/metrics_aggregations/percentile_ranks.go b/quesma/model/metrics_aggregations/percentile_ranks.go index 50e87eb6a..6537f63ff 100644 --- a/quesma/model/metrics_aggregations/percentile_ranks.go +++ b/quesma/model/metrics_aggregations/percentile_ranks.go @@ -4,15 +4,19 @@ import ( "context" "mitmproxy/quesma/logger" "mitmproxy/quesma/model" + "strconv" "strings" ) type PercentileRanks struct { ctx context.Context + // defines what response should look like + // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-percentile-rank-aggregation.html#_keyed_response_5 + Keyed bool } -func NewPercentileRanks(ctx context.Context) PercentileRanks { - return PercentileRanks{ctx: ctx} +func NewPercentileRanks(ctx context.Context, keyed bool) PercentileRanks { + return PercentileRanks{ctx: ctx, Keyed: keyed} } func (query PercentileRanks) IsBucketAggregation() bool { @@ -20,33 +24,74 @@ func (query 
PercentileRanks) IsBucketAggregation() bool { } func (query PercentileRanks) TranslateSqlResponseToJson(rows []model.QueryResultRow, level int) []model.JsonMap { - valueMap := make(map[string]float64) - for _, percentileRank := range rows[0].Cols[level:] { - // percentileRank.ColName looks like this [...]<=X,[...]. We're extracting X. - // It always needs to have .Y or .YZ at the end, so 1 or 2 digits after the dot, and dot is mandatory. - // Also, can't be .00, needs to be .0 - beg := strings.Index(percentileRank.ColName, "<=") - end := strings.Index(percentileRank.ColName[beg:], ",") - cutValue := percentileRank.ColName[beg+2 : beg+end] - - dot := strings.Index(cutValue, ".") - if dot == -1 { - cutValue += ".0" - } else if end-dot >= len(".00") && cutValue[dot:dot+3] == ".00" { - cutValue = cutValue[:dot+2] - } else { - cutValue = cutValue[:dot+3] + if len(rows) == 0 { + logger.WarnWithCtx(query.ctx).Msg("no rows in percentile ranks response") + return make([]model.JsonMap, 0) + } + // I duplicate a lot of code in this if/else below, + // but I think it's worth it, as this function might get called a lot of times for a single query. + // And because of complete separation in if/else, I guess it might (should) be slightly faster (?) + if query.Keyed { + valueMap := make(model.JsonMap) + for _, percentileRank := range rows[0].Cols[level:] { + // percentileRank.ColName looks like this [...]<=X,[...]. We're extracting X. + // It always needs to have .Y or .YZ at the end, so 1 or 2 digits after the dot, and dot is mandatory. 
+ // Also, can't be .00, needs to be .0 + beg := strings.Index(percentileRank.ColName, "<=") + end := strings.Index(percentileRank.ColName[beg:], ",") + cutValue := percentileRank.ColName[beg+2 : beg+end] + + dot := strings.Index(cutValue, ".") + if dot == -1 { + cutValue += ".0" + } else if end-dot >= len(".00") && cutValue[dot:dot+3] == ".00" { + cutValue = cutValue[:dot+2] + } else { + cutValue = cutValue[:dot+3] + } + if value, ok := percentileRank.Value.(float64); ok { + valueMap[cutValue] = value + } else { + logger.WarnWithCtx(query.ctx).Msgf("failed to convert percentile rank value to float64, type: %T, value: %v. Skipping", + percentileRank.Value, percentileRank.Value) + } } - if value, ok := percentileRank.Value.(float64); ok { - valueMap[cutValue] = value - } else { - logger.WarnWithCtx(query.ctx).Msgf("failed to convert percentile rank value to float64, type: %T, value: %v", - percentileRank.Value, percentileRank.Value) + return []model.JsonMap{{ + "values": valueMap, + }} + } else { + buckets := make([]model.JsonMap, 0) + for _, percentileRank := range rows[0].Cols[level:] { + // percentileRank.ColName looks like this [...]<=X,[...]. We're extracting X. + // It always needs to have .Y or .YZ at the end, so 1 or 2 digits after the dot, and dot is mandatory. 
+ // Also, can't be .00, needs to be .0 + beg := strings.Index(percentileRank.ColName, "<=") + end := strings.Index(percentileRank.ColName[beg:], ",") + cutValue := percentileRank.ColName[beg+2 : beg+end] + + dot := strings.Index(cutValue, ".") + if dot == -1 { + cutValue += ".0" + } else if end-dot >= len(".00") && cutValue[dot:dot+3] == ".00" { + cutValue = cutValue[:dot+2] + } else { + cutValue = cutValue[:dot+3] + } + cutValueFloat, _ := strconv.ParseFloat(cutValue, 64) + if value, ok := percentileRank.Value.(float64); ok { + buckets = append(buckets, model.JsonMap{ + "key": cutValueFloat, + "value": value, + }) + } else { + logger.WarnWithCtx(query.ctx).Msgf("failed to convert percentile rank value to float64, type: %T, value: %v. Skipping", + percentileRank.Value, percentileRank.Value) + } } + return []model.JsonMap{{ + "values": buckets, + }} } - return []model.JsonMap{{ - "values": valueMap, - }} } func (query PercentileRanks) String() string { diff --git a/quesma/queryparser/aggregation_parser.go b/quesma/queryparser/aggregation_parser.go index 4a736c80a..d070fcf60 100644 --- a/quesma/queryparser/aggregation_parser.go +++ b/quesma/queryparser/aggregation_parser.go @@ -17,6 +17,8 @@ import ( "strings" ) +const keyedDefaultValuePercentileRanks = true + type filter struct { name string sql SimpleQuery @@ -184,7 +186,7 @@ func (b *aggrQueryBuilder) buildMetricsAggregation(metricsAggr metricsAggregatio case "value_count": query.Type = metrics_aggregations.NewValueCount(b.ctx) case "percentile_ranks": - query.Type = metrics_aggregations.NewPercentileRanks(b.ctx) + query.Type = metrics_aggregations.NewPercentileRanks(b.ctx, metricsAggr.Keyed) } return query } @@ -512,10 +514,20 @@ func (cw *ClickhouseQueryTranslator) tryMetricsAggregation(queryMap QueryMap) (m logger.WarnWithCtx(cw.Ctx).Msgf("cutValue in percentile_ranks is not a number, but %T, value: %v. 
Skipping.", cutValue, cutValue) } } + var keyed bool + if keyedRaw, ok := percentileRanks.(QueryMap)["keyed"]; ok { + if keyed, ok = keyedRaw.(bool); !ok { + logger.WarnWithCtx(cw.Ctx).Msgf("keyed specified for percentiles aggregation is not a boolean. Querymap: %v", queryMap) + keyed = keyedDefaultValuePercentileRanks + } + } else { + keyed = keyedDefaultValuePercentileRanks + } return metricsAggregation{ AggrType: "percentile_ranks", FieldNames: fieldNames, FieldType: metricsAggregationDefaultFieldType, // don't need to check, it's unimportant for this aggregation + Keyed: keyed, }, true } diff --git a/quesma/testdata/opensearch-visualize/aggregation_requests.go b/quesma/testdata/opensearch-visualize/aggregation_requests.go index 55251a79c..81a0e47e9 100644 --- a/quesma/testdata/opensearch-visualize/aggregation_requests.go +++ b/quesma/testdata/opensearch-visualize/aggregation_requests.go @@ -1124,4 +1124,156 @@ var AggregationTests = []testdata.AggregationTestCase{ `ORDER BY ("response")`, }, }, + { // [7] + TestName: "Percentile_ranks keyed=false. 
Reproduce: Visualize -> Line -> Metrics: Percentile Ranks, Buckets: X-Asis Date Histogram", + QueryRequestJson: ` + { + "_source": { + "excludes": [] + }, + "aggs": { + "2": { + "aggs": { + "1": { + "percentile_ranks": { + "field": "AvgTicketPrice", + "keyed": false, + "values": [ + 0, + 50000 + ] + } + } + }, + "date_histogram": { + "calendar_interval": "1h", + "field": "timestamp", + "min_doc_count": 1, + "time_zone": "Europe/Warsaw" + } + } + }, + "docvalue_fields": [ + { + "field": "timestamp", + "format": "date_time" + } + ], + "query": { + "bool": { + "filter": [], + "must": [ + { + "match_all": {} + } + ], + "must_not": [], + "should": [] + } + }, + "script_fields": { + "hour_of_day": { + "script": { + "lang": "painless", + "source": "doc['timestamp'].value.hourOfDay" + } + } + }, + "size": 0, + "stored_fields": [ + "*" + ] + }`, + ExpectedResponse: ` + { + "_shards": { + "failed": 0, + "skipped": 0, + "successful": 1, + "total": 1 + }, + "aggregations": { + "2": { + "buckets": [ + { + "1": { + "values": [ + { + "key": 0.0, + "value": 0.0 + }, + { + "key": 50000.0, + "value": 100.0 + } + ] + }, + "doc_count": 9, + "key": 1714860000000, + "key_as_string": "2024-05-04T22:00:00.000" + }, + { + "1": { + "values": [ + { + "key": 0.0, + "value": 0.0 + }, + { + "key": 50000.0, + "value": 50.0 + } + ] + }, + "doc_count": 12, + "key": 1714863600000, + "key_as_string": "2024-05-04T23:00:00.000" + } + ] + } + }, + "hits": { + "hits": [], + "max_score": null, + "total": { + "relation": "eq", + "value": 884 + } + }, + "timed_out": false, + "took": 0 + }`, + ExpectedResults: [][]model.QueryResultRow{ + {{Cols: []model.QueryResultCol{model.NewQueryResultCol("hits", uint64(884))}}}, + { + {Cols: []model.QueryResultCol{ + model.NewQueryResultCol("key", int64(1714860000000/3600000)), + model.NewQueryResultCol("AvgTicketPrice<=0,", 0.0), + model.NewQueryResultCol("AvgTicketPrice<=50000,", 100.0)}, + }, + {Cols: []model.QueryResultCol{ + model.NewQueryResultCol("key", 
int64(1714863600000/3600000)), + model.NewQueryResultCol("AvgTicketPrice<=0,", 0.0), + model.NewQueryResultCol("AvgTicketPrice<=50000,", 50.0), + }}, + }, + { + {Cols: []model.QueryResultCol{model.NewQueryResultCol("key", int64(1714860000000/3600000)), model.NewQueryResultCol("doc_count", 9)}}, + {Cols: []model.QueryResultCol{model.NewQueryResultCol("key", int64(1714863600000/3600000)), model.NewQueryResultCol("doc_count", 12)}}, + }, + }, + ExpectedSQLs: []string{ + `SELECT count() FROM ` + testdata.QuotedTableName + ` `, + "SELECT toInt64(toUnixTimestamp64Milli(`timestamp`)/3600000), " + + `count(if("AvgTicketPrice"<=0.000000, 1, NULL))/count(*)*100, ` + + `count(if("AvgTicketPrice"<=50000.000000, 1, NULL))/count(*)*100 ` + + `FROM ` + testdata.QuotedTableName + ` ` + + "GROUP BY (toInt64(toUnixTimestamp64Milli(`timestamp`)/3600000)) " + + "ORDER BY (toInt64(toUnixTimestamp64Milli(`timestamp`)/3600000))", + "SELECT toInt64(toUnixTimestamp64Milli(`timestamp`)/3600000), count() " + + `FROM ` + testdata.QuotedTableName + ` ` + + "GROUP BY (toInt64(toUnixTimestamp64Milli(`timestamp`)/3600000)) " + + "ORDER BY (toInt64(toUnixTimestamp64Milli(`timestamp`)/3600000))", + }, + }, } From e354b8f7dac90ccdee625a4863719b60fb1167f9 Mon Sep 17 00:00:00 2001 From: Jacek Migdal Date: Wed, 8 May 2024 21:34:49 +0200 Subject: [PATCH 05/14] Jacek request UI (#61) Investigating a request requires looking at two screens: - logs - list of request of size one I unify it to a single screen: ![Screenshot 2024-05-08 at 11 46 34](https://github.com/QuesmaOrg/quesma/assets/972989/180fbe5e-9a65-45d9-bc3d-06b148578f2e) ![Screenshot 2024-05-08 at 11 46 20](https://github.com/QuesmaOrg/quesma/assets/972989/8609379b-b1bc-4ee8-b7cc-5f65a20bc662) --------- Co-authored-by: Krzysztof Kiewicz --- quesma/quesma/ui/asset/head.html | 52 ++++++++-- quesma/quesma/ui/console_routes.go | 5 - quesma/quesma/ui/html_pages_test.go | 2 +- quesma/quesma/ui/live_tail_drilldown.go | 128 +++++++++++++++--------- 4 
files changed, 130 insertions(+), 57 deletions(-) diff --git a/quesma/quesma/ui/asset/head.html b/quesma/quesma/ui/asset/head.html index 23bdb1ac3..7633663cf 100644 --- a/quesma/quesma/ui/asset/head.html +++ b/quesma/quesma/ui/asset/head.html @@ -480,34 +480,74 @@ background-color: #dddddd; } - #request-log-messages table, #errors table { + #request-info { + background-color: rgb(0, 0, 0); + color: white; + } + + #request-info .title { + margin-top: 0; + background-color: #333; + color: white; + padding: 0 5px; + } + + #request-info pre { + padding: 0 5px; + } + + #request-info .title-logs { + margin-top: 1em; + } + + #request-info .two-columns { + display: flex; + height: 48%; + } + + #request-info .elastic-response, #request-info .quesma-response, #request-info .query-body, #request-info .query-body-translated { + flex: 1; + overflow: auto; + } + + #request-info .query-body, #request-info .quesma-response { + background-color: rgb(221, 226, 235); + color: black; + } + + #request-info .query-body, #request-info .elastic-response { + border-right: white 5px solid; + } + + #request-info table, #errors table { border-collapse: collapse; table-layout: fixed; width: 98%; word-wrap: break-word; + margin-bottom: 1em; } - #request-log-messages th, #request-log-messages td, #errors table th, #errors table td { + #request-info th, #request-info td, #errors table th, #errors table td { border: 1px solid rgb(221, 226, 235); font-family: Courier; font-size: small; padding: 0 3px; } - #request-log-messages .time { + #request-info .time { width: 10%; } - #request-log-messages .level { + #request-info .level { width: 5%; } - #request-log-messages .message { + #request-info .message { width: 55%; white-space: pre-wrap; } - #request-log-messages .fields { + #request-info .fields { width: 28%; white-space: pre-wrap; } diff --git a/quesma/quesma/ui/console_routes.go b/quesma/quesma/ui/console_routes.go index ce3a462d7..65745b626 100644 --- a/quesma/quesma/ui/console_routes.go +++ 
b/quesma/quesma/ui/console_routes.go @@ -118,11 +118,6 @@ func (qmc *QuesmaManagementConsole) createRouting() *mux.Router { buf := qmc.generateReportForRequestId(vars["requestId"]) _, _ = writer.Write(buf) }) - router.PathPrefix("/log/{requestId}").HandlerFunc(func(writer http.ResponseWriter, r *http.Request) { - vars := mux.Vars(r) - buf := qmc.generateLogForRequestId(vars["requestId"]) - _, _ = writer.Write(buf) - }) router.PathPrefix("/error/{reason}").HandlerFunc(func(writer http.ResponseWriter, r *http.Request) { vars := mux.Vars(r) buf := qmc.generateErrorForReason(vars["reason"]) diff --git a/quesma/quesma/ui/html_pages_test.go b/quesma/quesma/ui/html_pages_test.go index ae2515657..470e0e994 100644 --- a/quesma/quesma/ui/html_pages_test.go +++ b/quesma/quesma/ui/html_pages_test.go @@ -49,7 +49,7 @@ func TestHtmlPages(t *testing.T) { }) t.Run("logs got no XSS", func(t *testing.T) { - response := string(qmc.generateLogForRequestId(id)) + response := string(qmc.generateReportForRequestId(id)) assert.NotContains(t, response, xss) }) diff --git a/quesma/quesma/ui/live_tail_drilldown.go b/quesma/quesma/ui/live_tail_drilldown.go index fb589c805..f1a507a5e 100644 --- a/quesma/quesma/ui/live_tail_drilldown.go +++ b/quesma/quesma/ui/live_tail_drilldown.go @@ -5,6 +5,7 @@ import ( "fmt" "gopkg.in/yaml.v3" "mitmproxy/quesma/quesma/ui/internal/builder" + "mitmproxy/quesma/quesma/ui/internal/sqlfmt" "strings" ) @@ -13,27 +14,104 @@ func (qmc *QuesmaManagementConsole) generateReportForRequestId(requestId string) request, requestFound := qmc.debugInfoMessages[requestId] qmc.mutex.Unlock() + logMessages, optAsyncId := generateLogMessages(request.logMessages, []string{}) + buffer := newBufferWithHead() if requestFound { - buffer.Write(generateSimpleTop("Report for request UUID " + requestId)) + if optAsyncId != nil { + buffer.Write(generateSimpleTop("Report for request id " + requestId + " and async id " + *optAsyncId)) + } else { + buffer.Write(generateSimpleTop("Report for 
request id " + requestId)) + } } else { buffer.Write(generateSimpleTop("Report not found for request UUID " + requestId)) } - buffer.Html(`
`) + buffer.Html(`
` + "\n") + + // Show Request and SQL + if requestFound { + buffer.Html(`
` + "\n") + buffer.Html(`
` + "\n") + buffer.Html("

Original query:

\n") + buffer.Html(`
`)
+		buffer.Text(string(request.IncomingQueryBody))
+		buffer.Html("\n
") + buffer.Html(`
` + "\n") + + buffer.Html(`
` + "\n") + buffer.Html("

Translated SQL:

\n") + buffer.Html(`
`)
+		buffer.Text(sqlfmt.SqlPrettyPrint(request.QueryBodyTranslated))
+		buffer.Html("\n
") + buffer.Html(`
` + "\n") + buffer.Html(`
` + "\n") + } + + buffer.Html("\n\n") + buffer.Html(`
`) + + buffer.Html(`

`) + if requestFound && len(request.logMessages) > 0 { + buffer.Html("Logs:

\n") + buffer.Write(logMessages) + } else { + buffer.Html("No logs for this request

\n") + } - debugKeyValueSlice := []queryDebugInfoWithId{} + // Show ElasticSearch and Quesma Response if requestFound { - debugKeyValueSlice = append(debugKeyValueSlice, queryDebugInfoWithId{requestId, request}) + buffer.Html(`
` + "\n") + buffer.Html(`
` + "\n") + if len(request.QueryDebugPrimarySource.QueryResp) > 0 { + tookStr := fmt.Sprintf(" took %d ms:", request.PrimaryTook.Milliseconds()) + buffer.Html("

Elastic response").Text(tookStr).Html("

\n") + buffer.Html(`
`)
+			buffer.Text(string(request.QueryDebugPrimarySource.QueryResp))
+			buffer.Html("\n
") + } else { + buffer.Html("

No Elastic response for this request

\n") + } + buffer.Html(`
` + "\n") + + buffer.Html(`
` + "\n") + if len(request.QueryDebugSecondarySource.QueryTranslatedResults) > 0 { + tookStr := fmt.Sprintf(" took %d ms:", request.SecondaryTook.Milliseconds()) + buffer.Html("

Quesma response").Text(tookStr).Html("

\n") + buffer.Html(`
`)
+			buffer.Text(string(request.QueryDebugSecondarySource.QueryTranslatedResults))
+			buffer.Html("\n
") + } else { + buffer.Html("

No Quesma response for this request

\n") + } + buffer.Html(`
` + "\n") + buffer.Html(`
` + "\n") } - buffer.Write(generateQueries(debugKeyValueSlice, false)) + buffer.Html("\n
\n") buffer.Html("\n
\n") buffer.Html(`") - buffer.Html("\n") - buffer.Html("\n") - return buffer.Bytes() -} - -func (qmc *QuesmaManagementConsole) generateLogForRequestId(requestId string) []byte { - qmc.mutex.Lock() - request, requestFound := qmc.debugInfoMessages[requestId] - qmc.mutex.Unlock() - - logMessages, optAsyncId := generateLogMessages(request.logMessages, []string{}) - - buffer := newBufferWithHead() - if requestFound { - if optAsyncId != nil { - buffer.Write(generateSimpleTop("Log for request id " + requestId + " and async id " + *optAsyncId)) - } else { - buffer.Write(generateSimpleTop("Log for request id " + requestId)) - } - } else { - buffer.Write(generateSimpleTop("Log not found for request id " + requestId)) - } - - buffer.Html(`
`) - buffer.Html("\n\n") - buffer.Html(`
`) - - buffer.Write(logMessages) - - buffer.Html("\n
\n") - buffer.Html("\n
\n") - buffer.Html(`") buffer.Html("\n") From 62955a2b1623cbbb3f24d7e6543b81c2fa4903b4 Mon Sep 17 00:00:00 2001 From: Krzysztof Kiewicz Date: Thu, 9 May 2024 07:14:19 +0200 Subject: [PATCH 06/14] Add context to mergeMaps function (#66) It's very needed, as it's one of the most fragile parts of aggregation handling. When something goes wrong (e.g. for me during testing), it's very often there, and so far we didn't catch any errors in UI because of lack of context. --- quesma/queryparser/query_translator.go | 2 +- quesma/util/utils.go | 9 +++++---- quesma/util/utils_test.go | 3 ++- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/quesma/queryparser/query_translator.go b/quesma/queryparser/query_translator.go index f0fe66f4f..c18fe0065 100644 --- a/quesma/queryparser/query_translator.go +++ b/quesma/queryparser/query_translator.go @@ -459,7 +459,7 @@ func (cw *ClickhouseQueryTranslator) MakeAggregationPartOfResponse(queries []mod } aggregation := cw.makeResponseAggregationRecursive(query, ResultSets[i+1], 0, 0) if len(aggregation) != 0 { - aggregations = util.MergeMaps(aggregations, aggregation[0]) // result of root node is always a single map, thus [0] + aggregations = util.MergeMaps(cw.Ctx, aggregations, aggregation[0]) // result of root node is always a single map, thus [0] } } return aggregations diff --git a/quesma/util/utils.go b/quesma/util/utils.go index 796ab227d..40725c98a 100644 --- a/quesma/util/utils.go +++ b/quesma/util/utils.go @@ -2,6 +2,7 @@ package util import ( "bytes" + "context" "encoding/json" "fmt" "io" @@ -255,7 +256,7 @@ func JsonDifference(jsonActual, jsonExpected string) (JsonMap, JsonMap, error) { // but none of them works for nested maps, so needed to write our own. 
// * mActual - uses JsonMap fully: values are []JsonMap, or JsonMap, or base types // * mExpected - value can also be []any, because it's generated from Golang's json.Unmarshal -func MergeMaps(mActual, mExpected JsonMap) JsonMap { +func MergeMaps(ctx context.Context, mActual, mExpected JsonMap) JsonMap { var mergeMapsRec func(m1, m2 JsonMap) JsonMap // merges 'i1' and 'i2' in 3 cases: both are JsonMap, both are []JsonMap, or both are some base type mergeAny := func(i1, i2 any) any { @@ -263,7 +264,7 @@ func MergeMaps(mActual, mExpected JsonMap) JsonMap { case JsonMap: i2Typed, ok := i2.(JsonMap) if !ok { - logger.Error().Msgf("mergeAny: i1 is map, i2 is not. i1: %v, i2: %v", i1, i2) + logger.ErrorWithCtx(ctx).Msgf("mergeAny: i1 is map, i2 is not. i1: %v, i2: %v", i1, i2) return i1 } return mergeMapsRec(i1Typed, i2Typed) @@ -277,13 +278,13 @@ func MergeMaps(mActual, mExpected JsonMap) JsonMap { i2Typed = append(i2Typed, val) } } else { - logger.Error().Msgf("mergeAny: i1 is []JsonMap, i2 is not an array. i1: %v, i2: %v", i1Typed, i2) + logger.ErrorWithCtx(ctx).Msgf("mergeAny: i1 is []JsonMap, i2 is not an array. i1: %v, i2: %v", i1Typed, i2) } } // lengths should be always equal in our usage of this function, maybe that'll change if len(i1Typed) != len(i2Typed) { - logger.Error().Msgf("mergeAny: i1 and i2 are slices, but have different lengths. i1: %v, i2: %v", i1, i2) + logger.ErrorWithCtx(ctx).Msgf("mergeAny: i1 and i2 are slices, but have different lengths. 
len(i1): %v, len(i2): %v, i1: %v, i2: %v", len(i1Typed), len(i2Typed), i1, i2) return []JsonMap{} } mergedArray := make([]JsonMap, len(i1Typed)) diff --git a/quesma/util/utils_test.go b/quesma/util/utils_test.go index c597118ee..d90c3b966 100644 --- a/quesma/util/utils_test.go +++ b/quesma/util/utils_test.go @@ -1,6 +1,7 @@ package util import ( + "context" "encoding/json" "github.com/stretchr/testify/assert" "reflect" @@ -602,7 +603,7 @@ func TestMergeMaps(t *testing.T) { for i, tt := range cases { t.Run("TestMergeMaps_"+strconv.Itoa(i), func(t *testing.T) { // simple == or Equal doesn't work on nested maps => need DeepEqual - assert.True(t, reflect.DeepEqual(tt.wanted, MergeMaps(tt.m1, tt.m2))) + assert.True(t, reflect.DeepEqual(tt.wanted, MergeMaps(context.Background(), tt.m1, tt.m2))) }) } } From def5925e9103635c953b02930273ba6ace435d34 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 9 May 2024 10:22:04 +0200 Subject: [PATCH 07/14] Bump github.com/ClickHouse/clickhouse-go/v2 from 2.23.2 to 2.24.0 in /quesma (#67) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [github.com/ClickHouse/clickhouse-go/v2](https://github.com/ClickHouse/clickhouse-go) from 2.23.2 to 2.24.0.
Release notes

Sourced from github.com/ClickHouse/clickhouse-go/v2's releases.

v2.24.0

What's Changed

Enhancements 🎉

Fixes 🐛

Other Changes 🛠

New Contributors

Full Changelog: https://github.com/ClickHouse/clickhouse-go/compare/v2.23.2...v2.24.0

Changelog

Sourced from github.com/ClickHouse/clickhouse-go/v2's changelog.

v2.24.0, 2024-05-08

What's Changed

Enhancements 🎉

Fixes 🐛

Other Changes 🛠

New Contributors

Full Changelog: https://github.com/ClickHouse/clickhouse-go/compare/v2.23.2...v2.24.0

Commits
  • 28fd6a4 Update release notes
  • 544f2a7 fix: fix some nil checks (#1283)
  • cd37406 Optional flag to close query with flush (#1276)
  • 1ae716e Bump go.opentelemetry.io/otel/trace from 1.24.0 to 1.26.0 (#1282)
  • 0771fb3 Bump github.com/docker/docker (#1289)
  • a9bbb75 Fix prepare batch does not break on values substring in table name (#1290)
  • c3f7a77 Always compress responses when the client compression is on (#1286)
  • 6985077 Merge pull request #1291 from ClickHouse/gg/append_block_lc_fix
  • 42eeaa8 Don't recreate keys from LC columns from direct stream
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/ClickHouse/clickhouse-go/v2&package-manager=go_modules&previous-version=2.23.2&new-version=2.24.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- quesma/go.mod | 6 +++--- quesma/go.sum | 12 ++++++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/quesma/go.mod b/quesma/go.mod index b973ebde6..e7078222a 100644 --- a/quesma/go.mod +++ b/quesma/go.mod @@ -3,7 +3,7 @@ module mitmproxy/quesma go 1.22.0 require ( - github.com/ClickHouse/clickhouse-go/v2 v2.23.2 + github.com/ClickHouse/clickhouse-go/v2 v2.24.0 github.com/DATA-DOG/go-sqlmock v1.5.2 github.com/DataDog/go-sqllexer v0.0.11 github.com/barkimedes/go-deepcopy v0.0.0-20220514131651-17c30cfc62df @@ -58,8 +58,8 @@ require ( github.com/tklauser/go-sysconf v0.3.12 // indirect github.com/tklauser/numcpus v0.6.1 // indirect github.com/yusufpapurcu/wmi v1.2.4 // indirect - go.opentelemetry.io/otel v1.24.0 // indirect - go.opentelemetry.io/otel/trace v1.24.0 // indirect + go.opentelemetry.io/otel v1.26.0 // indirect + go.opentelemetry.io/otel/trace v1.26.0 // indirect golang.org/x/sys v0.19.0 // indirect gopkg.in/yaml.v3 v3.0.1 ) diff --git a/quesma/go.sum b/quesma/go.sum index 4737b5552..774e688c0 100644 --- a/quesma/go.sum +++ b/quesma/go.sum @@ -1,7 +1,7 @@ github.com/ClickHouse/ch-go v0.61.5 h1:zwR8QbYI0tsMiEcze/uIMK+Tz1D3XZXLdNrlaOpeEI4= github.com/ClickHouse/ch-go v0.61.5/go.mod h1:s1LJW/F/LcFs5HJnuogFMta50kKDO0lf9zzfrbl0RQg= -github.com/ClickHouse/clickhouse-go/v2 v2.23.2 h1:+DAKPMnxLS7pduQZsrJc8OhdLS2L9MfDEJ2TS+hpYDM= -github.com/ClickHouse/clickhouse-go/v2 v2.23.2/go.mod h1:aNap51J1OM3yxQJRgM+AlP/MPkGBCL8A74uQThoQhR0= +github.com/ClickHouse/clickhouse-go/v2 v2.24.0 h1:L/n/pVVpk95KtkHOiKuSnO7cu2ckeW4gICbbOh5qs74= +github.com/ClickHouse/clickhouse-go/v2 v2.24.0/go.mod h1:iDTViXk2Fgvf1jn2dbJd1ys+fBkdD1UMRnXlwmhijhQ= github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU= github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU= github.com/DataDog/go-sqllexer 
v0.0.11 h1:OfPBjmayreblOXreszbrOTICNZ3qWrA6Bg4sypvxpbw= @@ -142,10 +142,10 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= go.mongodb.org/mongo-driver v1.11.4/go.mod h1:PTSz5yu21bkT/wXpkS7WR5f0ddqw5quethTUn9WM+2g= -go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo= -go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo= -go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI= -go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= +go.opentelemetry.io/otel v1.26.0 h1:LQwgL5s/1W7YiiRwxf03QGnWLb2HW4pLiAhaA5cZXBs= +go.opentelemetry.io/otel v1.26.0/go.mod h1:UmLkJHUAidDval2EICqBMbnAd0/m2vmpf/dAM+fvFs4= +go.opentelemetry.io/otel/trace v1.26.0 h1:1ieeAUb4y0TE26jUFrCIXKpTuVK7uJGN9/Z/2LP5sQA= +go.opentelemetry.io/otel/trace v1.26.0/go.mod h1:4iDxvGDQuUkHve82hJJ8UqrwswHYsZuWCBllGV2U2y0= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= From db5fd9d27da2872bd8d69ec73f09b4ee9a68cb98 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Strzali=C5=84ski?= Date: Thu, 9 May 2024 12:08:18 +0200 Subject: [PATCH 08/14] EQL - end-to-end tests (#50) There are End 2 End tests here. EQL queries are run against Quesma and Elastic. Results are compared. It's not a final solution. Will continue in the next PR. 
--- .../schema/02-windows_logs.sql | 39 +++ quesma/eql/e2e/end2end_test.go | 262 ++++++++++++++++++ quesma/eql/e2e/ingest_test.go | 113 ++++++++ quesma/eql/e2e/query_test.go | 118 ++++++++ quesma/eql/parser/EQL.g4 | 7 +- quesma/eql/parser/eql_base_listener.go | 24 +- quesma/eql/parser/eql_base_visitor.go | 6 +- quesma/eql/parser/eql_listener.go | 24 +- quesma/eql/parser/eql_parser.go | 219 +++++++++------ quesma/eql/parser/eql_visitor.go | 12 +- quesma/eql/query_translator.go | 23 +- quesma/eql/transform/eql2exp.go | 32 ++- quesma/eql/transform_test.go | 16 +- 13 files changed, 748 insertions(+), 147 deletions(-) create mode 100644 docker/clean-clickhouse/schema/02-windows_logs.sql create mode 100644 quesma/eql/e2e/end2end_test.go create mode 100644 quesma/eql/e2e/ingest_test.go create mode 100644 quesma/eql/e2e/query_test.go diff --git a/docker/clean-clickhouse/schema/02-windows_logs.sql b/docker/clean-clickhouse/schema/02-windows_logs.sql new file mode 100644 index 000000000..850d0d025 --- /dev/null +++ b/docker/clean-clickhouse/schema/02-windows_logs.sql @@ -0,0 +1,39 @@ +CREATE TABLE IF NOT EXISTS "windows_logs" +( + "attributes_string_key" Array(String), + "attributes_string_value" Array(String), + + "@timestamp" DateTime64 DEFAULT now64(), + + "event::category" Nullable(String), + "event::type" Nullable(String), + + "dll::name" Nullable(String), + "dll::path" Nullable(String), + + "registry::path" Nullable(String), + "registry::value" Nullable(String), + "registry::key" Nullable(String), + + "destination::address" Nullable(String), + "destination::port" Nullable(String), + + "network::protocol" Nullable(String), + "network::direction" Nullable(String), + + "source::address" Nullable(String), + "source::port" Nullable(String), + + "process::pid" Nullable(Int64), + "process::entity_id" Nullable(String), + "process::executable" Nullable(String), + "process::name" Nullable(String), + + "user::id" Nullable(String), + "user::domain" Nullable(String), + 
"user::full_name" Nullable(String), + +) +ENGINE = MergeTree +ORDER BY ("@timestamp") +COMMENT 'Windows Security Logs. Created by clean-clickhouse.' \ No newline at end of file diff --git a/quesma/eql/e2e/end2end_test.go b/quesma/eql/e2e/end2end_test.go new file mode 100644 index 000000000..fcbc723eb --- /dev/null +++ b/quesma/eql/e2e/end2end_test.go @@ -0,0 +1,262 @@ +package e2e + +import ( + "fmt" + "strings" + "testing" + "time" +) + +// Tests are disabled by default. To enable them, set the condition to false. +// Tests requires Quesma and Elastic to be running. +// Queries are run against both Quesma and Elastic and results are compared. + +var runTests = false + +func TestE2E(t *testing.T) { + + if !runTests { + t.Skip("Tests are disabled. To enable them, set the condition to false.") + return + } + + // These are the queries that are run against both Quesma and Elastic. + // Queries start with a "--" are skipped. + var eqlQueries = []string{ + `any where false`, + `not_existing where true`, + "process where true", + "process where process.pid == 1", + "process where process.pid > 0", + "process where process.pid >= 0", + "process where process.pid < 2", + "process where process.pid <= 2", + `process where process.pid == 1 + 1 - 1 `, + `process where process.pid == 2 / 2`, + `process where process.pid == 3 % 2`, + `process where process.pid == 2 * 3 / 6`, + `-- process where process.pid < 4.0 / 2`, // TODO add floats + `process where not false`, + `process where not (event.type == "start")`, + `process where process.pid == 1 and event.type == "start"`, + `process where event.type : "start"`, + `process where event.type : "st*"`, + `process where event.type : ("start", "stop")`, + `process where process.pid == 1 and event.type like "st*"`, + `-- process where process.pid == 1 and event.type like "st%"`, // FIXME this is a bug, we should escape % in like + `process where process.name like~ "test"`, + `process where process.name like ("test", "test2")`, + 
`process where event.type in ("start", "stop")`, + `process where event.type in~ ("STaRT", "StOP")`, + `process where event.type not in ("start", "stop")`, + `-- process where event.type not in~ ("STaRT", "StOP")`, // FIXME THIS IS A BUG, quesma returned: 3 but elastic returned: 1 + + `process where process.name != string(1)`, + `process where process.name == null`, + + // FIXME elastic returns: error calling elastic: Unexpected status code: 400, 400 Bad Request + // {"error":{"root_cause":[{"type":"verification_exception","reason":"Found 1 problem\nline 1:25: Unknown column [ddl.name]"}],"type":"verification_exception","reason":"Found 1 problem\nline 1:25: Unknown column [ddl.name]"},"status":400} + `-- process where ddl.name != null`, + + `process where process.name regex "T.*"`, + `process where process.name regex~ "t.*"`, + + `process where process.name : "*est"`, + `process where process.name : "T*t"`, + `process where process.name : "Te*"`, + + `process where process.name like "Te"`, + `process where process.name like "T*t"`, + + `-- process where process.name : "_est"`, //FIXME we should escape _ in like, quesma returned: 3 but elastic returned: 0 + `-- process where process.name : "Te_t"`, // FIXME quesma returned: 3 but elastic returned: 0 + `process where process.name : "Te_"`, + + `-- process where process.name : "?est"`, // FIXME support ? 
wildcard , quesma retured: 0 but elastic returned: 3 + `-- process where process.name : "Te?t"`, + `process where process.name : "Te?"`, + + `process where process.pid == add(0,1)`, + `-- process where process.pid == add(-2,3)`, // FIXME this is a bug, we should support negative numbers + `-- process where process.pid == add(-2,3)`, + + // FIXME this is an elastic limitation + // elastic fail response: {"error":{"root_cause":[{"type":"ql_illegal_argument_exception","reason":"Line 1:40: Comparisons against fields are not (currently) supported; offender [add(process.pid,0)] in [==]"}],"type":"ql_illegal_argument_exception","reason":"Line 1:40: Comparisons against fields are not (currently) supported; offender [add(process.pid,0)] in [==]"},"status":500} + `-- process where process.pid == add(process.pid,0)`, + + `process where add(null, 1) == null`, + + // FIXME Comparisons against fields are not (currently) supported; offender + `-- process where process.pid == add(process.pid, null)`, + + `-- process where between(process.name, "T", "t") == "es"`, + + // FIXME add IP fields to the test data, first argument of [cidrMatch(\"127.0.0.1\", \"127.0.0.0/24\")] must be [ip], found value [\"127.0.0.1\"] type [keyword]"}] + `-- process where cidrMatch("127.0.0.1", "127.0.0.0/24")`, + + `-- process where cidrMatch(null, "127.0.0.1/24") == null`, // FIXME this is a bug, quesma returned 0 here + + `process where concat ("a", "b") == "ab"`, + `process where concat ("a", "b", "c") == "abc"`, + `process where concat (process.name, "1234") == "Test1234"`, + `process where concat (process.name, 1234) == "Test1234"`, + `process where concat ("a") == "a"`, + + `process where concat (null, "a") == null`, + `process where concat ("a", null) == null`, + `process where concat (null) == null`, + + `process where divide(4, 2) == 2`, + `-- process where divide(4, 3) == 1`, // FIXME this is a bug, Quesma returned 0 here + `-- process where divide(1.0, 2.0) == 0.5`, // FIXME this is a bug, 
float are not supported + + `process where divide(null,2) == null`, + `process where divide(2,null) == null`, + + `process where endsWith("quesma.exe", ".exe")`, + `process where endsWith("quesma.exe", ".EXE")`, + `process where endsWith("quesma.exe", "EXE")`, + + `process where endsWith~("quesma.exe", "EXE")`, + + `process where endsWith(null, ".exe") == null`, + `process where endsWith("quesma.exe", null) == null`, + + `-- process where indexOf("quesma.exe", "ue") == 1`, // FIXME this is bug in quesma + `-- process where indexOf("quesma.exe", "UE") == null`, // FIXME this is bug in quesma + `-- process where indexOf~("quesma.exe", "UE") == 1`, // FIXME this is bug in quesma + `-- process where indexOf("", "") == 0`, // FIXME this is bug in quesma + `-- process where indexOf("quesma.exe", "") == 0`, // FIXME this is bug in quesma + `-- process where indexOf("a.b.c", ".") == 1`, // FIXME this is a bug in quesma + + `process where indexOf(null, "UE") == null`, + `process where indexOf("Q", null) == null`, + + `process where length("quesma.exe") == 10`, + `process where length("") == 0`, + `process where length(null) == null`, + + `process where modulo(10, 3) == 1`, + + `process where multiply(2, 2) == 4`, + `process where multiply(null, 2) == null`, + `process where multiply(2, null) == null`, + + `-- process where number("1234") == 1234`, // FIXME this is a bug in quesma it's false + `-- process where number("1234.5") == 1234.5`, // FIXME float + `-- process where number("-1234.5") == -1234.5`, // FIXME + `-- process where number("f", 16) == 15`, // FIXME 2nd argument is base + `-- process where number("0x1", null) == 1`, // FIXME 2nd argument is base + `-- process where number(null) == null`, // FIXME it's false in quesma + `-- process where number(null, 16) == null`, // FIXME 2nd argument is base + + `process where startsWith("quesma.exe", "quesma")`, + `process where startsWith("quesma.exe", "QUESMA")`, + `process where startsWith~("quesma.exe", "QUESMA")`, + 
`process where startsWith("", "")`, + `process where startsWith(null, "quesma") == null`, + `process where startsWith("quesma.exe", null) == null`, + `process where startsWith("null", "null") == null`, + + `process where string(1) == "1"`, + `process where string(null) == null`, + `process where string(true) == "true"`, + `process where string("foo") == "foo"`, + + `process where stringContains("quesma.exe", "quesma")`, + `process where stringContains("quesma.exe", "QUESMA")`, + `process where stringContains~("quesma.exe", "QUESMA")`, + `-- process where stringContains("", "")`, // FIXME this is a bug, quesma returned true here + `process where stringContains(null, "quesma") == null`, + + `-- process where substring("quesma.exe", 1) == "uesma.exe"`, // FIXME this is a bug, quesma returned false here + `process where substring("quesma.exe", 1, 2) == "ue"`, + `-- process where substring("quesma.exe", 1, 100) == "uesma.exe"`, // FIXME this is a bug, quesma returned false here + `process where substring("quesma.exe", 1, 0) == ""`, + `-- process where substring("quesma.exe", -4) == ".exe"`, // FIXME this is a bug, quesma returned error here + `-- process where substring("quesma.exe", -4, -1) == ".ex"`, // FIXME this is a bug, quesma returned error here + + `process where subtract(10, 2) == 8`, + `process where subtract(null, 2) == null`, + `process where subtract(2, null) == null`, + + `process where ?not_existing == null`, // FIXME this is a bug, optional fields are not supported yet + } + + // This our category name. Each test runs in a separate category. + // So we can run multiple tests without need to clean up the data. 
+ categoryName := fmt.Sprintf("test%d", time.Now().UnixMilli()) + + setup(categoryName) + + fmt.Println("Waiting for data to be indexed...") + time.Sleep(5 * time.Second) + + for _, eqlQuery := range eqlQueries { + t.Run(eqlQuery, func(tt *testing.T) { + + if strings.HasPrefix(eqlQuery, "--") { + return + } + fmt.Println("Running test for query:", eqlQuery) + + // here we replace given category name with the actual category name + if strings.HasPrefix(eqlQuery, "process") { + eqlQuery = categoryName + eqlQuery[len("process"):] + } + + testQuery(tt, eqlQuery) + }) + } +} + +func testQuery(t *testing.T, eqlQuery string) { + + fmt.Println("Rewritten query:", eqlQuery) + + fmt.Println("Calling Elastic...") + elasticEvents, err := eqlClient(elasticUrl, eqlQuery) + if err != nil { + t.Fatalf("error calling elastic: %v", err) + return + } + + fmt.Println("Calling Quesma...") + quesmaEvents, err := eqlClient(quesmaUrl, eqlQuery) + if err != nil { + t.Fatalf("error calling quesma: %v", err) + return + } + + if len(quesmaEvents) != len(elasticEvents) { + t.Fatalf("different number of events: quesma retured: %v but elastic returned: %v", len(quesmaEvents), len(elasticEvents)) + } + + fmt.Println("Quesma events:", quesmaEvents) + fmt.Println("Elastic events:", elasticEvents) + + for i := range len(quesmaEvents) { + quesmaEvent := quesmaEvents[i] + elasticEvent := elasticEvents[i] + + compareMap(t, i, quesmaEvent, elasticEvent) + } +} + +func compareMap(t *testing.T, evenNo int, quesma eqlEvent, elastic eqlEvent) { + + // TODO compare number of keys + + for k, v := range elastic { + + if k == "@timestamp" { + continue //FIXME compare timestamps + } + + if quesma[k] != v { + t.Errorf("eventNo: %d - different values for key %v: quesma: '%v' != elastic: '%v'", evenNo, k, quesma[k], v) + } else { + fmt.Printf("eventNo: %d - same values for key %v: quesma: '%v' == elastic: '%v'\n", evenNo, k, quesma[k], v) + } + } +} diff --git a/quesma/eql/e2e/ingest_test.go 
b/quesma/eql/e2e/ingest_test.go new file mode 100644 index 000000000..01a88dda8 --- /dev/null +++ b/quesma/eql/e2e/ingest_test.go @@ -0,0 +1,113 @@ +package e2e + +import ( + "bytes" + "encoding/json" + "fmt" + "log" + "net/http" + "time" +) + +const quesmaUrl = "http://localhost:8080" +const elasticUrl = "http://localhost:9201" + +type logEntry struct { + Process struct { + Name string `json:"name"` + Pid int `json:"pid"` + EntityID string `json:"entity_id"` + Executable string `json:"executable"` + } `json:"process"` + Timestamp string `json:"@timestamp"` + Event struct { + Category string `json:"category"` + Type string `json:"type"` + } `json:"event"` +} + +func someLogEntry(ts time.Time) logEntry { + + var entry logEntry + + entry.Event.Category = "process" + entry.Event.Type = "start" + + entry.Timestamp = ts.Format(time.RFC3339) + + entry.Process.Name = "Test" + entry.Process.Executable = "Test" + entry.Process.Pid = 1 + entry.Process.EntityID = "1" + + return entry +} + +func toBulk(entry logEntry) (logBytes []byte) { + + const windowsBulkJson = `{"create":{"_index":"windows_logs"}}` + + serialized, err := json.Marshal(entry) + if err != nil { + log.Println(err) + } + + logBytes = append(logBytes, []byte(windowsBulkJson)...) + logBytes = append(logBytes, []byte("\n")...) + logBytes = append(logBytes, serialized...) + logBytes = append(logBytes, []byte("\n")...) 
+ return logBytes + +} + +func sendLogEntryTo(targetUrl string, logBytes []byte) { + + if resp, err := http.Post(targetUrl+"/_bulk", "application/json", bytes.NewBuffer(logBytes)); err != nil { + log.Printf("Failed to send windows logs: %v", err) + } else { + fmt.Printf("Sent windows_logs to %s response=%s\n", targetUrl, resp.Status) + if err := resp.Body.Close(); err != nil { + log.Fatal(err) + } + } +} + +func sendLogEntry(logBytes []byte) { + sendLogEntryTo(quesmaUrl, logBytes) + sendLogEntryTo(elasticUrl, logBytes) +} + +func setup(categoryName string) { + // setup + + // these events are used to test the queries + { + entry := someLogEntry(time.Unix(0, 0)) + entry.Event.Category = categoryName + entry.Event.Type = "start" + entry.Process.Pid = 1 + entry.Process.EntityID = "1" + logBytes := toBulk(entry) + sendLogEntry(logBytes) + } + + { + entry := someLogEntry(time.Unix(1, 0)) + entry.Event.Category = categoryName + entry.Process.Pid = 1 + entry.Event.Type = "stop" + entry.Process.EntityID = "1" + logBytes2 := toBulk(entry) + sendLogEntry(logBytes2) + } + + { + entry := someLogEntry(time.Unix(2, 0)) + entry.Event.Category = categoryName + entry.Process.Pid = 1 + entry.Event.Type = "crash" + entry.Process.EntityID = "1" + logBytes2 := toBulk(entry) + sendLogEntry(logBytes2) + } +} diff --git a/quesma/eql/e2e/query_test.go b/quesma/eql/e2e/query_test.go new file mode 100644 index 000000000..9d99e3c06 --- /dev/null +++ b/quesma/eql/e2e/query_test.go @@ -0,0 +1,118 @@ +package e2e + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "mitmproxy/quesma/jsonprocessor" + "net/http" + "sort" + "strings" +) + +// + +type eqlEvent map[string]interface{} + +func eqlClient(target string, eqlQuery string) ([]eqlEvent, error) { + + type elasticQuery struct { + Query string `json:"query"` + } + + query := elasticQuery{Query: eqlQuery} + data, err := json.Marshal(query) + if err != nil { + return nil, err + } + reader := bytes.NewReader(data) + + url := target + 
"/windows_logs/_eql/search" + + // We're calling GET method here with body. + // This is oddity. Golang http client does not support sending body with GET method. + + req, err := http.NewRequest(http.MethodGet, url, reader) + if err != nil { + panic(err) + } + req.Header.Set("Content-Type", "application/json") + res, err := http.DefaultClient.Do(req) + + if err != nil { + return nil, err + } + + defer res.Body.Close() + + response, err := io.ReadAll(res.Body) + if err != nil { + return nil, err + } + + if res.StatusCode != http.StatusOK { + fmt.Println("response", string(response)) + return nil, fmt.Errorf("Unexpected status code: %v, %v", res.StatusCode, res.Status) + } + + return extractListOfEvents(string(response)) +} + +func parseResponse(response string) (map[string]interface{}, error) { + + var result map[string]interface{} + err := json.Unmarshal([]byte(response), &result) + if err != nil { + return nil, err + } + return result, nil +} + +func extractListOfEvents(response string) ([]eqlEvent, error) { + + var res []eqlEvent + + parsed, err := parseResponse(response) + if err != nil { + return nil, err + } + hits, ok := parsed["hits"] + if !ok { + return nil, fmt.Errorf("missing hits in response") + } + + events, ok := hits.(map[string]interface{})["events"] + if !ok { + fmt.Println("missing events in hits") + // FIXME this is a bug + // quesma omits empty events array + //return nil, fmt.Errorf("missing events in hits") + events = []interface{}{} + } + + for i, event := range events.([]interface{}) { + + m := event.(map[string]interface{}) + + source, ok := m["_source"] + if !ok { + return nil, fmt.Errorf("missing source in event") + } + + sourceAsMap, ok := source.(map[string]interface{}) + if !ok { + return nil, fmt.Errorf("source is not a map") + } + sourceAsMap = jsonprocessor.FlattenMap(sourceAsMap, "::") + + fmt.Println("event", i, sourceAsMap) + res = append(res, sourceAsMap) + } + + // not sure if it is necessary + sort.Slice(res, func(i, j int) 
bool { + return strings.Compare(res[i]["@timestamp"].(string), res[j]["@timestamp"].(string)) < 0 + }) + return res, nil +} diff --git a/quesma/eql/parser/EQL.g4 b/quesma/eql/parser/EQL.g4 index 08fdf8efd..d7f08d82a 100644 --- a/quesma/eql/parser/EQL.g4 +++ b/quesma/eql/parser/EQL.g4 @@ -20,10 +20,9 @@ sampleQuery: 'sample' 'by' fieldList condition: BOOLEAN #ConditionBoolean | 'not' condition #ConditionNot | '(' condition ')' #ConditionGroup - | field op=('==' | '!=' | '>' | '<' | '>=' | '<=' | ':' | 'like' | 'like~' | 'regex' | 'regex~') value #ConditionOp - | field op=( ':' | 'in' | 'in~' | 'like' | 'like~' | 'regex' | 'regex~') list=literalList #ConditionOpList -// FIXME This rule should part of the rule above. Not sure how to do it. - | field 'not' ('in' | 'in~') list=literalList #ConditionNotIn + | left=value op=('==' | '!=' | '>' | '<' | '>=' | '<=' | ':' | 'like' | 'like~' | 'regex' | 'regex~' ) right=value #ComparisonOp + | field 'not' op=('in' | 'in~') list=literalList #LookupNotOpList + | field op=(':' | 'in' | 'in~' | 'like' | 'like~' | 'regex' | 'regex~') list=literalList #LookupOpList | left=condition op=('and' | 'or') right=condition #ConditionLogicalOp | funcall #ConditionFuncall | 'not' funcall #ConditionNotFuncall diff --git a/quesma/eql/parser/eql_base_listener.go b/quesma/eql/parser/eql_base_listener.go index 1f4ef91e1..1cf6e7978 100644 --- a/quesma/eql/parser/eql_base_listener.go +++ b/quesma/eql/parser/eql_base_listener.go @@ -44,17 +44,17 @@ func (s *BaseEQLListener) EnterSampleQuery(ctx *SampleQueryContext) {} // ExitSampleQuery is called when production sampleQuery is exited. func (s *BaseEQLListener) ExitSampleQuery(ctx *SampleQueryContext) {} -// EnterConditionOp is called when production ConditionOp is entered. -func (s *BaseEQLListener) EnterConditionOp(ctx *ConditionOpContext) {} +// EnterLookupOpList is called when production LookupOpList is entered. 
+func (s *BaseEQLListener) EnterLookupOpList(ctx *LookupOpListContext) {} -// ExitConditionOp is called when production ConditionOp is exited. -func (s *BaseEQLListener) ExitConditionOp(ctx *ConditionOpContext) {} +// ExitLookupOpList is called when production LookupOpList is exited. +func (s *BaseEQLListener) ExitLookupOpList(ctx *LookupOpListContext) {} -// EnterConditionOpList is called when production ConditionOpList is entered. -func (s *BaseEQLListener) EnterConditionOpList(ctx *ConditionOpListContext) {} +// EnterComparisonOp is called when production ComparisonOp is entered. +func (s *BaseEQLListener) EnterComparisonOp(ctx *ComparisonOpContext) {} -// ExitConditionOpList is called when production ConditionOpList is exited. -func (s *BaseEQLListener) ExitConditionOpList(ctx *ConditionOpListContext) {} +// ExitComparisonOp is called when production ComparisonOp is exited. +func (s *BaseEQLListener) ExitComparisonOp(ctx *ComparisonOpContext) {} // EnterConditionNotFuncall is called when production ConditionNotFuncall is entered. func (s *BaseEQLListener) EnterConditionNotFuncall(ctx *ConditionNotFuncallContext) {} @@ -74,11 +74,11 @@ func (s *BaseEQLListener) EnterConditionNot(ctx *ConditionNotContext) {} // ExitConditionNot is called when production ConditionNot is exited. func (s *BaseEQLListener) ExitConditionNot(ctx *ConditionNotContext) {} -// EnterConditionNotIn is called when production ConditionNotIn is entered. -func (s *BaseEQLListener) EnterConditionNotIn(ctx *ConditionNotInContext) {} +// EnterLookupNotOpList is called when production LookupNotOpList is entered. +func (s *BaseEQLListener) EnterLookupNotOpList(ctx *LookupNotOpListContext) {} -// ExitConditionNotIn is called when production ConditionNotIn is exited. -func (s *BaseEQLListener) ExitConditionNotIn(ctx *ConditionNotInContext) {} +// ExitLookupNotOpList is called when production LookupNotOpList is exited. 
+func (s *BaseEQLListener) ExitLookupNotOpList(ctx *LookupNotOpListContext) {} // EnterConditionLogicalOp is called when production ConditionLogicalOp is entered. func (s *BaseEQLListener) EnterConditionLogicalOp(ctx *ConditionLogicalOpContext) {} diff --git a/quesma/eql/parser/eql_base_visitor.go b/quesma/eql/parser/eql_base_visitor.go index 32c6dd28b..c5d389649 100644 --- a/quesma/eql/parser/eql_base_visitor.go +++ b/quesma/eql/parser/eql_base_visitor.go @@ -23,11 +23,11 @@ func (v *BaseEQLVisitor) VisitSampleQuery(ctx *SampleQueryContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseEQLVisitor) VisitConditionOp(ctx *ConditionOpContext) interface{} { +func (v *BaseEQLVisitor) VisitLookupOpList(ctx *LookupOpListContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseEQLVisitor) VisitConditionOpList(ctx *ConditionOpListContext) interface{} { +func (v *BaseEQLVisitor) VisitComparisonOp(ctx *ComparisonOpContext) interface{} { return v.VisitChildren(ctx) } @@ -43,7 +43,7 @@ func (v *BaseEQLVisitor) VisitConditionNot(ctx *ConditionNotContext) interface{} return v.VisitChildren(ctx) } -func (v *BaseEQLVisitor) VisitConditionNotIn(ctx *ConditionNotInContext) interface{} { +func (v *BaseEQLVisitor) VisitLookupNotOpList(ctx *LookupNotOpListContext) interface{} { return v.VisitChildren(ctx) } diff --git a/quesma/eql/parser/eql_listener.go b/quesma/eql/parser/eql_listener.go index e27e99106..4aa487e37 100644 --- a/quesma/eql/parser/eql_listener.go +++ b/quesma/eql/parser/eql_listener.go @@ -19,11 +19,11 @@ type EQLListener interface { // EnterSampleQuery is called when entering the sampleQuery production. EnterSampleQuery(c *SampleQueryContext) - // EnterConditionOp is called when entering the ConditionOp production. - EnterConditionOp(c *ConditionOpContext) + // EnterLookupOpList is called when entering the LookupOpList production. 
+ EnterLookupOpList(c *LookupOpListContext) - // EnterConditionOpList is called when entering the ConditionOpList production. - EnterConditionOpList(c *ConditionOpListContext) + // EnterComparisonOp is called when entering the ComparisonOp production. + EnterComparisonOp(c *ComparisonOpContext) // EnterConditionNotFuncall is called when entering the ConditionNotFuncall production. EnterConditionNotFuncall(c *ConditionNotFuncallContext) @@ -34,8 +34,8 @@ type EQLListener interface { // EnterConditionNot is called when entering the ConditionNot production. EnterConditionNot(c *ConditionNotContext) - // EnterConditionNotIn is called when entering the ConditionNotIn production. - EnterConditionNotIn(c *ConditionNotInContext) + // EnterLookupNotOpList is called when entering the LookupNotOpList production. + EnterLookupNotOpList(c *LookupNotOpListContext) // EnterConditionLogicalOp is called when entering the ConditionLogicalOp production. EnterConditionLogicalOp(c *ConditionLogicalOpContext) @@ -103,11 +103,11 @@ type EQLListener interface { // ExitSampleQuery is called when exiting the sampleQuery production. ExitSampleQuery(c *SampleQueryContext) - // ExitConditionOp is called when exiting the ConditionOp production. - ExitConditionOp(c *ConditionOpContext) + // ExitLookupOpList is called when exiting the LookupOpList production. + ExitLookupOpList(c *LookupOpListContext) - // ExitConditionOpList is called when exiting the ConditionOpList production. - ExitConditionOpList(c *ConditionOpListContext) + // ExitComparisonOp is called when exiting the ComparisonOp production. + ExitComparisonOp(c *ComparisonOpContext) // ExitConditionNotFuncall is called when exiting the ConditionNotFuncall production. ExitConditionNotFuncall(c *ConditionNotFuncallContext) @@ -118,8 +118,8 @@ type EQLListener interface { // ExitConditionNot is called when exiting the ConditionNot production. 
ExitConditionNot(c *ConditionNotContext) - // ExitConditionNotIn is called when exiting the ConditionNotIn production. - ExitConditionNotIn(c *ConditionNotInContext) + // ExitLookupNotOpList is called when exiting the LookupNotOpList production. + ExitLookupNotOpList(c *LookupNotOpListContext) // ExitConditionLogicalOp is called when exiting the ConditionLogicalOp production. ExitConditionLogicalOp(c *ConditionLogicalOpContext) diff --git a/quesma/eql/parser/eql_parser.go b/quesma/eql/parser/eql_parser.go index 7a2e9604e..2877d12b8 100644 --- a/quesma/eql/parser/eql_parser.go +++ b/quesma/eql/parser/eql_parser.go @@ -69,7 +69,7 @@ func eqlParserInit() { 1, 11, 5, 11, 159, 8, 11, 10, 11, 12, 11, 162, 9, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 3, 12, 169, 8, 12, 1, 13, 1, 13, 1, 13, 0, 2, 8, 20, 14, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 0, 8, 1, 0, 13, - 23, 1, 0, 19, 25, 1, 0, 24, 25, 1, 0, 26, 27, 3, 0, 36, 36, 43, 43, 45, + 23, 1, 0, 24, 25, 1, 0, 19, 25, 1, 0, 26, 27, 3, 0, 36, 36, 43, 43, 45, 45, 3, 0, 39, 39, 41, 41, 43, 43, 1, 0, 30, 32, 1, 0, 33, 34, 183, 0, 31, 1, 0, 0, 0, 2, 35, 1, 0, 0, 0, 4, 39, 1, 0, 0, 0, 6, 61, 1, 0, 0, 0, 8, 96, 1, 0, 0, 0, 10, 106, 1, 0, 0, 0, 12, 108, 1, 0, 0, 0, 14, 110, 1, 0, @@ -91,13 +91,13 @@ func eqlParserInit() { 1, 0, 0, 0, 70, 68, 1, 0, 0, 0, 70, 71, 1, 0, 0, 0, 71, 7, 1, 0, 0, 0, 72, 73, 6, 4, -1, 0, 73, 97, 5, 39, 0, 0, 74, 75, 5, 10, 0, 0, 75, 97, 3, 8, 4, 8, 76, 77, 5, 11, 0, 0, 77, 78, 3, 8, 4, 0, 78, 79, 5, 12, 0, - 0, 79, 97, 1, 0, 0, 0, 80, 81, 3, 12, 6, 0, 81, 82, 7, 0, 0, 0, 82, 83, - 3, 20, 10, 0, 83, 97, 1, 0, 0, 0, 84, 85, 3, 12, 6, 0, 85, 86, 7, 1, 0, - 0, 86, 87, 3, 18, 9, 0, 87, 97, 1, 0, 0, 0, 88, 89, 3, 12, 6, 0, 89, 90, - 5, 10, 0, 0, 90, 91, 7, 2, 0, 0, 91, 92, 3, 18, 9, 0, 92, 97, 1, 0, 0, + 0, 79, 97, 1, 0, 0, 0, 80, 81, 3, 20, 10, 0, 81, 82, 7, 0, 0, 0, 82, 83, + 3, 20, 10, 0, 83, 97, 1, 0, 0, 0, 84, 85, 3, 12, 6, 0, 85, 86, 5, 10, 0, + 0, 86, 87, 7, 1, 0, 0, 87, 88, 3, 18, 9, 0, 88, 97, 
1, 0, 0, 0, 89, 90, + 3, 12, 6, 0, 90, 91, 7, 2, 0, 0, 91, 92, 3, 18, 9, 0, 92, 97, 1, 0, 0, 0, 93, 97, 3, 22, 11, 0, 94, 95, 5, 10, 0, 0, 95, 97, 3, 22, 11, 0, 96, 72, 1, 0, 0, 0, 96, 74, 1, 0, 0, 0, 96, 76, 1, 0, 0, 0, 96, 80, 1, 0, 0, - 0, 96, 84, 1, 0, 0, 0, 96, 88, 1, 0, 0, 0, 96, 93, 1, 0, 0, 0, 96, 94, + 0, 96, 84, 1, 0, 0, 0, 96, 89, 1, 0, 0, 0, 96, 93, 1, 0, 0, 0, 96, 94, 1, 0, 0, 0, 97, 103, 1, 0, 0, 0, 98, 99, 10, 3, 0, 0, 99, 100, 7, 3, 0, 0, 100, 102, 3, 8, 4, 4, 101, 98, 1, 0, 0, 0, 102, 105, 1, 0, 0, 0, 103, 101, 1, 0, 0, 0, 103, 104, 1, 0, 0, 0, 104, 9, 1, 0, 0, 0, 105, 103, 1, @@ -1155,13 +1155,14 @@ func (s *ConditionContext) ToStringTree(ruleNames []string, recog antlr.Recogniz return antlr.TreesStringTree(s, ruleNames, recog) } -type ConditionOpContext struct { +type LookupOpListContext struct { ConditionContext - op antlr.Token + op antlr.Token + list ILiteralListContext } -func NewConditionOpContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ConditionOpContext { - var p = new(ConditionOpContext) +func NewLookupOpListContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *LookupOpListContext { + var p = new(LookupOpListContext) InitEmptyConditionContext(&p.ConditionContext) p.parser = parser @@ -1170,15 +1171,19 @@ func NewConditionOpContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Co return p } -func (s *ConditionOpContext) GetOp() antlr.Token { return s.op } +func (s *LookupOpListContext) GetOp() antlr.Token { return s.op } + +func (s *LookupOpListContext) SetOp(v antlr.Token) { s.op = v } -func (s *ConditionOpContext) SetOp(v antlr.Token) { s.op = v } +func (s *LookupOpListContext) GetList() ILiteralListContext { return s.list } -func (s *ConditionOpContext) GetRuleContext() antlr.RuleContext { +func (s *LookupOpListContext) SetList(v ILiteralListContext) { s.list = v } + +func (s *LookupOpListContext) GetRuleContext() antlr.RuleContext { return s } -func (s *ConditionOpContext) Field() IFieldContext { +func (s 
*LookupOpListContext) Field() IFieldContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { if _, ok := ctx.(IFieldContext); ok { @@ -1194,10 +1199,10 @@ func (s *ConditionOpContext) Field() IFieldContext { return t.(IFieldContext) } -func (s *ConditionOpContext) Value() IValueContext { +func (s *LookupOpListContext) LiteralList() ILiteralListContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IValueContext); ok { + if _, ok := ctx.(ILiteralListContext); ok { t = ctx.(antlr.RuleContext) break } @@ -1207,39 +1212,40 @@ func (s *ConditionOpContext) Value() IValueContext { return nil } - return t.(IValueContext) + return t.(ILiteralListContext) } -func (s *ConditionOpContext) EnterRule(listener antlr.ParseTreeListener) { +func (s *LookupOpListContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(EQLListener); ok { - listenerT.EnterConditionOp(s) + listenerT.EnterLookupOpList(s) } } -func (s *ConditionOpContext) ExitRule(listener antlr.ParseTreeListener) { +func (s *LookupOpListContext) ExitRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(EQLListener); ok { - listenerT.ExitConditionOp(s) + listenerT.ExitLookupOpList(s) } } -func (s *ConditionOpContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *LookupOpListContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case EQLVisitor: - return t.VisitConditionOp(s) + return t.VisitLookupOpList(s) default: return t.VisitChildren(s) } } -type ConditionOpListContext struct { +type ComparisonOpContext struct { ConditionContext - op antlr.Token - list ILiteralListContext + left IValueContext + op antlr.Token + right IValueContext } -func NewConditionOpListContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ConditionOpListContext { - var p = new(ConditionOpListContext) +func NewComparisonOpContext(parser antlr.Parser, ctx antlr.ParserRuleContext) 
*ComparisonOpContext { + var p = new(ComparisonOpContext) InitEmptyConditionContext(&p.ConditionContext) p.parser = parser @@ -1248,40 +1254,53 @@ func NewConditionOpListContext(parser antlr.Parser, ctx antlr.ParserRuleContext) return p } -func (s *ConditionOpListContext) GetOp() antlr.Token { return s.op } +func (s *ComparisonOpContext) GetOp() antlr.Token { return s.op } + +func (s *ComparisonOpContext) SetOp(v antlr.Token) { s.op = v } -func (s *ConditionOpListContext) SetOp(v antlr.Token) { s.op = v } +func (s *ComparisonOpContext) GetLeft() IValueContext { return s.left } -func (s *ConditionOpListContext) GetList() ILiteralListContext { return s.list } +func (s *ComparisonOpContext) GetRight() IValueContext { return s.right } -func (s *ConditionOpListContext) SetList(v ILiteralListContext) { s.list = v } +func (s *ComparisonOpContext) SetLeft(v IValueContext) { s.left = v } -func (s *ConditionOpListContext) GetRuleContext() antlr.RuleContext { +func (s *ComparisonOpContext) SetRight(v IValueContext) { s.right = v } + +func (s *ComparisonOpContext) GetRuleContext() antlr.RuleContext { return s } -func (s *ConditionOpListContext) Field() IFieldContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IFieldContext); ok { - t = ctx.(antlr.RuleContext) - break +func (s *ComparisonOpContext) AllValue() []IValueContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IValueContext); ok { + len++ } } - if t == nil { - return nil + tst := make([]IValueContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IValueContext); ok { + tst[i] = t.(IValueContext) + i++ + } } - return t.(IFieldContext) + return tst } -func (s *ConditionOpListContext) LiteralList() ILiteralListContext { +func (s *ComparisonOpContext) Value(i int) IValueContext { var t antlr.RuleContext + j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ILiteralListContext); ok { - t = 
ctx.(antlr.RuleContext) - break + if _, ok := ctx.(IValueContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ } } @@ -1289,25 +1308,25 @@ func (s *ConditionOpListContext) LiteralList() ILiteralListContext { return nil } - return t.(ILiteralListContext) + return t.(IValueContext) } -func (s *ConditionOpListContext) EnterRule(listener antlr.ParseTreeListener) { +func (s *ComparisonOpContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(EQLListener); ok { - listenerT.EnterConditionOpList(s) + listenerT.EnterComparisonOp(s) } } -func (s *ConditionOpListContext) ExitRule(listener antlr.ParseTreeListener) { +func (s *ComparisonOpContext) ExitRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(EQLListener); ok { - listenerT.ExitConditionOpList(s) + listenerT.ExitComparisonOp(s) } } -func (s *ConditionOpListContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *ComparisonOpContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case EQLVisitor: - return t.VisitConditionOpList(s) + return t.VisitComparisonOp(s) default: return t.VisitChildren(s) @@ -1470,13 +1489,14 @@ func (s *ConditionNotContext) Accept(visitor antlr.ParseTreeVisitor) interface{} } } -type ConditionNotInContext struct { +type LookupNotOpListContext struct { ConditionContext + op antlr.Token list ILiteralListContext } -func NewConditionNotInContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ConditionNotInContext { - var p = new(ConditionNotInContext) +func NewLookupNotOpListContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *LookupNotOpListContext { + var p = new(LookupNotOpListContext) InitEmptyConditionContext(&p.ConditionContext) p.parser = parser @@ -1485,15 +1505,19 @@ func NewConditionNotInContext(parser antlr.Parser, ctx antlr.ParserRuleContext) return p } -func (s *ConditionNotInContext) GetList() ILiteralListContext { return s.list } +func (s 
*LookupNotOpListContext) GetOp() antlr.Token { return s.op } -func (s *ConditionNotInContext) SetList(v ILiteralListContext) { s.list = v } +func (s *LookupNotOpListContext) SetOp(v antlr.Token) { s.op = v } -func (s *ConditionNotInContext) GetRuleContext() antlr.RuleContext { +func (s *LookupNotOpListContext) GetList() ILiteralListContext { return s.list } + +func (s *LookupNotOpListContext) SetList(v ILiteralListContext) { s.list = v } + +func (s *LookupNotOpListContext) GetRuleContext() antlr.RuleContext { return s } -func (s *ConditionNotInContext) Field() IFieldContext { +func (s *LookupNotOpListContext) Field() IFieldContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { if _, ok := ctx.(IFieldContext); ok { @@ -1509,7 +1533,7 @@ func (s *ConditionNotInContext) Field() IFieldContext { return t.(IFieldContext) } -func (s *ConditionNotInContext) LiteralList() ILiteralListContext { +func (s *LookupNotOpListContext) LiteralList() ILiteralListContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { if _, ok := ctx.(ILiteralListContext); ok { @@ -1525,22 +1549,22 @@ func (s *ConditionNotInContext) LiteralList() ILiteralListContext { return t.(ILiteralListContext) } -func (s *ConditionNotInContext) EnterRule(listener antlr.ParseTreeListener) { +func (s *LookupNotOpListContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(EQLListener); ok { - listenerT.EnterConditionNotIn(s) + listenerT.EnterLookupNotOpList(s) } } -func (s *ConditionNotInContext) ExitRule(listener antlr.ParseTreeListener) { +func (s *LookupNotOpListContext) ExitRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(EQLListener); ok { - listenerT.ExitConditionNotIn(s) + listenerT.ExitLookupNotOpList(s) } } -func (s *ConditionNotInContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *LookupNotOpListContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case EQLVisitor: - 
return t.VisitConditionNotIn(s) + return t.VisitLookupNotOpList(s) default: return t.VisitChildren(s) @@ -1837,26 +1861,29 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { } case 4: - localctx = NewConditionOpContext(p, localctx) + localctx = NewComparisonOpContext(p, localctx) p.SetParserRuleContext(localctx) _prevctx = localctx { p.SetState(80) - p.Field() + + var _x = p.value(0) + + localctx.(*ComparisonOpContext).left = _x } { p.SetState(81) var _lt = p.GetTokenStream().LT(1) - localctx.(*ConditionOpContext).op = _lt + localctx.(*ComparisonOpContext).op = _lt _la = p.GetTokenStream().LA(1) if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&16769024) != 0) { var _ri = p.GetErrorHandler().RecoverInline(p) - localctx.(*ConditionOpContext).op = _ri + localctx.(*ComparisonOpContext).op = _ri } else { p.GetErrorHandler().ReportMatch(p) p.Consume() @@ -1864,11 +1891,14 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { } { p.SetState(82) - p.value(0) + + var _x = p.value(0) + + localctx.(*ComparisonOpContext).right = _x } case 5: - localctx = NewConditionOpListContext(p, localctx) + localctx = NewLookupNotOpListContext(p, localctx) p.SetParserRuleContext(localctx) _prevctx = localctx { @@ -1877,52 +1907,59 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { } { p.SetState(85) + p.Match(EQLParserT__9) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(86) var _lt = p.GetTokenStream().LT(1) - localctx.(*ConditionOpListContext).op = _lt + localctx.(*LookupNotOpListContext).op = _lt _la = p.GetTokenStream().LA(1) - if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&66584576) != 0) { + if !(_la == EQLParserT__23 || _la == EQLParserT__24) { var _ri = p.GetErrorHandler().RecoverInline(p) - localctx.(*ConditionOpListContext).op = _ri + localctx.(*LookupNotOpListContext).op = _ri } else { p.GetErrorHandler().ReportMatch(p) p.Consume() } } { - p.SetState(86) + 
p.SetState(87) var _x = p.LiteralList() - localctx.(*ConditionOpListContext).list = _x + localctx.(*LookupNotOpListContext).list = _x } case 6: - localctx = NewConditionNotInContext(p, localctx) + localctx = NewLookupOpListContext(p, localctx) p.SetParserRuleContext(localctx) _prevctx = localctx - { - p.SetState(88) - p.Field() - } { p.SetState(89) - p.Match(EQLParserT__9) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + p.Field() } { p.SetState(90) + + var _lt = p.GetTokenStream().LT(1) + + localctx.(*LookupOpListContext).op = _lt + _la = p.GetTokenStream().LA(1) - if !(_la == EQLParserT__23 || _la == EQLParserT__24) { - p.GetErrorHandler().RecoverInline(p) + if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&66584576) != 0) { + var _ri = p.GetErrorHandler().RecoverInline(p) + + localctx.(*LookupOpListContext).op = _ri } else { p.GetErrorHandler().ReportMatch(p) p.Consume() @@ -1933,7 +1970,7 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { var _x = p.LiteralList() - localctx.(*ConditionNotInContext).list = _x + localctx.(*LookupOpListContext).list = _x } case 7: diff --git a/quesma/eql/parser/eql_visitor.go b/quesma/eql/parser/eql_visitor.go index 73a0e1f25..2d36d4a91 100644 --- a/quesma/eql/parser/eql_visitor.go +++ b/quesma/eql/parser/eql_visitor.go @@ -19,11 +19,11 @@ type EQLVisitor interface { // Visit a parse tree produced by EQLParser#sampleQuery. VisitSampleQuery(ctx *SampleQueryContext) interface{} - // Visit a parse tree produced by EQLParser#ConditionOp. - VisitConditionOp(ctx *ConditionOpContext) interface{} + // Visit a parse tree produced by EQLParser#LookupOpList. + VisitLookupOpList(ctx *LookupOpListContext) interface{} - // Visit a parse tree produced by EQLParser#ConditionOpList. - VisitConditionOpList(ctx *ConditionOpListContext) interface{} + // Visit a parse tree produced by EQLParser#ComparisonOp. 
+ VisitComparisonOp(ctx *ComparisonOpContext) interface{} // Visit a parse tree produced by EQLParser#ConditionNotFuncall. VisitConditionNotFuncall(ctx *ConditionNotFuncallContext) interface{} @@ -34,8 +34,8 @@ type EQLVisitor interface { // Visit a parse tree produced by EQLParser#ConditionNot. VisitConditionNot(ctx *ConditionNotContext) interface{} - // Visit a parse tree produced by EQLParser#ConditionNotIn. - VisitConditionNotIn(ctx *ConditionNotInContext) interface{} + // Visit a parse tree produced by EQLParser#LookupNotOpList. + VisitLookupNotOpList(ctx *LookupNotOpListContext) interface{} // Visit a parse tree produced by EQLParser#ConditionLogicalOp. VisitConditionLogicalOp(ctx *ConditionLogicalOpContext) interface{} diff --git a/quesma/eql/query_translator.go b/quesma/eql/query_translator.go index d9458ace2..7b46649f1 100644 --- a/quesma/eql/query_translator.go +++ b/quesma/eql/query_translator.go @@ -20,13 +20,29 @@ type ClickhouseEQLQueryTranslator struct { Ctx context.Context } -func (cw *ClickhouseEQLQueryTranslator) BuildNRowsQuery(fieldName string, simpleQuery queryparser.SimpleQuery, limit int) *model.Query { +func (cw *ClickhouseEQLQueryTranslator) applySizeLimit(size int) int { + // FIXME hard limit here to prevent OOM + const quesmaMaxSize = 10000 + if size > quesmaMaxSize { + logger.WarnWithCtx(cw.Ctx).Msgf("setting hits size to=%d, got=%d", quesmaMaxSize, size) + size = quesmaMaxSize + } + return size +} +func (cw *ClickhouseEQLQueryTranslator) BuildNRowsQuery(fieldName string, simpleQuery queryparser.SimpleQuery, limit int) *model.Query { + suffixClauses := make([]string, 0) + if len(simpleQuery.SortFields) > 0 { + suffixClauses = append(suffixClauses, "ORDER BY "+strings.Join(simpleQuery.SortFields, ", ")) + } + if limit > 0 { + suffixClauses = append(suffixClauses, "LIMIT "+strconv.Itoa(cw.applySizeLimit(limit))) + } return &model.Query{ Fields: []string{fieldName}, NonSchemaFields: []string{}, WhereClause: simpleQuery.Sql.Stmt, - 
SuffixClauses: []string{}, + SuffixClauses: suffixClauses, FromClause: cw.Table.FullTableName(), CanParse: true, } @@ -119,7 +135,7 @@ func (cw *ClickhouseEQLQueryTranslator) ParseQuery(queryAsJson string) (query qu where, _, err := trans.TransformQuery(eqlQuery) if err != nil { - logger.ErrorWithCtx(cw.Ctx).Err(err).Msg("error transforming EQL query") + logger.ErrorWithCtx(cw.Ctx).Err(err).Msgf("error transforming EQL query: '%s'", eqlQuery) query.CanParse = false query.Sql.Stmt = "Invalid EQL query" return query, model.NewSearchQueryInfoNone(), highlighter @@ -127,6 +143,7 @@ func (cw *ClickhouseEQLQueryTranslator) ParseQuery(queryAsJson string) (query qu query.Sql.Stmt = where query.CanParse = true + query.SortFields = []string{"\"@timestamp\""} return query, searchQueryInfo, highlighter } diff --git a/quesma/eql/transform/eql2exp.go b/quesma/eql/transform/eql2exp.go index 7031570ac..8bf825803 100644 --- a/quesma/eql/transform/eql2exp.go +++ b/quesma/eql/transform/eql2exp.go @@ -84,31 +84,42 @@ func (v *EQLParseTreeToExpTransformer) VisitConditionLogicalOp(ctx *parser.Condi return NewInfixOp(op, left.(Exp), right.(Exp)) } -func (v *EQLParseTreeToExpTransformer) VisitConditionOp(ctx *parser.ConditionOpContext) interface{} { +func (v *EQLParseTreeToExpTransformer) VisitComparisonOp(ctx *parser.ComparisonOpContext) interface{} { + + op := ctx.GetOp().GetText() + left := ctx.GetLeft().Accept(v) + right := ctx.GetRight().Accept(v) + + return NewInfixOp(op, left.(Exp), right.(Exp)) +} + +func (v *EQLParseTreeToExpTransformer) VisitLookupOpList(ctx *parser.LookupOpListContext) interface{} { field := ctx.Field().Accept(v) - value := ctx.Value().Accept(v) + list := ctx.GetList().Accept(v) op := ctx.GetOp().GetText() + op = strings.ToLower(op) // paranoia check, should never happen // if there is no visitor implemented for the right side value is null // TODO add more info here to help debugging - if value == nil { + if list == nil { v.error("value is nil here") return 
&Const{Value: "error"} } - return NewInfixOp(op, field.(Exp), value.(Exp)) + return NewInfixOp(op, field.(Exp), list.(Exp)) } -func (v *EQLParseTreeToExpTransformer) VisitConditionOpList(ctx *parser.ConditionOpListContext) interface{} { +func (v *EQLParseTreeToExpTransformer) VisitLookupNotOpList(ctx *parser.LookupNotOpListContext) interface{} { + field := ctx.Field().Accept(v) + list := ctx.GetList().Accept(v) op := ctx.GetOp().GetText() op = strings.ToLower(op) - inList := ctx.GetList().Accept(v).(Exp) - return NewInfixOp(op, field.(Exp), inList) + return NewInfixOp("not "+op, field.(Exp), list.(Exp)) } func (v *EQLParseTreeToExpTransformer) VisitConditionNot(ctx *parser.ConditionNotContext) interface{} { @@ -120,13 +131,6 @@ func (v *EQLParseTreeToExpTransformer) VisitConditionGroup(ctx *parser.Condition return NewGroup(ctx.Condition().Accept(v).(Exp)) } -func (v *EQLParseTreeToExpTransformer) VisitConditionNotIn(ctx *parser.ConditionNotInContext) interface{} { - field := ctx.Field().Accept(v).(Exp) - inList := ctx.GetList().Accept(v).(Exp) - - return NewInfixOp("not in", field, inList) -} - func (v *EQLParseTreeToExpTransformer) VisitConditionFuncall(ctx *parser.ConditionFuncallContext) interface{} { return ctx.Funcall().Accept(v) diff --git a/quesma/eql/transform_test.go b/quesma/eql/transform_test.go index 992b3df92..500d21e2d 100644 --- a/quesma/eql/transform_test.go +++ b/quesma/eql/transform_test.go @@ -74,7 +74,7 @@ func TestTransform(t *testing.T) { "match(process.name, 'FOO[0-9]')"}, {"any where process.name regex~ \"foo[0-9]\" ", "" + - "match(process.name, 'foo[0-9]')"}, // FIXME + "match(process.name, 'foo[0-9]')"}, {"any where process.parent.name == \"bar\" and process.name == \"foo\"", "((process.parent.name = 'bar') AND (process.name = 'foo'))"}, @@ -205,8 +205,20 @@ func TestTransform(t *testing.T) { {"any where process.name == substring(\"start quesma.exe\", 6)", "(process.name = substring('start quesma.exe', 6))"}, - {"any where foo == 
subtract(10, 2)", "" + + {"any where foo == subtract(10, 2)", "(foo = (10 - 2))"}, + + {"any where 1 == 2", + "(1 = 2)"}, + + {"any where add(1,2) == 2", + "((1 + 2) = 2)"}, + + {"any where 1 == null", + "(1 IS NULL)"}, + + {"any where add(1,null) == 1", + "((1 + NULL) = 1)"}, } for _, tt := range tests { From f82655a95ac3c6bdf792f0b5de1ee384cf934987 Mon Sep 17 00:00:00 2001 From: Przemyslaw Delewski <102958445+pdelewski@users.noreply.github.com> Date: Thu, 9 May 2024 13:00:13 +0200 Subject: [PATCH 09/14] Generalizing sql query processing 1 (#68) - combines three of four `Process...Query` - extends implementation of `func (q *Query) StringFromColumns` to be almost in pair with `func (q *Query) String`. The final goal would be to have just one function --- quesma/clickhouse/quesma_communicator.go | 29 +++++++----------------- quesma/model/query.go | 28 ++++++++++++++++++++--- quesma/quesma/search.go | 14 ++++++------ quesma/quesma/termsenum/terms_enum.go | 2 +- 4 files changed, 41 insertions(+), 32 deletions(-) diff --git a/quesma/clickhouse/quesma_communicator.go b/quesma/clickhouse/quesma_communicator.go index 0a6e01e25..9dd17f896 100644 --- a/quesma/clickhouse/quesma_communicator.go +++ b/quesma/clickhouse/quesma_communicator.go @@ -26,17 +26,22 @@ func (lm *LogManager) Query(ctx context.Context, query string) (*sql.Rows, error return rows, err } -// ProcessSimpleSelectQuery - only WHERE clause +// ProcessQuery - only WHERE clause // TODO query param should be type safe Query representing all parts of // sql statement that were already parsed and not string from which // we have to extract again different parts like where clause and columns to build a proper result -func (lm *LogManager) ProcessSelectQuery(ctx context.Context, table *Table, query *model.Query) ([]model.QueryResultRow, error) { +func (lm *LogManager) ProcessQuery(ctx context.Context, table *Table, query *model.Query) ([]model.QueryResultRow, error) { colNames, err := table.extractColumns(query, 
false) rowToScan := make([]interface{}, len(colNames)+len(query.NonSchemaFields)) if err != nil { return nil, err } - rows, err := executeQuery(ctx, lm, table.Name, query.StringFromColumns(colNames), append(colNames, query.NonSchemaFields...), rowToScan) + + resultColumns, err := table.extractColumns(query, true) + if err != nil { + return nil, err + } + rows, err := executeQuery(ctx, lm, table.Name, query.StringFromColumns(colNames), resultColumns, rowToScan) if err == nil { for _, row := range rows { row.Index = table.Name @@ -114,24 +119,6 @@ func executeQuery(ctx context.Context, lm *LogManager, tableName string, queryAs return res, err } -func (lm *LogManager) ProcessAutocompleteSuggestionsQuery(ctx context.Context, table *Table, query *model.Query) ([]model.QueryResultRow, error) { - colNames, err := table.extractColumns(query, false) - if err != nil { - return nil, err - } - rowToScan := make([]interface{}, len(colNames)+len(query.NonSchemaFields)) - return executeQuery(ctx, lm, table.Name, query.String(), query.Fields, rowToScan) -} - -func (lm *LogManager) ProcessGeneralAggregationQuery(ctx context.Context, table *Table, query *model.Query) ([]model.QueryResultRow, error) { - colNames, err := table.extractColumns(query, true) - if err != nil { - return nil, err - } - rowToScan := make([]interface{}, len(colNames)) - return executeQuery(ctx, lm, table.Name, query.String(), colNames, rowToScan) -} - // 'selectFields' are all values that we return from the query, both columns and non-schema fields, // like e.g. 
count(), or toInt8(boolField) func read(tableName string, rows *sql.Rows, selectFields []string, rowToScan []interface{}) ([]model.QueryResultRow, error) { diff --git a/quesma/model/query.go b/quesma/model/query.go index 9797da988..0021114f6 100644 --- a/quesma/model/query.go +++ b/quesma/model/query.go @@ -91,11 +91,14 @@ func (q *Query) String() string { func (q *Query) StringFromColumns(colNames []string) string { var sb strings.Builder sb.WriteString("SELECT ") + if q.IsDistinct { + sb.WriteString("DISTINCT ") + } for i, field := range colNames { - if field != EmptyFieldSelection { - sb.WriteString(strconv.Quote(field)) - } else { + if field == "*" || field == EmptyFieldSelection { sb.WriteString(field) + } else { + sb.WriteString(strconv.Quote(field)) } if i < len(colNames)-1 || len(q.NonSchemaFields) > 0 { sb.WriteString(", ") @@ -112,6 +115,25 @@ func (q *Query) StringFromColumns(colNames []string) string { where = "" } sb.WriteString(" FROM " + q.FromClause + where + q.WhereClause + " " + strings.Join(q.SuffixClauses, " ")) + if len(q.GroupByFields) > 0 { + sb.WriteString(" GROUP BY (") + for i, field := range q.GroupByFields { + sb.WriteString(field) + if i < len(q.GroupByFields)-1 { + sb.WriteString(", ") + } + } + sb.WriteString(")") + + sb.WriteString(" ORDER BY (") + for i, field := range q.GroupByFields { + sb.WriteString(field) + if i < len(q.GroupByFields)-1 { + sb.WriteString(", ") + } + } + sb.WriteString(")") + } return sb.String() } diff --git a/quesma/quesma/search.go b/quesma/quesma/search.go index 7dd3767c5..b55bcb3c4 100644 --- a/quesma/quesma/search.go +++ b/quesma/quesma/search.go @@ -241,14 +241,14 @@ func (q *QueryRunner) handleSearchCommon(ctx context.Context, indexPattern strin fieldName = "*" } listQuery := queryTranslator.BuildNRowsQuery(fieldName, simpleQuery, queryInfo.Size) - hitsFallback, err = q.logManager.ProcessSelectQuery(ctx, table, listQuery) + hitsFallback, err = q.logManager.ProcessQuery(ctx, table, listQuery) if err != 
nil { logger.ErrorWithCtx(ctx).Msgf("error processing fallback query. Err: %v, query: %+v", err, listQuery) pushSecondaryInfo(q.quesmaManagementConsole, id, path, body, translatedQueryBody, responseBody, startTime) return responseBody, err } countQuery := queryTranslator.BuildSimpleCountQuery(simpleQuery.Sql.Stmt) - countResult, err := q.logManager.ProcessSelectQuery(ctx, table, countQuery) + countResult, err := q.logManager.ProcessQuery(ctx, table, countQuery) if err != nil { logger.ErrorWithCtx(ctx).Msgf("error processing count query. Err: %v, query: %+v", err, countQuery) pushSecondaryInfo(q.quesmaManagementConsole, id, path, body, translatedQueryBody, responseBody, startTime) @@ -461,7 +461,7 @@ func (q *QueryRunner) searchWorkerCommon(ctx context.Context, queryTranslator IQ switch queryInfo.Typ { case model.CountAsync: fullQuery = queryTranslator.BuildSimpleCountQuery(simpleQuery.Sql.Stmt) - hits, err = q.logManager.ProcessSelectQuery(dbQueryCtx, table, fullQuery) + hits, err = q.logManager.ProcessQuery(dbQueryCtx, table, fullQuery) case model.Facets, model.FacetsNumeric: // queryInfo = (Facets, fieldName, Limit results, Limit last rows to look into) @@ -471,16 +471,16 @@ func (q *QueryRunner) searchWorkerCommon(ctx context.Context, queryTranslator IQ case model.ListByField: // queryInfo = (ListByField, fieldName, 0, LIMIT) fullQuery = queryTranslator.BuildNRowsQuery(queryInfo.FieldName, simpleQuery, queryInfo.I2) - hits, err = q.logManager.ProcessSelectQuery(dbQueryCtx, table, fullQuery) + hits, err = q.logManager.ProcessQuery(dbQueryCtx, table, fullQuery) case model.ListAllFields: // queryInfo = (ListAllFields, "*", 0, LIMIT) fullQuery = queryTranslator.BuildNRowsQuery("*", simpleQuery, queryInfo.I2) - hits, err = q.logManager.ProcessSelectQuery(dbQueryCtx, table, fullQuery) + hits, err = q.logManager.ProcessQuery(dbQueryCtx, table, fullQuery) case model.Normal: fullQuery = queryTranslator.BuildSimpleSelectQuery(simpleQuery.Sql.Stmt, queryInfo.I2) - hits, 
err = q.logManager.ProcessSelectQuery(dbQueryCtx, table, fullQuery) + hits, err = q.logManager.ProcessQuery(dbQueryCtx, table, fullQuery) default: logger.ErrorWithCtx(ctx).Msgf("unknown query type: %v, query body: %v", queryInfo.Typ, body) @@ -543,7 +543,7 @@ func (q *QueryRunner) searchAggregationWorkerCommon(ctx context.Context, aggrega for _, agg := range aggregations { logger.InfoWithCtx(ctx).Msg(agg.String()) // I'd keep for now until aggregations work fully sqls += agg.Query.String() + "\n" - rows, err := q.logManager.ProcessGeneralAggregationQuery(dbQueryCtx, table, &agg.Query) + rows, err := q.logManager.ProcessQuery(dbQueryCtx, table, &agg.Query) if err != nil { logger.ErrorWithCtx(ctx).Msg(err.Error()) continue diff --git a/quesma/quesma/termsenum/terms_enum.go b/quesma/quesma/termsenum/terms_enum.go index 32753d279..97f236830 100644 --- a/quesma/quesma/termsenum/terms_enum.go +++ b/quesma/quesma/termsenum/terms_enum.go @@ -37,7 +37,7 @@ func handleTermsEnumRequest(ctx context.Context, reqBody []byte, qt *queryparser dbQueryCtx, cancel := context.WithCancel(ctx) // TODO this will be used to cancel goroutine that is executing the query _ = cancel - if rows, err2 := qt.ClickhouseLM.ProcessAutocompleteSuggestionsQuery(dbQueryCtx, qt.Table, selectQuery); err2 != nil { + if rows, err2 := qt.ClickhouseLM.ProcessQuery(dbQueryCtx, qt.Table, selectQuery); err2 != nil { logger.Error().Msgf("terms enum failed - error processing SQL query [%s]", err2) result, err = json.Marshal(emptyTermsEnumResponse()) } else { From bd4da0625f5bb1f4cd9d08484d9077859c4c2242 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Hejman?= Date: Thu, 9 May 2024 19:16:22 +0200 Subject: [PATCH 10/14] Remove `ngrok.yml` and references (#72) Related to https://github.com/QuesmaOrg/telemetry-collector/pull/17 --- .gitignore | 1 - docker/README.md | 1 - docker/ngrok.yml | 14 -------------- 3 files changed, 16 deletions(-) delete mode 100644 docker/ngrok.yml diff --git a/.gitignore 
b/.gitignore index ee94164eb..798979941 100644 --- a/.gitignore +++ b/.gitignore @@ -16,7 +16,6 @@ heap.out **/terraform.tfstate* **/.terraform.tfstate* **/.terraform -docker/ngrok/ngrok.yml docker/security/ca docker/security/es.local docker/security/certificate-bundle.zip diff --git a/docker/README.md b/docker/README.md index 94f09c099..768305a68 100644 --- a/docker/README.md +++ b/docker/README.md @@ -13,5 +13,4 @@ or our own services like log-generator. * `kafka-demo.yml` - created specifically for Device demo, contains all services and data, including Kafka, which writes to Quesma via Elasticsearch Connector. * `opensearch.yml` - used for local development with OpenSearch instead of Elasticsearch. Work in progress. * `hydrolix.yml` - to be used with Hydrolix, requires `.env` file from 1Password. -* `ngrok.yml` - used to expose our service to the internet, requires `ngrok/ngrok.yml` file from 1Password diff --git a/docker/ngrok.yml b/docker/ngrok.yml deleted file mode 100644 index f4e1bd382..000000000 --- a/docker/ngrok.yml +++ /dev/null @@ -1,14 +0,0 @@ -version: "3.7" -services: - ngrok: - image: ngrok/ngrok:latest - restart: unless-stopped - command: - - "start" - - "--all" - - "--config" - - "/etc/ngrok.yml" - volumes: - - ./ngrok/ngrok.yml:/etc/ngrok.yml - ports: - - 4040:4040 \ No newline at end of file From ee7fc1d425b7396758353c0afb51202b1dd61b4e Mon Sep 17 00:00:00 2001 From: Przemyslaw Delewski <102958445+pdelewski@users.noreply.github.com> Date: Thu, 9 May 2024 20:14:27 +0200 Subject: [PATCH 11/14] Update smoke test to check logs after triggering async queries (#70) --- .github/workflows/pipeline.yml | 2 + docker/ci.yml | 1 + docker/local-dev.yml | 1 + smoke-test/main.go | 132 ++++++++++++++++++++++++++------- 4 files changed, 108 insertions(+), 28 deletions(-) diff --git a/.github/workflows/pipeline.yml b/.github/workflows/pipeline.yml index c7d9b7a58..7ff7d20ac 100644 --- a/.github/workflows/pipeline.yml +++ b/.github/workflows/pipeline.yml @@ 
-148,6 +148,8 @@ jobs: - name: Verify if data is flowing working-directory: smoke-test + env: + GITHUB_ACTIONS: true run: go run main.go - name: Print docker status diff --git a/docker/ci.yml b/docker/ci.yml index c31abae0d..40e340f08 100644 --- a/docker/ci.yml +++ b/docker/ci.yml @@ -9,6 +9,7 @@ services: - QUESMA_port=8080 - QUESMA_logging_path=/var/quesma/logs - QUESMA_clickhouse_url=clickhouse://clickhouse:9000 + - QUESMA_logging_fileLogging=true depends_on: clickhouse: condition: service_healthy diff --git a/docker/local-dev.yml b/docker/local-dev.yml index 51f77bad9..dac2dbdf4 100644 --- a/docker/local-dev.yml +++ b/docker/local-dev.yml @@ -13,6 +13,7 @@ services: - QUESMA_logging_path=/var/quesma/logs - QUESMA_mode=dual-write-query-clickhouse - QUESMA_CONFIG_FILE=/config/local-dev.yaml + - QUESMA_logging_fileLogging=true depends_on: clean-clickhouse: condition: service_completed_successfully diff --git a/smoke-test/main.go b/smoke-test/main.go index 2c930581a..624dc7c39 100644 --- a/smoke-test/main.go +++ b/smoke-test/main.go @@ -10,6 +10,7 @@ import ( "io" "log" "net/http" + "os" "slices" "strings" "time" @@ -36,7 +37,13 @@ const ( printInterval = 5 * time.Second ) -const query = ` +const ( + localLogPath = "../docker/quesma/logs/quesma.log" + ciLogPath = "/home/runner/work/quesma/quesma/docker/quesma/logs/quesma.log" + ciEnvVar = "GITHUB_ACTIONS" +) + +var queries = []string{` { "_source": false, "fields": [ @@ -75,8 +82,8 @@ const query = ` "range": { "@timestamp": { "format": "strict_date_optional_time", - "gte": "2024-01-23T14:43:19.481Z", - "lte": "2024-01-23T14:58:19.481Z" + "gte": "now-1d", + "lte": "now-1s" } } } @@ -110,7 +117,53 @@ const query = ` "track_total_hits": false, "version": true } -` +`, + `{ + "_source": { + "excludes": [] + }, + "aggs": { + "0": { + "date_histogram": { + "field": "@timestamp", + "fixed_interval": "30s", + "min_doc_count": 1, + "time_zone": "Europe/Warsaw" + } + } + }, + "fields": [ + { + "field": "@timestamp", + 
"format": "date_time" + } + ], + "query": { + "bool": { + "filter": [ + { + "range": { + "@timestamp": { + "format": "strict_date_optional_time", + "gte": "now-1d", + "lte": "now-1s" + } + } + } + ], + "must": [], + "must_not": [], + "should": [] + } + }, + "runtime_mappings": {}, + "script_fields": {}, + "size": 0, + "stored_fields": [ + "*" + ], + "track_total_hits": true +}`} const kibanaInternalLog = ` { @@ -182,7 +235,7 @@ func main() { reportUri := waitForScheduleReportGeneration() waitForLogsInClickhouse("logs-generic-default", time.Minute) println(" Logs in Clickhouse: OK") - waitForAsyncQuery(time.Minute) + waitForAsyncQuery(time.Minute, queries) println(" AsyncQuery: OK") waitForKibanaLogExplorer("kibana LogExplorer", time.Minute) println(" Kibana LogExplorer: OK") @@ -424,37 +477,60 @@ func waitForLogsInElasticsearchRaw(serviceName, url string, quesmaSource bool, t } } -func waitForAsyncQuery(timeout time.Duration) { - serviceName := "async query" - res := waitFor(serviceName, func() bool { - resp, err := http.Post(asyncQueryUrl, "application/json", bytes.NewBuffer([]byte(query))) +func checkLogs() { + value := os.Getenv(ciEnvVar) + logPath := localLogPath + if value != "" { + logPath = ciLogPath + } + content, err := os.ReadFile(logPath) + if err != nil { + panic("Error reading file:" + err.Error()) + return + } - if err == nil { - defer resp.Body.Close() - if resp.StatusCode == 200 { - body, err := io.ReadAll(resp.Body) - if err == nil { - var response map[string]interface{} - _ = json.Unmarshal(body, &response) + fileContent := string(content) + searchString := "Panic recovered:" - if response["completion_time_in_millis"] != nil { - if sourceClickhouse(resp) { - return true - } else { - panic("invalid X-Quesma-Source header value") + if bytes.Contains([]byte(fileContent), []byte(searchString)) { + panic("Panic recovered in quesma.log") + } +} + +func waitForAsyncQuery(timeout time.Duration, queries []string) { + serviceName := "async query" + for _, 
query := range queries { + res := waitFor(serviceName, func() bool { + resp, err := http.Post(asyncQueryUrl, "application/json", bytes.NewBuffer([]byte(query))) + + if err == nil { + defer resp.Body.Close() + if resp.StatusCode == 200 { + body, err := io.ReadAll(resp.Body) + if err == nil { + var response map[string]interface{} + _ = json.Unmarshal(body, &response) + + if response["completion_time_in_millis"] != nil { + if sourceClickhouse(resp) { + return true + } else { + panic("invalid X-Quesma-Source header value") + } } + } else { + log.Println(err) } - } else { - log.Println(err) } } - } - return false - }, timeout) + return false + }, timeout) - if !res { - panic(serviceName + " is not alive or is not receiving logs") + if !res { + panic(serviceName + " is not alive or is not receiving logs") + } } + checkLogs() } func headerExists(headers http.Header, key string, value string) bool { From fa232646ed657ee1eba0ef9b762708161508f3af Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Strzali=C5=84ski?= Date: Fri, 10 May 2024 12:23:30 +0200 Subject: [PATCH 12/14] EQL - Final (#69) This is a "final" change. Changes in parser: - add pipe command (parsed but not executed) - floats - optional field names (parsed but not evaluated) - add a full list of functions, antlr can give a nicer error message Some bug fixes. 
--- quesma/eql/README.md | 93 ++ quesma/eql/e2e/end2end_test.go | 29 +- quesma/eql/parser.go | 2 +- quesma/eql/parser/EQL.g4 | 46 +- quesma/eql/parser/eql_base_listener.go | 36 + quesma/eql/parser/eql_base_visitor.go | 24 + quesma/eql/parser/eql_lexer.go | 471 +++++--- quesma/eql/parser/eql_listener.go | 36 + quesma/eql/parser/eql_parser.go | 1335 ++++++++++++++++------ quesma/eql/parser/eql_visitor.go | 18 + quesma/eql/parser_test.go | 3 + quesma/eql/playground/main.go | 2 +- quesma/eql/transform.go | 2 - quesma/eql/transform/eql2exp.go | 29 +- quesma/eql/transform/renderer.go | 14 +- quesma/eql/transform/trans_clickhouse.go | 25 +- quesma/eql/transform_test.go | 62 +- 17 files changed, 1660 insertions(+), 567 deletions(-) create mode 100644 quesma/eql/README.md diff --git a/quesma/eql/README.md b/quesma/eql/README.md new file mode 100644 index 000000000..4d3e1ab49 --- /dev/null +++ b/quesma/eql/README.md @@ -0,0 +1,93 @@ +EQL support +--- + + +This package contains the EQL parser and query transformer. + +- The parser is generated using ANTLR4. The grammar is defined in `EQL.g4` file. The generated code is in `parser` directory. Do not review the generated code. +- HTTP endpoint is implemented in `FIXME` +- `query_translator.go` is the glue code that connects the parser with the Quesma search engine. +- Sample EQL query as an HTTP request is in `http_request/eql_search.http` file. +- A simple command line client is implemented in `playground` directory. +- End-to-End tests are implemented in `e2e` directory. See file `e2e/eql_test.go` for more details. + + +What is supported? 
+--- + +Comparison operators + +| operator | supported | comment | +|----------|--------------------|---------| +| `==` | :heavy_check_mark: | | +| `!=` | :heavy_check_mark: | | +| `>` | :heavy_check_mark: | | +| `>=` | :heavy_check_mark: | | +| `<` | :heavy_check_mark: | | +| `<=` | :heavy_check_mark: | | +| `:` | :heavy_check_mark: | | + + +Lookup operators + +| operator | supported | comment | +|-----------|--------------------|---------| +| `in` | :heavy_check_mark: | | +| `not in` | :heavy_check_mark: | | +| `in~` | :heavy_check_mark: | | +| `not in~` | :heavy_check_mark: | | +| `:` | :heavy_check_mark: | | +| `like` | :heavy_check_mark: | | +| `like~` | :heavy_check_mark: | | +| `regex` | :heavy_check_mark: | | +| `regex~` | :heavy_check_mark: | | + + +Logical operators + +| operator | supported | comment | +|----------|--------------------|---------| +| `and` | :heavy_check_mark: | | +| `or` | :heavy_check_mark: | | +| `not` | :heavy_check_mark: | | + + + +Supported functions + + +| function | supported | comment | +|-------------------|--------------------|----------------------------------------| +| `add` | :heavy_check_mark: | | +| `between` | :x: | | +| `cidrMatch` | :cockroach: | | +| `concat` | :heavy_check_mark: | | +| `divide` | :cockroach: | division of integers should be rounded | +| `endsWith` | :heavy_check_mark: | | +| `endsWith~` | :heavy_check_mark: | | +| `indexOf` | :cockroach: | | +| `indexOf~` | :cockroach: | | +| `length` | :heavy_check_mark: | | +| `modulo` | :heavy_check_mark: | | +| `multiply` | :heavy_check_mark: | | +| `number` | :cockroach: | | +| `startsWith` | :heavy_check_mark: | | +| `startsWith~` | :heavy_check_mark: | | +| `string` | :heavy_check_mark: | | +| `stringContains` | :cockroach: | | +| `stringContains~` | :cockroach: | | +| `substring` | :cockroach: | | +| `subtract` | :heavy_check_mark: | | + + + + +Known limitations +--- + +1. We support only simple EQL queries. Sequence and sample queries are not supported. +2. 
Pipe operators are not supported. Syntax is parsed. Error is returned if pipe operator is used in the query. (https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-syntax.html#eql-pipes)
+3. Optional fields are not supported. Field names are parsed. Error is returned if that field is used in the query. (https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-syntax.html#eql-syntax-optional-fields)
+4. Backtick escaping is not supported. (https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-syntax.html#eql-syntax-escape-a-field-name)
+5. Error handling is missing. Every error will be returned as an internal server error.
+
diff --git a/quesma/eql/e2e/end2end_test.go b/quesma/eql/e2e/end2end_test.go
index fcbc723eb..9737e4cfc 100644
--- a/quesma/eql/e2e/end2end_test.go
+++ b/quesma/eql/e2e/end2end_test.go
@@ -24,8 +24,12 @@ func TestE2E(t *testing.T) {
 // Queries start with a "--" are skipped.
 var eqlQueries = []string{
 	`any where false`,
+	`any where false and true`,
 	`not_existing where true`,
 	"process where true",
+	"process where false and true",
+	"process where not false and true",
+
 	"process where process.pid == 1",
 	"process where process.pid > 0",
 	"process where process.pid >= 0",
@@ -35,21 +39,23 @@ func TestE2E(t *testing.T) {
 	`process where process.pid == 2 / 2`,
 	`process where process.pid == 3 % 2`,
 	`process where process.pid == 2 * 3 / 6`,
-	`-- process where process.pid < 4.0 / 2`, // TODO add floats
+	`process where process.pid < 4.0 / 2`,
+	`process where not false`,
 	`process where not (event.type == "start")`,
 	`process where process.pid == 1 and event.type == "start"`,
+	`process where process.pid == 1 and event.type != "start"`,
 	`process where event.type : "start"`,
 	`process where event.type : "st*"`,
 	`process where event.type : ("start", "stop")`,
 	`process where process.pid == 1 and event.type like "st*"`,
-	`-- process where process.pid == 1 and event.type like "st%"`, // FIXME this is a bug, we should 
escape % in like + `process where process.pid == 1 and event.type like "st%"`, `process where process.name like~ "test"`, `process where process.name like ("test", "test2")`, `process where event.type in ("start", "stop")`, `process where event.type in~ ("STaRT", "StOP")`, `process where event.type not in ("start", "stop")`, - `-- process where event.type not in~ ("STaRT", "StOP")`, // FIXME THIS IS A BUG, quesma retured: 3 but elastic returned: 1 + `process where event.type not in~ ("STaRT", "StOP")`, `process where process.name != string(1)`, `process where process.name == null`, @@ -68,17 +74,17 @@ func TestE2E(t *testing.T) { `process where process.name like "Te"`, `process where process.name like "T*t"`, - `-- process where process.name : "_est"`, //FIXME we should escace _ in like, quesma retured: 3 but elastic returned: 0 - `-- process where process.name : "Te_t"`, // FIXME quesma retured: 3 but elastic returned: 0 + `process where process.name : "_est"`, + `process where process.name : "Te_t"`, `process where process.name : "Te_"`, - `-- process where process.name : "?est"`, // FIXME support ? 
wildcard , quesma retured: 0 but elastic returned: 3 - `-- process where process.name : "Te?t"`, + `process where process.name : "?est"`, + `process where process.name : "Te?t"`, `process where process.name : "Te?"`, `process where process.pid == add(0,1)`, - `-- process where process.pid == add(-2,3)`, // FIXME this is a bug, we should support negative numbers - `-- process where process.pid == add(-2,3)`, + `process where process.pid == add(-2,3)`, + `process where process.pid == add(-2,3)`, // FIXME this is an elastic limitation // elastic fail response: {"error":{"root_cause":[{"type":"ql_illegal_argument_exception","reason":"Line 1:40: Comparisons against fields are not (currently) supported; offender [add(process.pid,0)] in [==]"}],"type":"ql_illegal_argument_exception","reason":"Line 1:40: Comparisons against fields are not (currently) supported; offender [add(process.pid,0)] in [==]"},"status":500} @@ -180,7 +186,10 @@ func TestE2E(t *testing.T) { `process where subtract(null, 2) == null`, `process where subtract(2, null) == null`, - `process where ?not_existing == null`, // FIXME this is a bug, optional fields are not supported yet + `-- process where ?not_existing == null`, // FIXME this is a bug, optional fields are not supported yet + + `process where process.name != "foo"`, + `process where process.name != "';delete from table;"`, } // This our category name. Each test runs in a separate category. 
diff --git a/quesma/eql/parser.go b/quesma/eql/parser.go index 7cddd3a5d..50ceebc0a 100644 --- a/quesma/eql/parser.go +++ b/quesma/eql/parser.go @@ -75,5 +75,5 @@ func (s *EQL) Parse(query string) (parser.IQueryContext, error) { } func (s *EQL) IsSupported(ast parser.IQueryContext) bool { - return ast.SimpleQuery() != nil + return ast.SimpleQuery() != nil && len(ast.AllPipe()) == 0 } diff --git a/quesma/eql/parser/EQL.g4 b/quesma/eql/parser/EQL.g4 index d7f08d82a..4fb1f8f36 100644 --- a/quesma/eql/parser/EQL.g4 +++ b/quesma/eql/parser/EQL.g4 @@ -1,12 +1,9 @@ grammar EQL; -query: ( simpleQuery - | sequenceQuery - | sampleQuery ) EOF +query: ( simpleQuery | sequenceQuery | sampleQuery ) ('|' pipe)* EOF ; simpleQuery: category 'where' condition -// TODO add support for pipe opertor '|' ; sequenceQuery: 'sequence' ( 'by' fieldList )? ( 'with' 'maxspan' '=' interval )? @@ -35,13 +32,12 @@ category | STRING ; -field: ID; -// TODO add optional field names: `?field_name` +field: ID | ('?' ID); +// TODO add optional field names: '?field_name' // TODO add backtick escape for field names fieldList : field (',' field)*; -// TODO add floats literal: STRING | NUMBER | BOOLEAN; literalList: '(' literal (',' literal)* ')'; @@ -56,9 +52,39 @@ value: ; +pipe: + 'head' NUMBER #PipeHead + | 'tail' NUMBER #PipeTail + | 'count' #PipeCount + | 'unique' fieldList #PipeUnique + | 'filter' condition #PipeFilter + | 'sort' fieldList #PipeSort + ; + funcall: funcName '(' value (',' value)* ')'; -funcName: ID | ID '~'; +funcName: + 'add' + | 'between' + | 'cidrMatch' + | 'concat' + | 'divide' + | 'endsWith' + | 'endsWith~' + | 'indexOf' + | 'indexOf~' + | 'length' + | 'modulo' + | 'multiply' + | 'number' + | 'startsWith' + | 'startsWith~' + | 'string' + | 'stringContains' + | 'stringContains~' + | 'substring' + | 'subtract' +; interval: INTERVAL; @@ -69,11 +95,13 @@ MULTILINE_COMMENT: '/*' .*? 
'*/' -> channel(HIDDEN); ONELINE_COMMNET: '//' ~[\r\n]* -> channel(HIDDEN); BOOLEAN: 'true' | 'false'; INTERVAL: [0-9]+[a-z]; -NUMBER: [0-9]+; + +NUMBER: ('-' | ) ([0-9]+ | [0-9]* '.' [0-9]+) ([eE] [+-]? [0-9]+)?; ESC: '\\' .; STRING: '"' ('\\' . | '""' | ~["\\])* '"' | '"""' .*? '"""'; WS: [ \t\n\r\f]+ -> skip ; + ID: [a-zA-Z_][.a-zA-Z0-9_-]*; diff --git a/quesma/eql/parser/eql_base_listener.go b/quesma/eql/parser/eql_base_listener.go index 1cf6e7978..780b90cd2 100644 --- a/quesma/eql/parser/eql_base_listener.go +++ b/quesma/eql/parser/eql_base_listener.go @@ -170,6 +170,42 @@ func (s *BaseEQLListener) EnterValueField(ctx *ValueFieldContext) {} // ExitValueField is called when production ValueField is exited. func (s *BaseEQLListener) ExitValueField(ctx *ValueFieldContext) {} +// EnterPipeHead is called when production PipeHead is entered. +func (s *BaseEQLListener) EnterPipeHead(ctx *PipeHeadContext) {} + +// ExitPipeHead is called when production PipeHead is exited. +func (s *BaseEQLListener) ExitPipeHead(ctx *PipeHeadContext) {} + +// EnterPipeTail is called when production PipeTail is entered. +func (s *BaseEQLListener) EnterPipeTail(ctx *PipeTailContext) {} + +// ExitPipeTail is called when production PipeTail is exited. +func (s *BaseEQLListener) ExitPipeTail(ctx *PipeTailContext) {} + +// EnterPipeCount is called when production PipeCount is entered. +func (s *BaseEQLListener) EnterPipeCount(ctx *PipeCountContext) {} + +// ExitPipeCount is called when production PipeCount is exited. +func (s *BaseEQLListener) ExitPipeCount(ctx *PipeCountContext) {} + +// EnterPipeUnique is called when production PipeUnique is entered. +func (s *BaseEQLListener) EnterPipeUnique(ctx *PipeUniqueContext) {} + +// ExitPipeUnique is called when production PipeUnique is exited. +func (s *BaseEQLListener) ExitPipeUnique(ctx *PipeUniqueContext) {} + +// EnterPipeFilter is called when production PipeFilter is entered. 
+func (s *BaseEQLListener) EnterPipeFilter(ctx *PipeFilterContext) {} + +// ExitPipeFilter is called when production PipeFilter is exited. +func (s *BaseEQLListener) ExitPipeFilter(ctx *PipeFilterContext) {} + +// EnterPipeSort is called when production PipeSort is entered. +func (s *BaseEQLListener) EnterPipeSort(ctx *PipeSortContext) {} + +// ExitPipeSort is called when production PipeSort is exited. +func (s *BaseEQLListener) ExitPipeSort(ctx *PipeSortContext) {} + // EnterFuncall is called when production funcall is entered. func (s *BaseEQLListener) EnterFuncall(ctx *FuncallContext) {} diff --git a/quesma/eql/parser/eql_base_visitor.go b/quesma/eql/parser/eql_base_visitor.go index c5d389649..ba6fd01ec 100644 --- a/quesma/eql/parser/eql_base_visitor.go +++ b/quesma/eql/parser/eql_base_visitor.go @@ -107,6 +107,30 @@ func (v *BaseEQLVisitor) VisitValueField(ctx *ValueFieldContext) interface{} { return v.VisitChildren(ctx) } +func (v *BaseEQLVisitor) VisitPipeHead(ctx *PipeHeadContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEQLVisitor) VisitPipeTail(ctx *PipeTailContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEQLVisitor) VisitPipeCount(ctx *PipeCountContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEQLVisitor) VisitPipeUnique(ctx *PipeUniqueContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEQLVisitor) VisitPipeFilter(ctx *PipeFilterContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEQLVisitor) VisitPipeSort(ctx *PipeSortContext) interface{} { + return v.VisitChildren(ctx) +} + func (v *BaseEQLVisitor) VisitFuncall(ctx *FuncallContext) interface{} { return v.VisitChildren(ctx) } diff --git a/quesma/eql/parser/eql_lexer.go b/quesma/eql/parser/eql_lexer.go index ce019f909..645ca06e9 100644 --- a/quesma/eql/parser/eql_lexer.go +++ b/quesma/eql/parser/eql_lexer.go @@ -43,29 +43,40 @@ func eqllexerLexerInit() { "DEFAULT_MODE", } staticData.LiteralNames = 
[]string{ - "", "'where'", "'sequence'", "'by'", "'with'", "'maxspan'", "'='", "'['", - "']'", "'sample'", "'not'", "'('", "')'", "'=='", "'!='", "'>'", "'<'", - "'>='", "'<='", "':'", "'like'", "'like~'", "'regex'", "'regex~'", "'in'", - "'in~'", "'and'", "'or'", "','", "'null'", "'*'", "'/'", "'%'", "'+'", - "'-'", "'~'", "'any'", + "", "'|'", "'where'", "'sequence'", "'by'", "'with'", "'maxspan'", "'='", + "'['", "']'", "'sample'", "'not'", "'('", "')'", "'=='", "'!='", "'>'", + "'<'", "'>='", "'<='", "':'", "'like'", "'like~'", "'regex'", "'regex~'", + "'in'", "'in~'", "'and'", "'or'", "'?'", "','", "'null'", "'*'", "'/'", + "'%'", "'+'", "'-'", "'head'", "'tail'", "'count'", "'unique'", "'filter'", + "'sort'", "'add'", "'between'", "'cidrMatch'", "'concat'", "'divide'", + "'endsWith'", "'endsWith~'", "'indexOf'", "'indexOf~'", "'length'", + "'modulo'", "'multiply'", "'number'", "'startsWith'", "'startsWith~'", + "'string'", "'stringContains'", "'stringContains~'", "'substring'", + "'subtract'", "'any'", } staticData.SymbolicNames = []string{ "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", - "", "", "ANY", "MULTILINE_COMMENT", "ONELINE_COMMNET", "BOOLEAN", "INTERVAL", - "NUMBER", "ESC", "STRING", "WS", "ID", + "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", + "", "", "", "", "", "", "", "", "", "", "", "", "ANY", "MULTILINE_COMMENT", + "ONELINE_COMMNET", "BOOLEAN", "INTERVAL", "NUMBER", "ESC", "STRING", + "WS", "ID", } staticData.RuleNames = []string{ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6", "T__7", "T__8", "T__9", "T__10", "T__11", "T__12", "T__13", "T__14", "T__15", "T__16", "T__17", "T__18", "T__19", "T__20", "T__21", "T__22", "T__23", "T__24", "T__25", "T__26", "T__27", "T__28", "T__29", "T__30", "T__31", "T__32", - "T__33", "T__34", "ANY", "MULTILINE_COMMENT", "ONELINE_COMMNET", "BOOLEAN", - "INTERVAL", "NUMBER", "ESC", "STRING", "WS", 
"ID", + "T__33", "T__34", "T__35", "T__36", "T__37", "T__38", "T__39", "T__40", + "T__41", "T__42", "T__43", "T__44", "T__45", "T__46", "T__47", "T__48", + "T__49", "T__50", "T__51", "T__52", "T__53", "T__54", "T__55", "T__56", + "T__57", "T__58", "T__59", "T__60", "T__61", "ANY", "MULTILINE_COMMENT", + "ONELINE_COMMNET", "BOOLEAN", "INTERVAL", "NUMBER", "ESC", "STRING", + "WS", "ID", } staticData.PredictionContextCache = antlr.NewPredictionContextCache() staticData.serializedATN = []int32{ - 4, 0, 45, 314, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, + 4, 0, 72, 616, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, @@ -73,136 +84,269 @@ func eqllexerLexerInit() { 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, - 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 1, 0, 1, 0, 1, 0, 1, 0, 1, - 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, - 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, - 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, - 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, - 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 15, 1, 15, 1, - 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, - 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, - 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, - 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, - 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, - 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 
31, 1, 32, 1, 32, 1, 33, 1, 33, 1, - 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 5, 36, - 227, 8, 36, 10, 36, 12, 36, 230, 9, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, - 36, 1, 37, 1, 37, 1, 37, 1, 37, 5, 37, 241, 8, 37, 10, 37, 12, 37, 244, - 9, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, - 38, 1, 38, 3, 38, 257, 8, 38, 1, 39, 4, 39, 260, 8, 39, 11, 39, 12, 39, - 261, 1, 39, 1, 39, 1, 40, 4, 40, 267, 8, 40, 11, 40, 12, 40, 268, 1, 41, - 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 280, 8, - 42, 10, 42, 12, 42, 283, 9, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, - 5, 42, 291, 8, 42, 10, 42, 12, 42, 294, 9, 42, 1, 42, 1, 42, 1, 42, 3, - 42, 299, 8, 42, 1, 43, 4, 43, 302, 8, 43, 11, 43, 12, 43, 303, 1, 43, 1, - 43, 1, 44, 1, 44, 5, 44, 310, 8, 44, 10, 44, 12, 44, 313, 9, 44, 2, 228, - 292, 0, 45, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, - 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, - 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, - 28, 57, 29, 59, 30, 61, 31, 63, 32, 65, 33, 67, 34, 69, 35, 71, 36, 73, - 37, 75, 38, 77, 39, 79, 40, 81, 41, 83, 42, 85, 43, 87, 44, 89, 45, 1, - 0, 7, 2, 0, 10, 10, 13, 13, 1, 0, 48, 57, 1, 0, 97, 122, 2, 0, 34, 34, - 92, 92, 3, 0, 9, 10, 12, 13, 32, 32, 3, 0, 65, 90, 95, 95, 97, 122, 5, - 0, 45, 46, 48, 57, 65, 90, 95, 95, 97, 122, 325, 0, 1, 1, 0, 0, 0, 0, 3, - 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, - 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, - 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, - 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, - 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, - 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, - 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, - 1, 0, 0, 0, 0, 59, 1, 0, 0, 
0, 0, 61, 1, 0, 0, 0, 0, 63, 1, 0, 0, 0, 0, - 65, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, - 0, 73, 1, 0, 0, 0, 0, 75, 1, 0, 0, 0, 0, 77, 1, 0, 0, 0, 0, 79, 1, 0, 0, - 0, 0, 81, 1, 0, 0, 0, 0, 83, 1, 0, 0, 0, 0, 85, 1, 0, 0, 0, 0, 87, 1, 0, - 0, 0, 0, 89, 1, 0, 0, 0, 1, 91, 1, 0, 0, 0, 3, 97, 1, 0, 0, 0, 5, 106, - 1, 0, 0, 0, 7, 109, 1, 0, 0, 0, 9, 114, 1, 0, 0, 0, 11, 122, 1, 0, 0, 0, - 13, 124, 1, 0, 0, 0, 15, 126, 1, 0, 0, 0, 17, 128, 1, 0, 0, 0, 19, 135, - 1, 0, 0, 0, 21, 139, 1, 0, 0, 0, 23, 141, 1, 0, 0, 0, 25, 143, 1, 0, 0, - 0, 27, 146, 1, 0, 0, 0, 29, 149, 1, 0, 0, 0, 31, 151, 1, 0, 0, 0, 33, 153, - 1, 0, 0, 0, 35, 156, 1, 0, 0, 0, 37, 159, 1, 0, 0, 0, 39, 161, 1, 0, 0, - 0, 41, 166, 1, 0, 0, 0, 43, 172, 1, 0, 0, 0, 45, 178, 1, 0, 0, 0, 47, 185, - 1, 0, 0, 0, 49, 188, 1, 0, 0, 0, 51, 192, 1, 0, 0, 0, 53, 196, 1, 0, 0, - 0, 55, 199, 1, 0, 0, 0, 57, 201, 1, 0, 0, 0, 59, 206, 1, 0, 0, 0, 61, 208, - 1, 0, 0, 0, 63, 210, 1, 0, 0, 0, 65, 212, 1, 0, 0, 0, 67, 214, 1, 0, 0, - 0, 69, 216, 1, 0, 0, 0, 71, 218, 1, 0, 0, 0, 73, 222, 1, 0, 0, 0, 75, 236, - 1, 0, 0, 0, 77, 256, 1, 0, 0, 0, 79, 259, 1, 0, 0, 0, 81, 266, 1, 0, 0, - 0, 83, 270, 1, 0, 0, 0, 85, 298, 1, 0, 0, 0, 87, 301, 1, 0, 0, 0, 89, 307, - 1, 0, 0, 0, 91, 92, 5, 119, 0, 0, 92, 93, 5, 104, 0, 0, 93, 94, 5, 101, - 0, 0, 94, 95, 5, 114, 0, 0, 95, 96, 5, 101, 0, 0, 96, 2, 1, 0, 0, 0, 97, - 98, 5, 115, 0, 0, 98, 99, 5, 101, 0, 0, 99, 100, 5, 113, 0, 0, 100, 101, - 5, 117, 0, 0, 101, 102, 5, 101, 0, 0, 102, 103, 5, 110, 0, 0, 103, 104, - 5, 99, 0, 0, 104, 105, 5, 101, 0, 0, 105, 4, 1, 0, 0, 0, 106, 107, 5, 98, - 0, 0, 107, 108, 5, 121, 0, 0, 108, 6, 1, 0, 0, 0, 109, 110, 5, 119, 0, - 0, 110, 111, 5, 105, 0, 0, 111, 112, 5, 116, 0, 0, 112, 113, 5, 104, 0, - 0, 113, 8, 1, 0, 0, 0, 114, 115, 5, 109, 0, 0, 115, 116, 5, 97, 0, 0, 116, - 117, 5, 120, 0, 0, 117, 118, 5, 115, 0, 0, 118, 119, 5, 112, 0, 0, 119, - 120, 5, 97, 0, 0, 120, 121, 5, 110, 0, 0, 121, 10, 1, 0, 0, 0, 122, 123, - 5, 
61, 0, 0, 123, 12, 1, 0, 0, 0, 124, 125, 5, 91, 0, 0, 125, 14, 1, 0, - 0, 0, 126, 127, 5, 93, 0, 0, 127, 16, 1, 0, 0, 0, 128, 129, 5, 115, 0, - 0, 129, 130, 5, 97, 0, 0, 130, 131, 5, 109, 0, 0, 131, 132, 5, 112, 0, - 0, 132, 133, 5, 108, 0, 0, 133, 134, 5, 101, 0, 0, 134, 18, 1, 0, 0, 0, - 135, 136, 5, 110, 0, 0, 136, 137, 5, 111, 0, 0, 137, 138, 5, 116, 0, 0, - 138, 20, 1, 0, 0, 0, 139, 140, 5, 40, 0, 0, 140, 22, 1, 0, 0, 0, 141, 142, - 5, 41, 0, 0, 142, 24, 1, 0, 0, 0, 143, 144, 5, 61, 0, 0, 144, 145, 5, 61, - 0, 0, 145, 26, 1, 0, 0, 0, 146, 147, 5, 33, 0, 0, 147, 148, 5, 61, 0, 0, - 148, 28, 1, 0, 0, 0, 149, 150, 5, 62, 0, 0, 150, 30, 1, 0, 0, 0, 151, 152, - 5, 60, 0, 0, 152, 32, 1, 0, 0, 0, 153, 154, 5, 62, 0, 0, 154, 155, 5, 61, - 0, 0, 155, 34, 1, 0, 0, 0, 156, 157, 5, 60, 0, 0, 157, 158, 5, 61, 0, 0, - 158, 36, 1, 0, 0, 0, 159, 160, 5, 58, 0, 0, 160, 38, 1, 0, 0, 0, 161, 162, - 5, 108, 0, 0, 162, 163, 5, 105, 0, 0, 163, 164, 5, 107, 0, 0, 164, 165, - 5, 101, 0, 0, 165, 40, 1, 0, 0, 0, 166, 167, 5, 108, 0, 0, 167, 168, 5, - 105, 0, 0, 168, 169, 5, 107, 0, 0, 169, 170, 5, 101, 0, 0, 170, 171, 5, - 126, 0, 0, 171, 42, 1, 0, 0, 0, 172, 173, 5, 114, 0, 0, 173, 174, 5, 101, - 0, 0, 174, 175, 5, 103, 0, 0, 175, 176, 5, 101, 0, 0, 176, 177, 5, 120, - 0, 0, 177, 44, 1, 0, 0, 0, 178, 179, 5, 114, 0, 0, 179, 180, 5, 101, 0, - 0, 180, 181, 5, 103, 0, 0, 181, 182, 5, 101, 0, 0, 182, 183, 5, 120, 0, - 0, 183, 184, 5, 126, 0, 0, 184, 46, 1, 0, 0, 0, 185, 186, 5, 105, 0, 0, - 186, 187, 5, 110, 0, 0, 187, 48, 1, 0, 0, 0, 188, 189, 5, 105, 0, 0, 189, - 190, 5, 110, 0, 0, 190, 191, 5, 126, 0, 0, 191, 50, 1, 0, 0, 0, 192, 193, - 5, 97, 0, 0, 193, 194, 5, 110, 0, 0, 194, 195, 5, 100, 0, 0, 195, 52, 1, - 0, 0, 0, 196, 197, 5, 111, 0, 0, 197, 198, 5, 114, 0, 0, 198, 54, 1, 0, - 0, 0, 199, 200, 5, 44, 0, 0, 200, 56, 1, 0, 0, 0, 201, 202, 5, 110, 0, - 0, 202, 203, 5, 117, 0, 0, 203, 204, 5, 108, 0, 0, 204, 205, 5, 108, 0, - 0, 205, 58, 1, 0, 0, 0, 206, 207, 5, 42, 0, 0, 207, 60, 1, 
0, 0, 0, 208, - 209, 5, 47, 0, 0, 209, 62, 1, 0, 0, 0, 210, 211, 5, 37, 0, 0, 211, 64, - 1, 0, 0, 0, 212, 213, 5, 43, 0, 0, 213, 66, 1, 0, 0, 0, 214, 215, 5, 45, - 0, 0, 215, 68, 1, 0, 0, 0, 216, 217, 5, 126, 0, 0, 217, 70, 1, 0, 0, 0, - 218, 219, 5, 97, 0, 0, 219, 220, 5, 110, 0, 0, 220, 221, 5, 121, 0, 0, - 221, 72, 1, 0, 0, 0, 222, 223, 5, 47, 0, 0, 223, 224, 5, 42, 0, 0, 224, - 228, 1, 0, 0, 0, 225, 227, 9, 0, 0, 0, 226, 225, 1, 0, 0, 0, 227, 230, - 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 228, 226, 1, 0, 0, 0, 229, 231, 1, 0, - 0, 0, 230, 228, 1, 0, 0, 0, 231, 232, 5, 42, 0, 0, 232, 233, 5, 47, 0, - 0, 233, 234, 1, 0, 0, 0, 234, 235, 6, 36, 0, 0, 235, 74, 1, 0, 0, 0, 236, - 237, 5, 47, 0, 0, 237, 238, 5, 47, 0, 0, 238, 242, 1, 0, 0, 0, 239, 241, - 8, 0, 0, 0, 240, 239, 1, 0, 0, 0, 241, 244, 1, 0, 0, 0, 242, 240, 1, 0, - 0, 0, 242, 243, 1, 0, 0, 0, 243, 245, 1, 0, 0, 0, 244, 242, 1, 0, 0, 0, - 245, 246, 6, 37, 0, 0, 246, 76, 1, 0, 0, 0, 247, 248, 5, 116, 0, 0, 248, - 249, 5, 114, 0, 0, 249, 250, 5, 117, 0, 0, 250, 257, 5, 101, 0, 0, 251, - 252, 5, 102, 0, 0, 252, 253, 5, 97, 0, 0, 253, 254, 5, 108, 0, 0, 254, - 255, 5, 115, 0, 0, 255, 257, 5, 101, 0, 0, 256, 247, 1, 0, 0, 0, 256, 251, - 1, 0, 0, 0, 257, 78, 1, 0, 0, 0, 258, 260, 7, 1, 0, 0, 259, 258, 1, 0, - 0, 0, 260, 261, 1, 0, 0, 0, 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, - 262, 263, 1, 0, 0, 0, 263, 264, 7, 2, 0, 0, 264, 80, 1, 0, 0, 0, 265, 267, - 7, 1, 0, 0, 266, 265, 1, 0, 0, 0, 267, 268, 1, 0, 0, 0, 268, 266, 1, 0, - 0, 0, 268, 269, 1, 0, 0, 0, 269, 82, 1, 0, 0, 0, 270, 271, 5, 92, 0, 0, - 271, 272, 9, 0, 0, 0, 272, 84, 1, 0, 0, 0, 273, 281, 5, 34, 0, 0, 274, - 275, 5, 92, 0, 0, 275, 280, 9, 0, 0, 0, 276, 277, 5, 34, 0, 0, 277, 280, - 5, 34, 0, 0, 278, 280, 8, 3, 0, 0, 279, 274, 1, 0, 0, 0, 279, 276, 1, 0, - 0, 0, 279, 278, 1, 0, 0, 0, 280, 283, 1, 0, 0, 0, 281, 279, 1, 0, 0, 0, - 281, 282, 1, 0, 0, 0, 282, 284, 1, 0, 0, 0, 283, 281, 1, 0, 0, 0, 284, - 299, 5, 34, 0, 0, 285, 286, 5, 34, 0, 0, 286, 287, 5, 
34, 0, 0, 287, 288, - 5, 34, 0, 0, 288, 292, 1, 0, 0, 0, 289, 291, 9, 0, 0, 0, 290, 289, 1, 0, - 0, 0, 291, 294, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, - 293, 295, 1, 0, 0, 0, 294, 292, 1, 0, 0, 0, 295, 296, 5, 34, 0, 0, 296, - 297, 5, 34, 0, 0, 297, 299, 5, 34, 0, 0, 298, 273, 1, 0, 0, 0, 298, 285, - 1, 0, 0, 0, 299, 86, 1, 0, 0, 0, 300, 302, 7, 4, 0, 0, 301, 300, 1, 0, - 0, 0, 302, 303, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 303, 304, 1, 0, 0, 0, - 304, 305, 1, 0, 0, 0, 305, 306, 6, 43, 1, 0, 306, 88, 1, 0, 0, 0, 307, - 311, 7, 5, 0, 0, 308, 310, 7, 6, 0, 0, 309, 308, 1, 0, 0, 0, 310, 313, - 1, 0, 0, 0, 311, 309, 1, 0, 0, 0, 311, 312, 1, 0, 0, 0, 312, 90, 1, 0, - 0, 0, 313, 311, 1, 0, 0, 0, 12, 0, 228, 242, 256, 261, 268, 279, 281, 292, - 298, 303, 311, 2, 0, 1, 0, 6, 0, 0, + 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, + 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, + 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, + 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, + 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, + 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 1, 0, 1, 0, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, + 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, + 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, + 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, + 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, + 15, 1, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 19, + 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, + 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, + 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, + 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 
+ 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 1, + 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, + 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, + 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, + 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, + 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, + 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, + 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, + 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, + 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, + 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, + 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, + 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, + 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, + 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, + 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, + 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, + 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, + 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, + 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, + 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, + 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, + 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 5, + 63, 500, 8, 63, 10, 63, 12, 63, 503, 9, 63, 1, 63, 1, 63, 1, 63, 1, 63, + 1, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 514, 8, 64, 10, 64, 12, 64, 517, + 9, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, + 65, 1, 65, 3, 65, 530, 8, 65, 1, 
66, 4, 66, 533, 8, 66, 11, 66, 12, 66, + 534, 1, 66, 1, 66, 1, 67, 1, 67, 3, 67, 541, 8, 67, 1, 67, 4, 67, 544, + 8, 67, 11, 67, 12, 67, 545, 1, 67, 5, 67, 549, 8, 67, 10, 67, 12, 67, 552, + 9, 67, 1, 67, 1, 67, 4, 67, 556, 8, 67, 11, 67, 12, 67, 557, 3, 67, 560, + 8, 67, 1, 67, 1, 67, 3, 67, 564, 8, 67, 1, 67, 4, 67, 567, 8, 67, 11, 67, + 12, 67, 568, 3, 67, 571, 8, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, + 1, 69, 1, 69, 1, 69, 5, 69, 582, 8, 69, 10, 69, 12, 69, 585, 9, 69, 1, + 69, 1, 69, 1, 69, 1, 69, 1, 69, 1, 69, 5, 69, 593, 8, 69, 10, 69, 12, 69, + 596, 9, 69, 1, 69, 1, 69, 1, 69, 3, 69, 601, 8, 69, 1, 70, 4, 70, 604, + 8, 70, 11, 70, 12, 70, 605, 1, 70, 1, 70, 1, 71, 1, 71, 5, 71, 612, 8, + 71, 10, 71, 12, 71, 615, 9, 71, 2, 501, 594, 0, 72, 1, 1, 3, 2, 5, 3, 7, + 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, + 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, + 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 28, 57, 29, 59, 30, 61, 31, 63, + 32, 65, 33, 67, 34, 69, 35, 71, 36, 73, 37, 75, 38, 77, 39, 79, 40, 81, + 41, 83, 42, 85, 43, 87, 44, 89, 45, 91, 46, 93, 47, 95, 48, 97, 49, 99, + 50, 101, 51, 103, 52, 105, 53, 107, 54, 109, 55, 111, 56, 113, 57, 115, + 58, 117, 59, 119, 60, 121, 61, 123, 62, 125, 63, 127, 64, 129, 65, 131, + 66, 133, 67, 135, 68, 137, 69, 139, 70, 141, 71, 143, 72, 1, 0, 9, 2, 0, + 10, 10, 13, 13, 1, 0, 48, 57, 1, 0, 97, 122, 2, 0, 69, 69, 101, 101, 2, + 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 3, 0, 9, 10, 12, 13, 32, 32, 3, + 0, 65, 90, 95, 95, 97, 122, 5, 0, 45, 46, 48, 57, 65, 90, 95, 95, 97, 122, + 634, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, + 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, + 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, + 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, + 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, + 0, 39, 1, 0, 0, 0, 
0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, + 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, + 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, + 0, 0, 0, 0, 63, 1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, + 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 0, 73, 1, 0, 0, 0, 0, 75, 1, 0, 0, 0, 0, + 77, 1, 0, 0, 0, 0, 79, 1, 0, 0, 0, 0, 81, 1, 0, 0, 0, 0, 83, 1, 0, 0, 0, + 0, 85, 1, 0, 0, 0, 0, 87, 1, 0, 0, 0, 0, 89, 1, 0, 0, 0, 0, 91, 1, 0, 0, + 0, 0, 93, 1, 0, 0, 0, 0, 95, 1, 0, 0, 0, 0, 97, 1, 0, 0, 0, 0, 99, 1, 0, + 0, 0, 0, 101, 1, 0, 0, 0, 0, 103, 1, 0, 0, 0, 0, 105, 1, 0, 0, 0, 0, 107, + 1, 0, 0, 0, 0, 109, 1, 0, 0, 0, 0, 111, 1, 0, 0, 0, 0, 113, 1, 0, 0, 0, + 0, 115, 1, 0, 0, 0, 0, 117, 1, 0, 0, 0, 0, 119, 1, 0, 0, 0, 0, 121, 1, + 0, 0, 0, 0, 123, 1, 0, 0, 0, 0, 125, 1, 0, 0, 0, 0, 127, 1, 0, 0, 0, 0, + 129, 1, 0, 0, 0, 0, 131, 1, 0, 0, 0, 0, 133, 1, 0, 0, 0, 0, 135, 1, 0, + 0, 0, 0, 137, 1, 0, 0, 0, 0, 139, 1, 0, 0, 0, 0, 141, 1, 0, 0, 0, 0, 143, + 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 3, 147, 1, 0, 0, 0, 5, 153, 1, 0, 0, 0, + 7, 162, 1, 0, 0, 0, 9, 165, 1, 0, 0, 0, 11, 170, 1, 0, 0, 0, 13, 178, 1, + 0, 0, 0, 15, 180, 1, 0, 0, 0, 17, 182, 1, 0, 0, 0, 19, 184, 1, 0, 0, 0, + 21, 191, 1, 0, 0, 0, 23, 195, 1, 0, 0, 0, 25, 197, 1, 0, 0, 0, 27, 199, + 1, 0, 0, 0, 29, 202, 1, 0, 0, 0, 31, 205, 1, 0, 0, 0, 33, 207, 1, 0, 0, + 0, 35, 209, 1, 0, 0, 0, 37, 212, 1, 0, 0, 0, 39, 215, 1, 0, 0, 0, 41, 217, + 1, 0, 0, 0, 43, 222, 1, 0, 0, 0, 45, 228, 1, 0, 0, 0, 47, 234, 1, 0, 0, + 0, 49, 241, 1, 0, 0, 0, 51, 244, 1, 0, 0, 0, 53, 248, 1, 0, 0, 0, 55, 252, + 1, 0, 0, 0, 57, 255, 1, 0, 0, 0, 59, 257, 1, 0, 0, 0, 61, 259, 1, 0, 0, + 0, 63, 264, 1, 0, 0, 0, 65, 266, 1, 0, 0, 0, 67, 268, 1, 0, 0, 0, 69, 270, + 1, 0, 0, 0, 71, 272, 1, 0, 0, 0, 73, 274, 1, 0, 0, 0, 75, 279, 1, 0, 0, + 0, 77, 284, 1, 0, 0, 0, 79, 290, 1, 0, 0, 0, 81, 297, 1, 0, 0, 0, 83, 304, + 1, 0, 0, 0, 85, 309, 1, 0, 0, 0, 87, 313, 1, 0, 0, 0, 89, 321, 1, 0, 0, 
+ 0, 91, 331, 1, 0, 0, 0, 93, 338, 1, 0, 0, 0, 95, 345, 1, 0, 0, 0, 97, 354, + 1, 0, 0, 0, 99, 364, 1, 0, 0, 0, 101, 372, 1, 0, 0, 0, 103, 381, 1, 0, + 0, 0, 105, 388, 1, 0, 0, 0, 107, 395, 1, 0, 0, 0, 109, 404, 1, 0, 0, 0, + 111, 411, 1, 0, 0, 0, 113, 422, 1, 0, 0, 0, 115, 434, 1, 0, 0, 0, 117, + 441, 1, 0, 0, 0, 119, 456, 1, 0, 0, 0, 121, 472, 1, 0, 0, 0, 123, 482, + 1, 0, 0, 0, 125, 491, 1, 0, 0, 0, 127, 495, 1, 0, 0, 0, 129, 509, 1, 0, + 0, 0, 131, 529, 1, 0, 0, 0, 133, 532, 1, 0, 0, 0, 135, 540, 1, 0, 0, 0, + 137, 572, 1, 0, 0, 0, 139, 600, 1, 0, 0, 0, 141, 603, 1, 0, 0, 0, 143, + 609, 1, 0, 0, 0, 145, 146, 5, 124, 0, 0, 146, 2, 1, 0, 0, 0, 147, 148, + 5, 119, 0, 0, 148, 149, 5, 104, 0, 0, 149, 150, 5, 101, 0, 0, 150, 151, + 5, 114, 0, 0, 151, 152, 5, 101, 0, 0, 152, 4, 1, 0, 0, 0, 153, 154, 5, + 115, 0, 0, 154, 155, 5, 101, 0, 0, 155, 156, 5, 113, 0, 0, 156, 157, 5, + 117, 0, 0, 157, 158, 5, 101, 0, 0, 158, 159, 5, 110, 0, 0, 159, 160, 5, + 99, 0, 0, 160, 161, 5, 101, 0, 0, 161, 6, 1, 0, 0, 0, 162, 163, 5, 98, + 0, 0, 163, 164, 5, 121, 0, 0, 164, 8, 1, 0, 0, 0, 165, 166, 5, 119, 0, + 0, 166, 167, 5, 105, 0, 0, 167, 168, 5, 116, 0, 0, 168, 169, 5, 104, 0, + 0, 169, 10, 1, 0, 0, 0, 170, 171, 5, 109, 0, 0, 171, 172, 5, 97, 0, 0, + 172, 173, 5, 120, 0, 0, 173, 174, 5, 115, 0, 0, 174, 175, 5, 112, 0, 0, + 175, 176, 5, 97, 0, 0, 176, 177, 5, 110, 0, 0, 177, 12, 1, 0, 0, 0, 178, + 179, 5, 61, 0, 0, 179, 14, 1, 0, 0, 0, 180, 181, 5, 91, 0, 0, 181, 16, + 1, 0, 0, 0, 182, 183, 5, 93, 0, 0, 183, 18, 1, 0, 0, 0, 184, 185, 5, 115, + 0, 0, 185, 186, 5, 97, 0, 0, 186, 187, 5, 109, 0, 0, 187, 188, 5, 112, + 0, 0, 188, 189, 5, 108, 0, 0, 189, 190, 5, 101, 0, 0, 190, 20, 1, 0, 0, + 0, 191, 192, 5, 110, 0, 0, 192, 193, 5, 111, 0, 0, 193, 194, 5, 116, 0, + 0, 194, 22, 1, 0, 0, 0, 195, 196, 5, 40, 0, 0, 196, 24, 1, 0, 0, 0, 197, + 198, 5, 41, 0, 0, 198, 26, 1, 0, 0, 0, 199, 200, 5, 61, 0, 0, 200, 201, + 5, 61, 0, 0, 201, 28, 1, 0, 0, 0, 202, 203, 5, 33, 0, 0, 203, 204, 5, 61, + 
0, 0, 204, 30, 1, 0, 0, 0, 205, 206, 5, 62, 0, 0, 206, 32, 1, 0, 0, 0, + 207, 208, 5, 60, 0, 0, 208, 34, 1, 0, 0, 0, 209, 210, 5, 62, 0, 0, 210, + 211, 5, 61, 0, 0, 211, 36, 1, 0, 0, 0, 212, 213, 5, 60, 0, 0, 213, 214, + 5, 61, 0, 0, 214, 38, 1, 0, 0, 0, 215, 216, 5, 58, 0, 0, 216, 40, 1, 0, + 0, 0, 217, 218, 5, 108, 0, 0, 218, 219, 5, 105, 0, 0, 219, 220, 5, 107, + 0, 0, 220, 221, 5, 101, 0, 0, 221, 42, 1, 0, 0, 0, 222, 223, 5, 108, 0, + 0, 223, 224, 5, 105, 0, 0, 224, 225, 5, 107, 0, 0, 225, 226, 5, 101, 0, + 0, 226, 227, 5, 126, 0, 0, 227, 44, 1, 0, 0, 0, 228, 229, 5, 114, 0, 0, + 229, 230, 5, 101, 0, 0, 230, 231, 5, 103, 0, 0, 231, 232, 5, 101, 0, 0, + 232, 233, 5, 120, 0, 0, 233, 46, 1, 0, 0, 0, 234, 235, 5, 114, 0, 0, 235, + 236, 5, 101, 0, 0, 236, 237, 5, 103, 0, 0, 237, 238, 5, 101, 0, 0, 238, + 239, 5, 120, 0, 0, 239, 240, 5, 126, 0, 0, 240, 48, 1, 0, 0, 0, 241, 242, + 5, 105, 0, 0, 242, 243, 5, 110, 0, 0, 243, 50, 1, 0, 0, 0, 244, 245, 5, + 105, 0, 0, 245, 246, 5, 110, 0, 0, 246, 247, 5, 126, 0, 0, 247, 52, 1, + 0, 0, 0, 248, 249, 5, 97, 0, 0, 249, 250, 5, 110, 0, 0, 250, 251, 5, 100, + 0, 0, 251, 54, 1, 0, 0, 0, 252, 253, 5, 111, 0, 0, 253, 254, 5, 114, 0, + 0, 254, 56, 1, 0, 0, 0, 255, 256, 5, 63, 0, 0, 256, 58, 1, 0, 0, 0, 257, + 258, 5, 44, 0, 0, 258, 60, 1, 0, 0, 0, 259, 260, 5, 110, 0, 0, 260, 261, + 5, 117, 0, 0, 261, 262, 5, 108, 0, 0, 262, 263, 5, 108, 0, 0, 263, 62, + 1, 0, 0, 0, 264, 265, 5, 42, 0, 0, 265, 64, 1, 0, 0, 0, 266, 267, 5, 47, + 0, 0, 267, 66, 1, 0, 0, 0, 268, 269, 5, 37, 0, 0, 269, 68, 1, 0, 0, 0, + 270, 271, 5, 43, 0, 0, 271, 70, 1, 0, 0, 0, 272, 273, 5, 45, 0, 0, 273, + 72, 1, 0, 0, 0, 274, 275, 5, 104, 0, 0, 275, 276, 5, 101, 0, 0, 276, 277, + 5, 97, 0, 0, 277, 278, 5, 100, 0, 0, 278, 74, 1, 0, 0, 0, 279, 280, 5, + 116, 0, 0, 280, 281, 5, 97, 0, 0, 281, 282, 5, 105, 0, 0, 282, 283, 5, + 108, 0, 0, 283, 76, 1, 0, 0, 0, 284, 285, 5, 99, 0, 0, 285, 286, 5, 111, + 0, 0, 286, 287, 5, 117, 0, 0, 287, 288, 5, 110, 0, 0, 288, 289, 5, 
116, + 0, 0, 289, 78, 1, 0, 0, 0, 290, 291, 5, 117, 0, 0, 291, 292, 5, 110, 0, + 0, 292, 293, 5, 105, 0, 0, 293, 294, 5, 113, 0, 0, 294, 295, 5, 117, 0, + 0, 295, 296, 5, 101, 0, 0, 296, 80, 1, 0, 0, 0, 297, 298, 5, 102, 0, 0, + 298, 299, 5, 105, 0, 0, 299, 300, 5, 108, 0, 0, 300, 301, 5, 116, 0, 0, + 301, 302, 5, 101, 0, 0, 302, 303, 5, 114, 0, 0, 303, 82, 1, 0, 0, 0, 304, + 305, 5, 115, 0, 0, 305, 306, 5, 111, 0, 0, 306, 307, 5, 114, 0, 0, 307, + 308, 5, 116, 0, 0, 308, 84, 1, 0, 0, 0, 309, 310, 5, 97, 0, 0, 310, 311, + 5, 100, 0, 0, 311, 312, 5, 100, 0, 0, 312, 86, 1, 0, 0, 0, 313, 314, 5, + 98, 0, 0, 314, 315, 5, 101, 0, 0, 315, 316, 5, 116, 0, 0, 316, 317, 5, + 119, 0, 0, 317, 318, 5, 101, 0, 0, 318, 319, 5, 101, 0, 0, 319, 320, 5, + 110, 0, 0, 320, 88, 1, 0, 0, 0, 321, 322, 5, 99, 0, 0, 322, 323, 5, 105, + 0, 0, 323, 324, 5, 100, 0, 0, 324, 325, 5, 114, 0, 0, 325, 326, 5, 77, + 0, 0, 326, 327, 5, 97, 0, 0, 327, 328, 5, 116, 0, 0, 328, 329, 5, 99, 0, + 0, 329, 330, 5, 104, 0, 0, 330, 90, 1, 0, 0, 0, 331, 332, 5, 99, 0, 0, + 332, 333, 5, 111, 0, 0, 333, 334, 5, 110, 0, 0, 334, 335, 5, 99, 0, 0, + 335, 336, 5, 97, 0, 0, 336, 337, 5, 116, 0, 0, 337, 92, 1, 0, 0, 0, 338, + 339, 5, 100, 0, 0, 339, 340, 5, 105, 0, 0, 340, 341, 5, 118, 0, 0, 341, + 342, 5, 105, 0, 0, 342, 343, 5, 100, 0, 0, 343, 344, 5, 101, 0, 0, 344, + 94, 1, 0, 0, 0, 345, 346, 5, 101, 0, 0, 346, 347, 5, 110, 0, 0, 347, 348, + 5, 100, 0, 0, 348, 349, 5, 115, 0, 0, 349, 350, 5, 87, 0, 0, 350, 351, + 5, 105, 0, 0, 351, 352, 5, 116, 0, 0, 352, 353, 5, 104, 0, 0, 353, 96, + 1, 0, 0, 0, 354, 355, 5, 101, 0, 0, 355, 356, 5, 110, 0, 0, 356, 357, 5, + 100, 0, 0, 357, 358, 5, 115, 0, 0, 358, 359, 5, 87, 0, 0, 359, 360, 5, + 105, 0, 0, 360, 361, 5, 116, 0, 0, 361, 362, 5, 104, 0, 0, 362, 363, 5, + 126, 0, 0, 363, 98, 1, 0, 0, 0, 364, 365, 5, 105, 0, 0, 365, 366, 5, 110, + 0, 0, 366, 367, 5, 100, 0, 0, 367, 368, 5, 101, 0, 0, 368, 369, 5, 120, + 0, 0, 369, 370, 5, 79, 0, 0, 370, 371, 5, 102, 0, 0, 371, 100, 
1, 0, 0, + 0, 372, 373, 5, 105, 0, 0, 373, 374, 5, 110, 0, 0, 374, 375, 5, 100, 0, + 0, 375, 376, 5, 101, 0, 0, 376, 377, 5, 120, 0, 0, 377, 378, 5, 79, 0, + 0, 378, 379, 5, 102, 0, 0, 379, 380, 5, 126, 0, 0, 380, 102, 1, 0, 0, 0, + 381, 382, 5, 108, 0, 0, 382, 383, 5, 101, 0, 0, 383, 384, 5, 110, 0, 0, + 384, 385, 5, 103, 0, 0, 385, 386, 5, 116, 0, 0, 386, 387, 5, 104, 0, 0, + 387, 104, 1, 0, 0, 0, 388, 389, 5, 109, 0, 0, 389, 390, 5, 111, 0, 0, 390, + 391, 5, 100, 0, 0, 391, 392, 5, 117, 0, 0, 392, 393, 5, 108, 0, 0, 393, + 394, 5, 111, 0, 0, 394, 106, 1, 0, 0, 0, 395, 396, 5, 109, 0, 0, 396, 397, + 5, 117, 0, 0, 397, 398, 5, 108, 0, 0, 398, 399, 5, 116, 0, 0, 399, 400, + 5, 105, 0, 0, 400, 401, 5, 112, 0, 0, 401, 402, 5, 108, 0, 0, 402, 403, + 5, 121, 0, 0, 403, 108, 1, 0, 0, 0, 404, 405, 5, 110, 0, 0, 405, 406, 5, + 117, 0, 0, 406, 407, 5, 109, 0, 0, 407, 408, 5, 98, 0, 0, 408, 409, 5, + 101, 0, 0, 409, 410, 5, 114, 0, 0, 410, 110, 1, 0, 0, 0, 411, 412, 5, 115, + 0, 0, 412, 413, 5, 116, 0, 0, 413, 414, 5, 97, 0, 0, 414, 415, 5, 114, + 0, 0, 415, 416, 5, 116, 0, 0, 416, 417, 5, 115, 0, 0, 417, 418, 5, 87, + 0, 0, 418, 419, 5, 105, 0, 0, 419, 420, 5, 116, 0, 0, 420, 421, 5, 104, + 0, 0, 421, 112, 1, 0, 0, 0, 422, 423, 5, 115, 0, 0, 423, 424, 5, 116, 0, + 0, 424, 425, 5, 97, 0, 0, 425, 426, 5, 114, 0, 0, 426, 427, 5, 116, 0, + 0, 427, 428, 5, 115, 0, 0, 428, 429, 5, 87, 0, 0, 429, 430, 5, 105, 0, + 0, 430, 431, 5, 116, 0, 0, 431, 432, 5, 104, 0, 0, 432, 433, 5, 126, 0, + 0, 433, 114, 1, 0, 0, 0, 434, 435, 5, 115, 0, 0, 435, 436, 5, 116, 0, 0, + 436, 437, 5, 114, 0, 0, 437, 438, 5, 105, 0, 0, 438, 439, 5, 110, 0, 0, + 439, 440, 5, 103, 0, 0, 440, 116, 1, 0, 0, 0, 441, 442, 5, 115, 0, 0, 442, + 443, 5, 116, 0, 0, 443, 444, 5, 114, 0, 0, 444, 445, 5, 105, 0, 0, 445, + 446, 5, 110, 0, 0, 446, 447, 5, 103, 0, 0, 447, 448, 5, 67, 0, 0, 448, + 449, 5, 111, 0, 0, 449, 450, 5, 110, 0, 0, 450, 451, 5, 116, 0, 0, 451, + 452, 5, 97, 0, 0, 452, 453, 5, 105, 0, 0, 453, 454, 5, 
110, 0, 0, 454, + 455, 5, 115, 0, 0, 455, 118, 1, 0, 0, 0, 456, 457, 5, 115, 0, 0, 457, 458, + 5, 116, 0, 0, 458, 459, 5, 114, 0, 0, 459, 460, 5, 105, 0, 0, 460, 461, + 5, 110, 0, 0, 461, 462, 5, 103, 0, 0, 462, 463, 5, 67, 0, 0, 463, 464, + 5, 111, 0, 0, 464, 465, 5, 110, 0, 0, 465, 466, 5, 116, 0, 0, 466, 467, + 5, 97, 0, 0, 467, 468, 5, 105, 0, 0, 468, 469, 5, 110, 0, 0, 469, 470, + 5, 115, 0, 0, 470, 471, 5, 126, 0, 0, 471, 120, 1, 0, 0, 0, 472, 473, 5, + 115, 0, 0, 473, 474, 5, 117, 0, 0, 474, 475, 5, 98, 0, 0, 475, 476, 5, + 115, 0, 0, 476, 477, 5, 116, 0, 0, 477, 478, 5, 114, 0, 0, 478, 479, 5, + 105, 0, 0, 479, 480, 5, 110, 0, 0, 480, 481, 5, 103, 0, 0, 481, 122, 1, + 0, 0, 0, 482, 483, 5, 115, 0, 0, 483, 484, 5, 117, 0, 0, 484, 485, 5, 98, + 0, 0, 485, 486, 5, 116, 0, 0, 486, 487, 5, 114, 0, 0, 487, 488, 5, 97, + 0, 0, 488, 489, 5, 99, 0, 0, 489, 490, 5, 116, 0, 0, 490, 124, 1, 0, 0, + 0, 491, 492, 5, 97, 0, 0, 492, 493, 5, 110, 0, 0, 493, 494, 5, 121, 0, + 0, 494, 126, 1, 0, 0, 0, 495, 496, 5, 47, 0, 0, 496, 497, 5, 42, 0, 0, + 497, 501, 1, 0, 0, 0, 498, 500, 9, 0, 0, 0, 499, 498, 1, 0, 0, 0, 500, + 503, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 501, 499, 1, 0, 0, 0, 502, 504, + 1, 0, 0, 0, 503, 501, 1, 0, 0, 0, 504, 505, 5, 42, 0, 0, 505, 506, 5, 47, + 0, 0, 506, 507, 1, 0, 0, 0, 507, 508, 6, 63, 0, 0, 508, 128, 1, 0, 0, 0, + 509, 510, 5, 47, 0, 0, 510, 511, 5, 47, 0, 0, 511, 515, 1, 0, 0, 0, 512, + 514, 8, 0, 0, 0, 513, 512, 1, 0, 0, 0, 514, 517, 1, 0, 0, 0, 515, 513, + 1, 0, 0, 0, 515, 516, 1, 0, 0, 0, 516, 518, 1, 0, 0, 0, 517, 515, 1, 0, + 0, 0, 518, 519, 6, 64, 0, 0, 519, 130, 1, 0, 0, 0, 520, 521, 5, 116, 0, + 0, 521, 522, 5, 114, 0, 0, 522, 523, 5, 117, 0, 0, 523, 530, 5, 101, 0, + 0, 524, 525, 5, 102, 0, 0, 525, 526, 5, 97, 0, 0, 526, 527, 5, 108, 0, + 0, 527, 528, 5, 115, 0, 0, 528, 530, 5, 101, 0, 0, 529, 520, 1, 0, 0, 0, + 529, 524, 1, 0, 0, 0, 530, 132, 1, 0, 0, 0, 531, 533, 7, 1, 0, 0, 532, + 531, 1, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 532, 1, 0, 0, 
0, 534, 535, + 1, 0, 0, 0, 535, 536, 1, 0, 0, 0, 536, 537, 7, 2, 0, 0, 537, 134, 1, 0, + 0, 0, 538, 541, 5, 45, 0, 0, 539, 541, 1, 0, 0, 0, 540, 538, 1, 0, 0, 0, + 540, 539, 1, 0, 0, 0, 541, 559, 1, 0, 0, 0, 542, 544, 7, 1, 0, 0, 543, + 542, 1, 0, 0, 0, 544, 545, 1, 0, 0, 0, 545, 543, 1, 0, 0, 0, 545, 546, + 1, 0, 0, 0, 546, 560, 1, 0, 0, 0, 547, 549, 7, 1, 0, 0, 548, 547, 1, 0, + 0, 0, 549, 552, 1, 0, 0, 0, 550, 548, 1, 0, 0, 0, 550, 551, 1, 0, 0, 0, + 551, 553, 1, 0, 0, 0, 552, 550, 1, 0, 0, 0, 553, 555, 5, 46, 0, 0, 554, + 556, 7, 1, 0, 0, 555, 554, 1, 0, 0, 0, 556, 557, 1, 0, 0, 0, 557, 555, + 1, 0, 0, 0, 557, 558, 1, 0, 0, 0, 558, 560, 1, 0, 0, 0, 559, 543, 1, 0, + 0, 0, 559, 550, 1, 0, 0, 0, 560, 570, 1, 0, 0, 0, 561, 563, 7, 3, 0, 0, + 562, 564, 7, 4, 0, 0, 563, 562, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, + 566, 1, 0, 0, 0, 565, 567, 7, 1, 0, 0, 566, 565, 1, 0, 0, 0, 567, 568, + 1, 0, 0, 0, 568, 566, 1, 0, 0, 0, 568, 569, 1, 0, 0, 0, 569, 571, 1, 0, + 0, 0, 570, 561, 1, 0, 0, 0, 570, 571, 1, 0, 0, 0, 571, 136, 1, 0, 0, 0, + 572, 573, 5, 92, 0, 0, 573, 574, 9, 0, 0, 0, 574, 138, 1, 0, 0, 0, 575, + 583, 5, 34, 0, 0, 576, 577, 5, 92, 0, 0, 577, 582, 9, 0, 0, 0, 578, 579, + 5, 34, 0, 0, 579, 582, 5, 34, 0, 0, 580, 582, 8, 5, 0, 0, 581, 576, 1, + 0, 0, 0, 581, 578, 1, 0, 0, 0, 581, 580, 1, 0, 0, 0, 582, 585, 1, 0, 0, + 0, 583, 581, 1, 0, 0, 0, 583, 584, 1, 0, 0, 0, 584, 586, 1, 0, 0, 0, 585, + 583, 1, 0, 0, 0, 586, 601, 5, 34, 0, 0, 587, 588, 5, 34, 0, 0, 588, 589, + 5, 34, 0, 0, 589, 590, 5, 34, 0, 0, 590, 594, 1, 0, 0, 0, 591, 593, 9, + 0, 0, 0, 592, 591, 1, 0, 0, 0, 593, 596, 1, 0, 0, 0, 594, 595, 1, 0, 0, + 0, 594, 592, 1, 0, 0, 0, 595, 597, 1, 0, 0, 0, 596, 594, 1, 0, 0, 0, 597, + 598, 5, 34, 0, 0, 598, 599, 5, 34, 0, 0, 599, 601, 5, 34, 0, 0, 600, 575, + 1, 0, 0, 0, 600, 587, 1, 0, 0, 0, 601, 140, 1, 0, 0, 0, 602, 604, 7, 6, + 0, 0, 603, 602, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 605, 603, 1, 0, 0, 0, + 605, 606, 1, 0, 0, 0, 606, 607, 1, 0, 0, 0, 607, 608, 6, 
70, 1, 0, 608, + 142, 1, 0, 0, 0, 609, 613, 7, 7, 0, 0, 610, 612, 7, 8, 0, 0, 611, 610, + 1, 0, 0, 0, 612, 615, 1, 0, 0, 0, 613, 611, 1, 0, 0, 0, 613, 614, 1, 0, + 0, 0, 614, 144, 1, 0, 0, 0, 615, 613, 1, 0, 0, 0, 19, 0, 501, 515, 529, + 534, 540, 545, 550, 557, 559, 563, 568, 570, 581, 583, 594, 600, 605, 613, + 2, 0, 1, 0, 6, 0, 0, } deserializer := antlr.NewATNDeserializer(nil) staticData.atn = deserializer.Deserialize(staticData.serializedATN) @@ -278,14 +422,41 @@ const ( EQLLexerT__32 = 33 EQLLexerT__33 = 34 EQLLexerT__34 = 35 - EQLLexerANY = 36 - EQLLexerMULTILINE_COMMENT = 37 - EQLLexerONELINE_COMMNET = 38 - EQLLexerBOOLEAN = 39 - EQLLexerINTERVAL = 40 - EQLLexerNUMBER = 41 - EQLLexerESC = 42 - EQLLexerSTRING = 43 - EQLLexerWS = 44 - EQLLexerID = 45 + EQLLexerT__35 = 36 + EQLLexerT__36 = 37 + EQLLexerT__37 = 38 + EQLLexerT__38 = 39 + EQLLexerT__39 = 40 + EQLLexerT__40 = 41 + EQLLexerT__41 = 42 + EQLLexerT__42 = 43 + EQLLexerT__43 = 44 + EQLLexerT__44 = 45 + EQLLexerT__45 = 46 + EQLLexerT__46 = 47 + EQLLexerT__47 = 48 + EQLLexerT__48 = 49 + EQLLexerT__49 = 50 + EQLLexerT__50 = 51 + EQLLexerT__51 = 52 + EQLLexerT__52 = 53 + EQLLexerT__53 = 54 + EQLLexerT__54 = 55 + EQLLexerT__55 = 56 + EQLLexerT__56 = 57 + EQLLexerT__57 = 58 + EQLLexerT__58 = 59 + EQLLexerT__59 = 60 + EQLLexerT__60 = 61 + EQLLexerT__61 = 62 + EQLLexerANY = 63 + EQLLexerMULTILINE_COMMENT = 64 + EQLLexerONELINE_COMMNET = 65 + EQLLexerBOOLEAN = 66 + EQLLexerINTERVAL = 67 + EQLLexerNUMBER = 68 + EQLLexerESC = 69 + EQLLexerSTRING = 70 + EQLLexerWS = 71 + EQLLexerID = 72 ) diff --git a/quesma/eql/parser/eql_listener.go b/quesma/eql/parser/eql_listener.go index 4aa487e37..e22f6655d 100644 --- a/quesma/eql/parser/eql_listener.go +++ b/quesma/eql/parser/eql_listener.go @@ -82,6 +82,24 @@ type EQLListener interface { // EnterValueField is called when entering the ValueField production. EnterValueField(c *ValueFieldContext) + // EnterPipeHead is called when entering the PipeHead production. 
+ EnterPipeHead(c *PipeHeadContext) + + // EnterPipeTail is called when entering the PipeTail production. + EnterPipeTail(c *PipeTailContext) + + // EnterPipeCount is called when entering the PipeCount production. + EnterPipeCount(c *PipeCountContext) + + // EnterPipeUnique is called when entering the PipeUnique production. + EnterPipeUnique(c *PipeUniqueContext) + + // EnterPipeFilter is called when entering the PipeFilter production. + EnterPipeFilter(c *PipeFilterContext) + + // EnterPipeSort is called when entering the PipeSort production. + EnterPipeSort(c *PipeSortContext) + // EnterFuncall is called when entering the funcall production. EnterFuncall(c *FuncallContext) @@ -166,6 +184,24 @@ type EQLListener interface { // ExitValueField is called when exiting the ValueField production. ExitValueField(c *ValueFieldContext) + // ExitPipeHead is called when exiting the PipeHead production. + ExitPipeHead(c *PipeHeadContext) + + // ExitPipeTail is called when exiting the PipeTail production. + ExitPipeTail(c *PipeTailContext) + + // ExitPipeCount is called when exiting the PipeCount production. + ExitPipeCount(c *PipeCountContext) + + // ExitPipeUnique is called when exiting the PipeUnique production. + ExitPipeUnique(c *PipeUniqueContext) + + // ExitPipeFilter is called when exiting the PipeFilter production. + ExitPipeFilter(c *PipeFilterContext) + + // ExitPipeSort is called when exiting the PipeSort production. + ExitPipeSort(c *PipeSortContext) + // ExitFuncall is called when exiting the funcall production. 
ExitFuncall(c *FuncallContext) diff --git a/quesma/eql/parser/eql_parser.go b/quesma/eql/parser/eql_parser.go index 2877d12b8..3fc878fa4 100644 --- a/quesma/eql/parser/eql_parser.go +++ b/quesma/eql/parser/eql_parser.go @@ -32,100 +32,118 @@ var EQLParserStaticData struct { func eqlParserInit() { staticData := &EQLParserStaticData staticData.LiteralNames = []string{ - "", "'where'", "'sequence'", "'by'", "'with'", "'maxspan'", "'='", "'['", - "']'", "'sample'", "'not'", "'('", "')'", "'=='", "'!='", "'>'", "'<'", - "'>='", "'<='", "':'", "'like'", "'like~'", "'regex'", "'regex~'", "'in'", - "'in~'", "'and'", "'or'", "','", "'null'", "'*'", "'/'", "'%'", "'+'", - "'-'", "'~'", "'any'", + "", "'|'", "'where'", "'sequence'", "'by'", "'with'", "'maxspan'", "'='", + "'['", "']'", "'sample'", "'not'", "'('", "')'", "'=='", "'!='", "'>'", + "'<'", "'>='", "'<='", "':'", "'like'", "'like~'", "'regex'", "'regex~'", + "'in'", "'in~'", "'and'", "'or'", "'?'", "','", "'null'", "'*'", "'/'", + "'%'", "'+'", "'-'", "'head'", "'tail'", "'count'", "'unique'", "'filter'", + "'sort'", "'add'", "'between'", "'cidrMatch'", "'concat'", "'divide'", + "'endsWith'", "'endsWith~'", "'indexOf'", "'indexOf~'", "'length'", + "'modulo'", "'multiply'", "'number'", "'startsWith'", "'startsWith~'", + "'string'", "'stringContains'", "'stringContains~'", "'substring'", + "'subtract'", "'any'", } staticData.SymbolicNames = []string{ "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", - "", "", "ANY", "MULTILINE_COMMENT", "ONELINE_COMMNET", "BOOLEAN", "INTERVAL", - "NUMBER", "ESC", "STRING", "WS", "ID", + "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", + "", "", "", "", "", "", "", "", "", "", "", "", "ANY", "MULTILINE_COMMENT", + "ONELINE_COMMNET", "BOOLEAN", "INTERVAL", "NUMBER", "ESC", "STRING", + "WS", "ID", } staticData.RuleNames = []string{ "query", "simpleQuery", "sequenceQuery", 
"sampleQuery", "condition", "category", "field", "fieldList", "literal", "literalList", "value", - "funcall", "funcName", "interval", + "pipe", "funcall", "funcName", "interval", } staticData.PredictionContextCache = antlr.NewPredictionContextCache() staticData.serializedATN = []int32{ - 4, 1, 45, 173, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, + 4, 1, 72, 195, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, - 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 1, 0, 1, 0, 1, 0, 3, 0, 32, - 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 3, 2, 43, 8, - 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 49, 8, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, - 3, 2, 56, 8, 2, 4, 2, 58, 8, 2, 11, 2, 12, 2, 59, 1, 3, 1, 3, 1, 3, 1, - 3, 1, 3, 1, 3, 1, 3, 4, 3, 69, 8, 3, 11, 3, 12, 3, 70, 1, 4, 1, 4, 1, 4, - 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, - 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 3, 4, 97, 8, 4, 1, - 4, 1, 4, 1, 4, 5, 4, 102, 8, 4, 10, 4, 12, 4, 105, 9, 4, 1, 5, 1, 5, 1, - 6, 1, 6, 1, 7, 1, 7, 1, 7, 5, 7, 114, 8, 7, 10, 7, 12, 7, 117, 9, 7, 1, - 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 125, 8, 9, 10, 9, 12, 9, 128, 9, - 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, - 1, 10, 3, 10, 141, 8, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, - 10, 149, 8, 10, 10, 10, 12, 10, 152, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, - 1, 11, 5, 11, 159, 8, 11, 10, 11, 12, 11, 162, 9, 11, 1, 11, 1, 11, 1, - 12, 1, 12, 1, 12, 3, 12, 169, 8, 12, 1, 13, 1, 13, 1, 13, 0, 2, 8, 20, - 14, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 0, 8, 1, 0, 13, - 23, 1, 0, 24, 25, 1, 0, 19, 25, 1, 0, 26, 27, 3, 0, 36, 36, 43, 43, 45, - 45, 3, 0, 39, 39, 41, 41, 43, 43, 1, 0, 30, 32, 1, 0, 33, 34, 183, 0, 31, - 1, 0, 0, 0, 2, 35, 1, 0, 0, 0, 4, 39, 1, 0, 0, 0, 6, 61, 1, 0, 0, 0, 8, - 96, 1, 0, 0, 0, 10, 106, 1, 0, 0, 0, 12, 108, 1, 0, 0, 0, 14, 110, 1, 0, - 0, 0, 
16, 118, 1, 0, 0, 0, 18, 120, 1, 0, 0, 0, 20, 140, 1, 0, 0, 0, 22, - 153, 1, 0, 0, 0, 24, 168, 1, 0, 0, 0, 26, 170, 1, 0, 0, 0, 28, 32, 3, 2, - 1, 0, 29, 32, 3, 4, 2, 0, 30, 32, 3, 6, 3, 0, 31, 28, 1, 0, 0, 0, 31, 29, - 1, 0, 0, 0, 31, 30, 1, 0, 0, 0, 32, 33, 1, 0, 0, 0, 33, 34, 5, 0, 0, 1, - 34, 1, 1, 0, 0, 0, 35, 36, 3, 10, 5, 0, 36, 37, 5, 1, 0, 0, 37, 38, 3, - 8, 4, 0, 38, 3, 1, 0, 0, 0, 39, 42, 5, 2, 0, 0, 40, 41, 5, 3, 0, 0, 41, - 43, 3, 14, 7, 0, 42, 40, 1, 0, 0, 0, 42, 43, 1, 0, 0, 0, 43, 48, 1, 0, - 0, 0, 44, 45, 5, 4, 0, 0, 45, 46, 5, 5, 0, 0, 46, 47, 5, 6, 0, 0, 47, 49, - 3, 26, 13, 0, 48, 44, 1, 0, 0, 0, 48, 49, 1, 0, 0, 0, 49, 57, 1, 0, 0, - 0, 50, 51, 5, 7, 0, 0, 51, 52, 3, 2, 1, 0, 52, 55, 5, 8, 0, 0, 53, 54, - 5, 3, 0, 0, 54, 56, 3, 14, 7, 0, 55, 53, 1, 0, 0, 0, 55, 56, 1, 0, 0, 0, - 56, 58, 1, 0, 0, 0, 57, 50, 1, 0, 0, 0, 58, 59, 1, 0, 0, 0, 59, 57, 1, - 0, 0, 0, 59, 60, 1, 0, 0, 0, 60, 5, 1, 0, 0, 0, 61, 62, 5, 9, 0, 0, 62, - 63, 5, 3, 0, 0, 63, 68, 3, 14, 7, 0, 64, 65, 5, 7, 0, 0, 65, 66, 3, 2, - 1, 0, 66, 67, 5, 8, 0, 0, 67, 69, 1, 0, 0, 0, 68, 64, 1, 0, 0, 0, 69, 70, - 1, 0, 0, 0, 70, 68, 1, 0, 0, 0, 70, 71, 1, 0, 0, 0, 71, 7, 1, 0, 0, 0, - 72, 73, 6, 4, -1, 0, 73, 97, 5, 39, 0, 0, 74, 75, 5, 10, 0, 0, 75, 97, - 3, 8, 4, 8, 76, 77, 5, 11, 0, 0, 77, 78, 3, 8, 4, 0, 78, 79, 5, 12, 0, - 0, 79, 97, 1, 0, 0, 0, 80, 81, 3, 20, 10, 0, 81, 82, 7, 0, 0, 0, 82, 83, - 3, 20, 10, 0, 83, 97, 1, 0, 0, 0, 84, 85, 3, 12, 6, 0, 85, 86, 5, 10, 0, - 0, 86, 87, 7, 1, 0, 0, 87, 88, 3, 18, 9, 0, 88, 97, 1, 0, 0, 0, 89, 90, - 3, 12, 6, 0, 90, 91, 7, 2, 0, 0, 91, 92, 3, 18, 9, 0, 92, 97, 1, 0, 0, - 0, 93, 97, 3, 22, 11, 0, 94, 95, 5, 10, 0, 0, 95, 97, 3, 22, 11, 0, 96, - 72, 1, 0, 0, 0, 96, 74, 1, 0, 0, 0, 96, 76, 1, 0, 0, 0, 96, 80, 1, 0, 0, - 0, 96, 84, 1, 0, 0, 0, 96, 89, 1, 0, 0, 0, 96, 93, 1, 0, 0, 0, 96, 94, - 1, 0, 0, 0, 97, 103, 1, 0, 0, 0, 98, 99, 10, 3, 0, 0, 99, 100, 7, 3, 0, - 0, 100, 102, 3, 8, 4, 4, 101, 98, 1, 0, 0, 0, 102, 105, 1, 0, 0, 0, 103, - 101, 
1, 0, 0, 0, 103, 104, 1, 0, 0, 0, 104, 9, 1, 0, 0, 0, 105, 103, 1, - 0, 0, 0, 106, 107, 7, 4, 0, 0, 107, 11, 1, 0, 0, 0, 108, 109, 5, 45, 0, - 0, 109, 13, 1, 0, 0, 0, 110, 115, 3, 12, 6, 0, 111, 112, 5, 28, 0, 0, 112, - 114, 3, 12, 6, 0, 113, 111, 1, 0, 0, 0, 114, 117, 1, 0, 0, 0, 115, 113, - 1, 0, 0, 0, 115, 116, 1, 0, 0, 0, 116, 15, 1, 0, 0, 0, 117, 115, 1, 0, - 0, 0, 118, 119, 7, 5, 0, 0, 119, 17, 1, 0, 0, 0, 120, 121, 5, 11, 0, 0, - 121, 126, 3, 16, 8, 0, 122, 123, 5, 28, 0, 0, 123, 125, 3, 16, 8, 0, 124, - 122, 1, 0, 0, 0, 125, 128, 1, 0, 0, 0, 126, 124, 1, 0, 0, 0, 126, 127, - 1, 0, 0, 0, 127, 129, 1, 0, 0, 0, 128, 126, 1, 0, 0, 0, 129, 130, 5, 12, - 0, 0, 130, 19, 1, 0, 0, 0, 131, 132, 6, 10, -1, 0, 132, 141, 5, 29, 0, - 0, 133, 141, 3, 16, 8, 0, 134, 141, 3, 12, 6, 0, 135, 141, 3, 22, 11, 0, - 136, 137, 5, 11, 0, 0, 137, 138, 3, 20, 10, 0, 138, 139, 5, 12, 0, 0, 139, - 141, 1, 0, 0, 0, 140, 131, 1, 0, 0, 0, 140, 133, 1, 0, 0, 0, 140, 134, - 1, 0, 0, 0, 140, 135, 1, 0, 0, 0, 140, 136, 1, 0, 0, 0, 141, 150, 1, 0, - 0, 0, 142, 143, 10, 2, 0, 0, 143, 144, 7, 6, 0, 0, 144, 149, 3, 20, 10, - 3, 145, 146, 10, 1, 0, 0, 146, 147, 7, 7, 0, 0, 147, 149, 3, 20, 10, 2, - 148, 142, 1, 0, 0, 0, 148, 145, 1, 0, 0, 0, 149, 152, 1, 0, 0, 0, 150, - 148, 1, 0, 0, 0, 150, 151, 1, 0, 0, 0, 151, 21, 1, 0, 0, 0, 152, 150, 1, - 0, 0, 0, 153, 154, 3, 24, 12, 0, 154, 155, 5, 11, 0, 0, 155, 160, 3, 20, - 10, 0, 156, 157, 5, 28, 0, 0, 157, 159, 3, 20, 10, 0, 158, 156, 1, 0, 0, - 0, 159, 162, 1, 0, 0, 0, 160, 158, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, - 163, 1, 0, 0, 0, 162, 160, 1, 0, 0, 0, 163, 164, 5, 12, 0, 0, 164, 23, - 1, 0, 0, 0, 165, 169, 5, 45, 0, 0, 166, 167, 5, 45, 0, 0, 167, 169, 5, - 35, 0, 0, 168, 165, 1, 0, 0, 0, 168, 166, 1, 0, 0, 0, 169, 25, 1, 0, 0, - 0, 170, 171, 5, 40, 0, 0, 171, 27, 1, 0, 0, 0, 15, 31, 42, 48, 55, 59, - 70, 96, 103, 115, 126, 140, 148, 150, 160, 168, + 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 1, 0, 1, 0, + 1, 0, 3, 0, 34, 8, 0, 
1, 0, 1, 0, 5, 0, 38, 8, 0, 10, 0, 12, 0, 41, 9, + 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 3, 2, 52, 8, 2, + 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 58, 8, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, + 2, 65, 8, 2, 4, 2, 67, 8, 2, 11, 2, 12, 2, 68, 1, 3, 1, 3, 1, 3, 1, 3, + 1, 3, 1, 3, 1, 3, 4, 3, 78, 8, 3, 11, 3, 12, 3, 79, 1, 4, 1, 4, 1, 4, 1, + 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, + 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 3, 4, 106, 8, 4, 1, + 4, 1, 4, 1, 4, 5, 4, 111, 8, 4, 10, 4, 12, 4, 114, 9, 4, 1, 5, 1, 5, 1, + 6, 1, 6, 1, 6, 3, 6, 121, 8, 6, 1, 7, 1, 7, 1, 7, 5, 7, 126, 8, 7, 10, + 7, 12, 7, 129, 9, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 137, 8, + 9, 10, 9, 12, 9, 140, 9, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, + 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 153, 8, 10, 1, 10, 1, 10, 1, 10, + 1, 10, 1, 10, 1, 10, 5, 10, 161, 8, 10, 10, 10, 12, 10, 164, 9, 10, 1, + 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, + 3, 11, 177, 8, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 5, 12, 184, 8, 12, + 10, 12, 12, 12, 187, 9, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, + 14, 0, 2, 8, 20, 15, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, + 28, 0, 9, 1, 0, 14, 24, 1, 0, 25, 26, 1, 0, 20, 26, 1, 0, 27, 28, 3, 0, + 63, 63, 70, 70, 72, 72, 3, 0, 66, 66, 68, 68, 70, 70, 1, 0, 32, 34, 1, + 0, 35, 36, 1, 0, 43, 62, 210, 0, 33, 1, 0, 0, 0, 2, 44, 1, 0, 0, 0, 4, + 48, 1, 0, 0, 0, 6, 70, 1, 0, 0, 0, 8, 105, 1, 0, 0, 0, 10, 115, 1, 0, 0, + 0, 12, 120, 1, 0, 0, 0, 14, 122, 1, 0, 0, 0, 16, 130, 1, 0, 0, 0, 18, 132, + 1, 0, 0, 0, 20, 152, 1, 0, 0, 0, 22, 176, 1, 0, 0, 0, 24, 178, 1, 0, 0, + 0, 26, 190, 1, 0, 0, 0, 28, 192, 1, 0, 0, 0, 30, 34, 3, 2, 1, 0, 31, 34, + 3, 4, 2, 0, 32, 34, 3, 6, 3, 0, 33, 30, 1, 0, 0, 0, 33, 31, 1, 0, 0, 0, + 33, 32, 1, 0, 0, 0, 34, 39, 1, 0, 0, 0, 35, 36, 5, 1, 0, 0, 36, 38, 3, + 22, 11, 0, 37, 35, 1, 0, 0, 0, 38, 41, 1, 0, 0, 0, 39, 37, 1, 0, 0, 0, + 39, 40, 1, 0, 0, 0, 
40, 42, 1, 0, 0, 0, 41, 39, 1, 0, 0, 0, 42, 43, 5, + 0, 0, 1, 43, 1, 1, 0, 0, 0, 44, 45, 3, 10, 5, 0, 45, 46, 5, 2, 0, 0, 46, + 47, 3, 8, 4, 0, 47, 3, 1, 0, 0, 0, 48, 51, 5, 3, 0, 0, 49, 50, 5, 4, 0, + 0, 50, 52, 3, 14, 7, 0, 51, 49, 1, 0, 0, 0, 51, 52, 1, 0, 0, 0, 52, 57, + 1, 0, 0, 0, 53, 54, 5, 5, 0, 0, 54, 55, 5, 6, 0, 0, 55, 56, 5, 7, 0, 0, + 56, 58, 3, 28, 14, 0, 57, 53, 1, 0, 0, 0, 57, 58, 1, 0, 0, 0, 58, 66, 1, + 0, 0, 0, 59, 60, 5, 8, 0, 0, 60, 61, 3, 2, 1, 0, 61, 64, 5, 9, 0, 0, 62, + 63, 5, 4, 0, 0, 63, 65, 3, 14, 7, 0, 64, 62, 1, 0, 0, 0, 64, 65, 1, 0, + 0, 0, 65, 67, 1, 0, 0, 0, 66, 59, 1, 0, 0, 0, 67, 68, 1, 0, 0, 0, 68, 66, + 1, 0, 0, 0, 68, 69, 1, 0, 0, 0, 69, 5, 1, 0, 0, 0, 70, 71, 5, 10, 0, 0, + 71, 72, 5, 4, 0, 0, 72, 77, 3, 14, 7, 0, 73, 74, 5, 8, 0, 0, 74, 75, 3, + 2, 1, 0, 75, 76, 5, 9, 0, 0, 76, 78, 1, 0, 0, 0, 77, 73, 1, 0, 0, 0, 78, + 79, 1, 0, 0, 0, 79, 77, 1, 0, 0, 0, 79, 80, 1, 0, 0, 0, 80, 7, 1, 0, 0, + 0, 81, 82, 6, 4, -1, 0, 82, 106, 5, 66, 0, 0, 83, 84, 5, 11, 0, 0, 84, + 106, 3, 8, 4, 8, 85, 86, 5, 12, 0, 0, 86, 87, 3, 8, 4, 0, 87, 88, 5, 13, + 0, 0, 88, 106, 1, 0, 0, 0, 89, 90, 3, 20, 10, 0, 90, 91, 7, 0, 0, 0, 91, + 92, 3, 20, 10, 0, 92, 106, 1, 0, 0, 0, 93, 94, 3, 12, 6, 0, 94, 95, 5, + 11, 0, 0, 95, 96, 7, 1, 0, 0, 96, 97, 3, 18, 9, 0, 97, 106, 1, 0, 0, 0, + 98, 99, 3, 12, 6, 0, 99, 100, 7, 2, 0, 0, 100, 101, 3, 18, 9, 0, 101, 106, + 1, 0, 0, 0, 102, 106, 3, 24, 12, 0, 103, 104, 5, 11, 0, 0, 104, 106, 3, + 24, 12, 0, 105, 81, 1, 0, 0, 0, 105, 83, 1, 0, 0, 0, 105, 85, 1, 0, 0, + 0, 105, 89, 1, 0, 0, 0, 105, 93, 1, 0, 0, 0, 105, 98, 1, 0, 0, 0, 105, + 102, 1, 0, 0, 0, 105, 103, 1, 0, 0, 0, 106, 112, 1, 0, 0, 0, 107, 108, + 10, 3, 0, 0, 108, 109, 7, 3, 0, 0, 109, 111, 3, 8, 4, 4, 110, 107, 1, 0, + 0, 0, 111, 114, 1, 0, 0, 0, 112, 110, 1, 0, 0, 0, 112, 113, 1, 0, 0, 0, + 113, 9, 1, 0, 0, 0, 114, 112, 1, 0, 0, 0, 115, 116, 7, 4, 0, 0, 116, 11, + 1, 0, 0, 0, 117, 121, 5, 72, 0, 0, 118, 119, 5, 29, 0, 0, 119, 121, 5, + 72, 0, 0, 120, 
117, 1, 0, 0, 0, 120, 118, 1, 0, 0, 0, 121, 13, 1, 0, 0, + 0, 122, 127, 3, 12, 6, 0, 123, 124, 5, 30, 0, 0, 124, 126, 3, 12, 6, 0, + 125, 123, 1, 0, 0, 0, 126, 129, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, + 128, 1, 0, 0, 0, 128, 15, 1, 0, 0, 0, 129, 127, 1, 0, 0, 0, 130, 131, 7, + 5, 0, 0, 131, 17, 1, 0, 0, 0, 132, 133, 5, 12, 0, 0, 133, 138, 3, 16, 8, + 0, 134, 135, 5, 30, 0, 0, 135, 137, 3, 16, 8, 0, 136, 134, 1, 0, 0, 0, + 137, 140, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 138, 139, 1, 0, 0, 0, 139, + 141, 1, 0, 0, 0, 140, 138, 1, 0, 0, 0, 141, 142, 5, 13, 0, 0, 142, 19, + 1, 0, 0, 0, 143, 144, 6, 10, -1, 0, 144, 153, 5, 31, 0, 0, 145, 153, 3, + 16, 8, 0, 146, 153, 3, 12, 6, 0, 147, 153, 3, 24, 12, 0, 148, 149, 5, 12, + 0, 0, 149, 150, 3, 20, 10, 0, 150, 151, 5, 13, 0, 0, 151, 153, 1, 0, 0, + 0, 152, 143, 1, 0, 0, 0, 152, 145, 1, 0, 0, 0, 152, 146, 1, 0, 0, 0, 152, + 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 153, 162, 1, 0, 0, 0, 154, 155, + 10, 2, 0, 0, 155, 156, 7, 6, 0, 0, 156, 161, 3, 20, 10, 3, 157, 158, 10, + 1, 0, 0, 158, 159, 7, 7, 0, 0, 159, 161, 3, 20, 10, 2, 160, 154, 1, 0, + 0, 0, 160, 157, 1, 0, 0, 0, 161, 164, 1, 0, 0, 0, 162, 160, 1, 0, 0, 0, + 162, 163, 1, 0, 0, 0, 163, 21, 1, 0, 0, 0, 164, 162, 1, 0, 0, 0, 165, 166, + 5, 37, 0, 0, 166, 177, 5, 68, 0, 0, 167, 168, 5, 38, 0, 0, 168, 177, 5, + 68, 0, 0, 169, 177, 5, 39, 0, 0, 170, 171, 5, 40, 0, 0, 171, 177, 3, 14, + 7, 0, 172, 173, 5, 41, 0, 0, 173, 177, 3, 8, 4, 0, 174, 175, 5, 42, 0, + 0, 175, 177, 3, 14, 7, 0, 176, 165, 1, 0, 0, 0, 176, 167, 1, 0, 0, 0, 176, + 169, 1, 0, 0, 0, 176, 170, 1, 0, 0, 0, 176, 172, 1, 0, 0, 0, 176, 174, + 1, 0, 0, 0, 177, 23, 1, 0, 0, 0, 178, 179, 3, 26, 13, 0, 179, 180, 5, 12, + 0, 0, 180, 185, 3, 20, 10, 0, 181, 182, 5, 30, 0, 0, 182, 184, 3, 20, 10, + 0, 183, 181, 1, 0, 0, 0, 184, 187, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 185, + 186, 1, 0, 0, 0, 186, 188, 1, 0, 0, 0, 187, 185, 1, 0, 0, 0, 188, 189, + 5, 13, 0, 0, 189, 25, 1, 0, 0, 0, 190, 191, 7, 8, 0, 0, 191, 27, 1, 0, + 0, 0, 
192, 193, 5, 67, 0, 0, 193, 29, 1, 0, 0, 0, 17, 33, 39, 51, 57, 64, + 68, 79, 105, 112, 120, 127, 138, 152, 160, 162, 176, 185, } deserializer := antlr.NewATNDeserializer(nil) staticData.atn = deserializer.Deserialize(staticData.serializedATN) @@ -199,16 +217,43 @@ const ( EQLParserT__32 = 33 EQLParserT__33 = 34 EQLParserT__34 = 35 - EQLParserANY = 36 - EQLParserMULTILINE_COMMENT = 37 - EQLParserONELINE_COMMNET = 38 - EQLParserBOOLEAN = 39 - EQLParserINTERVAL = 40 - EQLParserNUMBER = 41 - EQLParserESC = 42 - EQLParserSTRING = 43 - EQLParserWS = 44 - EQLParserID = 45 + EQLParserT__35 = 36 + EQLParserT__36 = 37 + EQLParserT__37 = 38 + EQLParserT__38 = 39 + EQLParserT__39 = 40 + EQLParserT__40 = 41 + EQLParserT__41 = 42 + EQLParserT__42 = 43 + EQLParserT__43 = 44 + EQLParserT__44 = 45 + EQLParserT__45 = 46 + EQLParserT__46 = 47 + EQLParserT__47 = 48 + EQLParserT__48 = 49 + EQLParserT__49 = 50 + EQLParserT__50 = 51 + EQLParserT__51 = 52 + EQLParserT__52 = 53 + EQLParserT__53 = 54 + EQLParserT__54 = 55 + EQLParserT__55 = 56 + EQLParserT__56 = 57 + EQLParserT__57 = 58 + EQLParserT__58 = 59 + EQLParserT__59 = 60 + EQLParserT__60 = 61 + EQLParserT__61 = 62 + EQLParserANY = 63 + EQLParserMULTILINE_COMMENT = 64 + EQLParserONELINE_COMMNET = 65 + EQLParserBOOLEAN = 66 + EQLParserINTERVAL = 67 + EQLParserNUMBER = 68 + EQLParserESC = 69 + EQLParserSTRING = 70 + EQLParserWS = 71 + EQLParserID = 72 ) // EQLParser rules. @@ -224,9 +269,10 @@ const ( EQLParserRULE_literal = 8 EQLParserRULE_literalList = 9 EQLParserRULE_value = 10 - EQLParserRULE_funcall = 11 - EQLParserRULE_funcName = 12 - EQLParserRULE_interval = 13 + EQLParserRULE_pipe = 11 + EQLParserRULE_funcall = 12 + EQLParserRULE_funcName = 13 + EQLParserRULE_interval = 14 ) // IQueryContext is an interface to support dynamic dispatch. 
@@ -241,6 +287,8 @@ type IQueryContext interface { SimpleQuery() ISimpleQueryContext SequenceQuery() ISequenceQueryContext SampleQuery() ISampleQueryContext + AllPipe() []IPipeContext + Pipe(i int) IPipeContext // IsQueryContext differentiates from other interfaces. IsQueryContext() @@ -330,6 +378,47 @@ func (s *QueryContext) SampleQuery() ISampleQueryContext { return t.(ISampleQueryContext) } +func (s *QueryContext) AllPipe() []IPipeContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IPipeContext); ok { + len++ + } + } + + tst := make([]IPipeContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IPipeContext); ok { + tst[i] = t.(IPipeContext) + i++ + } + } + + return tst +} + +func (s *QueryContext) Pipe(i int) IPipeContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IPipeContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(IPipeContext) +} + func (s *QueryContext) GetRuleContext() antlr.RuleContext { return s } @@ -363,8 +452,10 @@ func (s *QueryContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { func (p *EQLParser) Query() (localctx IQueryContext) { localctx = NewQueryContext(p, p.GetParserRuleContext(), p.GetState()) p.EnterRule(localctx, 0, EQLParserRULE_query) + var _la int + p.EnterOuterAlt(localctx, 1) - p.SetState(31) + p.SetState(33) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -373,19 +464,19 @@ func (p *EQLParser) Query() (localctx IQueryContext) { switch p.GetTokenStream().LA(1) { case EQLParserANY, EQLParserSTRING, EQLParserID: { - p.SetState(28) + p.SetState(30) p.SimpleQuery() } - case EQLParserT__1: + case EQLParserT__2: { - p.SetState(29) + p.SetState(31) p.SequenceQuery() } - case EQLParserT__8: + case EQLParserT__9: { - p.SetState(30) + p.SetState(32) p.SampleQuery() } @@ -393,8 +484,36 @@ func (p *EQLParser) Query() 
(localctx IQueryContext) { p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) goto errorExit } + p.SetState(39) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + for _la == EQLParserT__0 { + { + p.SetState(35) + p.Match(EQLParserT__0) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(36) + p.Pipe() + } + + p.SetState(41) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + } { - p.SetState(33) + p.SetState(42) p.Match(EQLParserEOF) if p.HasError() { // Recognition error - abort rule @@ -531,19 +650,19 @@ func (p *EQLParser) SimpleQuery() (localctx ISimpleQueryContext) { p.EnterRule(localctx, 2, EQLParserRULE_simpleQuery) p.EnterOuterAlt(localctx, 1) { - p.SetState(35) + p.SetState(44) p.Category() } { - p.SetState(36) - p.Match(EQLParserT__0) + p.SetState(45) + p.Match(EQLParserT__1) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(37) + p.SetState(46) p.condition(0) } @@ -747,125 +866,125 @@ func (p *EQLParser) SequenceQuery() (localctx ISequenceQueryContext) { p.EnterOuterAlt(localctx, 1) { - p.SetState(39) - p.Match(EQLParserT__1) + p.SetState(48) + p.Match(EQLParserT__2) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(42) + p.SetState(51) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == EQLParserT__2 { + if _la == EQLParserT__3 { { - p.SetState(40) - p.Match(EQLParserT__2) + p.SetState(49) + p.Match(EQLParserT__3) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(41) + p.SetState(50) p.FieldList() } } - p.SetState(48) + p.SetState(57) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == EQLParserT__3 { + if _la == EQLParserT__4 { { - p.SetState(44) - 
p.Match(EQLParserT__3) + p.SetState(53) + p.Match(EQLParserT__4) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(45) - p.Match(EQLParserT__4) + p.SetState(54) + p.Match(EQLParserT__5) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(46) - p.Match(EQLParserT__5) + p.SetState(55) + p.Match(EQLParserT__6) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(47) + p.SetState(56) p.Interval() } } - p.SetState(57) + p.SetState(66) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for ok := true; ok; ok = _la == EQLParserT__6 { + for ok := true; ok; ok = _la == EQLParserT__7 { { - p.SetState(50) - p.Match(EQLParserT__6) + p.SetState(59) + p.Match(EQLParserT__7) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(51) + p.SetState(60) p.SimpleQuery() } { - p.SetState(52) - p.Match(EQLParserT__7) + p.SetState(61) + p.Match(EQLParserT__8) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(55) + p.SetState(64) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == EQLParserT__2 { + if _la == EQLParserT__3 { { - p.SetState(53) - p.Match(EQLParserT__2) + p.SetState(62) + p.Match(EQLParserT__3) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(54) + p.SetState(63) p.FieldList() } } - p.SetState(59) + p.SetState(68) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -1030,55 +1149,55 @@ func (p *EQLParser) SampleQuery() (localctx ISampleQueryContext) { p.EnterOuterAlt(localctx, 1) { - p.SetState(61) - p.Match(EQLParserT__8) + p.SetState(70) + p.Match(EQLParserT__9) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(62) - p.Match(EQLParserT__2) + p.SetState(71) + p.Match(EQLParserT__3) if p.HasError() { // Recognition error - abort 
rule goto errorExit } } { - p.SetState(63) + p.SetState(72) p.FieldList() } - p.SetState(68) + p.SetState(77) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for ok := true; ok; ok = _la == EQLParserT__6 { + for ok := true; ok; ok = _la == EQLParserT__7 { { - p.SetState(64) - p.Match(EQLParserT__6) + p.SetState(73) + p.Match(EQLParserT__7) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(65) + p.SetState(74) p.SimpleQuery() } { - p.SetState(66) - p.Match(EQLParserT__7) + p.SetState(75) + p.Match(EQLParserT__8) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(70) + p.SetState(79) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -1797,20 +1916,20 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { var _alt int p.EnterOuterAlt(localctx, 1) - p.SetState(96) + p.SetState(105) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 6, p.GetParserRuleContext()) { + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 7, p.GetParserRuleContext()) { case 1: localctx = NewConditionBooleanContext(p, localctx) p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(73) + p.SetState(82) p.Match(EQLParserBOOLEAN) if p.HasError() { // Recognition error - abort rule @@ -1823,15 +1942,15 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(74) - p.Match(EQLParserT__9) + p.SetState(83) + p.Match(EQLParserT__10) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(75) + p.SetState(84) p.condition(8) } @@ -1840,20 +1959,20 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(76) - p.Match(EQLParserT__10) + 
p.SetState(85) + p.Match(EQLParserT__11) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(77) + p.SetState(86) p.condition(0) } { - p.SetState(78) - p.Match(EQLParserT__11) + p.SetState(87) + p.Match(EQLParserT__12) if p.HasError() { // Recognition error - abort rule goto errorExit @@ -1865,14 +1984,14 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(80) + p.SetState(89) var _x = p.value(0) localctx.(*ComparisonOpContext).left = _x } { - p.SetState(81) + p.SetState(90) var _lt = p.GetTokenStream().LT(1) @@ -1880,7 +1999,7 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { _la = p.GetTokenStream().LA(1) - if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&16769024) != 0) { + if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&33538048) != 0) { var _ri = p.GetErrorHandler().RecoverInline(p) localctx.(*ComparisonOpContext).op = _ri @@ -1890,7 +2009,7 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { } } { - p.SetState(82) + p.SetState(91) var _x = p.value(0) @@ -1902,19 +2021,19 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(84) + p.SetState(93) p.Field() } { - p.SetState(85) - p.Match(EQLParserT__9) + p.SetState(94) + p.Match(EQLParserT__10) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(86) + p.SetState(95) var _lt = p.GetTokenStream().LT(1) @@ -1922,7 +2041,7 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { _la = p.GetTokenStream().LA(1) - if !(_la == EQLParserT__23 || _la == EQLParserT__24) { + if !(_la == EQLParserT__24 || _la == EQLParserT__25) { var _ri = p.GetErrorHandler().RecoverInline(p) localctx.(*LookupNotOpListContext).op = _ri @@ -1932,7 +2051,7 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { } } { - 
p.SetState(87) + p.SetState(96) var _x = p.LiteralList() @@ -1944,11 +2063,11 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(89) + p.SetState(98) p.Field() } { - p.SetState(90) + p.SetState(99) var _lt = p.GetTokenStream().LT(1) @@ -1956,7 +2075,7 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { _la = p.GetTokenStream().LA(1) - if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&66584576) != 0) { + if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&133169152) != 0) { var _ri = p.GetErrorHandler().RecoverInline(p) localctx.(*LookupOpListContext).op = _ri @@ -1966,7 +2085,7 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { } } { - p.SetState(91) + p.SetState(100) var _x = p.LiteralList() @@ -1978,7 +2097,7 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(93) + p.SetState(102) p.Funcall() } @@ -1987,15 +2106,15 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(94) - p.Match(EQLParserT__9) + p.SetState(103) + p.Match(EQLParserT__10) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(95) + p.SetState(104) p.Funcall() } @@ -2003,12 +2122,12 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { goto errorExit } p.GetParserRuleContext().SetStop(p.GetTokenStream().LT(-1)) - p.SetState(103) + p.SetState(112) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 7, p.GetParserRuleContext()) + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 8, p.GetParserRuleContext()) if p.HasError() { goto errorExit } @@ -2022,14 +2141,14 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { 
localctx.(*ConditionLogicalOpContext).left = _prevctx p.PushNewRecursionContext(localctx, _startState, EQLParserRULE_condition) - p.SetState(98) + p.SetState(107) if !(p.Precpred(p.GetParserRuleContext(), 3)) { p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 3)", "")) goto errorExit } { - p.SetState(99) + p.SetState(108) var _lt = p.GetTokenStream().LT(1) @@ -2037,7 +2156,7 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { _la = p.GetTokenStream().LA(1) - if !(_la == EQLParserT__25 || _la == EQLParserT__26) { + if !(_la == EQLParserT__26 || _la == EQLParserT__27) { var _ri = p.GetErrorHandler().RecoverInline(p) localctx.(*ConditionLogicalOpContext).op = _ri @@ -2047,7 +2166,7 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { } } { - p.SetState(100) + p.SetState(109) var _x = p.condition(4) @@ -2055,12 +2174,12 @@ func (p *EQLParser) condition(_p int) (localctx IConditionContext) { } } - p.SetState(105) + p.SetState(114) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 7, p.GetParserRuleContext()) + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 8, p.GetParserRuleContext()) if p.HasError() { goto errorExit } @@ -2178,10 +2297,10 @@ func (p *EQLParser) Category() (localctx ICategoryContext) { p.EnterOuterAlt(localctx, 1) { - p.SetState(106) + p.SetState(115) _la = p.GetTokenStream().LA(1) - if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&44049184587776) != 0) { + if !((int64((_la-63)) & ^0x3f) == 0 && ((int64(1)<<(_la-63))&641) != 0) { p.GetErrorHandler().RecoverInline(p) } else { p.GetErrorHandler().ReportMatch(p) @@ -2287,14 +2406,46 @@ func (s *FieldContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { func (p *EQLParser) Field() (localctx IFieldContext) { localctx = NewFieldContext(p, p.GetParserRuleContext(), p.GetState()) p.EnterRule(localctx, 12, 
EQLParserRULE_field) - p.EnterOuterAlt(localctx, 1) - { - p.SetState(108) - p.Match(EQLParserID) - if p.HasError() { - // Recognition error - abort rule - goto errorExit + p.SetState(120) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + + switch p.GetTokenStream().LA(1) { + case EQLParserID: + p.EnterOuterAlt(localctx, 1) + { + p.SetState(117) + p.Match(EQLParserID) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + case EQLParserT__28: + p.EnterOuterAlt(localctx, 2) + { + p.SetState(118) + p.Match(EQLParserT__28) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(119) + p.Match(EQLParserID) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit } errorExit: @@ -2437,31 +2588,31 @@ func (p *EQLParser) FieldList() (localctx IFieldListContext) { p.EnterOuterAlt(localctx, 1) { - p.SetState(110) + p.SetState(122) p.Field() } - p.SetState(115) + p.SetState(127) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for _la == EQLParserT__27 { + for _la == EQLParserT__29 { { - p.SetState(111) - p.Match(EQLParserT__27) + p.SetState(123) + p.Match(EQLParserT__29) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(112) + p.SetState(124) p.Field() } - p.SetState(117) + p.SetState(129) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -2581,10 +2732,10 @@ func (p *EQLParser) Literal() (localctx ILiteralContext) { p.EnterOuterAlt(localctx, 1) { - p.SetState(118) + p.SetState(130) _la = p.GetTokenStream().LA(1) - if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&11544872091648) != 0) { + if !((int64((_la-66)) & ^0x3f) == 0 && ((int64(1)<<(_la-66))&21) != 0) { p.GetErrorHandler().RecoverInline(p) } else { p.GetErrorHandler().ReportMatch(p) @@ -2732,39 +2883,39 @@ 
func (p *EQLParser) LiteralList() (localctx ILiteralListContext) { p.EnterOuterAlt(localctx, 1) { - p.SetState(120) - p.Match(EQLParserT__10) + p.SetState(132) + p.Match(EQLParserT__11) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(121) + p.SetState(133) p.Literal() } - p.SetState(126) + p.SetState(138) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for _la == EQLParserT__27 { + for _la == EQLParserT__29 { { - p.SetState(122) - p.Match(EQLParserT__27) + p.SetState(134) + p.Match(EQLParserT__29) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(123) + p.SetState(135) p.Literal() } - p.SetState(128) + p.SetState(140) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -2772,8 +2923,8 @@ func (p *EQLParser) LiteralList() (localctx ILiteralListContext) { _la = p.GetTokenStream().LA(1) } { - p.SetState(129) - p.Match(EQLParserT__11) + p.SetState(141) + p.Match(EQLParserT__12) if p.HasError() { // Recognition error - abort rule goto errorExit @@ -3323,89 +3474,90 @@ func (p *EQLParser) value(_p int) (localctx IValueContext) { var _alt int p.EnterOuterAlt(localctx, 1) - p.SetState(140) + p.SetState(152) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 10, p.GetParserRuleContext()) { - case 1: + switch p.GetTokenStream().LA(1) { + case EQLParserT__30: localctx = NewValueNullContext(p, localctx) p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(132) - p.Match(EQLParserT__28) + p.SetState(144) + p.Match(EQLParserT__30) if p.HasError() { // Recognition error - abort rule goto errorExit } } - case 2: + case EQLParserBOOLEAN, EQLParserNUMBER, EQLParserSTRING: localctx = NewValueLiteralContext(p, localctx) p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(133) + p.SetState(145) p.Literal() } - case 3: + case 
EQLParserT__28, EQLParserID: localctx = NewValueFieldContext(p, localctx) p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(134) + p.SetState(146) p.Field() } - case 4: + case EQLParserT__42, EQLParserT__43, EQLParserT__44, EQLParserT__45, EQLParserT__46, EQLParserT__47, EQLParserT__48, EQLParserT__49, EQLParserT__50, EQLParserT__51, EQLParserT__52, EQLParserT__53, EQLParserT__54, EQLParserT__55, EQLParserT__56, EQLParserT__57, EQLParserT__58, EQLParserT__59, EQLParserT__60, EQLParserT__61: localctx = NewValueFuncallContext(p, localctx) p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(135) + p.SetState(147) p.Funcall() } - case 5: + case EQLParserT__11: localctx = NewValueGroupContext(p, localctx) p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(136) - p.Match(EQLParserT__10) + p.SetState(148) + p.Match(EQLParserT__11) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(137) + p.SetState(149) p.value(0) } { - p.SetState(138) - p.Match(EQLParserT__11) + p.SetState(150) + p.Match(EQLParserT__12) if p.HasError() { // Recognition error - abort rule goto errorExit } } - case antlr.ATNInvalidAltNumber: + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) goto errorExit } p.GetParserRuleContext().SetStop(p.GetTokenStream().LT(-1)) - p.SetState(150) + p.SetState(162) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 12, p.GetParserRuleContext()) + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 14, p.GetParserRuleContext()) if p.HasError() { goto errorExit } @@ -3415,26 +3567,26 @@ func (p *EQLParser) value(_p int) (localctx IValueContext) { p.TriggerExitRuleEvent() } _prevctx = localctx - p.SetState(148) + p.SetState(160) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - switch 
p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 11, p.GetParserRuleContext()) { + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 13, p.GetParserRuleContext()) { case 1: localctx = NewValueMulDivContext(p, NewValueContext(p, _parentctx, _parentState)) localctx.(*ValueMulDivContext).left = _prevctx p.PushNewRecursionContext(localctx, _startState, EQLParserRULE_value) - p.SetState(142) + p.SetState(154) if !(p.Precpred(p.GetParserRuleContext(), 2)) { p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 2)", "")) goto errorExit } { - p.SetState(143) + p.SetState(155) var _lt = p.GetTokenStream().LT(1) @@ -3442,7 +3594,7 @@ func (p *EQLParser) value(_p int) (localctx IValueContext) { _la = p.GetTokenStream().LA(1) - if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&7516192768) != 0) { + if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&30064771072) != 0) { var _ri = p.GetErrorHandler().RecoverInline(p) localctx.(*ValueMulDivContext).op = _ri @@ -3452,7 +3604,7 @@ func (p *EQLParser) value(_p int) (localctx IValueContext) { } } { - p.SetState(144) + p.SetState(156) var _x = p.value(3) @@ -3464,14 +3616,14 @@ func (p *EQLParser) value(_p int) (localctx IValueContext) { localctx.(*ValueAddSubContext).left = _prevctx p.PushNewRecursionContext(localctx, _startState, EQLParserRULE_value) - p.SetState(145) + p.SetState(157) if !(p.Precpred(p.GetParserRuleContext(), 1)) { p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 1)", "")) goto errorExit } { - p.SetState(146) + p.SetState(158) var _lt = p.GetTokenStream().LT(1) @@ -3479,7 +3631,7 @@ func (p *EQLParser) value(_p int) (localctx IValueContext) { _la = p.GetTokenStream().LA(1) - if !(_la == EQLParserT__32 || _la == EQLParserT__33) { + if !(_la == EQLParserT__34 || _la == EQLParserT__35) { var _ri = p.GetErrorHandler().RecoverInline(p) localctx.(*ValueAddSubContext).op = _ri @@ -3489,7 +3641,7 @@ func (p 
*EQLParser) value(_p int) (localctx IValueContext) { } } { - p.SetState(147) + p.SetState(159) var _x = p.value(2) @@ -3501,12 +3653,12 @@ func (p *EQLParser) value(_p int) (localctx IValueContext) { } } - p.SetState(152) + p.SetState(164) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 12, p.GetParserRuleContext()) + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 14, p.GetParserRuleContext()) if p.HasError() { goto errorExit } @@ -3527,103 +3679,583 @@ errorExit: return localctx } -// IFuncallContext is an interface to support dynamic dispatch. -type IFuncallContext interface { +// IPipeContext is an interface to support dynamic dispatch. +type IPipeContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - - // Getter signatures - FuncName() IFuncNameContext - AllValue() []IValueContext - Value(i int) IValueContext - - // IsFuncallContext differentiates from other interfaces. - IsFuncallContext() + // IsPipeContext differentiates from other interfaces. 
+ IsPipeContext() } -type FuncallContext struct { +type PipeContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptyFuncallContext() *FuncallContext { - var p = new(FuncallContext) +func NewEmptyPipeContext() *PipeContext { + var p = new(PipeContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = EQLParserRULE_funcall + p.RuleIndex = EQLParserRULE_pipe return p } -func InitEmptyFuncallContext(p *FuncallContext) { +func InitEmptyPipeContext(p *PipeContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = EQLParserRULE_funcall + p.RuleIndex = EQLParserRULE_pipe } -func (*FuncallContext) IsFuncallContext() {} +func (*PipeContext) IsPipeContext() {} -func NewFuncallContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *FuncallContext { - var p = new(FuncallContext) +func NewPipeContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *PipeContext { + var p = new(PipeContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = EQLParserRULE_funcall + p.RuleIndex = EQLParserRULE_pipe return p } -func (s *FuncallContext) GetParser() antlr.Parser { return s.parser } +func (s *PipeContext) GetParser() antlr.Parser { return s.parser } -func (s *FuncallContext) FuncName() IFuncNameContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IFuncNameContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } +func (s *PipeContext) CopyAll(ctx *PipeContext) { + s.CopyFrom(&ctx.BaseParserRuleContext) +} - if t == nil { - return nil - } +func (s *PipeContext) GetRuleContext() antlr.RuleContext { + return s +} - return t.(IFuncNameContext) +func (s *PipeContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *FuncallContext) AllValue() []IValueContext { - 
children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IValueContext); ok { - len++ - } - } +type PipeHeadContext struct { + PipeContext +} - tst := make([]IValueContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(IValueContext); ok { - tst[i] = t.(IValueContext) - i++ - } - } +func NewPipeHeadContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *PipeHeadContext { + var p = new(PipeHeadContext) - return tst + InitEmptyPipeContext(&p.PipeContext) + p.parser = parser + p.CopyAll(ctx.(*PipeContext)) + + return p } -func (s *FuncallContext) Value(i int) IValueContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IValueContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ - } - } +func (s *PipeHeadContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *PipeHeadContext) NUMBER() antlr.TerminalNode { + return s.GetToken(EQLParserNUMBER, 0) +} + +func (s *PipeHeadContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EQLListener); ok { + listenerT.EnterPipeHead(s) + } +} + +func (s *PipeHeadContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EQLListener); ok { + listenerT.ExitPipeHead(s) + } +} + +func (s *PipeHeadContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EQLVisitor: + return t.VisitPipeHead(s) + + default: + return t.VisitChildren(s) + } +} + +type PipeCountContext struct { + PipeContext +} + +func NewPipeCountContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *PipeCountContext { + var p = new(PipeCountContext) + + InitEmptyPipeContext(&p.PipeContext) + p.parser = parser + p.CopyAll(ctx.(*PipeContext)) + + return p +} + +func (s *PipeCountContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *PipeCountContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := 
listener.(EQLListener); ok { + listenerT.EnterPipeCount(s) + } +} + +func (s *PipeCountContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EQLListener); ok { + listenerT.ExitPipeCount(s) + } +} + +func (s *PipeCountContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EQLVisitor: + return t.VisitPipeCount(s) + + default: + return t.VisitChildren(s) + } +} + +type PipeUniqueContext struct { + PipeContext +} + +func NewPipeUniqueContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *PipeUniqueContext { + var p = new(PipeUniqueContext) + + InitEmptyPipeContext(&p.PipeContext) + p.parser = parser + p.CopyAll(ctx.(*PipeContext)) + + return p +} + +func (s *PipeUniqueContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *PipeUniqueContext) FieldList() IFieldListContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IFieldListContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IFieldListContext) +} + +func (s *PipeUniqueContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EQLListener); ok { + listenerT.EnterPipeUnique(s) + } +} + +func (s *PipeUniqueContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EQLListener); ok { + listenerT.ExitPipeUnique(s) + } +} + +func (s *PipeUniqueContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EQLVisitor: + return t.VisitPipeUnique(s) + + default: + return t.VisitChildren(s) + } +} + +type PipeTailContext struct { + PipeContext +} + +func NewPipeTailContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *PipeTailContext { + var p = new(PipeTailContext) + + InitEmptyPipeContext(&p.PipeContext) + p.parser = parser + p.CopyAll(ctx.(*PipeContext)) + + return p +} + +func (s *PipeTailContext) GetRuleContext() antlr.RuleContext { + 
return s +} + +func (s *PipeTailContext) NUMBER() antlr.TerminalNode { + return s.GetToken(EQLParserNUMBER, 0) +} + +func (s *PipeTailContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EQLListener); ok { + listenerT.EnterPipeTail(s) + } +} + +func (s *PipeTailContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EQLListener); ok { + listenerT.ExitPipeTail(s) + } +} + +func (s *PipeTailContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EQLVisitor: + return t.VisitPipeTail(s) + + default: + return t.VisitChildren(s) + } +} + +type PipeFilterContext struct { + PipeContext +} + +func NewPipeFilterContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *PipeFilterContext { + var p = new(PipeFilterContext) + + InitEmptyPipeContext(&p.PipeContext) + p.parser = parser + p.CopyAll(ctx.(*PipeContext)) + + return p +} + +func (s *PipeFilterContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *PipeFilterContext) Condition() IConditionContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IConditionContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IConditionContext) +} + +func (s *PipeFilterContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EQLListener); ok { + listenerT.EnterPipeFilter(s) + } +} + +func (s *PipeFilterContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EQLListener); ok { + listenerT.ExitPipeFilter(s) + } +} + +func (s *PipeFilterContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EQLVisitor: + return t.VisitPipeFilter(s) + + default: + return t.VisitChildren(s) + } +} + +type PipeSortContext struct { + PipeContext +} + +func NewPipeSortContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *PipeSortContext { + 
var p = new(PipeSortContext) + + InitEmptyPipeContext(&p.PipeContext) + p.parser = parser + p.CopyAll(ctx.(*PipeContext)) + + return p +} + +func (s *PipeSortContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *PipeSortContext) FieldList() IFieldListContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IFieldListContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IFieldListContext) +} + +func (s *PipeSortContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EQLListener); ok { + listenerT.EnterPipeSort(s) + } +} + +func (s *PipeSortContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EQLListener); ok { + listenerT.ExitPipeSort(s) + } +} + +func (s *PipeSortContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EQLVisitor: + return t.VisitPipeSort(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *EQLParser) Pipe() (localctx IPipeContext) { + localctx = NewPipeContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 22, EQLParserRULE_pipe) + p.SetState(176) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + + switch p.GetTokenStream().LA(1) { + case EQLParserT__36: + localctx = NewPipeHeadContext(p, localctx) + p.EnterOuterAlt(localctx, 1) + { + p.SetState(165) + p.Match(EQLParserT__36) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(166) + p.Match(EQLParserNUMBER) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + case EQLParserT__37: + localctx = NewPipeTailContext(p, localctx) + p.EnterOuterAlt(localctx, 2) + { + p.SetState(167) + p.Match(EQLParserT__37) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(168) + p.Match(EQLParserNUMBER) + if p.HasError() { + // 
Recognition error - abort rule + goto errorExit + } + } + + case EQLParserT__38: + localctx = NewPipeCountContext(p, localctx) + p.EnterOuterAlt(localctx, 3) + { + p.SetState(169) + p.Match(EQLParserT__38) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + case EQLParserT__39: + localctx = NewPipeUniqueContext(p, localctx) + p.EnterOuterAlt(localctx, 4) + { + p.SetState(170) + p.Match(EQLParserT__39) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(171) + p.FieldList() + } + + case EQLParserT__40: + localctx = NewPipeFilterContext(p, localctx) + p.EnterOuterAlt(localctx, 5) + { + p.SetState(172) + p.Match(EQLParserT__40) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(173) + p.condition(0) + } + + case EQLParserT__41: + localctx = NewPipeSortContext(p, localctx) + p.EnterOuterAlt(localctx, 6) + { + p.SetState(174) + p.Match(EQLParserT__41) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(175) + p.FieldList() + } + + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit + } + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() + if false { + goto errorExit // Trick to prevent compiler error if the label is not used + } + return localctx +} + +// IFuncallContext is an interface to support dynamic dispatch. +type IFuncallContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // Getter signatures + FuncName() IFuncNameContext + AllValue() []IValueContext + Value(i int) IValueContext + + // IsFuncallContext differentiates from other interfaces. 
+ IsFuncallContext() +} + +type FuncallContext struct { + antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyFuncallContext() *FuncallContext { + var p = new(FuncallContext) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = EQLParserRULE_funcall + return p +} + +func InitEmptyFuncallContext(p *FuncallContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = EQLParserRULE_funcall +} + +func (*FuncallContext) IsFuncallContext() {} + +func NewFuncallContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *FuncallContext { + var p = new(FuncallContext) + + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) + + p.parser = parser + p.RuleIndex = EQLParserRULE_funcall + + return p +} + +func (s *FuncallContext) GetParser() antlr.Parser { return s.parser } + +func (s *FuncallContext) FuncName() IFuncNameContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IFuncNameContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IFuncNameContext) +} + +func (s *FuncallContext) AllValue() []IValueContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IValueContext); ok { + len++ + } + } + + tst := make([]IValueContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IValueContext); ok { + tst[i] = t.(IValueContext) + i++ + } + } + + return tst +} + +func (s *FuncallContext) Value(i int) IValueContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IValueContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } if t == nil { return nil @@ -3664,48 +4296,48 @@ func (s *FuncallContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { func (p *EQLParser) Funcall() (localctx IFuncallContext) { localctx = 
NewFuncallContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 22, EQLParserRULE_funcall) + p.EnterRule(localctx, 24, EQLParserRULE_funcall) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(153) + p.SetState(178) p.FuncName() } { - p.SetState(154) - p.Match(EQLParserT__10) + p.SetState(179) + p.Match(EQLParserT__11) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(155) + p.SetState(180) p.value(0) } - p.SetState(160) + p.SetState(185) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for _la == EQLParserT__27 { + for _la == EQLParserT__29 { { - p.SetState(156) - p.Match(EQLParserT__27) + p.SetState(181) + p.Match(EQLParserT__29) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(157) + p.SetState(182) p.value(0) } - p.SetState(162) + p.SetState(187) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -3713,8 +4345,8 @@ func (p *EQLParser) Funcall() (localctx IFuncallContext) { _la = p.GetTokenStream().LA(1) } { - p.SetState(163) - p.Match(EQLParserT__11) + p.SetState(188) + p.Match(EQLParserT__12) if p.HasError() { // Recognition error - abort rule goto errorExit @@ -3742,10 +4374,6 @@ type IFuncNameContext interface { // GetParser returns the parser. GetParser() antlr.Parser - - // Getter signatures - ID() antlr.TerminalNode - // IsFuncNameContext differentiates from other interfaces. 
IsFuncNameContext() } @@ -3781,11 +4409,6 @@ func NewFuncNameContext(parser antlr.Parser, parent antlr.ParserRuleContext, inv } func (s *FuncNameContext) GetParser() antlr.Parser { return s.parser } - -func (s *FuncNameContext) ID() antlr.TerminalNode { - return s.GetToken(EQLParserID, 0) -} - func (s *FuncNameContext) GetRuleContext() antlr.RuleContext { return s } @@ -3818,46 +4441,20 @@ func (s *FuncNameContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { func (p *EQLParser) FuncName() (localctx IFuncNameContext) { localctx = NewFuncNameContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 24, EQLParserRULE_funcName) - p.SetState(168) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } + p.EnterRule(localctx, 26, EQLParserRULE_funcName) + var _la int - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 14, p.GetParserRuleContext()) { - case 1: - p.EnterOuterAlt(localctx, 1) - { - p.SetState(165) - p.Match(EQLParserID) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + p.EnterOuterAlt(localctx, 1) + { + p.SetState(190) + _la = p.GetTokenStream().LA(1) - case 2: - p.EnterOuterAlt(localctx, 2) - { - p.SetState(166) - p.Match(EQLParserID) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(167) - p.Match(EQLParserT__34) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&9223363240761753600) != 0) { + p.GetErrorHandler().RecoverInline(p) + } else { + p.GetErrorHandler().ReportMatch(p) + p.Consume() } - - case antlr.ATNInvalidAltNumber: - goto errorExit } errorExit: @@ -3957,10 +4554,10 @@ func (s *IntervalContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { func (p *EQLParser) Interval() (localctx IIntervalContext) { localctx = NewIntervalContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 26, 
EQLParserRULE_interval) + p.EnterRule(localctx, 28, EQLParserRULE_interval) p.EnterOuterAlt(localctx, 1) { - p.SetState(170) + p.SetState(192) p.Match(EQLParserINTERVAL) if p.HasError() { // Recognition error - abort rule diff --git a/quesma/eql/parser/eql_visitor.go b/quesma/eql/parser/eql_visitor.go index 2d36d4a91..a44b102f1 100644 --- a/quesma/eql/parser/eql_visitor.go +++ b/quesma/eql/parser/eql_visitor.go @@ -82,6 +82,24 @@ type EQLVisitor interface { // Visit a parse tree produced by EQLParser#ValueField. VisitValueField(ctx *ValueFieldContext) interface{} + // Visit a parse tree produced by EQLParser#PipeHead. + VisitPipeHead(ctx *PipeHeadContext) interface{} + + // Visit a parse tree produced by EQLParser#PipeTail. + VisitPipeTail(ctx *PipeTailContext) interface{} + + // Visit a parse tree produced by EQLParser#PipeCount. + VisitPipeCount(ctx *PipeCountContext) interface{} + + // Visit a parse tree produced by EQLParser#PipeUnique. + VisitPipeUnique(ctx *PipeUniqueContext) interface{} + + // Visit a parse tree produced by EQLParser#PipeFilter. + VisitPipeFilter(ctx *PipeFilterContext) interface{} + + // Visit a parse tree produced by EQLParser#PipeSort. + VisitPipeSort(ctx *PipeSortContext) interface{} + // Visit a parse tree produced by EQLParser#funcall. 
VisitFuncall(ctx *FuncallContext) interface{} diff --git a/quesma/eql/parser_test.go b/quesma/eql/parser_test.go index 0f2ec14ec..7e11df6a1 100644 --- a/quesma/eql/parser_test.go +++ b/quesma/eql/parser_test.go @@ -15,6 +15,8 @@ func TestEQL_ParseNoErrors(t *testing.T) { `process where process_name == "cmd.exe" and process_path == "C:\\Windows\\System32\\cmd.exe" or process_command_line == "cmd.exe"`, `sequence [ process where foo == 1] [ process where bar == 2]`, `sample by foo [ bar where true ]`, + "any where true | head 3", + "process where ?notexistsing == true", } for _, eqlQuery := range eqlQueries { @@ -36,6 +38,7 @@ func TestEQL_IsSupported(t *testing.T) { supported bool }{ {"simple where true", true}, + {"process where true | head 3 ", false}, {"sequence [ simple where true] [ simple where true]", false}, {"sample by foo [ bar where true ]", false}, } diff --git a/quesma/eql/playground/main.go b/quesma/eql/playground/main.go index 9be539f99..026c35b83 100644 --- a/quesma/eql/playground/main.go +++ b/quesma/eql/playground/main.go @@ -82,7 +82,7 @@ func translate(cmd string) (string, map[string]interface{}) { trans := eql.NewTransformer() trans.FieldNameTranslator = translateName - trans.ExtractParameters = true + trans.ExtractParameters = false where, parameters, err := trans.TransformQuery(cmd) if err != nil { diff --git a/quesma/eql/transform.go b/quesma/eql/transform.go index 3c523fd08..4a1d9f140 100644 --- a/quesma/eql/transform.go +++ b/quesma/eql/transform.go @@ -79,8 +79,6 @@ func (t *Transformer) TransformQuery(query string) (string, map[string]interface } // 6. Render the expression as WHERE clause - // TODO errors while rendering ? 
- // TODO add configuration for renderer renderer := &transform.Renderer{} whereClause := exp.Accept(renderer).(string) diff --git a/quesma/eql/transform/eql2exp.go b/quesma/eql/transform/eql2exp.go index 8bf825803..ff6e32254 100644 --- a/quesma/eql/transform/eql2exp.go +++ b/quesma/eql/transform/eql2exp.go @@ -38,16 +38,27 @@ func (v *EQLParseTreeToExpTransformer) evalString(s string) string { const quote = `"` if strings.HasPrefix(s, quote) && strings.HasSuffix(s, quote) { // TODO handle escape sequences - return s[1 : len(s)-1] + s = s[1 : len(s)-1] + + s = strings.ReplaceAll(s, `\"`, `"`) + s = strings.ReplaceAll(s, `\\`, `\`) + s = strings.ReplaceAll(s, `\n`, "\n") + s = strings.ReplaceAll(s, `\t`, "\t") + s = strings.ReplaceAll(s, `\r`, "\r") + } return s } -func (v *EQLParseTreeToExpTransformer) evalNumber(s string) (int, error) { +func (v *EQLParseTreeToExpTransformer) evalInteger(s string) (int, error) { return strconv.Atoi(s) } +func (v *EQLParseTreeToExpTransformer) evalFloat(s string) (float64, error) { + return strconv.ParseFloat(s, 64) +} + func (v *EQLParseTreeToExpTransformer) VisitQuery(ctx *parser.QueryContext) interface{} { return ctx.SimpleQuery().Accept(v) } @@ -59,7 +70,7 @@ func (v *EQLParseTreeToExpTransformer) VisitSimpleQuery(ctx *parser.SimpleQueryC if condition == nil { if category == nil { - return nil // TODO what is an empty query? 
-> select * from where true + return nil // empty `where` clause } else { return category } @@ -140,6 +151,10 @@ func (v *EQLParseTreeToExpTransformer) VisitField(ctx *parser.FieldContext) inte name := v.evalString(ctx.GetText()) + if strings.HasPrefix(name, `?`) { + v.error("optional fields are not supported") + } + return NewSymbol(name) } @@ -155,11 +170,17 @@ func (v *EQLParseTreeToExpTransformer) VisitLiteral(ctx *parser.LiteralContext) case ctx.STRING() != nil: return &Const{Value: v.evalString(ctx.GetText())} case ctx.NUMBER() != nil: - i, err := v.evalNumber(ctx.GetText()) + + i, err := v.evalInteger(ctx.GetText()) if err == nil { return &Const{Value: i} } + f, err := v.evalFloat(ctx.GetText()) + if err == nil { + return &Const{Value: f} + } + v.error(fmt.Sprintf("error parsing number: %v", err)) return &Const{Value: 0} diff --git a/quesma/eql/transform/renderer.go b/quesma/eql/transform/renderer.go index 2465692d2..9f7c29ca7 100644 --- a/quesma/eql/transform/renderer.go +++ b/quesma/eql/transform/renderer.go @@ -10,11 +10,17 @@ type Renderer struct { } func (v *Renderer) VisitConst(e *Const) interface{} { - switch e.Value.(type) { - // TODO proper escaping here + switch val := e.Value.(type) { + case string: - // TODO add proper escaping - return fmt.Sprintf("'%v'", e.Value.(string)) + + val = strings.ReplaceAll(val, `\`, `\\`) + val = strings.ReplaceAll(val, "'", `\'`) + val = strings.ReplaceAll(val, "\n", `\n`) + val = strings.ReplaceAll(val, "\t", `\t`) + val = strings.ReplaceAll(val, "\r", `\r`) + + return fmt.Sprintf("'%v'", val) } return fmt.Sprintf("%v", e.Value) diff --git a/quesma/eql/transform/trans_clickhouse.go b/quesma/eql/transform/trans_clickhouse.go index 67c2e7030..a4c516a6e 100644 --- a/quesma/eql/transform/trans_clickhouse.go +++ b/quesma/eql/transform/trans_clickhouse.go @@ -53,8 +53,16 @@ func (t *ClickhouseTransformer) replaceConstLikePattern(exp Exp) Exp { if constant, ok := exp.(*Const); ok { if s, ok := constant.Value.(string); ok 
{ - // TODO replace ? to sth else - return NewConst(strings.Replace(s, "*", "%", -1)) + + // Fist escape % nad _, because it's a special character in LIKE operator + + s = strings.ReplaceAll(s, "%", "\\%") + s = strings.ReplaceAll(s, "_", "\\_") + + s = strings.ReplaceAll(s, "*", "%") // replace * with % for LIKE operator + s = strings.ReplaceAll(s, "?", "_") // replace ? with _ for LIKE operator + + return NewConst(s) } } @@ -105,7 +113,12 @@ func (t *ClickhouseTransformer) VisitInfixOp(e *InfixOp) interface{} { return NewInfixOp(op, left, right) - case "in~": + case "not in~", "in~": + + targetOp := "IN" + if op == "not in~" { + targetOp = "NOT IN" + } if array, ok := right.(*Array); ok { @@ -113,12 +126,12 @@ func (t *ClickhouseTransformer) VisitInfixOp(e *InfixOp) interface{} { return t.clickhouseLower(e) } - return NewInfixOp("IN", + return NewInfixOp(targetOp, t.clickhouseLower(left), NewArray(mapExp(fn, array.Values)...)) } - return NewInfixOp("IN", t.clickhouseLower(left), right) + return t.error(op + " operator requires a list of values") case "like": @@ -200,7 +213,7 @@ func (t *ClickhouseTransformer) VisitFunction(e *Function) interface{} { return t.funcArityError(name, "3", argsCount) } - return t.error("between function not implemented") + return t.error("between function is not implemented") case "cidrMatch": //https://clickhouse.com/docs/en/sql-reference/functions/ip-address-functions#isipaddressinrange diff --git a/quesma/eql/transform_test.go b/quesma/eql/transform_test.go index 500d21e2d..67da15c22 100644 --- a/quesma/eql/transform_test.go +++ b/quesma/eql/transform_test.go @@ -34,6 +34,12 @@ func TestTransform(t *testing.T) { {`any where not (foo == 1)`, `(NOT ((foo = 1)))`}, + {`any where not (foo == -1)`, + `(NOT ((foo = -1)))`}, + + {`any where not (foo == 1.2)`, + `(NOT ((foo = 1.2)))`}, + {`any where process.name in ("naboo", "corusant")`, `(process.name IN ('naboo', 'corusant'))`}, @@ -88,9 +94,8 @@ func TestTransform(t *testing.T) { {"any 
where process.name not in (\"foo\", \"BAR\", \"BAZ\")", "(process.name NOT IN ('foo', 'BAR', 'BAZ'))"}, - // FIXME implementation {"any where process.name not in~ (\"foo\", \"bar\", \"baz\")", - "TODO (lower(process.name) NOT IN (lower('foo'), lower('bar'), lower('baz')))"}, + "(lower(process.name) NOT IN (lower('foo'), lower('bar'), lower('baz')))"}, {"any where process.name : (\"foo\", \"bar\", \"baz\") ", "((process.name ILIKE 'foo') OR ((process.name ILIKE 'bar') OR (process.name ILIKE 'baz')))"}, @@ -111,11 +116,8 @@ func TestTransform(t *testing.T) { {"any where process.pid == ( 4 / process.args_count )", "(process.pid = ((4 / process.args_count)))"}, - // FIXME check if this is correct, no float support at the moment - {"any where (process.pid == ( 4.0 / process.args_count)) ", - "TODO (process.pid = ((4.0 / process.args_count)))"}, - - // FIXME add optional field names + {"any where process.pid == ( 4.1 / process.args_count) ", + "(process.pid = ((4.1 / process.args_count)))"}, {"any where ?user.id != null", "TODO (user.id IS NOT NULL)"}, @@ -131,16 +133,16 @@ func TestTransform(t *testing.T) { "(process.name ILIKE 'foo%')"}, {"any where process.name : \"foo?\" ", - "(process.name ILIKE 'foo?')"}, + "(process.name ILIKE 'foo_')"}, {"any where process.name like \"FOO?\" ", - "(process.name LIKE 'FOO?')"}, + "(process.name LIKE 'FOO_')"}, {"any where process.name : (\"f*o\", \"ba?\", \"baz\")", - "((process.name ILIKE 'f%o') OR ((process.name ILIKE 'ba?') OR (process.name ILIKE 'baz')))"}, + "((process.name ILIKE 'f%o') OR ((process.name ILIKE 'ba_') OR (process.name ILIKE 'baz')))"}, {"any where process.name like (\"F*O\", \"BA?\", \"baz\")", - "((process.name LIKE 'F%O') OR ((process.name LIKE 'BA?') OR (process.name LIKE 'baz')))"}, + "((process.name LIKE 'F%O') OR ((process.name LIKE 'BA_') OR (process.name LIKE 'baz')))"}, {"any where process.pid == add(process.id, 5)", "" + "(process.pid = (process.id + 5))"}, @@ -219,6 +221,12 @@ func TestTransform(t 
*testing.T) { {"any where add(1,null) == 1", "((1 + NULL) = 1)"}, + + {`any where foo == "\n"`, + `(foo = '\n')`}, + + {`any where foo == "'; delete from table"`, + `(foo = '\'; delete from table')`}, } for _, tt := range tests { @@ -287,3 +295,35 @@ func TestTransformWithFieldName(t *testing.T) { }) } } + +func TestErrors(t *testing.T) { + + tests := []struct { + eql string + errorPattern string + }{ + {`any where ?notexisting == true `, + `optional fields are not supported`}, + {`any where true | head 1`, + "unsupported query type"}, + {`any where between(file.path, "System32\\", ".exe") == ""`, + `between function is not implemented`}, + } + + for _, tt := range tests { + t.Run(tt.eql, func(t *testing.T) { + + transformer := NewTransformer() + _, _, err := transformer.TransformQuery(tt.eql) + + if err == nil { + t.Error("expected error: ", tt.errorPattern) + return + } + + if !strings.Contains(err.Error(), tt.errorPattern) { + t.Error("expected error: ", tt.errorPattern, " got: ", err) + } + }) + } +} From 31cd5f19e1b65de4dd1664ae0d1b4c138acc3237 Mon Sep 17 00:00:00 2001 From: Przemyslaw Delewski <102958445+pdelewski@users.noreply.github.com> Date: Fri, 10 May 2024 13:05:31 +0200 Subject: [PATCH 13/14] Get rid of ProcessFacetsQuery, use one common ProcessQuery (#75) Final `ProcessQuery` unification. 
One common method --- quesma/clickhouse/quesma_communicator.go | 28 ++++++++++-------------- quesma/quesma/search.go | 16 +++++++------- quesma/quesma/termsenum/terms_enum.go | 2 +- 3 files changed, 21 insertions(+), 25 deletions(-) diff --git a/quesma/clickhouse/quesma_communicator.go b/quesma/clickhouse/quesma_communicator.go index 9dd17f896..7acf63e5c 100644 --- a/quesma/clickhouse/quesma_communicator.go +++ b/quesma/clickhouse/quesma_communicator.go @@ -26,22 +26,28 @@ func (lm *LogManager) Query(ctx context.Context, query string) (*sql.Rows, error return rows, err } +// GetAllColumns - returns all columns for a given table including non-schema fields +func (lm *LogManager) GetAllColumns(table *Table, query *model.Query) []string { + columns, err := table.extractColumns(query, true) + if err != nil { + logger.Error().Msgf("Failed to extract columns from query: %v", err) + return nil + } + return columns +} + // ProcessQuery - only WHERE clause // TODO query param should be type safe Query representing all parts of // sql statement that were already parsed and not string from which // we have to extract again different parts like where clause and columns to build a proper result -func (lm *LogManager) ProcessQuery(ctx context.Context, table *Table, query *model.Query) ([]model.QueryResultRow, error) { +func (lm *LogManager) ProcessQuery(ctx context.Context, table *Table, query *model.Query, columns []string) ([]model.QueryResultRow, error) { colNames, err := table.extractColumns(query, false) rowToScan := make([]interface{}, len(colNames)+len(query.NonSchemaFields)) if err != nil { return nil, err } - resultColumns, err := table.extractColumns(query, true) - if err != nil { - return nil, err - } - rows, err := executeQuery(ctx, lm, table.Name, query.StringFromColumns(colNames), resultColumns, rowToScan) + rows, err := executeQuery(ctx, lm, table.Name, query.StringFromColumns(colNames), columns, rowToScan) if err == nil { for _, row := range rows { row.Index = 
table.Name @@ -50,16 +56,6 @@ func (lm *LogManager) ProcessQuery(ctx context.Context, table *Table, query *mod return rows, err } -// TODO add support for autocomplete for attributes, if we'll find it needed -func (lm *LogManager) ProcessFacetsQuery(ctx context.Context, table *Table, query *model.Query) ([]model.QueryResultRow, error) { - colNames, err := table.extractColumns(query, false) - if err != nil { - return nil, err - } - rowToScan := make([]interface{}, len(colNames)+len(query.NonSchemaFields)) - return executeQuery(ctx, lm, table.Name, query.StringFromColumns(colNames), []string{"key", "doc_count"}, rowToScan) -} - var random = rand.New(rand.NewSource(time.Now().UnixNano())) const slowQueryThreshold = 30 * time.Second diff --git a/quesma/quesma/search.go b/quesma/quesma/search.go index b55bcb3c4..cc0dfd060 100644 --- a/quesma/quesma/search.go +++ b/quesma/quesma/search.go @@ -241,14 +241,14 @@ func (q *QueryRunner) handleSearchCommon(ctx context.Context, indexPattern strin fieldName = "*" } listQuery := queryTranslator.BuildNRowsQuery(fieldName, simpleQuery, queryInfo.Size) - hitsFallback, err = q.logManager.ProcessQuery(ctx, table, listQuery) + hitsFallback, err = q.logManager.ProcessQuery(ctx, table, listQuery, q.logManager.GetAllColumns(table, listQuery)) if err != nil { logger.ErrorWithCtx(ctx).Msgf("error processing fallback query. Err: %v, query: %+v", err, listQuery) pushSecondaryInfo(q.quesmaManagementConsole, id, path, body, translatedQueryBody, responseBody, startTime) return responseBody, err } countQuery := queryTranslator.BuildSimpleCountQuery(simpleQuery.Sql.Stmt) - countResult, err := q.logManager.ProcessQuery(ctx, table, countQuery) + countResult, err := q.logManager.ProcessQuery(ctx, table, countQuery, q.logManager.GetAllColumns(table, listQuery)) if err != nil { logger.ErrorWithCtx(ctx).Msgf("error processing count query. 
Err: %v, query: %+v", err, countQuery) pushSecondaryInfo(q.quesmaManagementConsole, id, path, body, translatedQueryBody, responseBody, startTime) @@ -461,26 +461,26 @@ func (q *QueryRunner) searchWorkerCommon(ctx context.Context, queryTranslator IQ switch queryInfo.Typ { case model.CountAsync: fullQuery = queryTranslator.BuildSimpleCountQuery(simpleQuery.Sql.Stmt) - hits, err = q.logManager.ProcessQuery(dbQueryCtx, table, fullQuery) + hits, err = q.logManager.ProcessQuery(dbQueryCtx, table, fullQuery, q.logManager.GetAllColumns(table, fullQuery)) case model.Facets, model.FacetsNumeric: // queryInfo = (Facets, fieldName, Limit results, Limit last rows to look into) fullQuery = queryTranslator.BuildFacetsQuery(queryInfo.FieldName, simpleQuery, queryInfo.I2) - hits, err = q.logManager.ProcessFacetsQuery(dbQueryCtx, table, fullQuery) + hits, err = q.logManager.ProcessQuery(dbQueryCtx, table, fullQuery, []string{"key", "doc_count"}) case model.ListByField: // queryInfo = (ListByField, fieldName, 0, LIMIT) fullQuery = queryTranslator.BuildNRowsQuery(queryInfo.FieldName, simpleQuery, queryInfo.I2) - hits, err = q.logManager.ProcessQuery(dbQueryCtx, table, fullQuery) + hits, err = q.logManager.ProcessQuery(dbQueryCtx, table, fullQuery, q.logManager.GetAllColumns(table, fullQuery)) case model.ListAllFields: // queryInfo = (ListAllFields, "*", 0, LIMIT) fullQuery = queryTranslator.BuildNRowsQuery("*", simpleQuery, queryInfo.I2) - hits, err = q.logManager.ProcessQuery(dbQueryCtx, table, fullQuery) + hits, err = q.logManager.ProcessQuery(dbQueryCtx, table, fullQuery, q.logManager.GetAllColumns(table, fullQuery)) case model.Normal: fullQuery = queryTranslator.BuildSimpleSelectQuery(simpleQuery.Sql.Stmt, queryInfo.I2) - hits, err = q.logManager.ProcessQuery(dbQueryCtx, table, fullQuery) + hits, err = q.logManager.ProcessQuery(dbQueryCtx, table, fullQuery, q.logManager.GetAllColumns(table, fullQuery)) default: logger.ErrorWithCtx(ctx).Msgf("unknown query type: %v, query body: 
%v", queryInfo.Typ, body) @@ -543,7 +543,7 @@ func (q *QueryRunner) searchAggregationWorkerCommon(ctx context.Context, aggrega for _, agg := range aggregations { logger.InfoWithCtx(ctx).Msg(agg.String()) // I'd keep for now until aggregations work fully sqls += agg.Query.String() + "\n" - rows, err := q.logManager.ProcessQuery(dbQueryCtx, table, &agg.Query) + rows, err := q.logManager.ProcessQuery(dbQueryCtx, table, &agg.Query, q.logManager.GetAllColumns(table, &agg.Query)) if err != nil { logger.ErrorWithCtx(ctx).Msg(err.Error()) continue diff --git a/quesma/quesma/termsenum/terms_enum.go b/quesma/quesma/termsenum/terms_enum.go index 97f236830..5dd115baa 100644 --- a/quesma/quesma/termsenum/terms_enum.go +++ b/quesma/quesma/termsenum/terms_enum.go @@ -37,7 +37,7 @@ func handleTermsEnumRequest(ctx context.Context, reqBody []byte, qt *queryparser dbQueryCtx, cancel := context.WithCancel(ctx) // TODO this will be used to cancel goroutine that is executing the query _ = cancel - if rows, err2 := qt.ClickhouseLM.ProcessQuery(dbQueryCtx, qt.Table, selectQuery); err2 != nil { + if rows, err2 := qt.ClickhouseLM.ProcessQuery(dbQueryCtx, qt.Table, selectQuery, qt.ClickhouseLM.GetAllColumns(qt.Table, selectQuery)); err2 != nil { logger.Error().Msgf("terms enum failed - error processing SQL query [%s]", err2) result, err = json.Marshal(emptyTermsEnumResponse()) } else { From bfbf62fccb294adc723e50fa7b7e961bf15ca0b2 Mon Sep 17 00:00:00 2001 From: Jacek Migdal Date: Fri, 10 May 2024 13:54:10 +0200 Subject: [PATCH 14/14] Increase disk size (#74) Our demo days after 4 days due to lack of space: 1. Initially we have 10GB, but 7.4GB is used by OS and system. 2. Increased disk size to 200GB, which cost $24/month. 3. Also changed type for SSD for better performance. 
--- docker/deploy/quesma-all-in-one-vm.tf | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docker/deploy/quesma-all-in-one-vm.tf b/docker/deploy/quesma-all-in-one-vm.tf index 723116fd0..1ef1b4b7b 100644 --- a/docker/deploy/quesma-all-in-one-vm.tf +++ b/docker/deploy/quesma-all-in-one-vm.tf @@ -55,6 +55,11 @@ resource "google_compute_instance" "vm_instance" { boot_disk { initialize_params { image = "debian-cloud/debian-10" + size = 200 + type = "pd-balanced" + labels = { + name = "quesma-demo-aio-vm" + } } }