diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 1574f81..c55b766 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -38,6 +38,10 @@ jobs:
           host node port: 9300
           node port: 9300
           discovery type: 'single-node'
+      - name: Start MongoDB
+        uses: supercharge/mongodb-github-action@1.7.0
+        with:
+          mongodb-version: '5.0'
       - name: Run unit tests
         run: |
           pytest
diff --git a/README.md b/README.md
index ad770cc..2d3cb3e 100644
--- a/README.md
+++ b/README.md
@@ -12,13 +12,15 @@ pygeofilter is a pure Python parser implementation of OGC filtering standards
   * [CQL as defined in CSW 2.0](https://portal.ogc.org/files/?artifact_id=20555)
   * [CQL JSON as defined in OGC API - Features - Part 3: Filtering and the Common Query Language (CQL)](https://portal.ogc.org/files/96288#cql-json-schema)
   * [JSON Filter Expressions (JFE)](https://github.com/tschaub/ogcapi-features/tree/json-array-expression/extensions/cql/jfe)
+  * [FES](http://docs.opengeospatial.org/is/09-026r2/09-026r2.html)
 * Soon:
   * [CQL Text as defined in OGC API - Features - Part 3: Filtering and the Common Query Language (CQL)](https://portal.ogc.org/files/96288#cql-bnf)
-  * [FES](http://docs.opengeospatial.org/is/09-026r2/09-026r2.html)
 * Several backends included
   * [Django](https://www.djangoproject.com/)
   * [SQLAlchemy](https://www.sqlalchemy.org/)
   * [(Geo)Pandas](https://pandas.pydata.org/)
+  * [Elasticsearch](https://www.elastic.co/elasticsearch/)
+  * [MongoDB](https://www.mongodb.com/)
 * Native Python objects
@@ -328,6 +330,37 @@ Note that it is vital to specify the `SQLite` dialect as this is the one used in
 :warning: Input values are *not* sanitized/separated from the generated SQL text. This is due to the compatibility with the OGR API not allowing to separate the SQL from the arguments.
 
+### MongoDB
+
+The `MongoDBEvaluator` translates the filter AST into a MongoDB query document (a plain `dict`) that can be passed to a collection's `find` method using the pymongo client.
+
+```python
+import pymongo
+from pymongo import MongoClient
+from pygeofilter.parsers.ecql import parse
+from pygeofilter.backends.mongodb import to_filter
+
+# connect to the MongoDB database and create a spatial index for the geometry
+client = MongoClient()
+collection = client.db.collection
+collection.create_index([
+    ("geometry", pymongo.GEOSPHERE),
+])
+
+# insert records here
+# ...
+
+# parse a filter
+ast_ = parse('INTERSECTS(geometry, ENVELOPE (0.0 1.0 0.0 1.0))')
+
+# turn the AST into a MongoDB query
+query = to_filter(ast_)
+
+# perform the query
+results = collection.find(query)
+```
+
+
 ### Optimization
 
 This is a special kind of backend, as the result of the AST evaluation is actually a new AST. The purpose of this backend is to eliminate static branches of the AST, potentially reducing the cost of an actual evaluation for filtering values.
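In addition to the basic example in the MongoDB section of the README above, `to_filter` accepts an optional `attribute_map` that translates filter attribute names into (possibly nested) document field names before the query document is built. A minimal sketch, assuming a hypothetical collection that stores its attributes in a `properties` sub-document (the field names here are illustrative, not part of pygeofilter):

```python
from pygeofilter.parsers.ecql import parse
from pygeofilter.backends.mongodb import to_filter

# hypothetical mapping from filter attribute names to document field names
attribute_map = {
    "height": "properties.height",
    "geometry": "geometry",
}

# the mapped field name becomes the key of the query document,
# e.g. {"properties.height": {"$gt": 10}}
query = to_filter(parse("height > 10"), attribute_map)
```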
diff --git a/pygeofilter/backends/mongodb/__init__.py b/pygeofilter/backends/mongodb/__init__.py new file mode 100644 index 0000000..15464ac --- /dev/null +++ b/pygeofilter/backends/mongodb/__init__.py @@ -0,0 +1,33 @@ +# ------------------------------------------------------------------------------ +# +# Project: pygeofilter +# Authors: Fabian Schindler +# +# ------------------------------------------------------------------------------ +# Copyright (C) 2022 EOX IT Services GmbH +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies of this Software or works derived from this Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# ------------------------------------------------------------------------------ + +""" MongoDB backend for pygeofilter. +""" + +from .evaluate import to_filter + +__all__ = ["to_filter"] diff --git a/pygeofilter/backends/mongodb/evaluate.py b/pygeofilter/backends/mongodb/evaluate.py new file mode 100644 index 0000000..da5a25f --- /dev/null +++ b/pygeofilter/backends/mongodb/evaluate.py @@ -0,0 +1,380 @@ +# ------------------------------------------------------------------------------ +# +# Project: pygeofilter +# Authors: Fabian Schindler +# +# ------------------------------------------------------------------------------ +# Copyright (C) 2022 EOX IT Services GmbH +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies of this Software or works derived from this Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# ------------------------------------------------------------------------------ + +""" +MongoDB filter evaluator. 
+""" + + +# pylint: disable=E1130,C0103,W0223 + +from dataclasses import dataclass +from functools import wraps +from typing import Dict, Optional + +from pygeofilter.util import like_pattern_to_re_pattern + +from ..evaluator import Evaluator, handle +from ... import ast +from ... import values + + +COMPARISON_OP_MAP = { + ast.ComparisonOp.EQ: "$eq", + ast.ComparisonOp.NE: "$ne", + ast.ComparisonOp.LT: "$lt", + ast.ComparisonOp.LE: "$lte", + ast.ComparisonOp.GT: "$gt", + ast.ComparisonOp.GE: "$gte", +} + + +SWAP_COMPARISON_OP_MAP = { + ast.ComparisonOp.EQ: ast.Equal, + ast.ComparisonOp.NE: ast.NotEqual, + ast.ComparisonOp.LT: ast.GreaterThan, + ast.ComparisonOp.GT: ast.LessThan, + ast.ComparisonOp.LE: ast.GreaterEqual, + ast.ComparisonOp.GE: ast.LessEqual, +} + + +SPATIAL_COMPARISON_OP_MAP = { + ast.SpatialComparisonOp.INTERSECTS: "$geoIntersects", + ast.SpatialComparisonOp.WITHIN: "$geoWithin", +} + +SPATIAL_DISTANCE_OP_MAP = { + ast.SpatialDistanceOp.DWITHIN: "$maxDistance", + ast.SpatialDistanceOp.BEYOND: "$minDistance", +} + +DISTANCE_UNITS_FACTORS = { + "kilometers": 1000, + "feet": 0.3048, + "statute miles": 1609.34, + "nautical miles": 1852, + "meters": 1, +} + + +def to_meters(distance: float, units: str): + """Returns common distance units to meters""" + factor = DISTANCE_UNITS_FACTORS[units.lower()] + return distance * factor + + +def swap_comparison(node: ast.Comparison, lhs, rhs): + """Swaps comparison nodes""" + return SWAP_COMPARISON_OP_MAP[node.op](node.rhs, node.lhs), rhs, lhs + + +def swap_spatial_comparison(node: ast.SpatialComparisonPredicate, lhs, rhs): + """Swaps spatial comparison nodes""" + if node.op == ast.SpatialComparisonOp.INTERSECTS: + return ast.GeometryIntersects(node.rhs, node.lhs), rhs, lhs + + raise ValueError(f"Cannot swap spatial comparison predicate {node.op}") + + +def swap_distance_comparison(node: ast.SpatialDistancePredicate, lhs, rhs): + """Swaps distance comparison nodes""" + return type(node)(node.rhs, node.lhs, node.distance, node.units), rhs, lhs + + +def swap_array_comparison(node: ast.ArrayPredicate, lhs, rhs): + """Swaps array comparison nodes""" + if node.op == ast.ArrayComparisonOp.AEQUALS: + return ast.ArrayEquals(node.rhs, node.lhs), rhs, lhs + + raise ValueError(f"Cannot swap array comparison predicate {node.op}") + + +@dataclass(slots=True) +class AttributeWrapper: + "Wrapper for attribute access" + name: str + + +def ensure_lhs_attribute(swapper=None): + """Decorator to ensure that the left hand side is always an attribute. 
+    If a `swapper` is provided, it is used to swap `lhs` with `rhs` when
+    only the right hand side is an attribute.
+    """
+
+    def inner(handler):
+        @wraps(handler)
+        def wrapper(self, node, lhs, *args, **kwargs):
+            if isinstance(lhs, AttributeWrapper):
+                return handler(self, node, lhs.name, *args, **kwargs)
+            if swapper and isinstance(args[0], AttributeWrapper):
+                node, lhs, rhs = swapper(node, lhs, args[0].name)
+                return handler(self, node, lhs, rhs, *args[1:], **kwargs)
+            raise ValueError("Expected an attribute on at least one side")
+
+        return wrapper
+
+    return inner
+
+
+class MongoDBEvaluator(Evaluator):
+    """A filter evaluator that produces MongoDB query documents."""
+
+    def __init__(
+        self,
+        attribute_map: Optional[Dict[str, str]] = None,
+    ):
+        self.attribute_map = attribute_map
+
+    @handle(ast.Not)
+    def not_(self, _, sub):
+        """Inverts a filter object."""
+        return {"$not": sub}
+
+    @handle(ast.And, ast.Or)
+    def combination(self, node: ast.Combination, lhs, rhs):
+        """Joins two filter objects with an `$and`/`$or` operator."""
+        op = "$and" if node.op == ast.CombinationOp.AND else "$or"
+        lhs_subs = lhs[op] if op in lhs else [lhs]
+        rhs_subs = rhs[op] if op in rhs else [rhs]
+        return {op: lhs_subs + rhs_subs}
+
+    @handle(ast.Comparison, subclasses=True)
+    @ensure_lhs_attribute(swap_comparison)
+    def comparison(self, node: ast.Comparison, lhs, rhs):
+        """Creates a comparison filter."""
+        return {lhs: {COMPARISON_OP_MAP[node.op]: rhs}}
+
+    @handle(ast.Between)
+    @ensure_lhs_attribute()
+    def between(self, node: ast.Between, lhs, low, high):
+        """Creates an expression with `$lte`/`$gte` for the `between` node."""
+        expr = {
+            "$lte": high,
+            "$gte": low,
+        }
+        if node.not_:
+            expr = self.not_(None, expr)
+        return {lhs: expr}
+
+    @handle(ast.Like)
+    @ensure_lhs_attribute()
+    def like(self, node: ast.Like, lhs):
+        """Creates a `$regex` query for the given like filter."""
+        re_pattern = like_pattern_to_re_pattern(
+            node.pattern, node.wildcard, node.singlechar, node.escapechar
+        )
+        expr = {"$regex": re_pattern, "$options": "i" if node.nocase else ""}
+        if node.not_:
+            expr = self.not_(None, expr)
+        return {lhs: expr}
+
+    @handle(ast.In)
+    @ensure_lhs_attribute()
+    def in_(self, node: ast.In, lhs, *options):
+        """Creates an `$in`/`$nin` query for the given `in` filter."""
+        return {lhs: {"$nin" if node.not_ else "$in": list(options)}}
+
+    @handle(ast.IsNull)
+    @ensure_lhs_attribute()
+    def null(self, node: ast.IsNull, lhs):
+        """Performs a null check by using the `$type` query on the given
+        field.
+ """ + expr = {"$type": "null"} + if node.not_: + expr = self.not_(None, expr) + return {lhs: expr} + + @handle(ast.Exists) + @ensure_lhs_attribute() + def exists(self, node: ast.Exists, lhs): + """Performs an existense check, by using the `$exists` query on the + given field + """ + return {lhs: {"$exists": not node.not_}} + + # @handle(ast.TemporalPredicate, subclasses=True) + # def temporal(self, node: ast.TemporalPredicate, lhs, rhs): + # """Creates a filter to match the given temporal predicate""" + # op = node.op + # if isinstance(rhs, (date, datetime)): + # low = high = rhs + # else: + # low, high = rhs + + # query = "range" + # not_ = False + # predicate: Dict[str, Union[date, datetime, str]] + # if op == ast.TemporalComparisonOp.DISJOINT: + # not_ = True + # predicate = {"gte": low, "lte": high} + # elif op == ast.TemporalComparisonOp.AFTER: + # predicate = {"gt": high} + # elif op == ast.TemporalComparisonOp.BEFORE: + # predicate = {"lt": low} + # elif ( + # op == ast.TemporalComparisonOp.TOVERLAPS + # or op == ast.TemporalComparisonOp.OVERLAPPEDBY + # ): + # predicate = {"gte": low, "lte": high} + # elif op == ast.TemporalComparisonOp.BEGINS: + # query = "term" + # predicate = {"value": low} + # elif op == ast.TemporalComparisonOp.BEGUNBY: + # query = "term" + # predicate = {"value": high} + # elif op == ast.TemporalComparisonOp.DURING: + # predicate = {"gt": low, "lt": high, "relation": "WITHIN"} + # elif op == ast.TemporalComparisonOp.TCONTAINS: + # predicate = {"gt": low, "lt": high, "relation": "CONTAINS"} + # # elif op == ast.TemporalComparisonOp.ENDS: + # # pass + # # elif op == ast.TemporalComparisonOp.ENDEDBY: + # # pass + # # elif op == ast.TemporalComparisonOp.TEQUALS: + # # pass + # # elif op == ast.TemporalComparisonOp.BEFORE_OR_DURING: + # # pass + # # elif op == ast.TemporalComparisonOp.DURING_OR_AFTER: + # # pass + # else: + # raise NotImplementedError(f"Unsupported temporal operator: {op}") + + # q = Q( + # query, + # **{lhs: predicate}, + # ) + # if not_: + # q = ~q + # return q + + @handle(ast.GeometryIntersects, ast.GeometryWithin) + @ensure_lhs_attribute(swap_spatial_comparison) + def spatial_comparison( + self, node: ast.SpatialComparisonPredicate, lhs: str, rhs + ): + """Creates a query for the give spatial comparison predicate.""" + return {lhs: {SPATIAL_COMPARISON_OP_MAP[node.op]: {"$geometry": rhs}}} + + @handle(ast.DistanceWithin, ast.DistanceBeyond) + @ensure_lhs_attribute(swap_distance_comparison) + def distance(self, node: ast.SpatialDistancePredicate, lhs, rhs): + """Creates a `$near` query for the given spatial distance + predicate. + """ + distance = to_meters(node.distance, node.units) + return { + lhs: { + "$near": { + "$geometry": rhs, + SPATIAL_DISTANCE_OP_MAP[node.op]: distance, + } + } + } + + @handle(ast.BBox) + @ensure_lhs_attribute() + def bbox(self, node: ast.BBox, lhs): + """Creates a `$geoIntersects` query with the given bbox as + a `$box`. Ignores the `crs` parameter of the BBox. 
+ """ + return { + lhs: { + "$geoIntersects": { + "$geometry": self.envelope( + values.Envelope( + node.minx, node.maxx, node.miny, node.maxy + ) + ) + } + } + } + + @handle(ast.ArrayEquals, ast.ArrayOverlaps, ast.ArrayContains) + @ensure_lhs_attribute(swap_array_comparison) + def array(self, node: ast.ArrayPredicate, lhs, rhs): + """Creates the according query for the given array predicate.""" + if node.op == ast.ArrayComparisonOp.AEQUALS: + return {lhs: {"$eq": rhs}} + elif node.op == ast.ArrayComparisonOp.AOVERLAPS: + return {lhs: {"$in": rhs}} + elif node.op == ast.ArrayComparisonOp.ACONTAINS: + return {lhs: {"$all": rhs}} + + @handle(ast.Attribute) + def attribute(self, node: ast.Attribute): + """Attribute mapping from filter fields to elasticsearch fields. + If an attribute mapping is provided, it is used to look up the + field name from there. + """ + if self.attribute_map is not None: + return AttributeWrapper(self.attribute_map[node.name]) + return AttributeWrapper(node.name) + + # @handle(ast.Arithmetic, subclasses=True) + # def arithmetic(self, node: ast.Arithmetic, lhs, rhs): + # op = ARITHMETIC_OP_MAP[node.op] + # return f"({lhs} {op} {rhs})" + + # @handle(ast.Function) + # def function(self, node, *arguments): + # func = self.function_map[node.name] + # return f"{func}({','.join(arguments)})" + + @handle(*values.LITERALS) + def literal(self, node): + """Literal values are directly passed through""" + return node + + @handle(values.Geometry) + def geometry(self, node: values.Geometry): + """Geometry values are converted to a GeoJSON object""" + return node.geometry + + @handle(values.Envelope) + def envelope(self, node: values.Envelope): + """Envelope values are converted to a $box object.""" + return { + "type": "Polygon", + "coordinates": [ + [ + [node.x1, node.y1], + [node.x1, node.y2], + [node.x2, node.y2], + [node.x2, node.y1], + [node.x1, node.y1], + ] + ], + } + + +def to_filter(root, attribute_map: Optional[Dict[str, str]] = None): + """Shorthand function to convert a pygeofilter AST to a MongoDB + filter structure. 
+ """ + return MongoDBEvaluator(attribute_map).evaluate(root) diff --git a/requirements-test.txt b/requirements-test.txt index 5fb3f57..c1ec946 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -11,3 +11,4 @@ pygeoif lark elasticsearch elasticsearch-dsl +pymongo \ No newline at end of file diff --git a/tests/backends/mongodb/test_evaluate.py b/tests/backends/mongodb/test_evaluate.py new file mode 100644 index 0000000..12f24cc --- /dev/null +++ b/tests/backends/mongodb/test_evaluate.py @@ -0,0 +1,269 @@ +# pylint: disable=W0621,C0114,C0115,C0116,C0103 + +import pymongo +import pytest +from pymongo import MongoClient + +from pygeofilter.parsers.ecql import parse +from pygeofilter.backends.mongodb import to_filter +from pygeofilter.util import parse_datetime +from pygeofilter import ast + + +@pytest.fixture +def client(): + return MongoClient() + + +@pytest.fixture +def db(client): + return client["test-db"] + + +@pytest.fixture +def collection(db): + return db["test-collection"] + + +@pytest.fixture +def data(collection): + collection.create_index([ + ("geometry", pymongo.GEOSPHERE), + ("center", pymongo.GEOSPHERE), + # ("identifier"), + ]) + id_a = collection.insert_one({ + "identifier": "A", + "geometry": { + "type": "MultiPolygon", + "coordinates": [[[ + [0, 0], + [0, 5], + [5, 5], + [5, 0], + [0, 0] + ]]], + }, + "center": { + "type": "Point", + "coordinates": [2.5, 2.5], + }, + "float_attribute": 0.0, + "int_attribute": 5, + "str_attribute": "this is a test", + "maybe_str_attribute": None, + "datetime_attribute": parse_datetime("2000-01-01T00:00:00Z"), + "array_attribute": [2, 3], + "extra_attribute": True, + }).inserted_id + record_a = collection.find_one({"_id": id_a}) + + id_b = collection.insert_one({ + "identifier": "B", + "geometry": { + "type": "MultiPolygon", + "coordinates": [[[ + [5, 5], + [5, 10], + [10, 10], + [10, 5], + [5, 5] + ]]], + }, + "center": { + "type": "Point", + "coordinates": [7.5, 7.5], + }, + "float_attribute": 30.0, + "int_attribute": None, + "str_attribute": "this is another test", + "maybe_str_attribute": "some value", + "array_attribute": [1, 2, 3, 4, 5], + "datetime_attribute": parse_datetime("2000-01-01T00:00:10Z"), + }).inserted_id + record_b = collection.find_one({"_id": id_b}) + + yield [record_a, record_b] + + collection.drop() + + +@pytest.fixture +def evaluate(collection, data): # pylint: disable=W0613 + def inner(ast_, expected_ids=None): + query = to_filter(ast_) + result = list(collection.find(query)) + if expected_ids is not None: + assert expected_ids == [r["identifier"] for r in result] + return result + return inner + + +def test_comparison(evaluate): + evaluate(parse('int_attribute = 5'), ["A"]) + evaluate(parse('float_attribute < 6'), ["A"]) + evaluate(parse('float_attribute > 6'), ["B"]) + evaluate(parse('int_attribute <= 5'), ["A"]) + evaluate(parse('float_attribute >= 8'), ["B"]) + evaluate(parse('float_attribute <> 0.0'), ["B"]) + + +def test_combination(evaluate): + evaluate(parse('int_attribute = 5 AND float_attribute < 6.0'), ["A"]) + evaluate(parse('int_attribute = 6 OR float_attribute < 6.0'), ["A"]) + + +def test_between(evaluate): + evaluate(parse('float_attribute BETWEEN -1 AND 1'), ["A"]) + evaluate(parse('int_attribute NOT BETWEEN 4 AND 6'), ["B"]) + + +def test_like(evaluate): + evaluate(parse('str_attribute LIKE \'this is a test\''), ["A"]) + evaluate(parse('str_attribute LIKE \'this is % test\''), ["A", "B"]) + evaluate(parse('str_attribute NOT LIKE \'% another test\''), ["A"]) + 
evaluate(parse('str_attribute NOT LIKE \'this is . test\''), ["B"]) + evaluate(parse('str_attribute ILIKE \'THIS IS . TEST\''), ["A"]) + evaluate(parse('str_attribute ILIKE \'THIS IS % TEST\''), ["A", "B"]) + + +def test_in(evaluate): + evaluate(parse('int_attribute IN ( 1, 2, 3, 4, 5 )'), ["A"]) + evaluate(parse('int_attribute NOT IN ( 1, 2, 3, 4, 5 )'), ["B"]) + + +def test_null(evaluate): + evaluate(parse('maybe_str_attribute IS NULL'), ["A"]) + evaluate(parse('maybe_str_attribute IS NOT NULL'), ["B"]) + + +def test_has_attr(evaluate): + evaluate(parse('extra_attribute EXISTS'), ["A"]) + evaluate(parse('extra_attribute DOES-NOT-EXIST'), ["B"]) + + +# def test_temporal(data): +# result = filter_( +# ast.TimeDisjoint( +# ast.Attribute("datetime_attribute"), +# [ +# parse_datetime("2000-01-01T00:00:05.00Z"), +# parse_datetime("2000-01-01T00:00:15.00Z"), +# ] +# ) +# ) +# assert len(result) == 1 and result[0].identifier is data[0].identifier + +# result = filter_( +# parse('datetime_attribute BEFORE 2000-01-01T00:00:05.00Z'), +# ) +# assert len(result) == 1 and result[0].identifier is data[0].identifier + +# result = filter_( +# parse('datetime_attribute AFTER 2000-01-01T00:00:05.00Z'), +# ) +# assert len(result) == 1 and result[0].identifier is data[1].identifier + + +def test_spatial(evaluate): + evaluate( + parse('INTERSECTS(geometry, ENVELOPE (0.0 1.0 0.0 1.0))'), + ["A"], + ) + evaluate( + parse( + 'WITHIN(geometry, ' + 'POLYGON ((-1.0 -1.0,-1.0 6.0, 6.0 6.0,6.0 -1.0,-1.0 -1.0)))' + ), + ["A"], + ) + evaluate( + parse('BBOX(center, 2, 2, 3, 3)'), + ) + + +def test_spatial_distance(evaluate): + evaluate( + parse('DWITHIN(geometry, POINT(-0.00001 -0.000001), 5, feet)'), + ["A"] + ) + + evaluate( + parse('BEYOND(geometry, POINT(7.5 7.5), 10, kilometers)'), + ["A"] + ) + + +def test_array(evaluate): + evaluate( + ast.ArrayEquals( + ast.Attribute("array_attribute"), + [2, 3], + ), + ["A"] + ) + + evaluate( + ast.ArrayOverlaps( + ast.Attribute("array_attribute"), + [2, 3, 4], + ), + ["A", "B"] + ) + + evaluate( + ast.ArrayContains( + ast.Attribute("array_attribute"), + [1, 2, 3, 4], + ), + ["B"] + ) + + +def test_swapped_lhs_rhs(evaluate): + evaluate(parse('5 = int_attribute'), ["A"]) + evaluate(parse('6 > float_attribute'), ["A"]) + evaluate(parse('6 < float_attribute'), ["B"]) + evaluate(parse('5 >= int_attribute'), ["A"]) + evaluate(parse('8 <= float_attribute'), ["B"]) + evaluate(parse('0.0 <> float_attribute'), ["B"]) + + evaluate( + parse('INTERSECTS(ENVELOPE (0.0 1.0 0.0 1.0), geometry)'), + ["A"], + ) + with pytest.raises(ValueError): + evaluate( + parse( + 'WITHIN(' + 'POLYGON ((-1.0 -1.0,-1.0 6.0, 6.0 6.0,6.0 -1.0,-1.0 -1.0)),' + 'geometry)' + ), + ["A"], + ) + + evaluate( + ast.ArrayEquals( + [2, 3], + ast.Attribute("array_attribute"), + ), + ["A"] + ) + + with pytest.raises(ValueError): + evaluate( + ast.ArrayOverlaps( + [2, 3, 4], + ast.Attribute("array_attribute"), + ), + ["A", "B"] + ) + + with pytest.raises(ValueError): + evaluate( + ast.ArrayContains( + [1, 2, 3, 4], + ast.Attribute("array_attribute"), + ), + ["B"] + )
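The tests above run against a real MongoDB instance: `MongoClient()` connects to the default `localhost:27017`, which the CI workflow starts via `supercharge/mongodb-github-action`. As a rough sketch of the filter documents the evaluator is expected to build for some of the parsed filters used above (the dictionaries in the comments are derived from the operator maps in `evaluate.py`, not taken from the test suite):

```python
from pygeofilter.parsers.ecql import parse
from pygeofilter.backends.mongodb import to_filter

# simple comparison -> {"int_attribute": {"$eq": 5}}
print(to_filter(parse("int_attribute = 5")))

# combination -> {"$and": [{"int_attribute": {"$eq": 5}},
#                          {"float_attribute": {"$lt": 6.0}}]}
print(to_filter(parse("int_attribute = 5 AND float_attribute < 6.0")))

# spatial predicate -> the ENVELOPE is turned into a GeoJSON polygon and
# wrapped as {"geometry": {"$geoIntersects": {"$geometry": {...}}}}
print(to_filter(parse("INTERSECTS(geometry, ENVELOPE (0.0 1.0 0.0 1.0))")))
```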