From db9edbe9d26627b8eb0a00fa48beefbc2f09c17d Mon Sep 17 00:00:00 2001 From: Vladimir Rudnyh Date: Tue, 11 Feb 2025 22:47:44 +0700 Subject: [PATCH 1/6] Update models Add validation and tests for the following models: - Bounding box - Pose - Segments Add conversion to/from different bounding box formats, including normalized versions. --- src/datachain/model/bbox.py | 358 +++++++++++++++++++++++++++---- src/datachain/model/pose.py | 131 ++++++++++- src/datachain/model/segment.py | 62 +++++- src/datachain/model/utils.py | 76 +++++++ tests/unit/lib/test_models.py | 142 ------------ tests/unit/model/__init__.py | 0 tests/unit/model/test_bbox.py | 208 ++++++++++++++++++ tests/unit/model/test_pose.py | 181 ++++++++++++++++ tests/unit/model/test_segment.py | 86 ++++++++ tests/unit/model/test_utils.py | 132 ++++++++++++ 10 files changed, 1190 insertions(+), 186 deletions(-) create mode 100644 src/datachain/model/utils.py delete mode 100644 tests/unit/lib/test_models.py create mode 100644 tests/unit/model/__init__.py create mode 100644 tests/unit/model/test_bbox.py create mode 100644 tests/unit/model/test_pose.py create mode 100644 tests/unit/model/test_segment.py create mode 100644 tests/unit/model/test_utils.py diff --git a/src/datachain/model/bbox.py b/src/datachain/model/bbox.py index 4dff3375c..a1adf610d 100644 --- a/src/datachain/model/bbox.py +++ b/src/datachain/model/bbox.py @@ -1,7 +1,17 @@ +from collections.abc import Sequence +from typing import Optional + from pydantic import Field from datachain.lib.data_model import DataModel +from .utils import ( + normalize_coords, + validate_bbox, + validate_bbox_normalized, + validate_img_size, +) + class BBox(DataModel): """ @@ -11,7 +21,7 @@ class BBox(DataModel): title (str): The title of the bounding box. coords (list[int]): The coordinates of the bounding box. - The bounding box is defined by two points: + The bounding box is defined by two points with pixel coordinates: - (x1, y1): The top-left corner of the box. - (x2, y2): The bottom-right corner of the box. """ @@ -20,28 +30,258 @@ class BBox(DataModel): coords: list[int] = Field(default=[]) @staticmethod - def from_list(coords: list[float], title: str = "") -> "BBox": - assert len(coords) == 4, "Bounding box must be a list of 4 coordinates." - assert all(isinstance(value, (int, float)) for value in coords), ( - "Bounding box coordinates must be floats or integers." + def from_voc( + coords: Sequence[float], + title: str = "", + normalized_to: Optional[Sequence[int]] = None, + ) -> "BBox": + """ + Create a bounding box from coordinates in PASCAL VOC format. + + PASCAL VOC format represents bounding boxes as [x_min, y_min, x_max, y_max], + where: + - (x_min, y_min) are the coordinates of the top-left corner. + - (x_max, y_max) are the coordinates of the bottom-right corner. + + If the input coordinates are normalized (i.e., floats between 0 and 1), + they will be converted to absolute pixel values based on the provided + image size. The image size should be given as a tuple (width, height) + via the `normalized_to` argument. + + Args: + coords (Sequence[float]): The bounding box coordinates. + title (str, optional): The title or label for the bounding box. + Defaults to "". + normalized_to (Sequence[int], optional): The reference image size + (width, height) for denormalizing the bounding box. If None (default), + the coordinates are assumed to be absolute pixel values. + + Returns: + BBox: A bounding box object. 
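# Usage sketch for the constructor above; the coordinate values mirror
# tests/unit/model/test_bbox.py added later in this patch.
from datachain.model import BBox

box = BBox.from_voc([10, 20, 90, 80], title="BBox")
# Normalized inputs are scaled back to pixels via `normalized_to`.
same = BBox.from_voc([0.1, 0.2, 0.9, 0.8], title="BBox", normalized_to=(100, 100))
assert box.coords == same.coords == [10, 20, 90, 80]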
+ """ + coords = ( + validate_bbox(coords) + if normalized_to is None + else validate_bbox_normalized(coords, normalized_to) + ) + + return BBox(title=title, coords=[round(c) for c in coords]) + + def to_voc(self) -> list[int]: + """ + Convert the bounding box to PASCAL VOC format. + + PASCAL VOC format represents bounding boxes as [x_min, y_min, x_max, y_max], + where: + - (x_min, y_min) are the coordinates of the top-left corner. + - (x_max, y_max) are the coordinates of the bottom-right corner. + + Returns: + list[int]: The bounding box coordinates in PASCAL VOC format. + """ + return self.coords + + def to_voc_normalized(self, img_size: Sequence[int]) -> list[float]: + """ + Convert the bounding box to PASCAL VOC format with normalized coordinates. + + PASCAL VOC format represents bounding boxes as [x_min, y_min, x_max, y_max], + where: + - (x_min, y_min) are the coordinates of the top-left corner. + - (x_max, y_max) are the coordinates of the bottom-right corner. + + Normalized coordinates are floats between 0 and 1, representing the + relative position of the pixels in the image. + + Returns: + list[float]: The bounding box coordinates in PASCAL VOC format + with normalized coordinates. + """ + return normalize_coords(self.coords, img_size) + + @staticmethod + def from_coco( + coords: Sequence[float], + title: str = "", + normalized_to: Optional[Sequence[int]] = None, + ) -> "BBox": + """ + Create a bounding box from coordinates in COCO format. + + COCO format represents bounding boxes as [x, y, width, height], where: + - (x, y) are the coordinates of the top-left corner. + - width and height define the size of the bounding box. + + If the input coordinates are normalized (i.e., floats between 0 and 1), + they will be converted to absolute pixel values based on the provided + image size. The image size should be given as a tuple (width, height) + via the `normalized_to` argument. + + Args: + coords (Sequence[float]): The bounding box coordinates. + title (str, optional): The title or label for the bounding box. + Defaults to "". + normalized_to (Sequence[int], optional): The reference image size + (width, height) for denormalizing the bounding box. If None (default), + the coordinates are assumed to be absolute pixel values. + + Returns: + BBox: A bounding box object. + """ + coords = ( + validate_bbox(coords) + if normalized_to is None + else validate_bbox_normalized(coords, normalized_to) ) + + x, y, width, height = coords return BBox( title=title, - coords=[round(c) for c in coords], + coords=[round(x), round(y), round(x + width), round(y + height)], ) + def to_coco(self) -> list[int]: + """ + Convert the bounding box to COCO format. + + COCO format represents bounding boxes as [x, y, width, height], where: + - (x, y) are the coordinates of the top-left corner. + - width and height define the size of the bounding box. + + Returns: + list[int]: The bounding box coordinates in PASCAL VOC format. + """ + return [ + self.coords[0], + self.coords[1], + self.coords[2] - self.coords[0], + self.coords[3] - self.coords[1], + ] + + def to_coco_normalized(self, img_size: Sequence[int]) -> list[float]: + """ + Convert the bounding box to COCO format with normalized coordinates. + + COCO format represents bounding boxes as [x, y, width, height], where: + - (x, y) are the coordinates of the top-left corner. + - width and height define the size of the bounding box. + + Normalized coordinates are floats between 0 and 1, representing the + relative position of the pixels in the image. 
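# Usage sketch for the COCO helpers above; values mirror the unit tests
# added in this patch. The box is stored internally as VOC-style corners.
from datachain.model import BBox

box = BBox.from_coco([10, 20, 80, 60], title="BBox")  # x, y, width, height
assert box.coords == [10, 20, 90, 80]
assert box.to_coco() == [10, 20, 80, 60]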
+ + Returns: + list[float]: The bounding box coordinates in PASCAL VOC format + with normalized coordinates. + """ + coords_normalized = normalize_coords(self.coords, img_size) + return [ + coords_normalized[0], + coords_normalized[1], + coords_normalized[2] - coords_normalized[0], + coords_normalized[3] - coords_normalized[1], + ] + @staticmethod - def from_dict(coords: dict[str, float], title: str = "") -> "BBox": - assert isinstance(coords, dict) and set(coords) == { - "x1", - "y1", - "x2", - "y2", - }, "Bounding box must be a dictionary with keys 'x1', 'y1', 'x2' and 'y2'." - return BBox.from_list( - [coords["x1"], coords["y1"], coords["x2"], coords["y2"]], + def from_yolo( + coords: Sequence[float], + title: str = "", + normalized_to: Optional[Sequence[int]] = None, + ) -> "BBox": + """ + Create a bounding box from coordinates in YOLO format. + + YOLO format represents bounding boxes as [x_center, y_center, width, height], + where: + - (x_center, y_center) are the coordinates of the box center. + - width and height define the size of the bounding box. + + If the input coordinates are normalized (i.e., floats between 0 and 1), + they will be converted to absolute pixel values based on the provided + image size. The image size should be given as a tuple (width, height) + via the `normalized_to` argument. + + Args: + coords (Sequence[float]): The bounding box coordinates. + title (str, optional): The title or label for the bounding box. + Defaults to "". + normalized_to (Sequence[int], optional): The reference image size + (width, height) for denormalizing the bounding box. If None (default), + the coordinates are assumed to be absolute pixel values. + + Returns: + BBox: The bounding box object. + """ + coords = ( + validate_bbox(coords) + if normalized_to is None + else validate_bbox_normalized(coords, normalized_to) + ) + + x_center, y_center, width, height = coords + return BBox( title=title, + coords=[ + round(x_center - width / 2), + round(y_center - height / 2), + round(x_center + width / 2), + round(y_center + height / 2), + ], + ) + + def to_yolo(self) -> list[int]: + """ + Convert the bounding box to YOLO format. + + YOLO format represents bounding boxes as [x_center, y_center, width, height], + where: + - (x_center, y_center) are the coordinates of the box center. + - width and height define the size of the bounding box. + + Returns: + list[int]: The bounding box coordinates in PASCAL VOC format. + """ + return [ + round((self.coords[0] + self.coords[2]) / 2), + round((self.coords[1] + self.coords[3]) / 2), + self.coords[2] - self.coords[0], + self.coords[3] - self.coords[1], + ] + + def to_yolo_normalized(self, img_size: Sequence[int]) -> list[float]: + """ + Convert the bounding box to YOLO format with normalized coordinates. + + YOLO format represents bounding boxes as [x_center, y_center, width, height], + where: + - (x_center, y_center) are the coordinates of the box center. + - width and height define the size of the bounding box. + + Normalized coordinates are floats between 0 and 1, representing the + relative position of the pixels in the image. + + Returns: + list[float]: The bounding box coordinates in PASCAL VOC format + with normalized coordinates. 
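# Usage sketch for the YOLO helpers above; values mirror the unit tests
# added in this patch.
from datachain.model import BBox

box = BBox.from_yolo([50, 50, 80, 60], title="BBox")  # x_center, y_center, w, h
assert box.coords == [10, 20, 90, 80]
assert box.to_yolo() == [50, 50, 80, 60]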
+ """ + coords_normalized = normalize_coords(self.coords, img_size) + return [ + (coords_normalized[0] + coords_normalized[2]) / 2, + (coords_normalized[1] + coords_normalized[3]) / 2, + coords_normalized[2] - coords_normalized[0], + coords_normalized[3] - coords_normalized[1], + ] + + @staticmethod + def from_list(coords: Sequence[float], title: str = "") -> "BBox": + return BBox.from_voc(coords, title) + + @staticmethod + def from_dict(coords: dict[str, float], title: str = "") -> "BBox": + keys = ("x1", "y1", "x2", "y2") + assert isinstance(coords, dict) and set(coords) == set(keys), ( + "Bounding box must be a dictionary with keys 'x1', 'y1', 'x2' and 'y2'." ) + return BBox.from_voc([coords[key] for key in keys], title=title) class OBBox(DataModel): @@ -63,40 +303,80 @@ class OBBox(DataModel): coords: list[int] = Field(default=[]) @staticmethod - def from_list(coords: list[float], title: str = "") -> "OBBox": + def from_list( + coords: Sequence[float], + title: str = "", + normalized_to: Optional[Sequence[int]] = None, + ) -> "OBBox": + """ + Create an oriented bounding box from a list of coordinates. + + If the input coordinates are normalized (i.e., floats between 0 and 1), + they will be converted to absolute pixel values based on the provided + image size. The image size should be given as a tuple (width, height) + via the `normalized_to` argument. + + Args: + coords (Sequence[float]): The oriented bounding box coordinates. + title (str, optional): The title or label for the oriented bounding box. + Defaults to "". + normalized_to (Sequence[int], optional): The reference image size + (width, height) for denormalizing the oriented bounding box. + If None (default), the coordinates are assumed to be + absolute pixel values. + + Returns: + OBBox: An oriented bounding box object. + """ + assert isinstance(coords, (tuple, list)), ( + "Oriented bounding box must be a tuple or list." + ) assert len(coords) == 8, ( - "Oriented bounding box must be a list of 8 coordinates." + "Oriented bounding box must be a tuple or list of 8 coordinates." ) assert all(isinstance(value, (int, float)) for value in coords), ( "Oriented bounding box coordinates must be floats or integers." ) + + if normalized_to is not None: + assert all(0 <= coord <= 1 for coord in coords), ( + "Normalized coordinates must be floats between 0 and 1." + ) + width, height = validate_img_size(normalized_to) + coords = [ + coord * width if i % 2 == 0 else coord * height + for i, coord in enumerate(coords) + ] + return OBBox( title=title, coords=[round(c) for c in coords], ) + def to_normalized(self, img_size: Sequence[int]) -> list[float]: + """ + Return the oriented bounding box in normalized coordinates. + + Normalized coordinates are floats between 0 and 1, representing the + relative position of the pixels in the image. + + Returns: + list[float]: The oriented bounding box with normalized coordinates. + """ + width, height = validate_img_size(img_size) + assert all( + x < width and y < height + for x, y in zip(self.coords[::2], self.coords[1::2]) + ), "Oriented bounding box is out of image size." + return [ + coord / width if i % 2 == 0 else coord / height + for i, coord in enumerate(self.coords) + ] + @staticmethod def from_dict(coords: dict[str, float], title: str = "") -> "OBBox": - assert isinstance(coords, dict) and set(coords) == { - "x1", - "y1", - "x2", - "y2", - "x3", - "y3", - "x4", - "y4", - }, "Oriented bounding box must be a dictionary with coordinates." 
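# Usage sketch for OBBox; the eight values are four (x, y) corners flattened,
# mirroring tests/unit/model/test_bbox.py.
from datachain.model import OBBox

obbox = OBBox.from_list([10, 20, 30, 40, 50, 60, 70, 80], title="OBBox")
obbox.to_normalized((100, 100))  # -> [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]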
- return OBBox.from_list( - [ - coords["x1"], - coords["y1"], - coords["x2"], - coords["y2"], - coords["x3"], - coords["y3"], - coords["x4"], - coords["y4"], - ], - title=title, + keys = ("x1", "y1", "x2", "y2", "x3", "y3", "x4", "y4") + assert isinstance(coords, dict) and set(coords) == set(keys), ( + "Oriented bounding box must be a dictionary with coordinates." ) + return OBBox.from_list([coords[key] for key in keys], title=title) diff --git a/src/datachain/model/pose.py b/src/datachain/model/pose.py index 0c5f376aa..00c234a32 100644 --- a/src/datachain/model/pose.py +++ b/src/datachain/model/pose.py @@ -1,7 +1,12 @@ +from collections.abc import Sequence +from typing import Optional + from pydantic import Field from datachain.lib.data_model import DataModel +from .utils import validate_img_size + class Pose(DataModel): """ @@ -19,22 +24,76 @@ class Pose(DataModel): y: list[int] = Field(default=[]) @staticmethod - def from_list(points: list[list[float]]) -> "Pose": + def from_list( + points: Sequence[Sequence[float]], + normalized_to: Optional[Sequence[int]] = None, + ) -> "Pose": + """ + Create a Pose instance from a list of x and y coordinates. + + If the input coordinates are normalized (i.e., floats between 0 and 1), + they will be converted to absolute pixel values based on the provided + image size. The image size should be given as a tuple (width, height) + via the `normalized_to` argument. + + Args: + points (Sequence[Sequence[float]]): The x and y coordinates + of the keypoints. List of 2 lists: x and y coordinates. + normalized_to (Sequence[int], optional): The reference image size + (width, height) for denormalizing the bounding box. If None (default), + the coordinates are assumed to be absolute pixel values. + + Returns: + Pose: A Pose object. + """ + assert isinstance(points, (tuple, list)), "Pose must be a list of 2 lists." assert len(points) == 2, "Pose must be a list of 2 lists: x and y coordinates." points_x, points_y = points + assert isinstance(points_x, (tuple, list)) and isinstance( + points_y, (tuple, list) + ), "Pose must be a list of 2 lists." assert len(points_x) == len(points_y) == 17, ( "Pose x and y coordinates must have the same length of 17." ) assert all( isinstance(value, (int, float)) for value in [*points_x, *points_y] ), "Pose coordinates must be floats or integers." + + if normalized_to is not None: + assert all(0 <= coord <= 1 for coord in [*points_x, *points_y]), ( + "Normalized coordinates must be floats between 0 and 1." + ) + width, height = validate_img_size(normalized_to) + points_x = [coord * width for coord in points_x] + points_y = [coord * height for coord in points_y] + return Pose( x=[round(coord) for coord in points_x], y=[round(coord) for coord in points_y], ) + def to_normalized(self, img_size: Sequence[int]) -> tuple[list[float], list[float]]: + """ + Return the pose keypoints in normalized coordinates. + + Normalized coordinates are floats between 0 and 1, representing the + relative position of the pixels in the image. + + Returns: + tuple[list[float], list[float]]: The pose keypoints + with normalized coordinates. + """ + width, height = validate_img_size(img_size) + assert all(x <= width and y <= height for x, y in zip(self.x, self.y)), ( + "Pose keypoints are out of image size." 
+ ) + return ( + [coord / width for coord in self.x], + [coord / height for coord in self.y], + ) + @staticmethod - def from_dict(points: dict[str, list[float]]) -> "Pose": + def from_dict(points: dict[str, Sequence[float]]) -> "Pose": assert isinstance(points, dict) and set(points) == { "x", "y", @@ -60,11 +119,42 @@ class Pose3D(DataModel): visible: list[float] = Field(default=[]) @staticmethod - def from_list(points: list[list[float]]) -> "Pose3D": + def from_list( + points: Sequence[Sequence[float]], + normalized_to: Optional[Sequence[int]] = None, + ) -> "Pose3D": + """ + Create a Pose3D instance from a list of x, y coordinates and visibility values. + + If the input coordinates are normalized (i.e., floats between 0 and 1), + they will be converted to absolute pixel values based on the provided + image size. The image size should be given as a tuple (width, height) + via the `normalized_to` argument. + + Args: + points (Sequence[Sequence[float]]): The x and y coordinates + of the keypoints. List of 3 lists: x, y coordinates + and visibility values. + normalized_to (Sequence[int], optional): The reference image size + (width, height) for denormalizing the bounding box. If None (default), + the coordinates are assumed to be absolute pixel values. + + Returns: + Pose3D: A Pose3D object. + + """ + assert isinstance(points, (tuple, list)), ( + "Pose3D must be a tuple or list of 3 lists." + ) assert len(points) == 3, ( "Pose3D must be a list of 3 lists: x, y coordinates and visible." ) points_x, points_y, points_v = points + assert ( + isinstance(points_x, (tuple, list)) + and isinstance(points_y, (tuple, list)) + and isinstance(points_v, (tuple, list)) + ), "Pose3D must be a tuple or list of 3 lists." assert len(points_x) == len(points_y) == len(points_v) == 17, ( "Pose3D x, y coordinates and visible must have the same length of 17." ) @@ -72,10 +162,43 @@ def from_list(points: list[list[float]]) -> "Pose3D": isinstance(value, (int, float)) for value in [*points_x, *points_y, *points_v] ), "Pose3D coordinates must be floats or integers." + + if normalized_to is not None: + assert all(0 <= coord <= 1 for coord in [*points_x, *points_y]), ( + "Normalized coordinates must be floats between 0 and 1." + ) + width, height = validate_img_size(normalized_to) + points_x = [coord * width for coord in points_x] + points_y = [coord * height for coord in points_y] + return Pose3D( x=[round(coord) for coord in points_x], y=[round(coord) for coord in points_y], - visible=points_v, + visible=list(points_v), + ) + + def to_normalized( + self, + img_size: Sequence[int], + ) -> tuple[list[float], list[float], list[float]]: + """ + Return the pose 3D keypoints in normalized coordinates. + + Normalized coordinates are floats between 0 and 1, representing the + relative position of the pixels in the image. + + Returns: + tuple[list[float], list[float], list[float]]: The pose keypoints + with normalized coordinates and visibility values. + """ + width, height = validate_img_size(img_size) + assert all(x <= width and y <= height for x, y in zip(self.x, self.y)), ( + "Pose3D keypoints are out of image size." 
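# Usage sketch for Pose and Pose3D; the 17 keypoints per axis are illustrative.
from datachain.model.pose import Pose, Pose3D

xs = [5 * i for i in range(17)]  # x-coordinates in pixels
ys = [3 * i for i in range(17)]  # y-coordinates in pixels
pose = Pose.from_list([xs, ys])
norm_x, norm_y = pose.to_normalized((100, 100))

pose3d = Pose3D.from_list([xs, ys, [0.5] * 17])  # third list holds visibility
assert pose3d.visible == [0.5] * 17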
+ ) + return ( + [coord / width for coord in self.x], + [coord / height for coord in self.y], + self.visible, ) @staticmethod diff --git a/src/datachain/model/segment.py b/src/datachain/model/segment.py index 85fc4e351..64efcfa34 100644 --- a/src/datachain/model/segment.py +++ b/src/datachain/model/segment.py @@ -1,7 +1,12 @@ +from collections.abc import Sequence +from typing import Optional + from pydantic import Field from datachain.lib.data_model import DataModel +from .utils import validate_img_size + class Segment(DataModel): """ @@ -21,23 +26,78 @@ class Segment(DataModel): y: list[int] = Field(default=[]) @staticmethod - def from_list(points: list[list[float]], title: str = "") -> "Segment": + def from_list( + points: Sequence[Sequence[float]], + title: str = "", + normalized_to: Optional[Sequence[int]] = None, + ) -> "Segment": + """ + Create a Segment object from a list of x and y coordinates. + + If the input coordinates are normalized (i.e., floats between 0 and 1), + they will be converted to absolute pixel values based on the provided + image size. The image size should be given as a tuple (width, height) + via the `normalized_to` argument. + + Args: + points (Sequence[Sequence[float]]): The x and y coordinates + of the keypoints. List of 2 lists: x and y coordinates. + title (str, optional): The title or label for the segment. Defaults to "". + normalized_to (Sequence[int], optional): The reference image size + (width, height) for denormalizing the bounding box. If None (default), + the coordinates are assumed to be absolute pixel values. + + Returns: + Segment: A Segment object. + """ + assert isinstance(points, (tuple, list)), "Segment must be a list of 2 lists." assert len(points) == 2, ( "Segment must be a list of 2 lists: x and y coordinates." ) points_x, points_y = points + assert isinstance(points_x, (tuple, list)) and isinstance( + points_y, (tuple, list) + ), "Segment must be a list of 2 lists." assert len(points_x) == len(points_y), ( "Segment x and y coordinates must have the same length." ) assert all( isinstance(value, (int, float)) for value in [*points_x, *points_y] ), "Segment coordinates must be floats or integers." + + if normalized_to is not None: + assert all(0 <= coord <= 1 for coord in [*points_x, *points_y]), ( + "Normalized coordinates must be floats between 0 and 1." + ) + width, height = validate_img_size(normalized_to) + points_x = [coord * width for coord in points_x] + points_y = [coord * height for coord in points_y] + return Segment( title=title, x=[round(coord) for coord in points_x], y=[round(coord) for coord in points_y], ) + def to_normalized(self, img_size: Sequence[int]) -> tuple[list[float], list[float]]: + """ + Return the segment in normalized coordinates. + + Normalized coordinates are floats between 0 and 1, representing the + relative position of the pixels in the image. + + Returns: + tuple[list[float], list[float]]: The segment with normalized coordinates. + """ + width, height = validate_img_size(img_size) + assert all(x <= width and y <= height for x, y in zip(self.x, self.y)), ( + "Segment keypoints are out of image size." 
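# Usage sketch for Segment; x and y may hold any number of points as long as
# the two lists have equal length. Values are illustrative.
from datachain.model.segment import Segment

segment = Segment.from_list([[10, 20, 30], [40, 50, 60]], title="Segment")
segment.to_normalized((100, 100))  # -> ([0.1, 0.2, 0.3], [0.4, 0.5, 0.6])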
+ ) + return ( + [coord / width for coord in self.x], + [coord / height for coord in self.y], + ) + @staticmethod def from_dict(points: dict[str, list[float]], title: str = "") -> "Segment": assert isinstance(points, dict) and set(points) == { diff --git a/src/datachain/model/utils.py b/src/datachain/model/utils.py new file mode 100644 index 000000000..a28ee7465 --- /dev/null +++ b/src/datachain/model/utils.py @@ -0,0 +1,76 @@ +from collections.abc import Sequence + + +def validate_img_size(img_size: Sequence[int]) -> Sequence[int]: + """Validate the image size.""" + assert isinstance(img_size, (tuple, list)), "Image size must be a tuple or list." + assert len(img_size) == 2, "Image size must be a tuple or list of 2 integers." + assert all(isinstance(value, int) for value in img_size), ( + "Image size must be integers." + ) + assert all(value > 0 for value in img_size), "Image size must be positive integers." + return img_size + + +def validate_bbox(coords: Sequence[float]) -> Sequence[float]: + """Validate the bounding box coordinates.""" + assert isinstance(coords, (tuple, list)), "Bounding box must be a tuple or list." + assert len(coords) == 4, "Bounding box must be a tuple or list of 4 coordinates." + assert all(isinstance(value, (int, float)) for value in coords), ( + "Bounding box coordinates must be floats or integers." + ) + assert all(value >= 0 for value in coords), ( + "Bounding box coordinates must be positive." + ) + return coords + + +def validate_bbox_normalized( + coords: Sequence[float], img_size: Sequence[int] +) -> Sequence[float]: + """Validate the bounding box coordinates and normalize them to the image size.""" + assert isinstance(coords, (tuple, list)), "Bounding box must be a tuple or list." + assert len(coords) == 4, "Bounding box must be a tuple or list of 4 coordinates." + assert all(isinstance(value, float) for value in coords), ( + "Bounding box normalized coordinates must be floats." + ) + assert all(0 <= value <= 1 for value in coords), ( + "Bounding box normalized coordinates must be floats between 0 and 1." + ) + + width, height = validate_img_size(img_size) + + return [ + coords[0] * width, + coords[1] * height, + coords[2] * width, + coords[3] * height, + ] + + +def normalize_coords( + coords: Sequence[int], + img_size: Sequence[int], +) -> list[float]: + """Normalize the bounding box coordinates to the image size.""" + assert isinstance(coords, (tuple, list)), "Coords must be a tuple or list." + assert len(coords) == 4, "Coords must be a tuple or list of 4 coordinates." + assert all(isinstance(value, int) for value in coords), ( + "Coords must be a tuple or list of 4 ints." 
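# Usage sketch for the helpers above; expected values mirror
# tests/unit/model/test_utils.py (normalized outputs are approximate floats).
from datachain.model.utils import normalize_coords, validate_bbox_normalized

normalize_coords([10, 10, 90, 90], (100, 100))              # -> [0.1, 0.1, 0.9, 0.9]
validate_bbox_normalized([0.1, 0.1, 0.9, 0.9], (100, 100))  # -> approx. [10, 10, 90, 90]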
+ ) + + width, height = validate_img_size(img_size) + + assert ( + 0 <= coords[0] <= width + and 0 <= coords[1] <= height + and 0 <= coords[2] <= width + and 0 <= coords[3] <= height + ), "Bounding box coordinates are out of image size" + + return [ + coords[0] / width, + coords[1] / height, + coords[2] / width, + coords[3] / height, + ] diff --git a/tests/unit/lib/test_models.py b/tests/unit/lib/test_models.py deleted file mode 100644 index c3520b0ec..000000000 --- a/tests/unit/lib/test_models.py +++ /dev/null @@ -1,142 +0,0 @@ -import pytest - -from datachain import model -from datachain.model.ultralytics.pose import YoloPoseBodyPart - - -@pytest.mark.parametrize( - "bbox", - [ - model.BBox(title="BBox", coords=[0, 1, 2, 3]), - model.BBox.from_list([0.3, 1.1, 1.7, 3.4], title="BBox"), - model.BBox.from_dict({"x1": 0, "y1": 0.8, "x2": 2.2, "y2": 2.9}, title="BBox"), - ], -) -def test_bbox(bbox): - assert bbox.model_dump() == { - "title": "BBox", - "coords": [0, 1, 2, 3], - } - - -@pytest.mark.parametrize( - "obbox", - [ - model.OBBox(title="OBBox", coords=[0, 1, 2, 3, 4, 5, 6, 7]), - model.OBBox.from_list([0.3, 1.1, 1.7, 3.4, 4.0, 4.9, 5.6, 7.0], title="OBBox"), - model.OBBox.from_dict( - { - "x1": 0, - "y1": 0.8, - "x2": 2.2, - "y2": 2.9, - "x3": 3.9, - "y3": 5.4, - "x4": 6.0, - "y4": 7.4, - }, - title="OBBox", - ), - ], -) -def test_obbox(obbox): - assert obbox.model_dump() == { - "title": "OBBox", - "coords": [0, 1, 2, 3, 4, 5, 6, 7], - } - - -@pytest.mark.parametrize( - "pose", - [ - model.Pose(x=list(range(17)), y=[y * 2 for y in range(17)]), - model.Pose.from_list([list(range(17)), [y * 2 for y in range(17)]]), - model.Pose.from_dict({"x": list(range(17)), "y": [y * 2 for y in range(17)]}), - ], -) -def test_pose(pose): - assert pose.model_dump() == { - "x": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], - "y": [0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32], - } - - -@pytest.mark.parametrize( - "pose", - [ - model.Pose3D( - x=list(range(17)), y=[y * 2 for y in range(17)], visible=[0.2] * 17 - ), - model.Pose3D.from_list( - [list(range(17)), [y * 2 for y in range(17)], [0.2] * 17] - ), - model.Pose3D.from_dict( - { - "x": list(range(17)), - "y": [y * 2 for y in range(17)], - "visible": [0.2] * 17, - } - ), - ], -) -def test_pose3d(pose): - assert pose.model_dump() == { - "x": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], - "y": [0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32], - "visible": [ - 0.2, - 0.2, - 0.2, - 0.2, - 0.2, - 0.2, - 0.2, - 0.2, - 0.2, - 0.2, - 0.2, - 0.2, - 0.2, - 0.2, - 0.2, - 0.2, - 0.2, - ], - } - - -@pytest.mark.parametrize( - "segments", - [ - model.Segment(x=[0, 1, 2], y=[2, 3, 5], title="Segments"), - model.Segment.from_list([[0, 1, 2], [2, 3, 5]], title="Segments"), - model.Segment.from_dict({"x": [0, 1, 2], "y": [2, 3, 5]}, title="Segments"), - ], -) -def test_segments(segments): - assert segments.model_dump() == { - "title": "Segments", - "x": [0, 1, 2], - "y": [2, 3, 5], - } - - -def test_yolo_pose_body_parts(): - pose = model.Pose(x=list(range(17)), y=list(range(17))) - assert pose.x[YoloPoseBodyPart.nose] == 0 - assert pose.x[YoloPoseBodyPart.left_eye] == 1 - assert pose.x[YoloPoseBodyPart.right_eye] == 2 - assert pose.x[YoloPoseBodyPart.left_ear] == 3 - assert pose.x[YoloPoseBodyPart.right_ear] == 4 - assert pose.x[YoloPoseBodyPart.left_shoulder] == 5 - assert pose.x[YoloPoseBodyPart.right_shoulder] == 6 - assert pose.x[YoloPoseBodyPart.left_elbow] == 7 - assert 
pose.x[YoloPoseBodyPart.right_elbow] == 8 - assert pose.x[YoloPoseBodyPart.left_wrist] == 9 - assert pose.x[YoloPoseBodyPart.right_wrist] == 10 - assert pose.x[YoloPoseBodyPart.left_hip] == 11 - assert pose.x[YoloPoseBodyPart.right_hip] == 12 - assert pose.x[YoloPoseBodyPart.left_knee] == 13 - assert pose.x[YoloPoseBodyPart.right_knee] == 14 - assert pose.x[YoloPoseBodyPart.left_ankle] == 15 - assert pose.x[YoloPoseBodyPart.right_ankle] == 16 diff --git a/tests/unit/model/__init__.py b/tests/unit/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit/model/test_bbox.py b/tests/unit/model/test_bbox.py new file mode 100644 index 000000000..f069ba675 --- /dev/null +++ b/tests/unit/model/test_bbox.py @@ -0,0 +1,208 @@ +import numpy as np +import pytest + +from datachain.model import BBox, OBBox + + +@pytest.mark.parametrize( + "coords,title,normalized_to", + [ + ((10, 20, 90, 80), "BBox", None), + ([10, 20, 90, 80], "BBox", None), + ([10.4, 19.8, 90.0, 80.1], "BBox", None), + ([0.1, 0.2, 0.9, 0.8], "", (100, 100)), + ], +) +def test_bbox_voc(coords, title, normalized_to): + bbox = BBox.from_voc( + coords, + title=title, + normalized_to=normalized_to, + ) + assert bbox.model_dump() == { + "title": title, + "coords": [10, 20, 90, 80], + } + assert bbox.to_voc() == [10, 20, 90, 80] + np.testing.assert_array_almost_equal( + bbox.to_voc_normalized((100, 100)), + [0.1, 0.2, 0.9, 0.8], + ) + np.testing.assert_array_almost_equal( + bbox.to_voc_normalized((200, 200)), + [0.05, 0.1, 0.45, 0.4], + ) + + +@pytest.mark.parametrize( + "coords,title,normalized_to", + [ + ((10, 20, 80, 60), "BBox", None), + ([10, 20, 80, 60], "BBox", None), + ([9.9, 20.1, 80.4, 60.001], "BBox", None), + ([0.1, 0.2, 0.8, 0.6], "", (100, 100)), + ], +) +def test_bbox_coco(coords, title, normalized_to): + bbox = BBox.from_coco( + coords, + title=title, + normalized_to=normalized_to, + ) + assert bbox.model_dump() == { + "title": title, + "coords": [10, 20, 90, 80], + } + assert bbox.to_coco() == [10, 20, 80, 60] + np.testing.assert_array_almost_equal( + bbox.to_coco_normalized((100, 100)), + [0.1, 0.2, 0.8, 0.6], + ) + np.testing.assert_array_almost_equal( + bbox.to_coco_normalized((200, 200)), + [0.05, 0.1, 0.4, 0.3], + ) + + +@pytest.mark.parametrize( + "coords,title,normalized_to", + [ + ((50, 50, 80, 60), "BBox", None), + ([50, 50, 80, 60], "BBox", None), + ([50.0, 49.6, 79.99, 60.2], "BBox", None), + ([0.5, 0.5, 0.8, 0.6], "", (100, 100)), + ], +) +def test_bbox_yolo(coords, title, normalized_to): + bbox = BBox.from_yolo( + coords, + title=title, + normalized_to=normalized_to, + ) + assert bbox.model_dump() == { + "title": title, + "coords": [10, 20, 90, 80], + } + assert bbox.to_yolo() == [50, 50, 80, 60] + np.testing.assert_array_almost_equal( + bbox.to_yolo_normalized((100, 100)), + [0.5, 0.5, 0.8, 0.6], + ) + np.testing.assert_array_almost_equal( + bbox.to_yolo_normalized((200, 200)), + [0.25, 0.25, 0.4, 0.3], + ) + + +def test_bbox_from_list(): + assert BBox.from_list([10, 20, 90, 80]).model_dump() == { + "title": "", + "coords": [10, 20, 90, 80], + } + + +def test_bbox_from_dict(): + assert BBox.from_dict({"x1": 10, "y1": 20, "x2": 90, "y2": 80}).model_dump() == { + "title": "", + "coords": [10, 20, 90, 80], + } + + +@pytest.mark.parametrize( + "coords", + [ + {"x1": 10, "y1": 20, "x2": 90}, + {"x1": 10, "y1": 20, "x2": 90, "y2": 80, "x3": 100}, + ], +) +def test_bbox_from_dict_errors(coords): + with pytest.raises(AssertionError): + BBox.from_dict(coords) + + 
+@pytest.mark.parametrize( + "coords,normalized_to", + [ + [(10, 20, 30, 40, 50, 60, 70, 80), None], + [[10, 20, 30, 40, 50, 60, 70, 80], None], + [[9.9, 20.1, 29.6, 40.4, 50.01, 59.99, 70.0, 80], None], + [[10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0], None], + [[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8], (100, 100)], + [(0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8), [100, 100]], + ], +) +def test_obbox_from_list(coords, normalized_to): + obbox = OBBox.from_list(coords, normalized_to=normalized_to) + assert obbox.model_dump() == { + "title": "", + "coords": [10, 20, 30, 40, 50, 60, 70, 80], + } + np.testing.assert_array_almost_equal( + obbox.to_normalized((100, 100)), + [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8], + ) + + +def test_obbox_to_normalized_errors(): + with pytest.raises(AssertionError): + OBBox.from_list([10, 20, 30, 40, 50, 60, 70, 80]).to_normalized((50, 50)) + + +@pytest.mark.parametrize( + "coords,normalized_to", + [ + [None, None], + [12, None], + ["12", None], + [[], None], + [[10, 20, 30, 40, 50, 60, 70], None], + [[10, 20, 30, 40, 50, 60, 70, 80, 90], None], + [[10, 20, 30, 40, 50, 60, 70, 80], (100, 100)], + [[1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8], (100, 100)], + ], +) +def test_obbox_from_list_errors(coords, normalized_to): + with pytest.raises(AssertionError): + OBBox.from_list(coords, normalized_to=normalized_to) + + +def test_obbox_from_dict(): + obbox = OBBox.from_dict( + { + "x1": 0, + "y1": 0.8, + "x2": 2.2, + "y2": 2.9, + "x3": 3.9, + "y3": 5.4, + "x4": 6.0, + "y4": 7.4, + }, + title="OBBox", + ) + assert obbox.model_dump() == { + "title": "OBBox", + "coords": [0, 1, 2, 3, 4, 5, 6, 7], + } + + +@pytest.mark.parametrize( + "coords", + [ + {"x1": 0, "y1": 1, "x2": 2, "y2": 3, "x3": 4, "y3": 5, "x4": 6}, + { + "x1": 0, + "y1": 1, + "x2": 2, + "y2": 3, + "x3": 4, + "y3": 5, + "x4": 6, + "y4": 7, + "x5": 8, + }, + ], +) +def test_obbox_from_dict_errors(coords): + with pytest.raises(AssertionError): + OBBox.from_dict(coords) diff --git a/tests/unit/model/test_pose.py b/tests/unit/model/test_pose.py new file mode 100644 index 000000000..1b08caa9e --- /dev/null +++ b/tests/unit/model/test_pose.py @@ -0,0 +1,181 @@ +import numpy as np +import pytest + +from datachain.model.pose import Pose, Pose3D + +POSE_KEYPOINTS = ( + [5 * i for i in range(17)], + [3 * i for i in reversed(range(17))], +) +POSE_KEYPOINTS_NORMALIZED = ( + [x / 100 for x in POSE_KEYPOINTS[0]], + [y / 100 for y in POSE_KEYPOINTS[1]], +) + +POSE3D_KEYPOINTS = ( + POSE_KEYPOINTS[0], + POSE_KEYPOINTS[1], + [0.05 * i for i in range(17)], +) +POSE3D_KEYPOINTS_NORMALIZED = ( + POSE_KEYPOINTS_NORMALIZED[0], + POSE_KEYPOINTS_NORMALIZED[1], + [0.05 * i for i in range(17)], +) + + +@pytest.mark.parametrize( + "points,normalized_to", + [ + [POSE_KEYPOINTS, None], + [tuple(tuple(c) for c in POSE_KEYPOINTS), None], + [POSE_KEYPOINTS_NORMALIZED, (100, 100)], + ], +) +def test_pose_from_list(points, normalized_to): + pose = Pose.from_list(points, normalized_to=normalized_to) + assert pose.model_dump() == { + "x": POSE_KEYPOINTS[0], + "y": POSE_KEYPOINTS[1], + } + np.testing.assert_array_almost_equal( + pose.to_normalized((100, 100)), + POSE_KEYPOINTS_NORMALIZED, + ) + + +@pytest.mark.parametrize( + "points,normalized_to", + [ + [None, None], + [12, None], + ["12", None], + [[], None], + [[12, []], None], + [[[], "12"], None], + [[[], []], None], + [[[], [], []], None], + [[12, 12], None], + [[POSE_KEYPOINTS[0], POSE_KEYPOINTS[1] + [0]], None], + [ + [ + [p * 2 for p in POSE_KEYPOINTS_NORMALIZED[0]], + 
POSE_KEYPOINTS_NORMALIZED[1], + ], + (100, 100), + ], + ], +) +def test_pose_from_list_errors(points, normalized_to): + with pytest.raises(AssertionError): + Pose.from_list(points, normalized_to=normalized_to) + + +def test_pose_to_normalized_errors(): + with pytest.raises(AssertionError): + Pose.from_list(POSE_KEYPOINTS).to_normalized((50, 50)) + + +def test_pose_from_dict(): + pose = Pose.from_dict({"x": POSE_KEYPOINTS[0], "y": POSE_KEYPOINTS[1]}) + assert pose.model_dump() == { + "x": POSE_KEYPOINTS[0], + "y": POSE_KEYPOINTS[1], + } + + +@pytest.mark.parametrize( + "points", + [ + {"x": POSE_KEYPOINTS[0]}, + {"x": POSE_KEYPOINTS[0], "y": POSE_KEYPOINTS[1], "z": []}, + ], +) +def test_pose_from_dict_errors(points): + with pytest.raises(AssertionError): + Pose.from_dict(points) + + +@pytest.mark.parametrize( + "points,normalized_to", + [ + [POSE3D_KEYPOINTS, None], + [tuple(tuple(c) for c in POSE3D_KEYPOINTS), None], + [POSE3D_KEYPOINTS_NORMALIZED, (100, 100)], + ], +) +def test_pose3d_from_list(points, normalized_to): + pose = Pose3D.from_list(points, normalized_to=normalized_to) + assert pose.model_dump() == { + "x": POSE3D_KEYPOINTS[0], + "y": POSE3D_KEYPOINTS[1], + "visible": POSE3D_KEYPOINTS[2], + } + np.testing.assert_array_almost_equal( + pose.to_normalized((100, 100)), + POSE3D_KEYPOINTS_NORMALIZED, + ) + + +@pytest.mark.parametrize( + "points,normalized_to", + [ + [None, None], + [12, None], + ["12", None], + [[], None], + [[12, []], None], + [[[], "12"], None], + [[[], []], None], + [[[], [], []], None], + [[12, 12], None], + [[POSE3D_KEYPOINTS[0], POSE3D_KEYPOINTS[1] + [0]], None], + [ + [ + [p * 2 for p in POSE3D_KEYPOINTS_NORMALIZED[0]], + POSE3D_KEYPOINTS_NORMALIZED[1], + ], + (100, 100), + ], + ], +) +def test_pose3d_from_list_errors(points, normalized_to): + with pytest.raises(AssertionError): + Pose3D.from_list(points, normalized_to=normalized_to) + + +def test_pose3d_to_normalized_errors(): + with pytest.raises(AssertionError): + Pose3D.from_list(POSE3D_KEYPOINTS).to_normalized((50, 50)) + + +def test_pose3d_from_dict(): + pose = Pose3D.from_dict( + { + "x": POSE3D_KEYPOINTS[0], + "y": POSE3D_KEYPOINTS[1], + "visible": POSE3D_KEYPOINTS[2], + } + ) + assert pose.model_dump() == { + "x": POSE3D_KEYPOINTS[0], + "y": POSE3D_KEYPOINTS[1], + "visible": POSE3D_KEYPOINTS[2], + } + + +@pytest.mark.parametrize( + "points", + [ + {"x": POSE_KEYPOINTS[0], "y": POSE_KEYPOINTS[1]}, + { + "x": POSE_KEYPOINTS[0], + "y": POSE_KEYPOINTS[1], + "visible": POSE3D_KEYPOINTS[2], + "z": [], + }, + ], +) +def test_pose3d_from_dict_errors(points): + with pytest.raises(AssertionError): + Pose3D.from_dict(points) diff --git a/tests/unit/model/test_segment.py b/tests/unit/model/test_segment.py new file mode 100644 index 000000000..3c671c96e --- /dev/null +++ b/tests/unit/model/test_segment.py @@ -0,0 +1,86 @@ +import numpy as np +import pytest + +from datachain.model.segment import Segment + +SEGMENT_POINTS = ( + [2 * i for i in range(50)], + list(reversed(range(50))), +) +SEGMENT_POINTS_NORMALIZED = ( + [x / 100 for x in SEGMENT_POINTS[0]], + [y / 100 for y in SEGMENT_POINTS[1]], +) + + +@pytest.mark.parametrize( + "points,title,normalized_to", + [ + [SEGMENT_POINTS, "Segment", None], + [tuple(tuple(c) for c in SEGMENT_POINTS), "", None], + [SEGMENT_POINTS_NORMALIZED, "Person", (100, 100)], + ], +) +def test_pose_from_list(points, title, normalized_to): + segment = Segment.from_list(points, title, normalized_to=normalized_to) + assert segment.model_dump() == { + "title": title, + "x": 
SEGMENT_POINTS[0], + "y": SEGMENT_POINTS[1], + } + np.testing.assert_array_almost_equal( + segment.to_normalized((100, 100)), + SEGMENT_POINTS_NORMALIZED, + ) + + +@pytest.mark.parametrize( + "points,normalized_to", + [ + [None, None], + [12, None], + ["12", None], + [[], None], + [[12, []], None], + [[[], "12"], None], + [[[], [], []], None], + [[12, 12], None], + [[SEGMENT_POINTS[0], SEGMENT_POINTS[1] + [0]], None], + [ + [ + [p * 2 for p in SEGMENT_POINTS_NORMALIZED[0]], + SEGMENT_POINTS_NORMALIZED[1], + ], + (100, 100), + ], + ], +) +def test_pose_from_list_errors(points, normalized_to): + with pytest.raises(AssertionError): + Segment.from_list(points, normalized_to=normalized_to) + + +def test_pose_to_normalized_errors(): + with pytest.raises(AssertionError): + Segment.from_list(SEGMENT_POINTS).to_normalized((50, 50)) + + +def test_pose_from_dict(): + segment = Segment.from_dict({"x": SEGMENT_POINTS[0], "y": SEGMENT_POINTS[1]}) + assert segment.model_dump() == { + "title": "", + "x": SEGMENT_POINTS[0], + "y": SEGMENT_POINTS[1], + } + + +@pytest.mark.parametrize( + "points", + [ + {"x": SEGMENT_POINTS[0]}, + {"x": SEGMENT_POINTS[0], "y": SEGMENT_POINTS[1], "z": []}, + ], +) +def test_pose_from_dict_errors(points): + with pytest.raises(AssertionError): + Segment.from_dict(points) diff --git a/tests/unit/model/test_utils.py b/tests/unit/model/test_utils.py new file mode 100644 index 000000000..5ac074c78 --- /dev/null +++ b/tests/unit/model/test_utils.py @@ -0,0 +1,132 @@ +import pytest + +from datachain.model.utils import ( + normalize_coords, + validate_bbox, + validate_bbox_normalized, + validate_img_size, +) + + +@pytest.mark.parametrize( + "img_size", + [ + [100, 100], + (100, 100), + ], +) +def test_validate_img_size(img_size): + assert validate_img_size(img_size) == img_size + + +@pytest.mark.parametrize( + "img_size", + [ + None, + 12, + "12", + [], + [1], + [1, 2, 3], + [1, "2"], + [0, 2], + [1, 0], + [10.0, 10.0], + ], +) +def test_validate_img_size_errors(img_size): + with pytest.raises(AssertionError): + validate_img_size(img_size) + + +@pytest.mark.parametrize( + "bbox", + [ + (10, 10, 90, 90), + [10, 10, 90, 90], + ], +) +def test_validate_bbox(bbox): + assert validate_bbox(bbox) == bbox + + +@pytest.mark.parametrize( + "bbox", + [ + None, + 12, + "12", + [], + [0, 1, 2], + [0, 1, 2, 3, 4], + [0, 1, 2, "3"], + [0, -1, 2, 3], + ], +) +def test_validate_bbox_errors(bbox): + with pytest.raises(AssertionError): + validate_bbox(bbox) + + +@pytest.mark.parametrize( + "bbox", + [ + (0.1, 0.1, 0.9, 0.9), + [0.1, 0.1, 0.9, 0.9], + ], +) +def test_validate_bbox_normalized(bbox): + assert validate_bbox_normalized(bbox, (100, 100)) == [10, 10, 90, 90] + + +@pytest.mark.parametrize( + "bbox", + [ + None, + 0.2, + "0.2", + [], + [0.0, 0.1, 0.2], + [0.0, 0.1, 0.2, 0.3, 0.4], + [0.0, 0.1, 0.2, "0.3"], + [0.0, 1.0, 2.0, 3.0], + ], +) +def test_validate_bbox_normalized_errors(bbox): + with pytest.raises(AssertionError): + validate_bbox_normalized(bbox, (100, 100)) + + +@pytest.mark.parametrize( + "coords", + [ + (10, 10, 90, 90), + [10, 10, 90, 90], + ], +) +def test_normalize_coords(coords): + assert normalize_coords(coords, (100, 100)) == [0.1, 0.1, 0.9, 0.9] + + +@pytest.mark.parametrize( + "coords", + [ + None, + 10, + "10", + [10, 10, 90], + [10, 10, 90, 90, 90], + [10.0, 10.0, 90.0, 90.0], + [200, 10, 90, 90], + [10, 200, 90, 90], + [10, 10, 200, 90], + [10, 10, 90, 200], + [-10, 10, 90, 90], + [10, -10, 90, 90], + [10, 10, -10, 90], + [10, 10, 90, -10], + ], +) +def 
test_normalize_coords_errors(coords): + with pytest.raises(AssertionError): + normalize_coords(coords, (100, 100)) From 5d0b461dd741c9d2aeac8ee54c10dc009e1d97b1 Mon Sep 17 00:00:00 2001 From: Vladimir Rudnyh Date: Sun, 16 Feb 2025 12:50:57 +0700 Subject: [PATCH 2/6] Yolo models refactoring --- .DS_Store | Bin 0 -> 6148 bytes src/datachain/model/__init__.py | 16 +- src/datachain/model/bbox.py | 372 +-- src/datachain/model/pose.py | 194 +- src/datachain/model/segment.py | 97 +- src/datachain/model/ultralytics/__init__.py | 27 - src/datachain/model/ultralytics/bbox.py | 147 - src/datachain/model/ultralytics/pose.py | 113 - src/datachain/model/ultralytics/segment.py | 91 - src/datachain/model/utils.py | 241 +- src/datachain/model/yolo.py | 498 ++++ tests/.DS_Store | Bin 0 -> 6148 bytes tests/func/.DS_Store | Bin 0 -> 6148 bytes tests/func/model/.DS_Store | Bin 0 -> 6148 bytes tests/func/model/__init__.py | 0 tests/func/model/conftest.py | 29 + tests/func/model/data/running-mask0.png | Bin 0 -> 1112 bytes tests/func/model/data/running-mask1.png | Bin 0 -> 2087 bytes tests/func/model/data/running.jpg | Bin 0 -> 23189 bytes tests/func/model/data/ships.jpg | Bin 0 -> 32890 bytes tests/func/model/test_yolo.py | 2955 +++++++++++++++++++ tests/unit/.DS_Store | Bin 0 -> 6148 bytes tests/unit/model/test_bbox.py | 208 +- tests/unit/model/test_pose.py | 183 +- tests/unit/model/test_segment.py | 86 +- tests/unit/model/test_utils.py | 182 +- tests/unit/model/test_yolo.py | 190 ++ 27 files changed, 4003 insertions(+), 1626 deletions(-) create mode 100644 .DS_Store delete mode 100644 src/datachain/model/ultralytics/__init__.py delete mode 100644 src/datachain/model/ultralytics/bbox.py delete mode 100644 src/datachain/model/ultralytics/pose.py delete mode 100644 src/datachain/model/ultralytics/segment.py create mode 100644 src/datachain/model/yolo.py create mode 100644 tests/.DS_Store create mode 100644 tests/func/.DS_Store create mode 100644 tests/func/model/.DS_Store create mode 100644 tests/func/model/__init__.py create mode 100644 tests/func/model/conftest.py create mode 100644 tests/func/model/data/running-mask0.png create mode 100644 tests/func/model/data/running-mask1.png create mode 100644 tests/func/model/data/running.jpg create mode 100644 tests/func/model/data/ships.jpg create mode 100644 tests/func/model/test_yolo.py create mode 100644 tests/unit/.DS_Store create mode 100644 tests/unit/model/test_yolo.py diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..a9699e2c17845717b2d11c1f567abfcefac24d25 GIT binary patch literal 6148 zcmeHK%}T>S5Z<+|Z74zx3Oz1(Em&<8i-EVe&b~7Jje;8xjorQ;t*^IFkG(?U{g`m08wPk`4xtb#j=Gi1jWKc29 z-!$R3H(1UhmO;Pz{U5<3iL<=d`{Y}-dSkoUw3_Yaz4s*NUhe0!%=M?YIJ%ND2}<1$ zuH$Gvws+5En)`7YO;ti1Mi6p$6Q`k^yKnacIF!?Ie|*zPPA{nLI|91n)eu2>9@ zdR;N-pDdSFYj6MH>~j2^yrl9?6Ul*bB|8R7cn4*rqE~O4CNg~lYnf3-5)uQ%05L!e zY$gNdJZQByvwW(W7$63IU;y_A0S(bHSZGvR2XuISMt>a<1$2B%APR$y!9pW=K)6l? z)T!J&F}O|#yD)K%!9t@>XI!le^O%*(#|u}hgI%a_#vP5+69dG+Is-L5wDJ7EfM2Ha zk-wfoBVvFU_-738`p6r(P?R}azm "BBox": + def convert( + self, + img_size: Sequence[int], + source: BBoxType, + target: BBoxType, + ) -> list[float]: """ - Create a bounding box from coordinates in PASCAL VOC format. + Convert the bounding box coordinates between different formats. - PASCAL VOC format represents bounding boxes as [x_min, y_min, x_max, y_max], - where: - - (x_min, y_min) are the coordinates of the top-left corner. - - (x_max, y_max) are the coordinates of the bottom-right corner. 
- - If the input coordinates are normalized (i.e., floats between 0 and 1), - they will be converted to absolute pixel values based on the provided - image size. The image size should be given as a tuple (width, height) - via the `normalized_to` argument. + Supported formats: "albumentations", "coco", "voc", "yolo". Args: - coords (Sequence[float]): The bounding box coordinates. - title (str, optional): The title or label for the bounding box. - Defaults to "". - normalized_to (Sequence[int], optional): The reference image size - (width, height) for denormalizing the bounding box. If None (default), - the coordinates are assumed to be absolute pixel values. - - Returns: - BBox: A bounding box object. - """ - coords = ( - validate_bbox(coords) - if normalized_to is None - else validate_bbox_normalized(coords, normalized_to) - ) - - return BBox(title=title, coords=[round(c) for c in coords]) - - def to_voc(self) -> list[int]: - """ - Convert the bounding box to PASCAL VOC format. - - PASCAL VOC format represents bounding boxes as [x_min, y_min, x_max, y_max], - where: - - (x_min, y_min) are the coordinates of the top-left corner. - - (x_max, y_max) are the coordinates of the bottom-right corner. + img_size (Sequence[int]): The reference image size (width, height). + source (str): The source bounding box format. + target (str): The target bounding box format. Returns: - list[int]: The bounding box coordinates in PASCAL VOC format. - """ - return self.coords - - def to_voc_normalized(self, img_size: Sequence[int]) -> list[float]: - """ - Convert the bounding box to PASCAL VOC format with normalized coordinates. - - PASCAL VOC format represents bounding boxes as [x_min, y_min, x_max, y_max], - where: - - (x_min, y_min) are the coordinates of the top-left corner. - - (x_max, y_max) are the coordinates of the bottom-right corner. - - Normalized coordinates are floats between 0 and 1, representing the - relative position of the pixels in the image. - - Returns: - list[float]: The bounding box coordinates in PASCAL VOC format - with normalized coordinates. - """ - return normalize_coords(self.coords, img_size) - - @staticmethod - def from_coco( - coords: Sequence[float], - title: str = "", - normalized_to: Optional[Sequence[int]] = None, - ) -> "BBox": - """ - Create a bounding box from coordinates in COCO format. - - COCO format represents bounding boxes as [x, y, width, height], where: - - (x, y) are the coordinates of the top-left corner. - - width and height define the size of the bounding box. - - If the input coordinates are normalized (i.e., floats between 0 and 1), - they will be converted to absolute pixel values based on the provided - image size. The image size should be given as a tuple (width, height) - via the `normalized_to` argument. - - Args: - coords (Sequence[float]): The bounding box coordinates. - title (str, optional): The title or label for the bounding box. - Defaults to "". - normalized_to (Sequence[int], optional): The reference image size - (width, height) for denormalizing the bounding box. If None (default), - the coordinates are assumed to be absolute pixel values. - - Returns: - BBox: A bounding box object. - """ - coords = ( - validate_bbox(coords) - if normalized_to is None - else validate_bbox_normalized(coords, normalized_to) - ) - - x, y, width, height = coords - return BBox( - title=title, - coords=[round(x), round(y), round(x + width), round(y + height)], - ) - - def to_coco(self) -> list[int]: - """ - Convert the bounding box to COCO format. 
- - COCO format represents bounding boxes as [x, y, width, height], where: - - (x, y) are the coordinates of the top-left corner. - - width and height define the size of the bounding box. - - Returns: - list[int]: The bounding box coordinates in PASCAL VOC format. - """ - return [ - self.coords[0], - self.coords[1], - self.coords[2] - self.coords[0], - self.coords[3] - self.coords[1], - ] - - def to_coco_normalized(self, img_size: Sequence[int]) -> list[float]: - """ - Convert the bounding box to COCO format with normalized coordinates. - - COCO format represents bounding boxes as [x, y, width, height], where: - - (x, y) are the coordinates of the top-left corner. - - width and height define the size of the bounding box. - - Normalized coordinates are floats between 0 and 1, representing the - relative position of the pixels in the image. - - Returns: - list[float]: The bounding box coordinates in PASCAL VOC format - with normalized coordinates. - """ - coords_normalized = normalize_coords(self.coords, img_size) - return [ - coords_normalized[0], - coords_normalized[1], - coords_normalized[2] - coords_normalized[0], - coords_normalized[3] - coords_normalized[1], - ] - - @staticmethod - def from_yolo( - coords: Sequence[float], - title: str = "", - normalized_to: Optional[Sequence[int]] = None, - ) -> "BBox": + list[float]: The bounding box coordinates in the target format. """ - Create a bounding box from coordinates in YOLO format. - - YOLO format represents bounding boxes as [x_center, y_center, width, height], - where: - - (x_center, y_center) are the coordinates of the box center. - - width and height define the size of the bounding box. - - If the input coordinates are normalized (i.e., floats between 0 and 1), - they will be converted to absolute pixel values based on the provided - image size. The image size should be given as a tuple (width, height) - via the `normalized_to` argument. - - Args: - coords (Sequence[float]): The bounding box coordinates. - title (str, optional): The title or label for the bounding box. - Defaults to "". - normalized_to (Sequence[int], optional): The reference image size - (width, height) for denormalizing the bounding box. If None (default), - the coordinates are assumed to be absolute pixel values. - - Returns: - BBox: The bounding box object. - """ - coords = ( - validate_bbox(coords) - if normalized_to is None - else validate_bbox_normalized(coords, normalized_to) - ) - - x_center, y_center, width, height = coords - return BBox( - title=title, - coords=[ - round(x_center - width / 2), - round(y_center - height / 2), - round(x_center + width / 2), - round(y_center + height / 2), - ], - ) - - def to_yolo(self) -> list[int]: - """ - Convert the bounding box to YOLO format. - - YOLO format represents bounding boxes as [x_center, y_center, width, height], - where: - - (x_center, y_center) are the coordinates of the box center. - - width and height define the size of the bounding box. - - Returns: - list[int]: The bounding box coordinates in PASCAL VOC format. - """ - return [ - round((self.coords[0] + self.coords[2]) / 2), - round((self.coords[1] + self.coords[3]) / 2), - self.coords[2] - self.coords[0], - self.coords[3] - self.coords[1], - ] - - def to_yolo_normalized(self, img_size: Sequence[int]) -> list[float]: - """ - Convert the bounding box to YOLO format with normalized coordinates. - - YOLO format represents bounding boxes as [x_center, y_center, width, height], - where: - - (x_center, y_center) are the coordinates of the box center. 
- - width and height define the size of the bounding box. - - Normalized coordinates are floats between 0 and 1, representing the - relative position of the pixels in the image. - - Returns: - list[float]: The bounding box coordinates in PASCAL VOC format - with normalized coordinates. - """ - coords_normalized = normalize_coords(self.coords, img_size) - return [ - (coords_normalized[0] + coords_normalized[2]) / 2, - (coords_normalized[1] + coords_normalized[3]) / 2, - coords_normalized[2] - coords_normalized[0], - coords_normalized[3] - coords_normalized[1], - ] - - @staticmethod - def from_list(coords: Sequence[float], title: str = "") -> "BBox": - return BBox.from_voc(coords, title) - - @staticmethod - def from_dict(coords: dict[str, float], title: str = "") -> "BBox": - keys = ("x1", "y1", "x2", "y2") - assert isinstance(coords, dict) and set(coords) == set(keys), ( - "Bounding box must be a dictionary with keys 'x1', 'y1', 'x2' and 'y2'." - ) - return BBox.from_voc([coords[key] for key in keys], title=title) + return convert_bbox(self.coords, img_size, source, target) class OBBox(DataModel): """ A data model for representing oriented bounding boxes. + Use `datachain.model.YoloObb` for YOLO-specific oriented bounding boxes. + This model is intended for general oriented bounding box representations + or other formats. + Attributes: title (str): The title of the oriented bounding box. - coords (list[int]): The coordinates of the oriented bounding box. - - The oriented bounding box is defined by four points: - - (x1, y1): The first corner of the box. - - (x2, y2): The second corner of the box. - - (x3, y3): The third corner of the box. - - (x4, y4): The fourth corner of the box. + coords (list[float]): The coordinates of the oriented bounding box. """ title: str = Field(default="") - coords: list[int] = Field(default=[]) - - @staticmethod - def from_list( - coords: Sequence[float], - title: str = "", - normalized_to: Optional[Sequence[int]] = None, - ) -> "OBBox": - """ - Create an oriented bounding box from a list of coordinates. - - If the input coordinates are normalized (i.e., floats between 0 and 1), - they will be converted to absolute pixel values based on the provided - image size. The image size should be given as a tuple (width, height) - via the `normalized_to` argument. - - Args: - coords (Sequence[float]): The oriented bounding box coordinates. - title (str, optional): The title or label for the oriented bounding box. - Defaults to "". - normalized_to (Sequence[int], optional): The reference image size - (width, height) for denormalizing the oriented bounding box. - If None (default), the coordinates are assumed to be - absolute pixel values. - - Returns: - OBBox: An oriented bounding box object. - """ - assert isinstance(coords, (tuple, list)), ( - "Oriented bounding box must be a tuple or list." - ) - assert len(coords) == 8, ( - "Oriented bounding box must be a tuple or list of 8 coordinates." - ) - assert all(isinstance(value, (int, float)) for value in coords), ( - "Oriented bounding box coordinates must be floats or integers." - ) - - if normalized_to is not None: - assert all(0 <= coord <= 1 for coord in coords), ( - "Normalized coordinates must be floats between 0 and 1." 
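# Usage sketch for the refactored API in this commit: boxes keep their raw
# coordinates and convert() translates between formats. The format strings
# below are the ones named in the docstring above; the exact return values
# depend on convert_bbox in utils.
from datachain.model import BBox

box = BBox(title="person", coords=[10, 20, 90, 80])  # VOC-style pixel corners (illustrative)
box.convert(img_size=(100, 100), source="voc", target="coco")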
-            )
-            width, height = validate_img_size(normalized_to)
-            coords = [
-                coord * width if i % 2 == 0 else coord * height
-                for i, coord in enumerate(coords)
-            ]
-
-        return OBBox(
-            title=title,
-            coords=[round(c) for c in coords],
-        )
-
-    def to_normalized(self, img_size: Sequence[int]) -> list[float]:
-        """
-        Return the oriented bounding box in normalized coordinates.
-
-        Normalized coordinates are floats between 0 and 1, representing the
-        relative position of the pixels in the image.
-
-        Returns:
-            list[float]: The oriented bounding box with normalized coordinates.
-        """
-        width, height = validate_img_size(img_size)
-        assert all(
-            x < width and y < height
-            for x, y in zip(self.coords[::2], self.coords[1::2])
-        ), "Oriented bounding box is out of image size."
-        return [
-            coord / width if i % 2 == 0 else coord / height
-            for i, coord in enumerate(self.coords)
-        ]
-
-    @staticmethod
-    def from_dict(coords: dict[str, float], title: str = "") -> "OBBox":
-        keys = ("x1", "y1", "x2", "y2", "x3", "y3", "x4", "y4")
-        assert isinstance(coords, dict) and set(coords) == set(keys), (
-            "Oriented bounding box must be a dictionary with coordinates."
-        )
-        return OBBox.from_list([coords[key] for key in keys], title=title)
+    coords: list[float] = Field(default=[])
diff --git a/src/datachain/model/pose.py b/src/datachain/model/pose.py
index 00c234a32..7b4d9909b 100644
--- a/src/datachain/model/pose.py
+++ b/src/datachain/model/pose.py
@@ -1,17 +1,15 @@
-from collections.abc import Sequence
-from typing import Optional
-
 from pydantic import Field
 
 from datachain.lib.data_model import DataModel
 
-from .utils import validate_img_size
-
 
 class Pose(DataModel):
     """
     A data model for representing pose keypoints.
 
+    Use `datachain.model.YoloPose` for YOLO-specific poses.
+    This model is intended for general pose representations or other formats.
+
     Attributes:
         x (list[int]): The x-coordinates of the keypoints.
         y (list[int]): The y-coordinates of the keypoints.
@@ -20,192 +18,26 @@ class Pose(DataModel):
         corresponds to a specific body part.
     """
 
-    x: list[int] = Field(default=[])
-    y: list[int] = Field(default=[])
-
-    @staticmethod
-    def from_list(
-        points: Sequence[Sequence[float]],
-        normalized_to: Optional[Sequence[int]] = None,
-    ) -> "Pose":
-        """
-        Create a Pose instance from a list of x and y coordinates.
-
-        If the input coordinates are normalized (i.e., floats between 0 and 1),
-        they will be converted to absolute pixel values based on the provided
-        image size. The image size should be given as a tuple (width, height)
-        via the `normalized_to` argument.
-
-        Args:
-            points (Sequence[Sequence[float]]): The x and y coordinates
-                of the keypoints. List of 2 lists: x and y coordinates.
-            normalized_to (Sequence[int], optional): The reference image size
-                (width, height) for denormalizing the bounding box. If None (default),
-                the coordinates are assumed to be absolute pixel values.
-
-        Returns:
-            Pose: A Pose object.
-        """
-        assert isinstance(points, (tuple, list)), "Pose must be a list of 2 lists."
-        assert len(points) == 2, "Pose must be a list of 2 lists: x and y coordinates."
-        points_x, points_y = points
-        assert isinstance(points_x, (tuple, list)) and isinstance(
-            points_y, (tuple, list)
-        ), "Pose must be a list of 2 lists."
-        assert len(points_x) == len(points_y) == 17, (
-            "Pose x and y coordinates must have the same length of 17."
-        )
-        assert all(
-            isinstance(value, (int, float)) for value in [*points_x, *points_y]
-        ), "Pose coordinates must be floats or integers."
-
-        if normalized_to is not None:
-            assert all(0 <= coord <= 1 for coord in [*points_x, *points_y]), (
-                "Normalized coordinates must be floats between 0 and 1."
-            )
-            width, height = validate_img_size(normalized_to)
-            points_x = [coord * width for coord in points_x]
-            points_y = [coord * height for coord in points_y]
-
-        return Pose(
-            x=[round(coord) for coord in points_x],
-            y=[round(coord) for coord in points_y],
-        )
-
-    def to_normalized(self, img_size: Sequence[int]) -> tuple[list[float], list[float]]:
-        """
-        Return the pose keypoints in normalized coordinates.
-
-        Normalized coordinates are floats between 0 and 1, representing the
-        relative position of the pixels in the image.
-
-        Returns:
-            tuple[list[float], list[float]]: The pose keypoints
-                with normalized coordinates.
-        """
-        width, height = validate_img_size(img_size)
-        assert all(x <= width and y <= height for x, y in zip(self.x, self.y)), (
-            "Pose keypoints are out of image size."
-        )
-        return (
-            [coord / width for coord in self.x],
-            [coord / height for coord in self.y],
-        )
-
-    @staticmethod
-    def from_dict(points: dict[str, Sequence[float]]) -> "Pose":
-        assert isinstance(points, dict) and set(points) == {
-            "x",
-            "y",
-        }, "Pose must be a dict with keys 'x' and 'y'."
-        return Pose.from_list([points["x"], points["y"]])
+    x: list[float] = Field(default=[])
+    y: list[float] = Field(default=[])
 
 
 class Pose3D(DataModel):
     """
     A data model for representing 3D pose keypoints.
 
+    Use `datachain.model.YoloPose` for YOLO-specific poses.
+    This model is intended for general pose representations or other formats.
+
     Attributes:
-        x (list[int]): The x-coordinates of the keypoints.
-        y (list[int]): The y-coordinates of the keypoints.
+        x (list[float]): The x-coordinates of the keypoints.
+        y (list[float]): The y-coordinates of the keypoints.
         visible (list[float]): The visibility of the keypoints.
 
-    The keypoints are represented as lists of x, y, and visibility values,
+    The keypoints are represented as lists of x and y coordinates and visibility,
     where each index corresponds to a specific body part.
     """
 
-    x: list[int] = Field(default=[])
-    y: list[int] = Field(default=[])
+    x: list[float] = Field(default=[])
+    y: list[float] = Field(default=[])
     visible: list[float] = Field(default=[])
-
-    @staticmethod
-    def from_list(
-        points: Sequence[Sequence[float]],
-        normalized_to: Optional[Sequence[int]] = None,
-    ) -> "Pose3D":
-        """
-        Create a Pose3D instance from a list of x, y coordinates and visibility values.
-
-        If the input coordinates are normalized (i.e., floats between 0 and 1),
-        they will be converted to absolute pixel values based on the provided
-        image size. The image size should be given as a tuple (width, height)
-        via the `normalized_to` argument.
-
-        Args:
-            points (Sequence[Sequence[float]]): The x and y coordinates
-                of the keypoints. List of 3 lists: x, y coordinates
-                and visibility values.
-            normalized_to (Sequence[int], optional): The reference image size
-                (width, height) for denormalizing the bounding box. If None (default),
-                the coordinates are assumed to be absolute pixel values.
-
-        Returns:
-            Pose3D: A Pose3D object.
-
-        """
-        assert isinstance(points, (tuple, list)), (
-            "Pose3D must be a tuple or list of 3 lists."
-        )
-        assert len(points) == 3, (
-            "Pose3D must be a list of 3 lists: x, y coordinates and visible."
-        )
-        points_x, points_y, points_v = points
-        assert (
-            isinstance(points_x, (tuple, list))
-            and isinstance(points_y, (tuple, list))
-            and isinstance(points_v, (tuple, list))
-        ), "Pose3D must be a tuple or list of 3 lists."
-        assert len(points_x) == len(points_y) == len(points_v) == 17, (
-            "Pose3D x, y coordinates and visible must have the same length of 17."
-        )
-        assert all(
-            isinstance(value, (int, float))
-            for value in [*points_x, *points_y, *points_v]
-        ), "Pose3D coordinates must be floats or integers."
-
-        if normalized_to is not None:
-            assert all(0 <= coord <= 1 for coord in [*points_x, *points_y]), (
-                "Normalized coordinates must be floats between 0 and 1."
-            )
-            width, height = validate_img_size(normalized_to)
-            points_x = [coord * width for coord in points_x]
-            points_y = [coord * height for coord in points_y]
-
-        return Pose3D(
-            x=[round(coord) for coord in points_x],
-            y=[round(coord) for coord in points_y],
-            visible=list(points_v),
-        )
-
-    def to_normalized(
-        self,
-        img_size: Sequence[int],
-    ) -> tuple[list[float], list[float], list[float]]:
-        """
-        Return the pose 3D keypoints in normalized coordinates.
-
-        Normalized coordinates are floats between 0 and 1, representing the
-        relative position of the pixels in the image.
-
-        Returns:
-            tuple[list[float], list[float], list[float]]: The pose keypoints
-                with normalized coordinates and visibility values.
-        """
-        width, height = validate_img_size(img_size)
-        assert all(x <= width and y <= height for x, y in zip(self.x, self.y)), (
-            "Pose3D keypoints are out of image size."
-        )
-        return (
-            [coord / width for coord in self.x],
-            [coord / height for coord in self.y],
-            self.visible,
-        )
-
-    @staticmethod
-    def from_dict(points: dict[str, list[float]]) -> "Pose3D":
-        assert isinstance(points, dict) and set(points) == {
-            "x",
-            "y",
-            "visible",
-        }, "Pose3D must be a dict with keys 'x', 'y' and 'visible'."
-        return Pose3D.from_list([points["x"], points["y"], points["visible"]])
diff --git a/src/datachain/model/segment.py b/src/datachain/model/segment.py
index 64efcfa34..0a0aa1eb3 100644
--- a/src/datachain/model/segment.py
+++ b/src/datachain/model/segment.py
@@ -1,107 +1,24 @@
-from collections.abc import Sequence
-from typing import Optional
-
 from pydantic import Field
 
 from datachain.lib.data_model import DataModel
 
-from .utils import validate_img_size
-
 
 class Segment(DataModel):
     """
     A data model for representing segment.
 
+    Use `datachain.model.YoloSeg` for YOLO-specific segments.
+    This model is intended for general segment representations or other formats.
+
     Attributes:
         title (str): The title of the segment.
-        x (list[int]): The x-coordinates of the segment.
-        y (list[int]): The y-coordinates of the segment.
+        x (list[float]): The x-coordinates of the segment.
+        y (list[float]): The y-coordinates of the segment.
 
     The segment is represented as lists of x and y coordinates,
     where each index corresponds to a specific point.
     """
 
     title: str = Field(default="")
-    x: list[int] = Field(default=[])
-    y: list[int] = Field(default=[])
-
-    @staticmethod
-    def from_list(
-        points: Sequence[Sequence[float]],
-        title: str = "",
-        normalized_to: Optional[Sequence[int]] = None,
-    ) -> "Segment":
-        """
-        Create a Segment object from a list of x and y coordinates.
-
-        If the input coordinates are normalized (i.e., floats between 0 and 1),
-        they will be converted to absolute pixel values based on the provided
-        image size. The image size should be given as a tuple (width, height)
-        via the `normalized_to` argument.
- - Args: - points (Sequence[Sequence[float]]): The x and y coordinates - of the keypoints. List of 2 lists: x and y coordinates. - title (str, optional): The title or label for the segment. Defaults to "". - normalized_to (Sequence[int], optional): The reference image size - (width, height) for denormalizing the bounding box. If None (default), - the coordinates are assumed to be absolute pixel values. - - Returns: - Segment: A Segment object. - """ - assert isinstance(points, (tuple, list)), "Segment must be a list of 2 lists." - assert len(points) == 2, ( - "Segment must be a list of 2 lists: x and y coordinates." - ) - points_x, points_y = points - assert isinstance(points_x, (tuple, list)) and isinstance( - points_y, (tuple, list) - ), "Segment must be a list of 2 lists." - assert len(points_x) == len(points_y), ( - "Segment x and y coordinates must have the same length." - ) - assert all( - isinstance(value, (int, float)) for value in [*points_x, *points_y] - ), "Segment coordinates must be floats or integers." - - if normalized_to is not None: - assert all(0 <= coord <= 1 for coord in [*points_x, *points_y]), ( - "Normalized coordinates must be floats between 0 and 1." - ) - width, height = validate_img_size(normalized_to) - points_x = [coord * width for coord in points_x] - points_y = [coord * height for coord in points_y] - - return Segment( - title=title, - x=[round(coord) for coord in points_x], - y=[round(coord) for coord in points_y], - ) - - def to_normalized(self, img_size: Sequence[int]) -> tuple[list[float], list[float]]: - """ - Return the segment in normalized coordinates. - - Normalized coordinates are floats between 0 and 1, representing the - relative position of the pixels in the image. - - Returns: - tuple[list[float], list[float]]: The segment with normalized coordinates. - """ - width, height = validate_img_size(img_size) - assert all(x <= width and y <= height for x, y in zip(self.x, self.y)), ( - "Segment keypoints are out of image size." - ) - return ( - [coord / width for coord in self.x], - [coord / height for coord in self.y], - ) - - @staticmethod - def from_dict(points: dict[str, list[float]], title: str = "") -> "Segment": - assert isinstance(points, dict) and set(points) == { - "x", - "y", - }, "Segment must be a dict with keys 'x' and 'y'." - return Segment.from_list([points["x"], points["y"]], title=title) + x: list[float] = Field(default=[]) + y: list[float] = Field(default=[]) diff --git a/src/datachain/model/ultralytics/__init__.py b/src/datachain/model/ultralytics/__init__.py deleted file mode 100644 index 42d35f340..000000000 --- a/src/datachain/model/ultralytics/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -This module contains the YOLO models. - -YOLO stands for "You Only Look Once", a family of object detection models that -are designed to be fast and accurate. The models are trained to detect objects -in images by dividing the image into a grid and predicting the bounding boxes -and class probabilities for each grid cell. 
- -More information about YOLO can be found here: -- https://pjreddie.com/darknet/yolo/ -- https://docs.ultralytics.com/ -""" - -from .bbox import YoloBBox, YoloBBoxes, YoloOBBox, YoloOBBoxes -from .pose import YoloPose, YoloPoses -from .segment import YoloSegment, YoloSegments - -__all__ = [ - "YoloBBox", - "YoloBBoxes", - "YoloOBBox", - "YoloOBBoxes", - "YoloPose", - "YoloPoses", - "YoloSegment", - "YoloSegments", -] diff --git a/src/datachain/model/ultralytics/bbox.py b/src/datachain/model/ultralytics/bbox.py deleted file mode 100644 index ac3b03f7d..000000000 --- a/src/datachain/model/ultralytics/bbox.py +++ /dev/null @@ -1,147 +0,0 @@ -from typing import TYPE_CHECKING - -from pydantic import Field - -from datachain.lib.data_model import DataModel -from datachain.model.bbox import BBox, OBBox - -if TYPE_CHECKING: - from ultralytics.engine.results import Results - - -class YoloBBox(DataModel): - """ - A class representing a bounding box detected by a YOLO model. - - Attributes: - cls: The class of the detected object. - name: The name of the detected object. - confidence: The confidence score of the detection. - box: The bounding box of the detected object - """ - - cls: int = Field(default=-1) - name: str = Field(default="") - confidence: float = Field(default=0) - box: BBox - - @staticmethod - def from_result(result: "Results") -> "YoloBBox": - summary = result.summary() - if not summary: - return YoloBBox(box=BBox()) - name = summary[0].get("name", "") - box = ( - BBox.from_dict(summary[0]["box"], title=name) - if "box" in summary[0] - else BBox() - ) - return YoloBBox( - cls=summary[0]["class"], - name=name, - confidence=summary[0]["confidence"], - box=box, - ) - - -class YoloBBoxes(DataModel): - """ - A class representing a list of bounding boxes detected by a YOLO model. - - Attributes: - cls: A list of classes of the detected objects. - name: A list of names of the detected objects. - confidence: A list of confidence scores of the detections. - box: A list of bounding boxes of the detected objects - """ - - cls: list[int] - name: list[str] - confidence: list[float] - box: list[BBox] - - @staticmethod - def from_results(results: list["Results"]) -> "YoloBBoxes": - cls, names, confidence, box = [], [], [], [] - for r in results: - for s in r.summary(): - name = s.get("name", "") - cls.append(s["class"]) - names.append(name) - confidence.append(s["confidence"]) - box.append(BBox.from_dict(s.get("box", {}), title=name)) - return YoloBBoxes( - cls=cls, - name=names, - confidence=confidence, - box=box, - ) - - -class YoloOBBox(DataModel): - """ - A class representing an oriented bounding box detected by a YOLO model. - - Attributes: - cls: The class of the detected object. - name: The name of the detected object. - confidence: The confidence score of the detection. - box: The oriented bounding box of the detected object. - """ - - cls: int = Field(default=-1) - name: str = Field(default="") - confidence: float = Field(default=0) - box: OBBox - - @staticmethod - def from_result(result: "Results") -> "YoloOBBox": - summary = result.summary() - if not summary: - return YoloOBBox(box=OBBox()) - name = summary[0].get("name", "") - box = ( - OBBox.from_dict(summary[0]["box"], title=name) - if "box" in summary[0] - else OBBox() - ) - return YoloOBBox( - cls=summary[0]["class"], - name=name, - confidence=summary[0]["confidence"], - box=box, - ) - - -class YoloOBBoxes(DataModel): - """ - A class representing a list of oriented bounding boxes detected by a YOLO model. 
- - Attributes: - cls: A list of classes of the detected objects. - name: A list of names of the detected objects. - confidence: A list of confidence scores of the detections. - box: A list of oriented bounding boxes of the detected objects. - """ - - cls: list[int] - name: list[str] - confidence: list[float] - box: list[OBBox] - - @staticmethod - def from_results(results: list["Results"]) -> "YoloOBBoxes": - cls, names, confidence, box = [], [], [], [] - for r in results: - for s in r.summary(): - name = s.get("name", "") - cls.append(s["class"]) - names.append(name) - confidence.append(s["confidence"]) - box.append(OBBox.from_dict(s.get("box", {}), title=name)) - return YoloOBBoxes( - cls=cls, - name=names, - confidence=confidence, - box=box, - ) diff --git a/src/datachain/model/ultralytics/pose.py b/src/datachain/model/ultralytics/pose.py deleted file mode 100644 index 6bd687bc5..000000000 --- a/src/datachain/model/ultralytics/pose.py +++ /dev/null @@ -1,113 +0,0 @@ -from typing import TYPE_CHECKING - -from pydantic import Field - -from datachain.lib.data_model import DataModel -from datachain.model.bbox import BBox -from datachain.model.pose import Pose3D - -if TYPE_CHECKING: - from ultralytics.engine.results import Results - - -class YoloPoseBodyPart: - """An enumeration of body parts for YOLO pose keypoints.""" - - nose = 0 - left_eye = 1 - right_eye = 2 - left_ear = 3 - right_ear = 4 - left_shoulder = 5 - right_shoulder = 6 - left_elbow = 7 - right_elbow = 8 - left_wrist = 9 - right_wrist = 10 - left_hip = 11 - right_hip = 12 - left_knee = 13 - right_knee = 14 - left_ankle = 15 - right_ankle = 16 - - -class YoloPose(DataModel): - """ - A data model for YOLO pose keypoints. - - Attributes: - cls: The class of the pose. - name: The name of the pose. - confidence: The confidence score of the pose. - box: The bounding box of the pose. - pose: The 3D pose keypoints. - """ - - cls: int = Field(default=-1) - name: str = Field(default="") - confidence: float = Field(default=0) - box: BBox - pose: Pose3D - - @staticmethod - def from_result(result: "Results") -> "YoloPose": - summary = result.summary() - if not summary: - return YoloPose(box=BBox(), pose=Pose3D()) - name = summary[0].get("name", "") - box = ( - BBox.from_dict(summary[0]["box"], title=name) - if "box" in summary[0] - else BBox() - ) - pose = ( - Pose3D.from_dict(summary[0]["keypoints"]) - if "keypoints" in summary[0] - else Pose3D() - ) - return YoloPose( - cls=summary[0]["class"], - name=name, - confidence=summary[0]["confidence"], - box=box, - pose=pose, - ) - - -class YoloPoses(DataModel): - """ - A data model for a list of YOLO pose keypoints. - - Attributes: - cls: The classes of the poses. - name: The names of the poses. - confidence: The confidence scores of the poses. - box: The bounding boxes of the poses. - pose: The 3D pose keypoints of the poses. 
- """ - - cls: list[int] - name: list[str] - confidence: list[float] - box: list[BBox] - pose: list[Pose3D] - - @staticmethod - def from_results(results: list["Results"]) -> "YoloPoses": - cls, names, confidence, box, pose = [], [], [], [], [] - for r in results: - for s in r.summary(): - name = s.get("name", "") - cls.append(s["class"]) - names.append(name) - confidence.append(s["confidence"]) - box.append(BBox.from_dict(s.get("box", {}), title=name)) - pose.append(Pose3D.from_dict(s.get("keypoints", {}))) - return YoloPoses( - cls=cls, - name=names, - confidence=confidence, - box=box, - pose=pose, - ) diff --git a/src/datachain/model/ultralytics/segment.py b/src/datachain/model/ultralytics/segment.py deleted file mode 100644 index 424c005c7..000000000 --- a/src/datachain/model/ultralytics/segment.py +++ /dev/null @@ -1,91 +0,0 @@ -from typing import TYPE_CHECKING - -from pydantic import Field - -from datachain.lib.data_model import DataModel -from datachain.model.bbox import BBox -from datachain.model.segment import Segment - -if TYPE_CHECKING: - from ultralytics.engine.results import Results - - -class YoloSegment(DataModel): - """ - A data model for a single YOLO segment. - - Attributes: - cls (int): The class of the segment. - name (str): The name of the segment. - confidence (float): The confidence of the segment. - box (BBox): The bounding box of the segment. - segment (Segments): The segments of the segment. - """ - - cls: int = Field(default=-1) - name: str = Field(default="") - confidence: float = Field(default=0) - box: BBox - segment: Segment - - @staticmethod - def from_result(result: "Results") -> "YoloSegment": - summary = result.summary() - if not summary: - return YoloSegment(box=BBox(), segment=Segment()) - name = summary[0].get("name", "") - box = ( - BBox.from_dict(summary[0]["box"], title=name) - if "box" in summary[0] - else BBox() - ) - segment = ( - Segment.from_dict(summary[0]["segments"], title=name) - if "segments" in summary[0] - else Segment() - ) - return YoloSegment( - cls=summary[0]["class"], - name=summary[0]["name"], - confidence=summary[0]["confidence"], - box=box, - segment=segment, - ) - - -class YoloSegments(DataModel): - """ - A data model for a list of YOLO segments. - - Attributes: - cls (list[int]): The classes of the segments. - name (list[str]): The names of the segments. - confidence (list[float]): The confidences of the segments. - box (list[BBox]): The bounding boxes of the segments. - segment (list[Segments]): The segments of the segments. 
- """ - - cls: list[int] - name: list[str] - confidence: list[float] - box: list[BBox] - segment: list[Segment] - - @staticmethod - def from_results(results: list["Results"]) -> "YoloSegments": - cls, names, confidence, box, segment = [], [], [], [], [] - for r in results: - for s in r.summary(): - name = s.get("name", "") - cls.append(s["class"]) - names.append(name) - confidence.append(s["confidence"]) - box.append(BBox.from_dict(s.get("box", {}), title=name)) - segment.append(Segment.from_dict(s.get("segments", {}), title=name)) - return YoloSegments( - cls=cls, - name=names, - confidence=confidence, - box=box, - segment=segment, - ) diff --git a/src/datachain/model/utils.py b/src/datachain/model/utils.py index a28ee7465..f1aba20e8 100644 --- a/src/datachain/model/utils.py +++ b/src/datachain/model/utils.py @@ -1,76 +1,175 @@ from collections.abc import Sequence +from typing import Literal +BBoxType = Literal["albumentations", "coco", "voc", "yolo"] -def validate_img_size(img_size: Sequence[int]) -> Sequence[int]: - """Validate the image size.""" - assert isinstance(img_size, (tuple, list)), "Image size must be a tuple or list." - assert len(img_size) == 2, "Image size must be a tuple or list of 2 integers." - assert all(isinstance(value, int) for value in img_size), ( - "Image size must be integers." - ) - assert all(value > 0 for value in img_size), "Image size must be positive integers." - return img_size - - -def validate_bbox(coords: Sequence[float]) -> Sequence[float]: - """Validate the bounding box coordinates.""" - assert isinstance(coords, (tuple, list)), "Bounding box must be a tuple or list." - assert len(coords) == 4, "Bounding box must be a tuple or list of 4 coordinates." - assert all(isinstance(value, (int, float)) for value in coords), ( - "Bounding box coordinates must be floats or integers." - ) - assert all(value >= 0 for value in coords), ( - "Bounding box coordinates must be positive." - ) - return coords - - -def validate_bbox_normalized( - coords: Sequence[float], img_size: Sequence[int] -) -> Sequence[float]: - """Validate the bounding box coordinates and normalize them to the image size.""" - assert isinstance(coords, (tuple, list)), "Bounding box must be a tuple or list." - assert len(coords) == 4, "Bounding box must be a tuple or list of 4 coordinates." - assert all(isinstance(value, float) for value in coords), ( - "Bounding box normalized coordinates must be floats." - ) - assert all(0 <= value <= 1 for value in coords), ( - "Bounding box normalized coordinates must be floats between 0 and 1." - ) - - width, height = validate_img_size(img_size) - - return [ - coords[0] * width, - coords[1] * height, - coords[2] * width, - coords[3] * height, - ] - - -def normalize_coords( - coords: Sequence[int], + +def convert_bbox( + coords: Sequence[float], + img_size: Sequence[int], + source: BBoxType, + target: BBoxType, +) -> list[float]: + """ + Convert the bounding box coordinates between different formats. + + Supported formats: "albumentations", "coco", "voc", "yolo". + + Albumentations format represents bounding boxes as [x_min, y_min, x_max, y_max], + where: + - (x_min, y_min) are the normalized coordinates of the top-left corner. + - (x_max, y_max) are the normalized coordinates of the bottom-right corner. + + COCO format represents bounding boxes as [x_min, y_min, width, height], where: + - (x_min, y_min) are the pixel coordinates of the top-left corner. + - width and height define the size of the bounding box in pixels. 
+ + PASCAL VOC format represents bounding boxes as [x_min, y_min, x_max, y_max], where: + - (x_min, y_min) are the pixel coordinates of the top-left corner. + - (x_max, y_max) are the pixel coordinates of the bottom-right corner. + + YOLO format represents bounding boxes as [x_center, y_center, width, height], where: + - (x_center, y_center) are the normalized coordinates of the box center. + - width and height normalized values define the size of the bounding box. + + Normalized coordinates are floats between 0 and 1, representing the + relative position of the pixels in the image. + + Args: + coords (Sequence[float]): The bounding box coordinates to convert. + img_size (Sequence[int]): The reference image size (width, height). + source (str): The source bounding box format. + target (str): The target bounding box format. + + Returns: + list[float]: The bounding box coordinates in the target format. + """ + if source == "albumentations": + return [ + round(c, 4) for c in convert_albumentations_bbox(coords, img_size, target) + ] + if source == "coco": + return [round(c, 4) for c in convert_coco_bbox(coords, img_size, target)] + if source == "voc": + return [round(c, 4) for c in convert_voc_bbox(coords, img_size, target)] + if source == "yolo": + return [round(c, 4) for c in convert_yolo_bbox(coords, img_size, target)] + raise ValueError(f"Unsupported source format: {source}") + + +def convert_albumentations_bbox( + coords: Sequence[float], + img_size: Sequence[int], + target: BBoxType, +) -> list[float]: + """Convert the Albumentations bounding box coordinates to other formats.""" + if target == "albumentations": + return list(coords) + if target == "coco": + return [ + coords[0] * img_size[0], + coords[1] * img_size[1], + (coords[2] - coords[0]) * img_size[0], + (coords[3] - coords[1]) * img_size[1], + ] + if target == "voc": + return [coords[i] * img_size[i % 2] for i in range(4)] + if target == "yolo": + return [ + (coords[0] + coords[2]) / 2, + (coords[1] + coords[3]) / 2, + coords[2] - coords[0], + coords[3] - coords[1], + ] + raise ValueError(f"Unsupported target format: {target}") + + +def convert_coco_bbox( + coords: Sequence[float], + img_size: Sequence[int], + target: BBoxType, +) -> list[float]: + """Convert the COCO bounding box coordinates to other formats.""" + if target == "albumentations": + return [ + coords[0] / img_size[0], + coords[1] / img_size[1], + (coords[0] + coords[2]) / img_size[0], + (coords[1] + coords[3]) / img_size[1], + ] + if target == "coco": + return list(coords) + if target == "voc": + return [coords[0], coords[1], coords[0] + coords[2], coords[1] + coords[3]] + if target == "yolo": + return [ + (coords[0] + coords[2] / 2) / img_size[0], + (coords[1] + coords[3] / 2) / img_size[1], + coords[2] / img_size[0], + coords[3] / img_size[1], + ] + raise ValueError(f"Unsupported target format: {target}") + + +def convert_voc_bbox( + coords: Sequence[float], + img_size: Sequence[int], + target: BBoxType, +) -> list[float]: + """Convert the PASCAL VOC bounding box coordinates to other formats.""" + if target == "albumentations": + return [ + coords[0] / img_size[0], + coords[1] / img_size[1], + coords[2] / img_size[0], + coords[3] / img_size[1], + ] + if target == "coco": + return [ + coords[0], + coords[1], + coords[2] - coords[0], + coords[3] - coords[1], + ] + if target == "voc": + return list(coords) + if target == "yolo": + return [ + (coords[0] + coords[2]) / 2 / img_size[0], + (coords[1] + coords[3]) / 2 / img_size[1], + (coords[2] - coords[0]) / 
img_size[0],
+            (coords[3] - coords[1]) / img_size[1],
+        ]
+    raise ValueError(f"Unsupported target format: {target}")
+
+
+def convert_yolo_bbox(
+    coords: Sequence[float],
     img_size: Sequence[int],
+    target: BBoxType,
 ) -> list[float]:
-    """Normalize the bounding box coordinates to the image size."""
-    assert isinstance(coords, (tuple, list)), "Coords must be a tuple or list."
-    assert len(coords) == 4, "Coords must be a tuple or list of 4 coordinates."
-    assert all(isinstance(value, int) for value in coords), (
-        "Coords must be a tuple or list of 4 ints."
-    )
-
-    width, height = validate_img_size(img_size)
-
-    assert (
-        0 <= coords[0] <= width
-        and 0 <= coords[1] <= height
-        and 0 <= coords[2] <= width
-        and 0 <= coords[3] <= height
-    ), "Bounding box coordinates are out of image size"
-
-    return [
-        coords[0] / width,
-        coords[1] / height,
-        coords[2] / width,
-        coords[3] / height,
-    ]
+    """Convert the YOLO bounding box coordinates to other formats."""
+    if target == "albumentations":
+        return [
+            coords[0] - coords[2] / 2,
+            coords[1] - coords[3] / 2,
+            coords[0] + coords[2] / 2,
+            coords[1] + coords[3] / 2,
+        ]
+    if target == "coco":
+        return [
+            (coords[0] - coords[2] / 2) * img_size[0],
+            (coords[1] - coords[3] / 2) * img_size[1],
+            coords[2] * img_size[0],
+            coords[3] * img_size[1],
+        ]
+    if target == "voc":
+        return [
+            (coords[0] - coords[2] / 2) * img_size[0],
+            (coords[1] - coords[3] / 2) * img_size[1],
+            (coords[0] + coords[2] / 2) * img_size[0],
+            (coords[1] + coords[3] / 2) * img_size[1],
+        ]
+    if target == "yolo":
+        return list(coords)
+    raise ValueError(f"Unsupported target format: {target}")
diff --git a/src/datachain/model/yolo.py b/src/datachain/model/yolo.py
new file mode 100644
index 000000000..97cd4d3a4
--- /dev/null
+++ b/src/datachain/model/yolo.py
@@ -0,0 +1,498 @@
+"""
+This module contains the YOLO models.
+
+YOLO stands for "You Only Look Once", a family of object detection models that
+are designed to be fast and accurate. The models are trained to detect objects
+in images by dividing the image into a grid and predicting the bounding boxes
+and class probabilities for each grid cell.
+
+More information about YOLO can be found here:
+- https://docs.ultralytics.com/
+- https://docs.ultralytics.com/models/yolo11/
+"""
+
+from collections.abc import Iterator
+from typing import TYPE_CHECKING
+
+from pydantic import Field
+
+from datachain.lib.data_model import DataModel
+
+from .utils import convert_bbox
+
+if TYPE_CHECKING:
+    from ultralytics.engine.results import Results
+
+
+class Yolo(DataModel):
+    """
+    A class representing bounding boxes of objects detected by a YOLO model.
+
+    Object detection is a task that involves identifying the location and class
+    of objects in an image or video stream.
+
+    See https://docs.ultralytics.com/tasks/detect/ for more information.
+
+    Attributes:
+        cls (list[int]): A list of classes of the detected objects, default `[]`.
+        name (list[str]): A list of names of the detected objects, default `[]`.
+        confidence (list[float]): A list of confidence scores of the detections,
+            default `[]`.
+        box (list[list[float]]): A list of bounding boxes of the detected objects,
+            stored as pixel coordinates [x_min, y_min, x_max, y_max]
+            (PASCAL VOC format), default `[]`.
+        orig_shape (list[int]): The original size of the image (height, width),
+            default `[]`.
+    """
+
+    cls: list[int] = Field(default=[])
+    name: list[str] = Field(default=[])
+    confidence: list[float] = Field(default=[])
+    box: list[list[float]] = Field(default=[])
+    orig_shape: list[int] = Field(default=[])
+
+    @staticmethod
+    def from_yolo_results(results: list["Results"]) -> "Yolo":
+        """
+        Create YOLO bounding boxes from the YOLO results.
+
+        Example:
+            ```python
+            from ultralytics import YOLO
+            from datachain.model.yolo import Yolo
+
+            model = YOLO("yolo11n.pt")
+            results = model("image.jpg", verbose=False)
+            boxes = Yolo.from_yolo_results(results)
+            ```
+
+        Args:
+            results: YOLO results from the model.
+
+        Returns:
+            Yolo: A YOLO bounding boxes data model.
+        """
+        if not (summary := results[0].summary(normalize=False)):
+            return Yolo()
+
+        cls, name, confidence, box = [], [], [], []
+        for res in summary:
+            cls.append(res.get("class", -1))
+            name.append(res.get("name", ""))
+            confidence.append(res.get("confidence", -1))
+            box.append(_get_box_from_yolo_result(res))
+
+        return Yolo(
+            cls=cls,
+            name=name,
+            confidence=confidence,
+            box=box,
+            orig_shape=list(results[0].orig_shape),
+        )
+
+    @property
+    def img_size(self) -> tuple[int, int]:
+        """Get the image size (width, height) from the original shape."""
+        return (
+            (self.orig_shape[1], self.orig_shape[0])
+            if len(self.orig_shape) == 2
+            else (0, 0)
+        )
+
+    def to_albumentations(self) -> Iterator[list[float]]:
+        """
+        Convert the bounding box to Albumentations format with normalized coordinates.
+
+        Albumentations format represents bounding boxes as [x_min, y_min, x_max, y_max],
+        where:
+        - (x_min, y_min) are the normalized coordinates of the top-left corner.
+        - (x_max, y_max) are the normalized coordinates of the bottom-right corner.
+
+        Normalized coordinates are floats between 0 and 1, representing the
+        relative position of the pixels in the image.
+
+        Returns:
+            Iterator[list[float]]: An iterator of bounding box coordinates
+                in Albumentations format with normalized coordinates.
+        """
+        return (
+            convert_bbox(b, self.img_size, source="voc", target="albumentations")
+            for b in self.box
+        )
+
+    def to_coco(self) -> Iterator[list[int]]:
+        """
+        Convert the bounding box to COCO format.
+
+        COCO format represents bounding boxes as [x_min, y_min, width, height], where:
+        - (x_min, y_min) are the pixel coordinates of the top-left corner.
+        - width and height define the size of the bounding box in pixels.
+
+        Returns:
+            Iterator[list[int]]: An iterator of bounding box coordinates in COCO format.
+        """
+        return (
+            list(
+                map(round, convert_bbox(b, self.img_size, source="voc", target="coco"))
+            )
+            for b in self.box
+        )
+
+    def to_voc(self) -> Iterator[list[int]]:
+        """
+        Convert the bounding box to PASCAL VOC format.
+
+        PASCAL VOC format represents bounding boxes as [x_min, y_min, x_max, y_max],
+        where:
+        - (x_min, y_min) are the pixel coordinates of the top-left corner.
+        - (x_max, y_max) are the pixel coordinates of the bottom-right corner.
+
+        Returns:
+            Iterator[list[int]]: An iterator of bounding box coordinates
+                in PASCAL VOC format.
+        """
+        return (
+            list(map(round, convert_bbox(b, self.img_size, source="voc", target="voc")))
+            for b in self.box
+        )
+
+    def to_yolo(self) -> Iterator[list[float]]:
+        """
+        Convert the bounding box to YOLO format with normalized coordinates.
+
+        YOLO format represents bounding boxes as [x_center, y_center, width, height],
+        where:
+        - (x_center, y_center) are the normalized coordinates of the box center.
+        - width and height normalized values define the size of the bounding box.
+
+        Normalized coordinates are floats between 0 and 1, representing the
+        relative position of the pixels in the image.
+
+        Returns:
+            Iterator[list[float]]: An iterator of bounding box coordinates
+                in YOLO format with normalized coordinates.
+        """
+        return (
+            convert_bbox(b, self.img_size, source="voc", target="yolo")
+            for b in self.box
+        )
+
+
+class YoloObb(DataModel):
+    """
+    A class representing oriented bounding boxes of objects detected by a YOLO model.
+
+    Oriented object detection goes a step further than object detection and introduces
+    an extra angle to locate objects more accurately in an image.
+
+    See https://docs.ultralytics.com/tasks/obb/ for more information.
+
+    Attributes:
+        cls (list[int]): A list of classes of the detected objects, default `[]`.
+        name (list[str]): A list of names of the detected objects, default `[]`.
+        confidence (list[float]): A list of confidence scores of the detections,
+            default `[]`.
+        obox (list[list[float]]): A list of oriented bounding boxes of the detected
+            objects, stored as four corner pixel coordinates
+            [x1, y1, x2, y2, x3, y3, x4, y4], default `[]`.
+        orig_shape (list[int]): The original size of the image (height, width),
+            default `[]`.
+    """
+
+    cls: list[int] = Field(default=[])
+    name: list[str] = Field(default=[])
+    confidence: list[float] = Field(default=[])
+    obox: list[list[float]] = Field(default=[])
+    orig_shape: list[int] = Field(default=[])
+
+    @staticmethod
+    def from_yolo_results(results: list["Results"]) -> "YoloObb":
+        """
+        Create YOLO oriented bounding boxes from the YOLO results.
+
+        Example:
+            ```python
+            from ultralytics import YOLO
+            from datachain.model.yolo import YoloObb
+
+            model = YOLO("yolo11n-obb.pt")
+            results = model("image.jpg", verbose=False)
+            boxes = YoloObb.from_yolo_results(results)
+            ```
+
+        Args:
+            results: YOLO results from the model.
+
+        Returns:
+            YoloObb: A YOLO oriented bounding boxes data model.
+        """
+        if not (summary := results[0].summary(normalize=False)):
+            return YoloObb()
+
+        cls, name, confidence, obox = [], [], [], []
+        for res in summary:
+            cls.append(res.get("class", -1))
+            name.append(res.get("name", ""))
+            confidence.append(res.get("confidence", -1))
+            obox.append(_get_obox_from_yolo_result(res))
+
+        return YoloObb(
+            cls=cls,
+            name=name,
+            confidence=confidence,
+            obox=obox,
+            orig_shape=list(results[0].orig_shape),
+        )
+
+    @property
+    def img_size(self) -> tuple[int, int]:
+        """Get the image size (width, height) from the original shape."""
+        return (
+            (self.orig_shape[1], self.orig_shape[0])
+            if len(self.orig_shape) == 2
+            else (0, 0)
+        )
+
+
+class YoloSeg(Yolo):
+    """
+    A class representing object segments detected by a YOLO model.
+
+    This class extends the `Yolo` class to include the segments of the detected objects.
+
+    Instance segmentation goes a step further than object detection and involves
+    identifying individual objects in an image and segmenting them
+    from the rest of the image.
+
+    See https://docs.ultralytics.com/tasks/segment/ for more information.
+
+    Attributes:
+        cls (list[int]): A list of classes of the detected objects, default `[]`.
+        name (list[str]): A list of names of the detected objects, default `[]`.
+        confidence (list[float]): A list of confidence scores of the detections,
+            default `[]`.
+        box (list[list[float]]): A list of bounding boxes of the detected objects,
+            stored as pixel coordinates [x_min, y_min, x_max, y_max]
+            (PASCAL VOC format), default `[]`.
+        segments (list[list[list[float]]]): A list of segments of the detected objects,
+            stored as a list of x and y coordinates, default `[]`.
+        orig_shape (list[int]): The original size of the image (height, width),
+            default `[]`.
+    """
+
+    segments: list[list[list[float]]] = Field(default=[])
+
+    @staticmethod
+    def from_yolo_results(results: list["Results"]) -> "YoloSeg":
+        """
+        Create YOLO segments from the YOLO results.
+
+        Example:
+            ```python
+            from ultralytics import YOLO
+            from datachain.model.yolo import YoloSeg
+
+            model = YOLO("yolo11n-seg.pt")
+            results = model("image.jpg", verbose=False)
+            segments = YoloSeg.from_yolo_results(results)
+            ```
+
+        Args:
+            results: YOLO results from the model.
+
+        Returns:
+            YoloSeg: A YOLO segmentation data model.
+        """
+        if not (summary := results[0].summary(normalize=False)):
+            return YoloSeg()
+
+        cls, name, confidence, box, segments = [], [], [], [], []
+        for res in summary:
+            cls.append(res.get("class", -1))
+            name.append(res.get("name", ""))
+            confidence.append(res.get("confidence", -1))
+            box.append(_get_box_from_yolo_result(res))
+            segments.append(_get_segments_from_yolo_result(res))
+
+        return YoloSeg(
+            cls=cls,
+            name=name,
+            confidence=confidence,
+            box=box,
+            segments=segments,
+            orig_shape=list(results[0].orig_shape),
+        )
+
+
+class YoloPoseBodyPart:
+    """An enumeration of body parts for YOLO pose keypoints."""
+
+    nose = 0
+    left_eye = 1
+    right_eye = 2
+    left_ear = 3
+    right_ear = 4
+    left_shoulder = 5
+    right_shoulder = 6
+    left_elbow = 7
+    right_elbow = 8
+    left_wrist = 9
+    right_wrist = 10
+    left_hip = 11
+    right_hip = 12
+    left_knee = 13
+    right_knee = 14
+    left_ankle = 15
+    right_ankle = 16
+
+
+class YoloPose(Yolo):
+    """
+    A class representing human pose keypoints detected by a YOLO model.
+
+    This class extends the `Yolo` class to include the pose keypoints of the
+    detected objects.
+
+    Pose estimation is a task that involves identifying the location of specific points
+    in an image, usually referred to as keypoints.
+
+    See https://docs.ultralytics.com/tasks/pose/ for more information.
+
+    Attributes:
+        cls (list[int]): A list of classes of the detected objects, default `[]`.
+        name (list[str]): A list of names of the detected objects, default `[]`.
+        confidence (list[float]): A list of confidence scores of the detections,
+            default `[]`.
+        box (list[list[float]]): A list of bounding boxes of the detected objects,
+            stored as pixel coordinates [x_min, y_min, x_max, y_max]
+            (PASCAL VOC format), default `[]`.
+        keypoints (list[list[list[float]]]): A list of human pose keypoints
+            of the detected objects, stored as a list of x and y coordinates
+            and visibility score, default `[]`.
+        orig_shape (list[int]): The original size of the image (height, width),
+            default `[]`.
+
+    Note:
+        There are 17 keypoints in total, each represented by a pair of x and y
+        coordinates and a visibility score. The keypoints can be accessed by name
+        using the `datachain.model.YoloPoseBodyPart` enumeration.
+    """
+
+    keypoints: list[list[list[float]]] = Field(default=[])
+
+    @staticmethod
+    def from_yolo_results(results: list["Results"]) -> "YoloPose":
+        """
+        Create YOLO pose keypoints from the YOLO results.
+
+        Example:
+            ```python
+            from ultralytics import YOLO
+            from datachain.model.yolo import YoloPose
+
+            model = YOLO("yolo11n-pose.pt")
+            results = model("image.jpg", verbose=False)
+            poses = YoloPose.from_yolo_results(results)
+            ```
+
+        Args:
+            results: YOLO results from the model.
+ + Returns: + YoloPose: A YOLO pose keypoints data model. + """ + if not (summary := results[0].summary(normalize=False)): + return YoloPose() + + cls, name, confidence, box, keypoints = [], [], [], [], [] + for res in summary: + cls.append(res.get("class", -1)) + name.append(res.get("name", "")) + confidence.append(res.get("confidence", -1)) + box.append(_get_box_from_yolo_result(res)) + keypoints.append(_get_keypoints_from_yolo_result(res)) + + return YoloPose( + cls=cls, + name=name, + confidence=confidence, + box=box, + keypoints=keypoints, + orig_shape=list(results[0].orig_shape), + ) + + +class YoloCls(DataModel): + """ + A class representing image classification results from a YOLO model. + + Image classification is the simplest of the three tasks and involves classifying + an entire image into one of a set of predefined classes. + + See https://docs.ultralytics.com/tasks/classify/ for more information. + + Attributes: + cls (list[int]): A list of classes of the detected objects, default `[]`. + name (list[str]): A list of names of the detected objects, default `[]`. + confidence (list[float]): A list of confidence scores of the detections, + default `[]`. + """ + + cls: list[int] = Field(default=[]) + name: list[str] = Field(default=[]) + confidence: list[float] = Field(default=[]) + + @staticmethod + def from_yolo_results(results: list["Results"]) -> "YoloCls": + """ + Create a YOLO classification model from the YOLO results. + + Example: + ```python + from ultralytics import YOLO + from datachain.model.bbox import YoloCls + + model = YOLO("yolo11n-cls.pt") + results = model("image.jpg", verbose=False) + info = YoloCls.from_yolo_results(results) + ``` + + Args: + results: YOLO results from the model. + + Returns: + YoloCls: A YOLO classification data model. + """ + if not results[0].probs: + return YoloCls() + + cls, name, confidence = [], [], [] + for i, cls_id in enumerate(results[0].probs.top5): + cls.append(cls_id) + name.append(results[0].names[cls_id]) + confidence.append(round(results[0].probs.top5conf[i].item(), 4)) + + return YoloCls(cls=cls, name=name, confidence=confidence) + + +def _get_box_from_yolo_result(result: dict) -> list[float]: + """Get the bounding box coordinates from the YOLO result.""" + box = result.get("box", {}) + return [box.get(c, -1) for c in ("x1", "y1", "x2", "y2")] + + +def _get_obox_from_yolo_result(result: dict) -> list[float]: + """Get the oriented bounding box coordinates from the YOLO result.""" + box = result.get("box", {}) + return [box.get(c, -1) for c in ("x1", "y1", "x2", "y2", "x3", "y3", "x4", "y4")] + + +def _get_segments_from_yolo_result(result: dict) -> list[list[float]]: + """Get the segment coordinates from the YOLO result.""" + segment = result.get("segments", {}) + return [segment.get(c, []) for c in ("x", "y")] + + +def _get_keypoints_from_yolo_result(result: dict) -> list[list[float]]: + """Get the pose keypoints coordinates and visibility from the YOLO result.""" + keypoints = result.get("keypoints", {}) + return [keypoints.get(c, []) for c in ("x", "y", "visible")] diff --git a/tests/.DS_Store b/tests/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..de810e5cd5d1dd807a805187f64b838a467df17a GIT binary patch literal 6148 zcmeHK!AiqG5Z!H~Z74zx3Oz1(Em&<8iGO?>$+dD_6y~pS&mam#b4&R%S4TDp7gUKwP7k?B-GQI~J`dcc? 
zkfke=C!QMQ^DWOQ7lgzBF+dC~W5C>@)!H%#vJ(Tuz`tbx?*{>j=o&0Es;dJA)dB$G zZ=@2KV=sX*!k}xg&bQO295~0_YknG=c?$egqT^G!O&7%D^WiMTUd` literal 0 HcmV?d00001 diff --git a/tests/func/.DS_Store b/tests/func/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..e0d2e115f4431929bc277eccea8e8f7165d1ce26 GIT binary patch literal 6148 zcmeHKOH0E*5Z>*>HWVQTg&r5Y7Ob|4#Y>3w4;aydN=-=6V45v`m_sS#tbfQ~;_q>0 zcLOa3k0N#kcE8zq>}Ed5J{V)%U4%!B*^IF!G(?U{iJ-aERWZScT+NXL*=!OdGANnm zZ<_Gi>uk&<3t7b0zyBkcB=KzC?SAq~wN~G0G^|Fe@!&nn%$xi3#mx1mcR0G1G6@RZ z4{qWp8`*moGM)Qz8ckI~97Ygwe;cQv%v`xh! zj=LSv@13qzR&)R0@ce4@l8jUNritXhxRPCi1-yf@R?(|BO%s_ufwjmeA_<8BVt^PR z2DXy{a~?EX+gUzUO$-nNKQMs%gMfzU7|b=Qtphr|KBK>ehypskB@l%{$6&4zJRn@B z0_s$5o)}!GgI$<7$6&5er!%frhI!1&#p8vm)xj=QIOC2+>WKkjV3UEW9@=>RU&1d_ z`pDl*p%F1a4E!?&cx~tnT`0<&t>4PSvsOTRgoc7~IVvEaZ(IUkfcr>WIkjJ)4snjb WTqDkcc9jlD7Xd{Gb;Q6gFz^LI;Y!{B literal 0 HcmV?d00001 diff --git a/tests/func/model/.DS_Store b/tests/func/model/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..abc43654a57e597cf1d30412076964724f40cb8e GIT binary patch literal 6148 zcmeHK%}T>S5Z-O0Nhm@N3Oz1(Em&<8i**Xq{Brfs`+!@dn4nidyAHs_YT@E z(d`^97Ou0iyLWOvc+8%1`JxHsz_*eugB84j^0}VpaGYl{zXxBHU*!=J1H=F^Kn$!e z1LjO{8tY2|t(+Jj27X`w_XhzD(KlFXR9gphczwpWg@^(=z9kTaLEm7h5h5U5mjdci zZk`ximxEuJJl|lcQI|8WW`=Rh%+=$CtJ%RXR666nM(T+HVqlqpx;8yL|Igr;S^3Cc zE+LB;AO`*!1KjF|{Qwqa&em_s!?RXEyMu;;c_k_!pf6nlz`%W^se(E#P=`Foq>F$eggRp27Z~^er@Klz literal 0 HcmV?d00001 diff --git a/tests/func/model/__init__.py b/tests/func/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/func/model/conftest.py b/tests/func/model/conftest.py new file mode 100644 index 000000000..9386deb9d --- /dev/null +++ b/tests/func/model/conftest.py @@ -0,0 +1,29 @@ +import os + +import numpy as np +import pytest +import torch +from PIL import Image + + +@pytest.fixture +def running_img() -> np.ndarray: + img_file = os.path.join(os.path.dirname(__file__), "data", "running.jpg") + return np.array(Image.open(img_file)) + + +@pytest.fixture +def ships_img() -> np.ndarray: + img_file = os.path.join(os.path.dirname(__file__), "data", "ships.jpg") + return np.array(Image.open(img_file)) + + +@pytest.fixture +def running_img_masks() -> torch.Tensor: + mask0_file = os.path.join(os.path.dirname(__file__), "data", "running-mask0.png") + mask0_np = np.array(Image.open(mask0_file)) + + mask1_file = os.path.join(os.path.dirname(__file__), "data", "running-mask1.png") + mask1_np = np.array(Image.open(mask1_file)) + + return torch.tensor([mask0_np.astype(np.float32), mask1_np.astype(np.float32)]) diff --git a/tests/func/model/data/running-mask0.png b/tests/func/model/data/running-mask0.png new file mode 100644 index 0000000000000000000000000000000000000000..2b6af4225aedf97467e3f5ebb3bb53b8bfebf517 GIT binary patch literal 1112 zcmeAS@N?(olHy`uVBq!ia0y~yU_8LUz|_D26kyQ%_dSk*fkoca#WAE}&fB{ei!M9x zI9#0Z@PEDK9){e@N2YE0=EnWp=Az#tqw2+5V}q)8GlKLDdho}4+m*%d@6Rh*FW+PQ zr{a5jgCUci-+`k`neH*qPh2;j`F!a5#xpGU%I%-%zIuLO@~^vl`Px+S?%IV_RZrBf zm#@&b>$mq}Ja2L9y0cfr*K4;s`lq+_Zh3X5VJA<#oNnUTlfpAre)ti=s{KfP{)=C3 z{XgDR__by}pU>?6&FObW)DP47QMV2R#><@-Sr$_)T4m$8UOs^%coE~~(^eZKwPqN8 zI{T*f`|WdO8kZuQ(&ubs{r}CCkMFsOgn7}KFUIG2S%)^ms;b#q@9M zUa;)TmwdZ$_o3()X+lw6bC^ZnXoMc^+%W%2=!)De^Pl9lG~M079}{D?Krlzmwvp@G zr29^yIZF%pLN-i3<=WHKJ7t}d=$pl5(hC!$-!Y4a^JK8>HfZb?E-1X(@a=+{&x1!S zF>@Nawb@IA-W+7nTL*N@wge_|^#_mGm~J^+LzI55%XgW%wyK{qCgy(wTS55$hBrb5 zg<$6{KTx|lU%Br42Yc~3saXf!ex7e)bMyq?RG$fdUDuqSeuAv`SFw 
zZW2A(a8Wj7%IeacTzl_c(Ob!C(vTZt)p)aRY5Cbd&%GGeX3K~BE#NI_@;dN^qlOI7{3t+l_;RP6V6{~X0(kv!2a z_Va%B$@)*OG(2DP^Y0$#zwGCti@o{C&a#^O8P4B|XJs~>Q35i<)78&qol`;+01URF AhX4Qo literal 0 HcmV?d00001 diff --git a/tests/func/model/data/running-mask1.png b/tests/func/model/data/running-mask1.png new file mode 100644 index 0000000000000000000000000000000000000000..bc350014ea12643d551349a9234ce221a73f740b GIT binary patch literal 2087 zcmb_d`#aRz8lUgi3}%oTCN^TrL+(>K2btV&+a=c&radN1DiSF(E{$(xZ@0^SyhgcK12%5pB5T z7$(pJzqZ92x?86Y>l3=0@I<#ubtx}IJEow7R?`_Yn6=QYQ(O{~R@-Q1k8KE-YQ z?c$z=Gam4d^*CH;31oqfFj3T`4QiQEuV@?Co;Zn#8@d)XQYncVT#7q3 zlK5l;{|KRfrjh8{3rV_2*XF`$<b|AsAuzmstG^cRSCy9@r<5wcALmMfO}ch zjE|E;O#1P_4)IeN(w3)Yg-7!&_&aama17H;i_S#F=0D4Xt}yM1x_ZJjeGF#5l|bG0 zwTbNI`SLkskdHmln19iWej%^a10m$d{5wV5YB+oJ#TH;RjZvZ4JWI7Xs-1eL>wji1R(V${qqKJ)4A%uT8rrx=_q2t^O5rBhi5y^Pa1D!`i=x zt|WIS7Q)$pl(Dp=^H7yh*(t>qo`(4bk2pdm^Ed z9_qrC@QCXnl~`%V+Sd=`tZF%?UhVSN)G(I}arHEd(6FS*+ecknB4@MZHdK_}{t0a6 zG9~i8t;`i&_hL$>^_${|mNdfH9_omIL*p8LS!#%2)vcEcCXGaEhX-&xGwKRmFFvl@ z4;bR6rNxj|u!RoO_T%W@_EcRP8(IOhYaKt!X|d2oo1YP+B#~D*s?zE1@$=SuM3dz_ zD(D}5wOxT+PlJyvTz^HRK2H_foUDgTKG^fB+AJem=`Tz8{?}>c(5G6v2&tpcCaQ`J z-aYmCc*jF5BSxAV<*^de|NBavuff`>sVvOHB6$3tdauED0$KF#meMNa{SWGO&eX@< zsutAjPAg-6P=B5>P&~84891z!a76L1)S%7w7uleW4jP($-;YY9}Z!VC$4i$DtAUt*eOC(h_{IIG9`4XZ&i z06ml_!tV{Oy#0K>u}o)LZM+3dC%|7Xar?H{*&?O6d+-+xQ_Npo?_@P~Uk$Mr&!&GO zBLH%&Xwl!D%2soTawMA&T`@U(O_~r;n$aVvX4IS$b3Onm`q8CryE`Af{H|smg^Yiz z*LlVkzWqdAV?+9H2GoGgN?6(ANk>VN;2U? z_%nM&E0wJlqlz-ix<@Ww*o{XXQm`1r!*`4f0ID-RwH^|@;YLBEFMo%|m8E4*y^WLS z_e`^XCg=nnhL5*PD_e(Hu7fy6_q)bt(t=kA`@V59hNeCQYB%t3jE>Gcm}`iznA^Z_ zqW~H|42Q#v8|A)$#ITjEk&u71T1HjE13DYb9eQA%OCrm#m?=LxLO_sYg)J3f0S(Qf za5%nb$`_xIc_X$Ox@EpLRt62REi9zZGe4ZMZ7p%}lRjEUxVN`E zJA=KGuk6TiPaiWMo$jsR&*EF-ZLT6In*UeaHwAc&)}hYkm&sVzHClWj3!25QP7G?e z$_l`;*6%`W1X~?}k#?ISPY|_qeVS7o(-)v{WBO5!Kxz1TZZp&uw51H{qln4&(8k-8 z;*ue{4%mL#W+LMcB2d)U-^3K}Q@Gya$jpK?yuYZ44*GDH%_^W_EaT=A(3G0;J@?7a0Ww83`2)0}T}e3kw4mAD5Jr7Wn@=z)L>> z69QxgjDmqw01ze+j0t=h1dzV!2nYU0)qfY@t3F^v2ngxr2LSRPS61@M%F13lK=^TS zao`jI7gTw1Ko|fh4ghi@g9;)5AOM0AOc4`?3xMMUMFD`@-vJPFptuqa91aY~5yhIK z2v7t7IVyr8UQb1!P?6ERJOF*g^Xjg(Q}2&D@0{_FbHj z-xp=-9#SU0Qj7ldd=p3O>yjz&*3Qj-HLm)u80->dz5rA%az&n~w)Ha_y!8v;NknJv zbE9N+1b3g9cf)|DT>1l=v(FS_vk91^Ql)z*Ahlq7mVZ_LZ)&IW`ebEK4ad(1Ey1kj$7K}@ zxr+G?4A)1<@t5lw;mhr%RkO48)#=zSah-fhqcvXBBYO|qY4)`ri8b>o@@1?1Ik(kD zTjlBuZ9lbU^w{HrJ6xr}sTS7UG6&fWp6A39lIQ-#vV2Ko{+9Mzwl4se(N+#?Yg-;! z$;ID(Zq;MnhxNucTc;yG$lY6x@WybNWYT8q?kQ}3a;I$D^NysUOZ?Iav^=I>bdwxf z=$sfyt*)yq{hRSzqX3HFIkAQ$S^??ibQ`j`#rtObRWOu`lE9=bM zBLg-EF96mIkG~p*4J`_uy1NdN9!VdfW{Q(t6_cVzSK$Pkf_bL%=ro%rpC-Wv(O)34 z7tk|ziQX3g_^!MK_ha&IR1;xs#V0&wF6&{0`EM(}_FI>1KjelzmTL(MGp0l}jtOk$ zn$2c^8=D_Bq*^?tsO2Bpdv#Oo91~B@byp?x`wdvco$rT@9`P+kR`D3jR?7XJRq+-= z(pr+jzLH%O>;dY#<_wTK6w8>_a0I#68IgQ)O@4EuMQ1W&RJCGk#P0uUImAE!=+$uk zhvk4?EgAvwUv>k6zS_%!A{c=9i2%^I4~zjO15g1B24H{-0Jv{=3J+itSo76E#s*Hb zT+Yb;!VWAg_GiC-LX7_TnTMQJ%AQC5t>5E>moQIQq^28steZCKd~WWtd=`FRF}FE! zL#Ys|aoVS6j&RBAFGxJL8A^@SBUU}Qn0BIec!R+-fx}MuvdIitq;YG5{S`L#<(4Ir zOKtxTy}>{b90L4*=neiy@B0WC6u<^ahd{o`!o>T){UB)&7XS_l4uJ5>K0^g4Acy8D zbtMKA`nWeNk|X)Y>Qv2S$!QR zOqmL4zv-b_j+B|I<8&2|D$9I4XbLWOlU(Gu5{LUB*Pu;%A1gb}QYD{R61=E(Fg$tw z6_jDCsnPR#$KOH$ARrtF^y&%`!2fbrKoA%JhlvG`O-Y4=OU(|2A<%GeiilFM1^lP) z0>Xn|0DY3(Ixa1kcux((Rq70ma86G0#@ohU6jpVc7Sy6ln~$J=OE=dBGRJ$!5xoN! 
z22nHVS5HOtoi%@Xzj9Cg0%8l0P#LGSsJ3o-()iy8VLYYLM0gzH_|zszEho&Tk@kPrN>Fp zv7f2AM9kUX<;A8>azoma1)2*)P~+NJ%{GgaaM$$_ng`z;b7wX&6;H3?7)gGwC$#Pu zq1%$lAW5PjY}wd0elJRR&y<(qPkw3Q@+1FSDhJ;2$)AvgDTnd*<%FpWU$B6BoWvBB zMhyFpp9Ruc7^hAO4EH_|!i*Nph0^erNmM(P^}iFfcW+CGp<@4Xa;8c2tZc0+dm}M& zGjvtJw7KRIlPTMFrUf+!<3t&kA(x(%`F_@uEk_p6_YMgR86R$ab7H5yj zFu-EPDUi`f#rYEZcxYB9nx(Gm`({e8My?Z&hxwPdl^*4#Fv0XM=?;+H`*)oWk= zD?-~-&33!gcJG=t`rkvMTsh)y?98IvvEuWxDi*uma$2mmgEMecN2m1`r$6$$n-Z&p zGK#;Ye$$^Zbw@s_p3^Ppz-z%>k*Mz+NbP94^LkjW@>(v&)w)bCk+zQCq0C7SLwx}_ zI;9aCG|q3$JAIQd9ww??jHIVtiq%?dFltNR(H{Ln$S9avabVsdA#giy`>M!y`Ur_SZfo5CiIk;b){DpqF0*sqD9R7s&NL`#maD7sN4 zwYxaa2(_J)3Wc{zKj0}=bFE$gG+ED(!a$80T;t8R;1B*f(WAF`0U`)n$xSwnclmV5 zIum~V1!*-gaX4J{-JP*9rnWFnY*{pK08+7;>hJ2^UaH~_=y{fluG{a$vbDiTx4S?V?m!|zjfJa@0+kcT%}kAq1~ z4d+h9A4q?Q*~3pTI|v;`*Q&txFqa3lBBRg}g-OrU`taYyl2Ph~5;_Pv-?*TVX~V^q z7vq>vb$6YE-;CUmUCj6dcS_+a$lsag%a0NUBr)Sy^NpmWY^@(m+Rn#}AM~P+5 zH8bLJn~JmM;WiT%Cs8T*4vDuT{2pl45}i^+m<(3>;dUCw!2*`N_{J%1#8}cK@%iMN z8M}U%I=!CrTVc0MYx(+wMM`CGd~ng&3ex~XyoYotGp-7=gAq*<23C5qqy|;KOB6~P z{Cp-fDzjdPCgZT}NMjXL2&3O7ZhjXNs;inBfooGr#a#I@U+UA++&)6HDqNanR><$g z_pTV_gIu$4v*O1Sgst9Ocz5QsgEq?(Q+ zI~=jruHjSEcgL2r;GqngUnXXv4t({ZqRW zv6M7hX{ADrv}?v&9=S5o;Q_s;dvsWM0eEeR5jBu4#xIPnz5wKWBDDlE*EVMw^&_54 zbdtYE4O_puc@Ze!)x*DTju9ZQ`{RGs*VmO6lad{bC89)S=*SV6kPpQct?QeEqvrf% ziTE9d~p1J4V=?GDd(|IPdUg|hG}gqSnG#H zqu^{MeL$ktlA&hWw$W1$|ch{I6IspnYp}Zpxj(WI1SQhmLq8UTDYfwU=y&P0A*|28}=%hpP z&6I_xjkiw=f-lxrEa{w01u@+d=WDPslZdY~UQr$m58+6i%q&E5DxanT(e z!i~H+haHn(syaG<6Y$G4Ke-_uQ}3WINE__SCW z>hoqp{*W|SX1c)eQS9q|Q7y{kn&`pY!rl^8Kr0*FA)YLl?ME}xR=*2x+tx4nihf`5 zH`28Z(i#=Kob7%Oe%SdeP&#DDg*^<8RfeEiYl$y->d?9ceTfe^ovg>qtRUt8a`6+j ziu|YI43;`1&Z0jY3r-=(5|r8$%A95XsbZj*)@C+{2cE8+Ezzc!n)cx<$*_YLiL`!S;VX zlKjBPe49>NE&PU3SwmcXr@jS6;iuBf#G+aC0{#9}j+u8F($f<$6{J%YMA|QOY}ZQK z(ohlCZ2|ktMRMSB+=Q*C`}L6t9f@n}la1|GZSfjeT}H_qce0pJLcoe69p|-_@cf`n zzf}9zV9ZYakVfx{?5G1bVgzh$vV}jNzf!XvAAIvd-NoXOcb3N^Hsf9MgN)4kh1x&X zya1TOu@CGfZrS$+3f3iKQwBZ(&mg1p3Hf9ejZ?w0>(ShSi{=^1I9CcP^3P$HZ(jh1 zf+O|wItZsv*k179VmwK=58h$-(HxIS0i}p}SU^5kF3lJ+In5%>X88}50h`m&SSUoa z9DVLXUZqkb@f;GNvJ@IOh-gk7c3)2rb7pf{-{hAJ=+(x_~Lq{^=yH2Ij;QSJ}-_J_Uz)ZZaf}(2?@m{?D>v!(0f=3-5w<(MunjwF@ z<7+uXfxp;wvbtVNZId2(5kZ*ODI{e|;dsNFN(0c#dt61})pLda)Rjk1CEA&y&>=ZD zuci&$koRJEQ0ewy|JHcWW3W=@iTCApu%+LjSs*@fUr+1zY zpkz_|dsoS?A?Vk|$=PuEFMuSp!XYBCziHV+HyPgjIPZ>ivX3umnmfEOJmyY=wNi!g zjZ^j4rFWE|2GOe7zw5yZrm)hDMpd;dW8t%q-%{Q*Igjuzzv*)*(~6nDgf|`56t-J* zs{Z6YJV0w~MPE9ICHgKuqba~qbSh5pJI*8+bE{|2;Kr4h`cBjZRuHL&DY4Npp7Yh) z{$SAA?sk{74~(E=@)zGLLoz4U2S?qsCJG1IKx`OuVE9LAz?3jda6+b^0^z8hY?xwn zHwJxs^s9Pe%s`u-V#aa>ckI&vW|1N^Ag>a8y}`GTlpCK^w0FSui$bJWW>R}C>|?G| z{;9~k=$tsV6veQq8F?d>jz#WK;S%0_Ble}H(~^7pBM&U+O1kms1pp{z-mOHT6K2I8 zyXmBqW}V`07C#fu8D$9gB9*Zm{^ z-c4}@bHJ1xSKgs0v%uWMwEqG(@IvR|tIt6OUsK>@KG>q3y6jZb^?geI?c28%u|gKg zX@5Qo&TgKi(C{ka2(wh2Jm-aF7HWs;x4Zl*$OxjA_>rcsikO@%LFJ-pe#_9v@m52= zQbRj2kQOnMgiGEr0=<;Wf_XuoM#IR!o9J45tm_cj+82{X&QMaLoOE=OR59$>2{Z7c zk{Ep%a~eW+5)ozJ8b-#g1(i0Vc@$vmWd&tzY8p~R;< zj(*(eZ<_|Biua-PdtV)hXY>KR z`do!TFxJ24J1Lb{jhc>gGxH&q3Vu&F&S(^-?zKo|CfAZE zmL^SPPNUfJdkjB9RrFoKV@|Yl}w+Pa%zjq+2J(X zBg#Wobu?>1)#+PwCBvQ*ZHSEF_row=n2a#hVb=9;W#S{E2C=P}jWsv4PZ z)L6}#aoYp`kSgoI+xkV*s5B>Ha<^fY%_AaY_IY+*&(G-x_WE?yI{!KF1pK<<99XV-&y`e zB+S09Z@T?j97vwm;wom$8lf404qcM>xH_=g$~aZOA%87uH>rRL)#K>(IvT@Ixq zTR`)MG@VZuJA5!rZ{HIR(r*s#_x=3e?683)?Y!dICS3!z+lsyp$g!e0?aBIhGb|yqjar)oV#Uhd|7o6l&m*_cj!@7K+$NH6qhm zKv@n}O!%`JlJSub0WqXtvvXe*&7oe8{<%!vd=UJQ{}Dn0$vM^k4Iin6T!g}3`LTkO z`+gve#3KKbjkqx~0PZTR&+FX#130M*|3b3*@ zjvx$+0^O!E!ZqhXEkm9P)gs@P9RVccZVHihTH|KeDeiAPN&7+`;5H5k{agh=orF%` 
zE%2~DBf{C>trSK`s0LtFmSJcZP~v5M9y~a7!VxD(MuRc8akr|+D#s@HQRCpJg9l9B z&bb2#6x$+36_8I9(oo@pxW5;kFpCiD*6H$wD1z;(%PXUA;YX?n4 zP;uq*uo=&e8z~aTe!nxDtU_~BVb)&VgBF=w(Q2cHt5^lt6)B2!zaJMD`AzOCtMU%| z0@xuf#c8mBArFryEdc|l5yo+;QOt?NsJ#I}(#-8r{KG@FkEj+~_n^%U48WQ64Fa4( zakc@(Cx*kKDAY>(28}mobY(;F8|Kh={Pa#5ur=4={)vxQw%x%=;NQ zlMFQ2hvK!$r}i%XieUOk->4}_ks*z!_{;&~=V{k%9`k9*g$(6w|2>o(>YVVLJm#lat5j~~{cNk>GSl$|iCF3RF zaGD5iLBqBJYbnG*8zpZ7nZrf~`COA!whU(l#+8Y~A->7`JhR za$=iAN=H0|wLwLWY$bB|V0KeqttD}Ze$EGi7C~kmKb^` zwaZA}3LMljL6Rw`wuB2(8GFNIbCXUt0i|8BKojo_fwL?)Jf1}L!#|CmqX7*Ak66z5 zrQ1UEnh|?x6zRdZ0AOH5AWk5G15`EheKPmPW`9(+R7$Ls`9!pvd|6*F#jSgk*l8%= zHwj5|_6BgK4m*AT04@-Xu@wYQ<+6-=6#!y~&2~ZpzO#kE>RSo<*b~dr-3!gFGF%vm zK3|2cW#toCvIOLG?}~;)5uts6+n9W)kBcm?+k@2SEw$N;Qk*~cv0};9g?34(sW|ot#mX9qUbg5D=-Tq6O$OH4>4|Chr)0 zLrO;|bd*9bWLu@a2|a`x@PqRuA!&O7nE0f&;eQvM+T@D%?wzy9McE?zfxNs()PRo4 z-v{POTW2iNJ!5 zb%P+BT1V!NEt|wB;1QnyadB{XND3DX0fRMNLTV{Ob$uVJZ^Qtk(5NV&3O%DBfWdPj zZ$p{HJYW`M7DAO`@f$mS2~Hu{W5pbaK$WtWbTpc1|wr{vJfSNL@+cHrb z>7zo}waNvI8dWXMm|^xud;jY00F305ZK7OkKuKD zIVH!;T5NwdVmjaa7J4s|QWqqhklm4)hVJs2=sl1?t;8%`C&X zAaw++!n45=)}W|{iQcG6B~-jV$KV4Xpqddjy$!gESX85?Kv@j$6yXzQ`V6KxM=FF> z3W+23yJA`fv+5#+@b%1r&X^A{rZ$n9E5nt7>S%eK_D&r;Nt6H(-N4DWSPpdt?M@^D zy?p)YycG?&f&_G?sT49WcpUUG5pDV(6#bOEL<_noAsQ~@a|K?vhrl<8FdgZeAQR_G z_^VFBcoIX?cml&EQYiFRRKE$=BsAa#J^n6GEua^4I!PkIrsZamSk<4HCS`m7Wgs51 znQIvn2{Cd()nuJjj_OZ3Nfl%#tfQbvIwbK}B@A#~2SOFWSwG7(I~a_hb&Bf=P!!s zOXKZI;D&)B$s?9FqN;NVl0U*rBx(K?rEkI}df<;aA9PDRokt0pAktsQx&~}Kj)-lL z;KHg$IiyVk=#(zF-k@;Qt){#1Tab%pM7-H}5TeMdvLJH*RNENAeF=i27cSsNECa!6 zQ+3?R!hz%H7!MOsAq(a(%vPm9c<2O*85B?%b&!?=$XhXdE3pz8;2Q4=h1W_IZ!ORy zN}Gv3M0dn9Y4I=$jN&pFO~V8u!>bH40-<6h?IL~OzVNa%l#juA(s~DrvqUh_cC_-o zMx^R73qwf?WRSt-MrLr8@J5@Aqts36o9T-W;92w|J>Cc+{t=Jk6^KnB=SD#c5u*^D zIC>?7>=SE2#34FX0hbPLf!d~CSuh|jB)4uJXd#oTVzv^EvKX(3yr@G@2vZwp-$a-m z24rY0Yh0LQ5DMTxY5`}hkya?H2703Oy>Qp?Vo8Vi5)s+C6cvwR`}H6IsnIl1gy`=N zrdZRraEB2A^x)4oXn|tk39znswmQ-+%u&NJ3cdn|lkY>*+yO|Y2-|rMZ#763pAG7n zARo@H@TnoQVcN+06U%7N@LQO8&uN9dKY}f+zNv`*@-Vxb>Jy=YjB(-^i+qj@=0p_j zd)$JvLs>~m8aALMWWcNEIbJeBuN;nXj!1N&w?bLIX9FR$$08U=fL68Yk z8O23|c)UeDjerm~@mtW)Is8(rcs==O5Cy?jP_Pl;&)Q%!Ei%q!*=0%{QekZgC{~wL zK7qn>kR@O{q;?S@*%8zXgTBG(l$f>Ijt5CbfKf%n;h7YkYs@rOdKf;+z6 zhM)0+T%@e4lr*t8pJqkS4Pkq&YZ zBVl<5O=C3GP0#?#{6HAiB_ahTP|;N1^%QhjcFpq{swjmTNR8R}F?kBn!ro99rNr^4 zsG2}sFKIl21fU92-=hj>fP?7hRrhYnY#8v3M;ws(yIi#wzI|ce!DC!;eOY2DukM{3 zjDiKJo_TbTi>(y;=i(a?Y(L=+849C>EJ@etK!OcWxmuB0B937ond{AwB2HL9h%_&!NisY|pq$goQ?x!O{2^fF znraA#MqNHg>bDvs`i8{yI|fu5nuB~rPh*BXkSz5RIWyw3{uXGdS)vWqj;FcUH;%fs z-B?`_jg5!tg2FBmfSR$$$`vn8^cuSGccKl5lAeb15{s&|Spc@e*0-V{lqg)Qc7z{T zPd_&zg_P{4Kq~{} znjG~eV@b5K865vdLwBTb2m8NW_*d_z|1RV7Z{dgk6i}G^XTYofA%68=lL)Ik2MXsY ze9Qe7`L8bi=f~Wyf~dLkm=tm5mJUMOO{8 z2gx)RkUl?5l%jnhC?j?3b{P0aJ^wRh3^q4)DcIEEZz$l}@ptvF>7@OKby^XcguI;c>Py zonC*a|MBTnZqZm0p~i(Lqk#dOV>yOx>9zY*@f5UqBz{51uiYv#i3%H zR|aQ7|=HLq6U;!;>y??EIu%7dtUznr0M9d1R^U(Oa)H(Geauq)eD;R z{M~@cf2goX(^-CNOU;$J{HnC$5BOoAunn@^;7#7Ju&j)$s!1VyWZ#Q(lQO*dXlG0- z)X?)W>1*Y>3!#q9_jD$aR&}Gakn4z8uebLn`)Db_2EE1`?1_+Q<)#)E?u0z&!}o2WDPM`xhb`<*OD1ASND8 z(<;!AQ|Kd{LoL+y@*V|^Hx1cE-s?K*dphfRprBbU1sTC>WW248R!rj&7|RoVJn!+? 
zWGWC3%&|V^GhLB8^d}sVT9&fU5#qEa&OnqCLOm$i$(>&Zc)|rZ6Q^Dm(?k7{puXyi%^Mci{^wL=n4|gq zyvU&EJ&6#tiT;(tdcW9}J-q9dUTNF)H0-h#yHpI6%c9|4c;gzu>K*7Q(}^Q5!T`iD zrrswfh9_tkx!5P;K}Via?DUut(KM#8O-&reKEL$pmk&A<{{`*Q)bCZQ(g-4ZNN>p| zv)yt&Ad^*^vi#hyFWtK(?8B~`Rw0La8;zskjgZnaZJ+6((PppL5V#yCW-Rq{v>W+A zD5yZW6!V?!na%E8mXS%jA|bT>%Rm4}s0|(GG&}SYc4R~mDSXZ|XAxo)X^wGaR(PGo z*Y^*O`}Cb5dGIw4gLZD-f6;leM>*#191mcC*H8{?7($h0hdHuuD~C#ZRcuxw8Ru#T z3j5X5c~<;^b2u| z!I75J^DIm42s4WJGA-Cuw_ze*7_di3967bIG=LU5s~uEXhScOdhDW;`cU_c)>OBOh zRi2f1qPf!u_?VCskzX0xkdU$_H=aJP{(T4dv&-qsX_H;9@pmDvnHhVdPwv5{g0L*5 z`o~dX7HuhY;0`8|0^R*K{jhX3IpQ!PagnEU_R9CtN*}iq65QJbehZ#> zmCO4v)-vqYjs0T`w)XI;aLAm;a4S&2LBV(~wGWMWYxnsC*F5E^A=1stfN%R4{T$r6 zL$(ZCKX(Mj4V_4@L!WzXMfUsUYi9#enzKwYMR0eJK|MTl?^1ud|2ECW zul>H+bjqr=APz+zY%jqp%Rw|2c~3O9tTy`w!`K`J;nTaq4*7k2ID5<)U+YUplqo9r z9s(+21+rfx^!MQ|r=}7M+eEuo;nF3no70xvNtPL0PpwjHS((n9WstW)Q#YI69o}a( z+nyf`-iqM2|M_c;iZYiw;Xzh{{o^C{m!;3z1Jq1*;gOHPw8NrzGoCH|mxSf$EyWY! zj1FF-KV41_OdCl(o-g5LT;<`|wt~Be`~T!>qBkx*)xTr<&XaH)L1)2e){|5Ut&w1g zHmJ$c&u#fj5KsDLb7aXD-AC0;kMWp7+n+ARD{#Ldr|yI{_JEN{CqHEilP}E)eG=3i z$$s9F#h2XZLW0lBTyb{_7X3~Vu1w>2k#xsDCO6rf=jqH5Q1Q2QIg7laJ~!9LWdFCL zA3JBPw(tZJrfrVbnOSs*x?&HD)1fQ7&*0%M-**$wd3qE)&lvsQ-p)Oe9M2JOf8jZ= zAFT^~^g!+d2_R9vSZvgujQtLa%g>BVe>3kQ!{N=hzuXbXl0UN#y7pos((DHy&vwtWo&*r!>ny8EecI@MTDcabUY&@r8v+Rd>GU3#G`7=X?{3R32g;AO zfO5b=V=U_SF=_0^u6EbZ-vZnUq4cJnxx4@fC*RQ8I6aQ+ljzE{>0r79 zjwfAKCahO~@U*-$Q4#x%p?7wW8P?cGZqDgp^2=JMN>9ToJIOt;(tw#$fQnULb{jj% zMd@jOo6!W7D9}@CCI(!f+1^gw<){;;jRV2Mkho2WNACD0bDlD(^$lbkn$a$1uRn~^ zgt_V_R3h;_G>ewgF)|DKTetnb?OUR>gOJyKdI-ry@XwX=W01pD-g^%R>Od!#$QB|A zpDpVj6R>dfrb>l1Vi+j&&a?lEXya%iEs4QvwN1ynxgYCxz3Wmw^3ZQ65^Gmz13n+|Js1< z%b-;ZgrE`I?QNd@$0F8Eb?)DAm_xhUw8nFH{GJw9J7N5m1-#hHF{(k$2^8J36vgTY z(f4DApUCjqIO^%Tq&8>TIM|Xm?UiXoj9yE7-mY2l~j9rqn2c?}Z?+dN>=y@QvLyO4ipF3YZRIx;O6PX1=B z=(ID%dqJ{G*k04N-cry}mXK|3cH#2*B0L?wMXjd?!Ic;tu58(B$yqY;6XNv4oq@@f z1^-*SvCwB#xw%S&(xJ-K6^$ zwuaBXq-U*Iz{)@d9~S;ThIWy@Y*dDA%I z3rJMX6iukIPAVHU4}+Rneyj)iG7CAcjOKi##%rLd5e9B-CtHvCQJE$p_@(AYAD1ZA zPnVKT^X5gp)!f}A^rmts#q$Nh*s_8IUT#Ro7*TyYLw&u62wS5xu`xiSMtd05(tU!B zuSFxaS@MKQ>hCVSQ-#4D_tZ)ju+@VFG5mGuUHQJ8`)ixGj*YI?pWt{diKvG)tuuc0 zs8sZzlZ15SU7ijM%Nbl6Op~2YML%$qq2$nhP(R|6;{^ZnZ{ut3M2Kzk;`>>zz61(v zxZpEuOmglnP@I-Ln=7J04OMpNxFweiM(szv)?w1cB?kF9zJC0Ycm7zN`~xFnIr%k7 zOKQioEN)T`cZN}9BCI>#+lFdV2AWj#>9AD7X zKP^GA^IY6R%NI7_%F{kpD%_9b`uB$l1qT21t7;(?t;} z@XaS3!Ij+!pE6HSNFJap46sPQZE)QQZVnx&jH7IiJ0H>dRf~q?bKe{0aJ_=Lzf*$r zBZ#Y+9O2JCF=h--?SXRB-N;n3d}eTo*qX6i{G7Pi6WM11RHegEmj#!f zCIM@|9Xb!`(vUk;Q#pOxR#1XHh}>XG80gnu(EoM?-R|;E$t*D$@QfbzB`YGgVqBm^ z90sx_hH5Uj4axJK_0 zc*gd{f!1b30?i>^B#9h_FurkWDF=&rTZ;*JP3cdHwk;%P$7bW4`KN}PC3}5?eN#`| zJsy0;g0f$0FI4+W-q4YL=&7Scuxabu_UNRGVP{uWXcXXDT|)7-wAuJ{usdZ`AOUp- zT`lvr1Bz2o$N9IW&qP~#zAp5NKXZ(J#R)K((FR8M4T{2zo{aadAALNXKYZ5;ujr5% zZOhP>WNLsf)N6#85S_C6XRc@l%yY0=k8SV*fDO+H04gN6jnA{@ltQH);qBUo{^$UB z3kH%t9D8Yk~kUGaW_etwOVzISJJa$8|XW+NH8@xW`AMXtu?XG$mh>& zPQ>^uyuqmylZ12`MdEUDSxE1YPNG5`8o_bFS(}uiP)L;a!s2MF``m zcGlK1T_jDi<|C(PZL?tP{e{PP-@}}i-0vCaNqXnxI`bI3Qd?pA=PjYwZ11yi)mLMj0HL*R{s{KslbcrzAybHOviQUueG57)l|(+RgPLy`kRW7q2sX1y8*$dB z!mrF0^((XWPqxA>ra~c2XHiWU>kEfRWoXF)L{5ixY;Vf2|%i>P# zZ;fy+*j}&8e*^a!p<97S2wUp@;q>7z>!to)&rD7a_wmA=kNM?a+!sLfuOaqQVSL?p z*MFI$c4JHk`Gv?2pRt@Tn1qp<&U?N*Tb#ZCc>Kt(gZ~zu9sZsDyKrB+tuP$E7fX(_ zJ)O<($GV?zeJ=m(T_Qwf_WA5JqnC*L>tBq!d-X9b;k%j9&q+e4(|-oy~@~xRRb*C--)EQ(uC%dP+Yyot*Dg%(aeksU*!U=m%plobxkSUP(5^-@Y9 z#N|JV!w4D&3;JR|CROCmx5%RtCtLA(aL$D9Y!W0TCzf6S@fcHD*N1bZJA6JN8-LZM zp4er>m9|+o#yh{#RNEeXV@0VdT@(inMAN>qY@QG|H$lq~0c#o}8I`Z_`d>eTpyJ|E 
z1$3AKvci~s=dX8a@AZi>BncmLVvC6Zld_n3J^eHBecwOdLfFcLNyf%2ra#Jl1f^kg zxaWGLKzgN#&{dER5~Z*pm6yGPJj<;f{kl$AAjTOsvLze&vTz>t;SYTJJO(stkRPbRgzD#I%dwH?2NjknX+X@m zg7F1LXU~o}9I=0+!scmkL|UKjDeAt}1)Ip7T4Wo+9UnnSoN64V%5LPWx^x`?l{Xzh zW03U9#^eABHu{?5R>8xsTc0T4WWXgAOMX}Cp;)ydt%@!+6sSPB_S(`hyZ8h}LB#mM zdPtu~05aY+?KZLNCd>#;E0}b!tTjtT&g&zpMidaKZQY6BtC_-wEJlQ6`u(!%sNgGc z5gNV|wC%o%h(K`i{h@>NHu6Ul7k{+J%^c6`^az5Rb zFOFJLeE1^~1w;A(hoM8D+KW%}#znLwCF3^@9)WoHm*R$e%_h8SYIb+06?{vAXpA|1 znqFNkH6VzV zm8M~^DB+4>j^7RD&e1TN5Ug_Or@%ip=Mh+%UF)(F){YK|Yr5+m>v#o`3mqG__uf^|U6K|DxCxsF$ z=HXA8q^Nc{aBz1kjykG?a*<;KBhdkT&x>E$QxuT_JZcd}!G(ZIQBdY`AGbB#AWwHS zX_hovf0VN7=M!#1r|whI{zZ`I7hE_SWsTNBxKW&fA^GJ43Y%o3)_yNZHCeRAyw}&7 z1V#%o96jYCxH?q&G4D)L-~d@$sZZ#<2TPQm{&Q5_?;mg9s7h!pdY;(54H1wY+q;q< zf`W2=zSKS?A7BBQkwSLny8FqiX=AJm57P0(8_(AH0NX3TgCu^y5k=5vrcqP1Ku1CQ8S^oeO}$r8_bP$PEFXC7{9x*+1K|@889mEx zjKCqXV&8K}AA!qP#VIpp`XZLZ=VT|!5Y4m}X@cNHC%b+a780t#fyWQyWILXfHl(O8M_$5?p~ z$C}{4QkEu3k=phC-nF$pzn{3QIU$ID@I%%h&Fdm3QHvpx0;xuWG}6jl#u4tdmhwUS z!TWoNaO#xg^632tq?-wg7DN13bm=_NHeW~7m^f2yNkJplViiAG(tYc5tD(SkF*2cF_`5>sySHLlS()AeGkF6EHw>80(>rf zmetfN{n%s6=&pCQ%U2N!|_F#+neFq*k`1ufiJm`v9Y zJ2TUUzA{^pg-1W!a|Zceuj+^kk}n+yHDXT#*_1JdN%39>{7Qa}tkoZ3739{ulT6(Q zz#CZPoeoe~33baOAh)DLgQBeTobK0Um~il)u}QCmgt1WSEBY(K-`N5K`iHFA5RZ5! z?t8jh#Z)3fX>a5WAN1(_pVMo(-DLodS{+kmP|{%8I|c%$X87qurn`LROQ9bD>jq@? zd4QZL2fbv>?-e{b2#~6fXQkeYb!2s(`xQFu?hK`0>!|3yG-Q-fWJ@$S%lp(PT?O z0_&mFu0;l<6tO0 z-wPlC50aR&3;T2GPJ6(x4GzAu`@=RJ7JZF9ZLte`yw{^}bbOdKR461T2@CLuXogrF z`2_a?ug6dsyMvHP>4aitC8LJ>1!ANRk{(Kg;OhM-|6r|5<3~^i|F0U(Iw-35`{TO{ zOYG9!EZv<_F5TTL(kv;Rq6oWm*U}&IEkqKsvl z@BPV|mr06b%ZTP&tMUAu-SYiJ)&3z%hK1VLvS7(;<5~tA(pE)9C|p}(;%@u+YL5UP z{h)9cHceIi_vMfweFS-0oMsa}-qD(*A`Mk-#Mu+O`TTe;0x)@p%XK~J_jjYo~NcR9Jxb?%x8^NO)6~;Wsgmc z+_%gr^@Cs5n8>Rxbc3Dl77Jj1I%vri#4rTlhzq3>6g|;}C%!x3g$WDJ>cp$^V5WrZI6**PMw{Q||ejmuCcil!ofY1|DAfp1GQ5<`!_>AB94sPzd}=7zzy0y0w* zRpPsLYn*X&KCGFiIVB%ah#Ogb=cIVEI+_p^5qZ+gA;fL*uGT%AyH2_W=#DChFE-qiUq_ zO77<+vMk2Mizsh$y(>%K)kOMOiSv6^-Hv&*xMku4 zVOVrafpO+=iaJ{G7D|H5>|6qKse^>lk`c;4JKv(Wk^@Yc9V0wqJfYRIb=T2GNQ1oR(K zb~C=#1(D$EqFVqwLt3TDo%u{!9vXt&wC!dEXN7pJUsY|+Vh;Hr$7VF=ZeSLYYSaaa z=Utg1-gwQ%Xfw}_GIz2!GM|ZtCYs)isV~0a8`@-gh6KsQHT#*67eKL`eLg_b_KV~R zFhs0g-?1)wXsb{1>x)R`8GBljU;d)1M|4m7V+HaVt-K}igT>9-m|JDtLG-rw)V{1z zE6Qg{%mAXEaf|~!{Zxq(Nma0_U@)z;mUgmW(Q(v^8Mrh9p7In$Yzz!p=nO1c1fUE{ z&0%As^pa!a(?|5Qiu_alu}K>=$m|-t@`A2Z16B6pIZvI%HA<2m>2z$Us`HWTXB+X2 znH+!@O1MB26>IYiOhFK6ZvJ{&3qVolP~Lv}ki%-RHZzkASwnUC8Vua<2v{ zYcc6lQK~Rtn+7Y8HM z+}$d#yBoHvolr+kH9HA{yXsr5`^Cvfd}mKqG%V45s^fh%1_Bwyi{5zzu|c9{X^#bl zG^OzioH0@9UcXI7wcYSFzq|u1^|Ew=3^Q0dQt?Gpe>DvSZXm_P_rm8YqT9AUMkk!P z7+l7pHmy$fNMbmLOj2cD#GMkN`hN_d7)wq)+5ADDAKjH{(>5p6;I7E4@vC3-9b4o{FP5xNXLURK|1dM*xPooU@e^(7i znJ**&V0lE*+OII!4{&L^gbYn&)s2S4?*on6%y?s72beOg3`@SeA%A{ zJ2dHs^bI%$ew4qDjSJA=di~%4v~M}t|A%A)ea`8+Rauo}LGHCcTOscM%-f~hBO2tE z<-fAhCD78p`~6w?z_4b^yw%ssSSynFa#Wx{^iF;_VWJ#?}HxZck4{>*T&^+HbI`aANgT^l31!M27o;;B_z-&OBoGa0DyCaHmy z^GgMds3_L#uaJr2Yz1A7P0zj0kWsr%++fW(?WYgNf#lTVfwJWsP zOyb>~uTX^@7roXW=YE8-rFgGY%QPines*eHnLaem4B+72C4=yA)+E~{KQwQF5)95P zLjD4-=E$9YadA*8MM*8kR!(QZ#R%tuFKA0DKRU;dwU+#9>B*uBjqbeFZwvfoZ9?gWZ9$5o`Bx#^kPly*I=oK%Lgw%Ba@-Xym&3rZ%F(cLS7 zA2`TR9nR_Qfp1Be4*#rWAjFaxYw9lFe?3s%Se~#n;$y2_-4BFWm&;HNH!SUHTL6wL zT}Q_1Ua}A=8~$i8%X(m`K%upFq{?GQIROU-KONylR1^2c7U)Op%nMh^Tiy8YSwjKU zptH;~^Bp!;6+v^L(}*kUsO^`R^0pv2#CFMZgK}m|OBUsss{*F>CzVW)9$Ruv1@Y{4 zMZ8@AwJIkGa1M`u{2hqTh#Hk4JA58h>%m`M+4hmVxa^xq4>q5<{sl*?qlf8pKmS<( z0c+F}WKHzv4&}uyev+mALU4WheH%hl=@YqhZ22_s_*FbK?;t7|ZZE4&XD9=`ZdVYY^f{;s*MSam3k_hb{uO^!#ju)lFx!Wwea2Q_>cLJJU| 
z+|Ck9;__Zg>WZKjnx)gcTp3zLWJg4hDMw+M^Hbu?mbi}eKffjTpW6}u1Of2=v2+Oj!E`8SC170QQcnMMra?d;5n!p9 z+S$i2<7|JTuw6l_ex{UhelM6$&BrjQ^(NF^E7)C&K&o9)s{8X{D4%mMpZYCnVLNV2 zX=gI^FxWllU;IpI;d1HZ_n_|iy%2>`qgpi|Yi_^UqIjyl+Im_p3OrnUL$_lTd-xdr#|W3>^Uhd*BqiZQOY6_09)`3(UQtEN zTXbJ6o!9;XsO$ZnRq?iz)z%j+H#3ueX$3z;vT4~e7p}dP6(sw(9m)!RwJqGE@#}8u z>+!ZzFQ=?ya%&N9K-a3D;hm-6Pp5?EN>f%nbQ#(1AN54KWta6cNv(L%&VF_@JTk+J zDU$L|8TJ?5Sj2LV0VJYhHQy@$iIvX4*!YVnx|Ow0 zGLN**uU*A%j=#x_&87jq!`xtt5}7TkGov9z+oJ z+uVT&4Y8xBb>0woKWBDpZixA)-NGf65Ay%^kC+9g#9`>bcwkZ-tM9*3`ByA>lvEs? z5|V1Fre;n@_!QL60f8~7tn7-)8Lrm0{()IAY~%Pu+_7rze>@TpkoYd(c2u3F-IK|z z#d8$Ttxcy#lDw%6EkOrMa5@jS6-rrysfu`{Bsl{HP!T01 zki3a3vbUrL-ypj9U5LY6@i--9i>323y>Nb=M4fmCWg9I>phWy?e(E*sz zrxqFw0daF-+kx-f;IrA>14n6Pll#fKAB^5dXfC`6@()(Kr4_KfkRPsm#-7mha@V7g z?+Kc3+2@p!h&9PZk0q{->W@0#V2yeE$_sz)N77fIVELCi%H-3QRbdBM&cj5R5s#DW z&b+q}`SSFF_Cc3#j?HPa%OnVR8L3Ea9on2-aL|41JhgMIc_&hico5(0WBqSu~; z>eMLe^vEr*FU8nk)hBNf&CoF>$L&zaMgG1zKu6?f>gtX^Fn+5qy%nJ?S`{{q+<2@*rr_SA!EKLk7^T}%iuKMyCJ9{${Cp!6LH! z0@)`Rf~525G|s*=G6i(xkNFKQMjD+}Y(}P2hdBy>KAqto{LFZWEM?C#7-;6d^B7h- zxxe$?@hRB~2MeRq;j=Nd>*<4aCe4Y>b4@-CQ%-)b4|)PW!tdO#J=n=a#+EU+^7)$R zpnc(!v7<^cmE6f4mId%r4Y>_#UenbxXaFDi0OVe9AX=44^yAE)x}W%`28xsPL)Xg( zThQ{Bxpfi_79Z)o&Nld8z|eY^uIzN-6@f>De-v@eatY4_rl^rRZ1%`AVAgfrXn<^< zVz*Qd~PO1J;-*DIepl|<*?|;-c0uDk> z8VL$on561|LWMhCB?6q=KmXx?xcb;|e)}+a+N!dHw!DcB+a#nny@;u3GW5VyE+%Qx z_2l91xDacT=7v3K7q1^)^Zy@-n78U^@la2gizxX{&g#aphRZ7?M_Sp8%7%t+ zE-~E}cjV~57l|2IIi53$s*Qn5ICeVUTDJ@h%)7xq1LamZ1M>KARF*R&^|qK^z9 zOdKB$QWCVEf32MJ-oUElkyIZ;cRx%=XONi4?_yBcRXx*cSK6t(Pgef&arStLB; z`QmaxdE_xgXJmkW_7S?SZnZF`F99G-3;OSJZH&$18g-N|xP^o)+mTQi*_QgI8%zxFxE9+p5b%+9(IeGH_5 ze*rcZ#h2#qIB(XCPL?P|`tBGdz1!zrx0s=EtjT8(q9{m>-)H;Lp>~~lTTm6l99`K1wmkA?|$vq=N9n2Mx0zieHPC|x6xQ`)tUk=s>#nI`Z^)!x`F*| zJg*8zBe=Q@i9CPWYXOG3LAm%jrrj@aWu<}>wSSB)9C`LX#@2s3Kq@@S|3nR+8YgQ< z6i$Jtib`i3S|PiC228^x4sYFnNwp2k{!h~I{z=-6Q&e>f2%m`ld$Yt){J4NRqS)H6 z2g||g$!i;5t+2SeUu*qMTg~F3R1v0TGLDXx=dCG3bCH;0t83BLd4KK)&w3REF=>7FP9C#qKdu1Q$rqA~`FDN6e)ck(H+7Uf}k#mg{B$963bGRY)L# zRD#nOVrJenSCVW74pX(@4AOEOZrh;YbcxSQEtHH~YFWeq;bfada|nM* zm?idL;d;*o6_ZR!lDN(BV{blb4dn8UIr9qWMfRjrWS}dW6*2ABq`S9zIWT?wLbu&W zM2^7A5Xr2$aQ?JCi(B|2rZxo{#gsQ@;nR*zXt?f)Q{u z49!UT5))gu_)3Z^rU9w1T%(w*$q9z*7tzJ>LCvZ^2YMv-iv3A^ev-yD+gBp6oJa*L z;pG|qgv3^uXoLmrR6@URLJ8>N7$si+f-v0O)%5>kal{h!;H@0Ev1XlwLH@3>2vE8` zO!F^bugq19z8D0~Hdc}(cr%Z%Z&>AvRWt8Mo?g{bHdAR~~ImHX>s&|0cxyh;Z&;az!TMYr1vWjsHt@jjy ze*qcqy)>jBJ7@yp!p`<_>^-zVA&0jq z2VIEOBSe^;)W+b>nlC5p;o_w^GJE*WRjf3utw!mCT5eb4<|V}=4LZ7IBQ|WQax6zx z%6Q8V0y*?0%(_m*S4oz6AM;p^Z<8rK3}19*acwN}ZC^lQcBf2|nSZBX;{B-)w1yAD zaKEk-s&Owo`)U5PC3JlGfx^rCus(~8Z|mJ`wNi+KVUKESd@kEOz5}?KSeyuxrevq@ zk8L>ik(*H?CtilwSHYaa6dln+p~K!4QOlfL;MkKB_M05jNP#nYdYqqO`QA0YZSFo6 d({piEC&^N=6l*AWR9<_?vO$k|H}iM-{{VjD2TuS1 literal 0 HcmV?d00001 diff --git a/tests/func/model/data/ships.jpg b/tests/func/model/data/ships.jpg new file mode 100644 index 0000000000000000000000000000000000000000..dc192def10dbe649804da0d8e7fe92da1f57c9a6 GIT binary patch literal 32890 zcmbrlWl$VZ)AzlLF21>95xUh8~C>$K>lx!h`@ga_^%=V+Z7TB4hTT`y8;0H?}H(wsxpVx1nJavhXcl! 
zYsKcP(J_{dpY)ZTJ`dSXEB1f-(plX40+aXZomQI<67&pC!u-#~+Fri5y%Yz$T(ld@ z21ts79kU==%{zO>T}g&DuLr-{ZstX8E^_}|7*|a$9P!G}H&IvNer0xO_xrONA>wur zS#ccA|1Q_(VEys&*aptVS&uCs=7$dYT}0(Iw+s3A2b7XY`^vKN!W8-#sCv;eiN-gJ zDiz}A0E<}Z$@+aA%JB*IvQB=NlZbcs#ao%J+UDQND9aBbvbUyiF-R-G4uoe8*r1Xu z{xiAkzBb=K4@MWWvtc|)zLSpkl!qx|^JCDtZ&eqEE%EngWWy05$qp{0^YWsAxg@Vr z{_Kt@Y7>a3AmLj^?y9Wn_9puz>yMWWd)RwF9UHN=zW`+>XF;GgypF}E!-=HIsbClh zLcZ}gjkSJWeHoQP;Kw><>z)acfDyY3If+RfCgR>{`(dku8Dd7=?Xiy^S|{qNe$|;T zFrzo6Iu#qH)dLtsn0~T+)ohOAs=pDOUv{vbyWs0NNQs$`)TzB`6(oYrG#fLT2s1|B zlsMK~1{#Sb06*wes%l@q6dX5aG7-X%<;Cr8jlX5e>3nvUrpeo#3yFbCcUTn8(oIqW za6IkxCmqkOZQ3)jVJXptt=q5t5-(psSm+BPQ7WP=fHP=*VRLAt6z3za%o7)(r^r+Z zVkM{?#ava|@IZWii8fJ5#KhjYCSnZfjy7udd~IfE-D1!7>pyVv;#I&6JDpptPR)ZW zj|8;Te-TL05w(4!;v!z^ha6d-X`h{HSz!|HmwDkVBGo+y1WQnPTHYlS z%?Q6A71Ctd<@054?j1g!Eo3#uv?!b4GGjs+!Qr|1QyrXk3WH*D0%_E-tK+ zVNQaZ=Th@fRso&tG>5-&x};Kj*MR?eHAYP=_v zD!wneH(FpjbfTA?&^B3BFKtgsV$d|VFh-m7blX2y6X=mT2Z!1C$uv%UACx)37?j`g zV1A_k;jw5MM3E{auaRG{Lj|Aaw1uKx2@;TSHT(IP+}{H1AD|fKSmcd?^WN2H^8z&D zdd{5>7WKP)_!s14KmZ*4|0l@d5#SJ!fXD#Q|HL>P_+Rkrm54<3)fjDWP z>Z(^-oagW!J11+&BMH?L%obVWd`rbQjjq2iynXS#;3HdPf|i@XD-BvcW(Uuwx#-(o z`+wS6z{5Cdy+}ViY&d=PM`*&guIWLR);Z-}!J7OF_+O|0C$s%GuK@t?K)8RY?Y~aL z|HtWXl7kkYU^OI~pFQ?R89}l%qSHY9TpC_qZ3lmf4>fY502H0~%Wu4)@mZ#Q@Imj0 z?5~!4Fn$U6$(@!*8QdbXPHq8c?6!svehNAK$ga;P*bkf%`JScPw`2bqh4LnX>IE$1 zXv6i)vhegAACtTP<#~(gDVO8>afGoq>e2mrocb!`o9lj-N_G-|2G<8W7I8`qj9)oB zL{6z77F=AM(P#Z#uGnNPFhHi7F)_=1jvivE%!eyp7=Gfs}UZ zJlv3>=8nq1f4=jB0soF0;C~JqL=gOcayt+X9sz)jgNRFwN5ciCrQ()E!Y6=eaPsg< zNU58-(MfB%|KD^E1i?RF2;7OEcDzjnp31*q?7xM)*15#4y9i`W?7zWqB*=x!20xmZ zWGa2g{0p$z_zTd!bCmvbu(|H?8vYl6&;I3RXnago6EwAPG{_{jEZDiO8Si{b-a18N z8<3|e{M0aQKk~$;o7_1;dV%~zoApXAmRZN&(4KS|d+5Td6)yX$TGl()y^qy*7uLWW zpQkK8@e6vh^A^x$46;g`FaTJ`k|Dp?Qu&nV&$pB3(TlI~-+mG$O$zY;^C^e9oz9X- z!@q4u{G@6oS%Dtq1ofRbi>MebrEi(yF&h=v&)LYqr3DB2AWKyao6<%te$b~xz15>) zGNXjjx6m!UoiTyOXa5gsggd(n-t=43x2PRv2OvEN2?LVMn68~Ze!+UeR1>BYrfX(7X#$Sa%m1W3TL1^|eCz z*=JDZ++go)p!AxyL6r%S7^@)veyKFHJ0wJ@=! z`WeV1eFiROe{+f739z?_>q$h4a}{bfw3KDH%qa;_8&LB33%ChkT=%_5%kF zKs2AG^<^{u2-;~YUWE64j)(kMTlKb#Uu11C9hFl<2yqF*8rAFg=@?7P_$^21W{83C z7|T&LlTuwA*RXlqi}IJt2P}cSo*k+-R#|tVIgZLoh>b{Q0PNsT8Je7p_7~gL*tF_Y zk(qggG8}`7S=&Bq1`ay!lMPE}uY=g4OW4G42kX2u)P!3Bx9&GgLDko0$OJTz*3RqE zTR-w>@lg-aqQfMRfE*He^bjJ{FgZYNKrgF5&w?4~!1#3vayW6!()iKbnj7;>&GQ9! zfb8ns3RkD*(CTNf2u5<~L4(rm)UWCs-Q*sj#t${UVx5)BaW#^E0nfJ6j}@n5@X7T` zp`VoX&}(Iuxd(3)W~%~^@%wkq=fjm!PQt61Hm8dOiC>S;6kb*SxK}^rYr}DBP~L2Q^?C@3uJ)g68@WYr}))~jFxBk~PzB=PCF1m#rGY-PST zSqpBM&>i51A4J!)8l619hkQjK46t2F;+=6FvD{9QNUona9bwj2fY?>foD~z$G#Phj zERFtF?RUHXO3czGZCrgPJLO%x^BB=X6v%7}0n1;AT^Fv#>Q7r5atRDqll|n~;*5X& zZPh{D`=z}W>sy&ehnf{zI`YtEw;;O=WLOR$6= zWu?-N;LRezm(dgRxwSckq$3+?Ni+e}UieiXww)I~?X#p#7KtgrVy%I9P+irgs`T6xXg zDaBEe9HoYl&K#Qi2c(E5Hs$_%>O?eSx^yvh&WQuoU2b7+Dk`LMeH_|&lMO5tD?vu3 z4&w>b-f!n$+3i+*Xl!>G{SGcqTg7eYCbug1^7?GBPLC$HHjWsiw)riOY6TqBW*y=z zW~v1nb zH{=A~PikiXOOAE){YO1+_r=!NG)h03Ls2FL>vGo?>~1|O=Cfv$&t@uAc+E;gNq8cP z2J+fiHjmugoyFTWSJ;U(h0&u%?%M!aTdmk(|4MvzRMUg!E5DoI{z`>kyYgAQ+0ps9 z3v`N7%}7t~DS7QE+Qc`N3o#D?$#cy12W7Cq1Cq*sC`n$<+q*xIe8Q-1LTRL0kr3?C zSsbg>+CIqJDZh+8+H)xLfK8XK{#PD^y8?ynNke4v%`T%?-$Q}$Gs|tBc?knjny+`mc@F$M6QpTE}TVtmFTR`H!Lgq!u`gv1#gB1o$d#T zn0>LCfdDBGXuDx8M#DWp*{(^J;~>B&O!fZund~8_ZDeNj$DL7p@iha)n>7ykw)s*w zrx{S;rh0SmUf*UhOc5=OJjQ}RsR|{tYA~6@QPf7Q13gDjVf#jx-D7T+@{3{9J^uou z54DRP(ZZ1n%UYPxgQ!1;AhKpndFRX>4*!`wlE2)8rMw_|wdp0N?&AL9&nR}i<(vrt z@Ffb1kGD(!X$!6ms5c~5D;+k!8u@FcK#(kYz`ZG>Km|(&WOOcf&BKd37>yS*Wp1c# z&AfPev5FBP&9m7wcEi76gy_T99Bm~!uH@Be)iw>8a~mhxZH6FRM|VFW`S5M32y_U! 
zL#EFb%j9#D$q!0v#NM}``0#2(wz1yHIHA`SOq(V26fS?Jx&*Pwe8RJ{?zETIqcmMH zGk1Zq$~UKwmP>c~B&Dp359f-iWb-WV1RGjnz<;`puP!r=Cv4;1LEB8qd%}v%-r&Ue z_Deofs7Ybo?Rn3616x*LJhBk8)4*30Adn=z{L`v`(ZaM_do87K>!=ZH;(1D1pWCx7 z6(<90m6*sPxyCTHF^WLbnakD-&x{C~{5i{ilB9c7AT{M&VW`FYhCs(7y`eS<;@6C00m^NL`X_u zy$2pAuc?J+@66TzEtgTm;r{|ARp0>Ret!Yy2+0|gWU@iKF%={NtD~ZY#p7r z#$aAODJ*cFg77B{g~=q9sNqB3=(|YrJ$DNqzMtPy7_TX55tXb-_H!^p$?3F5>1rCX z-rzq3mk$LnxkP)|e7St449-8?W)}&dh%xQsg3sMK38|1+}3uLgP-=MUV)`7s4n=#S>g!vsCEdb;&m{Nzw;rN(mmVca*xUi!v{ za$3dPNeP$n-WwZvL?ArEkd2Nw4RdbvOcx#k|KXeYFSURRLQ9`e47NhYt@g!SMl(^u zmZiY@fZQ)1*d2U@*wIRc*H?nGA@QOdnBS!`_$V?Rm}E(FMf)hz4~19 zTl9%C*pXmg6f;ibC-Egp;aX?J$P61-y7s;GSN#?33?gSX!i41BD?)q^vg={?QD?8= zT`jU+UK-SZKjM+w*~^gKMgt$FFU`4tMMJ-oYZb6WbB?dQE#LUKKtf} zq&#py*-1{;s7&Ehbh;AY?}yJNV6;;~9AxulZ}8)CV|T) z8wzB5usdar^MyP&+VeKl`=#5B(^~9tW+wasa|yF)0S9u=vp*|wQOHZ8y;QqeYP-nf z`r)Gss|fOCOxwr34Ejq|*|u(q;;3SdHRj-Rsiq?f<8C~@5{?Ln`lcG1F!p|f`6eW6 z5&tyAtjrcI^ZAPDB!Ze-tu--(f*PYw?lfz=%Z(Eo*6wYaArFB%3`Kj|MwN9o<{^ToEYm1OR*e5%ywtD7Zk(-Ln7GaZ2CLBK&37SG zzKH|#(Xeh*7RoB!qa!J}7>L!6Ke>#EShv7NA6zse%=3hmUKYkN1jEdIrMy{+u2iUm z&T@RJ9;t2g%Vd26AMMK)C_nDWPs}^y#}T->eLwCfu&CZ!=t*GS`|QkEk$&Qf-HZn~Rpbn41Y)Xj<@YQv<83Ng1;S(3 zITo=U^0L&*;9?ZAKyBXP{hHsX;CG*bl(V_XppE+W-ty6kq=kTE=1-eO%I_+{sw z{gr{=i`f3bcnF!QIM%+X7r*zfhyYcm82Rt1%#b2S z_!1lxe*9wg^hm$$<5kA7q}=^l9B>Z9M3^rKG&C7v|4IKoB)KBo!fRV{kC`tqAaXb+ z#EQDsi)IbV2E>j=QN;*G*nYY%P)aNcs^ZBIftH#2+#43#Ea-4gYfk>AGXJBFe`K}4 z!T-uW@y&^s3_aRVa&hh3b~ADcaa}T&=qhU$d((wKcI0qyNVehhchz}h{gKfk{x4Vt z!G8gBYrXSMMBbm*(zAfAMZd#B3fQ*~4=O#)!aIEb6n&+dFHtFZRiT9%$)6AVjH(>2 zS(H7hO_fF8PS*6K8+w3#FUt;ToNVW?SD2H!ugQy?6A+?$w+z&|o=RxnIGb5)pLLaB zOceWci|>m2`J=@nANA;T`~L;tO$s1$%l`#L=j23Vlzp|0vK!oFLkX%X9kTec)Y4xH zK2U}WVDU{7#Gcu1$Ldg<9ck1t-CX$&-%3f_>TB|lc5V?~&8a)S^7itH`BMyjvl-5~ zW3>{^t1ox+C!a`w87{^}a0aK=p%7#>eI7unJizeWonpNzxL;YfwcB*wG-%+y8NLr4 zcW!6E@*1L3uRZC$7Qzp)D)&HsR6DyR3j)fW`f8uNE_`}yI27PL3 z)PAjOl1{6wY+|*V8lQw2TIp`b203~dO0(Sfqdi0_Hz@bH9SlG(cq0dR7RcDM-@P;S zr!`X1$^QJR^F9y15!$fy@#b^msSX(jl^Xt!;gQnceo`JE6q`8bsp~o|jmWY-Gus4; z{lUe$sN)QHA(&WYvCfsqL#(wii;G{BZP36S2&Hal`9e9bQ@mb4sbi23=NRB%Rp30J zE}2;tx$jNE`~^zj5ZbPIU(XyP&M1i>5+?OovRUTXH>n(G@h+f^In8C_jpbLys|jDxvWLA^Um6Mv zM+6ryT%boJ`r-e>D;#$^YwV8t>q1_Yoi6)lH#!wd{?mC+%thC|m25Rp6VS7-G(YmQl>!+wFdD)dW^Y9INj>MHj`O zJkw*ADCT!~8uBUI7Se9#Ab>R<8PZ)yH+5KnN&h}S;SdvZnS>|5}1+__yNA?dCOg|9pbqHF9k%&Ta_G=5WF_%^m;pMViUVI=(|s$N@dUy!c$D zg81{8^XZ?kCO@SW#DfJxyA+=Lut58+hSEUUW;I}x6T9aczf zhrk!Mq{m9qp#4)pW6i$dt!K8RqG$cusN6_1?UCO7_z;>u(J6t98fN6+%mWay8KR?Q zpxR;i+EtM${u+Ol7Rj55Q_SgJuUvAYYOGKh zGOj>&TOr*qLhf&Hb|LFo@g*klx~N}B?XwiRr|i}Z84A3RRiL+q zDV9CVl?OK5s*;;HX|)Q3v*oGT+#2%K?`<}luZb*Bj#tdnzxBV)Gxz-w$G6^2RX7&; zGK{sIa-+Rf-A5p^3Sr<`E9cTgck__!EC1lr!@H5ZpA`j-8sb0KAt5_cI0MPiss?iJ zwmo628Lnl}Eu>NWe4SS$qD9Cxxds-$Oeotp=*k~|J->1Z9t^DJT)I;E-79?Fl~gjd zT|M>0$P%>K`Ue+|{Nht|8P1{LCF57!4MG&35a-It*Oq z?2Cu)B>V-W%9FVbkuh#@<`u<@=Szr7{{{RWx^){u_yo!cUraf=n1*URGcMJ&FrkFp{^{-&$YxU}U6O#}sXH&$f{*b>y0U#m2+P_WQfeun=L z$_4(@P2E!sCy(u8aReItLnjve<~QI6-NprMWwxIaciG~v$p;g)$xMkm2NZn2+~OKS zpxo45wvntilfWDXV39}-+Ng+?Z-*G%SM1UpwxWB0+z~xdb@5gA6~jQiFQ6P3`i2F^ zV2;4GA>bMRZhIZ5Cxw$7?~0_7)P&nLm_pu4y9E{GZES{Eu7IzqxPZ|!so+<>E4I4m zZS2H(@{`uBsc7j|L=K4>xzGV&R6=uR+#KXeWneu1Ch!IpdhIslr2J+bmLkysg%ej$ zKY(CUF!Z{)Em`dUqn#ka;-rW1y8CY6kmLix$v;-smWO@Aox-b%+|CQS;Wd%J1HlO1 zYj0WSPf+H=MCZ%Y8i~5lw_)y39?c!3aTFWW#8fmDn6y@x8IjdVH?}2-Pgo?fVh{}d z3XOi#7t+fA9P6E|N> zJJFeTgmq4CQY|KBK52(76)uQN`*Vs?@??t~9B@H{tItW3#=D6bLy*n+?D-KZ<@+?) 
z4IzY@sV-W6_fP`4jWPd^N@$v4jU00%uU?b&>f-M!VShdU0qGw?DJJd7>3RCriS9Z*vY z3qU9>y|jnLu&?q6?dS!dKE+tYd+^5eMWe-dI08A;O>b=UY7Gy+ZJA&FZsa*jLZRM=cu>DzBT8yh2DNjJ_07;+iFo-{vixT95dSR*V6sH)lSZ8&t=+ zLZfpgspbUvF$vxX5taN15C#%RXE7Bf}kzZP>-s){8l*MTq35xMbJMIgE2PlDt zsL2Pu0DCrZe_ReLz<){dPR=biMWpgMnN#lhc!8^c)HVi#?}LW>;yYz*Vc(!*ns6GR z$6n@#<%6p_v5KX)&aE|84@=)Z$ISrD%%yr0vM{fZf=7G~X~UKv@pFJd10H@ISwnJS zP=`NGB(Sz$Oo+@Kda{pyN$d@e)}nt4U)Fk^q%KDxWZQmifvSMms++ZTYT*H-CFzn@ zP%8}FrXSM>P+W`?v*qgxN0jZ=sshxQsQX;&5^{hyO|rorM&#N|1~wd}{TB0_=i-}u ztleOX;yb|J3Og8c`!Aq1Xe~GHdosp>5nMOz1bI0(U0*&$Z6XOGA#2LvUjP`S{LcDE zzlWnk{y`+D8Xs^dty^AB+-*zR_}uM-z+(Mnu&dBM5deA6>y*)(ezXvZ^hxAWW5dD- zC`E|gd^~*9T>kds(oVv$T;f%sUU^2)7v$n31|^48$dQCNkh(@v zpV;zNahf6evSA@;is%mmvB$!Y(u93=@(mOjTMIx$WE*(H7CRh`k@A!%G=iV#-nm?= zgQ*v*U~_bzKhU5aku_)f=mE?bY}+a-o)lbABFzk7YN1V8azn|?{4O>!8L|K_M?!(X zzXYS03d0aNl32I#Bz0j}(h(C^{F@)&ea5LIL+x=_LFXNzq)sJ#KaP@dLTX9Wq4%LK z_>?$v>m#b@fq{Cx1-QY_l80v;y2jUCjT(fv&34Yu%#?E(!jWR!Z}9R_S^IX?hLYu z`)$ZJG9c{9f=TZDS2z-)^z`@dz2+gN-=loMFp~4X0LJ2V_*C>1-$2{JI#{BF6Nrt3 zN^y^S&pS(%f%AtTazQYUBML6ZFRN$~1ClTtB1I`W2}CMP65mTMskcy!>v%V89hFp} zTzk$4cc3E-^%mZ4PD+8=%($J?=G97-i48FDB2xjDu7!5&G%5qXO|()yOI~Nl_2@ep z{RNn_kp`1a+w&@c9j zU)J;_wt~qz?AvNhit$elSO9GzYR*yqAaY_XcvF0PooCR$>_-v+n;Q@^OfwWUF%h!7 z5t;751Io!?#b?{-4bZNBI zlIK$#ycVX;6;e06obv}!(4pA0dsgK!SX^yL1iuWJp&xJRgEwM378Vj%bPG9x3vUHrF07%I@!bdM;5{K?U3Abz8%JzGqn76Y4tBe#XxvW zIvEaF)d8BtDK}0iPB+Z1?+T&HXnfwUGDtLF@X%2i`6`Ac-ZV|J%;(E{5RPT-$zEoE zQ%_IWwKVybARSuEl^g&){?|6rWEc}F=PWMBa^>*dX@TOU?8c=;8iEO7VmzH4HnJ$J z$A&n}xL^*0>hsux!m%3UYsmn)=(Ay6Uv{ivizc zj{ufS!N9O!HdRL$$PK}>5jhcU2e$|8Mm_VRmS)s}QPX*}6A}O-D;}wUcg)L_Bt$(+ z%BM!XE!)RLkEBm_j`yuyS!2A5kYI{n>BMkQr}U9|2Nm$wmZ;6A6>SQGeIpN& zIcln!%%c@JTy0j)Wq1D~w!PHT=xQkFN3!lvBlT0ybu!v?P3100X+jk1KD7dGlvQf;y7pX)AI*mtc-D=5Ko>YS#&RWa26BIJobGLUSxwx0a zdo4|C>0)mpIzWn&{5SV9R9-L;IpTF&Jml)0s;8T?;VA@s316SSX&P=+kTl}Y;E}{i z+4%`X3i1huVu)kMonJg`4}BX9)R*`L1AE|4mDD6bLA(?-8wzYXZM`5S0Ct8cI;dz7 zj^G_wR7keHD@$teuM1PJdM zL+}`v1a8E*3$06PNID~963ugnL^50MqQaen3S}g4F`-ew34qq7)LelX=mmfo5Pojt zJEeMD$?>#-xB3Qi%_z?ui$MegzipAkyzxGqxD~KpJBdAfj5SgIG5c%bcdAAMLM*_u| z$D%W^7^I3|`p{=L8|f1>VJQ1|13=n|{4jwyd(0xbt|>`&;xFItDyEh0J=i;}QP?0h z0ps2510y7uN_t}&7Rqm5C zq5h7ML!m(Th;3Nghv{KNE+(OQgBAht_tMs?;3+6iucJ(~po+DCR zi+ru&AzJ|i@gIYC(}X zU%U#psLT)Qayt16BIHb@eo1wy&hq$Fv1tNxgI-~LWfq`1p0uIizv06%?1;Wl>=j)F z)&{&Op{1=E{K#be^c#2%xV3Ty?;62uQ$PTMBYu}=ptmCTLs-=QI+r!A74ACB4!#&4 zRgTuGvuEA%a&jzU;uj#{P73~$D;Jk>s+G2N?>~W~zex(2DM;Rp^hFk| zsp{d;x~{sF$ToHxniW+2m6jdES{|+r!49Y)JVdpGE9Mb07s{2-?)f&bfuWi&La>Qp z#a++?3jNH~N0UyK5m)SO^hv4{_Qohp_~+8BjT+*Eop0{BQ4m?pI# z-AU@wFoSs92M?~566G$D^TVPMWs6n&2UZxs|K#fbwSP0c|Cw$DVp9YD2dseqLstF+ zR<5Z2A6B8x=5z=PpjRZbOt?!JS}QlLK!`@=@rYV6*Brz zwMvvtOo?vskMc`>hwk=-z(WP~#c@+&uJ~?7@}kpBYG!oAJi`XXQR6Owiv?(SmU zo>JWGFMF;}utEr~J*vbQ7$}1Skd%8y8253xuVpl2?;mANwH5ePiy!|f9!b`7xU@)eE@UZF#x~!F_o*^ z`Br;p9LtMJ0c1I{p`KM5-f|!&sAC%wc%!J-W3rF#Je0#@a&1vNxcS1sSwl&W21y|Q zN=EjHHU?0w2Euex@(RYy@aOy;?pq6P3cTGS79P3DQ~X7Ff_wP^mv-Cd(<5asdf9l$ zAbZSne~~12=_!s#U$BBN&Sh_uIYSKh8V@#$#CHk9VAqjMp#iur^$JJB;VBi?e*2Yp z%kZHgmqcT7j%%1@W3gysrB_f$ertwTg8zg;=T}+B7oI?Y5+bGVF$$|m>6TsSn%G3b z+`6{~Wc4P-&J{Z>97XaPCkPa_;gJi9TyzvR3!e_X_rPo*suL=u0E#H43Xm6d0+%gb zQ~t_{MYfj=av*I3)l8gj6vXQ!R5)GOP@}* zSzuIDR-gyyj>0HvYp$`g!EtdekSGGk^_K8oz~=)bB(Fb$Vkx)`qp|a2Wp=xL1tE1A zNw$YKE_$KuKrC)f8)YPopW{(6Iuq2TN}+AHB3EIdGR8k76fEQsOvj-IW&-Wqs!-i^YDuT5&rbK#~<6TQ`s$;3O>SjUGj38s)KdtbXMPX9!L~9?Q;hZ@w{PIud6?r;)?$`8TXc-dY@i!18HyPI2@v}f!FuqHwkO-m6%;mD{!93rgl1Q|WKj^|Dq^l!3JpJlTBa7JTUz?kv5F^4uC=g3*5gHT6WrAaI 
z8-dU7re;es9+@>OJdLn=IhcN~#VdThmju7fqc&~4=N7&{`%dU$@Un0X2(U9Nn2H|Cq0?RtDxjqRs-bXyv z+6;&smPTP^{eQy;C~=4n6Xm!lT+% zu)=T$)4Z3U=;NI4u%hwEqr1I(!0jOJoFlBO)d|fWq+k@6E3&s^lxza7exSHQ509{A z0^S<-HOCfeVp=-B%NDN2s?jx6Lo?cf6N=Hsjn?23alwt-h9N~o-HvQmFgbSDr{#L% zeuBHq$0+FMb!UkroXOrQMvaL5xUxGFdw#b$V(Y?1q?MgYO3&7+fsrJZo0LONn+%U6 zC~3vP_n;upsL_$Rd0(4>EtV6&Kwcxs^S;RGP`#&|fwS<90EG*In2ei3|EMa7Qq%4Q&S|2^QV+S1A@CIFy~^f;ERzxoV~~5Ov$Ug*&!w;$tb@ z1veEN@_gjo2;qGLU8D$XW>NwfV}pb7QR(Y(a44G7H8w4YgIq|zlZwJ`7RN?qL`_{_ z_8dgDx_VBoU8Nrpc$2xcE<)L%2NYxLoz}IzkVG3FJ{41!qgh|$!biFhggw3wIy`H; zQ;Du9cMU{kKiO}Cw&kRII6rP|VZX0Jl%>N2(yJB& z=b|shlA6Hh83v#nWE#$K#2L z$vl>b+WTEfRVKdSaMMG?D2%-4sWobsu4!LQebiwviH|p5n#H2r zvCoNVcrD}`OJy!~2&ZpCFVHRM>qf(`0FiDC;e)=F%!1Xz=&-}{2mui|UOajc6JK+IGce!r)sm)g1@Iiw zm#l3;g{M`ga+@vUg+l;9?3a&y?BT-2gJNY(m^Zq*HNv4iCF;s82PrN|A!H@G3f2M) z%1;x1BqeaXt`)?v;`&-ZR}QCmQ;{L@`T|ux|JvcN$`=qtQ_i8p*(-XDG0xs7V+8#$ zeLL$fnWzGERf<)xNo;(kyAx+jA~I$StytYOG8g@yC=hoUS-JzmU%HV1q7rH82VzD( zs^{W{ZcCPlp1rO?#%EI(GH%AKUOf%NkHI}@ByyL=!f@nRce_rX3oLAKdGquu+!Ox- zI{MGCL^2ER(Nh(^3Q|W$2)3j(i=wF~Ba0s7OYt(+Bj$X{@vP17cUz{3JmgYTJ`*%F zPpVaB!Azg{a@8o4fsMqbW*?^7_e;{uOkB#F-u6oo`2s*Ye<8{*V^F>BC|*PUmHXpV z@R|%lWatPIyjP_$y16Mf7r@;Ybt=rSSOYXeW@#Y_am*3R?71U~QB9j{_XsoZLE~iR zN8w{0iRzn$hrQo3;GZ}$=fbx0jxGAVi$GK&Xn7YbHz z?kuRA7qD(=4@Hi0ast+Ej?gK{K36n)G@|gSu$u77aMCkM%6-d7r}m}4wN_N)81y2n~+RjArUOE*GM-l(R7 z63as(#j;#9V|3G0MC#vXMyt~moM(Fz;Fr%tF-IlFCITD-=m@-U8_kBZ~?;nT3d<)FP^-PAtX6RL3DeB>{rl6SwxcUf!P~M$n3CjhyH6d1X zzjr~N`UWdYMDIWRa3eQItVFxWW}FDvX%-av1xTf^MV6txZM9LGXY4Ssa#T(MM|4hpI$t z)6Aw3$y7JC12U)x4`qI7rDi98d-dOyQ=kJRQZ!zikb?8yUE4ra)>e%z80Ay4ckhzA zvhmbC3@nXGbkb;2S#Qz)NpYMbP|tKgjhb{dWlZ4)!AeKr3dN4Vkygi_jkUwe zgrJO~57}dp?Fce__=~a1c|klJ;MBZk*O_FSOjRmNa$*le^6njySjSyfqc9e)8U??0 zF9R7fAsScv4Tr-btioK<-oUEX68c@N#1mF2{HJ~t6tQa>@58!4I26O~n*5R!b@~oX zPRmblL(oV~3+wCPX{t)4>4>OmyX#@bvpm588d^3b0D%%M(_W}8FVfFjGb?gg3}qGr zn(*|$0Jv4;=^U|Xp3)vpDYaYc3Ej{}y5bFl z!kMVzF11omn^Pb%L20fyFPBK&d$7B_LmTW()3dYr?G;M&Ymyl-P>ljma3A|Ul6dyJ ze~nE)BYr_;r?%&2#Dwi9zxxImKJPV@rYhFeEfzXPU`|@Y*_ycAea<6V_;A@ifXitc zaTjO431i);-cDsmD-I;aqq%n*dbRjmkWXtBEuDx*hKX_w7-G(T_XSh{hy-l-@Hp2{ z&eAjWrYe*`>7vL~qYVvIDWnM;uwm__Syhp8$fhtkDGgBSRiemy-;zYmy}P&siM<{- z91{lSeF+i!0{F26eowM&?kT_j)6{u*!xgpRerAj@`V6DD(OWQjXY}4XA$spYLPAC# zHEMz=qjy2{kmy}>Q9_VJj}Rh)$n|~Sy=&dS;H-1jKJUBtetysMN+oh?5-ln#lyVNl zvp0f{w$$2+I^}Lfg83!(u7^JA?)pHI9evn9sf6TP$q5{W2sDwJ+J+A$kV<_IQ3KtO zpcMF-t3!C{y6D_BF-7$Eas*~1uH;D`CQeK7j&Cjt4pTkk(&3stwGZ;kCo0tgawvfL zOCRUjz>a5h=@uOM^(LN9`lWgXqb1F2>GRcJh`7ij;%MC=s#U*g#!gu^(h0e*M%@`6 zKgegEw@r-E{4EiXKY!k%Z3xI{pbnf<)^Taou<|hC=!ZYIh zoklB6~i&D3JnoTR>kYs=Wt~}MJuzj?bEyjD7ZM%pt(M=@|?Q` z*mXHYoKQk16ssj;zJjoTDz;bbAmA()PU+aLl7?jZUd6P z%5_^hB17t(2vymO2e}hXw-YR>gzTX?fu?$J%mc+=PHMDp32GuSZ~qcFgO0swyk_&{ zClpY@Vf;ghhb#-GAgB8VXIYtankC4=z^~?2hi{t7XBuUdigLjJ#y&9)ewdy+W#w*x8$#=o zVsWRnT}!+~1+rV+g`kEQ1HrX(Fu&;}147L-sxN9>%q%&o`)LP*EKB z0WUtO8!Zzk=KDV4nu1kfN8QUmt8s|y1s71=>UJxJQjDHKihDn`D&67*Xnj)ktC{BD zDJoEzwi~0x`;NeyHDg+R85L^@Ie|xk^pzlb&wphF1IyjFzY62hCD3FZ`pNi(gMEf| zRpq9?qb6}fK)H;+_NGpupX6UBj?E!g+0D3EGZ;=|8KWOR$51l}Aq|ZvSa%8dz1awf z27;Chd~33*MWCDu8$7TeEyK4wb~a;uu6;12X1$uki3=bONkQSgK@%roMi3YjdF@)C zdze1RpphRJnZftY5bg@`Z_#_Vt4s0ugS6HP2eVqgG}I#i$lLR*TYLedys2{IWVkEi zjvk?VO-j=r(~HH4c%;-_^p9Ck<|@(V_{;(B{Kwy9W;o;ggd1PUV~sWuBc(Gk!;q5g&fEz5g)w!h zdy+kx>|IBdn$|Hkt3IZ`H1j&t{W3ESd0^O`$NHQUL33~{XRrAM;)TJ!>l}<+F5t=M z0Ph0SFk>>9JnP?4VOdJ^s$*9HDf{IjyKc3IHu+G66DsJxVh*EId_Sy?N*XL>2HA8C-flzqmHUWoHx($`r3E~w&vV&U;lubd zXYf^nhQ{EsG$QlL#AF#HkONCp=4E9wla`c18Fb(VCnb9ZK;Y_IoK^@Eyot48qQ72N 
z&V>z++K}lO;t!<6sXE(Pl;*j(zFvBmSt0EBig$@L!aUy5xFX+us;|oNTZbt>^i|65H6gr`H?lvHS5Tw{uTV0hu5XL-9m|J#jy`3_w9lx*$AQAO_6{5RQqbPW zmta1{E2nLEA>YlW44WP1h%&hiCQ(-XWh@>&X0>q z%jYSuZV-M;W4q)4|AvAbH5pKZjZe`LxAMU!k}X<0t@1OplvznHhhercXq*nih}%5Z z4?~P|zPsuQ1jzK)$w(uCP9>~?AN(z6WU7JdnB^2^1hHp$v4zl9-BdQJ~7yMf@4$-AxA`brYtge;bsf7@>t*^ZhJv z;D5ubY3#nRX_PUlx9i48KhkpF(3gEgss*2eU#T)S8R#Q3!$i@Z-wKNP#0h;+Cz3`! zr?_N1NKtv;?+3j}aKIPZ4SG1DSvrt}#<%-P=-8;zy)pt4b3=CHG(4ufCDc@Dg7@hg zJwMv6&m!M6UHU*W(W!x>@hTms@&@z+AEe6JHOUFMo;`3=sZ_kvD3sv;wbzVj>9BqxKLaUTZiWI;93BEHP16h&
    !1(I&0atlCB604k@ z6?oioUe=KD)=MA92SuszGcWyO*=lxao+HiP@}(yi+3_ws&O%48>uH}MD#of086)&47s^K`N<9WnxknP9Cb#XN3Sw_z zLlgw(Su<}$CGC3-Inu4)#ml&hdcoBEHdGRLRh`?Kq1|?k9k>JO$;W#96ywIZj~>zQ zY@N;J;CsqoDcSW+d%DL@KeTsTwJZy;-Est#7)$!LXlY$nMA(_8lu%C!pw-LV(Pe@Z zX$X@~gwc1mS%o2WTvU~56x7Bz@!XsgFUr`^Y(Ux0p2n7Hx{}X}PaJ)^HubCv$4iY3 zhyxA2{x|PUT)0I{7>%ki9{0d8e{(*TJ4yJG?r-o11!;f9c4 zg_bzVkrT^jKd^pd=fg}Dx8m|i+5Zb$;$I9|*B|qRWdf3wXM}Q0WfVji96?Ocqdz6Y z7}Qz2XOC&9_ceXoU3U7HvO0(~AuB$XDTQC28s(Eh7cBNBW}1@BW#KK~NVHeA1+!_u zk4DIcd+z;Z{&Mj|I~XHu>EEoS4Y1~jjA*M9c@dM=z!8mSW0_}1H#JYB&4JLx%3Ya{ z_ak;}u<8!3A8H8YSA2FYCq=#NYPCCGOj5`W@nz-tiF!>Y1>W#;ij;v*1LHRWn9=jzH(BV^l>+^O8+z$J!q~`?eic9PH<# zF~-{&M0<$3>>Dh}+i`nJyqsOTlC{U5{OZ%T)1KF=ZH_LzxS%5%7!VB5rplUR2u%F! zyulH`X&)t){ML;pqFXmG|BF%FA<@@UL6TxAWIIRC0~3QAXf9#_?iiSh!F!6-lyr22taNji;{` zh#l%Xp>~E|CO_i1|BQ+~q*8o6h#2p*OpIO|V*i9F{d3h^W-C3hH8Foy&Wswrhp1nT zf@yx>eIn*s_CbyQ{0+n!NlQ?6hT_>tS-#%zu^UeDX5OchSDHGxYjxJHBVvN`#^`6s zs!!ihYc&rL>2hh3D-yXBHQ`++flROZ*u^g)`zn>?i((3=Wm*W#Pg!daRN^R2!MHPIe#E2iz! zpY=)m<}8G7UO7zS#cElJB&Ye3D(=4{bl+NuRgmThvtK^iBsfo;RNLXokUlS+(B?6+ zY{s(9`nAT3+`liontYBpGCQnpjw?Bz*-%47|w-;V!=vRCjjmY{PYF%Fz zYTXRW`u&?B)Q~9MPe8zmW_~=?UY12hkflR@fn}h+1&heSmyU}~i; zfnc~N7)TI@gicLs{~F0i9{09<65o}xI<@2T-e`kY6&S1~19Y;{dyyAP+E zVWjjP{(ElD*&l9S{7|b4xbzuM%dbG%;XxfrucIR0pK~aLqc_^&!iy#`xH~t1)-94| zEKiN5iEyykt|^;mRGZj777)aZlAK$7zIy3WT{7S`fPoq^qF;r7GpC_oQnEP6sCMRM zmyH%18uE^xsV}Lp>tl709Zb{7OJPIs`zAA3GMevW=IitK*&kQwMncRZsA5+SOv=g1 zXjP*Chn9#R!j|d@(zgB_BU(v;^=+7?Q=lawGt9gBySAVZ>|NKaJt!b$&AtCMb9f!o~ zs$V`bhpF88G3uUr-F0^KU1ta3-?`@@(7W)$|I!B_JOCI@!HO?@*V!RP4v`3U5#@k% z7?tsVScDS-QO7`PB!^grNmO)6|9>0%9k!qdtTQK!W&8^P8xyhhox{O9z{qp>t;#u^ zdZ>{StW0Oz%?q)z6(v^A?D9D06&H5I;jM$!*d~<>{&*0ExiGoJ~MkJ{eEK4<3DK4rqJWcfiB(vn2ljEZ9B5R zgR*=iZDp(z4G+{#;D|{3+BK7A@BnVDq>K2D^>CrW7uUKjo|44IBD#5AJI}NKig&J{ zEB`X-9YF2MweD!y{E}*Nr@!TVyiD;Lx5}ZWD)bryU;0f}V$51k2_yQ0A)|FClJLoyZVY4TXmy%tfk+7EwB)@E? zFhMG2LeA58{ma!u@;Zt2W$Cu~VX^5g#6Q5LM?#Ay%OSP-;amgUkzZQ^A)n1%AC{As zo#Qd`jVotMDj6Y_2>_b@old?!PH1<2l0XTdNSl$i!CnQSe~$T zk`wfLB8IADhOioMMIz-ilBwvseoAe5JAI9<`@@NvwYc!&R0qoWdbV+56=z^;=??3< zPu%Kx`>RmWme7n0P_}_TgPDi?c?!F2c5bWM#JA6^WcWby1@#GrN0)GtZkGicqKi;X zE?9-UjJn_Ka=@4d?9jz0TCGA%VYz&lvj3^Jf*N8K+Z4nkd&@9#s@V4P;e1z6EwJ(M zUMxi({X*@E8NXf=y=(JCDi4i-CBQIK_i@nWa4FZ?qOmc9t>h5dtn&CLvhGL|)kG!? zyzT(YqoZ5xLR=hVXkAY38h30jS89+ON@xrNWp=3GIw=44n4HjFfr=MIu9ezN{OPm~ zGGr=b%~qJyZS@saCnp|wgG|%^DpNgl8?D@7Z=yGm(N7+?@uzWJQ~qr2?$@BK#C-7d zENAg8rGT)ckYc6K4A-l`8XKo<<#-9>?AO@O!kiWq^1NBR3&b?qh$>8eWP6rZ;8pnt zKD}sE;j?p>Mo>+LnRf6Adj;}JwRALhei8kHZCP#w%s^dqyjHKuK5xd$Rh?k?^2I71 zR^JCP^wgw1i_Kwas!}XQE*qfS_Or~>nn9HRJT6~AF*2`;-(CS;DU3S$T{5ZJvAVWn zk^o9T)V}xI%lu!d)_(wb{?n(1Xb?pH4#WYs6`atib|{ zpYdCxY(^Hle)A;lSrxmmwbTL!z1T8S%Q_=dNX}3gZTfuC)j+CqPWb2G#iM0SY1+>n z=4lpkOe*$I)W7k}Wf6SfvNo&lvb3=P#uC{dg~W7}@k8cho4!HwHQwbtuJn7k56!Vo z6A0XD!qG9KC_m%}+6ih6c|1t}EW9sT`=A7}G|$7Tqqn8R5semoo_Z>i#M~dM&wu<0 zqRQrAZ+vEu&!_EoTeJnyIz9foEl+r+Z*&H$mNc=cDm*?;<6OK*KZ{!p;uQt)#R;w22?0BPbKE0^|yvD2>poLJ6ZDfxhlCji%8+kpblAydG-F9+(mZG5) z{HL`mq0a+R{Fb&%C&bsG^=z8zi$e&jHH`i5MD%nQb-$TT8mVkA$~4bcl(BOXAIDCx zdefz3h?t}zyXmoK z9wIeLziO4JhJdSFWv*%Q>tEWkB>0u|F#8h}`dw`+)?LNmFWpcwlECL_e_45J8TFB3 z{ZL$27cbk~u{C3WP;u(+9XBilXkzT$8OpljavdKSl|` z5lkG%YM9ZQ%R#9jHMM63df7OAMXX0yRGMF$ZDQh%QQ_$>R$ceF06mqQ^t=Py-ww!Z z19c{pdu&YgaMw0|^X`jgs02ZN(L;R(LkaQb>i4{zkbuGPT+pOg^vqkHwa?f(dWzJLd z9hCJwSfQ1`yG(*Qxp@ylLbRqP0^kH|Qf4K;=P3~#rKrih6iWVqB7o#cPzoLU!QfN! 
zP1dhgE(kq%VnNfKEXn}*>3Z*my@~m?)nZmMY;g2&&IP`vq_wVGc)nwhQQ}%;!j4BG ze90v%E%p1OwfpOz`FsSjanH5qk5gO&b_;#H*xF1$uP9mWc0^pTI|}nIWAi^q=Kp}q z|KI2eYwFuNnf-rZK|n>=?w9E$&+sJ!+&{p#k(9sBKIViE2woiya2&BbS$Ur={2^8ur?T z$LEjw+B3WK1UhDZIE4HIyq*-+e)zzW`K%P5NxB`+a2hRMw-iw>e%sXB#I%03n>VKR zyhP?7pgP50qqk2{_ph_>ZSsSeM?{t^sqvRdpPte2_d62Im$=k!-)x?*S?rxDXb9OE z{7|4_(a%Rd{B?^QA$3Z0rK9z0i_1t43 z@z3r+x4a)|(7J2oGg^)c0n@_AUN@R?#C1Z$Rt>L7o^4HaD7-&p%pbsdjGQI*iw(e@ z_%}P9J+mtDll0vBxpP&Ra8_5%!A9qEm{6?9u*367p<1!qC(o+!fB_gWY<&OqLuy6RCmAFPHJJhw15bmf77R|GK3Y zkpBZvo<-eSk8x~WG0J;1YSeL%f8lBZ4nrasl}?HO0d^p^?=82vT{{b3LJjt&DwllL zPXBJc;h8pSe1AFgJMC=xa-dR}D}B&&6%tdMa6TpV)Zg@_n&D>H2U?kq>NpDp3{=x_a%gZ9hRN*QX%0nsu^}3;ksmEsuX_k~=cA?Y6 zR!g8A0p6w@N$yWc=e*3%`Ez_NLDa@oM)M@EI)jMDp+LllhBB`&d2&JNj~x-DEPo8Z~4Hen^1GoYTr`&dKQ=BqrRL+S~1DG2{zTN(^wROZ2 z5N|y;RptNbL11}P`$29w#|D)OC_%r`P(m!BxwKejPj_#0Q=;CWd8Pq%LoQlF%V8uW zL2(kaFJX1D9iuTiqx~0IBgP5k(KB20JvQ-XU_-{ungKla}oHJas>CX zBAZAH`@i@}ko5jqQyH8x_-q1;e%7Mw1QSu?DKL%!Yp4GMj6R!@dX!zBR8r761Fs^G z=F8#v^#aGiXIEdRIM||fTU0#wWcrmz^lKE`B!^Qe=8_-l9In_ueb6zQ7nP{z!LHEw zu`R;ktjOTU=OxN0lfO(;dC7)ztRi3I0&DQdO_bt$ofB)S{lVOp@|AOvUj~T$Ow%9g>)M6||$Dad3_LgM4}(~8;2#-DS{ zC#jFcp%Yn&1pbJG7T~W0-3Z3?n@JEN;}(qZ@ws?H?j!dItSRuUQi&dsq4ndN2ysLEO#qjL`JHOTBiTg^dQDCYGdo7pbNiQ}<5@WUA=^nj5pSsL5o}J2trC^v^6Up(humWHnh^n>eL8i-!B&*Njm~TM` zZHBXhRwT2*7Q8JQfFztyo&z!4-smofqC7_LbGciQRn>b}ZV+{_TN7H72`lZogwK^R zTsEKzzxI1-=Jew+4@NUmWcr7<1EZv1R{L51E_! z{{V^yZmNkqCy`D0{!^U(bFq8cy?YHdeQAjljRT(PQ^pY(AQSCEt-ADZ5*nQ5UNfst zN1EWYgvJ=6S}jXSXNp-p=JO7UOrKdQX|N&;bLLeVtz-D-)#9+(BdSGF(oqZ9)ZkYs z@m6J88*H%(aDA7`IQ21tu-`jU0@)j57i4k64NXK=^1!2jCSq1m1s^b+3Abc!i5CPm zNY`F**Ok|5WUE(CK+WYW2jc^Xz1dI*$>$CQbEzX-YF0SK=XKgdtE1lmj*Nf8{3)1u zc~9<(uOf`8aoQt>mMitOEA1E(S1IZ++izgbNN@)coG*4Yv7uKwWY&qy}a*T?tsv!Dlk{Nt^eUN^kF zo?KeO_S>2Cc&35J+SPZG!l}uiOY!CA(`QRu;8{tdXGs3}aBCTGu*NkBQx27eGy$iUv2uZ_8kV5WRVI zE(9~nsHJ4}0PPIYS0;D9^S*gb$W3FAZ-1WocEP08#(|<*`)+=%kv#lU))#3V2FQVJ z_c7btlLcjPNC`3%+&H9X;j=N0^%#s6LV=-qoe+Bj<6pg6rmBpiDH1kA(=90&OmdRG zl;7z9;kAcuSYfrBh;~Q9aGQVD9uilj-K$gLW=E!Tmc#pVH zk_w??IA3uIu7tlhBV!*JUmu3;5b*y8kitCfk4CvF_hfcx%`c1;MeLlkD~mW;eM`o} zedUltojcj96iWfiMz_DkEAw}sNk%;?bS)`9H-1g^md;i|?Rqe>InBIn&t1W-INsSe z8v6EL>OTNU&}udoQeWZcb`#doUC(TJ|C5S}l4{I*R>?%>g*F5uY9NL++Dt;cVoei5 zQ0fbPK}#Q{lSD=2DQx4qYPrp?Op!T&5>@N?v=6%

    b%uTTc5y0x_ z$7+4np)G0;LP>qBg*O)f61&0uKT)7VpT1xE+?x(A(beqiaG5XZU#WvsCn7e-Nx#}~ zYv2{~^699qY>pS5I6N2jz+LYyAwya|DdUD#6iAv>^6Cxk&R-o4MHOPrbH-Vh{Y_6dgm%I8-Y3Wvp=cE1CB| zTJ}b+T@0Scea);N5c>sWDaOo5T;zEG!IOT3ZxynXP-xM8AT^2&_11x9Jz6R+l&(82 zF-OMPRK?h}Bl*7f?AiEXm$Pu-6tQ}=pP9o$Q`6?JtizLri#XXwbamxPq}*GPYxcbs z>fT5$9BWMEXD8fVq*e#57*}<)@A3Yqs%%WMqkWu)k0XPvYbBF6kLaUlZJZ-wzaEfx z=bunfR|ih)Q4WE+r_J8Zm&Lm0$tiDl{qT&Y{#%`uYvaRY_Q9AB(B&|a|Md+KVaI}6 z!bt0AHg}{L*YXK5k)X}5zQ$mzzwIA|*n74|RD)!k9tQUX0wjIz3+*icxDeEr>r} zt31id#_{{96uJHv^FGs0FAi)!Cp!40k>9gFH-~vy)_a_}^_&s0(onVb2f!DoCGN?H z?8JI*6t~S5!zs_^D`*KT;r&J>D$#5{OV*{ zTl49*r2DxZ`%lNY7dp6PIeWLg!!xZs&SKR$qxQ;Jc6EqletEnAd1MtS#V^KJMmD`{ zSM9noPXdJ9_r3$0K2$ZJ9NpC%^NbZ*oO5e{p8dUzojQp#j5a>6j^C6=IbLiunvP^m zI!8DfasbMgd_C&4*n9-#8}%_y@n6EBQy!hIPX&=OjY`4)0KK_FD()bhc{mMiVt^{K zpdsfg3gY*Ox5j@nwO$OiKyLESRU~k7s?UH$#Rt7aO5#7ea=hQWav1L~h6gqj&F%ec zH1mL;(GQgB2Js>O@>)N<+Gj4gjXTri+YiyC^cL zbcAH~F_S;Xh7neE`vAv#eR*VP0kqFJRqa&*#nf?wYp`WkirXi>vq^r#cV(VTo|KEW zjHj5QV5Px*bmX1EFHvve9>r36FO$g>0?Bj6DR$iLKY%^Ig1%C*_$?#EsaFE|Adv(j zQG2hgTAN;n67N^$7=cOS=mm;NxYlST=vuf&%Gk*m6TTCP)oC1gLFrIAv+4dxn+U`} zJzh4pKhjvmo{v&K&NAdP%mOG z5&?Ltx2Fx2=-MCrzU%nHqqLHAH%Q^(URb|xgRM7=OvUR9(^aEI`n4S-k6zDZ6}Sj8 z7#S9d!({5UVhAzcA`#<(fo-}ZY9+-l$ol)S?8B-Io>9?xZ85KV-_N5X?PB$o_@Wbl z#cw4d-_Q{J-A6KB$Y)d3ZYHjY)e(bmro^gzaz-lJ9{!oo$T`*)4)T88?{FrbX4|Hr zfv)nV`qycRky&82{!T|pyJ51WI$y2*NvXV3nJe*9kOWI`Y|iJ(bCzVM#okXW7ae?S zXFtVnbnT%G3Qrk&&3^*7e#?odi@4OFmwrFqa@ZYcJpoOByf5F^$cJtPG)oKK8Tj7a zI7}Mna@0R+AZPuMLt>Fd>#F~#iNkViqC@3_Gl@6l^VCs!lEZ`0CwYS7yd)^qws~Jf z9nfI?IGs>+AkDo|^a0d-+i~k|K#jtlFC|>GZ^KBsZsy?{`;G^Dy?;0$MA#D4!|E1l zw>O&>T50?5-G5CZ*ficgK@2x->U@mJC~FJZN7vs`opVZu5r zq&p-21I>BPgy${8-v@PLJiD#TPDb=>s3O@c@H0;Ocv9!5Hy3|V(lPwp3{_)l@&^#v z)*Kfka3Qs?hI*OHi#lV=%-zn@4F@_+{og4pgeu#Kn=l7AomPsZp^m#IBwE#mrNPK2HznGDG?M~tysW>3oDh>AhhL|BuZ! 
ztbg6L^@|7gKv~TFun+b*HGRuYO8oB#(YG}lKL>+{Qx;V>2kNIj+KCFDz0!u_C6Mq7 zjWj=1=4$Hb^D+a@7PI4l>oM&33PkjiJ}2~pH;A(d(343Qjwuf6Y^za>3i%q|~q^rns|G~3c zTK3w~SXp4(UJ=HoDTWRtK#{=U?!{E>G2f(}s;)tXUwJn?R-4NaV02kt47qv!tIxkMLp}-%Get> zzsp^<^$=JES-GLYti|{z?s~>@XOrm`X*%W-Ym68ICe1|?$(*`;Vli3%R>r$*(*>YG zLhK<9@jm~hO}qZIzR#g`%|yUW_#Yr$9OCDI&0J?>7kSeEUe|O{Vk4s=QOC{yxX+&Z zNnS~RgvjrlEbDUhRUt}}%ze$SIf5F8Xw@M-cfWrBN$-z595iiAFo)%4vl>`@^dxnQ zxDoxIXxF9%toEH!beG$*SGv%N;<)z_Ffe}eUbZ=0S8NQ@!>O#fbx1VY-w}|W!Y((Q znCTgl3X!y1FOY0O+Dp0BGri`K!{<^z#gDLY14h2i zWihIwmm$mPVG01PXjvrQ+qmr*RnJYoB3kyTq~g7YF1cupjR7oehMicVikjxr4rT9& z=4@4k>Et!q5)wwJ)n<o!56WBm8Zd?*tloy0_U z_sic!I|S#2%IK$!Y1pcWDfa?n;VMH71#Y3Q27?Ihu?aS^dh_q0;IjJn&1@Hw-hb~C zN(I6u(d@W~k@scVsAjyAe$|me;07K%Fyn%QF4}K74)&4X4Q0@xPxz=^dQGC=4j{hY zYl>yVx3qO1HznAM{Cl#peO4yFP@DG*QI3Y^@ zD)6JQ3U>QQ_BunCckt20O>PL2Ys}I%B{nbGQY(gluaP2*Ty;3i9DmSIA?tEe?Y>d- z90^;IPC;2M)u-wND2~xxRjs3LATo!k&xC8yOcn&=&fyH@fl{LNu+q&7e@9p8B=9gb z^m~xKQYwRC_(04IM!f`=CF~+{1R3dw?efo8pz)gFSH*W^3n0CyOT_P1cPb)(;({!J ztl|MimLzCaU)q>NMcAR?uqeRh4#MVVF*E`vIGeQnTq&CjpRaN)J(&kx0{GIQ7M%z% z2fW~}qC`B+>!Si0+;9c$yOM~jo}UXpDJu9H(~w?D0B4h_tTBlqq$F(-kuwTsdHhbP z^tTr4m~8YXgonIJUBu%Dkx9>6TaT()gF!i3LwOs$EX^|_yxy*z9STYv#|or!{{U~Q zqM~xb7&kKpN`m5!C5!yLS1eZ-@InQ*I4sLPM@xDM+-T9&I-1bY9^F%zQfYPA>+rQ= z7hKhDpNB9TM&TtQ!HGGFWEL5cUO|uP?lI`8u?=&jG2tA03)I%166i}MsBkEQqKeylWvW96=ZC?Q^|1ShvN&qZwrzvnQr$i+M`^Scj8AKPYe>``QYSi_p}ABdUYZd&{J$ZR#UsI?sCnjiVq6_CEW0Ir~*?Q4PV4byD z)E9fDPlRN}&wH2(0Ohh-_U6_tqoeFkJbRWNZ>6Z^i|^^pBj*M%-XvJqZNA4BJlTF$ zG#2>s$=>d*$IE$?qDO%Oe~a97pX^vE5pgkkiCd54Ua!YWW6hiyXg1ihHNRP}!|b5B z&fRHDYt2xallPJTji7mq5~P35d)zGj9yiA;!=JXX8Rb}7j@8=Oj}t5INf^~@`^XlQ zgBkiG)abZ>91Kl5t+Dq^<@#NnW?e|jEOrm&BqO0gcGg|)PYQ1;fqwm~)0FR}t)3!3 z(UUEs7P@O*vb|?=(z`xjfSHnRAS67HO#>Z?`NA6{@z#XYmPL$ z6zkOdIY?2lig#K*G0SV771iT>!QKM;PWD2O%c=N5zB1p5iik3YbtEU`hwW6=M}Hnr z2)D*0;<*jJ&qMs)HN=QF3K)nQq5lhZ3-*W zS@cl%S)mJj>rx3XR`G{EFjmj>^YjazifIZs%}TjW{m7{C^}2jMf`zr9QIWH6zAkt2 z8_yj?(YN%Dwcy1dy)dW1dx;ngp*QeeWLBX=9CR_RFY8GQVgxt;hKT5zmsg{Ov1}rG z3s@oEQ{$B0&t7vp!T%xj)Sqn^&rQ$r4&iDhye>)I5U6Fn6I>~1`j;j?yaj~#c`I+i z8NOS9r=BWOes@l!k(Zn#*^Mq?b$wIlB3~&U*@L}1r4kW+STA+xylD{CLPue21sM7X7=!a23E+E^URUVv3ZJP< zH8{N62@fG<5JamaHxB}@@fxo!ZWD%YX5s$&1(ZhJ$MFa(9iD|;^>P^#!E6hJ`~`SP zBhx3@d;c0p;rHr)RZjRjp-MY6M_-cbaM{9=EhDy+HkR6eJa;{e`p#{EXF*VY-<`Lh zKb`frONz-JJ?#v)ZR2Mf@x>>v?yqYz+lSrP0U3&fZPwu8rfRgI{{R?bJhDYy1+No) zyQ7wv2hD0IlYMp8SL_e>Y}DV!>g%Qj$9+zO5)kl@LWKLH<}2Z%D` z*VKOEgJJ$sG+xZf!*3#{=)>8GU4V`RRT}5XIC;)*_;{4xk@&wLsG*HPVEA~n?A4*n z7-?@Oy?2$OKYmcuZiQlFy1A%wsHn1i{iDGu?T6jm?^o|ddJ#nOk})#RqW^-$2PkUh zh*q2_HT%_jm4sNooYHu6YBk*N4vx~m`a9x4oSdpoC#S^aa#sfD-uDDaJB+r)rW@z4 zzY{9Q#5cM^-s7~JkVt@0K1=iXJBh3-Wsog-k|)ly%uiBPH^RQsX(xDD8C%ou%{95d zEx=m!Yqm=72&?o4FH>BeOpR10%tJn_ltQ8EEnkSMYFH)HnAxGYX$!sZ{@p?}?rR~9 zvK`T@fQ{4nGEXPOxd{bxYbe+tN5bb0y-Eb>2C>*?1261M(> zvcl@B?-XbqW_tVDy-Vv>_I?;}#JE)PMwNr{{mP#w$|d|SZV3p_)dE>(Tui3`@1X&9m(W>3lE;16 z_Xo83@};Gt%tqjuqA!;0ugIH(ek-Dl!#5kRsD<{PlSbh`A`B*P!RJCPatSMR5kC}r z`;Cl#b0yhvqolaUvif0x6s62Gxx2^T+RS9y@!xz%?;{vW0vKe@CsL3FkZY4*ixBmA z?P&G6hU&rC3F({$#?-_d^tqKntUsks*+7oRU!O{r2|%(mLR*mo_?ex0nO$)!7$0m1 z=;QbAV(~J(NTDoK+$5&CWl-7d{{$@}f&|J4)DLu(YMKD z9p6sB@lgo~N4cq+DOzHXB3`m`;7D9~R-~UT$2dm6HckS?-1CY z6lIk>>|buA{>T6ZA{cOpG{-mtDAL>{G;^i5sz~qGYE_N(lk;&9+;_%tTWIn|r{YVr zLf#|pykndPf?7FXUlbT*zRll=a>wIsE!-NMlI7f_ zaNlQL<}3Tq$}}u54omFWs5h5FjWrBd1)RiqSKFtW>J{pgAUth;bariQvpTIgc$`)(uCF(PU$I z78Y&%l+f2Sha#VI(M?Y@&_aoX#nW_-ky}BZsRk6%zH4H+V*e819i#A1Y}6$>BM7=+ ztL+aAk5LJx{0BRn8d~;_jMS0KwWh1St=!{sY2}ek+O1p#z`!Q z8qb;hLqQMD{RJai1ZWPh)zTTus{hnNs8c@`J%iOZvcxdV)Y{Vlm-~4pNzA?bp$&58 
zn%$&5ui=|6RE?Z{Py)?@_l8#S-63Z>&7_>0(4GY6$9&{f1=ps4FB{>LAkHdR?jc$H zr;PcmbmYxRHLt=;!Srz{)MnFhG2cCzaKpW6(H={kyw&5HG~xiCtiS?A?77{axFm|v z)DARb&~_~=NM<%I#0&woib*TplKZgsNn2w)--21&fpHVlX0KLX{|OL0&&270u9>iT z=d&rg)M1^BHfk|Y&QOue&ABl2u@RO%N-<}`9*x4h_*a_?1rxu*XhrS5lBEuhKo>FN RI_fRCh^q@1!=isH{|98lX=4BY literal 0 HcmV?d00001 diff --git a/tests/func/model/test_yolo.py b/tests/func/model/test_yolo.py new file mode 100644 index 000000000..5d694c0ac --- /dev/null +++ b/tests/func/model/test_yolo.py @@ -0,0 +1,2955 @@ +import os + +import numpy as np +import pytest +import torch +from numpy.testing import assert_array_almost_equal +from PIL import Image +from ultralytics.engine.results import Results + +from datachain.model.yolo import ( + Yolo, + YoloCls, + YoloObb, + YoloPose, + YoloSeg, +) + + +@pytest.fixture +def running_img() -> np.ndarray: + img_file = os.path.join(os.path.dirname(__file__), "data", "running.jpg") + return np.array(Image.open(img_file)) + + +@pytest.fixture +def ships_img() -> np.ndarray: + img_file = os.path.join(os.path.dirname(__file__), "data", "ships.jpg") + return np.array(Image.open(img_file)) + + +@pytest.fixture +def running_img_masks() -> torch.Tensor: + mask0_file = os.path.join(os.path.dirname(__file__), "data", "running-mask0.png") + mask0_np = np.array(Image.open(mask0_file)) + + mask1_file = os.path.join(os.path.dirname(__file__), "data", "running-mask1.png") + mask1_np = np.array(Image.open(mask1_file)) + + return torch.tensor([mask0_np.astype(np.float32), mask1_np.astype(np.float32)]) + + +def test_yolo_from_results_empty(running_img): + result = Results( + orig_img=running_img, + path="running.jpeg", + names={}, + boxes=torch.empty((0, 6)), + ) + + model = Yolo.from_yolo_results([result]) + assert model.model_dump() == { + "cls": [], + "name": [], + "confidence": [], + "box": [], + "orig_shape": [], + } + + +def test_yolo_from_results(running_img): + result = Results( + orig_img=running_img, + path="running.jpeg", + names={0: "person", 16: "dog"}, + boxes=torch.tensor( + [ + [100.2483, 83.7399, 183.179, 238.1918, 0.9057, 0.0], + [10.8968, 177.1552, 71.1275, 239.0617, 0.8919, 16.0], + ], + ), + ) + + model = Yolo.from_yolo_results([result]) + assert model.img_size == (200, 300) + + model_json = model.model_dump() + assert set(model_json.keys()) == {"cls", "name", "confidence", "box", "orig_shape"} + assert model_json["cls"] == [0, 16] + assert model_json["name"] == ["person", "dog"] + assert_array_almost_equal(model_json["confidence"], [0.9057, 0.8919], decimal=3) + assert len(model_json["box"]) == 2 + assert_array_almost_equal( + model_json["box"][0], + [100.2483, 83.7399, 183.179, 238.1918], + decimal=3, + ) + assert_array_almost_equal( + model_json["box"][1], + [10.8968, 177.1552, 71.1275, 239.0617], + decimal=3, + ) + assert model_json["orig_shape"] == [300, 200] + + alb_boxes = list(model.to_albumentations()) + assert len(alb_boxes) == 2 + assert_array_almost_equal( + alb_boxes[0], + [0.5012, 0.2791, 0.9159, 0.794], + decimal=3, + ) + assert_array_almost_equal( + alb_boxes[1], + [0.05448, 0.5905, 0.3556, 0.7969], + decimal=3, + ) + + coco_boxes = list(model.to_coco()) + assert len(coco_boxes) == 2 + assert coco_boxes[0] == [100, 84, 83, 154] + assert coco_boxes[1] == [11, 177, 60, 62] + + voc_boxes = list(model.to_voc()) + assert len(voc_boxes) == 2 + assert voc_boxes[0] == [100, 84, 183, 238] + assert voc_boxes[1] == [11, 177, 71, 239] + + yolo_boxes = list(model.to_yolo()) + assert len(yolo_boxes) == 2 + assert_array_almost_equal( + yolo_boxes[0], + 
[0.7086, 0.5366, 0.4147, 0.5148], + decimal=3, + ) + assert_array_almost_equal( + yolo_boxes[1], + [0.2051, 0.6937, 0.3012, 0.2064], + decimal=3, + ) + + +def test_yolo_obb_from_results_empty(ships_img): + result = Results( + orig_img=ships_img, + path="ships.jpeg", + names={}, + obb=torch.empty((0, 7)), + ) + + model = YoloObb.from_yolo_results([result]) + assert model.model_dump() == { + "cls": [], + "name": [], + "confidence": [], + "obox": [], + "orig_shape": [], + } + + +def test_yolo_obb_from_results(ships_img): + result = Results( + orig_img=ships_img, + path="ships.jpeg", + names={1: "ship"}, + obb=torch.tensor( + [ + [272.7724, 83.4637, 80.2673, 41.5667, 0.5629, 0.7289, 1.0], + [70.2, 95.1912, 143.2061, 48.404, 0.494, 0.5785, 1.0], + [120.6607, 76.8303, 115.6623, 42.3174, 0.7043, 0.5677, 1.0], + ] + ), + ) + + model = YoloObb.from_yolo_results([result]) + assert model.img_size == (300, 200) + + model_dict = model.model_dump() + assert set(model_dict.keys()) == {"cls", "name", "confidence", "obox", "orig_shape"} + assert model_dict["cls"] == [1, 1, 1] + assert model_dict["name"] == ["ship", "ship", "ship"] + assert_array_almost_equal( + model_dict["confidence"], [0.7289, 0.5785, 0.5677], decimal=3 + ) + assert len(model_dict["obox"]) == 3 + assert_array_almost_equal( + model_dict["obox"][0], + [ + 295.62302, + 122.45737, + 317.80472, + 87.30396, + 249.92178, + 44.47003, + 227.74008, + 79.62344, + ], + decimal=3, + ) + assert_array_almost_equal( + model_dict["obox"][1], + [ + 121.76702, + 150.45038, + 144.71785, + 107.8334, + 18.63298, + 39.93202, + -4.31784, + 82.549, + ], + decimal=3, + ) + assert_array_almost_equal( + model_dict["obox"][2], + [ + 151.03152, + 130.4003, + 178.43207, + 98.1517, + 90.28987, + 23.2603, + 62.88933, + 55.50891, + ], + decimal=3, + ) + assert model_dict["orig_shape"] == [200, 300] + + +def test_yolo_seg_from_results_empty(running_img): + result = Results( + orig_img=running_img, + path="running.jpeg", + names={}, + obb=torch.empty((0, 7)), + ) + + model = YoloSeg.from_yolo_results([result]) + assert model.model_dump() == { + "cls": [], + "name": [], + "confidence": [], + "box": [], + "segments": [], + "orig_shape": [], + } + + +def test_yolo_seg_from_results(running_img, running_img_masks): + result = Results( + orig_img=running_img, + path="running.jpeg", + names={0: "person", 16: "dog"}, + boxes=torch.tensor( + [ + [11.6684, 178.2727, 71.4605, 238.8026, 0.9157, 16.0], + [100.8046, 84.3006, 183.2971, 238.1916, 0.8578, 0.0], + ], + ), + masks=running_img_masks, + ) + + model = YoloSeg.from_yolo_results([result]) + + model_dict = model.model_dump() + assert set(model_dict.keys()) == { + "cls", + "name", + "confidence", + "box", + "segments", + "orig_shape", + } + assert model_dict["cls"] == [16, 0] + assert model_dict["name"] == ["dog", "person"] + assert_array_almost_equal(model_dict["confidence"], [0.9157, 0.8578], decimal=3) + assert len(model_dict["box"]) == 2 + assert_array_almost_equal( + model_dict["box"][0], + [11.6684, 178.2727, 71.4605, 238.8026], + decimal=3, + ) + assert_array_almost_equal( + model_dict["box"][1], + [100.8046, 84.3006, 183.2971, 238.1916], + decimal=3, + ) + assert len(model_dict["segments"]) == 2 + assert len(model_dict["segments"][0]) == 2 + assert_array_almost_equal( + model_dict["segments"][0][0], + [ + 16.0938, + 16.0938, + 15.625, + 15.625, + 15.1562, + 15.1562, + 14.6875, + 14.6875, + 14.2188, + 14.2188, + 13.75, + 13.75, + 13.2812, + 13.2812, + 12.3438, + 10.9375, + 10.9375, + 12.8125, + 13.75, + 14.2188, + 15.625, + 
16.0938, + 17.0312, + 17.0312, + 20.3125, + 20.3125, + 20.7812, + 20.7812, + 21.25, + 21.25, + 21.7188, + 21.7188, + 22.1875, + 22.1875, + 22.6562, + 22.6562, + 22.1875, + 22.1875, + 21.7188, + 21.7188, + 21.25, + 21.25, + 20.7812, + 20.7812, + 20.3125, + 20.3125, + 19.8438, + 19.8438, + 19.375, + 19.375, + 18.9062, + 18.9062, + 18.4375, + 18.4375, + 16.5625, + 16.5625, + 15.625, + 15.625, + 16.0938, + 17.0312, + 17.5, + 22.6562, + 23.5938, + 25.4688, + 25.9375, + 25.9375, + 26.4062, + 26.4062, + 26.875, + 27.3438, + 28.75, + 29.2188, + 30.1562, + 30.625, + 31.0938, + 32.5, + 32.5, + 35.7812, + 35.7812, + 37.6562, + 37.6562, + 38.5938, + 38.5938, + 40.0, + 40.0, + 40.4688, + 40.4688, + 40.9375, + 40.9375, + 41.4062, + 41.4062, + 42.8125, + 42.8125, + 44.6875, + 45.1562, + 45.625, + 46.5625, + 47.0312, + 49.8438, + 50.3125, + 51.25, + 51.7188, + 52.1875, + 52.6562, + 53.5938, + 56.4062, + 56.4062, + 58.75, + 58.75, + 60.1562, + 60.1562, + 60.625, + 60.625, + 61.0937, + 61.0937, + 61.5625, + 61.5625, + 62.0312, + 62.0312, + 62.5, + 62.5, + 62.9687, + 63.4375, + 65.3125, + 65.7812, + 66.25, + 66.7187, + 67.1875, + 67.6562, + 67.6562, + 68.125, + 68.125, + 68.5937, + 68.5937, + 68.125, + 68.125, + 68.5937, + 68.5937, + 69.0625, + 69.0625, + 69.5312, + 69.5312, + 70.0, + 70.0, + 70.4687, + 70.4687, + 70.9375, + 72.3437, + 72.3437, + 70.9375, + 70.4687, + 70.4687, + 70.0, + 70.0, + 69.5312, + 69.5312, + 69.0625, + 69.0625, + 68.5937, + 68.5937, + 68.125, + 68.125, + 67.1875, + 67.1875, + 66.7187, + 66.7187, + 65.7812, + 65.7812, + 65.3125, + 65.3125, + 64.8437, + 64.8437, + 64.375, + 64.375, + 63.4375, + 63.4375, + 62.5, + 62.0312, + 61.5625, + 61.0937, + 60.625, + 60.1562, + 59.2188, + 58.75, + 58.2812, + 57.8125, + 57.3438, + 56.875, + 56.4062, + 55.9375, + 55.4688, + 55.0, + 54.5312, + 54.0625, + 51.7188, + 51.7188, + 50.7812, + 50.7812, + 49.375, + 48.9062, + 48.4375, + 44.2188, + 43.75, + 41.875, + 41.4062, + 38.5938, + 38.125, + 36.7188, + 36.25, + 35.7812, + 35.3125, + 34.8438, + 34.375, + 33.9062, + 33.4375, + 32.9688, + 32.5, + 32.0312, + 30.625, + 30.625, + 30.1562, + 30.1562, + 24.5312, + 24.5312, + 23.5938, + 22.6562, + 21.25, + 20.7812, + 20.3125, + 19.375, + 18.9062, + 18.4375, + 17.9688, + 17.9688, + ], + decimal=3, + ) + assert_array_almost_equal( + model_dict["segments"][0][1], + [ + 179.0625, + 180.4687, + 180.9375, + 181.4062, + 181.875, + 182.3437, + 182.8125, + 184.6875, + 185.1562, + 185.625, + 186.0937, + 186.5625, + 187.0312, + 187.5, + 188.4375, + 188.4375, + 200.1562, + 200.1562, + 201.0937, + 201.0937, + 202.5, + 202.5, + 203.4375, + 203.9062, + 207.1875, + 207.6562, + 208.125, + 209.0625, + 209.5312, + 210.9375, + 211.4062, + 211.875, + 212.3437, + 213.2812, + 213.75, + 219.375, + 219.8437, + 221.7187, + 222.1875, + 223.125, + 223.5937, + 225.4687, + 225.9375, + 226.875, + 227.3437, + 227.8125, + 228.2812, + 228.75, + 229.2187, + 229.6875, + 230.1562, + 230.625, + 231.0937, + 231.5625, + 233.4375, + 233.9062, + 234.8437, + 235.3125, + 235.7812, + 235.7812, + 236.25, + 236.25, + 235.3125, + 235.3125, + 235.7812, + 236.25, + 236.7187, + 237.1875, + 237.1875, + 237.6562, + 237.6562, + 237.1875, + 237.1875, + 236.7187, + 236.7187, + 235.3125, + 234.8437, + 231.5625, + 231.0937, + 229.2187, + 228.75, + 227.8125, + 227.3437, + 225.9375, + 225.4687, + 225.0, + 224.5312, + 224.0625, + 223.5937, + 223.125, + 222.6562, + 221.25, + 220.7812, + 218.9062, + 218.9062, + 218.4375, + 218.4375, + 217.9687, + 217.9687, + 218.4375, + 218.4375, + 218.9062, + 218.9062, + 219.375, + 
219.375, + 222.1875, + 222.6562, + 225.0, + 225.4687, + 226.875, + 227.3437, + 227.8125, + 228.2812, + 228.75, + 229.6875, + 230.1562, + 234.8437, + 235.3125, + 235.7812, + 236.25, + 236.7187, + 236.7187, + 237.1875, + 237.1875, + 236.7187, + 236.7187, + 236.25, + 236.25, + 235.7812, + 235.3125, + 234.8437, + 232.9687, + 232.5, + 228.75, + 228.2812, + 225.9375, + 225.4687, + 221.25, + 220.7812, + 219.375, + 218.9062, + 217.9687, + 217.5, + 216.0937, + 215.625, + 215.1562, + 214.6875, + 214.6875, + 202.5, + 202.5, + 202.0312, + 201.0937, + 200.625, + 199.6875, + 199.2187, + 198.2812, + 197.8125, + 197.3437, + 196.875, + 196.4062, + 195.9375, + 195.4687, + 194.5312, + 194.0625, + 193.5937, + 193.125, + 192.1875, + 191.7187, + 191.25, + 190.3125, + 189.8437, + 189.375, + 188.9062, + 188.4375, + 187.5, + 187.0312, + 186.0937, + 186.0937, + 185.625, + 185.625, + 185.1562, + 185.1562, + 184.2187, + 184.2187, + 183.75, + 183.75, + 183.2812, + 183.2812, + 182.8125, + 182.8125, + 182.3437, + 182.3437, + 182.8125, + 182.8125, + 185.1562, + 185.625, + 186.5625, + 187.0312, + 188.4375, + 188.4375, + 188.9062, + 188.9062, + 188.4375, + 188.4375, + 187.9687, + 187.9687, + 187.5, + 187.5, + 187.0312, + 187.0312, + 186.5625, + 186.5625, + 186.0937, + 186.0937, + 185.625, + 185.625, + 185.1562, + 185.1562, + 183.75, + 183.2812, + 182.8125, + 179.0625, + 179.0625, + 180.4687, + 181.4062, + 181.4062, + 182.8125, + 182.8125, + 183.2812, + 183.2812, + 182.8125, + 182.8125, + 182.3437, + 179.0625, + ], + decimal=3, + ) + assert len(model_dict["segments"][1]) == 2 + assert_array_almost_equal( + model_dict["segments"][1][0], + [ + 135.625, + 135.625, + 132.3438, + 132.3438, + 132.8125, + 132.8125, + 133.75, + 133.75, + 134.2188, + 134.2188, + 134.6875, + 134.6875, + 135.1562, + 135.1562, + 135.625, + 135.625, + 135.1562, + 135.1562, + 132.3438, + 131.875, + 129.5312, + 129.5312, + 128.5938, + 128.5938, + 128.125, + 128.125, + 127.6562, + 127.6562, + 127.1875, + 127.1875, + 126.7188, + 126.7188, + 126.25, + 126.25, + 125.7812, + 125.7812, + 125.3125, + 125.3125, + 124.8438, + 124.8438, + 124.375, + 124.375, + 123.9062, + 123.9062, + 123.4375, + 123.4375, + 122.9688, + 122.9688, + 122.5, + 122.5, + 122.0312, + 122.0312, + 121.5625, + 121.5625, + 121.0938, + 121.0938, + 120.625, + 120.625, + 120.1562, + 120.1562, + 119.2187, + 119.2187, + 118.2812, + 118.2812, + 117.3437, + 117.3437, + 116.875, + 116.875, + 116.4062, + 116.4062, + 115.4687, + 115.4687, + 115.0, + 115.0, + 114.5312, + 114.5312, + 114.0625, + 114.0625, + 113.5937, + 113.5937, + 113.125, + 113.125, + 110.7812, + 110.3125, + 109.8437, + 108.9062, + 108.4375, + 107.9687, + 107.5, + 107.0312, + 106.5625, + 106.0937, + 105.625, + 105.1562, + 104.6875, + 104.2187, + 102.8125, + 100.9375, + 100.9375, + 103.2812, + 103.75, + 104.2187, + 105.1562, + 105.625, + 106.5625, + 107.5, + 107.9687, + 108.9062, + 109.375, + 111.25, + 111.7187, + 112.1875, + 112.6562, + 113.125, + 114.0625, + 114.5312, + 115.0, + 115.4687, + 115.9375, + 116.4062, + 117.3437, + 119.2187, + 119.6875, + 120.1562, + 119.6875, + 119.6875, + 120.625, + 120.625, + 121.5625, + 122.0312, + 123.4375, + 124.375, + 124.8438, + 125.3125, + 125.3125, + 126.25, + 126.25, + 127.6562, + 128.125, + 128.125, + 129.0625, + 130.4688, + 130.4688, + 130.0, + 130.0, + 129.5312, + 129.5312, + 129.0625, + 129.0625, + 128.5938, + 128.5938, + 129.0625, + 129.0625, + 130.0, + 130.0, + 130.4688, + 130.4688, + 132.8125, + 132.8125, + 133.2812, + 133.2812, + 133.75, + 133.75, + 134.2188, + 134.2188, + 134.6875, + 
134.6875, + 135.1562, + 135.1562, + 135.625, + 135.625, + 136.0938, + 136.0938, + 136.5625, + 136.5625, + 137.0312, + 137.0312, + 137.5, + 137.5, + 137.9688, + 137.9688, + 138.4375, + 138.4375, + 138.9062, + 138.9062, + 139.375, + 139.375, + 139.8438, + 139.8438, + 140.3125, + 140.3125, + 139.8438, + 139.8438, + 139.375, + 139.375, + 138.9062, + 138.9062, + 138.4375, + 138.4375, + 137.9688, + 137.9688, + 137.5, + 137.5, + 137.0312, + 137.0312, + 136.5625, + 136.5625, + 136.0938, + 136.0938, + 135.1562, + 135.1562, + 133.75, + 132.8125, + 132.3438, + 129.0625, + 128.5938, + 128.125, + 126.7188, + 126.7188, + 126.25, + 126.25, + 126.7188, + 126.7188, + 127.6562, + 127.6562, + 129.0625, + 130.4688, + 130.9375, + 131.875, + 132.3438, + 133.2812, + 133.75, + 137.5, + 137.9688, + 144.0625, + 144.0625, + 144.5312, + 144.5312, + 145.0, + 145.0, + 144.5312, + 144.5312, + 143.5938, + 143.5938, + 143.125, + 143.125, + 143.5938, + 143.5938, + 144.0625, + 144.0625, + 144.5312, + 144.5312, + 145.0, + 145.0, + 145.4688, + 145.4688, + 145.9375, + 145.9375, + 146.4062, + 146.4062, + 146.875, + 146.875, + 147.3438, + 147.3438, + 147.8125, + 147.8125, + 148.2812, + 148.2812, + 148.75, + 148.75, + 149.2188, + 149.2188, + 149.6875, + 149.6875, + 150.1562, + 150.1562, + 150.625, + 150.625, + 151.0938, + 151.0938, + 151.5625, + 151.5625, + 152.9688, + 158.5938, + 159.0625, + 159.5312, + 161.4062, + 161.4062, + 161.875, + 161.875, + 161.4062, + 161.4062, + 160.9375, + 160.9375, + 161.4062, + 161.4062, + 162.8125, + 165.1562, + 165.625, + 166.0938, + 168.4375, + 168.4375, + 168.9062, + 168.9062, + 169.375, + 169.375, + 169.8438, + 169.8438, + 170.3125, + 170.3125, + 170.7812, + 170.7812, + 171.25, + 171.25, + 171.7188, + 171.7188, + 172.1875, + 172.1875, + 172.6562, + 172.6562, + 173.125, + 173.125, + 174.0625, + 174.0625, + 174.5312, + 174.5312, + 173.5938, + 172.6562, + 172.1875, + 163.75, + 163.2812, + 162.3438, + 161.4062, + 160.9375, + 160.4688, + 160.0, + 159.5312, + 159.0625, + 157.6562, + 157.1875, + 156.7188, + 156.25, + 155.7812, + 155.7812, + 154.375, + 154.8438, + 154.8438, + 155.3125, + 155.3125, + 156.25, + 156.25, + 156.7188, + 156.7188, + 157.1875, + 157.1875, + 157.6562, + 157.6562, + 158.125, + 158.125, + 158.5938, + 158.5938, + 159.5312, + 159.5312, + 160.0, + 160.0, + 161.4062, + 165.625, + 166.0938, + 167.5, + 167.9688, + 168.9062, + 169.375, + 169.8438, + 170.7812, + 171.25, + 171.7188, + 171.7188, + 172.1875, + 172.1875, + 173.125, + 173.125, + 173.5938, + 173.5938, + 174.0625, + 174.0625, + 174.5312, + 174.5312, + 175.0, + 175.0, + 176.4062, + 176.4062, + 177.8125, + 177.8125, + 180.1562, + 180.1562, + 181.0938, + 181.0938, + 181.5625, + 181.5625, + 182.5, + 182.5, + 182.9688, + 182.9688, + 183.4375, + 184.8438, + 184.8438, + 183.4375, + 182.9688, + 182.9688, + 182.5, + 182.5, + 182.0312, + 182.0312, + 181.5625, + 181.5625, + 181.0938, + 181.0938, + 180.625, + 180.625, + 180.1562, + 180.1562, + 179.6875, + 179.6875, + 179.2188, + 179.2188, + 178.75, + 178.75, + 178.2812, + 178.2812, + 177.3438, + 177.3438, + 176.875, + 176.875, + 176.4062, + 176.4062, + 175.0, + 175.0, + 174.5312, + 174.5312, + 173.125, + 173.125, + 171.25, + 171.25, + 170.3125, + 170.3125, + 168.9062, + 168.9062, + 167.5, + 167.0312, + 166.0938, + 165.625, + 165.1562, + 164.6875, + 164.2188, + 163.75, + 162.3438, + 161.875, + 161.4062, + 160.9375, + 160.4688, + 160.0, + 159.5312, + 159.0625, + 158.5938, + 158.125, + 157.6562, + 157.1875, + 155.7812, + 155.3125, + 154.8438, + 154.375, + 153.4375, + 153.4375, + 152.9688, + 
152.9688, + 153.4375, + 153.4375, + 153.9062, + 153.9062, + 154.375, + 154.375, + 155.3125, + 155.7812, + 156.7188, + 156.25, + 156.25, + 155.3125, + 154.8438, + 154.375, + 153.4375, + 152.9688, + 152.5, + 152.0312, + 151.5625, + 149.6875, + 149.6875, + ], + decimal=3, + ) + assert_array_almost_equal( + model_dict["segments"][1][1], + [ + 83.4375, + 84.8437, + 88.125, + 89.5312, + 90.0, + 90.4687, + 91.4062, + 91.875, + 92.3437, + 92.8125, + 93.2812, + 95.1562, + 95.625, + 100.3125, + 100.7812, + 105.0, + 105.4687, + 105.9375, + 108.75, + 108.75, + 111.0937, + 111.5625, + 112.5, + 112.9687, + 113.4375, + 113.9062, + 114.375, + 114.8437, + 115.3125, + 116.7187, + 117.1875, + 118.5937, + 119.0625, + 120.0, + 120.4687, + 121.875, + 122.3437, + 123.75, + 124.2187, + 125.625, + 126.0937, + 127.0312, + 127.5, + 127.9687, + 128.4375, + 128.9062, + 129.375, + 129.8438, + 130.3125, + 130.7812, + 131.25, + 132.1875, + 132.6562, + 133.125, + 133.5938, + 134.0625, + 134.5312, + 135.0, + 135.4688, + 135.9375, + 136.875, + 137.3438, + 138.2812, + 138.75, + 139.6875, + 140.1562, + 140.625, + 141.0938, + 141.5625, + 142.0312, + 142.9688, + 143.4375, + 143.9062, + 144.375, + 144.8438, + 145.3125, + 145.7812, + 146.25, + 146.7187, + 148.125, + 148.5937, + 149.0625, + 151.4062, + 151.4062, + 151.875, + 151.875, + 152.3437, + 152.3437, + 152.8125, + 152.8125, + 153.2812, + 153.2812, + 153.75, + 153.75, + 154.2187, + 154.2187, + 155.625, + 155.625, + 159.8437, + 159.8437, + 159.375, + 159.375, + 158.4375, + 158.4375, + 157.5, + 157.5, + 157.0312, + 157.0312, + 156.5625, + 156.5625, + 156.0937, + 156.0937, + 155.625, + 155.625, + 154.6875, + 154.6875, + 154.2187, + 154.2187, + 153.75, + 153.75, + 152.8125, + 152.8125, + 152.3437, + 152.3437, + 151.875, + 150.4687, + 149.5312, + 149.0625, + 148.125, + 148.125, + 146.7187, + 146.7187, + 146.25, + 146.25, + 145.7812, + 144.8438, + 144.375, + 142.9688, + 142.9688, + 142.5, + 141.5625, + 142.9688, + 143.4375, + 143.9062, + 147.1875, + 147.6562, + 148.5937, + 149.0625, + 150.9375, + 151.4062, + 155.625, + 156.0937, + 157.9687, + 158.9062, + 159.375, + 159.8437, + 160.3125, + 162.6562, + 163.125, + 163.5937, + 165.9375, + 166.4062, + 172.5, + 172.9687, + 175.7812, + 176.25, + 180.4687, + 180.9375, + 183.2812, + 183.75, + 185.625, + 186.0937, + 187.9687, + 188.4375, + 189.8437, + 190.3125, + 192.6562, + 193.125, + 195.0, + 195.4687, + 196.875, + 197.3437, + 197.8125, + 198.2812, + 198.75, + 199.2187, + 200.1562, + 200.625, + 201.0937, + 201.5625, + 202.9687, + 203.4375, + 204.8437, + 205.3125, + 207.1875, + 207.6562, + 211.875, + 212.3437, + 214.6875, + 215.1562, + 217.0312, + 217.5, + 219.375, + 219.8437, + 221.25, + 221.7187, + 222.1875, + 222.6562, + 223.125, + 224.0625, + 224.5312, + 225.9375, + 225.9375, + 226.4062, + 226.4062, + 226.875, + 226.875, + 228.2812, + 228.75, + 229.2187, + 231.5625, + 232.0312, + 232.9687, + 233.9062, + 234.375, + 235.7812, + 235.7812, + 236.25, + 236.25, + 236.7187, + 236.7187, + 237.1875, + 237.1875, + 237.6562, + 237.6562, + 237.1875, + 236.7187, + 235.7812, + 235.3125, + 234.8437, + 234.375, + 233.9062, + 232.9687, + 232.5, + 232.0312, + 228.2812, + 227.8125, + 226.875, + 226.4062, + 225.4687, + 225.0, + 224.5312, + 224.0625, + 223.125, + 222.6562, + 221.7187, + 221.25, + 220.7812, + 220.3125, + 218.9062, + 218.4375, + 217.5, + 217.0312, + 216.5625, + 216.0937, + 215.625, + 215.1562, + 214.6875, + 214.2187, + 213.2812, + 212.8125, + 211.4062, + 210.9375, + 209.5312, + 209.0625, + 207.6562, + 207.1875, + 206.25, + 205.7812, + 
203.4375, + 202.9687, + 202.0312, + 200.625, + 200.625, + 201.0937, + 201.0937, + 202.9687, + 204.375, + 204.8437, + 206.7187, + 207.1875, + 208.5937, + 209.0625, + 213.75, + 214.2187, + 214.6875, + 216.0937, + 216.0937, + 215.625, + 215.625, + 213.2812, + 212.8125, + 212.3437, + 211.875, + 211.4062, + 210.4687, + 210.0, + 209.5312, + 209.0625, + 208.125, + 207.6562, + 206.7187, + 206.25, + 204.8437, + 204.375, + 203.9062, + 203.4375, + 202.9687, + 202.5, + 202.0312, + 201.5625, + 200.625, + 199.6875, + 199.2187, + 198.75, + 195.9375, + 195.0, + 195.0, + 194.5312, + 194.5312, + 194.0625, + 194.0625, + 193.125, + 193.125, + 192.6562, + 192.6562, + 192.1875, + 192.1875, + 190.7812, + 190.7812, + 190.3125, + 190.3125, + 189.8437, + 189.375, + 187.9687, + 187.5, + 186.5625, + 186.0937, + 185.625, + 184.6875, + 184.2187, + 183.75, + 183.2812, + 182.8125, + 181.875, + 181.4062, + 180.0, + 179.5312, + 178.125, + 177.6562, + 176.7187, + 175.7812, + 175.3125, + 174.8437, + 173.9062, + 172.5, + 172.5, + 172.0312, + 172.0312, + 171.5625, + 171.5625, + 171.0937, + 171.0937, + 170.1562, + 170.1562, + 169.6875, + 169.2187, + 168.75, + 168.2812, + 167.3437, + 164.0625, + 163.5937, + 163.125, + 162.6562, + 162.1875, + 161.7187, + 161.25, + 160.7812, + 160.3125, + 158.9062, + 158.4375, + 157.0312, + 156.5625, + 154.2187, + 153.75, + 152.8125, + 152.3437, + 151.875, + 151.4062, + 150.4687, + 149.0625, + 148.5937, + 148.125, + 147.6562, + 147.6562, + 142.5, + 142.5, + 142.0312, + 140.1562, + 139.6875, + 135.9375, + 135.4688, + 133.125, + 132.6562, + 132.1875, + 131.7188, + 131.25, + 130.7812, + 129.8438, + 129.375, + 128.9062, + 128.4375, + 127.9687, + 127.5, + 127.0312, + 126.5625, + 126.0937, + 125.625, + 125.1562, + 124.2187, + 123.75, + 123.2812, + 122.8125, + 122.3437, + 121.875, + 120.4687, + 120.0, + 119.5312, + 119.0625, + 117.6562, + 117.1875, + 115.3125, + 114.8437, + 113.9062, + 113.4375, + 112.0312, + 111.5625, + 110.1562, + 110.1562, + 109.2187, + 109.2187, + 108.75, + 108.75, + 108.2812, + 108.2812, + 106.875, + 106.875, + 106.4062, + 106.4062, + 105.9375, + 105.9375, + 105.4687, + 105.4687, + 105.0, + 105.0, + 104.5312, + 104.5312, + 103.125, + 103.125, + 102.6562, + 102.6562, + 101.7187, + 101.25, + 100.7812, + 97.5, + 97.0312, + 95.625, + 95.1562, + 93.75, + 93.2812, + 92.3437, + 91.4062, + 91.4062, + 90.4687, + 90.0, + 89.0625, + 88.125, + 88.125, + 87.6562, + 87.6562, + 87.1875, + 87.1875, + 86.7187, + 86.7187, + 84.8437, + 83.4375, + ], + decimal=3, + ) + assert model_dict["orig_shape"] == [300, 200] + + +def test_yolo_pose_from_results_empty(running_img): + result = Results( + orig_img=running_img, + path="running.jpeg", + names={}, + obb=torch.empty((0, 7)), + ) + + model = YoloPose.from_yolo_results([result]) + assert model.model_dump() == { + "cls": [], + "name": [], + "confidence": [], + "box": [], + "keypoints": [], + "orig_shape": [], + } + + +def test_yolo_pose_from_results(running_img, running_img_masks): + result = Results( + orig_img=running_img, + path="running.jpeg", + names={0: "person"}, + boxes=torch.tensor([[102.0, 84.0, 183.0, 238.0, 0.9078, 0.0]]), + keypoints=torch.tensor( + [ + [ + [138.4593, 103.4513, 0.991], + [141.6461, 100.4834, 0.9863], + [136.6068, 100.2756, 0.8826], + [150.1754, 99.0366, 0.9691], + [0.0, 0.0, 0.2733], + [163.061, 114.3064, 0.9993], + [134.8462, 113.5232, 0.9977], + [179.6755, 136.7466, 0.9974], + [127.26, 130.8018, 0.9806], + [172.8256, 158.7771, 0.9962], + [118.0251, 146.8072, 0.9786], + [154.7233, 160.504, 0.9999], + [139.7511, 158.7988, 
0.9997], + [143.9287, 193.5594, 0.9995], + [145.329, 190.7517, 0.9991], + [137.9563, 223.4, 0.9941], + [156.7822, 218.5813, 0.9921], + ] + ], + ), + ) + + model = YoloPose.from_yolo_results([result]) + + model_dict = model.model_dump() + assert set(model_dict.keys()) == { + "cls", + "name", + "confidence", + "box", + "keypoints", + "orig_shape", + } + assert model_dict["cls"] == [0] + assert model_dict["name"] == ["person"] + assert_array_almost_equal(model_dict["confidence"], [0.9078], decimal=3) + assert len(model_dict["box"]) == 1 + assert_array_almost_equal( + model_dict["box"][0], + [102.0, 84.0, 183.0, 238.0], + decimal=3, + ) + assert len(model_dict["keypoints"]) == 1 + assert len(model_dict["keypoints"][0]) == 3 + assert_array_almost_equal( + model_dict["keypoints"][0][0], + [ + 138.4593, + 141.6461, + 136.6068, + 150.1754, + 0.0, + 163.061, + 134.8462, + 179.6755, + 127.26, + 172.8256, + 118.0251, + 154.7233, + 139.7511, + 143.9287, + 145.329, + 137.9563, + 156.7822, + ], + decimal=3, + ) + assert_array_almost_equal( + model_dict["keypoints"][0][1], + [ + 103.4513, + 100.4834, + 100.2756, + 99.0366, + 0.0, + 114.3064, + 113.5232, + 136.7466, + 130.8018, + 158.7771, + 146.8072, + 160.504, + 158.7988, + 193.5594, + 190.7517, + 223.4, + 218.5813, + ], + decimal=3, + ) + assert_array_almost_equal( + model_dict["keypoints"][0][2], + [ + 0.991, + 0.9863, + 0.8826, + 0.9691, + 0.2733, + 0.9994, + 0.9977, + 0.9974, + 0.9806, + 0.9962, + 0.9786, + 0.9999, + 0.9997, + 0.9995, + 0.9991, + 0.9941, + 0.9921, + ], + decimal=3, + ) + assert model_dict["orig_shape"] == [300, 200] + + +def test_yolo_cls_from_results_empty(ships_img): + result = Results( + orig_img=ships_img, + path="ships.jpeg", + names={}, + boxes=torch.empty((0, 6)), + ) + + model = YoloCls.from_yolo_results([result]) + assert model.model_dump() == { + "cls": [], + "name": [], + "confidence": [], + } + + +def test_yolo_cls_from_results(ships_img): + result = Results( + orig_img=ships_img, + path="ships.jpeg", + names={ + 510: "container_ship", + 525: "dam", + 536: "dock", + 769: "rule", + 913: "wreck", + }, + boxes=torch.empty((0, 6)), + probs=torch.tensor( + [ + 1.0339e-06, + 5.4460e-06, + 3.1851e-05, + 6.8369e-06, + 3.4396e-06, + 2.0468e-06, + 5.0569e-06, + 2.8160e-07, + 2.7602e-08, + 8.5561e-08, + 3.1910e-07, + 6.6235e-08, + 2.1857e-06, + 5.6748e-07, + 1.8549e-05, + 4.8287e-07, + 6.9187e-07, + 3.0479e-06, + 2.2228e-06, + 3.2149e-07, + 1.1456e-07, + 3.2199e-08, + 8.1776e-08, + 1.7628e-07, + 2.9032e-08, + 8.0677e-08, + 5.6300e-07, + 3.8564e-06, + 7.2937e-07, + 1.4296e-07, + 4.9668e-06, + 2.5541e-07, + 5.8202e-07, + 1.6449e-06, + 9.8739e-06, + 5.4744e-06, + 9.2096e-05, + 6.3834e-06, + 4.9127e-07, + 7.6110e-07, + 5.0316e-07, + 1.6698e-06, + 6.4484e-07, + 2.5988e-07, + 2.9079e-06, + 2.1705e-07, + 1.1246e-06, + 2.1299e-06, + 2.2538e-07, + 5.1255e-06, + 3.2948e-06, + 8.7681e-07, + 1.0686e-07, + 3.4746e-06, + 4.4607e-07, + 5.9090e-07, + 1.7809e-06, + 8.5746e-06, + 4.8708e-06, + 1.4765e-06, + 1.0113e-05, + 3.5899e-05, + 4.4050e-06, + 2.1363e-07, + 1.0856e-05, + 3.0532e-05, + 2.0509e-07, + 6.0136e-07, + 3.5719e-07, + 2.5005e-06, + 1.1699e-07, + 9.6390e-07, + 3.4403e-07, + 4.6927e-07, + 9.4284e-08, + 3.6083e-06, + 1.6057e-05, + 2.7057e-06, + 2.7237e-07, + 7.2639e-06, + 7.6985e-08, + 1.4997e-09, + 2.4123e-08, + 1.6324e-08, + 6.5121e-08, + 6.0461e-07, + 6.4036e-08, + 6.9928e-06, + 5.0647e-06, + 2.6301e-07, + 8.6094e-07, + 4.0644e-07, + 5.9800e-07, + 5.4992e-07, + 1.7880e-07, + 6.4875e-08, + 5.9700e-06, + 1.7628e-07, + 8.0264e-08, + 
3.0626e-08, + 5.9519e-07, + 1.4838e-05, + 1.1413e-07, + 3.9644e-06, + 4.2076e-08, + 4.9546e-07, + 4.6340e-07, + 3.2028e-07, + 1.3236e-07, + 4.9423e-07, + 1.1064e-06, + 1.3928e-06, + 3.9063e-06, + 4.6774e-07, + 5.3713e-07, + 9.9779e-08, + 2.7190e-07, + 6.4139e-07, + 1.3905e-06, + 1.7951e-06, + 1.4705e-06, + 5.8198e-06, + 2.2695e-05, + 2.7975e-05, + 7.0015e-06, + 1.7165e-05, + 3.7862e-06, + 1.8833e-06, + 6.6447e-07, + 3.4100e-06, + 5.6826e-07, + 5.3877e-07, + 3.8513e-06, + 3.7360e-07, + 1.4986e-07, + 2.8862e-08, + 1.6428e-05, + 1.3764e-07, + 2.5706e-09, + 2.6865e-07, + 1.6032e-07, + 7.0011e-07, + 6.5186e-08, + 1.3056e-07, + 5.5475e-05, + 2.6903e-08, + 9.1767e-08, + 1.8278e-05, + 2.2962e-07, + 2.4995e-06, + 2.7326e-05, + 3.5784e-07, + 1.1992e-08, + 5.2074e-08, + 3.2423e-07, + 7.5187e-07, + 2.4336e-07, + 8.3929e-08, + 6.7795e-08, + 2.4001e-08, + 2.4156e-08, + 2.1193e-06, + 2.3218e-07, + 3.7463e-07, + 8.4663e-08, + 8.2042e-07, + 4.4080e-07, + 2.9450e-08, + 1.3903e-07, + 2.8527e-07, + 1.4230e-06, + 7.9110e-07, + 4.0477e-07, + 2.2409e-07, + 6.7904e-07, + 2.7826e-08, + 7.9331e-08, + 1.4374e-07, + 3.1142e-07, + 1.4808e-07, + 1.1557e-07, + 3.2558e-07, + 2.3926e-06, + 2.4865e-07, + 6.0354e-07, + 6.7747e-06, + 2.9550e-07, + 2.5099e-07, + 4.4294e-07, + 4.5995e-06, + 3.3352e-07, + 5.4043e-07, + 3.4370e-07, + 1.0096e-07, + 4.0075e-08, + 1.2364e-06, + 2.4370e-07, + 1.6651e-07, + 1.1090e-06, + 1.8287e-07, + 9.3031e-08, + 2.4487e-07, + 1.9278e-07, + 2.2166e-07, + 7.6480e-07, + 4.1652e-08, + 1.0430e-07, + 3.2065e-07, + 1.3711e-06, + 3.6432e-07, + 1.6270e-07, + 2.3222e-07, + 8.7038e-08, + 4.4004e-08, + 1.9766e-08, + 1.6741e-06, + 3.4994e-07, + 7.9892e-08, + 1.7680e-08, + 4.9258e-08, + 4.1861e-07, + 3.3004e-08, + 7.7754e-07, + 9.3901e-06, + 2.5835e-07, + 4.7466e-07, + 8.4931e-08, + 7.1556e-08, + 1.5106e-07, + 1.7860e-07, + 1.8889e-06, + 5.7624e-07, + 2.5433e-07, + 1.3312e-07, + 4.4349e-07, + 2.9750e-07, + 9.8715e-08, + 1.4181e-07, + 1.1274e-07, + 2.9827e-08, + 5.2628e-07, + 1.8990e-07, + 7.0057e-07, + 1.4258e-07, + 5.8943e-08, + 1.6532e-06, + 2.3241e-07, + 6.9180e-08, + 5.2354e-07, + 1.6686e-08, + 1.1729e-07, + 1.5753e-06, + 2.1938e-07, + 1.2920e-08, + 3.1428e-06, + 4.8467e-08, + 2.7383e-06, + 1.6118e-06, + 1.3013e-07, + 1.5359e-07, + 2.9457e-07, + 2.3946e-07, + 2.6019e-08, + 7.0624e-08, + 4.4848e-07, + 8.9196e-07, + 6.3690e-09, + 4.9450e-08, + 1.0656e-08, + 1.3190e-07, + 8.2498e-08, + 4.3761e-08, + 4.4100e-08, + 1.3092e-07, + 1.6525e-08, + 9.9303e-08, + 1.0114e-07, + 3.1145e-07, + 6.4332e-08, + 4.1215e-07, + 9.0145e-08, + 7.1847e-07, + 8.4739e-07, + 5.4657e-07, + 2.6544e-08, + 1.3889e-07, + 2.3277e-07, + 1.1284e-07, + 6.7201e-08, + 2.4814e-07, + 6.1635e-09, + 3.3743e-07, + 4.9829e-07, + 4.1202e-08, + 1.9380e-07, + 1.2975e-07, + 1.3976e-06, + 2.3684e-07, + 1.8620e-06, + 3.7191e-07, + 3.4078e-06, + 6.8581e-07, + 4.4275e-06, + 1.0038e-06, + 2.5014e-06, + 3.9339e-07, + 2.6467e-06, + 3.5794e-07, + 6.6177e-07, + 2.2764e-07, + 1.3387e-05, + 4.8828e-05, + 7.7814e-07, + 4.0185e-06, + 4.1770e-06, + 1.0775e-06, + 1.0317e-05, + 1.9309e-04, + 1.9010e-07, + 3.6020e-06, + 1.8782e-05, + 1.2400e-06, + 2.3987e-07, + 1.2163e-07, + 1.6867e-07, + 3.0382e-07, + 5.4290e-06, + 1.2729e-07, + 1.2550e-07, + 8.8680e-06, + 8.6001e-08, + 8.9568e-08, + 1.1172e-06, + 2.9832e-06, + 8.7542e-08, + 2.4282e-07, + 5.8511e-07, + 6.1146e-05, + 5.2521e-06, + 6.3780e-08, + 1.7729e-08, + 1.2387e-05, + 2.4018e-06, + 1.9504e-07, + 9.9014e-07, + 7.2535e-06, + 6.7222e-06, + 3.7424e-07, + 1.7594e-06, + 2.5205e-07, + 2.9022e-08, + 2.7447e-09, + 2.8436e-08, 
+ 4.0035e-09, + 1.3585e-05, + 3.6005e-07, + 6.5193e-06, + 4.1283e-05, + 2.2135e-06, + 3.3801e-07, + 3.6415e-06, + 1.3274e-06, + 7.0640e-07, + 2.4540e-06, + 4.8871e-08, + 2.5688e-07, + 7.3616e-08, + 2.2044e-07, + 3.4278e-09, + 4.2626e-08, + 8.9066e-08, + 1.8692e-07, + 4.4693e-07, + 1.1432e-05, + 5.4961e-06, + 5.4354e-08, + 1.2610e-07, + 8.9418e-06, + 1.2350e-07, + 9.4670e-07, + 3.2370e-07, + 9.3541e-08, + 1.5692e-07, + 8.4626e-08, + 2.6787e-08, + 5.4514e-06, + 1.5302e-06, + 8.1808e-08, + 7.7230e-08, + 2.1043e-05, + 1.3428e-05, + 4.0298e-06, + 1.2055e-06, + 5.1141e-07, + 5.1419e-05, + 6.3061e-05, + 5.5199e-08, + 1.4497e-06, + 7.5038e-05, + 1.2528e-06, + 1.0297e-07, + 5.6676e-05, + 4.9104e-05, + 4.7398e-04, + 2.5675e-03, + 3.8575e-04, + 6.5726e-06, + 3.3960e-04, + 2.9254e-03, + 7.4354e-05, + 2.0438e-04, + 3.5945e-07, + 2.4158e-03, + 9.9747e-06, + 2.4812e-06, + 4.4772e-05, + 1.9293e-05, + 1.4004e-04, + 1.0914e-04, + 3.0069e-04, + 1.4171e-05, + 1.6060e-05, + 4.1941e-05, + 7.6321e-06, + 7.2933e-06, + 4.9228e-05, + 1.2039e-06, + 1.6067e-04, + 1.7698e-04, + 3.3701e-06, + 2.4253e-06, + 8.1018e-07, + 9.6496e-06, + 4.6068e-06, + 5.3444e-07, + 6.4156e-06, + 1.6763e-04, + 4.7461e-05, + 2.5873e-05, + 1.7743e-06, + 7.0098e-04, + 3.3712e-05, + 3.5956e-04, + 7.6688e-06, + 7.4771e-06, + 8.8832e-06, + 2.1313e-05, + 9.9359e-05, + 1.2783e-05, + 1.1420e-02, + 9.5757e-05, + 3.3358e-06, + 8.1996e-08, + 1.4200e-03, + 5.6512e-04, + 3.5606e-05, + 7.2036e-05, + 4.2829e-06, + 4.4027e-06, + 2.3029e-06, + 3.6532e-04, + 4.2956e-06, + 6.3435e-05, + 1.5382e-04, + 6.8967e-04, + 6.2068e-07, + 7.8262e-05, + 1.6997e-05, + 8.9625e-03, + 3.2502e-06, + 5.3286e-07, + 2.3401e-04, + 4.0736e-03, + 1.9381e-05, + 5.4534e-08, + 1.1060e-05, + 9.4840e-06, + 4.0535e-03, + 5.4135e-04, + 3.6542e-05, + 3.2214e-05, + 2.0456e-03, + 1.4758e-04, + 1.0961e-03, + 2.0349e-04, + 1.8152e-05, + 2.3026e-06, + 3.9448e-05, + 1.0548e-04, + 7.2645e-05, + 4.1688e-06, + 2.2549e-03, + 7.5842e-04, + 8.9876e-08, + 2.1388e-05, + 7.6054e-07, + 1.6883e-06, + 1.2259e-03, + 4.0999e-03, + 1.0383e-05, + 2.1659e-06, + 4.8286e-07, + 2.7598e-05, + 8.7806e-06, + 5.7992e-06, + 7.6040e-07, + 9.7830e-06, + 9.4357e-05, + 3.7646e-05, + 2.1488e-04, + 5.4472e-01, + 2.5666e-05, + 4.5501e-05, + 4.3844e-06, + 1.3204e-05, + 2.0144e-05, + 1.7083e-06, + 1.2525e-02, + 3.1263e-06, + 6.4333e-03, + 3.9654e-06, + 6.7298e-06, + 1.2068e-04, + 7.2805e-05, + 9.3596e-07, + 1.8692e-02, + 1.6789e-05, + 2.9064e-05, + 5.5349e-06, + 3.2655e-06, + 2.0115e-05, + 4.2106e-05, + 1.4059e-06, + 8.3300e-08, + 6.5261e-06, + 9.3935e-06, + 4.7966e-02, + 3.7665e-05, + 3.6581e-04, + 3.9179e-05, + 2.8362e-03, + 1.0948e-04, + 1.6439e-04, + 3.1614e-06, + 6.1657e-07, + 2.1378e-04, + 8.1937e-05, + 4.2556e-04, + 9.9725e-06, + 3.0816e-05, + 9.7204e-08, + 4.6649e-06, + 3.9624e-07, + 1.3634e-04, + 2.0795e-04, + 7.8442e-05, + 1.8761e-06, + 1.2198e-03, + 5.7787e-05, + 3.0881e-05, + 9.5584e-07, + 6.3199e-04, + 1.0105e-05, + 1.0668e-04, + 2.3261e-08, + 8.8458e-04, + 3.2304e-06, + 1.8843e-06, + 1.9418e-07, + 2.1168e-03, + 5.6994e-06, + 3.2264e-04, + 2.4422e-06, + 5.4764e-05, + 3.9458e-06, + 2.5422e-06, + 3.2594e-03, + 1.4089e-06, + 2.7363e-07, + 1.6395e-05, + 6.6269e-06, + 4.6996e-05, + 2.8267e-04, + 2.1016e-04, + 4.5696e-06, + 1.5657e-04, + 3.8074e-05, + 2.3322e-03, + 5.1610e-04, + 3.2008e-07, + 4.5037e-05, + 3.0805e-06, + 3.8774e-04, + 9.2789e-04, + 1.1605e-06, + 3.7155e-03, + 2.0476e-04, + 9.9291e-06, + 1.1460e-04, + 4.3294e-05, + 1.8424e-04, + 2.4899e-08, + 3.9260e-06, + 4.1977e-05, + 5.9541e-06, + 2.7284e-05, + 
3.9882e-05, + 5.0652e-08, + 3.6077e-05, + 8.9436e-04, + 1.9533e-06, + 1.5803e-04, + 1.6411e-04, + 6.2870e-05, + 4.6363e-07, + 9.4064e-06, + 8.4848e-06, + 1.8024e-06, + 1.3409e-05, + 2.1879e-06, + 1.2765e-05, + 2.9702e-04, + 4.4837e-06, + 1.9621e-05, + 3.4845e-04, + 1.4989e-02, + 1.7174e-03, + 1.4089e-05, + 1.0732e-02, + 1.1403e-05, + 1.3338e-06, + 7.1927e-06, + 1.9404e-04, + 2.4204e-05, + 1.5791e-02, + 1.1872e-04, + 9.3971e-06, + 1.0985e-04, + 1.6291e-06, + 2.3055e-06, + 5.9968e-07, + 2.1456e-05, + 4.5334e-06, + 2.0301e-06, + 1.2449e-04, + 1.8200e-07, + 1.6887e-05, + 1.7922e-05, + 2.6788e-03, + 1.0181e-06, + 3.9315e-06, + 4.3148e-07, + 1.0232e-05, + 4.7298e-05, + 1.2659e-03, + 3.2736e-07, + 2.4029e-04, + 8.4371e-04, + 1.5479e-06, + 2.0765e-07, + 1.7570e-04, + 2.9174e-05, + 2.8273e-04, + 4.7494e-04, + 6.9165e-05, + 1.9262e-05, + 3.4218e-06, + 1.0205e-06, + 1.7863e-04, + 7.8875e-07, + 9.6507e-06, + 3.7016e-05, + 3.9981e-05, + 1.3857e-04, + 7.5345e-03, + 5.5275e-04, + 5.9803e-06, + 7.2172e-06, + 1.5414e-06, + 4.0544e-06, + 4.0114e-06, + 1.8156e-05, + 1.1082e-04, + 1.2622e-05, + 1.2487e-05, + 3.8845e-06, + 2.9626e-04, + 4.8298e-06, + 1.9641e-04, + 1.7040e-08, + 3.5796e-05, + 3.6755e-06, + 1.4881e-04, + 1.5875e-03, + 3.8941e-03, + 1.3227e-03, + 9.3368e-03, + 3.3969e-07, + 1.9051e-03, + 8.8793e-06, + 1.6344e-06, + 2.1630e-04, + 8.9070e-07, + 1.8490e-05, + 1.8980e-04, + 3.0775e-04, + 2.0193e-05, + 1.4964e-04, + 1.0902e-06, + 4.6114e-04, + 7.0517e-04, + 1.4209e-05, + 7.3717e-06, + 3.7105e-06, + 1.0399e-05, + 7.0047e-07, + 3.2056e-06, + 4.4654e-04, + 8.8495e-04, + 2.1368e-05, + 4.2380e-04, + 2.6436e-06, + 5.2093e-05, + 2.2212e-06, + 1.2320e-04, + 2.0146e-06, + 1.7358e-03, + 2.6474e-03, + 6.7827e-05, + 9.6917e-07, + 8.5105e-06, + 3.6057e-06, + 7.0451e-07, + 2.2139e-03, + 2.0729e-04, + 4.4206e-07, + 4.0139e-04, + 1.2410e-03, + 3.3111e-06, + 1.2990e-06, + 1.8992e-03, + 6.8972e-05, + 3.7218e-05, + 3.4696e-04, + 4.9113e-04, + 9.4812e-06, + 2.1100e-06, + 1.7812e-05, + 1.6068e-05, + 1.1063e-04, + 1.0302e-05, + 3.4797e-04, + 4.2256e-05, + 1.7607e-04, + 3.4885e-04, + 9.8231e-05, + 1.8451e-05, + 6.1739e-05, + 6.9076e-04, + 7.3960e-06, + 4.4040e-05, + 4.8818e-05, + 1.9782e-05, + 1.3774e-05, + 1.2748e-05, + 7.7859e-07, + 2.0858e-06, + 1.0674e-03, + 1.0972e-06, + 2.1021e-02, + 5.5319e-05, + 2.5964e-05, + 9.3970e-05, + 3.7328e-06, + 1.6151e-05, + 4.9763e-07, + 1.1307e-04, + 1.4550e-04, + 6.8459e-05, + 1.0714e-04, + 3.6585e-04, + 2.1454e-03, + 9.9071e-05, + 3.6575e-05, + 1.5198e-03, + 4.3322e-06, + 1.3950e-06, + 3.3566e-05, + 5.6959e-05, + 2.8264e-07, + 2.6748e-04, + 2.1691e-03, + 6.2536e-05, + 7.4546e-07, + 1.8100e-06, + 2.8508e-03, + 7.1616e-06, + 2.0045e-05, + 3.6442e-04, + 3.6042e-06, + 2.6542e-05, + 2.1094e-04, + 8.7308e-05, + 2.3378e-04, + 2.5095e-06, + 2.7602e-06, + 1.5181e-05, + 7.9237e-04, + 2.6402e-05, + 2.5761e-07, + 1.1896e-05, + 2.2731e-05, + 2.0259e-03, + 8.2858e-05, + 1.7472e-03, + 1.0883e-07, + 2.0013e-07, + 1.5432e-04, + 2.9263e-05, + 7.7658e-05, + 1.3546e-03, + 2.0566e-03, + 2.1428e-05, + 3.5669e-05, + 2.1175e-07, + 3.7304e-07, + 3.0253e-05, + 2.4291e-06, + 2.8179e-06, + 3.6609e-04, + 8.1250e-04, + 2.3815e-06, + 1.3512e-04, + 5.9221e-03, + 7.9691e-07, + 3.2934e-05, + 6.5957e-06, + 1.9423e-06, + 2.9405e-05, + 7.0292e-05, + 1.4302e-04, + 1.1914e-06, + 2.6523e-06, + 2.6834e-06, + 3.6671e-04, + 1.2596e-03, + 3.8038e-07, + 3.6828e-04, + 7.7915e-04, + 1.4708e-07, + 7.2079e-06, + 1.0385e-04, + 2.7233e-06, + 4.9033e-05, + 6.0241e-06, + 1.5732e-04, + 2.0117e-04, + 5.1063e-07, + 2.8399e-04, + 7.9459e-06, 
+ 1.3338e-03, + 7.8745e-06, + 2.5891e-05, + 3.2194e-04, + 1.9111e-03, + 1.7667e-04, + 1.4872e-04, + 2.9312e-03, + 7.2199e-06, + 9.1690e-07, + 3.5945e-06, + 2.7855e-04, + 1.7535e-05, + 1.3780e-05, + 2.4442e-05, + 1.1394e-05, + 2.5379e-05, + 2.2238e-06, + 2.0952e-05, + 2.0685e-04, + 9.1137e-06, + 7.8569e-06, + 5.3383e-05, + 2.8387e-07, + 5.4037e-05, + 2.8938e-07, + 1.3174e-04, + 3.7273e-06, + 2.2058e-04, + 5.8939e-05, + 2.6207e-07, + 2.7314e-07, + 9.6123e-06, + 2.9254e-04, + 5.7498e-07, + 5.9684e-04, + 1.1627e-06, + 4.5866e-06, + 1.5758e-04, + 6.5516e-06, + 3.1522e-04, + 7.6669e-07, + 6.6419e-05, + 1.8415e-07, + 1.0275e-06, + 1.2826e-06, + 3.6305e-07, + 3.4141e-04, + 2.9395e-03, + 2.3985e-05, + 6.7220e-06, + 4.9197e-06, + 8.0192e-06, + 1.1667e-01, + 5.6675e-06, + 1.2752e-04, + 3.6448e-05, + 8.6483e-04, + 1.4560e-05, + 3.3398e-04, + 3.4628e-04, + 1.0387e-04, + 4.5040e-06, + 8.4435e-07, + 3.5686e-07, + 1.1549e-07, + 2.3261e-07, + 6.2614e-08, + 2.1089e-07, + 4.7371e-05, + 3.1679e-06, + 7.9653e-07, + 4.8878e-07, + 2.5289e-08, + 1.1155e-06, + 1.1945e-07, + 2.2908e-07, + 2.9379e-08, + 2.0469e-06, + 2.4788e-07, + 1.5314e-07, + 7.1754e-08, + 4.1029e-07, + 1.9101e-07, + 1.8963e-07, + 4.7289e-08, + 3.7885e-07, + 1.1361e-07, + 6.6860e-07, + 3.7893e-07, + 1.2665e-07, + 7.5658e-08, + 1.6936e-07, + 7.1698e-08, + 3.6984e-07, + 3.6376e-08, + 3.0783e-08, + 2.1762e-07, + 3.1531e-05, + 4.6537e-07, + 2.7843e-07, + 6.7744e-06, + 1.8720e-06, + 2.9693e-07, + 4.0405e-07, + 3.9948e-06, + 1.6449e-05, + 4.5527e-06, + 8.6000e-06, + 1.9351e-06, + 2.4189e-04, + 1.2237e-05, + 4.3519e-04, + 9.8503e-06, + 2.8016e-06, + 4.1595e-03, + 8.9333e-05, + 8.8940e-05, + 4.3075e-04, + 3.0794e-05, + 1.7490e-05, + 5.5095e-05, + 4.7048e-06, + 2.5128e-04, + 4.3988e-06, + 2.6698e-08, + 2.4547e-09, + 1.1608e-05, + 2.3761e-07, + 4.3923e-08, + 8.4034e-08, + 4.3723e-08, + 7.5340e-08, + 7.0168e-09, + 2.3958e-07, + 1.0182e-07, + 1.5610e-07, + 4.7116e-08, + 2.3649e-06, + 3.7093e-06, + ] + ), + ) + + model = YoloCls.from_yolo_results([result]) + + model_dict = model.model_dump() + assert set(model_dict.keys()) == {"cls", "name", "confidence"} + assert model_dict["cls"] == [510, 913, 536, 769, 525] + assert model_dict["name"] == ["container_ship", "wreck", "dock", "rule", "dam"] + assert_array_almost_equal( + model_dict["confidence"], + [0.5447, 0.1167, 0.048, 0.021, 0.0187], + decimal=3, + ) diff --git a/tests/unit/.DS_Store b/tests/unit/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..e0d2e115f4431929bc277eccea8e8f7165d1ce26 GIT binary patch literal 6148 zcmeHKOH0E*5Z>*>HWVQTg&r5Y7Ob|4#Y>3w4;aydN=-=6V45v`m_sS#tbfQ~;_q>0 zcLOa3k0N#kcE8zq>}Ed5J{V)%U4%!B*^IF!G(?U{iJ-aERWZScT+NXL*=!OdGANnm zZ<_Gi>uk&<3t7b0zyBkcB=KzC?SAq~wN~G0G^|Fe@!&nn%$xi3#mx1mcR0G1G6@RZ z4{qWp8`*moGM)Qz8ckI~97Ygwe;cQv%v`xh! 
zj=LSv@13qzR&)R0@ce4@l8jUNritXhxRPCi1-yf@R?(|BO%s_ufwjmeA_<8BVt^PR z2DXy{a~?EX+gUzUO$-nNKQMs%gMfzU7|b=Qtphr|KBK>ehypskB@l%{$6&4zJRn@B z0_s$5o)}!GgI$<7$6&5er!%frhI!1&#p8vm)xj=QIOC2+>WKkjV3UEW9@=>RU&1d_ z`pDl*p%F1a4E!?&cx~tnT`0<&t>4PSvsOTRgoc7~IVvEaZ(IUkfcr>WIkjJ)4snjb WTqDkcc9jlD7Xd{Gb;Q6gFz^LI;Y!{B literal 0 HcmV?d00001 diff --git a/tests/unit/model/test_bbox.py b/tests/unit/model/test_bbox.py index f069ba675..7a640b5d2 100644 --- a/tests/unit/model/test_bbox.py +++ b/tests/unit/model/test_bbox.py @@ -1,208 +1,36 @@ -import numpy as np -import pytest +from numpy.testing import assert_array_almost_equal from datachain.model import BBox, OBBox -@pytest.mark.parametrize( - "coords,title,normalized_to", - [ - ((10, 20, 90, 80), "BBox", None), - ([10, 20, 90, 80], "BBox", None), - ([10.4, 19.8, 90.0, 80.1], "BBox", None), - ([0.1, 0.2, 0.9, 0.8], "", (100, 100)), - ], -) -def test_bbox_voc(coords, title, normalized_to): - bbox = BBox.from_voc( - coords, - title=title, - normalized_to=normalized_to, - ) +def test_bbox(): + bbox = BBox(title="Object", coords=[10, 20, 90, 80]) assert bbox.model_dump() == { - "title": title, - "coords": [10, 20, 90, 80], + "title": "Object", + "coords": [10.0, 20.0, 90.0, 80.0], } - assert bbox.to_voc() == [10, 20, 90, 80] - np.testing.assert_array_almost_equal( - bbox.to_voc_normalized((100, 100)), - [0.1, 0.2, 0.9, 0.8], - ) - np.testing.assert_array_almost_equal( - bbox.to_voc_normalized((200, 200)), - [0.05, 0.1, 0.45, 0.4], - ) -@pytest.mark.parametrize( - "coords,title,normalized_to", - [ - ((10, 20, 80, 60), "BBox", None), - ([10, 20, 80, 60], "BBox", None), - ([9.9, 20.1, 80.4, 60.001], "BBox", None), - ([0.1, 0.2, 0.8, 0.6], "", (100, 100)), - ], -) -def test_bbox_coco(coords, title, normalized_to): - bbox = BBox.from_coco( - coords, - title=title, - normalized_to=normalized_to, - ) - assert bbox.model_dump() == { - "title": title, - "coords": [10, 20, 90, 80], - } - assert bbox.to_coco() == [10, 20, 80, 60] - np.testing.assert_array_almost_equal( - bbox.to_coco_normalized((100, 100)), - [0.1, 0.2, 0.8, 0.6], - ) - np.testing.assert_array_almost_equal( - bbox.to_coco_normalized((200, 200)), - [0.05, 0.1, 0.4, 0.3], - ) - - -@pytest.mark.parametrize( - "coords,title,normalized_to", - [ - ((50, 50, 80, 60), "BBox", None), - ([50, 50, 80, 60], "BBox", None), - ([50.0, 49.6, 79.99, 60.2], "BBox", None), - ([0.5, 0.5, 0.8, 0.6], "", (100, 100)), - ], -) -def test_bbox_yolo(coords, title, normalized_to): - bbox = BBox.from_yolo( - coords, - title=title, - normalized_to=normalized_to, - ) - assert bbox.model_dump() == { - "title": title, - "coords": [10, 20, 90, 80], - } - assert bbox.to_yolo() == [50, 50, 80, 60] - np.testing.assert_array_almost_equal( - bbox.to_yolo_normalized((100, 100)), +def test_bbox_convert(): + bbox = BBox(title="Object", coords=[10, 20, 90, 80]) + assert_array_almost_equal( + bbox.convert([100, 100], "voc", "yolo"), [0.5, 0.5, 0.8, 0.6], - ) - np.testing.assert_array_almost_equal( - bbox.to_yolo_normalized((200, 200)), - [0.25, 0.25, 0.4, 0.3], - ) - - -def test_bbox_from_list(): - assert BBox.from_list([10, 20, 90, 80]).model_dump() == { - "title": "", - "coords": [10, 20, 90, 80], - } - - -def test_bbox_from_dict(): - assert BBox.from_dict({"x1": 10, "y1": 20, "x2": 90, "y2": 80}).model_dump() == { - "title": "", - "coords": [10, 20, 90, 80], - } - - -@pytest.mark.parametrize( - "coords", - [ - {"x1": 10, "y1": 20, "x2": 90}, - {"x1": 10, "y1": 20, "x2": 90, "y2": 80, "x3": 100}, - ], -) -def test_bbox_from_dict_errors(coords): - with 
pytest.raises(AssertionError): - BBox.from_dict(coords) - - -@pytest.mark.parametrize( - "coords,normalized_to", - [ - [(10, 20, 30, 40, 50, 60, 70, 80), None], - [[10, 20, 30, 40, 50, 60, 70, 80], None], - [[9.9, 20.1, 29.6, 40.4, 50.01, 59.99, 70.0, 80], None], - [[10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0], None], - [[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8], (100, 100)], - [(0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8), [100, 100]], - ], -) -def test_obbox_from_list(coords, normalized_to): - obbox = OBBox.from_list(coords, normalized_to=normalized_to) - assert obbox.model_dump() == { - "title": "", - "coords": [10, 20, 30, 40, 50, 60, 70, 80], - } - np.testing.assert_array_almost_equal( - obbox.to_normalized((100, 100)), - [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8], + decimal=3, ) -def test_obbox_to_normalized_errors(): - with pytest.raises(AssertionError): - OBBox.from_list([10, 20, 30, 40, 50, 60, 70, 80]).to_normalized((50, 50)) +def test_bbox_empty(): + assert BBox().model_dump() == {"title": "", "coords": []} -@pytest.mark.parametrize( - "coords,normalized_to", - [ - [None, None], - [12, None], - ["12", None], - [[], None], - [[10, 20, 30, 40, 50, 60, 70], None], - [[10, 20, 30, 40, 50, 60, 70, 80, 90], None], - [[10, 20, 30, 40, 50, 60, 70, 80], (100, 100)], - [[1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8], (100, 100)], - ], -) -def test_obbox_from_list_errors(coords, normalized_to): - with pytest.raises(AssertionError): - OBBox.from_list(coords, normalized_to=normalized_to) - - -def test_obbox_from_dict(): - obbox = OBBox.from_dict( - { - "x1": 0, - "y1": 0.8, - "x2": 2.2, - "y2": 2.9, - "x3": 3.9, - "y3": 5.4, - "x4": 6.0, - "y4": 7.4, - }, - title="OBBox", - ) +def test_obbox(): + obbox = OBBox(title="Object", coords=[10, 20, 30, 40, 50, 60, 70, 80]) assert obbox.model_dump() == { - "title": "OBBox", - "coords": [0, 1, 2, 3, 4, 5, 6, 7], + "title": "Object", + "coords": [10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0], } -@pytest.mark.parametrize( - "coords", - [ - {"x1": 0, "y1": 1, "x2": 2, "y2": 3, "x3": 4, "y3": 5, "x4": 6}, - { - "x1": 0, - "y1": 1, - "x2": 2, - "y2": 3, - "x3": 4, - "y3": 5, - "x4": 6, - "y4": 7, - "x5": 8, - }, - ], -) -def test_obbox_from_dict_errors(coords): - with pytest.raises(AssertionError): - OBBox.from_dict(coords) +def test_obbox_empty(): + assert OBBox().model_dump() == {"title": "", "coords": []} diff --git a/tests/unit/model/test_pose.py b/tests/unit/model/test_pose.py index 1b08caa9e..6e5d20ad8 100644 --- a/tests/unit/model/test_pose.py +++ b/tests/unit/model/test_pose.py @@ -1,181 +1,26 @@ -import numpy as np -import pytest - from datachain.model.pose import Pose, Pose3D -POSE_KEYPOINTS = ( - [5 * i for i in range(17)], - [3 * i for i in reversed(range(17))], -) -POSE_KEYPOINTS_NORMALIZED = ( - [x / 100 for x in POSE_KEYPOINTS[0]], - [y / 100 for y in POSE_KEYPOINTS[1]], -) - -POSE3D_KEYPOINTS = ( - POSE_KEYPOINTS[0], - POSE_KEYPOINTS[1], - [0.05 * i for i in range(17)], -) -POSE3D_KEYPOINTS_NORMALIZED = ( - POSE_KEYPOINTS_NORMALIZED[0], - POSE_KEYPOINTS_NORMALIZED[1], - [0.05 * i for i in range(17)], -) - - -@pytest.mark.parametrize( - "points,normalized_to", - [ - [POSE_KEYPOINTS, None], - [tuple(tuple(c) for c in POSE_KEYPOINTS), None], - [POSE_KEYPOINTS_NORMALIZED, (100, 100)], - ], -) -def test_pose_from_list(points, normalized_to): - pose = Pose.from_list(points, normalized_to=normalized_to) - assert pose.model_dump() == { - "x": POSE_KEYPOINTS[0], - "y": POSE_KEYPOINTS[1], - } - np.testing.assert_array_almost_equal( - 
pose.to_normalized((100, 100)), - POSE_KEYPOINTS_NORMALIZED, - ) - - -@pytest.mark.parametrize( - "points,normalized_to", - [ - [None, None], - [12, None], - ["12", None], - [[], None], - [[12, []], None], - [[[], "12"], None], - [[[], []], None], - [[[], [], []], None], - [[12, 12], None], - [[POSE_KEYPOINTS[0], POSE_KEYPOINTS[1] + [0]], None], - [ - [ - [p * 2 for p in POSE_KEYPOINTS_NORMALIZED[0]], - POSE_KEYPOINTS_NORMALIZED[1], - ], - (100, 100), - ], - ], -) -def test_pose_from_list_errors(points, normalized_to): - with pytest.raises(AssertionError): - Pose.from_list(points, normalized_to=normalized_to) - - -def test_pose_to_normalized_errors(): - with pytest.raises(AssertionError): - Pose.from_list(POSE_KEYPOINTS).to_normalized((50, 50)) - -def test_pose_from_dict(): - pose = Pose.from_dict({"x": POSE_KEYPOINTS[0], "y": POSE_KEYPOINTS[1]}) - assert pose.model_dump() == { - "x": POSE_KEYPOINTS[0], - "y": POSE_KEYPOINTS[1], - } - - -@pytest.mark.parametrize( - "points", - [ - {"x": POSE_KEYPOINTS[0]}, - {"x": POSE_KEYPOINTS[0], "y": POSE_KEYPOINTS[1], "z": []}, - ], -) -def test_pose_from_dict_errors(points): - with pytest.raises(AssertionError): - Pose.from_dict(points) - - -@pytest.mark.parametrize( - "points,normalized_to", - [ - [POSE3D_KEYPOINTS, None], - [tuple(tuple(c) for c in POSE3D_KEYPOINTS), None], - [POSE3D_KEYPOINTS_NORMALIZED, (100, 100)], - ], -) -def test_pose3d_from_list(points, normalized_to): - pose = Pose3D.from_list(points, normalized_to=normalized_to) +def test_pose(): + pose = Pose(x=[10, 20, 30], y=[40, 50, 60]) assert pose.model_dump() == { - "x": POSE3D_KEYPOINTS[0], - "y": POSE3D_KEYPOINTS[1], - "visible": POSE3D_KEYPOINTS[2], + "x": [10.0, 20.0, 30.0], + "y": [40.0, 50.0, 60.0], } - np.testing.assert_array_almost_equal( - pose.to_normalized((100, 100)), - POSE3D_KEYPOINTS_NORMALIZED, - ) - -@pytest.mark.parametrize( - "points,normalized_to", - [ - [None, None], - [12, None], - ["12", None], - [[], None], - [[12, []], None], - [[[], "12"], None], - [[[], []], None], - [[[], [], []], None], - [[12, 12], None], - [[POSE3D_KEYPOINTS[0], POSE3D_KEYPOINTS[1] + [0]], None], - [ - [ - [p * 2 for p in POSE3D_KEYPOINTS_NORMALIZED[0]], - POSE3D_KEYPOINTS_NORMALIZED[1], - ], - (100, 100), - ], - ], -) -def test_pose3d_from_list_errors(points, normalized_to): - with pytest.raises(AssertionError): - Pose3D.from_list(points, normalized_to=normalized_to) +def test_pose_empty(): + assert Pose().model_dump() == {"x": [], "y": []} -def test_pose3d_to_normalized_errors(): - with pytest.raises(AssertionError): - Pose3D.from_list(POSE3D_KEYPOINTS).to_normalized((50, 50)) - -def test_pose3d_from_dict(): - pose = Pose3D.from_dict( - { - "x": POSE3D_KEYPOINTS[0], - "y": POSE3D_KEYPOINTS[1], - "visible": POSE3D_KEYPOINTS[2], - } - ) - assert pose.model_dump() == { - "x": POSE3D_KEYPOINTS[0], - "y": POSE3D_KEYPOINTS[1], - "visible": POSE3D_KEYPOINTS[2], +def test_pose3d(): + pose3d = Pose3D(x=[10, 20, 30], y=[40, 50, 60], visible=[0.0, 0.5, 1.0]) + assert pose3d.model_dump() == { + "x": [10.0, 20.0, 30.0], + "y": [40.0, 50.0, 60.0], + "visible": [0.0, 0.5, 1.0], } -@pytest.mark.parametrize( - "points", - [ - {"x": POSE_KEYPOINTS[0], "y": POSE_KEYPOINTS[1]}, - { - "x": POSE_KEYPOINTS[0], - "y": POSE_KEYPOINTS[1], - "visible": POSE3D_KEYPOINTS[2], - "z": [], - }, - ], -) -def test_pose3d_from_dict_errors(points): - with pytest.raises(AssertionError): - Pose3D.from_dict(points) +def test_pose3d_empty(): + assert Pose3D().model_dump() == {"x": [], "y": [], "visible": []} diff --git 
a/tests/unit/model/test_segment.py b/tests/unit/model/test_segment.py index 3c671c96e..f654c3f74 100644 --- a/tests/unit/model/test_segment.py +++ b/tests/unit/model/test_segment.py @@ -1,86 +1,14 @@ -import numpy as np -import pytest - from datachain.model.segment import Segment -SEGMENT_POINTS = ( - [2 * i for i in range(50)], - list(reversed(range(50))), -) -SEGMENT_POINTS_NORMALIZED = ( - [x / 100 for x in SEGMENT_POINTS[0]], - [y / 100 for y in SEGMENT_POINTS[1]], -) - - -@pytest.mark.parametrize( - "points,title,normalized_to", - [ - [SEGMENT_POINTS, "Segment", None], - [tuple(tuple(c) for c in SEGMENT_POINTS), "", None], - [SEGMENT_POINTS_NORMALIZED, "Person", (100, 100)], - ], -) -def test_pose_from_list(points, title, normalized_to): - segment = Segment.from_list(points, title, normalized_to=normalized_to) - assert segment.model_dump() == { - "title": title, - "x": SEGMENT_POINTS[0], - "y": SEGMENT_POINTS[1], - } - np.testing.assert_array_almost_equal( - segment.to_normalized((100, 100)), - SEGMENT_POINTS_NORMALIZED, - ) - - -@pytest.mark.parametrize( - "points,normalized_to", - [ - [None, None], - [12, None], - ["12", None], - [[], None], - [[12, []], None], - [[[], "12"], None], - [[[], [], []], None], - [[12, 12], None], - [[SEGMENT_POINTS[0], SEGMENT_POINTS[1] + [0]], None], - [ - [ - [p * 2 for p in SEGMENT_POINTS_NORMALIZED[0]], - SEGMENT_POINTS_NORMALIZED[1], - ], - (100, 100), - ], - ], -) -def test_pose_from_list_errors(points, normalized_to): - with pytest.raises(AssertionError): - Segment.from_list(points, normalized_to=normalized_to) - - -def test_pose_to_normalized_errors(): - with pytest.raises(AssertionError): - Segment.from_list(SEGMENT_POINTS).to_normalized((50, 50)) - -def test_pose_from_dict(): - segment = Segment.from_dict({"x": SEGMENT_POINTS[0], "y": SEGMENT_POINTS[1]}) +def test_segment(): + segment = Segment(title="Object", x=[10, 20, 30], y=[40, 50, 60]) assert segment.model_dump() == { - "title": "", - "x": SEGMENT_POINTS[0], - "y": SEGMENT_POINTS[1], + "title": "Object", + "x": [10.0, 20.0, 30.0], + "y": [40.0, 50.0, 60.0], } -@pytest.mark.parametrize( - "points", - [ - {"x": SEGMENT_POINTS[0]}, - {"x": SEGMENT_POINTS[0], "y": SEGMENT_POINTS[1], "z": []}, - ], -) -def test_pose_from_dict_errors(points): - with pytest.raises(AssertionError): - Segment.from_dict(points) +def test_segment_empty(): + assert Segment().model_dump() == {"title": "", "x": [], "y": []} diff --git a/tests/unit/model/test_utils.py b/tests/unit/model/test_utils.py index 5ac074c78..23069bef7 100644 --- a/tests/unit/model/test_utils.py +++ b/tests/unit/model/test_utils.py @@ -1,132 +1,76 @@ import pytest - -from datachain.model.utils import ( - normalize_coords, - validate_bbox, - validate_bbox_normalized, - validate_img_size, -) - - -@pytest.mark.parametrize( - "img_size", - [ - [100, 100], - (100, 100), - ], -) -def test_validate_img_size(img_size): - assert validate_img_size(img_size) == img_size - - -@pytest.mark.parametrize( - "img_size", - [ - None, - 12, - "12", - [], - [1], - [1, 2, 3], - [1, "2"], - [0, 2], - [1, 0], - [10.0, 10.0], - ], -) -def test_validate_img_size_errors(img_size): - with pytest.raises(AssertionError): - validate_img_size(img_size) - - -@pytest.mark.parametrize( - "bbox", - [ - (10, 10, 90, 90), - [10, 10, 90, 90], - ], -) -def test_validate_bbox(bbox): - assert validate_bbox(bbox) == bbox - - -@pytest.mark.parametrize( - "bbox", - [ - None, - 12, - "12", - [], - [0, 1, 2], - [0, 1, 2, 3, 4], - [0, 1, 2, "3"], - [0, -1, 2, 3], - ], -) -def 
test_validate_bbox_errors(bbox): - with pytest.raises(AssertionError): - validate_bbox(bbox) - - -@pytest.mark.parametrize( - "bbox", - [ - (0.1, 0.1, 0.9, 0.9), - [0.1, 0.1, 0.9, 0.9], - ], -) -def test_validate_bbox_normalized(bbox): - assert validate_bbox_normalized(bbox, (100, 100)) == [10, 10, 90, 90] - - -@pytest.mark.parametrize( - "bbox", - [ - None, - 0.2, - "0.2", - [], - [0.0, 0.1, 0.2], - [0.0, 0.1, 0.2, 0.3, 0.4], - [0.0, 0.1, 0.2, "0.3"], - [0.0, 1.0, 2.0, 3.0], - ], -) -def test_validate_bbox_normalized_errors(bbox): - with pytest.raises(AssertionError): - validate_bbox_normalized(bbox, (100, 100)) +from numpy.testing import assert_array_almost_equal + +from datachain.model.utils import convert_bbox + +# Test data: list of bounding boxes in different formats. +# These meant to be the same bounding boxes assuming image size is (100, 100). +# Formats are: +# - albumentations: [x_min, y_min, x_max, y_max], normalized coordinates +# - coco: [x_min, y_min, width, height], pixel coordinates +# - voc: [x_min, y_min, x_max, y_max], pixel coordinates +# - yolo: [x_center, y_center, width, height], normalized coordinates +BOXES = [ + { + "albumentations": [0.0, 0.0, 0.0, 0.0], + "coco": [0, 0, 0, 0], + "voc": [0, 0, 0, 0], + "yolo": [0.0, 0.0, 0.0, 0.0], + }, + { + "albumentations": [0.5, 0.5, 0.5, 0.5], + "coco": [50, 50, 0, 0], + "voc": [50, 50, 50, 50], + "yolo": [0.5, 0.5, 0.0, 0.0], + }, + { + "albumentations": [1.0, 1.0, 1.0, 1.0], + "coco": [100, 100, 0, 0], + "voc": [100, 100, 100, 100], + "yolo": [1.0, 1.0, 0.0, 0.0], + }, + { + "albumentations": [0.0, 0.0, 1.0, 1.0], + "coco": [0, 0, 100, 100], + "voc": [0, 0, 100, 100], + "yolo": [0.5, 0.5, 1.0, 1.0], + }, + { + "albumentations": [0.1, 0.2, 0.9, 0.8], + "coco": [10, 20, 80, 60], + "voc": [10, 20, 90, 80], + "yolo": [0.5, 0.5, 0.8, 0.6], + }, +] @pytest.mark.parametrize( - "coords", + "source,target,coords,result", [ - (10, 10, 90, 90), - [10, 10, 90, 90], + (source, target, coords, result) + for box in BOXES + for source, coords in box.items() + for target, result in box.items() ], ) -def test_normalize_coords(coords): - assert normalize_coords(coords, (100, 100)) == [0.1, 0.1, 0.9, 0.9] +def test_convert_bbox(source, target, coords, result): + assert_array_almost_equal( + convert_bbox(coords, (100, 100), source, target), + result, + decimal=3, + ) @pytest.mark.parametrize( - "coords", + "source,target", [ - None, - 10, - "10", - [10, 10, 90], - [10, 10, 90, 90, 90], - [10.0, 10.0, 90.0, 90.0], - [200, 10, 90, 90], - [10, 200, 90, 90], - [10, 10, 200, 90], - [10, 10, 90, 200], - [-10, 10, 90, 90], - [10, -10, 90, 90], - [10, 10, -10, 90], - [10, 10, 90, -10], + ("unknown", "coco"), + ("albumentations", "unknown"), + ("coco", "unknown"), + ("voc", "unknown"), + ("yolo", "unknown"), ], ) -def test_normalize_coords_errors(coords): - with pytest.raises(AssertionError): - normalize_coords(coords, (100, 100)) +def test_convert_bbox_error_source_target(source, target): + with pytest.raises(ValueError): + convert_bbox([0, 0, 0, 0], (100, 100), source, target) diff --git a/tests/unit/model/test_yolo.py b/tests/unit/model/test_yolo.py new file mode 100644 index 000000000..47b2071d6 --- /dev/null +++ b/tests/unit/model/test_yolo.py @@ -0,0 +1,190 @@ +from numpy.testing import assert_array_almost_equal + +from datachain.model.yolo import ( + Yolo, + YoloCls, + YoloObb, + YoloPose, + YoloPoseBodyPart, + YoloSeg, +) + + +def test_yolo(): + model = Yolo( + cls=[1, 2], + name=["person", "chair"], + confidence=[0.8, 0.5], + box=[ + [10, 20, 
190, 80], + [160, 80, 200, 100], + ], + orig_shape=[100, 200], + ) + + assert model.model_dump() == { + "cls": [1, 2], + "name": ["person", "chair"], + "confidence": [0.8, 0.5], + "box": [ + [10, 20, 190, 80], + [160, 80, 200, 100], + ], + "orig_shape": [100, 200], + } + + assert model.img_size == (200, 100) + + alb_boxes = list(model.to_albumentations()) + assert len(alb_boxes) == 2 + assert_array_almost_equal(alb_boxes[0], [0.05, 0.2, 0.95, 0.8], decimal=3) + assert_array_almost_equal(alb_boxes[1], [0.8, 0.8, 1.0, 1.0], decimal=3) + + coco_boxes = list(model.to_coco()) + assert len(coco_boxes) == 2 + assert coco_boxes[0] == [10, 20, 180, 60] + assert coco_boxes[1] == [160, 80, 40, 20] + + voc_boxes = list(model.to_voc()) + assert len(voc_boxes) == 2 + assert voc_boxes[0] == [10, 20, 190, 80] + assert voc_boxes[1] == [160, 80, 200, 100] + + yolo_boxes = list(model.to_yolo()) + assert len(yolo_boxes) == 2 + assert_array_almost_equal(yolo_boxes[0], [0.5, 0.5, 0.9, 0.6], decimal=3) + assert_array_almost_equal(yolo_boxes[1], [0.9, 0.9, 0.2, 0.2], decimal=3) + + +def test_yolo_obb(): + model = YoloObb( + cls=[1, 2], + name=["person", "chair"], + confidence=[0.8, 0.5], + obox=[ + [10, 20, 30, 40, 50, 60, 70, 80], + [110, 120, 130, 140, 150, 160, 170, 180], + ], + orig_shape=[100, 200], + ) + + assert model.model_dump() == { + "cls": [1, 2], + "name": ["person", "chair"], + "confidence": [0.8, 0.5], + "obox": [ + [10, 20, 30, 40, 50, 60, 70, 80], + [110, 120, 130, 140, 150, 160, 170, 180], + ], + "orig_shape": [100, 200], + } + + assert model.img_size == (200, 100) + + +def test_yolo_seg(): + model = YoloSeg( + cls=[1, 2], + name=["person", "chair"], + confidence=[0.8, 0.5], + box=[ + [10, 20, 190, 80], + [160, 80, 200, 100], + ], + segments=[ + [[10, 20], [30, 40], [50, 60]], + [[110, 120], [130, 140], [150, 160]], + ], + orig_shape=[100, 200], + ) + + assert model.model_dump() == { + "cls": [1, 2], + "name": ["person", "chair"], + "confidence": [0.8, 0.5], + "box": [ + [10, 20, 190, 80], + [160, 80, 200, 100], + ], + "segments": [ + [[10, 20], [30, 40], [50, 60]], + [[110, 120], [130, 140], [150, 160]], + ], + "orig_shape": [100, 200], + } + + assert model.img_size == (200, 100) + + +def test_yolo_pose(): + model = YoloPose( + cls=[1], + name=["person"], + confidence=[0.8], + box=[ + [10, 20, 190, 80], + ], + keypoints=[ + [ + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17], + [2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34], + [0.5 for _ in range(17)], + ], + ], + orig_shape=[100, 200], + ) + + assert model.model_dump() == { + "cls": [1], + "name": ["person"], + "confidence": [0.8], + "box": [ + [10, 20, 190, 80], + ], + "keypoints": [ + [ + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17], + [2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34], + [ + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + ], + ], + ], + "orig_shape": [100, 200], + } + + assert model.img_size == (200, 100) + + assert model.keypoints[0][0][YoloPoseBodyPart.left_wrist] == 10 + assert model.keypoints[0][1][YoloPoseBodyPart.left_wrist] == 20 + assert model.keypoints[0][2][YoloPoseBodyPart.left_wrist] == 0.5 + + +def test_yolo_cls(): + model = YoloCls( + cls=[1, 37], + name=["person", "chair"], + confidence=[0.8, 0.6], + ) + + assert model.model_dump() == { + "cls": [1, 37], + "name": ["person", "chair"], + "confidence": [0.8, 0.6], + } From 375a42b6f022b3294e4ffdc0743ccfe1363a119c Mon Sep 17 00:00:00 
2001 From: Vladimir Rudnyh Date: Sun, 16 Feb 2025 13:46:18 +0700 Subject: [PATCH 3/6] Update examples and docs --- docs/references/data-types/yolo.md | 46 +++++++++++++++++++ docs/references/index.md | 1 + examples/computer_vision/ultralytics-bbox.py | 27 ----------- examples/computer_vision/ultralytics-pose.py | 27 ----------- .../computer_vision/ultralytics-segment.py | 27 ----------- examples/computer_vision/yolo-bbox.py | 18 ++++++++ examples/computer_vision/yolo-pose.py | 18 ++++++++ examples/computer_vision/yolo-segment.py | 18 ++++++++ mkdocs.yml | 1 + src/datachain/model/__init__.py | 4 +- src/datachain/model/yolo.py | 24 +++++----- tests/func/model/conftest.py | 29 ------------ tests/func/model/test_yolo.py | 10 ++-- tests/unit/model/test_yolo.py | 6 +-- 14 files changed, 125 insertions(+), 131 deletions(-) create mode 100644 docs/references/data-types/yolo.md delete mode 100644 examples/computer_vision/ultralytics-bbox.py delete mode 100644 examples/computer_vision/ultralytics-pose.py delete mode 100644 examples/computer_vision/ultralytics-segment.py create mode 100644 examples/computer_vision/yolo-bbox.py create mode 100644 examples/computer_vision/yolo-pose.py create mode 100644 examples/computer_vision/yolo-segment.py delete mode 100644 tests/func/model/conftest.py diff --git a/docs/references/data-types/yolo.md b/docs/references/data-types/yolo.md new file mode 100644 index 000000000..5f5e2cfb3 --- /dev/null +++ b/docs/references/data-types/yolo.md @@ -0,0 +1,46 @@ +# YOLO (Ultralytics) models + +YOLO (You Only Look Once) by Ultralytics is a state-of-the-art object detection framework +that provides fast and accurate real-time predictions. Developed by Ultralytics, +it supports tasks like object detection, instance segmentation, and pose estimation. +The framework is easy to use, highly optimized, and supports training and inference +with PyTorch. + +For more details, visit [Ultralytics YOLO](https://github.com/ultralytics/ultralytics). + +### Example + +See more use cases in the datachain repo [examples](https://github.com/iterative/datachain/tree/main/examples). 
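+
+The example below runs a YOLO detection model over a small sample of images from a public bucket and stores the predictions as `YoloBox` signals (it is the same script added in this change as `examples/computer_vision/yolo-bbox.py`):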
+ +```python +from ultralytics import YOLO + +from datachain import DataChain, ImageFile +from datachain.model.yolo import YoloBox + + +def process_boxes(yolo: YOLO, file: ImageFile) -> YoloBox: + results = yolo(file.read(), verbose=False) + return YoloBox.from_yolo_results(results) + + +( + DataChain.from_storage("gs://datachain-demo/coco2017/images", type="image") + .limit(20) + .setup(yolo=lambda: YOLO("yolo11n.pt")) + .map(boxes=process_boxes) + .show() +) +``` + +::: datachain.model.yolo.YoloBox + +::: datachain.model.yolo.YoloObb + +::: datachain.model.yolo.YoloSeg + +::: datachain.model.yolo.YoloPose + +::: datachain.model.yolo.YoloPoseBodyPart + +::: datachain.model.yolo.YoloCls diff --git a/docs/references/index.md b/docs/references/index.md index e3a32cdfe..988e37f11 100644 --- a/docs/references/index.md +++ b/docs/references/index.md @@ -17,6 +17,7 @@ DataChain's API is organized into several modules: - [BBox](./data-types/bbox.md) - Bounding box data type - [Pose](./data-types/pose.md) - Pose data type - [Segment](./data-types/segment.md) - Segment data type + - [Yolo](./data-types/yolo.md) - YOLO (Ultralytics) models - [UDF](./udf.md) - User-defined functions and transformations - [Functions](./func.md) - Built-in functions for data manipulation and analysis - [Torch](./torch.md) - PyTorch data loading utilities diff --git a/examples/computer_vision/ultralytics-bbox.py b/examples/computer_vision/ultralytics-bbox.py deleted file mode 100644 index ebe0547e1..000000000 --- a/examples/computer_vision/ultralytics-bbox.py +++ /dev/null @@ -1,27 +0,0 @@ -import os - -os.environ["YOLO_VERBOSE"] = "false" - - -from io import BytesIO - -from PIL import Image -from ultralytics import YOLO - -from datachain import C, DataChain, File -from datachain.model.ultralytics import YoloBBoxes - - -def process_bboxes(yolo: YOLO, file: File) -> YoloBBoxes: - results = yolo(Image.open(BytesIO(file.read()))) - return YoloBBoxes.from_results(results) - - -( - DataChain.from_storage("gs://datachain-demo/openimages-v6-test-jsonpairs/") - .filter(C("file.path").glob("*.jpg")) - .limit(20) - .setup(yolo=lambda: YOLO("yolo11n.pt")) - .map(boxes=process_bboxes) - .show() -) diff --git a/examples/computer_vision/ultralytics-pose.py b/examples/computer_vision/ultralytics-pose.py deleted file mode 100644 index 99e1c0465..000000000 --- a/examples/computer_vision/ultralytics-pose.py +++ /dev/null @@ -1,27 +0,0 @@ -import os - -os.environ["YOLO_VERBOSE"] = "false" - - -from io import BytesIO - -from PIL import Image -from ultralytics import YOLO - -from datachain import C, DataChain, File -from datachain.model.ultralytics import YoloPoses - - -def process_poses(yolo: YOLO, file: File) -> YoloPoses: - results = yolo(Image.open(BytesIO(file.read()))) - return YoloPoses.from_results(results) - - -( - DataChain.from_storage("gs://datachain-demo/openimages-v6-test-jsonpairs/") - .filter(C("file.path").glob("*.jpg")) - .limit(20) - .setup(yolo=lambda: YOLO("yolo11n-pose.pt")) - .map(poses=process_poses) - .show() -) diff --git a/examples/computer_vision/ultralytics-segment.py b/examples/computer_vision/ultralytics-segment.py deleted file mode 100644 index d6e04bebc..000000000 --- a/examples/computer_vision/ultralytics-segment.py +++ /dev/null @@ -1,27 +0,0 @@ -import os - -os.environ["YOLO_VERBOSE"] = "false" - - -from io import BytesIO - -from PIL import Image -from ultralytics import YOLO - -from datachain import C, DataChain, File -from datachain.model.ultralytics import YoloSegments - - -def process_segments(yolo: 
YOLO, file: File) -> YoloSegments: - results = yolo(Image.open(BytesIO(file.read()))) - return YoloSegments.from_results(results) - - -( - DataChain.from_storage("gs://datachain-demo/openimages-v6-test-jsonpairs/") - .filter(C("file.path").glob("*.jpg")) - .limit(20) - .setup(yolo=lambda: YOLO("yolo11n-seg.pt")) - .map(segments=process_segments) - .show() -) diff --git a/examples/computer_vision/yolo-bbox.py b/examples/computer_vision/yolo-bbox.py new file mode 100644 index 000000000..11e7a680d --- /dev/null +++ b/examples/computer_vision/yolo-bbox.py @@ -0,0 +1,18 @@ +from ultralytics import YOLO + +from datachain import DataChain, ImageFile +from datachain.model.yolo import YoloBox + + +def process_boxes(yolo: YOLO, file: ImageFile) -> YoloBox: + results = yolo(file.read(), verbose=False) + return YoloBox.from_yolo_results(results) + + +( + DataChain.from_storage("gs://datachain-demo/coco2017/images", type="image") + .limit(20) + .setup(yolo=lambda: YOLO("yolo11n.pt")) + .map(boxes=process_boxes) + .show() +) diff --git a/examples/computer_vision/yolo-pose.py b/examples/computer_vision/yolo-pose.py new file mode 100644 index 000000000..4938f4aae --- /dev/null +++ b/examples/computer_vision/yolo-pose.py @@ -0,0 +1,18 @@ +from ultralytics import YOLO + +from datachain import DataChain, ImageFile +from datachain.model.yolo import YoloPose + + +def process_poses(yolo: YOLO, file: ImageFile) -> YoloPose: + results = yolo(file.read(), verbose=False) + return YoloPose.from_yolo_results(results) + + +( + DataChain.from_storage("gs://datachain-demo/coco2017/images", type="image") + .limit(20) + .setup(yolo=lambda: YOLO("yolo11n-pose.pt")) + .map(poses=process_poses) + .show() +) diff --git a/examples/computer_vision/yolo-segment.py b/examples/computer_vision/yolo-segment.py new file mode 100644 index 000000000..18008367f --- /dev/null +++ b/examples/computer_vision/yolo-segment.py @@ -0,0 +1,18 @@ +from ultralytics import YOLO + +from datachain import DataChain, File +from datachain.model.yolo import YoloSeg + + +def process_segments(yolo: YOLO, file: File) -> YoloSeg: + results = yolo(file.read(), verbose=False) + return YoloSeg.from_yolo_results(results) + + +( + DataChain.from_storage("gs://datachain-demo/coco2017/images", type="image") + .limit(20) + .setup(yolo=lambda: YOLO("yolo11n-seg.pt")) + .map(segments=process_segments) + .show() +) diff --git a/mkdocs.yml b/mkdocs.yml index c391f5215..2c5b83c54 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -80,6 +80,7 @@ nav: - BBox: references/data-types/bbox.md - Pose: references/data-types/pose.md - Segment: references/data-types/segment.md + - Yolo: references/data-types/yolo.md - UDF: references/udf.md - Torch: references/torch.md - Functions: references/func.md diff --git a/src/datachain/model/__init__.py b/src/datachain/model/__init__.py index 53ed10bfe..f82183cf6 100644 --- a/src/datachain/model/__init__.py +++ b/src/datachain/model/__init__.py @@ -1,7 +1,7 @@ from .bbox import BBox, OBBox from .pose import Pose, Pose3D from .segment import Segment -from .yolo import Yolo, YoloCls, YoloObb, YoloPose, YoloPoseBodyPart, YoloSeg +from .yolo import YoloBox, YoloCls, YoloObb, YoloPose, YoloPoseBodyPart, YoloSeg __all__ = [ "BBox", @@ -9,7 +9,7 @@ "Pose", "Pose3D", "Segment", - "Yolo", + "YoloBox", "YoloCls", "YoloObb", "YoloPose", diff --git a/src/datachain/model/yolo.py b/src/datachain/model/yolo.py index 97cd4d3a4..25bae5557 100644 --- a/src/datachain/model/yolo.py +++ b/src/datachain/model/yolo.py @@ -24,7 +24,7 @@ from ultralytics.engine.results 
import Results -class Yolo(DataModel): +class YoloBox(DataModel): """ A class representing objects bounding boxes detected by a YOLO model. @@ -52,28 +52,28 @@ class Yolo(DataModel): orig_shape: list[int] = Field(default=[]) @staticmethod - def from_yolo_results(results: list["Results"]) -> "Yolo": + def from_yolo_results(results: list["Results"]) -> "YoloBox": """ Create a YOLO bounding boxes from the YOLO results. Example: ```python from ultralytics import YOLO - from datachain.model.bbox import Yolo + from datachain.model.bbox import YoloBox model = YOLO("yolo11n.pt") results = model("image.jpg", verbose=False) - boxes = Yolo.from_yolo_results(results) + boxes = YoloBox.from_yolo_results(results) ``` Args: results: YOLO results from the model. Returns: - Yolo: A YOLO bounding boxes data model. + YoloBox: A YOLO bounding boxes data model. """ if not (summary := results[0].summary(normalize=False)): - return Yolo() + return YoloBox() cls, name, confidence, box = [], [], [], [] for res in summary: @@ -82,7 +82,7 @@ def from_yolo_results(results: list["Results"]) -> "Yolo": confidence.append(res.get("confidence", -1)) box.append(_get_box_from_yolo_result(res)) - return Yolo( + return YoloBox( cls=cls, name=name, confidence=confidence, @@ -254,11 +254,12 @@ def img_size(self) -> tuple[int, int]: ) -class YoloSeg(Yolo): +class YoloSeg(YoloBox): """ A class representing objects segmentation detected by a YOLO model. - This class extends the `Yolo` class to include the segments of the detected objects. + This class extends the `YoloBox` class to include the segments + of the detected objects. Instance segmentation goes a step further than object detection and involves identifying individual objects in an image and segmenting them @@ -346,11 +347,12 @@ class YoloPoseBodyPart: right_ankle = 16 -class YoloPose(Yolo): +class YoloPose(YoloBox): """ A class representing human pose keypoints detected by a YOLO model. - This class extends the `Yolo` class to include the segments of the detected objects. + This class extends the `YoloBox` class to include the segments + of the detected objects. Pose estimation is a task that involves identifying the location of specific points in an image, usually referred to as keypoints. 
diff --git a/tests/func/model/conftest.py b/tests/func/model/conftest.py deleted file mode 100644 index 9386deb9d..000000000 --- a/tests/func/model/conftest.py +++ /dev/null @@ -1,29 +0,0 @@ -import os - -import numpy as np -import pytest -import torch -from PIL import Image - - -@pytest.fixture -def running_img() -> np.ndarray: - img_file = os.path.join(os.path.dirname(__file__), "data", "running.jpg") - return np.array(Image.open(img_file)) - - -@pytest.fixture -def ships_img() -> np.ndarray: - img_file = os.path.join(os.path.dirname(__file__), "data", "ships.jpg") - return np.array(Image.open(img_file)) - - -@pytest.fixture -def running_img_masks() -> torch.Tensor: - mask0_file = os.path.join(os.path.dirname(__file__), "data", "running-mask0.png") - mask0_np = np.array(Image.open(mask0_file)) - - mask1_file = os.path.join(os.path.dirname(__file__), "data", "running-mask1.png") - mask1_np = np.array(Image.open(mask1_file)) - - return torch.tensor([mask0_np.astype(np.float32), mask1_np.astype(np.float32)]) diff --git a/tests/func/model/test_yolo.py b/tests/func/model/test_yolo.py index 5d694c0ac..5c79b9e1a 100644 --- a/tests/func/model/test_yolo.py +++ b/tests/func/model/test_yolo.py @@ -8,7 +8,7 @@ from ultralytics.engine.results import Results from datachain.model.yolo import ( - Yolo, + YoloBox, YoloCls, YoloObb, YoloPose, @@ -39,7 +39,7 @@ def running_img_masks() -> torch.Tensor: return torch.tensor([mask0_np.astype(np.float32), mask1_np.astype(np.float32)]) -def test_yolo_from_results_empty(running_img): +def test_yolo_box_from_results_empty(running_img): result = Results( orig_img=running_img, path="running.jpeg", @@ -47,7 +47,7 @@ def test_yolo_from_results_empty(running_img): boxes=torch.empty((0, 6)), ) - model = Yolo.from_yolo_results([result]) + model = YoloBox.from_yolo_results([result]) assert model.model_dump() == { "cls": [], "name": [], @@ -57,7 +57,7 @@ def test_yolo_from_results_empty(running_img): } -def test_yolo_from_results(running_img): +def test_yolo_box_from_results(running_img): result = Results( orig_img=running_img, path="running.jpeg", @@ -70,7 +70,7 @@ def test_yolo_from_results(running_img): ), ) - model = Yolo.from_yolo_results([result]) + model = YoloBox.from_yolo_results([result]) assert model.img_size == (200, 300) model_json = model.model_dump() diff --git a/tests/unit/model/test_yolo.py b/tests/unit/model/test_yolo.py index 47b2071d6..79257391c 100644 --- a/tests/unit/model/test_yolo.py +++ b/tests/unit/model/test_yolo.py @@ -1,7 +1,7 @@ from numpy.testing import assert_array_almost_equal from datachain.model.yolo import ( - Yolo, + YoloBox, YoloCls, YoloObb, YoloPose, @@ -10,8 +10,8 @@ ) -def test_yolo(): - model = Yolo( +def test_yolo_box(): + model = YoloBox( cls=[1, 2], name=["person", "chair"], confidence=[0.8, 0.5], From a36591a8ca80883d35b530b3d84798863c359f95 Mon Sep 17 00:00:00 2001 From: Vladimir Rudnyh Date: Sun, 16 Feb 2025 14:03:06 +0700 Subject: [PATCH 4/6] Add '.DS_Store' to gitignore --- .DS_Store | Bin 6148 -> 0 bytes .gitignore | 3 +++ tests/.DS_Store | Bin 6148 -> 0 bytes tests/func/.DS_Store | Bin 6148 -> 0 bytes tests/func/model/.DS_Store | Bin 6148 -> 0 bytes tests/unit/.DS_Store | Bin 6148 -> 0 bytes 6 files changed, 3 insertions(+) delete mode 100644 .DS_Store delete mode 100644 tests/.DS_Store delete mode 100644 tests/func/.DS_Store delete mode 100644 tests/func/model/.DS_Store delete mode 100644 tests/unit/.DS_Store diff --git a/.DS_Store b/.DS_Store deleted file mode 100644 index 
a9699e2c17845717b2d11c1f567abfcefac24d25..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeHK%}T>S5Z<+|Z74zx3Oz1(Em&<8i-EVe&b~7Jje;8xjorQ;t*^IFkG(?U{g`m08wPk`4xtb#j=Gi1jWKc29 z-!$R3H(1UhmO;Pz{U5<3iL<=d`{Y}-dSkoUw3_Yaz4s*NUhe0!%=M?YIJ%ND2}<1$ zuH$Gvws+5En)`7YO;ti1Mi6p$6Q`k^yKnacIF!?Ie|*zPPA{nLI|91n)eu2>9@ zdR;N-pDdSFYj6MH>~j2^yrl9?6Ul*bB|8R7cn4*rqE~O4CNg~lYnf3-5)uQ%05L!e zY$gNdJZQByvwW(W7$63IU;y_A0S(bHSZGvR2XuISMt>a<1$2B%APR$y!9pW=K)6l? z)T!J&F}O|#yD)K%!9t@>XI!le^O%*(#|u}hgI%a_#vP5+69dG+Is-L5wDJ7EfM2Ha zk-wfoBVvFU_-738`p6r(P?R}azmGO?>$+dD_6y~pS&mam#b4&R%S4TDp7gUKwP7k?B-GQI~J`dcc? zkfke=C!QMQ^DWOQ7lgzBF+dC~W5C>@)!H%#vJ(Tuz`tbx?*{>j=o&0Es;dJA)dB$G zZ=@2KV=sX*!k}xg&bQO295~0_YknG=c?$egqT^G!O&7%D^WiMTUd` diff --git a/tests/func/.DS_Store b/tests/func/.DS_Store deleted file mode 100644 index e0d2e115f4431929bc277eccea8e8f7165d1ce26..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeHKOH0E*5Z>*>HWVQTg&r5Y7Ob|4#Y>3w4;aydN=-=6V45v`m_sS#tbfQ~;_q>0 zcLOa3k0N#kcE8zq>}Ed5J{V)%U4%!B*^IF!G(?U{iJ-aERWZScT+NXL*=!OdGANnm zZ<_Gi>uk&<3t7b0zyBkcB=KzC?SAq~wN~G0G^|Fe@!&nn%$xi3#mx1mcR0G1G6@RZ z4{qWp8`*moGM)Qz8ckI~97Ygwe;cQv%v`xh! zj=LSv@13qzR&)R0@ce4@l8jUNritXhxRPCi1-yf@R?(|BO%s_ufwjmeA_<8BVt^PR z2DXy{a~?EX+gUzUO$-nNKQMs%gMfzU7|b=Qtphr|KBK>ehypskB@l%{$6&4zJRn@B z0_s$5o)}!GgI$<7$6&5er!%frhI!1&#p8vm)xj=QIOC2+>WKkjV3UEW9@=>RU&1d_ z`pDl*p%F1a4E!?&cx~tnT`0<&t>4PSvsOTRgoc7~IVvEaZ(IUkfcr>WIkjJ)4snjb WTqDkcc9jlD7Xd{Gb;Q6gFz^LI;Y!{B diff --git a/tests/func/model/.DS_Store b/tests/func/model/.DS_Store deleted file mode 100644 index abc43654a57e597cf1d30412076964724f40cb8e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeHK%}T>S5Z-O0Nhm@N3Oz1(Em&<8i**Xq{Brfs`+!@dn4nidyAHs_YT@E z(d`^97Ou0iyLWOvc+8%1`JxHsz_*eugB84j^0}VpaGYl{zXxBHU*!=J1H=F^Kn$!e z1LjO{8tY2|t(+Jj27X`w_XhzD(KlFXR9gphczwpWg@^(=z9kTaLEm7h5h5U5mjdci zZk`ximxEuJJl|lcQI|8WW`=Rh%+=$CtJ%RXR666nM(T+HVqlqpx;8yL|Igr;S^3Cc zE+LB;AO`*!1KjF|{Qwqa&em_s!?RXEyMu;;c_k_!pf6nlz`%W^se(E#P=`Foq>F$eggRp27Z~^er@Klz diff --git a/tests/unit/.DS_Store b/tests/unit/.DS_Store deleted file mode 100644 index e0d2e115f4431929bc277eccea8e8f7165d1ce26..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeHKOH0E*5Z>*>HWVQTg&r5Y7Ob|4#Y>3w4;aydN=-=6V45v`m_sS#tbfQ~;_q>0 zcLOa3k0N#kcE8zq>}Ed5J{V)%U4%!B*^IF!G(?U{iJ-aERWZScT+NXL*=!OdGANnm zZ<_Gi>uk&<3t7b0zyBkcB=KzC?SAq~wN~G0G^|Fe@!&nn%$xi3#mx1mcR0G1G6@RZ z4{qWp8`*moGM)Qz8ckI~97Ygwe;cQv%v`xh! 
zj=LSv@13qzR&)R0@ce4@l8jUNritXhxRPCi1-yf@R?(|BO%s_ufwjmeA_<8BVt^PR z2DXy{a~?EX+gUzUO$-nNKQMs%gMfzU7|b=Qtphr|KBK>ehypskB@l%{$6&4zJRn@B z0_s$5o)}!GgI$<7$6&5er!%frhI!1&#p8vm)xj=QIOC2+>WKkjV3UEW9@=>RU&1d_ z`pDl*p%F1a4E!?&cx~tnT`0<&t>4PSvsOTRgoc7~IVvEaZ(IUkfcr>WIkjJ)4snjb WTqDkcc9jlD7Xd{Gb;Q6gFz^LI;Y!{B From 081e6a1890af77aeb508fa57b91498aea8f5ebee Mon Sep 17 00:00:00 2001 From: Vladimir Rudnyh Date: Sun, 16 Feb 2025 14:06:54 +0700 Subject: [PATCH 5/6] Add 'ultralytics' tests dependency --- pyproject.toml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 8cdc1c3d5..ed047fde9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -102,7 +102,8 @@ tests = [ "hypothesis", "aiotools>=1.7.0", "requests-mock", - "scipy" + "scipy", + "ultralytics" ] dev = [ "datachain[docs,tests]", @@ -118,7 +119,7 @@ examples = [ "defusedxml", "accelerate", "huggingface_hub[hf_transfer]", - "ultralytics==8.3.74", + "ultralytics", "open_clip_torch" ] From e4ccc3e2ee882a59a9896137810de8b08ca7319f Mon Sep 17 00:00:00 2001 From: Vladimir Rudnyh Date: Sun, 16 Feb 2025 14:52:48 +0700 Subject: [PATCH 6/6] Revert 'BBox.from_list' method --- src/datachain/model/bbox.py | 24 ++++++++++++++++++++++++ tests/unit/model/test_bbox.py | 29 +++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+) diff --git a/src/datachain/model/bbox.py b/src/datachain/model/bbox.py index 0c1a6c00b..9b6160321 100644 --- a/src/datachain/model/bbox.py +++ b/src/datachain/model/bbox.py @@ -1,4 +1,5 @@ from collections.abc import Sequence +from warnings import warn from pydantic import Field @@ -21,6 +22,29 @@ class BBox(DataModel): title: str = Field(default="") coords: list[float] = Field(default=[]) + @staticmethod + def from_list(coords: Sequence[float], title: str = "") -> "BBox": + """ + Create a bounding box from a list of coordinates. + + Args: + coords (list[float]): The bounding box coordinates. + title (str): The title of the bounding box. + + Returns: + BBox: The bounding box instance. + """ + warn( + "This method is deprecated. Use `BBox(title, coords)` instead.", + DeprecationWarning, + stacklevel=2, + ) + if not isinstance(coords, (list, tuple)) or len(coords) != 4: + raise ValueError("Bounding box must be a list of 4 coordinates.") + if not all(isinstance(value, (int, float)) for value in coords): + raise ValueError("Bounding box coordinates must be floats or integers.") + return BBox(title=title, coords=list(map(float, coords))) + def convert( self, img_size: Sequence[int], diff --git a/tests/unit/model/test_bbox.py b/tests/unit/model/test_bbox.py index 7a640b5d2..7340ae73e 100644 --- a/tests/unit/model/test_bbox.py +++ b/tests/unit/model/test_bbox.py @@ -1,3 +1,4 @@ +import pytest from numpy.testing import assert_array_almost_equal from datachain.model import BBox, OBBox @@ -11,6 +12,34 @@ def test_bbox(): } +@pytest.mark.parametrize( + "coords,result", + [ + ((10, 20, 90, 80), [10.0, 20.0, 90.0, 80.0]), + ([10, 20, 90, 80], [10.0, 20.0, 90.0, 80.0]), + ([0.1, 0.2, 0.9, 0.8], [0.1, 0.2, 0.9, 0.8]), + ], +) +def test_bbox_from_list(coords, result): + bbox = BBox.from_list(coords) + assert bbox.model_dump() == {"title": "", "coords": result} + + +@pytest.mark.parametrize( + "coords", + [ + None, + [], + [10, 20, 90], + [10, 20, 90, 80, 100], + [10, 20, "90", 80], + ], +) +def test_bbox_from_list_error(coords): + with pytest.raises(ValueError): + BBox.from_list(coords) + + def test_bbox_convert(): bbox = BBox(title="Object", coords=[10, 20, 90, 80]) assert_array_almost_equal(