Skip to content

Commit

Permalink
feat: add classmethod called new to generate a schema instance with a…
Browse files Browse the repository at this point in the history
… new token (#52)

* feat: add classmethod called new to generate a schema instance with a new token

Signed-off-by: ktro2828 <[email protected]>

* docs: update document

Signed-off-by: ktro2828 <[email protected]>

* fix: resolve error in schema serialization

Signed-off-by: ktro2828 <[email protected]>

---------

Signed-off-by: ktro2828 <[email protected]>
  • Loading branch information
ktro2828 authored Nov 27, 2024
1 parent 3a38f4f commit 413280a
Show file tree
Hide file tree
Showing 18 changed files with 178 additions and 36 deletions.
24 changes: 24 additions & 0 deletions docs/tutorials/customize.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,27 @@
## Generate Schema with a new token

---

You can create a schema instance containing the specified table data with a new token using the `new(...)` classmethod.

```python title="generate_attribute.py"
from t4_devkit.schema import Attribute, serialize_schema

# table data without the token field
data = {
"name": "foo",
"description": "this is re-generated attribute."
}

attr1 = Attribute.new(data)

# Also, it allows us to create a copy of the existing table data with a new token
serialized = serialize_schema(attr1)
attr2 = Attribute.new(serialized)

assert attr1.token != attr2.token
```

## Customize Schema

---
Expand Down
48 changes: 14 additions & 34 deletions t4_devkit/schema/serialize.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
from __future__ import annotations

from enum import Enum
from functools import partial
from typing import TYPE_CHECKING, Any, Sequence
from typing import TYPE_CHECKING, Any

import numpy as np
from attrs import asdict
from attrs import asdict, filters
from pyquaternion import Quaternion

if TYPE_CHECKING:
Expand Down Expand Up @@ -36,34 +35,15 @@ def serialize_schema(data: SchemaTable) -> dict:
Returns:
Serialized dict data.
"""
dict_factory = partial(_schema_as_dict_factory, excludes=data.shortcuts())
return asdict(data, dict_factory=dict_factory)


def _schema_as_dict_factory(
    data: list[tuple[str, Any]], *, excludes: Sequence[str] | None = None
) -> dict:
    """A factory to convert schema dataclass field to dict data.

    Args:
        data (list[tuple[str, Any]]): Pairs of (field name, field value) taken from the dataclass.
        excludes (Sequence[str] | None, optional): Sequence of field names to be excluded.

    Returns:
        Converted dict data.
    """

    def _convert_value(value: Any) -> Any:
        # Convert values that are not JSON-serializable into plain Python types.
        if isinstance(value, np.ndarray):
            return value.tolist()
        elif isinstance(value, Quaternion):
            # presumably `q` holds the raw quaternion elements — verify against pyquaternion
            return value.q.tolist()
        elif isinstance(value, Enum):
            return value.value
        return value

    # Skip excluded field names (if any) while converting each value.
    return (
        {k: _convert_value(v) for k, v in data}
        if excludes is None
        else {k: _convert_value(v) for k, v in data if k not in excludes}
    )
excludes = filters.exclude(*data.shortcuts()) if data.shortcuts() is not None else None
return asdict(data, filter=excludes, value_serializer=_value_serializer)


def _value_serializer(data: SchemaTable, attr: Any, value: Any) -> Any:
    """Convert a single attrs field value into a JSON-serializable form.

    Passed to ``attrs.asdict`` as ``value_serializer``; ``data`` and ``attr``
    are required by that callback signature but are unused here.

    Args:
        data (SchemaTable): Schema instance being serialized (unused).
        attr (Any): Field descriptor for the value (unused).
        value (Any): Field value to convert.

    Returns:
        A JSON-serializable representation of ``value``.
    """
    if isinstance(value, np.ndarray):
        # numpy arrays become (possibly nested) Python lists
        return value.tolist()
    if isinstance(value, Quaternion):
        # presumably `q` holds the raw quaternion elements — verify against pyquaternion
        return value.q.tolist()
    if isinstance(value, Enum):
        return value.value
    # anything else is assumed to already be serializable
    return value
19 changes: 19 additions & 0 deletions t4_devkit/schema/tables/base.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from __future__ import annotations

from abc import ABC
from secrets import token_hex
from typing import Any, TypeVar

from attrs import define
Expand Down Expand Up @@ -50,5 +51,23 @@ def from_dict(cls, data: dict[str, Any]) -> SchemaTable:
"""
return cls(**data)

@classmethod
def new(cls, data: dict[str, Any], *, token_nbytes: int = 16) -> SchemaTable:
    """Create a new schema instance generating random token.

    Args:
        data (dict[str, Any]): Schema field data without token; any existing
            ``token`` entry is overwritten.
        token_nbytes (int, optional): The number of bytes of a new token.

    Returns:
        Schema instance with a new token.
    """
    # Build a fresh mapping so the caller's dict is never mutated.
    fields = {**data, "token": token_hex(nbytes=token_nbytes)}
    return cls.from_dict(fields)


# Type variable covering any concrete schema table derived from SchemaBase.
SchemaTable = TypeVar("SchemaTable", bound=SchemaBase)
8 changes: 8 additions & 0 deletions tests/schema/tables/test_attribute_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,11 @@ def test_attribute_json(attribute_json) -> None:
def test_attribute(attribute_dict) -> None:
    """Ensure an Attribute instance can be built from a dictionary."""
    Attribute.from_dict(attribute_dict)


def test_new_attribute(attribute_dict) -> None:
    """Ensure Attribute.new assigns a token different from the input one."""
    data = dict(attribute_dict)
    data.pop("token", None)
    generated = Attribute.new(data)
    # a freshly generated token must never equal the original token
    assert generated.token != attribute_dict["token"]
8 changes: 8 additions & 0 deletions tests/schema/tables/test_calibrated_sensor_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,11 @@ def test_calibrated_sensor_json(calibrated_sensor_json) -> None:
def test_calibrated_sensor(calibrated_sensor_dict) -> None:
    """Ensure a CalibratedSensor instance can be built from a dictionary."""
    CalibratedSensor.from_dict(calibrated_sensor_dict)


def test_new_calibrated_sensor(calibrated_sensor_dict) -> None:
    """Ensure CalibratedSensor.new assigns a token different from the input one."""
    data = dict(calibrated_sensor_dict)
    data.pop("token", None)
    generated = CalibratedSensor.new(data)
    # a freshly generated token must never equal the original token
    assert generated.token != calibrated_sensor_dict["token"]
8 changes: 8 additions & 0 deletions tests/schema/tables/test_category_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,11 @@ def test_category_json(category_json) -> None:
def test_category(category_dict) -> None:
    """Ensure a Category instance can be built from a dictionary."""
    Category.from_dict(category_dict)


def test_new_category(category_dict) -> None:
    """Ensure Category.new assigns a token different from the input one."""
    data = dict(category_dict)
    data.pop("token", None)
    generated = Category.new(data)
    # a freshly generated token must never equal the original token
    assert generated.token != category_dict["token"]
8 changes: 8 additions & 0 deletions tests/schema/tables/test_ego_pose_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,11 @@ def test_ego_pose_json(ego_pose_json) -> None:
def test_ego_pose(ego_pose_dict) -> None:
    """Ensure an EgoPose instance can be built from a dictionary."""
    EgoPose.from_dict(ego_pose_dict)


def test_new_ego_pose(ego_pose_dict) -> None:
    """Ensure EgoPose.new assigns a token different from the input one."""
    data = dict(ego_pose_dict)
    data.pop("token", None)
    generated = EgoPose.new(data)
    # a freshly generated token must never equal the original token
    assert generated.token != ego_pose_dict["token"]
8 changes: 8 additions & 0 deletions tests/schema/tables/test_instance_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,11 @@ def test_instance_json(instance_json) -> None:
def test_instance(instance_dict) -> None:
    """Ensure an Instance record can be built from a dictionary."""
    Instance.from_dict(instance_dict)


def test_new_instance(instance_dict) -> None:
    """Ensure Instance.new assigns a token different from the input one."""
    data = dict(instance_dict)
    data.pop("token", None)
    generated = Instance.new(data)
    # a freshly generated token must never equal the original token
    assert generated.token != instance_dict["token"]
8 changes: 8 additions & 0 deletions tests/schema/tables/test_log_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,11 @@ def test_log_json(log_json) -> None:
def test_log(log_dict) -> None:
    """Ensure a Log instance can be built from a dictionary."""
    Log.from_dict(log_dict)


def test_new_log(log_dict) -> None:
    """Ensure Log.new assigns a token different from the input one."""
    data = dict(log_dict)
    data.pop("token", None)
    generated = Log.new(data)
    # a freshly generated token must never equal the original token
    assert generated.token != log_dict["token"]
8 changes: 8 additions & 0 deletions tests/schema/tables/test_map_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,11 @@ def test_map_json(map_json) -> None:
def test_map(map_dict) -> None:
    """Ensure a Map instance can be built from a dictionary."""
    Map.from_dict(map_dict)


def test_new_map(map_dict) -> None:
    """Ensure Map.new assigns a token different from the input one."""
    data = dict(map_dict)
    data.pop("token", None)
    generated = Map.new(data)
    # a freshly generated token must never equal the original token
    assert generated.token != map_dict["token"]
8 changes: 8 additions & 0 deletions tests/schema/tables/test_object_ann_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,11 @@ def test_object_ann_json(object_ann_json) -> None:
def test_object_ann(object_ann_dict) -> None:
    """Ensure an ObjectAnn instance can be built from a dictionary."""
    ObjectAnn.from_dict(object_ann_dict)


def test_new_object_ann(object_ann_dict) -> None:
    """Ensure ObjectAnn.new assigns a token different from the input one."""
    data = dict(object_ann_dict)
    data.pop("token", None)
    generated = ObjectAnn.new(data)
    # a freshly generated token must never equal the original token
    assert generated.token != object_ann_dict["token"]
8 changes: 8 additions & 0 deletions tests/schema/tables/test_sample_annotation_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,11 @@ def test_sample_annotation_json(sample_annotation_json) -> None:
def test_sample_annotation(sample_annotation_dict) -> None:
    """Ensure a SampleAnnotation instance can be built from a dictionary."""
    SampleAnnotation.from_dict(sample_annotation_dict)


def test_new_sample_annotation(sample_annotation_dict) -> None:
    """Ensure SampleAnnotation.new assigns a token different from the input one."""
    data = dict(sample_annotation_dict)
    data.pop("token", None)
    generated = SampleAnnotation.new(data)
    # a freshly generated token must never equal the original token
    assert generated.token != sample_annotation_dict["token"]
8 changes: 8 additions & 0 deletions tests/schema/tables/test_sample_data_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,3 +32,11 @@ def test_sample_data_json(sample_data_json) -> None:
def test_sample_data(sample_data_dict) -> None:
    """Ensure a SampleData instance can be built from a dictionary."""
    SampleData.from_dict(sample_data_dict)


def test_new_sample_data(sample_data_dict) -> None:
    """Ensure SampleData.new assigns a token different from the input one."""
    data = dict(sample_data_dict)
    data.pop("token", None)
    generated = SampleData.new(data)
    # a freshly generated token must never equal the original token
    assert generated.token != sample_data_dict["token"]
8 changes: 8 additions & 0 deletions tests/schema/tables/test_sample_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,11 @@ def test_sample_json(sample_json) -> None:
def test_sample(sample_dict) -> None:
    """Ensure a Sample instance can be built from a dictionary."""
    Sample.from_dict(sample_dict)


def test_new_sample(sample_dict) -> None:
    """Ensure Sample.new assigns a token different from the input one."""
    data = dict(sample_dict)
    data.pop("token", None)
    generated = Sample.new(data)
    # a freshly generated token must never equal the original token
    assert generated.token != sample_dict["token"]
8 changes: 8 additions & 0 deletions tests/schema/tables/test_sensor_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,3 +23,11 @@ def test_sensor_json(sensor_json) -> None:
def test_sensor(sensor_dict) -> None:
    """Ensure a Sensor instance can be built from a dictionary."""
    Sensor.from_dict(sensor_dict)


def test_new_sensor(sensor_dict) -> None:
    """Ensure Sensor.new assigns a token different from the input one."""
    data = dict(sensor_dict)
    data.pop("token", None)
    generated = Sensor.new(data)
    # a freshly generated token must never equal the original token
    assert generated.token != sensor_dict["token"]
8 changes: 8 additions & 0 deletions tests/schema/tables/test_surface_ann_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,11 @@ def test_surface_ann_json(surface_ann_json) -> None:
def test_surface_ann(surface_ann_dict) -> None:
    """Ensure a SurfaceAnn instance can be built from a dictionary."""
    SurfaceAnn.from_dict(surface_ann_dict)


def test_new_surface_ann(surface_ann_dict) -> None:
    """Ensure SurfaceAnn.new assigns a token different from the input one."""
    data = dict(surface_ann_dict)
    data.pop("token", None)
    generated = SurfaceAnn.new(data)
    # a freshly generated token must never equal the original token
    assert generated.token != surface_ann_dict["token"]
11 changes: 9 additions & 2 deletions tests/schema/tables/test_vehicle_state_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,5 +8,12 @@ def test_vehicle_state_json(vehicle_state_json) -> None:

def test_vehicle_state(vehicle_state_dict) -> None:
    """Test loading vehicle state from a dictionary."""
    # NOTE(review): from_dict appears to be invoked twice with the first result
    # printed — this looks like a diff-rendering artifact (removed lines shown
    # alongside their replacement); confirm that a single
    # `_ = VehicleState.from_dict(...)` call is the intended final code.
    s = VehicleState.from_dict(vehicle_state_dict)
    print(s)
    _ = VehicleState.from_dict(vehicle_state_dict)


def test_new_vehicle_state(vehicle_state_dict) -> None:
    """Ensure VehicleState.new assigns a token different from the input one."""
    data = dict(vehicle_state_dict)
    data.pop("token", None)
    generated = VehicleState.new(data)
    # a freshly generated token must never equal the original token
    assert generated.token != vehicle_state_dict["token"]
8 changes: 8 additions & 0 deletions tests/schema/tables/test_visibility_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,3 +34,11 @@ def test_visibility_json(visibility_json) -> None:
def test_visibility(visibility_dict) -> None:
    """Ensure a Visibility instance can be built from a dictionary."""
    Visibility.from_dict(visibility_dict)


def test_new_visibility(visibility_dict) -> None:
    """Ensure Visibility.new assigns a token different from the input one."""
    data = dict(visibility_dict)
    data.pop("token", None)
    generated = Visibility.new(data)
    # a freshly generated token must never equal the original token
    assert generated.token != visibility_dict["token"]

0 comments on commit 413280a

Please sign in to comment.