Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

valor-lite change iou caching #805

Draft
wants to merge 3 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 19 additions & 8 deletions lite/tests/object_detection/test_accuracy.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,26 @@
from valor_lite.object_detection.computation import compute_precion_recall


def test__compute_average_precision():
def test__compute_accuracy():

sorted_pairs = np.array(
[
# dt, gt, pd, iou, gl, pl, score,
[0.0, 0.0, 2.0, 0.25, 0.0, 0.0, 0.95],
[0.0, 0.0, 3.0, 0.33333, 0.0, 0.0, 0.9],
[0.0, 0.0, 4.0, 0.66667, 0.0, 0.0, 0.65],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.1],
[0.0, 0.0, 1.0, 0.5, 0.0, 0.0, 0.01],
# iou, score
[0.25, 0.95],
[0.33333, 0.9],
[0.66667, 0.65],
[1.0, 0.1],
[0.5, 0.01],
]
)
sorted_identifiers = np.array(
[
# dt, gt, pd, gl, pl
[0, 0, 2, 0, 0],
[0, 0, 3, 0, 0],
[0, 0, 4, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 1, 0, 0],
]
)

Expand All @@ -22,6 +32,7 @@ def test__compute_average_precision():

(_, _, accuracy, _, _) = compute_precion_recall(
sorted_pairs,
sorted_identifiers,
label_metadata=label_metadata,
iou_thresholds=iou_thresholds,
score_thresholds=score_thresholds,
Expand All @@ -36,7 +47,7 @@ def test__compute_average_precision():
assert (accuracy == expected).all()


def test_ap_using_torch_metrics_example(
def test_accuracy_using_torch_metrics_example(
torchmetrics_detections: list[Detection],
):
"""
Expand Down
23 changes: 17 additions & 6 deletions lite/tests/object_detection/test_average_precision.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,22 @@ def test__compute_average_precision():

sorted_pairs = np.array(
[
# dt, gt, pd, iou, gl, pl, score,
[0.0, 0.0, 2.0, 0.25, 0.0, 0.0, 0.95],
[0.0, 0.0, 3.0, 0.33333, 0.0, 0.0, 0.9],
[0.0, 0.0, 4.0, 0.66667, 0.0, 0.0, 0.65],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.1],
[0.0, 0.0, 1.0, 0.5, 0.0, 0.0, 0.01],
# iou, score,
[0.25, 0.95],
[0.33333, 0.9],
[0.66667, 0.65],
[1.0, 0.1],
[0.5, 0.01],
]
)
sorted_identifiers = np.array(
[
# dt, gt, pd, gl, pl
[0, 0, 2, 0, 0],
[0, 0, 3, 0, 0],
[0, 0, 4, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 1, 0, 0],
]
)

Expand All @@ -26,6 +36,7 @@

(results, _, _, _, _) = compute_precion_recall(
sorted_pairs,
sorted_identifiers,
label_metadata=label_metadata,
iou_thresholds=iou_thresholds,
score_thresholds=score_thresholds,
Expand Down
26 changes: 19 additions & 7 deletions lite/tests/object_detection/test_average_recall.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,13 +11,24 @@ def test__compute_average_recall():

sorted_pairs = np.array(
[
# dt, gt, pd, iou, gl, pl, score,
[0.0, 0.0, 2.0, 0.25, 0.0, 0.0, 0.95],
[0.0, 1.0, 3.0, 0.33333, 0.0, 0.0, 0.9],
[0.0, 0.0, 4.0, 0.66667, 0.0, 0.0, 0.65],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.1],
[0.0, 0.0, 1.0, 0.5, 0.0, 0.0, 0.01],
[0.0, 2.0, 5.0, 0.5, 1.0, 1.0, 0.95],
# iou, score
[0.25, 0.95],
[0.33333, 0.9],
[0.66667, 0.65],
[1.0, 0.1],
[0.5, 0.01],
[0.5, 0.95],
]
)
sorted_identifiers = np.array(
[
# dt, gt, pd, gl, pl,
[0, 0, 2, 0, 0],
[0, 1, 3, 0, 0],
[0, 0, 4, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 1, 0, 0],
[0, 2, 5, 1, 1],
]
)

Expand All @@ -27,6 +38,7 @@ def test__compute_average_recall():

(_, results, _, _, _,) = compute_precion_recall(
sorted_pairs,
sorted_identifiers,
label_metadata=label_metadata,
iou_thresholds=iou_thresholds,
score_thresholds=score_thresholds,
Expand Down
30 changes: 23 additions & 7 deletions lite/tests/object_detection/test_confusion_matrix.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,15 +20,27 @@ def _test_compute_confusion_matrix(
):
sorted_pairs = np.array(
[
# dt, gt, pd, iou, gl, pl, score,
[0.0, 0.0, 1.0, 0.98, 0.0, 0.0, 0.9],
[1.0, 1.0, 2.0, 0.55, 1.0, 0.0, 0.9],
[2.0, -1.0, 4.0, 0.0, -1.0, 0.0, 0.65],
[3.0, 4.0, 5.0, 1.0, 0.0, 0.0, 0.1],
[1.0, 2.0, 3.0, 0.55, 0.0, 0.0, 0.1],
[4.0, 5.0, -1.0, 0.0, 0.0, -1.0, -1.0],
# iou, score
[0.98, 0.9],
[0.55, 0.9],
[0.0, 0.65],
[1.0, 0.1],
[0.55, 0.1],
[0.0, -1.0],
]
)
sorted_identifiers = np.array(
[
# dt, gt, pd, gl, pl,
[0, 0, 1, 0, 0],
[1, 1, 2, 1, 0],
[2, -1, 4, -1, 0],
[3, 4, 5, 0, 0],
[1, 2, 3, 0, 0],
[4, 5, -1, 0, -1],
]
)

label_metadata = np.array([[3, 4], [1, 0]])
iou_thresholds = np.array([0.5])
score_thresholds = np.array([score / 100.0 for score in range(1, 101)])
Expand All @@ -39,6 +51,7 @@ def _test_compute_confusion_matrix(
missing_predictions,
) = compute_confusion_matrix(
data=sorted_pairs,
identifiers=sorted_identifiers,
label_metadata=label_metadata,
iou_thresholds=iou_thresholds,
score_thresholds=score_thresholds,
Expand Down Expand Up @@ -1164,6 +1177,9 @@ def test_confusion_matrix(
m["value"]["hallucinations"],
m["value"]["missing_predictions"],
)
assert m in expected_metrics
for m in expected_metrics:
assert m in actual_metrics
Expand Down
2 changes: 1 addition & 1 deletion lite/tests/object_detection/test_evaluator.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,4 +127,4 @@ def test_output_types_dont_contain_numpy(basic_detections: list[Detection]):
values = _flatten_metrics(metrics)
for value in values:
if isinstance(value, (np.generic, np.ndarray)):
raise TypeError(f"Value `{value}` has type `{type(value)}`.")
raise TypeError(f"Value '{value}' has type '{type(value)}'.")
65 changes: 54 additions & 11 deletions lite/tests/object_detection/test_filtering.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,16 @@ def test_filtering_one_detection(one_detection: list[Detection]):
evaluator._ranked_pairs
== np.array(
[
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.3],
[1.0, 0.3],
]
)
).all()

assert (
evaluator._ranked_identifiers
== np.array(
[
[0, 0, 0, 0, 0],
]
)
).all()
Expand Down Expand Up @@ -203,8 +212,18 @@ def test_filtering_two_detections(two_detections: list[Detection]):
evaluator._ranked_pairs
== np.array(
[
[1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.98],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.3],
[1.0, 0.98],
[1.0, 0.3],
]
)
).all()

assert (
evaluator._ranked_identifiers
== np.array(
[
[1, 0, 0, 0, 1],
[0, 0, 0, 0, 0],
]
)
).all()
Expand Down Expand Up @@ -333,10 +352,22 @@ def test_filtering_four_detections(four_detections: list[Detection]):
evaluator._ranked_pairs
== np.array(
[
[1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.98],
[3.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.98],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.3],
[2.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.3],
[1.0, 0.98],
[1.0, 0.98],
[1.0, 0.3],
[1.0, 0.3],
]
)
).all()

assert (
evaluator._ranked_identifiers
== np.array(
[
[1, 0, 0, 0, 1],
[3, 0, 0, 0, 1],
[0, 0, 0, 0, 0],
[2, 0, 0, 0, 0],
]
)
).all()
Expand Down Expand Up @@ -471,10 +502,22 @@ def test_filtering_all_detections(four_detections: list[Detection]):
evaluator._ranked_pairs
== np.array(
[
[1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.98],
[3.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.98],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.3],
[2.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.3],
[1.0, 0.98],
[1.0, 0.98],
[1.0, 0.3],
[1.0, 0.3],
]
)
).all()

assert (
evaluator._ranked_identifiers
== np.array(
[
[1, 0, 0, 0, 1],
[3, 0, 0, 0, 1],
[0, 0, 0, 0, 0],
[2, 0, 0, 0, 0],
]
)
).all()
Expand Down
23 changes: 17 additions & 6 deletions lite/tests/object_detection/test_pr_curve.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,22 @@ def test_pr_curve_simple():

sorted_pairs = np.array(
[
# dt, gt, pd, iou, gl, pl, score,
[0.0, 0.0, 2.0, 0.25, 0.0, 0.0, 0.95],
[0.0, 0.0, 3.0, 0.33333, 0.0, 0.0, 0.9],
[0.0, 0.0, 4.0, 0.66667, 0.0, 0.0, 0.65],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.1],
[0.0, 0.0, 1.0, 0.5, 0.0, 0.0, 0.01],
# iou, score,
[0.25, 0.95],
[0.33333, 0.9],
[0.66667, 0.65],
[1.0, 0.1],
[0.5, 0.01],
]
)
sorted_identifiers = np.array(
[
# dt, gt, pd, gl, pl
[0, 0, 2, 0, 0],
[0, 0, 3, 0, 0],
[0, 0, 4, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 1, 0, 0],
]
)

Expand All @@ -26,6 +36,7 @@ def test_pr_curve_simple():

(_, _, _, _, pr_curve) = compute_precion_recall(
sorted_pairs,
sorted_identifiers,
label_metadata=label_metadata,
iou_thresholds=iou_thresholds,
score_thresholds=score_thresholds,
Expand Down
Loading
Loading