From df050a72b8ae823a409ca53770ab71499a96c29e Mon Sep 17 00:00:00 2001
From: salkhokhar <61519106+salkhokhar@users.noreply.github.com>
Date: Tue, 2 Aug 2022 00:02:35 +0530
Subject: [PATCH] Update figure_iou.py

As mentioned in issue [#319](https://github.com/meituan/YOLOv6/issues/319),
the IoU loss sometimes gets stuck at NaN during training.
[@qinhao14](https://github.com/qinhao14) suggested using torch.nan_to_num(),
which solved the problem for me. After multiple training sessions, no new
issues have arisen from this change.
---
 yolov6/utils/figure_iou.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/yolov6/utils/figure_iou.py b/yolov6/utils/figure_iou.py
index 13b69d77..1d4b7589 100644
--- a/yolov6/utils/figure_iou.py
+++ b/yolov6/utils/figure_iou.py
@@ -83,6 +83,8 @@ def __call__(self, box1, box2):
             loss = loss.sum()
         elif self.reduction == 'mean':
             loss = loss.mean()
+
+        loss = torch.nan_to_num(loss)
         return loss
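
Note (not part of the patch): a minimal sketch of what torch.nan_to_num does to the reduced loss, assuming PyTorch >= 1.8 and the default replacement values.

```python
import torch

# A loss value that has collapsed to NaN, as described in issue #319.
loss = torch.tensor(float("nan"))

# torch.nan_to_num replaces NaN with 0.0 by default (and +/-inf with the
# largest/smallest representable finite values), so the training step sees
# a finite loss instead of propagating NaN through the backward pass.
loss = torch.nan_to_num(loss)

print(loss)  # tensor(0.)
```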