From 5ef9afc2b3deba35c2775a95a5e744dafa5d5add Mon Sep 17 00:00:00 2001
From: Dhruvesh Patel
Date: Wed, 18 Nov 2020 12:26:16 -0500
Subject: [PATCH 1/2] Added L2 regularization on side lengths

---
 .../modules/regularization/__init__.py        |  2 +
 .../regularization/l2_side_regularizer.py     | 58 +++++++++++++++++++
 .../modules/regularization/regularizer.py     | 47 +++++++++++++++
 3 files changed, 107 insertions(+)
 create mode 100644 box_embeddings/modules/regularization/l2_side_regularizer.py
 create mode 100644 box_embeddings/modules/regularization/regularizer.py

diff --git a/box_embeddings/modules/regularization/__init__.py b/box_embeddings/modules/regularization/__init__.py
index e69de29b..2edfcdf5 100644
--- a/box_embeddings/modules/regularization/__init__.py
+++ b/box_embeddings/modules/regularization/__init__.py
@@ -0,0 +1,2 @@
+from .regularizer import BoxRegularizer
+from .l2_side_regularizer import l2_side_regularizer, L2SideBoxRegularizer
diff --git a/box_embeddings/modules/regularization/l2_side_regularizer.py b/box_embeddings/modules/regularization/l2_side_regularizer.py
new file mode 100644
index 00000000..00460f04
--- /dev/null
+++ b/box_embeddings/modules/regularization/l2_side_regularizer.py
@@ -0,0 +1,58 @@
+from typing import List, Tuple, Union, Dict, Any, Optional
+from box_embeddings.common.registrable import Registrable
+import torch
+from box_embeddings.parameterizations.box_tensor import BoxTensor
+from box_embeddings.modules.regularization.regularizer import BoxRegularizer
+
+eps = 1e-23
+
+
+def l2_side_regularizer(
+    box_tensor: BoxTensor, log_scale: bool = False
+) -> Union[float, torch.Tensor]:
+    """Applies L2 regularization on all sides of all boxes and returns the mean.
+
+    Args:
+        box_tensor: Input box tensor whose side lengths are regularized.
+        log_scale: If True, use the log of the side lengths instead of their squares.
+
+    Returns:
+        Scalar regularization value (mean over all boxes and dimensions).
+    """
+    z = box_tensor.z  # (..., box_dim)
+    Z = box_tensor.Z  # (..., box_dim)
+
+    if not log_scale:
+        return torch.mean((Z - z) ** 2)
+    else:
+        return torch.mean(torch.log(torch.abs(Z - z) + eps))
+
+
+@BoxRegularizer.register("l2_side")
+class L2SideBoxRegularizer(BoxRegularizer):
+
+    """Applies L2 regularization on side lengths."""
+
+    def __init__(self, weight: float, log_scale: bool = False) -> None:
+        """Creates an L2 regularizer on box side lengths.
+
+        Args:
+            weight: Weight (hyperparameter) given to this regularization in the overall loss.
+            log_scale: Whether the output should be in log scale or not.
+                Should be true in almost any practical case where box_dim > 5.
+
+
+        """
+        super().__init__(weight, log_scale=log_scale)
+
+    def _forward(self, box_tensor: BoxTensor) -> Union[float, torch.Tensor]:
+        """Applies L2 regularization on all sides of all boxes and returns the mean.
+
+        Args:
+            box_tensor: Input box tensor whose side lengths are regularized.
+
+        Returns:
+            Scalar regularization value.
+        """
+
+        return l2_side_regularizer(box_tensor, log_scale=self.log_scale)
diff --git a/box_embeddings/modules/regularization/regularizer.py b/box_embeddings/modules/regularization/regularizer.py
new file mode 100644
index 00000000..2eb8cede
--- /dev/null
+++ b/box_embeddings/modules/regularization/regularizer.py
@@ -0,0 +1,47 @@
+from typing import List, Tuple, Union, Dict, Any, Optional
+from box_embeddings.common.registrable import Registrable
+import torch
+from box_embeddings.parameterizations.box_tensor import BoxTensor
+
+
+class BoxRegularizer(torch.nn.Module, Registrable):
+
+    """Base box-regularizer class."""
+
+    def __init__(
+        self, weight: float, log_scale: bool = True, **kwargs: Any
+    ) -> None:
+        """
+        Args:
+            weight: Weight (hyperparameter) given to this regularization in the overall loss.
+            log_scale: Whether the output should be in log scale or not.
+                Should be true in almost any practical case where box_dim > 5.
+            kwargs: Unused
+        """
+        super().__init__()  # type:ignore
+        self.weight = weight
+        self.log_scale = log_scale
+
+    def forward(self, box_tensor: BoxTensor) -> Union[float, torch.Tensor]:
+        """Calls _forward and multiplies the result by the weight.
+
+        Args:
+            box_tensor: Input box tensor
+
+        Returns:
+            Scalar regularization loss
+        """
+
+        return self.weight * self._forward(box_tensor)
+
+    def _forward(self, box_tensor: BoxTensor) -> Union[float, torch.Tensor]:
+        """The method that does all the work and needs to be overridden.
+
+        Args:
+            box_tensor: Input box tensor
+
+        Returns:
+            0.0 in this base implementation
+        """
+
+        return 0.0

From 16ebcb2185666a3e7d6933e2dddaa6bc09a4e021 Mon Sep 17 00:00:00 2001
From: Dhruvesh
Date: Wed, 16 Dec 2020 13:53:41 -0500
Subject: [PATCH 2/2] Keep torch version >=1.6

In an earlier commit, I updated the torch version to >=1.7, but that creates
incompatibilities when the box-embeddings package is used in other projects.
Hence, this patch loosens the requirement for better compatibility.
---
 core_requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core_requirements.txt b/core_requirements.txt
index 1280b3cc..e2b9d2b9 100644
--- a/core_requirements.txt
+++ b/core_requirements.txt
@@ -1,2 +1,2 @@
-torch >= 1.7.0
+torch >= 1.6.0
 numpy
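
A minimal usage sketch of the regularizer introduced in PATCH 1/2 is shown
below. It assumes that a BoxTensor can be constructed from a single tensor
with the z and Z corners stacked along dim -2; that constructor, the corner
values, and the weight=1e-2 hyperparameter are illustrative assumptions, not
part of the patch.

    import torch

    from box_embeddings.parameterizations.box_tensor import BoxTensor
    from box_embeddings.modules.regularization import L2SideBoxRegularizer

    # A batch of 8 boxes in 5 dimensions, given by lower (z) and upper (Z) corners.
    z = torch.zeros(8, 5)
    Z = torch.rand(8, 5) + 0.1

    # Assumed construction: z and Z stacked along the second-to-last dimension.
    boxes = BoxTensor(torch.stack((z, Z), dim=-2))

    # weight scales the penalty added to the overall loss; log_scale=True is
    # recommended when box_dim > 5 (see the docstrings in the patch).
    regularizer = L2SideBoxRegularizer(weight=1e-2, log_scale=True)

    reg_loss = regularizer(boxes)  # equals weight * mean(log(|Z - z| + eps))
    print(reg_loss)

During training, reg_loss would simply be added to the task loss before the
backward pass.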