From 41884e2bcc0e69ad6c21a8738578d74a0ce18a0a Mon Sep 17 00:00:00 2001
From: Yoshitomo Matsubara
Date: Fri, 24 May 2024 00:07:43 -0700
Subject: [PATCH 1/4] Remove print

---
 torchdistill/models/custom/bottleneck/detection/rcnn.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/torchdistill/models/custom/bottleneck/detection/rcnn.py b/torchdistill/models/custom/bottleneck/detection/rcnn.py
index 5800ef64..6953dda3 100644
--- a/torchdistill/models/custom/bottleneck/detection/rcnn.py
+++ b/torchdistill/models/custom/bottleneck/detection/rcnn.py
@@ -49,7 +49,6 @@ def custom_maskrcnn_resnet_fpn(backbone, weights=None, progress=True,
     mask_roi_pool = None if num_feature_maps == 4 \
         else MultiScaleRoIAlign(featmap_names=[str(i) for i in range(num_feature_maps)],
                                 output_size=14, sampling_ratio=2)
-    print(kwargs)
     model = MaskRCNN(backbone_model, num_classes, box_roi_pool=box_roi_pool, mask_roi_pool=mask_roi_pool, **kwargs)
     if weights is not None:
         state_dict = \

From ef636b8431fed071f9da87672c1282616dc8f8e2 Mon Sep 17 00:00:00 2001
From: Yoshitomo Matsubara
Date: Fri, 24 May 2024 00:58:58 -0700
Subject: [PATCH 2/4] Use warning

---
 torchdistill/common/module_util.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/torchdistill/common/module_util.py b/torchdistill/common/module_util.py
index 2b4500c5..01446148 100644
--- a/torchdistill/common/module_util.py
+++ b/torchdistill/common/module_util.py
@@ -105,15 +105,17 @@ def get_module(root_module, module_path):
                     if isinstance(module, Sequential) and module_name.lstrip('-').isnumeric():
                         module = module[int(module_name)]
                     else:
-                        logger.info('`{}` of `{}` could not be reached in `{}`'.format(module_name, module_path,
-                                                                                       type(root_module).__name__))
+                        logger.warning('`{}` of `{}` could not be reached in `{}`'.format(
+                            module_name, module_path, type(root_module).__name__)
+                        )
                 else:
                     module = getattr(module, module_name)
             elif isinstance(module, (Sequential, ModuleList)) and module_name.lstrip('-').isnumeric():
                 module = module[int(module_name)]
             else:
-                logger.info('`{}` of `{}` could not be reached in `{}`'.format(module_name, module_path,
-                                                                               type(root_module).__name__))
+                logger.warning('`{}` of `{}` could not be reached in `{}`'.format(
+                    module_name, module_path, type(root_module).__name__)
+                )
                 return None
         else:
             module = getattr(module, module_name)
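A minimal usage sketch for the change above (not part of the patch series; the toy model and module paths are hypothetical): an unresolvable path passed to get_module() is now reported at WARNING level, so it surfaces under a default logging configuration, and the lookup still returns None.

```python
import logging

from torch import nn
from torchdistill.common.module_util import get_module

logging.basicConfig(level=logging.WARNING)

model = nn.Sequential(nn.Linear(4, 8), nn.ReLU(), nn.Linear(8, 2))

# A numeric path into a Sequential resolves to the indexed child module.
head = get_module(model, '2')                  # -> Linear(8, 2)

# An unreachable path now logs a WARNING (previously INFO) and yields None.
missing = get_module(model, 'classifier.fc')
assert missing is None
```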
From e5189158a4c84939127ba0a3dd6162b90f9a5fdc Mon Sep 17 00:00:00 2001
From: Yoshitomo Matsubara
Date: Sat, 25 May 2024 09:08:18 -0700
Subject: [PATCH 3/4] Use warning

---
 torchdistill/common/main_util.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/torchdistill/common/main_util.py b/torchdistill/common/main_util.py
index 94218dfc..664fd39d 100644
--- a/torchdistill/common/main_util.py
+++ b/torchdistill/common/main_util.py
@@ -296,7 +296,7 @@ def load_ckpt(ckpt_file_path, model=None, optimizer=None, lr_scheduler=None, str
             logger.info('Loading model parameters only')
             model.load_state_dict(ckpt, strict=strict)
         else:
-            logger.info('No model parameters found')
+            logger.warning('No model parameters found')
 
     if optimizer is not None:
         if 'optimizer' in ckpt:
@@ -306,7 +306,7 @@ def load_ckpt(ckpt_file_path, model=None, optimizer=None, lr_scheduler=None, str
             logger.info('Loading optimizer parameters only')
             optimizer.load_state_dict(ckpt)
         else:
-            logger.info('No optimizer parameters found')
+            logger.warning('No optimizer parameters found')
 
     if lr_scheduler is not None:
         if 'lr_scheduler' in ckpt:
@@ -316,7 +316,7 @@ def load_ckpt(ckpt_file_path, model=None, optimizer=None, lr_scheduler=None, str
             logger.info('Loading scheduler parameters only')
             lr_scheduler.load_state_dict(ckpt)
         else:
-            logger.info('No scheduler parameters found')
+            logger.warning('No scheduler parameters found')
 
     return ckpt.get('best_value', 0.0), ckpt.get('args', None)

From bfc175a0c960f9fafe93e893f6062d9a31f6a6f8 Mon Sep 17 00:00:00 2001
From: Yoshitomo Matsubara
Date: Sat, 25 May 2024 09:28:00 -0700
Subject: [PATCH 4/4] Simplify

---
 torchdistill/models/registry.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/torchdistill/models/registry.py b/torchdistill/models/registry.py
index 96164851..1b48265b 100644
--- a/torchdistill/models/registry.py
+++ b/torchdistill/models/registry.py
@@ -2,7 +2,6 @@
 
 from ..common import misc_util
 
-MODEL_DICT = dict()
 MODEL_DICT = dict()
 ADAPTATION_MODULE_DICT = dict()
 AUXILIARY_MODEL_WRAPPER_DICT = dict()
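A companion sketch for the load_ckpt() change in PATCH 3/4 (hypothetical file name and toy modules; the 'model'/'optimizer'/'lr_scheduler' checkpoint keys are inferred from the hunks above): components present in the checkpoint are restored, missing ones are now reported at WARNING rather than INFO level, and the function returns the stored best_value plus any saved args.

```python
import torch
from torch import nn, optim

from torchdistill.common.main_util import load_ckpt

model = nn.Linear(10, 2)
optimizer = optim.SGD(model.parameters(), lr=0.1)
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=1)

# Write a checkpoint holding all three components plus a tracked metric.
torch.save({'model': model.state_dict(),
            'optimizer': optimizer.state_dict(),
            'lr_scheduler': scheduler.state_dict(),
            'best_value': 0.5}, 'checkpoint.pt')

best_value, ckpt_args = load_ckpt('checkpoint.pt', model=model,
                                  optimizer=optimizer, lr_scheduler=scheduler)
print(best_value)   # 0.5; no 'args' entry was saved, so ckpt_args is None
```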