diff --git a/config/model-unet.toml b/config/model-unet.toml
index 6effd699..5303f825 100644
--- a/config/model-unet.toml
+++ b/config/model-unet.toml
@@ -27,8 +27,5 @@
   # Learning rate for the optimizer.
   lr = 0.0001
 
-  # Weight decay l2 penalty for the optimizer
-  decay = 0.0001
-
   # Loss function name (e.g 'Lovasz', 'mIoU' or 'CrossEntropy')
   loss = 'Lovasz'
diff --git a/robosat/tools/train.py b/robosat/tools/train.py
index a5246429..a70e6500 100644
--- a/robosat/tools/train.py
+++ b/robosat/tools/train.py
@@ -78,7 +78,7 @@ def main(args):
     if model["opt"]["loss"] in ("CrossEntropy", "mIoU", "Focal"):
         sys.exit("Error: The loss function used, need dataset weights values")
 
-    optimizer = Adam(net.parameters(), lr=model["opt"]["lr"], weight_decay=model["opt"]["decay"])
+    optimizer = Adam(net.parameters(), lr=model["opt"]["lr"])
 
     resume = 0
     if args.checkpoint:
@@ -118,7 +118,6 @@ def map_location(storage, _):
     log.log("Batch Size:\t {}".format(model["common"]["batch_size"]))
     log.log("Image Size:\t {}".format(model["common"]["image_size"]))
     log.log("Learning Rate:\t {}".format(model["opt"]["lr"]))
-    log.log("Weight Decay:\t {}".format(model["opt"]["decay"]))
     log.log("Loss function:\t {}".format(model["opt"]["loss"]))
     if "weight" in locals():
         log.log("Weights :\t {}".format(dataset["weights"]["values"]))
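For context (not part of the patch itself): `torch.optim.Adam` defaults to `weight_decay=0`, so dropping the keyword argument disables the L2 penalty entirely rather than leaving it implicitly configured. A minimal sketch of that equivalence, using a hypothetical toy module in place of the actual U-Net:

```python
import torch
from torch.optim import Adam

# Hypothetical stand-in for the U-Net; any nn.Module works for this check.
net = torch.nn.Linear(4, 2)

# After this patch: no explicit weight_decay, so Adam's default (0) applies.
optimizer = Adam(net.parameters(), lr=0.0001)
assert optimizer.defaults["weight_decay"] == 0
```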