
Commit 27833da

Removes Adam's L2 regularization until fixed in pytorch#4429
1 parent e31b206
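Background: at the time of this commit, `torch.optim.Adam` applied its `weight_decay` argument as an L2 penalty folded into the gradient before the adaptive moment estimates, which is the behavior pytorch#4429 flags; rather than keep the misbehaving penalty, this commit drops it entirely. Below is a minimal sketch, not robosat code, of the decoupled (AdamW-style) alternative that issue argues for; `net`, `lr`, and `decay` are stand-ins for the values train.py reads from the model config.

```python
import torch
from torch.optim import Adam

# Sketch: decoupled weight decay applied by hand, keeping Adam itself
# free of the L2 term this commit removes. Names are stand-ins, not
# robosat's actual training loop.
net = torch.nn.Linear(4, 2)
lr, decay = 1e-4, 1e-4
optimizer = Adam(net.parameters(), lr=lr)  # no weight_decay, as in the diff

def train_step(inputs, targets):
    optimizer.zero_grad()
    loss = torch.nn.functional.mse_loss(net(inputs), targets)
    loss.backward()
    optimizer.step()
    with torch.no_grad():
        for p in net.parameters():
            p.mul_(1.0 - lr * decay)  # shrink weights outside the Adam update
    return loss.item()
```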

2 files changed: 1 addition, 5 deletions


config/model-unet.toml (3 deletions)
```diff
@@ -27,8 +27,5 @@
 # Learning rate for the optimizer.
 lr = 0.0001
 
-# Weight decay l2 penalty for the optimizer
-decay = 0.0001
-
 # Loss function name (e.g 'Lovasz', 'mIoU' or 'CrossEntropy')
 loss = 'Lovasz'
```
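For reference, a hedged sketch of how these keys are consumed: robosat loads the TOML config into a nested dict, so the `model["opt"]["lr"]` lookups in train.py map onto the `lr` key above. The `toml.load` call and the `opt` table name are assumptions inferred from those lookups; only the key names come from the diff.

```python
import toml  # assuming the standard `toml` package

# After this commit the opt table carries lr and loss but no decay key.
model = toml.load("config/model-unet.toml")
lr = model["opt"]["lr"]             # 0.0001
assert "decay" not in model["opt"]  # removed by this commit
```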

robosat/tools/train.py (1 addition, 2 deletions)
```diff
@@ -78,7 +78,7 @@ def main(args):
     if model["opt"]["loss"] in ("CrossEntropy", "mIoU", "Focal"):
         sys.exit("Error: The loss function used, need dataset weights values")
 
-    optimizer = Adam(net.parameters(), lr=model["opt"]["lr"], weight_decay=model["opt"]["decay"])
+    optimizer = Adam(net.parameters(), lr=model["opt"]["lr"])
 
     resume = 0
     if args.checkpoint:
@@ -118,7 +118,6 @@ def map_location(storage, _):
     log.log("Batch Size:\t {}".format(model["common"]["batch_size"]))
     log.log("Image Size:\t {}".format(model["common"]["image_size"]))
     log.log("Learning Rate:\t {}".format(model["opt"]["lr"]))
-    log.log("Weight Decay:\t {}".format(model["opt"]["decay"]))
     log.log("Loss function:\t {}".format(model["opt"]["loss"]))
     if "weight" in locals():
         log.log("Weights :\t {}".format(dataset["weights"]["values"]))
```
