Skip to content

Commit f53be01

Browse files
authored
lower bert learning rate (tinygrad#9481)
slightly better. first sub 3hr run https://wandb.ai/chenyuxyz/MLPerf-BERT/runs/0or96ink/overview
1 parent e03c0aa commit f53be01

File tree

1 file changed

+1
-1
lines changed

1 file changed

+1
-1
lines changed

examples/mlperf/model_train.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -658,7 +658,7 @@ def train_bert():
   # ** hyperparameters **
   BS = config["GLOBAL_BATCH_SIZE"] = getenv("BS", 11 * len(GPUS) if dtypes.default_float in (dtypes.float16, dtypes.bfloat16) else 8 * len(GPUS))
   EVAL_BS = config["EVAL_BS"] = getenv("EVAL_BS", 1 * len(GPUS))
-  max_lr = config["OPT_BASE_LEARNING_RATE"] = getenv("OPT_BASE_LEARNING_RATE", 0.00018 * math.sqrt(BS/96))
+  max_lr = config["OPT_BASE_LEARNING_RATE"] = getenv("OPT_BASE_LEARNING_RATE", 0.000175 * math.sqrt(BS/96))

   train_steps = config["TRAIN_STEPS"] = getenv("TRAIN_STEPS", 3300000 // BS)
   warmup_steps = config["NUM_WARMUP_STEPS"] = getenv("NUM_WARMUP_STEPS", 1)

0 commit comments

Comments (0)