Commit d48c3a2

Use gpu param from config, test last model
1 parent: 26f409d

File tree

1 file changed: +2 -4 lines changed


code2seq/utils/train.py

Lines changed: 2 additions & 4 deletions
@@ -32,8 +32,6 @@ def train(model: LightningModule, data_module: LightningDataModule, config: Dict
     early_stopping_callback = EarlyStopping(patience=params.patience, monitor="val/loss", verbose=True, mode="min")
     # define callback for printing intermediate result
     print_epoch_result_callback = PrintEpochResultCallback(after_test=False)
-    # use gpu if it exists
-    gpu = 1 if torch.cuda.is_available() else None
     # define learning rate logger
     lr_logger = LearningRateMonitor("step")
     # define progress bar callback
@@ -45,10 +43,10 @@ def train(model: LightningModule, data_module: LightningDataModule, config: Dict
         check_val_every_n_epoch=params.val_every_epoch,
         log_every_n_steps=params.log_every_n_steps,
         logger=wandb_logger,
-        gpus=gpu,
+        gpus=params.gpu,
         callbacks=[lr_logger, early_stopping_callback, checkpoint_callback, print_epoch_result_callback, progress_bar],
         resume_from_checkpoint=config.get("checkpoint", None),
     )
 
     trainer.fit(model=model, datamodule=data_module)
-    trainer.test(datamodule=data_module, ckpt_path="best")
+    trainer.test(model=model, datamodule=data_module)
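
In effect, the commit changes two things: the Trainer now takes its GPU count from the config (gpus=params.gpu) instead of auto-detecting a single GPU with torch.cuda.is_available(), and trainer.test() now evaluates the model instance left in memory after the last epoch instead of reloading the best checkpoint via ckpt_path="best". Below is a minimal sketch of the resulting function; it assumes the pre-2.0 PyTorch Lightning Trainer API, invents a "hyper_parameters" config key for illustration, and omits the W&B logger, checkpoint callback, progress bar, and PrintEpochResultCallback that the full file wires in.

```python
# Sketch only: simplified version of train() after this commit.
from typing import Dict

from pytorch_lightning import LightningDataModule, LightningModule, Trainer
from pytorch_lightning.callbacks import EarlyStopping, LearningRateMonitor


def train(model: LightningModule, data_module: LightningDataModule, config: Dict):
    # Assumption: hyperparameters sit under a nested key and support attribute
    # access (e.g. a SimpleNamespace or omegaconf node); the real key name and
    # config loading are not visible in this diff.
    params = config["hyper_parameters"]

    early_stopping_callback = EarlyStopping(
        patience=params.patience, monitor="val/loss", verbose=True, mode="min"
    )
    lr_logger = LearningRateMonitor("step")

    trainer = Trainer(
        check_val_every_n_epoch=params.val_every_epoch,
        log_every_n_steps=params.log_every_n_steps,
        # GPU count now comes from the config rather than
        # "1 if torch.cuda.is_available() else None".
        gpus=params.gpu,
        callbacks=[lr_logger, early_stopping_callback],
        resume_from_checkpoint=config.get("checkpoint", None),
    )

    trainer.fit(model=model, datamodule=data_module)
    # Test the weights left in memory after the last epoch instead of
    # reloading the best checkpoint (previously ckpt_path="best").
    trainer.test(model=model, datamodule=data_module)


# Example config shape for this sketch (the real project loads it elsewhere):
# from types import SimpleNamespace
# config = {
#     "hyper_parameters": SimpleNamespace(
#         patience=10, val_every_epoch=1, log_every_n_steps=50, gpu=1
#     ),
#     "checkpoint": None,
# }
```

One practical consequence of testing the in-memory model: the reported test metrics reflect the final epoch's weights, which early stopping may have allowed to drift past the best validation loss, rather than the checkpoint that minimized val/loss.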
