Commit 08a3d5b0 authored by Fanis Baikas

Fixed wandb logging for learning rate.

parent 8d5ce588
Branches main
@@ -137,7 +137,7 @@ def train_pipleine(args):
             # Report metrics every 4 batches
             if ((batch_ct + 1) % 4) == 0:
                 # Log epoch and loss
-                wandb.log({'epoch': epoch, 'loss': loss, 'lr': lr_scheduler.get_last_lr()}, step=batch_ct)
+                wandb.log({'epoch': epoch, 'loss': loss, 'lr': lr_scheduler.get_last_lr()[0]}, step=batch_ct)
                 print(f"Epoch {epoch}, Batch {i}, Loss after {str(batch_ct)} batches: {loss:.3f},"
                       f" Batch time: {time_elapsed:.4f}, lr: {lr_scheduler.get_last_lr()[0]:.3f}")