Skip to content

Commit

Permalink
Add callback for tracking gradient norm (#313)
Browse files Browse the repository at this point in the history
* add gradient tracker callback

* add norm type to log message
  • Loading branch information
EthanMarx authored Nov 7, 2024
1 parent 87ee91f commit 107233d
Showing 1 changed file with 11 additions and 0 deletions.
11 changes: 11 additions & 0 deletions projects/train/train/callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from lightning import pytorch as pl
from lightning.pytorch.callbacks import Callback
from lightning.pytorch.loggers import WandbLogger
from lightning.pytorch.utilities import grad_norm

BOTO_RETRY_EXCEPTIONS = (ClientError, ConnectTimeoutError)

Expand Down Expand Up @@ -125,3 +126,13 @@ def on_train_start(self, trainer, pl_module):
os.path.join(save_dir, "wandb_url.txt"), "w"
) as f:
f.write(url)


class GradientTracker(Callback):
    """Log the total gradient norm of the model before every optimizer step.

    Args:
        norm_type:
            Order of the norm computed over all of the module's gradients
            (default ``2``, the Euclidean norm). Passed straight through to
            ``lightning.pytorch.utilities.grad_norm``.
    """

    def __init__(self, norm_type: int = 2):
        super().__init__()
        self.norm_type = norm_type

    def on_before_optimizer_step(self, trainer, pl_module, optimizer):
        # grad_norm returns per-parameter norms plus a "..._total" entry;
        # its keys embed the norm order as a float, e.g. "grad_2.0_norm_total".
        norms = grad_norm(pl_module, norm_type=self.norm_type)
        total_norm = norms[f"grad_{float(self.norm_type)}_norm_total"]
        # Callbacks do not define `self.log`; metrics must be logged through
        # the LightningModule so they reach the attached experiment loggers.
        pl_module.log(f"grad_norm_{self.norm_type}", total_norm)

0 comments on commit 107233d

Please sign in to comment.