Skip to content
This repository was archived by the owner on Jan 22, 2025. It is now read-only.

Commit 1506551

Browse files
Peizhao Zhang and facebook-github-bot
authored and committed
print grad scaler as part of the metric.
Summary: Pull Request resolved: #501 X-link: facebookresearch/detectron2#4851 print grad scaler as part of the metric. * Controlled by a flag "SOLVER.AMP.LOG_GRAD_SCALER" Reviewed By: tax313 Differential Revision: D43585363 fbshipit-source-id: 495b37ff524c47e515cea0b3c677ee81b34ad4ca
1 parent 25049cd commit 1506551

File tree

2 files changed

+3
-0
lines changed

2 files changed

+3
-0
lines changed

d2go/runner/config_defaults.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -81,6 +81,8 @@ def _add_detectron2go_runner_default_cfg(_C: CN) -> None:
8181
assert not _C.SOLVER.AMP.ENABLED
8282
# AMP precision is used by both D2 and lightning backend. Can be "float16" or "bfloat16".
8383
_C.SOLVER.AMP.PRECISION = "float16"
84+
# log the grad scaler to the output
85+
_C.SOLVER.AMP.LOG_GRAD_SCALER = False
8486

8587
# Betas are used in the AdamW optimizer
8688
_C.SOLVER.BETAS = (0.9, 0.999)

d2go/runner/default_runner.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -551,6 +551,7 @@ def _get_model_with_abnormal_checker(model):
551551
precision=parse_precision_from_string(
552552
cfg.SOLVER.AMP.PRECISION, lightning=False
553553
),
554+
log_grad_scaler=cfg.SOLVER.AMP.LOG_GRAD_SCALER,
554555
)
555556
else:
556557
trainer = SimpleTrainer(

0 commit comments

Comments (0)