tests/tests_pytorch/plugins/precision
1 file changed: +5 −8 lines

@@ -14,15 +14,12 @@
 from unittest.mock import Mock
 
 import pytest
-from torch.optim import Optimizer
-
-from lightning.pytorch.plugins import MixedPrecision
-from lightning.pytorch.utilities import GradClipAlgorithmType
-
-from torch import nn
 import torch
+from torch import nn
+from torch.optim import Optimizer
 
 from lightning.pytorch.plugins.precision import MixedPrecision
+from lightning.pytorch.utilities import GradClipAlgorithmType
 
 
 def test_clip_gradients():
@@ -62,7 +59,7 @@ def test_optimizer_amp_scaling_support_in_step_method():
 def test_amp_with_no_grad(precision: str):
     layer = nn.Linear(2, 1)
     x = torch.randn(1, 2)
-    amp = MixedPrecision(precision=precision, device='cpu')
+    amp = MixedPrecision(precision=precision, device="cpu")
 
     with amp.autocast_context_manager():
         with torch.no_grad():
@@ -72,4 +69,4 @@ def test_amp_with_no_grad(precision: str):
 
     loss.backward()
 
-    assert loss.grad_fn is not None
+    assert loss.grad_fn is not None
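
For context, a minimal standalone sketch of the pattern this test exercises: the MixedPrecision plugin's autocast context interacting with torch.no_grad(). The diff only confirms MixedPrecision(precision=..., device="cpu"), autocast_context_manager(), torch.no_grad(), loss.backward(), and the grad_fn assertion; the precision string "bf16-mixed" and the two forward passes below are illustrative assumptions, not the test's actual parametrization.

# Hypothetical sketch (not part of the diff), assuming Lightning 2.x naming.
import torch
from torch import nn

from lightning.pytorch.plugins.precision import MixedPrecision

layer = nn.Linear(2, 1)
x = torch.randn(1, 2)
# "bf16-mixed" is an assumed precision value; it runs under CPU autocast.
amp = MixedPrecision(precision="bf16-mixed", device="cpu")

with amp.autocast_context_manager():
    with torch.no_grad():
        reference = layer(x)  # computed without recording an autograd graph

    out = layer(x)  # outside no_grad again, so the graph is recorded
    loss = (out - reference).abs().sum()

loss.backward()  # succeeds because loss was built outside no_grad
assert loss.grad_fn is not None  # the graph survives the inner no_grad block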