From 0166e1fbfa69ec85ef4fcb36eac510d193384cf1 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 30 May 2023 14:41:45 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 src/lightning/fabric/plugins/precision/amp.py  | 4 +++-
 src/lightning/pytorch/plugins/precision/amp.py | 4 +++-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/src/lightning/fabric/plugins/precision/amp.py b/src/lightning/fabric/plugins/precision/amp.py
index 36326bfaa001f..b3044dac26aff 100644
--- a/src/lightning/fabric/plugins/precision/amp.py
+++ b/src/lightning/fabric/plugins/precision/amp.py
@@ -42,7 +42,9 @@ def __init__(
         scaler: Optional[torch.cuda.amp.GradScaler] = None,
     ) -> None:
         if precision not in ("16-mixed", "bf16-mixed"):
-            raise ValueError(f"Passed `{type(self).__name__}(precision={precision!r})`. Precision must be '16-mixed' or 'bf16-mixed'")
+            raise ValueError(
+                f"Passed `{type(self).__name__}(precision={precision!r})`. Precision must be '16-mixed' or 'bf16-mixed'"
+            )
 
         self.precision = cast(Literal["16-mixed", "bf16-mixed"], str(precision))
         if scaler is None and self.precision == "16-mixed":
diff --git a/src/lightning/pytorch/plugins/precision/amp.py b/src/lightning/pytorch/plugins/precision/amp.py
index 119502fbcd75b..9a96a45f8315f 100644
--- a/src/lightning/pytorch/plugins/precision/amp.py
+++ b/src/lightning/pytorch/plugins/precision/amp.py
@@ -41,7 +41,9 @@ def __init__(
         scaler: Optional[torch.cuda.amp.GradScaler] = None,
     ) -> None:
         if precision not in ("16-mixed", "bf16-mixed"):
-            raise ValueError(f"`Passed `{type(self).__name__}(precision={precision!r})`. Precision must be '16-mixed' or 'bf16-mixed'")
+            raise ValueError(
+                f"Passed `{type(self).__name__}(precision={precision!r})`. Precision must be '16-mixed' or 'bf16-mixed'"
+            )
 
         self.precision = cast(Literal["16-mixed", "bf16-mixed"], str(precision))
         if scaler is None and self.precision == "16-mixed":
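
For context, a minimal runnable sketch of the validation that both hunks reformat, assuming the plugin is constructed directly; `AMPPrecision` is a hypothetical stand-in name for the two plugin classes, not an actual Lightning import.

# Sketch of the precision check wrapped by both hunks above.
# `AMPPrecision` is a hypothetical stand-in, not the real class name.
from typing import Literal, cast


class AMPPrecision:
    def __init__(self, precision: str) -> None:
        # Reject anything other than the two supported mixed-precision modes.
        if precision not in ("16-mixed", "bf16-mixed"):
            raise ValueError(
                f"Passed `{type(self).__name__}(precision={precision!r})`."
                " Precision must be '16-mixed' or 'bf16-mixed'"
            )
        # Narrow the type for static checkers; no runtime conversion beyond str().
        self.precision = cast(Literal["16-mixed", "bf16-mixed"], str(precision))


AMPPrecision("bf16-mixed")  # accepted
try:
    AMPPrecision("32-true")  # rejected: not a mixed-precision setting
except ValueError as err:
    print(err)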