From 6e07148242d466b9092e0c3b0846b39886c05a60 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 11 Oct 2023 19:52:04 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 tests/tests_pytorch/strategies/test_fsdp.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/tests/tests_pytorch/strategies/test_fsdp.py b/tests/tests_pytorch/strategies/test_fsdp.py
index 233e3a97c3cfc..4a5cbc9fad798 100644
--- a/tests/tests_pytorch/strategies/test_fsdp.py
+++ b/tests/tests_pytorch/strategies/test_fsdp.py
@@ -361,17 +361,17 @@ def test_fsdp_checkpoint_multi_gpus(tmpdir, model, strategy, strategy_cfg):
 @RunIf(min_cuda_gpus=1, skip_windows=True, standalone=True)
 @pytest.mark.parametrize("use_orig_params", [None, False, True])
 def test_invalid_parameters_in_optimizer(use_orig_params):
-    fsdp_kwargs = {}
+    fsdp_kwargs = {}
     if _TORCH_GREATER_EQUAL_2_0 and use_orig_params is not None:
         fsdp_kwargs = {"use_orig_params": use_orig_params}
-
+
     trainer = Trainer(
         strategy=FSDPStrategy(**fsdp_kwargs),
         accelerator="cuda",
         devices=1,
         fast_dev_run=1,
     )
-
+
     error_context = (
         nullcontext()
         if _TORCH_GREATER_EQUAL_2_0 and (_TORCH_GREATER_EQUAL_2_1 or use_orig_params is not False)
@@ -391,7 +391,6 @@ def configure_optimizers(self):
             layer = torch.nn.Linear(4, 5)
             return torch.optim.Adam(layer.parameters(), lr=1e-2)

-
         error_context = (
             nullcontext()
             if _TORCH_GREATER_EQUAL_2_0 and use_orig_params is not False