Commit

deprecation test

DuYicong515 committed Mar 24, 2022
1 parent 4687517 commit f8f34ad

Showing 3 changed files with 26 additions and 2 deletions.
CHANGELOG.md: 3 additions, 0 deletions

@@ -552,6 +552,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Deprecated `Trainer.data_parallel_device_ids` in favor of `Trainer.device_ids` ([#12072](https://github.com/PyTorchLightning/pytorch-lightning/pull/12072))
 
 
+- Deprecated `Trainer.gpus` in favor of `Trainer.device_ids` or `Trainer.num_devices` ([#12436](https://github.com/PyTorchLightning/pytorch-lightning/pull/12436))
+
+
 ### Removed
 
 - Removed deprecated parameter `method` in `pytorch_lightning.utilities.model_helpers.is_overridden` ([#10507](https://github.com/PyTorchLightning/pytorch-lightning/pull/10507))
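For context on the entry added above, the sketch below is not part of the commit; it assumes a machine with at least two visible CUDA devices and a PyTorch Lightning release where `Trainer.gpus` still exists as a deprecated accessor. It shows the attribute being read (which is expected to warn) next to the documented replacements, `Trainer.num_devices` and `Trainer.device_ids`.

# Illustrative only: reading `Trainer.gpus` should emit a deprecation warning,
# while `num_devices` / `device_ids` are the supported replacements.
import warnings

from pytorch_lightning import Trainer

trainer = Trainer(accelerator="gpu", devices=2)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    _ = trainer.gpus  # deprecated accessor
print([str(w.message) for w in caught])  # should mention the v1.8 removal

print(trainer.num_devices)  # e.g. 2
print(trainer.device_ids)   # e.g. [0, 1]
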
pytorch_lightning/callbacks/gpu_stats_monitor.py: 2 additions, 2 deletions

@@ -128,8 +128,8 @@ def setup(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", stage: O

         if trainer.strategy.root_device.type != "cuda":
             raise MisconfigurationException(
-                "You are using GPUStatsMonitor but are not running on GPU"
-                f" since gpus attribute in Trainer is set to {trainer.gpus}."
+                "You are using GPUStatsMonitor but are not running on GPU."
+                f" The root device type is {trainer.strategy.root_device.type}."
             )
 
         # The logical device IDs for selected devices
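The replacement message reads the root device from the strategy instead of the removed reference to `trainer.gpus`. As a quick illustration (again not part of the commit), `trainer.strategy.root_device` is a torch.device whose `.type` is exactly what the new GPUStatsMonitor message reports; on a CPU-only run it can be inspected like this:

# Illustrative sketch: inspect the strategy's root device on a CPU-only Trainer.
from pytorch_lightning import Trainer

trainer = Trainer(accelerator="cpu", devices=1)
print(trainer.strategy.root_device)       # device(type='cpu')
print(trainer.strategy.root_device.type)  # 'cpu'
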
tests/deprecated_api/test_remove_1-8.py: 21 additions, 0 deletions

@@ -1054,3 +1054,24 @@ def test_trainer_data_parallel_device_ids(monkeypatch, trainer_kwargs, expected_
         " Please use `Trainer.device_ids` instead."
     ):
         assert trainer.data_parallel_device_ids == expected_data_parallel_device_ids
+
+
+@pytest.mark.parametrize(
+    "trainer_kwargs",
+    [
+        {"accelerator": "gpu", "devices": 2},
+        {"accelerator": "gpu", "devices": [0, 2]},
+        {"accelerator": "gpu", "devices": "0"},
+        {"accelerator": "gpu", "devices": "2"},
+        {"accelerator": "gpu", "devices": "0,"},
+    ],
+)
+def test_trainer_gpus(monkeypatch, trainer_kwargs):
+    monkeypatch.setattr(torch.cuda, "is_available", lambda: True)
+    monkeypatch.setattr(torch.cuda, "device_count", lambda: 4)
+    trainer = Trainer(**trainer_kwargs)
+    with pytest.deprecated_call(
+        match="`Trainer.gpus` was deprecated in v1.6 and will be removed in v1.8."
+        " Please use `Trainer.num_devices` or `Trainer.device_ids` to get device information instead."
+    ):
+        assert trainer.gpus == trainer_kwargs.get("devices")
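
The new test relies on pytest.deprecated_call, which passes only if the code inside the with block emits a DeprecationWarning (or a subclass) whose message matches the given pattern, and on monkeypatch to fake a 4-GPU machine so no hardware is needed. A minimal, self-contained sketch of the deprecated_call helper, independent of Lightning:

import warnings

import pytest


def old_api():
    # Stand-in for a deprecated accessor such as Trainer.gpus.
    warnings.warn("`old_api` was deprecated in v1.6 and will be removed in v1.8.", DeprecationWarning)
    return 42


def test_old_api_warns():
    # Fails if no matching deprecation warning is emitted inside the block.
    with pytest.deprecated_call(match="deprecated in v1.6"):
        assert old_api() == 42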
