Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

bump python 3.9+ #20413

Merged
merged 14 commits into master from bump/python_3.9+
Nov 25, 2024
Prev Previous commit
Next Next commit
Merge branch 'master' into bump/python_3.9+
  • Loading branch information
Borda committed Nov 13, 2024
commit 4f221638d3475e8629d75a879155c2ce26ee32b1
8 changes: 4 additions & 4 deletions src/lightning/fabric/accelerators/cpu.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Union
from typing import List, Union

import torch
from typing_extensions import override
Expand Down Expand Up @@ -39,13 +39,13 @@ def teardown(self) -> None:

@staticmethod
@override
def parse_devices(devices: Union[int, str, list[int]]) -> int:
def parse_devices(devices: Union[int, str]) -> int:
"""Accelerator device parsing logic."""
return _parse_cpu_cores(devices)

@staticmethod
@override
def get_parallel_devices(devices: Union[int, str, list[int]]) -> list[torch.device]:
def get_parallel_devices(devices: Union[int, str]) -> List[torch.device]:
"""Gets parallel devices for the Accelerator."""
devices = _parse_cpu_cores(devices)
return [torch.device("cpu")] * devices
Expand All @@ -72,7 +72,7 @@ def register_accelerators(cls, accelerator_registry: _AcceleratorRegistry) -> No
)


def _parse_cpu_cores(cpu_cores: Union[int, str, list[int]]) -> int:
def _parse_cpu_cores(cpu_cores: Union[int, str]) -> int:
"""Parses the cpu_cores given in the format as accepted by the ``devices`` argument in the
:class:`~lightning.pytorch.trainer.trainer.Trainer`.

Expand Down
10 changes: 5 additions & 5 deletions src/lightning/pytorch/accelerators/cpu.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Union
from typing import Any, Dict, List, Union

import torch
from lightning_utilities.core.imports import RequirementCache
Expand All @@ -38,7 +38,7 @@ def setup_device(self, device: torch.device) -> None:
raise MisconfigurationException(f"Device should be CPU, got {device} instead.")

@override
def get_device_stats(self, device: _DEVICE) -> dict[str, Any]:
def get_device_stats(self, device: _DEVICE) -> Dict[str, Any]:
"""Get CPU stats from ``psutil`` package."""
return get_cpu_stats()

Expand All @@ -48,13 +48,13 @@ def teardown(self) -> None:

@staticmethod
@override
def parse_devices(devices: Union[int, str, list[int]]) -> int:
def parse_devices(devices: Union[int, str]) -> int:
"""Accelerator device parsing logic."""
return _parse_cpu_cores(devices)

@staticmethod
@override
def get_parallel_devices(devices: Union[int, str, list[int]]) -> list[torch.device]:
def get_parallel_devices(devices: Union[int, str]) -> List[torch.device]:
"""Gets parallel devices for the Accelerator."""
devices = _parse_cpu_cores(devices)
return [torch.device("cpu")] * devices
Expand Down Expand Up @@ -89,7 +89,7 @@ def register_accelerators(cls, accelerator_registry: _AcceleratorRegistry) -> No
_PSUTIL_AVAILABLE = RequirementCache("psutil")


def get_cpu_stats() -> dict[str, float]:
def get_cpu_stats() -> Dict[str, float]:
if not _PSUTIL_AVAILABLE:
raise ModuleNotFoundError(
f"Fetching CPU device stats requires `psutil` to be installed. {str(_PSUTIL_AVAILABLE)}"
Expand Down
You are viewing a condensed version of this merge commit. You can view the full changes here.