move pytorch_lightning >> lightning/pytorch (#16594)
Borda authored Feb 1, 2023
1 parent 01b152f commit 7d4780a
Showing 197 changed files with 1,283 additions and 1,264 deletions.
3 changes: 2 additions & 1 deletion .azure/gpu-tests-pytorch.yml
@@ -26,7 +26,8 @@ pr:
- "examples/pl_basics/backbone_image_classifier.py"
- "examples/pl_basics/autoencoder.py"
- "requirements/pytorch/**"
- "src/pytorch_lightning/**"
- "src/lightning/pytorch/**"
- "src/pytorch_lightning/*"
- "tests/tests_pytorch/**"
- "pyproject.toml" # includes pytest config
- "requirements/fabric/**"
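Across these CI configs the old recursive filter `src/pytorch_lightning/**` is split in two: `src/lightning/pytorch/**` covers the relocated source tree, while the single-star `src/pytorch_lightning/*` now watches only the top-level stub files that remain checked in for the mirror package (see the .gitignore hunk below). A minimal sketch of the matching semantics, assuming POSIX-style paths:

```python
from pathlib import PurePosixPath

def matches_pl_filter(path: str) -> bool:
    p = PurePosixPath(path)
    # "src/lightning/pytorch/**" -- double star is recursive, so anything
    # anywhere under the new source tree triggers the job.
    if p.is_relative_to("src/lightning/pytorch"):
        return True
    # "src/pytorch_lightning/*" -- single star is one level deep, matching
    # only the stub files kept at the mirror package root.
    return p.parent == PurePosixPath("src/pytorch_lightning")

assert matches_pl_filter("src/lightning/pytorch/trainer/trainer.py")
assert matches_pl_filter("src/pytorch_lightning/__about__.py")
assert not matches_pl_filter("src/pytorch_lightning/trainer/trainer.py")  # generated, not tracked
```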
3 changes: 2 additions & 1 deletion .azure/hpu-tests.yml
@@ -24,7 +24,8 @@ pr:
- "src/lightning/fabric/**"
- "src/lightning_fabric/*"
- "requirements/pytorch/**"
- "src/pytorch_lightning/**"
- "src/lightning/pytorch/**"
- "src/pytorch_lightning/*"
- "tests/tests_pytorch/**"
- "pyproject.toml" # includes pytest config
exclude:
3 changes: 2 additions & 1 deletion .azure/ipu-tests.yml
@@ -21,7 +21,8 @@ pr:
- "src/lightning/fabric/**"
- "src/lightning_fabric/*"
- "requirements/pytorch/**"
- "src/pytorch_lightning/**"
- "src/lightning/pytorch/**"
- "src/pytorch_lightning/*"
- "tests/tests_pytorch/**"
- "pyproject.toml" # includes pytest config
exclude:
15 changes: 10 additions & 5 deletions .github/checkgroup.yml
@@ -11,7 +11,8 @@ subprojects:
- "src/lightning/fabric/**"
- "src/lightning_fabric/*"
- "requirements/pytorch/**"
- "src/pytorch_lightning/**"
- "src/lightning/pytorch/**"
- "src/pytorch_lightning/*"
- "tests/tests_pytorch/**"
- "tests/legacy/**"
- "pyproject.toml" # includes pytest config
@@ -49,7 +50,8 @@ subprojects:
- "examples/pl_basics/backbone_image_classifier.py"
- "examples/pl_basics/autoencoder.py"
- "requirements/pytorch/**"
- "src/pytorch_lightning/**"
- "src/lightning/pytorch/**"
- "src/pytorch_lightning/*"
- "tests/tests_pytorch/**"
- "pyproject.toml" # includes pytest config
- "requirements/fabric/**"
@@ -82,7 +84,8 @@ subprojects:
- "src/lightning/fabric/**"
- "src/lightning_fabric/*"
- "requirements/pytorch/**"
- "src/pytorch_lightning/**"
- "src/lightning/pytorch/**"
- "src/pytorch_lightning/*"
- "tests/tests_pytorch/**"
- "pyproject.toml" # includes pytest config
- "!requirements/*/docs.txt"
@@ -99,7 +102,8 @@ subprojects:
- "src/lightning/fabric/**"
- "src/lightning_fabric/*"
- "requirements/pytorch/**"
- "src/pytorch_lightning/**"
- "src/lightning/pytorch/**"
- "src/pytorch_lightning/*"
- "tests/tests_pytorch/**"
- "pyproject.toml" # includes pytest config
- "!requirements/docs.txt"
@@ -130,7 +134,8 @@ subprojects:

- id: "pytorch_lightning: Docs"
paths:
- "src/pytorch_lightning/**"
- "src/lightning/pytorch/**"
- "src/pytorch_lightning/*"
- "docs/source-pytorch/**"
- ".actions/**"
- ".github/workflows/docs-checks.yml"
3 changes: 2 additions & 1 deletion .github/labeler.yml
@@ -9,7 +9,8 @@ app:
- 'requirements/app/**'

pl:
- 'src/pytorch_lightning/**'
- "src/lightning/pytorch/**"
- "src/pytorch_lightning/*"
- 'tests/tests_pytorch/**'
- 'tests/legacy/**'
- 'examples/pl_*/**'
3 changes: 2 additions & 1 deletion .github/workflows/ci-tests-pytorch.yml
@@ -10,7 +10,8 @@ on:
paths:
- ".actions/**"
- "requirements/pytorch/**"
- "src/pytorch_lightning/**"
- "src/lightning/pytorch/**"
- "src/pytorch_lightning/*"
- "tests/tests_pytorch/**"
- "tests/legacy/**"
- "pyproject.toml" # includes pytest config
3 changes: 2 additions & 1 deletion .github/workflows/tpu-tests.yml
@@ -15,7 +15,8 @@ on:
- "src/lightning_fabric/*"
- "tests/tests_fabric/**"
- "requirements/pytorch/**"
- "src/pytorch_lightning/**"
- "src/lightning/pytorch/**"
- "src/pytorch_lightning/*"
- "tests/tests_pytorch/**"
- "pyproject.toml" # includes pytest config
- "!requirements/*/docs.txt"
5 changes: 4 additions & 1 deletion .gitignore
@@ -57,7 +57,10 @@ src/lightning_fabric/
!src/lightning_fabric/__*__.py
!src/lightning_fabric/MANIFEST.in
!src/lightning_fabric/README.md
src/lightning/pytorch/
src/pytorch_lightning/
!src/pytorch_lightning/__*__.py
!src/pytorch_lightning/MANIFEST.in
!src/pytorch_lightning/README.md

# PyInstaller
# Usually these files are written by a python script from a template
7 changes: 3 additions & 4 deletions .pre-commit-config.yaml
@@ -42,9 +42,8 @@ repos:
docs/source-pytorch/_static/images/general/pl_quick_start_full_compressed.gif|
docs/source-pytorch/_static/images/general/pl_overview_flat.jpg|
docs/source-pytorch/_static/images/general/pl_overview.gif|
src/lightning_app/cli/pl-app-template/ui/yarn.lock|
src/pytorch_lightning/CHANGELOG.md|
src/lightning/fabric/CHANGELOG.md
src/lightning/fabric/CHANGELOG.md|
src/lightning/pytorch/CHANGELOG.md
)$
- id: detect-private-key

@@ -100,7 +99,7 @@ repos:
(?x)^(
src/lightning/app/CHANGELOG.md|
src/lightning/fabric/CHANGELOG.md|
src/pytorch_lightning/CHANGELOG.md
src/lightning/pytorch/CHANGELOG.md
)$
- repo: https://github.com/charliermarsh/ruff-pre-commit
4 changes: 2 additions & 2 deletions setup.py
@@ -58,7 +58,7 @@
"fabric": "lightning_fabric",
}
# TODO: drop this reverse list when all packages are moved
_MIRROR_PACKAGE_REVERSED = ("app", "fabric")
_MIRROR_PACKAGE_REVERSED = ("app", "fabric", "pytorch")
# https://packaging.python.org/guides/single-sourcing-package-version/
# http://blog.ionelmc.ro/2014/05/25/python-packaging/
_PATH_ROOT = os.path.dirname(__file__)
@@ -142,7 +142,7 @@ def _set_manifest_path(manifest_dir: str, aggregate: bool = False, mapping: Mapp
package_to_install = _PACKAGE_MAPPING.get(_PACKAGE_NAME, "lightning")
if package_to_install == "lightning":
# merge all requirements files
assistant._load_aggregate_requirements(_PATH_REQUIRE, _FREEZE_REQUIREMENTS) # install everything
assistant._load_aggregate_requirements(_PATH_REQUIRE, _FREEZE_REQUIREMENTS)
# replace imports and copy the code
assistant.create_mirror_package(_PATH_SRC, _PACKAGE_MAPPING, reverse=_MIRROR_PACKAGE_REVERSED)
else:
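With `"pytorch"` added to `_MIRROR_PACKAGE_REVERSED`, building the unified `lightning` package now generates `src/pytorch_lightning` from `src/lightning/pytorch` rather than the other way around. The following is only a rough sketch of that copy-and-rewrite idea; the real logic lives in `assistant.create_mirror_package` and differs in detail (it also handles the `app` and `fabric` mirrors and the full `_PACKAGE_MAPPING`):

```python
import re
import shutil
from pathlib import Path

def mirror_package(src_root: Path,
                   unified: str = "lightning.pytorch",
                   standalone: str = "pytorch_lightning") -> None:
    """Copy the unified sources to the standalone location, rewriting imports."""
    src = src_root.joinpath(*unified.split("."))  # src/lightning/pytorch
    dst = src_root / standalone                   # src/pytorch_lightning
    # dirs_exist_ok keeps the checked-in stubs (__about__.py etc.) alongside
    # the generated files.
    shutil.copytree(src, dst, dirs_exist_ok=True)
    pattern = re.compile(rf"\b{re.escape(unified)}\b")
    for py in dst.rglob("*.py"):
        py.write_text(pattern.sub(standalone, py.read_text()))
```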
@@ -207,6 +207,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
## [1.9.0] - 2023-01-17

### Added

- Added support for native logging of `MetricCollection` with enabled compute groups ([#15580](https://github.com/Lightning-AI/lightning/pull/15580))
- Added support for custom artifact names in `pl.loggers.WandbLogger` ([#16173](https://github.com/Lightning-AI/lightning/pull/16173))
- Added support for DDP with `LRFinder` ([#15304](https://github.com/Lightning-AI/lightning/pull/15304))
@@ -223,7 +224,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Added info message for Ampere CUDA GPU users to enable tf32 matmul precision ([#16037](https://github.com/Lightning-AI/lightning/pull/16037))
- Added support for returning optimizer-like classes in `LightningModule.configure_optimizers` ([#16189](https://github.com/Lightning-AI/lightning/pull/16189))


### Changed

- Drop PyTorch 1.9 support ([#15347](https://github.com/Lightning-AI/lightning/pull/15347))
@@ -1,12 +1,18 @@
"""Root package info."""

import logging
import os
from typing import Any

from pytorch_lightning.__about__ import * # noqa: F401, F403
from lightning_utilities import module_available

if os.path.isfile(os.path.join(os.path.dirname(__file__), "__about__.py")):
from lightning.pytorch.__about__ import * # noqa: F401, F403
if "__version__" not in locals():
from pytorch_lightning.__version__ import version as __version__ # noqa: F401
if os.path.isfile(os.path.join(os.path.dirname(__file__), "__version__.py")):
from lightning.pytorch.__version__ import version as __version__
elif module_available("lightning"):
from lightning import __version__ # noqa: F401

_DETAIL = 15 # between logging.INFO and logging.DEBUG, used for logging in production use cases

@@ -30,13 +36,13 @@ def _detail(self: Any, message: str, *args: Any, **kwargs: Any) -> None:
_logger.addHandler(logging.StreamHandler())
_logger.propagate = False

from lightning_fabric.utilities.seed import seed_everything # noqa: E402
from pytorch_lightning.callbacks import Callback # noqa: E402
from pytorch_lightning.core import LightningDataModule, LightningModule # noqa: E402
from pytorch_lightning.trainer import Trainer # noqa: E402
from lightning.fabric.utilities.seed import seed_everything # noqa: E402
from lightning.pytorch.callbacks import Callback # noqa: E402
from lightning.pytorch.core import LightningDataModule, LightningModule # noqa: E402
from lightning.pytorch.trainer import Trainer # noqa: E402

# this import needs to go last as it will patch other modules
import pytorch_lightning._graveyard # noqa: E402, F401 # isort: skip
import lightning.pytorch._graveyard # noqa: E402, F401 # isort: skip

__all__ = ["Trainer", "LightningDataModule", "LightningModule", "Callback", "seed_everything"]

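The rewritten `__init__.py` resolves `__version__` through a fallback chain — `__about__.py`, then a generated `__version__.py`, then the installed `lightning` distribution — before wiring up the top-level exports. A quick usage sketch of the resulting public surface:

```python
import pytorch_lightning as pl

pl.seed_everything(42)  # re-exported from lightning.fabric.utilities.seed
print(pl.__version__)   # resolved via __about__.py, __version__.py, or `lightning`
```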
@@ -11,4 +11,4 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytorch_lightning._graveyard.legacy_import_unpickler # noqa: F401
import lightning.pytorch._graveyard.legacy_import_unpickler # noqa: F401
@@ -11,7 +11,7 @@ def _patch_pl_to_mirror_if_necessary(module: str) -> str:
if module.startswith(pl):
# for the standalone package this won't do anything,
# for the unified mirror package it will redirect the imports
module = "pytorch_lightning" + module[len(pl) :]
module = "lightning.pytorch" + module[len(pl) :]
return module


@@ -29,7 +29,7 @@ def compare_version(package: str, op: Callable, version: str, use_base_version:
return _compare_version(new_package, op, version, use_base_version)


# patching is necessary, since up to v.0.7.3 torchmetrics has a hardcoded reference to pytorch_lightning,
# patching is necessary, since up to v.0.7.3 torchmetrics has a hardcoded reference to lightning.pytorch,
# which has to be redirected to the unified package:
# https://github.com/Lightning-AI/metrics/blob/v0.7.3/torchmetrics/metric.py#L96
try:
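The redirection is a plain string-prefix rewrite: in the standalone package `pl` equals the module's own name and the function is a no-op, while in the unified mirror it maps old-style import paths onto `lightning.pytorch`. Restated as a self-contained snippet with an example call (the default argument here is illustrative):

```python
def _patch_pl_to_mirror_if_necessary(module: str, pl: str = "pytorch_lightning") -> str:
    # Inside the unified package, rewrite old-style module paths to their
    # lightning.pytorch twins; otherwise return the path unchanged.
    if module.startswith(pl):
        module = "lightning.pytorch" + module[len(pl):]
    return module

assert _patch_pl_to_mirror_if_necessary("pytorch_lightning.trainer") == "lightning.pytorch.trainer"
```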
@@ -10,16 +10,16 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from lightning_fabric.accelerators import find_usable_cuda_devices # noqa: F401
from lightning_fabric.accelerators.registry import _AcceleratorRegistry, call_register_accelerators
from pytorch_lightning.accelerators.accelerator import Accelerator # noqa: F401
from pytorch_lightning.accelerators.cpu import CPUAccelerator # noqa: F401
from pytorch_lightning.accelerators.cuda import CUDAAccelerator # noqa: F401
from pytorch_lightning.accelerators.hpu import HPUAccelerator # noqa: F401
from pytorch_lightning.accelerators.ipu import IPUAccelerator # noqa: F401
from pytorch_lightning.accelerators.mps import MPSAccelerator # noqa: F401
from pytorch_lightning.accelerators.tpu import TPUAccelerator # noqa: F401
from lightning.fabric.accelerators import find_usable_cuda_devices # noqa: F401
from lightning.fabric.accelerators.registry import _AcceleratorRegistry, call_register_accelerators
from lightning.pytorch.accelerators.accelerator import Accelerator # noqa: F401
from lightning.pytorch.accelerators.cpu import CPUAccelerator # noqa: F401
from lightning.pytorch.accelerators.cuda import CUDAAccelerator # noqa: F401
from lightning.pytorch.accelerators.hpu import HPUAccelerator # noqa: F401
from lightning.pytorch.accelerators.ipu import IPUAccelerator # noqa: F401
from lightning.pytorch.accelerators.mps import MPSAccelerator # noqa: F401
from lightning.pytorch.accelerators.tpu import TPUAccelerator # noqa: F401

ACCELERATORS_BASE_MODULE = "pytorch_lightning.accelerators"
ACCELERATORS_BASE_MODULE = "lightning.pytorch.accelerators"
AcceleratorRegistry = _AcceleratorRegistry()
call_register_accelerators(AcceleratorRegistry, ACCELERATORS_BASE_MODULE)
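`call_register_accelerators` walks `ACCELERATORS_BASE_MODULE` — now `lightning.pytorch.accelerators` — and lets each accelerator class register itself under a short name. A usage sketch, assuming the registry keeps its dict-like 1.9-era API:

```python
from lightning.pytorch.accelerators import AcceleratorRegistry

# _AcceleratorRegistry subclasses dict, so registered short names are its keys
# (an assumption based on the 1.9-era registry; the exact API may differ).
print(sorted(AcceleratorRegistry))  # e.g. ['cpu', 'cuda', 'hpu', 'ipu', 'mps', 'tpu']
```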
@@ -14,9 +14,9 @@
from abc import ABC
from typing import Any, Dict

import pytorch_lightning as pl
from lightning_fabric.accelerators.accelerator import Accelerator as _Accelerator
from lightning_fabric.utilities.types import _DEVICE
import lightning.pytorch as pl
from lightning.fabric.accelerators.accelerator import Accelerator as _Accelerator
from lightning.fabric.utilities.types import _DEVICE


class Accelerator(_Accelerator, ABC):
@@ -15,11 +15,11 @@

import torch

from lightning_fabric.accelerators.cpu import _parse_cpu_cores
from lightning_fabric.utilities.types import _DEVICE
from pytorch_lightning.accelerators.accelerator import Accelerator
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.imports import _PSUTIL_AVAILABLE
from lightning.fabric.accelerators.cpu import _parse_cpu_cores
from lightning.fabric.utilities.types import _DEVICE
from lightning.pytorch.accelerators.accelerator import Accelerator
from lightning.pytorch.utilities.exceptions import MisconfigurationException
from lightning.pytorch.utilities.imports import _PSUTIL_AVAILABLE


class CPUAccelerator(Accelerator):
@@ -19,12 +19,12 @@

import torch

import pytorch_lightning as pl
from lightning_fabric.accelerators.cuda import _check_cuda_matmul_precision, num_cuda_devices
from lightning_fabric.utilities.device_parser import _parse_gpu_ids
from lightning_fabric.utilities.types import _DEVICE
from pytorch_lightning.accelerators.accelerator import Accelerator
from pytorch_lightning.utilities.exceptions import MisconfigurationException
import lightning.pytorch as pl
from lightning.fabric.accelerators.cuda import _check_cuda_matmul_precision, num_cuda_devices
from lightning.fabric.utilities.device_parser import _parse_gpu_ids
from lightning.fabric.utilities.types import _DEVICE
from lightning.pytorch.accelerators.accelerator import Accelerator
from lightning.pytorch.utilities.exceptions import MisconfigurationException

_log = logging.getLogger(__name__)

@@ -16,11 +16,11 @@

import torch

from lightning_fabric.utilities.types import _DEVICE
from pytorch_lightning.accelerators.accelerator import Accelerator
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.imports import _HPU_AVAILABLE
from pytorch_lightning.utilities.rank_zero import rank_zero_debug
from lightning.fabric.utilities.types import _DEVICE
from lightning.pytorch.accelerators.accelerator import Accelerator
from lightning.pytorch.utilities.exceptions import MisconfigurationException
from lightning.pytorch.utilities.imports import _HPU_AVAILABLE
from lightning.pytorch.utilities.rank_zero import rank_zero_debug

if _HPU_AVAILABLE:
import habana_frameworks.torch.hpu as torch_hpu
@@ -108,7 +108,7 @@ def register_accelerators(cls, accelerator_registry: Dict) -> None:
def _parse_hpus(devices: Optional[Union[int, str, List[int]]]) -> Optional[int]:
"""
Parses the hpus given in the format as accepted by the
:class:`~pytorch_lightning.trainer.Trainer` for the `devices` flag.
:class:`~lightning.pytorch.trainer.Trainer` for the `devices` flag.
Args:
devices: An integer that indicates the number of Gaudi devices to be used
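`_parse_hpus` normalizes whatever the user passes to `Trainer(devices=...)` when running on Gaudi hardware. A hedged usage example (assumes a machine with Habana support installed):

```python
import lightning.pytorch as pl

# `devices` accepts the int/str/list forms that _parse_hpus handles.
trainer = pl.Trainer(accelerator="hpu", devices=8)  # 8 Gaudi devices
```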
@@ -15,9 +15,9 @@

import torch

from lightning_fabric.utilities.types import _DEVICE
from pytorch_lightning.accelerators.accelerator import Accelerator
from pytorch_lightning.utilities.imports import _IPU_AVAILABLE
from lightning.fabric.utilities.types import _DEVICE
from lightning.pytorch.accelerators.accelerator import Accelerator
from lightning.pytorch.utilities.imports import _IPU_AVAILABLE


class IPUAccelerator(Accelerator):
@@ -15,12 +15,12 @@

import torch

from lightning_fabric.accelerators.mps import MPSAccelerator as _MPSAccelerator
from lightning_fabric.utilities.device_parser import _parse_gpu_ids
from lightning_fabric.utilities.types import _DEVICE
from pytorch_lightning.accelerators.accelerator import Accelerator
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.imports import _PSUTIL_AVAILABLE
from lightning.fabric.accelerators.mps import MPSAccelerator as _MPSAccelerator
from lightning.fabric.utilities.device_parser import _parse_gpu_ids
from lightning.fabric.utilities.types import _DEVICE
from lightning.pytorch.accelerators.accelerator import Accelerator
from lightning.pytorch.utilities.exceptions import MisconfigurationException
from lightning.pytorch.utilities.imports import _PSUTIL_AVAILABLE


class MPSAccelerator(Accelerator):
@@ -15,10 +15,10 @@

import torch

from lightning_fabric.accelerators.tpu import _parse_tpu_devices, _XLA_AVAILABLE
from lightning_fabric.accelerators.tpu import TPUAccelerator as FabricTPUAccelerator
from lightning_fabric.utilities.types import _DEVICE
from pytorch_lightning.accelerators.accelerator import Accelerator
from lightning.fabric.accelerators.tpu import _parse_tpu_devices, _XLA_AVAILABLE
from lightning.fabric.accelerators.tpu import TPUAccelerator as FabricTPUAccelerator
from lightning.fabric.utilities.types import _DEVICE
from lightning.pytorch.accelerators.accelerator import Accelerator


class TPUAccelerator(Accelerator):
… (remaining changed files omitted; the commit touches 197 files in total)