Remove unused mixin attributes (#6487)
* Remove unused mixin attributes

* Missing import
carmocca authored Mar 12, 2021
1 parent 6596447 commit 518c7e4
Showing 4 changed files with 8 additions and 27 deletions.
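Context for the change: these `Trainer` mixins are abstract classes whose class-body annotations only document the attributes that the concrete `Trainer` is expected to provide; the annotations have no runtime effect, so stale entries accumulate silently as code is refactored. A minimal sketch of the pattern (illustrative, not Lightning code):

```python
from abc import ABC


class DataLoadingMixin(ABC):
    # summary of variables used in this abstract class;
    # the proper values/initialisation happen in the child class
    batch_size: int  # annotation only -- creates no runtime attribute

    def describe(self) -> str:
        return f"loading with batch_size={self.batch_size}"


class Trainer(DataLoadingMixin):
    def __init__(self) -> None:
        self.batch_size = 32  # the concrete class supplies the value


print(Trainer().describe())  # loading with batch_size=32
```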
14 changes: 5 additions & 9 deletions pytorch_lightning/trainer/data_loading.py
@@ -16,13 +16,14 @@
 import platform
 from abc import ABC
 from copy import deepcopy
-from typing import Callable, Iterable, List, Optional, Tuple, Union
+from typing import Callable, Iterable, List, Tuple, Union

 from torch.utils.data import BatchSampler, DataLoader, RandomSampler, SequentialSampler
 from torch.utils.data.distributed import DistributedSampler

 from pytorch_lightning.accelerators import Accelerator
 from pytorch_lightning.core import LightningModule
+from pytorch_lightning.trainer.connectors.accelerator_connector import AcceleratorConnector
 from pytorch_lightning.trainer.supporters import CombinedLoader
 from pytorch_lightning.utilities import rank_zero_warn
 from pytorch_lightning.utilities.apply_func import apply_to_collection
@@ -36,8 +37,6 @@ class TrainerDataLoadingMixin(ABC):

     # this is just a summary on variables used in this abstract class,
     # the proper values/initialisation should be done in child class
-    global_rank: int
-    shown_warnings:...
     val_check_interval: float
     tpu_local_core_rank: int
     train_dataloader: DataLoader
@@ -48,13 +47,10 @@ class TrainerDataLoadingMixin(ABC):
     test_dataloaders: List[DataLoader]
     num_test_batches: List[Union[int, float]]
     limit_train_batches: Union[int, float]
-    limit_val_batches: Union[int, float]
-    limit_test_batches: Union[int, float]
-    replace_sampler_ddp: bool
     overfit_batches: Union[int, float]
     distributed_sampler_kwargs: dict
     accelerator: Accelerator
     num_nodes: int
     num_processes: int
-    distributed_backend: Optional[str]
+    accelerator_connector: AcceleratorConnector
     dev_debugger: InternalDebugger

     def _worker_check(self, dataloader: DataLoader, name: str) -> None:
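The second commit in the squash ("Missing import") exists because class-body annotations are evaluated when the `class` statement executes (absent `from __future__ import annotations`), so `AcceleratorConnector` had to be imported before it could be used as an annotation. A minimal reproduction of the failure mode (an assumption about what CI hit, not taken from the PR):

```python
# An un-imported name used as a class-body annotation raises at class-creation time.
try:
    class Broken:
        accelerator_connector: AcceleratorConnector  # name was never imported
except NameError as err:
    print(err)  # name 'AcceleratorConnector' is not defined
```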
4 changes: 1 addition & 3 deletions pytorch_lightning/trainer/deprecated_api.py
@@ -19,8 +19,6 @@

 class DeprecatedDistDeviceAttributes:

-    _distrib_type: DistributedType
-    _device_type: DeviceType
     num_gpus: int
     accelerator_connector: AcceleratorConnector

@@ -135,7 +133,7 @@ def use_single_gpu(self, val: bool) -> None:
 class DeprecatedTrainerAttributes:

     accelerator: Accelerator
-    lightning_module = LightningModule
+    lightning_module: LightningModule
     sanity_checking: bool

     @property
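The `lightning_module` change above is a real fix hiding in the cleanup: `lightning_module = LightningModule` bound the class object itself as a class attribute, while `lightning_module: LightningModule` is the intended annotation-only declaration. A small demonstration (illustrative, not Lightning code):

```python
class LightningModule:
    pass


class WithAssignment:
    lightning_module = LightningModule  # binds the class object at runtime


class WithAnnotation:
    lightning_module: LightningModule  # annotation only, nothing bound


print(WithAssignment.lightning_module)              # <class '__main__.LightningModule'>
print(hasattr(WithAnnotation, "lightning_module"))  # False
print(WithAnnotation.__annotations__)               # {'lightning_module': ...}
```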
14 changes: 2 additions & 12 deletions pytorch_lightning/trainer/logging.py
@@ -14,12 +14,11 @@

 import inspect
 from abc import ABC
-from typing import Mapping, Union
+from typing import Mapping

 import torch

-from pytorch_lightning.loggers import LightningLoggerBase
-from pytorch_lightning.utilities import DeviceType, DistributedType
+from pytorch_lightning.utilities import DistributedType
 from pytorch_lightning.utilities.distributed import rank_zero_warn
 from pytorch_lightning.utilities.memory import recursive_detach

@@ -28,17 +27,8 @@ class TrainerLoggingMixin(ABC):

     # this is just a summary on variables used in this abstract class,
     # the proper values/initialisation should be done in child class
-    current_epoch: int
-    _device_type: DeviceType
     _distrib_type: DistributedType
-    log_gpu_memory:...
-    logger: Union[LightningLoggerBase, bool]
-    global_step: int
-    global_rank: int
-    default_root_dir: str
-    slurm_job_id: int
     num_gpus: int
-    logged_metrics:...

     def metrics_to_scalars(self, metrics):
         new_metrics = {}
3 changes: 0 additions & 3 deletions pytorch_lightning/trainer/training_tricks.py
@@ -29,10 +29,7 @@ class TrainerTrainingTricksMixin(ABC):

     # this is just a summary on variables used in this abstract class,
     # the proper values/initialisation should be done in child class
-    default_root_dir: str
     lightning_module: LightningModule
-    progress_bar_callback:...
-    on_gpu: bool

     def print_nan_gradients(self) -> None:
         model = self.lightning_module
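How one might find such dead declarations: annotation-only names live in the class's `__annotations__`, so they can be cross-checked against the attributes the mixin's own methods actually read. A rough sketch (hypothetical helper, not the tooling used for this PR):

```python
import inspect
import re


def unused_annotations(cls) -> set:
    """Report annotated names that the class's own source never reads off self."""
    source = inspect.getsource(cls)
    declared = set(cls.__annotations__)
    used = set(re.findall(r"self\.(\w+)", source))
    return declared - used


class Mixin:
    default_root_dir: str  # never read below, so it gets reported
    lightning_module: object

    def print_nan_gradients(self) -> None:
        print(self.lightning_module)


print(unused_annotations(Mixin))  # {'default_root_dir'}
```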
