diff --git a/CHANGELOG.md b/CHANGELOG.md
index d192f814c4081..4d2f403739b47 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,15 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
+## [1.2.6] - 2021-03-30
+
+### Changed
+
+-
+
+### Fixed
+
+- Fixed `DummyLogger.log_hyperparams` raising a `TypeError` when running with `fast_dev_run=True` ([#6398](https://github.com/PyTorchLightning/pytorch-lightning/pull/6398))
 
 ## [1.2.5] - 2021-03-23
 
@@ -13,7 +22,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Update Gradient Clipping for the TPU Accelerator ([#6576](https://github.com/PyTorchLightning/pytorch-lightning/pull/6576))
 - Refactored setup for typing friendly ([#6590](https://github.com/PyTorchLightning/pytorch-lightning/pull/6590))
 
-
 ### Fixed
 
 - Fixed a bug where `all_gather` would not work correctly with `tpu_cores=8` ([#6587](https://github.com/PyTorchLightning/pytorch-lightning/pull/6587))
 
@@ -36,7 +44,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Fixed broadcast to use PyTorch `broadcast_object_list` and add `reduce_decision` ([#6410](https://github.com/PyTorchLightning/pytorch-lightning/pull/6410))
 - Fixed logger creating directory structure too early in DDP ([#6380](https://github.com/PyTorchLightning/pytorch-lightning/pull/6380))
 - Fixed DeepSpeed additional memory use on rank 0 when default device not set early enough ([#6460](https://github.com/PyTorchLightning/pytorch-lightning/pull/6460))
-- Fixed `DummyLogger.log_hyperparams` raising a `TypeError` when running with `fast_dev_run=True` ([#6398](https://github.com/PyTorchLightning/pytorch-lightning/pull/6398))
 - Fixed an issue with `Tuner.scale_batch_size` not finding the batch size attribute in the datamodule ([#5968](https://github.com/PyTorchLightning/pytorch-lightning/pull/5968))
 - Fixed an exception in the layer summary when the model contains torch.jit scripted submodules ([#6511](https://github.com/PyTorchLightning/pytorch-lightning/pull/6511))
 - Fixed when Train loop config was run during `Trainer.predict` ([#6541](https://github.com/PyTorchLightning/pytorch-lightning/pull/6541))
diff --git a/pytorch_lightning/loggers/base.py b/pytorch_lightning/loggers/base.py
index 4fdb5e8c437bf..035a42338fe68 100644
--- a/pytorch_lightning/loggers/base.py
+++ b/pytorch_lightning/loggers/base.py
@@ -279,12 +279,14 @@ def _sanitize_params(params: Dict[str, Any]) -> Dict[str, Any]:
         return params
 
     @abstractmethod
-    def log_hyperparams(self, params: argparse.Namespace):
+    def log_hyperparams(self, params: argparse.Namespace, *args, **kwargs):
         """
         Record hyperparameters.
 
         Args:
             params: :class:`~argparse.Namespace` containing the hyperparameters
+            args: Optional positional arguments, depending on the specific logger being used
+            kwargs: Optional keyword arguments, depending on the specific logger being used
         """
 
     def log_graph(self, model: LightningModule, input_array=None) -> None:
@@ -418,41 +420,41 @@ def nop(*args, **kw):
     def __getattr__(self, _):
         return self.nop
 
-    def __getitem__(self, idx):
-        # enables self.logger[0].experiment.add_image
-        # and self.logger.experiment[0].add_image(...)
+    def __getitem__(self, idx) -> "DummyExperiment":
+        # enables self.logger.experiment[0].add_image(...)
         return self
 
 
 class DummyLogger(LightningLoggerBase):
-    """ Dummy logger for internal use. Is usefull if we want to disable users
-    logger for a feature, but still secure that users code can run """
+    """
+    Dummy logger for internal use. It is useful if we want to disable the user's
+    logger for a feature, but still ensure that user code can run.
+    """
 
     def __init__(self):
         super().__init__()
         self._experiment = DummyExperiment()
 
     @property
-    def experiment(self):
+    def experiment(self) -> DummyExperiment:
         return self._experiment
 
-    @rank_zero_only
-    def log_metrics(self, metrics, step):
+    def log_metrics(self, *args, **kwargs) -> None:
         pass
 
-    @rank_zero_only
-    def log_hyperparams(self, params):
+    def log_hyperparams(self, *args, **kwargs) -> None:
         pass
 
     @property
-    def name(self):
-        pass
+    def name(self) -> str:
+        return ""
 
     @property
-    def version(self):
-        pass
+    def version(self) -> str:
+        return ""
 
-    def __getitem__(self, idx):
+    def __getitem__(self, idx) -> "DummyLogger":
+        # enables self.logger[0].experiment.add_image(...)
         return self
diff --git a/tests/loggers/test_base.py b/tests/loggers/test_base.py
index c48fef5e04b49..cf3a0cb74b3f4 100644
--- a/tests/loggers/test_base.py
+++ b/tests/loggers/test_base.py
@@ -229,15 +229,24 @@ def log_metrics(self, metrics, step):
 
 
 def test_dummyexperiment_support_indexing():
+    """ Test that the DummyExperiment can imitate indexing the experiment in a LoggerCollection. """
     experiment = DummyExperiment()
     assert experiment[0] == experiment
 
 
 def test_dummylogger_support_indexing():
+    """ Test that the DummyLogger can imitate indexing of a LoggerCollection. """
     logger = DummyLogger()
     assert logger[0] == logger
 
 
+def test_dummylogger_noop_method_calls():
+    """ Test that the DummyLogger methods can be called with arbitrary arguments. """
+    logger = DummyLogger()
+    logger.log_hyperparams("1", 2, three="three")
+    logger.log_metrics("1", 2, three="three")
+
+
 def test_np_sanitization():
     class CustomParamsLogger(CustomLogger):
diff --git a/tests/trainer/flags/test_fast_dev_run.py b/tests/trainer/flags/test_fast_dev_run.py
index 221951e788284..bcfdd6247d550 100644
--- a/tests/trainer/flags/test_fast_dev_run.py
+++ b/tests/trainer/flags/test_fast_dev_run.py
@@ -71,6 +71,7 @@ def test_step(self, batch, batch_idx):
     checkpoint_callback = ModelCheckpoint()
     early_stopping_callback = EarlyStopping()
     trainer_config = dict(
+        default_root_dir=tmpdir,
         fast_dev_run=fast_dev_run,
         val_check_interval=2,
         logger=True,
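
For context, a minimal sketch of the behavior this patch enables, assuming a pytorch-lightning build with this diff applied (the hyperparameter and metric values below are illustrative, not from the patch). With `fast_dev_run=True` the `Trainer` swaps the configured logger for a `DummyLogger`, so any logger call made from user code, e.g. `self.logger.log_hyperparams(params, metrics)`, must tolerate arbitrary arguments; before this patch, `DummyLogger.log_hyperparams` accepted only `params`, hence the `TypeError`.

from pytorch_lightning.loggers.base import DummyLogger

logger = DummyLogger()

# Both methods are now no-ops accepting any signature, mirroring loggers such
# as TensorBoardLogger, whose log_hyperparams also takes an optional metrics dict.
logger.log_hyperparams({"lr": 1e-3}, {"hp_metric": 0.0})  # no-op, no TypeError
logger.log_metrics({"loss": 0.5}, step=0)                 # no-op

# Indexing imitates a LoggerCollection, so indexed access keeps working too:
assert logger[0] is logger
assert logger.experiment[0] is logger.experiment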