Commit: Merge branch 'master' into metrics/fbeta

SkafteNicki authored Nov 23, 2020
2 parents 890f743 + cd90dd4 commit 9d5b505
Showing 3 changed files with 6 additions and 3 deletions.
CHANGELOG.md: 3 additions & 0 deletions
@@ -52,6 +52,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

### Changed

+ - Consistently use `step=trainer.global_step` in `LearningRateMonitor` independently of `logging_interval` ([#4376](https://github.com/PyTorchLightning/pytorch-lightning/pull/4376))
+
+
- Tuner algorithms will be skipped if `fast_dev_run=True` ([#3903](https://github.com/PyTorchLightning/pytorch-lightning/pull/3903))


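The second entry above makes the tuner a no-op under `fast_dev_run`. A minimal sketch of the affected call path, assuming a LightningModule named `model` defined elsewhere:

from pytorch_lightning import Trainer

# With #3903, fast_dev_run=True short-circuits the tuner: neither
# auto_scale_batch_size nor auto_lr_find runs any trials.
trainer = Trainer(fast_dev_run=True, auto_scale_batch_size=True)
trainer.tune(model)  # tuning is skipped; the batch size is left as-is
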
pytorch_lightning/callbacks/lr_monitor.py: 1 addition & 1 deletion
@@ -114,7 +114,7 @@ def on_train_epoch_start(self, trainer, *args, **kwargs):
        latest_stat = self._extract_stats(trainer, interval)

        if trainer.logger is not None and latest_stat:
-            trainer.logger.log_metrics(latest_stat, step=trainer.current_epoch)
+            trainer.logger.log_metrics(latest_stat, step=trainer.global_step)

    def _extract_stats(self, trainer, interval: str) -> Dict[str, float]:
        latest_stat = {}
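The one-line change above moves learning-rate points on the logger's x-axis: they are now plotted against the global step rather than the epoch index, whatever the monitor's `logging_interval`. A minimal usage sketch (the callback and its arguments are the library's public API; the logger setup is assumed):

from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import LearningRateMonitor

# Even with logging_interval='epoch', the learning rate is now logged
# at step=trainer.global_step, matching how other metrics are stepped.
lr_monitor = LearningRateMonitor(logging_interval='epoch')
trainer = Trainer(callbacks=[lr_monitor], max_epochs=3)
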
pytorch_lightning/tuner/batch_size_scaling.py: 2 additions & 2 deletions
@@ -102,7 +102,7 @@ def scale_batch_size(trainer,
    trainer.progress_bar_callback.disable()

    # Initially we just double in size until an OOM is encountered
-    new_size = _adjust_batch_size(trainer, value=init_val)  # initially set to init_val
+    new_size = _adjust_batch_size(trainer, batch_arg_name, value=init_val)  # initially set to init_val
    if mode == 'power':
        new_size = _run_power_scaling(trainer, model, new_size, batch_arg_name, max_trials, **fit_kwargs)
    elif mode == 'binsearch':
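
For context on the hunk above: in 'power' mode the tuner keeps doubling the batch size until a trial runs out of memory. A standalone sketch of that idea, with a hypothetical `run_trial` callable standing in for the fit call (not the library code):

def power_scale(run_trial, init_val=2, max_trials=25):
    # Double the candidate size until run_trial raises an OOM-like
    # error, then fall back to the last size that fit.
    size = init_val
    for _ in range(max_trials):
        try:
            run_trial(size)   # one short fit at the candidate size
        except RuntimeError:  # treated as out-of-memory in this sketch
            return max(size // 2, 1)
        size *= 2
    return size
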
@@ -231,7 +231,7 @@ def _run_binsearch_scaling(trainer, model, new_size, batch_arg_name, max_trials,
            garbage_collection_cuda()
            high = new_size
            midval = (high + low) // 2
-            new_size, _ = _adjust_batch_size(trainer, value=midval, desc='failed')
+            new_size, _ = _adjust_batch_size(trainer, batch_arg_name, value=midval, desc='failed')
            if high - low <= 1:
                break
        else:
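Both hunks fix the same omission: `_adjust_batch_size` accepts the name of the model attribute to modify, and the calls above previously dropped it, so a non-default attribute name was silently ignored in favour of the default. A hedged sketch of the entry point with the argument forwarded, assuming `trainer` and a `model` exposing `model.batch_size` are defined elsewhere (keyword names follow the signatures visible in the hunks):

from pytorch_lightning.tuner.batch_size_scaling import scale_batch_size

# With the fix, binsearch mode halves into the (low, high) window after
# an OOM by adjusting the attribute named by batch_arg_name, instead of
# always falling back to the default attribute.
new_size = scale_batch_size(trainer, model, mode='binsearch',
                            batch_arg_name='batch_size')
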
