replaced new LinearLR scheduler (only available with torch >= 1.10.0)… #928

Merged 1 commit on May 3, 2022
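For context: `torch.optim.lr_scheduler.LinearLR` was only added in torch 1.10.0, so referencing it in the test suite breaks runs against older torch installs; the commit swaps it for `StepLR`, which has been available much longer. If one wanted to keep `LinearLR` where available, a version guard is possible. A minimal sketch, assuming a tuple-based version check (this fallback logic is illustrative, not what the PR does):

```python
import torch

# LinearLR was added in torch 1.10.0; fall back to StepLR on older versions.
# Note: torch.__version__ may carry a local suffix such as "+cu113",
# so only the first two components are compared here.
major, minor = (int(v) for v in torch.__version__.split(".")[:2])
if (major, minor) >= (1, 10):
    scheduler_cls, scheduler_kwargs = torch.optim.lr_scheduler.LinearLR, {}
else:
    scheduler_cls, scheduler_kwargs = torch.optim.lr_scheduler.StepLR, {"step_size": 10}
```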
3 changes: 0 additions & 3 deletions darts/models/forecasting/block_rnn_model.py
@@ -78,9 +78,6 @@ def __init__(
 
         super().__init__(**kwargs)
 
-        # required for all modules -> saves hparams for checkpoints
-        self.save_hyperparameters()
-
         # Defining parameters
         self.hidden_dim = hidden_dim
         self.n_layers = num_layers
3 changes: 0 additions & 3 deletions darts/models/forecasting/nbeats.py
@@ -358,9 +358,6 @@ def __init__(
         """
         super().__init__(**kwargs)
 
-        # required for all modules -> saves hparams for checkpoints
-        self.save_hyperparameters()
-
         self.input_dim = input_dim
         self.output_dim = output_dim
         self.nr_params = nr_params
3 changes: 0 additions & 3 deletions darts/models/forecasting/nhits.py
@@ -347,9 +347,6 @@ def __init__(
         """
         super().__init__(**kwargs)
 
-        # required for all modules -> saves hparams for checkpoints
-        self.save_hyperparameters()
-
         self.input_dim = input_dim
         self.output_dim = output_dim
         self.nr_params = nr_params
3 changes: 3 additions & 0 deletions darts/models/forecasting/pl_forecasting_module.py
@@ -75,6 +75,9 @@ def __init__(
         """
         super().__init__()
 
+        # save hyper parameters for saving/loading
+        self.save_hyperparameters()
+
         raise_if(
             input_chunk_length is None or output_chunk_length is None,
             "Both `input_chunk_length` and `output_chunk_length` must be passed to `PLForecastingModule`",
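The relocated call works because PyTorch Lightning's `save_hyperparameters()` collects the `__init__` arguments of the concrete subclass even when invoked from a parent class, so a single call in `PLForecastingModule` replaces the per-model calls removed above. A minimal sketch of that behavior, with hypothetical class and argument names (not from Darts):

```python
import pytorch_lightning as pl


class Base(pl.LightningModule):
    def __init__(self):
        super().__init__()
        # Inspects the call stack and records the init args of the
        # most-derived class, so they end up in checkpoints.
        self.save_hyperparameters()


class Child(Base):
    def __init__(self, hidden_dim: int = 32, **kwargs):
        super().__init__(**kwargs)
        self.hidden_dim = hidden_dim


print(Child(hidden_dim=64).hparams)  # expect hidden_dim=64 to be recorded
```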
3 changes: 0 additions & 3 deletions darts/models/forecasting/rnn_model.py
@@ -73,9 +73,6 @@ def __init__(
         # RNNModule doesn't really need input and output_chunk_length for PLModule
         super().__init__(**kwargs)
 
-        # required for all modules -> saves hparams for checkpoints
-        self.save_hyperparameters()
-
         # Defining parameters
         self.target_size = target_size
         self.nr_params = nr_params
3 changes: 0 additions & 3 deletions darts/models/forecasting/tcn_model.py
@@ -183,9 +183,6 @@ def __init__(
 
         super().__init__(**kwargs)
 
-        # required for all modules -> saves hparams for checkpoints
-        self.save_hyperparameters()
-
         # Defining parameters
         self.input_size = input_size
         self.n_filters = num_filters
3 changes: 0 additions & 3 deletions darts/models/forecasting/tft_model.py
@@ -88,9 +88,6 @@ def __init__(
 
         super().__init__(**kwargs)
 
-        # required for all modules -> saves hparams for checkpoints
-        self.save_hyperparameters()
-
         self.n_targets, self.loss_size = output_dim
         self.variables_meta = variables_meta
         self.hidden_size = hidden_size
3 changes: 0 additions & 3 deletions darts/models/forecasting/transformer_model.py
@@ -124,9 +124,6 @@ def __init__(
 
         super().__init__(**kwargs)
 
-        # required for all modules -> saves hparams for checkpoints
-        self.save_hyperparameters()
-
         self.input_size = input_size
         self.target_size = output_size
         self.nr_params = nr_params
1 addition & 1 deletion (separate test file; the file name is not shown in this view)
@@ -324,7 +324,7 @@ def test_lr_schedulers(self):
         series = TimeSeries.from_series(pd_series)
 
         lr_schedulers = [
-            (torch.optim.lr_scheduler.LinearLR, {}),
+            (torch.optim.lr_scheduler.StepLR, {"step_size": 10}),
             (
                 torch.optim.lr_scheduler.ReduceLROnPlateau,
                 {"threshold": 0.001, "monitor": "train_loss"},
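`StepLR` behaves differently from `LinearLR` (stepwise decay rather than a linear ramp), but for this test only availability across torch versions matters. A self-contained sketch of the replacement scheduler; the model and optimizer here are illustrative, not taken from the test:

```python
import torch

# A tiny model/optimizer pair just to drive the scheduler.
model = torch.nn.Linear(4, 1)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

# StepLR multiplies the learning rate by gamma (default 0.1)
# every `step_size` epochs: 0.1 -> 0.01 -> 0.001, and so on.
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10)

for epoch in range(25):
    optimizer.step()  # the epoch's training steps would go here
    scheduler.step()

print(scheduler.get_last_lr())  # roughly [0.001] after two decays
```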