From 3b5a9bcc3fd3bb0ecd1199332bd61936287621ad Mon Sep 17 00:00:00 2001
From: Stefan Weil <sw@weilnetz.de>
Date: Mon, 28 Nov 2016 12:32:17 +0100
Subject: [PATCH] lstm: Fix explicit null dereferenced

Coverity report:

CID 1366443 (#1 of 1): Explicit null dereferenced (FORWARD_NULL)
3. var_deref_model: Passing null pointer this->sub_trainer_ to
training_iteration, which dereferences it.

Signed-off-by: Stefan Weil <sw@weilnetz.de>
---
 lstm/lstmtrainer.cpp | 21 +++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/lstm/lstmtrainer.cpp b/lstm/lstmtrainer.cpp
index cd59dc5000..9900e08710 100644
--- a/lstm/lstmtrainer.cpp
+++ b/lstm/lstmtrainer.cpp
@@ -522,17 +522,18 @@ void LSTMTrainer::StartSubtrainer(STRING* log_msg) {
     *log_msg += " Failed to revert to previous best for trial!";
     delete sub_trainer_;
     sub_trainer_ = NULL;
+  } else {
+    log_msg->add_str_int(" Trial sub_trainer_ from iteration ",
+                         sub_trainer_->training_iteration());
+    // Reduce learning rate so it doesn't diverge this time.
+    sub_trainer_->ReduceLearningRates(this, log_msg);
+    // If it fails again, we will wait twice as long before reverting again.
+    int stall_offset = learning_iteration() - sub_trainer_->learning_iteration();
+    stall_iteration_ = learning_iteration() + 2 * stall_offset;
+    sub_trainer_->stall_iteration_ = stall_iteration_;
+    // Re-save the best trainer with the new learning rates and stall iteration.
+    checkpoint_writer_->Run(NO_BEST_TRAINER, sub_trainer_, &best_trainer_);
   }
-  log_msg->add_str_int(" Trial sub_trainer_ from iteration ",
-                       sub_trainer_->training_iteration());
-  // Reduce learning rate so it doesn't diverge this time.
-  sub_trainer_->ReduceLearningRates(this, log_msg);
-  // If it fails again, we will wait twice as long before reverting again.
-  int stall_offset = learning_iteration() - sub_trainer_->learning_iteration();
-  stall_iteration_ = learning_iteration() + 2 * stall_offset;
-  sub_trainer_->stall_iteration_ = stall_iteration_;
-  // Re-save the best trainer with the new learning rates and stall iteration.
-  checkpoint_writer_->Run(NO_BEST_TRAINER, sub_trainer_, &best_trainer_);
 }

 // While the sub_trainer_ is behind the current training iteration and its