From 3fe1f9fb28de015096a8dd1a26d720ba8d7d40ad Mon Sep 17 00:00:00 2001 From: Mike Oliphant Date: Mon, 4 Nov 2024 12:19:47 -0800 Subject: [PATCH 1/2] Add MathsProvider to template for dynamic model loading --- RTNeural/model_loader.h | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/RTNeural/model_loader.h b/RTNeural/model_loader.h index a9e28ae..559552d 100644 --- a/RTNeural/model_loader.h +++ b/RTNeural/model_loader.h @@ -385,10 +385,10 @@ namespace json_parser } /** Creates a LSTMLayer from a json representation of the layer weights. */ - template - std::unique_ptr> createLSTM(int in_size, int out_size, const nlohmann::json& weights) + template + std::unique_ptr> createLSTM(int in_size, int out_size, const nlohmann::json& weights) { - auto lstm = std::make_unique>(in_size, out_size); + auto lstm = std::make_unique>(in_size, out_size); loadLSTM(*lstm.get(), weights); return std::move(lstm); } @@ -602,7 +602,7 @@ namespace json_parser } /** Creates a neural network model from a json stream. */ - template + template std::unique_ptr> parseJson(const nlohmann::json& parent, const bool debug = false) { auto shape = parent.at("in_shape"); @@ -688,7 +688,7 @@ namespace json_parser } else if(type == "lstm") { - auto lstm = createLSTM(model->getNextInSize(), layerDims, weights); + auto lstm = createLSTM(model->getNextInSize(), layerDims, weights); model->addLayer(lstm.release()); } else if(type == "prelu") From 42a2a2c97c0ad528a53a6fb7382f2baf0e6eb689 Mon Sep 17 00:00:00 2001 From: Mike Oliphant Date: Mon, 4 Nov 2024 13:56:46 -0800 Subject: [PATCH 2/2] Add MathsProvider to GRU layer creation and to createActivation. 
--- RTNeural/model_loader.h | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/RTNeural/model_loader.h b/RTNeural/model_loader.h index 559552d..7d66b68 100644 --- a/RTNeural/model_loader.h +++ b/RTNeural/model_loader.h @@ -318,10 +318,10 @@ namespace json_parser } /** Creates a GRULayer from a json representation of the layer weights. */ - template - std::unique_ptr> createGRU(int in_size, int out_size, const nlohmann::json& weights) + template + std::unique_ptr> createGRU(int in_size, int out_size, const nlohmann::json& weights) { - auto gru = std::make_unique>(in_size, out_size); + auto gru = std::make_unique>(in_size, out_size); loadGRU(*gru.get(), weights); return std::move(gru); } @@ -560,24 +560,24 @@ namespace json_parser } /** Creates an activation layer of a given type. */ - template + template std::unique_ptr> createActivation(const std::string& activationType, int dims) { if(activationType == "tanh") - return std::make_unique>(dims); + return std::make_unique>(dims); if(activationType == "relu") return std::make_unique>(dims); if(activationType == "sigmoid") - return std::make_unique>(dims); + return std::make_unique>(dims); if(activationType == "softmax") - return std::make_unique>(dims); + return std::make_unique>(dims); if(activationType == "elu") - return std::make_unique>(dims); + return std::make_unique>(dims); return {}; } @@ -639,7 +639,7 @@ namespace json_parser if(!activationType.empty()) { debug_print(" activation: " + activationType, debug); - auto activation = createActivation(activationType, layerDims); + auto activation = createActivation(activationType, layerDims); _model->addLayer(activation.release()); } } @@ -683,7 +683,7 @@ namespace json_parser } else if(type == "gru") { - auto gru = createGRU(model->getNextInSize(), layerDims, weights); + auto gru = createGRU(model->getNextInSize(), layerDims, weights); model->addLayer(gru.release()); } else if(type == "lstm")