More tweaks in benchmark code
jatinchowdhury18 committed Dec 1, 2023
1 parent c65b549 commit 04cb333
Showing 1 changed file with 5 additions and 5 deletions.
10 changes: 5 additions & 5 deletions bench/layer_creator.hpp
@@ -135,28 +135,28 @@ std::unique_ptr<RTNeural::Layer<Float>>
 create_layer(const std::string &layer_type, size_t in_size, size_t out_size) {
   if (layer_type == "dense") {
     auto layer = std::make_unique<RTNeural::Dense<Float>>(in_size, out_size);
-    randomise_dense(*layer);
+    randomise_dense<Float>(*layer);
     return std::move(layer);
   }
 
   if (layer_type == "conv1d") {
     const auto kernel_size = in_size - 1;
     auto layer = std::make_unique<RTNeural::Conv1D<Float>>(in_size, out_size, kernel_size, 1);
-    randomise_conv1d(*layer, kernel_size);
+    randomise_conv1d<Float>(*layer, kernel_size);
     return std::move(layer);
   }
 
   if (layer_type == "gru") {
     auto layer =
         std::make_unique<RTNeural::GRULayer<Float>>(in_size, out_size);
-    randomise_gru(*layer);
+    randomise_gru<Float>(*layer);
     return std::move(layer);
   }
 
   if (layer_type == "lstm") {
     auto layer =
         std::make_unique<RTNeural::LSTMLayer<Float>>(in_size, out_size);
-    randomise_lstm(*layer);
+    randomise_lstm<Float>(*layer);
     return std::move(layer);
   }
 
@@ -166,7 +166,7 @@ create_layer(const std::string &layer_type, size_t in_size, size_t out_size) {
   }
 
   if (layer_type == "relu") {
-    auto layer = std::make_unique<RTNeural::ReLuActivation<double>>(in_size);
+    auto layer = std::make_unique<RTNeural::ReLuActivation<Float>>(in_size);
     return std::move(layer);
   }
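Why this matters: each randomise_* helper is now instantiated with an explicit Float template argument, and the relu branch constructs ReLuActivation<Float> instead of hard-coding double. The sketch below is a minimal, self-contained illustration of the pattern, not the benchmark's actual code: Dense and randomise_dense here are hypothetical stand-ins, and Float = float is an assumption (the bench selects the type at build time); the real RTNeural types and helpers may have different signatures.

// Hypothetical stand-ins for the RTNeural layer type and the bench's
// randomise_* helpers; the real signatures may differ.
#include <cstddef>
#include <memory>
#include <random>
#include <vector>

using Float = float; // assumed; the benchmark picks this at build time

template <typename T>
struct Dense {
  Dense(std::size_t in, std::size_t out) : weights(in * out, T(0)) {}
  std::vector<T> weights;
};

// Fill the layer's weights with uniform random values in [-1, 1).
template <typename T>
void randomise_dense(Dense<T> &dense) {
  std::mt19937 rng{42};
  std::uniform_real_distribution<T> dist(T(-1), T(1));
  for (auto &w : dense.weights)
    w = dist(rng);
}

int main() {
  auto layer = std::make_unique<Dense<Float>>(8, 8);
  // randomise_dense(*layer) would also deduce T = Float here; spelling the
  // argument out explicitly documents the intended instantiation and turns
  // any float/double mismatch into a compile-time error instead of a
  // silent deduction.
  randomise_dense<Float>(*layer);
  return 0;
}

On the relu fix specifically: if Float is float, ReLuActivation<double> derives from Layer<double>, not Layer<Float>, so the old line would only have compiled in a double build; using Float keeps the branch valid under either configuration.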
