From a5f478fd7152c051f49e070b4fa7e24a5831ae60 Mon Sep 17 00:00:00 2001
From: Masoud Kazemi <mkaze@users.noreply.github.com>
Date: Wed, 16 Jun 2021 13:33:28 +0430
Subject: [PATCH] Fix naming inconsistencies (#114)

* Fix naming inconsistencies

* Some small code reformatting

* Some more code reformatting

* Remove unused import

* Revert change in notebook

* Some more code reformatting
---
 .../dl/api/inference/keras/KerasConstants.kt  |  49 +-
 .../dl/api/inference/keras/ModelLoader.kt     | 503 +++++++++---------
 .../dl/api/inference/keras/ModelSaver.kt      | 387 +++++++-------
 ...ooling1DTest.kt => GlobalAvgPool1DTest.kt} |   2 +-
 ...ooling2DTest.kt => GlobalAvgPool2DTest.kt} |   2 +-
 ...ooling3DTest.kt => GlobalAvgPool3DTest.kt} |   2 +-
 .../dl/api/core/layer/PoolLayerTest.kt        |   8 +-
 7 files changed, 475 insertions(+), 478 deletions(-)
 rename api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/{GlobalAvgPooling1DTest.kt => GlobalAvgPool1DTest.kt} (87%)
 rename api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/{GlobalAvgPooling2DTest.kt => GlobalAvgPool2DTest.kt} (87%)
 rename api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/{GlobalAvgPooling3DTest.kt => GlobalAvgPool3DTest.kt} (88%)
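
Note (illustrative, not part of the applied diff): the renames unify three
parallel naming schemes, so that each Keras layer maps to one class-name
constant (LAYER_MAX_POOL_2D), one loader function (createMaxPool2DLayer),
and one saver function (createKerasMaxPool2DLayer). A minimal sketch of the
convention follows; the FooBar layer is hypothetical and stands in for any
real layer type, while LayerConfig, KerasLayer, and DATATYPE_FLOAT32 are the
existing types and constants visible in this patch:

    // Hypothetical FooBar layer, shown only to illustrate the naming pattern.
    internal const val LAYER_FOO_BAR: String = "FooBar"

    // Loader side (ModelLoader.kt): create<LayerName>Layer, returning Layer.
    private fun createFooBarLayer(config: LayerConfig, name: String): Layer {
        return FooBar(name = name)
    }

    // Saver side (ModelSaver.kt): createKeras<LayerName>Layer, returning KerasLayer.
    private fun createKerasFooBarLayer(layer: FooBar): KerasLayer {
        val configX = LayerConfig(dtype = DATATYPE_FLOAT32, name = layer.name)
        return KerasLayer(class_name = LAYER_FOO_BAR, config = configX)
    }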

diff --git a/api/src/main/kotlin/org/jetbrains/kotlinx/dl/api/inference/keras/KerasConstants.kt b/api/src/main/kotlin/org/jetbrains/kotlinx/dl/api/inference/keras/KerasConstants.kt
index a560d1ba0..87999086f 100644
--- a/api/src/main/kotlin/org/jetbrains/kotlinx/dl/api/inference/keras/KerasConstants.kt
+++ b/api/src/main/kotlin/org/jetbrains/kotlinx/dl/api/inference/keras/KerasConstants.kt
@@ -6,35 +6,41 @@
 package org.jetbrains.kotlinx.dl.api.inference.keras
 
 // Keras layers
+// Core layers
+internal const val LAYER_DENSE: String = "Dense"
+internal const val LAYER_INPUT: String = "InputLayer"
+internal const val LAYER_ACTIVATION: String = "Activation"
+// Convolution layers
 internal const val LAYER_CONV1D: String = "Conv1D"
 internal const val LAYER_CONV2D: String = "Conv2D"
 internal const val LAYER_CONV3D: String = "Conv3D"
 internal const val LAYER_DEPTHWISE_CONV2D: String = "DepthwiseConv2D"
 internal const val LAYER_SEPARABLE_CONV2D: String = "SeparableConv2D"
-internal const val LAYER_DENSE: String = "Dense"
-internal const val LAYER_INPUT: String = "InputLayer"
+// Pooling layers
 internal const val LAYER_MAX_POOL_1D: String = "MaxPooling1D"
-internal const val LAYER_MAX_POOLING_2D: String = "MaxPooling2D"
-internal const val LAYER_MAX_POOLING_3D: String = "MaxPooling3D"
+internal const val LAYER_MAX_POOL_2D: String = "MaxPooling2D"
+internal const val LAYER_MAX_POOL_3D: String = "MaxPooling3D"
 internal const val LAYER_AVG_POOL_1D: String = "AveragePooling1D"
-internal const val LAYER_AVG_POOLING_2D: String = "AvgPooling2D"
-internal const val LAYER_AVERAGE_POOLING_2D: String = "AveragePooling2D"
+internal const val LAYER_AVG_POOL_2D: String = "AveragePooling2D"
 internal const val LAYER_AVG_POOL_3D: String = "AveragePooling3D"
-internal const val LAYER_RESCALING: String = "Rescaling"
+internal const val LAYER_GLOBAL_MAX_POOL_1D: String = "GlobalMaxPooling1D"
+internal const val LAYER_GLOBAL_AVG_POOL_1D: String = "GlobalAveragePooling1D"
+internal const val LAYER_GLOBAL_AVG_POOL_2D: String = "GlobalAveragePooling2D"
+internal const val LAYER_GLOBAL_AVG_POOL_3D: String = "GlobalAveragePooling3D"
+// Recurrent layers
+internal const val LAYER_LSTM: String = "LSTM"
+// Normalization layers
 internal const val LAYER_NORMALIZATION: String = "Normalization"
+internal const val LAYER_BATCH_NORM: String = "BatchNormalization"
+// Regularization layers
+internal const val LAYER_DROPOUT: String = "Dropout"
+// Attention layers
+// Reshaping layers
 internal const val LAYER_FLATTEN: String = "Flatten"
 internal const val LAYER_RESHAPE: String = "Reshape"
 internal const val LAYER_ZERO_PADDING_2D = "ZeroPadding2D"
 internal const val LAYER_CROPPING_2D = "Cropping2D"
-internal const val LAYER_BATCH_NORM: String = "BatchNormalization"
-internal const val LAYER_ACTIVATION: String = "Activation"
-internal const val LAYER_RELU: String = "ReLU"
-internal const val LAYER_ELU: String = "ELU"
-internal const val LAYER_PRELU: String = "PReLU"
-internal const val LAYER_LEAKY_RELU: String = "LeakyReLU"
-internal const val LAYER_THRESHOLDED_RELU = "ThresholdedReLU"
-internal const val LAYER_LSTM: String = "LSTM"
-internal const val LAYER_DROPOUT: String = "Dropout"
+// Merging layers
 internal const val LAYER_ADD: String = "Add"
 internal const val LAYER_MULTIPLY: String = "Multiply"
 internal const val LAYER_SUBTRACT: String = "Subtract"
@@ -42,10 +48,13 @@ internal const val LAYER_AVERAGE: String = "Average"
 internal const val LAYER_MAXIMUM: String = "Maximum"
 internal const val LAYER_MINIMUM: String = "Minimum"
 internal const val LAYER_CONCATENATE: String = "Concatenate"
-internal const val LAYER_GLOBAL_AVG_POOLING_2D: String = "GlobalAveragePooling2D"
-internal const val LAYER_GLOBAL_AVG_POOLING_1D: String = "GlobalAveragePooling1D"
-internal const val LAYER_GLOBAL_MAX_POOL_1D: String = "GlobalMaxPooling1D"
-internal const val LAYER_GLOBAL_AVG_POOLING_3D: String = "GlobalAveragePooling3D"
+// Locally-connected layers
+// Activation layers
+internal const val LAYER_RELU: String = "ReLU"
+internal const val LAYER_ELU: String = "ELU"
+internal const val LAYER_PRELU: String = "PReLU"
+internal const val LAYER_LEAKY_RELU: String = "LeakyReLU"
+internal const val LAYER_THRESHOLDED_RELU = "ThresholdedReLU"
 internal const val LAYER_SOFTMAX: String = "Softmax"
 
 // Keras data types
diff --git a/api/src/main/kotlin/org/jetbrains/kotlinx/dl/api/inference/keras/ModelLoader.kt b/api/src/main/kotlin/org/jetbrains/kotlinx/dl/api/inference/keras/ModelLoader.kt
index 990c9cc2f..7eb509c16 100644
--- a/api/src/main/kotlin/org/jetbrains/kotlinx/dl/api/inference/keras/ModelLoader.kt
+++ b/api/src/main/kotlin/org/jetbrains/kotlinx/dl/api/inference/keras/ModelLoader.kt
@@ -113,66 +113,53 @@ private fun convertToLayer(
     kerasLayer: KerasLayer
 ): Layer {
     return when (kerasLayer.class_name) {
-        LAYER_CONV1D -> createConv1D(kerasLayer.config!!, kerasLayer.config.name!!)
-        LAYER_CONV2D -> createConv2D(kerasLayer.config!!, kerasLayer.config.name!!)
-        LAYER_CONV3D -> createConv3D(kerasLayer.config!!, kerasLayer.config.name!!)
-        LAYER_DEPTHWISE_CONV2D -> createDepthwiseConv2D(kerasLayer.config!!, kerasLayer.config.name!!)
-        LAYER_SEPARABLE_CONV2D -> createSeparableConv2D(kerasLayer.config!!, kerasLayer.config.name!!)
-        LAYER_FLATTEN -> createFlatten(kerasLayer.config!!.name!!)
-        LAYER_RESHAPE -> createReshape(kerasLayer.config!!, kerasLayer.config.name!!)
-        LAYER_MAX_POOL_1D -> createMaxPool1D(kerasLayer.config!!, kerasLayer.config.name!!)
-        LAYER_MAX_POOLING_2D -> createMaxPooling2D(
-            kerasLayer.config!!,
-            kerasLayer.config.name!!
-        )
-        LAYER_AVG_POOL_1D -> createAvgPool1D(kerasLayer.config!!, kerasLayer.config.name!!)
-        LAYER_MAX_POOLING_3D -> createMaxPooling3D(
-            kerasLayer.config!!,
-            kerasLayer.config.name!!
-        )
-        LAYER_AVG_POOLING_2D -> createAvgPooling2D(
-            kerasLayer.config!!,
-            kerasLayer.config.name!!
-        )
-        LAYER_AVERAGE_POOLING_2D -> createAvgPooling2D(
-            kerasLayer.config!!,
-            kerasLayer.config.name!!
-        )
-        LAYER_AVG_POOL_3D -> createAvgPool3DLayer(kerasLayer.config!!, kerasLayer.config.name!!)
-        LAYER_DENSE -> createDense(kerasLayer.config!!, kerasLayer.config.name!!)
-        LAYER_ZERO_PADDING_2D -> createZeroPadding2D(kerasLayer.config!!, kerasLayer.config.name!!)
-        LAYER_CROPPING_2D -> createCropping2D(kerasLayer.config!!, kerasLayer.config.name!!)
-        LAYER_BATCH_NORM -> createBatchNorm(kerasLayer.config!!, kerasLayer.config.name!!)
+        // Core layers
         LAYER_ACTIVATION -> createActivationLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        LAYER_DENSE -> createDenseLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        // Convolution layers
+        LAYER_CONV1D -> createConv1DLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        LAYER_CONV2D -> createConv2DLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        LAYER_CONV3D -> createConv3DLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        LAYER_DEPTHWISE_CONV2D -> createDepthwiseConv2DLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        LAYER_SEPARABLE_CONV2D -> createSeparableConv2DLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        // Pooling layers
+        LAYER_MAX_POOL_1D -> createMaxPool1DLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        LAYER_MAX_POOL_2D -> createMaxPool2DLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        LAYER_MAX_POOL_3D -> createMaxPool3DLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        LAYER_AVG_POOL_1D -> createAvgPool1DLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        LAYER_AVG_POOL_2D -> createAvgPool2DLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        LAYER_AVG_POOL_3D -> createAvgPool3DLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        LAYER_GLOBAL_MAX_POOL_1D -> createGlobalMaxPool1DLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        LAYER_GLOBAL_AVG_POOL_1D -> createGlobalAvgPool1DLayer(kerasLayer.config!!.name!!)
+        LAYER_GLOBAL_AVG_POOL_2D -> createGlobalAvgPool2DLayer(kerasLayer.config!!.name!!)
+        LAYER_GLOBAL_AVG_POOL_3D -> createGlobalAvgPool3DLayer(kerasLayer.config!!.name!!)
+        // Recurrent layers
+        // Normalization layers
+        LAYER_BATCH_NORM -> createBatchNormLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        // Regularization layers
+        LAYER_DROPOUT -> createDropoutLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        // Attention layers
+        // Reshaping layers
+        LAYER_FLATTEN -> createFlattenLayer(kerasLayer.config!!.name!!)
+        LAYER_RESHAPE -> createReshapeLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        LAYER_CROPPING_2D -> createCropping2DLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        LAYER_ZERO_PADDING_2D -> createZeroPadding2DLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        // Merging layers
+        LAYER_ADD -> createAddLayer(kerasLayer.config!!.name!!)
+        LAYER_AVERAGE -> createAverageLayer(kerasLayer.config!!.name!!)
+        LAYER_SUBTRACT -> createSubtractLayer(kerasLayer.config!!.name!!)
+        LAYER_MAXIMUM -> createMaximumLayer(kerasLayer.config!!.name!!)
+        LAYER_MINIMUM -> createMinimumLayer(kerasLayer.config!!.name!!)
+        LAYER_MULTIPLY -> createMultiplyLayer(kerasLayer.config!!.name!!)
+        LAYER_CONCATENATE -> createConcatenateLayer(kerasLayer.config!!, kerasLayer.config.name!!)
+        // Locally-connected layers
+        // Activation layers
         LAYER_RELU -> createReLULayer(kerasLayer.config!!, kerasLayer.config.name!!)
         LAYER_ELU -> createELULayer(kerasLayer.config!!, kerasLayer.config.name!!)
         LAYER_PRELU -> createPReLULayer(kerasLayer.config!!, kerasLayer.config.name!!)
         LAYER_LEAKY_RELU -> createLeakyReLULayer(kerasLayer.config!!, kerasLayer.config.name!!)
         LAYER_THRESHOLDED_RELU -> createThresholdedReLULayer(kerasLayer.config!!, kerasLayer.config.name!!)
         LAYER_SOFTMAX -> createSoftmaxLayer(kerasLayer.config!!, kerasLayer.config.name!!)
-        LAYER_DROPOUT -> createDropoutLayer(kerasLayer.config!!, kerasLayer.config.name!!)
-        LAYER_ADD -> createAddLayer(kerasLayer.config!!.name!!)
-        LAYER_AVERAGE -> createAverageLayer(kerasLayer.config!!.name!!)
-        LAYER_SUBTRACT -> createSubtractLayer(
-            kerasLayer.config!!.name!!
-        )
-        LAYER_MAXIMUM -> createMaximumLayer(kerasLayer.config!!.name!!)
-        LAYER_MINIMUM -> createMinimumLayer(kerasLayer.config!!.name!!)
-        LAYER_MULTIPLY -> createMultiplyLayer(
-            kerasLayer.config!!.name!!
-        )
-        LAYER_CONCATENATE -> createConcatenateLayer(
-            kerasLayer.config!!,
-            kerasLayer.config.name!!
-        )
-        LAYER_GLOBAL_AVG_POOLING_2D -> createGlobalAvgPooling2D(
-            kerasLayer.config!!.name!!
-        )
-        LAYER_GLOBAL_MAX_POOL_1D -> createGlobalMaxPool1D(kerasLayer.config!!, kerasLayer.config.name!!)
-        LAYER_GLOBAL_AVG_POOLING_1D -> createGlobalAvgPooling1D(kerasLayer.config!!.name!!)
-        LAYER_GLOBAL_AVG_POOLING_3D -> createGlobalAvgPooling3D(
-            kerasLayer.config!!.name!!
-        )
         else -> throw IllegalStateException("${kerasLayer.class_name} is not supported yet!")
     }
 }
@@ -295,88 +282,215 @@ private fun convertToLayer(
     return layer
 }
 
-private fun createGlobalAvgPooling2D(
-    name: String
-): Layer {
+private fun convertToRegularizer(regularizer: KerasRegularizer?): Regularizer? {
+    return if (regularizer != null) {
+        val l1 = regularizer.config!!.l1
+        val l2 = regularizer.config!!.l2
+        if (l1 != 0.0 && l2 != 0.0) {
+            L2L1(l1!!.toFloat(), l2!!.toFloat())
+        } else if (l1 == 0.0 && l2 != 0.0) {
+            L2(l2!!.toFloat())
+        } else if (l1 != 0.0 && l2 == 0.0) {
+            L1(l1!!.toFloat())
+        } else {
+            null
+        }
+    } else {
+        null
+    }
+}
+
+private fun convertToInitializer(initializer: KerasInitializer): Initializer {
+    val seed = if (initializer.config!!.seed != null) {
+        initializer.config.seed!!.toLong()
+    } else 12L
+
+    return when (initializer.class_name!!) {
+        INITIALIZER_GLOROT_UNIFORM -> GlorotUniform(seed = seed)
+        INITIALIZER_GLOROT_NORMAL -> GlorotNormal(seed = seed)
+        INITIALIZER_HE_NORMAL -> HeNormal(seed = seed)
+        INITIALIZER_HE_UNIFORM -> HeUniform(seed = seed)
+        INITIALIZER_LECUN_NORMAL -> LeCunNormal(seed = seed)
+        INITIALIZER_LECUN_UNIFORM -> LeCunUniform(seed = seed)
+        INITIALIZER_ZEROS -> RandomUniform(
+            seed = seed,
+            minVal = 0.0f,
+            maxVal = 0.0f
+        ) // used instead of the real initializer, since it does not influence anything
+        INITIALIZER_CONSTANT -> RandomUniform(
+            seed = seed,
+            minVal = 0.0f,
+            maxVal = 0.0f
+        ) // used instead of the real initializer, since it does not influence anything
+        INITIALIZER_ONES -> RandomUniform(
+            seed = seed,
+            minVal = 1.0f,
+            maxVal = 1.0f
+        ) // used instead of the real initializer, since it does not influence anything
+        INITIALIZER_RANDOM_NORMAL -> RandomNormal(
+            seed = seed,
+            mean = initializer.config.mean!!.toFloat(),
+            stdev = initializer.config.stddev!!.toFloat()
+        )
+        INITIALIZER_RANDOM_UNIFORM -> RandomUniform(
+            seed = seed,
+            minVal = initializer.config.minval!!.toFloat(),
+            maxVal = initializer.config.maxval!!.toFloat()
+        )
+        INITIALIZER_TRUNCATED_NORMAL -> TruncatedNormal(seed = seed)
+        INITIALIZER_VARIANCE_SCALING -> convertVarianceScalingInitializer(initializer)
+        INITIALIZER_ORTHOGONAL -> Orthogonal(seed = seed, gain = initializer.config.gain!!.toFloat())
+        /*INITIALIZER_CONSTANT -> Constant(initializer.config.value!!.toFloat())*/
+        INITIALIZER_IDENTITY -> Identity(initializer.config.gain?.toFloat() ?: 1f)
+        else -> throw IllegalStateException("${initializer.class_name} is not supported yet!")
+    }
+}
+
+private fun convertVarianceScalingInitializer(initializer: KerasInitializer): Initializer {
+    val seed = if (initializer.config!!.seed != null) {
+        initializer.config.seed!!.toLong()
+    } else 12L
+
+    val config = initializer.config
+    val scale = config.scale!!
+    val mode: Mode = convertMode(config.mode!!)
+    val distribution: Distribution = convertDistribution(config.distribution!!)
+    return if (scale == 2.0 && mode == Mode.FAN_IN) {
+        when (distribution) {
+            Distribution.UNIFORM -> HeUniform(seed)
+            Distribution.TRUNCATED_NORMAL -> {
+                HeNormal(seed)
+            }
+            else -> VarianceScaling(scale, mode, distribution, seed)
+        }
+    } else {
+        when (mode) {
+            Mode.FAN_IN -> {
+                when (distribution) {
+                    Distribution.UNIFORM -> LeCunUniform(seed)
+                    Distribution.TRUNCATED_NORMAL -> {
+                        LeCunNormal(seed)
+                    }
+                    else -> VarianceScaling(scale, mode, distribution, seed)
+                }
+            }
+            Mode.FAN_AVG -> {
+                when (distribution) {
+                    Distribution.UNIFORM -> GlorotUniform(seed)
+                    Distribution.TRUNCATED_NORMAL -> {
+                        GlorotNormal(seed)
+                    }
+                    else -> VarianceScaling(scale, mode, distribution, seed)
+                }
+            }
+            else -> VarianceScaling(scale, mode, distribution, seed)
+        }
+    }
+}
+
+private fun convertDistribution(distribution: String): Distribution {
+    return when (distribution) {
+        "truncated_normal" -> Distribution.TRUNCATED_NORMAL
+        "uniform" -> Distribution.UNIFORM
+        "untruncated_normal" -> Distribution.UNTRUNCATED_NORMAL
+        else -> Distribution.TRUNCATED_NORMAL
+    }
+}
+
+private fun convertMode(mode: String): Mode {
+    return when (mode) {
+        "fan_in" -> Mode.FAN_IN
+        "fan_out" -> Mode.FAN_OUT
+        "fan_avg" -> Mode.FAN_AVG
+        else -> Mode.FAN_AVG
+    }
+}
+
+private fun convertToActivation(activation: String): Activations {
+    return when (activation) {
+        ACTIVATION_RELU -> Activations.Relu
+        ACTIVATION_SIGMOID -> Activations.Sigmoid
+        ACTIVATION_SOFTMAX -> Activations.Softmax
+        ACTIVATION_LINEAR -> Activations.Linear
+        ACTIVATION_TANH -> Activations.Tanh
+        ACTIVATION_RELU6 -> Activations.Relu6
+        ACTIVATION_ELU -> Activations.Elu
+        ACTIVATION_SELU -> Activations.Selu
+        ACTIVATION_LOG_SOFTMAX -> Activations.LogSoftmax
+        ACTIVATION_EXP -> Activations.Exponential
+        ACTIVATION_SOFTPLUS -> Activations.SoftPlus
+        ACTIVATION_SOFTSIGN -> Activations.SoftSign
+        ACTIVATION_HARD_SIGMOID -> Activations.HardSigmoid
+        ACTIVATION_SWISH -> Activations.Swish
+        else -> throw IllegalStateException("$activation is not supported yet!")
+    }
+}
+
+/**
+ * The layer creator functions should be placed below this point.
+ */
+
+private fun createGlobalAvgPool2DLayer(name: String): Layer {
     return GlobalAvgPool2D(
         name = name
     )
 }
 
-private fun createGlobalAvgPooling1D(
-    name: String
-): Layer {
+private fun createGlobalAvgPool1DLayer(name: String): Layer {
     return GlobalAvgPool1D(
         name = name
     )
 }
 
-private fun createGlobalAvgPooling3D(
-    name: String
-): Layer {
+private fun createGlobalAvgPool3DLayer(name: String): Layer {
     return GlobalAvgPool3D(
         name = name
     )
 }
 
-private fun createGlobalMaxPool1D(config: LayerConfig, name: String): Layer {
+private fun createGlobalMaxPool1DLayer(config: LayerConfig, name: String): Layer {
     return GlobalMaxPool1D(
         name = name
     )
 }
 
-private fun createAddLayer(
-    name: String
-): Layer {
+private fun createAddLayer(name: String): Layer {
     return Add(
         name = name
     )
 }
 
-private fun createSubtractLayer(
-    name: String
-): Layer {
+private fun createSubtractLayer(name: String): Layer {
     return Subtract(
         name = name
     )
 }
 
-private fun createAverageLayer(
-    name: String
-): Layer {
+private fun createAverageLayer(name: String): Layer {
     return Average(
         name = name
     )
 }
 
-private fun createMaximumLayer(
-    name: String
-): Layer {
+private fun createMaximumLayer(name: String): Layer {
     return Maximum(
         name = name
     )
 }
 
-private fun createMinimumLayer(
-    name: String
-): Layer {
+private fun createMinimumLayer(name: String): Layer {
     return Minimum(
         name = name
     )
 }
 
-private fun createMultiplyLayer(
-    name: String
-): Layer {
+private fun createMultiplyLayer(name: String): Layer {
     return Multiply(
         name = name
     )
 }
 
-private fun createConcatenateLayer(
-    config: LayerConfig,
-    name: String
-): Layer {
+private fun createConcatenateLayer(config: LayerConfig, name: String): Layer {
     return Concatenate(
         axis = config.axis!! as Int,
         name = name
@@ -448,7 +562,7 @@ private fun createSoftmaxLayer(config: LayerConfig, name: String): Layer {
     )
 }
 
-private fun createBatchNorm(config: LayerConfig, name: String): Layer {
+private fun createBatchNormLayer(config: LayerConfig, name: String): Layer {
     return BatchNorm(
         axis = config.axis!! as List<Int>,
         momentum = config.momentum!!,
@@ -465,7 +579,7 @@ private fun createBatchNorm(config: LayerConfig, name: String): Layer {
     )
 }
 
-private fun createDense(config: LayerConfig, name: String): Dense {
+private fun createDenseLayer(config: LayerConfig, name: String): Layer {
     return Dense(
         outputSize = config.units!!,
         activation = convertToActivation(config.activation!!),
@@ -478,151 +592,7 @@ private fun createDense(config: LayerConfig, name: String): Dense {
     )
 }
 
-private fun convertToRegularizer(regularizer: KerasRegularizer?): Regularizer? {
-    return if (regularizer != null) {
-        val l1 = regularizer.config!!.l1
-        val l2 = regularizer.config!!.l2
-        if (l1 != 0.0 && l2 != 0.0) {
-            L2L1(l1!!.toFloat(), l2!!.toFloat())
-        } else if (l1 == 0.0 && l2 != 0.0) {
-            L2(l2!!.toFloat())
-        } else if (l1 != 0.0 && l2 == 0.0) {
-            L1(l1!!.toFloat())
-        } else {
-            null
-        }
-    } else {
-        null
-    }
-}
-
-private fun convertToInitializer(initializer: KerasInitializer): Initializer {
-    val seed = if (initializer.config!!.seed != null) {
-        initializer.config.seed!!.toLong()
-    } else 12L
-
-    return when (initializer.class_name!!) {
-        INITIALIZER_GLOROT_UNIFORM -> GlorotUniform(seed = seed)
-        INITIALIZER_GLOROT_NORMAL -> GlorotNormal(seed = seed)
-        INITIALIZER_HE_NORMAL -> HeNormal(seed = seed)
-        INITIALIZER_HE_UNIFORM -> HeUniform(seed = seed)
-        INITIALIZER_LECUN_NORMAL -> LeCunNormal(seed = seed)
-        INITIALIZER_LECUN_UNIFORM -> LeCunUniform(seed = seed)
-        INITIALIZER_ZEROS -> RandomUniform(
-            seed = seed,
-            minVal = 0.0f,
-            maxVal = 0.0f
-        ) // instead of real initializers, because it doesn't influence on nothing
-        INITIALIZER_CONSTANT -> RandomUniform(
-            seed = seed,
-            minVal = 0.0f,
-            maxVal = 0.0f
-        ) // instead of real initializers, because it doesn't influence on nothing
-        INITIALIZER_ONES -> RandomUniform(
-            seed = seed,
-            minVal = 1.0f,
-            maxVal = 1.0f
-        ) // instead of real initializers, because it doesn't influence on nothing*/
-        INITIALIZER_RANDOM_NORMAL -> RandomNormal(
-            seed = seed,
-            mean = initializer.config.mean!!.toFloat(),
-            stdev = initializer.config.stddev!!.toFloat()
-        )
-        INITIALIZER_RANDOM_UNIFORM -> RandomUniform(
-            seed = seed,
-            minVal = initializer.config.minval!!.toFloat(),
-            maxVal = initializer.config.maxval!!.toFloat()
-        )
-        INITIALIZER_TRUNCATED_NORMAL -> TruncatedNormal(seed = seed)
-        INITIALIZER_VARIANCE_SCALING -> convertVarianceScaling(initializer)
-        INITIALIZER_ORTHOGONAL -> Orthogonal( seed = seed, gain = initializer.config.gain!!.toFloat() )
-        /*INITIALIZER_CONSTANT -> Constant(initializer.config.value!!.toFloat())*/
-        INITIALIZER_IDENTITY -> Identity(initializer.config.gain?.toFloat() ?: 1f)
-        else -> throw IllegalStateException("${initializer.class_name} is not supported yet!")
-    }
-}
-
-private fun convertVarianceScaling(initializer: KerasInitializer): Initializer {
-    val seed = if (initializer.config!!.seed != null) {
-        initializer.config.seed!!.toLong()
-    } else 12L
-
-    val config = initializer.config
-    val scale = config.scale!!
-    val mode: Mode = convertMode(config.mode!!)
-    val distribution: Distribution = convertDistribution(config.distribution!!)
-    return if (scale == 2.0 && mode == Mode.FAN_IN) {
-        when (distribution) {
-            Distribution.UNIFORM -> HeUniform(seed)
-            Distribution.TRUNCATED_NORMAL -> {
-                HeNormal(seed)
-            }
-            else -> VarianceScaling(scale, mode, distribution, seed)
-        }
-    } else {
-        when (mode) {
-            Mode.FAN_IN -> {
-                when (distribution) {
-                    Distribution.UNIFORM -> LeCunUniform(seed)
-                    Distribution.TRUNCATED_NORMAL -> {
-                        LeCunNormal(seed)
-                    }
-                    else -> VarianceScaling(scale, mode, distribution, seed)
-                }
-            }
-            Mode.FAN_AVG -> {
-                when (distribution) {
-                    Distribution.UNIFORM -> GlorotUniform(seed)
-                    Distribution.TRUNCATED_NORMAL -> {
-                        GlorotNormal(seed)
-                    }
-                    else -> VarianceScaling(scale, mode, distribution, seed)
-                }
-            }
-            else -> VarianceScaling(scale, mode, distribution, seed)
-        }
-    }
-}
-
-private fun convertDistribution(distribution: String): Distribution {
-    return when (distribution) {
-        "truncated_normal" -> Distribution.TRUNCATED_NORMAL
-        "uniform" -> Distribution.UNIFORM
-        "untruncated_normal" -> Distribution.UNTRUNCATED_NORMAL
-        else -> Distribution.TRUNCATED_NORMAL
-    }
-}
-
-private fun convertMode(mode: String): Mode {
-    return when (mode) {
-        "fan_in" -> Mode.FAN_IN
-        "fan_out" -> Mode.FAN_OUT
-        "fan_avg" -> Mode.FAN_AVG
-        else -> Mode.FAN_AVG
-    }
-}
-
-private fun convertToActivation(activation: String): Activations {
-    return when (activation) {
-        ACTIVATION_RELU -> Activations.Relu
-        ACTIVATION_SIGMOID -> Activations.Sigmoid
-        ACTIVATION_SOFTMAX -> Activations.Softmax
-        ACTIVATION_LINEAR -> Activations.Linear
-        ACTIVATION_TANH -> Activations.Tanh
-        ACTIVATION_RELU6 -> Activations.Relu6
-        ACTIVATION_ELU -> Activations.Elu
-        ACTIVATION_SELU -> Activations.Selu
-        ACTIVATION_LOG_SOFTMAX -> Activations.LogSoftmax
-        ACTIVATION_EXP -> Activations.Exponential
-        ACTIVATION_SOFTPLUS -> Activations.SoftPlus
-        ACTIVATION_SOFTSIGN -> Activations.SoftSign
-        ACTIVATION_HARD_SIGMOID -> Activations.HardSigmoid
-        ACTIVATION_SWISH -> Activations.Swish
-        else -> throw IllegalStateException("$activation is not supported yet!")
-    }
-}
-
-private fun createMaxPool1D(config: LayerConfig, name: String): Layer {
+private fun createMaxPool1DLayer(config: LayerConfig, name: String): Layer {
     val poolSize = config.pool_size!!
     val addedOnesPoolSize = longArrayOf(1, poolSize[0].toLong(), 1)
     val strides = config.strides!!
@@ -635,7 +605,7 @@ private fun createMaxPool1D(config: LayerConfig, name: String): Layer {
     )
 }
 
-private fun createMaxPooling2D(config: LayerConfig, name: String): MaxPool2D {
+private fun createMaxPool2DLayer(config: LayerConfig, name: String): Layer {
     val poolSize = config.pool_size!!.toIntArray()
     val addedOnesPoolSize = IntArray(4)
     addedOnesPoolSize[0] = 1
@@ -650,10 +620,15 @@ private fun createMaxPooling2D(config: LayerConfig, name: String): MaxPool2D {
     addedOnesStrides[2] = strides[1]
     addedOnesStrides[3] = 1
 
-    return MaxPool2D(addedOnesPoolSize, addedOnesStrides, padding = convertPadding(config.padding!!), name = name)
+    return MaxPool2D(
+        poolSize = addedOnesPoolSize,
+        strides = addedOnesStrides,
+        padding = convertPadding(config.padding!!),
+        name = name
+    )
 }
 
-private fun createAvgPool1D(config: LayerConfig, name: String): Layer {
+private fun createAvgPool1DLayer(config: LayerConfig, name: String): Layer {
     val poolSize = config.pool_size!!
     val addedOnesPoolSize = longArrayOf(1, poolSize[0].toLong(), 1)
     val strides = config.strides!!
@@ -666,7 +641,7 @@ private fun createAvgPool1D(config: LayerConfig, name: String): Layer {
     )
 }
 
-private fun createAvgPooling2D(config: LayerConfig, name: String): AvgPool2D {
+private fun createAvgPool2DLayer(config: LayerConfig, name: String): Layer {
     val poolSize = config.pool_size!!.toIntArray()
     val addedOnesPoolSize = IntArray(4)
     addedOnesPoolSize[0] = 1
@@ -681,7 +656,12 @@ private fun createAvgPooling2D(config: LayerConfig, name: String): AvgPool2D {
     addedOnesStrides[2] = strides[1]
     addedOnesStrides[3] = 1
 
-    return AvgPool2D(addedOnesPoolSize, addedOnesStrides, padding = convertPadding(config.padding!!), name = name)
+    return AvgPool2D(
+        poolSize = addedOnesPoolSize,
+        strides = addedOnesStrides,
+        padding = convertPadding(config.padding!!),
+        name = name
+    )
 }
 
 private fun createAvgPool3DLayer(config: LayerConfig, name: String): Layer {
@@ -697,7 +677,7 @@ private fun createAvgPool3DLayer(config: LayerConfig, name: String): Layer {
     )
 }
 
-private fun createMaxPooling3D(config: LayerConfig, name: String): MaxPool3D {
+private fun createMaxPool3DLayer(config: LayerConfig, name: String): Layer {
     val poolSize = config.pool_size!!.toIntArray()
     val addedOnesPoolSize = IntArray(5)
     addedOnesPoolSize[0] = 1
@@ -714,7 +694,12 @@ private fun createMaxPooling3D(config: LayerConfig, name: String): MaxPool3D {
     addedOnesStrides[3] = strides[2]
     addedOnesStrides[4] = 1
 
-    return MaxPool3D(addedOnesPoolSize, addedOnesStrides, padding = convertPadding(config.padding!!), name = name)
+    return MaxPool3D(
+        poolSize = addedOnesPoolSize,
+        strides = addedOnesStrides,
+        padding = convertPadding(config.padding!!),
+        name = name
+    )
 }
 
 private fun convertPadding(padding: KerasPadding): ConvPadding {
@@ -726,15 +711,15 @@ private fun convertPadding(padding: KerasPadding): ConvPadding {
     }
 }
 
-private fun createFlatten(name: String): Flatten {
+private fun createFlattenLayer(name: String): Layer {
     return Flatten(name = name)
 }
 
-private fun createReshape(config: LayerConfig, name: String): Reshape {
+private fun createReshapeLayer(config: LayerConfig, name: String): Layer {
     return Reshape(name = name, targetShape = config.target_shape!!)
 }
 
-private fun createConv1D(config: LayerConfig, name: String): Conv1D {
+private fun createConv1DLayer(config: LayerConfig, name: String): Layer {
     val kernelSize = config.kernel_size!!.map { it.toLong() }[0]
     val strides = config.strides!!.map { it.toLong() }.toLongArray()
 
@@ -766,7 +751,7 @@ private fun createConv1D(config: LayerConfig, name: String): Conv1D {
     )
 }
 
-private fun createConv2D(config: LayerConfig, name: String): Conv2D {
+private fun createConv2DLayer(config: LayerConfig, name: String): Layer {
     val kernelSize = config.kernel_size!!.map { it.toLong() }.toLongArray()
     val strides = config.strides!!.map { it.toLong() }.toLongArray()
 
@@ -800,7 +785,7 @@ private fun createConv2D(config: LayerConfig, name: String): Conv2D {
     )
 }
 
-private fun createConv3D(config: LayerConfig, name: String): Conv3D {
+private fun createConv3DLayer(config: LayerConfig, name: String): Layer {
     val kernelSize = config.kernel_size!!.map { it.toLong() }.toLongArray()
     val strides = config.strides!!.map { it.toLong() }.toLongArray()
 
@@ -836,10 +821,7 @@ private fun createConv3D(config: LayerConfig, name: String): Conv3D {
     )
 }
 
-private fun createDepthwiseConv2D(
-    config: LayerConfig,
-    name: String
-): DepthwiseConv2D {
+private fun createDepthwiseConv2DLayer(config: LayerConfig, name: String): Layer {
     val kernelSize = config.kernel_size!!.map { it.toLong() }.toLongArray()
     val strides = config.strides!!.map { it.toLong() }.toLongArray()
 
@@ -873,10 +855,7 @@ private fun createDepthwiseConv2D(
     )
 }
 
-private fun createSeparableConv2D(
-    config: LayerConfig,
-    name: String
-): SeparableConv2D {
+private fun createSeparableConv2DLayer(config: LayerConfig, name: String): Layer {
     val kernelSize = config.kernel_size!!.map { it.toLong() }.toLongArray()
     val strides = config.strides!!.map { it.toLong() }.toLongArray()
 
@@ -913,25 +892,19 @@ private fun createSeparableConv2D(
     )
 }
 
-private fun createZeroPadding2D(
-    config: LayerConfig,
-    name: String
-): ZeroPadding2D {
+private fun createZeroPadding2DLayer(config: LayerConfig, name: String): Layer {
     assert(config.padding is KerasPadding.ZeroPadding2D)
     return ZeroPadding2D(
-        (config.padding as KerasPadding.ZeroPadding2D).padding,
-        config.data_format,
-        name
+        padding = (config.padding as KerasPadding.ZeroPadding2D).padding,
+        dataFormat = config.data_format,
+        name = name
     )
 }
 
-private fun createCropping2D(
-    config: LayerConfig,
-    name: String
-): Cropping2D {
+private fun createCropping2DLayer(config: LayerConfig, name: String): Layer {
     val cropping = config.cropping!!.map { it.toIntArray() }.toTypedArray()
     return Cropping2D(
-        cropping,
-        name
+        cropping = cropping,
+        name = name
     )
 }
diff --git a/api/src/main/kotlin/org/jetbrains/kotlinx/dl/api/inference/keras/ModelSaver.kt b/api/src/main/kotlin/org/jetbrains/kotlinx/dl/api/inference/keras/ModelSaver.kt
index 0d79e941f..d7f4e937d 100644
--- a/api/src/main/kotlin/org/jetbrains/kotlinx/dl/api/inference/keras/ModelSaver.kt
+++ b/api/src/main/kotlin/org/jetbrains/kotlinx/dl/api/inference/keras/ModelSaver.kt
@@ -72,38 +72,49 @@ internal fun GraphTrainableModel.serializeModel(isKerasFullyCompatible: Boolean)
 
 private fun convertToKerasLayer(layer: Layer, isKerasFullyCompatible: Boolean, isFunctional: Boolean): KerasLayer {
     val kerasLayer = when (layer) {
-        is Conv1D -> createKerasConv1D(layer, isKerasFullyCompatible)
-        is Conv2D -> createKerasConv2D(layer, isKerasFullyCompatible)
-        is Conv3D -> createKerasConv3D(layer, isKerasFullyCompatible)
-        is Flatten -> createKerasFlatten(layer)
-        is MaxPool1D -> createKerasMaxPool1D(layer)
-        is MaxPool2D -> createKerasMaxPooling2D(layer)
-        is MaxPool3D -> createKerasMaxPooling3D(layer)
-        is AvgPool1D -> createKerasAvgPool1D(layer)
-        is AvgPool2D -> createKerasAvgPooling2D(layer)
-        is AvgPool3D -> createKerasAvgPool3DLayer(layer)
-        is Dense -> createKerasDense(layer, isKerasFullyCompatible)
-        is ZeroPadding2D -> createKerasZeroPadding2D(layer)
-        is Input -> createKerasInput(layer)
-        is BatchNorm -> createKerasBatchNorm(layer, isKerasFullyCompatible)
+        // Core layers
+        is Input -> createKerasInputLayer(layer)
+        is Dense -> createKerasDenseLayer(layer, isKerasFullyCompatible)
         is ActivationLayer -> createKerasActivationLayer(layer)
-        is PReLU -> createKerasPReLULayer(layer, isKerasFullyCompatible)
-        is LeakyReLU -> createKerasLeakyReLU(layer)
-        is ThresholdedReLU -> createKerasThresholdedReLULayer(layer)
+        // Convolution layers
+        is Conv1D -> createKerasConv1DLayer(layer, isKerasFullyCompatible)
+        is Conv2D -> createKerasConv2DLayer(layer, isKerasFullyCompatible)
+        is Conv3D -> createKerasConv3DLayer(layer, isKerasFullyCompatible)
+        is DepthwiseConv2D -> createKerasDepthwiseConv2DLayer(layer, isKerasFullyCompatible)
+        is SeparableConv2D -> createKerasSeparableConv2DLayer(layer, isKerasFullyCompatible)
+        // Pooling layers
+        is MaxPool1D -> createKerasMaxPool1DLayer(layer)
+        is MaxPool2D -> createKerasMaxPool2DLayer(layer)
+        is MaxPool3D -> createKerasMaxPool3DLayer(layer)
+        is AvgPool1D -> createKerasAvgPool1DLayer(layer)
+        is AvgPool2D -> createKerasAvgPool2DLayer(layer)
+        is AvgPool3D -> createKerasAvgPool3DLayer(layer)
+        is GlobalMaxPool1D -> createKerasGlobalMaxPool1DLayer(layer)
+        is GlobalAvgPool1D -> createKerasGlobalAvgPool1DLayer(layer)
+        is GlobalAvgPool2D -> createKerasGlobalAvgPool2DLayer(layer)
+        is GlobalAvgPool3D -> createKerasGlobalAvgPool3DLayer(layer)
+        // Recurrent layers (e.g. LSTM)
+        // Normalization layers
+        is BatchNorm -> createKerasBatchNormLayer(layer, isKerasFullyCompatible)
+        // Regularization layers (e.g. Dropout)
+        // Attention layers
+        // Reshaping layers
+        is Flatten -> createKerasFlattenLayer(layer)
+        is ZeroPadding2D -> createKerasZeroPadding2DLayer(layer)
+        // Merging layers
         is Add -> createKerasAddLayer(layer)
-        is Maximum -> createKerasMaximumLayer(layer as Maximum)
-        is Minimum -> createKerasMinimumLayer(layer as Minimum)
-        is Subtract -> createKerasSubtractLayer(layer as Subtract)
-        is Multiply -> createKerasMultiplyLayer(layer as Multiply)
-        is Average -> createKerasAverageLayer(layer as Average)
-        is GlobalMaxPool1D -> createKerasGlobalMaxPool1D(layer)
-        is GlobalAvgPool2D -> createKerasGlobalAveragePooling2DLayer(layer)
-        is GlobalAvgPool3D -> createKerasGlobalAveragePooling3DLayer(layer)
-        is DepthwiseConv2D -> createKerasDepthwiseConv2D(layer, isKerasFullyCompatible)
-        is SeparableConv2D -> createSeparableConv2D(layer, isKerasFullyCompatible)
-        is Concatenate -> createKerasConcatenate(layer)
-        is GlobalAvgPool1D -> createKerasGlobalAveragePooling1DLayer(layer)
+        is Maximum -> createKerasMaximumLayer(layer)
+        is Minimum -> createKerasMinimumLayer(layer)
+        is Subtract -> createKerasSubtractLayer(layer)
+        is Multiply -> createKerasMultiplyLayer(layer)
+        is Average -> createKerasAverageLayer(layer)
+        is Concatenate -> createKerasConcatenateLayer(layer)
+        // Locally-connected layers
+        // Activation layers
         is Softmax -> createKerasSoftmaxLayer(layer)
+        is PReLU -> createKerasPReLULayer(layer, isKerasFullyCompatible)
+        is LeakyReLU -> createKerasLeakyReLULayer(layer)
+        is ThresholdedReLU -> createKerasThresholdedReLULayer(layer)
         else -> throw IllegalStateException("${layer.name} with type ${layer::class.simpleName} is not supported yet!")
     }
 
@@ -133,23 +144,146 @@ private fun convertToKerasLayer(layer: Layer, isKerasFullyCompatible: Boolean, i
     return kerasLayer
 }
 
-private fun createKerasGlobalAveragePooling2DLayer(layer: GlobalAvgPool2D): KerasLayer {
+private fun convertToKerasRegularizer(regularizer: Regularizer?): KerasRegularizer? {
+    return if (regularizer != null) {
+        val className = "L1L2"
+        regularizer as L2L1
+        val config = KerasRegularizerConfig(l1 = regularizer.l1.toDouble(), l2 = regularizer.l2.toDouble())
+        KerasRegularizer(class_name = className, config = config)
+    } else {
+        null
+    }
+}
+
+private fun convertToKerasInitializer(initializer: Initializer, isKerasFullyCompatible: Boolean): KerasInitializer? {
+    val className: String
+    val config: KerasInitializerConfig
+    if (isKerasFullyCompatible) {
+        val (_className, _config) = when (initializer) {
+            is GlorotUniform -> convertToVarianceScalingInitializer(initializer as VarianceScaling)
+            is GlorotNormal -> convertToVarianceScalingInitializer(initializer as VarianceScaling)
+            is HeNormal -> convertToVarianceScalingInitializer(initializer as VarianceScaling)
+            is HeUniform -> convertToVarianceScalingInitializer(initializer as VarianceScaling)
+            is LeCunNormal -> convertToVarianceScalingInitializer(initializer as VarianceScaling)
+            is LeCunUniform -> convertToVarianceScalingInitializer(initializer as VarianceScaling)
+            is RandomUniform -> convertToRandomUniformInitializer(initializer)
+            is Identity -> convertToIdentityInitializer(initializer)
+            else -> throw IllegalStateException("${initializer::class.simpleName} is not supported yet!")
+        }
+
+        className = _className
+        config = _config
+    } else {
+        className = when (initializer) {
+            is GlorotUniform -> INITIALIZER_GLOROT_UNIFORM
+            is GlorotNormal -> INITIALIZER_GLOROT_NORMAL
+            is HeNormal -> INITIALIZER_HE_NORMAL
+            is HeUniform -> INITIALIZER_HE_UNIFORM
+            is LeCunNormal -> INITIALIZER_LECUN_NORMAL
+            is LeCunUniform -> INITIALIZER_LECUN_UNIFORM
+            is Identity -> INITIALIZER_IDENTITY
+            else -> throw IllegalStateException("${initializer::class.simpleName} is not supported yet!")
+        }
+        config = KerasInitializerConfig(seed = 12)
+    }
+
+    return KerasInitializer(class_name = className, config = config)
+}
+
+private fun convertToRandomUniformInitializer(initializer: RandomUniform): Pair<String, KerasInitializerConfig> {
+    return Pair(
+        INITIALIZER_RANDOM_UNIFORM, KerasInitializerConfig(
+            minval = initializer.minVal.toDouble(),
+            maxval = initializer.maxVal.toDouble(),
+            seed = initializer.seed.toInt()
+        )
+    )
+}
+
+private fun convertToVarianceScalingInitializer(initializer: VarianceScaling): Pair<String, KerasInitializerConfig> {
+    return Pair(
+        INITIALIZER_VARIANCE_SCALING, KerasInitializerConfig(
+            seed = initializer.seed.toInt(),
+            scale = initializer.scale,
+            mode = convertMode(initializer.mode),
+            distribution = convertDistribution(initializer.distribution)
+        )
+    )
+}
+
+private fun convertToIdentityInitializer(initializer: Identity): Pair<String, KerasInitializerConfig> {
+    return Pair(
+        INITIALIZER_IDENTITY,
+        KerasInitializerConfig(
+            gain = initializer.gain.toDouble()
+        )
+    )
+}
+
+private fun convertDistribution(distribution: Distribution): String {
+    return when (distribution) {
+        Distribution.TRUNCATED_NORMAL -> "truncated_normal"
+        Distribution.UNIFORM -> "uniform"
+        Distribution.UNTRUNCATED_NORMAL -> "untruncated_normal"
+    }
+}
+
+private fun convertMode(mode: Mode): String {
+    return when (mode) {
+        Mode.FAN_IN -> "fan_in"
+        Mode.FAN_OUT -> "fan_out"
+        Mode.FAN_AVG -> "fan_avg"
+    }
+}
+
+private fun convertToKerasPadding(padding: ConvPadding): KerasPadding {
+    return when (padding) {
+        ConvPadding.SAME -> KerasPadding.Same
+        ConvPadding.VALID -> KerasPadding.Valid
+        ConvPadding.FULL -> KerasPadding.Full
+    }
+}
+
+private fun convertToKerasActivation(activation: Activations): String? {
+    return when (activation) {
+        Activations.Relu -> ACTIVATION_RELU
+        Activations.Sigmoid -> ACTIVATION_SIGMOID
+        Activations.Softmax -> ACTIVATION_SOFTMAX
+        Activations.Linear -> ACTIVATION_LINEAR
+        Activations.Tanh -> ACTIVATION_TANH
+        Activations.Relu6 -> ACTIVATION_RELU6
+        Activations.Elu -> ACTIVATION_ELU
+        Activations.Selu -> ACTIVATION_SELU
+        Activations.LogSoftmax -> ACTIVATION_LOG_SOFTMAX
+        Activations.Exponential -> ACTIVATION_EXP
+        Activations.SoftPlus -> ACTIVATION_SOFTPLUS
+        Activations.SoftSign -> ACTIVATION_SOFTSIGN
+        Activations.HardSigmoid -> ACTIVATION_HARD_SIGMOID
+        Activations.Swish -> ACTIVATION_SWISH
+    }
+}
+
+/**
+ * The layer creator functions for Keras should be placed below this point.
+ */
+
+private fun createKerasGlobalAvgPool2DLayer(layer: GlobalAvgPool2D): KerasLayer {
     val configX = LayerConfig(
         dtype = DATATYPE_FLOAT32,
         name = layer.name
     )
-    return KerasLayer(class_name = LAYER_GLOBAL_AVG_POOLING_2D, config = configX)
+    return KerasLayer(class_name = LAYER_GLOBAL_AVG_POOL_2D, config = configX)
 }
 
-private fun createKerasGlobalAveragePooling1DLayer(layer: GlobalAvgPool1D): KerasLayer {
+private fun createKerasGlobalAvgPool1DLayer(layer: GlobalAvgPool1D): KerasLayer {
     val configX = LayerConfig(
         dtype = DATATYPE_FLOAT32,
         name = layer.name
     )
-    return KerasLayer(class_name = LAYER_GLOBAL_AVG_POOLING_1D, config = configX)
+    return KerasLayer(class_name = LAYER_GLOBAL_AVG_POOL_1D, config = configX)
 }
 
-private fun createKerasGlobalMaxPool1D(layer: GlobalMaxPool1D): KerasLayer {
+private fun createKerasGlobalMaxPool1DLayer(layer: GlobalMaxPool1D): KerasLayer {
     val configX = LayerConfig(
         dtype = DATATYPE_FLOAT32,
         name = layer.name
@@ -157,12 +291,12 @@ private fun createKerasGlobalMaxPool1D(layer: GlobalMaxPool1D): KerasLayer {
     return KerasLayer(class_name = LAYER_GLOBAL_MAX_POOL_1D, config = configX)
 }
 
-private fun createKerasGlobalAveragePooling3DLayer(layer: GlobalAvgPool3D): KerasLayer {
+private fun createKerasGlobalAvgPool3DLayer(layer: GlobalAvgPool3D): KerasLayer {
     val configX = LayerConfig(
         dtype = DATATYPE_FLOAT32,
         name = layer.name
     )
-    return KerasLayer(class_name = LAYER_GLOBAL_AVG_POOLING_3D, config = configX)
+    return KerasLayer(class_name = LAYER_GLOBAL_AVG_POOL_3D, config = configX)
 }
 
 private fun createKerasAddLayer(layer: Add): KerasLayer {
@@ -242,7 +376,7 @@ private fun createKerasSoftmaxLayer(layer: Softmax): KerasLayer {
     return KerasLayer(class_name = LAYER_SOFTMAX, config = configX)
 }
 
-private fun createKerasLeakyReLU(layer: LeakyReLU): KerasLayer {
+private fun createKerasLeakyReLULayer(layer: LeakyReLU): KerasLayer {
     val configX = LayerConfig(
         dtype = DATATYPE_FLOAT32,
         alpha = layer.alpha.toDouble(),
@@ -260,7 +394,7 @@ private fun createKerasThresholdedReLULayer(layer: ThresholdedReLU): KerasLayer
     return KerasLayer(class_name = LAYER_THRESHOLDED_RELU, config = configX)
 }
 
-private fun createKerasBatchNorm(layer: BatchNorm, isKerasFullyCompatible: Boolean): KerasLayer {
+private fun createKerasBatchNormLayer(layer: BatchNorm, isKerasFullyCompatible: Boolean): KerasLayer {
     val configX = LayerConfig(
         dtype = DATATYPE_FLOAT32,
         name = layer.name,
@@ -283,7 +417,7 @@ private fun createKerasBatchNorm(layer: BatchNorm, isKerasFullyCompatible: Boole
     return KerasLayer(class_name = LAYER_BATCH_NORM, config = configX)
 }
 
-private fun createKerasInput(layer: Input): KerasLayer {
+private fun createKerasInputLayer(layer: Input): KerasLayer {
     val shape = mutableListOf<Int?>()
     shape.add(null)
     layer.packedDims.map { it.toInt() }.forEach { shape.add(it) }
@@ -297,7 +431,7 @@ private fun createKerasInput(layer: Input): KerasLayer {
     return KerasLayer(class_name = LAYER_INPUT, config = config)
 }
 
-private fun createKerasDense(layer: Dense, isKerasFullyCompatible: Boolean): KerasLayer {
+private fun createKerasDenseLayer(layer: Dense, isKerasFullyCompatible: Boolean): KerasLayer {
     val configX = LayerConfig(
         dtype = DATATYPE_FLOAT32,
         units = layer.outputSize,
@@ -313,186 +447,67 @@ private fun createKerasDense(layer: Dense, isKerasFullyCompatible: Boolean): Ker
     return KerasLayer(class_name = LAYER_DENSE, config = configX)
 }
 
-private fun convertToKerasRegularizer(regularizer: Regularizer?): KerasRegularizer? {
-    return if (regularizer != null) {
-        val className = "L1L2"
-        regularizer as L2L1
-        val config = KerasRegularizerConfig(l1 = regularizer.l1.toDouble(), l2 = regularizer.l2.toDouble())
-        KerasRegularizer(class_name = className, config = config)
-    } else {
-        null
-    }
-}
-
-private fun convertToKerasInitializer(initializer: Initializer, isKerasFullyCompatible: Boolean): KerasInitializer? {
-    val className: String
-    val config: KerasInitializerConfig
-    if (isKerasFullyCompatible) {
-        val (_className, _config) = when (initializer) {
-            is GlorotUniform -> convertToVarianceScaling(initializer as VarianceScaling)
-            is GlorotNormal -> convertToVarianceScaling(initializer as VarianceScaling)
-            is HeNormal -> convertToVarianceScaling(initializer as VarianceScaling)
-            is HeUniform -> convertToVarianceScaling(initializer as VarianceScaling)
-            is LeCunNormal -> convertToVarianceScaling(initializer as VarianceScaling)
-            is LeCunUniform -> convertToVarianceScaling(initializer as VarianceScaling)
-            is RandomUniform -> convertToRandomUniform(initializer)
-            is Identity -> convertToIdentity(initializer)
-            else -> throw IllegalStateException("${initializer::class.simpleName} is not supported yet!")
-        }
-
-        className = _className
-        config = _config
-    } else {
-        className = when (initializer) {
-            is GlorotUniform -> INITIALIZER_GLOROT_UNIFORM
-            is GlorotNormal -> INITIALIZER_GLOROT_NORMAL
-            is HeNormal -> INITIALIZER_HE_NORMAL
-            is HeUniform -> INITIALIZER_HE_UNIFORM
-            is LeCunNormal -> INITIALIZER_LECUN_NORMAL
-            is LeCunUniform -> INITIALIZER_LECUN_UNIFORM
-            is Identity -> INITIALIZER_IDENTITY
-            else -> throw IllegalStateException("${initializer::class.simpleName} is not supported yet!")
-        }
-        config = KerasInitializerConfig(seed = 12)
-    }
-
-    return KerasInitializer(class_name = className, config = config)
-}
-
-private fun convertToRandomUniform(initializer: RandomUniform): Pair<String, KerasInitializerConfig> {
-    return Pair(
-        INITIALIZER_RANDOM_UNIFORM, KerasInitializerConfig(
-            minval = initializer.minVal.toDouble(),
-            maxval = initializer.maxVal.toDouble(),
-            seed = initializer.seed.toInt()
-        )
-    )
-}
-
-private fun convertToVarianceScaling(initializer: VarianceScaling): Pair<String, KerasInitializerConfig> {
-    return Pair(
-        INITIALIZER_VARIANCE_SCALING, KerasInitializerConfig(
-            seed = initializer.seed.toInt(),
-            scale = initializer.scale,
-            mode = convertMode(initializer.mode),
-            distribution = convertDistribution(initializer.distribution)
-        )
-    )
-}
-
-private fun convertToIdentity(initializer: Identity): Pair<String, KerasInitializerConfig> {
-    return Pair(
-        INITIALIZER_IDENTITY,
-        KerasInitializerConfig(
-            gain = initializer.gain.toDouble()
-        )
-    )
-}
-
-private fun convertDistribution(distribution: Distribution): String {
-    return when (distribution) {
-        Distribution.TRUNCATED_NORMAL -> "truncated_normal"
-        Distribution.UNIFORM -> "uniform"
-        Distribution.UNTRUNCATED_NORMAL -> "untruncated_normal"
-    }
-}
-
-private fun convertMode(mode: Mode): String {
-    return when (mode) {
-        Mode.FAN_IN -> "fan_in"
-        Mode.FAN_OUT -> "fan_out"
-        Mode.FAN_AVG -> "fan_avg"
-    }
-}
-
-private fun convertPadding(padding: ConvPadding): KerasPadding {
-    return when (padding) {
-        ConvPadding.SAME -> KerasPadding.Same
-        ConvPadding.VALID -> KerasPadding.Valid
-        ConvPadding.FULL -> KerasPadding.Full
-    }
-}
-
-private fun convertToKerasActivation(activation: Activations): String? {
-    return when (activation) {
-        Activations.Relu -> ACTIVATION_RELU
-        Activations.Sigmoid -> ACTIVATION_SIGMOID
-        Activations.Softmax -> ACTIVATION_SOFTMAX
-        Activations.Linear -> ACTIVATION_LINEAR
-        Activations.Tanh -> ACTIVATION_TANH
-        Activations.Relu6 -> ACTIVATION_RELU6
-        Activations.Elu -> ACTIVATION_ELU
-        Activations.Selu -> ACTIVATION_SELU
-        Activations.LogSoftmax -> ACTIVATION_LOG_SOFTMAX
-        Activations.Exponential -> ACTIVATION_EXP
-        Activations.SoftPlus -> ACTIVATION_SOFTPLUS
-        Activations.SoftSign -> ACTIVATION_SOFTSIGN
-        Activations.HardSigmoid -> ACTIVATION_HARD_SIGMOID
-        Activations.Swish -> ACTIVATION_SWISH
-    }
-}
-
-private fun createKerasMaxPool1D(layer: MaxPool1D): KerasLayer {
+private fun createKerasMaxPool1DLayer(layer: MaxPool1D): KerasLayer {
     val configX = LayerConfig(
         dtype = DATATYPE_FLOAT32,
         pool_size = listOf(layer.poolSize[1].toInt()),
         strides = listOf(layer.strides[1].toInt()),
-        padding = convertPadding(layer.padding),
+        padding = convertToKerasPadding(layer.padding),
         name = layer.name
     )
     return KerasLayer(class_name = LAYER_MAX_POOL_1D, config = configX)
 }
 
-private fun createKerasMaxPooling2D(layer: MaxPool2D): KerasLayer {
+private fun createKerasMaxPool2DLayer(layer: MaxPool2D): KerasLayer {
     val poolSize = mutableListOf(layer.poolSize[1], layer.poolSize[2])
     val strides = mutableListOf(layer.strides[1], layer.strides[2])
     val configX = LayerConfig(
         data_format = CHANNELS_LAST,
         dtype = DATATYPE_FLOAT32,
         name = layer.name,
-        padding = convertPadding(layer.padding),
+        padding = convertToKerasPadding(layer.padding),
         pool_size = poolSize,
         strides = strides
     )
-    return KerasLayer(class_name = LAYER_MAX_POOLING_2D, config = configX)
+    return KerasLayer(class_name = LAYER_MAX_POOL_2D, config = configX)
 }
 
-private fun createKerasAvgPool1D(layer: AvgPool1D): KerasLayer {
+private fun createKerasAvgPool1DLayer(layer: AvgPool1D): KerasLayer {
     val configX = LayerConfig(
         dtype = DATATYPE_FLOAT32,
         pool_size = listOf(layer.poolSize[1].toInt()),
         strides = listOf(layer.strides[1].toInt()),
-        padding = convertPadding(layer.padding),
+        padding = convertToKerasPadding(layer.padding),
         name = layer.name
     )
     return KerasLayer(class_name = LAYER_AVG_POOL_1D, config = configX)
 }
 
-private fun createKerasMaxPooling3D(layer: MaxPool3D): KerasLayer {
+private fun createKerasMaxPool3DLayer(layer: MaxPool3D): KerasLayer {
     val poolSize = mutableListOf(layer.poolSize[1], layer.poolSize[3])
     val strides = mutableListOf(layer.strides[1] , layer.strides[3])
     val configX = LayerConfig(
         dtype = DATATYPE_FLOAT32,
         name = layer.name,
-        padding = convertPadding(layer.padding),
+        padding = convertToKerasPadding(layer.padding),
         pool_size = poolSize,
         strides = strides
     )
-    return KerasLayer(class_name = LAYER_MAX_POOLING_3D, config = configX)
+    return KerasLayer(class_name = LAYER_MAX_POOL_3D, config = configX)
 }
 
-private fun createKerasAvgPooling2D(layer: AvgPool2D): KerasLayer {
+private fun createKerasAvgPool2DLayer(layer: AvgPool2D): KerasLayer {
     val poolSize = mutableListOf(layer.poolSize[1], layer.poolSize[2])
     val strides = mutableListOf(layer.strides[1], layer.strides[2])
     val configX = LayerConfig(
         data_format = CHANNELS_LAST,
         dtype = DATATYPE_FLOAT32,
         name = layer.name,
-        padding = convertPadding(layer.padding),
+        padding = convertToKerasPadding(layer.padding),
         pool_size = poolSize,
         strides = strides
     )
-    return KerasLayer(class_name = LAYER_AVG_POOLING_2D, config = configX)
+    return KerasLayer(class_name = LAYER_AVG_POOL_2D, config = configX)
 }
 
 private fun createKerasAvgPool3DLayer(layer: AvgPool3D): KerasLayer {
@@ -500,13 +515,13 @@ private fun createKerasAvgPool3DLayer(layer: AvgPool3D): KerasLayer {
         dtype = DATATYPE_FLOAT32,
         pool_size = layer.poolSize.slice(1..3).map { it.toInt() },
         strides = layer.strides.slice(1..3).map { it.toInt() },
-        padding = convertPadding(layer.padding),
+        padding = convertToKerasPadding(layer.padding),
         name = layer.name
     )
     return KerasLayer(class_name = LAYER_AVG_POOL_3D, config = configX)
 }
 
-private fun createKerasFlatten(layer: Flatten): KerasLayer {
+private fun createKerasFlattenLayer(layer: Flatten): KerasLayer {
     val configX = LayerConfig(
         data_format = CHANNELS_LAST,
         dtype = DATATYPE_FLOAT32,
@@ -515,7 +530,7 @@ private fun createKerasFlatten(layer: Flatten): KerasLayer {
     return KerasLayer(class_name = LAYER_FLATTEN, config = configX)
 }
 
-private fun createKerasConcatenate(layer: Concatenate): KerasLayer {
+private fun createKerasConcatenateLayer(layer: Concatenate): KerasLayer {
     val configX = LayerConfig(
         dtype = DATATYPE_FLOAT32,
         axis = layer.axis,
@@ -524,7 +539,7 @@ private fun createKerasConcatenate(layer: Concatenate): KerasLayer {
     return KerasLayer(class_name = LAYER_CONCATENATE, config = configX)
 }
 
-private fun createKerasConv1D(layer: Conv1D, isKerasFullyCompatible: Boolean): KerasLayer {
+private fun createKerasConv1DLayer(layer: Conv1D, isKerasFullyCompatible: Boolean): KerasLayer {
     val configX = LayerConfig(
         filters = layer.filters.toInt(),
         kernel_size = listOf(layer.kernelSize.toInt()),
@@ -536,14 +551,14 @@ private fun createKerasConv1D(layer: Conv1D, isKerasFullyCompatible: Boolean): K
         kernel_regularizer = convertToKerasRegularizer(layer.kernelRegularizer),
         bias_regularizer = convertToKerasRegularizer(layer.biasRegularizer),
         activity_regularizer = convertToKerasRegularizer(layer.activityRegularizer),
-        padding = convertPadding(layer.padding),
+        padding = convertToKerasPadding(layer.padding),
         name = layer.name,
         use_bias = layer.useBias
     )
     return KerasLayer(class_name = LAYER_CONV1D, config = configX)
 }
 
-private fun createKerasConv2D(layer: Conv2D, isKerasFullyCompatible: Boolean): KerasLayer {
+private fun createKerasConv2DLayer(layer: Conv2D, isKerasFullyCompatible: Boolean): KerasLayer {
     val kernelSize = layer.kernelSize.map { it.toInt() }.toList()
     val configX = LayerConfig(
         filters = layer.filters.toInt(),
@@ -556,14 +571,14 @@ private fun createKerasConv2D(layer: Conv2D, isKerasFullyCompatible: Boolean): K
         kernel_regularizer = convertToKerasRegularizer(layer.kernelRegularizer),
         bias_regularizer = convertToKerasRegularizer(layer.biasRegularizer),
         activity_regularizer = convertToKerasRegularizer(layer.activityRegularizer),
-        padding = convertPadding(layer.padding),
+        padding = convertToKerasPadding(layer.padding),
         name = layer.name,
         use_bias = layer.useBias
     )
     return KerasLayer(class_name = LAYER_CONV2D, config = configX)
 }
 
-private fun createKerasConv3D(layer: Conv3D, isKerasFullyCompatible: Boolean): KerasLayer {
+private fun createKerasConv3DLayer(layer: Conv3D, isKerasFullyCompatible: Boolean): KerasLayer {
     val kernelSize = layer.kernelSize.map { it.toInt() }.toList()
     val configX = LayerConfig(
         filters = layer.filters.toInt(),
@@ -576,14 +591,14 @@ private fun createKerasConv3D(layer: Conv3D, isKerasFullyCompatible: Boolean): K
         kernel_regularizer = convertToKerasRegularizer(layer.kernelRegularizer),
         bias_regularizer = convertToKerasRegularizer(layer.biasRegularizer),
         activity_regularizer = convertToKerasRegularizer(layer.activityRegularizer),
-        padding = convertPadding(layer.padding),
+        padding = convertToKerasPadding(layer.padding),
         name = layer.name,
         use_bias = layer.useBias
     )
     return KerasLayer(class_name = LAYER_CONV3D, config = configX)
 }
 
-private fun createKerasDepthwiseConv2D(layer: DepthwiseConv2D, isKerasFullyCompatible: Boolean): KerasLayer {
+private fun createKerasDepthwiseConv2DLayer(layer: DepthwiseConv2D, isKerasFullyCompatible: Boolean): KerasLayer {
     val configX = LayerConfig(
         kernel_size = layer.kernelSize.map { it.toInt() },
         strides = listOf(layer.strides[1].toInt(), layer.strides[2].toInt()),
@@ -592,14 +607,14 @@ private fun createKerasDepthwiseConv2D(layer: DepthwiseConv2D, isKerasFullyCompa
         depthwise_initializer = convertToKerasInitializer(layer.depthwiseInitializer, isKerasFullyCompatible),
         depth_multiplier = layer.depthMultiplier,
         bias_initializer = convertToKerasInitializer(layer.biasInitializer, isKerasFullyCompatible),
-        padding = convertPadding(layer.padding),
+        padding = convertToKerasPadding(layer.padding),
         use_bias = layer.useBias,
         name = layer.name
     )
     return KerasLayer(class_name = LAYER_DEPTHWISE_CONV2D, config = configX)
 }
 
-private fun createSeparableConv2D(layer: SeparableConv2D, isKerasFullyCompatible: Boolean): KerasLayer {
+private fun createKerasSeparableConv2DLayer(layer: SeparableConv2D, isKerasFullyCompatible: Boolean): KerasLayer {
     val configX = LayerConfig(
         filters = layer.filters.toInt(),
         kernel_size = layer.kernelSize.map { it.toInt() },
@@ -610,14 +625,14 @@ private fun createSeparableConv2D(layer: SeparableConv2D, isKerasFullyCompatible
         pointwise_initializer = convertToKerasInitializer(layer.pointwiseInitializer, isKerasFullyCompatible),
         depth_multiplier = layer.depthMultiplier,
         bias_initializer = convertToKerasInitializer(layer.biasInitializer, isKerasFullyCompatible),
-        padding = convertPadding(layer.padding),
+        padding = convertToKerasPadding(layer.padding),
         use_bias = layer.useBias,
         name = layer.name
     )
     return KerasLayer(class_name = LAYER_SEPARABLE_CONV2D, config = configX)
 }
 
-private fun createKerasZeroPadding2D(layer: ZeroPadding2D): KerasLayer {
+private fun createKerasZeroPadding2DLayer(layer: ZeroPadding2D): KerasLayer {
     val configX = LayerConfig(
         data_format = CHANNELS_LAST,
         dtype = DATATYPE_FLOAT32,
diff --git a/api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPooling1DTest.kt b/api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPool1DTest.kt
similarity index 87%
rename from api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPooling1DTest.kt
rename to api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPool1DTest.kt
index aa55743a7..259e9aefa 100644
--- a/api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPooling1DTest.kt
+++ b/api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPool1DTest.kt
@@ -3,7 +3,7 @@ package org.jetbrains.kotlinx.dl.api.core.layer
 import org.jetbrains.kotlinx.dl.api.core.layer.pooling.GlobalAvgPool1D
 import org.junit.jupiter.api.Test
 
-internal class GlobalAvgPooling1DTest : PoolLayerTest() {
+internal class GlobalAvgPool1DTest : PoolLayerTest() {
     @Test
     fun globalAvgPool1DTest() {
         val input = Array(2) { Array(3) { FloatArray(4) { 0f } } }
diff --git a/api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPooling2DTest.kt b/api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPool2DTest.kt
similarity index 87%
rename from api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPooling2DTest.kt
rename to api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPool2DTest.kt
index 11a5e4ef2..4a837a274 100644
--- a/api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPooling2DTest.kt
+++ b/api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPool2DTest.kt
@@ -3,7 +3,7 @@ package org.jetbrains.kotlinx.dl.api.core.layer
 import org.jetbrains.kotlinx.dl.api.core.layer.pooling.GlobalAvgPool2D
 import org.junit.jupiter.api.Test
 
-internal class GlobalAvgPooling2DTest : PoolLayerTest() {
+internal class GlobalAvgPool2DTest : PoolLayerTest() {
     @Test
     fun globalAvgPool2DTest() {
         val input = Array(2) { Array(4) { Array(5) { FloatArray(3) { 0f } } } }
diff --git a/api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPooling3DTest.kt b/api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPool3DTest.kt
similarity index 88%
rename from api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPooling3DTest.kt
rename to api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPool3DTest.kt
index dd0c61c08..53d849171 100644
--- a/api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPooling3DTest.kt
+++ b/api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/GlobalAvgPool3DTest.kt
@@ -3,7 +3,7 @@ package org.jetbrains.kotlinx.dl.api.core.layer
 import org.jetbrains.kotlinx.dl.api.core.layer.pooling.GlobalAvgPool3D
 import org.junit.jupiter.api.Test
 
-internal class GlobalAvgPooling3DTest : PoolLayerTest() {
+internal class GlobalAvgPool3DTest : PoolLayerTest() {
     @Test
     fun globalAvgPool3DTest() {
         val input = Array(2) { Array(3) { Array(4) { Array(5) { FloatArray(6) { 0f } } } } }
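The three renamed GlobalAvgPool*DTest classes share one pattern: build a zero-filled
input of the layer's rank, compute the expected (batch, channels) output, and delegate
to the shared PoolLayerTest helper. A minimal sketch, not part of the patch, of the
reference computation such a test asserts against, shown for the 2D case; the 1D and
3D cases average over one and three spatial axes in the same way:

    // Reference global average pooling over (height, width) for an input of
    // shape (batch, height, width, channels); returns shape (batch, channels).
    fun globalAvgPool2DReference(input: Array<Array<Array<FloatArray>>>): Array<FloatArray> {
        return Array(input.size) { b ->
            val h = input[b].size
            val w = input[b][0].size
            val c = input[b][0][0].size
            FloatArray(c) { ch ->
                var sum = 0f
                for (i in 0 until h) for (j in 0 until w) sum += input[b][i][j][ch]
                sum / (h * w)
            }
        }
    }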
diff --git a/api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/PoolLayerTest.kt b/api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/PoolLayerTest.kt
index e07dc743a..2871e30c4 100644
--- a/api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/PoolLayerTest.kt
+++ b/api/src/test/kotlin/org/jetbrains/kotlinx/dl/api/core/layer/PoolLayerTest.kt
@@ -18,7 +18,7 @@ open class PoolLayerTest {
         expected: Array<FloatArray>,
     ) {
         val actual = Array(expected.size) { FloatArray(expected[0].size) { 0f } }
-        assertPoolingLayer(layer, input, expected, actual, ::assertGlobalAvgPoolEquals) { tf, tensor ->
+        assertPoolLayer(layer, input, expected, actual, ::assertGlobalAvgPoolEquals) { tf, tensor ->
             tf.constant(
                 tensor.cast3D<FloatArray>()
             )
@@ -31,7 +31,7 @@ open class PoolLayerTest {
         expected: Array<FloatArray>,
     ) {
         val actual = Array(expected.size) { FloatArray(expected[0].size) { 0f } }
-        assertPoolingLayer(layer, input, expected, actual, ::assertGlobalAvgPoolEquals) { tf, tensor ->
+        assertPoolLayer(layer, input, expected, actual, ::assertGlobalAvgPoolEquals) { tf, tensor ->
             tf.constant(
                 tensor.cast4D<FloatArray>()
             )
@@ -44,14 +44,14 @@ open class PoolLayerTest {
         expected: Array<FloatArray>,
     ) {
         val actual = Array(expected.size) { FloatArray(expected[0].size) { 0f } }
-        assertPoolingLayer(layer, input, expected, actual, ::assertGlobalAvgPoolEquals) { tf, tensor ->
+        assertPoolLayer(layer, input, expected, actual, ::assertGlobalAvgPoolEquals) { tf, tensor ->
             tf.constant(
                 tensor.cast5D<FloatArray>()
             )
         }
     }
 
-    private fun assertPoolingLayer(
+    private fun assertPoolLayer(
         layer: Layer,
         input: Array<*>,
         expected: Array<*>,