activ.py
import numpy as np
import keras as ks
# import keras_core.saving


@ks.saving.register_keras_serializable(package='kgcnn', name='LeakySoftplus')
class LeakySoftplus(ks.layers.Layer):
    r"""Leaky softplus activation function similar to :obj:`tf.nn.leaky_relu` but smooth."""

    def __init__(self, alpha: float = 0.05, trainable: bool = False, **kwargs):
        """Initialize with optionally learnable parameter.

        Args:
            alpha (float, optional): Leak parameter alpha. Default is 0.05.
            trainable (bool, optional): Whether to make alpha trainable. Default is False.
        """
        super(LeakySoftplus, self).__init__(**kwargs)
        self._alpha_config = float(alpha)
        self._alpha_trainable = bool(trainable)
        self.alpha = self.add_weight(
            shape=tuple(),
            initializer=ks.initializers.Constant(alpha),
            dtype=self.dtype,
            trainable=self._alpha_trainable
        )

    def call(self, inputs, *args, **kwargs):
        """Forward pass.

        Args:
            inputs (Tensor): Input tensor of arbitrary shape.

        Returns:
            Tensor: Leaky softplus activation of inputs.
        """
        x = inputs
        return ks.activations.softplus(x) * (1 - self.alpha) + self.alpha * x

    def get_config(self):
        """Get layer config."""
        config = super(LeakySoftplus, self).get_config()
        config.update({"alpha": self._alpha_config, "trainable": self._alpha_trainable})
        return config
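
# Example (illustrative sketch, not part of the original file): the activation
# approaches ``x`` for large positive inputs and ``alpha * x`` for large
# negative inputs.
#
#     act = LeakySoftplus(alpha=0.05)
#     y = act(np.array([-4.0, 0.0, 4.0], dtype="float32"))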


@ks.saving.register_keras_serializable(package='kgcnn', name='LeakyRelu')
class LeakyRelu(ks.layers.Layer):
    r"""Leaky ReLU activation function. Equivalent to :obj:`tf.nn.leaky_relu(x, alpha)`."""

    def __init__(self, alpha: float = 0.05, trainable: bool = False, **kwargs):
        """Initialize with optionally learnable parameter.

        Args:
            alpha (float, optional): Leak parameter alpha. Default is 0.05.
            trainable (bool, optional): Whether to make alpha trainable. Default is False.
        """
        super(LeakyRelu, self).__init__(**kwargs)
        self._alpha_config = float(alpha)
        self._alpha_trainable = bool(trainable)
        self.alpha = self.add_weight(
            shape=tuple(), dtype=self.dtype,
            initializer=ks.initializers.Constant(alpha),
            trainable=self._alpha_trainable
        )

    def call(self, inputs, *args, **kwargs):
        """Forward pass.

        Args:
            inputs (Tensor): Input tensor of arbitrary shape.

        Returns:
            Tensor: Leaky ReLU activation of inputs.
        """
        x = inputs
        # Keras 3 names the leak parameter ``negative_slope``.
        return ks.activations.leaky_relu(x, negative_slope=self.alpha)
        # return tf.nn.relu(x) - tf.nn.relu(-x)*self.alpha

    def get_config(self):
        """Get layer config."""
        config = super(LeakyRelu, self).get_config()
        config.update({"alpha": self._alpha_config, "trainable": self._alpha_trainable})
        return config
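
# Example (illustrative sketch, not part of the original file): with
# ``trainable=True`` the leak is registered as a scalar weight and can be
# updated by the optimizer during training.
#
#     act = LeakyRelu(alpha=0.1, trainable=True)
#     print(act.weights)  # one scalar weight holding alpha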


@ks.saving.register_keras_serializable(package='kgcnn', name='Swish')
class Swish(ks.layers.Layer):
    r"""Swish activation function. Computes :math:`x \; \text{sig}(\beta x)`,
    with :math:`\text{sig}(x) = 1/(1+e^{-x})`."""

    def __init__(self, beta: float = 1.0, trainable: bool = False, **kwargs):
        """Initialize with optionally learnable parameter.

        Args:
            beta (float, optional): Parameter beta in the sigmoid. Default is 1.0.
            trainable (bool, optional): Whether to make beta trainable. Default is False.
        """
        super(Swish, self).__init__(**kwargs)
        self._beta_config = float(beta)
        self._beta_trainable = bool(trainable)
        self.beta = self.add_weight(
            shape=tuple(), dtype=self.dtype,
            initializer=ks.initializers.Constant(beta),
            trainable=self._beta_trainable
        )

    def call(self, inputs, *args, **kwargs):
        """Forward pass.

        Args:
            inputs (Tensor): Input tensor of arbitrary shape.

        Returns:
            Tensor: Swish activation of inputs.
        """
        x = inputs
        return x * ks.activations.sigmoid(self.beta * x)

    def get_config(self):
        """Get layer config."""
        config = super(Swish, self).get_config()
        config.update({"beta": self._beta_config, "trainable": self._beta_trainable})
        return config
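

if __name__ == "__main__":
    # Minimal smoke test, added for illustration; it is not part of the original
    # module and assumes a configured Keras 3 backend (TensorFlow, JAX or PyTorch).
    x = np.array([-2.0, -0.5, 0.0, 0.5, 2.0], dtype="float32")
    for layer in [LeakySoftplus(alpha=0.1), LeakyRelu(alpha=0.1), Swish(beta=1.5)]:
        y = layer(x)
        # Round-trip the config to check that each layer re-instantiates cleanly.
        clone = layer.__class__.from_config(layer.get_config())
        y_clone = clone(x)
        print(layer.__class__.__name__,
              ks.ops.convert_to_numpy(y),
              ks.ops.convert_to_numpy(y_clone))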