@@ -12,15 +12,17 @@ module nf_activation_1d
1212 public :: gaussian, gaussian_prime
1313 public :: linear, linear_prime
1414 public :: relu, relu_prime
15+ public :: leaky_relu, leaky_relu_prime
1516 public :: sigmoid, sigmoid_prime
1617 public :: softmax, softmax_prime
1718 public :: softplus, softplus_prime
1819 public :: step, step_prime
1920 public :: tanhf, tanh_prime
2021
interface
  pure function activation_function(x, alpha)
    !! Common interface for all 1-d activation functions, used so that
    !! procedure pointers can target any of them interchangeably.
    !! `alpha` is an optional tuning parameter (e.g. the ELU / leaky ReLU
    !! negative-side slope); activations that don't need it ignore it.
    real, intent(in) :: x(:)
    real, intent(in), optional :: alpha
    real :: activation_function(size(x))
  end function activation_function
end interface
@@ -30,7 +32,7 @@ end function activation_function
3032 pure function elu (x , alpha ) result(res)
3133 ! Exponential Linear Unit (ELU) activation function.
3234 real , intent (in ) :: x(:)
33- real , intent (in ) :: alpha
35+ real , intent (in ), optional :: alpha
3436 real :: res(size (x))
3537 where (x >= 0 )
3638 res = x
@@ -43,7 +45,7 @@ pure function elu_prime(x, alpha) result(res)
4345 ! First derivative of the Exponential Linear Unit (ELU)
4446 ! activation function.
4547 real , intent (in ) :: x(:)
46- real , intent (in ) :: alpha
48+ real , intent (in ), optional :: alpha
4749 real :: res(size (x))
4850 where (x >= 0 )
4951 res = 1
@@ -52,51 +54,58 @@ pure function elu_prime(x, alpha) result(res)
5254 end where
5355 end function elu_prime
5456
pure function exponential(x, alpha) result(res)
  !! Exponential activation function: res(i) = exp(x(i)).
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! unused; kept for interface conformance
  real :: res(size(x))
  res = exp(x)
end function exponential
6164
pure function gaussian(x, alpha) result(res)
  !! Gaussian activation function: res(i) = exp(-x(i)**2).
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! unused; kept for interface conformance
  real :: res(size(x))
  res = exp(-(x * x))
end function gaussian
6872
pure function gaussian_prime(x, alpha) result(res)
  !! First derivative of the Gaussian activation function:
  !! d/dx exp(-x**2) = -2 * x * exp(-x**2).
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! unused; kept for interface conformance
  real :: res(size(x))
  ! The Gaussian factor is written out inline rather than calling
  ! gaussian(), keeping this function self-contained; the value is the same.
  res = -2. * x * exp(-x**2)
end function gaussian_prime
7580
pure function linear(x, alpha) result(res)
  !! Linear (identity) activation function: res = x.
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! unused; kept for interface conformance
  real :: res(size(x))
  res = x
end function linear
8288
pure function linear_prime(x, alpha) result(res)
  !! First derivative of the linear activation function (constant 1).
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! unused; kept for interface conformance
  real :: res(size(x))
  res = 1
end function linear_prime
8996
pure function relu(x, alpha) result(res)
  !! Rectified Linear Unit (ReLU) activation function:
  !! res = x for x > 0, and 0 otherwise.
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! unused; kept for interface conformance
  real :: res(size(x))
  res = max(x, 0.)
end function relu
96104
97- pure function relu_prime (x ) result(res)
105+ pure function relu_prime (x , alpha ) result(res)
98106 ! First derivative of the Rectified Linear Unit (ReLU) activation function.
99107 real , intent (in ) :: x(:)
108+ real , intent (in ), optional :: alpha
100109 real :: res(size (x))
101110 where (x > 0 )
102111 res = 1
@@ -105,52 +114,79 @@ pure function relu_prime(x) result(res)
105114 end where
106115 end function relu_prime
107116
108- pure function sigmoid (x ) result(res)
pure function leaky_relu(x, alpha) result(res)
  !! Leaky Rectified Linear Unit (leaky ReLU) activation function:
  !! res = x for x > 0, res = slope * x otherwise.
  !!
  !! The negative-side slope is `alpha` when supplied; otherwise a default
  !! of 0.3 is used (the same default as Keras' LeakyReLU layer).
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! negative-side slope (default 0.3)
  real :: res(size(x))
  real :: slope
  ! Guard the optional dummy: referencing an absent optional argument is
  ! invalid Fortran, so fall back to a default slope when alpha is absent.
  slope = 0.3
  if (present(alpha)) slope = alpha
  res = max(slope * x, x)
end function leaky_relu
124+
pure function leaky_relu_prime(x, alpha) result(res)
  !! First derivative of the leaky ReLU activation function:
  !! 1 for x > 0, the negative-side slope otherwise.
  !!
  !! The slope is `alpha` when supplied; otherwise a default of 0.3 is
  !! used, matching leaky_relu.
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! negative-side slope (default 0.3)
  real :: res(size(x))
  real :: slope
  ! Guard the optional dummy: referencing an absent optional argument is
  ! invalid Fortran, so fall back to a default slope when alpha is absent.
  slope = 0.3
  if (present(alpha)) slope = alpha
  where (x > 0)
    res = 1
  elsewhere
    res = slope
  end where
end function leaky_relu_prime
136+
pure function sigmoid(x, alpha) result(res)
  !! Sigmoid (logistic) activation function: res = 1 / (1 + exp(-x)).
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! unused; kept for interface conformance
  real :: res(size(x))
  res = 1. / (1. + exp(-x))
end function sigmoid
114144
pure function sigmoid_prime(x, alpha) result(res)
  !! First derivative of the sigmoid activation function:
  !! sigmoid(x) * (1 - sigmoid(x)).
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! unused; kept for interface conformance
  real :: res(size(x))
  real :: s(size(x))
  ! Evaluate the sigmoid once and reuse it; same value as calling
  ! sigmoid(x) twice.
  s = 1. / (1. + exp(-x))
  res = s * (1. - s)
end function sigmoid_prime
121152
pure function softmax(x, alpha) result(res)
  !! Softmax activation function: res(i) = exp(x(i)) / sum(exp(x)).
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! unused; kept for interface conformance
  real :: res(size(x))
  real :: e(size(x))
  ! Shift by the maximum before exponentiating for numerical stability;
  ! the shift cancels in the normalization.
  e = exp(x - maxval(x))
  res = e / sum(e)
end function softmax
129161
pure function softmax_prime(x, alpha) result(res)
  !! Derivative of the softmax activation function
  !! (diagonal of the Jacobian only): p * (1 - p) with p = softmax(x).
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! unused; kept for interface conformance
  real :: res(size(x))
  real :: p(size(x))
  ! Evaluate the softmax once inline and reuse it; same value as calling
  ! softmax(x) twice.
  p = exp(x - maxval(x))
  p = p / sum(p)
  res = p * (1. - p)
end function softmax_prime
136169
pure function softplus(x, alpha) result(res)
  !! Softplus activation function: log(1 + exp(x)).
  !!
  !! Evaluated in the numerically stable form
  !! max(x, 0) + log(1 + exp(-|x|)), which avoids the single-precision
  !! overflow of exp(x) (and the resulting Inf output) that the naive
  !! log(exp(x) + 1) suffers for x greater than about 88.
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! unused; kept for interface conformance
  real :: res(size(x))
  res = max(x, 0.) + log(1. + exp(-abs(x)))
end function softplus
143177
pure function softplus_prime(x, alpha) result(res)
  !! First derivative of the softplus activation function,
  !! which is the sigmoid: exp(x) / (exp(x) + 1) = 1 / (1 + exp(-x)).
  !!
  !! The second, equivalent form is used because the first produces
  !! Inf/Inf = NaN when exp(x) overflows single precision (x > ~88);
  !! this form correctly saturates to 1 instead.
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! unused; kept for interface conformance
  real :: res(size(x))
  res = 1. / (1. + exp(-x))
end function softplus_prime
150185
151- pure function step (x ) result(res)
186+ pure function step (x , alpha ) result(res)
152187 ! Step activation function.
153188 real , intent (in ) :: x(:)
189+ real , intent (in ), optional :: alpha
154190 real :: res(size (x))
155191 where (x > 0 )
156192 res = 1
@@ -159,26 +195,29 @@ pure function step(x) result(res)
159195 end where
160196 end function step
161197
pure function step_prime(x, alpha) result(res)
  !! First derivative of the step activation function: identically zero
  !! (the step is flat everywhere it is differentiable).
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! unused; kept for interface conformance
  real :: res(size(x))
  res = 0.
end function step_prime
168205
pure function tanhf(x, alpha) result(res)
  !! Hyperbolic tangent activation function.
  !! Wraps the intrinsic tanh so a procedure pointer with the common
  !! activation interface can target it (intrinsics can't be pointed at
  !! directly).
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! unused; kept for interface conformance
  real :: res(size(x))
  res = tanh(x)
end function tanhf
178216
pure function tanh_prime(x, alpha) result(res)
  !! First derivative of the tanh activation function: 1 - tanh(x)**2.
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha !! unused; kept for interface conformance
  real :: res(size(x))
  res = 1. - tanh(x) * tanh(x)
end function tanh_prime
0 commit comments