Add Parametric Soft Exponential Unit (PSEU) activation layer #451
Changes from 24 commits
@@ -0,0 +1,87 @@

```python
# -*- coding: utf-8 -*-
from keras import initializers
```

Reviewer: Usually, we like to have the imports sorted alphabetically, with first all the …

Author: Fixed.

```python
from keras import regularizers
from keras import constraints
from keras.layers import Layer
from keras import backend as K
import numpy as np


class PSEU(Layer):
    """Parametric Soft Exponential Unit with trainable alpha.

    See: https://arxiv.org/pdf/1602.01321.pdf by Godfrey and Gashler
    Reference: https://github.com/keras-team/keras/issues/3842 (@hobson)

    # Input shape
        Arbitrary. Use the keyword argument `input_shape`
        (tuple of integers, does not include the samples axis)
        when using this layer as the first layer in a model.

    # Output shape
        Same shape as the input.

    # Arguments
        alpha_init: Initial value of the alpha weights (float)
        regularizer: Regularizer for alpha weights.
        constraint: Constraint for alpha weights.
        trainable: Whether the alpha weights are trainable or not

    # Example
        model = Sequential()
        model.add(Dense(10))
        model.add(PSEU())

    Note: Specify alpha_init=None to use other initializers

    Soft Exponential f(α, x):
        α == 0: x
        α  > 0: (exp(αx) - 1) / α + α
        α  < 0: -ln(1 - α(x + α)) / α
    """
    def __init__(self,
                 alpha_init=0.1,
                 regularizer=None,
                 constraint=None,
                 trainable=True,
                 **kwargs):
        super(PSEU, self).__init__(**kwargs)
        self.supports_masking = True
        self.alpha_init = alpha_init
        self.initializer = initializers.get('glorot_uniform')
        # Add random initializer
        self.regularizer = regularizers.get(regularizer)
        self.constraint = constraints.get(constraint)
        self.trainable = trainable

    def build(self, input_shape):
        new_input_shape = input_shape[1:]
        self.alphas = self.add_weight(shape=new_input_shape,
                                      name='{}_alphas'.format(self.name),
                                      initializer=self.initializer,
                                      regularizer=self.regularizer,
                                      constraint=self.constraint)
        if self.trainable:
            self.trainable_weights = [self.alphas]
        if self.alpha_init is not None:
            self.set_weights([self.alpha_init * np.ones(new_input_shape)])
        self.built = True

    def call(self, x, mask=None):
```

Reviewer: What happens if mask is not None, is there any change in the behaviour?

Author: Removed. It has no impact.

```python
        if self.alpha_init is not None and self.alpha_init < 0:
            return -K.log(1 - self.alphas * (x + self.alphas)) / self.alphas
        elif self.alpha_init is not None and self.alpha_init > 0:
            return self.alphas + (K.exp(self.alphas * x) - 1.) / self.alphas
        else:
            return x

    def compute_output_shape(self, input_shape):
        return input_shape

    def get_config(self):
        config = {'alpha_init': float(self.alpha_init),
                  'regularizer': regularizers.serialize(self.regularizer),
                  'constraint': constraints.serialize(self.constraint),
                  'trainable': self.trainable}
        base_config = super(PSEU, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
```
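For context, a minimal usage sketch (not part of the PR): it applies PSEU directly to the input and checks the output against the α > 0 branch of the soft-exponential formula from the docstring, assuming a Keras version contemporary with keras-contrib. Shapes and values are illustrative only.

```python
import numpy as np
from keras.models import Sequential
from keras_contrib.layers import PSEU

# Apply PSEU directly to the input so the output can be compared with the
# closed-form soft exponential for alpha_init > 0 (illustrative values only).
alpha = 0.1
model = Sequential([PSEU(alpha_init=alpha, input_shape=(4,))])

x = np.random.randn(2, 4).astype('float32')
y = model.predict(x)

# NumPy reference: f(alpha, x) = alpha + (exp(alpha * x) - 1) / alpha for alpha > 0
expected = alpha + (np.exp(alpha * x) - 1.0) / alpha
assert np.allclose(y, expected, atol=1e-5)
```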
@@ -0,0 +1,18 @@

```python
# -*- coding: utf-8 -*-
import pytest
from keras_contrib.utils.test_utils import layer_test
from keras_contrib.layers import PSEU


@pytest.mark.parametrize('trainable', [True, False])
@pytest.mark.parametrize('alpha_init', [-0.1, 0., 0.1])
def test_pseu(trainable,
              alpha_init):
    layer_test(PSEU,
               kwargs={'trainable': trainable,
                       'alpha_init': alpha_init},
               input_shape=(2, 3, 4))


if __name__ == '__main__':
    pytest.main([__file__])
```
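Beyond `layer_test`, a quick serialization round-trip is a cheap extra check that `get_config` captures the constructor arguments. This is a hedged sketch, not part of the PR; the argument values are illustrative.

```python
from keras_contrib.layers import PSEU

# Round-trip the layer through its config (values are illustrative).
layer = PSEU(alpha_init=-0.1, trainable=False)
config = layer.get_config()
restored = PSEU.from_config(config)

assert restored.alpha_init == -0.1
assert restored.trainable is False
```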
Reviewer: You should insert the import in alphabetical order.

Author: Fixed.
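For illustration, a hedged sketch of the import block from pseu.py sorted alphabetically; the exact file this comment refers to is not shown in the diff, so this only demonstrates the requested ordering.

```python
# Illustrative ordering only, based on the imports visible in this diff.
from keras import backend as K
from keras import constraints
from keras import initializers
from keras import regularizers
from keras.layers import Layer
import numpy as np
```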