diff --git a/keras2c/layer2c.py b/keras2c/layer2c.py
index 79e1985..6a574c8 100644
--- a/keras2c/layer2c.py
+++ b/keras2c/layer2c.py
@@ -355,7 +355,7 @@ def _write_layer_AdvancedActivation(self, layer, inputs, outputs, i):
         if is_model_input:
             inp = inputs + '->'
         else:
-            inp = inputs + '.'
+            inp = inputs[1:] + '.'  # remove the '&' prefix added to output layer names to pass tensors by reference
 
         if layer_type(layer) == 'LeakyReLU':
             self.layers += 'k2c_LeakyReLU(' + inp + 'array,' + \
diff --git a/keras2c/weights2c.py b/keras2c/weights2c.py
index a4b3e47..ffb1407 100644
--- a/keras2c/weights2c.py
+++ b/keras2c/weights2c.py
@@ -118,9 +118,17 @@ def write_weights(self, verbose=True):
-            - **static_vars** (*str*): code fora C struct containing static variables (eg, states of a stateful RNN)
+            - **static_vars** (*str*): code for a C struct containing static variables (eg, states of a stateful RNN)
         """
 
+        if verbose:
+            print(__class__.__name__ + ".write_weights")
+
         for layer in self.model.layers:
             method = getattr(self, '_write_weights_' + layer_type(layer))
             method(layer)
+            if verbose:
+                print("")
+                print(__class__.__name__ + ".write_weights : layer : " + layer.name + " : " + method.__name__)
+                print("Stack: " + str(self.stack_vars))
+                print("Malloc: " + str(self.malloc_vars))
         return self.stack_vars, self.malloc_vars, self._write_static_vars()
 
     def _write_static_vars(self):
@@ -647,7 +655,7 @@ def _write_weights_LeakyReLU(self, layer):
 
     def _write_weights_ThresholdedReLU(self, layer):
         theta = layer.get_config()['theta']
-        self.stack_vars = 'float ' + layer.name + \
+        self.stack_vars += 'float ' + layer.name + \
             '_theta = ' + str(theta) + '; \n'
         self.stack_vars += '\n\n'
 
diff --git a/tests/test_advanced_activation_layers.py b/tests/test_advanced_activation_layers.py
index c9e81aa..fe4098b 100644
--- a/tests/test_advanced_activation_layers.py
+++ b/tests/test_advanced_activation_layers.py
@@ -82,3 +82,25 @@ def test_ReLU(self):
         keras2c_main.k2c(model, name)
         rcode = build_and_run(name)
         self.assertEqual(rcode, 0)
+
+    def test_AdvancedActivationLayers_NonInputLayer(self):
+        inshp = (9, 7, 6, 3)
+        alpha = 0.5
+        theta = 0.3
+        max_value = 1.0
+        negative_slope = 1.0
+        threshold = 0.3
+        input_layer = keras.layers.Input(inshp)
+        first = keras.layers.LeakyReLU(alpha=0.3)(input_layer)
+        middle1 = keras.layers.LeakyReLU(alpha=alpha)(first)
+        middle2 = keras.layers.PReLU(alpha_initializer='glorot_uniform')(middle1)
+        middle3 = keras.layers.ELU(alpha=alpha)(middle2)
+        middle4 = keras.layers.ThresholdedReLU(theta=theta)(middle3)
+        output_layer = keras.layers.ReLU(max_value=max_value,
+                                         negative_slope=negative_slope,
+                                         threshold=threshold)(middle4)
+        model = keras.models.Model(inputs=input_layer, outputs=output_layer)
+        name = 'test___AdvancedActivationLayers_NonInputLayer' + str(int(time.time()))
+        keras2c_main.k2c(model, name)
+        rcode = build_and_run(name)
+        self.assertEqual(rcode, 0)
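
Why the layer2c.py change is needed: names of intermediate-layer outputs are generated with a leading '&' so the corresponding k2c_tensor structs are passed by reference in the emitted C. When the writer instead needs a struct member access such as name.array, that prefix has to be stripped first, otherwise the emitted argument becomes &name.array (the address of the float* member), which no longer matches the float* parameter the activation kernels take. Below is a minimal sketch of the string handling; the helper and tensor names are illustrative, not from the codebase:

def member_access(tensor_name: str) -> str:
    """Build a C expression for the 'array' member of a k2c_tensor."""
    # intermediate outputs arrive as '&name'; drop the pass-by-reference
    # prefix before '.' member access so the emitted C reads 'name.array'
    if tensor_name.startswith('&'):
        tensor_name = tensor_name[1:]
    return tensor_name + '.array'

assert member_access('&leaky_re_lu_output') == 'leaky_re_lu_output.array'
assert member_access('plain_name') == 'plain_name.array'

This path is only taken when an advanced activation consumes another layer's output rather than a model input (model inputs go through the inputs + '->' branch), which is exactly what the new test_AdvancedActivationLayers_NonInputLayer exercises by chaining activation layers.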
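
The '=' to '+=' fix in _write_weights_ThresholdedReLU matters because Weights2C accumulates every layer's declarations into one stack_vars string, and a bare assignment in any per-layer writer silently discards what earlier layers wrote. A stripped-down illustration of the failure mode, with made-up class and variable names:

class StackVarsSketch:
    """Mimics how Weights2C appends one C fragment per layer."""

    def __init__(self):
        self.stack_vars = ''

    def write_dense(self):
        self.stack_vars += 'float dense_kernel[64]; \n'

    def write_thresholded_relu(self, fixed):
        decl = 'float thresholded_re_lu_theta = 0.3; \n'
        if fixed:
            self.stack_vars += decl  # append, preserving earlier layers
        else:
            self.stack_vars = decl   # clobbers everything written so far

buggy = StackVarsSketch()
buggy.write_dense()
buggy.write_thresholded_relu(fixed=False)
assert 'dense_kernel' not in buggy.stack_vars  # earlier declaration lost

good = StackVarsSketch()
good.write_dense()
good.write_thresholded_relu(fixed=True)
assert 'dense_kernel' in good.stack_vars

In the generated C this would surface as missing stack declarations for every layer written before a ThresholdedReLU.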
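
One small note on the verbose tracing in write_weights: the method name is spelled out as "write_weights" rather than built from __name__, because inside a function body __name__ resolves through the module globals and yields the module name (here keras2c.weights2c), not the enclosing function. __class__, by contrast, is supplied by the implicit closure cell and resolves to the class as expected. A quick self-contained check:

class Demo:
    def which(self):
        # __class__ resolves to Demo, but __name__ is the *module* name
        return __class__.__name__ + "." + __name__

print(Demo().which())  # 'Demo.__main__' when run as a script, not 'Demo.which'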