Skip to content

Commit

Permalink
Return regularization losses in stateless_call. (#18960)
Browse files Browse the repository at this point in the history
* Also return regularization losses in stateless_call.

* Fix formatting + stateless scope.

* Fix

* Fix

* Fix
  • Loading branch information
3lim authored Dec 18, 2023
1 parent 6a6e4f8 commit 9f6d5a3
Show file tree
Hide file tree
Showing 2 changed files with 19 additions and 9 deletions.
26 changes: 17 additions & 9 deletions keras/layers/layer.py
Original file line number Diff line number Diff line change
Expand Up @@ -503,7 +503,7 @@ def add_weight(
name=name,
)
# Will be added to layer.losses
variable.regularizer = regularizer
variable.regularizer = regularizers.get(regularizer)
variable.constraint = constraints.get(constraint)
self._track_variable(variable)
return variable
Expand Down Expand Up @@ -962,10 +962,13 @@ def stateless_call(
mapping = list(trainable_mapping) + list(non_trainable_mapping)

# Call in stateless scope
losses = None
with backend.StatelessScope(
state_mapping=mapping, collect_losses=return_losses
) as scope:
outputs = self.call(*args, **kwargs)
if return_losses:
losses = self.losses

# Gather updated non-trainable variables
non_trainable_variables = []
Expand All @@ -977,7 +980,7 @@ def stateless_call(
non_trainable_variables.append(v)

if return_losses:
return outputs, non_trainable_variables, scope.losses[:]
return outputs, non_trainable_variables, losses
return outputs, non_trainable_variables

def compute_output_spec(self, *args, **kwargs):
Expand Down Expand Up @@ -1072,19 +1075,24 @@ def _get_own_losses(self):
else:
return self._losses[:]

def _get_regularization_losses(self):
weight_regularization_losses = []
for v in self.trainable_weights:
regularizer = getattr(v, "regularizer", None)
if regularizer is None:
continue
if backend.in_stateless_scope():
v = backend.get_stateless_scope().get_current_value(v)
weight_regularization_losses.append(regularizer(v))
return weight_regularization_losses

@property
def losses(self):
    """List of scalar losses from `add_loss`, regularizers and sublayers."""
    # Own explicit losses first, then those of every nested sublayer,
    # then the weight-regularization penalties.
    collected = self._get_own_losses()
    for sublayer in self._flatten_layers(include_self=False):
        collected.extend(sublayer._get_own_losses())
    collected.extend(self._get_regularization_losses())
    return collected

Expand Down
2 changes: 2 additions & 0 deletions keras/layers/layer_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -581,6 +581,7 @@ def __init__(self):
shape=(),
initializer="zeros",
trainable=True,
regularizer="l1",
)
self.built = True

Expand Down Expand Up @@ -632,6 +633,7 @@ def call(self, x):
layer1.non_trainable_variables, non_trainable_variables
):
self.assertAllClose(ref_v, v)
self.assertLen(losses, 2)
for ref_loss, loss in zip(layer1.losses, losses):
self.assertAllClose(ref_loss, loss)

Expand Down

0 comments on commit 9f6d5a3

Please sign in to comment.