
Commit

Adding custom loss function to penalize encode dim
DiogenesAnalytics committed Jan 22, 2024
1 parent 18794e8 commit 7ecdbe5
Showing 1 changed file with 28 additions and 4 deletions.
32 changes: 28 additions & 4 deletions src/autoencoder/training.py
@@ -21,10 +21,7 @@ def build_anomaly_loss_function(
     model = model.model
 
     # create function
-    def anomaly_diff(
-        y_true: tf.Tensor,
-        y_pred: tf.Tensor,
-    ) -> tf.Tensor:
+    def anomaly_diff(y_true: tf.Tensor, y_pred: tf.Tensor) -> tf.Tensor:
         """Calculates mean training/anomalous data reconstruction error difference."""
         # calculate the dynamic mean reconstruction error on training data
         train_reconstruction_errors = tf.reduce_mean(
@@ -49,3 +46,30 @@ def anomaly_diff(
 
     # get wrapped function
     return optimized_func
+
+
+def build_encode_dim_loss_function(
+    encode_dim: int,
+    regularization_factor: float = 0.001,
+    axis: Tuple[int, ...] = (1, 2, 3),
+) -> Callable[[tf.Tensor, tf.Tensor], tf.Tensor]:
+    """Closure that sets up the custom encode dim loss function."""
+    # calculate the encoding dim loss
+    encode_dim_loss = encode_dim * regularization_factor
+
+    # create function
+    def penalize_encode_dimension(y_true: tf.Tensor, y_pred: tf.Tensor) -> tf.Tensor:
+        """Penalizes loss with additional encoding dimension value."""
+        # calculate the mean reconstruction error per sample
+        reconstruction_loss = tf.reduce_mean(tf.square(y_true - y_pred), axis=axis)
+
+        # calculate penalized loss
+        return reconstruction_loss + encode_dim_loss
+
+    # optimize with tf.function
+    optimized_func: Callable[[tf.Tensor, tf.Tensor], tf.Tensor] = tf.function(
+        penalize_encode_dimension
+    )
+
+    # get wrapped function
+    return optimized_func
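
For context, a minimal usage sketch (not part of this commit): wiring the new loss into a Keras autoencoder. The import path is inferred from the src/autoencoder/training.py layout above; the architecture, image shape, and encode_dim=32 are illustrative assumptions. With encode_dim=32 and the default regularization_factor=0.001, the penalty is a constant 32 * 0.001 = 0.032 added on top of each sample's mean squared reconstruction error.

import tensorflow as tf

# assumed import path: with src/ as the package root, the module is autoencoder.training
from autoencoder.training import build_encode_dim_loss_function

# build the penalized loss for a hypothetical 32-unit bottleneck
loss_fn = build_encode_dim_loss_function(encode_dim=32, regularization_factor=0.001)

# toy autoencoder whose output shape matches its (batch, 28, 28, 1) input,
# so the default axis=(1, 2, 3) reduction yields one value per sample
autoencoder = tf.keras.Sequential(
    [
        tf.keras.Input(shape=(28, 28, 1)),
        tf.keras.layers.Flatten(),
        tf.keras.layers.Dense(32, activation="relu"),
        tf.keras.layers.Dense(28 * 28, activation="sigmoid"),
        tf.keras.layers.Reshape((28, 28, 1)),
    ]
)

# Keras averages the per-sample values returned by the custom loss callable
autoencoder.compile(optimizer="adam", loss=loss_fn)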
