diff --git a/src/torchmetrics/functional/regression/kl_divergence.py b/src/torchmetrics/functional/regression/kl_divergence.py
index 3aca064cc35..6e6563aee71 100644
--- a/src/torchmetrics/functional/regression/kl_divergence.py
+++ b/src/torchmetrics/functional/regression/kl_divergence.py
@@ -89,7 +89,7 @@ def kl_divergence(
 
     Where :math:`P` and :math:`Q` are probability distributions where :math:`P` usually represents a distribution
     over data and :math:`Q` is often a prior or approximation of :math:`P`. It should be noted that the KL divergence
-    is a non-symetrical metric i.e. :math:`D_{KL}(P||Q) \neq D_{KL}(Q||P)`.
+    is a non-symmetrical metric i.e. :math:`D_{KL}(P||Q) \neq D_{KL}(Q||P)`.
 
     Args:
         p: data distribution with shape ``[N, d]``
diff --git a/src/torchmetrics/regression/kl_divergence.py b/src/torchmetrics/regression/kl_divergence.py
index ee797d8c795..7cb3d478fe6 100644
--- a/src/torchmetrics/regression/kl_divergence.py
+++ b/src/torchmetrics/regression/kl_divergence.py
@@ -35,7 +35,7 @@ class KLDivergence(Metric):
 
     Where :math:`P` and :math:`Q` are probability distributions where :math:`P` usually represents a distribution
     over data and :math:`Q` is often a prior or approximation of :math:`P`. It should be noted that the KL divergence
-    is a non-symetrical metric i.e. :math:`D_{KL}(P||Q) \neq D_{KL}(Q||P)`.
+    is a non-symmetrical metric i.e. :math:`D_{KL}(P||Q) \neq D_{KL}(Q||P)`.
 
     As input to ``forward`` and ``update`` the metric accepts the following input:
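
For context, a minimal sketch of the asymmetry the corrected docstring describes, using the public ``torchmetrics.functional.kl_divergence`` API touched by this diff. The tensors and their values are illustrative, not taken from the library's docs:

```python
import torch
from torchmetrics.functional import kl_divergence

# Two toy probability distributions with shape [N, d] (each row sums to 1).
p = torch.tensor([[0.6, 0.3, 0.1]])
q = torch.tensor([[0.2, 0.5, 0.3]])

# KL divergence is non-symmetrical: swapping the arguments yields a
# different value, i.e. D_KL(P||Q) != D_KL(Q||P).
print(kl_divergence(p, q))  # one value ...
print(kl_divergence(q, p))  # ... and a different one
```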