-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
5 changed files
with
90 additions
and
13 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,75 @@ | ||
import torch | ||
from torch import nn, Tensor | ||
from jaxtyping import Float | ||
|
||
def gaussian_kl(
    p_mean: Float[Tensor, "1"],
    p_var: Float[Tensor, "1"],
    q_mean: Float[Tensor, "1"],
    q_var: Float[Tensor, "1"]
) -> Float[Tensor, "1"]:
    r"""Calculate KL Divergence of 2 Gaussian distributions.

    KL divergence between two univariate Gaussians, as derived in [1], with k=1 (dimensionality).

    Parameters
    ----------
    p_mean
        mean value of first distribution
    p_var
        variance value of first distribution
    q_mean
        mean value of second distribution
    q_var
        variance value of second distribution

    Returns
    -------
    out
        KL divergence of inputs

    References
    ----------
    .. math::
        D_{KL}(p||q) = \frac{1}{2}\left[\log\frac{|\Sigma_q|}{|\Sigma_p|} - k + (\boldsymbol{\mu_p}-\boldsymbol{\mu_q})^T\Sigma_q^{-1}(\boldsymbol{\mu_p}-\boldsymbol{\mu_q}) + tr\left\{\Sigma_q^{-1}\Sigma_p\right\}\right]
    .. [1] https://mr-easy.github.io/2020-04-16-kl-divergence-between-2-gaussian-distributions/
    """
    # NOTE: raw docstring (r"") is required above — the LaTeX "\f", "\l" etc.
    # are otherwise interpreted as string escape sequences, garbling the text
    # and raising SyntaxWarning on modern Python.
    # abs() guards the log against (unexpected) negative variance inputs;
    # the remaining terms follow the k=1 closed form directly.
    return 0.5 * (torch.log(torch.abs(q_var) / torch.abs(p_var)) - 1.0 + ((p_mean-q_mean)**2)/q_var + p_var/q_var)
|
||
def log_gaussian_kl(
    p_mean: Float[Tensor, "1"],
    p_logvar: Float[Tensor, "1"],
    q_mean: Float[Tensor, "1"],
    q_logvar: Float[Tensor, "1"]
) -> Float[Tensor, "1"]:
    r"""Calculate KL Divergence of 2 Gaussian distributions.

    KL divergence between two univariate Gaussians, as derived in [1], with k=1 (dimensionality) and log variances.

    Parameters
    ----------
    p_mean
        mean value of first distribution
    p_logvar
        log of variance value of first distribution
    q_mean
        mean value of second distribution
    q_logvar
        log of variance value of second distribution

    Returns
    -------
    out
        KL divergence of inputs

    References
    ----------
    .. math::
        D_{KL}(p||q) = \frac{1}{2}\left[\log\frac{|\Sigma_q|}{|\Sigma_p|} - k + (\boldsymbol{\mu_p}-\boldsymbol{\mu_q})^T\Sigma_q^{-1}(\boldsymbol{\mu_p}-\boldsymbol{\mu_q}) + tr\left\{\Sigma_q^{-1}\Sigma_p\right\}\right]
    .. [1] https://mr-easy.github.io/2020-04-16-kl-divergence-between-2-gaussian-distributions/
    """
    # NOTE: raw docstring (r"") is required above — the LaTeX "\f", "\l" etc.
    # are otherwise interpreted as string escape sequences, garbling the text
    # and raising SyntaxWarning on modern Python.
    # Same closed form as gaussian_kl, rewritten in log-variance terms:
    # log(var_q/var_p) = q_logvar - p_logvar, var_p/var_q = exp(p_logvar - q_logvar),
    # 1/var_q = exp(-q_logvar). This avoids explicit division and is numerically safer.
    return 0.5 * (q_logvar - p_logvar - 1.0 + torch.exp(p_logvar - q_logvar) + ((p_mean - q_mean)**2)*torch.exp(-q_logvar))
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters