1 file changed, +4 -8 lines changed

@@ -11,6 +11,8 @@ def gaussian_kl(
"""Calculate KL Divergence of 2 Gaussian distributions.
KL divergence between two univariate Gaussians, as derived in [1], with k=1 (dimensionality).
+ .. math::
+     D_{KL}(p||q) = \frac{1}{2}\left[\log\frac{|\Sigma_q|}{|\Sigma_p|} - k + (\boldsymbol{\mu_p}-\boldsymbol{\mu_q})^T\Sigma_q^{-1}(\boldsymbol{\mu_p}-\boldsymbol{\mu_q}) + tr\left\{\Sigma_q^{-1}\Sigma_p\right\}\right]
Parameters
----------
@@ -30,11 +32,7 @@ def gaussian_kl(
References
----------
- .. math::
-     D_{KL}(p||q) = \frac{1}{2}\left[\log\frac{|\Sigma_q|}{|\Sigma_p|} - k + (\boldsymbol{\mu_p}-\boldsymbol{\mu_q})^T\Sigma_q^{-1}(\boldsymbol{\mu_p}-\boldsymbol{\mu_q}) + tr\left\{\Sigma_q^{-1}\Sigma_p\right\}\right]
-
.. [1] https://mr-easy.github.io/2020-04-16-kl-divergence-between-2-gaussian-distributions/
-
"""
return 0.5 * (torch.log(torch.abs(q_var) / torch.abs(p_var)) - 1.0 + ((p_mean - q_mean) ** 2) / q_var + p_var / q_var)
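For reference, a minimal sketch of how the documented formula can be checked numerically. The full signature of `gaussian_kl` is not shown in this diff, so the argument names and order below are assumed from the return expression; `torch.distributions.kl_divergence` between two `Normal` distributions is used as the reference value.

```python
import torch
from torch.distributions import Normal, kl_divergence

# Assumed signature, reconstructed from the return expression in the diff above.
def gaussian_kl(p_mean, p_var, q_mean, q_var):
    """KL(p || q) for univariate Gaussians parameterised by mean and variance."""
    return 0.5 * (torch.log(torch.abs(q_var) / torch.abs(p_var)) - 1.0
                  + ((p_mean - q_mean) ** 2) / q_var + p_var / q_var)

p_mean, p_var = torch.tensor(0.0), torch.tensor(1.0)
q_mean, q_var = torch.tensor(1.0), torch.tensor(2.0)

ours = gaussian_kl(p_mean, p_var, q_mean, q_var)
# Normal takes a standard deviation, hence the square root of the variance.
ref = kl_divergence(Normal(p_mean, p_var.sqrt()), Normal(q_mean, q_var.sqrt()))
print(ours.item(), ref.item())  # both ≈ 0.3466
```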
@@ -47,6 +45,8 @@ def log_gaussian_kl(
"""Calculate KL Divergence of 2 Gaussian distributions.
KL divergence between two univariate Gaussians, as derived in [1], with k=1 (dimensionality) and log variances.
+ .. math::
+     D_{KL}(p||q) = \frac{1}{2}\left[\log\frac{|\Sigma_q|}{|\Sigma_p|} - k + (\boldsymbol{\mu_p}-\boldsymbol{\mu_q})^T\Sigma_q^{-1}(\boldsymbol{\mu_p}-\boldsymbol{\mu_q}) + tr\left\{\Sigma_q^{-1}\Sigma_p\right\}\right]
Parameters
----------
@@ -66,10 +66,6 @@ def log_gaussian_kl(
References
----------
- .. math::
-     D_{KL}(p||q) = \frac{1}{2}\left[\log\frac{|\Sigma_q|}{|\Sigma_p|} - k + (\boldsymbol{\mu_p}-\boldsymbol{\mu_q})^T\Sigma_q^{-1}(\boldsymbol{\mu_p}-\boldsymbol{\mu_q}) + tr\left\{\Sigma_q^{-1}\Sigma_p\right\}\right]
-
.. [1] https://mr-easy.github.io/2020-04-16-kl-divergence-between-2-gaussian-distributions/
-
"""
return 0.5 * (q_logvar - p_logvar - 1.0 + torch.exp(p_logvar - q_logvar) + ((p_mean - q_mean) ** 2) * torch.exp(-q_logvar))
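Similarly, a minimal sketch for the log-variance variant; the signature of `log_gaussian_kl` is again assumed from the return expression. Passing log-variances should reproduce the result of the variance-based version above.

```python
import torch

# Assumed signature, reconstructed from the return expression in the diff above.
def log_gaussian_kl(p_mean, p_logvar, q_mean, q_logvar):
    """KL(p || q) for univariate Gaussians parameterised by mean and log-variance."""
    return 0.5 * (q_logvar - p_logvar - 1.0 + torch.exp(p_logvar - q_logvar)
                  + ((p_mean - q_mean) ** 2) * torch.exp(-q_logvar))

p_mean, p_var = torch.tensor(0.0), torch.tensor(1.0)
q_mean, q_var = torch.tensor(1.0), torch.tensor(2.0)

# With log-variances substituted, this matches gaussian_kl (≈ 0.3466).
print(log_gaussian_kl(p_mean, p_var.log(), q_mean, q_var.log()).item())
```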