From ebb384f995a78054b54c0f846b0fb2f1de6e1560 Mon Sep 17 00:00:00 2001
From: Alexandra Semposki
Date: Tue, 17 Sep 2024 22:42:41 -0400
Subject: [PATCH] rest of the changes attempted

---
 src/Taweret/core/base_mixer.py | 8 +++-----
 src/Taweret/utils/utils.py     | 2 +-
 2 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/src/Taweret/core/base_mixer.py b/src/Taweret/core/base_mixer.py
index d3f74628..f3c2990d 100644
--- a/src/Taweret/core/base_mixer.py
+++ b/src/Taweret/core/base_mixer.py
@@ -230,12 +230,10 @@ def train(self):
         '''
         Run sampler to learn parameters. Method should also create class
         members that store the posterior and other diagnostic quantities
-        important for plotting
-        MAP values should also calculate and set as member variable of
-        class
+        important for plotting MAP values.
 
-        Return:
-        -------
+        Returns:
+        --------
         _posterior : np.ndarray
             the mcmc chain return from sampler
         '''
diff --git a/src/Taweret/utils/utils.py b/src/Taweret/utils/utils.py
index 922bd96d..d263700a 100644
--- a/src/Taweret/utils/utils.py
+++ b/src/Taweret/utils/utils.py
@@ -25,7 +25,7 @@ def normed_mvn_loglike(y, cov):
     Evaluate the multivariate-normal log-likelihood for difference vector `y`
     and covariance matrix `cov`:
 
-    log_p = -1/2*[(y^T).(C^-1).y + log(det(C))] + const.
+    .. math:: log_p = -1/2*[(y^T).(C^-1).y + log(det(C))] + const.
 
     This likelihood IS NORMALIZED.
     The normalization const = -n/2*log(2*pi), where n is the dimensionality.
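
For reference, the second hunk documents `normed_mvn_loglike(y, cov)`, which evaluates the
normalized multivariate-normal log-likelihood
log_p = -1/2*[(y^T).(C^-1).y + log(det(C))] - n/2*log(2*pi).
The sketch below shows one way such a quantity can be computed via a Cholesky factorization.
It assumes NumPy/SciPy, the helper name `mvn_loglike_sketch` is hypothetical, and it is not
necessarily the implementation in src/Taweret/utils/utils.py.

    # Illustrative sketch only, not the Taweret implementation.
    import numpy as np
    from scipy.linalg import cho_factor, cho_solve

    def mvn_loglike_sketch(y, cov):
        # Normalized MVN log-likelihood for difference vector y and covariance cov.
        y = np.asarray(y, dtype=float)
        cov = np.asarray(cov, dtype=float)
        # Factor cov = L L^T so that C^-1 is never formed explicitly.
        factor = cho_factor(cov, lower=True, check_finite=False)
        # Quadratic form (y^T).(C^-1).y via triangular solves.
        quad = y @ cho_solve(factor, y, check_finite=False)
        # log(det(C)) = 2 * sum(log(diag(L))) for the Cholesky factor L.
        log_det = 2.0 * np.sum(np.log(np.diag(factor[0])))
        return -0.5 * (quad + log_det) - 0.5 * y.size * np.log(2.0 * np.pi)

    # Quick check: a standard normal in n dimensions evaluated at the origin
    # should give -n/2 * log(2*pi).
    print(mvn_loglike_sketch(np.zeros(3), np.eye(3)))  # ~ -2.7568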