Skip to content

Commit

Permalink
Merge pull request #72 from /issues/70
Browse files Browse the repository at this point in the history
Fix firecrown compatibility: cast parameter arrays to float64 before derivative computation, and use the 'As' (not 'A_s') key for the amplitude_parameter setting
  • Loading branch information
fjaviersanchez authored Jan 31, 2025
2 parents 15469b8 + 9e8264a commit d5455ea
Show file tree
Hide file tree
Showing 3 changed files with 15 additions and 8 deletions.
17 changes: 11 additions & 6 deletions augur/analyze.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,14 +128,15 @@ def __init__(self, config, likelihood=None, tools=None, req_params=None,
raise ValueError(f'The requested parameter {var} is not \
in the list of parameters in the likelihood.')
# Cast to numpy array (this will be done later anyway)
self.x = np.array(self.x)
self.x = np.array(self.x).astype(np.float64)
self.par_bounds = np.array(self.par_bounds)
if (len(self.par_bounds) < 1) & (self.norm_step):
self.norm_step = False
warnings.warn('Parameter bounds not provided -- the step will not be normalized')
# Normalize the pivot point given the sampling region
if self.norm_step:
self.norm = self.par_bounds[:, 1] - self.par_bounds[:, 0]
self.norm = np.array(self.par_bounds[:, 1]).astype(np.float64) - \
np.array(self.par_bounds[:, 0]).astype(np.float64)

def f(self, x, labels, pars_fid, sys_fid, donorm=False):
"""
Expand Down Expand Up @@ -166,10 +167,10 @@ def f(self, x, labels, pars_fid, sys_fid, donorm=False):
raise ValueError('The labels should have the same length as the parameters!')
else:
if isinstance(x, list):
x = np.array(x)
x = np.array(x).astype(np.float64)
# If we normalize the sampling we need to undo the normalization
if donorm:
x = self.norm * x + self.par_bounds[:, 0]
x = self.norm * x + np.array(self.par_bounds[:, 0]).astype(np.float64)

if x.ndim == 1:
_pars = pars_fid.copy()
Expand Down Expand Up @@ -221,7 +222,9 @@ def get_derivatives(self, force=False, method='5pt_stencil', step=None):
if (self.derivatives is None) or (force):
if '5pt_stencil' in method:
if self.norm_step:
x_here = (self.x - self.par_bounds[:, 0]) * 1/self.norm
print(self.x)
x_here = (self.x - np.array(self.par_bounds[:, 0]).astype(np.float64)) \
* 1/self.norm
else:
x_here = self.x
self.derivatives = five_pt_stencil(lambda y: self.f(y, self.var_pars, self.pars_fid,
Expand All @@ -234,7 +237,9 @@ def get_derivatives(self, force=False, method='5pt_stencil', step=None):
else:
ndkwargs = {}
if self.norm_step:
x_here = (self.x - self.par_bounds[:, 0]) * 1/self.norm
print(self.x)
x_here = (self.x - np.array(self.par_bounds[:, 0]).astype(np.float64)) \
* 1/self.norm
else:
x_here = self.x
jacobian_calc = nd.Jacobian(lambda y: self.f(y, self.var_pars, self.pars_fid,
Expand Down
2 changes: 1 addition & 1 deletion augur/utils/theory_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def compute_new_theory_vector(lk, tools, _sys_pars, _pars, cf=None, return_all=F
extra_dict['amplitude_parameter'] = 'sigma8'
dict_all.pop('A_s')
else:
extra_dict['amplitude_parameter'] = 'A_s'
extra_dict['amplitude_parameter'] = 'As'
dict_all.pop('sigma8')

extra_dict['mass_split'] = dict_all['mass_split']
Expand Down
4 changes: 3 additions & 1 deletion examples/config_test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ cosmo:
Omega_b : 0.0491685
h : 0.6727
n_s : 0.9645
#A_s : 2.105e-9
sigma8 : 0.831
extra_parameters :
camb :
Expand Down Expand Up @@ -108,7 +109,8 @@ fisher:
'src0_delta_z', 'src1_delta_z', 'src2_delta_z', 'src3_delta_z', 'src4_delta_z']
# parameters: # TODO: For now priors are ignored
# Omega_c: [0.1, 0.26, 0.9]
# sigma8: [0.4, 0.81, 1.2]
# A_s: [1e-9, 4e-9]
# #sigma8: [0.4, 0.81, 1.2]
# w0: [-1.8, -1.0, -0.2]
# wa: [-4, 0.0, 0.5]
# h: [0.5, 0.6727, 0.8]
Expand Down

0 comments on commit d5455ea

Please sign in to comment.