Commit 8ba86213 authored by Marc Arene's avatar Marc Arene
Browse files

Renaming config.py to CONST.py

parent 8ea5ea31
......@@ -17,7 +17,7 @@ import Library.param_utils as paru
import Library.roq_utils as roqu
import Library.BNS_HMC_Tools as bht
import Library.Fit_NR as fnr
import Library.config as conf
import Library.CONST as CONST
import Library.bilby_waveform as bilby_wv
import Library.bilby_utils as bilby_utils
import Library.plots as plots
......@@ -46,7 +46,7 @@ if __name__=='__main__':
parser.add_option("--fit_method", default='cubic', action="store", type="string", help="""Optional sub-directory that will be appended at the endto the default output directory such that the final output directory is: '/default-output-dir/sub_dir/'""")
parser.add_option("--no_seed", default=False, action="store_true", help="""New noise realisation from PSDs is generated""")
n_param = len(conf.param_keys)
n_param = len(CONST.param_keys)
randGen = np.random.RandomState(seed=2)
# PARSE INPUT ARGUMENTS
......@@ -64,7 +64,7 @@ if __name__=='__main__':
inj_file_name = opts.inj_file.split('/')[-1]
inj_name = inj_file_name.split('.')[0]
conf.debug = opts.debug
CONST.debug = opts.debug
if not(opts.no_seed):
# Set up a random seed for result reproducibility. This is optional!
np.random.seed(88170235)
......@@ -87,21 +87,21 @@ if __name__=='__main__':
sampler_dict = {}
sampler_dict['dlogL'] = config_dict['analysis']['dlogl']
sampler_dict['search_parameter_indices'] = [int(i) for i in list(config_dict['analysis']['parameter_indices'].split(','))]
sampler_dict['search_parameter_keys'] = [conf.param_keys[i] for i in sampler_dict['search_parameter_indices']]
sampler_dict['search_fparameter_keys'] = [conf.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
sampler_dict['search_parameter_keys'] = [CONST.param_keys[i] for i in sampler_dict['search_parameter_indices']]
sampler_dict['search_fparameter_keys'] = [CONST.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
exponents = [int(e) for e in list(config_dict['analysis']['parameter_offsets'].split(','))]
sampler_dict['parameter_offsets'] = [0] * n_param
idx_nonzero = np.nonzero(exponents)[0]
for i in idx_nonzero:
sampler_dict['parameter_offsets'][i] = 10**(-exponents[i])
sampler_dict['search_parameter_indices_local_fit'] = np.intersect1d(sampler_dict['search_parameter_indices'], conf.parameter_indices_local_fit).tolist()
sampler_dict['search_fparameter_keys_local_fit'] = np.intersect1d(sampler_dict['search_fparameter_keys'], conf.fparam_keys_local_fit).tolist()
sampler_dict['search_parameter_indices_local_fit'] = np.intersect1d(sampler_dict['search_parameter_indices'], CONST.parameter_indices_local_fit).tolist()
sampler_dict['search_fparameter_keys_local_fit'] = np.intersect1d(sampler_dict['search_fparameter_keys'], CONST.fparam_keys_local_fit).tolist()
# need this line in case for instance: `parameter_indices_local_fit_full = 2` where it's gonna be converted into an int and hence won't be able to be split()
parameter_indices_local_fit_full_str = str(config_dict['analysis']['parameter_indices_local_fit_full'])
parameter_indices_local_fit_full = [int(i) for i in list(parameter_indices_local_fit_full_str.split(','))]
sampler_dict['search_parameter_indices_local_fit_full'] = np.intersect1d(sampler_dict['search_parameter_indices'], parameter_indices_local_fit_full).tolist()
sampler_dict['search_fparameter_keys_local_fit_full'] = [conf.fparam_keys[i] for i in sampler_dict['search_parameter_indices_local_fit_full']]
sampler_dict['search_fparameter_keys_local_fit_full'] = [CONST.fparam_keys[i] for i in sampler_dict['search_parameter_indices_local_fit_full']]
print('\nOptions for the sampler are:')
pu.print_dict(sampler_dict)
......@@ -303,7 +303,7 @@ if __name__=='__main__':
likelihood.save_weights(weights_file_path_no_format, format=weight_format)
search_parameters_keys = [conf.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
search_parameters_keys = [CONST.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
likelihood.parameters = injection_parameters
logL = likelihood.log_likelihood_ratio()
......@@ -331,7 +331,7 @@ if __name__=='__main__':
# interferometers=interferometers, waveform_generator=search_waveform_generator,
# priors=priors, phase_marginalization=config_dict['analysis']['phase_marginalization'])
#
# search_parameters_keys = [conf.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
# search_parameters_keys = [CONST.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
# likelihood_gradient = lg.GWTransientLikelihoodGradient(likelihood, search_parameters_keys)
likelihood_gradient = None
......@@ -350,7 +350,7 @@ if __name__=='__main__':
print("{:20} | {:20} | {:14} | {:20} | {:20} | {:20} | {:14}".format("param name", "param value (q_pos)", "fparam_name", "fparam value", "scale", "dlogL", "offset python"))
for i in range(9):
print("{:20} | {:20.6e} | {:14} | {:20.6e} | {:20.6e} | {:20.6e} | {:14.0e}\t".format(conf.param_keys[i], q_pos[i], conf.fparam_keys[i], conf.param_functions[i](q_pos[i]), scale[i], dlogL[i], sampler_dict['parameter_offsets'][i]))
print("{:20} | {:20.6e} | {:14} | {:20.6e} | {:20.6e} | {:20.6e} | {:14.0e}\t".format(CONST.param_keys[i], q_pos[i], CONST.fparam_keys[i], CONST.param_functions[i](q_pos[i]), scale[i], dlogL[i], sampler_dict['parameter_offsets'][i]))
print("\n\n")
# import IPython; IPython.embed();
......@@ -541,7 +541,7 @@ if __name__=='__main__':
if os.path.exists(fileOutChainPhase1_path):
subprocess.run('rm ' + fileOutChainPhase1_path, shell=True)
subprocess.run('cp ' + fileOutChain_path + ' ' + fileOutChainPhase1_path, shell=True)
if conf.debug:
if CONST.debug:
H_p_logL_filename = outdir_phase1 + "H_p_logL.dat"
pmom_trajs_filename = outdir_phase1 + "pmom_trajs.dat"
np.savetxt(H_p_logL_filename, H_p_logL)
......@@ -634,7 +634,7 @@ if __name__=='__main__':
dlogL_scaler = preprocessing.StandardScaler().fit(dlogL_of_param)
dlogL_scalers[i] = dlogL_scaler
dlogL_phase1_scaled = dlogL_scaler.transform(dlogL_of_param).reshape(-1)
print(f'Fitting phase1 data for dlogL/d{conf.fparam_keys[i]}')
print(f'Fitting phase1 data for dlogL/d{CONST.fparam_keys[i]}')
dlogL_fit_methods[i] = method.fit(qpos_phase1_scaled, dlogL_phase1_scaled)
# if True:
......@@ -672,7 +672,7 @@ if __name__=='__main__':
breakpoint()
#*************************************************************************************************/
#******************************** Phase 3 : Analytical HMC *************************************/
#*************************************************************************************************/
......
......@@ -8,7 +8,7 @@ import bilby
from Headers.PN_Coefficients import * # Headers.Constants already imported in PN_Coeff
import Library.bilby_waveform as bilby_wv
import Library.param_utils as paru
import Library.config as conf
import Library.CONST as CONST
import Codes.set_injection_parameters as set_inj
import Codes.set_psds as set_psds
......@@ -171,4 +171,4 @@ if __name__=='__main__':
print("\nValue of scales")
print("{:20} | {:20} | {:20} | {:20} | {:20} | {:20}".format("param name", "bilby linalg.svd()", "hmc in C", "bilby / hmc_C", "offset python", "param name"))
for i in range(9):
print("{:20} | {:20.6e} | {:20.6e} | {:20.2f} | {:20.0e} | {:20}\t".format(conf.fparam_keys[i], scale_SVD[i], ndscale[i], scale_SVD[i]/ndscale[i], sampler_dict['parameter_offsets'][i], conf.fparam_keys[i]))
print("{:20} | {:20.6e} | {:20.6e} | {:20.2f} | {:20.0e} | {:20}\t".format(CONST.fparam_keys[i], scale_SVD[i], ndscale[i], scale_SVD[i]/ndscale[i], sampler_dict['parameter_offsets'][i], CONST.fparam_keys[i]))
......@@ -8,7 +8,7 @@ import bilby
from Headers.PN_Coefficients import * # Headers.Constants already imported in PN_Coeff
import Library.bilby_waveform as bilby_wv
import Library.param_utils as paru
import Library.config as conf
import Library.CONST as CONST
import Library.python_utils as pu
import Codes.set_injection_parameters as set_inj
......
......@@ -10,7 +10,7 @@ from Headers.PN_Coefficients import * # Headers.Constants already imported in PN
import Library.python_utils as pu
import Library.param_utils as paru
import Library.config as conf
import Library.CONST as CONST
import Library.roq_utils as roqu
......
......@@ -17,7 +17,7 @@ import Library.param_utils as paru
import Library.roq_utils as roqu
import Library.BNS_HMC_Tools as bht
import Library.Fit_NR as fnr
import Library.config as conf
import Library.CONST as CONST
import Library.bilby_waveform as bilby_wv
import Library.bilby_utils as bilby_utils
import Library.plots as plots
......@@ -149,7 +149,7 @@ if __name__=='__main__':
inj_name = inj_file_name.split('.')[0]
SkipPhase1 = opts.SkipPhase1
verbose = opts.verbose
conf.debug = opts.debug
CONST.debug = opts.debug
if not(opts.no_seed):
# Set up a random seed for result reproducibility. This is optional!
np.random.seed(88170235)
......@@ -172,17 +172,17 @@ if __name__=='__main__':
ifo_chosen = config_dict['analysis']['ifos'].split(',')
if set.intersection(set(ifos_possible), set(ifo_chosen)) != set(ifo_chosen):
raise ValueError("the '--ifos' option was wrongly set, you must choose between {}. Example: '--ifos=H1,V1'. ".format(ifos_possible))
conf.approximant = config_dict['analysis']['approx']
conf.minimum_frequency = config_dict['analysis']['minimum_frequency']
conf.maximum_frequency = config_dict['analysis']['maximum_frequency']
conf.reference_frequency = config_dict['analysis']['reference_frequency']
conf.roq = config_dict['analysis']['roq']
if conf.roq:
conf.roq_b_matrix_directory = config_dict['analysis']['roq_b_matrix_directory']
CONST.approximant = config_dict['analysis']['approx']
CONST.minimum_frequency = config_dict['analysis']['minimum_frequency']
CONST.maximum_frequency = config_dict['analysis']['maximum_frequency']
CONST.reference_frequency = config_dict['analysis']['reference_frequency']
CONST.roq = config_dict['analysis']['roq']
if CONST.roq:
CONST.roq_b_matrix_directory = config_dict['analysis']['roq_b_matrix_directory']
if opts.psd is None:
conf.psd = config_dict['analysis']['psd']
CONST.psd = config_dict['analysis']['psd']
else:
conf.psd = opts.psd
CONST.psd = opts.psd
sampler_dict['dlogL'] = config_dict['analysis']['dlogl']
sampler_dict['search_parameter_indices'] = [int(i) for i in list(config_dict['analysis']['parameter_indices'].split(','))]
exponents = [int(e) for e in list(config_dict['analysis']['parameter_offsets'].split(','))]
......@@ -191,15 +191,15 @@ if __name__=='__main__':
for i in idx_nonzero:
sampler_dict['parameter_offsets'][i] = 10**(-exponents[i])
search_fparameter_keys = [conf.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
search_fparameter_keys = [CONST.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
sampler_dict['search_parameter_indices_local_fit'] = np.intersect1d(sampler_dict['search_parameter_indices'], sampler_dict['search_parameter_indices_local_fit']).tolist()
sampler_dict['search_fparameter_keys_local_fit'] = np.intersect1d(search_fparameter_keys, conf.fparam_keys_local_fit).tolist()
sampler_dict['search_fparameter_keys_local_fit'] = np.intersect1d(search_fparameter_keys, CONST.fparam_keys_local_fit).tolist()
# need this line in case for instance: `parameter_indices_local_fit_full = 2` where it's gonna be converted into an int and hence won't be able to be split()
parameter_indices_local_fit_full_str = str(config_dict['analysis']['parameter_indices_local_fit_full'])
parameter_indices_local_fit_full = [int(i) for i in list(parameter_indices_local_fit_full_str.split(','))]
sampler_dict['search_parameter_indices_local_fit_full'] = np.intersect1d(sampler_dict['search_parameter_indices'], parameter_indices_local_fit_full).tolist()
sampler_dict['search_fparameter_keys_local_fit_full'] = [conf.fparam_keys[i] for i in sampler_dict['search_parameter_indices_local_fit_full']]
sampler_dict['search_fparameter_keys_local_fit_full'] = [CONST.fparam_keys[i] for i in sampler_dict['search_parameter_indices_local_fit_full']]
n_param = 9 # Number of parameters used
randGen = np.random.RandomState(seed=2)
......@@ -221,7 +221,7 @@ if __name__=='__main__':
outdir += outdir_subD + '/'
# example: outdir = '../.output_data/GW170817/SUB-D_######6##/PSD_1/'
outdir_psd = 'PSD_{}/'.format(conf.psd)
outdir_psd = 'PSD_{}/'.format(CONST.psd)
outdir += outdir_psd
# example: outdir = '../.output_data/GW170817/SUB-D_######6##/PSD_1/NO_SEED/'
......@@ -234,8 +234,8 @@ if __name__=='__main__':
outdir_opts = "{}_{}_{}_{}_{}_{}_{}{}/".format(n_traj_hmc_tot, n_traj_fit, n_fit_1, n_fit_2, length_num_traj, epsilon0/1000, sampler_dict['dlogL'], offset_suffix)
outdir += outdir_opts
approx_suffix = conf.approximant
if conf.roq and conf.approximant == 'IMRPhenomPv2':
approx_suffix = CONST.approximant
if CONST.roq and CONST.approximant == 'IMRPhenomPv2':
approx_suffix += '_ROQ'
outdir += approx_suffix + '/'
......@@ -246,11 +246,11 @@ if __name__=='__main__':
# SET THE INJECTION PARAMETERS
injection_parameters = set_inj.ini_file_to_dict(opts.inj_file)
minimum_frequency = conf.minimum_frequency
maximum_frequency_ifo = conf.maximum_frequency_ifo
duration = conf.duration
sampling_frequency = conf.sampling_frequency
start_time = conf.start_time
minimum_frequency = CONST.minimum_frequency
maximum_frequency_ifo = CONST.maximum_frequency_ifo
duration = CONST.duration
sampling_frequency = CONST.sampling_frequency
start_time = CONST.start_time
# import IPython; IPython.embed(); sys.exit()
......@@ -270,7 +270,7 @@ if __name__=='__main__':
# SET THEIR POWER SPECTRAL DENSITIES
# This function does not interpolate the psd
plot_title = main(interferometers, opt_psd=conf.psd, sampling_frequency=sampling_frequency, duration=duration, start_time=start_time)
plot_title = main(interferometers, opt_psd=CONST.psd, sampling_frequency=sampling_frequency, duration=duration, start_time=start_time)
# import IPython; IPython.embed()
......
......@@ -10,7 +10,7 @@ import Library.python_utils as pu
import Library.param_utils as paru
import Library.bilby_waveform as bilby_wv
import Library.bilby_utils as bilby_utils
import Library.config as conf
import Library.CONST as CONST
import Library.sklearn_fit as sklf
import Library.Fit_NR as fnr
......@@ -257,7 +257,7 @@ def Table_GradientFit_old(q_pos_fit, PointFit_Sort, n_pt_fit, n_param, n_1_fit,
defaults_kwargs = {}
# defaults_kwargs = dict(bins=25, smooth=0.9, label_kwargs=dict(fontsize=16), title_kwargs=dict(fontsize=16), color='#0072C1', truth_color='tab:orange', plot_density=False, plot_datapoints=True, fill_contours=False, max_n_ticks=3)
kwargs = {}
kwargs['labels'] = [conf.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
kwargs['labels'] = [CONST.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
kwargs['plot_contours'] = False
kwargs['truths'] = q_pos_fit[sampler_dict['search_parameter_indices']]
......@@ -316,7 +316,7 @@ def local_olut_fit(q_pos_fit, olut_x, olut_y, n_1_fit, n_2_fit, SquaredScale, pa
defaults_kwargs = {}
# defaults_kwargs = dict(bins=25, smooth=0.9, label_kwargs=dict(fontsize=16), title_kwargs=dict(fontsize=16), color='#0072C1', truth_color='tab:orange', plot_density=False, plot_datapoints=True, fill_contours=False, max_n_ticks=3)
kwargs = {}
kwargs['labels'] = [conf.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
kwargs['labels'] = [CONST.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
kwargs['plot_contours'] = False
kwargs['truths'] = q_pos_fit[sampler_dict['search_parameter_indices']]
......@@ -335,7 +335,7 @@ def local_olut_fit(q_pos_fit, olut_x, olut_y, n_1_fit, n_2_fit, SquaredScale, pa
def update_dlogL_with_oluts(q_pos_fit, dlogL_pos, parameter_indices_to_update, oluts_dict, n_1_fit, n_2_fit, squared_scales, traj_index=-1, step=-1, **sampler_dict):
for param_index in parameter_indices_to_update:
param_key = conf.fparam_keys[param_index]
param_key = CONST.fparam_keys[param_index]
q_pos_fit_olut = oluts_dict[param_key][0]
dlogL_olut = oluts_dict[param_key][1]
dlogL_pos[param_index] = local_olut_fit(q_pos_fit, q_pos_fit_olut, dlogL_olut, n_1_fit, n_2_fit, squared_scales, param_index, traj_index, step, **sampler_dict)
......@@ -396,7 +396,7 @@ def NumericalGradientLeapfrog_ThreeDetectors(q_pos_0, logL_0, dlogL_0, start_tim
Accept = 0
a = 1
H = 0.9 * H_0
if conf.debug:
if CONST.debug:
for i in range(len(pt_fit_traj)):
H_p_logL.append(H_p_logL_traj[i])
pmom_trajs.append(pmom_traj[i])
......@@ -473,7 +473,7 @@ def NumericalGradientLeapfrog_ThreeDetectors_Trajectory(q_pos_0, p_mom_0, dlogL_
pt_fit_traj.append(q_pos_fit)
dlogL_fit_traj.append(dlogL_pos.tolist())
if conf.debug:
if CONST.debug:
H_p_logL_traj.append([H, 0.5 * (p_mom[sampler_dict['search_parameter_indices']] ** 2).sum(), logL])
pmom_traj.append(p_mom.tolist())
# H_p_logL_traj[i][0] = H
......@@ -644,7 +644,7 @@ def HybridGradientLeapfrog_ThreeDetectors(q_pos_0, logL_0, dlogL_0, start_time,
a = 1
H = 0.9 * H_0
if conf.debug:
if CONST.debug:
for i in range(len(pt_fit_traj)):
H_p_logL.append(H_p_logL_traj[i])
pmom_trajs.append(pmom_traj[i])
......@@ -767,7 +767,7 @@ def FullHybridGradientLeapfrog_ThreeDetectors(q_pos_0, logL_0, dlogL_0, start_ti
a = 1
H = 0.9 * H_0
if conf.debug:
if CONST.debug:
for i in range(len(pt_fit_traj)):
H_p_logL.append(H_p_logL_traj[i])
pmom_trajs.append(pmom_traj[i])
......@@ -795,7 +795,7 @@ def FullHybridGradientLeapfrog_ThreeDetectors_Trajectory(q_pos_0, p_mom_0, dlogL
pt_fit_traj[0] = paru.q_pos_to_q_pos_fit(q_pos)
dlogL_fit_traj[0] = dlogL_pos
if conf.debug:
if CONST.debug:
parameters = paru.q_pos_to_dictionary(q_pos, start_time)
template_ifos = bilby_wv.WaveForm_ThreeDetectors(parameters, interferometers, waveform_arguments)
logL = bilby_utils.loglikelihood(template_ifos, interferometers)
......@@ -869,7 +869,7 @@ def FullHybridGradientLeapfrog_ThreeDetectors_Trajectory(q_pos_0, p_mom_0, dlogL
dlogL_fit_traj[i] = dlogL_pos
if conf.debug:
if CONST.debug:
template_ifos = bilby_wv.WaveForm_ThreeDetectors(parameters, interferometers, waveform_arguments)
logL = bilby_utils.loglikelihood(template_ifos, interferometers)
H = computeHamiltonian(p_mom, logL, **sampler_dict)
......@@ -1237,7 +1237,7 @@ def NumGradientBothMethods_ThreeDetectors(q_pos_0, logL_0, dlogL_0, start_time,
n_qpos_global_fit = len(qpos_global_fit) # Update current number of points inside the pool for the cubic fit
n_qpos_olut_fit = len(qpos_olut_fit) # Update current number of points inside the pool for the look-up table
if conf.debug:
if CONST.debug:
H_p_logL = np.concatenate((H_p_logL, H_p_logL_traj))
pmom_trajs = np.concatenate((pmom_trajs, pmom_traj))
......
......@@ -4,7 +4,7 @@
import numpy as np
import Library.config as conf
import Library.CONST as CONST
def svdfit(x, y, n, a, ma, m, u, v, w, chisq, funcs):
"""
......@@ -196,7 +196,7 @@ def get_OLUT(table_x, table_y, idx_to_sort):
def get_olut_dict(table_x, table_y, param_idx):
olut_x, olut_y = get_OLUT(table_x, table_y, param_idx)
return {conf.fparam_keys[param_idx]: (olut_x, olut_y)}
return {CONST.fparam_keys[param_idx]: (olut_x, olut_y)}
def get_oluts_dict(q_pos, dlogL, parameter_indices_for_olut):
......
......@@ -8,7 +8,7 @@ import Library.bilby_utils as bilby_utils
import Library.python_utils as pu
import Library.param_utils as paru
import Library.bilby_detector as bilby_det
import Library.config as conf
import Library.CONST as CONST
def get_source_frame_polarizations(parameters, interferometers, waveform_arguments):
# Create the waveform_generator using a LAL Binary Neutron Star source function
......
......@@ -8,7 +8,7 @@ from bilby.gw.likelihood import GravitationalWaveTransient
from bilby.core.likelihood import Likelihood
import Library.param_utils as paru
import Library.config as conf
import Library.CONST as CONST
class LikelihoodGradient(object):
"""
......@@ -181,9 +181,9 @@ class GWTransientLikelihoodGradient(LikelihoodGradient):
def convert_dlogL_dict_to_np_array(self, dlogL_dict):
dlogL_np = np.zeros(9)
# dlogL_np = np.zeros(len(conf.fparam_keys))
# dlogL_np = np.zeros(len(CONST.fparam_keys))
dict_keys = dlogL_dict.keys()
for i, key in enumerate(conf.fparam_keys):
for i, key in enumerate(CONST.fparam_keys):
if key in dict_keys:
dlogL_np[i] = dlogL_dict[key]
......
......@@ -4,7 +4,7 @@ sys.path.append('../')
import numpy as np
from Headers.PN_Coefficients import * # Headers.Constants already imported in PN_Coeff
import Library.config as conf
import Library.CONST as CONST
......
......@@ -5,7 +5,7 @@ import corner
import sys
sys.path.append('../')
import Library.config as conf
import Library.CONST as CONST
import Library.python_utils as pu
def perso_plot_walker(walk, parameter_name, figsize=(15,5)):
......@@ -128,7 +128,7 @@ def plot_num_and_ana_gradients(dlogL_num, dlogL_ana, opts_dict=None, save=True,
fig, axs = plt.subplots(nrows=len(params_to_plot), figsize=(15, 3 * dlogL_num.shape[0]))
# fig, axs = plt.subplots(nrows=dlogL_num.shape[0], figsize=(15, 3 * dlogL_num.shape[0]))
# fig, axs = plt.subplots(dlogL_num.shape[0], 1, figsize=(16,8.2))
gradient_names = ['dlogL/d' + fpname for fpname in conf.fparam_keys]
gradient_names = ['dlogL/d' + fpname for fpname in CONST.fparam_keys]
if alongNum:
label_ana = 'analytical along num trajectory'
title = 'Analytical gradients computed along a numerical trajectory'
......@@ -181,7 +181,7 @@ def plot_num_vs_ana_positions(q_pos_num, q_pos_ana, opts_dict=None, save=True, s
axs[param_index].plot(steps, q_pos_num[param_index], label='numerical')
axs[param_index].plot(steps, q_pos_ana[param_index], label='analytical')
axs[param_index].set_xlabel("steps", color='black')
axs[param_index].set_ylabel("{}".format(conf.fparam_keys[param_index]), color='black')
axs[param_index].set_ylabel("{}".format(CONST.fparam_keys[param_index]), color='black')
axs[param_index].legend()
if opts_dict is None:
......@@ -220,7 +220,7 @@ def plot_num_gradients_meth12_and_logL(dlogL_num_meth1, dlogL_num_meth2, logL, o
fig, axs = plt.subplots(nrows=dlogL_num_meth1.shape[0], figsize=(15, 3 * dlogL_num_meth1.shape[0]))
# fig, axs = plt.subplots(dlogL_num_meth1.shape[0], 1, figsize=(16,8.2))
gradient_names = ['dlogL/d' + fpname for fpname in conf.fparam_keys]
gradient_names = ['dlogL/d' + fpname for fpname in CONST.fparam_keys]
title = 'Comparision of the two methods to get numerical gradients'
file_name_default = 'dlogL_num_1vs2_single_traj_{}'.format(dlogL_num_meth1.shape[1])
......
......@@ -14,7 +14,7 @@ import Library.python_utils as pu
import Library.param_utils as paru
import Library.bilby_waveform as bilby_wv
import Library.bilby_utils as bilby_utils
import Library.config as conf
import Library.CONST as CONST
import Library.python_utils as pu
import Library.sklearn_fit as sklf
......@@ -264,7 +264,7 @@ if __name__=='__main__':
defaults_kwargs = {}
# defaults_kwargs = dict(bins=25, smooth=0.9, label_kwargs=dict(fontsize=16), title_kwargs=dict(fontsize=16), color='#0072C1', truth_color='tab:orange', plot_density=False, plot_datapoints=True, fill_contours=False, max_n_ticks=3)
kwargs = {}
kwargs['labels'] = [conf.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
kwargs['labels'] = [CONST.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
kwargs['plot_contours'] = False
# kwargs['truths'] = q_pos_fit[sampler_dict['search_parameter_indices']]
......@@ -539,7 +539,7 @@ if __name__=='__main__':
defaults_kwargs = {}
# defaults_kwargs = dict(bins=25, smooth=0.9, label_kwargs=dict(fontsize=16), title_kwargs=dict(fontsize=16), color='#0072C1', truth_color='tab:orange', plot_density=False, plot_datapoints=True, fill_contours=False, max_n_ticks=3)
kwargs = {}
kwargs['labels'] = [conf.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
kwargs['labels'] = [CONST.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
kwargs['plot_contours'] = False
kwargs['truths'] = q_pos_fit[sampler_dict['search_parameter_indices']]
......
......@@ -17,7 +17,7 @@ import Library.param_utils as paru
import Library.roq_utils as roqu
import Library.BNS_HMC_Tools as bht
import Library.Fit_NR as fnr
import Library.config as conf
import Library.CONST as CONST
import Library.bilby_waveform as bilby_wv
import Library.bilby_utils as bilby_utils
import Library.plots as plots
......@@ -70,14 +70,14 @@ if __name__=='__main__':
ifo_chosen = config_dict['analysis']['ifos'].split(',')
if set.intersection(set(ifos_possible), set(ifo_chosen)) != set(ifo_chosen):
raise ValueError("the '--ifos' option was wrongly set, you must choose between {}. Example: '--ifos=H1,V1'. ".format(ifos_possible))
conf.approximant = config_dict['analysis']['approx']
conf.minimum_frequency = config_dict['analysis']['minimum_frequency']
conf.maximum_frequency = config_dict['analysis']['maximum_frequency']
conf.reference_frequency = config_dict['analysis']['reference_frequency']
conf.roq = config_dict['analysis']['roq']
if conf.roq:
conf.roq_b_matrix_directory = config_dict['analysis']['roq_b_matrix_directory']
conf.psd = config_dict['analysis']['psd']
CONST.approximant = config_dict['analysis']['approx']
CONST.minimum_frequency = config_dict['analysis']['minimum_frequency']
CONST.maximum_frequency = config_dict['analysis']['maximum_frequency']
CONST.reference_frequency = config_dict['analysis']['reference_frequency']
CONST.roq = config_dict['analysis']['roq']
if CONST.roq:
CONST.roq_b_matrix_directory = config_dict['analysis']['roq_b_matrix_directory']
CONST.psd = config_dict['analysis']['psd']
sampler_dict['dlogL'] = config_dict['analysis']['dlogl']
sampler_dict['search_parameter_indices'] = [int(i) for i in list(config_dict['analysis']['parameter_indices'].split(','))]
exponents = [int(e) for e in list(config_dict['analysis']['parameter_offsets'].split(','))]
......@@ -86,15 +86,15 @@ if __name__=='__main__':
for i in idx_nonzero:
sampler_dict['parameter_offsets'][i] = 10**(-exponents[i])
search_fparameter_keys = [conf.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
search_fparameter_keys = [CONST.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
sampler_dict['search_parameter_indices_local_fit'] = np.intersect1d(sampler_dict['search_parameter_indices'], sampler_dict['search_parameter_indices_local_fit']).tolist()
sampler_dict['search_fparameter_keys_local_fit'] = np.intersect1d(search_fparameter_keys, conf.fparam_keys_local_fit).tolist()
sampler_dict['search_fparameter_keys_local_fit'] = np.intersect1d(search_fparameter_keys, CONST.fparam_keys_local_fit).tolist()
# need this line in case for instance: `parameter_indices_local_fit_full = 2` where it's gonna be converted into an int and hence won't be able to be split()
parameter_indices_local_fit_full_str = str(config_dict['analysis']['parameter_indices_local_fit_full'])
parameter_indices_local_fit_full = [int(i) for i in list(parameter_indices_local_fit_full_str.split(','))]
sampler_dict['search_parameter_indices_local_fit_full'] = np.intersect1d(sampler_dict['search_parameter_indices'], parameter_indices_local_fit_full).tolist()
sampler_dict['search_fparameter_keys_local_fit_full'] = [conf.fparam_keys[i] for i in sampler_dict['search_parameter_indices_local_fit_full']]
sampler_dict['search_fparameter_keys_local_fit_full'] = [CONST.fparam_keys[i] for i in sampler_dict['search_parameter_indices_local_fit_full']]
n_param = 9 # Number of parameters used
randGen = np.random.RandomState(seed=2)
......@@ -109,11 +109,11 @@ if __name__=='__main__':
injection_parameters = set_inj.ini_file_to_dict(opts.outdir + '/injection.ini')
# print("\nInjected parameters derived from {} are:".format(opts.inj_file))
# pu.print_dict(injection_parameters)
minimum_frequency = conf.minimum_frequency
maximum_frequency_ifo = conf.maximum_frequency_ifo
duration = conf.duration
sampling_frequency = conf.sampling_frequency
start_time = conf.start_time
minimum_frequency = CONST.minimum_frequency
maximum_frequency_ifo = CONST.maximum_frequency_ifo
duration = CONST.duration
sampling_frequency = CONST.sampling_frequency
start_time = CONST.start_time
# INITIALIZE THE THREE INTERFEROMETERS
......@@ -132,7 +132,7 @@ if __name__=='__main__':
# SET THEIR POWER SPECTRAL DENSITIES
# This function does not interpolate the psd
set_psds.main(interferometers, opt_psd=conf.psd, sampling_frequency=sampling_frequency, duration=duration, start_time=start_time)
set_psds.main(interferometers, opt_psd=CONST.psd, sampling_frequency=sampling_frequency, duration=duration, start_time=start_time)
# SET NOISE STRAIN DATA FROM PSD
......@@ -142,7 +142,7 @@ if __name__=='__main__':
start_time=start_time)
# WAVEFORM GENERATION
conf.waveform_arguments['maximum_frequency'] = conf.maximum_frequency_injected_waveform
CONST.waveform_arguments['maximum_frequency'] = CONST.maximum_frequency_injected_waveform
template_ifos_injected = bilby_wv.WaveForm_ThreeDetectors(injection_parameters, minimum_frequency, interferometers)
# INJECT TEMPLATE INTO NOISE STRAIN DATA
......@@ -152,7 +152,7 @@ if __name__=='__main__':
interferometers[i].fd_strain = interferometers[i].strain_data.frequency_domain_strain
# COMPUTE AND PRINT SNR
conf.waveform_arguments['maximum_frequency'] = conf.maximum_frequency_search_waveform
CONST.waveform_arguments['maximum_frequency'] = CONST.maximum_frequency_search_waveform
template_ifos = bilby_wv.WaveForm_ThreeDetectors(injection_parameters, minimum_frequency, interferometers)
logL, Snr, opt_snr_Best = logL_snr.main(template_ifos, interferometers, True)
......@@ -197,7 +197,7 @@ if __name__=='__main__':
# ROQ Basis used?
if conf.roq:
if CONST.roq:
if injection_parameters['meta']['approximant'] != 'IMRPhenomPv2':
raise ValueError("Need to set approximant to IMRPhenomPv2 to work with the ROQ basis, approximant here is {}".format(injection_parameters['meta']['approximant']))
......@@ -211,7 +211,7 @@ if __name__=='__main__':
# Load the parameters describing the valid parameters for the basis.
params = injection_parameters['roq']['rescaled_params'].copy()
outdir_psd = 'PSD_{}/'.format(conf.psd)
outdir_psd = 'PSD_{}/'.format(CONST.psd)
roq_outdir = '../.ROQ_weights/' + 'GW170817' + '/' + outdir_psd
weights_file_path = roq_outdir + '{:.0f}Hz_{:.0f}Hz_{:.0f}Hz_{:.0f}s_weights.json'.format(params['flow'], injection_parameters['meta']['maximum_frequency_ifo'], injection_parameters['meta']['maximum_frequency_injected_waveform'], params['seglen'])
......@@ -266,7 +266,7 @@ if __name__=='__main__':
# likelihood.save_weights(weights_file_path)
search_parameters_keys = [conf.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
search_parameters_keys = [CONST.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]
likelihood.parameters = injection_parameters
logL = likelihood.log_likelihood_ratio()
......
......@@ -22,7 +22,7 @@ import bilby
from Headers.PN_Coefficients import * # Headers.Constants already imported in PN_Coeff
import Library.bilby_waveform as bilby_wv
# import Library.param_utils as paru
import Library.config as conf
import Library.CONST as CONST
import Library.likelihood_gradient as lg
import Library.python_utils as pu
import Library.roq_utils as roqu
......@@ -190,7 +190,7 @@ if __name__=='__main__':
likelihood.save_weights(weights_file_path)
search_parameters_keys = [conf.fparam_keys[i] for i in sampler_dict['search_parameter_indices']]