Commit e1f0d6e1 authored by Marc Arene's avatar Marc Arene
Browse files

param_indices => search_param_indices.

parent bce95e39
......@@ -130,7 +130,7 @@ if __name__=='__main__':
# INITIALIZE THE THREE INTERFEROMETERS
interferometers = bilby.gw.detector.InterferometerList(ifo_chosen)
interferometers = bilby.gw.detector.InterferometerList(ext_analysis_dict['ifos'])
# The minimum frequency for each interferometer needs to be set if we want the strain derived from the psd to start being non zero at that frequency and not at the default one which is 20Hz
for ifo in interferometers:
ifo.minimum_frequency = ext_analysis_dict['minimum_frequency']
......
......@@ -10,9 +10,9 @@ roq_b_matrix_directory = /Users/marcarene/roq/ROQ_data/IMRPhenomPv2/
# 1: sets the PSDs from official GWTC-1 open PSD data; 2: computes PSDs using Welch methods from gwosc open strain data; 3: uses bilby's analytical pre-stored PSD files.
psd = 3
# Indices of the parameters to run the hmc on. Other are kept fixed.
parameter_indices = 0,1,2,3,4,5,6,7,8
search_parameter_indices = 0,1,2,3,4,5,6,7,8
# Indices of the parameters where numerical gradients are going to be used instead of analytical formula if using the `FullHybridGradient_LeapFrog()` function
parameter_indices_local_fit_full = 0,2,3
search_parameter_indices_local_fit_full = 0,2,3
# -log10() of the offset used for central differencing on each parameter. Default is '707077777' meaning sampler_dict['parameter_offsets'] = [1e-07, 0, 1e-07, 0, 1e-07, 1e-07, 1e-07, 1e-07, 1e-07]. 0 Means analytical formula is used if available.
parameter_offsets = 7,0,7,0,7,7,7,7,7
# Method to use to compute the gradient of the log-likelihood. Use `dlogL1` to use dlogL = <s|dh> - <h|dh>, `dlogL2` for dlogL = logL(p+offset) - logL(p-offset).
......
......@@ -113,7 +113,11 @@ def compute_extended_analysis_dict(mass_1, mass_2, geocent_time, chirp_mass, **a
maximum_frequency = analysis_kwargs['maximum_frequency']
ext_analysis_dict = {}
ifo_chosen = analysis_kwargs['ifos'].split(',')
if set.intersection(set(CONST.IFOS_POSSIBLE), set(ifo_chosen)) != set(ifo_chosen):
raise ValueError("IFOs wrongly chosen: you must choose between {}. Example: '--ifos=H1,V1'. ".format(CONST.IFOS_POSSIBLE))
else:
ext_analysis_dict['ifos'] = ifo_chosen
# approximant = 'TaylorF2'
# approximant = 'SpinTaylorF2'
# approximant = 'IMRPhenomD'
......
......@@ -135,7 +135,7 @@ if __name__=='__main__':
parser.add_option("--psd", default=None, action="store", type="int", help="""1: sets the PSDs from official GWTC-1 open PSD data; 2: computes PSDs using Welch methods from gwosc open strain data; 3: uses bilby's analytical pre-stored PSD files.""", metavar="INT from {1,2,3}")
parser.add_option("--no_seed", default=False, action="store_true", help="""New noise realisation from PSDs is generated""")
# parser.add_option("--parameter_indices", default='012345678', action="store", type="string", help=""" Indices of the parameters to run the hmc on. Other are kept fixed. Default are values already stored in config.py file.""")
# parser.add_option("--search_parameter_indices", default='012345678', action="store", type="string", help=""" Indices of the parameters to run the hmc on. Other are kept fixed. Default are values already stored in config.py file.""")
# parser.add_option("--parameter_offsets", default='707077777', action="store", type="string", help=""" -log10() of the offset used for central differencing on each parameter. Default is '707077777' meaning sampler_dict['parameter_offsets'] = [1e-07, 0, 1e-07, 0, 1e-07, 1e-07, 1e-07, 1e-07, 1e-07]. 0 Means analytical formula is used if available.""")
# parser.add_option("--dlogL", default='dlogL1', action="store", type="string", help=""" Method to use to compute the gradient of the log-likelihood. Use `dlogL1` to use dlogL = <s|dh> - <h|dh>, `dlogL2` for dlogL = logL(p+offset) - logL(p-offset).""")
# parser.add_option("--ifos", default='H1,L1,V1', action="store", type="string", help=""" Interferometers to run the analysis on. Default is 'H1,L1,V1' """)
......@@ -184,7 +184,7 @@ if __name__=='__main__':
else:
CONST.psd = opts.psd
sampler_dict['dlogL'] = config_dict['analysis']['dlogl']
sampler_dict['search_parameter_indices'] = [int(i) for i in list(config_dict['analysis']['parameter_indices'].split(','))]
sampler_dict['search_parameter_indices'] = [int(i) for i in list(config_dict['analysis']['search_parameter_indices'].split(','))]
exponents = [int(e) for e in list(config_dict['analysis']['parameter_offsets'].split(','))]
sampler_dict['parameter_offsets'] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
idx_nonzero = np.nonzero(exponents)[0]
......@@ -195,10 +195,10 @@ if __name__=='__main__':
sampler_dict['search_parameter_indices_local_fit'] = np.intersect1d(sampler_dict['search_parameter_indices'], sampler_dict['search_parameter_indices_local_fit']).tolist()
sampler_dict['search_fparameter_keys_local_fit'] = np.intersect1d(search_fparameter_keys, CONST.PARAMETER_KEYS_LOCAL_FIT).tolist()
# need this line in case for instance: `parameter_indices_local_fit_full = 2` where it's gonna be converted into an int and hence won't be able to be split()
parameter_indices_local_fit_full_str = str(config_dict['analysis']['parameter_indices_local_fit_full'])
parameter_indices_local_fit_full = [int(i) for i in list(parameter_indices_local_fit_full_str.split(','))]
sampler_dict['search_parameter_indices_local_fit_full'] = np.intersect1d(sampler_dict['search_parameter_indices'], parameter_indices_local_fit_full).tolist()
# need this line in case for instance: `search_parameter_indices_local_fit_full = 2` where it's gonna be converted into an int and hence won't be able to be split()
parameter_indices_local_fit_full_str = str(config_dict['analysis']['search_parameter_indices_local_fit_full'])
search_parameter_indices_local_fit_full = [int(i) for i in list(parameter_indices_local_fit_full_str.split(','))]
sampler_dict['search_parameter_indices_local_fit_full'] = np.intersect1d(sampler_dict['search_parameter_indices'], search_parameter_indices_local_fit_full).tolist()
sampler_dict['search_fparameter_keys_local_fit_full'] = [CONST.FPARAMETER_KEYS[i] for i in sampler_dict['search_parameter_indices_local_fit_full']]
n_param = 9 # Number of parameters used
......
......@@ -20,7 +20,7 @@ PARAMETER_KEYS_LOCAL_FIT = ["cos_theta_jn", "psi", "ln(D)"]
_, PARAMETER_INDICES_LOCAL_FIT, _ = np.intersect1d(FPARAMETER_KEYS, PARAMETER_KEYS_LOCAL_FIT, return_indices=True)
PARAMETER_INDICES_LOCAL_FIT.sort()
PARAMETER_INDICES_LOCAL_FIT = PARAMETER_INDICES_LOCAL_FIT.tolist()
# parameter_indices_local_fit = [0, 2, 3]
# PARAMETER_INDICES_LOCAL_FIT = [0, 2, 3]
IFOS_POSSIBLE = ['H1', 'L1', 'V1']
......
......@@ -7,7 +7,7 @@ import Library.CONST as CONST
def get_sampler_dict(config_dict):
sampler_dict = {}
sampler_dict['dlogL'] = config_dict['analysis']['dlogl']
sampler_dict['search_parameter_indices'] = [int(i) for i in list(config_dict['analysis']['parameter_indices'].split(','))]
sampler_dict['search_parameter_indices'] = [int(i) for i in list(config_dict['analysis']['search_parameter_indices'].split(','))]
sampler_dict['search_parameter_keys'] = [CONST.PARAMETER_KEYS[i] for i in sampler_dict['search_parameter_indices']]
sampler_dict['search_fparameter_keys'] = [CONST.FPARAMETER_KEYS[i] for i in sampler_dict['search_parameter_indices']]
exponents = [int(e) for e in list(config_dict['analysis']['parameter_offsets'].split(','))]
......@@ -18,14 +18,14 @@ def get_sampler_dict(config_dict):
sampler_dict['search_parameter_indices_local_fit'] = np.intersect1d(sampler_dict['search_parameter_indices'], CONST.PARAMETER_INDICES_LOCAL_FIT).tolist()
sampler_dict['search_fparameter_keys_local_fit'] = np.intersect1d(sampler_dict['search_fparameter_keys'], CONST.PARAMETER_KEYS_LOCAL_FIT).tolist()
# need this line in case for instance: `parameter_indices_local_fit_full = 2` where it's gonna be converted into an int and hence won't be able to be split()
parameter_indices_local_fit_full_str = str(config_dict['analysis']['parameter_indices_local_fit_full'])
parameter_indices_local_fit_full = [int(i) for i in list(parameter_indices_local_fit_full_str.split(','))]
sampler_dict['search_parameter_indices_local_fit_full'] = np.intersect1d(sampler_dict['search_parameter_indices'], parameter_indices_local_fit_full).tolist()
# need this line in case for instance: `search_parameter_indices_local_fit_full = 2` where it's gonna be converted into an int and hence won't be able to be split()
parameter_indices_local_fit_full_str = str(config_dict['analysis']['search_parameter_indices_local_fit_full'])
search_parameter_indices_local_fit_full = [int(i) for i in list(parameter_indices_local_fit_full_str.split(','))]
sampler_dict['search_parameter_indices_local_fit_full'] = np.intersect1d(sampler_dict['search_parameter_indices'], search_parameter_indices_local_fit_full).tolist()
sampler_dict['search_fparameter_keys_local_fit_full'] = [CONST.FPARAMETER_KEYS[i] for i in sampler_dict['search_parameter_indices_local_fit_full']]
return sampler_dict
def get_output_dir(inj_name, parameter_indices, parameter_offsets, config_dict, sub_dir=None):
def get_output_dir(inj_name, search_parameter_indices, parameter_offsets, config_dict, sub_dir=None):
# Parse the inputs relative to the HMC algorithm
n_traj_hmc_tot = config_dict['hmc']['n_traj_hmc_tot']
......@@ -41,8 +41,8 @@ def get_output_dir(inj_name, parameter_indices, parameter_offsets, config_dict,
# example: outdir = '../.output_data/GW170817/bilbyoriented/SUB-D_######6##/'
outdir_subD = 'SUB-D_'
for i in range(9):
if i in parameter_indices:
for i in range(len(CONST.PARAMETER_KEYS)):
if i in search_parameter_indices:
outdir_subD+='{}'.format(i)
else:
outdir_subD += '#'
......@@ -57,15 +57,15 @@ def get_output_dir(inj_name, parameter_indices, parameter_offsets, config_dict,
# example: outdir = '../.output_data/GW170817/SUB-D_######6##/logL920.44/0e+00_1_2000_200_25_0.005_dlogL2_1e-06'
offset_suffix = ''
if len(parameter_indices) == 1 and (parameter_indices[0]!=1 or parameter_indices[0]!=3):
offset_suffix = '_{:.0e}'.format(parameter_offsets[parameter_indices[0]])
if len(search_parameter_indices) == 1 and (search_parameter_indices[0]!=1 or search_parameter_indices[0]!=3):
offset_suffix = '_{:.0e}'.format(parameter_offsets[search_parameter_indices[0]])
outdir_opts = "{}_{}_{}_{}_{}_{}_{}{}/".format(n_traj_hmc_tot, n_traj_fit, n_fit_1, n_fit_2, length_num_traj, epsilon0/1000, config_dict['analysis']['dlogl'], offset_suffix)
outdir += outdir_opts
if 'phase_marginalization' in config_dict['analysis'].keys():
if config_dict['analysis']['phase_marginalization']:
if 1 in parameter_indices:
raise ValueError('1 is parameter_indices => you cannot marginalize over phase and estimate this parameter at the same time !')
if 1 in search_parameter_indices:
raise ValueError('1 is in search_parameter_indices => you cannot marginalize over phase and estimate this parameter at the same time !')
outdir += 'phase_marginalization/'
approx_suffix = config_dict['analysis']['approximant']
......
......@@ -210,7 +210,7 @@ d2_other_param_2 = (Matrix_ClosestPoint[other_param_index_2] - q_pos_fit[other_p
- SUB01345678: look at some phase1 rejected traj like: `538 - rejected - Acc = 99.3% - 0.12294 < 0.01008 - ??`
- `[DONE]` did I really make this mistake: `if 0 is sampler_dict['search_parameter_indices']` instead of `in` ?? yes...
- `[TO DO !]` create new bilby branch from the new master and add my personal updates
- Normally with my code I should get the same results for phase3 whether I run it with pure analytical trajectories or with fullhybrid traj where I set `parameter_indices_local_fit_full = -1`. However when I compare the first dlogL_pos that's being computed here is what I get:
- Normally with my code I should get the same results for phase3 whether I run it with pure analytical trajectories or with fullhybrid traj where I set `search_parameter_indices_local_fit_full = -1`. However when I compare the first dlogL_pos that's being computed here is what I get:
- AnalyticalGradientLeapfrog_ThreeDetectors_Trajectory():
```py
array([-1.24156077e+01, 0.00000000e+00, 6.63549793e+00,
......
......@@ -1043,7 +1043,7 @@ logL on gpu = 4ms
- check that the logL computed with IMRPhenomPv2 and IMRPhenomPv2_ROQ_scaled are the same, with an injection using IMRPhenomPv2 with non rescaled chirp mass but rescaled minimum, maximum and sampling freq (?)
- with the scaling: can I compute dlogL for GW170817 with IMRPhenomPv2_ROQ? if yes does it compare with that of TaylorF2? what is btw the comparison of dlogL between TaylorF2 and IMRPhenomPv2?
- `ValueError: Waveform longer than frequency array`: duration is = 213.33s after rescaling of the roq which seems to be the reason for that bug; but why doesn't it bug in bilby's roq_example when they rescaled the duration from 4s to 4s / 1.6 ?
- Adapt OLUTs so that it works with less than 9 dimension and then do it on the --parameter_indices=03 run
- Adapt OLUTs so that it works with less than 9 dimension and then do it on the --search_parameter_indices=03 run
- Timing comparison IMRPhenomPv2 with and without ROQ
- for a freq array of 524289 elements -going from flow=34Hz until fhigh=6963.2Hz- dlogL took 2.63s without ROQ versus 33ms with ROQ => speed up factor of 80 ! (Values of logL and dlogL of both methods being very close (logL=674 case))
......
......@@ -211,7 +211,7 @@ if __name__=='__main__':
from optparse import OptionParser
usage = """Script meant to test and improve the function `Table_GradientFit()`, ie the local OLUTs fit."""
parser=OptionParser(usage)
parser.add_option("--parameter_indices", default='02345678', action="store", type="string", help=""" Indices of the parameters to run the hmc on. Other are kept fixed. Default are values already stored in optsig.py file.""")
parser.add_option("--search_parameter_indices", default='02345678', action="store", type="string", help=""" Indices of the parameters to run the hmc on. Other are kept fixed. Default are values already stored in optsig.py file.""")
parser.add_option("--training", default='1500', action="store", type="string", help=""" Number of trajectories in phase 1.""")
parser.add_option("--testing", default='1500', action="store", type="string", help=""" Selects the testing set. It corresponds to the 1000 step trajectory that was run at the end of phase1.""")
......@@ -220,9 +220,9 @@ if __name__=='__main__':
(opts,args)=parser.parse_args()
sampler_dict['search_parameter_indices'] = [int(i) for i in list(opts.parameter_indices)]
sampler_dict['search_parameter_indices'] = [int(i) for i in list(opts.search_parameter_indices)]
directory = '../Tests/.OLUTs_test/SUB' + opts.parameter_indices + '/'
directory = '../Tests/.OLUTs_test/SUB' + opts.search_parameter_indices + '/'
input_dir = directory + 'input_data/'
q_pos_fit = np.loadtxt(input_dir + 'q_pos_fit.dat')
testing_dir = input_dir + 'testing_data/'
......
......@@ -79,7 +79,7 @@ if __name__=='__main__':
CONST.roq_b_matrix_directory = config_dict['analysis']['roq_b_matrix_directory']
CONST.psd = config_dict['analysis']['psd']
sampler_dict['dlogL'] = config_dict['analysis']['dlogl']
sampler_dict['search_parameter_indices'] = [int(i) for i in list(config_dict['analysis']['parameter_indices'].split(','))]
sampler_dict['search_parameter_indices'] = [int(i) for i in list(config_dict['analysis']['search_parameter_indices'].split(','))]
exponents = [int(e) for e in list(config_dict['analysis']['parameter_offsets'].split(','))]
sampler_dict['parameter_offsets'] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
idx_nonzero = np.nonzero(exponents)[0]
......@@ -90,10 +90,10 @@ if __name__=='__main__':
sampler_dict['search_parameter_indices_local_fit'] = np.intersect1d(sampler_dict['search_parameter_indices'], sampler_dict['search_parameter_indices_local_fit']).tolist()
sampler_dict['search_fparameter_keys_local_fit'] = np.intersect1d(search_fparameter_keys, CONST.PARAMETER_KEYS_LOCAL_FIT).tolist()
# need this line in case for instance: `parameter_indices_local_fit_full = 2` where it's gonna be converted into an int and hence won't be able to be split()
parameter_indices_local_fit_full_str = str(config_dict['analysis']['parameter_indices_local_fit_full'])
parameter_indices_local_fit_full = [int(i) for i in list(parameter_indices_local_fit_full_str.split(','))]
sampler_dict['search_parameter_indices_local_fit_full'] = np.intersect1d(sampler_dict['search_parameter_indices'], parameter_indices_local_fit_full).tolist()
# need this line in case for instance: `search_parameter_indices_local_fit_full = 2` where it's gonna be converted into an int and hence won't be able to be split()
parameter_indices_local_fit_full_str = str(config_dict['analysis']['search_parameter_indices_local_fit_full'])
search_parameter_indices_local_fit_full = [int(i) for i in list(parameter_indices_local_fit_full_str.split(','))]
sampler_dict['search_parameter_indices_local_fit_full'] = np.intersect1d(sampler_dict['search_parameter_indices'], search_parameter_indices_local_fit_full).tolist()
sampler_dict['search_fparameter_keys_local_fit_full'] = [CONST.FPARAMETER_KEYS[i] for i in sampler_dict['search_parameter_indices_local_fit_full']]
n_param = 9 # Number of parameters used
......
......@@ -10,9 +10,9 @@ roq_b_matrix_directory = /Users/marcarene/roq/ROQ_data/IMRPhenomPv2/
# 1: sets the PSDs from official GWTC-1 open PSD data; 2: computes PSDs using Welch methods from gwosc open strain data; 3: uses bilby's analytical pre-stored PSD files.
psd = 3
# Indices of the parameters to run the hmc on. Other are kept fixed.
parameter_indices = 0,1,2,3,4,5,6,7,8
search_parameter_indices = 0,1,2,3,4,5,6,7,8
# Indices of the parameters where numerical gradients are going to be used instead of analytical formula if using the `FullHybridGradient_LeapFrog()` function
parameter_indices_local_fit_full = 0,2,3
search_parameter_indices_local_fit_full = 0,2,3
# -log10() of the offset used for central differencing on each parameter. Default is '707077777' meaning sampler_dict['parameter_offsets'] = [1e-07, 0, 1e-07, 0, 1e-07, 1e-07, 1e-07, 1e-07, 1e-07]. 0 Means analytical formula is used if available.
parameter_offsets = 7,0,7,0,7,7,7,7,7
# Method to use to compute the gradient of the log-likelihood. Use `dlogL1` to use dlogL = <s|dh> - <h|dh>, `dlogL2` for dlogL = logL(p+offset) - logL(p-offset).
......
......@@ -11,9 +11,9 @@ roq_b_matrix_directory = /Users/marcarene/roq/ROQ_data/IMRPhenomPv2/
# 1: sets the PSDs from official GWTC-1 open PSD data; 2: computes PSDs using Welch methods from gwosc open strain data; 3: uses bilby's analytical pre-stored PSD files.
psd = 3
# Indices of the parameters to run the hmc on. Other are kept fixed.
parameter_indices = 0,1,2,3,4,5,6,7,8
search_parameter_indices = 0,1,2,3,4,5,6,7,8
# Indices of the parameters where numerical gradients are going to be used instead of analytical formula if using the `FullHybridGradient_LeapFrog()` function
parameter_indices_local_fit_full = 0,2,3
search_parameter_indices_local_fit_full = 0,2,3
# -log10() of the offset used for central differencing on each parameter. Default is '707077777' meaning sampler_dict['parameter_offsets'] = [1e-07, 0, 1e-07, 0, 1e-07, 1e-07, 1e-07, 1e-07, 1e-07]. 0 Means analytical formula is used if available.
parameter_offsets = 7,0,7,0,7,7,7,7,7
# Method to use to compute the gradient of the log-likelihood. Use `dlogL1` to use dlogL = <s|dh> - <h|dh>, `dlogL2` for dlogL = logL(p+offset) - logL(p-offset).
......
......@@ -39,7 +39,7 @@ if __name__=='__main__':
parser.add_option("--dlogL", default='dlogL1', action="store", type="string", help=""" Method to use to compute the gradient of the log-likelihood. Use `dlogL1` to use dlogL = <s|dh> - <h|dh>, `dlogL2` for dlogL = logL(p+offset) - logL(p-offset).""")
parser.add_option("--ifos", default='H1,L1,V1', action="store", type="string", help=""" Interferometers to run the analysis on. Default is 'H1,L1,V1' """)
parser.add_option("--parameter_offsets", default='707077777', action="store", type="string", help=""" -log10() of the offset used for central differencing on each parameter. Default is '707077777' meaning sampler_dict['parameter_offsets'] = [1e-07, 0, 1e-07, 0, 1e-07, 1e-07, 1e-07, 1e-07, 1e-07]. 0 Means analytical formula is used if available.""")
parser.add_option("--parameter_indices", default='012345678', action="store", type="string", help=""" Indices of the parameters to run the hmc on. Other are kept fixed. Default are values already stored in config.py file.""")
parser.add_option("--search_parameter_indices", default='012345678', action="store", type="string", help=""" Indices of the parameters to run the hmc on. Other are kept fixed. Default are values already stored in config.py file.""")
parser.add_option("--inj_file", default='../examples/massive_GW170817.ini', action="store", type="string", help="""Injection file path leading to the `.ini` file specifying the parameters of the injection.""")
......@@ -57,7 +57,7 @@ if __name__=='__main__':
sampler_dict['parameter_offsets'][i] = 10**(-exponents[i])
sampler_dict['dlogL'] = opts.dlogL
sampler_dict['search_parameter_indices'] = [int(i) for i in list(opts.parameter_indices)]
sampler_dict['search_parameter_indices'] = [int(i) for i in list(opts.search_parameter_indices)]
inj_file_name = opts.inj_file.split('/')[-1]
inj_name = inj_file_name.split('.')[0]
......
......@@ -98,7 +98,7 @@ if __name__=='__main__':
(opts,args)=parser.parse_args()
if len(sampler_dict['search_parameter_indices'])!=1:
print("Error: please choose only one index to plot in `config.parameter_indices`")
print("Error: please choose only one index to plot in `config.search_parameter_indices`")
sys.exit(1)
if not(opts.no_seed):
......
......@@ -114,7 +114,7 @@ if __name__=='__main__':
(opts,args)=parser.parse_args()
if len(sampler_dict['search_parameter_indices'])!=1:
print("Error: please choose only one index to plot in `config.parameter_indices`")
print("Error: please choose only one index to plot in `config.search_parameter_indices`")
sys.exit(1)
if not(opts.no_seed):
......
......@@ -261,7 +261,7 @@ if __name__=='__main__':
# parser.add_option("--psd", default=1, action="store", type="int", help="""1: sets the PSDs from official GWTC-1 open PSD data; 2: computes PSDs using Welch methods from gwosc open strain data; 3: uses bilby's analytical pre-stored PSD files.""", metavar="INT from {1,2,3}")
parser.add_option("--no_seed", default=False, action="store_true", help="""New noise realisation from PSDs is generated""")
# parser.add_option("--parameter_indices", default='012345678', action="store", type="string", help=""" Indices of the parameters to run the hmc on. Other are kept fixed. Default are values already stored in config.py file.""")
# parser.add_option("--search_parameter_indices", default='012345678', action="store", type="string", help=""" Indices of the parameters to run the hmc on. Other are kept fixed. Default are values already stored in config.py file.""")
# parser.add_option("--parameter_offsets", default='707077777', action="store", type="string", help=""" -log10() of the offset used for central differencing on each parameter. Default is '707077777' meaning sampler_dict['parameter_offsets'] = [1e-07, 0, 1e-07, 0, 1e-07, 1e-07, 1e-07, 1e-07, 1e-07]. 0 Means analytical formula is used if available.""")
# parser.add_option("--dlogL", default='dlogL1', action="store", type="string", help=""" Method to use to compute the gradient of the log-likelihood. Use `dlogL1` to use dlogL = <s|dh> - <h|dh>, `dlogL2` for dlogL = logL(p+offset) - logL(p-offset).""")
# parser.add_option("--ifos", default='H1,L1,V1', action="store", type="string", help=""" Interferometers to run the analysis on. Default is 'H1,L1,V1' """)
......@@ -313,7 +313,7 @@ if __name__=='__main__':
CONST.roq_b_matrix_directory = config_dict['analysis']['roq_b_matrix_directory']
CONST.psd = config_dict['analysis']['psd']
sampler_dict['dlogL'] = config_dict['analysis']['dlogl']
sampler_dict['search_parameter_indices'] = [int(i) for i in list(config_dict['analysis']['parameter_indices'].split(','))]
sampler_dict['search_parameter_indices'] = [int(i) for i in list(config_dict['analysis']['search_parameter_indices'].split(','))]
exponents = [int(e) for e in list(config_dict['analysis']['parameter_offsets'].split(','))]
sampler_dict['parameter_offsets'] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
idx_nonzero = np.nonzero(exponents)[0]
......
......@@ -88,7 +88,7 @@ if __name__=='__main__':
CONST.roq_b_matrix_directory = config_dict['analysis']['roq_b_matrix_directory']
CONST.psd = config_dict['analysis']['psd']
sampler_dict['dlogL'] = config_dict['analysis']['dlogl']
sampler_dict['search_parameter_indices'] = [int(i) for i in list(config_dict['analysis']['parameter_indices'].split(','))]
sampler_dict['search_parameter_indices'] = [int(i) for i in list(config_dict['analysis']['search_parameter_indices'].split(','))]
exponents = [int(e) for e in list(config_dict['analysis']['parameter_offsets'].split(','))]
sampler_dict['parameter_offsets'] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
idx_nonzero = np.nonzero(exponents)[0]
......@@ -100,10 +100,10 @@ if __name__=='__main__':
sampler_dict['search_parameter_indices_local_fit'] = np.intersect1d(sampler_dict['search_parameter_indices'], sampler_dict['search_parameter_indices_local_fit']).tolist()
sampler_dict['search_fparameter_keys_local_fit'] = np.intersect1d(search_fparameter_keys, CONST.PARAMETER_KEYS_LOCAL_FIT).tolist()
# need this line in case for instance: `parameter_indices_local_fit_full = 2` where it's gonna be converted into an int and hence won't be able to be split()
parameter_indices_local_fit_full_str = str(config_dict['analysis']['parameter_indices_local_fit_full'])
parameter_indices_local_fit_full = [int(i) for i in list(parameter_indices_local_fit_full_str.split(','))]
sampler_dict['search_parameter_indices_local_fit_full'] = np.intersect1d(sampler_dict['search_parameter_indices'], parameter_indices_local_fit_full).tolist()
# need this line in case for instance: `search_parameter_indices_local_fit_full = 2` where it's gonna be converted into an int and hence won't be able to be split()
parameter_indices_local_fit_full_str = str(config_dict['analysis']['search_parameter_indices_local_fit_full'])
search_parameter_indices_local_fit_full = [int(i) for i in list(parameter_indices_local_fit_full_str.split(','))]
sampler_dict['search_parameter_indices_local_fit_full'] = np.intersect1d(sampler_dict['search_parameter_indices'], search_parameter_indices_local_fit_full).tolist()
sampler_dict['search_fparameter_keys_local_fit_full'] = [CONST.FPARAMETER_KEYS[i] for i in sampler_dict['search_parameter_indices_local_fit_full']]
n_param = 9 # Number of parameters used
......
......@@ -11,7 +11,7 @@ from optparse import OptionParser
usage = """%prog [options]
Plots one hmc trajectory for each parameter involved."""
parser=OptionParser(usage)
parser.add_option("--parameter_indices", default='012345678', action="store", type="string", help=""" Indices of the parameters to run the hmc on. Other are kept fixed. Default are values already stored in /opts_dict.npy file.""")
parser.add_option("--search_parameter_indices", default='012345678', action="store", type="string", help=""" Indices of the parameters to run the hmc on. Other are kept fixed. Default are values already stored in /opts_dict.npy file.""")
parser.add_option("--traj_nb", default=1, action="store", type="int", help=""" Which trajectory to plot. Default is 1. """,metavar="")
(opts,args)=parser.parse_args()
......@@ -23,9 +23,9 @@ output_path = '../.output_data/GW170817/bilbyoriented/SUB-D_012345678/PSD_1/1020
# if len(sys.argv)==3: trajectory_number = int(sys.argv[2])
# Intersect the default parameter_indices to plot on w.r.t those asked in input
# parameter_indices_input = [int(i) for i in list(opts.parameter_indices)]
# parameter_indices = list(set.intersection(set(parameter_indices_input), set(parameter_indices)))
# Intersect the default search_parameter_indices to plot on w.r.t those asked in input
# parameter_indices_input = [int(i) for i in list(opts.search_parameter_indices)]
# search_parameter_indices = list(set.intersection(set(parameter_indices_input), set(search_parameter_indices)))
dlogL_num_file = output_path + 'dlogL_num_single_traj_1000_phase3.dat'
dlogL_ana_file = output_path + 'dlogL_ana_single_traj_1000_phase3.dat'
......@@ -34,7 +34,7 @@ dlogL_ana_file = output_path + 'dlogL_ana_single_traj_1000_phase3.dat'
dlogL_num = np.loadtxt(dlogL_num_file).T
dlogL_ana = np.loadtxt(dlogL_ana_file).T
parameter_indices = np.arange(dlogL_num.shape[0])
search_parameter_indices = np.arange(dlogL_num.shape[0])
steps = np.arange(len(dlogL_num[0]))
......
......@@ -165,7 +165,7 @@ if __name__=='__main__':
parser=OptionParser()
parser.add_option("-n", "--nb_traj_test", default=100, action="store", type="int", help="""Number of trajectories from phase1 to use for the test set.""")
parser.add_option("--method", default='cubic', action="store", help="""Method of regression to use. RF, KN, cubic """)
parser.add_option("--parameter_indices", default=None, action="store", help=""" Indices for which to plot dlogL. Default is all of those contained in the config.ini file. """)
parser.add_option("--search_parameter_indices", default=None, action="store", help=""" Indices for which to plot dlogL. Default is all of those contained in the config.ini file. """)
parser.add_option("--rescale", default=False, action="store_true", help="""Method of regression to use. RF, KN, cubic """)
parser.add_option("--outdir", default='../.output_data/GW170817/SUB-D_012345678/PSD_3/1501_1500_2000_200_200_0.005_dlogL1/IMRPhenomPv2_ROQ', action="store", type="string", help="""Output directory of the run we are interested in.""")
(opts,args)=parser.parse_args()
......@@ -175,16 +175,16 @@ if __name__=='__main__':
config_parser.read(config_file_path)
config_dict = pu.config_parser_to_dict(config_parser)
parameter_indices = [int(i) for i in list(config_dict['analysis']['parameter_indices'].split(','))]
if opts.parameter_indices is None:
parameter_indices_to_plot = parameter_indices.copy()
search_parameter_indices = [int(i) for i in list(config_dict['analysis']['search_parameter_indices'].split(','))]
if opts.search_parameter_indices is None:
parameter_indices_to_plot = search_parameter_indices.copy()
else:
parameter_indices_to_plot = opts.parameter_indices
parameter_indices_to_plot = opts.search_parameter_indices
directory = opts.outdir + '/phase1'
qpos_phase1_all = np.loadtxt(directory + '/pt_fit_phase1.dat')
qpos_phase1 = qpos_phase1_all[:, parameter_indices]
qpos_phase1 = qpos_phase1_all[:, search_parameter_indices]
dlogL_phase1 = np.loadtxt(directory + '/dlogL_fit_phase1.dat')
if False:
......@@ -215,13 +215,13 @@ if __name__=='__main__':
qpos_train_scaled = qpos_scaler.transform(qpos_train)
qpos_test_scaled = qpos_scaler.transform(qpos_test)
parameter_indices_to_plot = parameter_indices.copy()
parameter_indices_to_plot = search_parameter_indices.copy()
# parameter_indices_to_plot = [4]
dlogL_names = ['dlogL/d' + fpname for fpname in [CONST.FPARAMETER_KEYS[j] for j in parameter_indices_to_plot]]
nb_dlogL_to_predict = len(parameter_indices_to_plot)
fig, ax = plt.subplots(nb_dlogL_to_predict, 2, sharex=False, sharey=False, figsize=(10, 4 * nb_dlogL_to_predict))
fig.text(x=0.4, y=0.95, s=f'{opts.method} fit - {parameter_indices} case', fontsize=15)
fig.text(x=0.4, y=0.95, s=f'{opts.method} fit - {search_parameter_indices} case', fontsize=15)
fig.text(x=0.2, y=0.90, s=f'train set: {qpos_train.shape[0]} pts', fontsize=12)
fig.text(x=0.7, y=0.90, s=f'test set: {qpos_test.shape[0]} pts', fontsize=12)
......
......@@ -13,7 +13,7 @@ from configparser import ConfigParser
usage = """%prog [options]
Plots one hmc trajectory for each parameter involved."""
parser=OptionParser(usage)
parser.add_option("--parameter_indices", default='012345678', action="store", type="string", help=""" Indices of the parameters to run the hmc on. Other are kept fixed. Default are values already stored in /opts_dict.npy file.""")
parser.add_option("--search_parameter_indices", default='012345678', action="store", type="string", help=""" Indices of the parameters to run the hmc on. Other are kept fixed. Default are values already stored in /opts_dict.npy file.""")
parser.add_option("--traj_nb", default=1, action="store", type="int", help=""" Which trajectory to plot. Default is 1. """,metavar="")
(opts,args)=parser.parse_args()
......@@ -30,20 +30,20 @@ trajectory_index = trajectory_number - 1
if os.path.isfile(output_path + '/conf_dict.npy'):
conf_dict_file = output_path + '/conf_dict.npy'
conf_dict = np.load(conf_dict_file).item()
parameter_indices = conf_dict['parameter_indices']
search_parameter_indices = conf_dict['search_parameter_indices']
parameter_offsets = conf_dict['parameter_offsets']
lengthT = conf_dict['length_num_traj']
elif os.path.isfile(output_path + '/opts_dict.npy'):
conf_dict_file = output_path + '/opts_dict.npy'
conf_dict = np.load(conf_dict_file).item()
parameter_indices = [int(i) for i in list(conf_dict['parameter_indices'])]
search_parameter_indices = [int(i) for i in list(conf_dict['search_parameter_indices'])]
exponents = [int(e) for e in list(conf_dict['parameter_offsets'])]
parameter_offsets = [10**(-e) for e in exponents]
lengthT = conf_dict['length_num_traj']
elif os.path.isfile(output_path + '/config.json'):
conf_dict_file = output_path + '/config.json'
conf_dict = pu.json_to_dict(conf_dict_file)
parameter_indices = [int(i) for i in list(conf_dict['analysis']['parameter_indices'].split(','))]
search_parameter_indices = [int(i) for i in list(conf_dict['analysis']['search_parameter_indices'].split(','))]
exponents = [int(e) for e in list(conf_dict['analysis']['parameter_offsets'].split(','))]
parameter_offsets = [10**(-e) for e in exponents]
lengthT = conf_dict['hmc']['length_num_traj']
......@@ -52,14 +52,14 @@ elif os.path.isfile(output_path + '/config.ini'):
config_parser = ConfigParser()
config_parser.read(config_file_path)
conf_dict = pu.config_parser_to_dict(config_parser)
parameter_indices = [int(i) for i in list(conf_dict['analysis']['parameter_indices'].split(','))]
search_parameter_indices = [int(i) for i in list(conf_dict['analysis']['search_parameter_indices'].split(','))]
exponents = [int(e) for e in list(conf_dict['analysis']['parameter_offsets'].split(','))]
parameter_offsets = [10**(-e) for e in exponents]
lengthT = conf_dict['hmc']['length_num_traj']
elif os.path.isfile(output_path + '/opts.json'):
conf_dict_file = output_path + '/opts.json'
conf_dict = pu.json_to_dict(conf_dict_file)
parameter_indices = [int(i) for i in list(conf_dict['parameter_indices'])]
search_parameter_indices = [int(i) for i in list(conf_dict['search_parameter_indices'])]
exponents = [int(e) for e in list(conf_dict['parameter_offsets'])]
parameter_offsets = [10**(-e) for e in exponents]
lengthT = conf_dict['length_num_traj']
......@@ -67,10 +67,10 @@ else:
print("ERROR: COULD NOT FIND ANY SORT OF CONFIGURATION FILE")
sys.exit()
# Intersect the default parameter_indices to plot on w.r.t those asked in input
parameter_indices_input = [int(i) for i in list(opts.parameter_indices)]
parameter_indices = list(set.intersection(set(parameter_indices_input), set(parameter_indices)))
parameter_indices.sort()
# Intersect the default search_parameter_indices to plot on w.r.t those asked in input
parameter_indices_input = [int(i) for i in list(opts.search_parameter_indices)]
search_parameter_indices = list(set.intersection(set(parameter_indices_input), set(search_parameter_indices)))
search_parameter_indices.sort()
pt_fit_traj_file = output_path_phase1 + '/pt_fit_phase1.dat'
dlogL_file = output_path_phase1 + '/dlogL_fit_phase1.dat'
......@@ -79,10 +79,10 @@ pmom_trajs_file = output_path_phase1 + '/pmom_trajs.dat'
scale_file = output_path + '/scale.dat'
# # Pb with np.loadtxt(): when there is only one index in the file, the resulting array has a shape = (), but size still equal to 1. Reshaping it puts it with shape = (1,) and hence it can be normally manipulated then
# if parameter_indices.size == 1:
# parameter_indices = parameter_indices.reshape(1)
# if search_parameter_indices.size == 1:
# search_parameter_indices = search_parameter_indices.reshape(1)
# # Need the indices to be integers
# parameter_indices = parameter_indices.astype(np.int64)
# search_parameter_indices = search_parameter_indices.astype(np.int64)
pt_fit_traj = np.loadtxt(pt_fit_traj_file).T
dlogL = np.loadtxt(dlogL_file).T
H_p_logL = np.loadtxt(H_p_logL_file).T
......@@ -93,8 +93,8 @@ except FileNotFoundError:
print("No scale.dat file found")
scale = [None for i in range(9)]
# q_pos = pt_fit_traj[parameter_indices]
# dlogL = dlogL_all[parameter_indices]
# q_pos = pt_fit_traj[search_parameter_indices]
# dlogL = dlogL_all[search_parameter_indices]
pt_fit_traj = pt_fit_traj[:, trajectory_index * lengthT: (trajectory_index +1) * lengthT]
dlogL = dlogL[:, trajectory_index * lengthT: (trajectory_index +1) * lengthT]
......@@ -138,7 +138,7 @@ plt.rcParams['xtick.labelsize'] = 12
plt.rcParams['ytick.labelsize'] = 12
plt.rcParams['font.size'] = 10
for param_index in parameter_indices:
for param_index in search_parameter_indices:
H_start = H_p_logL[0, 0]
H_end = H_p_logL[0, -1]
acc_proba = np.exp(-(H_end-H_start))
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment