Commit 57fe6fba authored by Marc Arene's avatar Marc Arene
Browse files

life's too short for bullshit

parent b0e327f5
No preview for this file type
python main.py --config_file=../Tests/config_6500_1500.ini
python main.py --config_file=../Tests/config_7000_2000.ini
python main.py --config_file=../Tests/config_7500_2500.ini
# python ../Tests/plot_regression_phase1.py --outdir=../.output_data/GW170817inj/0_2_3_4_5_6_7_8_15_16/PSD_3/7500_2500_2000_200_200_0.005/phase_marginalization/IMRPhenomD -n 87400 -m 275000
python ../Tests/plot_regression_phase1.py --outdir=../.output_data/GW170817inj/0_2_3_4_5_6_7_8_15_16/PSD_3/7500_2500_2000_200_200_0.005/phase_marginalization/IMRPhenomD -n 87400 -m 300000
# python ../Tests/plot_regression_phase1.py --outdir=../.output_data/GW170817inj/0_2_3_4_5_6_7_8_15_16/PSD_3/7500_2500_2000_200_200_0.005/phase_marginalization/IMRPhenomD -n 87400 -m 325000
python ../Tests/plot_regression_phase1.py --outdir=../.output_data/GW170817inj/0_2_3_4_5_6_7_8_15_16/PSD_3/7500_2500_2000_200_200_0.005/phase_marginalization/IMRPhenomD -n 87400 -m 350000
# python ../Tests/plot_regression_phase1.py --outdir=../.output_data/GW170817inj/0_2_3_4_5_6_7_8_15_16/PSD_3/7500_2500_2000_200_200_0.005/phase_marginalization/IMRPhenomD -n 87400 -m 375000
python ../Tests/plot_regression_phase1.py --outdir=../.output_data/GW170817inj/0_2_3_4_5_6_7_8_15_16/PSD_3/7500_2500_2000_200_200_0.005/phase_marginalization/IMRPhenomD -n 87400 -m 400000
This diff is collapsed.
......@@ -90,7 +90,7 @@ def compute_time_from_flow_to_fhigh_3p5PN(f_low, f_high, mass_1, mass_2):
return tc_flow - tc_fhigh
def ini_file_to_dict(inj_file_path='../examples/GW170817.ini'):
def ini_file_to_dict(inj_file_path='../examples/injection_files/GW170817.ini'):
config = ConfigParser()
config.read(inj_file_path)
injection_parameters = pu.config_parser_to_dict(config)['parameters']
......@@ -144,7 +144,7 @@ def compute_extended_analysis_dict(mass_1, mass_2, geocent_time, chirp_mass, **a
# maximum_frequency_ifo = min(4096 * 4, f_high_run)
maximum_frequency_ifo = f_high_run
sampling_frequency = 2 * f_high_run
duration = tc_3p5PN + 2
duration = int(tc_3p5PN) + 1 + 2
# reference_frequency = 0
elif approximant == 'IMRPhenomPv2' and roq:
# cf:
......@@ -213,9 +213,13 @@ def compute_extended_analysis_dict(mass_1, mass_2, geocent_time, chirp_mass, **a
maximum_frequency_search_waveform = maximum_frequency_ifo
sampling_frequency = 2 * maximum_frequency_ifo
duration = 64
# duration = 64
duration = int(tc_3p5PN) + 1 + 2
# Use an integer because frame files have an integer start_time
# hence when extracting a subpart of it, the index will be exact
start_time = geocent_time - tc_3p5PN
# start_time = int(geocent_time - tc_3p5PN)
ext_analysis_dict.update(
minimum_frequency = minimum_frequency,
......@@ -235,7 +239,7 @@ def compute_extended_analysis_dict(mass_1, mass_2, geocent_time, chirp_mass, **a
return ext_analysis_dict
def get_inj_parameters_and_analysis_dict(inj_file_path='../examples/GW170817.ini', **analysis_kwargs):
def get_inj_parameters_and_analysis_dict(inj_file_path='../examples/injection_files/GW170817.ini', **analysis_kwargs):
injection_parameters = ini_file_to_dict(inj_file_path)
......@@ -254,7 +258,7 @@ if __name__=='__main__':
Plots the three waveforms in the time domain"""
parser=OptionParser(usage)
parser.add_option("--inj_file", default='../examples/GW170817.ini', action="store", type="string", help="""Injection file path leading to the `.ini` file specifying the parameters of the injection.""")
parser.add_option("--inj_file", default='../examples/injection_files/GW170817.ini', action="store", type="string", help="""Injection file path leading to the `.ini` file specifying the parameters of the injection.""")
(opts,args)=parser.parse_args()
......
......@@ -8,6 +8,7 @@ import time
from optparse import OptionParser
from configparser import ConfigParser
import subprocess
import pandas as pd
import bilby
......@@ -21,6 +22,7 @@ import Library.CONST as CONST
import Library.RUN_CONST as RUN_CONST
import Library.bilby_waveform as bilby_wv
import Library.bilby_utils as bilby_utils
import Library.psd_utils as psd_utils
import Library.plots as plots
import Library.likelihood_gradient as lg
import Library.sklearn_fit as sklf
......@@ -31,6 +33,9 @@ import Codes.set_psds as set_psds
import Codes.logL_snr as logL_snr
import Codes.dlogL as CdlogL
import Codes.FIM as FIM
import gw.detector.networks as gwdn
import core.sampler
import core.utils as cut
......@@ -67,7 +72,7 @@ def main(interferometers, opt_psd=1, sampling_frequency=None, duration=None, sta
filter_freq = None
# NFFT input description from `mlab.psd(.., NFFT = )`: The number of data points used in each block for the FFT. A power 2 is most efficient. The default value is 256. This should *NOT* be used to get zero padding, or the scaling of the result will be incorrect.
# NFFT description in gwosc script `lotsofplots.py`: number of sample for the fast fourier transform
NFFT = 4 * sampling_frequency
NFFT = int(4 * sampling_frequency)
for i, ifo in enumerate(interferometers):
psd_utils.set_interferometer_psd_from_hdf5_file(interferometer=ifo, hdf5_file_path=hdf5_file_path_list[i], signal_start_time=start_time, signal_duration=duration, psd_duration=psd_duration, psd_offset=psd_offset, filter_freq=filter_freq, NFFT=NFFT)
elif opt_psd==3:
......@@ -109,95 +114,66 @@ def main(interferometers, opt_psd=1, sampling_frequency=None, duration=None, sta
if __name__=='__main__':
if __name__ == '__main__':
usage = """%prog [options]
To be written ..."""
parser=OptionParser(usage)
parser.add_option("--inj_file", default='../examples/injection_files/GW170817.ini', action="store", type="string", help="""Injection file path leading to the `.ini` file specifying the parameters of the injection.""")
parser.add_option("--event", default='GW170817', action="store", type="string", help="""Event label part of the catalog GWTC-1""")
parser.add_option("--trigger_time", default=None, action="store", type="string", help="""Trigger GPS time.""")
parser.add_option("--config_file", default='../Codes/config_default.ini', action="store", type="string", help="""Configuration file path leading to the `.ini` file specifying some key options to run the HMC. The default file is '../Codes/config_default.ini'. Options set in the command line which are redundant with that of the config.ini will (should...) overwrite them.""")
parser.add_option("-v", "--verbose", default=True, action="store_true", help="""Print out information of interest during the run.""",dest='verbose')
parser.add_option("-d", "--debug", default=False, action="store_true", help="""Print out heavy information on trajectories to debug them.""")
parser.add_option("--stdout_file", default=False, action="store_true", help="""All printed outputs will be written in the file `print_out.txt` instead of the terminal""")
parser.add_option("--plot_traj", default=False, action="store_true", help="""Will save positions, momentas, hamiltonian values etc of every point along every trajectory in phase to be able to plot them with `plot_trajectories.py` script.""")
parser.add_option("--sub_dir", default=None, action="store", type="string", help="""Optional sub-directory that will be appended at the endto the default output directory such that the final output directory is: '/default-output-dir/sub_dir/'""")
parser.add_option("--fit_method", default='cubic', action="store", type="string", help="""Optional sub-directory that will be appended at the endto the default output directory such that the final output directory is: '/default-output-dir/sub_dir/'""")
parser.add_option("--fit_method", default='cubic', action="store", type="string", help="""Method used to fit the numerical gradients of the log likelihood accumulated in phase1. """)
parser.add_option("--no_seed", default=False, action="store_true", help="""New noise realisation from PSDs is generated""")
parser.add_option("--on_CIT", default=False, action="store_true", help="""Set this to True if running this script on Caltech's cluster. Will set the right path for the ROQ basis.""")
n_param = len(CONST.PARAMETERS_KEYS)
randGen = np.random.RandomState(seed=2)
# randGen = np.random.RandomState(seed=2)
# PARSE INPUT ARGUMENTS
(opts,args)=parser.parse_args()
print("_____________________________")
print("_____________________________")
print("Options from the command line are:")
pu.print_dict(opts.__dict__)
(opts, args) = parser.parse_args()
opts_dict_formatted = cut.dictionary_to_formatted_string(opts.__dict__)
cut.logger.info(f'Options from the command line are: {opts_dict_formatted}')
config_parser = ConfigParser()
config_parser.read(opts.config_file)
config_dict = pu.config_parser_to_dict(config_parser)
print("\nOptions from the configuration file {} are:".format(opts.config_file))
pu.print_dict(config_dict)
config_dict_formatted = cut.dictionary_to_formatted_string(config_dict)
cut.logger.info(f'Options from the configuration file {opts.config_file} are: {config_dict_formatted}')
if opts.on_CIT and config_dict['analysis']['roq']:
config_dict['analysis']['roq_b_matrix_directory'] = '/home/cbc/ROQ_data/IMRPhenomPv2/'
cut.logger.info('Setting the roq_b_matrix_directory to "/home/cbc/ROQ_data/IMRPhenomPv2/"')
RUN_CONST.debug = opts.debug
RUN_CONST.plot_traj = opts.plot_traj
if not(opts.no_seed):
# Set up a random seed for result reproducibility. This is optional!
np.random.seed(88170235)
inj_file_name = opts.inj_file.split('/')[-1]
inj_name = inj_file_name.split('.')[0]
# Parse the inputs relative to the HMC algorithm
n_traj_hmc_tot = config_dict['hmc']['n_traj_hmc_tot']
n_traj_fit = config_dict['hmc']['n_traj_fit']
n_traj_for_this_run = config_dict['hmc']['n_traj_for_this_run']
n_fit_1 = config_dict['hmc']['n_fit_1']
n_fit_2 = config_dict['hmc']['n_fit_2']
length_num_traj = config_dict['hmc']['length_num_traj']
epsilon0 = config_dict['hmc']['epsilon0']
# Parse the inputs relative to the sampler and the search
sampler_dict = init.get_sampler_dict(config_dict)
print('\nOptions for the sampler are:')
pu.print_dict(sampler_dict)
# SET THE OUTPUT DIRECTORY WHERE OUTPUT FILES WILL BE SAVED
outdir = init.get_output_dir(inj_name, sampler_dict['search_parameter_indices'], sampler_dict['parameter_offsets'], config_dict, opts.sub_dir)
print("\nOutput directory is:\n{}".format(outdir))
# INITIAL PRINTS, CREATE OUTPUT FILES
if opts.stdout_file:
stdout_terminal = sys.stdout # Keep in memory what the terminal output is for stuff we still want to output in the terminal to follow script process
stdout_file_path = outdir + 'print_out_{}_{}.txt'.format(n_traj_hmc_tot, n_traj_fit)
print("\nStandard output is now redirected to file:\n{}".format(stdout_file_path))
sys.stdout = open(stdout_file_path, 'w')
print("Options from the command line are:")
pu.print_dict(opts.__dict__)
print("\nOptions from the configuration file {} are:".format(opts.config_file))
pu.print_dict(config_dict)
samples_file_path = "{}samples.dat".format(outdir) # chain file
# Check whether the run is on an injection, an event from the catalog or a trigger time
if opts.event is not None:
data_name = opts.event + 'real'
elif opts.inj_file is not None:
inj_file_name = opts.inj_file.split('/')[-1]
data_name = inj_file_name.split('.')[0] + 'inj'
elif opts.trigger_time is not None:
data_name = opts.trigger_time
else:
raise ValueError('You must specify at least one of the 3 options: --event, --inj_file, --trigger_time')
# SET THE INJECTION PARAMETERS AND THE ANALYSIS PARAMETERS
injection_parameters, ext_analysis_dict = set_inj.get_inj_parameters_and_analysis_dict(opts.inj_file, **config_dict['analysis'])
print("\nInjected parameters derived from {} are:".format(opts.inj_file))
pu.print_dict(injection_parameters)
print("\nAnalysis parameters derived from the component masses are:")
pu.print_dict(ext_analysis_dict)
start_time = ext_analysis_dict['start_time']
# INITIALIZE THE THREE INTERFEROMETERS
interferometers = bilby.gw.detector.InterferometerList(ext_analysis_dict['ifos'])
# interferometers = bilby.gw.detector.InterferometerList(ext_analysis_dict['ifos'])
interferometers = gwdn.InterferometerList(ext_analysis_dict['ifos'])
# The minimun frequency for each interferometer needs to be set if we want the strain derived from the psd to start being non zero at that frequency and not at the default one which is 20Hz
for ifo in interferometers:
ifo.minimum_frequency = ext_analysis_dict['minimum_frequency']
......@@ -210,9 +186,11 @@ if __name__=='__main__':
ifo.strain_data.start_time = start_time
# SET THEIR POWER SPECTRAL DENSITIES
# This function does not interpolate the psd
plot_title = main(interferometers, opt_psd=config_dict['analysis']['psd'], sampling_frequency=ext_analysis_dict['sampling_frequency'], duration=ext_analysis_dict['duration'], start_time=start_time)
plot_title = main(interferometers, opt_psd=2, sampling_frequency=ext_analysis_dict['sampling_frequency'], duration=ext_analysis_dict['duration'], start_time=start_time)
# plot_title = main(interferometers, opt_psd=config_dict['analysis']['psd'], sampling_frequency=ext_analysis_dict['sampling_frequency'], duration=ext_analysis_dict['duration'], start_time=start_time)
# import IPython; IPython.embed()
......
......@@ -19,10 +19,10 @@ def get_strain_from_hdf5(hdf5_file_path):
"""
hdf5 = h5py.File(hdf5_file_path, 'r')
strain = hdf5['strain/Strain'].value
strain = hdf5['strain/Strain'][()]
number_of_samples = len(strain)
start_time = hdf5['meta/GPSstart'].value
duration = hdf5['meta/Duration'].value
start_time = hdf5['meta/GPSstart'][()]
duration = hdf5['meta/Duration'][()]
sampling_frequency = number_of_samples / duration
return strain, start_time, duration, sampling_frequency, number_of_samples
......@@ -34,7 +34,6 @@ def get_psd_from_strain(strain, sampling_frequency, NFFT=None):
sampling_frequency = int(sampling_frequency)
if NFFT is None: NFFT = 4*sampling_frequency
Pxx, freqs = mlab.psd(strain, Fs = sampling_frequency, NFFT = NFFT)
return Pxx, freqs
......
......@@ -417,6 +417,34 @@ d2_other_param_2 = (Matrix_ClosestPoint[other_param_index_2] - q_pos_fit[other_p
- understand why scales of lnMc, lnmu and lntc get x10 which screws everything up...
- `[TO DO !]` Implement boundaries from ROQ as constraints
- `[TO DO !]` run over real GW170817 data
- do the same structure as in `network.get_interferometer_with_open_data()`, but...
- load the strain from the hdf5 file or not?
- load the psd from the catalog file
- understand why logL = -480, why bilby does certain things and not other as no highpass filter etc
- follow all the steps on the losc tutorial which lead to the plots but apply that to GW170817real data and make the plot
- do this on GW150914 to check that my script gives the same plot as the tutorial
- yes !
- why do they bandpass between 43 and 300 Hz? The f_CUT I compute is = 613 Hz and f_ISCO = 66 Hz: Try both upper limits to see
- compute the snr with bilby to check I get correct answer and then do the same on 170817
- The optimal snr computed seems correct, as expected the problem comes from the inner product between the data and the template: `d_inner_h`. When dealing with the injected signal, `d_inner_h` is very close to `h_inner_h`; but with the real data I get:
```
d_inner_h_H1 = 24.115640121409307-1.205623587326565j, mf = 1.1442315243909515-0.05720405961538892j
d_inner_h_L1 = -148.06215812591506+72.89780045847147j, mf = -5.180201818823724+2.5504512652185074j
d_inner_h_V1 = 0.8803577226942961-3.865370691605366j, mf = 0.2675720711365441-1.174823841492547j
```
- compare with what bilby renders in the time domain
- The data from GWOSC should be bandpassed before the analysis but I don't think it should be whitened since the whitening of data and template is inherently part of the matched-filter analysis when we do the noise weighed inner product, no?
- should not even be bandpassed: quote from the guide: " Note that such narrow bandpassing is only used for visualization purposes and is not employed in the LVC analyses."
- Why don't I get my IMRPhenomD fourier domain template, whitened+bp, to display well in the time-domain wrt to time-domain strain as the gwosc tutorial does?
- SNR mismatch: compute the opt_snr and mf_snr using template_fft from the file and not from lalsim
- when should the windowing take place? before or after bandpassing? => before, bilby does it automatically when setting the frequency domain strain from the time domain one
- now that the template matches the data in the time domain (!!!), understand properly why...!
- I am using irff(), not ifft(): no `2*` line 270
- phaseshift = 0 for me
- I don't divide by d_eff: because my template is not scaled at 1Mpc I think
- understand `peaksample`: I think that if I don't use it, offset could be left to indmax simply
- Templates generated with lalsim + bilby and shifted in time by `time_shift` look how we expect, ie peak at `geocent_time + time_shift` in the time_domain, and hence comparing H1 and L1 they are naturally separated by the relative timeshift difference. For GW150914: `time_shift_L1_template = 0.004` and `time_shift_H1_template = 0.011`, consistently with the 7ms offset that was reported by the discovery. However when looking at the ifo data, this 7ms difference is visible BUT the data peaks BEFORE geocentime, as if `time_shift_L1_data = -0.018` and `time_shift_H1_data = -0.011`. I am not sure whether `time_shift_H1_data = -0.011 = -time_shift_H1_template` is a coincidence or the bottom line of the problem... Doing the same exercise with GW170817 if a bit more difficult since it's not visible by eye in the data.
- Since this shift is constant whatever the value of geocent_time, it makes sense that when injecting a signal from a fourier-domain template, then the others match it well during the sampling.
- `[TO DO !]` To determine which gradient should be run on local fit, compute the R^2 at the end of phase1 and if it is < 0.9 they decide to run a local fit with OLUTs.
- How to decide on a good threshold for the coefficient of determination R**2?
- => Take the 9 dimensional case and make a benchmark of multiple phase1 cases recording each time: nb of traj in phase1 used, R**2 for each parameter (using the cubic fit) at the end of phase1, acceptance rate in phase3 after 10000 trajectories, nb of hybrid and numerical traj in phase3
......@@ -453,3 +481,17 @@ d2_other_param_2 = (Matrix_ClosestPoint[other_param_index_2] - q_pos_fit[other_p
- save data so that I don't have to recompute everything all the time
- make goodness of fit plots for 1500, 2000 and 2500 num traj
- write up the detailed plan of the thesis and of a publication
- make run with 1500 num, starting from the one at 1000.
- make goodness of fit plot at the end of phase1 and compare with that of 1000
- run 9000 trajectories in phase3 and compare Acc, atr, htr, ntr, runtimes etc
- from a run with 2500 num traj, make a benchmark of regression plots:
- using from 100 000 to 250 000 fit points
- always fitting the same test set = the last 237 000 points
- compare the R**2 and its evolution
- regression plot with OLUTs
- continue to understand why templates are shifted after geocent when data is before ?
- write detailed plan
- make PhDDay presentation with video of trajectory
- C01 CIT analysis of S190814bv !!
# Runs comparing the influence of the 1000 vs 1500 numerical trajectories in phase1
- With IMRPhenomD on GW170817 injected data
- Comparing two runs that both total 51000 trajectories, but with the 1st one running phase1 for 1000 numerical traj while the 2nd one does it for 1500 traj.
- Bottom line is, surprisingly, the 500 extra numerical trajectories don't seem to make much of a difference for phase3...!
| |Phase1 duration|Nb qpos_fit|Phase2 duration|Acc Phase3 only|Nb of hybrid traj|Nb of num traj|Phase3 duration|Total run time|ACT|Nb of SIS|
|-|-:|-:|-:|-:|-:|-:|-:|-:|-:|-:|
|1000|22.1 hours|179800|147 sec|65.4%|2090 / 50 000|555 / 50 000|13.4 hours|35.6 hours|L=89, mu|574|
|1500|33.1 hours|267800|236 sec|68.5%|1730 / 49 500|514 / 49 500|12.5 hours|45.6 hours|L=95, mu|537|
### Autocorrelation plot of the run with 1000 numerical traj in phase1
![51000_autocorrelations_emcee_1000phase1.png](51000_autocorrelations_emcee_1000phase1.png)
### Autocorrelation plot of the run with 1500 numerical traj in phase1
![51000_autocorrelations_emcee_1500phase1.png](51000_autocorrelations_emcee_1500phase1.png)
No preview for this file type
# How to import real data of GW170817 and create the interferometer, psd and strain_data structures correctly
import sys
# cf https://docs.python.org/3/tutorial/modules.html
sys.path.append('../')
import os
import numpy as np
# import time
# from optparse import OptionParser
# from configparser import ConfigParser
# import subprocess
# import pandas as pd
#
import bilby
#
# from Headers.PN_Coefficients import * # Headers.Constants already imported in PN_Coeff
# import Library.python_utils as pu
# import Library.param_utils as paru
# import Library.roq_utils as roqu
# import Library.BNS_HMC_Tools as bht
# import Library.Fit_NR as fnr
# import Library.CONST as CONST
# import Library.RUN_CONST as RUN_CONST
# import Library.bilby_waveform as bilby_wv
# import Library.bilby_utils as bilby_utils
# import Library.plots as plots
# import Library.likelihood_gradient as lg
# import Library.sklearn_fit as sklf
# import Library.initialization as init
#
# import Codes.set_injection_parameters as set_inj
# import Codes.set_psds as set_psds
# import Codes.logL_snr as logL_snr
# import Codes.dlogL as CdlogL
# import Codes.FIM as FIM
#
import gw.detector.networks as gwdn
# import core.sampler
# import core.utils as cut
import matplotlib.pyplot as plt
import Library.psd_utils as psd_utils
from bilby.gw.detector.strain_data import InterferometerStrainData
# Scratch script: load GW170817 open (GWOSC) data, cut out the analysis segment,
# wrap it in a bilby strain object and attach the GWTC-1 catalog PSD to H1.
if __name__ == '__main__':
    # GWOSC 4 kHz, 4096 s frame files for the three detectors (only H1 is used below).
    hdf5_file_path_H1 = '../.input_data/GW170817/H-H1_GWOSC_4KHZ_R1-1187006835-4096.hdf5'
    hdf5_file_path_L1 = '../.input_data/GW170817/L-L1_GWOSC_4KHZ_R1-1187006835-4096.hdf5'
    hdf5_file_path_V1 = '../.input_data/GW170817/V-V1_GWOSC_4KHZ_R1-1187006835-4096.hdf5'
    hdf5_file_path_list = [hdf5_file_path_H1, hdf5_file_path_L1, hdf5_file_path_V1]
    hdf5_file_path = hdf5_file_path_H1
    hdf5_strain, hdf5_strain_start_time, duration, sampling_frequency, number_of_samples = psd_utils.get_strain_from_hdf5(hdf5_file_path)
    # GW170817 merger GPS time and a 3.5PN estimate of the in-band signal duration (s).
    geocent_time=1187008882.43
    tc_3p5PN = 56.9
    # Integer segment start so indexing into the integer-start frame file is exact.
    flow_start_time = int(geocent_time - tc_3p5PN)
    # Pad the 3.5PN duration: +1 from the int() truncation, +2 s of post-merger margin.
    segment_duration = int(tc_3p5PN) + 1 + 2
    segment_duration2 = int(geocent_time - flow_start_time + 2)
    # Slice the analysis segment out of the full frame.
    idx_segment_start = int((flow_start_time - hdf5_strain_start_time) * sampling_frequency) + 1
    idx_segment_end = int((flow_start_time + segment_duration - hdf5_strain_start_time) * sampling_frequency)
    # idx_segment_end = int((flow_start_time + segment_duration - hdf5_strain_start_time) * sampling_frequency) + 1
    seg_strain = hdf5_strain[idx_segment_start:idx_segment_end+1]
    bilby_strain = InterferometerStrainData(minimum_frequency=30)
    bilby_strain.set_from_time_domain_strain(time_domain_strain=seg_strain, sampling_frequency=sampling_frequency, duration=segment_duration)
    H1 = gwdn.InterferometerList(['H1'])[0]
    H1.strain_data = bilby_strain
    # GWTC-1 catalog PSD file: column 0 is frequency; column 1 is used here as the H1 PSD.
    GWTC1_GW170817_PSDs_file = '../.input_data/GW170817/GWTC1_GW170817_PSDs.dat'
    GWTC1_GW170817_PSDs = np.loadtxt(GWTC1_GW170817_PSDs_file)
    H1.power_spectral_density = bilby.gw.detector.PowerSpectralDensity(psd_array=GWTC1_GW170817_PSDs[:, 1], frequency_array=GWTC1_GW170817_PSDs[:, 0])
    # H1.plot_time_domain_data(bandpass_frequencies=None)
    # Debug leftovers: drop into IPython, then stop — remove before unattended runs.
    import IPython; IPython.embed(); sys.exit()
    breakpoint()
This diff is collapsed.
# Standard python numerical analysis imports:
from scipy import signal
from scipy.interpolate import interp1d
from scipy.signal import butter, filtfilt, iirdesign, zpk2tf, freqz
import h5py
import json
# the IPython magic below must be commented out in the .py file, since it doesn't work there.
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
import sys
sys.path.append('../')
import os
import numpy as np
import time
from optparse import OptionParser
from configparser import ConfigParser
import pandas as pd
import bilby
from Headers.PN_Coefficients import * # Headers.Constants already imported in PN_Coeff
import Library.python_utils as pu
import Library.param_utils as paru
import Library.roq_utils as roqu
import Library.BNS_HMC_Tools as bht
import Library.Fit_NR as fnr
import Library.CONST as CONST
import Library.RUN_CONST as RUN_CONST
import Library.bilby_waveform as bilby_wv
import Library.bilby_utils as bilby_utils
import Library.plots as plots
import Library.likelihood_gradient as lg
import Library.sklearn_fit as sklf
import Library.initialization as init
import Library.psd_utils as psd_utils
import Codes.set_injection_parameters as set_inj
import Codes.set_psds as set_psds
import Codes.logL_snr as logL_snr
import Codes.dlogL as CdlogL
import Codes.FIM as FIM
import gw.detector.networks as gwdn
import core.sampler
import core.utils as cut
# Plotting switches for the tutorial-style figures produced below.
make_plots = 1
plottype = "png"
# GWOSC 4 kHz data: sampling frequency (Hz) and sample spacing (s).
fs = sampling_frequency = 4096
dt = 1 / fs
# GW170817 merger GPS time and 3.5PN estimate of the in-band duration (s) — hard-coded.
geocent_time=1187008882.43
tc_3p5PN = 56.9
# Integer start time (frame files have integer start times, so indices stay exact)
# and padded duration: +1 from the int() truncation, +2 s of post-merger margin.
start_time = int(geocent_time - tc_3p5PN)
duration = int(tc_3p5PN) + 1 + 2
# Band-pass corners in Hz.
fband = [20, 2048]
# function to whiten data
def whiten_tutorial(strain, interp_psd, dt):
    """Whiten a time-domain strain series (GWOSC-tutorial style).

    Parameters
    ----------
    strain: array_like
        Time-domain strain samples.
    interp_psd: callable
        Function mapping an array of frequencies (Hz) to PSD values,
        e.g. a `scipy.interpolate.interp1d` of a one-sided PSD.
    dt: float
        Sample spacing in seconds.

    Returns
    -------
    numpy.ndarray
        The whitened time-domain strain, same length as `strain`.
    """
    Nt = len(strain)
    freqs = np.fft.rfftfreq(Nt, dt)
    # Removed the unused `freqs1 = np.linspace(0, 2048., Nt/2+1)` from the
    # original tutorial: a float `num` raises TypeError on Python 3 / numpy,
    # and the variable was never used.
    # Whitening: transform to freq domain, divide by the ASD, then transform
    # back, taking care to get the normalization right.
    hf = np.fft.rfft(strain)
    norm = 1./np.sqrt(1./(dt*2))
    white_hf = hf / np.sqrt(interp_psd(freqs)) * norm
    white_ht = np.fft.irfft(white_hf, n=Nt)
    return white_ht
# function to whiten data
def whitened_time_domain_strain(ifo):
# norm = np.sqrt(2 / ifo.strain_data.sampling_frequency)
# hf = np.fft.rfft(ifo.strain_data.time_domain_strain)
# # white_hf = hf / ifo.amplitude_spectral_density_array * norm
# white_hf = hf / np.sqrt(ifo.power_spectral_density_array) * norm
# When setting the ifo.frequency_domain_strain for the first time
# bilby applies automatically a window function to the time domain
# strain it uses before applying the FFT
# white_hf = ifo.whitened_frequency_domain_strain * norm
# adhoc_norm = np.sqrt(2 * ifo.strain_data.sampling_frequency)
adhoc_norm = 1
white_hf = ifo.whitened_frequency_domain_strain * adhoc_norm
# white_ht = np.fft.irfft(white_hf, n=ifo.strain_data.time_domain_strain.shape[0]) * ifo.strain_data.sampling_frequency
white_ht = np.fft.irfft(white_hf) * ifo.strain_data.sampling_frequency
return white_ht
def bandpass_time_domain_strain(ifo, strain, fband=None):
    """Band-pass a time-domain strain with a 4th-order Butterworth filter.

    Parameters
    ----------
    ifo: interferometer-like object exposing `strain_data.sampling_frequency`
        and, when `fband` is None, `minimum_frequency` / `maximum_frequency`.
    strain: array_like
        Time-domain strain samples to filter.
    fband: sequence of two floats, optional
        [f_low, f_high] pass band in Hz. Defaults to the ifo's frequency band.
        The caller's sequence is never modified.

    Returns
    -------
    numpy.ndarray
        The filtered strain, divided by sqrt(band fraction of Nyquist) so the
        amplitude is renormalized for the reduced bandwidth.
    """
    if fband is None:
        fband = [ifo.minimum_frequency, ifo.maximum_frequency]
    else:
        # Copy: the clamping below must not mutate the caller's list
        # (the original version edited `fband` in place).
        fband = list(fband)
    fs = ifo.strain_data.sampling_frequency
    # `butter` requires normalized critical frequencies strictly inside (0, 1);
    # clamp degenerate corners to 1% / 99% of Nyquist.
    if fband[0] * 2 / fs <= 0:
        fband[0] = 0.01 * fs / 2
    if fband[1] * 2 / fs >= 1:
        fband[1] = 0.99 * fs / 2
    bb, ab = butter(4, [fband[0] * 2 / fs, fband[1] * 2 / fs], btype='bandpass')
    normalization = np.sqrt((fband[1] - fband[0]) / (fs / 2))
    strain_bp = filtfilt(bb, ab, strain) / normalization
    return strain_bp
if __name__ == '__main__':
usage = """%prog [options]
To be written ..."""
parser=OptionParser(usage)
parser.add_option("--event", default='GW170817', action="store", type="string", help="""Event label part of the catalog GWTC-1""")
parser.add_option("--template_file", default=None, action="store", type="string", help="""File .ini describing the parameters where to start the chain. If left to None and --event used, a default file corresponding to the event will be used.""")
parser.add_option("--trigger_time", default=None, action="store", type="string", help="""Trigger GPS time.""")
parser.add_option("--config_file", default='../Codes/config_default.ini', action="store", type="string", help="""Configuration file path leading to the `.ini` file specifying some key options to run the HMC. The default file is '../Codes/config_default.ini'. Options set in the command line which are redundant with that of the config.ini will (should...) overwrite them.""")
# randGen = np.random.RandomState(seed=2)
# PARSE INPUT ARGUMENTS
(opts, args) = parser.parse_args()
config_parser = ConfigParser()
config_parser.read(opts.config_file)
config_dict = pu.config_parser_to_dict(config_parser)
config_dict_formatted = cut.dictionary_to_formatted_string(config_dict)
geocent_time_gwosc = bilby.gw.utils.get_event_time(opts.event)
# if opts.event == 'GW150914': geocent_time_gwosc = 1126259462.4 + 0.01611328125 # L: +0.015869140625
if opts.event == 'GW150914': geocent_time_gwosc = 1126259462.4
if opts.event == 'GW170817': geocent_time_gwosc = 1187008882.4
# H: +0.039794921875, L: +0.07666015625, V: +25.518310546875
if opts.template_file is None:
template_file = '../examples/trigger_files/' + opts.event + '.ini'
else:
template_file = opts.template_file
template_parameters = set_inj.ini_file_to_dict(template_file)
template_parameters['geocent_time'] = geocent_time_gwosc
ext_analysis_dict = set_inj.compute_extended_analysis_dict(template_parameters['mass_1'], template_parameters['mass_2'], template_parameters['geocent_time'], template_parameters['chirp_mass'], **config_dict['analysis'])
start_time = ext_analysis_dict['start_time']
duration = ext_analysis_dict['duration']
hdf5_file_path_prefix = '../.input_data/' + opts.event
if not(os.path.exists(hdf5_file_path_prefix)):
cut.logger.error(f'The folder given to look for hdf5 strain files of the event: {hdf5_file_path_prefix}, does not exist.')
sys.exit(1)
GWTC1_event_PSDs_file = f'../.input_data/{opts.event}/GWTC1_{opts.event}_PSDs.dat'
GWTC1_event_PSDs = np.loadtxt(GWTC1_event_PSDs_file)
# INITIALIZE THE THREE INTERFEROMETERS
# interferometers = bilby.gw.detector.InterferometerList(ext_analysis_dict['ifos'])
interferometers = gwdn.InterferometerList(ext_analysis_dict['ifos'])
# SET WAVEFORM GENERATOR FOR SNR COMPUTATION
frequency_domain_source_model = bilby.gw.source.lal_binary_neutron_star
parameter_conversion=bilby.gw.conversion.convert_to_lal_binary_neutron_star_parameters
if opts.event == 'GW150914':
frequency_domain_source_model = bilby.gw.source.lal_binary_black_hole
parameter_conversion=bilby.gw.conversion.convert_to_lal_binary_black_hole_parameters
waveform_arguments = {}
waveform_arguments['minimum_frequency'] = ext_analysis_dict['minimum_frequency']
waveform_arguments['waveform_approximant'] = ext_analysis_dict['approximant']