Commit 4bdc0bc3 authored by Enrique Garcia

Fix calibration in base structure

parent d7783846
@@ -4,199 +4,222 @@
import tables
import argparse
import numpy as np
from ctapipe_io_lst import load_camera_geometry
# from ctapipe.instrument import CameraGeometry
class TelescopeInformation(tables.IsDescription):
"""
Class to create telescope information within r0/monitoring/telescope/information
"""
nb_pixel = tables.UInt64Col()
nb_gain = tables.UInt64Col()
nb_slice = tables.UInt64Col()
tel_index = tables.UInt64Col()
tel_id = tables.UInt64Col()
tel_type = tables.UInt64Col()
"""
Class to create telescope information within r0/monitoring/telescope/information
"""
nb_pixel = tables.UInt64Col()
nb_gain = tables.UInt64Col()
nb_slice = tables.UInt64Col()
tel_index = tables.UInt64Col()
tel_id = tables.UInt64Col()
tel_type = tables.UInt64Col()
class CameraGeometry(tables.IsDescription):
"""
Camera geometry
"""
pix_id = tables.UInt64Col()
pix_x = tables.Float32Col()
pix_y = tables.Float32Col()
pix_area = tables.Float32Col()
def update_calibration_data(hfile_out, gain_and_pedestal):
"""
Reads and copies to the output file the gain, the pedestal and the unused pixels.
It **MUST** be the same calibration file as the file to be processed by the stream.
f.eg:
- /fefs/aswg/data/real/calibration/20201008/v05/calibration.Run2833.0000.hdf5
- LST-1.1.Run2833.0000.fits.fz
param hfile_out : obj
hdf5 output file to be created
gain_and_pedestal : str
Path to the calibration file to be read and copied
"""
hfile_gain_ped = tables.open_file(gain_and_pedestal, 'r')
# Extract the arrays to copy (each is a table with a single, unnamed column)
dc_pe = np.asarray(hfile_gain_ped.root.tel_1.calibration.col('dc_to_pe'), dtype=np.float32)[0]
ped = np.asarray(hfile_gain_ped.root.tel_1.pedestal.col('charge_std'), dtype=np.float32)[0]
unusable_pix = np.asarray(hfile_gain_ped.root.tel_1.calibration.col('unusable_pixels'), dtype=np.float32)[0]
# Create groups in output file
dc_pe_group = hfile_out.create_group('/r0/monitoring/telescope', 'gain', createparents=True)
nonused_pix_group = hfile_out.create_group('/r0/monitoring/telescope', 'unusable_pixels', createparents=True)
# Copy the arrays (as PyTables arrays) into the output file
hfile_out.create_array(dc_pe_group, 'tel_001', dc_pe, 'Gain of the camera.')
hfile_out.create_array(nonused_pix_group, 'tel_001', unusable_pix, 'Unused pixels.')
# Create the telescope information table (way 1: use the IsDescription class defined above).
info_group = hfile_out.create_group('/r0/monitoring/telescope', 'information', createparents=True)
table_info_group = hfile_out.create_table(info_group, 'tel_001', TelescopeInformation, 'Telescope Information')
# Then fill in the values row by row. Do not forget the .append()!
row = table_info_group.row
row['nb_pixel'] = ped.shape[1]
row['nb_gain'] = ped.shape[0]
row['nb_slice'] = 40  # TODO: read from a configuration file once more telescopes are handled
row['tel_index'] = 0  # TODO: read from a configuration file once more telescopes are handled
row['tel_id'] = 1  # TODO: read from a configuration file once more telescopes are handled
row['tel_type'] = 0  # TODO: read from a configuration file once more telescopes are handled
row.append()
# Create the pedestal table (way 2: build the description from a dictionary).
ped_group = hfile_out.create_group('/r0/monitoring/telescope', 'pedestal', createparents=True)  # same group as before
columns_dict_pedestal = {
"first_event_id": tables.UInt64Col(),
"last_event_id": tables.UInt64Col(),
"pedestal": tables.Float32Col(shape=ped.shape)
}
# magic to create a description from a dictionary
description_pedestal = type('description columns_dict_pedestal', (tables.IsDescription,), columns_dict_pedestal)
table_pedestal = hfile_out.create_table(ped_group, 'tel_001', description_pedestal,
"Table of the pedestal for high and low gain", expectedrows=1, chunkshape=1)
# and lastly, add the row as always
tab_ped_for_entry = table_pedestal.row
tab_ped_for_entry["first_event_id"] = np.uint64(0)
tab_ped_for_entry["last_event_id"] = np.uint64(1)
tab_ped_for_entry["pedestal"] = ped
tab_ped_for_entry.append()
hfile_gain_ped.close()
"""
Camera geometry
"""
pix_id = tables.UInt64Col()
pix_x = tables.Float32Col()
pix_y = tables.Float32Col()
pix_area = tables.Float32Col()
def update_calibration_data(hfile_out, gain_and_pedestal, sample_offset):
"""
Reads and copies to the output file the gain, the pedestal and the unused pixels.
It **MUST** be the same calibration file as the file to be processed by the stream.
f.eg:
- /fefs/aswg/data/real/calibration/20201008/v05/calibration.Run2833.0000.hdf5
- LST-1.1.Run2833.0000.fits.fz
param hfile_out : obj
hdf5 output file to be created
gain_and_pedestal : str
Path to the calibration file to be read and copied
sample_offset : float
Offset of the pedestal we have to use for some nonsense reason
"""
hfile_gain_ped = tables.open_file(gain_and_pedestal, 'r')
# Extract the arrays to copy (each is a table with a single, unnamed column)
dc_pe = np.asarray(hfile_gain_ped.root.tel_1.calibration.col('dc_to_pe'), dtype=np.float32)[0]
ped = np.asarray(hfile_gain_ped.root.tel_1.calibration.col('pedestal_per_sample'), dtype=np.float32)[0]
unusable_pix = np.asarray(hfile_gain_ped.root.tel_1.calibration.col('unusable_pixels'), dtype=np.float32)[0]
ped += sample_offset
# Create groups in output file
dc_pe_group = hfile_out.create_group('/r0/monitoring/telescope', 'gain', createparents=True)
nonused_pix_group = hfile_out.create_group('/r0/monitoring/telescope', 'unusable_pixels', createparents=True)
# Copy the arrays (as PyTables arrays) into the output file
hfile_out.create_array(dc_pe_group, 'tel_001', dc_pe, 'Gain of the camera.')
hfile_out.create_array(nonused_pix_group, 'tel_001', unusable_pix, 'Unused pixels.')
# Create the telescope information table (way 1: use the IsDescription class defined above).
info_group = hfile_out.create_group('/r0/monitoring/telescope', 'information', createparents=True)
table_info_group = hfile_out.create_table(info_group, 'tel_001', TelescopeInformation, 'Telescope Information')
# Then fill in the values row by row. Do not forget the .append()!
row = table_info_group.row
row['nb_pixel'] = ped.shape[1]
row['nb_gain'] = ped.shape[0]
row['nb_slice'] = 40  # TODO: read from a configuration file once more telescopes are handled
row['tel_index'] = 0  # TODO: read from a configuration file once more telescopes are handled
row['tel_id'] = 1  # TODO: read from a configuration file once more telescopes are handled
row['tel_type'] = 0  # TODO: read from a configuration file once more telescopes are handled
row.append()
# Create the pedestal table (way 2: build the description from a dictionary).
ped_group = hfile_out.create_group('/r0/monitoring/telescope', 'pedestal', createparents=True)  # same group as before
columns_dict_pedestal = {
"first_event_id": tables.UInt64Col(),
"last_event_id": tables.UInt64Col(),
"pedestal": tables.Float32Col(shape=ped.shape)
}
# magic to create a description from a dictionary
description_pedestal = type('description columns_dict_pedestal', (tables.IsDescription,), columns_dict_pedestal)
table_pedestal = hfile_out.create_table(ped_group, 'tel_001', description_pedestal,
"Table of the pedestal for high and low gain", expectedrows=1, chunkshape=1)
# and lastly, add the row as always
tab_ped_for_entry = table_pedestal.row
tab_ped_for_entry["first_event_id"] = np.uint64(0)
tab_ped_for_entry["last_event_id"] = np.uint64(1)
tab_ped_for_entry["pedestal"] = ped
tab_ped_for_entry.append()
hfile_gain_ped.close()
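# Illustrative sketch (not part of the module): how update_calibration_data() is meant to be
# used on a freshly created output file; the calibration path passed in is hypothetical.
def _example_update_calibration(output_path, calibration_path, sample_offset=400):
    """Create an output file and copy the gain, pedestal and unusable pixels into it."""
    hout = tables.open_file(output_path, 'w')
    update_calibration_data(hout, calibration_path, sample_offset)
    hout.close()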
def update_pixel_order(hfile_out, pix_order):
"""
Copies to the final h5 output file the pixel order of the camera (the injection table: position x
in the array corresponds to a given pixel in the camera).
Once the module is installed, this file will ALWAYS be at
~/miniconda3/envs/CONDA_ENV_NAME/share/HiPeRTA/LST-1.1.Run00442.0000_pixel_order_bin.npy
:param hfile_out: obj
hdf5 output file to be created
:param pix_order: str
Path to the file that contains the injection table
"""
table_pix_order = np.fromfile(pix_order, dtype=np.uint16)
# To update the table, remove the existing node...
pix_order_group = hfile_out.root.configuration.instrument.telescope.camera.pixel_order
pix_order_table = pix_order_group.tel_001
hfile_out.remove_node(pix_order_table)
# ... and recreate the array at the same place
hfile_out.create_array(pix_order_group, 'tel_001', table_pix_order, 'Pixel order of the Camera.')
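# Illustrative sketch (assumption, not part of the commit): if the injection table maps the
# position of a pixel in the raw data stream to its camera pixel id, a block of waveforms could
# be rearranged into camera-pixel order with NumPy fancy indexing.
def _example_apply_pixel_order(raw_waveforms, table_pix_order):
    """Return the waveforms indexed by camera pixel instead of stream position (illustrative only)."""
    ordered = np.empty_like(raw_waveforms)
    ordered[table_pix_order] = raw_waveforms
    return ordered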
def update_camera_geometry(hfile_out):
"""
Update the camera geometry with the correct one taken from ctapipe_io_lst
:param hfile_out: obj
hdf5 output file to be created
"""
geometry = load_camera_geometry()
lstcam_table = hfile_out.root.configuration.instrument.telescope.camera.geometry_LSTCam
hfile_out.remove_node(lstcam_table)
# Remember to pass the parent group !!
new_lstcam_table = hfile_out.create_table(
hfile_out.root.configuration.instrument.telescope.camera, "geometry_LSTCam", CameraGeometry,
"LST camera geometry", expectedrows=geometry.n_pixels
)
new_lstcam_row = new_lstcam_table.row
for pix_area, pix_id, pix_x, pix_y in zip(geometry.pix_area.value, geometry.pix_id, geometry.pix_x.value,
geometry.pix_y.value):
new_lstcam_row['pix_area'] = pix_area
new_lstcam_row['pix_id'] = pix_id
new_lstcam_row['pix_x'] = pix_x
new_lstcam_row['pix_y'] = pix_y
new_lstcam_row.append()
"""
Update the camera geometry with the correct one taken from ctapipe_io_lst
:param hfile_out: obj
hdf5 output file to be created
"""
try:
# Preferred path: load the official LST camera geometry from ctapipe_io_lst
from ctapipe_io_lst import load_camera_geometry
geometry = load_camera_geometry()
tabX = geometry.pix_x.value
tabY = geometry.pix_y.value
tabId = geometry.pix_id
tabArea = geometry.pix_area.value
nbPixel = geometry.n_pixels
except (ImportError, AttributeError):
# Fallback: use the camera description files shipped with hiperta_stream
from hiperta_stream.dataset import get
tabX = np.fromfile(get('lst_proto_cam_pix_x.bin'), dtype=np.float32)
tabY = np.fromfile(get('lst_proto_cam_pix_y.bin'), dtype=np.float32)
tabId = np.fromfile(get('lst_proto_cam_pix_id.bin'), dtype=np.uint16)
tabArea = np.fromfile(get('lst_proto_cam_pix_area.bin'), dtype=np.float32)
nbPixel = tabArea.size
lstcam_table = hfile_out.root.configuration.instrument.telescope.camera.geometry_LSTCam
hfile_out.remove_node(lstcam_table)
# Remember to pass the parent group !!
new_lstcam_table = hfile_out.create_table(
hfile_out.root.configuration.instrument.telescope.camera, "geometry_LSTCam", CameraGeometry,
"LST camera geometry", expectedrows=nbPixel
)
new_lstcam_row = new_lstcam_table.row
for pix_area, pix_id, pix_x, pix_y in zip(tabArea, tabId, tabX, tabY):
new_lstcam_row['pix_area'] = pix_area
new_lstcam_row['pix_id'] = pix_id
new_lstcam_row['pix_x'] = pix_x
new_lstcam_row['pix_y'] = pix_y
new_lstcam_row.append()
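# Illustrative sanity check (not part of the commit): once the output file has been written and
# closed, the rewritten table can be read back and its row count compared with the 1855 pixels
# of the LST camera.
def _example_check_geometry(h5_path, expected_n_pixels=1855):
    """Return True if the written geometry table holds the expected number of pixels."""
    with tables.open_file(h5_path, 'r') as hfile:
        table = hfile.root.configuration.instrument.telescope.camera.geometry_LSTCam
        return table.nrows == expected_n_pixels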
def main():
parser = argparse.ArgumentParser(description="Create hdf5 configuration files for hiperta_stream ")
parser.add_argument('--config_file', '-c', action='store', type=str,
dest='config_file',
help='Path to the file that contains the /configuration node (gain, pedestal, pix position, '
'geometry ...). It should be stored at HiPeRTA/shared/HiPeRTA, e.g. '
'$CONDA_PREFIX/share/HiPeRTA/default_configuration.h5',
default=None
)
parser.add_argument('--gain_pedestal', '-g', action='store', type=str,
dest='gain_pedestal',
help='Path to the gain and pedestal hdf5 calibration file.',
default=None
)
parser.add_argument('--pixel_order', '-p', action='store', type=str,
dest='pixel_order',
help='Path to the file that contains the pixel injection table. It should be stored at '
'HiPeRTA/shared/HiPeRTA. '
'$CONDA_PREFIX/share/HiPeRTA/LST-1.1.Run00442.0000_pixel_order_bin.npy',
default=None
)
parser.add_argument('--output_file', '-o', action='store', type=str,
dest='output_file',
help='Output filename.',
default='./base_structure_hdf5.h5'
)
args = parser.parse_args()
hout = tables.open_file(args.output_file, 'w')
hin = tables.open_file(args.config_file, 'r')
# Copy the configuration node
hout.copy_node(hin.root.configuration, newparent=hout.root, recursive=True)
hin.close()
update_calibration_data(hout, args.gain_pedestal)
update_pixel_order(hout, args.pixel_order)
update_camera_geometry(hout)
hout.close()
parser = argparse.ArgumentParser(description="Create hdf5 configuration files for hiperta_stream ")
parser.add_argument('--config_file', '-c', action='store', type=str,
dest='config_file',
help='Path to the file that contains the /configuration node (gain, pedestal, pix position, '
'geometry ...). It should be stored at HiPeRTA/shared/HiPeRTA, e.g. '
'$CONDA_PREFIX/share/HiPeRTA/default_configuration.h5',
default=None
)
parser.add_argument('--gain_pedestal', '-g', action='store', type=str,
dest='gain_pedestal',
help='Path to the gain and pedestal hdf5 calibration file.',
default=None
)
parser.add_argument('--pixel_order', '-p', action='store', type=str,
dest='pixel_order',
help='Path to the file that contains the pixel injection table. It should be stored at '
'HiPeRTA/shared/HiPeRTA. '
'$CONDA_PREFIX/share/HiPeRTA/LST-1.1.Run00442.0000_pixel_order_bin.npy',
default=None
)
parser.add_argument('--output_file', '-o', action='store', type=str,
dest='output_file',
help='Output filename.',
default='./base_structure_hdf5.h5'
)
parser.add_argument('--offset', '-s', action='store', type=float,
dest='sample_offset',
help='Offset of the pedestal.',
required=False,
default=400
)
args = parser.parse_args()
hout = tables.open_file(args.output_file, 'w')
hin = tables.open_file(args.config_file, 'r')
# Copy the configuration node
hout.copy_node(hin.root.configuration, newparent=hout.root, recursive=True)
hin.close()
update_calibration_data(hout, args.gain_pedestal, args.sample_offset)
update_pixel_order(hout, args.pixel_order)
update_camera_geometry(hout)
hout.close()
if __name__ == '__main__':
main()
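# Example invocation of the console entry point defined in setup.py; the calibration file name
# is hypothetical, the other paths reuse the defaults quoted in the help strings above:
#   stream_create_base_config -c $CONDA_PREFIX/share/HiPeRTA/default_configuration.h5 \
#       -g calibration.Run2833.0000.hdf5 \
#       -p $CONDA_PREFIX/share/HiPeRTA/LST-1.1.Run00442.0000_pixel_order_bin.npy \
#       -o base_structure_hdf5.h5 -s 400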
import pkg_resources
__all__ = ['get']
def get(resource_name):
""" get the filename for a resource """
if not pkg_resources.resource_exists(__name__, resource_name):
raise FileNotFoundError("Couldn't find resource: '{}'"
.format(resource_name))
return pkg_resources.resource_filename(__name__, resource_name)
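# Illustrative usage (not part of the commit): the bundled LST camera description used as a
# fallback by update_camera_geometry() can be located and loaded like this.
def _example_load_bundled_pix_x():
    """Return the x positions of the bundled LST prototype camera pixels."""
    import numpy as np
    return np.fromfile(get('lst_proto_cam_pix_x.bin'), dtype=np.float32)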
# some helper attributes
#gamma_test_file = get('gamma_test.simtel.gz')
# a larger test file, from prod3. original name was
# gamma_20deg_0deg_run7514___cta-prod3_desert-2150m-Paranal-HB9-FA_cone10.simtel.gz
#test_events_file = get('gamma_test_large.simtel.gz')
"""
Author : Pierre Aubert
Mail : aubertp7@gmail.com
Licence : CeCILL-C
"""
import os
import tables
import argparse
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
def plotCalibEvent(pdf, tabImage, varName, nbbin):
'''
Plot the distribution of a DL1 image column
Parameters:
pdf : pdf file to be written
tabImage : table of DL1 images
varName : name of the column to be plotted
nbbin : number of bins of the histograms
'''
fig = plt.figure(figsize=(16, 10))
try:
tabValue = tabImage.col(varName)
tabValue = tabValue.ravel()
strValue = varName + ", size = " + str(tabValue.size) + ", Min : " + str(tabValue.min()) + ", max : " + str(tabValue.max()) + ", mean : " + str(tabValue.mean()) + ", std : " + str(tabValue.std())
print(strValue)
plt.hist(tabValue, bins=nbbin, alpha=1.0)
plt.xlabel(varName)
plt.title("Signal "+varName)
plt.ylabel("nb")
plt.yscale("log")
plt.grid()
pdf.savefig() # saves the current figure into a pdf page
plt.close()
except KeyError:
pass
def plotParam(pdf, tabParam, tabIsGoodEvent, varName, nbbin):
'''
Plot the distribution of one event parameter
Parameters:
pdf : pdf file to be written
tabParam : table of parameters
tabIsGoodEvent : boolean mask selecting the events reconstructed by the analysis
varName : name of the parameter to be plotted
nbbin : number of bins of the histograms
'''
fig = plt.figure(figsize=(16, 10))
try:
tabValue = tabParam.col(varName)[tabIsGoodEvent]
strValue = varName + ", size = " + str(tabValue.size) + ", Min : " + str(tabValue.min()) + ", max : " + str(tabValue.max()) + ", mean : " + str(tabValue.mean()) + ", std : " + str(tabValue.std())
print(strValue)
plt.hist(tabValue, bins=nbbin, alpha=1.0)
plt.xlabel(varName)
plt.title("Parameter "+varName)
plt.ylabel("nb")
plt.yscale("log")
plt.grid()
pdf.savefig() # saves the current figure into a pdf page
plt.close()
except KeyError:
pass
def processAllFile(outputPlotFile, inputFile, nbBin):
'''
Plot the image and parameter distributions of one DL1 file
Parameters:
outputPlotFile : name of the output plot file
inputFile : input DL1 file
nbBin : number of bins of the plots
'''
hfile = tables.open_file(inputFile, mode="r")
tabCalibImage = hfile.root.dl1.event.telescope.images.tel_001
tabParam = hfile.root.dl1.event.telescope.parameters.tel_001
listColName = tabParam.colnames
listColName.remove("is_good_event")
tabIsGoodEvent = tabParam.col("is_good_event") > 0
with PdfPages(outputPlotFile) as pdf:
plotCalibEvent(pdf, tabCalibImage, "image", nbBin)
for varName in listColName:
plotParam(pdf, tabParam, tabIsGoodEvent, varName, nbBin)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input', type=str, dest='input_file', help="input HDF5 DL1 file", required=True)
parser.add_argument('-o', '--output', type=str, dest='output', help="output DL1 plot", default='output_dl1_plot.pdf')
parser.add_argument('-b', '--nbbin', type=int, dest='nb_bin', help="number of bins of the plot", default=100)
args = parser.parse_args()
processAllFile(args.output, args.input_file, args.nb_bin)
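# Illustrative usage (not part of the commit): the same plots can also be produced
# programmatically, here for a hypothetical DL1 file name.
def _example_plot_dl1(dl1_file='dl1_run.h5', plot_file='output_dl1_plot.pdf', nb_bin=100):
    """Histogram the image and parameter distributions of one DL1 file into a pdf."""
    processAllFile(plot_file, dl1_file, nb_bin)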
if __name__ == '__main__':
main()
#!/usr/bin/env python
from setuptools import find_packages, setup
entry_points = {
'console_scripts': [
'hiperta_stream_start = hiperta_stream.hiperta_stream_start:main',
'hiperta_dl1_to_dl2 = hiperta_stream.rta_dl1_to_dl2:main',
'stream_get_zfits_pix_order = hiperta_stream.get_zfits_pixel_order:main',
'stream_create_base_config = hiperta_stream.create_base_hdf5_config_structure_for_hiperta_stream:main',
'stream_log_summary = hiperta_stream.create_log_summary:main'
]}
'console_scripts': [
'hiperta_stream_start = hiperta_stream.hiperta_stream_start:main',
'hiperta_dl1_to_dl2 = hiperta_stream.rta_dl1_to_dl2:main',
'stream_get_zfits_pix_order = hiperta_stream.get_zfits_pixel_order:main',
'stream_create_base_config = hiperta_stream.create_base_hdf5_config_structure_for_hiperta_stream:main',
'stream_log_summary = hiperta_stream.create_log_summary:main',
'stream_plot_dl1 = hiperta_stream.stream_plot_dl1:main'
]}
setup(
name='hiperta_stream',
version='0.1',
description="Python packages to launch and manage the C++ HiPeRTA library.",
packages=find_packages(),
package_data={'hiperta_stream_config': ['config_hiperta_stream.yml']},
author='P. Aubert, E. Garcia',
author_email='paubert<at>lapp.in2p3.fr',
license='CeCILL-C',
url='https://gitlab.in2p3.fr/CTA-LAPP/rta/HiPeRTA_Stream',
entry_points=entry_points,
install_requires=[
'numpy',
'pyyaml',
'tables'
],
name='hiperta_stream',
version='0.1',
description="Python packages to launch and manage the C++ HiPeRTA library.",
packages=find_packages(),