Commit e140acab authored by Mikhail Smirnov

Merge branch 'another-dev' into 'master'

Add CICD for test

See merge request !7
parents e0a033e3 6c3b0f51
Pipeline #219738 passed
@@ -8,3 +8,8 @@ __pycache__
/notebooks/test_edisp.fits
/notebooks/IRF_combined.fits
/results
/.vscode
/reana_analysis/test_aeff.fits
/reana_analysis/test_psf.fits
/reana_analysis/test_edisp.fits
/reana_analysis/IRF_combined.fits
\ No newline at end of file
image: python:latest

# Change pip's cache directory to be inside the project directory since we can
# only cache local items.
variables:
  PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"

# Pip's cache doesn't store the python packages
# https://pip.pypa.io/en/stable/reference/pip_install/#caching
cache:
  paths:
    - .cache/pip

stages:
  - test

.virtualenv_template: &virtualenv_definition |
  python -V
  python3 -m venv venv
  source venv/bin/activate
  pip install -U pip setuptools wheel setuptools_scm
  pip install pytest

test-python3.8:
  stage: test
  image: python:3.8-slim
  script:
    - *virtualenv_definition
    - pytest ./tests/test_general.py

test-python3.9:
  stage: test
  image: python:3.9-slim
  script:
    - *virtualenv_definition
    - pytest ./tests/test_general.py

# before_script:
#   - python -V  # Print out python version for debugging
#   - pip install virtualenv
#   - virtualenv venv
#   - source venv/bin/activate

# test-python3.8:
#   script:
#     - pip install pytest
#     - pytest ./tests/test_general.py
#     - pip install flake8
#     - flake8 .
@@ -1077,7 +1077,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -1091,7 +1091,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.10"
"version": "3.9.9"
},
"rucio_attachments": [
{
@@ -7,6 +7,7 @@ import astropy.coordinates as ac
from astropy.time import Time
import astropy.units as u
def calc_theta(table, mc=True):
    """
    Calculate the zenith angle theta of events given the direction coordinates x,y,z
@@ -69,7 +70,15 @@ def edisp_3D(e_bins, m_bins, t_bins, dataset, weights=1):
    energy_bins = pd.cut(dataset.energy_mc, e_bins, labels=False).to_numpy()
    migra_bins = pd.cut(dataset.migra, m_bins, labels=False).to_numpy()
    edisp = fill_edisp_3D(e_bins, m_bins, t_bins, energy_bins, migra_bins, theta_bins, weights)
    edisp = fill_edisp_3D(
        e_bins,
        m_bins,
        t_bins,
        energy_bins,
        migra_bins,
        theta_bins,
        weights
    )
    return edisp
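fill_edisp_3D itself does not appear in this diff; as a rough, hypothetical sketch (the real implementation in python_scripts/irf_utils.py may differ), a weighted 3-D histogram fill over the pd.cut bin indices could look like this:

import numpy as np

def fill_hist_3d(e_bins, m_bins, t_bins, e_idx, m_idx, t_idx, weights=1):
    # Accumulate (optionally weighted) event counts into a 3-D histogram.
    # Assumes the index arrays are integers, with out-of-range events
    # (NaNs from pd.cut) already removed.
    hist = np.zeros((len(e_bins) - 1, len(m_bins) - 1, len(t_bins) - 1))
    np.add.at(hist, (e_idx, m_idx, t_idx), weights)
    return hist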
# Script for REANA; most of it is taken from the jupyter notebook
import numpy as np
# import awkward as ak
import pandas as pd
from km3io import OfflineReader
# import matplotlib.pyplot as plt
# from matplotlib.colors import LogNorm
# from astropy.io import fits
# import astropy.units as u
# from gammapy.irf import EnergyDispersion2D
from scipy.stats import binned_statistic
from scipy.ndimage import gaussian_filter1d, gaussian_filter
import uproot as ur
# from collections import defaultdict
from os import path
from km3irf import utils
import sys
from datetime import datetime
start_time = datetime.now()
sys.path.append("../")
from python_scripts.irf_utils import aeff_2D, psf_3D, edisp_3D
from python_scripts.func import get_cut_mask
from python_scripts.func import WriteAeff, WritePSF, WriteEdisp
# create path for data dst files
# data_path = "../../some_data/files_cta_km3net"
data_path = "../data"
# data_path = "/run/media/msmirnov/DATA2/data_files/IRF_data_create"
# normal data with bdt
filename_nu = path.join(data_path, "mcv5.1.km3_numuCC.ALL.dst.bdt.root")
filename_nubar = path.join(data_path, "mcv5.1.km3_anumuCC.ALL.dst.bdt.root")
# filename_nu = "data/IRF_data_create/mcv5.1.km3_numuCC.ALL.dst.bdt.root"
# filename_nubar = "data/IRF_data_create/mcv5.1.km3_anumuCC.ALL.dst.bdt.root"
no_bdt = False
# Read data files using km3io (tracks, MC tracks and the header)
f_nu_km3io = OfflineReader(filename_nu)
f_nubar_km3io = OfflineReader(filename_nubar)
# Read the same files using uproot (used below to access the bdt branch)
f_nu_uproot = ur.open(filename_nu)
f_nubar_uproot = ur.open(filename_nubar)
# Access data arrays; [:, 0] takes the first track of each event
data_km3io = dict()
for l, f in zip(["nu", "nubar"], [f_nu_km3io, f_nubar_km3io]):
    data_km3io[l] = dict()
    data_km3io[l]["E"] = f.tracks.E[:, 0]
    data_km3io[l]["dir_x"] = f.tracks.dir_x[:, 0]
    data_km3io[l]["dir_y"] = f.tracks.dir_y[:, 0]
    data_km3io[l]["dir_z"] = f.tracks.dir_z[:, 0]
    data_km3io[l]["energy_mc"] = f.mc_tracks.E[:, 0]
    data_km3io[l]["dir_x_mc"] = f.mc_tracks.dir_x[:, 0]
    data_km3io[l]["dir_y_mc"] = f.mc_tracks.dir_y[:, 0]
    data_km3io[l]["dir_z_mc"] = f.mc_tracks.dir_z[:, 0]
    data_km3io[l]["weight_w2"] = f.w[:, 1]
# extract the BDT information
if not no_bdt:
    for l, f in zip(["nu", "nubar"], [f_nu_uproot, f_nubar_uproot]):
        T = f["T;1"]
        bdt = T["bdt"].array()
        data_km3io[l]["bdt0"] = bdt[:, 0]
        data_km3io[l]["bdt1"] = bdt[:, 1]
# awk_nu = ak.Array(data_km3io['nu'])
# awk_nubar = ak.Array(data_km3io['nubar'])
# create Data Frames
df_nu = pd.DataFrame(data_km3io["nu"])
df_nubar = pd.DataFrame(data_km3io["nubar"])
# Cut definitions
# bdt0: determines the group to which the BDT cut is applied (upgoing/horizontal/downgoing)
# bdt1: BDT score in the range [-1, 1]; closer to 1 means more signal-like
# dir_z: the reconstructed z-direction of the event
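# The actual get_cut_mask lives in python_scripts/func.py and is not shown in
# this diff; the commented sketch below only illustrates how such a mask could
# combine the three quantities (the threshold and grouping are made up):
# def get_cut_mask_sketch(bdt0, bdt1, dir_z):
#     upgoing = dir_z < 0       # zenith selection on the reconstructed direction
#     signal_like = bdt1 > 0.0  # made-up BDT threshold; the real function would
#                               # pick a per-group threshold based on bdt0
#     return (upgoing & signal_like).to_numpy()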
# Apply the cuts
# nu
mask_nu = get_cut_mask(df_nu.bdt0, df_nu.bdt1, df_nu.dir_z)
df_nu_cut = df_nu[mask_nu].copy()
# nubar
mask_nubar = get_cut_mask(df_nubar.bdt0, df_nubar.bdt1, df_nubar.dir_z)
df_nubar_cut = df_nubar[mask_nubar].copy()
# calculate the normalized weight factor for each event
weight_factor = -2.5 # Spectral index to re-weight to
# usually alpha is the same for nu and nubar in MC simulations
alpha_value = f_nu_km3io.header.spectrum.alpha
weights = dict()
for l, df in zip(["nu", "nubar"], [df_nu_cut, df_nubar_cut]):
    weights[l] = (df.energy_mc ** (weight_factor - alpha_value)).to_numpy()
    weights[l] *= len(df) / weights[l].sum()
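# Quick sanity check: the rescaling above makes each sample's weights average
# to one, so the re-weighting changes only the spectral shape, not the counts.
assert np.isclose(weights["nu"].mean(), 1.0)
assert np.isclose(weights["nubar"].mean(), 1.0)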
# Create DataFrames with neutrinos and anti-neutrinos
df_nu_all = pd.concat([df_nu, df_nubar], ignore_index=True)
df_nu_all_cut = pd.concat([df_nu_cut, df_nubar_cut], ignore_index=True)
# Also create a concatenated array for the weights
weights_all = np.concatenate([weights["nu"], weights["nubar"]])
# Define bins for Aeff
cos_bins_fine = np.linspace(1, -1, 13)
t_bins_fine = np.arccos(cos_bins_fine)
e_bins_fine = np.logspace(2, 8, 49)
# Bin centers
e_binc_fine = np.sqrt(e_bins_fine[:-1] * e_bins_fine[1:])
t_binc_fine = np.arccos(0.5 * (cos_bins_fine[:-1] + cos_bins_fine[1:]))
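# The energy bin centres are geometric means, i.e. the midpoints of the
# log-spaced edges in log space:
assert np.allclose(
    np.log10(e_binc_fine),
    0.5 * (np.log10(e_bins_fine[:-1]) + np.log10(e_bins_fine[1:])),
)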
# Fill histograms for effective area with cuts
aeff_all_cut = (
    aeff_2D(
        e_bins=e_bins_fine,
        t_bins=t_bins_fine,
        dataset=df_nu_all_cut,
        gamma=(-weight_factor),
        nevents=f_nu_km3io.header.genvol.numberOfEvents
        + f_nubar_km3io.header.genvol.numberOfEvents,
    )
    * 2
)  # two building blocks for ARCA
# Write aeff_2D to a fits file
new_aeff_file = WriteAeff(
    e_binc_fine, e_bins_fine, t_binc_fine, t_bins_fine, aeff_T=aeff_all_cut
)
new_aeff_file.to_fits(file_name="test_aeff.fits")
finish_time = datetime.now()
print("Duration: {}".format(finish_time - start_time))
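The script writes its effective-area histogram to test_aeff.fits, which the REANA workflows below collect as results/test_aeff.fits. A quick, optional way to inspect the output (assuming astropy is installed; not part of the workflow itself) is:

from astropy.io import fits

with fits.open("test_aeff.fits") as hdul:
    hdul.info()  # list the HDUs of the newly written effective-area file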
---
version: 0.8.1
inputs:
  # directories:
  #   - data
  files:
    # - data/aeff.fits
    - /run/media/msmirnov/DATA2/data_files/IRF_data_create/mcv5.1.km3_anumuCC.ALL.dst.bdt.root
    - /run/media/msmirnov/DATA2/data_files/IRF_data_create/mcv5.1.km3_numuCC.ALL.dst.bdt.root
    - reana_analysis/production_of_fits_WF.py
    - python_scripts/__init__.py
    - python_scripts/func.py
    - python_scripts/irf_utils.py
    - python_scripts/plot_utils.py
  parameters:
    main: production_of_fits_WF.py
    path: reana_analysis
    data_nu: mcv5.1.km3_numuCC.ALL.dst.bdt.root
    data_anu: mcv5.1.km3_anumuCC.ALL.dst.bdt.root
    # data: data/aeff.fits
    # plot: results/plot.png
workflow:
  type: serial
  specification:
    steps:
      # - environment: 'gitlab-registry.in2p3.fr/escape2020/virtual-environment/docker-images/km3net-irfs:latest'
      - environment: 'gear8mike/test-repo:km3irf'
        kubernetes_memory_limit: '1000Mi'
        commands:
          - mkdir data
          - mv run/media/msmirnov/DATA2/data_files/IRF_data_create/mcv5.1.km3_anumuCC.ALL.dst.bdt.root ./data
          - mv run/media/msmirnov/DATA2/data_files/IRF_data_create/mcv5.1.km3_numuCC.ALL.dst.bdt.root ./data
          # - mv "${data_nu}" ./data && mv "${data_anu}" ./data
          - cd "${path}" && python "${main}"
          # - cd reana_analysis && python plot_from_fits.py
          #     --data "${data}"
          #     --plot "${plot}"
outputs:
  files:
    - results/test_aeff.fits
---
version: 0.8.1
inputs:
  files:
    - reana_analysis/production_of_fits_WF.py
    - python_scripts/__init__.py
    - python_scripts/func.py
    - python_scripts/irf_utils.py
    - python_scripts/plot_utils.py
  parameters:
    main: production_of_fits_WF.py
    path: reana_analysis
workflow:
  type: serial
  specification:
    steps:
      - name: fetchdata
        voms_proxy: true
        rucio: true
        environment: 'projectescape/rucio-client'
        commands:
          - rucio get "KM3NET_ECAP_MS:mcv5.1.km3_anumuCC.ALL.dst.bdt.root"
          - rucio get "KM3NET_ECAP_MS:mcv5.1.km3_numuCC.ALL.dst.bdt.root"
      - name: fitdata
        environment: 'gear8mike/test-repo:km3irf'
        kubernetes_memory_limit: '1000Mi'
        commands:
          - mkdir data
          - mv mcv5.1.km3_anumuCC.ALL.dst.bdt.root ./data
          - mv mcv5.1.km3_numuCC.ALL.dst.bdt.root ./data
          - cd "${path}" && python "${main}"
outputs:
  files:
    - results/test_aeff.fits
#!/bin/bash
reana-client secrets-add --file ~/cert/client.key --file ~/cert/client.crt --env VOMSPROXY_PASS=cGFzc3dvcmQ= --env VONAME=escape --env RUCIO_USERNAME=msmirnov --overwrite
reana-client create -w aeff_build -f reana_heavy_DL.yaml
export REANA_WORKON=aeff_build
reana-client upload
reana-client start
reana-client status
reana-client ls
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import unittest


class TestGeneral(unittest.TestCase):
    def setUp(self):
        print("Test has started!")

    def test_word_length(self):
        self.assertEqual(len("word"), 4)

    def test_simple(self):
        self.assertEqual(3 + 5, 8)


if __name__ == "__main__":
    unittest.main()