diff --git a/reana_analysis/production_of_fits_WF.py b/reana_analysis/production_of_fits_WF.py
index d497cf04fb4f241b519c3b5f6274b092aab533b7..2f8b27759a6df57eda9bc7278ba30fa7f64c1661 100644
--- a/reana_analysis/production_of_fits_WF.py
+++ b/reana_analysis/production_of_fits_WF.py
@@ -34,13 +34,16 @@ from python_scripts.func import WriteAeff, WritePSF, WriteEdisp
 
 # create path for data dst files
 # data_path = "../../some_data/files_cta_km3net"
-data_path = "./data"
+data_path = "../data"
 # data_path = "/run/media/msmirnov/DATA2/data_files/IRF_data_create"
 
 # normal data with bdt
 filename_nu = path.join(data_path, "mcv5.1.km3_numuCC.ALL.dst.bdt.root")
 filename_nubar = path.join(data_path, "mcv5.1.km3_anumuCC.ALL.dst.bdt.root")
 
+# filename_nu = "data/IRF_data_create/mcv5.1.km3_numuCC.ALL.dst.bdt.root"
+# filename_nubar = "data/IRF_data_create/mcv5.1.km3_anumuCC.ALL.dst.bdt.root"
+
 no_bdt = False
 # Read data files using km3io
 f_nu_km3io = OfflineReader(filename_nu)
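
For context on the hunk above: it only changes where `production_of_fits_WF.py` looks for the DST files, joining the new `data_path` default with the two filenames and opening them via `km3io.OfflineReader`. Below is a minimal sketch of that lookup, not part of the patch; the existence check and the `f_nubar_km3io` variable name are illustrative assumptions.

```python
# Sketch only: mirror the file lookup implied by the hunk above and fail
# early if the DST files are not where data_path points.
from os import path
from km3io import OfflineReader

data_path = "../data"  # new default from the hunk above
filename_nu = path.join(data_path, "mcv5.1.km3_numuCC.ALL.dst.bdt.root")
filename_nubar = path.join(data_path, "mcv5.1.km3_anumuCC.ALL.dst.bdt.root")

# Illustrative pre-flight check; not present in production_of_fits_WF.py.
for fname in (filename_nu, filename_nubar):
    if not path.isfile(fname):
        raise FileNotFoundError(f"DST file not found: {fname}")

f_nu_km3io = OfflineReader(filename_nu)
f_nubar_km3io = OfflineReader(filename_nubar)  # assumed name for the anti-nu reader
```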
diff --git a/reana_heavy.yaml b/reana_heavy.yaml
index cec3c035bc7f08b5c3de26e101d9f409a0807f5b..5c9f83f2d6e93088ebfad921b4dfee18f4c97269 100644
--- a/reana_heavy.yaml
+++ b/reana_heavy.yaml
@@ -27,7 +27,6 @@ workflow:
      -  environment: 'gear8mike/test-repo:km3irf'
         kubernetes_memory_limit: '1000Mi'
         commands:
-          - pip install --user pandas
           - mkdir data
           - mv run/media/msmirnov/DATA2/data_files/IRF_data_create/mcv5.1.km3_anumuCC.ALL.dst.bdt.root ./data
           - mv run/media/msmirnov/DATA2/data_files/IRF_data_create/mcv5.1.km3_numuCC.ALL.dst.bdt.root ./data
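
The commands kept in this workflow step stage the two DST files into `./data` before the script runs (the `pip install` line is dropped, so pandas is expected to ship with the `gear8mike/test-repo:km3irf` image). Purely as an illustration of that staging, and not something the workflow itself uses, a Python equivalent under the same source path as the `mv` commands could look like:

```python
# Illustration only: Python equivalent of the mkdir/mv commands in the
# reana_heavy.yaml step above. Paths mirror those commands; adjust if the
# job workspace layout differs.
import shutil
from pathlib import Path

src_dir = Path("run/media/msmirnov/DATA2/data_files/IRF_data_create")
dst_dir = Path("data")
dst_dir.mkdir(exist_ok=True)  # corresponds to "mkdir data"

for name in (
    "mcv5.1.km3_anumuCC.ALL.dst.bdt.root",
    "mcv5.1.km3_numuCC.ALL.dst.bdt.root",
):
    shutil.move(str(src_dir / name), str(dst_dir / name))
```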