Commit ade49bce authored by Marc-Antoine Drouin's avatar Marc-Antoine Drouin

Merge branch 'unordered_time' into 'develop'

Solved problem detected during production

See merge request !30
parents 37a20297 1c9ce32a
Pipeline #88911 passed with stage
in 23 minutes and 15 seconds
......@@ -114,9 +114,34 @@ def main(conf_station, input_file, output_file, verbose, log_file):
# launch
rs_launch = xr.open_dataset(input_file, group="launch")
# 100% SHC
rs_shc = xr.open_dataset(input_file, group="chamber_100_rh")
try:
rs_shc = xr.open_dataset(input_file, group="chamber_100_rh")
except ValueError:
rs_shc = pd.DataFrame(
columns=[
"chamber",
"ta_chamber",
"ta_ref_chamber",
"rh_chamber",
"rh_sensor_t_chamber",
]
)
# shelter
rs_shelter = xr.open_dataset(input_file, group="gc_shelter")
try:
rs_shelter = xr.open_dataset(input_file, group="gc_shelter")
except ValueError:
rs_shelter = pd.DataFrame(
columns=[
"gc_shelter",
"alt_rs_gc_shelter",
"alt_ref_sensor_gc_shelter",
"ta_gc_shelter",
"ta_ref_gc_shelter",
"rh_gc_shelter",
"rh_ref_gc_shelter",
"t_rh_sensor_gc_shelter",
]
)
# data from L1 going directly into L2 files
anc_data = {
......@@ -164,6 +189,18 @@ def main(conf_station, input_file, output_file, verbose, log_file):
"ams": rs_shelter,
}
# remove duplicated timesteps
# ---------------------------------------------------------------------------------
duplicated = rs_profile.index.duplicated()
# remove timesteps from raw data
rs_profile = rs_profile[np.logical_not(duplicated)]
logger.warning("remove %d duplicated timesteps", duplicated.sum())
# sort timesteps
# ---------------------------------------------------------------------------------
# L2 data structure
# ---------------------------------------------------------------------------------
data = pd.DataFrame(index=rs_profile.index)
......
......@@ -1348,16 +1348,18 @@ class GsdFileData:
    Before converting the seconds we have to check whether the seconds value is
    smaller than the first time.
"""
one_day = dt.timedelta(days=1)
delta_day = dt.timedelta(days=0)
time_ref = self.data.datetime.values[0]
date_as_dt = []
for second in self.data.datetime.values:
time_as_dt = dt.timedelta(seconds=int(second))
if second >= time_ref:
date_as_dt.append(self.date + time_as_dt)
else:
date_as_dt.append(self.date + one_day + time_as_dt)
if second < time_ref:
delta_day += dt.timedelta(days=1)
date_as_dt.append(self.date + delta_day + time_as_dt)
time_ref = second
self.data.datetime = date_as_dt
......
This diff is collapsed.
......@@ -102,12 +102,24 @@ EXE = MAIN_DIR / "gruan_raw_to_1a.py"
Path("v2.0.0") / "rs_0a_GRUAN_REU_M10_20200706_112021_V02.zip",
0,
),
# chaudron error L2 processing
# chaudron duplicated timesteps
(
"chaudron_l1.toml",
Path("v2.0.0") / "rs_0a_GRUAN_REU_M10_20191128_231547_V02.zip",
0,
),
# chaudron unordered timesteps
(
"chaudron_l1.toml",
Path("v2.0.0") / "rs_0a_GRUAN_REU_M10_20200320_231549_V02.zip",
0,
),
# Trappes
(
"trappes_l1.toml",
Path("v2.0.0") / "rs_0a_GRUAN_TRP_M10_20200205_231550_V02.zip",
0,
),
],
)
def test_run_l1(conf, zip_in_file, ret_code):
......
......@@ -32,6 +32,7 @@ EXE = MAIN_DIR / "gruan_1a_to_2a.py"
("chaudron_l2.toml", "rs_1a_GRUAN_REU_M10_20200901_231547_V02.nc"),
("chaudron_l2.toml", "rs_1a_GRUAN_REU_M10_20200706_112021_V02.nc"),
("chaudron_l2.toml", "rs_1a_GRUAN_REU_M10_20191128_231547_V02.nc"),
("chaudron_l2.toml", "rs_1a_GRUAN_REU_M10_20200320_231549_V02.nc"),
],
)
def test_run_l2(conf, in_file):
......
......@@ -46,6 +46,7 @@ K_TO_DEG = 273.15
("rs_2a_LchaudronIm10-gruan_v01_20191102_111549_120.nc"),
("rs_2a_GRUAN_REU_M10_20200706_112021_V02.nc"),
("rs_2a_GRUAN_REU_M10_20191128_231547_V02.nc"),
("rs_2a_GRUAN_REU_M10_20200320_231549_V02.nc"),
],
)
def test_validation_qls(in_file):
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment