Commit 01d71d16 authored by JOSSOUD Olivier's avatar JOSSOUD Olivier
Browse files

Flow Processor. Get absolute melted height.

parent 4a70ad90
Pipeline #119344 passed with stages
in 1 minute and 4 seconds
......@@ -436,26 +436,56 @@ def __compute_mm__(df: pd.DataFrame) -> pd.DataFrame:
return df
def get_absolute_melted_height(encoder_df: pd.DataFrame,
                               stacked_iceblocks_df: pd.DataFrame,
                               moveup_event_df: pd.DataFrame) -> pd.DataFrame:
    """Get the total absolute melting height, for each encoder's timestep.

    Parameters
    ----------
    encoder_df: pd.DataFrame
        Encoder's dataset. Datetime-indexed, with a ``position`` column.
    stacked_iceblocks_df: pd.DataFrame
        Datetime-indexed DataFrame containing the total height of ice blocks stacked
        (``tot_stacked_height`` column). In other words, the height of all the ice
        blocks stacked at the same time should be summed.
    moveup_event_df: pd.DataFrame
        Output of :func:`~processor.encoder.get_moveup_events` function.

    Returns
    -------
    pd.DataFrame
        Same as ``encoder_df``, with additional ``time_diff``, ``ice_speed`` and
        ``melted_height`` columns.

    Raises
    ------
    ValueError
        If the number of stacking events does not match the number of stacked heights.
    """
    stack_events_df = moveup_event_df[moveup_event_df["event_type"] == "stacking"]
    if len(stack_events_df.index) != len(stacked_iceblocks_df):
        raise ValueError("There should be the same number of stacking events as stacked heights!")

    # Associate the stack events to the total height of ice blocks stacked at this event.
    # Copy first so the caller's moveup_event_df is not mutated by the merge.
    stack_events_df = stack_events_df.copy()
    stack_events_df = pd.merge_asof(stack_events_df, stacked_iceblocks_df,
                                    left_on="start_datetime", right_index=True,
                                    direction="forward")

    # Compute the height of already-stacked ice which has been melted during the stacking
    # event, while the encoder was moving or at its parking position.
    stack_events_df["melted_while_event"] = \
        stack_events_df["tot_stacked_height"] - (stack_events_df["end_position"] - stack_events_df["start_position"])

    # Compute the average melting speed of the ice block on the melting surface, during the
    # stacking event. Negative sign: melting makes the measured position decrease.
    stack_events_df["avg_melting_speed"] = \
        -stack_events_df["melted_while_event"] / (stack_events_df["end_datetime"] - stack_events_df["start_datetime"]).dt.total_seconds()

    # Instantaneous ice speed from the encoder itself, outside of stacking events.
    encoder_df["time_diff"] = encoder_df.index.to_series().diff().dt.total_seconds()
    encoder_df["ice_speed"] = encoder_df["position"].diff() / encoder_df["time_diff"]

    # During each stacking event the encoder reading is meaningless (it is moving up),
    # so overwrite the measured speed with the event's average melting speed.
    for _, stack_event in stack_events_df.iterrows():
        encoder_df.loc[stack_event["start_datetime"]:stack_event["end_datetime"], "ice_speed"] = \
            stack_event["avg_melting_speed"]

    # Integrate speed over time to get the cumulative absolute melted height.
    encoder_df["melted_height"] = (-encoder_df["ice_speed"] * encoder_df["time_diff"]).cumsum()

    return encoder_df
def get_tubing_volume_dict(filepath: str,
......
......@@ -153,4 +153,4 @@ class TestFlow(TestCase):
encoder_df = inst_reader.get_timeseries("20210507_ASUMA2016_8_14", "ENCODER_periodic")
moveup_event_df = encoder.get_moveup_events(encoder_df, (-3.0, -0.01))
flow.get_continuous_melting_height(encoder_df, stacked_iceblock_df, moveup_event_df)
flow.get_absolute_melted_height(encoder_df, stacked_iceblock_df, moveup_event_df)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment