Skip to content
GitLab
Projects
Groups
Snippets
Help
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Open sidebar
JOSSOUD Olivier
CFA Tools
Commits
76526650
Commit
76526650
authored
Feb 16, 2021
by
JOSSOUD Olivier
Browse files
Flow Processor. Debubbler effect test data.
parent
7af61021
Pipeline
#104761
passed with stages
in 2 minutes and 9 seconds
Changes
2
Pipelines
1
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
96 additions
and
12 deletions
+96
-12
cfatools/processor/flow.py
cfatools/processor/flow.py
+94
-11
cfatools/tests/test_processor/test_flow.py
cfatools/tests/test_processor/test_flow.py
+2
-1
No files found.
cfatools/processor/flow.py
View file @
76526650
...
...
@@ -43,34 +43,118 @@ def get_flow_timeseries(dataset: DatasetReader):
def get_debubbler_effect(dataset: DatasetReader) -> pd.DataFrame:
    """Evaluate the debubbler/tubing transit-time model against recorded times.

    Propagates each reference timestamp observed at conductivity cell C1
    through the debubbler and the downstream tube volumes (via
    ``get_datetime_out``) to predict arrival times at cells C2, C4 and C5,
    then compares these modelled times with hand-recorded reference times
    (the ``*_reel`` columns, from the 2021-02-12 test session).

    Parameters
    ----------
    dataset: DatasetReader
        Dataset providing the pump timeseries used by
        ``get_bubble_density_from_pump`` and the conductivity timeseries.

    Returns
    -------
    pd.DataFrame
        One row per reference event, indexed by ``datetime_C1``, with the
        modelled arrival times (``datetime_C2/C4/C5``), the recorded
        reference times (``*_reel``), the modelled and recorded transit-time
        differences (``diff_*``) and their relative error in percent
        (``diff_diff_*``).
    """
    pump_df = get_bubble_density_from_pump(dataset)

    # NOTE(review): this live extraction is immediately superseded by the
    # hard-coded reference DataFrame below; it is kept only in case
    # ``get_timeseries`` has side effects -- confirm it can be removed.
    conduct_df = dataset.get_timeseries("CONDUCTI_periodic")
    conduct_df = conduct_df[["C1"]]
    # conduct_df = conduct_df[conduct_df["C1"] > 1.35]
    conduct_df["datetime_C1"] = conduct_df.index

    # Hand-recorded reference data from the 2021-02-12 debubbler test run.
    # Empty strings mark events with no usable C5 reading (become NaT).
    conduct_df = pd.DataFrame({
        "datetime_C1": ["2021-02-12 15:57:48.90+00:00",
                        "2021-02-12 16:01:13.83+00:00",
                        "2021-02-12 16:06:12.04+00:00",
                        "2021-02-12 16:09:40.08+00:00",
                        "2021-02-12 16:11:22.75+00:00",
                        "2021-02-12 16:15:37.19+00:00",
                        "2021-02-12 16:18:09.62+00:00",
                        "2021-02-12 16:22:12.37+00:00",
                        "2021-02-12 16:24:41.02+00:00",
                        "2021-02-12 16:30:33.91+00:00",
                        "2021-02-12 16:32:25.83+00:00",
                        "2021-02-12 16:38:04.73+00:00",
                        "2021-02-12 16:42:14.41+00:00",
                        "2021-02-12 16:45:20.80+00:00",
                        "2021-02-12 16:48:19.08+00:00"],
        "datetime_C2_reel": ["2021-02-12 15:57:57.64+00:00",
                             "2021-02-12 16:01:28.72+00:00",
                             "2021-02-12 16:06:24.72+00:00",
                             "2021-02-12 16:09:48.79+00:00",
                             "2021-02-12 16:11:30.73+00:00",
                             "2021-02-12 16:16:03.03+00:00",
                             "2021-02-12 16:18:32.37+00:00",
                             "2021-02-12 16:22:36.23+00:00",
                             "2021-02-12 16:25:02.58+00:00",
                             "2021-02-12 16:30:47.83+00:00",
                             "2021-02-12 16:32:38.51+00:00",
                             "2021-02-12 16:38:31.58+00:00",
                             "2021-02-12 16:42:38.04+00:00",
                             "2021-02-12 16:45:36.56+00:00",
                             "2021-02-12 16:48:33.43+00:00"],
        "datetime_C4_reel": ["2021-02-12 15:58:09.00+00:00",
                             "2021-02-12 16:01:45.17+00:00",
                             "2021-02-12 16:06:39.78+00:00",
                             "2021-02-12 16:09:58.38+00:00",
                             "2021-02-12 16:11:39.23+00:00",
                             "2021-02-12 16:16:28.00+00:00",
                             "2021-02-12 16:18:55.56+00:00",
                             "2021-02-12 16:22:55.71+00:00",
                             "2021-02-12 16:25:20.49+00:00",
                             "2021-02-12 16:31:02.87+00:00",
                             "2021-02-12 16:32:51.78+00:00",
                             "2021-02-12 16:39:02.38+00:00",
                             "2021-02-12 16:43:06.65+00:00",
                             "2021-02-12 16:45:55.25+00:00",
                             "2021-02-12 16:48:49.91+00:00"],
        "datetime_C5_reel": ["2021-02-12 15:58:21.479+00:00",
                             "2021-02-12 16:01:54.363+00:00",
                             "2021-02-12 16:06:46.968+00:00",
                             "2021-02-12 16:10:15.368+00:00",
                             "2021-02-12 16:11:52.210+00:00",
                             "2021-02-12 16:16:22.574+00:00",
                             "2021-02-12 16:18:49.811+00:00",
                             "",
                             "",
                             "",
                             "",
                             "2021-02-12 16:38:39.968+00:00",
                             "2021-02-12 16:42:45.176+00:00",
                             "2021-02-12 16:45:48.111+00:00",
                             "2021-02-12 16:48:42.705+00:00"]
    })
    for col in ["datetime_C1", "datetime_C2_reel",
                "datetime_C4_reel", "datetime_C5_reel"]:
        conduct_df[col] = pd.to_datetime(conduct_df[col])
    conduct_df = conduct_df.set_index("datetime_C1", drop=False)

    # C1 -> debubbler: water-pump command flow through 0.087 mL of tubing.
    com_water_df = pump_df[["com_water_pump"]].rename(
        columns={"com_water_pump": "mlmin"})
    conduct_df["datetime_debubbler"] = get_datetime_out(
        conduct_df["datetime_C1"], com_water_df, 0.087, parallel=False)

    # Tube volumes (mL). The T1->C4 value is empirically tuned: the
    # theoretical volume would be 1.187 - 0.432 = 0.755, whereas 0.808 is
    # used; T1->C5 matches the theoretical 0.630 - 0.432 = 0.198.
    vol_debub_T1 = 0.432
    vol_T1_C2 = 0.108
    vol_T1_C4 = 0.808
    vol_T1_C5 = 0.198

    # Debubbler -> T1: combined picarro + collector flow, EWM-smoothed.
    colpic_df = pump_df[["pic", "col"]].ewm(alpha=0.05).mean()
    colpic_df["mlmin"] = colpic_df["pic"] + colpic_df["col"]
    conduct_df["datetime_T1"] = get_datetime_out(
        conduct_df["datetime_debubbler"], colpic_df, vol_debub_T1,
        parallel=False)

    # T1 -> C2 and T1 -> C4: collector flow only.
    col_df = pump_df[["col"]].rename(columns={"col": "mlmin"})
    conduct_df["datetime_C2"] = get_datetime_out(
        conduct_df["datetime_T1"], col_df, vol_T1_C2, parallel=False)
    conduct_df["datetime_C4"] = get_datetime_out(
        conduct_df["datetime_T1"], col_df, vol_T1_C4, parallel=False)

    # T1 -> C5: picarro flow only.
    pic_df = pump_df[["pic"]].rename(columns={"pic": "mlmin"})
    conduct_df["datetime_C5"] = get_datetime_out(
        conduct_df["datetime_T1"], pic_df, vol_T1_C5, parallel=False)

    # Modelled vs recorded transit times, and their relative error (%).
    # C1 is the common reference, so its "recorded" time is datetime_C1.
    for down, up in [("C2", "C1"), ("C4", "C1"), ("C4", "C2"), ("C5", "C1")]:
        up_reel = "datetime_C1" if up == "C1" else f"datetime_{up}_reel"
        conduct_df[f"diff_{down}_{up}_reel"] = (
            conduct_df[f"datetime_{down}_reel"] - conduct_df[up_reel])
        conduct_df[f"diff_{down}_{up}"] = (
            conduct_df[f"datetime_{down}"] - conduct_df[f"datetime_{up}"])
        conduct_df[f"diff_diff_{down}_{up}"] = (
            100
            * abs(conduct_df[f"diff_{down}_{up}"]
                  - conduct_df[f"diff_{down}_{up}_reel"])
            / conduct_df[f"diff_{down}_{up}_reel"])

    return conduct_df
...
...
@@ -148,7 +232,6 @@ def get_bubble_density_from_conduct(dataset: DatasetReader):
return
compump_df
def
get_datetime_out
(
datetime_in
:
pd
.
Series
,
mlmin_df
:
pd
.
DataFrame
,
tube_volume_ml
:
float
,
parallel
:
bool
=
True
)
->
pd
.
Series
:
"""Get the date/time the fluid reaches the end of the tube.
...
...
@@ -231,7 +314,7 @@ def __get_single_datetime_out__(datetime_in: pd.Timestamp, mlmin_df: pd.DataFram
# Compute the duration between the last data line and the moment when the tube's volume is reached
missing_volume
=
(
tube_volume_ml
+
initial_volume
)
-
object_mlmin_df
[
'ml_cumul'
].
iloc
[
-
1
]
last_speed
=
object_mlmin_df
[
'mlmin'
].
iloc
[
-
1
]
missing_duration_min
=
float
(
missing_volume
*
last_speed
)
missing_duration_min
=
float
(
missing_volume
/
last_speed
)
# Compute the datetime when the tube's output is reached
datetime_out
=
object_mlmin_df
.
index
[
-
1
]
+
datetime
.
timedelta
(
minutes
=
missing_duration_min
)
...
...
cfatools/tests/test_processor/test_flow.py
View file @
76526650
...
...
@@ -59,7 +59,8 @@ class TestFlow(TestCase):
icecore_df
.
set_index
(
"datetime_in"
,
inplace
=
True
)
icecore_df
[
"datetime_in"
]
=
icecore_df
.
index
icecore_df
[
"datetime_out"
]
=
flow
.
get_datetime_out
(
icecore_df
[
"datetime_in"
],
mlmin_df
,
tube_volume_ml
)
icecore_df
[
"datetime_out"
]
=
flow
.
get_datetime_out
(
icecore_df
[
"datetime_in"
],
mlmin_df
,
tube_volume_ml
,
parallel
=
False
)
self
.
assertEqual
(
icecore_df
.
iloc
[
0
][
"datetime_out"
],
pd
.
to_datetime
(
"2021-01-01 00:25:00"
))
self
.
assertEqual
(
icecore_df
.
iloc
[
1
][
"datetime_out"
],
pd
.
to_datetime
(
"2021-01-01 00:29:00"
))
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment