Commit 2c6ca252 authored by JOSSOUD Olivier's avatar JOSSOUD Olivier
Browse files

QuickViz.

parent b8e1d505
Pipeline #56935 passed with stages
in 1 minute and 18 seconds
......@@ -19,6 +19,7 @@ setup(
entry_points={ # Optional
'console_scripts': [
'wimcollect=wimcollect.main:main',
'wimvizu=wimcollect.main:quickviz_antarctica'
],
},
package_data={ # Optional
......
import datetime
import configobj
import pkgutil
import os
import sys
import argparse
import wimcollect.httpddu as httpddu
import wimcollect.sshdmc as sshdmc
import wimcollect.logger as logger
import wimcollect.quickviz as quickviz
def main():
    """Console entry point: collect yesterday's Picarro data from the DDU
    and DMC Antarctic stations, then build the quick-visualization data
    source files for both instruments.
    """
    # Configuration file shipped inside the installed wimcollect package.
    pkgpath = os.path.dirname(pkgutil.get_loader("wimcollect").path)
    conf_file_path = os.path.join(pkgpath, "config", "settings.ini")
    if not os.path.exists(conf_file_path):
        sys.stderr.write("Configuration file not found in [" + conf_file_path + "]")
        return
    config_parser = configobj.ConfigObj(conf_file_path)

    # Logger: write a banner plus the exact CLI invocation to mark this run.
    log = logger.Logger(config_parser)
    log.write(instrument="", message="------------------------------------------------------------------")
    log.write(instrument="", message=" ".join(sys.argv))
    log.write(instrument='', message="------------------------------------------------------------------")

    yesterday = datetime.datetime.now() - datetime.timedelta(1)

    # Download yesterday's Picarro data from both stations
    # (DDU over HTTP, DMC over SSH).
    httpddu_col = httpddu.Collector(config_parser, log)
    httpddu_col.download_picarro(yesterday)

    sshdmc_col = sshdmc.Collector(config_parser, log)
    sshdmc_col.download_picarro(yesterday)

    # Generate the quick-visualization CSV source files for each instrument.
    vizual = quickviz.Visualizator(config_parser, log)
    vizual.create_data_source_file("DDU", "HIDS2189", yesterday)
    vizual.create_data_source_file("DMC", "HIDS2319", yesterday)

    # NOTE(review): a large block of commented-out argparse/compress/send code
    # referencing modules not imported here (ftpcea, hobo, ftpsender) was
    # removed as dead code; see VCS history if it needs to be resurrected.
def quickviz_antarctica():
    """Console entry point (``wimvizu``): download one day of Picarro data
    from the DDU and DMC stations and build the quick calibration
    visualization source files. The day defaults to yesterday and can be
    overridden with ``--day YYYY-MM-DD``.
    """
    # Configuration file shipped inside the installed wimcollect package.
    pkgpath = os.path.dirname(pkgutil.get_loader("wimcollect").path)
    conf_file_path = os.path.join(pkgpath, "config", "settings.ini")
    if not os.path.exists(conf_file_path):
        sys.stderr.write("Configuration file not found in [" + conf_file_path + "]")
        return
    config_parser = configobj.ConfigObj(conf_file_path)

    # Logger
    log = logger.Logger(config_parser)

    # Default day is yesterday, as a datetime.date so the default and a
    # --day value parsed by format_date() have the same type.
    yesterday = (datetime.datetime.now() - datetime.timedelta(1)).date()

    # Parse CLI arguments
    parser = argparse.ArgumentParser(description="Generate data for Picarro's quick calibration visualization.")
    parser.add_argument('-d', '--day', default=yesterday, type=format_date,
                        help="Day (in ISO format YYYY-MM-DD) which should be processed.")
    args = parser.parse_args()

    # Download the requested day's Picarro data from both stations.
    httpddu_col = httpddu.Collector(config_parser, log)
    httpddu_col.download_picarro(args.day)

    sshdmc_col = sshdmc.Collector(config_parser, log)
    sshdmc_col.download_picarro(args.day)

    # Generate the quick-visualization CSV source files for each instrument.
    vizual = quickviz.Visualizator(config_parser, log)
    vizual.create_data_source_file("DDU", "HIDS2189", args.day)
    vizual.create_data_source_file("DMC", "HIDS2319", args.day)
def format_date(string: str) -> datetime.date:
    """Parse an ISO-formatted (YYYY-MM-DD) string into a datetime.date.

    Raises ValueError on malformed input, which argparse turns into a
    clean CLI error when used as an argument ``type``.
    """
    parsed = datetime.datetime.strptime(string, "%Y-%m-%d")
    return parsed.date()
# Script entry point when run directly; the installed console_scripts
# (see setup.py) call main() / quickviz_antarctica() without this guard.
if __name__ == "__main__":
    main()
import os
import datetime
import tempfile
import configobj
import pandas as pd
import wimcollect.logger as logger
import wimcollect.utils as utils
class Visualizator:
    """Builds per-day CSV source files for the Picarro quick calibration
    visualization from the locally stored compressed daily archives.
    """

    def __init__(self, config_parser: configobj.ConfigObj, log: logger):
        # Logger
        self.logger = log
        self.object_id = "VIZU"  # config section name, also used as log tag

        # Config
        self.config = config_parser
        self.base_dir = self.config[self.object_id]["root_dir"]

    def create_data_source_file(self, site_id: str, picarro_id: str, day: datetime.date):
        """Decompress the day's archive for (site_id, picarro_id), filter the
        calibration rows and write them as <site>_<picarro>_<YYYYMMDD>.csv
        under base_dir/<site_id>. Logs and returns early if the archive is
        missing.
        """
        source_dir = os.path.join(self.config["LOCAL"]["base_dir"], site_id,
                                  "picarro", picarro_id, str(day.year))
        source_filename = site_id + "_" + picarro_id + "_" + day.strftime("%Y%m%d") + ".lzma"
        source_filepath = os.path.join(source_dir, source_filename)
        if not os.path.exists(source_filepath):
            self.logger.write(self.object_id, "Failed to open source file " + source_filepath)
            return

        # Decompress, and read data as Pandas' DataFrame
        with tempfile.TemporaryDirectory() as tmpdirname:
            dat_files = utils.extract_compressed_file(source_filepath, tmpdirname)
            df = self.get_data(dat_files)

        # Save calibration data file
        calib_dir = os.path.join(self.base_dir, site_id)
        os.makedirs(calib_dir, exist_ok=True)
        calib_filename = site_id + "_" + picarro_id + "_" + day.strftime("%Y%m%d") + ".csv"
        calib_filepath = os.path.join(calib_dir, calib_filename)
        df.to_csv(calib_filepath, index=False)

    def get_data(self, dat_files: list) -> pd.DataFrame:
        """Read the given whitespace-delimited Picarro .dat files and return
        one DataFrame with the datetime, isotope and valve columns, keeping
        only rows where ValveMask != 1 (presumably the calibration rows —
        TODO confirm against the instrument documentation).
        """
        # pd.datetime was deprecated in pandas 0.25 and removed in 1.0;
        # use the stdlib datetime module (already imported) instead.
        def parse_dt(value: str) -> datetime.datetime:
            return datetime.datetime.strptime(value, '%Y-%m-%d %H:%M:%S.%f')

        frames = []
        for dat_file in dat_files:
            df = pd.read_csv(dat_file,
                             delim_whitespace=True,
                             parse_dates={'datetime': ['DATE', 'TIME']},
                             date_parser=parse_dt)
            df = df[["datetime", "H2O", "Delta_18_16", "Delta_D_H", "ValveMask"]]
            df = df[df["ValveMask"] != 1]
            frames.append(df)

        # Concatenate once instead of DataFrame.append per file (append is
        # deprecated and quadratic); empty input yields an empty DataFrame,
        # matching the previous behavior.
        if not frames:
            return pd.DataFrame()
        return pd.concat(frames)
configobj==5.0.*
paramiko==2.7.*
\ No newline at end of file
paramiko==2.7.*
pandas==0.25.*
\ No newline at end of file
......@@ -14,11 +14,9 @@ def recompress_file(zip_filepath: str):
current_directory = os.path.dirname(zip_filepath)
# Extract zip file and delete it
source_zip = zipfile.ZipFile(zip_filepath, 'r')
files_in_zip = [os.path.join(current_directory, filename) for filename in source_zip.namelist()]
source_zip.extractall(current_directory)
source_zip.close()
os.remove(zip_filepath)
files_in_zip = extract_compressed_file(compressed_filepath=zip_filepath,
dest_dir=current_directory,
delete_compressed=True)
# Compress the files
compressed_filepath = os.path.splitext(zip_filepath)[0] + ".lzma"
......@@ -33,3 +31,13 @@ def recompress_file(zip_filepath: str):
return compressed_filepath
else:
return None
def extract_compressed_file(compressed_filepath: str, dest_dir: str, delete_compressed: bool = False) -> list:
    """Extract a zip archive into dest_dir.

    :param compressed_filepath: path of the archive to extract.
    :param dest_dir: directory the archive members are extracted into.
    :param delete_compressed: if True, remove the archive after extraction.
    :return: full paths of the extracted files, in archive order.
    """
    # Context manager guarantees the archive handle is closed even if
    # extractall raises (the original leaked the handle on error).
    with zipfile.ZipFile(compressed_filepath, 'r') as source_zip:
        files_in_zip = [os.path.join(dest_dir, filename) for filename in source_zip.namelist()]
        source_zip.extractall(dest_dir)
    if delete_compressed:
        os.remove(compressed_filepath)
    return files_in_zip
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment