Commit 7a50288e authored by Clément Haëck's avatar Clément Haëck
Browse files

Replace m_* functions with Logger

parent e4a0e906
......@@ -24,7 +24,6 @@ import lib
import lib.data.hi
import lib.data.hists
import lib.zones
from lib import m_start, m_end, m_progress
REDUCE = False
......@@ -33,21 +32,21 @@ HI_TARGET = 9.5
def main(args):
m_start("Getting data")
lgr = lib.Logger("Getting data")
ds = lib.data.hi.get_data(args)
m_end()
lgr.end()
if REDUCE:
print("DATASET REDUCED FOR TESTING")
ds = ds.isel(time=slice(None, None, 50))
m_start("Getting static masks")
lgr.msg("Getting static masks")
# Remove land and select standard study region.
args['grid'] = '4km_EPSG32662'
zone = lib.zones.get_data(args)['total']
land = lib.zones.get_land(args)['land_large']
ds = ds.where(zone*~land)
m_end()
lgr.end()
# If coef argument is 0, recompute normalisation coefs for V, S, B.
if args['coef'] == 0:
......@@ -77,23 +76,23 @@ def main(args):
def get_components_coef(ds):
# NOTE(review): diff-view span — the removed m_* calls and their Logger
# replacements both appear below; only the lgr.* lines survive the commit.
# Computes one normalisation coefficient per component S, V, B so that each
# scaled component has unit standard deviation.
m_start("Computing SVB coef")
lgr = lib.Logger("Computing SVB coef")
coef = {}
for variable in 'SVB':
m_progress()
lgr.progress()
data = ds[variable]
# S is signed: take its magnitude before measuring spread.
if variable == 'S':
data = np.fabs(data)
# .values forces evaluation (presumably a lazy xarray/dask array — TODO confirm).
std = data.std().values
coef[variable] = 1./std
m_end()
lgr.end()
return coef
def coef_hi(ds, coef):
m_start("Getting HI coef")
lgr = lib.Logger("Getting HI coef")
coef['HI'] = 1.
ds['HI'] = lib.data.hi.apply_coef(ds, coef)
......@@ -105,7 +104,7 @@ def coef_hi(ds, coef):
hi_x = h.ppf(Q_TARGET)
coef_hi = HI_TARGET/hi_x
m_end()
lgr.end()
return {'HI': coef_hi}
......
......@@ -30,9 +30,6 @@ def main(args):
hi.attrs = {}
sst = ds['sst']
lgr.end()
print(ds)
lgr.msg("Computing window size")
sx, sy = get_scale(hi, args['scale'], grid)
......
......@@ -27,7 +27,6 @@ import lib.data.ostia
import lib.data.p_frt_mask
import lib.data.SN_separation
import lib.zones
from lib import m_start, m_next, m_end, m_progress
# Width of SST discretization step
SST_STEP = 1e-2
......@@ -49,19 +48,19 @@ INFILE_PARAMS = ['threshold', 'scale', 'number', 'coef']
def main(args):
m_start("Loading data")
lgr = lib.Logger("Loading data")
ds = get_data(args)
m_next("Smoothing SN separation temperature")
lgr.msg("Smoothing SN separation temperature")
# Smooth SN separation
ds['threshold'] = lib.data.SN_separation.smooth(ds, time_step=8)
m_next("Computing HI")
lgr.msg("Computing HI")
# Compute HI
ds['HI'] = lib.data.hi.apply_coef(ds, lib.data.hi.get_coef(args))
ds = ds.drop_vars(['S', 'V', 'B'])
m_next("Applying static masks")
lgr.msg("Applying static masks")
# Apply masks: land (enlarged), total zone, min front proba
static = ~ds.land_large * ds.total
if args['mask']:
......@@ -70,13 +69,13 @@ def main(args):
ds = ds.drop_vars(['land_large', 'total', 'p_frt'])
m_next("Computing HI masks")
lgr.msg("Computing HI masks")
# Masks
ds['mask_frt'] = ds.HI > args['threshold']
ds['mask_bkg'] = ds.HI < args['threshold']
ds = ds.drop_vars(['HI'])
m_next("Computing zones datasets")
lgr.msg("Computing zones datasets")
# Datasets for each zone
zones = dict()
if 'S' in args['zones']:
......@@ -90,7 +89,7 @@ def main(args):
up[mask] = up[mask] * op(ds.sst, ds.threshold)
zones['GS3_'+zone] = up
m_next("Setting up histogram computations")
lgr.msg("Setting up histogram computations")
hists_var = []
for var in VARS:
hists = []
......@@ -98,7 +97,7 @@ def main(args):
bins_name = 'bins_' + var
for zone, zone_ds in zones.items():
for m, mask in zip(MASKS, MASKS_VAR):
m_progress()
lgr.progress()
h = zone_ds[var].where(zone_ds[mask]).groupby('time').map(
hist_grp, shortcut=True, args=[bins, bins_name]
)
......@@ -114,7 +113,7 @@ def main(args):
hist.attrs['VAR'] = var
hists_var.append(hist)
m_next("Merging results")
lgr.msg("Merging results")
hist = xr.merge(hists_var)
hist.attrs['VARS'] = VARS
hist.attrs.pop('VAR')
......@@ -127,12 +126,12 @@ def main(args):
'_FillValue': 2**30-1}
for v in VARS}
m_next("Executing computations / Writing to disk")
lgr.msg("Executing computations / Writing to disk")
ofile = lib.data.hists.get_filename(args)
lib.check_output_dir(ofile, file=True)
lib.setup_metadata(hist, args)
hist.to_netcdf(ofile, encoding=encoding)
m_end()
lgr.end()
return hist
......
......@@ -29,7 +29,6 @@ import lib.data.ostia
import lib.data.p_frt_mask
import lib.data.SN_separation
import lib.zones
from lib import m_start, m_end, m_next
CHL_NBINS = 500
HI_STEP = 0.5
......@@ -46,24 +45,24 @@ INFILE_PARAMS = ['scale', 'number', 'coef']
def main(args):
m_start("Loading data")
lgr = lib.Logger("Loading data")
ds = get_data(args)
m_next("Smoothing SN separation temperature")
lgr.msg("Smoothing SN separation temperature")
# Smooth SN separation
ds['threshold'] = lib.data.SN_separation.smooth(ds, time_step=8)
m_next("Computing HI")
lgr.msg("Computing HI")
# Compute HI
ds['HI'] = lib.data.hi.apply_coef(ds, lib.data.hi.get_coef(args))
ds = ds.drop_vars(['S', 'V', 'B'])
m_next("Applying static masks")
lgr.msg("Applying static masks")
# Apply masks: land (enlarged), total zone, min front proba
ds['HI'] = ds.HI.where(~ds.land_large * ds.total * ds.p_frt)
ds = ds.drop_vars(['land_large', 'total', 'p_frt'])
m_next("Computing zones datasets")
lgr.msg("Computing zones datasets")
# Datasets for each zone
zones = dict()
if 'S' in args['zones']:
......@@ -76,7 +75,7 @@ def main(args):
up['HI'] = up.HI.where(op(ds.sst, ds.threshold))
zones['GS3_'+zone] = up
m_next("Setting up histogram computations")
lgr.msg("Setting up histogram computations")
bins_names = ['bins_' + v for v in VARS]
bins = [get_bins(v) for v in VARS]
......@@ -88,7 +87,7 @@ def main(args):
h = h.expand_dims(zone=[zone])
hists.append(h)
m_next("Merging results")
lgr.msg("Merging results")
hist = xr.combine_by_coords(hists)
for v, name, bins in zip(VARS, bins_names, bins):
......@@ -105,14 +104,14 @@ def main(args):
# so approx 1000*1000 = 1e6 values. Uint32 stores up to 2**32~4e9.
encoding = {'hist': {'dtype': 'uint32', '_FillValue': 2**30-1}}
m_next("Executing computations / Writing to disk")
lgr.msg("Executing computations / Writing to disk")
# dask.visualize(hist.isel(time=0),
# filename=path.join(lib.root_data, 'graph.pdf'))
ofile = lib.data.hists.get_filename(args)
lib.check_output_dir(ofile, file=True)
lib.setup_metadata(hist, args)
hist.to_netcdf(ofile, encoding=encoding)
m_end()
lgr.end()
return hist
......
......@@ -27,7 +27,6 @@ import lib.data.ostia
import lib.data.p_frt_mask
import lib.data.SN_separation
import lib.zones
from lib import m_start, m_end, m_next, m_progress
# Width of SST bins
SST_STEP = 1e-2
......@@ -49,19 +48,19 @@ INFILE_PARAMS = ['thr_lo', 'thr_hi', 'scale', 'number', 'coef']
def main(args):
m_start("Loading data")
lgr = lib.Logger("Loading data")
ds = get_data(args)
m_next("Smoothing SN separation temperature")
lgr.msg("Smoothing SN separation temperature")
# Smooth SN separation
ds['threshold'] = lib.data.SN_separation.smooth(ds, time_step=8)
m_next("Computing HI")
lgr.msg("Computing HI")
# Compute HI
ds['HI'] = lib.data.hi.apply_coef(ds, lib.data.hi.get_coef(args))
ds = ds.drop_vars(['S', 'V', 'B'])
m_next("Applying static masks")
lgr.msg("Applying static masks")
# Apply masks: land (enlarged), total zone, min front proba
static = ~ds.land_large * ds.total
if args['mask']:
......@@ -69,14 +68,14 @@ def main(args):
ds['HI'] = ds.HI.where(static)
ds = ds.drop_vars(['land_large', 'total', 'p_frt'])
m_next("Computing HI masks")
lgr.msg("Computing HI masks")
# Masks
ds['mask_low'] = ds.HI < args['thr_lo']
ds['mask_hi'] = ds.HI > args['thr_hi']
ds['mask_mid'] = (ds.HI > args['thr_lo']) * (ds.HI < args['thr_hi'])
ds = ds.drop_vars(['HI'])
m_next("Computing zones datasets")
lgr.msg("Computing zones datasets")
# Datasets for each zone
zones = dict()
if 'S' in args['zones']:
......@@ -90,7 +89,7 @@ def main(args):
up[mask] = up[mask] * op(ds.sst, ds.threshold)
zones['GS3_'+zone] = up
m_next("Setting up histogram computations")
lgr.msg("Setting up histogram computations")
hists_var = []
for var in VARS:
hists = []
......@@ -98,7 +97,7 @@ def main(args):
bins_name = lh.var_name('bins', var)
for zone, zone_ds in zones.items():
for m, mask in zip(MASKS, MASKS_VAR):
m_progress()
lgr.progress()
h = zone_ds[var].where(zone_ds[mask]).groupby('time').map(
hist_grp, shortcut=True, args=[bins, bins_name]
)
......@@ -114,7 +113,7 @@ def main(args):
hist.attrs['VAR'] = var
hists_var.append(hist)
m_next("Merging results")
lgr.msg("Merging results")
hist = xr.merge(hists_var)
hist.attrs['VARS'] = VARS
hist.attrs.pop('VAR')
......@@ -126,12 +125,12 @@ def main(args):
encoding = {v: {'dtype': 'uint32', '_FillValue': 2**30-1}
for v in ['hist_' + v for v in VARS]}
m_next("Executing computations / Writing to disk")
lgr.msg("Executing computations / Writing to disk")
ofile = lib.data.hists.get_filename(args)
lib.check_output_dir(ofile, file=True)
lib.setup_metadata(hist, args)
hist.to_netcdf(ofile, encoding=encoding)
m_end()
lgr.end()
return hist
......
......@@ -10,7 +10,6 @@ import lib.data.ostia
import lib.data.p_frt_mask
import lib.data.SN_separation
import lib.zones
from lib import m_start, m_end, m_next
# Number of Chl bins
CHL_NBINS = 500
......@@ -24,19 +23,19 @@ INFILE_PARAMS = ['threshold', 'scale', 'number', 'coef']
def main(args):
m_start("Loading data")
lgr = lib.Logger("Loading data")
ds = get_data(args)
m_next("Smoothing SN separation temperature")
lgr.msg("Smoothing SN separation temperature")
# Smooth SN separation
ds['threshold'] = lib.data.SN_separation.smooth(ds, time_step=8)
m_next("Computing HI")
lgr.msg("Computing HI")
# Compute HI
ds['HI'] = lib.data.hi.apply_coef(ds, lib.data.hi.get_coef(args))
ds = ds.drop_vars(['S', 'V', 'B'])
m_next("Applying static masks")
lgr.msg("Applying static masks")
# Apply masks: land (enlarged), total zone, min front proba
static = ~ds.land_large * ds.total
if args['mask']:
......@@ -45,25 +44,24 @@ def main(args):
ds = ds.drop_vars(['land_large', 'total', 'p_frt'])
m_next("Computing HI masks")
lgr.msg("Computing HI masks")
# Masks
ds['mask_frt'] = ds.HI > args['threshold']
ds['mask_bkg'] = ds.HI < args['threshold']
ds = ds.drop_vars(['HI'])
m_next("Selecting South zone")
lgr.msg("Selecting South zone")
ds = ds.sel(lat=slice(32, None))
m_end()
print("Setting up histogram computations", end='', flush=True)
var = 'CHL'
bins_name = 'bins_' + var
bins = dh.axis.Regular(CHL_NBINS, *CHL_RANGE,
transform=dh.axis.transform.log)
hists = []
lgr.msg("Setting up histogram computations")
for m, mask in zip(MASKS, MASKS_VAR):
print('.', end='', flush=True)
lgr.progress()
h = ds[var].where(ds[mask]).groupby('time').map(
hist_grp, shortcut=True, args=[bins, bins_name]
)
......@@ -88,13 +86,13 @@ def main(args):
encoding = {v: {'dtype': 'uint32', '_FillValue': 2**30-1}
for v in ['hist_' + v for v in VARS]}
m_next("Executing computations / Writing to disk")
lgr.msg("Executing computations / Writing to disk")
hist = hist.load()
ofile = lib.data.hists.get_filename(args)
lib.check_output_dir(ofile, file=True)
lib.setup_metadata(hist, args)
hist.to_netcdf(ofile, encoding=encoding)
m_end()
lgr.end()
return hist
......
......@@ -216,26 +216,6 @@ def progressbar(it, prefix="", size=60, file=sys.stdout):
file.flush()
def m_start(msg):
    """Announce the start of a step: print ``msg`` + '...' with no newline.

    Flushes immediately so the message is visible while the step runs.
    """
    print(f"{msg}...", end='', flush=True)
def m_end(msg='done'):
    """Close the current step line: print ``msg`` + '.' and a newline."""
    print(f"{msg}.", flush=True)
def m_next(msg_start, msg_end=None):
    """Finish the current step and immediately announce the next one.

    If ``msg_end`` is given it is used to close the previous step,
    otherwise the default closing message is printed.
    """
    if msg_end is not None:
        m_end(msg_end)
    else:
        m_end()
    m_start(msg_start)
def m_progress(char="."):
    """Write one progress tick (default '.') to stdout, flushed, no newline."""
    out = sys.stdout
    out.write(char)
    out.flush()
def fix_time_daily(*datasets):
out = []
for ds in datasets:
......@@ -256,6 +236,10 @@ class Logger:
def _msg(msg: str):
print(msg, end='', flush=True)
@classmethod
def progress(cls, char: str = '.'):
# Emit a single progress tick (default '.') via the class-level message
# writer; replacement for the removed module-level m_progress helper.
cls._msg(char)
def msg(self, msg: str = None, msg_end: str = None):
if self.in_progress:
self.end(msg_end)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment