Commit e46a9a20 authored by POLCHER Jan

First working version of the tool to build the diagnostic file.

parent 8882b49b
@@ -8,7 +8,6 @@ import codecs
#
import getargs
config = getargs.SetupConfig()
import RPPtools as RPP
#
log_master, log_world = getargs.getLogger(__name__)
INFO, DEBUG, ERROR = log_master.info, log_master.debug, log_world.error
@@ -16,6 +15,7 @@ INFO_ALL, DEBUG_ALL = log_world.info, log_world.debug
#
class Locations :
def __init__(self, bbox) :
import RPPtools as RPP
self.lid = []
self.lon = []
self.lat = []
@@ -262,3 +262,25 @@ class Locations :
# uperrvar[:] = gupstrerr[:]
return
#
class PlacedLocations :
def __init__(self, GraphFile) :
nc = Dataset(GraphFile, 'r')
self.nbloc = nc.dimensions["locations"].size
self.gridshape = [nc.dimensions["y"].size,nc.dimensions["x"].size]
if "st_locations" in nc.variables.keys() :
locinfo=nc.variables["st_locations"][:,:]
else :
locinfo=nc.variables["locations"][:,:]
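# Each column of locinfo describes one placed station : row 0 holds its ID, rows 1 and 2 the
# i and j indices of the grid box, and rows 3 and 4 two further indices into the routing graph
# (stored below as FHTUindex and FMONindex).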
self.ids = locinfo[0,:].astype(int).tolist()
self.Fiindex = locinfo[1,:].astype(int).tolist()
self.Fjindex = locinfo[2,:].astype(int).tolist()
self.FHTUindex = locinfo[3,:].astype(int).tolist()
self.FMONindex = locinfo[4,:].astype(int).tolist()
self.lonrange = [np.min(nc.variables["lon"][:,:]), np.max(nc.variables["lon"][:,:])]
self.latrange = [np.min(nc.variables["lat"][:,:]), np.max(nc.variables["lat"][:,:])]
nc.close()
return
def mask(self, i) :
# Compute the mask of the area upstream of the station, using the information in the GraphFile.
# For now this is a placeholder which returns a mask of zeros on the routing grid.
mask=np.zeros(self.gridshape)
return mask
import numpy as np
import datetime
import sys
import os
from inspect import currentframe, getframeinfo
from netCDF4 import Dataset
from netCDF4 import stringtochar, chartostring, stringtoarr
#
localdir=os.path.dirname(getframeinfo(currentframe()).filename)
sys.path.append(localdir+'/Stations')
FillValue = 1.e+20
#
# Gather user input.
# Information used to construct routing-graph.nc (i.e. the run.def used by RoutingPP)
ConfigFile="run.def"
import configparser
stationsconfig = configparser.ConfigParser()
stationsconfig.read("stations.def")
GRDCFile=stationsconfig.get("OverAll","GRDCFile",fallback=None)
StationList=[s.strip() for s in stationsconfig.get("OverAll","StationList",fallback="").split(',') if len(s.strip()) > 0]
GraphFile=stationsconfig.get("OverAll","GraphFile",fallback=None)
DiagFile=stationsconfig.get("OverAll","DiagFile",fallback="StationDiag.nc")
StructureFile = stationsconfig.get("OverAll","StructuresFile",fallback=None)
# If not all the information is in stations.def, get it from run.def
if not isinstance(GraphFile, str) or not isinstance(StructureFile, str) :
config = configparser.ConfigParser()
config.read(ConfigFile)
GraphFile=config.get("OverAll","GraphFile")
StructureFile = config.get("OverAll","StructuresFile",fallback=None)
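#
# An illustrative stations.def (the section and keys are the ones read above ;
# the values are only an example of what such a file could contain) :
#
# [OverAll]
# GRDCFile = GRDC_Monthly_Data.nc
# StationList = Lobith, Kaub
# GraphFile = routing_graph.nc
# DiagFile = StationDiag.nc
#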
#
# Check we have a list of stations to be diagnosed
#
if len(StationList) < 1 :
print("ERROR : no stations provided")
sys.exit()
print(StationList)
#
# Some extra modules so we can start to work
#
import AllStations as AS
import Locations as LO
#
# Read all the gauging stations positioned in the routing graph
#
L = LO.PlacedLocations(GraphFile)
#
# Read all the stations in the river gauge (GRDC) database
#
A=AS.AllStations()
A.fetchnetcdf(GRDCFile, metaonly=True, region=[L.lonrange,L.latrange])
#
# Find the IDs of all the stations listed in stations.def
#
IDs=[]
for teststn in StationList :
IDs.append(A.FindID(teststn.strip()))
if IDs[-1] < 0 :
print("Station ", teststn, " could not be found in the GRDC file over your region.")
# Get additional information from the GRDC file.
Rivers=[]
Upstream=[]
for istn in IDs :
if istn >= 0 :
Rivers.append(A.RiverfromID(istn))
Upstream.append(A.UpstreamfromID(istn))
else :
Rivers.append("-")
Upstream.append(0)
#
# Find the location within the routing graph of the stations to be diagnosed.
#
Lind = []
for i in IDs :
if i >= 0 :
if i in L.ids :
Lind.append(L.ids.index(i))
else :
print("Station ", StationList[IDs.index(i)], " (ID =",i,") has not been located in the routing graph.")
Lind.append(-1)
else :
Lind.append(-1)
#
# Create the netCDF file with the information needed to diagnose the ORCHIDEE output
#
nc = Dataset(DiagFile, 'w', format='NETCDF4_CLASSIC')
nc.createDimension('x', L.gridshape[1])
nc.createDimension('y', L.gridshape[0])
nc.setncattr("RoutingGraph_File", GraphFile)
nc.setncattr("GRDCObservation_File", GRDCFile)
# Create variables only if the GRDC station has been placed on the graph
ncvar=[]
for i,loc in enumerate(Lind) :
if loc >= 0 :
vname="ST"+str(IDs[i])
print("Creating Variable : ", vname, StationList[i])
ncvar.append(nc.createVariable(vname, float, ('y','x')))
ncvar[-1].Name=StationList[i]
ncvar[-1].River=Rivers[i]
ncvar[-1].ObsUpstream=Upstream[i]
ncvar[-1].Fiindex=L.Fiindex[loc]
ncvar[-1].Fjindex=L.Fjindex[loc]
ncvar[-1].FHTUindex=L.FHTUindex[loc]
ncvar[-1].FMONindex=L.FMONindex[loc]
ncvar[-1][:,:] = L.mask(loc)
nc.close()
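#
# A minimal sketch of how the resulting file could be used (the "ST<id>" variables and their
# attributes are the ones created above ; how they are combined with the ORCHIDEE output is
# left to the diagnostic scripts and only assumed here) :
#
# diag = Dataset(DiagFile, 'r')
# for vname, var in diag.variables.items() :
#     if vname.startswith("ST") :
#         print(vname, var.Name, var.River, var.ObsUpstream)
#         stnmask = var[:,:]   # mask of the area upstream of the station on the (y,x) grid
# diag.close()
#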
@@ -40,6 +40,30 @@ class AllStations :
def LastDate(self) :
return max([stn.Time[-1] for stn in self.stations])
#
# Inquiry functions
#
def FindID(self, name) :
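# Case insensitive substring match on the station names ; returns the GRDC number of the
# last matching station, or -1 when no station matches.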
ID = -1
for i,stn in enumerate(self.stations) :
if isinstance(stn.Name, str) :
stnname=stn.Name
else :
stnname=stn.Name.tolist()
#
if stnname.lower().find(name.lower()) >= 0 :
ID = stn.Number
return ID
def RiverfromID(self, stnid) :
allids=[i.Number for i in self.stations]
if isinstance(self.stations[allids.index(stnid)].River, str) :
river = self.stations[allids.index(stnid)].River
else :
river = self.stations[allids.index(stnid)].River.tolist()
return river
def UpstreamfromID(self, stnid) :
allids=[i.Number for i in self.stations]
return self.stations[allids.index(stnid)].Area
#
# Function to dump all stations into a NetCDF file
#
def dumpnetcdf(self, dates, filename, hist) :
@@ -240,7 +264,7 @@ class AllStations :
#
#
#
def fetchnetcdf(self, filename) :
def fetchnetcdf(self, filename, metaonly=False, region=[[-180.0,180.0],[-90.0,90.]]) :
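# metaonly : read only the station metadata and skip the discharge time series.
# region : [[lon_min,lon_max],[lat_min,lat_max]] bounding box ; stations outside it are dropped.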
ncin=Dataset(filename, 'r')
#
self.nbstations = ncin.variables["number"].shape[0]
@@ -256,10 +280,16 @@
if len(tmpmerge) > 0 :
self.mergersource = tmpmerge
#
for i in range(self.nbstations) :
self.stations.append(ST.Station("netcdf", ncin, index=i))
# Get coordinates for geographical filter
lon=ncin.variables["lon"][:]
lat=ncin.variables["lat"][:]
#
for i in range(self.nbstations) :
if min(region[0]) < lon[i] < max(region[0]) and min(region[1]) < lat[i] < max(region[1]) :
self.stations.append(ST.Station("netcdf", ncin, index=i, metaonly=metaonly))
ncin.close()
# Keep the actual number of stations read (after the geographical filter)
self.nbstations = len(self.stations)
#
return
#
@@ -17,7 +17,7 @@ defint = -999
defstr = "XXX"
#
class Station :
def __init__(self, ftype, filename, index=0, regioncode=-999, countrycode="XX") :
def __init__(self, ftype, filename, index=0, regioncode=-999, countrycode="XX", metaonly=False) :
#
# Set default values for the station
#
@@ -51,7 +51,9 @@ class Station :
elif ftype.find("SpainJson") >= 0 :
self.Spainjsonread(filename, countrycode, regioncode)
elif ftype.find("netcdf") >= 0 :
self.getfromnetcdf(filename, index)
self.getmetafromnetcdf(filename, index)
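# The metadata are always read ; the discharge time series is only fetched when metaonly is False.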
if not metaonly :
self.getdatafromnetcdf(filename, index)
else:
print("Unsupported file format :", ftype)
sys.exit()
@@ -267,8 +269,9 @@ class Station :
#
return
def getfromnetcdf(self, nc, i):
def getmetafromnetcdf(self, nc, i):
#
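# Remember the position of the station in the source file.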
self.Index = i
self.Number = nc.variables["number"][i]
self.WMOregion = nc.variables["WMOreg"][i]
self.WMOsubregion = nc.variables["WMOsubreg"][i]
@@ -281,10 +284,7 @@
self.Area = nc.variables["area"][i]
self.Altitude = nc.variables["altitude"][i]
self.Location = np.append(np.copy(nc.variables["lon"][i]),np.copy(nc.variables["lat"][i]))
# Convert time to
self.Time = CC.gettimeaxis(nc)
#
FillOrig = nc.variables["hydrographs"]._FillValue
if "alternative_index" in nc.variables.keys() :
# Get names of alternatives
nbalt,nbstations = nc.variables["alternative_index"].shape
@@ -303,7 +303,13 @@
else :
self.mergersource = [nc.history]
self.Original = [nc.variables["hydrographs"][:,i]]
return
#
def getdatafromnetcdf(self, nc, i):
# Convert the time axis of the file into dates
self.Time = CC.gettimeaxis(nc)
#
FillOrig = nc.variables["hydrographs"]._FillValue
#
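# Replace the fill values of the original time series by NaN.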
for d in self.Original :
d[d >= FillOrig] = np.nan