Commit 0bd87daa authored by Thomas Dubos's avatar Thomas Dubos
Browse files

Polish README and scripts

parent 15c8c58d
......@@ -3,3 +3,4 @@ dynamico
XIOS
Grib/*
*~
*.log
# Multiscale Transport
Transport experiments with DYNAMICO meshes: quasi_uniform and variable-resolution
## Install on camelot ##
```
cd /homedata/$USER
git clone git@gitlab.in2p3.fr:ipsl/projets/dynamico/multiscale_transport.git
cd multiscale_transport
./scripts/dynamico.sh install camelot
module load git python/3.6-anaconda50
```
## Pre-process ##
```
./scripts/netcdf.sh unpack
./scripts/netcdf.sh extract
./scripts/netcdf.sh create
./scripts/netcdf.sh merge
./scripts/netcdf.sh pressure
```
## Uniform-resolution run on camelot ##
```
cd RUN
qsub camelot.sh
```
#!/bin/bash
#PBS -N multiscale-pp
#PBS -q std
#PBS -n
#PBS -l nodes=1:ppn=1
#PBS -l walltime=06:00:00
#PBS -l mem=4gb
#PBS -l vmem=4gb

# Pre-processing batch job: run the full netcdf.sh pipeline
# (unpack -> extract -> create -> merge -> pressure) on camelot.
# Abort at the first failing stage so later stages never run on
# partial or missing data.
set -e

# Jobs start in the HOME directory, cd to submitted directory
cd "$PBS_O_WORKDIR"

module load git python/3.6-anaconda50

./scripts/netcdf.sh unpack
./scripts/netcdf.sh extract
./scripts/netcdf.sh create
./scripts/netcdf.sh merge
./scripts/netcdf.sh pressure
......@@ -15,10 +15,6 @@ def invert_lev(time, name, var):
for it in range(len(time)):
print("Inverting %s at time %d."%(name, time[it]) )
buf1 = var[it,:,:,:]
# for l in range((llm-1)//2):
# buf = buf3[l,:,:]
# buf3[l,:,:] = buf3[llm-l-1,:,:]
# buf3[llm-l-1,:,:] = buf
buf2 = buf1[::-1,:,:]
var[it,:,:,:] = buf2
......@@ -30,12 +26,12 @@ def pressure(time,A,B,ps, p):
p[it,iz,:,:] = A[iz]+B[iz]*ps_it[:,:]
with cdf.Dataset('merged.nc', 'a') as data:
time, lev, A, B, ps = getvars(data, 'time', 'lev', 'hyam', 'hybm', 'var134')
p, u, v, t = getvars(data, 'q', 'u', 'v', 't')
time, lev, A, B = getvars(data, 'time', 'lev', 'hyam', 'hybm')
ps, p, u, v, t = getvars(data, 'var134', 'q', 'u', 'v', 't')
time, A, B = time[:], A[:], B[:]
pressure(time, A[::-1], B[::-1], ps, p)
invert_lev(time, 'u', u)
invert_lev(time, 'v', v)
invert_lev(time, 't', t)
invert_lev(time, 'Zonal wind', u)
invert_lev(time, 'Meridional wind', v)
invert_lev(time, 'Temperature', t)
......@@ -3,10 +3,11 @@
function cmd_unpack()
{
    # Unpack the master archive ECMWF*.tar into a fresh Grib/ directory,
    # then unzip the *.zip files it contains (cmd_unzip).
    cd "$ROOT" || exit 1
    # Start from a clean Grib/ even if a previous run left files behind;
    # rm -rf (unlike rmdir) also works on a non-empty directory.
    rm -rf Grib
    mkdir Grib
    echo "Extracting $(ls ECMWF*.tar)"
    cd Grib
    date
    tar --strip-components=5 -x -v -f ../ECMWF*.tar
    cmd_unzip
}
......@@ -15,6 +16,7 @@ function cmd_unzip()
{
cd $ROOT/Grib
for ZIPFILE in *.zip ; do
date
echo "Extracting $PWD/$ZIPFILE"
unzip $ZIPFILE && rm -f $ZIPFILE
done
......@@ -30,6 +32,7 @@ function cmd_extract()
tar xf "${DATE}"/surf.$DATE.tar
tar xf "${DATE}"/uvtqz.$DATE.tar
rm -f *.*_12
date
ls -lh *.${DATE}*_*
done
}
......@@ -53,6 +56,7 @@ function cmd_create()
mkdir NetCDF
for FILE in surf uvtqz ; do
LIST=$(cmd_list $FILE)
date
echo $LIST
cdo -f nc4 copy $LIST NetCDF/$FILE.nc
done
......@@ -61,9 +65,17 @@ function cmd_create()
function cmd_merge()
{
    # Merge surf.nc and uvtqz.nc (produced by cmd_create) into merged.nc.
    # Guard the cd: if it fails, cdo would otherwise run in whatever
    # directory we happen to be in and read/write the wrong files.
    cd "$ROOT/NetCDF" || exit 1
    date
    cdo merge surf.nc uvtqz.nc merged.nc
}
function cmd_pressure()
{
    # Compute the 3D pressure field in merged.nc and flip the vertical
    # axis of the 3D fields; the actual work is done in place by
    # scripts/netcdf.py.
    cd "$ROOT/NetCDF" || exit 1
    date
    python3 ../scripts/netcdf.py
}
function cmd_()
{
cat <<EOF
......@@ -72,18 +84,22 @@ $0 unpack Unpack master archive ECMWF*.tar then unzip the *.zip files it
$0 extract Unpack *.tar files unpacked with above command
$0 create From files obtained at previous step, create surf.nc and uvtqz.nc for the whole period
$0 merge Merge surf.nc and uvtqz.nc into merged.nc, and invert vertical axis to match DYNAMICO order
$0 pressure In merged.nc, compute 3D pressure field and flip vertical axis of 3D fields
Debug commands :
$0 unzip Unzip and delete *.zip files contained in master archive
$0 list surf|uvtqz List Grib files to be concatenated by $0 create
On camelot you must :
module load git python/3.6-anaconda50
EOF
}
# Period to process (10 days); shrink the list for quick debugging runs.
DAYS="01 02 03 04 05 06 07 08 09 10"
# DAYS="01 02"
TIMES="00_00 00_03 00_06 00_09 12_00 12_03 12_06 12_09"

# Absolute path of the repository root (parent of scripts/), resolved
# with -P so symlinks are not kept in the path; quoted so it also works
# when the script is invoked via a path containing spaces.
ROOT=$(cd -P "$(dirname "$0")/.." ; pwd)

# Dispatch: the first argument selects cmd_<name>; remaining arguments
# are forwarded verbatim ("$@" preserves their quoting, unlike $*).
# Output is duplicated to <command>.log via tee.
CMD=$1 ; shift
cmd_$CMD "$@" | tee "$CMD.log"
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment