Skip to content
Snippets Groups Projects
Commit 8c9b7b72 authored by Lionel GUEZ's avatar Lionel GUEZ
Browse files

Generalize `eddy_dump.py`

In script `eddy_dump.py`, do not assume that there is a single date
index. Instead, find ishape by iterating over the records and matching
both the date index and the eddy index.

Add a test of overlap in `short_tests.json`.
parent e8a607ef
No related branches found
No related tags found
No related merge requests found
......@@ -4,15 +4,27 @@ import argparse
import shapefile
from os import path
import pprint
import sys
# Dump the records of a given eddy from the three shapefiles in a directory.
parser = argparse.ArgumentParser()
parser.add_argument("directory", help = "containing the three shapefiles")
args = parser.parse_args()

# Identify the eddy by both indices: several dates may coexist in the
# shapefiles, so the eddy index alone is not enough.
reply = input("date_index, eddy_index = ? ").split(",")
date_index = int(reply[0])
eddy_index = int(reply[1])

# Find ishape: scan the extremum shapefile for the record matching both
# indices; its position is the shape index shared by all three shapefiles.
filename = path.join(args.directory, "extremum")
with shapefile.Reader(filename) as f:
    for ishape, rec in enumerate(f.iterRecords()):
        if rec["date_index"] == date_index and rec["eddy_index"] == eddy_index:
            break
    else:
        # Loop completed without a break: no record matches.
        sys.exit("Not found")
for basename in ["extremum", "outermost_contour", "max_speed_contour"]:
filename = path.join(args.directory, basename)
......
......@@ -1013,7 +1013,10 @@ extremums à une même date. Donc, dans le shapefile \verb+extremum+
écrit par un processus donné, les dates sont dans le désordre. En
outre, deux processus peuvent interpoler des extremums à une même
date. Il sera certainement utile de concaténer et trier les shapefiles
en post-traitement.
en post-traitement. Pour trier, je ne suis pas obligé de charger en
mémoire vive tous les shapefiles : je peux simplement faire la
concaténation puis lire dans \verb+extremum.dbf+ par exemple deux suites
d'entiers \verb+date_index+ et \verb+eddy_index+.
Un processus donné alterne lecture de shapefiles (créés par
extraction\_eddies) et écriture de shapefiles (contenant les
......
&MAIN_NML CORNER_DEG=0.125,-89.875, NLON =1440 NLAT =720,
max_delta = 3 /
......@@ -300,7 +300,7 @@
"$src_dir/Tests/Input/Extraction_eddies_region_2_noise/SHP_triplet",
"$src_dir/Tests/Input/Region_2_2006_01_02/SHP_triplet"],
"title" : "Successive_overlap_region_2",
"description": "Same as Successive_overlap_different_snapshots, but with a larger region. The identifying numbers of the connected eddies are not the same for all edges.",
"description": "Overlap of different snapshots. Same as Successive_overlap_different_snapshots, but with a larger region. The identifying numbers of the connected eddies are not the same for all edges.",
"stdin_filename": "$src_dir/Tests/Input/successive_overlap_region_2_nml.txt"
},
{
......@@ -352,7 +352,7 @@
"$src_dir/Tests/Input/Region_4_2006_01_01/SHP_triplet",
"$src_dir/Tests/Input/Region_4_2006_01_01/SHP_triplet"],
"title" : "Non_successive_overlap",
"description": "Same as Successive_overlap, except for max_delta.",
"description": "Overlap of a snapshot with itself. Same as Successive_overlap, except for max_delta.",
"stdin_filename": "$src_dir/Tests/Input/non_successive_overlap_nml.txt"
},
{
......@@ -360,7 +360,7 @@
"$src_dir/Tests/Input/Region_4_2006_01_01/SHP_triplet",
"$src_dir/Tests/Input/Region_4_2006_01_02/SHP_triplet"],
"title" : "NSO_different_snapshots",
"description": "Same as Successive_overlap_different_snapshots, except for max_delta.",
"description": "Overlap of different snapshots. Same as Successive_overlap_different_snapshots, except for max_delta.",
"stdin_filename": "$src_dir/Tests/Input/non_successive_overlap_nml.txt"
},
{
......@@ -370,5 +370,13 @@
"title" : "NSO_region_5",
"description": "Same as Successive_overlap_region_5, except for max_delta.",
"stdin_filename": "$src_dir/Tests/Input/NSO_region_5_nml.txt"
},
{
"args" : ["$src_dir/mkdir_run.sh", "$build_dir/test_overlap",
"$large_input_dir/SHP_triplet_2006_01_01",
"$large_input_dir/SHP_triplet_2006_01_02"],
"title" : "NSO_global",
"description": "Same as Successive_overlap_global, except for max_delta.",
"stdin_filename": "$src_dir/Tests/Input/NSO_global_nml.txt"
}
]
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment