Commit b55a165b authored by Lionel GUEZ

Add TAGS rule in `CMakeLists.txt`. Do not rely on external `settings.mk` in GNUmakefile.

Bug fix in procedure `dispatch_snapshot`: an isolated eddy must also be a valid eddy.

In script `plot_snapshot.py`, in function `snaphot`, replace argument `ax`
with argument `new_figure` so the user does not have to call `figure`
and `axes(projection = projection)`. Add argument `light` to function
`snaphot` so we can call `snaphot` with `light` set to `False` for a second
snapshot. Add a test to read `h.nc` only when we need it.
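
A minimal sketch of the new call pattern (assumed usage, not part of the commit; it supposes `plot_snapshot.py` is importable and its module-level `projection` is defined at import time; `Snapshot_1` and `Snapshot_2` are the directory names used by `test_successive_overlap`):

```python
import matplotlib.pyplot as plt

from plot_snapshot import snaphot

# First snapshot: snaphot creates the figure and the GeoAxes itself;
# light = True keeps the plot uncluttered (simpler markers, no
# eddy-index annotations).
snaphot("Snapshot_1", new_figure = True, light = True)

# Second snapshot drawn on the same axes (light defaults to False),
# with dashed contours to tell the two snapshots apart.
snaphot("Snapshot_2", dashed = True)

plt.show()
```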

In script `read_overlap.py`, print more information: the number of
nodes, the number of edges, the number of nodes with at least one
successor, the number of nodes with at least one predecessor, and the
splitting and merging events.
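
As a reference for the numbers printed, the same counting logic can be reproduced on a toy graph; a minimal networkx sketch (toy node labels of the form (snapshot, eddy), not real data from `edgelist.csv`):

```python
import networkx as nx

# Toy directed graph: node (k, i) stands for eddy i of snapshot k.
G = nx.DiGraph()
G.add_edge((1, 1), (2, 1))  # (1, 1) overlaps (2, 1)
G.add_edge((1, 2), (2, 1))  # (1, 2) also overlaps (2, 1): merging
G.add_edge((1, 3), (2, 2))
G.add_edge((1, 3), (2, 3))  # (1, 3) overlaps two eddies: splitting

print("Number of nodes:", len(G))
print("Number of edges:", G.number_of_edges())

# A node with in-degree >= 2 marks a merging event, out-degree >= 2 a
# splitting event; degree >= 1 counts predecessors or successors.
for n, d in G.in_degree():
    if d >= 2: print(list(G.predecessors(n)), "->", n)
for n, d in G.out_degree():
    if d >= 2: print(n, "->", list(G.successors(n)))
```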

In script `stat.py`, use the convention that shapefiles are grouped in
a directory instead of identifying a set of shapefiles by the end of
their basename.
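
A minimal pyshp sketch of the new layout (hypothetical directory name `Snapshot_1`; under the old convention the same set would have been identified by a basename suffix such as `extremum_1.shp` via `--end_filename`):

```python
from os import path
import shapefile

directory = "Snapshot_1"  # hypothetical example

# The three shapefiles now keep fixed basenames inside the directory.
with shapefile.Reader(path.join(directory, "extremum")) as extremum:
    print("number of extrema:", len(extremum))
```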

In main program `test_successive_overlap`, we know that `delta_in` for
snapshot 1 and `delta_out` for snapshot 2 are `huge(0)`, so do not test
this. Print eddy identifiers on a single line. Also print the
identifiers of isolated eddies.
parent b8fbe8e6
@@ -29,7 +29,7 @@ import itertools
 import cartopy.crs as ccrs
 from os import path

-def snaphot(directory, ax, dashed = False):
+def snaphot(directory, dashed = False, light = False, new_figure = False):
     """Plots extrema, outermost contours and max-speed contours.

     directory: containing the three shapefiles.
@@ -47,6 +47,12 @@ def snaphot(directory, ax, dashed = False):
     else:
         m_s_iterShapes = reader_m_s.iterShapes()

+    if new_figure:
+        plt.figure()
+        ax = plt.axes(projection = projection)
+    else:
+        ax = plt.gca()
+
     for shape_rec_extr, shape_outer, shape_m_s \
         in zip(reader_extr, reader_outer.iterShapes(), m_s_iterShapes):
         points = shape_rec_extr.shape.points[0]
@@ -68,27 +74,27 @@ def snaphot(directory, ax, dashed = False):
                          transform = src_crs)

         if shape_rec_extr.record.valid == 1:
-            if args.light:
+            if light:
                 lines[0].set_marker("+")
             else:
                 lines[0].set_marker("o")
-        elif not args.light:
+        elif not light:
             # Invalid outermost contour
             lines[0].set_marker("s")

-        if not args.light:
+        if not light:
             ax.annotate(str(shape_rec_extr.record.eddy_index),
                         projection.transform_point(points[0], points[1],
                                                    src_crs),
                         xytext = (3, 3), textcoords = "offset points")

-        if shape_outer.shapeType != shapefile.NULL and not args.light \
+        if shape_outer.shapeType != shapefile.NULL and not light \
            or shape_rec_extr.record.valid:
             points = np.array(shape_outer.points)
             lines = ax.plot(points[:, 0], points[:, 1], color = color,
                             transform = src_crs)

-            if not args.light:
+            if not light:
                 if shape_rec_extr.record.valid == 0:
                     # Invalid outermost contour
                     lines[0].set_marker("s")
@@ -112,7 +118,7 @@ def snaphot(directory, ax, dashed = False):
             lines = ax.plot(points[:, 0], points[:, 1], color = color,
                             transform = src_crs)
-            if not args.light: lines[0].set_marker("o")
+            if not light: lines[0].set_marker("o")
             if dashed: lines[0].set_linestyle("dashed")

     ax.gridlines(draw_labels = True)
@@ -138,9 +144,10 @@ if __name__ == "__main__":
     parser.add_argument("directory", help = "containing the three shapefiles")
     args = parser.parse_args()

-    with netCDF4.Dataset("h.nc") as f:
-        longitude = f.variables["longitude"][:]
-        latitude = f.variables["latitude"][:]
+    if args.grid or args.velocity:
+        with netCDF4.Dataset("h.nc") as f:
+            longitude = f.variables["longitude"][:]
+            latitude = f.variables["latitude"][:]

     if args.window:
         l = input("llcrnrlon, llcrnrlat, urcrnrlon, "
@@ -150,12 +157,14 @@ if __name__ == "__main__":
         if urcrnrlon - llcrnrlon > 360:
             sys.exit("bad values of urcrnrlon and llcrnrlon")

-        longitude += np.ceil((llcrnrlon - longitude) / 360) * 360
-        # (in [llcrnrlon, llcrnrlon + 2 pi[)
-        lon_mask = longitude <= urcrnrlon
-        lat_mask = np.logical_and(latitude >= llcrnrlat, latitude <= urcrnrlat)
-    else:
+        if args.grid or args.velocity:
+            longitude += np.ceil((llcrnrlon - longitude) / 360) * 360
+            # (in [llcrnrlon, llcrnrlon + 2 pi[)
+            lon_mask = longitude <= urcrnrlon
+            lat_mask = np.logical_and(latitude >= llcrnrlat,
+                                      latitude <= urcrnrlat)
+    elif args.grid or args.velocity:
         lon_mask = np.ones(len(longitude), dtype = bool)
         lat_mask = np.ones(len(latitude), dtype = bool)
@@ -170,7 +179,7 @@ if __name__ == "__main__":
         ax.plot(lon_2d.reshape(-1), lat_2d.reshape(-1), transform = src_crs,
                 marker = "+", color = "gray", linestyle = "None")

-    snaphot(args.directory, ax, args.dashed)
+    snaphot(args.directory, args.dashed, args.light)

     if args.velocity:
         with netCDF4.Dataset("uv.nc") as f:
......
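
For reference, the longitude wrap in the hunk above maps each value into [llcrnrlon, llcrnrlon + 360[; a small worked example with arbitrary values:

```python
import numpy as np

llcrnrlon = -30.
longitude = np.array([-60., 0., 240., 350.])

# Add to each longitude the multiple of 360 that lands it in
# [llcrnrlon, llcrnrlon + 360[:
longitude += np.ceil((llcrnrlon - longitude) / 360) * 360
print(longitude)  # [300.   0. 240. -10.]
```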
@@ -18,8 +18,27 @@ with open("edgelist.csv") as f:
             int(row[3]), float(row[4])
         G.add_edge((k1, i1), (k2, i2), weight = weight)

-pos = nx.nx_agraph.graphviz_layout(G, prog="dot")
+print("Number of nodes:", len(G))
+print("Number of edges:", G.number_of_edges())
+
+n1 = 0
+n2 = 0
+print("Merging:")
+for n, d in G.in_degree():
+    if d >= 1:
+        n2 += 1
+        if d >= 2: print(list(G.predecessors(n)), "->", n)
+
+print("Splitting:")
+for n, d in G.out_degree():
+    if d >= 1:
+        n1 += 1
+        if d >= 2: print(n, "->", list(G.successors(n)))
+
+print("number of nodes with at least one successor =", n1)
+print("number of nodes with at least one predecessor =", n2)
+pos = nx.nx_agraph.graphviz_layout(G, prog = "dot")

 plt.figure()
 nx.draw(G, pos, with_labels=True)
 plt.show()
@@ -3,11 +3,10 @@
 import argparse
 import shapefile
 import numpy as np
+from os import path

 parser = argparse.ArgumentParser()
-parser.add_argument("-e", "--end_filename", help = "add character string after "
-                    "extremum, outermost contour and max_speed_contour",
-                    default = "")
+parser.add_argument("directory", help = "containing the three shapefiles")
 args = parser.parse_args()

 n_valid_cycl = 0
@@ -20,8 +19,8 @@ n_points = 0
 n_missing_speed = np.zeros(3, dtype = int)
 n_valid_speed = 0

-with shapefile.Reader("extremum" + args.end_filename) as extremum, \
-     shapefile.Reader("outermost_contour" + args.end_filename) \
+with shapefile.Reader(path.join(args.directory, "extremum")) as extremum, \
+     shapefile.Reader(path.join(args.directory, "outermost_contour")) \
      as outermost_contour:
     n_extr = len(extremum)
@@ -94,7 +93,7 @@ n_max_speed = 0
 n_points_valid = 0
 n_points = 0

-with shapefile.Reader("max_speed_contour" + args.end_filename) \
+with shapefile.Reader(path.join(args.directory, "max_speed_contour")) \
      as max_speed_contour:
     for shape_rec in max_speed_contour:
         l = len(shape_rec.shape.points)
......
@@ -56,3 +56,4 @@ target_link_libraries(extraction_eddies ${contour_531_LIBRARY}
 include(Tests/CMakeLists.txt)
 include(ConfigureCompilerFlags)
+include(TAGS)
@@ -49,7 +49,7 @@
 \maketitle
 \tableofcontents

-\section{Extraction of eddies}
+\section{Extraction of instantaneous eddies}

 \subsection{Miscellaneous}
@@ -1004,12 +1004,12 @@ counting 10 characters per integer, 13 characters per real, \np{5e7}
 vertices and two edges per vertex on average, is 6 GiB.

 The problem of isolated vertices. The information is already present,
-implicitly, in the files mentioned above. Indeed, we have the number
-of eddies at each date from the list of eddy identifiers in the
-shapefiles, and we have the list of non-isolated eddies from the list
-of edges. But the list of isolated vertices is thus not very easily
-found. We therefore add the following output files:
+implicitly, in the shapefiles and the edgelist files. Indeed, we have
+the number of valid eddies at each date from the shapefiles, and we
+have the list of non-isolated eddies from the list of edges. But the
+list of isolated vertices is thus not very easily found. We therefore
+add the following output files:
 \begin{description}
 \item[number\_eddies\_\$m.csv] A date-index column, a column with the
   number of visible eddies, a column with the number of eddies
......
@@ -31,8 +31,7 @@ sources := $(sort ${src_test_local_extrema} ${src_test_get_1_outerm} ${src_test_
 lib_list = GPC_F contour_531 numer_rec_95 GPC shapelib_03 netcdf95 geometry jumble netcdff fortrangis shp fortranc nr_util

 makefile_dir = .

-include ${general_compiler_options_dir}/settings.mk
-VPATH += ${makefile_dir}/Tests
+VPATH := ${makefile_dir}

 # 2. Objects and executable files
@@ -55,6 +54,12 @@ execut = test_good_contour test_inside_4 test_get_1_outerm test_local_extrema te
 # 3. Compiler-dependent part

+# Default mode:
+mode = debug
+
+##include ${general_compiler_options_dir}/${FC}_${mode}.mk
+# general_compiler_options_dir should be defined in the environment.
+
 ifeq (${FC},gfortran)
   # gfortran bug:
   FFLAGS := $(subst invalid${comma},,${FFLAGS})
@@ -62,6 +67,69 @@ endif

 # 4. Rules

+SHELL = bash
+# for echo in log commands
+
+%: %.o
+	@echo "Linking $@..."
+	$(LINK.o) $^ $(LOADLIBES) $(LDLIBS) -o $@
+# (must be before the other "%:" rules)
+
+%.o: %.f
+	@echo "Building $@..."
+	$(COMPILE.f) $(OUTPUT_OPTION) $<
+
+%: %.f
+	@echo "Compiling and linking $@..."
+	$(LINK.f) $^ $(LOADLIBES) $(LDLIBS) -o $@
+
+# Extend known suffixes:
+# (if the suffix is not ".f" or ".F")
+
+%.o: %.f90
+	@echo "Building $@..."
+	$(COMPILE.f) $(OUTPUT_OPTION) $<
+
+%.o: %.F90
+	@echo "Building $@..."
+	$(COMPILE.F) $(OUTPUT_OPTION) $<
+
+%: %.f90
+	@echo "Linking $@..."
+	$(LINK.f) $^ $(LOADLIBES) $(LDLIBS) -o $@
+
+%: %.F90
+	@echo "Linking $@..."
+	$(LINK.F) $^ $(LOADLIBES) $(LDLIBS) -o $@
+
+# Use the Fortran compiler for linking:
+LINK.o = $(FC) $(LDFLAGS) $(TARGET_ARCH)
+
+%.so:
+	$(FC) $(LDFLAGS) ${ldflags_lib_dyn} $^ -o $@
+
+%.csv: %.ods
+	unoconv --format=csv --output=$@ $<
+
+.DELETE_ON_ERROR:
+.PHONY: all clean clobber depend
+
+all: log
+
+TAGS: ${sources}
+	ctags -e --language-force=fortran $^
+# Or, on barren machines:
+## etags --language=fortran $^
+
+log:
+	hostname >$@
+	${FC} ${version_flag} >>$@ 2>&1
+	@echo -e "\nFC = ${FC}\n\nCPPFLAGS = ${CPPFLAGS}\n\nFFLAGS = ${FFLAGS}\n\nLDLIBS = ${LDLIBS}\n\nLDFLAGS = ${LDFLAGS}\n\nldflags_lib_dyn = ${ldflags_lib_dyn}" >>$@
+
+clobber: clean
+	rm -f *.mod ${makefile_dir}/depend.mk TAGS
+
+VPATH += ${makefile_dir}/Tests
+
+all: ${execut} log
+
 test_get_1_outerm: ${obj_test_get_1_outerm}
 test_set_max_speed: ${obj_test_set_max_speed}
......
@@ -15,7 +15,7 @@ program test_successive_overlap

 implicit none

-integer unit_edgelist, i, j, k
+integer unit_edgelist, i, k
 type(snapshot) flow(2)
 TYPE(shpfileobject) hshp_extremum ! shapefile extremum
 TYPE(shpfileobject) hshp_outermost ! shapefile outermost_contour
@@ -27,7 +27,7 @@ program test_successive_overlap
 real:: step_deg(2) = [0.25, 0.25], step(2)
 ! longitude and latitude steps, in degrees and in rad

 logical periodic ! grid is periodic in longitude
 integer:: nlon = - 1, nlat = - 1
@@ -66,6 +66,7 @@ program test_successive_overlap
 CALL shpclose(hshp_extremum)
 CALL shpclose(hshp_outermost)
 CALL shpclose(hshp_max_speed)
+print *, "Snapshot 1, k = ", k

 call shp_open_03("Snapshot_2/extremum", pszaccess = "rb", &
      hshp = hshp_extremum)
@@ -79,6 +80,7 @@ program test_successive_overlap
 CALL shpclose(hshp_extremum)
 CALL shpclose(hshp_outermost)
 CALL shpclose(hshp_max_speed)
+print *, "Snapshot 2, k = ", k

 call new_unit(unit_edgelist)
 open(unit_edgelist, file = "edgelist.csv", status = "replace", &
@@ -95,20 +97,37 @@ program test_successive_overlap
 close(unit_edgelist)
 print *, 'Created file "edgelist.csv".'

-do j = 1, 2
-   print *, "i, flow(", j, ")%list_vis%delta_in:"
-
-   do i = 1, flow(j)%number_vis_extr
-      if (flow(j)%list_vis(i)%delta_in /= huge(0)) &
-           print *, i, flow(j)%list_vis(i)%delta_in
-   end do
-
-   print *, "i, flow(", j, ")%list_vis%delta_out:"
-
-   do i = 1, flow(j)%number_vis_extr
-      if (flow(j)%list_vis(i)%delta_out /= huge(0)) &
-           print *, i, flow(j)%list_vis(i)%delta_out
-   end do
-end do
+print *, "Snapshot 1:"
+print *, "Eddies for which delta_out == 1:"
+
+do i = 1, flow(1)%number_vis_extr
+   if (flow(1)%list_vis(i)%delta_out /= huge(0)) &
+        write(unit = *, fmt = "(i0, 1x)", advance = "no") i
+end do
+
+print *
+print *, "Valid isolated eddies:"
+
+do i = 1, flow(1)%number_vis_extr
+   if (flow(1)%list_vis(i)%valid .and. flow(1)%list_vis(i)%delta_out &
+        == huge(0)) write(unit = *, fmt = "(i0, 1x)", advance = "no") i
+end do
+
+print *
+print *, "Snapshot 2:"
+print *, "Eddies for which delta_in == 1:"
+
+do i = 1, flow(2)%number_vis_extr
+   if (flow(2)%list_vis(i)%delta_in /= huge(0)) &
+        write(unit = *, fmt = "(i0, 1x)", advance = "no") i
+end do
+
+print *
+print *, "Valid isolated eddies:"
+
+do i = 1, flow(2)%number_vis_extr
+   if (flow(2)%list_vis(i)%valid .and. flow(2)%list_vis(i)%delta_in &
+        == huge(0)) write(unit = *, fmt = "(i0, 1x)", advance = "no") i
+end do
+
+print *

 end program test_successive_overlap
@@ -6,6 +6,9 @@ contains
 function candidate_overlap(extr_map, list_vis, cyclone)

+  ! Find the eddies in extr_map that are valid and have a given
+  ! cyclonicity.
+
 use derived_types, only: eddy

 integer, allocatable:: candidate_overlap(:)
......
@@ -28,8 +28,9 @@ contains
 if (m == 1 .or. k >= k_begin + max_delta) then
    do i = 1, s%number_vis_extr
-      if (s%list_vis(i)%delta_in == huge(0) .and. s%list_vis(i)%delta_out &
-           == huge(0)) write(unit_isolated, fmt = *) k, i
+      if (s%list_vis(i)%valid .and. s%list_vis(i)%delta_in == huge(0) &
+           .and. s%list_vis(i)%delta_out == huge(0)) &
+           write(unit_isolated, fmt = *) k, i
    end do

    write(unit_number_eddies, fmt = *) k, s%number_vis_extr, &
......