Lionel GUEZ authored: Since `NR_util` has been integrated into Jumble.
test_send_recv.f90
program test_send_recv
use, intrinsic:: ISO_FORTRAN_ENV, only: error_unit
! Libraries:
use ezmpi, only: ezmpi_bcast
use jumble, only: get_command_arg_dyn, read_column, new_unit, deg_to_rad, &
assert
use mpi_f08, only: mpi_init, mpi_finalize, MPI_Comm_rank, MPI_Comm_world, &
MPI_Comm_size, mpi_abort, MPI_TAG_UB, MPI_Comm_get_attr, &
MPI_ADDRESS_KIND, MPI_io, MPI_ANY_SOURCE
use shapelib_03, only: dbf_read_attribute_03
use derived_types, only: snapshot, shpc
use read_snapshot_m, only: read_snapshot
use recv_snapshot_m, only: recv_snapshot
use send_snapshot_m, only: send_snapshot
use shpc_close_m, only: shpc_close
use shpc_open_m, only: shpc_open
use write_eddy_m, only: write_eddy
use write_snapshot_m, only: write_snapshot
implicit none
character(len = :), allocatable:: shpc_dir
type(snapshot) s
integer rank, n_proc, copy, n_dates, unit
logical flag
integer(kind = MPI_ADDRESS_KIND) attribute_val
real:: corner_deg(2) = [0.125, -59.875]
! longitude and latitude of the corner of the whole grid, in degrees
real:: step_deg(2) = [0.25, 0.25] ! longitude and latitude steps, in degrees
integer:: nlon = 120, nlat = 120
integer:: dist_lim = 12
! We look for an overlapping eddy within dist_lim (in grid points) of
! the first extremum.
namelist /main_nml/ dist_lim
namelist /grid_nml/ corner_deg, step_deg, nlon, nlat
logical periodic ! grid is periodic in longitude
type(shpc) hshp
!---------------------------------------------------------------------
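! Run with exactly two processes: rank 1 reads a snapshot and sends
! it, rank 0 receives it and writes it out.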
call mpi_init
call MPI_Comm_rank(MPI_Comm_world, rank)
call MPI_Comm_size(MPI_Comm_world, n_proc)
if (n_proc /= 2) then
if (rank == 0) print *, "test_send_recv: exactly 2 processes required"
call mpi_abort(MPI_Comm_world, errorcode = 1)
end if
call get_command_arg_dyn(1, shpc_dir, "Required argument: SHPC-directory")
if (rank == 0) then
call MPI_Comm_get_attr(MPI_Comm_world, MPI_io, attribute_val, flag)
call assert(flag, "test_send_recv MPI_Comm_get_attr MPI_io")
call assert(attribute_val == MPI_ANY_SOURCE, &
"test_send_recv MPI_io MPI_ANY_SOURCE")
write(unit = error_unit, nml = main_nml)
write(unit = error_unit, fmt = *) "Enter namelist main_nml."
read(unit = *, nml = main_nml)
write(unit = *, nml = main_nml)
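! Read the grid description stored in the SHPC directory: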
call new_unit(unit)
open(unit, file = shpc_dir // "/grid_nml.txt", status = "old", &
action = "read", position = "rewind")
read(unit, nml = grid_nml)
close(unit)
! Since we require the grid spacing to be uniform, the value of
! "periodic" can be deduced from the values of step_deg(1) and nlon:
periodic = nint(360. / step_deg(1)) == nlon
print *, "periodic = ", periodic
if (periodic) call assert(2 * dist_lim * step_deg(1) < 180., &
"test_send_recv dist_lim")
copy = merge(dist_lim, 0, periodic)
end if
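! Both processes open the shapefile collection read-only: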
call shpc_open(hshp, trim(shpc_dir), pszaccess = "rb")
if (rank == 0) then
n_dates = size(hshp%ishape_last)
end if
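! Send to rank 1 the values read or computed by rank 0: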
call ezmpi_bcast(corner_deg, root = 0)
call ezmpi_bcast(step_deg, root = 0)
call ezmpi_bcast(nlon, root = 0)
call ezmpi_bcast(nlat, root = 0)
call ezmpi_bcast(copy, root = 0)
call ezmpi_bcast(n_dates, root = 0)
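! Rank 1 reads the snapshot at date index hshp%d0: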
if (rank == 1) call read_snapshot(s, [hshp], nlon, nlat, k = hshp%d0, &
corner = corner_deg * deg_to_rad, step = step_deg * deg_to_rad, &
copy = copy)
! hshp%d0 is a plain integer component, so it stays usable below even
! though the collection is now closed.
call shpc_close(hshp)
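! Transfer the snapshot from rank 1 to rank 0, then write it out: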
if (rank == 1) then
call send_snapshot(s, dest = 0, tag = hshp%d0)
else
! rank == 0
call MPI_Comm_get_attr(MPI_Comm_world, MPI_TAG_UB, attribute_val, flag)
call assert(flag, "test_send_recv MPI_Comm_get_attr MPI_TAG_UB")
print *, "MPI_TAG_UB = ", attribute_val
call recv_snapshot(s, nlon, nlat, copy, source = 1, tag = hshp%d0)
call write_snapshot(s, corner_deg, step_deg, nlon, nlat, copy, hshp%d0)
end if
call mpi_finalize
end program test_send_recv