Merge remote-tracking branch 'gfdl/dev/gfdl' into dev/esmg
kshedstrom committed Mar 7, 2022
2 parents 65b2c82 + 4640461 commit b4a67c1
Showing 25 changed files with 677 additions and 593 deletions.
31 changes: 31 additions & 0 deletions config_src/infra/FMS1/MOM_coms_infra.F90
@@ -16,6 +16,7 @@ module MOM_coms_infra

public :: PE_here, root_PE, num_PEs, set_rootPE, Set_PElist, Get_PElist
public :: broadcast, sum_across_PEs, min_across_PEs, max_across_PEs
+public :: any_across_PEs, all_across_PEs
public :: field_chksum, MOM_infra_init, MOM_infra_end

! This module provides interfaces to the non-domain-oriented communication
@@ -438,6 +439,36 @@ subroutine min_across_PEs_real_1d(field, length, pelist)
call mpp_min(field, length, pelist)
end subroutine min_across_PEs_real_1d

+!> Implementation of any() intrinsic across PEs
+function any_across_PEs(field, pelist)
+  logical,           intent(in) :: field      !< Local PE value
+  integer, optional, intent(in) :: pelist(:)  !< List of PEs to work with
+  logical :: any_across_PEs
+
+  integer :: field_flag
+
+  ! FMS1 does not support logical collectives, so integer flags are used.
+  field_flag = 0
+  if (field) field_flag = 1
+  call max_across_PEs(field_flag, pelist)
+  any_across_PEs = (field_flag > 0)
+end function any_across_PEs
+
+!> Implementation of all() intrinsic across PEs
+function all_across_PEs(field, pelist)
+  logical,           intent(in) :: field      !< Local PE value
+  integer, optional, intent(in) :: pelist(:)  !< List of PEs to work with
+  logical :: all_across_PEs
+
+  integer :: field_flag
+
+  ! FMS1 does not support logical collectives, so integer flags are used.
+  field_flag = 0
+  if (field) field_flag = 1
+  call min_across_PEs(field_flag, pelist)
+  all_across_PEs = (field_flag > 0)
+end function all_across_PEs
+
!> Initialize the model framework, including PE communication over a designated communicator.
!! If no communicator ID is provided, the framework's default communicator is used.
subroutine MOM_infra_init(localcomm)
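
These collectives give callers a clean way to test a condition across all PEs without hand-rolling the integer-flag reduction each time. A minimal usage sketch (the local fault condition and the error message are hypothetical; MOM_error and FATAL come from MOM_error_handler):

  logical :: local_fault, global_fault
  ! Some per-PE condition that any rank might trip (illustrative only).
  local_fault = (h_min_local < 0.0)
  global_fault = any_across_PEs(local_fault)
  if (global_fault) call MOM_error(FATAL, "Bad state detected on at least one PE.")

Because the reduction is collective, every PE computes the same global_fault and takes the same branch, which is what makes a synchronized FATAL abort safe here.
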
31 changes: 31 additions & 0 deletions config_src/infra/FMS2/MOM_coms_infra.F90
@@ -16,6 +16,7 @@ module MOM_coms_infra

public :: PE_here, root_PE, num_PEs, set_rootPE, Set_PElist, Get_PElist
public :: broadcast, sum_across_PEs, min_across_PEs, max_across_PEs
+public :: any_across_PEs, all_across_PEs
public :: field_chksum, MOM_infra_init, MOM_infra_end

! This module provides interfaces to the non-domain-oriented communication
@@ -438,6 +439,36 @@ subroutine min_across_PEs_real_1d(field, length, pelist)
call mpp_min(field, length, pelist)
end subroutine min_across_PEs_real_1d

+!> Implementation of any() intrinsic across PEs
+function any_across_PEs(field, pelist)
+  logical,           intent(in) :: field      !< Local PE value
+  integer, optional, intent(in) :: pelist(:)  !< List of PEs to work with
+  logical :: any_across_PEs
+
+  integer :: field_flag
+
+  ! FMS1 does not support logical collectives, so integer flags are used.
+  field_flag = 0
+  if (field) field_flag = 1
+  call max_across_PEs(field_flag, pelist)
+  any_across_PEs = (field_flag > 0)
+end function any_across_PEs
+
+!> Implementation of all() intrinsic across PEs
+function all_across_PEs(field, pelist)
+  logical,           intent(in) :: field      !< Local PE value
+  integer, optional, intent(in) :: pelist(:)  !< List of PEs to work with
+  logical :: all_across_PEs
+
+  integer :: field_flag
+
+  ! FMS1 does not support logical collectives, so integer flags are used.
+  field_flag = 0
+  if (field) field_flag = 1
+  call min_across_PEs(field_flag, pelist)
+  all_across_PEs = (field_flag > 0)
+end function all_across_PEs
+
!> Initialize the model framework, including PE communication over a designated communicator.
!! If no communicator ID is provided, the framework's default communicator is used.
subroutine MOM_infra_init(localcomm)
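
The FMS2 version is line-for-line identical to the FMS1 one, including the integer-flag workaround for the missing logical collectives. For comparison, raw MPI does provide logical reductions directly; a minimal sketch of the equivalent allreduce (assuming an MPI build and, purely for illustration, the world communicator):

  use mpi
  logical :: local_flag, global_flag
  integer :: ierr
  ! MPI_LOR over MPI_LOGICAL is the direct analogue of any_across_PEs;
  ! MPI_LAND would correspond to all_across_PEs.
  call MPI_Allreduce(local_flag, global_flag, 1, MPI_LOGICAL, MPI_LOR, &
                     MPI_COMM_WORLD, ierr)

Routing the operation through max_across_PEs/min_across_PEs instead keeps these functions within the FMS mpp interfaces that the rest of the module already uses.
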
348 changes: 239 additions & 109 deletions src/ALE/MOM_ALE.F90

(Large diff omitted.)

6 changes: 4 additions & 2 deletions src/core/MOM_checksum_packages.F90
@@ -253,8 +253,8 @@ subroutine MOM_state_stats(mesg, u, v, h, Temp, Salt, G, GV, US, allowChange, pe
real, dimension(G%isc:G%iec, G%jsc:G%jec) :: &
tmp_A, & ! The area per cell [m2] (unscaled to permit reproducing sum).
tmp_V, & ! The column-integrated volume [m3] (unscaled to permit reproducing sum)
-tmp_T, & ! The column-integrated temperature [degC m3]
-tmp_S ! The column-integrated salinity [ppt m3]
+tmp_T, & ! The column-integrated temperature [degC m3] (unscaled to permit reproducing sum)
+tmp_S ! The column-integrated salinity [ppt m3] (unscaled to permit reproducing sum)
real :: Vol, dV ! The total ocean volume and its change [m3] (unscaled to permit reproducing sum).
real :: Area ! The total ocean surface area [m2] (unscaled to permit reproducing sum).
real :: h_minimum ! The minimum layer thicknesses [H ~> m or kg m-2]
@@ -294,6 +294,8 @@ subroutine MOM_state_stats(mesg, u, v, h, Temp, Salt, G, GV, US, allowChange, pe
T%average = T%average + dV*Temp(i,j,k)
S%minimum = min( S%minimum, Salt(i,j,k) ) ; S%maximum = max( S%maximum, Salt(i,j,k) )
S%average = S%average + dV*Salt(i,j,k)
+tmp_T(i,j) = tmp_T(i,j) + dV*Temp(i,j,k)
+tmp_S(i,j) = tmp_S(i,j) + dV*Salt(i,j,k)
endif
if (h_minimum > h(i,j,k)) h_minimum = h(i,j,k)
endif
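
Accumulating tmp_T and tmp_S alongside the running T%average and S%average keeps per-column integrals in unscaled mks units, which is what the order-invariant reproducing sum needs. A sketch of the intended reduction (reproducing_sum is public in MOM_coms; passing the array alone, with the index bounds left to their defaults, is an assumption here):

  real :: T_int  ! Globally integrated temperature [degC m3]
  T_int = reproducing_sum(tmp_T)

Unlike an ordinary sum_across_PEs, the result is bitwise identical for any PE count and summation order.
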
2 changes: 1 addition & 1 deletion src/core/MOM_verticalGrid.F90
@@ -142,7 +142,7 @@ subroutine verticalGridInit( param_file, GV, US )
! Here NK_ is a macro, while nk is a variable.
call get_param(param_file, mdl, "NK", nk, &
"The number of model layers.", units="nondim", &
-static_value=NK_)
+default=NK_)
if (nk /= NK_) call MOM_error(FATAL, "verticalGridInit: " // &
"Mismatched number of layers NK_ between MOM_memory.h and param_file")

45 changes: 44 additions & 1 deletion src/diagnostics/MOM_spatial_means.F90
@@ -19,7 +19,7 @@ module MOM_spatial_means
public :: global_i_mean, global_j_mean
public :: global_area_mean, global_area_mean_u, global_area_mean_v, global_layer_mean
public :: global_area_integral
-public :: global_volume_mean, global_mass_integral
+public :: global_volume_mean, global_mass_integral, global_mass_int_EFP
public :: adjust_area_mean_to_zero

contains
@@ -234,6 +234,49 @@ function global_mass_integral(h, G, GV, var, on_PE_only, scale)

end function global_mass_integral

+!> Find the global mass-weighted order invariant integral of a variable in mks units,
+!! returning the value as an EFP_type. This uses reproducing sums.
+function global_mass_int_EFP(h, G, GV, var, on_PE_only, scale)
+  type(ocean_grid_type),   intent(in) :: G   !< The ocean's grid structure
+  type(verticalGrid_type), intent(in) :: GV  !< The ocean's vertical grid structure
+  real, dimension(SZI_(G),SZJ_(G),SZK_(GV)), &
+                           intent(in) :: h   !< Layer thicknesses [H ~> m or kg m-2]
+  real, dimension(SZI_(G),SZJ_(G),SZK_(GV)), &
+                 optional, intent(in) :: var !< The variable being integrated
+  logical, optional, intent(in) :: on_PE_only !< If present and true, the sum is only done
+                                              !! on the local PE, but it is still order invariant.
+  real,    optional, intent(in) :: scale !< A rescaling factor for the variable
+  type(EFP_type) :: global_mass_int_EFP !< The mass-weighted integral of var (or 1) in
+                                        !! kg times the units of var
+
+  ! Local variables
+  real, dimension(SZI_(G), SZJ_(G)) :: tmpForSum
+  real :: scalefac  ! An overall scaling factor for the areas and variable.
+  integer :: i, j, k, is, ie, js, je, nz, isr, ier, jsr, jer
+
+  is = G%isc ; ie = G%iec ; js = G%jsc ; je = G%jec ; nz = GV%ke
+  isr = is - (G%isd-1) ; ier = ie - (G%isd-1) ; jsr = js - (G%jsd-1) ; jer = je - (G%jsd-1)
+
+  scalefac = GV%H_to_kg_m2 * G%US%L_to_m**2
+  if (present(scale)) scalefac = scale * scalefac
+
+  tmpForSum(:,:) = 0.0
+  if (present(var)) then
+    do k=1,nz ; do j=js,je ; do i=is,ie
+      tmpForSum(i,j) = tmpForSum(i,j) + var(i,j,k) * &
+          ((scalefac * h(i,j,k)) * (G%areaT(i,j) * G%mask2dT(i,j)))
+    enddo ; enddo ; enddo
+  else
+    do k=1,nz ; do j=js,je ; do i=is,ie
+      tmpForSum(i,j) = tmpForSum(i,j) + &
+          ((scalefac * h(i,j,k)) * (G%areaT(i,j) * G%mask2dT(i,j)))
+    enddo ; enddo ; enddo
+  endif
+
+  global_mass_int_EFP = reproducing_sum_EFP(tmpForSum, isr, ier, jsr, jer, only_on_PE=on_PE_only)
+
+end function global_mass_int_EFP
+

!> Determine the global mean of a field along rows of constant i, returning it
!! in a 1-d array using the local indexing. This uses reproducing sums.
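
A sketch of how a caller might use the new function to obtain a reproducible total ocean mass (the variable names are illustrative; EFP_to_real and EFP_real_diff are public in MOM_coms):

  type(EFP_type) :: mass_EFP
  real :: ocean_mass  ! Total ocean mass [kg]
  ! Omitting the optional var argument integrates 1, giving the total mass.
  mass_EFP = global_mass_int_EFP(h, G, GV)
  ocean_mass = EFP_to_real(mass_EFP)

Keeping the result in EFP form until the last moment also allows exact differencing of successive totals, e.g. via EFP_real_diff, without losing reproducibility.
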
4 changes: 0 additions & 4 deletions src/diagnostics/MOM_sum_output.F90
@@ -733,10 +733,6 @@ subroutine write_energy(u, v, h, tv, day, n, G, GV, US, CS, tracer_CSp, dt_forci
enddo ; enddo ; enddo

call sum_across_PEs(CS%ntrunc)
-! Sum the various quantities across all the processors. This sum is NOT
-! guaranteed to be bitwise reproducible, even on the same decomposition.
-! The sum of Tr_stocks should be reimplemented using the reproducing sums.
-if (nTr_stocks > 0) call sum_across_PEs(Tr_stocks,nTr_stocks)

call max_across_PEs(max_CFL, 2)

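
This drops the non-reproducible cross-PE sum that the deleted comment itself had flagged for replacement. For reference, a hypothetical sketch of the reproducing pattern it called for, built from the EFP helpers that MOM_coms exports (illustrative only, not necessarily what the tracer code now does):

  type(EFP_type), allocatable :: stocks_EFP(:)
  integer :: m
  allocate(stocks_EFP(nTr_stocks))
  ! Convert each local stock to extended-fixed-point form, sum across PEs in an
  ! order-invariant way, then convert back.
  do m=1,nTr_stocks ; stocks_EFP(m) = real_to_EFP(Tr_stocks(m)) ; enddo
  call EFP_list_sum_across_PEs(stocks_EFP, nTr_stocks)
  do m=1,nTr_stocks ; Tr_stocks(m) = EFP_to_real(stocks_EFP(m)) ; enddo
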
2 changes: 2 additions & 0 deletions src/framework/MOM_coms.F90
@@ -7,12 +7,14 @@ module MOM_coms
use MOM_coms_infra, only : PE_here, root_PE, num_PEs, set_rootPE, Set_PElist, Get_PElist
use MOM_coms_infra, only : broadcast, field_chksum, MOM_infra_init, MOM_infra_end
use MOM_coms_infra, only : sum_across_PEs, max_across_PEs, min_across_PEs
+use MOM_coms_infra, only : all_across_PEs, any_across_PEs
use MOM_error_handler, only : MOM_error, MOM_mesg, FATAL, WARNING

implicit none ; private

public :: PE_here, root_PE, num_PEs, MOM_infra_init, MOM_infra_end
public :: broadcast, sum_across_PEs, min_across_PEs, max_across_PEs, field_chksum
+public :: all_across_PEs, any_across_PEs
public :: set_PElist, Get_PElist, Set_rootPE
public :: reproducing_sum, reproducing_sum_EFP, EFP_sum_across_PEs, EFP_list_sum_across_PEs
public :: EFP_plus, EFP_minus, EFP_to_real, real_to_EFP, EFP_real_diff
8 changes: 4 additions & 4 deletions src/framework/MOM_domains.F90
@@ -220,11 +220,11 @@ subroutine MOM_domains_init(MOM_dom, param_file, symmetric, static_memory, &
call get_param(param_file, mdl, "NIGLOBAL", n_global(1), &
"The total number of thickness grid points in the x-direction in the physical "//&
"domain. With STATIC_MEMORY_ this is set in "//trim(inc_nm)//" at compile time.", &
-static_value=NIGLOBAL)
+default=NIGLOBAL)
call get_param(param_file, mdl, "NJGLOBAL", n_global(2), &
"The total number of thickness grid points in the y-direction in the physical "//&
"domain. With STATIC_MEMORY_ this is set in "//trim(inc_nm)//" at compile time.", &
-static_value=NJGLOBAL)
+default=NJGLOBAL)
if (n_global(1) /= NIGLOBAL) call MOM_error(FATAL,"MOM_domains_init: " // &
"static mismatch for NIGLOBAL_ domain size. Header file does not match input namelist")
if (n_global(2) /= NJGLOBAL) call MOM_error(FATAL,"MOM_domains_init: " // &
@@ -256,11 +256,11 @@ subroutine MOM_domains_init(MOM_dom, param_file, symmetric, static_memory, &
call get_param(param_file, mdl, trim(nihalo_nm), n_halo(1), &
"The number of halo points on each side in the x-direction. How this is set "//&
"varies with the calling component and static or dynamic memory configuration.", &
-default=nihalo_dflt, static_value=nihalo_dflt)
+default=nihalo_dflt)
call get_param(param_file, mdl, trim(njhalo_nm), n_halo(2), &
"The number of halo points on each side in the y-direction. How this is set "//&
"varies with the calling component and static or dynamic memory configuration.", &
-default=njhalo_dflt, static_value=njhalo_dflt)
+default=njhalo_dflt)
if (present(min_halo)) then
n_halo(1) = max(n_halo(1), min_halo(1))
min_halo(1) = n_halo(1)