diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 8b6ddf3e51f5..4fb88c2dc67b 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -11,3 +11,5 @@ updates:
reviewers:
- "mahf708"
- "bartgol"
+ labels:
+ - "AT: Integrate Without Testing"
diff --git a/.github/workflows/e3sm-gh-ci-cime-tests.yml b/.github/workflows/e3sm-gh-ci-cime-tests.yml
index ff9263f9fc7a..d73e96c6a1d1 100644
--- a/.github/workflows/e3sm-gh-ci-cime-tests.yml
+++ b/.github/workflows/e3sm-gh-ci-cime-tests.yml
@@ -9,6 +9,7 @@ on:
jobs:
ci:
+ if: ${{ github.event.repository.name == 'e3sm' }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
diff --git a/.github/workflows/e3sm-gh-pages.yml b/.github/workflows/e3sm-gh-pages.yml
index 9c637536ae5e..ccca0c479f26 100644
--- a/.github/workflows/e3sm-gh-pages.yml
+++ b/.github/workflows/e3sm-gh-pages.yml
@@ -15,7 +15,7 @@ concurrency:
jobs:
Build-and-Deploy-docs:
- if: ${{ github.event.repository.name != 'scream' }}
+ if: ${{ github.event.repository.name == 'e3sm' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
diff --git a/.github/workflows/eamxx_default_files.yml b/.github/workflows/eamxx_default_files.yml
new file mode 100644
index 000000000000..5ecdf6dec00c
--- /dev/null
+++ b/.github/workflows/eamxx_default_files.yml
@@ -0,0 +1,62 @@
+name: inputdata
+
+on:
+ push:
+ branches: [ master ]
+ pull_request:
+ branches: [ master ]
+ schedule:
+ - cron: '00 00 * * *'
+ workflow_dispatch:
+
+jobs:
+ scream-defaults:
+ runs-on: ubuntu-latest
+ outputs:
+ event_name: ${{ github.event_name }}
+ steps:
+ - name: Check out the repository
+ uses: actions/checkout@v4
+ with:
+ show-progress: false
+ submodules: false
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5.1.0
+ with:
+ python-version: "3.11"
+ - name: Run unit tests
+ working-directory: components/eamxx/cime_config/
+ run: |
+ python -m unittest tests/eamxx_default_files.py -v
+
+ notify-scream-defaults:
+ needs: scream-defaults
+ if: ${{ failure() && needs.scream-defaults.outputs.event_name != 'pull_request' }}
+ runs-on: ubuntu-latest
+ steps:
+ - name: Create issue
+ run: |
+ previous_issue_number=$(gh issue list \
+ --label "$LABELS" \
+ --json number \
+ --jq '.[0].number')
+ if [[ -n $previous_issue_number ]]; then
+ gh issue comment "$previous_issue_number" \
+ --body "$BODY"
+ else
+ gh issue create \
+ --title "$TITLE" \
+ --assignee "$ASSIGNEES" \
+ --label "$LABELS" \
+ --body "$BODY"
+ fi
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ GH_REPO: ${{ github.repository }}
+ TITLE: Inputdata server file missing
+ ASSIGNEES: mahf708,bartgol
+ LABELS: bug,input file,notify-file-gh-action
+ BODY: |
+ Workflow failed! There's likely a missing file specified in the configs! For more information, please see:
+ - Workflow URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} (number ${{ github.run_number }}, attempt ${{ github.run_attempt }})
+ - Workflow SHA: ${{ github.sha }}
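For reference, the find-or-create logic of the `Create issue` step reads, as a Python sketch (an illustration only; it shells out to the same gh subcommands used above and assumes GH_TOKEN and GH_REPO are set in the environment):

    import subprocess

    def gh(*args):
        # Run a GitHub CLI subcommand and return its stripped stdout.
        result = subprocess.run(["gh", *args], check=True, capture_output=True, text=True)
        return result.stdout.strip()

    def notify(title, body, labels, assignees):
        # Comment on the newest open issue carrying our labels, if one exists...
        prev = gh("issue", "list", "--label", labels, "--json", "number", "--jq", ".[0].number")
        if prev:
            gh("issue", "comment", prev, "--body", body)
        else:
            # ...otherwise open a fresh issue.
            gh("issue", "create", "--title", title, "--assignee", assignees,
               "--label", labels, "--body", body)

This mirrors the shell exactly: one open issue per label set, with repeat failures appended as comments rather than new issues.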
diff --git a/cime_config/config_grids.xml b/cime_config/config_grids.xml
index 5b1a247deedc..57e20d33b746 100755
--- a/cime_config/config_grids.xml
+++ b/cime_config/config_grids.xml
@@ -4480,6 +4480,11 @@
+  <!-- five new grid-definition lines (element content not preserved in this diff) -->
diff --git a/cime_config/machines/config_machines.xml b/cime_config/machines/config_machines.xml
index 14f2d9121eb8..7e713799018a 100644
--- a/cime_config/machines/config_machines.xml
+++ b/cime_config/machines/config_machines.xml
@@ -2965,10 +2965,10 @@
spectrum-mpi
cbronze
/usr/workspace/$USER/e3sm_scratch
- /usr/gdata/climdat/ccsm3data/inputdata
- /usr/gdata/climdat/ccsm3data/inputdata/atm/datm7
+ /usr/gdata/e3sm/ccsm3data/inputdata
+ /usr/gdata/e3sm/ccsm3data/inputdata/atm/datm7
/usr/workspace/$USER/archive/$CASE
- /usr/gdata/climdat/baselines/$COMPILER
+ /usr/gdata/e3sm/baselines/$COMPILER
16
lsf
donahue5 -at- llnl.gov
@@ -3022,9 +3022,9 @@
y
- /usr/gdata/climdat/netcdf/bin:$ENV{PATH}
- /usr/gdata/climdat/netcdf/lib:$ENV{LD_LIBRARY_PATH}
- /usr/gdata/climdat/netcdf
+ /usr/gdata/e3sm/netcdf/bin:$ENV{PATH}
+ /usr/gdata/e3sm/netcdf/lib:$ENV{LD_LIBRARY_PATH}
+ /usr/gdata/e3sm/netcdf
2
20
2
@@ -3041,11 +3041,11 @@
mpich
cbronze
/p/lustre2/$USER/e3sm_scratch/ruby
- /usr/gdata/climdat/ccsm3data/inputdata
- /usr/gdata/climdat/ccsm3data/inputdata/atm/datm7
+ /usr/gdata/e3sm/ccsm3data/inputdata
+ /usr/gdata/e3sm/ccsm3data/inputdata/atm/datm7
/p/lustre2/$USER/archive/$CASE
/p/lustre2/$USER/ccsm_baselines/$COMPILER
- /usr/gdata/climdat/tools/cprnc
+ /usr/gdata/e3sm/tools/cprnc
8
lc_slurm
donahue5 -at- llnl.gov
@@ -3073,7 +3073,7 @@
intel-classic/2021.6.0-magic
mvapich2/2.3.7
cmake/3.19.2
- /usr/gdata/climdat/install/quartz/modulefiles
+ /usr/gdata/e3sm/install/quartz/modulefiles
hdf5/1.12.2
netcdf-c/4.9.0
netcdf-fortran/4.6.0
@@ -3084,7 +3084,7 @@
$CIME_OUTPUT_ROOT/$CASE/run
$CIME_OUTPUT_ROOT/$CASE/bld
- /usr/gdata/climdat/install/quartz/netcdf-fortran/
+ /usr/gdata/e3sm/install/quartz/netcdf-fortran/
/usr/tce/packages/parallel-netcdf/parallel-netcdf-1.12.3-mvapich2-2.3.7-intel-classic-2021.6.0
@@ -3096,11 +3096,11 @@
mpich
cbronze
/p/lustre2/$USER/e3sm_scratch/quartz
- /usr/gdata/climdat/ccsm3data/inputdata
- /usr/gdata/climdat/ccsm3data/inputdata/atm/datm7
+ /usr/gdata/e3sm/ccsm3data/inputdata
+ /usr/gdata/e3sm/ccsm3data/inputdata/atm/datm7
/p/lustre2/$USER/archive/$CASE
/p/lustre2/$USER/ccsm_baselines/$COMPILER
- /usr/gdata/climdat/tools/cprnc
+ /usr/gdata/e3sm/tools/cprnc
8
lc_slurm
donahue5 -at- llnl.gov
@@ -3128,7 +3128,7 @@
intel-classic/2021.6.0-magic
mvapich2/2.3.7
cmake/3.19.2
- /usr/gdata/climdat/install/quartz/modulefiles
+ /usr/gdata/e3sm/install/quartz/modulefiles
hdf5/1.12.2
netcdf-c/4.9.0
netcdf-fortran/4.6.0
@@ -3139,7 +3139,7 @@
$CIME_OUTPUT_ROOT/$CASE/run
$CIME_OUTPUT_ROOT/$CASE/bld
- /usr/gdata/climdat/install/quartz/netcdf-fortran/
+ /usr/gdata/e3sm/install/quartz/netcdf-fortran/
/usr/tce/packages/parallel-netcdf/parallel-netcdf-1.12.3-mvapich2-2.3.7-intel-classic-2021.6.0
diff --git a/cime_config/tests.py b/cime_config/tests.py
index ab5fa2ce9493..3223ae10804d 100644
--- a/cime_config/tests.py
+++ b/cime_config/tests.py
@@ -610,12 +610,20 @@
"e3sm_scream_v1_lowres" : {
"time" : "01:00:00",
+ "inherit" : ("e3sm_scream_mam4xx_v1_lowres"),
"tests" : (
- "ERP_D_Lh4.ne4_ne4.F2010-SCREAMv1",
- "ERS_Ln9.ne4_ne4.F2000-SCREAMv1-AQP1",
- "SMS_D_Ln9.ne4_ne4.F2010-SCREAMv1-noAero",
- "ERP_Ln22.ne4pg2_ne4pg2.F2010-SCREAMv1",
- "ERS_D_Ln22.ne4pg2_ne4pg2.F2010-SCREAMv1.scream-rad_frequency_2",
+ "ERP_D_Lh4.ne4_ne4.F2010-SCREAMv1.scream-output-preset-1",
+ "ERS_Ln9.ne4_ne4.F2000-SCREAMv1-AQP1.scream-output-preset-2",
+ "SMS_D_Ln9.ne4_ne4.F2010-SCREAMv1-noAero.scream-output-preset-3",
+ "ERP_Ln22.ne4pg2_ne4pg2.F2010-SCREAMv1.scream-output-preset-4",
+ "ERS_D_Ln22.ne4pg2_ne4pg2.F2010-SCREAMv1.scream-rad_frequency_2--scream-output-preset-5",
+ )
+ },
+
+ "e3sm_scream_v1_dp-eamxx" : {
+ "time" : "01:00:00",
+ "tests" : (
+ "ERS_P16_Ln22.ne30_ne30.F2010-SCREAMv1-DP-DYCOMSrf01", # 225 phys cols, roughly size of ne2
)
},
@@ -623,27 +631,26 @@
# should be fast, so we limit it to low res and add some thread tests
# specifically for mappy.
"e3sm_scream_v1_at" : {
- "inherit" : ("e3sm_scream_v1_lowres"),
- "tests" : ("PET_Ln9_P32x2.ne4pg2_ne4pg2.F2010-SCREAMv1")
+ "inherit" : ("e3sm_scream_v1_lowres", "e3sm_scream_v1_dp-eamxx"),
+ "tests" : ("PET_Ln9_P32x2.ne4pg2_ne4pg2.F2010-SCREAMv1.scream-output-preset-1")
},
"e3sm_scream_v1_medres" : {
"time" : "02:00:00",
"tests" : (
# "SMS_D_Ln2.ne30_ne30.F2000-SCREAMv1-AQP1", # Uncomment once IC file for ne30 is ready
- "ERS_Ln22.ne30_ne30.F2010-SCREAMv1.scream-internal_diagnostics_level",
- "PEM_Ln90.ne30pg2_ne30pg2.F2010-SCREAMv1",
- "ERS_Ln90.ne30pg2_ne30pg2.F2010-SCREAMv1.scream-small_kernels",
- "ERP_Ln22.conusx4v1pg2_r05_oECv3.F2010-SCREAMv1-noAero.scream-bfbhash",
+ "ERS_Ln22.ne30_ne30.F2010-SCREAMv1.scream-internal_diagnostics_level--scream-output-preset-3",
+ "PEM_Ln90.ne30pg2_ne30pg2.F2010-SCREAMv1.scream-spa_remap--scream-output-preset-4",
+ "ERS_Ln90.ne30pg2_ne30pg2.F2010-SCREAMv1.scream-small_kernels--scream-output-preset-5",
+ "ERP_Ln22.conusx4v1pg2_r05_oECv3.F2010-SCREAMv1-noAero.scream-bfbhash--scream-output-preset-6",
)
},
+ # Used to track performance
"e3sm_scream_v1_hires" : {
- "time" : "03:00:00",
+ "time" : "01:00:00",
"tests" : (
- "SMS_D_Ln12.ne120_r0125_oRRS18to6v3.F2010-SCREAMv1",
- "SMS_Ln12.ne120_ne120.F2010-SCREAMv1",
-# "SMS_Ln12.ne120_r0125_oRRS18to6v3.F2000-SCREAMv1-AQP1", add when aquap 120 inputs available
+ "SMS_Ln300.ne30pg2_ne30pg2.F2010-SCREAMv1.scream-perf_test--scream-output-preset-1"
)
},
@@ -655,7 +662,7 @@
# Disable the two 111422-commented tests b/c they fail on pm-gpu and
# we're not using MPASSI right now.
#111422 "ERP_Ln22.ne4pg2_oQU480.F2010-SCREAMv1-MPASSI.atmlndactive-rtm_off",
- "ERS_D_Ln22.ne4pg2_oQU480.F2010-SCREAMv1-MPASSI.atmlndactive-rtm_off",
+ "ERS_D_Ln22.ne4pg2_oQU480.F2010-SCREAMv1-MPASSI.atmlndactive-rtm_off--scream-output-preset-1",
# "ERS_Ln22.ne30_oECv3.F2010-SCREAMv1-MPASSI.atmlndactive-rtm_off",
#111422 "PEM_Ln90.ne30pg2_EC30to60E2r2.F2010-SCREAMv1-MPASSI",
# "ERS_Ln22.ne30pg2_EC30to60E2r2.F2010-SCREAMv1-MPASSI.atmlndactive-rtm_off",
@@ -679,6 +686,14 @@
)
},
+ "e3sm_scream_mam4xx_v1_lowres" : {
+ "time" : "01:00:00",
+ "tests" : (
+ "SMS_D_Ln5.ne4pg2_oQU480.F2010-SCREAMv1-MPASSI.scream-mam4xx-optics",
+ )
+ },
+
+
"e3sm_gpuacc" : {
"tests" : (
"SMS_Ld1.T62_oEC60to30v3.CMPASO-NYF",
@@ -1000,4 +1015,3 @@
"e3sm_superbfb_atm", "e3sm_superbfb_wcycl"),
},
}
-
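A note on the suite dicts above: `inherit` folds the parent suites' tests into the child, and a parenthesized single name such as ("e3sm_scream_v1_lowres") is a plain string, not a one-element tuple (no trailing comma), so consumers must accept both forms. A minimal sketch of the flattening, under the assumption that it mirrors CIME's actual test-suite resolution:

    def flatten_suite(suites, name, seen=None):
        """Collect a suite's tests plus everything pulled in via 'inherit'."""
        seen = set() if seen is None else seen
        if name in seen:  # guard against inheritance cycles
            return []
        seen.add(name)
        as_tuple = lambda v: (v,) if isinstance(v, str) else tuple(v or ())
        spec = suites[name]
        tests = []
        for parent in as_tuple(spec.get("inherit")):
            tests += flatten_suite(suites, parent, seen)
        tests += [t for t in as_tuple(spec.get("tests")) if t not in tests]
        return tests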
diff --git a/components/eam/src/dynamics/se/stepon.F90 b/components/eam/src/dynamics/se/stepon.F90
index 831fd6d87603..38a376935ee6 100644
--- a/components/eam/src/dynamics/se/stepon.F90
+++ b/components/eam/src/dynamics/se/stepon.F90
@@ -585,8 +585,8 @@ subroutine stepon_run3(dtime, cam_out, phys_state, dyn_in, dyn_out)
if (dp_crm) then
do ie=1,nelemd
- out_gridx(:,:) = dyn_in%elem(ie)%spherep(:,:)%lat
- out_gridy(:,:) = dyn_in%elem(ie)%spherep(:,:)%lon
+ out_gridx(:,:) = dyn_in%elem(ie)%spherep(:,:)%lon
+ out_gridy(:,:) = dyn_in%elem(ie)%spherep(:,:)%lat
call outfld('crm_grid_x', out_gridx, npsq, ie)
call outfld('crm_grid_y', out_gridy, npsq, ie)
enddo
diff --git a/components/eam/src/physics/p3/scream/micro_p3.F90 b/components/eam/src/physics/p3/scream/micro_p3.F90
index e59bcc08239c..f345e5abcf70 100644
--- a/components/eam/src/physics/p3/scream/micro_p3.F90
+++ b/components/eam/src/physics/p3/scream/micro_p3.F90
@@ -1846,7 +1846,8 @@ subroutine get_rain_dsd2(qr,nr,mu_r,lamr,cdistr,logn0r)
real(rtype), intent(out) :: lamr,mu_r,cdistr,logn0r
!local variables:
- real(rtype) :: inv_dum,lammax,lammin
+ real(rtype) :: lammax,lammin
+ real(rtype) :: mass_to_d3_factor
!--------------------------------------------------------------------------
@@ -1858,25 +1859,25 @@ subroutine get_rain_dsd2(qr,nr,mu_r,lamr,cdistr,logn0r)
! find spot in lookup table
! (scaled N/q for lookup table parameter space)
nr = max(nr,nsmall)
- inv_dum = bfb_cbrt(qr/(cons1*nr*6._rtype))
! Apply constant mu_r: Recall the switch to v4 tables means constant mu_r
mu_r = mu_r_constant
- lamr = bfb_cbrt(cons1*nr*(mu_r+3._rtype)*(mu_r+2._rtype)*(mu_r+1._rtype)/(qr)) ! recalculate slope based on mu_r
+ mass_to_d3_factor = cons1*(mu_r+3._rtype)*(mu_r+2._rtype)*(mu_r+1._rtype)
+ lamr = bfb_cbrt(mass_to_d3_factor*nr/qr) ! recalculate slope based on mu_r
lammax = (mu_r+1._rtype)*1.e+5_rtype ! check for slope
lammin = (mu_r+1._rtype)*500._rtype !500=1/(2mm) is inverse of max allowed number-weighted mean raindrop diameter
! apply lambda limiters for rain
if (lamr.lt.lammin) then
lamr = lammin
- nr = bfb_exp(3._rtype*bfb_log(lamr)+bfb_log(qr)+bfb_log(bfb_gamma(mu_r+1._rtype))-bfb_log(bfb_gamma(mu_r+4._rtype)))/(cons1)
+ nr = lamr * lamr * lamr * qr / mass_to_d3_factor
elseif (lamr.gt.lammax) then
lamr = lammax
- nr = bfb_exp(3._rtype*bfb_log(lamr)+bfb_log(qr)+bfb_log(bfb_gamma(mu_r+1._rtype))-bfb_log(bfb_gamma(mu_r+4._rtype)))/(cons1)
+ nr = lamr * lamr * lamr * qr / mass_to_d3_factor
endif
cdistr = nr/bfb_gamma(mu_r+1._rtype)
- logn0r = bfb_log10(nr)+(mu_r+1._rtype)*bfb_log10(lamr)-bfb_log10(bfb_gamma(mu_r+1._rtype)) !note: logn0r is calculated as log10(n0r)
+ logn0r = bfb_log10(cdistr)+(mu_r+1._rtype)*bfb_log10(lamr) !note: logn0r is calculated as log10(n0r)
else
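The lambda-limiter refactor above is algebraically exact (though not necessarily bit-for-bit, since the floating-point path changes): it uses the standard gamma-function recurrence

    \Gamma(\mu_r + 4) = (\mu_r + 3)(\mu_r + 2)(\mu_r + 1)\,\Gamma(\mu_r + 1)

so the old exp/log chain

    n_r = \frac{1}{c_1}\,\exp\big( 3\ln\lambda_r + \ln q_r + \ln\Gamma(\mu_r + 1) - \ln\Gamma(\mu_r + 4) \big)
        = \frac{\lambda_r^3\, q_r}{c_1 (\mu_r + 3)(\mu_r + 2)(\mu_r + 1)}

reduces to lamr*lamr*lamr*qr/mass_to_d3_factor, with cons1 in the role of c_1 and mass_to_d3_factor = cons1*(mu_r+3)*(mu_r+2)*(mu_r+1) hoisted out of both limiter branches.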
diff --git a/components/eamxx/CMakeLists.txt b/components/eamxx/CMakeLists.txt
index e2aae3e97b32..453b2d4be8a3 100644
--- a/components/eamxx/CMakeLists.txt
+++ b/components/eamxx/CMakeLists.txt
@@ -1,7 +1,18 @@
+####################################################################
+# Basic setup: version, cime debug, cmake paths,... #
+####################################################################
if (NOT DEFINED PROJECT_NAME)
- cmake_minimum_required(VERSION 3.3)
+ cmake_minimum_required(VERSION 3.14)
cmake_policy(SET CMP0057 NEW)
set(SCREAM_CIME_BUILD FALSE)
+
+ # Print the sha of the last commit (useful to double check which version was tested on CDash)
+ execute_process (COMMAND git rev-parse HEAD
+ WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
+ OUTPUT_VARIABLE LAST_GIT_COMMIT_SHA
+ OUTPUT_STRIP_TRAILING_WHITESPACE)
+ set(EAMXX_GIT_VERSION ${LAST_GIT_COMMIT_SHA} CACHE STRING "The sha of the last git commit." FORCE)
+ message(STATUS "The sha of the last commit is ${EAMXX_GIT_VERSION}")
else()
set(SCREAM_CIME_BUILD TRUE)
endif()
@@ -20,6 +31,13 @@ if ($ENV{SCREAM_FORCE_CONFIG_FAIL})
message(FATAL_ERROR "Failed, as instructed by environment")
endif()
+string(TOLOWER "${CMAKE_BUILD_TYPE}" CMAKE_BUILD_TYPE_ci)
+if (CMAKE_BUILD_TYPE_ci STREQUAL "debug")
+ set (SCREAM_DEBUG TRUE)
+else ()
+ set (SCREAM_DEBUG FALSE)
+endif()
+
# Add the ./cmake folder to cmake path. Also add EKAT's cmake folder
set (EKAT_CMAKE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/../../externals/ekat/cmake)
list(APPEND CMAKE_MODULE_PATH
@@ -33,6 +51,26 @@ if (SCREAM_CIME_BUILD)
${CMAKE_CURRENT_SOURCE_DIR}/cmake/cime)
endif ()
+# We want to use C++17 in EAMxx
+set(CMAKE_CXX_STANDARD 17)
+
+if (NOT SCREAM_CIME_BUILD)
+ project(SCREAM CXX C Fortran)
+
+ if (SCREAM_CORI_HACK)
+ list(APPEND CMAKE_CXX_IMPLICIT_LINK_LIBRARIES "ifcore")
+ list(REMOVE_ITEM CMAKE_CXX_IMPLICIT_LINK_LIBRARIES "ifport")
+ endif()
+
+else()
+ # Ensure our languages are all enabled
+ enable_language(C CXX Fortran)
+endif()
+
+####################################################################
+# Kokkos/YAKL-related settings #
+####################################################################
+
if (Kokkos_ENABLE_CUDA)
if (Kokkos_ENABLE_CUDA_RELOCATABLE_DEVICE_CODE)
if (Kokkos_ENABLE_DEBUG_BOUNDS_CHECK)
@@ -56,68 +94,6 @@ endif()
# to be on. For now, simply ensure Kokkos Serial is enabled
option (Kokkos_ENABLE_SERIAL "" ON)
-# We want to use C++17 in EAMxx
-set(CMAKE_CXX_STANDARD 17)
-
-if (NOT SCREAM_CIME_BUILD)
- project(SCREAM CXX C Fortran)
-
- if (SCREAM_CORI_HACK)
- list(APPEND CMAKE_CXX_IMPLICIT_LINK_LIBRARIES "ifcore")
- list(REMOVE_ITEM CMAKE_CXX_IMPLICIT_LINK_LIBRARIES "ifport")
- endif()
-
-else()
- # Ensure our languages are all enabled
- enable_language(C CXX Fortran)
-endif()
-
-# Print the sha of the last commit (useful to double check which version was tested on CDash)
-execute_process (COMMAND git rev-parse HEAD
- WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
- OUTPUT_VARIABLE LAST_GIT_COMMIT_SHA
- OUTPUT_STRIP_TRAILING_WHITESPACE)
-set(EAMXX_GIT_VERSION ${LAST_GIT_COMMIT_SHA} CACHE STRING "The sha of the last git commit.")
-message(STATUS "The sha of the last commit is ${EAMXX_GIT_VERSION}")
-
-set(SCREAM_DOUBLE_PRECISION TRUE CACHE BOOL "Set to double precision (default True)")
-
-# Set the scream base and src directory, to be used across subfolders
-set(SCREAM_BASE_DIR ${CMAKE_CURRENT_SOURCE_DIR})
-set(SCREAM_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/src)
-set(SCREAM_BIN_DIR ${CMAKE_CURRENT_BINARY_DIR})
-
-# Shortcut function, to print a variable
-function (print_var var)
- message ("${var}: ${${var}}")
-endfunction ()
-
-function (check_pack_size master_pack_size pack_size name)
- math (EXPR PACK_MODULO "${master_pack_size} % ${pack_size}")
- if ((pack_size GREATER master_pack_size) OR (NOT PACK_MODULO EQUAL 0))
- message (FATAL_ERROR "Invalid '${name}' size of ${pack_size}. Needs to be <= ${master_pack_size} and be a factor of it")
- endif()
-endfunction ()
-
-# Compute reasonable defaults. This needs to happen before the CACHE variables
-# are set.
-string(TOLOWER "${CMAKE_BUILD_TYPE}" CMAKE_BUILD_TYPE_ci)
-if (CMAKE_BUILD_TYPE_ci STREQUAL "debug")
- set (SCREAM_DEBUG TRUE)
-else ()
- set (SCREAM_DEBUG FALSE)
-endif()
-
-# Add RRTMGP debug checks. Note, we might consider also adding RRTMGP_EXPENSIVE_CHECKS
-# to turn on the RRTMGP internal checks here as well, via
-# option (RRTMGP_EXPENSIVE_CHECKS "Turn on internal RRTMGP error checking" ${SCREAM_DEBUG})
-# and then adding to scream_config.h:
-# #cmakedefine RRTMGP_EXPENSIVE_CHECKS
-option (SCREAM_RRTMGP_DEBUG "Turn on extra debug checks in RRTMGP" ${SCREAM_DEBUG})
-
-enable_testing()
-include(CTest)
-
set (EAMXX_ENABLE_GPU FALSE CACHE BOOL "")
set (CUDA_BUILD FALSE CACHE BOOL "") #needed for yakl if kokkos vars are not visible there?
set (HIP_BUILD FALSE CACHE BOOL "") #needed for yakl if kokkos vars are not visible there?
@@ -142,13 +118,16 @@ if( NOT "${CMAKE_CXX_COMPILER_ID}" MATCHES "[Cc]lang" )
set (CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -cpp")
endif()
-### Scream default configuration options
+####################################################################
+# EAMxx main configuration options #
+####################################################################
+
+# First, Compute reasonable defaults. This needs to happen before the CACHE variables are set
set(DEFAULT_MAX_RANKS 4)
set(DEFAULT_MAX_THREADS 16)
set(DEFAULT_MIMIC_GPU FALSE)
set(DEFAULT_FPE FALSE)
set(DEFAULT_PACK_SIZE 16)
-set(DEFAULT_POSSIBLY_NO_PACK FALSE)
if (EAMXX_ENABLE_GPU)
# On the GPU, the pack size must be 1
set(DEFAULT_PACK_SIZE 1)
@@ -203,24 +182,122 @@ if (Kokkos_ENABLE_HIP)
set(DEFAULT_SMALL_KERNELS TRUE)
endif()
-### Set CACHE vars
+if (SCREAM_DEBUG)
+ set(DEFAULT_FPMODEL "strict")
+ if (SCREAM_PACK_SIZE EQUAL 1 AND NOT EAMXX_ENABLE_GPU)
+ set(DEFAULT_FPE TRUE)
+ endif ()
+endif()
+
+### Now that reasonable defaults have been computed, set CACHE vars
set(SCREAM_MIMIC_GPU ${DEFAULT_MIMIC_GPU} CACHE BOOL "Mimic GPU to correctness-test inter-column parallelism on non-GPU platform")
set(SCREAM_PACK_CHECK_BOUNDS FALSE CACHE BOOL "If defined, scream::pack objects check indices against bounds")
-set(SCREAM_TEST_DATA_DIR ${CMAKE_CURRENT_BINARY_DIR}/data CACHE PATH "Location of data files generated by tests")
set(SCREAM_LIB_ONLY ${DEFAULT_LIB_ONLY} CACHE BOOL "Only build libraries, no exes")
set(NetCDF_Fortran_PATH ${DEFAULT_NetCDF_Fortran_PATH} CACHE FILEPATH "Path to netcdf fortran installation")
set(NetCDF_C_PATH ${DEFAULT_NetCDF_C_PATH} CACHE FILEPATH "Path to netcdf C installation")
set(SCREAM_MACHINE ${DEFAULT_SCREAM_MACHINE} CACHE STRING "The CIME/SCREAM name for the current machine")
option(SCREAM_MPI_ON_DEVICE "Whether to use device pointers for MPI calls" ON)
-option(SCREAM_ENABLE_MAM "Whether to enable MAM aerosol support" OFF)
+option(SCREAM_ENABLE_MAM "Whether to enable MAM aerosol support" ON)
set(SCREAM_SMALL_KERNELS ${DEFAULT_SMALL_KERNELS} CACHE STRING "Use small, non-monolothic kokkos kernels")
if (NOT SCREAM_SMALL_KERNELS)
set(EKAT_DISABLE_WORKSPACE_SHARING TRUE CACHE STRING "")
endif()
+# Add RRTMGP debug checks. Note, we might consider also adding RRTMGP_EXPENSIVE_CHECKS
+# to turn on the RRTMGP internal checks here as well, via
+# option (RRTMGP_EXPENSIVE_CHECKS "Turn on internal RRTMGP error checking" ${SCREAM_DEBUG})
+# and then adding to scream_config.h:
+# #cmakedefine RRTMGP_EXPENSIVE_CHECKS
+option (SCREAM_RRTMGP_DEBUG "Turn on extra debug checks in RRTMGP" ${SCREAM_DEBUG})
+
+set(SCREAM_DOUBLE_PRECISION TRUE CACHE BOOL "Set to double precision (default True)")
+
# For now, only used in share/grid/remap/refining_remapper_rma.*pp
option (EAMXX_ENABLE_EXPERIMENTAL_CODE "Compile one-sided MPI for refining remappers" OFF)
+option (SCREAM_ENABLE_ML_CORRECTION "Whether to enable ML correction parametrization" OFF)
+
+# Set number of vertical levels
+set(SCREAM_NUM_VERTICAL_LEV ${DEFAULT_NUM_VERTICAL_LEV} CACHE STRING
+ "The number of levels used in the vertical grid."
+)
+option(SCREAM_HAS_LEAP_YEAR "Whether scream uses leap years or not" ON)
+
+set(SCREAM_FPMODEL ${DEFAULT_FPMODEL} CACHE STRING "Compiler floating point model")
+set(SCREAM_FPE ${DEFAULT_FPE} CACHE BOOL "Enable floating point error exception")
+
+# Whether to use getrusage and/or /proc/self/statm to detect memory usage.
+option (SCREAM_ENABLE_GETRUSAGE "Whether getrusage can be used to get memory usage." OFF)
+option (SCREAM_ENABLE_STATM "Whether /proc/self/statm can be used to get memory usage." OFF)
+
+# Whether to disable warnings from tpls.
+set (SCREAM_DISABLE_TPL_WARNINGS ON CACHE BOOL "")
+
+# Dycore settings
+set(DEFAULT_SCREAM_DYNAMICS_DYCORE "NONE")
+if (SCREAM_CIME_BUILD AND SCREAM_DYN_TARGET STREQUAL "theta-l_kokkos")
+ set (DEFAULT_SCREAM_DYNAMICS_DYCORE "Homme")
+endif()
+
+set(SCREAM_DYNAMICS_DYCORE ${DEFAULT_SCREAM_DYNAMICS_DYCORE} CACHE STRING
+ "The name of the dycore to be used for dynamics. If NONE, then any code/test requiring dynamics is disabled.")
+
+string(TOUPPER "${SCREAM_DYNAMICS_DYCORE}" SCREAM_DYNAMICS_DYCORE)
+if (NOT ${SCREAM_DOUBLE_PRECISION})
+ # Homme cannot handle single precision, for now. This causes tests to fail.
+ # Fixing this requires adding a config parameter to homme, to switch between
+ # single and double. That must be done in the upstream repo (E3SM), before
+ # we can support it here.
+ # So, for now, if Homme is the requested dyn dycore AND single precision is
+ # requested, we disable dynamics, printing a warning.
+ if ("${SCREAM_DYNAMICS_DYCORE}" STREQUAL "HOMME")
+ message("WARNING! Homme dycore cannot be used in a Single Precision build. Turning Homme off.")
+ set(SCREAM_DYNAMICS_DYCORE "NONE")
+ endif()
+endif()
+
+# Set the scream base and src directory, to be used across subfolders
+set(SCREAM_BASE_DIR ${CMAKE_CURRENT_SOURCE_DIR})
+set(SCREAM_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/src)
+set(SCREAM_BIN_DIR ${CMAKE_CURRENT_BINARY_DIR})
+
+####################################################################
+# Packs-related settings #
+####################################################################
+
+# Determine the main pack size.
+set(SCREAM_PACK_SIZE ${DEFAULT_PACK_SIZE} CACHE STRING
+ "The number of scalars in a scream::pack::Pack and Mask. Larger packs have good performance on conditional-free loops due to improved caching.")
+
+# Besides the main pack size, we have a couple of other pack sizes used across EAMxx
+# For some routines, SKX may have better performance with pack_size=1
+set(SCREAM_SMALL_PACK_SIZE ${SCREAM_PACK_SIZE} CACHE STRING
+ "The number of scalars in a scream::pack::SmallPack and SmallMask. Smaller packs can have better performance in loops with conditionals since more of the packs will have masks with uniform value.")
+set(SCREAM_POSSIBLY_NO_PACK "${Kokkos_ARCH_SKX}" CACHE BOOL
+ "Set possibly-no-pack to this value. You can set it to something else to restore packs on SKX for testing.")
+
+if (SCREAM_POSSIBLY_NO_PACK)
+ set (SCREAM_POSSIBLY_NO_PACK_SIZE 1)
+else()
+ set (SCREAM_POSSIBLY_NO_PACK_SIZE ${SCREAM_PACK_SIZE})
+endif ()
+
+function (check_pack_size master_pack_size pack_size name)
+ math (EXPR PACK_MODULO "${master_pack_size} % ${pack_size}")
+ if ((pack_size GREATER master_pack_size) OR (NOT PACK_MODULO EQUAL 0))
+ message (FATAL_ERROR "Invalid '${name}' size of ${pack_size}. Needs to be <= ${master_pack_size} and be a factor of it")
+ endif()
+endfunction ()
+
+# Checks on pack sizes relative to the master one:
+check_pack_size(${SCREAM_PACK_SIZE} ${SCREAM_SMALL_PACK_SIZE} "small pack")
+# This one is an internal check, as the user cannot set SCREAM_POSSIBLY_NO_PACK_SIZE now.
+check_pack_size(${SCREAM_PACK_SIZE} ${SCREAM_POSSIBLY_NO_PACK_SIZE} "possibly no pack")
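+# Worked example of the constraint (hypothetical values): with SCREAM_PACK_SIZE=16,
+# a small-pack size of 8 or 1 passes (both divide 16), while 5 or 32 would abort
+# the configure step, since 5 is not a factor of 16 and 32 exceeds the master size.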
+
+####################################################################
+# Input-data locations #
+####################################################################
+
# Handle input root
if (SCREAM_MACHINE AND NOT SCREAM_INPUT_ROOT)
execute_process(COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/scripts/query-cime ${SCREAM_MACHINE} DIN_LOC_ROOT
@@ -274,140 +351,172 @@ set (SCREAM_DATA_DIR ${SCREAM_INPUT_ROOT}/atm/scream CACHE PATH "" FORCE)
set (TOPO_DATA_DIR ${SCREAM_INPUT_ROOT}/atm/cam/topo CACHE PATH "" FORCE)
set (IOP_DATA_DIR ${SCREAM_INPUT_ROOT}/atm/cam/scam/iop CACHE PATH "" FORCE)
-#
-# Handle test level
-#
+####################################################################
+# Tests-related settings #
+####################################################################
+
+if (NOT SCREAM_LIB_ONLY)
+
+  # Assuming SCREAM_LIB_ONLY is FALSE (else, no exec is built at all), we need to decide
+  # whether to build baseline-related execs, and whether we are generating baselines or
+  # comparing against them. These options help reduce the amount of code that is built
+  # when generating baselines or when running memory-check tests (no baselines needed there)
+  option(SCREAM_ONLY_GENERATE_BASELINES "Whether to build only baseline-related executables" OFF)
+ option(SCREAM_ENABLE_BASELINE_TESTS "Whether to run baselines-related tests" ON)
+ if (SCREAM_ONLY_GENERATE_BASELINES AND NOT SCREAM_ENABLE_BASELINE_TESTS)
+ message (FATAL_ERROR
+ "Makes no sense to set SCREAM_ONLY_GENERATE_BASELINES=ON,\n"
+ "but set SCREAM_ENABLE_BASELINE_TESTS=OFF.")
+ endif()
-# Constants
-set(SCREAM_TEST_LEVEL_AT "0")
-set(SCREAM_TEST_LEVEL_NIGHTLY "1")
-set(SCREAM_TEST_LEVEL_EXPERIMENTAL "2")
+ set(SCREAM_BASELINES_DIR "UNSET" CACHE PATH "Folder containing baselines data")
+ if (SCREAM_ENABLE_BASELINE_TESTS)
+ if (NOT EXISTS ${SCREAM_BASELINES_DIR}/data OR NOT IS_DIRECTORY ${SCREAM_BASELINES_DIR}/data)
+ string (CONCAT msg
+        "Error! Baseline tests enabled, but baseline dir is invalid.\n"
+ " SCREAM_BASELINES_DIR: ${SCREAM_BASELINES_DIR}")
+ message ("${msg}")
+ message (FATAL_ERROR "Aborting...")
+ endif()
+ endif()
-set(SCREAM_TEST_LEVEL "AT" CACHE STRING "The test level to run. Default is AT. NIGHTLY will run additional tests but is not guaranteed to PASS. EXPERIMENTAL will run even more tests with failures being more likely")
+  # All baseline tests will add a line to the baseline_list file,
+  # specifying the full name of the baseline they generated.
+  # When test-all-scream has to generate new baselines, it will run
+  # ctest -L baseline_gen, and then read this file to find out all the
+  # baseline files to copy into the baseline directory
+ set (SCREAM_TEST_OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/data)
+ file(MAKE_DIRECTORY ${SCREAM_TEST_OUTPUT_DIR})
+ file(TOUCH ${SCREAM_TEST_OUTPUT_DIR}/baseline_list)
+
+ set(SCREAM_TEST_LEVEL_AT "0")
+ set(SCREAM_TEST_LEVEL_NIGHTLY "1")
+ set(SCREAM_TEST_LEVEL_EXPERIMENTAL "2")
+
+ set(SCREAM_TEST_LEVEL "AT" CACHE STRING "The test level to run. Default is AT. NIGHTLY will run additional tests but is not guaranteed to PASS. EXPERIMENTAL will run even more tests with failures being more likely")
+
+ if (SCREAM_TEST_LEVEL STREQUAL "AT")
+ set(SCREAM_TEST_LEVEL ${SCREAM_TEST_LEVEL_AT})
+ elseif (SCREAM_TEST_LEVEL STREQUAL "NIGHTLY")
+ set(SCREAM_TEST_LEVEL ${SCREAM_TEST_LEVEL_NIGHTLY})
+ elseif (SCREAM_TEST_LEVEL STREQUAL "EXPERIMENTAL")
+ set(SCREAM_TEST_LEVEL ${SCREAM_TEST_LEVEL_EXPERIMENTAL})
+ else()
+ message(FATAL_ERROR "Unknown SCREAM_TEST_LEVEL '${SCREAM_TEST_LEVEL}'")
+ endif()
-if (SCREAM_TEST_LEVEL STREQUAL "AT")
- set(SCREAM_TEST_LEVEL ${SCREAM_TEST_LEVEL_AT})
-elseif (SCREAM_TEST_LEVEL STREQUAL "NIGHTLY")
- set(SCREAM_TEST_LEVEL ${SCREAM_TEST_LEVEL_NIGHTLY})
-elseif (SCREAM_TEST_LEVEL STREQUAL "EXPERIMENTAL")
- set(SCREAM_TEST_LEVEL ${SCREAM_TEST_LEVEL_EXPERIMENTAL})
-else()
- message(FATAL_ERROR "Unknown SCREAM_TEST_LEVEL '${SCREAM_TEST_LEVEL}'")
-endif()
+ set(SCREAM_TEST_MAX_THREADS ${DEFAULT_MAX_THREADS} CACHE STRING "Upper limit on threads per rank for threaded tests")
+ set(SCREAM_TEST_THREAD_INC 1 CACHE STRING "Thread count increment for threaded tests")
+ set(SCREAM_TEST_MAX_RANKS ${DEFAULT_MAX_RANKS} CACHE STRING "Upper limit on ranks for mpi tests")
+ math(EXPR DEFAULT_MAX_TOTAL_THREADS "${SCREAM_TEST_MAX_RANKS}*${SCREAM_TEST_MAX_THREADS}")
+ set(SCREAM_TEST_MAX_TOTAL_THREADS ${DEFAULT_MAX_TOTAL_THREADS} CACHE STRING "Upper limit on nranks*threads for threaded tests")
+
+ # Make sure SCREAM_TEST_MAX_RANKS and SCREAM_TEST_MAX_THREADS do not individually exceed SCREAM_TEST_MAX_TOTAL_THREADS
+ if (SCREAM_TEST_MAX_THREADS GREATER ${SCREAM_TEST_MAX_TOTAL_THREADS})
+ string(CONCAT msg
+ "The requested number of max threads/rank (${SCREAM_TEST_MAX_THREADS}) is larger "
+ "than the max total threads (${SCREAM_TEST_MAX_TOTAL_THREADS}). Setting "
+ "SCREAM_TEST_MAX_THREADS=${SCREAM_TEST_MAX_TOTAL_THREADS}")
+ message(STATUS "${msg}")
+ set (SCREAM_TEST_MAX_THREADS ${SCREAM_TEST_MAX_TOTAL_THREADS})
+ endif()
+ if (SCREAM_TEST_MAX_RANKS GREATER ${SCREAM_TEST_MAX_TOTAL_THREADS})
+ string(CONCAT msg
+ "The requested number of max ranks (${SCREAM_TEST_MAX_RANKS}) is larger "
+ "than the max total threads (${SCREAM_TEST_MAX_TOTAL_THREADS}). Setting "
+ "SCREAM_TEST_MAX_RANKS=${SCREAM_TEST_MAX_TOTAL_THREADS}")
+ message(STATUS "${msg}")
+ set (SCREAM_TEST_MAX_RANKS ${SCREAM_TEST_MAX_TOTAL_THREADS})
+ endif()
+  # This is a meta-variable, which individual tests can use to set *different* degrees
+  # of testing, in terms of test length. E.g., for SHORT use 3 timesteps, for MEDIUM use 10,
+  # for LONG use 100. It is *completely* up to the test to decide what short, medium, and long mean.
+ if (EKAT_ENABLE_COVERAGE OR EKAT_ENABLE_CUDA_MEMCHECK OR EKAT_ENABLE_VALGRIND OR EKAT_ENABLE_COMPUTE_SANITIZER)
+ set (SCREAM_TEST_SIZE_DEFAULT SHORT)
+    # also set thread_inc=max_threads-1, so we test at most 2 threading configurations
+ if (SCREAM_TEST_MAX_THREADS GREATER 1)
+ math (EXPR SCREAM_TEST_THREAD_INC ${SCREAM_TEST_MAX_THREADS}-1)
+ endif()
+ else()
+ set (SCREAM_TEST_SIZE_DEFAULT MEDIUM)
+ endif()
-# Assuming SCREAM_LIB_ONLY is FALSE (else, no exec is built at all), we provide the option
-# of building only baseline-related execs. By default, this option is off (menaing "build everything").
-# However, when generating baselines, this can be useful to reduce the amount of stuff compiled.
-set(SCREAM_BASELINES_ONLY FALSE CACHE BOOL "Whether building only baselines-related executables")
+ set(SCREAM_TEST_SIZE ${SCREAM_TEST_SIZE_DEFAULT} CACHE STRING "The kind of testing to perform: SHORT, MEDIUM, LONG. Only applies to certain tests and is generally used to reduce test length when valgrind/cuda-memcheck are on.")
+ set(SCREAM_TEST_VALID_SIZES "SHORT;MEDIUM;LONG" CACHE INTERNAL "List of valid values for SCREAM_TEST_SIZE")
-# Certain builds are not meant to compare against baselines. For instance, a valgrind build
-# has the sole purpose of detecting memory errors. For these builds, we can disable baselines tests
-set(SCREAM_ENABLE_BASELINE_TESTS TRUE CACHE BOOL "Whether to run baselines-related tests")
+ if (SCREAM_TEST_SIZE STREQUAL "SHORT")
+ add_definitions(-DSCREAM_SHORT_TESTS)
+ endif()
-if (SCREAM_BASELINES_ONLY AND NOT SCREAM_ENABLE_BASELINE_TESTS)
- message (FATAL_ERROR
- "Makes no sense to set SCREAM_BASELINES_ONLY=ON,\n"
- "but set SCREAM_ENABLE_BASELINE_TESTS=OFF.")
+ enable_testing()
+ include(CTest)
endif()
-# Set number of vertical levels
-set(SCREAM_NUM_VERTICAL_LEV ${DEFAULT_NUM_VERTICAL_LEV} CACHE STRING
- "The number of levels used in the vertical grid."
-)
-option(SCREAM_HAS_LEAP_YEAR "Whether scream uses leap years or not" ON)
+####################################################################
+# Configure all tpls and subfolders #
+####################################################################
+
+if (DEFINED ENV{SCREAM_FAKE_ONLY})
+ # We don't really need to build ekat, but we do need to configure the test-launcher
+
+ # Declare some vars that Ekat would have declared, and may be used later
+ option (EKAT_ENABLE_MPI "Whether EKAT requires MPI." ON)
+ option (EKAT_TEST_LAUNCHER_MANAGE_RESOURCES "Whether test-launcher should try to manage thread distribution. Requires a ctest resource file to be effective." OFF)
+ option (EKAT_ENABLE_VALGRIND "Whether to run tests with valgrind" OFF)
+ set(EKAT_VALGRIND_SUPPRESSION_FILE "" CACHE FILEPATH "Use this valgrind suppression file if valgrind is enabled.")
+ set (EKAT_ENABLE_GPU False)
+ if (Kokkos_ENABLE_CUDA OR Kokkos_ENABLE_HIP OR Kokkos_ENABLE_SYCL)
+ set (EKAT_ENABLE_GPU True)
+ endif ()
-## Work out pack sizes.
-# Determine the master pack size.
-set(SCREAM_PACK_SIZE ${DEFAULT_PACK_SIZE} CACHE STRING
- "The number of scalars in a scream::pack::Pack and Mask. Larger packs have good performance on conditional-free loops due to improved caching.")
-# With the master pack size determined, we have constraints on the others.
-set(DEFAULT_SMALL_PACK_SIZE ${SCREAM_PACK_SIZE})
-# For some routines, SKX may have better performance with pksize=1
-if (Kokkos_ARCH_SKX)
- set(DEFAULT_POSSIBLY_NO_PACK TRUE)
-endif ()
-set(SCREAM_SMALL_PACK_SIZE ${DEFAULT_SMALL_PACK_SIZE} CACHE STRING
- "The number of scalars in a scream::pack::SmallPack and SmallMask. Smaller packs can have better performance in loops with conditionals since more of the packs will have masks with uniform value.")
-set(SCREAM_POSSIBLY_NO_PACK ${DEFAULT_POSSIBLY_NO_PACK} CACHE BOOL
- "Set possibly-no-pack to this value. You can set it to something else to restore packs on SKX for testing.")
-set (DEFAULT_POSSIBLY_NO_PACK_SIZE ${SCREAM_PACK_SIZE})
+ if (EKAT_TEST_LAUNCHER_MANAGE_RESOURCES)
+ set (TEST_LAUNCHER_MANAGE_RESOURCES True)
+ else()
+ set (TEST_LAUNCHER_MANAGE_RESOURCES False)
+ endif()
+ if (EKAT_ENABLE_GPU)
+ set (TEST_LAUNCHER_ON_GPU True)
+ else()
+ set (TEST_LAUNCHER_ON_GPU False)
+ endif()
-if (SCREAM_POSSIBLY_NO_PACK)
- set (DEFAULT_POSSIBLY_NO_PACK_SIZE 1)
-endif ()
-set (SCREAM_POSSIBLY_NO_PACK_SIZE ${DEFAULT_POSSIBLY_NO_PACK_SIZE})
-# Checks on pack sizes relative to the master one:
-check_pack_size(${SCREAM_PACK_SIZE} ${SCREAM_SMALL_PACK_SIZE} "small pack")
-# This one is an internal check, as the user cannot set SCREAM_POSSIBLY_NO_PACK_SIZE now.
-check_pack_size(${SCREAM_PACK_SIZE} ${SCREAM_POSSIBLY_NO_PACK_SIZE} "possibly no pack")
+ set (EKAT_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../externals/ekat)
-## Now we have pack sizes. Proceed with other config options that depend on
-## these.
+ if (EKAT_ENABLE_MPI)
+ find_package(MPI REQUIRED COMPONENTS C)
-if (SCREAM_DEBUG)
- set(DEFAULT_FPMODEL "strict")
- if (${SCREAM_PACK_SIZE} EQUAL 1 AND NOT ${EAMXX_ENABLE_GPU})
- set(DEFAULT_FPE TRUE)
- endif ()
-endif()
-set(SCREAM_FPMODEL ${DEFAULT_FPMODEL} CACHE STRING "Compiler floating point model")
-set(SCREAM_FPE ${DEFAULT_FPE} CACHE BOOL "Enable floating point error exception")
-### Experimental, under development
+
+  F20TR-SCREAMv1
+  20TR_SCREAM_ELM%SPBC_CICE%PRES_DOCN%DOM_MOSART_SGLC_SWAV
+
+  F2010-SCREAMv1-DP-DYCOMSrf01
+  2010_SCREAM_ELM%SPBC_CICE%PRES_DOCN%DOM_SROF_SGLC_SWAV_SIAC_SESP%DP-EAMxx%DYCOMSrf01
+  Experimental, under development
+
   2016-08-01
   2020-01-20
+  1999-07-10
+
+  864
+  TRUE
+  TRUE
+  31.5
+  238.5
+  225
+  1
+  225
+  1
diff --git a/components/eamxx/cime_config/eamxx_buildnml.py b/components/eamxx/cime_config/eamxx_buildnml.py
index db05bd2e20e1..0c7ed8e52aac 100644
--- a/components/eamxx/cime_config/eamxx_buildnml.py
+++ b/components/eamxx/cime_config/eamxx_buildnml.py
@@ -21,7 +21,7 @@
from utils import ensure_yaml # pylint: disable=no-name-in-module
ensure_yaml()
import yaml
-from yaml_utils import Bools,Ints,Floats,Strings,array_representer
+from yaml_utils import Bools,Ints,Floats,Strings,array_representer,array_constructor
_CIMEROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..","..","..","cime")
sys.path.append(os.path.join(_CIMEROOT, "CIME", "Tools"))
@@ -465,7 +465,18 @@ def expand_cime_vars(element, case):
child.text = do_cime_vars(child.text, case)
###############################################################################
-def _create_raw_xml_file_impl(case, xml):
+def write_pretty_xml(filepath, xml):
+###############################################################################
+ with open(filepath, "w") as fd:
+ # dom has better pretty printing than ET in older python versions < 3.9
+ dom = md.parseString(ET.tostring(xml, encoding="unicode"))
+ pretty_xml = dom.toprettyxml(indent=" ")
+ pretty_xml = os.linesep.join([s for s in pretty_xml.splitlines()
+ if s.strip()])
+ fd.write(pretty_xml)
+
+###############################################################################
+def _create_raw_xml_file_impl(case, xml, filepath=None):
###############################################################################
"""
On input, xml contains the parsed content of namelist_defaults_scream.xml.
@@ -610,32 +621,40 @@ def _create_raw_xml_file_impl(case, xml):
selectors = get_valid_selectors(xml)
# 1. Evaluate all selectors
- evaluate_selectors(xml, case, selectors)
+ try:
+ evaluate_selectors(xml, case, selectors)
- # 2. Apply all changes in the SCREAM_ATMCHANGE_BUFFER that may alter
- # which atm processes are used
- apply_atm_procs_list_changes_from_buffer (case,xml)
+ # 2. Apply all changes in the SCREAM_ATMCHANGE_BUFFER that may alter
+ # which atm processes are used
+ apply_atm_procs_list_changes_from_buffer (case,xml)
- # 3. Resolve all inheritances
- resolve_all_inheritances(xml)
+ # 3. Resolve all inheritances
+ resolve_all_inheritances(xml)
- # 4. Expand any CIME var that appears inside XML nodes text
- expand_cime_vars(xml,case)
+ # 4. Expand any CIME var that appears inside XML nodes text
+ expand_cime_vars(xml,case)
- # 5. Grab the atmosphere_processes macro list, with all the defaults
- atm_procs_defaults = get_child(xml,"atmosphere_processes_defaults",remove=True)
+ # 5. Grab the atmosphere_processes macro list, with all the defaults
+ atm_procs_defaults = get_child(xml,"atmosphere_processes_defaults",remove=True)
- # 6. Get atm procs list
- atm_procs_list = get_child(atm_procs_defaults,"atm_procs_list",remove=True)
+ # 6. Get atm procs list
+ atm_procs_list = get_child(atm_procs_defaults,"atm_procs_list",remove=True)
- # 7. Form the nested list of atm procs needed, append to atmosphere_driver section
- atm_procs = gen_atm_proc_group(atm_procs_list.text, atm_procs_defaults)
- atm_procs.tag = "atmosphere_processes"
- xml.append(atm_procs)
+ # 7. Form the nested list of atm procs needed, append to atmosphere_driver section
+ atm_procs = gen_atm_proc_group(atm_procs_list.text, atm_procs_defaults)
+ atm_procs.tag = "atmosphere_processes"
+ xml.append(atm_procs)
- # 8. Apply all changes in the SCREAM_ATMCHANGE_BUFFER that do not alter
- # which atm processes are used
- apply_non_atm_procs_list_changes_from_buffer (case,xml)
+ # 8. Apply all changes in the SCREAM_ATMCHANGE_BUFFER that do not alter
+ # which atm processes are used
+ apply_non_atm_procs_list_changes_from_buffer (case,xml)
+ except BaseException as e:
+ if filepath is not None:
+ dbg_xml_path = filepath.replace(".xml", ".dbg.xml")
+ write_pretty_xml(dbg_xml_path, xml)
+ print(f"Error during XML creation, writing {dbg_xml_path}")
+
+ raise e
perform_consistency_checks (case, xml)
@@ -666,17 +685,11 @@ def create_raw_xml_file(case, caseroot):
# be processed early by treating them as if they were made to the defaults file.
with open(src, "r") as fd:
defaults = ET.parse(fd).getroot()
- raw_xml = _create_raw_xml_file_impl(case, defaults)
+ raw_xml = _create_raw_xml_file_impl(case, defaults, filepath=raw_xml_file)
check_all_values(raw_xml)
- with open(raw_xml_file, "w") as fd:
- # dom has better pretty printing than ET in older python versions < 3.9
- dom = md.parseString(ET.tostring(raw_xml, encoding="unicode"))
- pretty_xml = dom.toprettyxml(indent=" ")
- pretty_xml = os.linesep.join([s for s in pretty_xml.splitlines()
- if s.strip()])
- fd.write(pretty_xml)
+ write_pretty_xml(raw_xml_file, raw_xml)
###############################################################################
def convert_to_dict(element):
@@ -877,6 +890,11 @@ def get_file_parameters(caseroot):
result = []
for item in raw_xml.findall('.//*[@type="file"]'):
+        # Certain configurations may not need a file (e.g., a remap
+        # file for SPA may not be needed if the model resolution
+        # matches the data file resolution)
+ if item.text is None or item.text=="":
+ continue
result.append(item.text.strip())
for item in raw_xml.findall('.//*[@type="array(file)"]'):
@@ -886,7 +904,7 @@ def get_file_parameters(caseroot):
return list(OrderedDict.fromkeys(result))
###############################################################################
-def create_input_data_list_file(caseroot):
+def create_input_data_list_file(case,caseroot):
###############################################################################
"""
Create the scream.input_data_list file for this case. This will tell CIME
@@ -894,18 +912,51 @@ def create_input_data_list_file(caseroot):
"""
files_to_download = get_file_parameters(caseroot)
+ # Add array parsing knowledge to yaml loader
+ loader = yaml.SafeLoader
+ loader.add_constructor("!bools",array_constructor)
+ loader.add_constructor("!ints",array_constructor)
+ loader.add_constructor("!floats",array_constructor)
+ loader.add_constructor("!strings",array_constructor)
+
+ # Grab all the output yaml files, open them, and check if horiz_remap_file or vertical_remap_file is used
+ rundir = case.get_value("RUNDIR")
+ eamxx_xml_file = os.path.join(caseroot, "namelist_scream.xml")
+ with open(eamxx_xml_file, "r") as fd:
+ eamxx_xml = ET.parse(fd).getroot()
+
+ scorpio = get_child(eamxx_xml,'Scorpio')
+ out_files_xml = get_child(scorpio,"output_yaml_files",must_exist=False)
+ if (out_files_xml is not None and out_files_xml.text is not None):
+ for fn in out_files_xml.text.split(","):
+ # Get full name
+ src_yaml = os.path.expanduser(os.path.join(fn.strip()))
+ dst_yaml = os.path.expanduser(os.path.join(rundir,'data',os.path.basename(src_yaml)))
+
+ # Load file, and look for the remap file entries
+ content = yaml.load(open(dst_yaml,"r"),Loader=loader)
+ if 'horiz_remap_file' in content.keys():
+ files_to_download += [content['horiz_remap_file']]
+ if 'vertical_remap_file' in content.keys():
+ files_to_download += [content['vertical_remap_file']]
+
input_data_list_file = "{}/Buildconf/scream.input_data_list".format(caseroot)
if os.path.exists(input_data_list_file):
os.remove(input_data_list_file)
+ din_loc_root = case.get_value("DIN_LOC_ROOT")
with open(input_data_list_file, "w") as fd:
- for idx, file_path in enumerate(files_to_download):
- fd.write("scream_dl_input_{} = {}\n".format(idx, file_path))
+ for idx, file_path in enumerate(list(set(files_to_download))):
+ # Only add files whose full path starts with the CIME's input data location
+ if file_path.startswith(din_loc_root):
+ fd.write("scream_dl_input_{} = {}\n".format(idx, file_path))
+
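+    # NB: list(set(...)) above de-duplicates but does not preserve order, so the
+    # scream_dl_input_N indices may change from run to run. An order-preserving
+    # alternative (an assumption, not part of this change) would reuse the idiom
+    # from get_file_parameters: list(OrderedDict.fromkeys(files_to_download)).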
###############################################################################
def do_cime_vars_on_yaml_output_files(case, caseroot):
###############################################################################
- from yaml_utils import array_constructor
rundir = case.get_value("RUNDIR")
eamxx_xml_file = os.path.join(caseroot, "namelist_scream.xml")
@@ -956,14 +1007,15 @@ def do_cime_vars_on_yaml_output_files(case, caseroot):
# produces an output at t=0, which is not present in the restarted run, and
# which also causes different timestamp in the file name.
# Hence, change default output settings to perform a single AVERAGE step at the end of the run
- if case.get_value("TESTCASE") in ["ERP", "ERS"]:
- test_env = case.get_env('test')
- stop_n = int(test_env.get_value("STOP_N"))
- stop_opt = test_env.get_value("STOP_OPTION")
- content['output_control']['Frequency'] = stop_n
- content['output_control']['frequency_units'] = stop_opt
- content['Averaging Type'] = 'AVERAGE'
- print ("WARNING: ERS/ERP tests hard code output to consist of a single AVERAGE output step at the end of the run.")
+ if case.get_value("TESTCASE") in ["ERP", "ERS"] and content['Averaging Type'].upper()=="INSTANT":
+ hist_n = int(case.get_value("HIST_N",resolved=True))
+ hist_opt = case.get_value("HIST_OPTION",resolved=True)
+ content['output_control']['Frequency'] = hist_n
+ content['output_control']['frequency_units'] = hist_opt
+ content['output_control']['skip_t0_output'] = True
+ print ("ERS/ERP test with INSTANT output detected. Adjusting output control specs:\n")
+ print (" - setting skip_t0_output=true\n")
+ print (" - setting freq and freq_units to HIST_N and HIST_OPTION respectively\n")
ordered_dump(content, open(dst_yaml, "w"))
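For illustration, with hypothetical test settings HIST_N=2 and HIST_OPTION='ndays', the ERS/ERP branch above leaves an INSTANT stream with:

    content['output_control']['Frequency'] = 2              # from HIST_N
    content['output_control']['frequency_units'] = 'ndays'  # from HIST_OPTION
    content['output_control']['skip_t0_output'] = True      # drop the t=0 snapshot a restarted run cannot reproduce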
diff --git a/components/eamxx/cime_config/eamxx_buildnml_impl.py b/components/eamxx/cime_config/eamxx_buildnml_impl.py
index 2fa00b7f4a4b..e0ecab146246 100644
--- a/components/eamxx/cime_config/eamxx_buildnml_impl.py
+++ b/components/eamxx/cime_config/eamxx_buildnml_impl.py
@@ -162,11 +162,12 @@ def refine_type(entry, force_type=None):
>>> e = '1.0'
>>> refine_type(e,force_type='my_type')
Traceback (most recent call last):
- NameError: ERROR: Invalid/unsupported force type 'my_type'
+ CIME.utils.CIMEError: ERROR: Invalid/unsupported force type 'my_type'
>>> e = 'true,falsE'
>>> refine_type(e,'logical')
Traceback (most recent call last):
- ValueError: Could not refine 'true,falsE' as type 'logical'
+ CIME.utils.CIMEError: ERROR: Could not refine 'true,falsE' as type 'logical':
+ ERROR: For entry of type 'logical', expected 'true' or 'false', got 'true,falsE'
>>> refine_type(e,'array(logical)')
[True, False]
>>> refine_type('', 'array(string)')
@@ -176,15 +177,14 @@ def refine_type(entry, force_type=None):
>>> refine_type(None, 'array(real)')
[]
"""
-
- # If force type is unspecified, try to deduce it
+ # If force type is unspecified, try to deduce it
if force_type is None:
expect (entry is not None,
"If an entry is None, you must specify the force_type")
else:
elem_valid = ["logical","integer","real","string","file"]
valid = elem_valid + ["array("+e+")" for e in elem_valid]
- expect (force_type in valid, exc_type=NameError,
+ expect (force_type in valid,
error_msg=f"Invalid/unsupported force type '{force_type}'")
if is_array_type(force_type):
@@ -208,7 +208,8 @@ def refine_type(entry, force_type=None):
elif entry.upper() == "FALSE":
return False
else:
- return bool(int(entry))
+ expect(False, f"For entry of type 'logical', expected 'true' or 'false', got '{entry}'",
+ exc_type=ValueError)
elif elem_type == "integer":
tmp = float(entry)
@@ -220,7 +221,7 @@ def refine_type(entry, force_type=None):
return str(entry)
except ValueError as e:
- raise ValueError (f"Could not refine '{entry}' as type '{force_type}'") from e
+ expect(False, f"Could not refine '{entry}' as type '{force_type}':\n{e}")
# No force type provided. Try to infer from value
if entry.upper() == "TRUE":
@@ -273,7 +274,7 @@ def derive_type(entry):
elif isinstance(elem_value, str):
elem_type = "string"
else:
- raise RuntimeError("Couldn't derive type of '{}'".format(entry))
+ expect(False, "Couldn't derive type of '{}'".format(entry))
if isinstance(refined_value,list):
return "array(" + elem_type + ")"
@@ -293,7 +294,8 @@ def check_value(elem, value):
>>> root = ET.fromstring(xml)
>>> check_value(root,'1.5')
Traceback (most recent call last):
- ValueError: Could not refine '1.5' as type 'integer'
+ CIME.utils.CIMEError: ERROR: Could not refine '1.5' as type 'integer':
+ ERROR: Cannot interpret 1.5 as int
>>> check_value(root,'3')
Traceback (most recent call last):
CIME.utils.CIMEError: ERROR: Invalid value '3' for element 'a'. Value not in the valid list ('[1, 2]')
diff --git a/components/eamxx/cime_config/namelist_defaults_scream.xml b/components/eamxx/cime_config/namelist_defaults_scream.xml
index 71c1bfd4f828..07ef2793f38c 100644
--- a/components/eamxx/cime_config/namelist_defaults_scream.xml
+++ b/components/eamxx/cime_config/namelist_defaults_scream.xml
@@ -48,7 +48,7 @@ be lost if SCREAM_HACK_XML is not enabled.
ctl_nl
- driver_options,atmosphere_processes,grids_manager,initial_conditions,Scorpio,e3sm_parameters
+ driver_options,iop_options,atmosphere_processes,grids_manager,initial_conditions,Scorpio,e3sm_parameters
@@ -119,9 +119,8 @@ be lost if SCREAM_HACK_XML is not enabled.
for the atmosphere processes section(s).
11) The attribute 'locked="true"' is to be used for entries that cannot be changed
- via atmchange (see scripts/atmchange). For instance, the overall list of atm procs cannot be changed,
- since it would require to re-parse the defaults, to re-generate the correct
- defaults for the (possibly) new atm procs.
+ via atmchange (see scripts/atmchange). If an element is locked, then all children
+ will be locked as well.
12) The attribute 'constraints' allows to specify constraints on values. Valid constraints
are lt, le, ne, gt, ge, and mod. Except the latter (which has slightly different syntax,
@@ -203,6 +202,18 @@ be lost if SCREAM_HACK_XML is not enabled.
${DIN_LOC_ROOT}/atm/scream/tables/vn_table_vals.dat8,
${DIN_LOC_ROOT}/atm/scream/tables/vm_table_vals.dat8
+ 1350.0
+ 1.0
+ 1.0
+ 67.0
+ 0.5
+ 1.0
+ 50.0
+ 900.0
+ 0.65
+ 0.304
+ 1.0
+ 0.00028
@@ -226,20 +237,38 @@ be lost if SCREAM_HACK_XML is not enabled.
+
+
+
+ ${DIN_LOC_ROOT}/atm/scream/mam4xx/physprops/mam4_mode1_rrtmg_aeronetdust_c20240206.nc
+ ${DIN_LOC_ROOT}/atm/scream/mam4xx/physprops/mam4_mode2_rrtmg_c20240206.nc
+ ${DIN_LOC_ROOT}/atm/scream/mam4xx/physprops/mam4_mode3_rrtmg_aeronetdust_c20240206.nc
+ ${DIN_LOC_ROOT}/atm/scream/mam4xx/physprops/mam4_mode4_rrtmg_c20240206.nc
+ ${DIN_LOC_ROOT}/atm/scream/mam4xx/physprops/water_refindex_rrtmg_c20240206.nc
+ ${DIN_LOC_ROOT}/atm/scream/mam4xx/physprops/ocphi_rrtmg_c20240206.nc
+ ${DIN_LOC_ROOT}/atm/scream/mam4xx/physprops/dust_aeronet_rrtmg_c20240206.nc
+ ${DIN_LOC_ROOT}/atm/scream/mam4xx/physprops/ssam_rrtmg_c20240206.nc
+ ${DIN_LOC_ROOT}/atm/scream/mam4xx/physprops/sulfate_rrtmg_c20240206.nc
+ ${DIN_LOC_ROOT}/atm/scream/mam4xx/physprops/ocpho_rrtmg_c20240206.nc
+ ${DIN_LOC_ROOT}/atm/scream/mam4xx/physprops/bcpho_rrtmg_c20240206.nc
+ ${DIN_LOC_ROOT}/atm/scream/mam4xx/physprops/poly_rrtmg_c20240206.nc
+
+
-
+
0
false
- false
+ TIME_DEPENDENT_3D_PROFILE
-
+
- UNSET
- ${DIN_LOC_ROOT}/atm/scream/maps/map_ne30_to_ne4_mono_20220502.nc
- ${DIN_LOC_ROOT}/atm/scream/maps/map_ne30np4_to_ne4pg2_mono.20220714.nc
- none
- ${DIN_LOC_ROOT}/atm/scream/maps/map_ne30np4_to_ne30pg2_mono.20220714.nc
+
${DIN_LOC_ROOT}/atm/scream/maps/map_ne30np4_to_ne120np4_mono_20220502.nc
- ${DIN_LOC_ROOT}/atm/scream/maps/map_ne30np4_to_ne512np4_mono_20220506.nc
- ${DIN_LOC_ROOT}/atm/scream/maps/map_ne30np4_to_ne120pg2_intbilin_20221012.nc
- ${DIN_LOC_ROOT}/atm/scream/maps/map_ne30np4_to_ne256pg2_intbilin_20221011.nc
- ${DIN_LOC_ROOT}/atm/scream/maps/map_ne30np4_to_ne512pg2_intbilin_20221012.nc
- ${DIN_LOC_ROOT}/atm/scream/maps/map_ne30np4_to_ne1024pg2_intbilin_20221012.nc
-
- ${DIN_LOC_ROOT}/atm/scream/init/spa_file_unified_and_complete_ne30_20220428.nc
+ ${DIN_LOC_ROOT}/atm/scream/maps/map_ne30pg2_to_ne120pg2_20231201.nc
+ ${DIN_LOC_ROOT}/atm/scream/maps/map_ne30pg2_to_ne256pg2_20231201.nc
+ ${DIN_LOC_ROOT}/atm/scream/maps/map_ne30pg2_to_ne512pg2_20231201.nc
+ ${DIN_LOC_ROOT}/atm/scream/maps/map_ne30pg2_to_ne1024pg2_20231201.nc
+
+
+
+ ${DIN_LOC_ROOT}/atm/scream/init/spa_file_unified_and_complete_ne30_20220428.nc
+ ${DIN_LOC_ROOT}/atm/scream/init/spa_file_unified_and_complete_ne30pg2_20240111.nc
+ ${DIN_LOC_ROOT}/atm/scream/init/spa_file_unified_and_complete_ne4_20220428.nc
+ ${DIN_LOC_ROOT}/atm/scream/init/spa_file_unified_and_complete_ne4pg2_20231222.nc
@@ -327,6 +356,7 @@ be lost if SCREAM_HACK_XML is not enabled.
3
3
3
+ 3
4
true
false
@@ -343,6 +373,9 @@ be lost if SCREAM_HACK_XML is not enabled.
>
false
+
+ true
+
@@ -356,11 +389,15 @@ be lost if SCREAM_HACK_XML is not enabled.
6
2
1
+ 1
5
- 10
+ 10
+ 1
+ 1
+ 1
1
hours
@@ -397,10 +434,8 @@ be lost if SCREAM_HACK_XML is not enabled.
${DIN_LOC_ROOT}/atm/scream/init/screami_ne256np4L128_ifs-20200120_20220914.nc
${DIN_LOC_ROOT}/atm/scream/init/screami_ne512np4L128_20220823.nc
${DIN_LOC_ROOT}/atm/scream/init/screami_ne1024np4L128_era5-20131001-topoadj-16x_20220914.nc
- ${DIN_LOC_ROOT}/atm/scream/init/screami_ne1024np4L128_ifs-20160801-topoadjx6t_20221011.nc
${DIN_LOC_ROOT}/atm/scream/init/screami_ne1024np4L128_ifs-20200120-topoadjx6t_20221011.nc
${DIN_LOC_ROOT}/atm/scream/init/screami_aquaplanet_ne4np4L72_20220823.nc
- ${DIN_LOC_ROOT}/atm/scream/init/screami_aquaplanet_ne30np4L128_20220823.nc
${DIN_LOC_ROOT}/atm/scream/init/screami_conusx4v1np4L72-topo12x_013023.nc
@@ -453,11 +488,19 @@ be lost if SCREAM_HACK_XML is not enabled.
0.0
0.0
0.0
+
+
+
+ T_mid
+ false
+ 0
+ 0.001
+ 900.0
- ${SRCROOT}/components/eamxx/data/scream_default_output.yaml
+
./${CASE}.scream
@@ -475,12 +518,19 @@ be lost if SCREAM_HACK_XML is not enabled.
doc="Verbosity level for the atm logger">
info
+
+ warn
+
false
1e-10
1e-14
Warning
true
phis,landfrac
+ false
+ true
@@ -488,11 +538,28 @@ be lost if SCREAM_HACK_XML is not enabled.
${CASE}
+
+
+ true
+ UNSET
+ ${DIN_LOC_ROOT}/atm/cam/scam/iop/DYCOMSrf01_iopfile_4scam.nc
+ -999
+ 31.5
+ -999
+ 238.5
+ false
+ true
+ false
+ true
+
+
0
+ 2
False
2
+ 1
1
6
0
@@ -506,6 +573,7 @@ be lost if SCREAM_HACK_XML is not enabled.
1
1
3.4e-08
+ 0.216784
UNSET
250000.0
250000.0
@@ -513,6 +581,7 @@ be lost if SCREAM_HACK_XML is not enabled.
4.0e4
2.0e4
1.0e4
+ 1.0e4
100000.0
1
0
@@ -522,6 +591,7 @@ be lost if SCREAM_HACK_XML is not enabled.
0
0
sphere
+ plane
9
UNSET
4
@@ -530,19 +600,28 @@ be lost if SCREAM_HACK_XML is not enabled.
256
512
1024
+ 1024
0
0
+ 5
0
+ 5
+ 0
+ 50000
+ 0
+ 50000
-1
4
cube
- UNSET
+ plane
+ UNSET
600
300
75
33.33333333333
16.6666666666666
8.3333333333333
+ 8.3333333333333
75
9999
1
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/internal_diagnostics_level/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/internal_diagnostics_level/shell_commands
index d3a4a39b668a..cf3ca97f6dd3 100644
--- a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/internal_diagnostics_level/shell_commands
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/internal_diagnostics_level/shell_commands
@@ -1,2 +1,2 @@
$CIMEROOT/../components/eamxx/scripts/atmchange --all internal_diagnostics_level=1 atmosphere_processes::internal_diagnostics_level=0 -b
-./xmlchange POSTRUN_SCRIPT="$CIMEROOT/../components/eamxx/tests/postrun/check_hashes_ers.py"
+./xmlchange POSTRUN_SCRIPT="$CIMEROOT/../components/eamxx/scripts/check-hashes-ers"
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/mam4xx/optics/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/mam4xx/optics/shell_commands
new file mode 100644
index 000000000000..1c22bd9ee454
--- /dev/null
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/mam4xx/optics/shell_commands
@@ -0,0 +1,9 @@
+
+# Default scream has 10 tracers; MAM4xx adds another 31, for a total of 41 tracers.
+# Set the total number of tracers to 41. We use --append here since the last entry wins when the xml options are parsed.
+./xmlchange --append SCREAM_CMAKE_OPTIONS="SCREAM_NUM_TRACERS 41"
+
+$CIMEROOT/../components/eamxx/scripts/atmchange initial_conditions::Filename='$DIN_LOC_ROOT/atm/scream/init/screami_mam4xx_ne4np4L72_c20240208.nc' -b
+$CIMEROOT/../components/eamxx/scripts/atmchange physics::atm_procs_list="mac_aero_mic,mam4_optics,rrtmgp" -b
+
+
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/README b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/README
new file mode 100644
index 000000000000..5d33c71fed37
--- /dev/null
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/README
@@ -0,0 +1,9 @@
+The testmods in this folder contain different configurations
+of output to request from eamxx. Each preset creates a single
+output file with a distinct name, so two or more presets can
+be used at the same time.
+
+Each preset basically does two things:
+ 1. create a yaml file in the case folder
+ 2. add the yaml file to the output_yaml_files xml setting of eamxx
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/diags/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/diags/shell_commands
new file mode 100644
index 000000000000..904f46b580f2
--- /dev/null
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/diags/shell_commands
@@ -0,0 +1,37 @@
+# This script generates a (single) yaml file for EAMxx output.
+# The output will be INSTANT, with only some diagnostic fields as output
+
+CASEROOT=$(./xmlquery --value CASEROOT)
+CASE=$(./xmlquery --value CASE)
+
+# Scripts location
+YAML_EDIT_SCRIPT=$CIMEROOT/../components/eamxx/scripts/edit-output-stream
+ATMCHANGE=$CIMEROOT/../components/eamxx/scripts/atmchange
+YAML_FILE=$CASEROOT/eamxx_diags_output.yaml
+
+# Figure out the suffix for the physics grid
+ATM_GRID=$(./xmlquery --value ATM_GRID)
+if [[ $ATM_GRID == *"pg2"* ]]; then
+ PGTYPE="PG2"
+else
+ PGTYPE="GLL"
+fi
+
+# List of output fields
+FIELDS='Exner LiqWaterPath dz geopotential_int PotentialTemperature'
+FIELDS+=' precip_liq_surf_mass_flux wind_speed ShortwaveCloudForcing'
+FIELDS+=' T_mid_at_model_bot T_mid_at_900hPa'
+FIELDS+=' horiz_winds_at_100m_above_surface horiz_winds_at_100m_above_sealevel'
+
+# Generate the file
+$YAML_EDIT_SCRIPT -g \
+ -f $YAML_FILE \
+ --avg-type INSTANT \
+ --freq HIST_N \
+ --freq-units HIST_OPTION \
+ --prefix ${CASE}.scream.diags.hi \
+ --grid "Physics ${PGTYPE}" \
+ --fields ${FIELDS}
+
+# Add this output yaml file to the list of eamxx output streams
+$ATMCHANGE output_yaml_files+=$YAML_FILE -b
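Based on edit_output_stream.py added later in this PR, the stream file generated above should look roughly like the structure below; the prefix and field list are illustrative, not taken from a real case.

```python
# Rough shape of the yaml produced above, reconstructed from
# edit_output_stream.py (added later in this PR); values are illustrative.
import yaml

data = {
    "filename_prefix": "mycase.scream.diags.hi",   # --prefix
    "Averaging Type": "instant",                   # --avg-type (CLI lower-cases it)
    "Fields": {
        "Physics PG2": {"Field Names": ["Exner", "LiqWaterPath", "wind_speed"]},
    },
    "output_control": {
        "skip_t0_output": "false",
        "Frequency": "${HIST_N}",                  # --freq HIST_N
        "frequency_units": "${HIST_OPTION}",       # --freq-units HIST_OPTION
    },
}
print(yaml.dump(data, Dumper=yaml.SafeDumper))
```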
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/hremap_to_ne4/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/hremap_to_ne4/shell_commands
new file mode 100644
index 000000000000..99289d5e411b
--- /dev/null
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/hremap_to_ne4/shell_commands
@@ -0,0 +1,15 @@
+# Look for all the eamxx_***_output.yaml files in the case folder and
+# set horiz remap if the atm grid is ne30pg2
+
+CASEROOT=$(./xmlquery --value CASEROOT)
+YAML_EDIT_SCRIPT=$CIMEROOT/../components/eamxx/scripts/edit-output-stream
+ATM_GRID=$(./xmlquery --value ATM_GRID)
+
+if [[ $ATM_GRID = "ne30np4.pg2" ]]; then
+ YAML_FILES=$(ls -1 | grep 'eamxx_.*_output.yaml')
+ for fname in ${YAML_FILES}; do
+ $YAML_EDIT_SCRIPT -f $fname --horiz-remap-file \${DIN_LOC_ROOT}/atm/scream/maps/map_ne30pg2_to_ne4pg2_20231201.nc
+ done
+else
+ echo "Note: testmod 'hremap_to_ne4' only works for ne30pg2 atm grid"
+fi
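Why the remap presets below exclude the phys_dyn stream becomes clear in edit_output_stream.py further down: online remap (an "IO Grid Name" entry) cannot coexist with horiz/vert remap files. A miniature of that constraint check, with invented configurations:

```python
# Miniature of the constraint enforced in edit_output_stream.py (later in this
# PR): online remap ("IO Grid Name") excludes horiz/vert remap files.
def remap_config_ok(data):
    online = any("IO Grid Name" in v for v in data["Fields"].values())
    offline = "horiz_remap_file" in data or "vertical_remap_file" in data
    return not (online and offline)

phys = {"Fields": {"Physics PG2": {"Field Names": ["T_mid"]}},
        "horiz_remap_file": "map_ne30pg2_to_ne4pg2.nc"}
dyn = {"Fields": {"Dynamics": {"IO Grid Name": "Physics GLL"}},
       "horiz_remap_file": "map_ne30pg2_to_ne4pg2.nc"}

print(remap_config_ok(phys))  # True: offline remap only
print(remap_config_ok(dyn))   # False: mixing online and offline remap
```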
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/phys/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/phys/shell_commands
new file mode 100644
index 000000000000..116cdf111b43
--- /dev/null
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/phys/shell_commands
@@ -0,0 +1,34 @@
+# This script generates a (single) yaml file for EAMxx output.
+# The output will be INSTANT, with only a few state vars
+
+CASEROOT=$(./xmlquery --value CASEROOT)
+CASE=$(./xmlquery --value CASE)
+
+# Scripts location
+YAML_EDIT_SCRIPT=$CIMEROOT/../components/eamxx/scripts/edit-output-stream
+ATMCHANGE=$CIMEROOT/../components/eamxx/scripts/atmchange
+YAML_FILE=$CASEROOT/eamxx_phys_output.yaml
+
+# Figure out the suffix for the physics grid
+ATM_GRID=$(./xmlquery --value ATM_GRID)
+if [[ $ATM_GRID == *"pg2"* ]]; then
+ PGTYPE="PG2"
+else
+ PGTYPE="GLL"
+fi
+
+# List of output fields
+FIELDS='horiz_winds T_mid tracers pseudo_density p_mid p_int'
+
+# Generate the file
+$YAML_EDIT_SCRIPT -g \
+ -f $YAML_FILE \
+ --avg-type INSTANT \
+ --freq HIST_N \
+ --freq-units HIST_OPTION \
+ --prefix ${CASE}.scream.phys.hi \
+ --grid "Physics ${PGTYPE}" \
+ --fields ${FIELDS}
+
+# Add this output yaml file to the list of eamxx output streams
+$ATMCHANGE output_yaml_files+=$YAML_FILE -b
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/phys_dyn/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/phys_dyn/shell_commands
new file mode 100644
index 000000000000..0952b4afe3f9
--- /dev/null
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/phys_dyn/shell_commands
@@ -0,0 +1,41 @@
+# This script generates a (single) yaml file for EAMxx output.
+# The output will be INSTANT, with only dyn state vars
+
+CASEROOT=$(./xmlquery --value CASEROOT)
+CASE=$(./xmlquery --value CASE)
+
+# Scripts location
+YAML_EDIT_SCRIPT=$CIMEROOT/../components/eamxx/scripts/edit-output-stream
+ATMCHANGE=$CIMEROOT/../components/eamxx/scripts/atmchange
+YAML_FILE=$CASEROOT/eamxx_dyn_output.yaml
+
+# Figure out the suffix for the physics grid
+ATM_GRID=$(./xmlquery --value ATM_GRID)
+if [[ $ATM_GRID == *"pg2"* ]]; then
+ PGTYPE="PG2"
+else
+ PGTYPE="GLL"
+fi
+
+# List of output fields
+FIELDS='v_dyn vtheta_dp_dyn dp3d_dyn w_int_dyn phis_dyn phi_int_dyn ps_dyn omega_dyn Qdp_dyn'
+
+# Generate the file
+$YAML_EDIT_SCRIPT -g \
+ -f $YAML_FILE \
+ --avg-type INSTANT \
+ --freq HIST_N \
+ --freq-units HIST_OPTION \
+ --prefix ${CASE}.scream.phys_dyn.hi \
+ --grid Dynamics \
+ --io-grid 'Physics GLL' \
+ --fields ${FIELDS}
+
+# Also add a couple of fields on the phys grid, to exercise two grids in the same stream
+$YAML_EDIT_SCRIPT \
+ -f $YAML_FILE \
+ --grid "Physics ${PGTYPE}" \
+ --fields T_mid horiz_winds
+
+# Add this output yaml file to the list of eamxx output streams
+$ATMCHANGE output_yaml_files+=$YAML_FILE -b
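After the two edit-output-stream calls above, the stream's Fields section should hold both grids, roughly as sketched below (field lists abbreviated); the "IO Grid Name" entry is what triggers the dyn-to-GLL remap at output time.

```python
# Approximate 'Fields' section of eamxx_dyn_output.yaml after the two
# edit-output-stream calls above (field lists abbreviated).
fields = {
    "Dynamics": {
        "Field Names": ["v_dyn", "vtheta_dp_dyn", "dp3d_dyn", "ps_dyn"],
        "IO Grid Name": "Physics GLL",   # online remap dyn -> GLL at output
    },
    "Physics PG2": {                     # or "Physics GLL" on np4 grids
        "Field Names": ["T_mid", "horiz_winds"],
    },
}
print(fields["Dynamics"]["IO Grid Name"])
```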
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/1/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/1/shell_commands
new file mode 100644
index 000000000000..74bd2a4d0213
--- /dev/null
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/1/shell_commands
@@ -0,0 +1,9 @@
+# This preset uses the three output streams (phys_dyn, phys, and diags)
+# It does not add remap, and uses INSTANT output
+
+SCRIPTS_DIR=$CIMEROOT/../components/eamxx/cime_config/testdefs/testmods_dirs/scream/output
+
+# Add the three streams
+. $SCRIPTS_DIR/phys/shell_commands
+. $SCRIPTS_DIR/phys_dyn/shell_commands
+. $SCRIPTS_DIR/diags/shell_commands
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/2/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/2/shell_commands
new file mode 100644
index 000000000000..aea7feb5bbd6
--- /dev/null
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/2/shell_commands
@@ -0,0 +1,12 @@
+# This preset uses the three output streams (phys_dyn, phys, and diags)
+# It does not add remap, and uses AVERAGE output
+
+SCRIPTS_DIR=$CIMEROOT/../components/eamxx/cime_config/testdefs/testmods_dirs/scream/output
+
+# Add the three streams
+. $SCRIPTS_DIR/phys/shell_commands
+. $SCRIPTS_DIR/phys_dyn/shell_commands
+. $SCRIPTS_DIR/diags/shell_commands
+
+# Change avg-type to AVERAGE for all streams
+. $SCRIPTS_DIR/set_avg/shell_commands
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/3/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/3/shell_commands
new file mode 100644
index 000000000000..da1d02dc5de1
--- /dev/null
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/3/shell_commands
@@ -0,0 +1,11 @@
+# This preset uses two output streams (phys and diags)
+# It adds horiz remap, and uses INSTANT output
+
+SCRIPTS_DIR=$CIMEROOT/../components/eamxx/cime_config/testdefs/testmods_dirs/scream/output
+
+# Add the phys/diags streams (cannot add phys_dyn, b/c we use horiz remap)
+. $SCRIPTS_DIR/phys/shell_commands
+. $SCRIPTS_DIR/diags/shell_commands
+
+# Add horiz remap
+. $SCRIPTS_DIR/hremap_to_ne4/shell_commands
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/4/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/4/shell_commands
new file mode 100644
index 000000000000..521ada7f5082
--- /dev/null
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/4/shell_commands
@@ -0,0 +1,14 @@
+# This preset uses two output streams (phys and diags)
+# It adds horizontal remap, and uses AVERAGE output
+
+SCRIPTS_DIR=$CIMEROOT/../components/eamxx/cime_config/testdefs/testmods_dirs/scream/output
+
+# Add the phys/diags streams (cannot add phys_dyn, b/c we use horiz remap)
+. $SCRIPTS_DIR/phys/shell_commands
+. $SCRIPTS_DIR/diags/shell_commands
+
+# Add horiz remap
+. $SCRIPTS_DIR/hremap_to_ne4/shell_commands
+
+# Use AVERAGE
+. $SCRIPTS_DIR/set_avg/shell_commands
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/5/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/5/shell_commands
new file mode 100644
index 000000000000..bd60a0472a87
--- /dev/null
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/5/shell_commands
@@ -0,0 +1,15 @@
+# This preset uses the three output streams (phys_dyn, phys, and diags)
+# It adds vertical remap, and uses AVERAGE output
+
+SCRIPTS_DIR=$CIMEROOT/../components/eamxx/cime_config/testdefs/testmods_dirs/scream/output
+
+# Add the phys/dyn/diags streams
+. $SCRIPTS_DIR/phys/shell_commands
+. $SCRIPTS_DIR/phys_dyn/shell_commands
+. $SCRIPTS_DIR/diags/shell_commands
+
+# Add vertical remap
+. $SCRIPTS_DIR/vremap/shell_commands
+
+# Use AVERAGE
+. $SCRIPTS_DIR/set_avg/shell_commands
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/6/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/6/shell_commands
new file mode 100644
index 000000000000..937d16468516
--- /dev/null
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/preset/6/shell_commands
@@ -0,0 +1,17 @@
+# This preset uses two output streams (phys and diags)
+# It adds horizontal and vertical remap, and uses AVERAGE output
+
+SCRIPTS_DIR=$CIMEROOT/../components/eamxx/cime_config/testdefs/testmods_dirs/scream/output
+
+# Add the phys/diags streams (cannot add phys_dyn, b/c we use horiz remap)
+. $SCRIPTS_DIR/phys/shell_commands
+. $SCRIPTS_DIR/diags/shell_commands
+
+# Add horiz remap
+. $SCRIPTS_DIR/hremap_to_ne4/shell_commands
+
+# Add vert remap
+. $SCRIPTS_DIR/vremap/shell_commands
+
+# Use AVERAGE
+. $SCRIPTS_DIR/set_avg/shell_commands
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/set_avg/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/set_avg/shell_commands
new file mode 100644
index 000000000000..dc58bc6301a7
--- /dev/null
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/set_avg/shell_commands
@@ -0,0 +1,10 @@
+# Look for all the eamxx_***_output.yaml files in the case folder and
+# change the avg type to average.
+
+CASEROOT=$(./xmlquery --value CASEROOT)
+YAML_EDIT_SCRIPT=$CIMEROOT/../components/eamxx/scripts/edit-output-stream
+
+YAML_FILES=$(ls -1 | grep 'eamxx_.*_output.yaml')
+for fname in ${YAML_FILES}; do
+ $YAML_EDIT_SCRIPT -f $fname --avg-type average
+done
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/vremap/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/vremap/shell_commands
new file mode 100644
index 000000000000..151928669d1a
--- /dev/null
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/output/vremap/shell_commands
@@ -0,0 +1,10 @@
+# Look for all the eamxx_***_output.yaml files in the case folder and
+# set vertical remap
+
+CASEROOT=$(./xmlquery --value CASEROOT)
+YAML_EDIT_SCRIPT=$CIMEROOT/../components/eamxx/scripts/edit-output-stream
+
+YAML_FILES=$(ls -1 | grep 'eamxx_.*_output.yaml')
+for fname in ${YAML_FILES}; do
+ $YAML_EDIT_SCRIPT -f $fname --vertical-remap-file \${DIN_LOC_ROOT}/atm/scream/maps/vrt_remapping_p_levs_20230926.nc
+done
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/perf_test/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/perf_test/shell_commands
new file mode 100644
index 000000000000..d4e7c2a95377
--- /dev/null
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/perf_test/shell_commands
@@ -0,0 +1,5 @@
+
+# Force us to use 1 node to eliminate network noise
+if [ `./xmlquery --value MACH` == frontier-scream-gpu ]; then
+ ./xmlchange NTASKS=8
+fi
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/small_kernels/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/small_kernels/shell_commands
index 04989a22796a..e6773dce4199 100644
--- a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/small_kernels/shell_commands
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/small_kernels/shell_commands
@@ -1,7 +1,2 @@
./xmlchange --append SCREAM_CMAKE_OPTIONS='SCREAM_SMALL_KERNELS On'
$CIMEROOT/../components/eamxx/scripts/atmchange --all internal_diagnostics_level=1 atmosphere_processes::internal_diagnostics_level=0 -b
-
-f=$(./xmlquery --value MACH)
-if [ $f == chrysalis ]; then
- ./xmlchange BATCH_COMMAND_FLAGS="--time 00:30:00 -p debug --account e3sm --exclude=chr-0512"
-fi
diff --git a/components/eamxx/cime_config/testdefs/testmods_dirs/scream/spa_remap/shell_commands b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/spa_remap/shell_commands
new file mode 100644
index 000000000000..5a07fb378464
--- /dev/null
+++ b/components/eamxx/cime_config/testdefs/testmods_dirs/scream/spa_remap/shell_commands
@@ -0,0 +1,2 @@
+$CIMEROOT/../components/eamxx/scripts/atmchange -b spa_data_file='${DIN_LOC_ROOT}'/atm/scream/init/spa_file_unified_and_complete_ne4pg2_20231222.nc
+$CIMEROOT/../components/eamxx/scripts/atmchange -b spa_remap_file='${DIN_LOC_ROOT}'/atm/scream/maps/map_ne4pg2_to_ne30pg2_20231201.nc
diff --git a/components/eamxx/cime_config/tests/eamxx_default_files.py b/components/eamxx/cime_config/tests/eamxx_default_files.py
new file mode 100644
index 000000000000..b39d2d5c1553
--- /dev/null
+++ b/components/eamxx/cime_config/tests/eamxx_default_files.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python3
+
+import os
+import http
+import pathlib
+import unittest
+import urllib.request
+import xml.etree.ElementTree as ET
+
+
+class testNamelistDefaultsScream(unittest.TestCase):
+ def setUp(self):
+ """
+ Set up the environment for the test by setting the DIN_LOC_ROOT
+ environment variable. Parse the 'namelist_defaults_scream.xml'
+ file and extract the files of interest based on the DIN_LOC_ROOT
+ variable or the array(file) type. Assign the extracted files
+ to the 'my_files' attribute of the test instance.
+ """
+
+ os.environ["DIN_LOC_ROOT"] = "https://web.lcrc.anl.gov/public/e3sm/inputdata/"
+
+ scream_defaults_path = pathlib.Path(__file__)
+ tree = ET.parse(f"{scream_defaults_path.parent.parent}/namelist_defaults_scream.xml")
+ root = tree.getroot()
+
+ files_of_interest = [
+ child.text for child in root.findall(".//")
+ if child.text and child.text.startswith("${DIN_LOC_ROOT}")
+ ]
+
+ more_files_of_interest = [
+ child.text for child in root.findall(".//")
+ if child.text and "type" in child.attrib.keys() and child.attrib["type"]=="array(file)"
+ ]
+
+ files_of_interest.extend(
+ text.strip() for text_list in more_files_of_interest for text in text_list.split(",")
+ if text.strip().startswith("${DIN_LOC_ROOT}")
+ )
+
+ self.my_files = [
+ file.replace("${DIN_LOC_ROOT}/", "")
+ for file in files_of_interest
+ ]
+
+ self.my_lines = []
+ with open(
+ f"{scream_defaults_path.parent.parent}/namelist_defaults_scream.xml",
+ "r"
+ ) as the_file:
+ for a_line in the_file:
+ self.my_lines.append(a_line)
+
+ def test_ascii_lines(self):
+ """
+ Test that all lines are ASCII
+ """
+
+ for i_line, a_line in enumerate(self.my_lines):
+ with self.subTest(i_line=i_line):
+ self.assertTrue(
+ a_line.isascii(),
+ msg=f"\nERROR! This line is not ASCII!\n{a_line}"
+ )
+
+ def test_opening_files(self):
+ """
+ Test the opening of files from the inputdata server.
+ """
+
+ for i_file in range(len(self.my_files)):
+ with self.subTest(i_file=i_file):
+ try:
+ request_return = urllib.request.urlopen(
+ f"{os.environ['DIN_LOC_ROOT']}{self.my_files[i_file]}"
+ )
+ self.assertIsInstance(request_return, http.client.HTTPResponse)
+ except urllib.error.HTTPError:
+ file_name = f"{os.environ['DIN_LOC_ROOT']}{self.my_files[i_file]}"
+ self.assertTrue(
+ False,
+ msg=f"\nERROR! This file doesn't exist!\n{file_name}"
+ )
+
+ def test_expected_fail(self):
+ """
+ Test an expected failure by manipulating the file name.
+ """
+
+ with self.assertRaises(urllib.error.HTTPError):
+ some_phony_file = f"{self.my_files[5][:-5]}some_phony_file.nc"
+ urllib.request.urlopen(
+ f"{os.environ['DIN_LOC_ROOT']}{some_phony_file}"
+ )
+
+
+if __name__ == '__main__':
+ unittest.main()
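A self-contained miniature of the setUp() extraction above, run on an inline XML snippet (invented for illustration) instead of namelist_defaults_scream.xml:

```python
# Self-contained miniature of the setUp() extraction above, using an inline
# XML snippet (invented) instead of namelist_defaults_scream.xml.
import xml.etree.ElementTree as ET

snippet = """
<defaults>
  <ic_file>${DIN_LOC_ROOT}/atm/a.nc</ic_file>
  <file_list type="array(file)">
    ${DIN_LOC_ROOT}/atm/b.nc, ${DIN_LOC_ROOT}/atm/c.nc</file_list>
  <other>not_an_input_file</other>
</defaults>
"""
root = ET.fromstring(snippet)

files = [c.text for c in root.findall(".//")
         if c.text and c.text.startswith("${DIN_LOC_ROOT}")]
files += [t.strip()
          for c in root.findall(".//")
          if c.text and c.attrib.get("type") == "array(file)"
          for t in c.text.split(",")
          if t.strip().startswith("${DIN_LOC_ROOT}")]

print([f.replace("${DIN_LOC_ROOT}/", "") for f in files])
# -> ['atm/a.nc', 'atm/b.nc', 'atm/c.nc']
```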
diff --git a/components/eamxx/cmake/machine-files/lassen.cmake b/components/eamxx/cmake/machine-files/lassen.cmake
deleted file mode 100644
index 36b69c7f0253..000000000000
--- a/components/eamxx/cmake/machine-files/lassen.cmake
+++ /dev/null
@@ -1,9 +0,0 @@
-include(${CMAKE_CURRENT_LIST_DIR}/common.cmake)
-common_setup()
-
-set(NetCDF_PATH /usr/gdata/climdat/netcdf CACHE STRING "")
-set(NetCDF_Fortran_PATH /usr/gdata/climdat/netcdf CACHE STRING "")
-set(LAPACK_LIBRARIES /usr/lib64/liblapack.so CACHE STRING "")
-set(CMAKE_CXX_FLAGS "-DTHRUST_IGNORE_CUB_VERSION_CHECK" CACHE STRING "" FORCE)
-
-set(SCREAM_INPUT_ROOT "/usr/gdata/climdat/ccsm3data/inputdata/" CACHE STRING "")
diff --git a/components/eamxx/cmake/machine-files/mappy.cmake b/components/eamxx/cmake/machine-files/mappy.cmake
index 7c1fc8cf25ea..29fb2e74b8a1 100644
--- a/components/eamxx/cmake/machine-files/mappy.cmake
+++ b/components/eamxx/cmake/machine-files/mappy.cmake
@@ -1,3 +1,5 @@
include(${CMAKE_CURRENT_LIST_DIR}/common.cmake)
common_setup()
-set(PYTHON_EXECUTABLE "/ascldap/users/jgfouca/packages/Python-3.8.5/bin/python3.8" CACHE STRING "" FORCE)
\ No newline at end of file
+set(PYTHON_EXECUTABLE "/ascldap/users/jgfouca/packages/Python-3.8.5/bin/python3.8" CACHE STRING "" FORCE)
+
+set(CMAKE_Fortran_FLAGS "-fallow-argument-mismatch" CACHE STRING "" FORCE)
diff --git a/components/eamxx/cmake/machine-files/quartz-intel.cmake b/components/eamxx/cmake/machine-files/quartz-intel.cmake
index 753c782702db..defd8cbb2d33 100644
--- a/components/eamxx/cmake/machine-files/quartz-intel.cmake
+++ b/components/eamxx/cmake/machine-files/quartz-intel.cmake
@@ -4,4 +4,4 @@ set(PYTHON_EXECUTABLE "/usr/tce/packages/python/python-3.9.12/bin/python3" CACHE
set(PYTHON_LIBRARIES "/usr/lib64/libpython3.9.so.1.0" CACHE STRING "" FORCE)
option (SCREAM_ENABLE_ML_CORRECTION "Whether to enable ML correction parametrization" ON)
set(HDF5_DISABLE_VERSION_CHECK 1 CACHE STRING "" FORCE)
-execute_process(COMMAND source /usr/WS1/climdat/python_venv/3.9.2/screamML/bin/activate)
+execute_process(COMMAND source /usr/WS1/e3sm/python_venv/3.9.2/screamML/bin/activate)
diff --git a/components/eamxx/cmake/machine-files/quartz.cmake b/components/eamxx/cmake/machine-files/quartz.cmake
index ee9a3dcbffd3..e4b4fcbd8a57 100644
--- a/components/eamxx/cmake/machine-files/quartz.cmake
+++ b/components/eamxx/cmake/machine-files/quartz.cmake
@@ -16,4 +16,4 @@ elseif ("${COMPILER}" STREQUAL "gnu")
set(CMAKE_EXE_LINKER_FLAGS "-L/usr/tce/packages/gcc/gcc-8.3.1/rh/lib/gcc/x86_64-redhat-linux/8/" CACHE STRING "" FORCE)
endif()
-set(SCREAM_INPUT_ROOT "/usr/gdata/climdat/ccsm3data/inputdata" CACHE STRING "")
+set(SCREAM_INPUT_ROOT "/usr/gdata/e3sm/ccsm3data/inputdata" CACHE STRING "")
diff --git a/components/eamxx/cmake/machine-files/ruby-intel.cmake b/components/eamxx/cmake/machine-files/ruby-intel.cmake
index 63fff478fdaf..9c6318da4952 100644
--- a/components/eamxx/cmake/machine-files/ruby-intel.cmake
+++ b/components/eamxx/cmake/machine-files/ruby-intel.cmake
@@ -4,4 +4,4 @@ set(PYTHON_EXECUTABLE "/usr/tce/packages/python/python-3.9.12/bin/python3" CACHE
set(PYTHON_LIBRARIES "/usr/lib64/libpython3.9.so.1.0" CACHE STRING "" FORCE)
option (SCREAM_ENABLE_ML_CORRECTION "Whether to enable ML correction parametrization" ON)
set(HDF5_DISABLE_VERSION_CHECK 1 CACHE STRING "" FORCE)
-execute_process(COMMAND source /usr/WS1/climdat/python_venv/3.9.2/screamML/bin/activate)
+execute_process(COMMAND source /usr/WS1/e3sm/python_venv/3.9.2/screamML/bin/activate)
diff --git a/components/eamxx/cmake/machine-files/ruby.cmake b/components/eamxx/cmake/machine-files/ruby.cmake
index d0a9de4baf4b..77d6e6618c71 100644
--- a/components/eamxx/cmake/machine-files/ruby.cmake
+++ b/components/eamxx/cmake/machine-files/ruby.cmake
@@ -12,4 +12,4 @@ include (${EKAT_MACH_FILES_PATH}/kokkos/openmp.cmake)
include (${EKAT_MACH_FILES_PATH}/mpi/srun.cmake)
-set(SCREAM_INPUT_ROOT "/usr/gdata/climdat/ccsm3data/inputdata" CACHE STRING "")
+set(SCREAM_INPUT_ROOT "/usr/gdata/e3sm/ccsm3data/inputdata" CACHE STRING "")
diff --git a/components/eamxx/cmake/machine-files/syrah.cmake b/components/eamxx/cmake/machine-files/syrah.cmake
deleted file mode 100644
index f03fc1e9d469..000000000000
--- a/components/eamxx/cmake/machine-files/syrah.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-include(${CMAKE_CURRENT_LIST_DIR}/common.cmake)
-common_setup()
-
-include (${EKAT_MACH_FILES_PATH}/kokkos/openmp.cmake)
-include (${EKAT_MACH_FILES_PATH}/mpi/srun.cmake)
-
-# Enable Sandy Bridge arch in Kokkos
-option(Kokkos_ARCH_SNB "" ON)
-
-set(CMAKE_CXX_FLAGS "-w -cxxlib=/usr/tce/packages/gcc/gcc-8.3.1/rh" CACHE STRING "" FORCE)
-set(CMAKE_EXE_LINKER_FLAGS "-L/usr/tce/packages/gcc/gcc-8.3.1/rh/lib/gcc/x86_64-redhat-linux/8/ -mkl" CACHE STRING "" FORCE)
-
-set(SCREAM_INPUT_ROOT "/usr/gdata/climdat/ccsm3data/inputdata/" CACHE STRING "")
diff --git a/components/eamxx/data/SCREAM_YAML_README b/components/eamxx/data/SCREAM_YAML_README
deleted file mode 100644
index daf425845e31..000000000000
--- a/components/eamxx/data/SCREAM_YAML_README
+++ /dev/null
@@ -1,52 +0,0 @@
-
-INTRO:
-
-The scream_input.yaml is the key file for configuring a SCREAM run. This file will be
-processed and copied to $case/run/scream_input.yaml by scream's buidnml script, which
-is called during case.setup. Note, this is for runtime coniguration
-only. Cmake/build-time configuration should be done through SCREAM_CMAKE_OPTIONS.
-
-For inline comments, see the version of scream_input.yaml that lives in the repo
-(components/eamxx/data/scream_input.yaml)
-
-Note, the $case/run/scream_input.yaml will NEVER be overwritten by subsequent
-calls to case.setup/buildnml in order to avoid blowing away potential local
-modifications. To force a regeneration of this file, it should be removed from the
-case and `./case.setup --reset` should be called.
-
-SECTIONS:
-
- Atmosphere Driver: Contains settings for the AD. Can turn off processes by editing "Number of Entries" and
- changing the Process N list.
-
- SCREAM: For general SCREAM settings
-
- HOMME: For HOMME settings. These settings will be translated into data/namelist.nl
-
-SYNTAX:
-
-This file supports some special syntax in addition to basic YAML:
-'${VAR}' will be used to refer to env variables in the CIME case
-
-' val1 : key2 => val2 : elseval>' will be used to express conditional
-statements. If switch_val matches key1, then the expression evaluates to val1; if switch_val
-matches key2, then the expression evaluates to val2; if it matches neither, then
-the expression evaluates to elseval. The elseval component of this expression is optional.
-You can have any number (N>=1) of key => val sections.
-
-Example, if you wanted tstep to depend on atm grid resolution:
-
- tstep: "<${ATM_GRID} : ne4np4 => 300 : 30>"
-
-This would give all ne4 cases a timestep of 300, otherwise it would be 30.
-
-You could specify multiple grid->timestep relationships this way:
-
- tstep: "<${ATM_GRID} : ne4np4 => 300 : ne30np4 => 100 : 30>"
-
-Regex matching is supported:
-
- tstep: "<${ATM_GRID} : .*ne4.* => 300 : .*ne30.* => 100 : 30>"
-
-Note: none of this special syntax will be automatically reprocessed if the case XML values
-are changed. Regenerating this file is necessary if relevant case XML values are modified.
diff --git a/components/eamxx/data/scream_default_output.yaml b/components/eamxx/data/scream_default_output.yaml
deleted file mode 100644
index 7e0a45f12d6a..000000000000
--- a/components/eamxx/data/scream_default_output.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-%YAML 1.1
----
-filename_prefix: ${CASE}.scream.hi
-# WARNING: ERS/ERP tets will override this with AVERAGE
-Averaging Type: Instant
-# One output every 31 days if output frequency is set to once per hour
-Max Snapshots Per File: 744
-Fields:
- Physics ${PHYSICS_GRID_TYPE}:
- Field Names:
- # HOMME
- - ps
- - pseudo_density
- - omega
- - p_int
- - p_mid
- # SHOC + HOMME
- - horiz_winds
- # SHOC
- - cldfrac_liq
- - eddy_diff_mom
- - sgs_buoy_flux
- - tke
- - pbl_height
- # CLD
- - cldfrac_ice_for_analysis
- - cldfrac_tot_for_analysis
- # P3
- - bm
- - nc
- - ni
- - nr
- - qi
- - qm
- - qr
- - eff_radius_qc
- - eff_radius_qi
- - eff_radius_qr
- - precip_ice_surf_mass
- - precip_liq_surf_mass
- - rainfrac
- # SHOC + P3
- - qc
- - qv
- # SHOC + P3 + RRTMGP + HOMME
- - T_mid
- # RRTMGP
- - sfc_alb_dir_vis
- - LW_flux_dn
- - LW_flux_up
- - SW_flux_dn
- - SW_flux_up
- - sfc_flux_lw_dn
- - sfc_flux_sw_net
- - cldtot
- - cldlow
- - cldmed
- - cldhgh
- # Surface Fluxes
- - surf_evap
- - surf_sens_flux
- # Diagnostics
- - PotentialTemperature
- # GLL output for homme states.
- Dynamics:
- Field Names:
- - ps_dyn
- - dp3d_dyn
- - omega_dyn
- IO Grid Name: Physics GLL
-output_control:
-# WARNING: ERS/ERP tets will override this with STOP_N/STOP_OPTION
- Frequency: ${HIST_N}
- frequency_units: ${HIST_OPTION}
-...
diff --git a/components/eamxx/data/scream_default_remap.yaml b/components/eamxx/data/scream_default_remap.yaml
deleted file mode 100644
index 8bf47386c76d..000000000000
--- a/components/eamxx/data/scream_default_remap.yaml
+++ /dev/null
@@ -1,67 +0,0 @@
-%YAML 1.1
----
-filename_prefix: ${CASE}.scream.arm_sites.hi
-Averaging Type: Instant
-Max Snapshots Per File: 744 # One output every 31 days
-#remap_file: /g/g17/donahue5/Code/e3sm/scream-docs/regional_output_sites/20221123_ARM_sites_map.nc
-remap_file: /usr/gdata/climdat/ccsm3data/inputdata/atm/scream/maps/map_ne30np4_to_ne4pg2_mono.20220714.nc
-Fields:
- Physics ${PHYSICS_GRID_TYPE}:
- Field Names:
- # HOMME
- - ps
- - pseudo_density
- - omega
- - p_int
- - p_mid
- # SHOC + HOMME
- - horiz_winds
- # SHOC
- - cldfrac_liq
- - eddy_diff_mom
- - sgs_buoy_flux
- - tke
- - pbl_height
- # CLD
- - cldfrac_ice
- - cldfrac_tot
- # P3
- - bm
- - nc
- - ni
- - nr
- - qi
- - qm
- - qr
- - eff_radius_qc
- - eff_radius_qi
- - eff_radius_qr
- - precip_ice_surf_mass
- - precip_liq_surf_mass
- - rainfrac
- # SHOC + P3
- - qc
- - qv
- # SHOC + P3 + RRTMGP + HOMME
- - T_mid
- # RRTMGP
- - sfc_alb_dir_vis
- - LW_flux_dn
- - LW_flux_up
- - SW_flux_dn
- - SW_flux_up
- - sfc_flux_lw_dn
- - sfc_flux_sw_net
- - cldtot
- - cldlow
- - cldmed
- - cldhgh
- # Surface Fluxes
- - surf_evap
- - surf_sens_flux
- # Diagnostics
-# - PotentialTemperature
-output_control:
- Frequency: ${HIST_N}
- frequency_units: ${HIST_OPTION}
-...
diff --git a/components/eamxx/docs/developer/standalone_testing.md b/components/eamxx/docs/developer/standalone_testing.md
index aedb7b2cbaad..63ea01fc612e 100644
--- a/components/eamxx/docs/developer/standalone_testing.md
+++ b/components/eamxx/docs/developer/standalone_testing.md
@@ -33,9 +33,9 @@ make baseline
```
The tests will run, automatically using the baseline file, which is located in
-the CMake-configurable path `${SCREAM_TEST_DATA_DIR}`. By default, this path is
-set to `data/` within your build directory (which is `$RUN_ROOT_DIR`, in
-our case).
+the CMake-configurable path `${SCREAM_BASELINES_DIR}`. By default, this path is
+set to an invalid string. If baseline tests are enabled, we check that a valid
+path has been provided.
To run all of SCREAM's tests, make sure you're in `$RUN_ROOT_DIR` and type
diff --git a/components/eamxx/docs/user/coarse_nudging.md b/components/eamxx/docs/user/coarse_nudging.md
index c52ce9a0eb24..3d7309c42aaa 100644
--- a/components/eamxx/docs/user/coarse_nudging.md
+++ b/components/eamxx/docs/user/coarse_nudging.md
@@ -20,6 +20,6 @@ In other words, the following options are needed:
```shell
./atmchange atm_procs_list=(sc_import,nudging,homme,physics,sc_export)
./atmchange nudging_fields=U,V
-./atmchange nudging_filename=/path/to/nudging_data_ne4pg2_L72.nc
+./atmchange nudging_filenames_patterns=/path/to/nudging_data_ne4pg2_L72.nc
./atmchange nudging_refine_remap_mapfile=/another/path/to/mapping_file_ne4pg2_to_ne120pg2.nc
```
diff --git a/components/eamxx/scripts/atm_manip.py b/components/eamxx/scripts/atm_manip.py
index 2a763bc1990d..fbd8381e1727 100755
--- a/components/eamxx/scripts/atm_manip.py
+++ b/components/eamxx/scripts/atm_manip.py
@@ -170,7 +170,7 @@ def modify_ap_list(xml_root, group, ap_list_str, append_this):
'p1,p2,p1'
>>> modify_ap_list(tree,node,"p1,p3",False)
Traceback (most recent call last):
- ValueError: ERROR: Unrecognized atm proc name 'p3'. To declare a new group, prepend and append '_' to the name.
+ SystemExit: ERROR: Unrecognized atm proc name 'p3'. To declare a new group, prepend and append '_' to the name.
>>> modify_ap_list(tree,node,"p1,_my_group_",False)
True
>>> get_child(node,"atm_procs_list").text
@@ -203,11 +203,11 @@ def modify_ap_list(xml_root, group, ap_list_str, append_this):
new_aps = [n for n in add_aps if find_node(ap_defaults,n) is None]
for ap in new_aps:
- expect (ap[0]=="_" and ap[-1]=="_" and len(ap)>2, exc_type=ValueError,
- error_msg=f"Unrecognized atm proc name '{ap}'. To declare a new group, prepend and append '_' to the name.")
+ expect (ap[0]=="_" and ap[-1]=="_" and len(ap)>2,
+ f"Unrecognized atm proc name '{ap}'. To declare a new group, prepend and append '_' to the name.")
group = gen_atm_proc_group("", ap_defaults)
group.tag = ap
-
+
ap_defaults.append(group)
# Update the 'atm_procs_list' in this node
@@ -217,6 +217,25 @@ def modify_ap_list(xml_root, group, ap_list_str, append_this):
curr_apl.text = ','.join(ap_list)
return True
+###############################################################################
+def is_locked_impl(node):
+###############################################################################
+ return "locked" in node.attrib.keys() and str(node.attrib["locked"]).upper() == "TRUE"
+
+###############################################################################
+def is_locked(xml_root, node):
+###############################################################################
+ if is_locked_impl(node):
+ return True
+ else:
+ parent_map = create_parent_map(xml_root)
+ parents = get_parents(node, parent_map)
+ for parent in parents:
+ if is_locked_impl(parent):
+ return True
+
+ return False
+
###############################################################################
def apply_change(xml_root, node, new_value, append_this):
###############################################################################
@@ -231,6 +250,7 @@ def apply_change(xml_root, node, new_value, append_this):
if append_this:
+ expect (not is_locked(xml_root, node), f"Cannot change {node.tag}, it is locked")
expect ("type" in node.attrib.keys(),
f"Error! Missing type information for {node.tag}")
type_ = node.attrib["type"]
@@ -238,7 +258,11 @@ def apply_change(xml_root, node, new_value, append_this):
"Error! Can only append with array and string types.\n"
f" - name: {node.tag}\n"
f" - type: {type_}")
- if is_array_type(type_):
+
+ if node.text is None:
+ node.text = ""
+
+ if is_array_type(type_) and node.text!="":
node.text += ", " + new_value
else:
node.text += new_value
@@ -246,6 +270,7 @@ def apply_change(xml_root, node, new_value, append_this):
any_change = True
elif node.text != new_value:
+ expect (not is_locked(xml_root, node), f"Cannot change {node.tag}, it is locked")
check_value(node,new_value)
node.text = new_value
any_change = True
@@ -283,16 +308,37 @@ def atm_config_chg_impl(xml_root, change, all_matches=False):
"""
>>> xml = '''
...
- ... 1,2,3
- ... 1
- ... 1
- ... one
- ... one
- ... one
- ...
- ... two
- ... 2
- ...
+ ... 1,2,3
+ ... 1
+ ... 1
+ ... one
+ ... one
+ ... one
+ ...
+ ... two
+ ... 2
+ ...
+ ...
+ ...
+ ...
+ ... hi
+ ...
+ ...
+ ...
+ ...
+ ...
+ ...
+ ... hi
+ ...
+ ...
+ ...
+ ...
+ ...
+ ...
+ ... hi
+ ...
+ ...
+ ...
...
... '''
>>> import xml.etree.ElementTree as ET
@@ -306,7 +352,8 @@ def atm_config_chg_impl(xml_root, change, all_matches=False):
>>> ################ INVALID TYPE #######################
>>> atm_config_chg_impl(tree,'prop2=two')
Traceback (most recent call last):
- ValueError: Could not refine 'two' as type 'integer'
+ CIME.utils.CIMEError: ERROR: Could not refine 'two' as type 'integer':
+ could not convert string to float: 'two'
>>> ################ INVALID VALUE #######################
>>> atm_config_chg_impl(tree,'prop2=3')
Traceback (most recent call last):
@@ -350,6 +397,16 @@ def atm_config_chg_impl(xml_root, change, all_matches=False):
True
>>> get_xml_nodes(tree,'e')[0].text
'one, two'
+ >>> ################ Test locked ##################
+ >>> atm_config_chg_impl(tree, 'lprop2=yo')
+ Traceback (most recent call last):
+ SystemExit: ERROR: Cannot change lprop2, it is locked
+ >>> atm_config_chg_impl(tree, 'lprop3=yo')
+ Traceback (most recent call last):
+ SystemExit: ERROR: Cannot change lprop3, it is locked
+ >>> atm_config_chg_impl(tree, 'lprop4=yo')
+ Traceback (most recent call last):
+ SystemExit: ERROR: Cannot change lprop4, it is locked
"""
node_name, new_value, append_this = parse_change(change)
matches = get_xml_nodes(xml_root, node_name)
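To summarize the new locking rule in atm_manip.py: a parameter refuses changes if it, or any ancestor node, carries locked="true". A standalone toy version of that walk-up check (helper names invented):

```python
# Toy version of the is_locked/is_locked_impl logic added above: a node is
# locked if it or any ancestor carries locked="true" (case-insensitive).
import xml.etree.ElementTree as ET

root = ET.fromstring(
    '<root><grp locked="true"><child>1</child></grp><free>2</free></root>')

def locked_here(node):
    return str(node.attrib.get("locked", "")).upper() == "TRUE"

def locked(tree_root, node):
    parent_of = {c: p for p in tree_root.iter() for c in p}  # child -> parent
    while node is not None:
        if locked_here(node):
            return True
        node = parent_of.get(node)
    return False

print(locked(root, root.find(".//child")))  # True: enclosing group is locked
print(locked(root, root.find("free")))      # False
```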
diff --git a/components/eamxx/scripts/change-param-pattern b/components/eamxx/scripts/change-param-pattern
index 5ab26a64df3b..a54c7292d246 100755
--- a/components/eamxx/scripts/change-param-pattern
+++ b/components/eamxx/scripts/change-param-pattern
@@ -35,4 +35,4 @@ for bad_name_under in bad_name_unders:
run_cmd_no_fail(f"sed -i -e 's/{bad_name_ws}/{good_name}/g' $(git grep -l '{bad_name_ws}')")
run_cmd_no_fail(f"git commit -a -m '{bad_name_ws} -> {good_name}'")
print(" Testing")
- run_cmd_no_fail("./create_test ERS_D_Ln22.ne4pg2_ne4pg2.F2010-SCREAMv1 --compiler=gnu9", from_dir="../../cime/scripts")
+ run_cmd_no_fail("./create_test ERS_D_Ln22.ne4pg2_ne4pg2.F2010-SCREAMv1", from_dir="../../cime/scripts")
diff --git a/components/eamxx/scripts/check-hashes-ers b/components/eamxx/scripts/check-hashes-ers
new file mode 100755
index 000000000000..6d9da4b2f98e
--- /dev/null
+++ b/components/eamxx/scripts/check-hashes-ers
@@ -0,0 +1,171 @@
+#!/usr/bin/env python3
+
+"""
+See https://acme-climate.atlassian.net/wiki/spaces/NGDNA/pages/3831923056/EAMxx+BFB+hashing
+for full explanation.
+
+This script is used by the scream-internal_diagnostics_level testmod to check
+hash output after a test has run.
+"""
+
+import sys, re, glob, pathlib, argparse
+
+from utils import run_cmd_no_fail, expect
+
+###############################################################################
+def parse_command_line(args, description):
+###############################################################################
+ parser = argparse.ArgumentParser(
+ usage="""\n{0} [=] ...
+OR
+{0} --help
+
+\033[1mEXAMPLES:\033[0m
+ \033[1;32m# Run hash checker on /my/case/dir \033[0m
+ > {0} /my/case/dir
+""".format(pathlib.Path(args[0]).name),
+ description=description,
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter
+ )
+
+ parser.add_argument(
+ "case_dir",
+ help="The test case you want to check"
+ )
+
+ return parser.parse_args(args[1:])
+
+###############################################################################
+def readall(fn):
+###############################################################################
+ with open(fn,'r') as f:
+ txt = f.read()
+ return txt
+
+###############################################################################
+def greptxt(pattern, txt):
+###############################################################################
+ return re.findall('(?:' + pattern + ').*', txt, flags=re.MULTILINE)
+
+###############################################################################
+def grep(pattern, fn):
+###############################################################################
+ txt = readall(fn)
+ return greptxt(pattern, txt)
+
+###############################################################################
+def get_log_glob_from_atm_modelio(case_dir):
+###############################################################################
+ filename = case_dir / 'CaseDocs' / 'atm_modelio.nml'
+ ln = grep('diro = ', filename)[0]
+ run_dir = pathlib.Path(ln.split()[2].split('"')[1])
+ ln = grep('logfile = ', filename)[0]
+ atm_log_fn = ln.split()[2].split('"')[1]
+ id_ = atm_log_fn.split('.')[2]
+ return str(run_dir / '**' / f'e3sm.log.{id_}*')
+
+###############################################################################
+def get_hash_lines(fn):
+###############################################################################
+ rlns = run_cmd_no_fail(f'zgrep exxhash {fn}').splitlines()
+ lns = []
+ if len(rlns) == 0: return lns
+ for rln in rlns:
+ pos = rln.find('exxhash')
+ lns.append(rln[pos:])
+ return lns
+
+###############################################################################
+def parse_time(hash_ln):
+###############################################################################
+ return hash_ln.split()[1:3]
+
+###############################################################################
+def all_equal(t1, t2):
+###############################################################################
+ if len(t1) != len(t2): return False
+ for i in range(len(t1)):
+ if t1[i] != t2[i]: return False
+ return True
+
+###############################################################################
+def find_first_index_at_time(lns, time):
+###############################################################################
+ for i, ln in enumerate(lns):
+ t = parse_time(ln)
+ if all_equal(time, t): return i
+ return None
+
+###############################################################################
+def diff(l1, l2):
+###############################################################################
+ diffs = []
+ for i in range(len(l1)):
+ if l1[i] != l2[i]:
+ diffs.append((l1[i], l2[i]))
+ return diffs
+
+###############################################################################
+def check_hashes_ers(case_dir):
+###############################################################################
+ case_dir_p = pathlib.Path(case_dir)
+ expect(case_dir_p.is_dir(), f"{case_dir} is not a dir")
+
+ # Look for the two e3sm.log files.
+ glob_pat = get_log_glob_from_atm_modelio(case_dir_p)
+ e3sm_fns = glob.glob(glob_pat, recursive=True)
+ if len(e3sm_fns) == 0:
+ print('Could not find e3sm.log files with glob string {}'.format(glob_pat))
+ return False
+ e3sm_fns.sort()
+ if len(e3sm_fns) == 1:
+ # This is the first run. Exit and wait for the second
+ # run. (POSTRUN_SCRIPT is called after each of the two runs.)
+ print('Exiting on first run.')
+ return True
+ print('Diffing base {} and restart {}'.format(e3sm_fns[0], e3sm_fns[1]))
+
+ # Because of the prefixed 1: and 2: on some systems, we can't just use
+ # zdiff.
+ lns = []
+ for f in e3sm_fns:
+ lns.append(get_hash_lines(f))
+ time = parse_time(lns[1][0])
+ time_idx = find_first_index_at_time(lns[0], time)
+ if time_idx is None:
+ print('Could not find a start time.')
+ return False
+ lns[0] = lns[0][time_idx:]
+ if len(lns[0]) != len(lns[1]):
+ print('Number of hash lines starting at restart time does not agree.')
+ return False
+ diffs = diff(lns[0], lns[1])
+
+ # Flushed prints to e3sm.log can sometimes conflict with other
+ # output. Permit up to 'thr' diffs so we don't fail due to badly printed
+ # lines. This isn't a big loss in checking because an ERS_Ln22 second run
+ # writes > 1000 hash lines, and a true loss of BFBness is nearly certain to
+ # propagate to a large number of subsequent hashes.
+ thr = 5
+ if len(lns[0]) < 100: thr = 0
+
+ ok = True
+ if len(diffs) > thr:
+ print('DIFF')
+ print(diffs[-10:])
+ ok = False
+ else:
+ print('OK')
+
+ return ok
+
+###############################################################################
+def _main_func(description):
+###############################################################################
+ success = check_hashes_ers(**vars(parse_command_line(sys.argv, description)))
+ sys.exit(0 if success else 1)
+
+###############################################################################
+
+if (__name__ == "__main__"):
+ _main_func(__doc__)
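The core of the comparison above, in miniature: locate the restart run's first timestamp among the base run's hash lines, then diff position-by-position (hash values and timestamps invented):

```python
# Toy illustration of the alignment + diff step in check_hashes_ers above.
base    = ["exxhash 0001-01-01 00:00:00 aaaa",
           "exxhash 0001-01-01 01:00:00 bbbb",
           "exxhash 0001-01-01 02:00:00 cccc"]
restart = ["exxhash 0001-01-01 01:00:00 bbbb",
           "exxhash 0001-01-01 02:00:00 cccc"]

time = restart[0].split()[1:3]  # parse_time: date + time tokens
idx = next(i for i, ln in enumerate(base) if ln.split()[1:3] == time)
diffs = [(a, b) for a, b in zip(base[idx:], restart) if a != b]
print("OK" if not diffs else f"DIFF: {diffs}")  # -> OK
```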
diff --git a/components/eamxx/scripts/cime-nml-tests b/components/eamxx/scripts/cime-nml-tests
index 0a60376bd45f..b128121cf95f 100755
--- a/components/eamxx/scripts/cime-nml-tests
+++ b/components/eamxx/scripts/cime-nml-tests
@@ -222,9 +222,10 @@ class TestBuildnml(unittest.TestCase):
# Append to an existing entry
name = 'output_yaml_files'
out = run_cmd_no_fail(f"./atmchange {name}+=a.yaml", from_dir=case)
+ out = run_cmd_no_fail(f"./atmchange {name}+=b.yaml", from_dir=case)
# Get the yaml files
- expected =f'{EAMXX_DIR / "data/scream_default_output.yaml"}, a.yaml'
+ expected = 'a.yaml, b.yaml'
self._get_values(case, name, value=expected, expect_equal=True)
###########################################################################
diff --git a/components/eamxx/scripts/eamxx-params-docs-autogen b/components/eamxx/scripts/eamxx-params-docs-autogen
index 3024677d96b0..9bea5242d5a9 100755
--- a/components/eamxx/scripts/eamxx-params-docs-autogen
+++ b/components/eamxx/scripts/eamxx-params-docs-autogen
@@ -20,6 +20,7 @@ from mdutils import Html
sys.path.append(os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "cime_config"))
from eamxx_buildnml_impl import resolve_all_inheritances, get_valid_selectors
+from atm_manip import is_locked_impl
###############################################################################
def parse_command_line(args, description):
@@ -34,14 +35,15 @@ def parse_command_line(args, description):
return parser.parse_args(args[1:])
###########################################################################
-def add_param(docs,scope,item):
+def add_param(docs, scope, item):
###########################################################################
# Locked parameters are not to be configured at runtime, so don't even bother
# E.g, a locked param is something we need to get in the input file, like
# the restart write frequency, but we don't want the user to modify it
# via atmchange
- if "locked" in item.attrib.keys():
+ if is_locked_impl(item):
return
+
docs.new_line(f"* {scope}{item.tag}:")
pdoc = item.attrib['doc'] if 'doc' in item.attrib.keys() else "**MISSING**"
@@ -53,21 +55,23 @@ def add_param(docs,scope,item):
pvalid = item.attrib['valid_values'] if 'valid_values' in item.attrib.keys() else None
if pvalid is not None:
docs.new_line(f" - valid values: {pvalid}")
+
pconstr = item.attrib['constraints'] if 'constraints' in item.attrib.keys() else None
if pconstr is not None:
docs.new_line(f" - constraints: {pconstr}")
###########################################################################
-def add_children(docs,xml,scope=""):
+def add_children(docs, elem, scope=""):
###########################################################################
done = []
# Locked parameters are not to be configured at runtime, so don't even bother
# E.g, a locked param is something we need to get in the input file, like
# the restart write frequency, but we don't want the user to modify it
# via atmchange
- if "locked" in xml.attrib.keys():
+ if is_locked_impl(elem):
return
- for item in xml:
+
+ for item in elem:
# The same entry may appear multiple times in the XML defaults file,
# each time with different selectors. We don't want to generate the
# same documentation twice.
@@ -75,9 +79,10 @@ def add_children(docs,xml,scope=""):
continue
done.append(item.tag)
if len(item)>0:
- add_children (docs,item,f"{scope}{xml.tag}::")
+ add_children (docs,item,f"{scope}{elem.tag}::")
else:
- add_param(docs,f"{scope}{xml.tag}::",item)
+ add_param(docs,f"{scope}{elem.tag}::",item)
+
docs.new_line()
###########################################################################
@@ -107,7 +112,7 @@ def generate_params_docs():
continue
docs.new_header(level=2,title=ap.tag)
add_children(docs,ap)
-
+
ic = xml_defaults.find('initial_conditions')
docs.new_header(level=1,title="Initial Conditions Parameters")
add_children(docs,ic)
@@ -123,6 +128,7 @@ def generate_params_docs():
homme = xml_defaults.find('ctl_nl')
docs.new_header(level=1,title='Homme namelist')
add_children(docs,homme)
+
docs.create_md_file()
print("Generating eamxx params documentation ... SUCCESS!")
diff --git a/components/eamxx/scripts/edit-output-stream b/components/eamxx/scripts/edit-output-stream
new file mode 100755
index 000000000000..76c88e69ca39
--- /dev/null
+++ b/components/eamxx/scripts/edit-output-stream
@@ -0,0 +1,149 @@
+#!/usr/bin/env python3
+
+"""
+Edit (or create) an output stream yaml file
+"""
+
+import argparse, sys, pathlib
+
+from edit_output_stream import edit_output_stream_impl
+
+###############################################################################
+def parse_command_line(args, description):
+###############################################################################
+ parser = argparse.ArgumentParser(
+ usage="""\n{0} = [=] ...
+OR
+{0} --help
+
+\033[1mEXAMPLES:\033[0m
+ \033[1;32m# Generate empty file (with invalid options)\033[0m
+ > {0} -g -f my_output.yaml
+
+ \033[1;32m# Generate empty file (with invalid options), overwrite if existing\033[0m
+ > {0} -g -O -f my_output.yaml
+
+ \033[1;32m# Change avg type to Instant, output frequency to 1 day\033[0m
+ > {0} -f my_output.yaml --avg-type instant --freq 1 --freq-units ndays
+
+ \033[1;32m# Set horiz remapping\033[0m
+ > {0} -f my_output.yaml --horiz-remap-file /path/to/map.nc
+""".format(pathlib.Path(args[0]).name),
+ description=description,
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter
+ )
+
+ parser.add_argument(
+ "-f","--filename",
+ required=True,
+ type=str,
+ help="The name of the yaml file to be configured",
+ )
+
+ parser.add_argument(
+ "--prefix",
+ type=str,
+ help="The prefix of the output files",
+ )
+
+ parser.add_argument(
+ "-g", "--generate",
+ default=False,
+ dest="generate",
+ action="store_true",
+ help="Generate a new file",
+ )
+ parser.add_argument(
+ "-O", "--overwrite",
+ default=False,
+ action="store_true",
+ help="When generating a new file, overwrite existing file (if any)",
+ )
+
+ parser.add_argument(
+ "-r","--reset",
+ default=None,
+ nargs="+",
+ type=str,
+ help="List of options to remove (or reset to default values) BEFORE doing any other edit"
+ )
+
+ parser.add_argument(
+ "--avg-type",
+ default=None,
+ type=str.lower,
+ help="Set the averaging type",
+ choices=['instant','average','max','min'],
+ )
+
+ parser.add_argument(
+ "--skip-t0-output",
+ action="store_true",
+ help="Skip t=case_t0 output (only relevant for INSTANT avg)"
+ )
+
+ parser.add_argument(
+ "--freq-units",
+ default=None,
+ type=str.lower,
+ help="Set the output frequency units",
+ )
+
+ parser.add_argument(
+ "--freq",
+ type=str.lower,
+ default=None,
+ help="Set the output frequency",
+ )
+
+ parser.add_argument(
+ "--grid",
+ default=None,
+ type=str,
+ help="Specify grid for which --fields/--io-grid options apply to",
+ )
+
+ parser.add_argument(
+ "--fields",
+ default=[],
+ nargs="+",
+ help="Fields to add to output",
+ )
+
+ parser.add_argument(
+ "--io-grid",
+ default=None,
+ type=str,
+ help="Name of grid onto which to remap fields before outputing them",
+ )
+
+ parser.add_argument(
+ "--horiz-remap-file",
+ default=None,
+ help="Map file to use for horizontal remap",
+ )
+
+ parser.add_argument(
+ "--vertical-remap-file",
+ default=None,
+ help="Map file to use for horizontal remap",
+ )
+
+ return parser.parse_args(args[1:])
+
+###############################################################################
+def _main_func(description):
+###############################################################################
+ if "--test" in sys.argv:
+ from doctest import testmod
+ import edit_output_stream
+ testmod()
+ testmod(m=edit_output_stream)
+ else:
+ edit_output_stream_impl(**vars(parse_command_line(sys.argv, description)))
+ sys.exit(0)
+
+###############################################################################
+
+if (__name__ == "__main__"):
+ _main_func(__doc__)
diff --git a/components/eamxx/scripts/edit_output_stream.py b/components/eamxx/scripts/edit_output_stream.py
new file mode 100644
index 000000000000..8ca194a47f98
--- /dev/null
+++ b/components/eamxx/scripts/edit_output_stream.py
@@ -0,0 +1,223 @@
+import pathlib
+
+from utils import expect, ensure_yaml
+
+ensure_yaml()
+import yaml
+
+###############################################################################
+def generate_empty_yaml(filename,overwrite):
+###############################################################################
+ """
+ Generate a yaml file with basic fields set, but containing empty or invalid values
+ >>> fname = "__test__.yaml"
+ >>> generate_empty_yaml(fname,False)
+ >>> generate_empty_yaml(fname,False)
+ Traceback (most recent call last):
+ SystemExit: ERROR: YAML file already exists. Re-run with -O/--overwrite to overwrite existing file
+ >>> generate_empty_yaml(fname,True)
+ >>> data = yaml.load(open(fname,'r'),Loader=yaml.SafeLoader)
+ >>> len(data)
+ 4
+ >>> data["filename_prefix"]
+ 'UNSET'
+ >>> data["Averaging Type"]
+ 'INVALID'
+ >>> len(data["Fields"])
+ 0
+ >>> oc = data["output_control"]
+ >>> len(oc)
+ 2
+ >>> oc["Frequency"]
+ -1
+ >>> oc["frequency_units"]
+ 'never'
+ >>> # Clean up the file
+ >>> pathlib.Path(fname).unlink()
+ """
+ file = pathlib.Path(filename).resolve()
+
+ expect (overwrite or not file.exists(),
+ "YAML file already exist. Re-run with -O/--overwrite to overwrite existing file")
+
+ if file.exists():
+ file.unlink()
+
+ data = {}
+ data["filename_prefix"] = "UNSET"
+ data["Averaging Type"] = "INVALID"
+ data["Fields"] = {}
+ data["output_control"] = {}
+ data["output_control"]["skip_t0_output"] = "false"
+ data["output_control"]["Frequency"] = -1
+ data["output_control"]["frequency_units"] = "never"
+
+ with open(file,'w') as fd:
+ yaml.dump(data,fd,Dumper=yaml.SafeDumper,explicit_start=True,explicit_end=True,version=(1,2))
+
+###############################################################################
+def edit_output_stream_impl(filename,prefix=None,generate=False,overwrite=False,
+ avg_type=None,skip_t0_output=False,freq_units=None,freq=None,
+ grid=None,fields=[],reset=None,io_grid=None,
+ horiz_remap_file=None,vertical_remap_file=None):
+###############################################################################
+ """
+ Apply the requested changes to the output stream yaml file
+ >>> fname = '__test__.yaml'
+ >>> # Create the file
+ >>> edit_output_stream_impl(fname,generate=True,prefix='foo')
+ >>> # Set some basic options, and then check
+ >>> edit_output_stream_impl(fname,avg_type='max',freq_units='ndays',freq=10)
+ >>> data = yaml.load(open(fname,'r'),Loader=yaml.SafeLoader)
+ >>> data['filename_prefix']
+ 'foo'
+ >>> data['Averaging Type']
+ 'max'
+ >>> data['output_control']['Frequency']
+ 10
+ >>> data['output_control']['frequency_units']
+ 'ndays'
+ >>> # Set fields options, and then check
+ >>> edit_output_stream_impl(fname,fields=['a','b'],grid='my_grid',io_grid='other_grid')
+ >>> data = yaml.load(open(fname,'r'),Loader=yaml.SafeLoader)
+ >>> f = data['Fields']['my_grid']['Field Names']
+ >>> f.sort()
+ >>> f
+ ['a', 'b']
+ >>> data['Fields']['my_grid']['IO Grid Name']
+ 'other_grid'
+ >>> # No remap if online remap (IO Grid Name) is set
+ >>> edit_output_stream_impl(fname,horiz_remap_file='blah')
+ Traceback (most recent call last):
+ SystemExit: ERROR: Cannot use online remap and horiz/vert remap at the same time.
+ >>> edit_output_stream_impl(fname,vertical_remap_file='blah')
+ Traceback (most recent call last):
+ SystemExit: ERROR: Cannot use online remap and horiz/vert remap at the same time.
+ >>> # Remove io grid and fields
+ >>> edit_output_stream_impl(fname,reset=['fields','io-grid'])
+ Traceback (most recent call last):
+ SystemExit: ERROR: Fields reset requested, but no grid name provided. Re-run with --grid GRID_NAME
+ >>> edit_output_stream_impl(fname,reset=['fields','io-grid'],grid='my_grid')
+ >>> data = yaml.load(open(fname,'r'),Loader=yaml.SafeLoader)
+ >>> 'my_grid' in data['Fields'].keys()
+ False
+ >>> # Set remap options, and then check
+ >>> edit_output_stream_impl(fname,horiz_remap_file='blah1',vertical_remap_file='blah2')
+ >>> data = yaml.load(open(fname,'r'),Loader=yaml.SafeLoader)
+ >>> data['horiz_remap_file']
+ 'blah1'
+ >>> data['vertical_remap_file']
+ 'blah2'
+ >>> # Clean up the file
+ >>> pathlib.Path(fname).unlink()
+ """
+
+ if generate:
+ generate_empty_yaml(filename,overwrite)
+
+ file = pathlib.Path(filename).resolve()
+
+ expect (file.exists(),
+ "YAML file does not exist. Re-run with -g/--generate to create")
+
+ data = yaml.load(open(file,"r"),Loader=yaml.SafeLoader)
+
+ # Before adding new options, process all the reset requests
+ if reset is not None:
+ for s in reset:
+ if s=="avg-type":
+ data["Averaging Type"] = "INVALID"
+ elif s=="skip_t0_output":
+ data["skip_t0_output"] = "false"
+ elif s=="preifx":
+ data["filename_prefix"] = "UNSET"
+ elif s=="freq":
+ data["output_control"]["Frequency"] = -1
+ elif s=="freq_units":
+ data["output_control"]["frequency_units"] = "never"
+ elif s=="horiz_remap_file":
+ del data["horiz_remap_file"]
+ elif s=="vert_remap_file":
+ del data["vert_remap_file"]
+ elif s=="fields":
+ expect (grid is not None,
+ "Fields reset requested, but no grid name provided. Re-run with --grid GRID_NAME")
+ if grid in data["Fields"].keys():
+ data["Fields"][grid]["Field Names"] = []
+
+ # Remove this grid if there are no other options set
+ if len(data["Fields"][grid])==1:
+ del data["Fields"][grid]
+ elif s=="io-grid":
+ expect (grid is not None,
+ "IO grid reset requested, but no grid name provided. Re-run with --grid GRID_NAME")
+ if grid in data["Fields"].keys():
+ del data["Fields"][grid]["IO Grid Name"]
+
+ # Remove this grid if there's not other options set other than
+ # fields names, and field names is an empty list
+ if len(data["Fields"][grid])==1 and len(data["Fields"][grid]["Field Names"])==0:
+ del data["Fields"][grid]
+
+ if prefix is not None:
+ data["filename_prefix"] = prefix
+
+ if avg_type is not None:
+ data["Averaging Type"] = avg_type
+
+ if skip_t0_output is not None:
+ data["skip_t0_output"] = skip_t0_output
+
+ if freq is not None:
+ expect (freq.lstrip('-+').isnumeric() or freq=='hist_n',
+ f"Invalid value '{freq}' for --freq. Valid options are\n"
+ " - an integer\n"
+ " - HIST_N\n")
+ data["output_control"]["Frequency"] = int(freq) if freq.lstrip('-+').isnumeric() else f"${{{freq.upper()}}}"
+
+ if freq_units is not None:
+ explicit = ['nsteps','nsecs','nmins','nhours','ndays','nmonths','nyears']
+ expect (freq_units in explicit or freq_units=='hist_option',
+ f"Invalid value '{freq_units}' for --freq-units. Valid options are (case insensitive)\n"
+ " - explicit values: 'nsteps','nsecs','nmins','nhours','ndays','nmonths','nyears'\n"
+ " - CIME variables : 'HIST_OPTION'\n")
+
+ data["output_control"]["frequency_units"] = freq_units if freq_units in explicit else f"${{{freq_units.upper()}}}"
+
+ if horiz_remap_file is not None:
+ data["horiz_remap_file"] = horiz_remap_file
+
+ if vertical_remap_file is not None:
+ data["vertical_remap_file"] = vertical_remap_file
+
+ if len(fields)>0 or io_grid is not None:
+ expect (grid is not None,
+ "Fields list specified, but no grid name provided. Re-run with --grid GRID_NAME")
+
+ section = data["Fields"].setdefault(grid,{})
+ if "Field Names" not in section.keys():
+ section["Field Names"] = []
+
+ fnames = section["Field Names"]
+ fnames += fields
+ fnames = list(set(fnames))
+ section["Field Names"] = fnames
+
+ if io_grid is not None:
+ section["IO Grid Name"] = io_grid
+ # If not already present, add an empty list of field names
+ section.setdefault("Field Names",[])
+
+ data["Fields"][grid] = section
+
+ # We cannot do online remap (typically dyn->physGLL) if horiz or vert remap is used
+ has_online_remap = False
+ for k,v in data["Fields"].items():
+ has_online_remap = has_online_remap or "IO Grid Name" in v.keys()
+ has_vert_remap = "vertical_remap_file" in data.keys()
+ has_horiz_remap = "horiz_remap_file" in data.keys()
+ expect (not has_online_remap or (not has_vert_remap and not has_horiz_remap),
+ "Cannot use online remap and horiz/vert remap at the same time.")
+
+ with open(file,'w') as fd:
+ yaml.dump(dict(data),fd,Dumper=yaml.SafeDumper,explicit_start=True,explicit_end=True,version=(1,2))
diff --git a/components/eamxx/scripts/git_utils.py b/components/eamxx/scripts/git_utils.py
index e3aa70025398..a7202bfbfaba 100644
--- a/components/eamxx/scripts/git_utils.py
+++ b/components/eamxx/scripts/git_utils.py
@@ -55,7 +55,7 @@ def get_current_head(repo=None):
return branch
###############################################################################
-def git_refs_difference (cmp_ref, head="HEAD", repo=None):
+def git_refs_difference(cmp_ref, head="HEAD", repo=None):
###############################################################################
"""
Return the difference in commits between cmp_ref and head.
@@ -136,6 +136,17 @@ def merge_git_ref(git_ref, repo=None, verbose=False, dry_run=False):
print ("git ref {} successfully merged.".format(git_ref))
print_last_commit()
+###############################################################################
+def create_backup_commit(repo=None, dry_run=False):
+###############################################################################
+
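+ """
+ Save all uncommitted/unstaged changes in a temporary backup commit, leaving
+ the work tree clean. The commit is undone later by cleanup_repo.
+ """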
+ bkp_cmd = "git add -A && git commit -m 'WARNING: test-all-scream backup commit'"
+ if dry_run:
+ print (f"Would run: {bkp_cmd}")
+ else:
+ run_cmd_no_fail(bkp_cmd, from_dir=repo)
+ expect(is_repo_clean(repo=repo), "Something went wrong while performing the backup commit")
+
###############################################################################
def print_last_commit(git_ref=None, repo=None, dry_run=False):
###############################################################################
@@ -182,7 +193,7 @@ def get_git_toplevel_dir(repo=None):
return output if stat == 0 else None
###############################################################################
-def cleanup_repo(orig_branch, orig_commit, repo=None, dry_run=False):
+def cleanup_repo(orig_branch, orig_commit, has_backup_commit=False, repo=None, dry_run=False):
###############################################################################
"""
Discards all unstaged changes, as well as untracked files
@@ -206,4 +217,10 @@ def cleanup_repo(orig_branch, orig_commit, repo=None, dry_run=False):
# NOTE: if you reset the branch, don't forget to re-update the modules!!
if curr_commit != orig_commit and not dry_run:
run_cmd_no_fail("git reset --hard {}".format(orig_commit), from_dir=repo)
+ if has_backup_commit:
+ # This can happen if we ran an integration test with a dirty repo.
+ # test_all_scream will create a temporary backup commit, which we
+ # need to undo while leaving the changed files in the workspace.
+ # So DON'T add --hard to this call!
+ run_cmd_no_fail("git reset HEAD~1", from_dir=repo)
update_submodules(repo=repo)
diff --git a/components/eamxx/scripts/jenkins/jenkins_cleanup_impl.sh b/components/eamxx/scripts/jenkins/jenkins_cleanup_impl.sh
index 942be64d8df6..3282f86e7158 100755
--- a/components/eamxx/scripts/jenkins/jenkins_cleanup_impl.sh
+++ b/components/eamxx/scripts/jenkins/jenkins_cleanup_impl.sh
@@ -1,11 +1,10 @@
#!/bin/bash -xe
-# Adjust this number to keep more/less builds
-echo "WORKSPACE: ${WORKSPACE}, BUILD_ID: ${BUILD_ID}"
+echo "RUNNING CLEANUP FOR WORKSPACE: ${WORKSPACE}, BUILD_ID: ${BUILD_ID}"
cd ${WORKSPACE}
-NUM_KEEP=30
+NUM_KEEP=12 # Adjust this number to keep more/fewer builds
KEEP_LAST=${BUILD_ID}
KEEP_FIRST=$((${BUILD_ID}-${NUM_KEEP}))
KEEP="$(seq ${KEEP_FIRST} 1 ${KEEP_LAST})"
@@ -15,3 +14,11 @@ REMOVE_THESE="$(ls -1 | grep -vF "${KEEP}")"
echo "Purging old builds: ${REMOVE_THESE}."
/bin/rm -rf $REMOVE_THESE
+
+# Now clean up the scratch area
+if [[ "$NODE_NAME" == "mappy" ]]; then
+ # Ensure we have a newer python
+ source $JENKINS_SCRIPT_DIR/${NODE_NAME}_setup
+
+ $JENKINS_SCRIPT_DIR/scratch_cleanup.py
+fi
diff --git a/components/eamxx/scripts/jenkins/jenkins_common_impl.sh b/components/eamxx/scripts/jenkins/jenkins_common_impl.sh
index fe293debe446..664c911585e2 100755
--- a/components/eamxx/scripts/jenkins/jenkins_common_impl.sh
+++ b/components/eamxx/scripts/jenkins/jenkins_common_impl.sh
@@ -72,7 +72,12 @@ if [ $skip_testing -eq 0 ]; then
# IF such dir is not found, then the default (ctest-build/baselines) is used
BASELINES_DIR=AUTO
- TAS_ARGS="--baseline-dir $BASELINES_DIR \$compiler -c EKAT_DISABLE_TPL_WARNINGS=ON -p -i -m \$machine"
+ TAS_ARGS="--baseline-dir $BASELINES_DIR \$compiler -p -c EKAT_DISABLE_TPL_WARNINGS=ON -i -m \$machine"
+ # pm-gpu needs to work in the scratch area to avoid filling the home quota
+ if [[ "$SCREAM_MACHINE" == "pm-gpu" ]]; then
+ TAS_ARGS="${TAS_ARGS} -w /pscratch/sd/e/e3smtest/e3sm_scratch/pm-gpu/ctest-build"
+ fi
+
# Now that we are starting to run things that we expect could fail, we
# do not want the script to exit on any fail since this will prevent
# later tests from running.
@@ -120,7 +125,9 @@ if [ $skip_testing -eq 0 ]; then
fi
fi
- if [[ "$SCREAM_MACHINE" == "weaver" ]]; then
+ if [[ -z "$SCREAM_FAKE_ONLY" && "$SCREAM_MACHINE" == "weaver" ]]; then
+ # The fake-only tests don't launch any kernels, which would cause all
+ # the compute-sanitizer runs to fail.
./scripts/gather-all-data "./scripts/test-all-scream -t csm -t csr -t csi -t css ${TAS_ARGS}" -l -m $SCREAM_MACHINE
if [[ $? != 0 ]]; then
fails=$fails+1;
@@ -138,20 +145,6 @@ if [ $skip_testing -eq 0 ]; then
# Run scripts-tests
if [[ $test_scripts == 1 ]]; then
- # JGF: I'm not sure there's much value in these dry-run comparisons
- # since we aren't changing HEADs
- ./scripts/scripts-tests -g -m $SCREAM_MACHINE
- if [[ $? != 0 ]]; then
- fails=$fails+1;
- scripts_fail=1
- fi
-
- ./scripts/scripts-tests -c -m $SCREAM_MACHINE
- if [[ $? != 0 ]]; then
- fails=$fails+1;
- scripts_fail=1
- fi
-
./scripts/scripts-tests -f -m $SCREAM_MACHINE
if [[ $? != 0 ]]; then
fails=$fails+1;
@@ -195,7 +188,7 @@ if [ $skip_testing -eq 0 ]; then
if [[ $test_v1 == 1 ]]; then
# AT runs should be fast. => run only low resolution
- this_output=$(../../cime/scripts/create_test e3sm_scream_v1_at --compiler=gnu9 -c -b master --wait)
+ this_output=$(../../cime/scripts/create_test e3sm_scream_v1_at -c -b master --wait)
if [[ $? != 0 ]]; then
fails=$fails+1;
v1_fail=1
diff --git a/components/eamxx/scripts/jenkins/pm-gpu_setup b/components/eamxx/scripts/jenkins/pm-gpu_setup
new file mode 100644
index 000000000000..7bc04f72f9da
--- /dev/null
+++ b/components/eamxx/scripts/jenkins/pm-gpu_setup
@@ -0,0 +1,2 @@
+source /global/common/software/e3sm/anaconda_envs/load_latest_cime_env.sh
+SCREAM_MACHINE=pm-gpu
diff --git a/components/eamxx/scripts/jenkins/scratch_cleanup.py b/components/eamxx/scripts/jenkins/scratch_cleanup.py
new file mode 100755
index 000000000000..0195195a157e
--- /dev/null
+++ b/components/eamxx/scripts/jenkins/scratch_cleanup.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python3
+
+"""
+Clean up old files in the scratch area for mappy.
+"""
+
+from pathlib import Path
+import re, time, shutil, sys, argparse
+
+###############################################################################
+def parse_command_line(args, description):
+###############################################################################
+ parser = argparse.ArgumentParser(
+ usage="""\n{0} [-c HOURS]
+OR
+{0} --help
+
+\033[1mEXAMPLES:\033[0m
+ \033[1;32m# Purge files older than 20 hours \033[0m
+ > {0} -c 20
+""".format(Path(args[0]).name),
+ description=description,
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter
+ )
+
+ parser.add_argument("-c", "--cutoff", type=int, default=30, help="The cutoff age for purging in hours")
+
+ parser.add_argument("-d", "--dry-run", action="store_true", help="Do a dry run, don't actually remove files")
+
+ args = parser.parse_args(args[1:])
+
+ return args
+
+###############################################################################
+def scratch_cleanup(cutoff, dry_run):
+###############################################################################
+ scratch = Path('/ascldap/users/e3sm-jenkins/acme/scratch')
+
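+ # Build artifacts embed a YYYYMMDD_HHMMSS timestamp in their names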
+ timestamp_re = re.compile(r'.*(20[0-9]{6}_[0-9]{6}).*')
+ timestamps = set()
+ for item in scratch.iterdir():
+ basename = item.name
+ re_match = timestamp_re.match(basename)
+ if re_match:
+ timestamps.add(re_match.groups()[0])
+
+ tformat = "%Y%m%d_%H%M%S"
+ curr_time = time.time()
+
+ for timestamp in timestamps:
+ timestamp_time = time.mktime(time.strptime(timestamp, tformat))
+ age_in_hours = (curr_time - timestamp_time) / 3600
+ if age_in_hours > cutoff:
+ print(f"Timestamp {timestamp} is {age_in_hours} hours old and corresponding files will be removed")
+ files_to_remove = scratch.glob(f"*{timestamp}*")
+ for file_to_remove in files_to_remove:
+ print(f" Removing {file_to_remove}")
+ if not dry_run:
+ if file_to_remove.is_dir():
+ shutil.rmtree(file_to_remove)
+ else:
+ file_to_remove.unlink()
+
+ return True
+
+###############################################################################
+def _main_func(description):
+###############################################################################
+ success = scratch_cleanup(**vars(parse_command_line(sys.argv, description)))
+
+ sys.exit(0 if success else 1)
+
+###############################################################################
+
+if __name__ == "__main__":
+ _main_func(__doc__)
diff --git a/components/eamxx/scripts/machines_specs.py b/components/eamxx/scripts/machines_specs.py
index cd717cba6b97..df89ae7b5add 100644
--- a/components/eamxx/scripts/machines_specs.py
+++ b/components/eamxx/scripts/machines_specs.py
@@ -4,6 +4,8 @@
ensure_psutil()
import psutil
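+# Path to the cime submodule at the repo root, relative to this script's location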
+CIMEROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..","..","..","cime")
+
# MACHINE -> (env_setup, # list of shell commands to set up scream-approved env
# compilers, # list of compilers [CXX, F90, C]
# batch submit prefix, # string shell command prefix
@@ -26,22 +28,22 @@
["mpicxx","mpifort","mpicc"],
"bsub -I -q rhel8 -n 4 -gpu num=4",
"/home/projects/e3sm/scream/pr-autotester/master-baselines/weaver/"),
- "mappy" : (["module purge", "module load sems-archive-env acme-env acme-cmake/3.26.3 sems-archive-gcc/9.2.0 sems-archive-git/2.10.1 acme-openmpi/4.0.7 acme-netcdf/4.7.4/acme"],
+ "mappy" : (["module purge", "module load sems-archive-env acme-env acme-cmake/3.26.3 acme-gcc/11.2.0 sems-archive-git/2.10.1 acme-openmpi/4.1.4 acme-netcdf/4.7.4/acme", "export GATOR_INITIAL_MB=4000MB"],
["mpicxx","mpifort","mpicc"],
"",
"/sems-data-store/ACME/baselines/scream/master-baselines"),
"lassen" : (["module --force purge", "module load git gcc/8.3.1 cuda/11.8.0 cmake/3.16.8 spectrum-mpi python/3.7.2", "export LLNL_USE_OMPI_VARS='y'",
- "export PATH=/usr/gdata/climdat/netcdf/bin:$PATH",
- "export LD_LIBRARY_PATH=/usr/gdata/climdat/netcdf/lib:$LD_LIBRARY_PATH",
+ "export PATH=/usr/gdata/e3sm/netcdf/bin:$PATH",
+ "export LD_LIBRARY_PATH=/usr/gdata/e3sm/netcdf/lib:$LD_LIBRARY_PATH",
],
["mpicxx","mpifort","mpicc"],
"bsub -Ip -qpdebug",
""),
- "ruby-intel" : (["module --force purge", "module use --append /usr/gdata/climdat/install/quartz/modulefiles", "module load StdEnv cmake/3.19.2 mkl/2022.1.0 intel-classic/2021.6.0-magic mvapich2/2.3.7 hdf5/1.12.2 netcdf-c/4.9.0 netcdf-fortran/4.6.0 parallel-netcdf/1.12.3 python/3.9.12 screamML-venv/0.0.1"],
+ "ruby-intel" : (["module --force purge", "module use --append /usr/gdata/e3sm/install/quartz/modulefiles", "module load StdEnv cmake/3.19.2 mkl/2022.1.0 intel-classic/2021.6.0-magic mvapich2/2.3.7 hdf5/1.12.2 netcdf-c/4.9.0 netcdf-fortran/4.6.0 parallel-netcdf/1.12.3 python/3.9.12 screamML-venv/0.0.1"],
["mpicxx","mpifort","mpicc"],
"salloc --partition=pdebug",
""),
- "quartz-intel" : (["module --force purge", "module use --append /usr/gdata/climdat/install/quartz/modulefiles", "module load StdEnv cmake/3.19.2 mkl/2022.1.0 intel-classic/2021.6.0-magic mvapich2/2.3.7 hdf5/1.12.2 netcdf-c/4.9.0 netcdf-fortran/4.6.0 parallel-netcdf/1.12.3 python/3.9.12 screamML-venv/0.0.1"],
+ "quartz-intel" : (["module --force purge", "module use --append /usr/gdata/e3sm/install/quartz/modulefiles", "module load StdEnv cmake/3.19.2 mkl/2022.1.0 intel-classic/2021.6.0-magic mvapich2/2.3.7 hdf5/1.12.2 netcdf-c/4.9.0 netcdf-fortran/4.6.0 parallel-netcdf/1.12.3 python/3.9.12 screamML-venv/0.0.1"],
["mpicxx","mpifort","mpicc"],
"salloc --partition=pdebug",
""),
@@ -57,15 +59,19 @@
["mpicxx","mpifort","mpicc"],
"bsub -I -q batch -W 0:30 -P cli115 -nnodes 1",
"/gpfs/alpine/cli115/proj-shared/scream/master-baselines"),
- "pm-gpu" : (["module load PrgEnv-gnu gcc/10.3.0 cudatoolkit craype-accel-nvidia80 cray-libsci craype cray-mpich cray-hdf5-parallel cray-netcdf-hdf5parallel cray-parallel-netcdf cmake evp-patch","module unload craype-accel-host perftools-base perftools darshan", "export NVCC_WRAPPER_DEFAULT_COMPILER=CC", "export NVCC_WRAPPER_DEFAULT_ARCH=sm_80"],
+ "pm-cpu" : ([f"eval $({CIMEROOT}/CIME/Tools/get_case_env -c SMS.ne4pg2_ne4pg2.F2010-SCREAMv1.pm-cpu_gnu)"],
+ ["CC","ftn","cc"],
+ "salloc --time 00:30:00 --nodes=1 --constraint=cpu -q debug --account e3sm_g",
+ "/global/cfs/cdirs/e3sm/baselines/gnu/scream/pm-cpu"),
+ "pm-gpu" : ([f"eval $({CIMEROOT}/CIME/Tools/get_case_env -c SMS.ne4pg2_ne4pg2.F2010-SCREAMv1.pm-gpu_gnugpu)", "echo cuda=true"],
["CC","ftn","cc"],
- "srun --time 00:30:00 --nodes=1 --constraint=gpu --exclusive -q regular --account e3sm_g",
- ""),
+ "salloc --time 02:00:00 --nodes=4 --constraint=gpu --gpus-per-node=4 --gpu-bind=none --exclusive -q regular --account e3sm_g",
+ "/global/cfs/cdirs/e3sm/baselines/gnugpu/scream/pm-gpu"),
"compy" : (["module purge", "module load cmake/3.19.6 gcc/8.1.0 mvapich2/2.3.1 python/3.7.3"],
["mpicxx","mpifort","mpicc"],
"srun --time 02:00:00 --nodes=1 -p short --exclusive --account e3sm",
""),
- "chrysalis" : (["eval $(../../cime/CIME/Tools/get_case_env)", "export OMP_NUM_THREADS=1"],
+ "chrysalis" : ([f"eval $({CIMEROOT}/CIME/Tools/get_case_env)", "export OMP_NUM_THREADS=1"],
["mpic++","mpif90","mpicc"],
"srun --mpi=pmi2 -l -N 1 --kill-on-bad-exit --cpu_bind=cores",
"/lcrc/group/e3sm/baselines/chrys/intel/scream"),
@@ -193,10 +199,20 @@ def get_mach_testing_resources(machine):
of jobs across cores.
"""
if is_cuda_machine(machine):
- return int(run_cmd_no_fail("nvidia-smi -L | wc -l"))
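+ # Within an salloc session, run nvidia-smi through srun so it executes on the allocated compute node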
+ prefix = "srun " if is_salloc(machine) else ""
+ return int(run_cmd_no_fail(f"{prefix}nvidia-smi -L | wc -l"))
else:
return get_available_cpu_count()
+###############################################################################
+def is_salloc(machine):
+###############################################################################
+ """
+ Return True if this machine's batch command uses salloc (so commands must be launched via srun inside the allocation).
+ """
+ bcmd = get_mach_batch_command(machine)
+ return "salloc" in bcmd and "srun" not in bcmd
+
###############################################################################
def is_cuda_machine(machine):
###############################################################################
diff --git a/components/eamxx/scripts/scripts-tests b/components/eamxx/scripts/scripts-tests
index b70a697a7d55..b5af87320243 100755
--- a/components/eamxx/scripts/scripts-tests
+++ b/components/eamxx/scripts/scripts-tests
@@ -3,14 +3,7 @@
"""
Script containing python test suite for SCREAM test
infrastructure. This suite should be run to confirm overall
-correctness. You should run this test once in generation mode to
-generate baseline results using your reference commit (common
-ancestor) and once in comparison mode to compare against these
-baselines using your development commit. Baseline and compare runs
-will use dry-run modes so we are only comparing hypothetical shell
-commands, not actually running them.
-
-You can also do a full run which will actually execute the commands.
+correctness.
If you are on a batch machine, it is expected that you are on a compute node.
@@ -26,43 +19,17 @@ from machines_specs import is_machine_supported, is_cuda_machine
from git_utils import get_current_branch, get_current_commit, get_current_head, git_refs_difference, \
is_repo_clean, get_common_ancestor, checkout_git_ref, get_git_toplevel_dir
-import unittest, argparse, sys, difflib, shutil, os
+import unittest, argparse, sys, shutil, os
from pathlib import Path
# Globals
TEST_DIR = Path(__file__).resolve().parent
CONFIG = {
"machine" : None,
- "compare" : False,
- "generate" : False,
"full" : False,
"jenkins" : False
}
-###############################################################################
-def run_cmd_store_output(test_obj, cmd, output_file):
-###############################################################################
- output_file.parent.mkdir(parents=True, exist_ok=True)
- output = run_cmd_assert_result(test_obj, cmd, from_dir=TEST_DIR)
- head = get_current_head()
- output = output.replace(head, "CURRENT_HEAD_NORMALIZED")
- output_file.write_text(output)
-
-###############################################################################
-def run_cmd_check_baseline(test_obj, cmd, baseline_path):
-###############################################################################
- test_obj.assertTrue(baseline_path.is_file(), msg="Missing baseline {}".format(baseline_path))
- output = run_cmd_assert_result(test_obj, cmd, from_dir=TEST_DIR)
- head = get_current_head()
- output = output.replace(head, "CURRENT_HEAD_NORMALIZED")
- diff = difflib.unified_diff(
- baseline_path.read_text().splitlines(),
- output.splitlines(),
- fromfile=str(baseline_path),
- tofile=cmd)
- diff_output = "\n".join(diff)
- test_obj.assertEqual("", diff_output, msg=diff_output)
-
###############################################################################
def test_cmake_cache_contents(test_obj, build_name, cache_var, expected_value):
###############################################################################
@@ -150,8 +117,6 @@ class TestBaseOuter: # Hides the TestBase class from test scanner
self._source_file = source_file
self._cmds = list(cmds)
self._machine = CONFIG["machine"]
- self._compare = CONFIG["compare"]
- self._generate = CONFIG["generate"]
self._full = CONFIG["full"]
self._jenkins = CONFIG["jenkins"]
@@ -160,42 +125,20 @@ class TestBaseOuter: # Hides the TestBase class from test scanner
self._results = TEST_DIR.joinpath("results")
self._results.mkdir(parents=True, exist_ok=True) # pylint: disable=no-member
- def get_baseline(self, cmd, machine):
- return self._results.joinpath(self._source_file).with_suffix("").\
- joinpath(machine, self.get_cmd(cmd, machine, dry_run=False).translate(str.maketrans(" /='", "____")))
-
- def get_cmd(self, cmd, machine, dry_run=True):
- return "{}{}".format(cmd.replace("$machine", machine).replace("$results", str(self._results)),
- " --dry-run" if (dry_run and "--dry-run" not in cmd) else "")
+ def get_cmd(self, cmd, machine):
+ return cmd.replace("$machine", machine).replace("$results", str(self._results))
def test_doctests(self):
run_cmd_assert_result(self, "python3 -m doctest {}".format(self._source_file), from_dir=TEST_DIR)
def test_pylint(self):
ensure_pylint()
- run_cmd_assert_result(self, "python3 -m pylint --disable C --disable R {}".format(self._source_file), from_dir=TEST_DIR, verbose=True)
-
- def test_gen_baseline(self):
- if self._generate:
- for cmd in self._cmds:
- run_cmd_store_output(self, self.get_cmd(cmd, self._machine), self.get_baseline(cmd, self._machine))
- else:
- self.skipTest("Skipping dry run baseline generation")
-
- def test_cmp_baseline(self):
- if self._compare:
- for cmd in self._cmds:
- if "-p" in cmd:
- # Parallel builds can generate scrambled output. Skip them.
- continue
- run_cmd_check_baseline(self, self.get_cmd(cmd, self._machine), self.get_baseline(cmd, self._machine))
- else:
- self.skipTest("Skipping dry run baseline comparison")
+ run_cmd_assert_result(self, "python3 -m pylint --disable C --disable R {}".format(self._source_file), from_dir=TEST_DIR)
def test_full(self):
if self._full:
for cmd in self._cmds:
- run_cmd_assert_result(self, self.get_cmd(cmd, self._machine, dry_run=False), from_dir=TEST_DIR)
+ run_cmd_assert_result(self, self.get_cmd(cmd, self._machine), from_dir=TEST_DIR)
else:
self.skipTest("Skipping full run")
@@ -271,9 +214,8 @@ class TestTestAllScream(TestBaseOuter.TestBase):
###############################################################################
CMDS_TO_TEST = [
- "./test-all-scream -m $machine -b HEAD -k -p",
- "./test-all-scream -m $machine -b HEAD -k -t dbg",
- "./test-all-scream --baseline-dir $results -p -c EKAT_DISABLE_TPL_WARNINGS=ON -i -m $machine --submit --dry-run", # always dry run
+ "./test-all-scream -m $machine -p -i -c EKAT_DISABLE_TPL_WARNINGS=ON",
+ "./test-all-scream -m $machine -t dbg",
]
def __init__(self, *internal_args):
@@ -288,9 +230,9 @@ class TestTestAllScream(TestBaseOuter.TestBase):
Test the 'dbg' test in test-all-scream. It should set certain CMake values
"""
if not self._jenkins:
- options = "-b HEAD -k -t dbg --config-only"
+ options = "-t dbg --config-only"
cmd = self.get_cmd("./test-all-scream -m $machine {}".format(options),
- self._machine, dry_run=False)
+ self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
test_cmake_cache_contents(self, "full_debug", "CMAKE_BUILD_TYPE", "Debug")
test_cmake_cache_contents(self, "full_debug", "SCREAM_DOUBLE_PRECISION", "TRUE")
@@ -307,9 +249,9 @@ class TestTestAllScream(TestBaseOuter.TestBase):
Test the 'sp' test in test-all-scream. It should set certain CMake values
"""
if not self._jenkins:
- options = "-b HEAD -k -t sp --config-only"
+ options = "-t sp --config-only"
cmd = self.get_cmd("./test-all-scream -m $machine {}".format(options),
- self._machine, dry_run=False)
+ self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
test_cmake_cache_contents(self, "full_sp_debug", "CMAKE_BUILD_TYPE", "Debug")
test_cmake_cache_contents(self, "full_sp_debug", "SCREAM_DOUBLE_PRECISION", "FALSE")
@@ -329,9 +271,9 @@ class TestTestAllScream(TestBaseOuter.TestBase):
if is_cuda_machine(self._machine):
self.skipTest("Skipping FPE check on cuda")
else:
- options = "-b HEAD -k -t fpe --config-only"
+ options = "-t fpe --config-only"
cmd = self.get_cmd("./test-all-scream -m $machine {}".format(options),
- self._machine, dry_run=False)
+ self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
test_cmake_cache_contents(self, "debug_nopack_fpe", "CMAKE_BUILD_TYPE", "Debug")
test_cmake_cache_contents(self, "debug_nopack_fpe", "SCREAM_DOUBLE_PRECISION", "TRUE")
@@ -345,9 +287,9 @@ class TestTestAllScream(TestBaseOuter.TestBase):
Test the mem (default mem-check build) in test-all-scream. It should set certain CMake values
"""
if not self._jenkins:
- options = "-b HEAD -k -t mem --config-only"
+ options = "-t mem --config-only"
cmd = self.get_cmd("./test-all-scream -m $machine {}".format(options),
- self._machine, dry_run=False)
+ self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
builddir = "compute_sanitizer_memcheck" if is_cuda_machine(self._machine) else "valgrind"
test_cmake_cache_contents(self, builddir, "CMAKE_BUILD_TYPE", "Debug")
@@ -365,9 +307,9 @@ class TestTestAllScream(TestBaseOuter.TestBase):
Test the 'opt' test in test-all-scream. It should set certain CMake values
"""
if not self._jenkins:
- options = "-b HEAD -k -t opt --config-only"
+ options = "-t opt --config-only"
cmd = self.get_cmd("./test-all-scream -m $machine {}".format(options),
- self._machine, dry_run=False)
+ self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
test_cmake_cache_contents(self, "release", "CMAKE_BUILD_TYPE", "Release")
else:
@@ -379,7 +321,7 @@ class TestTestAllScream(TestBaseOuter.TestBase):
Test that the 'dbg' test in test-all-scream detects and returns non-zero if there's a cmake configure error
"""
if self._full:
- cmd = self.get_cmd("./test-all-scream -e SCREAM_FORCE_CONFIG_FAIL=True -m $machine -b HEAD -k -t dbg", self._machine, dry_run=False)
+ cmd = self.get_cmd("./test-all-scream -e SCREAM_FORCE_CONFIG_FAIL=True -m $machine -t dbg", self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR, expect_works=False)
else:
self.skipTest("Skipping full run")
@@ -389,7 +331,7 @@ class TestTestAllScream(TestBaseOuter.TestBase):
Test that the 'dbg' test in test-all-scream detects and returns non-zero if there's a build error
"""
if self._full:
- cmd = self.get_cmd("./test-all-scream -e SCREAM_FORCE_BUILD_FAIL=True -m $machine -b HEAD -k -t dbg", self._machine, dry_run=False)
+ cmd = self.get_cmd("./test-all-scream -e SCREAM_FORCE_BUILD_FAIL=True -m $machine -t dbg", self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR, expect_works=False)
else:
self.skipTest("Skipping full run")
@@ -399,7 +341,7 @@ class TestTestAllScream(TestBaseOuter.TestBase):
Test that a failure from ctest in the testing phase is caught by test-all-scream
"""
if self._full:
- cmd = self.get_cmd("./test-all-scream -e SCREAM_FORCE_RUN_FAIL=True -m $machine -b HEAD -k -t dbg", self._machine, dry_run=False)
+ cmd = self.get_cmd("./test-all-scream -e SCREAM_FORCE_RUN_FAIL=True -m $machine -t dbg", self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR, expect_works=False)
else:
self.skipTest("Skipping full run")
@@ -409,7 +351,7 @@ class TestTestAllScream(TestBaseOuter.TestBase):
Test that the at test level works
"""
if self._full:
- cmd = self.get_cmd("./test-all-scream -x -m $machine -b HEAD -k -t dbg", self._machine, dry_run=False)
+ cmd = self.get_cmd("./test-all-scream -x -m $machine -t dbg", self._machine)
output = run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
test_test_levels_were_run(self, output, ["AT"], ["NIGHTLY", "EXPERIMENTAL"])
else:
@@ -420,7 +362,7 @@ class TestTestAllScream(TestBaseOuter.TestBase):
Test that the nightly test level works
"""
if self._full:
- cmd = self.get_cmd("./test-all-scream -x --test-level=nightly -m $machine -b HEAD -k -t dbg", self._machine, dry_run=False)
+ cmd = self.get_cmd("./test-all-scream -x --test-level=nightly -m $machine -t dbg", self._machine)
output = run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
test_test_levels_were_run(self, output, ["AT", "NIGHTLY"], ["EXPERIMENTAL"])
else:
@@ -431,7 +373,7 @@ class TestTestAllScream(TestBaseOuter.TestBase):
Test that the experimental test level works
"""
if self._full:
- cmd = self.get_cmd("./test-all-scream -x --test-level=experimental -m $machine -b HEAD -k -t dbg", self._machine, dry_run=False)
+ cmd = self.get_cmd("./test-all-scream -x --test-level=experimental -m $machine -t dbg", self._machine)
output = run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
test_test_levels_were_run(self, output, ["AT", "NIGHTLY", "EXPERIMENTAL"], [])
else:
@@ -445,13 +387,12 @@ class TestTestAllScream(TestBaseOuter.TestBase):
So set SCREAM_FAKE_ONLY=ON, to lower the build time
"""
if self._full:
- baseline_dir = TEST_DIR.parent/"ctest-build"/"baselines"
- # Start a couple new tests, baselines will be generated
+ # Start a couple new tests, baselines will be generated in ctest-build/baselines
env = "SCREAM_FAKE_ONLY=ON SCREAM_FAKE_GIT_HEAD=FAKE1"
- opts = "-b HEAD -k -t dbg -t sp --no-tests"
- cmd = self.get_cmd("{} ./test-all-scream -m $machine {}".format(env,opts),
- self._machine, dry_run=False)
+ opts = " -g -t dbg -t sp --no-tests"
+ cmd = self.get_cmd(f"{env} ./test-all-scream -m $machine {opts}",
+ self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
test_baseline_has_sha(self, TEST_DIR, "full_debug", "FAKE1")
@@ -459,39 +400,42 @@ class TestTestAllScream(TestBaseOuter.TestBase):
# Re-run reusing baselines from above
env = "SCREAM_FAKE_ONLY=ON SCREAM_FAKE_GIT_HEAD=FAKE2"
- opts = "--baseline-dir={} -b HEAD -k -t dbg -t sp --no-tests".format(baseline_dir)
- cmd = self.get_cmd("{} ./test-all-scream -m $machine {}".format(env,opts),
- self._machine, dry_run=False)
+ opts = "--baseline-dir LOCAL -t dbg -t sp --no-tests"
+ cmd = self.get_cmd(f"{env} ./test-all-scream -m $machine {opts}",
+ self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
test_baseline_has_sha(self, TEST_DIR, "full_debug", "FAKE1")
test_baseline_has_sha(self, TEST_DIR, "full_sp_debug", "FAKE1")
# Re-run dbg reusing baselines from above with a fake commit that's not ahead
+ # The flag -u implies -g, but nothing should happen, since SCREAM_FAKE_AHEAD=0
env = "SCREAM_FAKE_ONLY=ON SCREAM_FAKE_AHEAD=0 SCREAM_FAKE_GIT_HEAD=FAKE2"
- opts = "--baseline-dir={} -b HEAD -k -t dbg -u --no-tests".format(baseline_dir)
- cmd = self.get_cmd("{} ./test-all-scream -m $machine {}".format(env,opts),
- self._machine, dry_run=False)
+ opts = "--baseline-dir LOCAL -t dbg -u --no-tests"
+ cmd = self.get_cmd(f"{env} ./test-all-scream -m $machine {opts}",
+ self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
test_baseline_has_sha(self, TEST_DIR, "full_debug", "FAKE1")
test_baseline_has_sha(self, TEST_DIR, "full_sp_debug", "FAKE1")
# Re-run dbg reusing baselines from above but expire them
+ # The flag -u implies -g, and since SCREAM_FAKE_AHEAD=1, baseline should be regenerated
env = "SCREAM_FAKE_ONLY=ON SCREAM_FAKE_AHEAD=1 SCREAM_FAKE_GIT_HEAD=FAKE2"
- opts = "--baseline-dir={} -b HEAD -k -t dbg -u --no-tests".format(baseline_dir)
- cmd = self.get_cmd("{} ./test-all-scream -m $machine {}".format(env,opts),
- self._machine, dry_run=False)
+ opts = "--baseline-dir LOCAL -t dbg -u --no-tests"
+ cmd = self.get_cmd(f"{env} ./test-all-scream -m $machine {opts}",
+ self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
test_baseline_has_sha(self, TEST_DIR, "full_debug", "FAKE2")
test_baseline_has_sha(self, TEST_DIR, "full_sp_debug", "FAKE1")
# Re-run reusing some baselines and expiring others
+ # The dbg baselines were generated in the previous step, so this should only generate the sp baselines
env = "SCREAM_FAKE_ONLY=ON SCREAM_FAKE_AHEAD=1 SCREAM_FAKE_GIT_HEAD=FAKE2"
- opts = "--baseline-dir={} -b HEAD -k -t dbg -t sp -u --no-tests".format(baseline_dir)
- cmd = self.get_cmd("{} ./test-all-scream -m $machine {}".format(env,opts),
- self._machine, dry_run=False)
+ opts = "--baseline-dir LOCAL -t dbg -t sp -u --no-tests"
+ cmd = self.get_cmd(f"{env} ./test-all-scream -m $machine {opts}",
+ self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
test_baseline_has_sha(self, TEST_DIR, "full_debug", "FAKE2")
@@ -499,9 +443,9 @@ class TestTestAllScream(TestBaseOuter.TestBase):
# Re-run without reusing baselines, should force regeneration
env = "SCREAM_FAKE_ONLY=ON SCREAM_FAKE_GIT_HEAD=FAKE3"
- opts = "-b HEAD -k -t dbg -t sp --no-tests"
- cmd = self.get_cmd("{} ./test-all-scream -m $machine {}".format(env,opts),
- self._machine, dry_run=False)
+ opts = "-g -t dbg -t sp --no-tests"
+ cmd = self.get_cmd(f"{env} ./test-all-scream -m $machine {opts}",
+ self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
test_baseline_has_sha(self, TEST_DIR, "full_debug", "FAKE3")
@@ -518,21 +462,13 @@ class TestTestAllScream(TestBaseOuter.TestBase):
can manage resources correctly.
"""
if self._full and self._machine == "mappy":
- spread_test_opts = "-x -e SCREAM_TEST_THREAD_SPREAD=True -c EKAT_TEST_LAUNCHER_MANAGE_RESOURCES=True -c EKAT_MPIRUN_EXE=mpiexec -c EKAT_MPI_EXTRA_ARGS='-bind-to core' -c EKAT_MPI_NP_FLAG='--map-by' -c EKAT_MPI_THREAD_FLAG='' -m $machine -b HEAD -k -t dbg"
+ spread_test_opts = "-x -e SCREAM_TEST_THREAD_SPREAD=True -e SCREAM_TEST_RANK_SPREAD=True -c EKAT_TEST_LAUNCHER_MANAGE_RESOURCES=True -c EKAT_MPIRUN_EXE=mpiexec -c EKAT_MPI_EXTRA_ARGS='-bind-to core' -c EKAT_MPI_NP_FLAG='--map-by' -c EKAT_MPI_THREAD_FLAG='' -m $machine -t dbg"
- cmd = self.get_cmd(f"./test-all-scream {spread_test_opts} --ctest-parallel-level=40 ", self._machine, dry_run=False)
+ cmd = self.get_cmd(f"./test-all-scream {spread_test_opts} --ctest-parallel-level=40 ", self._machine)
output = run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
test_omp_spread(self, output, 40)
- cmd = self.get_cmd(f"./test-all-scream {spread_test_opts} --ctest-parallel-level=40 ", self._machine, dry_run=False)
- output = run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
- test_omp_spread(self, output, 40)
-
- cmd = self.get_cmd(f"taskset -c 8-47 ./test-all-scream {spread_test_opts} --ctest-parallel-level=40 ", self._machine, dry_run=False)
- output = run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
- test_omp_spread(self, output, 48, begin=8)
-
- cmd = self.get_cmd(f"taskset -c 8-47 ./test-all-scream {spread_test_opts} --ctest-parallel-level=40 ", self._machine, dry_run=False)
+ cmd = self.get_cmd(f"taskset -c 8-47 ./test-all-scream {spread_test_opts} --ctest-parallel-level=40 ", self._machine)
output = run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
test_omp_spread(self, output, 48, begin=8)
@@ -547,7 +483,7 @@ class TestTestAllScream(TestBaseOuter.TestBase):
# We set PULLREQUESTNUM to block dashboard submission
# We set SCREAM_FAKE_AUTO to not interfere with real baselines
cmd = self.get_cmd("PR_LABELS= NODE_NAME={} SCREAM_FAKE_AUTO=TRUE PULLREQUESTNUM=42 ./jenkins/jenkins_common.sh".format(self._machine),
- self._machine, dry_run=False)
+ self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
else:
@@ -561,7 +497,7 @@ class TestTestAllScream(TestBaseOuter.TestBase):
# We set PULLREQUESTNUM to block dashboard submission
# We set SCREAM_FAKE_AUTO to not interfere with real baselines
cmd = self.get_cmd("PR_LABELS= NODE_NAME={} SCREAM_FAKE_AUTO=TRUE PULLREQUESTNUM= ./jenkins/jenkins_common.sh".format(self._machine),
- self._machine, dry_run=False)
+ self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR)
else:
@@ -574,7 +510,7 @@ class TestTestAllScream(TestBaseOuter.TestBase):
if self._jenkins:
# Any fail will do, we already checked test-all-scream captures all the fail types
cmd = self.get_cmd("PR_LABELS= SCREAM_FORCE_CONFIG_FAIL=True NODE_NAME={} SCREAM_FAKE_AUTO=TRUE PULLREQUESTNUM=42 ./jenkins/jenkins_common.sh".format(self._machine),
- self._machine, dry_run=False)
+ self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR, expect_works=False)
else:
@@ -587,7 +523,7 @@ class TestTestAllScream(TestBaseOuter.TestBase):
if self._jenkins:
# Any fail will do, we already checked test-all-scream captures all the fail types
cmd = self.get_cmd("PR_LABELS= SCREAM_FORCE_CONFIG_FAIL=True NODE_NAME={} SCREAM_FAKE_AUTO=TRUE PULLREQUESTNUM= ./jenkins/jenkins_common.sh".format(self._machine),
- self._machine, dry_run=False)
+ self._machine)
run_cmd_assert_result(self, cmd, from_dir=TEST_DIR, expect_works=False)
else:
@@ -598,7 +534,7 @@ class TestGatherAllData(TestBaseOuter.TestBase):
###############################################################################
CMDS_TO_TEST = [
- "./gather-all-data './scripts/test-all-scream -m $machine -b HEAD -k' -l -m $machine",
+ "./gather-all-data 'echo -m $machine' -l -m $machine",
]
def __init__(self, *internal_args):
@@ -637,27 +573,10 @@ OR
\033[1;32m# Run pylint tests for test_all_scream \033[0m
> {0} TestTestAllScream.test_pylint
- \033[1;32m# Do a dry-run generation for test_all_scream \033[0m
- > {0} -g TestTestAllScream -m $machine
-
- \033[1;32m# Do a dry-run comparison for test_all_scream \033[0m
- > {0} -c TestTestAllScream -m $machine
-
\033[1;32m# Do a full test run of test_all_scream \033[0m
> {0} -f -m $machine TestTestAllScream
- \033[1;32m# Do a full test run of everything \033[0m
- > {0} -f -m $machine
-
- \033[1;32m# Do a dry-run generation for everything \033[0m
- > {0} -g -m $machine
-
- \033[1;32m# Do a dry-run comparison for comparison \033[0m
- > {0} -c -m $machine
-
\033[1;32m# Run every possible test. This should be done before a PR is issued \033[0m
- > {0} -g -m $machine # You likely want to do this for a reference commit
- > {0} -c -m $machine
> {0} -f -m $machine
\033[1;32m# Test Jenkins script \033[0m
@@ -673,12 +592,6 @@ OR
parser.add_argument("-m", "--machine",
help="Provide machine name. This is required for full (not dry) runs")
- parser.add_argument("-g", "--generate", action="store_true",
- help="Do a dry run with baseline generation")
-
- parser.add_argument("-c", "--compare", action="store_true",
- help="Do a dry run with baseline comparison")
-
parser.add_argument("-f", "--full", action="store_true",
help="Do a full (not dry) run")
@@ -691,7 +604,7 @@ OR
return args
###############################################################################
-def scripts_tests(machine=None, generate=False, compare=False, full=False, jenkins=False):
+def scripts_tests(machine=None, full=False, jenkins=False):
###############################################################################
os.environ["SCREAM_FAKE_ONLY"] = "True"
@@ -700,9 +613,6 @@ def scripts_tests(machine=None, generate=False, compare=False, full=False, jenki
expect(is_machine_supported(machine), "Machine {} is not supported".format(machine))
CONFIG["machine"] = machine
- expect(not (generate and compare), "Cannot do generate and compare in the same run")
- CONFIG["compare"] = compare
- CONFIG["generate"] = generate
CONFIG["jenkins"] = jenkins
if full:
diff --git a/components/eamxx/scripts/test-all-scream b/components/eamxx/scripts/test-all-scream
index 4a6d0710b4e3..bb55ad23c216 100755
--- a/components/eamxx/scripts/test-all-scream
+++ b/components/eamxx/scripts/test-all-scream
@@ -16,6 +16,7 @@ check_minimum_python_version(3, 4)
import argparse, sys, pathlib
+from test_factory import get_test_name_dict
from test_all_scream import TestAllScream
###############################################################################
@@ -35,10 +36,6 @@ OR
> cd $scream_repo/components/eamxx
> ./scripts/{0} --preserve-env -m melvin
- \033[1;32m# Run all tests on current machine with default behavior except using a custom ref for baseline generation\033[0m
- > cd $scream_repo/components/eamxx
- > ./scripts/{0} -m melvin -b BASELINE_REF
-
\033[1;32m# Run all tests on current machine with default behavior except using pre-existing baselines (skips baseline generation) \033[0m
> cd $scream_repo/components/eamxx
> ./scripts/{0} -m melvin --baseline-dir=PATH_TO_BASELINES
@@ -49,7 +46,7 @@ OR
\033[1;32m# Run all tests on current machine with default behavior on a repo with uncommitted changes\033[0m
> cd $scream_repo/components/eamxx
- > ./scripts/{0} -m melvin -k -b HEAD
+ > ./scripts/{0} -m melvin -b AUTO
""".format(pathlib.Path(args[0]).name),
description=description,
formatter_class=argparse.ArgumentDefaultsHelpFormatter
@@ -63,17 +60,14 @@ OR
parser.add_argument("-p", "--parallel", action="store_true",
help="Launch the different build types stacks in parallel")
- parser.add_argument("-f", "--fast-fail", action="store_true",
- help="Stop testing when the first failure is detected")
-
- parser.add_argument("-b", "--baseline-ref", default=None, # default will be computed later
- help="What commit to use to generate baselines. Default is merge-base of current commit and origin/master (or HEAD if --keep-tree)")
+ parser.add_argument("-g", "--generate", action="store_true",
+ help="Instruct test-all-scream to generate baselines from current commit. Skips tests")
- parser.add_argument("--baseline-dir", default=None,
- help="Use baselines from the given directory, skip baseline creation.")
+ parser.add_argument("-b", "--baseline-dir", default=None,
+ help="Directory where baselines should be read from (or written to, if -g/-i is used)")
parser.add_argument("-u", "--update-expired-baselines", action="store_true",
- help="Update baselines that appear to be expired.")
+ help="Update baselines that appear to be expired (only used with -g)")
parser.add_argument("-m", "--machine",
help="Provide machine name. This is *always* required. It can, but does not"
@@ -86,9 +80,6 @@ OR
parser.add_argument("--config-only", action="store_true",
help="In the testing phase, only run config step, skip build and tests")
- parser.add_argument("-k", "--keep-tree", action="store_true",
- help="Allow to keep the current work tree when testing against HEAD (only valid with `-b HEAD`)")
-
parser.add_argument("-c", "--custom-cmake-opts", action="append", default=[],
help="Extra custom options to pass to cmake. Can use multiple times for multiple cmake options. The -D is added for you")
@@ -100,12 +91,12 @@ OR
parser.add_argument("--preserve-env", action="store_true",
help="Whether to skip machine env setup, and preserve the current user env (useful to manually test new modules)")
- choices_doc = ", ".join(["'{}' ({})".format(k, v) for k, v in TestAllScream.get_test_name_desc().items()])
+ choices_doc = ", ".join(["'{}' ({})".format(k, v) for k, v in get_test_name_dict().items()])
parser.add_argument("-t", "--test", dest="tests", action="append", default=[],
help=f"Only run specific test configurations, choices={choices_doc}")
parser.add_argument("-i", "--integration-test", action="store_true",
- help="Merge origin/master into this branch before testing.")
+ help="Merge origin/master into this branch before testing (implies -u).")
parser.add_argument("-l", "--local", action="store_true",
help="Allow to not specify a machine name, and have test-all-scream to look"
@@ -123,9 +114,6 @@ OR
parser.add_argument("--quick-rerun-failed", action="store_true",
help="Do not clean the build dir, and do not reconfigure. Just (incremental) build and retest failed tests only.")
- parser.add_argument("-d", "--dry-run", action="store_true",
- help="Do a dry run, commands will be printed but not executed")
-
parser.add_argument("--make-parallel-level", action="store", type=int, default=0,
help="Max number of jobs to be created during compilation. If not provided, use default for given machine.")
diff --git a/components/eamxx/scripts/test_all_scream.py b/components/eamxx/scripts/test_all_scream.py
index dfade8beda0d..ffbe70e94f68 100644
--- a/components/eamxx/scripts/test_all_scream.py
+++ b/components/eamxx/scripts/test_all_scream.py
@@ -1,6 +1,10 @@
-from utils import run_cmd, run_cmd_no_fail, expect, check_minimum_python_version, ensure_psutil
+from utils import run_cmd, run_cmd_no_fail, expect, check_minimum_python_version, ensure_psutil, \
+ SharedArea, safe_copy
from git_utils import get_current_head, get_current_commit, get_current_branch, is_repo_clean, \
- cleanup_repo, merge_git_ref, checkout_git_ref, git_refs_difference, print_last_commit
+ cleanup_repo, merge_git_ref, git_refs_difference, print_last_commit, \
+ create_backup_commit, checkout_git_ref
+
+from test_factory import create_tests, COV
from machines_specs import get_mach_compilation_resources, get_mach_testing_resources, \
get_mach_baseline_root_dir, setup_mach_env, is_cuda_machine, \
@@ -18,270 +22,19 @@
import psutil
import re
-from collections import OrderedDict
from pathlib import Path
-###############################################################################
-class TestProperty(object):
-###############################################################################
-
- """
- Parent class of predefined test types for SCREAM standalone. test-all-scream
- offers a number of customization points, but you may need to just use
- cmake if you need maximal customization. You can run test-all-scream --dry-run
- to get the corresponding cmake command which can then be used as a starting
- point for making your own cmake command.
- """
-
- def __init__(self, longname, description, cmake_args,
- uses_baselines=True, on_by_default=True, default_test_len=None):
- # What the user uses to select tests via test-all-scream CLI.
- # Should also match the class name when converted to caps
- self.shortname = type(self).__name__.lower()
-
- # A longer name used to name baseline and test directories for a test.
- # Also used in output/error messages to refer to the test
- self.longname = longname
-
- # A longer decription of the test
- self.description = description
-
- # Cmake config args for this test. Check that quoting is done with
- # single quotes.
- self.cmake_args = cmake_args
- for name, arg in self.cmake_args:
- expect('"' not in arg,
- f"In test definition for {longname}, found cmake args with double quotes {name}='{arg}'"
- "Please use single quotes if quotes are needed.")
-
- # Does the test do baseline testing
- self.uses_baselines = uses_baselines
-
- # Should this test be run if the user did not specify tests at all?
- self.on_by_default = on_by_default
-
- # Should this test have a default test size
- self.default_test_len = default_test_len
-
- #
- # Properties not set by constructor (Set by the main TestAllScream object)
- #
-
- # Resources used by this test.
- self.compile_res_count = None
- self.testing_res_count = None
-
- # Does this test need baselines
- self.missing_baselines = False
-
- #
- # Common
- #
-
- if not self.uses_baselines:
- self.cmake_args += [("SCREAM_ENABLE_BASELINE_TESTS", "False")]
-
- def disable_baselines(self):
- if self.uses_baselines:
- self.uses_baselines = False
- self.cmake_args += [("SCREAM_ENABLE_BASELINE_TESTS", "False")]
-
- # Tests will generally be referred to via their longname
- def __str__(self):
- return self.longname
-
-###############################################################################
-class DBG(TestProperty):
-###############################################################################
-
- CMAKE_ARGS = [("CMAKE_BUILD_TYPE", "Debug"), ("EKAT_DEFAULT_BFB", "True")]
-
- def __init__(self, _):
- TestProperty.__init__(
- self,
- "full_debug",
- "debug",
- self.CMAKE_ARGS,
- )
-
-###############################################################################
-class SP(TestProperty):
-###############################################################################
-
- def __init__(self, _):
- TestProperty.__init__(
- self,
- "full_sp_debug",
- "debug single precision",
- DBG.CMAKE_ARGS + [("SCREAM_DOUBLE_PRECISION", "False")],
- )
-
-###############################################################################
-class FPE(TestProperty):
-###############################################################################
-
- def __init__(self, tas):
- TestProperty.__init__(
- self,
- "debug_nopack_fpe",
- "debug pksize=1 floating point exceptions on",
- DBG.CMAKE_ARGS + [("SCREAM_PACK_SIZE", "1"), ("SCREAM_FPE","True")],
- uses_baselines=False,
- on_by_default=(tas is not None and not tas.on_cuda())
- )
-
-###############################################################################
-class OPT(TestProperty):
-###############################################################################
-
- def __init__(self, _):
- TestProperty.__init__(
- self,
- "release",
- "release",
- [("CMAKE_BUILD_TYPE", "Release")],
- )
-
-###############################################################################
-class COV(TestProperty):
-###############################################################################
-
- def __init__(self, _):
- TestProperty.__init__(
- self,
- "coverage",
- "debug coverage",
- [("CMAKE_BUILD_TYPE", "Debug"), ("EKAT_ENABLE_COVERAGE", "True")],
- uses_baselines=False,
- on_by_default=False,
- default_test_len="short"
- )
-
-###############################################################################
-class VALG(TestProperty):
-###############################################################################
-
- def __init__(self, tas):
- TestProperty.__init__(
- self,
- "valgrind",
- "debug with valgrind",
- [("CMAKE_BUILD_TYPE", "Debug"), ("EKAT_ENABLE_VALGRIND", "True")],
- uses_baselines=False,
- on_by_default=False,
- default_test_len="short"
- )
- if tas is not None:
- # If a stored suppression file exists for this machine, use it
- persistent_supp_file = tas.get_root_dir() / "scripts" / "jenkins" / "valgrind" / f"{tas.get_machine()}.supp"
- if persistent_supp_file.exists():
- self.cmake_args.append( ("EKAT_VALGRIND_SUPPRESSION_FILE", str(persistent_supp_file)) )
-
-###############################################################################
-class CSM(TestProperty):
-###############################################################################
-
- def __init__(self, _):
- TestProperty.__init__(
- self,
- "compute_sanitizer_memcheck",
- "debug with compute sanitizer memcheck",
- [("CMAKE_BUILD_TYPE", "Debug"),
- ("EKAT_ENABLE_COMPUTE_SANITIZER", "True"),
- ("EKAT_COMPUTE_SANITIZER_OPTIONS", "--tool=memcheck")],
- uses_baselines=False,
- on_by_default=False,
- default_test_len="short"
- )
-
-###############################################################################
-class CSR(TestProperty):
-###############################################################################
-
- def __init__(self, _):
- TestProperty.__init__(
- self,
- "compute_sanitizer_racecheck",
- "debug with compute sanitizer racecheck",
- [("CMAKE_BUILD_TYPE", "Debug"),
- ("EKAT_ENABLE_COMPUTE_SANITIZER", "True"),
- ("EKAT_COMPUTE_SANITIZER_OPTIONS", "'--tool=racecheck --racecheck-detect-level=error'")],
- uses_baselines=False,
- on_by_default=False,
- default_test_len="short"
- )
-
-###############################################################################
-class CSI(TestProperty):
-###############################################################################
-
- def __init__(self, _):
- TestProperty.__init__(
- self,
- "compute_sanitizer_initcheck",
- "debug with compute sanitizer initcheck",
- [("CMAKE_BUILD_TYPE", "Debug"),
- ("EKAT_ENABLE_COMPUTE_SANITIZER", "True"),
- ("EKAT_COMPUTE_SANITIZER_OPTIONS", "--tool=initcheck")],
- uses_baselines=False,
- on_by_default=False,
- default_test_len="short"
- )
-
-###############################################################################
-class CSS(TestProperty):
-###############################################################################
-
- def __init__(self, _):
- TestProperty.__init__(
- self,
- "compute_sanitizer_synccheck",
- "debug with compute sanitizer synccheck",
- [("CMAKE_BUILD_TYPE", "Debug"),
- ("EKAT_ENABLE_COMPUTE_SANITIZER", "True"),
- ("EKAT_COMPUTE_SANITIZER_OPTIONS", "--tool=synccheck")],
- uses_baselines=False,
- on_by_default=False,
- default_test_len="short"
- )
-
-###############################################################################
-def test_factory(user_req_tests, tas):
-###############################################################################
- testclasses = TestProperty.__subclasses__()
- if not user_req_tests:
- result = [testclass(tas) for testclass in testclasses
- if testclass(tas).on_by_default]
- else:
- valid_names = [testclass(tas).shortname for testclass in testclasses]
- for user_req_test in user_req_tests:
- expect(user_req_test in valid_names, f"'{user_req_test}' is not a known test")
-
- result = [testclass(tas) for testclass in testclasses if testclass(tas).shortname in user_req_tests]
-
- return result
-
###############################################################################
class TestAllScream(object):
###############################################################################
- ###########################################################################
- @classmethod
- def get_test_name_desc(cls):
- ###########################################################################
- """
- Returns a dict mapping short test names to full names
- """
- testclasses = TestProperty.__subclasses__()
- return OrderedDict([(testc(None).shortname, testc(None).description) for testc in testclasses])
-
###########################################################################
def __init__(self, cxx_compiler=None, f90_compiler=None, c_compiler=None,
- submit=False, parallel=False, fast_fail=False,
- baseline_ref=None, baseline_dir=None, machine=None, no_tests=False, config_only=False, keep_tree=False,
+ submit=False, parallel=False, generate=False, no_tests=False,
+ baseline_dir=None, machine=None, config_only=False,
custom_cmake_opts=(), custom_env_vars=(), preserve_env=False, tests=(),
integration_test=False, local=False, root_dir=None, work_dir=None,
- quick_rerun=False,quick_rerun_failed=False,dry_run=False,
+ quick_rerun=False,quick_rerun_failed=False,
make_parallel_level=0, ctest_parallel_level=0, update_expired_baselines=False,
extra_verbose=False, limit_test_regex=None, test_level="at", test_size=None,
force_baseline_regen=False):
@@ -299,13 +52,10 @@ def __init__(self, cxx_compiler=None, f90_compiler=None, c_compiler=None,
self._c_compiler = c_compiler
self._submit = submit
self._parallel = parallel
- self._fast_fail = fast_fail
- self._baseline_ref = baseline_ref
self._machine = machine
self._local = local
- self._perform_tests = not no_tests
+ self._run_tests = not no_tests
self._config_only = config_only
- self._keep_tree = keep_tree
self._baseline_dir = baseline_dir
self._custom_cmake_opts = custom_cmake_opts
self._custom_env_vars = custom_env_vars
@@ -315,15 +65,16 @@ def __init__(self, cxx_compiler=None, f90_compiler=None, c_compiler=None,
self._integration_test = integration_test
self._quick_rerun = quick_rerun
self._quick_rerun_failed = quick_rerun_failed
- self._dry_run = dry_run
- self._update_expired_baselines= update_expired_baselines
self._extra_verbose = extra_verbose
self._limit_test_regex = limit_test_regex
self._test_level = test_level
self._test_size = test_size
self._force_baseline_regen = force_baseline_regen
-
- # Not all builds are ment to perform comparisons against pre-built baselines
+ # Integration test always updates expired baselines
+ self._update_expired_baselines = update_expired_baselines or self._integration_test or self._force_baseline_regen
+ # If we are to update expired baselines, then we must run the generate phase
+ # NOTE: the gen phase will do nothing if baselines are present and not expired
+ self._generate = generate or self._update_expired_baselines
if self._quick_rerun_failed:
self._quick_rerun = True
@@ -355,30 +106,22 @@ def __init__(self, cxx_compiler=None, f90_compiler=None, c_compiler=None,
self._root_dir = Path(__file__).resolve().parent.parent
else:
self._root_dir = Path(self._root_dir).resolve()
- expect(self._root_dir.is_dir() and self._root_dir.parts()[-2:] == ("scream", "components"),
- f"Bad root-dir '{self._root_dir}', should be: $scream_repo/components/eamxx")
+ expect(self._root_dir.is_dir() and list(self._root_dir.parts)[-2:] == ["components","eamxx"],
+ f"Bad root-dir '{self._root_dir}', should end with: /components/eamxx")
# Make our test objects! Change mem to default mem-check test for current platform
if "mem" in tests:
tests[tests.index("mem")] = "csm" if self.on_cuda() else "valg"
- self._tests = test_factory(tests, self)
+ self._tests = create_tests(tests, self)
if self._work_dir is not None:
self._work_dir = Path(self._work_dir).absolute()
- expect(self._work_dir.is_dir(),
- f"Error! Work directory '{self._work_dir}' does not exist.")
else:
self._work_dir = self._root_dir.absolute().joinpath("ctest-build")
- self._work_dir.mkdir(exist_ok=True)
- os.chdir(str(self._root_dir)) # needed, or else every git command will need repo=root_dir
- expect(get_current_commit(), f"Root dir: {self._root_dir}, does not appear to be a git repo")
-
- # Print some info on the branch
- self._original_branch = get_current_branch()
- self._original_commit = get_current_commit()
+ self._work_dir.mkdir(parents=True, exist_ok=True)
- print_last_commit(git_ref=self._original_branch, dry_run=self._dry_run)
+ os.chdir(str(self._root_dir)) # needed, or else every git command will need repo=root_dir
###################################
# Compilation/testing resources #
@@ -445,80 +188,84 @@ def __init__(self, cxx_compiler=None, f90_compiler=None, c_compiler=None,
# Setup the env on this machine
setup_mach_env(self._machine, ctest_j=ctest_max_jobs)
+ ############################################
+ # Check repo status #
+ ############################################
+
+ expect(get_current_commit(), f"Root dir: {self._root_dir}, does not appear to be a git repo")
+
+ # Get git status info. Besides printing this info, we will need it to restore the repo initial
+ # configuration if we are running an integration test (where baselines need to be created
+ # from the origin/master commit)
+ self._original_branch = get_current_branch()
+ self._original_commit = get_current_commit()
+
+ print_last_commit(git_ref=self._original_branch)
+
+ # If we have an integration test, we need to merge master. Hence, do two things:
+ # 1) create bkp commit for all uncommitted/unstaged changes
+ # 2) save commit, so we can undo the merge after testing
+ self._has_backup_commit = False
+ if self._integration_test:
+ if not is_repo_clean():
+ # Back up work in a temporary commit
+ create_backup_commit()
+ self._has_backup_commit = True
+
+ self._original_commit = get_current_commit()
+
###################################
# Compute baseline info #
###################################
expect (not self._baseline_dir or self._work_dir != self._baseline_dir,
- f"Error! For your safety, do NOT use '{self._work_dir}' to store baselines. Move them to a different directory (even a subdirectory if that works).")
-
- # If no baseline ref/dir was provided, use default master baseline dir for this machine
- # NOTE: if user specifies baseline ref, baseline dir will be set later to a path within work dir
- if self._baseline_dir is None and self._baseline_ref is None:
- self._baseline_dir = "AUTO"
- print ("No '--baseline-dir XYZ' nor '-b XYZ' provided. Testing against default baselines dir for this machine.")
-
- # If -k was used, make sure it's allowed
- if self._keep_tree:
- expect(not self._integration_test, "Should not be doing keep-tree with integration testing")
- print("WARNING! You have uncommitted changes in your repo.",
- " The PASS/FAIL status may depend on these changes",
- " so if you want to keep them, don't forget to create a commit.",sep="\n")
- if self._baseline_dir is None:
- # Make sure the baseline ref is HEAD
- expect(self._baseline_ref == "HEAD",
- "The option --keep-tree is only available when testing against pre-built baselines "
- "(--baseline-dir) or HEAD (-b HEAD)")
+ f"Error! For your safety, do NOT use '{self._work_dir}' (the work_dir) to store baselines. Move them to a different directory (even a subdirectory if that works).")
+
+ # These two dirs are special: one for "on-the-fly baselines", one for the machine's official baselines
+ local_baseline_dir = self._work_dir/"baselines"
+ auto_dir = Path(get_mach_baseline_root_dir(self._machine)).absolute()
+ # Handle the "fake" auto case, used in scripts tests
+ if "SCREAM_FAKE_AUTO" in os.environ:
+ auto_dir = auto_dir / "fake"
+
+ if self._baseline_dir == "LOCAL":
+ self._baseline_dir = local_baseline_dir
+ elif self._baseline_dir == "AUTO":
+ self._baseline_dir = auto_dir
+ elif self._baseline_dir is None:
+ if self._generate and not self._integration_test:
+ print ("No '--baseline-dir XYZ' provided. Baselines will be generated in {local_baseline_dir}.")
+ print ("NOTE: test-all-scream will proceed as if --force-baseline-regen was passed")
+ self._baseline_dir = local_baseline_dir
+ self._force_baseline_regen = True
+ self._update_expired_baselines = True
else:
- # Make sure the baseline ref is unset (or HEAD)
- expect(self._baseline_ref is None or self._baseline_ref == "HEAD",
- "The option --keep-tree is only available when testing against pre-built baselines "
- "(--baseline-dir) or HEAD (-b HEAD)")
- else:
- expect(self._dry_run or is_repo_clean(),
- "Repo must be clean before running. If testing against HEAD or pre-built baselines, "
- "you can pass `--keep-tree` to allow non-clean repo.")
+ print ("No '--baseline-dir XYZ' provided. Testing against default baselines dir for this machine.")
+ self._baseline_dir = auto_dir
- # For integration test, enforce baseline_ref==origin/master, and proceed to merge origin/master
- if self._integration_test:
- expect (self._baseline_ref is None or self._baseline_ref=="origin/master",
- "Error! Integration tests cannot be done against an arbitrary baseline ref.")
+ self._baseline_dir = Path(self._baseline_dir).absolute()
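+ # To summarize the resolution above (a sketch):
+ #   --baseline-dir LOCAL      -> <work_dir>/baselines
+ #   --baseline-dir AUTO       -> machine's official baselines dir (auto_dir)
+ #   unset, with -g (no -i)    -> <work_dir>/baselines, forcing regeneration
+ #   unset, otherwise          -> machine's official baselines dir (auto_dir)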
- # Set baseline ref and merge it
- self._baseline_ref = "origin/master"
- merge_git_ref(git_ref=self._baseline_ref, verbose=True, dry_run=self._dry_run)
+ # Only integration tests (or --force-baseline-regen) can overwrite the mach-specific baselines
+ if self._baseline_dir==auto_dir:
+ expect (not self._generate or self._integration_test or self._force_baseline_regen,
+ "You are not allowed to overwrite baselines in AUTO dir folder. Only -i and --force-baseline-regen can do that\n"
+ f" AUTO dir: {auto_dir}")
- # Always update expired baselines if this is an integration test
- self._update_expired_baselines = True
+ # Make the baseline dir, if not already existing.
+ if self._generate:
+ self.create_tests_dirs(self._baseline_dir, clean=False)
- # By now, we should have at least one between baseline_dir and baseline_ref set (possibly both)
- default_baselines_root_dir = self._work_dir/"baselines"
- if self._baseline_dir is None:
- # Use default baseline dir, and create it if necessary
- self._baseline_dir = Path(default_baselines_root_dir).absolute()
- self.create_tests_dirs(self._baseline_dir, True) # Wipe out previous baselines
+ # Baselines are generated from origin/master for integration tests, and from the original commit otherwise
+ self._baseline_ref = "origin/master" if self._integration_test else self._original_commit
- else:
- if self._baseline_dir == "AUTO":
- expect (self._baseline_ref is None or self._baseline_ref == "origin/master",
- "Do not specify `-b XYZ` when using `--baseline-dir AUTO`. The AUTO baseline dir should be used for the master baselines only.\n"
- " `-b XYZ` needs to probably build baselines for ref XYZ. However, no baselines will be built if the dir already contains baselines.\n")
- # We treat the "AUTO" string as a request for automatic baseline dir.
- auto_dir = get_mach_baseline_root_dir(self._machine)
- self._baseline_dir = Path(auto_dir) if auto_dir else default_baselines_root_dir
- if "SCREAM_FAKE_AUTO" in os.environ:
- self._baseline_dir = self._baseline_dir/"fake"
- else:
- self._baseline_dir = Path(self._baseline_dir).absolute()
-
- # Make sure the baseline folders exist (but do not purge content if they exist)
- self.create_tests_dirs(self._baseline_dir, False)
-
- # Do not do baseline operations if mem checking is on
+ # Check baselines status
print (f"Checking baselines directory: {self._baseline_dir}")
- self.baselines_are_present()
+ missing_baselines = self.check_baselines_are_present()
+ expect (len(missing_baselines)==0 or self._generate,
+ f"Missing baselines for builds {missing_baselines}. Re-run with -g to generate them")
+
if self._update_expired_baselines:
- self.baselines_are_expired()
+ self.check_baselines_are_expired()
############################################
# Deduce compilers if needed/possible #
@@ -531,16 +278,11 @@ def __init__(self, cxx_compiler=None, f90_compiler=None, c_compiler=None,
if self._c_compiler is None:
self._c_compiler = get_mach_c_compiler(self._machine)
- if not self._dry_run:
- self._f90_compiler = run_cmd_no_fail(f"which {self._f90_compiler}")
- self._cxx_compiler = run_cmd_no_fail(f"which {self._cxx_compiler}")
- self._c_compiler = run_cmd_no_fail(f"which {self._c_compiler}")
-
###############################################################################
def create_tests_dirs(self, root, clean):
###############################################################################
- # Make sure the baseline root directory exists
+ # Make sure the tests root directory exists
root.mkdir(parents=True,exist_ok=True)
# Create build directories (one per test)
@@ -553,9 +295,11 @@ def create_tests_dirs(self, root, clean):
# TypeError: lstat: illegal type for path parameter
shutil.rmtree(str(test_dir))
- # Create this baseline's build dir
- if not test_dir.exists():
- test_dir.mkdir(parents=True)
+ # Create this build type's build dir (if not already existing)
+ test_dir.mkdir(parents=True,exist_ok=True)
+
+ # Create the 'data' subdir (if not already existing)
+ (test_dir / "data").mkdir(parents=False,exist_ok=True)
###############################################################################
def get_baseline_file_sha(self, test):
@@ -568,8 +312,9 @@ def get_baseline_file_sha(self, test):
return None
###############################################################################
- def set_baseline_file_sha(self, test, sha):
+ def set_baseline_file_sha(self, test):
###############################################################################
+ sha = get_current_commit()
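+ # (The sha lands in <baseline_dir>/<test>/baseline_git_sha, next to the test's 'data' subdir)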
baseline_file = (self.get_preexisting_baseline(test).parent)/"baseline_git_sha"
with baseline_file.open("w", encoding="utf-8") as fd:
return fd.write(sha)
@@ -601,7 +346,7 @@ def get_preexisting_baseline(self, test):
return self._baseline_dir/str(test)/"data"
###############################################################################
- def baselines_are_present(self):
+ def check_baselines_are_present(self):
###############################################################################
"""
Check that all baselines are present (one subdir for all values of self._tests)
@@ -611,19 +356,23 @@ def baselines_are_present(self):
expect(self._baseline_dir is not None,
"Error! Baseline directory not correctly set.")
+ missing = []
for test in self._tests:
if test.uses_baselines:
data_dir = self.get_preexisting_baseline(test)
if not data_dir.is_dir():
- test.missing_baselines = True
+ test.baselines_missing = True
+ missing += [test.longname]
print(f" -> Test {test} is missing baselines")
else:
print(f" -> Test {test} appears to have baselines")
else:
print(f" -> Test {test} does not use baselines")
+ return missing
+
###############################################################################
- def baselines_are_expired(self):
+ def check_baselines_are_expired(self):
###############################################################################
"""
Baselines are expired if either:
@@ -632,40 +381,50 @@ def baselines_are_expired(self):
"""
baseline_ref_sha = get_current_commit(commit=self._baseline_ref)
- # Sanity check
- expect(self._baseline_dir is not None, "Error! This routine should only be called when testing against pre-existing baselines.")
-
for test in self._tests:
- if test.uses_baselines and not test.missing_baselines:
- # this test is not missing a baseline, but it may be expired.
-
- baseline_file_sha = self.get_baseline_file_sha(test)
- if baseline_file_sha is None:
- test.missing_baselines = True
- print(f" -> Test {test} has no stored sha so must be considered expired")
- else:
- num_ref_is_behind_file, num_ref_is_ahead_file = git_refs_difference(baseline_file_sha, baseline_ref_sha)
-
- # If the copy in our repo is behind, then we need to update the repo
- expect (num_ref_is_behind_file==0 or not self._integration_test,
+ if not test.uses_baselines or test.baselines_missing:
+ continue
+
+ if self._force_baseline_regen:
+ test.baselines_expired = True
+ print(f" -> Test {test} baselines are expired because self._force_baseline_regen=True")
+ continue
+
+ # this test is not missing a baseline, but it may be expired.
+ baseline_file_sha = self.get_baseline_file_sha(test)
+ if baseline_file_sha is None:
+ test.baselines_missing = True
+ print(f" -> Test {test} has no stored sha so must be considered expired")
+ continue
+
+ # There is a sha file, so check how it compares with self._baseline_ref
+ try:
+ num_ref_is_behind_file, num_ref_is_ahead_file = git_refs_difference(baseline_file_sha, baseline_ref_sha)
+ except SystemExit as e:
+ test.baselines_expired = True
+ reason = f"Failed to get refs difference between {baseline_file_sha} and {baseline_ref_sha} because: {e}"
+ print(f" -> Test {test} baselines are expired because {reason}")
+ continue
+
+ # If the copy in our repo is behind, then we need to update the repo
+ expect (num_ref_is_behind_file==0 or not self._integration_test,
f"""Error! Your repo seems stale, since the baseline sha in your repo is behind
the one last used to generate them. We do *not* allow an integration
test to replace baselines with older ones, for security reasons.
If this is a legitimate case where baselines need to be 'rewound',
e.g. b/c of a (hopefully VERY RARE) force push to master, then
remove existing baselines first. Otherwise, please run 'git fetch $remote'.
- - baseline_ref: {self._baseline_ref}
- - repo baseline sha: {baseline_ref_sha}
- - last used baseline sha: {baseline_file_sha}""")
-
- # If the copy in our repo is not ahead, then baselines are not expired
- if num_ref_is_ahead_file > 0 or self._force_baseline_regen:
- test.missing_baselines = True
- reason = "forcing baseline regen" if self._force_baseline_regen \
- else f"{self._baseline_ref} is ahead of the baseline commit by {num_ref_is_ahead_file}"
- print(f" -> Test {test} baselines are expired because {reason}")
- else:
- print(f" -> Test {test} baselines are valid and do not need to be regenerated")
+- baseline_ref: {self._baseline_ref}
+- repo baseline sha: {baseline_ref_sha}
+- last used baseline sha: {baseline_file_sha}""")
+
+ # If the copy in our repo is ahead, then baselines are expired
+ if num_ref_is_ahead_file > 0:
+ test.baselines_expired = True
+ reason = f"{self._baseline_ref} is ahead of the existing baseline commit {baseline_file_sha} by {num_ref_is_ahead_file}"
+ print(f" -> Test {test} baselines are expired because {reason}")
+ else:
+ print(f" -> Test {test} baselines are valid and do not need to be regenerated")
###############################################################################
def get_machine_file(self):
@@ -691,6 +450,9 @@ def generate_cmake_config(self, test, for_ctest=False):
stat, c_path, _ = run_cmd("nc-config --prefix")
if stat == 0:
result += f" -DNetCDF_C_PATH={c_path}"
+ stat, pc_path, _ = run_cmd("pnetcdf-config --prefix")
+ if stat == 0:
+ result += f" -DPnetCDF_C_PATH={pc_path}"
# Test-specific cmake options
for key, value in test.cmake_args:
@@ -815,7 +577,7 @@ def generate_ctest_config(self, cmake_config, extra_configs, test):
result += f"--resource-spec-file {test_dir}/ctest_resource_file.json "
if self._baseline_dir is not None and test.uses_baselines:
- cmake_config += f" -DSCREAM_TEST_DATA_DIR={self.get_preexisting_baseline(test)}"
+ cmake_config += f" -DSCREAM_BASELINES_DIR={self.get_preexisting_baseline(test).parent}"
if not self._submit:
result += "-DNO_SUBMIT=True "
@@ -847,91 +609,111 @@ def generate_ctest_config(self, cmake_config, extra_configs, test):
return result
###############################################################################
- def generate_baselines(self, test, commit):
+ def generate_baselines(self, test):
###############################################################################
expect(test.uses_baselines,
f"Something is off. generate_baseline should have not be called for test {test}")
- test_dir = self.get_test_dir(self._baseline_dir, test)
+ baseline_dir = self.get_test_dir(self._baseline_dir, test)
+ test_dir = self.get_test_dir(self._work_dir / "tas_baseline_build", test)
+ if test_dir.exists():
+ shutil.rmtree(test_dir)
+ test_dir.mkdir()
+ num_test_res = self.create_ctest_resource_file(test,test_dir)
cmake_config = self.generate_cmake_config(test)
- cmake_config += " -DSCREAM_BASELINES_ONLY=ON"
- cmake_config += f" -DSCREAM_TEST_DATA_DIR={test_dir}/data"
+ cmake_config += " -DSCREAM_ONLY_GENERATE_BASELINES=ON"
+ cmake_config += f" -DSCREAM_BASELINES_DIR={baseline_dir}"
+ cmake_config += f" -DSCREAM_TEST_MAX_TOTAL_THREADS={num_test_res}"
print("===============================================================================")
print(f"Generating baseline for test {test} with config '{cmake_config}'")
print("===============================================================================")
- success = True
+ # We cannot just crash if we fail to generate baselines, since we would
+ # not get a dashboard report if we did that. Instead, just ensure there is
+ # no baseline file to compare against if there's a problem.
+ stat, _, err = run_cmd(f"{cmake_config} {self._root_dir}",
+ from_dir=test_dir, verbose=True)
+ if stat != 0:
+ print (f"WARNING: Failed to create baselines (config phase):\n{err}")
+ return False
- try:
- # We cannot just crash if we fail to generate baselines, since we would
- # not get a dashboard report if we did that. Instead, just ensure there is
- # no baseline file to compare against if there's a problem.
- stat, _, err = run_cmd(f"{cmake_config} {self._root_dir}",
- from_dir=test_dir, verbose=True, dry_run=self._dry_run)
- if stat != 0:
- print (f"WARNING: Failed to configure baselines:\n{err}")
- success = False
+ cmd = f"make -j{test.compile_res_count}"
+ if self._parallel:
+ start, end = self.get_taskset_range(test)
+ cmd = f"taskset -c {start}-{end} sh -c '{cmd}'"
- else:
- cmd = f"make -j{test.compile_res_count} && make -j{test.testing_res_count} baseline"
- if self._parallel:
- start, end = self.get_taskset_range(test)
- cmd = f"taskset -c {start}-{end} sh -c '{cmd}'"
+ stat, _, err = run_cmd(cmd, from_dir=test_dir, verbose=True)
- stat, _, err = run_cmd(cmd, from_dir=test_dir, verbose=True, dry_run=self._dry_run)
+ if stat != 0:
+ print (f"WARNING: Failed to create baselines (build phase):\n{err}")
+ return False
- if stat != 0:
- print(f"WARNING: Failed to create baselines:\n{err}")
- success = False
+ cmd = f"ctest -j{test.testing_res_count}"
+ cmd += " -L baseline_gen"
+ cmd += f" --resource-spec-file {test_dir}/ctest_resource_file.json"
+ stat, _, err = run_cmd(cmd, from_dir=test_dir, verbose=True)
- finally:
- # Clean up the directory, by removing everything but the 'data' subfolder. This must
- # happen unconditionally or else subsequent runs could be corrupted
- run_cmd_no_fail(r"find -maxdepth 1 -not -name data ! -path . -exec rm -rf {} \;",
- from_dir=test_dir, verbose=True, dry_run=self._dry_run)
+ if stat != 0:
+ print (f"WARNING: Failed to create baselines (run phase):\n{err}")
+ return False
- if success:
- # Store the sha used for baselines generation
- self.set_baseline_file_sha(test, commit)
- test.missing_baselines = False
+ # Read list of nc files to copy to baseline dir
+ with open(test_dir/"data/baseline_list","r",encoding="utf-8") as fd:
+ files = fd.read().splitlines()
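+ # (baseline_list is expected to hold one baseline-file path per line)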
- return success
+ with SharedArea():
+ for fn in files:
+ # Skip empty lines (appending to the file may leave a trailing empty line)
+ if fn != "":
+ src = Path(fn)
+ dst = baseline_dir / "data" / src.name
+ safe_copy(src, dst)
+
+ # Store the sha used for baselines generation
+ self.set_baseline_file_sha(test)
+ test.baselines_missing = False
+
+ # Clean up the directory by removing everything
+ shutil.rmtree(test_dir)
+
+ return True
###############################################################################
def generate_all_baselines(self):
###############################################################################
- git_head_ref = get_current_head()
+
+ tests_needing_baselines = self.baselines_to_be_generated()
+ if len(tests_needing_baselines)==0:
+ return True
+
+ # Switch to baseline ref
+ checkout_git_ref (self._baseline_ref)
print("###############################################################################")
- print(f"Generating baselines for ref {self._baseline_ref}")
+ print(f"Generating baselines from git ref {self._baseline_ref}")
print("###############################################################################")
- commit = get_current_commit(commit=self._baseline_ref)
-
- # Switch to the baseline commit
- checkout_git_ref(self._baseline_ref, verbose=True, dry_run=self._dry_run)
+ tas_baseline_bld = self._work_dir / "tas_baseline_build"
+ if tas_baseline_bld.exists():
+ shutil.rmtree(tas_baseline_bld)
+ tas_baseline_bld.mkdir()
success = True
- tests_needing_baselines = [test for test in self._tests if test.missing_baselines]
num_workers = len(tests_needing_baselines) if self._parallel else 1
with threading3.ProcessPoolExecutor(max_workers=num_workers) as executor:
future_to_test = {
- executor.submit(self.generate_baselines, test, commit) : test
+ executor.submit(self.generate_baselines, test) : test
for test in tests_needing_baselines}
for future in threading3.as_completed(future_to_test):
test = future_to_test[future]
success &= future.result()
- if not success and self._fast_fail:
- print(f"Generation of baselines for test {test} failed")
- return False
-
- # Switch back to the branch commit
- checkout_git_ref(git_head_ref, verbose=True, dry_run=self._dry_run)
+ # Restore original commit
+ checkout_git_ref (self._original_commit)
return success
@@ -957,12 +739,13 @@ def run_test(self, test):
if self._quick_rerun_failed:
ctest_config += "--rerun-failed "
else:
- # This directory might have been used also to build the model to generate baselines.
+ # This directory might have been used before during another test-all-scream run.
# Although it's ok to build in the same dir, we MUST make sure to erase cmake's cache
- # and internal files from the previous build (CMakeCache.txt and CMakeFiles folder)
- run_cmd_no_fail("rm -rf CMake*", from_dir=test_dir, dry_run=self._dry_run)
+ # and internal files from the previous build (CMakeCache.txt and CMakeFiles folder).
+ # Otherwise, we may not pick up changes in certain cmake vars that are already cached.
+ run_cmd_no_fail("rm -rf CMake*", from_dir=test_dir)
- success = run_cmd(ctest_config, from_dir=test_dir, arg_stdout=None, arg_stderr=None, verbose=True, dry_run=self._dry_run)[0] == 0
+ success = run_cmd(ctest_config, from_dir=test_dir, arg_stdout=None, arg_stderr=None, verbose=True)[0] == 0
return success
@@ -973,9 +756,6 @@ def run_all_tests(self):
print("Running tests!")
print("###############################################################################")
- # First, create build directories (one per test). If existing, nuke the content
- self.create_tests_dirs(self._work_dir, not self._quick_rerun)
-
success = True
tests_success = {
test : False
@@ -991,10 +771,6 @@ def run_all_tests(self):
test = future_to_test[future]
tests_success[test] = future.result()
success &= tests_success[test]
- # If failed, and fast fail is requested, return immediately
- # Note: this is effective only if num_worksers=1
- if not success and self._fast_fail:
- break
for t,s in tests_success.items():
if not s:
@@ -1049,6 +825,23 @@ def get_last_ctest_file(self,test,phase):
else:
return None
+ ###############################################################################
+ def baselines_to_be_generated(self):
+ ###############################################################################
+ """
+ Return the list of tests whose baselines need to be generated. That is the case if
+ - baselines are missing
+ - baselines are expired and we asked to update expired baselines
+ """
+ ret = []
+ for test in self._tests:
+ if test.baselines_missing:
+ ret.append(test)
+ elif self._update_expired_baselines and test.baselines_expired:
+ ret.append(test)
+
+ return ret
+
###############################################################################
def test_all_scream(self):
###############################################################################
@@ -1060,25 +853,30 @@ def test_all_scream(self):
success = True
try:
- # If needed, generate baselines first
- tests_needing_baselines = [test for test in self._tests if test.missing_baselines]
- if tests_needing_baselines:
- expect(self._baseline_ref is not None, "Missing baseline ref")
+ if self._integration_test:
+ # Merge origin/master
+ merge_git_ref(git_ref=self._baseline_ref, verbose=True)
+
+ if self._generate:
success = self.generate_all_baselines()
if not success:
print ("Error(s) occurred during baselines generation phase")
+
+ # Do not continue testing, as you may be testing against old/invalid baselines
return False
- # If requested, run tests
- if self._perform_tests:
+ if self._run_tests:
+ # First, create build directories (one per test). If existing, nuke the content
+ self.create_tests_dirs(self._work_dir, not self._quick_rerun)
+
success &= self.run_all_tests()
if not success:
print ("Error(s) occurred during test phase")
finally:
- if not self._keep_tree:
- # Cleanup the repo if needed
- cleanup_repo(self._original_branch, self._original_commit, dry_run=self._dry_run)
+ # Cleanup the repo if needed
+ if self._original_commit!=get_current_commit():
+ cleanup_repo(self._original_branch, self._original_commit, self._has_backup_commit)
return success
diff --git a/components/eamxx/scripts/test_factory.py b/components/eamxx/scripts/test_factory.py
new file mode 100644
index 000000000000..3c480e3c6d8b
--- /dev/null
+++ b/components/eamxx/scripts/test_factory.py
@@ -0,0 +1,258 @@
+# This module contains classes that describe a test type for test-all-scream.
+# Each test type (here represented by a TestProperty object) can have different
+# flags, build type, profiling, cmake options, etc.
+# The function "create_tests" can be used to get a list of test types from
+# their string representations.
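+#
+# Example usage (a sketch; `tas` is the main TestAllScream object, or None):
+#   from test_factory import create_tests
+#   tests = create_tests(["dbg", "opt"], tas)  # the DBG and OPT test types
+#   tests = create_tests(None, tas)            # all on-by-default test types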
+
+from collections import OrderedDict
+from utils import expect
+
+###############################################################################
+class TestProperty(object):
+###############################################################################
+
+ """
+ Parent class of predefined test types for SCREAM standalone. test-all-scream
+ offers a number of customization points, but for maximal customization you may
+ need to use cmake directly. You can run test-all-scream --dry-run to get the
+ corresponding cmake command, which can then be used as a starting point for
+ making your own cmake command.
+ """
+
+ def __init__(self, longname, description, cmake_args,
+ uses_baselines=True, on_by_default=True, default_test_len=None):
+ # What the user uses to select tests via test-all-scream CLI.
+ # Should also match the class name when converted to caps
+ self.shortname = type(self).__name__.lower()
+
+ # A longer name used to name baseline and test directories for a test.
+ # Also used in output/error messages to refer to the test
+ self.longname = longname
+
+ # A longer decription of the test
+ self.description = description
+
+ # Cmake config args for this test. Check that quoting is done with
+ # single quotes.
+ self.cmake_args = cmake_args
+ for name, arg in self.cmake_args:
+ expect('"' not in arg,
+ f"In test definition for {longname}, found cmake args with double quotes {name}='{arg}'"
+ "Please use single quotes if quotes are needed.")
+
+ # Does the test do baseline testing
+ self.uses_baselines = uses_baselines
+
+ # Should this test be run if the user did not specify tests at all?
+ self.on_by_default = on_by_default
+
+ # Default test length for this test (if any)
+ self.default_test_len = default_test_len
+
+ #
+ # Properties not set by constructor (Set by the main TestAllScream object)
+ #
+
+ # Resources used by this test.
+ self.compile_res_count = None
+ self.testing_res_count = None
+
+ # Status of this test's baselines (set by TestAllScream when checking baselines)
+ self.baselines_missing = False
+ self.baselines_expired = False
+
+ #
+ # Common
+ #
+
+ if not self.uses_baselines:
+ self.cmake_args += [("SCREAM_ENABLE_BASELINE_TESTS", "False")]
+
+ def disable_baselines(self):
+ if self.uses_baselines:
+ self.uses_baselines = False
+ self.cmake_args += [("SCREAM_ENABLE_BASELINE_TESTS", "False")]
+
+ # Tests will generally be referred to via their longname
+ def __str__(self):
+ return self.longname
+
+###############################################################################
+class DBG(TestProperty):
+###############################################################################
+
+ CMAKE_ARGS = [("CMAKE_BUILD_TYPE", "Debug"), ("EKAT_DEFAULT_BFB", "True")]
+
+ def __init__(self, _):
+ TestProperty.__init__(
+ self,
+ "full_debug",
+ "debug",
+ self.CMAKE_ARGS,
+ )
+
+###############################################################################
+class SP(TestProperty):
+###############################################################################
+
+ def __init__(self, _):
+ TestProperty.__init__(
+ self,
+ "full_sp_debug",
+ "debug single precision",
+ DBG.CMAKE_ARGS + [("SCREAM_DOUBLE_PRECISION", "False")],
+ )
+
+###############################################################################
+class FPE(TestProperty):
+###############################################################################
+
+ def __init__(self, tas):
+ TestProperty.__init__(
+ self,
+ "debug_nopack_fpe",
+ "debug pksize=1 floating point exceptions on",
+ DBG.CMAKE_ARGS + [("SCREAM_PACK_SIZE", "1"), ("SCREAM_FPE","True")],
+ uses_baselines=False,
+ on_by_default=(tas is not None and not tas.on_cuda())
+ )
+
+###############################################################################
+class OPT(TestProperty):
+###############################################################################
+
+ def __init__(self, _):
+ TestProperty.__init__(
+ self,
+ "release",
+ "release",
+ [("CMAKE_BUILD_TYPE", "Release")],
+ )
+
+###############################################################################
+class COV(TestProperty):
+###############################################################################
+
+ def __init__(self, _):
+ TestProperty.__init__(
+ self,
+ "coverage",
+ "debug coverage",
+ [("CMAKE_BUILD_TYPE", "Debug"), ("EKAT_ENABLE_COVERAGE", "True")],
+ uses_baselines=False,
+ on_by_default=False,
+ default_test_len="short"
+ )
+
+###############################################################################
+class VALG(TestProperty):
+###############################################################################
+
+ def __init__(self, tas):
+ TestProperty.__init__(
+ self,
+ "valgrind",
+ "debug with valgrind",
+ [("CMAKE_BUILD_TYPE", "Debug"), ("EKAT_ENABLE_VALGRIND", "True")],
+ uses_baselines=False,
+ on_by_default=False,
+ default_test_len="short"
+ )
+ if tas is not None:
+ # If a stored suppression file exists for this machine, use it
+ persistent_supp_file = tas.get_root_dir() / "scripts" / "jenkins" / "valgrind" / f"{tas.get_machine()}.supp"
+ if persistent_supp_file.exists():
+ self.cmake_args.append( ("EKAT_VALGRIND_SUPPRESSION_FILE", str(persistent_supp_file)) )
+
+###############################################################################
+class CSM(TestProperty):
+###############################################################################
+
+ def __init__(self, _):
+ TestProperty.__init__(
+ self,
+ "compute_sanitizer_memcheck",
+ "debug with compute sanitizer memcheck",
+ [("CMAKE_BUILD_TYPE", "Debug"),
+ ("EKAT_ENABLE_COMPUTE_SANITIZER", "True"),
+ ("EKAT_COMPUTE_SANITIZER_OPTIONS", "--tool=memcheck")],
+ uses_baselines=False,
+ on_by_default=False,
+ default_test_len="short"
+ )
+
+###############################################################################
+class CSR(TestProperty):
+###############################################################################
+
+ def __init__(self, _):
+ TestProperty.__init__(
+ self,
+ "compute_sanitizer_racecheck",
+ "debug with compute sanitizer racecheck",
+ [("CMAKE_BUILD_TYPE", "Debug"),
+ ("EKAT_ENABLE_COMPUTE_SANITIZER", "True"),
+ ("EKAT_COMPUTE_SANITIZER_OPTIONS", "'--tool=racecheck --racecheck-detect-level=error'")],
+ uses_baselines=False,
+ on_by_default=False,
+ default_test_len="short"
+ )
+
+###############################################################################
+class CSI(TestProperty):
+###############################################################################
+
+ def __init__(self, _):
+ TestProperty.__init__(
+ self,
+ "compute_sanitizer_initcheck",
+ "debug with compute sanitizer initcheck",
+ [("CMAKE_BUILD_TYPE", "Debug"),
+ ("EKAT_ENABLE_COMPUTE_SANITIZER", "True"),
+ ("EKAT_COMPUTE_SANITIZER_OPTIONS", "--tool=initcheck")],
+ uses_baselines=False,
+ on_by_default=False,
+ default_test_len="short"
+ )
+
+###############################################################################
+class CSS(TestProperty):
+###############################################################################
+
+ def __init__(self, _):
+ TestProperty.__init__(
+ self,
+ "compute_sanitizer_synccheck",
+ "debug with compute sanitizer synccheck",
+ [("CMAKE_BUILD_TYPE", "Debug"),
+ ("EKAT_ENABLE_COMPUTE_SANITIZER", "True"),
+ ("EKAT_COMPUTE_SANITIZER_OPTIONS", "--tool=synccheck")],
+ uses_baselines=False,
+ on_by_default=False,
+ default_test_len="short"
+ )
+
+###############################################################################
+def create_tests(user_req_tests, tas):
+###############################################################################
+ testclasses = TestProperty.__subclasses__()
+ if not user_req_tests:
+ result = [testclass(tas) for testclass in testclasses
+ if testclass(tas).on_by_default]
+ else:
+ valid_names = [testclass(tas).shortname for testclass in testclasses]
+ for user_req_test in user_req_tests:
+ expect(user_req_test in valid_names, f"'{user_req_test}' is not a known test")
+
+ result = [testclass(tas) for testclass in testclasses if testclass(tas).shortname in user_req_tests]
+
+ return result
+
+###########################################################################
+def get_test_name_dict():
+###########################################################################
+ """
+ Returns a dict mapping short test names to their descriptions
+ """
+ testclasses = TestProperty.__subclasses__()
+ return OrderedDict([(testc(None).shortname, testc(None).description) for testc in testclasses])
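+
+# For instance (a sketch), get_test_name_dict() yields something like
+#   OrderedDict([('dbg', 'debug'), ('sp', 'debug single precision'), ...])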
diff --git a/components/eamxx/scripts/utils.py b/components/eamxx/scripts/utils.py
index d08075a5e34b..9aafd09ae8ae 100644
--- a/components/eamxx/scripts/utils.py
+++ b/components/eamxx/scripts/utils.py
@@ -2,9 +2,11 @@
Utilities
"""
-import os, sys, re, signal, subprocess, site, time
+import os, sys, re, signal, subprocess, site, time, shutil
from importlib import import_module
+import stat as statlib
from pathlib import Path
+from distutils import file_util # pylint: disable=deprecated-module
###############################################################################
def expect(condition, error_msg, exc_type=SystemExit, error_prefix="ERROR:"):
@@ -415,3 +417,81 @@ def ensure_yaml(): _ensure_pylib_impl("yaml", pip_libname="pyyaml",min_version
def ensure_pylint(): _ensure_pylib_impl("pylint")
def ensure_psutil(): _ensure_pylib_impl("psutil")
def ensure_netcdf4(): _ensure_pylib_impl("netCDF4")
+
+###############################################################################
+def safe_copy(src_path, tgt_path, preserve_meta=True):
+###############################################################################
+ """
+ A flexible and safe copy routine. Will try to copy file and metadata, but this
+ can fail if the current user doesn't own the tgt file. A fallback data-only copy is
+ attempted in this case. Works even if overwriting a read-only file.
+
+ tgt_path can be a directory; src_path must be a file.
+
+ Most of the complexity here is handling the case where the tgt_path file already
+ exists. This problem does not exist for tree operations, so we don't need to wrap those.
+
+ preserve_meta toggles whether file metadata, like permissions, should be preserved. If you are
+ copying baseline files, you should be within a SharedArea context manager and preserve_meta
+ should be false so that the umask set up by SharedArea can take effect regardless of the
+ permissions of the src files.
+ """
+
+ # Only works for str paths for now
+ src_path = str(src_path)
+ tgt_path = str(tgt_path)
+
+ tgt_path = (
+ os.path.join(tgt_path, os.path.basename(src_path))
+ if os.path.isdir(tgt_path)
+ else tgt_path
+ )
+
+ # Handle pre-existing file
+ if os.path.isfile(tgt_path):
+ st = os.stat(tgt_path)
+ owner_uid = st.st_uid
+
+ # Handle read-only files if possible
+ if not os.access(tgt_path, os.W_OK):
+ if owner_uid == os.getuid():
+ # I am the owner, make writeable
+ os.chmod(tgt_path, st.st_mode | statlib.S_IWRITE)
+ else:
+ # I won't be able to copy this file
+ raise OSError(
+ "Cannot copy over file {}, it is readonly and you are not the owner".format(
+ tgt_path
+ )
+ )
+
+ if owner_uid == os.getuid():
+ # I am the owner, copy file contents, permissions, and metadata
+ file_util.copy_file(
+ src_path,
+ tgt_path,
+ preserve_mode=preserve_meta,
+ preserve_times=preserve_meta,
+ verbose=0,
+ )
+ else:
+ # I am not the owner, just copy file contents
+ shutil.copyfile(src_path, tgt_path)
+
+ else:
+ # We are making a new file, copy file contents, permissions, and metadata.
+ # This can fail if the underlying directory is not writable by current user.
+ file_util.copy_file(
+ src_path,
+ tgt_path,
+ preserve_mode=preserve_meta,
+ preserve_times=preserve_meta,
+ verbose=0,
+ )
+
+ # If src file was executable, then the tgt file should be too
+ st = os.stat(tgt_path)
+ if os.access(src_path, os.X_OK) and st.st_uid == os.getuid():
+ os.chmod(
+ tgt_path, st.st_mode | statlib.S_IXUSR | statlib.S_IXGRP | statlib.S_IXOTH
+ )
diff --git a/components/eamxx/src/CMakeLists.txt b/components/eamxx/src/CMakeLists.txt
index 9568d3cf85ce..59a5d6646443 100644
--- a/components/eamxx/src/CMakeLists.txt
+++ b/components/eamxx/src/CMakeLists.txt
@@ -4,9 +4,6 @@ add_subdirectory(dynamics)
add_subdirectory(physics)
add_subdirectory(diagnostics)
add_subdirectory(control)
-if ("${SCREAM_DYNAMICS_DYCORE}" STREQUAL "HOMME")
- add_subdirectory(doubly-periodic)
-endif()
if (PROJECT_NAME STREQUAL "E3SM")
add_subdirectory(mct_coupling)
endif()
diff --git a/components/eamxx/src/control/atmosphere_driver.cpp b/components/eamxx/src/control/atmosphere_driver.cpp
index 96adc56127d4..a5dacd856d75 100644
--- a/components/eamxx/src/control/atmosphere_driver.cpp
+++ b/components/eamxx/src/control/atmosphere_driver.cpp
@@ -167,44 +167,44 @@ init_time_stamps (const util::TimeStamp& run_t0, const util::TimeStamp& case_t0)
void AtmosphereDriver::
-setup_intensive_observation_period ()
+setup_iop ()
{
- // At this point, must have comm, params, initialized timestamps, and grids created.
- check_ad_status(s_comm_set | s_params_set | s_ts_inited | s_grids_created);
+ // At this point, must have comm and params set, and timestamps inited.
+ check_ad_status(s_comm_set | s_params_set | s_ts_inited);
// Check to make sure iop is not already initialized
- EKAT_REQUIRE_MSG(not m_intensive_observation_period, "Error! setup_intensive_observation_period() is "
- "called, but IOP already set up.\n");
+ EKAT_REQUIRE_MSG(not m_iop, "Error! setup_iop() is called, but IOP already set up.\n");
// This function should only be called if we are enabling IOP
const bool enable_iop =
- m_atm_params.sublist("driver_options").get("enable_intensive_observation_period", false);
- EKAT_REQUIRE_MSG(enable_iop, "Error! setup_intensive_observation_period() is called, but "
- "enable_intensive_observation_period=false "
+ m_atm_params.sublist("driver_options").get("enable_iop", false);
+ EKAT_REQUIRE_MSG(enable_iop, "Error! setup_iop() is called, but enable_iop=false "
"in driver_options parameters.\n");
- // Params must include intensive_observation_period_options sublist.
- const auto iop_sublist_exists = m_atm_params.isSublist("intensive_observation_period_options");
+ // Params must include iop_options sublist.
+ const auto iop_sublist_exists = m_atm_params.isSublist("iop_options");
EKAT_REQUIRE_MSG(iop_sublist_exists,
- "Error! setup_intensive_observation_period() is called, but no intensive_observation_period_options "
+ "Error! setup_iop() is called, but no iop_options "
"defined in parameters.\n");
- const auto iop_params = m_atm_params.sublist("intensive_observation_period_options");
+ const auto iop_params = m_atm_params.sublist("iop_options");
const auto phys_grid = m_grids_manager->get_grid("Physics");
const auto nlevs = phys_grid->get_num_vertical_levels();
const auto hyam = phys_grid->get_geometry_data("hyam");
const auto hybm = phys_grid->get_geometry_data("hybm");
- m_intensive_observation_period =
- std::make_shared(m_atm_comm,
- iop_params,
- m_run_t0,
- nlevs,
- hyam,
- hybm);
+ m_iop = std::make_shared(m_atm_comm,
+ iop_params,
+ m_run_t0,
+ nlevs,
+ hyam,
+ hybm);
auto dx_short_f = phys_grid->get_geometry_data("dx_short");
- m_intensive_observation_period->set_grid_spacing(dx_short_f.get_view()());
+ m_iop->set_grid_spacing(dx_short_f.get_view()());
+
+ // Set IOP object in atm processes
+ m_atm_process_group->set_iop(m_iop);
}
void AtmosphereDriver::create_atm_processes()
@@ -279,6 +279,14 @@ void AtmosphereDriver::create_grids()
setup_shoc_tms_links();
}
+ // The IOP object needs the grids_manager to have been created, but is itself needed in the
+ // set_grids() implementation of some processes, so set it up here.
+ const bool enable_iop =
+ m_atm_params.sublist("driver_options").get("enable_iop", false);
+ if (enable_iop) {
+ setup_iop ();
+ }
+
// Set the grids in the processes. Do this by passing the grids manager.
// Each process will grab what they need
m_atm_process_group->set_grids(m_grids_manager);
@@ -338,10 +346,6 @@ void AtmosphereDriver::setup_surface_coupling_processes () const
std::shared_ptr importer = std::dynamic_pointer_cast(atm_proc);
importer->setup_surface_coupling_data(*m_surface_coupling_import_data_manager);
-
- if (m_intensive_observation_period) {
- importer->set_intensive_observation_period(m_intensive_observation_period);
- }
}
if (atm_proc->type() == AtmosphereProcessType::SurfaceCouplingExporter) {
exporter_found = true;
@@ -691,9 +695,9 @@ void AtmosphereDriver::initialize_output_managers () {
checkpoint_params.set("Frequency",-1);
if (io_params.isSublist("model_restart")) {
auto restart_pl = io_params.sublist("model_restart");
- m_output_managers.emplace_back();
+ restart_pl.set("Averaging Type","Instant");
restart_pl.sublist("provenance") = m_atm_params.sublist("provenance");
- auto& om = m_output_managers.back();
+ auto& om = m_output_managers.emplace_back();
if (fvphyshack) {
// Don't save CGLL fields from ICs to the restart file.
std::map fms;
@@ -839,7 +843,7 @@ initialize_fields ()
auto hw = fm->get_field("horiz_winds");
const auto& fid = hw.get_header().get_identifier();
const auto& layout = fid.get_layout();
- const int vec_dim = layout.get_vector_dim();
+ const int vec_dim = layout.get_vector_component_idx();
const auto& units = fid.get_units();
auto U = hw.subfield("U",units,vec_dim,0);
auto V = hw.subfield("V",units,vec_dim,1);
@@ -855,7 +859,7 @@ initialize_fields ()
auto hw = fm->get_field("surf_mom_flux");
const auto& fid = hw.get_header().get_identifier();
const auto& layout = fid.get_layout();
- const int vec_dim = layout.get_vector_dim();
+ const int vec_dim = layout.get_vector_component_idx();
const auto& units = fid.get_units();
auto surf_mom_flux_U = hw.subfield("surf_mom_flux_U",units,vec_dim,0);
auto surf_mom_flux_V = hw.subfield("surf_mom_flux_V",units,vec_dim,1);
@@ -937,29 +941,34 @@ void AtmosphereDriver::create_logger () {
auto& driver_options_pl = m_atm_params.sublist("driver_options");
ci_string log_fname = driver_options_pl.get("Atm Log File","atm.log");
- ci_string log_level_str = driver_options_pl.get("atm_log_level","info");
+ ci_string log_level = driver_options_pl.get("atm_log_level","info");
+ ci_string flush_level = driver_options_pl.get("atm_flush_level","warn");
EKAT_REQUIRE_MSG (log_fname!="",
"Invalid string for 'Atm Log File': '" + log_fname + "'.\n");
- LogLevel log_level;
- if (log_level_str=="trace") {
- log_level = LogLevel::trace;
- } else if (log_level_str=="debug") {
- log_level = LogLevel::debug;
- } else if (log_level_str=="info") {
- log_level = LogLevel::info;
- } else if (log_level_str=="warn") {
- log_level = LogLevel::warn;
- } else if (log_level_str=="err") {
- log_level = LogLevel::err;
- } else if (log_level_str=="off") {
- log_level = LogLevel::off;
- } else {
- EKAT_ERROR_MSG ("Invalid choice for 'atm_log_level': " + log_level_str + "\n");
- }
+ auto str2lev = [](const std::string& s, const std::string& name) {
+ LogLevel lev;
+ if (s=="trace") {
+ lev = LogLevel::trace;
+ } else if (s=="debug") {
+ lev = LogLevel::debug;
+ } else if (s=="info") {
+ lev = LogLevel::info;
+ } else if (s=="warn") {
+ lev = LogLevel::warn;
+ } else if (s=="err") {
+ lev = LogLevel::err;
+ } else if (s=="off") {
+ lev = LogLevel::off;
+ } else {
+ EKAT_ERROR_MSG ("Invalid choice for '" + name + "': " + s + "\n");
+ }
+ return lev;
+ };
using logger_t = Logger;
- m_atm_logger = std::make_shared(log_fname,log_level,m_atm_comm,"");
+ m_atm_logger = std::make_shared(log_fname,str2lev(log_level,"atm_log_level"),m_atm_comm,"");
+ m_atm_logger->flush_on(str2lev(flush_level,"atm_flush_level"));
m_atm_logger->set_no_format();
// In CIME runs, this is already set to false, so atm log does not pollute e3sm.loc.
@@ -1129,7 +1138,7 @@ void AtmosphereDriver::set_initial_conditions ()
}
}
- if (m_intensive_observation_period) {
+ if (m_iop) {
// For runs with IOP, call to setup io grids and lat
// lon information needed for reading from file
for (const auto& it : m_field_mgrs) {
@@ -1140,7 +1149,7 @@ void AtmosphereDriver::set_initial_conditions ()
ic_pl.get("Filename")
:
ic_pl.get("topography_filename");
- m_intensive_observation_period->setup_io_info(file_name, it.second->get_grid());
+ m_iop->setup_io_info(file_name, it.second->get_grid());
}
}
}
@@ -1152,15 +1161,15 @@ void AtmosphereDriver::set_initial_conditions ()
m_atm_logger->info(" [EAMxx] IC filename: " + file_name);
for (const auto& it : m_field_mgrs) {
const auto& grid_name = it.first;
- if (not m_intensive_observation_period) {
+ if (not m_iop) {
read_fields_from_file (ic_fields_names[grid_name],it.second->get_grid(),file_name,m_current_ts);
} else {
// For IOP enabled, we load from file and copy data from the closest
// lat/lon column to every other column
- m_intensive_observation_period->read_fields_from_file_for_iop(file_name,
- ic_fields_names[grid_name],
- m_current_ts,
- it.second);
+ m_iop->read_fields_from_file_for_iop(file_name,
+ ic_fields_names[grid_name],
+ m_current_ts,
+ it.second);
}
}
}
@@ -1227,7 +1236,7 @@ void AtmosphereDriver::set_initial_conditions ()
m_atm_logger->info(" filename: " + file_name);
for (const auto& it : m_field_mgrs) {
const auto& grid_name = it.first;
- if (not m_intensive_observation_period) {
+ if (not m_iop) {
// Topography files always use "ncol_d" for the GLL grid value of ncol.
// To ensure we read in the correct value, we must change the name for that dimension
auto io_grid = it.second->get_grid();
@@ -1243,11 +1252,11 @@ void AtmosphereDriver::set_initial_conditions ()
} else {
// For IOP enabled, we load from file and copy data from the closest
// lat/lon column to every other column
- m_intensive_observation_period->read_fields_from_file_for_iop(file_name,
- topography_file_fields_names[grid_name],
- topography_eamxx_fields_names[grid_name],
- m_current_ts,
- it.second);
+ m_iop->read_fields_from_file_for_iop(file_name,
+ topography_file_fields_names[grid_name],
+ topography_eamxx_fields_names[grid_name],
+ m_current_ts,
+ it.second);
}
}
// Store in provenance list, for later usage in output file metadata
@@ -1268,16 +1277,16 @@ void AtmosphereDriver::set_initial_conditions ()
m_atm_params.sublist("provenance").set("topography_file","NONE");
}
- if (m_intensive_observation_period) {
+ if (m_iop) {
// Load IOP data file data for initial time stamp
- m_intensive_observation_period->read_iop_file_data(m_current_ts);
+ m_iop->read_iop_file_data(m_current_ts);
// Now that ICs are processed, set appropriate fields using IOP file data.
// Since ICs are loaded on GLL grid, we set those fields only and dynamics
// will take care of the rest (for PG2 case).
if (m_field_mgrs.count("Physics GLL") > 0) {
const auto& fm = m_field_mgrs.at("Physics GLL");
- m_intensive_observation_period->set_fields_from_iop_data(fm);
+ m_iop->set_fields_from_iop_data(fm);
}
}
@@ -1441,12 +1450,15 @@ initialize_constant_field(const FieldIdentifier& fid,
// The user provided a constant value for this field. Simply use that.
const auto& layout = f.get_header().get_identifier().get_layout();
- // For vector fields, we expect something like "fname: [val0,...,valN],
- // where the field dim is N+1. For scalars, "fname: val". So check the
- // field layout first, so we know what to get from the parameter list.
- if (layout.is_vector_layout()) {
- const auto idim = layout.get_vector_dim();
- const auto vec_dim = layout.dim(idim);
+ // For vector fields, we allow either single value init or vector value init.
+ // That is, both of these are ok:
+ // fname: val
+ // fname: [val1,...,valN]
+ // In the first case, all entries of the field are inited to val, while in the latter,
+ // each component is inited to the corresponding entry of the array.
+ if (layout.is_vector_layout() and ic_pl.isType>(name)) {
+ const auto idim = layout.get_vector_component_idx();
+ const auto vec_dim = layout.get_vector_dim();
const auto& values = ic_pl.get>(name);
EKAT_REQUIRE_MSG (values.size()==static_cast(vec_dim),
"Error! Initial condition values array for '" + name + "' has the wrong dimension.\n"
@@ -1554,12 +1566,6 @@ initialize (const ekat::Comm& atm_comm,
create_grids ();
- const bool enable_iop =
- m_atm_params.sublist("driver_options").get("enable_intensive_observation_period", false);
- if (enable_iop) {
- setup_intensive_observation_period ();
- }
-
create_fields ();
initialize_fields ();
diff --git a/components/eamxx/src/control/atmosphere_driver.hpp b/components/eamxx/src/control/atmosphere_driver.hpp
index bf694da3fb3e..1cd8d4db08e7 100644
--- a/components/eamxx/src/control/atmosphere_driver.hpp
+++ b/components/eamxx/src/control/atmosphere_driver.hpp
@@ -72,7 +72,7 @@ class AtmosphereDriver
void init_scorpio (const int atm_id = 0);
// Setup IntensiveObservationPeriod
- void setup_intensive_observation_period ();
+ void setup_iop ();
// Create atm processes, without initializing them
void create_atm_processes ();
@@ -207,7 +207,7 @@ class AtmosphereDriver
std::shared_ptr m_surface_coupling_import_data_manager;
std::shared_ptr m_surface_coupling_export_data_manager;
- std::shared_ptr m_intensive_observation_period;
+ std::shared_ptr m_iop;
// This is the time stamp at the beginning of the time step.
util::TimeStamp m_current_ts;
diff --git a/components/eamxx/src/control/atmosphere_surface_coupling_importer.cpp b/components/eamxx/src/control/atmosphere_surface_coupling_importer.cpp
index 1bf32b924215..e816801fa618 100644
--- a/components/eamxx/src/control/atmosphere_surface_coupling_importer.cpp
+++ b/components/eamxx/src/control/atmosphere_surface_coupling_importer.cpp
@@ -170,9 +170,11 @@ void SurfaceCouplingImporter::do_import(const bool called_during_initialization)
}
});
- // If IOP is defined, potentially overwrite imports with data from IOP file
- if (m_intensive_observation_period) {
- overwrite_iop_imports(called_during_initialization);
+ if (m_iop) {
+ if (m_iop->get_params().get("iop_srf_prop")) {
+ // Overwrite imports with data from IOP file
+ overwrite_iop_imports(called_during_initialization);
+ }
}
}
// =========================================================================================
@@ -181,11 +183,9 @@ void SurfaceCouplingImporter::overwrite_iop_imports (const bool called_during_in
using policy_type = KokkosTypes::RangePolicy;
using C = physics::Constants;
- const auto& iop = m_intensive_observation_period;
-
- const auto has_lhflx = iop->has_iop_field("lhflx");
- const auto has_shflx = iop->has_iop_field("shflx");
- const auto has_Tg = iop->has_iop_field("Tg");
+ const auto has_lhflx = m_iop->has_iop_field("lhflx");
+ const auto has_shflx = m_iop->has_iop_field("shflx");
+ const auto has_Tg = m_iop->has_iop_field("Tg");
static constexpr Real latvap = C::LatVap;
static constexpr Real stebol = C::stebol;
@@ -205,19 +205,19 @@ void SurfaceCouplingImporter::overwrite_iop_imports (const bool called_during_in
// Store IOP surf data into col_val
Real col_val(std::nan(""));
if (fname == "surf_evap" && has_lhflx) {
- const auto f = iop->get_iop_field("lhflx");
+ const auto f = m_iop->get_iop_field("lhflx");
f.sync_to_host();
col_val = f.get_view()()/latvap;
} else if (fname == "surf_sens_flux" && has_shflx) {
- const auto f = iop->get_iop_field("shflx");
+ const auto f = m_iop->get_iop_field("shflx");
f.sync_to_host();
col_val = f.get_view()();
} else if (fname == "surf_radiative_T" && has_Tg) {
- const auto f = iop->get_iop_field("Tg");
+ const auto f = m_iop->get_iop_field("Tg");
f.sync_to_host();
col_val = f.get_view()();
} else if (fname == "surf_lw_flux_up" && has_Tg) {
- const auto f = iop->get_iop_field("Tg");
+ const auto f = m_iop->get_iop_field("Tg");
f.sync_to_host();
col_val = stebol*std::pow(f.get_view()(), 4);
} else {
diff --git a/components/eamxx/src/control/atmosphere_surface_coupling_importer.hpp b/components/eamxx/src/control/atmosphere_surface_coupling_importer.hpp
index 5884c9d40af2..3a34a8b2951e 100644
--- a/components/eamxx/src/control/atmosphere_surface_coupling_importer.hpp
+++ b/components/eamxx/src/control/atmosphere_surface_coupling_importer.hpp
@@ -5,8 +5,6 @@
#include "ekat/ekat_parameter_list.hpp"
#include "share/atm_process/SCDataManager.hpp"
-#include "control/intensive_observation_period.hpp"
-
#include "surface_coupling_utils.hpp"
#include
@@ -36,7 +34,6 @@ class SurfaceCouplingImporter : public AtmosphereProcess
using uview_2d = Unmanaged>;
using name_t = char[32];
- using iop_ptr = std::shared_ptr;
// Constructors
SurfaceCouplingImporter (const ekat::Comm& comm, const ekat::ParameterList& params);
@@ -64,9 +61,6 @@ class SurfaceCouplingImporter : public AtmosphereProcess
// Overwrite imports for IOP cases with IOP file surface data
void overwrite_iop_imports (const bool called_during_initialization);
- void set_intensive_observation_period (const iop_ptr& iop) {
- m_intensive_observation_period = iop;
- }
protected:
// The three main overrides for the subcomponent
@@ -102,9 +96,6 @@ class SurfaceCouplingImporter : public AtmosphereProcess
view_1d m_column_info_d;
decltype(m_column_info_d)::HostMirror m_column_info_h;
- // Intensive observation period object.
- iop_ptr m_intensive_observation_period;
-
// The grid is needed for property checks
std::shared_ptr m_grid;
}; // class SurfaceCouplingImporter
diff --git a/components/eamxx/src/control/intensive_observation_period.cpp b/components/eamxx/src/control/intensive_observation_period.cpp
index f4ab466c17ba..d925b21bfaa3 100644
--- a/components/eamxx/src/control/intensive_observation_period.cpp
+++ b/components/eamxx/src/control/intensive_observation_period.cpp
@@ -116,7 +116,7 @@ IntensiveObservationPeriod(const ekat::Comm& comm,
EKAT_REQUIRE_MSG(m_params.isParameter("target_latitude") && m_params.isParameter("target_longitude"),
"Error! Using intensive observation period files requires "
"target_latitude and target_longitude be gives as parameters in "
- "\"intensive_observation_period_options\" in the input yaml file.\n");
+ "\"iop_options\" in the input yaml file.\n");
const auto target_lat = m_params.get("target_latitude");
const auto target_lon = m_params.get("target_longitude");
EKAT_REQUIRE_MSG(-90 <= target_lat and target_lat <= 90,
@@ -135,16 +135,18 @@ IntensiveObservationPeriod(const ekat::Comm& comm,
if (not m_params.isParameter("iop_nudge_tscale")) m_params.set("iop_nudge_tscale", 10800);
if (not m_params.isParameter("zero_non_iop_tracers")) m_params.set("zero_non_iop_tracers", false);
+ // Store hybrid coords in helper fields
+ m_helper_fields.insert({"hyam", hyam});
+ m_helper_fields.insert({"hybm", hybm});
+
// Use IOP file to initialize parameters
// and timestepping information
- initialize_iop_file(run_t0, model_nlevs, hyam, hybm);
+ initialize_iop_file(run_t0, model_nlevs);
}
void IntensiveObservationPeriod::
initialize_iop_file(const util::TimeStamp& run_t0,
- int model_nlevs,
- const Field& hyam,
- const Field& hybm)
+ int model_nlevs)
{
EKAT_REQUIRE_MSG(m_params.isParameter("iop_file"),
"Error! Using IOP requires defining an iop_file parameter.\n");
@@ -184,6 +186,8 @@ initialize_iop_file(const util::TimeStamp& run_t0,
if (scorpio::has_variable(iop_file, srf_varname)) {
m_iop_field_surface_varnames.insert({iop_varname, srf_varname});
}
+ // Store that the IOP variable is found in the IOP file
+ m_iop_field_type.insert({iop_varname, IOPFieldType::FromFile});
// Allocate field for variable
FieldIdentifier fid(iop_varname, fl, ekat::units::Units::nondimensional(), "");
@@ -191,6 +195,10 @@ initialize_iop_file(const util::TimeStamp& run_t0,
EKAT_REQUIRE_MSG(field_rank <= 1,
"Error! Unexpected field rank "+std::to_string(field_rank)+" for iop file fields.\n");
Field field(fid);
+ if (fl.has_tag(FieldTag::LevelMidPoint) or fl.has_tag(FieldTag::LevelInterface)) {
+ // Request packsize allocation for level layout
+ field.get_header().get_alloc_properties().request_allocation(Pack::n);
+ }
field.allocate_view();
m_iop_fields.insert({iop_varname, field});
}
@@ -230,13 +238,42 @@ initialize_iop_file(const util::TimeStamp& run_t0,
setup_iop_field({"Q2"}, fl_vector);
setup_iop_field({"omega"}, fl_vector, "Ptend");
- // Make sure Ps, T, and q are defined in the iop file
+ // Require Ps, T, q, divT, divq are all defined in the iop file
EKAT_REQUIRE_MSG(has_iop_field("Ps"),
- "Error! Using IOP file requires variable \"Ps\".\n");
+ "Error! IOP file required to contain variable \"Ps\".\n");
EKAT_REQUIRE_MSG(has_iop_field("T"),
- "Error! Using IOP file requires variable \"T\".\n");
+ "Error! IOP file required to contain variable \"T\".\n");
EKAT_REQUIRE_MSG(has_iop_field("q"),
- "Error! Using IOP file requires variable \"q\".\n");
+ "Error! IOP file required to contain variable \"q\".\n");
+ EKAT_REQUIRE_MSG(has_iop_field("divT"),
+ "Error! IOP file required to contain variable \"divT\".\n");
+ EKAT_REQUIRE_MSG(has_iop_field("divq"),
+ "Error! IOP file required to contain variable \"divq\".\n");
+
+ // If we have the vertical component of T/Q forcing, define 3d forcing as a computed field.
+ if (has_iop_field("vertdivT")) {
+ FieldIdentifier fid("divT3d", fl_vector, ekat::units::Units::nondimensional(), "");
+ Field field(fid);
+ field.get_header().get_alloc_properties().request_allocation(Pack::n);
+ field.allocate_view();
+ m_iop_fields.insert({"divT3d", field});
+ m_iop_field_type.insert({"divT3d", IOPFieldType::Computed});
+ }
+ if (has_iop_field("vertdivq")) {
+ FieldIdentifier fid("divq3d", fl_vector, ekat::units::Units::nondimensional(), "");
+ Field field(fid);
+ field.get_header().get_alloc_properties().request_allocation(Pack::n);
+ field.allocate_view();
+ m_iop_fields.insert({"divq3d", field});
+ m_iop_field_type.insert({"divq3d", IOPFieldType::Computed});
+ }
+
+ // Enforce that 3D forcing is all-or-nothing.
+ const bool both = (has_iop_field("divT3d") and has_iop_field("divq3d"));
+ const bool neither = (not (has_iop_field("divT3d") or has_iop_field("divq3d")));
+ EKAT_REQUIRE_MSG(both or neither,
+ "Error! Either T and q both have 3d forcing, or neither have 3d forcing.\n");
+ m_params.set("use_3d_forcing", both);
// Initialize time information
int bdate;
@@ -286,6 +323,7 @@ initialize_iop_file(const util::TimeStamp& run_t0,
ekat::units::Units::nondimensional(),
"");
Field iop_file_pressure(fid);
+ iop_file_pressure.get_header().get_alloc_properties().request_allocation(Pack::n);
iop_file_pressure.allocate_view();
   auto data = iop_file_pressure.get_view<Real*, Host>().data();
read_variable_from_file(iop_file, "lev", "real", {"lev"}, -1, data);
@@ -300,12 +338,9 @@ initialize_iop_file(const util::TimeStamp& run_t0,
fl_vector,
ekat::units::Units::nondimensional(), "");
Field model_pressure(model_pres_fid);
+ model_pressure.get_header().get_alloc_properties().request_allocation(Pack::n);
model_pressure.allocate_view();
m_helper_fields.insert({"model_pressure", model_pressure});
-
- // Store hyam and hybm in helper fields
- m_helper_fields.insert({"hyam", hyam});
- m_helper_fields.insert({"hybm", hybm});
}
void IntensiveObservationPeriod::
@@ -400,7 +435,8 @@ read_fields_from_file_for_iop (const std::string& file_name,
const vos& field_names_nc,
const vos& field_names_eamxx,
const util::TimeStamp& initial_ts,
- const field_mgr_ptr field_mgr)
+ const field_mgr_ptr field_mgr,
+ const int time_index)
{
const auto dummy_units = ekat::units::Units::nondimensional();
@@ -456,7 +492,7 @@ read_fields_from_file_for_iop (const std::string& file_name,
// Read data from file
AtmosphereInput file_reader(file_name,io_grid,io_fields);
- file_reader.read_variables();
+ file_reader.read_variables(time_index);
file_reader.finalize();
// For each field, broadcast data from closest lat/lon column to all processors
@@ -502,27 +538,27 @@ read_fields_from_file_for_iop (const std::string& file_name,
void IntensiveObservationPeriod::
read_iop_file_data (const util::TimeStamp& current_ts)
{
-  const auto iop_file = m_params.get<std::string>("iop_file");
+  // Query whether we need to load data from the IOP file.
+  // If we are still in the same time interval as the previous
+  // read from the iop file, there is no need to reload data.
const auto iop_file_time_idx = m_time_info.get_iop_file_time_idx(current_ts);
-
- // Sanity check
EKAT_REQUIRE_MSG(iop_file_time_idx >= m_time_info.time_idx_of_current_data,
"Error! Attempting to read previous iop file data time index.\n");
-
- // If we are still in the time interval as the previous read from iop file,
- // there is no need to reload data. Return early
if (iop_file_time_idx == m_time_info.time_idx_of_current_data) return;
+  const auto iop_file = m_params.get<std::string>("iop_file");
const auto file_levs = scorpio::get_dimlen(iop_file, "lev");
const auto iop_file_pressure = m_helper_fields["iop_file_pressure"];
const auto model_pressure = m_helper_fields["model_pressure"];
const auto surface_pressure = m_iop_fields["Ps"];
- // Loop through iop fields, if rank 1 fields exist we need to
- // gather information for vertically interpolating views
+ // Loop through iop fields, if any rank 1 fields are loaded from file,
+ // we need to gather information for vertical interpolation
bool has_level_data = false;
for (auto& it : m_iop_fields) {
- if (it.second.rank() == 1) {
+ if (it.second.rank() == 1
+ and
+ m_iop_field_type.at(it.first)==IOPFieldType::FromFile) {
has_level_data = true;
break;
}
@@ -589,6 +625,10 @@ read_iop_file_data (const util::TimeStamp& current_ts)
   Kokkos::Max<int>(iop_file_start),
   Kokkos::Min<int>(iop_file_end));
+  // If no file pressures are found outside the reference pressure range, set to file level endpoints
+  if (iop_file_start == Kokkos::reduction_identity<int>::max()) iop_file_start = 0;
+  if (iop_file_end == Kokkos::reduction_identity<int>::min()) iop_file_end = adjusted_file_levs;
+
// Find model pressure levels just inside range of file pressure levels
Kokkos::parallel_reduce(model_nlevs, KOKKOS_LAMBDA (const int& ilev, int& lmin, int& lmax) {
if (model_pres_v(ilev) >= iop_file_pres_v(iop_file_start) && ilev < lmin) {
@@ -600,6 +640,10 @@ read_iop_file_data (const util::TimeStamp& current_ts)
},
   Kokkos::Min<int>(model_start),
   Kokkos::Max<int>(model_end));
+
+  // If no reference pressures are found inside the file pressures, set to model level endpoints
+  if (model_start == Kokkos::reduction_identity<int>::min()) model_start = model_nlevs-1;
+  if (model_end == Kokkos::reduction_identity<int>::max()) model_end = 1;
}
// Loop through fields and store data from file
@@ -607,6 +651,9 @@ read_iop_file_data (const util::TimeStamp& current_ts)
auto fname = it.first;
auto field = it.second;
+ // If this is a computed field, do not attempt to load from file
+ if (m_iop_field_type.at(fname)==IOPFieldType::Computed) continue;
+
// File may use different varname than IOP class
auto file_varname = (m_iop_file_varnames.count(fname) > 0) ? m_iop_file_varnames[fname] : fname;
@@ -625,6 +672,7 @@ read_iop_file_data (const util::TimeStamp& current_ts)
ekat::units::Units::nondimensional(),
"");
Field iop_file_field(fid);
+ iop_file_field.get_header().get_alloc_properties().request_allocation(Pack::n);
iop_file_field.allocate_view();
// Read data from iop file.
@@ -657,8 +705,8 @@ read_iop_file_data (const util::TimeStamp& current_ts)
iop_file_field.sync_to_dev();
// Vertically interpolate iop file data to iop fields.
- // Note: ekat lininterp requires packs. Use 1d packs here.
- // TODO: allow for nontrivial packsize.
+ // Note: ekat lininterp requires packs. Use 1d packs here
+ // to easily mask out levels which we do not want to interpolate.
   const auto iop_file_pres_v = iop_file_pressure.get_view<Pack1d*>();
   const auto model_pres_v = model_pressure.get_view<Pack1d*>();
   const auto iop_file_v = iop_file_field.get_view<Pack1d*>();
@@ -685,22 +733,42 @@ read_iop_file_data (const util::TimeStamp& current_ts)
// the interpolated region with the value at model_start/model_end
if (fname == "T" || fname == "q" || fname == "u" ||
fname == "u_ls" || fname == "v" || fname == "v_ls") {
- if (model_start > 0) {
- Kokkos::parallel_for(Kokkos::RangePolicy<>(0, model_start),
- KOKKOS_LAMBDA (const int ilev) {
- iop_field_v(ilev) = iop_field_v(model_start);
- });
- }
- if (model_end < total_nlevs) {
- Kokkos::parallel_for(Kokkos::RangePolicy<>(model_end, total_nlevs),
- KOKKOS_LAMBDA (const int ilev) {
- iop_field_v(ilev) = iop_field_v(model_end-1);
- });
- }
+ Kokkos::parallel_for(Kokkos::RangePolicy<>(0, model_start+1),
+ KOKKOS_LAMBDA (const int ilev) {
+ iop_field_v(ilev) = iop_file_v(0);
+ });
+ Kokkos::parallel_for(Kokkos::RangePolicy<>(model_end-1, total_nlevs),
+ KOKKOS_LAMBDA (const int ilev) {
+ iop_field_v(ilev) = iop_file_v(adjusted_file_levs-1);
+ });
}
}
}
+ // Calculate 3d forcing (if applicable).
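+  // The 3d forcing is the sum of the horizontal forcing (divT/divq) and the vertical forcing (vertdivT/vertdivq) read from file.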
+ if (has_iop_field("divT3d")) {
+ if (m_iop_field_type.at("divT3d")==IOPFieldType::Computed) {
+      const auto divT = get_iop_field("divT").get_view<Real*>();
+      const auto vertdivT = get_iop_field("vertdivT").get_view<Real*>();
+      const auto divT3d = get_iop_field("divT3d").get_view<Real*>();
+ const auto nlevs = get_iop_field("divT3d").get_header().get_identifier().get_layout().dim(0);
+ Kokkos::parallel_for(nlevs, KOKKOS_LAMBDA (const int ilev) {
+ divT3d(ilev) = divT(ilev) + vertdivT(ilev);
+ });
+ }
+ }
+ if (has_iop_field("divq3d")) {
+ if (m_iop_field_type.at("divq3d")==IOPFieldType::Computed) {
+      const auto divq = get_iop_field("divq").get_view<Real*>();
+      const auto vertdivq = get_iop_field("vertdivq").get_view<Real*>();
+      const auto divq3d = get_iop_field("divq3d").get_view<Real*>();
+ const auto nlevs = get_iop_field("divq3d").get_header().get_identifier().get_layout().dim(0);
+ Kokkos::parallel_for(nlevs, KOKKOS_LAMBDA (const int ilev) {
+ divq3d(ilev) = divq(ilev) + vertdivq(ilev);
+ });
+ }
+ }
+
// Now that data is loaded, reset the index of the currently loaded data.
m_time_info.time_idx_of_current_data = iop_file_time_idx;
}
diff --git a/components/eamxx/src/control/intensive_observation_period.hpp b/components/eamxx/src/control/intensive_observation_period.hpp
index 6e70f44a0f56..6c0b3640ffb2 100644
--- a/components/eamxx/src/control/intensive_observation_period.hpp
+++ b/components/eamxx/src/control/intensive_observation_period.hpp
@@ -26,6 +26,8 @@ class IntensiveObservationPeriod
   using KT = ekat::KokkosTypes<DefaultDevice>;
   using ESU = ekat::ExeSpaceUtils<KT::ExeSpace>;
+  using Pack = ekat::Pack<Real, SCREAM_PACK_SIZE>;
+  using Pack1d = ekat::Pack<Real, 1>;
   template <typename T>
   using view_1d = KT::template view_1d<T>;
@@ -35,14 +37,13 @@ class IntensiveObservationPeriod
   using view_3d = KT::template view_3d<T>;
   template <typename T>
   using view_1d_host = typename view_1d<T>::HostMirror;
-  using Pack1d = ekat::Pack<Real, 1>;
public:
// Constructor
// Input:
// - comm: MPI communicator
- // - params: Input yaml file needs intensive_observation_period_options sublist
+ // - params: Input yaml file needs iop_options sublist
// - run_t0: Initial timestamp for the simulation
// - model_nlevs: Number of vertical levels in the simulation. Needed since
// the iop file contains a (potentially) different number of levels
@@ -69,31 +70,35 @@ class IntensiveObservationPeriod
void setup_io_info (const std::string& file_name,
const grid_ptr& grid);
- // Read ICs from file for IOP cases. We set all columns in the
- // given fields to the values of the column in the file with the
- // closest lat,lon pair to the target lat,lon in the parameters.
- // The setup_io_info must be called for the correct grids before
- // this function can be called.
- // Input:
+ // Read ICs and SPA data from file and remap to fields in field_mgr.
+ // The remap is defined by setting all columns in the given fields to the
+ // values of the column in the file with the closest lat,lon pair to
+ // the target lat,lon in the parameters.
+ // The function setup_io_info() must be called for the grids corresponding
+ // to the file data before this function can be called.
+ // Fields in the field_mgr must have the same number of levels as the file.
+ // Inputs and outputs:
// - file_name: Name of the file used to load field data (IC or topo file)
// - field_names_nc: Field names used by the input file
// - field_names_eamxx: Field names used by eamxx
- // - initial_ts: Inital timestamp
- // Input/output
+  //  - initial_ts: Initial timestamp.
   //  - field_mgr: Field manager containing fields that need data read from files
+  //  - time_index: Time index to read. time_index=-1 will read the latest time in the file.
void read_fields_from_file_for_iop(const std::string& file_name,
const vos& field_names_nc,
const vos& field_names_eamxx,
const util::TimeStamp& initial_ts,
- const field_mgr_ptr field_mgr);
+ const field_mgr_ptr field_mgr,
+ const int time_index = -1);
// Version of above, but where nc and eamxx field names are identical
void read_fields_from_file_for_iop(const std::string& file_name,
const vos& field_names,
const util::TimeStamp& initial_ts,
- const field_mgr_ptr field_mgr)
+ const field_mgr_ptr field_mgr,
+ const int time_index = -1)
{
- read_fields_from_file_for_iop(file_name, field_names, field_names, initial_ts, field_mgr);
+ read_fields_from_file_for_iop(file_name, field_names, field_names, initial_ts, field_mgr, time_index);
}
// Set fields using data loaded from the iop file
@@ -175,10 +180,13 @@ class IntensiveObservationPeriod
}
};
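+  // Whether an IOP field is read directly from the iop file or computed from other iop fields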
+ enum IOPFieldType {
+ FromFile,
+ Computed
+ };
+
void initialize_iop_file(const util::TimeStamp& run_t0,
- int model_nlevs,
- const Field& hyam,
- const Field& hybm);
+ int model_nlevs);
ekat::Comm m_comm;
ekat::ParameterList m_params;
@@ -195,6 +203,7 @@ class IntensiveObservationPeriod
   std::map<std::string, std::string> m_iop_file_varnames;
   std::map<std::string, std::string> m_iop_field_surface_varnames;
+  std::map<std::string, IOPFieldType> m_iop_field_type;
}; // class IntensiveObservationPeriod
} // namespace control
diff --git a/components/eamxx/src/control/tests/CMakeLists.txt b/components/eamxx/src/control/tests/CMakeLists.txt
index 43aa5cebab1b..401c0f576268 100644
--- a/components/eamxx/src/control/tests/CMakeLists.txt
+++ b/components/eamxx/src/control/tests/CMakeLists.txt
@@ -1,5 +1,5 @@
# NOTE: if you have baseline-type tests, add the subdirectory OUTSIDE the following if statement
-if (NOT ${SCREAM_BASELINES_ONLY})
+if (NOT ${SCREAM_ONLY_GENERATE_BASELINES})
include (ScreamUtils)
# Unit test the ad
diff --git a/components/eamxx/src/diagnostics/CMakeLists.txt b/components/eamxx/src/diagnostics/CMakeLists.txt
index 5818c4d836a8..f34d5b99638f 100644
--- a/components/eamxx/src/diagnostics/CMakeLists.txt
+++ b/components/eamxx/src/diagnostics/CMakeLists.txt
@@ -16,6 +16,7 @@ set(DIAGNOSTIC_SRCS
vertical_layer.cpp
virtual_temperature.cpp
water_path.cpp
+ wind_speed.cpp
)
add_library(diagnostics ${DIAGNOSTIC_SRCS})
diff --git a/components/eamxx/src/diagnostics/exner.hpp b/components/eamxx/src/diagnostics/exner.hpp
index 5e029b716bf0..dead0a5cbf64 100644
--- a/components/eamxx/src/diagnostics/exner.hpp
+++ b/components/eamxx/src/diagnostics/exner.hpp
@@ -16,9 +16,6 @@ class ExnerDiagnostic : public AtmosphereDiagnostic
// Constructors
ExnerDiagnostic (const ekat::Comm& comm, const ekat::ParameterList& params);
- // Set type to diagnostic
- AtmosphereProcessType type () const { return AtmosphereProcessType::Diagnostic; }
-
// The name of the diagnostic
std::string name () const { return "Exner"; }
diff --git a/components/eamxx/src/diagnostics/field_at_height.cpp b/components/eamxx/src/diagnostics/field_at_height.cpp
index 7759dfe5811d..38ae5e3e5503 100644
--- a/components/eamxx/src/diagnostics/field_at_height.cpp
+++ b/components/eamxx/src/diagnostics/field_at_height.cpp
@@ -38,6 +38,13 @@ FieldAtHeight (const ekat::Comm& comm, const ekat::ParameterList& params)
: AtmosphereDiagnostic(comm,params)
{
   m_field_name = m_params.get<std::string>("field_name");
+  auto surf_ref = m_params.get<std::string>("surface_reference");
+ EKAT_REQUIRE_MSG(surf_ref == "sealevel" or surf_ref == "surface",
+ "Error! Invalid surface reference for FieldAtHeight.\n"
+ " - field name: " + m_field_name + "\n"
+ " - surface reference: " + surf_ref + "\n"
+ " - valid options: sealevel, surface\n");
+ m_z_name = (surf_ref == "sealevel") ? "z" : "geopotential";
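+  // "z" measures height above sealevel, while "geopotential" measures height above the local surface.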
const auto& location = m_params.get("vertical_location");
auto chars_start = location.find_first_not_of("0123456789.");
EKAT_REQUIRE_MSG (chars_start!=0 && chars_start!=std::string::npos,
@@ -52,7 +59,7 @@ FieldAtHeight (const ekat::Comm& comm, const ekat::ParameterList& params)
"Error! Invalid string for height value for FieldAtHeight.\n"
" - input string : " + location + "\n"
" - expected format: Nm, with N integer\n");
-  m_diag_name = m_field_name + "_at_" + m_params.get<std::string>("vertical_location");
+  m_diag_name = m_field_name + "_at_" + m_params.get<std::string>("vertical_location") + "_above_" + surf_ref;
}
void FieldAtHeight::
@@ -62,8 +69,8 @@ set_grids (const std::shared_ptr<const GridsManager> grids_manager)
   add_field<Required>(m_field_name,gname);
   // We don't know yet which one we need
-  add_field<Required>("z_mid",gname);
-  add_field<Required>("z_int",gname);
+  add_field<Required>(m_z_name+"_mid",gname);
+  add_field<Required>(m_z_name+"_int",gname);
}
void FieldAtHeight::
@@ -90,7 +97,7 @@ initialize_impl (const RunType /*run_type*/)
" - field layout: " + to_string(layout) + "\n");
// Figure out the z value
- m_z_name = tag==LEV ? "z_mid" : "z_int";
+ m_z_suffix = tag==LEV ? "_mid" : "_int";
// All good, create the diag output
FieldIdentifier d_fid (m_diag_name,layout.strip_dim(tag),fid.get_units(),fid.get_grid_name());
@@ -111,7 +118,7 @@ initialize_impl (const RunType /*run_type*/)
// =========================================================================================
void FieldAtHeight::compute_diagnostic_impl()
{
-  const auto z_view = get_field_in(m_z_name).get_view<const Real**>();
+  const auto z_view = get_field_in(m_z_name + m_z_suffix).get_view<const Real**>();
const Field& f = get_field_in(m_field_name);
const auto& fl = f.get_header().get_identifier().get_layout();
diff --git a/components/eamxx/src/diagnostics/field_at_height.hpp b/components/eamxx/src/diagnostics/field_at_height.hpp
index 54843b4f1a0e..e6198153f940 100644
--- a/components/eamxx/src/diagnostics/field_at_height.hpp
+++ b/components/eamxx/src/diagnostics/field_at_height.hpp
@@ -33,6 +33,7 @@ class FieldAtHeight : public AtmosphereDiagnostic
std::string m_diag_name;
std::string m_z_name;
+ std::string m_z_suffix;
std::string m_field_name;
Real m_z;
diff --git a/components/eamxx/src/diagnostics/longwave_cloud_forcing.cpp b/components/eamxx/src/diagnostics/longwave_cloud_forcing.cpp
index 23ef59891e9f..4e44dff9dc76 100644
--- a/components/eamxx/src/diagnostics/longwave_cloud_forcing.cpp
+++ b/components/eamxx/src/diagnostics/longwave_cloud_forcing.cpp
@@ -19,6 +19,8 @@ void LongwaveCloudForcingDiagnostic::set_grids(const std::shared_ptr<const GridsManager> grids_manager)
+  auto radflux_units = W/(m2);
+  radflux_units.set_string("W/m2");
   auto grid = grids_manager->get_grid("Physics");
   const auto& grid_name = grid->name();
@@ -33,7 +35,7 @@ void LongwaveCloudForcingDiagnostic::set_grids(const std::shared_ptr<const GridsManager> grids_manager)
   add_field<Required>("LW_clrsky_flux_up", scalar3d_layout_mid, W/m2, grid_name);
// Construct and allocate the diagnostic field
- FieldIdentifier fid (name(), scalar2d_layout_col, W/m2, grid_name);
+ FieldIdentifier fid (name(), scalar2d_layout_col, radflux_units, grid_name);
m_diagnostic_output = Field(fid);
auto& C_ap = m_diagnostic_output.get_header().get_alloc_properties();
C_ap.request_allocation();
diff --git a/components/eamxx/src/diagnostics/longwave_cloud_forcing.hpp b/components/eamxx/src/diagnostics/longwave_cloud_forcing.hpp
index 9703330ce0cb..efe0e7c04760 100644
--- a/components/eamxx/src/diagnostics/longwave_cloud_forcing.hpp
+++ b/components/eamxx/src/diagnostics/longwave_cloud_forcing.hpp
@@ -16,9 +16,6 @@ class LongwaveCloudForcingDiagnostic : public AtmosphereDiagnostic
// Constructors
LongwaveCloudForcingDiagnostic (const ekat::Comm& comm, const ekat::ParameterList& params);
- // Set type to diagnostic
- AtmosphereProcessType type () const { return AtmosphereProcessType::Diagnostic; }
-
// The name of the diagnostic
std::string name () const { return "LongwaveCloudForcing"; }
diff --git a/components/eamxx/src/diagnostics/potential_temperature.hpp b/components/eamxx/src/diagnostics/potential_temperature.hpp
index 1e0af49fb597..933a6935005d 100644
--- a/components/eamxx/src/diagnostics/potential_temperature.hpp
+++ b/components/eamxx/src/diagnostics/potential_temperature.hpp
@@ -20,9 +20,6 @@ class PotentialTemperatureDiagnostic : public AtmosphereDiagnostic
// Constructors
PotentialTemperatureDiagnostic (const ekat::Comm& comm, const ekat::ParameterList& params);
- // Set type to diagnostic
- AtmosphereProcessType type () const { return AtmosphereProcessType::Diagnostic; }
-
// The name of the diagnostic
std::string name () const { return "PotentialTemperature"; }
diff --git a/components/eamxx/src/diagnostics/register_diagnostics.hpp b/components/eamxx/src/diagnostics/register_diagnostics.hpp
index 181a531a7c87..1f0c3bd63e3f 100644
--- a/components/eamxx/src/diagnostics/register_diagnostics.hpp
+++ b/components/eamxx/src/diagnostics/register_diagnostics.hpp
@@ -19,6 +19,7 @@
#include "diagnostics/field_at_pressure_level.hpp"
#include "diagnostics/precip_surf_mass_flux.hpp"
#include "diagnostics/surf_upward_latent_heat_flux.hpp"
+#include "diagnostics/wind_speed.hpp"
namespace scream {
@@ -45,6 +46,7 @@ inline void register_diagnostics () {
diag_factory.register_product("VaporFlux",&create_atmosphere_diagnostic);
diag_factory.register_product("precip_surf_mass_flux",&create_atmosphere_diagnostic);
diag_factory.register_product("surface_upward_latent_heat_flux",&create_atmosphere_diagnostic);
+ diag_factory.register_product("wind_speed",&create_atmosphere_diagnostic);
}
} // namespace scream
diff --git a/components/eamxx/src/diagnostics/sea_level_pressure.hpp b/components/eamxx/src/diagnostics/sea_level_pressure.hpp
index 8c522a2e75fe..0ae793658cd1 100644
--- a/components/eamxx/src/diagnostics/sea_level_pressure.hpp
+++ b/components/eamxx/src/diagnostics/sea_level_pressure.hpp
@@ -25,9 +25,6 @@ class SeaLevelPressureDiagnostic : public AtmosphereDiagnostic
// Constructors
SeaLevelPressureDiagnostic (const ekat::Comm& comm, const ekat::ParameterList& params);
- // Set type to diagnostic
- AtmosphereProcessType type () const { return AtmosphereProcessType::Diagnostic; }
-
// The name of the diagnostic
std::string name () const { return "SeaLevelPressure"; }
diff --git a/components/eamxx/src/diagnostics/shortwave_cloud_forcing.cpp b/components/eamxx/src/diagnostics/shortwave_cloud_forcing.cpp
index e0e815549c8a..9ca5ea4a8a14 100644
--- a/components/eamxx/src/diagnostics/shortwave_cloud_forcing.cpp
+++ b/components/eamxx/src/diagnostics/shortwave_cloud_forcing.cpp
@@ -17,7 +17,8 @@ void ShortwaveCloudForcingDiagnostic::set_grids(const std::shared_ptr<const GridsManager> grids_manager)
+  auto radflux_units = W/(m2);
+  radflux_units.set_string("W/m2");
   auto grid = grids_manager->get_grid("Physics");
   const auto& grid_name = grid->name();
@@ -28,13 +29,13 @@ void ShortwaveCloudForcingDiagnostic::set_grids(const std::shared_ptr<const GridsManager> grids_manager)
-  add_field<Required>("SW_flux_dn", scalar3d_layout_mid, W/m2, grid_name);
-  add_field<Required>("SW_flux_up", scalar3d_layout_mid, W/m2, grid_name);
-  add_field<Required>("SW_clrsky_flux_dn", scalar3d_layout_mid, W/m2, grid_name);
-  add_field<Required>("SW_clrsky_flux_up", scalar3d_layout_mid, W/m2, grid_name);
+  add_field<Required>("SW_flux_dn", scalar3d_layout_mid, radflux_units, grid_name);
+  add_field<Required>("SW_flux_up", scalar3d_layout_mid, radflux_units, grid_name);
+  add_field<Required>("SW_clrsky_flux_dn", scalar3d_layout_mid, radflux_units, grid_name);
+  add_field<Required>("SW_clrsky_flux_up", scalar3d_layout_mid, radflux_units, grid_name);
// Construct and allocate the diagnostic field
- FieldIdentifier fid (name(), scalar2d_layout_col, W/m2, grid_name);
+ FieldIdentifier fid (name(), scalar2d_layout_col, radflux_units, grid_name);
m_diagnostic_output = Field(fid);
auto& C_ap = m_diagnostic_output.get_header().get_alloc_properties();
C_ap.request_allocation();
diff --git a/components/eamxx/src/diagnostics/shortwave_cloud_forcing.hpp b/components/eamxx/src/diagnostics/shortwave_cloud_forcing.hpp
index 9d676338a765..421d06d3fe07 100644
--- a/components/eamxx/src/diagnostics/shortwave_cloud_forcing.hpp
+++ b/components/eamxx/src/diagnostics/shortwave_cloud_forcing.hpp
@@ -16,9 +16,6 @@ class ShortwaveCloudForcingDiagnostic : public AtmosphereDiagnostic
// Constructors
ShortwaveCloudForcingDiagnostic (const ekat::Comm& comm, const ekat::ParameterList& params);
- // Set type to diagnostic
- AtmosphereProcessType type () const { return AtmosphereProcessType::Diagnostic; }
-
// The name of the diagnostic
std::string name () const { return "ShortwaveCloudForcing"; }
diff --git a/components/eamxx/src/diagnostics/surf_upward_latent_heat_flux.cpp b/components/eamxx/src/diagnostics/surf_upward_latent_heat_flux.cpp
index 19a49ad595c5..009f8e6dd77f 100644
--- a/components/eamxx/src/diagnostics/surf_upward_latent_heat_flux.cpp
+++ b/components/eamxx/src/diagnostics/surf_upward_latent_heat_flux.cpp
@@ -22,6 +22,9 @@ set_grids (const std::shared_ptr<const GridsManager> grids_manager)
{
const auto m2 = ekat::units::m * ekat::units::m;
const auto W = ekat::units::W;
+ auto radflux_units = W/(m2);
+ radflux_units.set_string("W/m2");
+
const auto surf_evap_units = ekat::units::kg / m2 / ekat::units::s;
auto grid = grids_manager->get_grid("Physics");
@@ -35,7 +38,7 @@ set_grids (const std::shared_ptr<const GridsManager> grids_manager)
   add_field<Required>("surf_evap", scalar2d_layout_mid, surf_evap_units, grid_name);
// Construct and allocate the diagnostic field
- FieldIdentifier fid(name(), scalar2d_layout_mid, W/m2, grid_name);
+ FieldIdentifier fid(name(), scalar2d_layout_mid, radflux_units, grid_name);
// handle parent class member variables
m_diagnostic_output = Field(fid);
m_diagnostic_output.get_header().get_alloc_properties().request_allocation();
diff --git a/components/eamxx/src/diagnostics/tests/CMakeLists.txt b/components/eamxx/src/diagnostics/tests/CMakeLists.txt
index 0882d3f6119d..d413b013f444 100644
--- a/components/eamxx/src/diagnostics/tests/CMakeLists.txt
+++ b/components/eamxx/src/diagnostics/tests/CMakeLists.txt
@@ -6,7 +6,7 @@ function (createDiagTest test_name test_srcs)
LABELS diagnostics)
endfunction ()
-if (NOT SCREAM_BASELINES_ONLY)
+if (NOT SCREAM_ONLY_GENERATE_BASELINES)
include(ScreamUtils)
# Test extracting a single level of a field
@@ -59,4 +59,6 @@ if (NOT SCREAM_BASELINES_ONLY)
# Test surface latent heat flux
CreateDiagTest(surface_upward_latent_heat_flux "surf_upward_latent_heat_flux_tests.cpp")
+ # Test wind speed diagnostic
+ CreateDiagTest(wind_speed "wind_speed_tests.cpp")
endif()
diff --git a/components/eamxx/src/diagnostics/tests/field_at_height_tests.cpp b/components/eamxx/src/diagnostics/tests/field_at_height_tests.cpp
index 0d45eb62e879..57a057116102 100644
--- a/components/eamxx/src/diagnostics/tests/field_at_height_tests.cpp
+++ b/components/eamxx/src/diagnostics/tests/field_at_height_tests.cpp
@@ -9,6 +9,10 @@
namespace scream {
+void f_z_src(const Real y0, const Real m, const Field& z_data, Field& out_data);
+void f_z_tgt(const Real y0, const Real m, const Real z_target, const Field& z_data, Field& out_data);
+bool views_are_approx_equal(const Field& f0, const Field& f1, const Real tol, const bool msg = true);
+
TEST_CASE("field_at_height")
{
using namespace ShortFieldTagsNames;
@@ -18,11 +22,12 @@ TEST_CASE("field_at_height")
// Get an MPI comm group for test
ekat::Comm comm(MPI_COMM_WORLD);
- constexpr int nruns = 10;
+ constexpr int nruns = 100;
util::TimeStamp t0 ({2022,1,1},{0,0,0});
// Create a grids manager w/ a point grid
+  constexpr Real tol = std::numeric_limits<Real>::epsilon()*1e5;
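+  // (tolerance is well above machine epsilon, since the linear-interpolation arithmetic accumulates roundoff)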
int ncols = 3;
int ndims = 4;
int nlevs = 10;
@@ -31,35 +36,57 @@ TEST_CASE("field_at_height")
gm->build_grids();
auto grid = gm->get_grid("Point Grid");
- // Create input test fields, as well as z_mid/int fields
const auto m = ekat::units::m;
+ // Create input data test fields
FieldIdentifier s_mid_fid ("s_mid",FieldLayout({COL, LEV},{ncols, nlevs }),m,grid->name());
FieldIdentifier s_int_fid ("s_int",FieldLayout({COL, ILEV},{ncols, nlevs+1}),m,grid->name());
FieldIdentifier v_mid_fid ("v_mid",FieldLayout({COL,CMP, LEV},{ncols,ndims,nlevs }),m,grid->name());
FieldIdentifier v_int_fid ("v_int",FieldLayout({COL,CMP,ILEV},{ncols,ndims,nlevs+1}),m,grid->name());
- FieldIdentifier z_mid_fid ("z_mid",FieldLayout({COL, LEV},{ncols, nlevs }),m,grid->name());
- FieldIdentifier z_int_fid ("z_int",FieldLayout({COL, ILEV},{ncols, nlevs+1}),m,grid->name());
+ // Create vertical fields z and geo on both midpoints and interfaces
+ FieldIdentifier z_surf_fid ("z_surf", FieldLayout({COL },{ncols }),m,grid->name());
+ FieldIdentifier z_mid_fid ("z_mid", FieldLayout({COL, LEV},{ncols, nlevs }),m,grid->name());
+ FieldIdentifier z_int_fid ("z_int", FieldLayout({COL, ILEV},{ncols, nlevs+1}),m,grid->name());
+ FieldIdentifier geo_mid_fid ("geopotential_mid",FieldLayout({COL, LEV},{ncols, nlevs }),m,grid->name());
+ FieldIdentifier geo_int_fid ("geopotential_int",FieldLayout({COL, ILEV},{ncols, nlevs+1}),m,grid->name());
+ // Keep track of reference fields for comparison
+ FieldIdentifier s_tgt_fid ("scalar_target",FieldLayout({COL },{ncols }),m,grid->name());
+ FieldIdentifier v_tgt_fid ("vector_target",FieldLayout({COL,CMP},{ncols,ndims}),m,grid->name());
- Field s_mid (s_mid_fid);
- Field s_int (s_int_fid);
- Field v_mid (v_mid_fid);
- Field v_int (v_int_fid);
- Field z_mid (z_mid_fid);
- Field z_int (z_int_fid);
+ Field s_mid (s_mid_fid);
+ Field s_int (s_int_fid);
+ Field v_mid (v_mid_fid);
+ Field v_int (v_int_fid);
+ Field z_surf (z_surf_fid);
+ Field z_mid (z_mid_fid);
+ Field z_int (z_int_fid);
+ Field geo_mid (geo_mid_fid);
+ Field geo_int (geo_int_fid);
+ Field s_tgt (s_tgt_fid);
+ Field v_tgt (v_tgt_fid);
s_mid.allocate_view();
s_int.allocate_view();
v_mid.allocate_view();
v_int.allocate_view();
+ z_surf.allocate_view();
z_mid.allocate_view();
z_int.allocate_view();
+ geo_mid.allocate_view();
+ geo_int.allocate_view();
+ s_tgt.allocate_view();
+ v_tgt.allocate_view();
s_mid.get_header().get_tracking().update_time_stamp(t0);
s_int.get_header().get_tracking().update_time_stamp(t0);
v_mid.get_header().get_tracking().update_time_stamp(t0);
v_int.get_header().get_tracking().update_time_stamp(t0);
+ z_surf.get_header().get_tracking().update_time_stamp(t0);
z_mid.get_header().get_tracking().update_time_stamp(t0);
z_int.get_header().get_tracking().update_time_stamp(t0);
+ geo_mid.get_header().get_tracking().update_time_stamp(t0);
+ geo_int.get_header().get_tracking().update_time_stamp(t0);
+ s_tgt.get_header().get_tracking().update_time_stamp(t0);
+ v_tgt.get_header().get_tracking().update_time_stamp(t0);
auto print = [&](const std::string& msg) {
if (comm.am_i_root()) {
@@ -69,16 +96,19 @@ TEST_CASE("field_at_height")
auto engine = scream::setup_random_test(&comm);
   using IPDF = std::uniform_int_distribution<int>;
+  using RPDF = std::uniform_real_distribution<Real>;
IPDF pdf_fields (0,1000);
- IPDF pdf_levs (1,nlevs-1);
+ RPDF pdf_m (1,10);
+ RPDF pdf_y0 (0,5);
// Lambda to create and run a diag, and return output
auto run_diag = [&](const Field& f, const Field& z,
- const std::string& loc) {
+ const std::string& loc, const std::string& surf_ref) {
util::TimeStamp t0 ({2022,1,1},{0,0,0});
auto& factory = AtmosphereDiagnosticFactory::instance();
ekat::ParameterList pl;
+ pl.set("surface_reference",surf_ref);
pl.set("vertical_location",loc);
pl.set("field_name",f.name());
pl.set("grid_name",grid->name());
@@ -92,135 +122,231 @@ TEST_CASE("field_at_height")
return diag->get_diagnostic();
};
- // Create z(i,j)=nlevs-j, which makes testing easier
-  for (auto f : {z_mid, z_int}) {
-    auto v = f.get_view<Real**,Host>();
-    const auto& dims = f.get_header().get_identifier().get_layout().dims();
-    for (int i=0; i<dims[0]; ++i) {
-      for (int j=0; j<dims[1]; ++j) {
-        v(i,j) = nlevs-j;
-      }
-    }
-  }
+  const auto& zint_v   = z_int.get_view<Real**,Host>();
+  const auto& zmid_v   = z_mid.get_view<Real**,Host>();
+  const auto& zsurf_v  = z_surf.get_view<Real*,Host>();
+  const auto& geoint_v = geo_int.get_view<Real**,Host>();
+  const auto& geomid_v = geo_mid.get_view<Real**,Host>();
+  int min_col_thickness = z_top;
+  int max_surf = 0;
+  for (int ii=0; ii<ncols; ++ii) {
+    max_surf = zsurf_v(ii) > max_surf ? zsurf_v(ii) : max_surf;
+    const Real col_thickness = z_top - zsurf_v(ii);
+    min_col_thickness = col_thickness < min_col_thickness ? col_thickness : min_col_thickness;
+    const Real dz = (z_top - zsurf_v(ii))/nlevs;
+    zint_v(ii,0)   = z_top;
+    geoint_v(ii,0) = z_top - zsurf_v(ii); // Note, the distance above surface needs to consider the surface height.
+    for (int jj=0; jj<nlevs; ++jj) {
+      zint_v(ii,jj+1)   = zint_v(ii,jj) - dz;
+      zmid_v(ii,jj)     = 0.5*(zint_v(ii,jj) + zint_v(ii,jj+1));
+      geoint_v(ii,jj+1) = geoint_v(ii,jj) - dz;
+      geomid_v(ii,jj)   = 0.5*(geoint_v(ii,jj) + geoint_v(ii,jj+1));
+    }
+  }
+  z_mid.sync_to_dev();
+  z_int.sync_to_dev();
+  geo_mid.sync_to_dev();
+  geo_int.sync_to_dev();
+
+  print(" -> Testing throws error with unsupported reference height...\n");
+ {
+ REQUIRE_THROWS(run_diag (s_mid,geo_mid,"1m","foobar"));
+  }
+ print(" -> Testing throws error with unsupported reference height... OK\n");
// Run many times
- Real z_tgt,lev_tgt;
+ int z_tgt;
std::string loc;
-  for (int irun=0; irun<nruns; ++irun) {
-    // Randomize fields
-    for (auto f : {s_mid, s_int, v_mid, v_int}) {
-      auto v = f.get_internal_view_data<Real,Host>();
-      const auto& size = f.get_header().get_identifier().get_layout().size();
-      for (int i=0; i<size; ++i) {
-        v[i] = pdf_fields(engine);
-      }
-      f.sync_to_dev();
-    }
+  for (const std::string surf_ref : {"sealevel", "surface"}) {
+    printf(" -> Testing for a reference height above %s...\n",surf_ref.c_str());
+ const auto mid_src = surf_ref == "sealevel" ? z_mid : geo_mid;
+ const auto int_src = surf_ref == "sealevel" ? z_int : geo_int;
+ const int max_surf_4test = surf_ref == "sealevel" ? max_surf : 0;
+    for (int irun=0; irun<nruns; ++irun) {
+      // Generate a random linear profile for the input data
+      auto slope = pdf_m(engine);
+      auto inter = pdf_y0(engine);
+      f_z_src(inter, slope, mid_src, s_mid);
+      f_z_src(inter, slope, int_src, s_int);
+      f_z_src(inter, slope, mid_src, v_mid);
+      f_z_src(inter, slope, int_src, v_int);
-    print(" -> Testing with z_tgt coinciding with a z level\n");
- {
- print(" -> scalar midpoint field...............\n");
- auto d = run_diag (s_mid,z_mid,loc);
-      auto tgt = s_mid.subfield(1,static_cast<int>(lev_tgt));
- REQUIRE (views_are_equal(d,tgt,&comm));
- print(" -> scalar midpoint field............... OK!\n");
- }
- {
- print(" -> scalar interface field...............\n");
- auto d = run_diag (s_int,z_int,loc);
- // z_mid = nlevs+1-ilev, so the tgt slice is nlevs+1-z_tgt
-      auto tgt = s_int.subfield(1,static_cast<int>(lev_tgt));
- REQUIRE (views_are_equal(d,tgt,&comm));
- print(" -> scalar interface field............... OK!\n");
- }
- {
- print(" -> vector midpoint field...............\n");
- auto d = run_diag (v_mid,z_mid,loc);
- // We can't subview over 3rd index and keep layout right,
- // so do all cols separately
-      for (int i=0; i<ncols; ++i) {
-        auto di  = d.subfield(0,i);
-        auto tgt = v_mid.subfield(0,i).subfield(1,static_cast<int>(lev_tgt));
- REQUIRE (views_are_equal(di,tgt,&comm));
+ // Set target z-slice for testing to a random value.
+ z_tgt = pdf_levs(engine)+max_surf_4test;
+ loc = std::to_string(z_tgt) + "m";
+ printf(" -> test at height of %s.............\n",loc.c_str());
+ {
+ print(" -> scalar midpoint field...............\n");
+ auto d = run_diag(s_mid,mid_src,loc,surf_ref);
+ f_z_tgt(inter,slope,z_tgt,mid_src,s_tgt);
+ REQUIRE (views_are_approx_equal(d,s_tgt,tol));
+ print(" -> scalar midpoint field............... OK!\n");
+ }
+ {
+ print(" -> scalar interface field...............\n");
+ auto d = run_diag (s_int,int_src,loc,surf_ref);
+ f_z_tgt(inter,slope,z_tgt,int_src,s_tgt);
+ REQUIRE (views_are_approx_equal(d,s_tgt,tol));
+ print(" -> scalar interface field............... OK!\n");
+ }
+ {
+ print(" -> vector midpoint field...............\n");
+ auto d = run_diag (v_mid,mid_src,loc,surf_ref);
+ f_z_tgt(inter,slope,z_tgt,mid_src,v_tgt);
+ REQUIRE (views_are_approx_equal(d,v_tgt,tol));
+ print(" -> vector midpoint field............... OK!\n");
+ }
+ {
+ print(" -> vector interface field...............\n");
+ auto d = run_diag (v_int,int_src,loc,surf_ref);
+ f_z_tgt(inter,slope,z_tgt,int_src,v_tgt);
+ REQUIRE (views_are_approx_equal(d,v_tgt,tol));
+ print(" -> vector interface field............... OK!\n");
+ }
+ {
+ print(" -> Forced fail, give incorrect location...............\n");
+ const int z_tgt_adj = (z_tgt+max_surf_4test)/2;
+ std::string loc_err = std::to_string(z_tgt_adj) + "m";
+ auto d = run_diag(s_int,int_src,loc_err,surf_ref);
+ f_z_tgt(inter,slope,z_tgt,int_src,s_tgt);
+ REQUIRE (!views_are_approx_equal(d,s_tgt,tol,false));
+ print(" -> Forced fail, give incorrect location............... OK!\n");
}
- print(" -> vector midpoint field............... OK!\n");
}
{
- print(" -> vector interface field...............\n");
- auto d = run_diag (v_int,z_int,loc);
- // We can't subview over 3rd index and keep layout right,
- // so do all cols separately
-      for (int i=0; i<ncols; ++i) {
-        auto di  = d.subfield(0,i);
-        auto tgt = v_int.subfield(0,i).subfield(1,static_cast<int>(lev_tgt));
- REQUIRE (views_are_equal(di,tgt,&comm));
- }
- print(" -> vector interface field............... OK!\n");
+ print(" -> Forced extrapolation ...............\n");
+ auto slope = pdf_m(engine);
+ auto inter = pdf_y0(engine);
+ f_z_src(inter, slope, int_src, s_int);
+ print(" -> at top...............\n");
+ z_tgt = 2*z_top;
+ std::string loc = std::to_string(z_tgt) + "m";
+ auto dtop = run_diag(s_int,int_src,loc,surf_ref);
+ f_z_tgt(inter,slope,z_tgt,int_src,s_tgt);
+ REQUIRE (views_are_approx_equal(dtop,s_tgt,tol));
+ print(" -> at bot...............\n");
+ z_tgt = 0;
+ loc = std::to_string(z_tgt) + "m";
+ auto dbot = run_diag(s_int,int_src,loc,surf_ref);
+ f_z_tgt(inter,slope,z_tgt,int_src,s_tgt);
+ REQUIRE (views_are_approx_equal(dbot,s_tgt,tol));
+ print(" -> Forced extrapolation............... OK!\n");
}
+ printf(" -> Testing for a reference height above %s... OK!\n",surf_ref.c_str());
+ }
+}
- z_tgt = pdf_levs(engine) + 0.5;
- lev_tgt = nlevs-z_tgt;
- loc = std::to_string(z_tgt) + "m";
-
-    auto zp1 = static_cast<int>(std::round(lev_tgt+0.5));
-    auto zm1 = static_cast<int>(std::round(lev_tgt-0.5));
-
- print(" -> Testing with z_tgt between levels\n");
- {
- print(" -> scalar midpoint field...............\n");
- auto d = run_diag (s_mid,z_mid,loc);
- auto tgt = s_mid.subfield(1,zp1).clone();
- tgt.update(s_mid.subfield(1,zm1),0.5,0.5);
- REQUIRE (views_are_equal(d,tgt,&comm));
- print(" -> scalar midpoint field............... OK!\n");
+//-------------------------------
+// Set up the input data. To make the test simple we assume a linear distribution of the data
+// with height. That way we can exactly calculate what a linear interpolation to a random
+// height would be.
+void f_z_src(const Real y0, const Real m, const Field& z_data, Field& out_data) {
+ using namespace ShortFieldTagsNames;
+ const auto layout = out_data.get_header().get_identifier().get_layout();
+ if (layout.has_tag(CMP)) { // Is a vector layout, meaning different dims than z_data.
+ const auto& dims = layout.dims();
+ const auto& z_view = z_data.get_view();
+ const auto& out_view = out_data.get_view();
+ for (int ii=0; ii scalar interface field...............\n");
- auto d = run_diag (s_int,z_int,loc);
- auto tgt = s_int.subfield(1,zp1).clone();
- tgt.update(s_int.subfield(1,zm1),0.5,0.5);
- REQUIRE (views_are_equal(d,tgt,&comm));
- print(" -> scalar interface field............... OK!\n");
+ } else { // Not a vector output, easier to deal with
+    const auto z_view = z_data.get_internal_view_data<const Real,Host>();
+    const auto& size = z_data.get_header().get_identifier().get_layout().size();
+    auto out_view = out_data.get_internal_view_data<Real,Host>();
+    for (int ii=0; ii<size; ++ii) {
+      out_view[ii] = y0 + m*z_view[ii];
+    }
+  }
+  out_data.sync_to_dev();
+}
-    {
-      print(" -> vector midpoint field...............\n");
- auto d = run_diag (v_mid,z_mid,loc);
- // We can't subview over 3rd index and keep layout right,
- // so do all cols separately
-      for (int i=0; i<ncols; ++i) {
-        auto di  = d.subfield(0,i);
-        auto tgt = v_mid.subfield(0,i).subfield(1,zp1).clone();
-        tgt.update(v_mid.subfield(0,i).subfield(1,zm1),0.5,0.5);
-        REQUIRE (views_are_equal(di,tgt,&comm));
-      }
+/*-----------------------------------------------------------------------------------------------*/
+// Calculate the expected value at the target height from the same linear profile used by f_z_src.
+void f_z_tgt(const Real y0, const Real m, const Real z_target, const Field& z_data, Field& out_data) {
+  using namespace ShortFieldTagsNames;
+  const auto layout = out_data.get_header().get_identifier().get_layout();
+  const auto& z_view = z_data.get_view<const Real**,Host>();
+ const auto& zdims = z_data.get_header().get_identifier().get_layout().dims();
+ if (layout.has_tag(CMP)) { // Is a vector layout, meaning different dims than z_target.
+ const auto& dims = layout.dims();
+ const auto& out_view = out_data.get_view();
+    for (int ii=0; ii<dims[0]; ++ii) {
+      for (int nd=0; nd<dims[1]; ++nd) {
+        if (z_target > z_view(ii,0)) {
+ out_view(ii,nd) = y0 + m*(nd+1)*z_view(ii,0);
+ } else if ( z_target < z_view(ii,zdims[1]-1)) {
+ out_view(ii,nd) = y0 + m*(nd+1)*z_view(ii,zdims[1]-1);
+ } else {
+ out_view(ii,nd) = y0 + m*(nd+1)*z_target;
+ }
+      }
- print(" -> vector midpoint field............... OK!\n");
}
- {
- print(" -> vector interface field...............\n");
- auto d = run_diag (v_int,z_int,loc);
- // We can't subview over 3rd index and keep layout right,
- // so do all cols separately
-      for (int i=0; i<ncols; ++i) {
-        auto di  = d.subfield(0,i);
-        auto tgt = v_int.subfield(0,i).subfield(1,zp1).clone();
-        tgt.update(v_int.subfield(0,i).subfield(1,zm1),0.5,0.5);
-        REQUIRE (views_are_equal(di,tgt,&comm));
-      }
+    }
+  } else { // Not a vector output
+    const auto& out_view = out_data.get_view<Real*,Host>();
+    for (int ii=0; ii<layout.dims()[0]; ++ii) {
+      if (z_target > z_view(ii,0)) {
+ out_view(ii) = y0 + m*z_view(ii,0);
+ } else if ( z_target < z_view(ii,zdims[1]-1)) {
+ out_view(ii) = y0 + m*z_view(ii,zdims[1]-1);
+ } else {
+ out_view(ii) = y0 + m*z_target;
+      }
- print(" -> vector interface field............... OK!\n");
+    }
+  }
+ out_data.sync_to_dev();
+}
+/*-----------------------------------------------------------------------------------------------*/
+bool views_are_approx_equal(const Field& f0, const Field& f1, const Real tol, const bool msg)
+{
+ const auto& l0 = f0.get_header().get_identifier().get_layout();
+ const auto& l1 = f1.get_header().get_identifier().get_layout();
+ EKAT_REQUIRE_MSG(l0==l1,"Error! views_are_approx_equal - the two fields don't have matching layouts.");
+  // Use the field utils update, min, and max to easily assess the max difference between the two fields.
+ auto ft = f0.clone();
+ ft.update(f1,1.0,-1.0);
+  auto d_min = field_min<Real>(ft);
+  auto d_max = field_max<Real>(ft);
+ if (std::abs(d_min) > tol or std::abs(d_max) > tol) {
+ if (msg) {
+ printf("The two copies of (%16s) are NOT approx equal within a tolerance of %e.\n The min and max errors are %e and %e respectively.\n",f0.name().c_str(),tol,d_min,d_max);
+ }
+ return false;
+ } else {
+ return true;
+ }
+
}
} // namespace scream
diff --git a/components/eamxx/src/diagnostics/tests/vapor_flux_tests.cpp b/components/eamxx/src/diagnostics/tests/vapor_flux_tests.cpp
index aceb705a61c7..bcd7efc8bb87 100644
--- a/components/eamxx/src/diagnostics/tests/vapor_flux_tests.cpp
+++ b/components/eamxx/src/diagnostics/tests/vapor_flux_tests.cpp
@@ -83,7 +83,7 @@ void run(std::mt19937_64& engine)
REQUIRE_THROWS (diag_factory.create("VaporFlux",comm,params)); // No 'Wind Component'
params.set("Wind Component","foo");
REQUIRE_THROWS (diag_factory.create("VaporFlux",comm,params)); // Invalid 'Wind Component'
- for (const std::string& which_comp : {"Zonal", "Meridional"}) {
+ for (const std::string which_comp : {"Zonal", "Meridional"}) {
// Construct the Diagnostic
params.set("Wind Component",which_comp);
auto diag = diag_factory.create("VaporFlux",comm,params);
diff --git a/components/eamxx/src/diagnostics/tests/wind_speed_tests.cpp b/components/eamxx/src/diagnostics/tests/wind_speed_tests.cpp
new file mode 100644
index 000000000000..7e3affedacb2
--- /dev/null
+++ b/components/eamxx/src/diagnostics/tests/wind_speed_tests.cpp
@@ -0,0 +1,96 @@
+#include "catch2/catch.hpp"
+
+#include "diagnostics/register_diagnostics.hpp"
+#include "share/grid/mesh_free_grids_manager.hpp"
+#include "share/util/scream_setup_random_test.hpp"
+#include "share/field/field_utils.hpp"
+
+namespace scream {
+
+std::shared_ptr<GridsManager>
+create_gm (const ekat::Comm& comm, const int ncols, const int nlevs) {
+
+ const int num_global_cols = ncols*comm.size();
+
+  using vos_t = std::vector<std::string>;
+ ekat::ParameterList gm_params;
+ gm_params.set("grids_names",vos_t{"Point Grid"});
+ auto& pl = gm_params.sublist("Point Grid");
+ pl.set("type","point_grid");
+ pl.set("aliases",vos_t{"Physics"});
+ pl.set("number_of_global_columns", num_global_cols);
+ pl.set("number_of_vertical_levels", nlevs);
+
+ auto gm = create_mesh_free_grids_manager(comm,gm_params);
+ gm->build_grids();
+
+ return gm;
+}
+
+TEST_CASE("wind_speed")
+{
+ using namespace ShortFieldTagsNames;
+ using namespace ekat::units;
+
+ // A world comm
+ ekat::Comm comm(MPI_COMM_WORLD);
+
+ // A time stamp
+ util::TimeStamp t0 ({2022,1,1},{0,0,0});
+
+ // Create a grids manager - single column for these tests
+ constexpr int nlevs = 33;
+  const int ngcols = 2*comm.size();
+ auto gm = create_gm(comm,ngcols,nlevs);
+ auto grid = gm->get_grid("Physics");
+
+ // Input (randomized) velocity
+ auto vector3d = grid->get_3d_vector_layout(true,CMP,2);
+ FieldIdentifier uv_fid ("horiz_winds",vector3d,m/s,grid->name());
+ Field uv(uv_fid);
+ uv.allocate_view();
+ uv.get_header().get_tracking().update_time_stamp(t0);
+
+ // Construct random number generator stuff
+  using RPDF = std::uniform_real_distribution<Real>;
+ RPDF pdf(-1,1);
+ auto engine = scream::setup_random_test();
+
+ // Construct the Diagnostics
+  std::map<std::string,std::shared_ptr<AtmosphereDiagnostic>> diags;
+ auto& diag_factory = AtmosphereDiagnosticFactory::instance();
+ register_diagnostics();
+
+ constexpr int ntests = 5;
+  for (int itest=0; itest<ntests; ++itest) {
+    // Randomize the input winds
+    randomize(uv,engine,pdf);
+
+    // Create and set up the diagnostic
+    ekat::ParameterList params;
+    auto diag = diag_factory.create("wind_speed",comm,params);
+    diag->set_grids(gm);
+ diag->set_required_field(uv);
+ diag->initialize(t0,RunType::Initial);
+
+ // Run diag
+ diag->compute_diagnostic();
+
+ // Check result
+ uv.sync_to_host();
+ diag->get_diagnostic().sync_to_host();
+
+    auto uv_h = uv.get_view<const Real***,Host>();
+    auto ws_h = diag->get_diagnostic().get_view<const Real**,Host>();
+
+    for (int icol=0; icol<grid->get_num_local_dofs(); ++icol) {
+      for (int ilev=0; ilev<nlevs; ++ilev) {
+        const auto u = uv_h(icol,0,ilev);
+        const auto v = uv_h(icol,1,ilev);
+        REQUIRE (ws_h(icol,ilev) == sqrt(u*u + v*v));
+      }
+    }
+  }
+}
+
+} // namespace scream
diff --git a/components/eamxx/src/diagnostics/wind_speed.cpp b/components/eamxx/src/diagnostics/wind_speed.cpp
new file mode 100644
--- /dev/null
+++ b/components/eamxx/src/diagnostics/wind_speed.cpp
+#include "diagnostics/wind_speed.hpp"
+
+namespace scream
+{
+
+WindSpeed::
+WindSpeed (const ekat::Comm& comm, const ekat::ParameterList& params)
+ : AtmosphereDiagnostic(comm,params)
+{
+ // Nothing to do here
+}
+
+void WindSpeed::
+set_grids(const std::shared_ptr<const GridsManager> grids_manager)
+{
+ using namespace ekat::units;
+ using namespace ShortFieldTagsNames;
+
+ auto grid = grids_manager->get_grid("Physics");
+ const auto& grid_name = grid->name();
+
+ m_ncols = grid->get_num_local_dofs();
+ m_nlevs = grid->get_num_vertical_levels();
+
+ auto scalar3d = grid->get_3d_scalar_layout(true);
+ auto vector3d = grid->get_3d_vector_layout(true,CMP,2);
+
+ // The fields required for this diagnostic to be computed
+ add_field("horiz_winds", vector3d, Pa, grid_name);
+
+ // Construct and allocate the 3d wind_speed field
+ FieldIdentifier fid ("wind_speed", scalar3d, m/s, grid_name);
+ m_diagnostic_output = Field(fid);
+ m_diagnostic_output.allocate_view();
+}
+
+void WindSpeed::compute_diagnostic_impl()
+{
+  using KT = KokkosTypes<DefaultDevice>;
+ using RP = typename KT::RangePolicy;
+
+  const auto uv = get_field_in("horiz_winds").get_view<const Real***>();
+  const auto ws = m_diagnostic_output.get_view<Real**>();
+
+ const int nlevs = m_nlevs;
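+  // Local copy of the level count so the device lambda below does not capture `this`;
+  // the (column,level) loop is flattened into a single 1d range and unpacked per index.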
+ Kokkos::parallel_for("Compute " + name(), RP(0,m_nlevs*m_ncols),
+ KOKKOS_LAMBDA(const int& idx) {
+ const int icol = idx / nlevs;
+ const int ilev = idx % nlevs;
+ const auto& u = uv(icol,0,ilev);
+ const auto& v = uv(icol,1,ilev);
+ ws (icol,ilev) = sqrt(u*u + v*v);
+ });
+}
+
+} //namespace scream
diff --git a/components/eamxx/src/diagnostics/wind_speed.hpp b/components/eamxx/src/diagnostics/wind_speed.hpp
new file mode 100644
index 000000000000..91ac551a1b76
--- /dev/null
+++ b/components/eamxx/src/diagnostics/wind_speed.hpp
@@ -0,0 +1,37 @@
+#ifndef EAMXX_WIND_SPEED_HPP
+#define EAMXX_WIND_SPEED_HPP
+
+#include "share/atm_process/atmosphere_diagnostic.hpp"
+
+namespace scream
+{
+
+/*
+ * This diagnostic will compute the magnitude of the horiz_winds vector
+ */
+
+class WindSpeed : public AtmosphereDiagnostic
+{
+public:
+ // Constructors
+ WindSpeed (const ekat::Comm& comm, const ekat::ParameterList& params);
+
+ // The name of the diagnostic
+ std::string name () const override { return "wind_speed"; }
+
+ // Set the grid
+  void set_grids (const std::shared_ptr<const GridsManager> grids_manager) override;
+
+protected:
+#ifdef KOKKOS_ENABLE_CUDA
+public:
+#endif
+ void compute_diagnostic_impl () override;
+
+ int m_ncols;
+ int m_nlevs;
+};
+
+} //namespace scream
+
+#endif // EAMXX_WIND_SPEED_HPP
diff --git a/components/eamxx/src/doubly-periodic/CMakeLists.txt b/components/eamxx/src/doubly-periodic/CMakeLists.txt
deleted file mode 100644
index 09dab68a8e28..000000000000
--- a/components/eamxx/src/doubly-periodic/CMakeLists.txt
+++ /dev/null
@@ -1,60 +0,0 @@
-include (ScreamUtils)
-
-set(DP_SRCS
- dp_f90.cpp
- dp_iso_c.f90
- #${SCREAM_BASE_DIR}/../eam/src/control/apply_iop_forcing.F90
- #${SCREAM_BASE_DIR}/../eam/src/dynamics/se/se_iop_intr_mod.F90",
- #${SCREAM_BASE_DIR}/../eam/src/control/iop_data_mod.F90",
- #${SCREAM_BASE_DIR}/../eam/src/control/history_iop.F90"
-)
-
-# Set cmake config options for Homme
-if (NOT "${SCREAM_DYNAMICS_DYCORE}" STREQUAL "HOMME")
- message(FATAL_ERROR "Requires homme")
-endif()
-
-# Get or create the dynamics lib
-# HOMME_TARGET NP PLEV QSIZE_D
-CreateDynamicsLib("theta-l_kokkos" 4 72 10)
-
-if (NOT SCREAM_LIB_ONLY)
- list(APPEND DP_SRCS
- dp_functions_f90.cpp
- ) # Add f90 bridges needed for testing
-endif()
-
-# Add ETI source files if not on CUDA/HIP
-if (NOT EAMXX_ENABLE_GPU OR Kokkos_ENABLE_CUDA_RELOCATABLE_DEVICE_CODE OR Kokkos_ENABLE_HIP_RELOCATABLE_DEVICE_CODE)
- list(APPEND DP_SRCS
- eti/dp_advance_iop_forcing.cpp
- eti/dp_advance_iop_nudging.cpp
- eti/dp_advance_iop_subsidence.cpp
- eti/dp_iop_setinitial.cpp
- eti/dp_iop_broadcast.cpp
- eti/dp_apply_iop_forcing.cpp
- eti/dp_iop_domain_relaxation.cpp
- eti/dp_crm_resolved_turb.cpp
- eti/dp_iop_default_opts.cpp
- eti/dp_iop_setopts.cpp
- eti/dp_setiopupdate_init.cpp
- eti/dp_setiopupdate.cpp
- eti/dp_readiopdata.cpp
- eti/dp_iop_intht.cpp
- ) # DP ETI SRCS
-endif()
-
-add_library(dp ${DP_SRCS})
-set_target_properties(dp PROPERTIES
- Fortran_MODULE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/modules
-)
-target_include_directories(dp PUBLIC
- ${CMAKE_CURRENT_BINARY_DIR}/modules
- ${CMAKE_CURRENT_SOURCE_DIR}
- ${CMAKE_CURRENT_SOURCE_DIR}/impl
-)
-target_link_libraries(dp PUBLIC physics_share scream_share ${dynLibName})
-
-#if (NOT SCREAM_LIB_ONLY)
-# add_subdirectory(tests)
-#endif()
diff --git a/components/eamxx/src/doubly-periodic/dp_constants.hpp b/components/eamxx/src/doubly-periodic/dp_constants.hpp
deleted file mode 100644
index c974106f83ec..000000000000
--- a/components/eamxx/src/doubly-periodic/dp_constants.hpp
+++ /dev/null
@@ -1,22 +0,0 @@
-#ifndef DP_CONSTANTS_HPP
-#define DP_CONSTANTS_HPP
-
-namespace scream {
-namespace dp {
-
-/*
- * Mathematical constants used by dp.
- */
-
-template <typename Scalar>
-struct Constants
-{
- static constexpr Scalar iop_nudge_tq_low = 1050;
- static constexpr Scalar iop_nudge_tq_high = 0;
- static constexpr Scalar iop_nudge_tscale = 10800;
-};
-
-} // namespace dp
-} // namespace scream
-
-#endif
diff --git a/components/eamxx/src/doubly-periodic/dp_f90.cpp b/components/eamxx/src/doubly-periodic/dp_f90.cpp
deleted file mode 100644
index 2802e5266fd5..000000000000
--- a/components/eamxx/src/doubly-periodic/dp_f90.cpp
+++ /dev/null
@@ -1,27 +0,0 @@
-#include "dp_f90.hpp"
-#include "physics_constants.hpp"
-
-#include "ekat/ekat_assert.hpp"
-
-using scream::Int;
-
-extern "C" {
-
-void init_time_level_c (const int& nm1, const int& n0, const int& np1,
- const int& nstep, const int& nstep0);
-
-}
-
-namespace scream {
-namespace dp {
-
-void dp_init(const bool force_reinit) {
- static bool is_init = false;
- if (!is_init || force_reinit) {
- init_time_level_c(10, 3, 11, 5, 4);
- is_init = true;
- }
-}
-
-} // namespace dp
-} // namespace scream
diff --git a/components/eamxx/src/doubly-periodic/dp_f90.hpp b/components/eamxx/src/doubly-periodic/dp_f90.hpp
deleted file mode 100644
index 338a583f777a..000000000000
--- a/components/eamxx/src/doubly-periodic/dp_f90.hpp
+++ /dev/null
@@ -1,18 +0,0 @@
-#ifndef SCREAM_DP_F90_HPP
-#define SCREAM_DP_F90_HPP
-
-#include "share/scream_types.hpp"
-
-#include
-#include
-
-namespace scream {
-namespace dp {
-
-// Initialize DP. This is only for standalone DP testing.
-void dp_init(const bool force_reinit=false);
-
-} // namespace dp
-} // namespace scream
-
-#endif
diff --git a/components/eamxx/src/doubly-periodic/dp_functions.hpp b/components/eamxx/src/doubly-periodic/dp_functions.hpp
deleted file mode 100644
index 18e6ec58ba9e..000000000000
--- a/components/eamxx/src/doubly-periodic/dp_functions.hpp
+++ /dev/null
@@ -1,325 +0,0 @@
-#ifndef DP_FUNCTIONS_HPP
-#define DP_FUNCTIONS_HPP
-
-#include "physics/share/physics_constants.hpp"
-#include "dp_constants.hpp"
-
-#include "share/scream_types.hpp"
-
-#include "ekat/ekat_pack_kokkos.hpp"
-#include "ekat/ekat_workspace.hpp"
-
-#include "Elements.hpp"
-#include "Tracers.hpp"
-
-namespace scream {
-namespace dp {
-
-/*
- * Functions is a stateless struct used to encapsulate a
- * number of functions for DP. We use the ETI pattern for
- * these functions.
- *
- * DP assumptions:
- * - Kokkos team policies have a vector length of 1
- */
-
-using element_t = Homme::Elements;
-using tracer_t = Homme::Tracers;
-struct hvcoord_t{};
-struct timelevel_t{};
-struct hybrid_t{};
-
-template <typename ScalarT, typename DeviceT>
-struct Functions
-{
- //
- // ------- Types --------
- //
-
- using Scalar = ScalarT;
- using Device = DeviceT;
-
-  template <typename S>
-  using BigPack = ekat::Pack<S, SCREAM_PACK_SIZE>;
-  template <typename S>
-  using SmallPack = ekat::Pack<S, SCREAM_SMALL_PACK_SIZE>;
-
-  using IntSmallPack = SmallPack<Int>;
-  using Pack = BigPack<Scalar>;
-  using Spack = SmallPack<Scalar>;
-
-  using Mask = ekat::Mask<BigPack<Scalar>::n>;
-  using Smask = ekat::Mask<SmallPack<Scalar>::n>;
-
-  using KT = ekat::KokkosTypes<Device>;
- using ExeSpace = typename KT::ExeSpace;
-
-  using C = physics::Constants<Scalar>;
-  using DPC = dp::Constants<Scalar>;
-
-  template <typename S>
-  using view_1d = typename KT::template view_1d<S>;
-  template <typename S>
-  using view_2d = typename KT::template view_2d<S>;
-  template <typename S>
-  using view_3d = typename KT::template view_3d<S>;
-
-  template <typename S, int N>
-  using view_1d_ptr_array = typename KT::template view_1d_ptr_carray<S, N>;
-
-  template <typename S>
-  using uview_1d = typename ekat::template Unmanaged<view_1d<S> >;
-
-  template <typename S>
-  using uview_2d = typename ekat::template Unmanaged<view_2d<S> >;
-
- using MemberType = typename KT::MemberType;
-
-  using WorkspaceMgr = typename ekat::WorkspaceManager<Spack, Device>;
- using Workspace = typename WorkspaceMgr::Workspace;
-
- //
- // --------- Functions ---------
- //
-
- // ---------------------------------------------------------------------
- // Define the pressures of the interfaces and midpoints from the
- // coordinate definitions and the surface pressure.
- // ---------------------------------------------------------------------
- KOKKOS_FUNCTION
- static void plevs0(
- // Input arguments
- const Int& nver, // vertical dimension
- const Scalar& ps, // Surface pressure (pascals)
- const uview_1d& hyai, // ps0 component of hybrid coordinate - interfaces
- const uview_1d& hyam, // ps0 component of hybrid coordinate - midpoints
- const uview_1d& hybi, // ps component of hybrid coordinate - interfaces
- const uview_1d& hybm, // ps component of hybrid coordinate - midpoints
- // Kokkos stuff
- const MemberType& team,
- // Output arguments
- const uview_1d& pint, // Pressure at model interfaces
- const uview_1d& pmid, // Pressure at model levels
- const uview_1d& pdel); // Layer thickness (pint(k+1) - pint(k))
-
- //-----------------------------------------------------------------------
- // advance_iop_forcing
- // Purpose:
- // Apply large scale forcing for t, q, u, and v as provided by the
- // case IOP forcing file.
- //
- // Author:
- // Original version: Adopted from CAM3.5/CAM5
- // Updated version for E3SM: Peter Bogenschutz (bogenschutz1@llnl.gov)
- // and replaces the forecast.F90 routine in CAM3.5/CAM5/CAM6/E3SMv1/E3SMv2
- // CXX version: James Foucar (jgfouca@sandia.gov)
- //
- //-----------------------------------------------------------------------
- KOKKOS_FUNCTION
- static void advance_iop_forcing(
- // Input arguments
- const Int& plev, // number of vertical levels
- const Int& pcnst, // number of advected constituents including cloud water
- const bool& have_u, // dataset contains u
- const bool& have_v, // dataset contains v
- const bool& dp_crm, // use 3d forcing
- const bool& use_3dfrc, // use 3d forcing
- const Scalar& scm_dt, // model time step [s]
- const Scalar& ps_in, // surface pressure [Pa]
- const uview_1d& u_in, // zonal wind [m/s]
- const uview_1d& v_in, // meridional wind [m/s]
- const uview_1d& t_in, // temperature [K]
- const uview_2d& q_in, // q tracer array [units vary]
- const uview_1d& t_phys_frc, // temperature forcing from physics [K/s]
- const uview_1d& divt3d, // 3D T advection
- const uview_2d& divq3d, // 3D q advection
- const uview_1d& divt, // Divergence of temperature
- const uview_2d& divq, // Divergence of moisture
- const uview_1d& wfld, // Vertical motion (slt)
- const uview_1d& uobs, // actual u wind
- const uview_1d& vobs, // actual v wind
- const uview_1d& hyai, // ps0 component of hybrid coordinate - interfaces
- const uview_1d& hyam, // ps0 component of hybrid coordinate - midpoints
- const uview_1d& hybi, // ps component of hybrid coordinate - interfaces
- const uview_1d& hybm, // ps component of hybrid coordinate - midpoints
- // Kokkos stuff
- const MemberType& team,
- const Workspace& workspace,
- // Output arguments
- const uview_1d& u_update, // updated temperature [K]
- const uview_1d& v_update, // updated q tracer array [units vary]
- const uview_1d& t_update, // updated zonal wind [m/s]
- const uview_2d& q_update); // updated meridional wind [m/s]
-
- //-----------------------------------------------------------------------
- // advance_iop_nudging
- // Purpose:
- // Option to nudge t and q to observations as specified by the IOP file
- //
- // Author:
- // Original version: Adopted from CAM3.5/CAM5
- // Updated version for E3SM: Peter Bogenschutz (bogenschutz1@llnl.gov)
- // CXX version: Conrad Clevenger (tccleve@sandia.gov)
- //
- //-----------------------------------------------------------------------
- KOKKOS_FUNCTION
- static void advance_iop_nudging(
- // Input arguments
- const Int& plev, // number of vertical levels
- const Scalar& scm_dt, // model time step [s]
- const Scalar& ps_in, // surface pressure [Pa]
- const uview_1d& t_in, // temperature [K]
- const uview_1d& q_in, // water vapor mixing ratio [kg/kg]
- const uview_1d& tobs, // observed temperature [K]
- const uview_1d& qobs, // observed vapor mixing ratio [kg/kg]
- const uview_1d& hyai, // ps0 component of hybrid coordinate - interfaces
- const uview_1d& hyam, // ps0 component of hybrid coordinate - midpoints
- const uview_1d& hybi, // ps component of hybrid coordinate - interfaces
- const uview_1d& hybm, // ps component of hybrid coordinate - midpoints
- // Kokkos stuff
- const MemberType& team,
- const Workspace& workspace,
- // Output arguments
- const uview_1d& t_update, // updated temperature [K]
- const uview_1d& q_update, // updated water vapor [kg/kg]
- const uview_1d& relaxt, // relaxation of temperature [K/s]
- const uview_1d& relaxq); // relaxation of vapor [kg/kg/s]
-
- KOKKOS_INLINE_FUNCTION
- static void do_advance_iop_subsidence_update(
- const Int& k,
- const Int& plev,
- const Spack& fac,
- const Spack& swfldint,
- const Spack& swfldint_p1,
- const uview_1d& in,
- const uview_1d& in_s,
- const uview_1d& update);
-
- //-----------------------------------------------------------------------
- //
- // Purpose:
- // Option to compute effects of large scale subsidence on T, q, u, and v.
- // Code originated from CAM3.5/CAM5 Eulerian subsidence computation for SCM
- // in the old forecast.f90 routine.
- //-----------------------------------------------------------------------
- KOKKOS_FUNCTION
- static void advance_iop_subsidence(
- // Input arguments
- const Int& plev, // number of vertical levels
- const Int& pcnst, // number of advected constituents including cloud water
- const Scalar& scm_dt, // model time step [s]
- const Scalar& ps_in, // surface pressure [Pa]
- const uview_1d& u_in, // zonal wind [m/s]
- const uview_1d& v_in, // meridional wind [m/s]
- const uview_1d& t_in, // temperature [K]
- const uview_2d& q_in, // tracer [vary]
- const uview_1d& hyai, // ps0 component of hybrid coordinate - interfaces
- const uview_1d& hyam, // ps0 component of hybrid coordinate - midpoints
- const uview_1d& hybi, // ps component of hybrid coordinate - interfaces
- const uview_1d