
Merge branch 'develop' into feature/METplus_templates
JeffBeck-NOAA authored Mar 29, 2023
2 parents 8001b51 + bd80d94 commit ec333ba
Showing 102 changed files with 1,084 additions and 897 deletions.
11 changes: 6 additions & 5 deletions .cicd/Jenkinsfile
@@ -15,7 +15,7 @@ pipeline {
// choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'hera', 'jet', 'orion', 'pclusternoaav2use1'], description: 'Specify the platform(s) to use')
// Use the line below to enable hera
// choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'hera', 'jet', 'orion'], description: 'Specify the platform(s) to use')
- choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'jet', 'orion'], description: 'Specify the platform(s) to use')
+ choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'orion'], description: 'Specify the platform(s) to use')
// Allow job runner to filter based on compiler
choice(name: 'SRW_COMPILER_FILTER', choices: ['all', 'gnu', 'intel'], description: 'Specify the compiler(s) to use to build')
// Uncomment the following line to re-enable comprehensive tests
@@ -80,7 +80,7 @@ pipeline {
name 'SRW_PLATFORM'
// Uncomment line below to re-add use of Hera
// values 'cheyenne', 'gaea', 'hera', 'jet', 'orion' //, 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'
- values 'cheyenne', 'gaea', 'jet', 'orion' //, 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'
+ values 'cheyenne', 'gaea', 'orion' //, 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'
}

axis {
@@ -94,7 +94,8 @@
exclude {
axis {
name 'SRW_PLATFORM'
- values 'gaea', 'jet', 'orion' //, 'pclusternoaav2use1' , 'azclusternoaav2eus1', 'gclusternoaav2usc1'
+ // values 'gaea', 'jet', 'orion' //, 'pclusternoaav2use1' , 'azclusternoaav2eus1', 'gclusternoaav2usc1'
+ values 'gaea', 'orion' //, 'pclusternoaav2use1' , 'azclusternoaav2eus1', 'gclusternoaav2usc1'
}

axis {
@@ -136,7 +137,7 @@ pipeline {
post {
success {
sh 'cd "${WORKSPACE}/${INSTALL_NAME}" && tar --create --gzip --verbose --file "${WORKSPACE}/${BUILD_NAME}.tgz" *'
- s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'woc-epic-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: true, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.BUILD_NAME}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'woc-epic-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: true, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "build_${env.SRW_COMPILER}/srw_build-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.log", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+ s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: true, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.BUILD_NAME}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: true, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "build_${env.SRW_COMPILER}/srw_build-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.log", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
}
}
}
@@ -180,7 +181,7 @@ pipeline {
sh 'cd "${SRW_WE2E_EXPERIMENT_BASE_DIR}" && tar --create --gzip --verbose --dereference --file "${WORKSPACE}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz" */log.generate_FV3LAM_wflow */log/* ${WORKSPACE}/tests/WE2E/WE2E_tests_*yaml ${WORKSPACE}/tests/WE2E/WE2E_summary*txt ${WORKSPACE}/tests/WE2E/log.*'
// Remove the data sets from the experiments directory to conserve disk space
sh 'find "${SRW_WE2E_EXPERIMENT_BASE_DIR}" -regextype posix-extended -regex "^.*(orog|[0-9]{10})$" -type d | xargs rm -rf'
- s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'woc-epic-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: 'we2e_test_results-*-*.txt', storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'woc-epic-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: 'we2e_test_logs-*-*.tgz', storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+ s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: 'we2e_test_results-*-*.txt', storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: 'we2e_test_logs-*-*.tgz', storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
}
}
}
4 changes: 2 additions & 2 deletions Externals.cfg
@@ -12,7 +12,7 @@ protocol = git
repo_url = https://github.com/ufs-community/ufs-weather-model
# Specify either a branch name or a hash but not both.
#branch = develop
- hash = e051e0e
+ hash = 36d6e16
local_path = sorc/ufs-weather-model
required = True

@@ -21,7 +21,7 @@ protocol = git
repo_url = https://github.com/NOAA-EMC/UPP
# Specify either a branch name or a hash but not both.
#branch = develop
- hash = 2b2c84a
+ hash = 22cfb88
local_path = sorc/UPP
required = True
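
After updating a pinned hash, the external components are typically re-fetched with the manage_externals utility bundled with the App (a usage sketch, run from the top-level directory of the ufs-srweather-app clone):

    ./manage_externals/checkout_externals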

46 changes: 46 additions & 0 deletions docs/UsersGuide/source/ConfigWorkflow.rst
@@ -464,6 +464,9 @@ Baseline Workflow Tasks
``RUN_TASK_RUN_POST``: (Default: true)
Flag that determines whether to run the ``TN_RUN_POST`` task. Valid values: ``True`` | ``False``

``RUN_TASK_RUN_PRDGEN``: (Default: false)
Flag that determines whether to run the ``TN_RUN_PRDGEN`` task. Valid values: ``True`` | ``False``

.. _VXTasks:

Verification Tasks
@@ -1222,6 +1225,49 @@ Set parameters for customizing the :term:`UPP`.
Note that this variable is first changed to lower case before being used to construct the file names.

RUN_PRDGEN Configuration Parameters
=====================================

Non-default parameters for the ``run_prdgen`` task are set in the ``task_run_prdgen:`` section of the ``config.yaml`` file.

Basic Task Parameters
---------------------------------
For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task.

``TN_RUN_PRDGEN``: (Default: "run_prdgen")
Set the name of this Rocoto workflow task. Users typically do not need to change this value.

``NNODES_RUN_PRDGEN``: (Default: 1)
Number of nodes to use for the job.

``PPN_RUN_PRDGEN``: (Default: 22)
Number of :term:`MPI` processes per node.

``WTIME_RUN_PRDGEN``: (Default: 00:30:00)
Maximum time for the task to complete.

``MAXTRIES_RUN_PRDGEN``: (Default: 2)
Maximum number of times to attempt the task.

``KMP_AFFINITY_RUN_PRDGEN``: (Default: "scatter")
Intel Thread Affinity Interface for the ``run_prdgen`` task. See :ref:`this note <thread-affinity>` for more information on thread affinity.

``OMP_NUM_THREADS_RUN_PRDGEN``: (Default: 1)
The number of OpenMP threads to use for parallel regions.

``OMP_STACKSIZE_RUN_PRDGEN``: (Default: "1024m")
Controls the size of the stack for threads created by the OpenMP implementation.

``DO_PARALLEL_PRDGEN``: (Default: false)
Flag that determines whether to use CFP to run the product generation job in parallel. CFP is a utility that allows the user to launch a number of small jobs across nodes/CPUs in one batch command. This option should be used with the ``RRFS_NA_3km`` grid, and ``PPN_RUN_PRDGEN`` should be set to 22 when it is enabled.

``ADDNL_OUTPUT_GRIDS``: (Default: [])
Additional output grids for wgrib2 remapping, if any, specified as a space-separated list of strings (e.g., ``( "130" "242" "clue" )``). The default is no additional grids.

``TESTBED_FIELDS_FN``: (Default: "")
The file that lists the GRIB2 fields to be extracted for testbed files. An empty string means that no testbed files need to be generated.
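
A minimal ``config.yaml`` fragment that enables and customizes this task might look as follows (values are illustrative, and the placement of ``RUN_TASK_RUN_PRDGEN`` under ``workflow_switches:`` is an assumption about the configuration layout):

.. code-block:: console

   workflow_switches:
     RUN_TASK_RUN_PRDGEN: true
   task_run_prdgen:
     NNODES_RUN_PRDGEN: 1
     PPN_RUN_PRDGEN: 22
     WTIME_RUN_PRDGEN: 00:30:00
     DO_PARALLEL_PRDGEN: true
     ADDNL_OUTPUT_GRIDS: [ "130", "242" ]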


.. _get-obs-ccpa:

GET_OBS_CCPA Configuration Parameters
4 changes: 2 additions & 2 deletions docs/UsersGuide/source/ContainerQuickstart.rst
@@ -322,7 +322,7 @@ From here, users can follow the steps below to configure the out-of-the-box SRW
.. code-block:: console
USE_USER_STAGED_EXTRN_FILES: true
- EXTRN_MDL_SOURCE_BASEDIR_ICS: /scratch2/BMC/det/UFS_SRW_App/develop/input_model_data/FV3GFS/grib2/${yyyymmddhh}
+ EXTRN_MDL_SOURCE_BASEDIR_ICS: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/input_model_data/FV3GFS/grib2/${yyyymmddhh}
EXTRN_MDL_FILES_ICS: []
EXTRN_MDL_DATA_STORES: disk
@@ -333,7 +333,7 @@ From here, users can follow the steps below to configure the out-of-the-box SRW
.. code-block:: console
USE_USER_STAGED_EXTRN_FILES: true
- EXTRN_MDL_SOURCE_BASEDIR_LBCS: /scratch2/BMC/det/UFS_SRW_App/develop/input_model_data/FV3GFS/grib2/${yyyymmddhh}
+ EXTRN_MDL_SOURCE_BASEDIR_LBCS: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/input_model_data/FV3GFS/grib2/${yyyymmddhh}
EXTRN_MDL_FILES_LBCS: []
EXTRN_MDL_DATA_STORES: disk
38 changes: 19 additions & 19 deletions docs/UsersGuide/source/RunSRW.rst
@@ -43,23 +43,23 @@ The SRW App requires input files to run. These include static datasets, initial
.. _DataLocations:
.. table:: Data Locations for Level 1 Systems

- +--------------+-----------------------------------------------------------------+
- | Machine      | File location                                                   |
- +==============+=================================================================+
- | Cheyenne     | /glade/p/ral/jntp/UFS_SRW_App/develop/input_model_data/        |
- +--------------+-----------------------------------------------------------------+
- | Gaea         | /lustre/f2/pdata/ncep/UFS_SRW_App/develop/input_model_data/    |
- +--------------+-----------------------------------------------------------------+
- | Hera         | /scratch2/BMC/det/UFS_SRW_App/develop/input_model_data/        |
- +--------------+-----------------------------------------------------------------+
- | Jet          | /mnt/lfs4/BMC/wrfruc/UFS_SRW_App/develop/input_model_data/     |
- +--------------+-----------------------------------------------------------------+
- | NOAA Cloud   | /contrib/EPIC/UFS_SRW_App/develop/input_model_data/            |
- +--------------+-----------------------------------------------------------------+
- | Orion        | /work/noaa/fv3-cam/UFS_SRW_App/develop/input_model_data/       |
- +--------------+-----------------------------------------------------------------+
- | WCOSS2       | /lfs/h2/emc/lam/noscrub/UFS_SRW_App/develop/input_model_data/  |
- +--------------+-----------------------------------------------------------------+
+ +--------------+------------------------------------------------------------------------------+
+ | Machine      | File location                                                                |
+ +==============+==============================================================================+
+ | Cheyenne     | /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data/        |
+ +--------------+------------------------------------------------------------------------------+
+ | Gaea         | /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/     |
+ +--------------+------------------------------------------------------------------------------+
+ | Hera         | /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/input_model_data/     |
+ +--------------+------------------------------------------------------------------------------+
+ | Jet          | /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/input_model_data/     |
+ +--------------+------------------------------------------------------------------------------+
+ | NOAA Cloud   | /contrib/EPIC/UFS_SRW_data/develop/input_model_data/                        |
+ +--------------+------------------------------------------------------------------------------+
+ | Orion        | /work/noaa/epic-ps/role-epic-ps/UFS_SRW_data/develop/input_model_data/      |
+ +--------------+------------------------------------------------------------------------------+
+ | WCOSS2       | /lfs/h2/emc/lam/noscrub/UFS_SRW_App/develop/input_model_data/               |
+ +--------------+------------------------------------------------------------------------------+

For Level 2-4 systems, the data must be added to the user's system. Detailed instructions on how to add the data can be found in :numref:`Section %s <DownloadingStagingInput>`. Sections :numref:`%s <Input>` and :numref:`%s <OutputFiles>` contain useful background information on the input and output files used in the SRW App.
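
The same datasets are also published in the App's public Amazon S3 archive. Assuming the ``noaa-ufs-srw-pds`` bucket documented for the App, its contents can be browsed anonymously with the AWS CLI:

.. code-block:: console

   aws s3 ls --no-sign-request s3://noaa-ufs-srw-pds/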

@@ -507,11 +507,11 @@ For example, to run the out-of-the-box experiment on Gaea, add or modify variables
EXPT_SUBDIR: run_basic_srw
task_get_extrn_ics:
USE_USER_STAGED_EXTRN_FILES: true
- EXTRN_MDL_SOURCE_BASEDIR_ICS: /lustre/f2/pdata/ncep/UFS_SRW_App/develop/input_model_data/FV3GFS/grib2/2019061518
+ EXTRN_MDL_SOURCE_BASEDIR_ICS: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/FV3GFS/grib2/2019061518
EXTRN_MDL_DATA_STORES: disk
task_get_extrn_lbcs:
USE_USER_STAGED_EXTRN_FILES: true
- EXTRN_MDL_SOURCE_BASEDIR_LBCS: /lustre/f2/pdata/ncep/UFS_SRW_App/develop/input_model_data/FV3GFS/grib2/2019061518
+ EXTRN_MDL_SOURCE_BASEDIR_LBCS: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/FV3GFS/grib2/2019061518
EXTRN_MDL_DATA_STORES: disk
To determine whether the ``config.yaml`` file adjustments are valid, users can run the following script from the ``ush`` directory:
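
In recent versions of the App this validation script is ``config_utils.py``; the exact invocation below is an assumption:

.. code-block:: console

   ./config_utils.py -c $PWD/config.yaml -v $PWD/config_defaults.yaml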
125 changes: 125 additions & 0 deletions jobs/JREGIONAL_RUN_PRDGEN
@@ -0,0 +1,125 @@
#!/bin/bash

#
#-----------------------------------------------------------------------
#
# This script runs wgrib2 to create various subdomain GRIB2 files from
# the raw GRIB2 output generated by UPP in the run_post task of the
# FV3-LAM model.
#
#-----------------------------------------------------------------------
#

#
#-----------------------------------------------------------------------
#
# Source the variable definitions file and the bash utility functions.
#
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
source_config_for_task "task_run_prdgen|task_run_post" ${GLOBAL_VAR_DEFNS_FP}
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
#
# Save current shell options (in a global array). Then set new options
# for this script/function.
#
#-----------------------------------------------------------------------
#
{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
#
#-----------------------------------------------------------------------
#
# Get the full path to the file in which this script/function is located
# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
# which the file is located (scrfunc_dir).
#
#-----------------------------------------------------------------------
#
scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" )
scrfunc_fn=$( basename "${scrfunc_fp}" )
scrfunc_dir=$( dirname "${scrfunc_fp}" )
#
#-----------------------------------------------------------------------
#
# Print message indicating entry into script.
#
#-----------------------------------------------------------------------
#
print_info_msg "
========================================================================
Entering script: \"${scrfunc_fn}\"
In directory: \"${scrfunc_dir}\"
This is the J-job script for the task that runs wgrib2 on the output
files corresponding to a specified forecast hour.
========================================================================"
#
#-----------------------------------------------------------------------
#
# Set the run directory.
#
#-----------------------------------------------------------------------
#
DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}"
#
#-----------------------------------------------------------------------
#
# If it doesn't already exist, create the directory (COMOUT) in which
# to store post-processing output. (Note that COMOUT may already
# have been created by this post-processing script run for a different
# forecast hour of the same cycle and/or ensemble member.) Also, create
# a temporary work directory (DATA_FHR) for the current forecast hour.
# DATA_FHR will be deleted later after the processing for the current
# forecast hour is complete. Then change location to DATA_FHR.
#
# Note that there may be a preexisting version of DATA_FHR from previous
# runs of this script for the current forecast hour (and current cycle),
# e.g. from the workflow task that runs this script failing and then being
# called again. Thus, we first make sure preexisting versions are deleted.
#
#-----------------------------------------------------------------------
#
if [ "${RUN_ENVIR}" = "community" ]; then
export COMOUT="${DATA}/postprd"
fi
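
# NOTE: mkdir_vrfy and cd_vrfy (used below) are the App's error-checked
# wrappers around "mkdir" and "cd", provided by the utility functions
# sourced above.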
mkdir_vrfy -p "${COMOUT}"

# For sub-hourly post, include the forecast minute (fmn) in the work directory name.
if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then
export DATA_FHR="${DATA:-$COMOUT}/$fhr$fmn"
else
export DATA_FHR="${DATA:-$COMOUT}/$fhr"
fi
check_for_preexist_dir_file "${DATA_FHR}" "delete"
mkdir_vrfy -p "${DATA_FHR}"

cd_vrfy "${DATA_FHR}"
#
#-----------------------------------------------------------------------
#
# Call the ex-script for this J-job and pass to it the necessary
# variables.
#
#-----------------------------------------------------------------------
#
$SCRIPTSdir/exregional_run_prdgen.sh || print_err_msg_exit "\
Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
#
#-----------------------------------------------------------------------
#
# Run job postamble.
#
#-----------------------------------------------------------------------
#
job_postamble
#
#-----------------------------------------------------------------------
#
# Restore the shell options saved at the beginning of this
# script/function.
#
#-----------------------------------------------------------------------
#
{ restore_shell_opts; } > /dev/null 2>&1
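
# This J-job is normally launched by Rocoto, which exports the required
# workflow variables. A minimal manual-invocation sketch (paths and values
# are illustrative; additional variables such as COMIN and RUN_ENVIR must
# also be set):
#
#   export GLOBAL_VAR_DEFNS_FP="/path/to/expt_dir/var_defns.sh"
#   export USHdir="/path/to/ufs-srweather-app/ush"
#   export SCRIPTSdir="/path/to/ufs-srweather-app/scripts"
#   export fhr="006"
#   ./jobs/JREGIONAL_RUN_PRDGEN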