From 0c95b44b1e8eadb31903ae0a8dd1549196c9b207 Mon Sep 17 00:00:00 2001 From: Yunheng Wang <47898913+ywangwof@users.noreply.github.com> Date: Tue, 22 Mar 2022 20:00:08 -0500 Subject: [PATCH 01/29] Add support on NSSL/Odin (#227) * Add support on NSSL/Odin * Add wlfow_odin.env and modify detect_machine.sh * update comment * Detect explicitly odin1/odin2 --- env/build_odin_intel.env | 58 ++++++++++++++++++++++++++++++++++++++++ env/detect_machine.sh | 3 +++ env/wflow_odin.env | 25 +++++++++++++++++ 3 files changed, 86 insertions(+) create mode 100644 env/build_odin_intel.env create mode 100644 env/wflow_odin.env diff --git a/env/build_odin_intel.env b/env/build_odin_intel.env new file mode 100644 index 0000000000..cf69d3d091 --- /dev/null +++ b/env/build_odin_intel.env @@ -0,0 +1,58 @@ +#Setup instructions for NOAA RDHPC Jet using Intel-18.0.5.274 (bash shell) + +module unload modules +unset -f module + +export BASH_ENV=/usr/local/lmod/8.3.1/init/bash +source $BASH_ENV +export LMOD_SYSTEM_DEFAULT_MODULES=PrgEnv-intel:cray-mpich:intel:craype +module --initial_load --no_redirect restore +#module use <$HOME>/ +export MODULEPATH=/oldscratch/ywang/external/hpc-stack/modulefiles/mpi/intel/2020/cray-mpich/7.7.16:/oldscratch/ywang/external/hpc-stack/modulefiles/compiler/intel/2020:/oldscratch/ywang/external/hpc-stack/modulefiles/core:/oldscratch/ywang/external/hpc-stack/modulefiles/stack:/opt/cray/pe/perftools/21.02.0/modulefiles:/opt/cray/ari/modulefiles:/opt/cray/pe/craype-targets/default/modulefiles:/opt/cray/pe/modulefiles:/opt/cray/modulefiles:/opt/modulefiles + +#module purge +export CMAKE=/home/yunheng.wang/tools/cmake-3.23.0-rc2/bin/cmake +export PATH=/home/yunheng.wang/tools/cmake-3.23.0-rc2/bin:${PATH} + +module load hpc/1.2.0 +module load hpc-intel +module load hpc-cray-mpich + +#module load srw_common + +module load jasper +module load zlib +module load png + +#module load cray-hdf5 +#module load cray-netcdf +module load esmf +module load fms + +module load bacio +module load crtm +module load g2 +module load g2tmpl +module load ip +module load sp +module load w3nco +module load upp + +module load gftl-shared +module load yafyaml +module load mapl + +module load gfsio +module load landsfcutil +module load nemsio +module load nemsiogfs +module load sfcio +module load sigio +module load w3emc +module load wgrib2 + +export CMAKE_C_COMPILER=cc +export CMAKE_CXX_COMPILER=CC +export CMAKE_Fortran_COMPILER=ftn +export CMAKE_Platform=odin.intel + diff --git a/env/detect_machine.sh b/env/detect_machine.sh index 2e06e0ca7d..1634b18dd1 100755 --- a/env/detect_machine.sh +++ b/env/detect_machine.sh @@ -96,6 +96,9 @@ case $(hostname -f) in login01.expanse.sdsc.edu) MACHINE_ID=expanse ;; ### expanse1 login02.expanse.sdsc.edu) MACHINE_ID=expanse ;; ### expanse2 + + nid00193) MACHINE_ID=odin ;; ### Odin1 at NSSL + nid00385) MACHINE_ID=odin ;; ### Odin2 at NSSL esac MACHINE="${MACHINE_ID}" diff --git a/env/wflow_odin.env b/env/wflow_odin.env new file mode 100644 index 0000000000..2e3f60e4ea --- /dev/null +++ b/env/wflow_odin.env @@ -0,0 +1,25 @@ +# >>> conda initialize >>> +# !! Contents within this block are managed by 'conda init' !! +__conda_setup="$('/scratch/software/Odin/python/anaconda2/bin/conda' 'shell.bash' 'hook' 2> /dev/null)" +if [ $? -eq 0 ]; then + eval "$__conda_setup" +else + if [ -f "/scratch/software/Odin/python/anaconda2/etc/profile.d/conda.sh" ]; then + . 
"/scratch/software/Odin/python/anaconda2/etc/profile.d/conda.sh" + else + export PATH="/scratch/software/Odin/python/anaconda2/bin:$PATH" + fi +fi +unset __conda_setup +# <<< conda initialize <<< + +# To make "regional_workflow" avaiable, +# you should uncomment the following lines, which create file ".condarc" +# or install the environment yourself. +#cat > $HOME/.condarc < Date: Wed, 30 Mar 2022 12:02:11 -0600 Subject: [PATCH 02/29] Add python module to cheyenne build environments (#232) --- env/build_cheyenne_gnu.env | 1 + env/build_cheyenne_intel.env | 1 + 2 files changed, 2 insertions(+) diff --git a/env/build_cheyenne_gnu.env b/env/build_cheyenne_gnu.env index 876aff5fab..36dea1137d 100644 --- a/env/build_cheyenne_gnu.env +++ b/env/build_cheyenne_gnu.env @@ -7,6 +7,7 @@ module load ncarenv/1.3 module load gnu/10.1.0 module load mpt/2.22 module load ncarcompilers/0.5.0 +module load python/3.7.9 module unload netcdf module use /glade/p/ral/jntp/GMTB/tools/hpc-stack-v1.2.0/modulefiles/stack diff --git a/env/build_cheyenne_intel.env b/env/build_cheyenne_intel.env index e4641b841d..75ffc0fa62 100644 --- a/env/build_cheyenne_intel.env +++ b/env/build_cheyenne_intel.env @@ -7,6 +7,7 @@ module load ncarenv/1.3 module load intel/2021.2 module load mpt/2.22 module load ncarcompilers/0.5.0 +module load python/3.7.9 module unload netcdf module use /glade/p/ral/jntp/GMTB/tools/hpc-stack-v1.2.0/modulefiles/stack From 31dab61baedcaf6bfd0e29192369332d8930b5cc Mon Sep 17 00:00:00 2001 From: Gillian Petro <96886803+gspetro-NOAA@users.noreply.github.com> Date: Wed, 30 Mar 2022 17:06:17 -0400 Subject: [PATCH 03/29] Update SRW Documentation (#212) * updated docs * added git submodule * fix formatting * added new submodule commits * fixed ref links * finished Intro * finish Components & Intro edits * edited Rocoto workflow section of Quickstart * added minor hpc submodule commits * Updates to Rocoto Workflow in Quick Start * add to HPC-stack intro * submodule updates * added submodule docs edits * hpc-stack updates & formatting fixes * hpc-stack intro edits * bibtex attempted fix * add hpc-stack module edits * update sphinxcontrib version * add .readthedocs.yaml file * update .readthedocs.yaml file * update .readthedocs.yaml file * update conf.py * updates .readthedocs.yaml with submodules * updates .readthedocs.yaml with submodules * submodule updates * submodule updates * minor Intro edits * minor Intro edits * minor Intro edits * submodule updates * fixed typos in QS * QS updates * QS updates * QS updates * updates to InputOutput and QS * fix I/O doc typos * pull updates to hpc-stack docs * pull updates to hpc-stack docs * fix table wrapping * updates to QS for cloud * fix QS export statements * fix QS export statements * QS edits on bind, config * add bullet points to notes * running without rocoto * add HPC-Stack submodule w/docs * split QS into container/non-container approaches * added filepath changes for running in container on Orion, et al. 
* edits to overview and container QS * moved CodeReposAndDirs.rst info to the Introduction & deleted file * continued edits to SRWAppOverview * combine overview w/non-container docs * finish merging non-container guide & SRWOverview, rename/remove files, update FAQ * minor edits for Intro & QS * updates to BuildRun doc through 3.8.1 * edits to Build/Run and Components * remove .gitignore * fix Ch 3 title, 4 supported platform levels note * fix typos, add term links * other minor fixes/suggestions implemented * updated Intro based on feedback; changed SRW to SRW App throughout * update comment to Intro citation * add user-defined vertical levels to future work * Add instructions for srw_common module load * fix typo * update Intro & BuildRunSRW based on Mark's feedback * minor intro updates * 1st round of jwolff's edits * 2nd round of jwolff updates * update QS intro * fix minor physics details * update citation and physics suite name * add compute node allocation info to QS * add authoritative hpc-stack docs to Intro Co-authored-by: gspetro --- .gitmodules | 3 + .readthedocs.yaml | 35 + docs/UsersGuide/build/.gitignore | 4 - docs/UsersGuide/source/BuildRunSRW.rst | 886 ++++++++++++++++++ docs/UsersGuide/source/CodeReposAndDirs.rst | 261 ------ docs/UsersGuide/source/Components.rst | 80 ++ docs/UsersGuide/source/ConfigNewPlatform.rst | 11 +- docs/UsersGuide/source/FAQ.rst | 48 +- docs/UsersGuide/source/Glossary.rst | 74 +- docs/UsersGuide/source/Include-HPCInstall.rst | 8 + docs/UsersGuide/source/InputOutputFiles.rst | 199 ++-- docs/UsersGuide/source/Introduction.rst | 519 ++++++---- docs/UsersGuide/source/Quickstart.rst | 445 ++++----- docs/UsersGuide/source/SRWAppOverview.rst | 710 -------------- docs/UsersGuide/source/WE2Etests.rst | 2 +- .../source/_static/theme_overrides.css | 2 + docs/UsersGuide/source/conf.py | 15 +- docs/UsersGuide/source/index.rst | 10 +- docs/UsersGuide/source/references.bib | 8 +- hpc-stack-mod | 1 + 20 files changed, 1786 insertions(+), 1535 deletions(-) create mode 100644 .gitmodules create mode 100644 .readthedocs.yaml delete mode 100644 docs/UsersGuide/build/.gitignore create mode 100644 docs/UsersGuide/source/BuildRunSRW.rst delete mode 100644 docs/UsersGuide/source/CodeReposAndDirs.rst create mode 100644 docs/UsersGuide/source/Components.rst create mode 100644 docs/UsersGuide/source/Include-HPCInstall.rst delete mode 100644 docs/UsersGuide/source/SRWAppOverview.rst create mode 160000 hpc-stack-mod diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000000..ca914133d5 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "hpc-stack-mod"] + path = hpc-stack-mod + url = https://github.com/NOAA-EMC/hpc-stack.git diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000000..e0987f8926 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,35 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the version of Python and other tools you might need +build: + os: ubuntu-20.04 + tools: + python: "3.9" + # You can also specify other tool versions: + # nodejs: "16" + # rust: "1.55" + # golang: "1.17" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: docs/UsersGuide/source/conf.py + +# If using Sphinx, optionally build your docs in additional formats such as PDF +# formats: +# - pdf + +# Optionally declare the Python requirements required to build your docs +python: + 
install: + - requirements: docs/UsersGuide/requirements.txt + +submodules: + include: + - hpc-stack-mod + recursive: true + diff --git a/docs/UsersGuide/build/.gitignore b/docs/UsersGuide/build/.gitignore deleted file mode 100644 index 5e7d2734cf..0000000000 --- a/docs/UsersGuide/build/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -# Ignore everything in this directory -* -# Except this file -!.gitignore diff --git a/docs/UsersGuide/source/BuildRunSRW.rst b/docs/UsersGuide/source/BuildRunSRW.rst new file mode 100644 index 0000000000..5eee295f30 --- /dev/null +++ b/docs/UsersGuide/source/BuildRunSRW.rst @@ -0,0 +1,886 @@ +.. _BuildRunSRW: + +===================================== +Building and Running the SRW App +===================================== + +The Unified Forecast System (:term:`UFS`) Short-Range Weather (SRW) Application is an :term:`umbrella repository` consisting of a number of different :ref:`components ` housed in external repositories. Once the SRW App is configured and built, users can generate predictions of atmospheric behavior over a limited spatial area and on time scales ranging from minutes out to several days. + +This chapter walks users through how to build and run the "out-of-the-box" case for the SRW App. However, the steps are relevant to any SRW Application experiment and can be modified to suit user goals. The "out-of-the-box" SRW App case builds a weather forecast for June 15-16, 2019. Multiple convective weather events during these two days produced over 200 filtered storm reports. Severe weather was clustered in two areas: the Upper Midwest through the Ohio Valley and the Southern Great Plains. This forecast uses a predefined 25-km Continental United States (:term:`CONUS`) domain (RRFS_CONUS_25km), the Global Forecast System (:term:`GFS`) version 15.2 physics suite (FV3_GFS_v16 :term:`CCPP`), and :term:`FV3`-based GFS raw external model data for initialization. + +.. attention:: + + All UFS applications support `four platform levels `_. The steps described in this chapter will work most smoothly on preconfigured (Level 1) systems. On Level 1 systems, all of the required libraries for building community releases of UFS models and applications are available in a central location. This guide can serve as a starting point for running the SRW App on other systems, too, but the user may need to perform additional troubleshooting. + +.. note:: + The :ref:`container approach ` is recommended for a smoother build and run experience. Building without a container allows for the use of the Rocoto workflow manager and may allow for more customization. However, the non-container approach requires more in-depth system-based knowledge, especially on Level 3 and 4 systems; it is less appropriate for beginners. + +The overall procedure for generating an experiment is shown in :numref:`Figure %s `, with the scripts to generate and run the workflow shown in red. The steps are as follows: + + * :ref:`Install prerequisites ` + * :ref:`Clone the SRW App from GitHub ` + * :ref:`Check out the external repositories ` + * :ref:`Set up the build environment ` + * :ref:`Build the executables ` + * :ref:`Download and stage data ` + * :ref:`Optional: Configure a new grid ` + * :ref:`Generate a regional workflow experiment ` + * :ref:`Configure the experiment parameters ` + * :ref:`Load the python environment for the regional workflow ` + * :ref:`Run the regional workflow ` + * :ref:`Optional: Plot the output ` + +.. _AppOverallProc: + +.. 
figure:: _static/FV3LAM_wflow_overall.png + + *Overall layout of the SRW App Workflow* + + +.. _HPCstackInfo: + +Install the HPC-Stack +======================== + +.. Attention:: + Skip the HPC-Stack installation if working on a `Level 1 system `_ (e.g., Cheyenne, Hera, Orion, NOAA Cloud). + +**Definition:** :term:`HPC-Stack` is a repository that provides a unified, shell script-based build system and builds the software stack required for `UFS `_ applications such as the SRW App. + +Background +---------------- + +The UFS Weather Model draws on over 50 code libraries to run its applications. These libraries range from libraries developed in-house at NOAA (e.g. NCEPLIBS, FMS, etc.) to libraries developed by NOAA's partners (e.g. PIO, ESMF, etc.) to truly third party libraries (e.g. NETCDF). Individual installation of these libraries is not practical, so the `HPC-Stack `__ was developed as a central installation system to ensure that the infrastructure environment across multiple platforms is as similar as possible. Installation of the HPC-Stack is required to run the SRW App. + +Instructions +------------------------- +Users working on systems that fall under `Support Levels 2-4 `_ will need to install the HPC-Stack the first time they try to build applications (such as the SRW App) or models that depend on it. Users can either build the HPC-stack on their local system or use the centrally maintained stacks on each HPC platform if they are working on a Level 1 system. For a detailed description of installation options, see :ref:`Installing the HPC-Stack `. + +After completing installation, continue to the next section. + +.. _DownloadSRWApp: + +Download the UFS SRW Application Code +====================================== +The SRW Application source code is publicly available on GitHub. To download the SRW App, clone the ``develop`` branch of the repository: + +.. code-block:: console + + git clone -b develop https://github.com/ufs-community/ufs-srweather-app.git + +.. + COMMENT: This will need to be changed to the updated release branch of the SRW repo once it exists. + +The cloned repository contains the configuration files and sub-directories shown in +:numref:`Table %s `. + +.. _FilesAndSubDirs: + +.. table:: Files and sub-directories of the ufs-srweather-app repository + + +--------------------------------+--------------------------------------------------------+ + | **File/Directory Name** | **Description** | + +================================+========================================================+ + | CMakeLists.txt | Main cmake file for SRW App | + +--------------------------------+--------------------------------------------------------+ + | Externals.cfg | Includes tags pointing to the correct version of the | + | | external GitHub repositories/branches used in the SRW | + | | App. 
| + +--------------------------------+--------------------------------------------------------+ + | LICENSE.md | CC0 license information | + +--------------------------------+--------------------------------------------------------+ + | README.md | Getting Started Guide | + +--------------------------------+--------------------------------------------------------+ + | ufs_srweather_app_meta.h.in | Meta information for SRW App which can be used by | + | | other packages | + +--------------------------------+--------------------------------------------------------+ + | ufs_srweather_app.settings.in | SRW App configuration summary | + +--------------------------------+--------------------------------------------------------+ + | env | Contains build and workflow environment files | + +--------------------------------+--------------------------------------------------------+ + | docs | Contains release notes, documentation, and User's Guide| + +--------------------------------+--------------------------------------------------------+ + | manage_externals | Utility for checking out external repositories | + +--------------------------------+--------------------------------------------------------+ + | src | Contains CMakeLists.txt; external repositories | + | | will be cloned in this directory. | + +--------------------------------+--------------------------------------------------------+ + + +.. _CheckoutExternals: + +Check Out External Components +================================ + +The SRW App relies on a variety of components (e.g., regional_workflow, UFS_UTILS, ufs-weather-model, and UPP) detailed in :numref:`Chapter %s ` of this User's Guide. Users must run the ``checkout_externals`` script to link the necessary external repositories to the SRW App. The ``checkout_externals`` script uses the configuration file ``Externals.cfg`` in the top level directory of the SRW App to clone the correct tags (code versions) of the external repositories listed in :numref:`Section %s ` into the appropriate directories under the ``regional_workflow`` and ``src`` directories. + +Run the executable that pulls in SRW App components from external repositories: + +.. code-block:: console + + cd ufs-srweather-app + ./manage_externals/checkout_externals + + + +.. _SetUpBuild: + +Set up the Build Environment +============================ + +Before building the SRW App, the build environment must be set up for the user's specific platform. There is a set of common modules required to build the SRW App. These are located in the ``env/srw_common`` file. To load the set of common modules, run: + +.. code-block:: console + + module use + +where ```` is the full path to the ``env`` directory. + +Then, users must set up the platform-specific elements of the build environment. For Level 1 systems, scripts for loading the proper modules and/or setting the correct environment variables can be found in the ``env`` directory of the SRW App in files named ``build__.env``. Here is a sample directory listing of these build files: + +.. code-block:: console + + $ ls -l env/ + -rw-rw-r-- 1 user ral 1228 Oct 9 10:09 build_cheyenne_intel.env + -rw-rw-r-- 1 user ral 1134 Oct 9 10:09 build_hera_intel.env + -rw-rw-r-- 1 user ral 1228 Oct 9 10:09 build_jet_intel.env + ... + +On Level 1 systems, the commands in the ``build__.env`` files can be directly copy-pasted into the command line, or the file can be sourced from the ``ufs-srweather-app/env`` directory. For example, on Hera, run: + +.. 
code-block:: + + source env/build_hera_intel.env + +from the main ``ufs-srweather-app`` directory to source the appropriate file. + +On Level 2-4 systems, users will need to modify certain environment variables, such as the path to NCEP libraries, so that the SRW App can find and load the appropriate modules. For systems with Lmod installed, one of the current ``build__.env`` files can be copied and used as a template. To check whether Lmod is installed, run ``echo $LMOD_PKG``, and see if it outputs a path to the Lmod package. On systems without Lmod, users can modify or set the required environment variables with the ``export`` or ``setenv`` commands despending on whether they are using a bash or csh/tcsh shell, respectively: + +.. code-block:: + + export = + setenv + + +.. _BuildExecutables: + +Build the Executables +======================= + +Create a directory to hold the build's executables: + +.. code-block:: console + + mkdir build + cd build + +From the build directory, run the following commands to build the pre-processing utilities, forecast model, and post-processor: + +.. code-block:: console + + cmake .. -DCMAKE_INSTALL_PREFIX=.. + make -j 4 >& build.out & + +``-DCMAKE_INSTALL_PREFIX`` specifies the location in which the ``bin``, ``include``, ``lib``, and ``share`` directories will be created. These directories will contain various components of the SRW App. Its recommended value ``..`` denotes one directory up from the build directory. In the next line, the ``make`` call argument ``-j 4`` indicates that the build will run in parallel with 4 threads. + +The build will take a few minutes to complete. When it starts, a random number is printed to the console, and when it is done, a ``[1]+ Done`` message is printed to the console. ``[1]+ Exit`` indicates an error. Output from the build will be in the ``ufs-srweather-app/build/build.out`` file. When the build completes, users should see the forecast model executable ``ufs_model`` and several pre- and post-processing executables in the ``ufs-srweather-app/bin`` directory. These executables are described in :numref:`Table %s `. + +.. hint:: + + If you see the build.out file, but there is no ``ufs-srweather-app/bin`` directory, wait a few more minutes for the build to complete. + +.. _ExecDescription: + +.. 
table:: Names and descriptions of the executables produced by the build step and used by the SRW App + + +------------------------+---------------------------------------------------------------------------------+ + | **Executable Name** | **Description** | + +========================+=================================================================================+ + | chgres_cube | Reads in raw external model (global or regional) and surface climatology data | + | | to create initial and lateral boundary conditions | + +------------------------+---------------------------------------------------------------------------------+ + | filter_topo | Filters topography based on resolution | + +------------------------+---------------------------------------------------------------------------------+ + | global_equiv_resol | Calculates a global, uniform, cubed-sphere equivalent resolution for the | + | | regional Extended Schmidt Gnomonic (ESG) grid | + +------------------------+---------------------------------------------------------------------------------+ + | make_solo_mosaic | Creates mosaic files with halos | + +------------------------+---------------------------------------------------------------------------------+ + | upp.x | Post-processor for the model output | + +------------------------+---------------------------------------------------------------------------------+ + | ufs_model | UFS Weather Model executable | + +------------------------+---------------------------------------------------------------------------------+ + | orog | Generates orography, land mask, and gravity wave drag files from fixed files | + +------------------------+---------------------------------------------------------------------------------+ + | regional_esg_grid | Generates an ESG regional grid based on a user-defined namelist | + +------------------------+---------------------------------------------------------------------------------+ + | sfc_climo_gen | Creates surface climatology fields from fixed files for use in ``chgres_cube`` | + +------------------------+---------------------------------------------------------------------------------+ + | shave | Shaves the excess halo rows down to what is required for the lateral boundary | + | | conditions (LBC's) in the orography and grid files | + +------------------------+---------------------------------------------------------------------------------+ + | vcoord_gen | Generates hybrid coordinate interface profiles | + +------------------------+---------------------------------------------------------------------------------+ + | fvcom_to_FV3 | Determines lake surface conditions for the Great Lakes | + +------------------------+---------------------------------------------------------------------------------+ + | make_hgrid | Computes geo-referencing parameters (e.g., latitude, longitude, grid cell area) | + | | for global uniform grids | + +------------------------+---------------------------------------------------------------------------------+ + | emcsfc_ice_blend | Blends National Ice Center sea ice cover and EMC sea ice concentration data to | + | | create a global sea ice analysis used to update the GFS once per day | + +------------------------+---------------------------------------------------------------------------------+ + | emcsfc_snow2mdl | Blends National Ice Center snow cover and Air Force snow depth data to create a | + | | global depth analysis used to update the GFS snow field once per day | + 
+------------------------+---------------------------------------------------------------------------------+ + | global_cycle | Updates the GFS surface conditions using external snow and sea ice analyses | + +------------------------+---------------------------------------------------------------------------------+ + | inland | Creates an inland land mask by determining in-land (i.e. non-coastal) points | + | | and assigning a value of 1. Default value is 0. | + +------------------------+---------------------------------------------------------------------------------+ + | orog_gsl | Ceates orographic statistics fields required for the orographic drag suite | + | | developed by NOAA's Global Systems Laboratory (GSL) | + +------------------------+---------------------------------------------------------------------------------+ + | fregrid | Remaps data from the input mosaic grid to the output mosaic grid | + +------------------------+---------------------------------------------------------------------------------+ + | lakefrac | Calculates the ratio of the lake area to the grid cell area at each atmospheric | + | | grid point. | + +------------------------+---------------------------------------------------------------------------------+ + +.. _Data: + +Download and Stage the Data +============================ + +The SRW App requires input files to run. These include static datasets, initial and boundary conditions files, and model configuration files. On Level 1 and 2 systems, the data required to run SRW App tests are already available. For Level 3 and 4 systems, the data must be added. Detailed instructions on how to add the data can be found in the :numref:`Section %s Downloading and Staging Input Data `. :numref:`Sections %s ` and :numref:`%s ` contain useful background information on the input and output files used in the SRW App. + +.. _GridSpecificConfig: + +Grid Configuration +======================= + +The SRW App officially supports three different predefined grids as shown in :numref:`Table %s `. The "out-of-the-box" SRW App case uses the ``RRFS_CONUS_25km`` predefined grid option. More information on the predefined and user-generated grid options can be found in :numref:`Chapter %s ` for those who are curious. Users who plan to utilize one of the three pre-defined domain (grid) options may continue to :numref:`Step %s `. Users who plan to create a new domain should refer to :numref:`Chapter %s ` for details on how to do so. At a minimum, these users will need to add the new grid name to the ``valid_param_vals`` script and add the corresponding grid-specific parameters in the ``set_predef_grid_params`` script. + +.. _PredefinedGrids: + +.. table:: Predefined grids in the SRW App + + +----------------------+-------------------+--------------------------------+ + | **Grid Name** | **Grid Type** | **Quilting (write component)** | + +======================+===================+================================+ + | RRFS_CONUS_25km | ESG grid | lambert_conformal | + +----------------------+-------------------+--------------------------------+ + | RRFS_CONUS_13km | ESG grid | lambert_conformal | + +----------------------+-------------------+--------------------------------+ + | RRFS_CONUS_3km | ESG grid | lambert_conformal | + +----------------------+-------------------+--------------------------------+ + + +.. 
_GenerateForecast: + +Generate the Forecast Experiment +================================= +Generating the forecast experiment requires three steps: + +* :ref:`Set experiment parameters ` +* :ref:`Set Python and other environment parameters ` +* :ref:`Run a script to generate the experiment workflow ` + +The first two steps depend on the platform being used and are described here for each Level 1 platform. Users will need to adjust the instructions to their machine if they are working on a Level 2-4 platform. Information in :numref:`Chapter %s: Configuring the Workflow ` can help with this. + +.. _ExptConfig: + +Set Experiment Parameters +---------------------------- + +Each experiment requires certain basic information to run (e.g., date, grid, physics suite). This information is specified in ``config_defaults.sh`` and in the user-specific ``config.sh`` file. When generating a new experiment, the SRW App first reads and assigns default values from the ``config_defaults.sh`` file. Then, it reads and (re)assigns variables from the user's custom ``config.sh`` file. For background info on ``config_defaults.sh``, read :numref:`Section %s `, or jump to :numref:`Section %s ` to continue configuring the experiment. + +.. _DefaultConfigSection: + +Default configuration: ``config_defaults.sh`` +------------------------------------------------ + +.. note:: + This section provides background information on how the SRW App uses the ``config_defaults.sh`` file. This information is informative, but users do not need to modify ``config_defaults.sh`` to run the out-of-the-box case for the SRW App. Users may skip to :numref:`Step %s ` to continue configuring their experiment. + +Important configuration variables in the ``config_defaults.sh`` file appear in +:numref:`Table %s `. Some of these default values are intentionally invalid in order to ensure that the user assigns valid values in the user-specified ``config.sh`` file. Any settings provided in ``config.sh`` will override the default ``config_defaults.sh`` +settings. There is usually no need for a user to modify the default configuration file. Additional information on the default settings can be found in the file itself and in :numref:`Chapter %s `. + +.. _ConfigVarsDefault: + +.. table:: Configuration variables specified in the config_defaults.sh script. + + +----------------------+------------------------------------------------------------+ + | **Group Name** | **Configuration variables** | + +======================+============================================================+ + | Experiment mode | RUN_ENVIR | + +----------------------+------------------------------------------------------------+ + | Machine and queue | MACHINE, ACCOUNT, SCHED, PARTITION_DEFAULT, QUEUE_DEFAULT, | + | | PARTITION_HPSS, QUEUE_HPSS, PARTITION_FCST, QUEUE_FCST | + +----------------------+------------------------------------------------------------+ + | Cron | USE_CRON_TO_RELAUNCH, CRON_RELAUNCH_INTVL_MNTS | + +----------------------+------------------------------------------------------------+ + | Experiment Dir. 
| EXPT_BASEDIR, EXPT_SUBDIR | + +----------------------+------------------------------------------------------------+ + | NCO mode | COMINgfs, STMP, NET, envir, RUN, PTMP | + +----------------------+------------------------------------------------------------+ + | Separator | DOT_OR_USCORE | + +----------------------+------------------------------------------------------------+ + | File name | EXPT_CONFIG_FN, RGNL_GRID_NML_FN, DATA_TABLE_FN, | + | | DIAG_TABLE_FN, FIELD_TABLE_FN, FV3_NML_BASE_SUITE_FN, | + | | FV3_NML_YALM_CONFIG_FN, FV3_NML_BASE_ENS_FN, | + | | MODEL_CONFIG_FN, NEMS_CONFIG_FN, FV3_EXEC_FN, | + | | WFLOW_XML_FN, GLOBAL_VAR_DEFNS_FN, | + | | EXTRN_MDL_ICS_VAR_DEFNS_FN, EXTRN_MDL_LBCS_VAR_DEFNS_FN, | + | | WFLOW_LAUNCH_SCRIPT_FN, WFLOW_LAUNCH_LOG_FN | + +----------------------+------------------------------------------------------------+ + | Forecast | DATE_FIRST_CYCL, DATE_LAST_CYCL, CYCL_HRS, FCST_LEN_HRS | + +----------------------+------------------------------------------------------------+ + | IC/LBC | EXTRN_MDL_NAME_ICS, EXTRN_MDL_NAME_LBCS, | + | | LBC_SPEC_INTVL_HRS, FV3GFS_FILE_FMT_ICS, | + | | FV3GFS_FILE_FMT_LBCS | + +----------------------+------------------------------------------------------------+ + | NOMADS | NOMADS, NOMADS_file_type | + +----------------------+------------------------------------------------------------+ + | External model | USE_USER_STAGED_EXTRN_FILES, EXTRN_MDL_SOURCE_BASEDRI_ICS, | + | | EXTRN_MDL_FILES_ICS, EXTRN_MDL_SOURCE_BASEDIR_LBCS, | + | | EXTRN_MDL_FILES_LBCS | + +----------------------+------------------------------------------------------------+ + | CCPP | CCPP_PHYS_SUITE | + +----------------------+------------------------------------------------------------+ + | GRID | GRID_GEN_METHOD | + +----------------------+------------------------------------------------------------+ + | ESG grid | ESGgrid_LON_CTR, ESGgrid_LAT_CTR, ESGgrid_DELX, | + | | ESGgrid_DELY, ESGgrid_NX, ESGgrid_NY, | + | | ESGgrid_WIDE_HALO_WIDTH | + +----------------------+------------------------------------------------------------+ + | Input configuration | DT_ATMOS, LAYOUT_X, LAYOUT_Y, BLOCKSIZE, QUILTING, | + | | PRINT_ESMF, WRTCMP_write_groups, | + | | WRTCMP_write_tasks_per_group, WRTCMP_output_grid, | + | | WRTCMP_cen_lon, WRTCMP_cen_lat, WRTCMP_lon_lwr_left, | + | | WRTCMP_lat_lwr_left, WRTCMP_lon_upr_rght, | + | | WRTCMP_lat_upr_rght, WRTCMP_dlon, WRTCMP_dlat, | + | | WRTCMP_stdlat1, WRTCMP_stdlat2, WRTCMP_nx, WRTCMP_ny, | + | | WRTCMP_dx, WRTCMP_dy | + +----------------------+------------------------------------------------------------+ + | Pre-existing grid | PREDEF_GRID_NAME, PREEXISTING_DIR_METHOD, VERBOSE | + +----------------------+------------------------------------------------------------+ + | Cycle-independent | RUN_TASK_MAKE_GRID, GRID_DIR, RUN_TASK_MAKE_OROG, | + | | OROG_DIR, RUN_TASK_MAKE_SFC_CLIMO, SFC_CLIMO_DIR | + +----------------------+------------------------------------------------------------+ + | Surface climatology | SFC_CLIMO_FIELDS, FIXgsm, TOPO_DIR, SFC_CLIMO_INPUT_DIR, | + | | FNGLAC, FNMXIC, FNTSFC, FNSNOC, FNZORC, FNAISC, FNSMCC, | + | | FNMSKH, FIXgsm_FILES_TO_COPY_TO_FIXam, | + | | FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING, | + | | FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING, | + | | CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING | + +----------------------+------------------------------------------------------------+ + | Workflow task | MAKE_GRID_TN, MAKE_OROG_TN, MAKE_SFC_CLIMO_TN, | + | | GET_EXTRN_ICS_TN, GET_EXTRN_LBCS_TN, 
MAKE_ICS_TN, | + | | MAKE_LBCS_TN, RUN_FCST_TN, RUN_POST_TN | + +----------------------+------------------------------------------------------------+ + | NODE | NNODES_MAKE_GRID, NNODES_MAKE_OROG, NNODES_MAKE_SFC_CLIMO, | + | | NNODES_GET_EXTRN_ICS, NNODES_GET_EXTRN_LBCS, | + | | NNODES_MAKE_ICS, NNODES_MAKE_LBCS, NNODES_RUN_FCST, | + | | NNODES_RUN_POST | + +----------------------+------------------------------------------------------------+ + | MPI processes | PPN_MAKE_GRID, PPN_MAKE_OROG, PPN_MAKE_SFC_CLIMO, | + | | PPN_GET_EXTRN_ICS, PPN_GET_EXTRN_LBCS, PPN_MAKE_ICS, | + | | PPN_MAKE_LBCS, PPN_RUN_FCST, PPN_RUN_POST | + +----------------------+------------------------------------------------------------+ + | Walltime | WTIME_MAKE_GRID, WTIME_MAKE_OROG, WTIME_MAKE_SFC_CLIMO, | + | | WTIME_GET_EXTRN_ICS, WTIME_GET_EXTRN_LBCS, WTIME_MAKE_ICS, | + | | WTIME_MAKE_LBCS, WTIME_RUN_FCST, WTIME_RUN_POST | + +----------------------+------------------------------------------------------------+ + | Maximum attempt | MAXTRIES_MAKE_GRID, MAXTRIES_MAKE_OROG, | + | | MAXTRIES_MAKE_SFC_CLIMO, MAXTRIES_GET_EXTRN_ICS, | + | | MAXTRIES_GET_EXTRN_LBCS, MAXTRIES_MAKE_ICS, | + | | MAXTRIES_MAKE_LBCS, MAXTRIES_RUN_FCST, MAXTRIES_RUN_POST | + +----------------------+------------------------------------------------------------+ + | Post configuration | USE_CUSTOM_POST_CONFIG_FILE, CUSTOM_POST_CONFIG_FP | + +----------------------+------------------------------------------------------------+ + | Running ensembles | DO_ENSEMBLE, NUM_ENS_MEMBERS | + +----------------------+------------------------------------------------------------+ + | Stochastic physics | DO_SHUM, DO_SPPT, DO_SKEB, SHUM_MAG, SHUM_LSCALE, | + | | SHUM_TSCALE, SHUM_INT, SPPT_MAG, SPPT_LSCALE, SPPT_TSCALE, | + | | SPPT_INT, SKEB_MAG, SKEB_LSCALE, SKEP_TSCALE, SKEB_INT, | + | | SKEB_VDOF, USE_ZMTNBLCK | + +----------------------+------------------------------------------------------------+ + | Boundary blending | HALO_BLEND | + +----------------------+------------------------------------------------------------+ + | FVCOM | USE_FVCOM, FVCOM_DIR, FVCOM_FILE | + +----------------------+------------------------------------------------------------+ + | Compiler | COMPILER | + +----------------------+------------------------------------------------------------+ + + +.. _UserSpecificConfig: + +User-specific configuration: ``config.sh`` +-------------------------------------------- + +The user must specify certain basic information about the experiment in a ``config.sh`` file located in the ``ufs-srweather-app/regional_workflow/ush`` directory. Two example templates are provided in that directory: ``config.community.sh`` and ``config.nco.sh``. The first file is a minimal example for creating and running an experiment in the *community* mode (with ``RUN_ENVIR`` set to ``community``). The second is an example for creating and running an experiment in the *NCO* (operational) mode (with ``RUN_ENVIR`` set to ``nco``). The *community* mode is recommended in most cases and will be fully supported for this release. The operational/NCO mode will typically be used by those at the NOAA/NCEP/Environmental Modeling Center (EMC) and the NOAA/Global Systems Laboratory (GSL) working on pre-implementation testing for the Rapid Refresh Forecast System (RRFS). :numref:`Table %s ` shows the configuration variables, along with their default values in ``config_default.sh`` and the values defined in ``config.community.sh``. + +.. _ConfigCommunity: + +.. 
table:: Configuration variables specified in the config.community.sh script + + +--------------------------------+-------------------+--------------------------------------------------------+ + | **Parameter** | **Default Value** | **config.community.sh Value** | + +================================+===================+========================================================+ + | MACHINE | "BIG_COMPUTER" | "hera" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | ACCOUNT | "project_name" | "an_account" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | EXPT_SUBDIR | "" | "test_CONUS_25km_GFSv16" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | VERBOSE | "TRUE" | "TRUE" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | RUN_ENVIR | "nco" | "community" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | PREEXISTING_DIR_METHOD | "delete" | "rename" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | PREDEF_GRID_NAME | "" | "RRFS_CONUS_25km" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | GRID_GEN_METHOD | "ESGgrid" | "ESGgrid" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | QUILTING | "TRUE" | "TRUE" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | CCPP_PHYS_SUITE | "FV3_GSD_V0" | "FV3_GFS_v16" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | FCST_LEN_HRS | "24" | "48" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | LBC_SPEC_INTVL_HRS | "6" | "6" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | DATE_FIRST_CYCL | "YYYYMMDD" | "20190615" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | DATE_LAST_CYCL | "YYYYMMDD" | "20190615" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | CYCL_HRS | ("HH1" "HH2") | "00" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | EXTRN_MDL_NAME_ICS | "FV3GFS" | "FV3GFS" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | EXTRN_MDL_NAME_LBCS | "FV3GFS" | "FV3GFS" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | FV3GFS_FILE_FMT_ICS | "nemsio" | "grib2" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | FV3GFS_FILE_FMT_LBCS | "nemsio" | "grib2" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | WTIME_RUN_FCST | "04:30:00" | "01:00:00" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | 
USE_USER_STAGED_EXTRN_FILES | "FALSE" | "TRUE" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | EXTRN_MDL_SOURCE_BASE_DIR_ICS | "" | "/scratch2/BMC/det/UFS_SRW_app/v1p0/model_data/FV3GFS" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | EXTRN_MDL_FILES_ICS | "" | "gfs.pgrb2.0p25.f000" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | EXTRN_MDL_SOURCE_BASEDIR_LBCS | "" | "/scratch2/BMC/det/UFS_SRW_app/v1p0/model_data/FV3GFS" | + +--------------------------------+-------------------+--------------------------------------------------------+ + | EXTRN_MDL_FILES_LBCS | "" | "gfs.pgrb2.0p25.f006" | + +--------------------------------+-------------------+--------------------------------------------------------+ + + +To get started, make a copy of ``config.community.sh``. From the ``ufs-srweather-app`` directory, run: + +.. code-block:: console + + cd regional_workflow/ush + cp config.community.sh config.sh + +The default settings in this file include a predefined 25-km :term:`CONUS` grid (RRFS_CONUS_25km), the :term:`GFS` v16 physics suite (FV3_GFS_v16 :term:`CCPP`), and :term:`FV3`-based GFS raw external model data for initialization. + +Next, edit the new ``config.sh`` file to customize it for your machine. At a minimum, change the ``MACHINE`` and ``ACCOUNT`` variables; then choose a name for the experiment directory by setting ``EXPT_SUBDIR``. If you have pre-staged the initialization data for the experiment, set ``USE_USER_STAGED_EXTRN_FILES="TRUE"``, and set the paths to the data for ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_SOURCE_BASEDIR_LBCS``. + +Sample settings are indicated below for Level 1 platforms. Detailed guidance applicable to all systems can be found in :numref:`Chapter %s: Configuring the Workflow `, which discusses each variable and the options available. Additionally, information about the three predefined Limited Area Model (LAM) Grid options can be found in :numref:`Chapter %s: Limited Area Model (LAM) Grids `. + +.. important:: + + If you set up the build environment with the GNU compiler in :numref:`Section %s `, you will have to check that the line ``COMPILER="gnu"`` appears in the ``config.sh`` file. + +.. hint:: + + To determine an appropriate ACCOUNT field for Level 1 systems, run ``groups``, and it will return a list of projects you have permissions for. Not all of the listed projects/groups have an HPC allocation, but those that do are potentially valid account names. + +Minimum parameter settings for running the out-of-the-box SRW App case on Level 1 machines: + +**Cheyenne:** + +.. code-block:: console + + MACHINE="cheyenne" + ACCOUNT="" + EXPT_SUBDIR="" + USE_USER_STAGED_EXTRN_FILES="TRUE" + EXTRN_MDL_SOURCE_BASEDIR_ICS="/glade/p/ral/jntp/UFS_SRW_app/staged_extrn_mdl_files" + EXTRN_MDL_SOURCE_BASEDIR_LBCS="/glade/p/ral/jntp/UFS_SRW_app/staged_extrn_mdl_files" + +**Hera, Jet, Orion, Gaea:** + +The ``MACHINE``, ``ACCOUNT``, and ``EXPT_SUBDIR`` settings are the same as for Cheyenne, except that ``"cheyenne"`` should be switched to ``"hera"``, ``"jet"``, ``"orion"``, or ``"gaea"``, respectively. Set ``USE_USER_STAGED_EXTRN_FILES="TRUE"``, but replace the file paths to Cheyenne's data with the file paths for the correct machine. ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_SOURCE_BASEDIR_LBCS`` use the same file path. + +On Hera: + +.. 
code-block:: console + + "/scratch2/BMC/det/UFS_SRW_app/v1p0/model_data" + +On Jet: + +.. code-block:: console + + "/lfs4/BMC/wrfruc/FV3-LAM/model_data" + +On Orion: + +.. code-block:: console + + "/work/noaa/fv3-cam/UFS_SRW_app/v1p0/model_data" + + +On Gaea: + +.. code-block:: console + + "/lustre/f2/pdata/esrl/gsd/ufs/ufs-srw-release-v1.0.0/staged_extrn_mdl_files" + + +For **WCOSS** systems, edit ``config.sh`` with these WCOSS-specific parameters, and use a valid WCOSS project code for the account parameter: + +.. code-block:: console + + MACHINE=”wcoss_cray” or MACHINE=”wcoss_dell_p3” + ACCOUNT="my_account" + EXPT_SUBDIR="my_expt_name" + USE_USER_STAGED_EXTRN_FILES="TRUE" + +For WCOSS_DELL_P3: + +.. code-block:: console + + EXTRN_MDL_SOURCE_BASEDIR_ICS="/gpfs/dell2/emc/modeling/noscrub/UFS_SRW_App/model_data" + EXTRN_MDL_SOURCE_BASEDIR_LBCS="/gpfs/dell2/emc/modeling/noscrub/UFS_SRW_App/model_data" + +For WCOSS_CRAY: + +.. code-block:: console + + EXTRN_MDL_SOURCE_BASEDIR_ICS="/gpfs/hps3/emc/meso/noscrub/UFS_SRW_App/model_data" + EXTRN_MDL_SOURCE_BASEDIR_LBCS="/gpfs/hps3/emc/meso/noscrub/UFS_SRW_App/model_data" + + +**NOAA Cloud Systems:** + +.. code-block:: console + + MACHINE="SINGULARITY" + ACCOUNT="none" + EXPT_SUBDIR="" + EXPT_BASEDIR="lustre/$USER/expt_dirs" + COMPILER="gnu" + USE_USER_STAGED_EXTRN_FILES="TRUE" + EXTRN_MDL_SOURCE_BASEDIR_ICS="/contrib/EPIC/model_data/FV3GFS" + EXTRN_MDL_FILES_ICS=( "gfs.pgrb2.0p25.f000" ) + EXTRN_MDL_SOURCE_BASEDIR_LBCS="/contrib/EPIC/model_data/FV3GFS" + EXTRN_MDL_FILES_LBCS=( "gfs.pgrb2.0p25.f006" "gfs.pgrb2.0p25.f012" ) + +.. note:: + + The values of the configuration variables should be consistent with those in the + ``valid_param_vals script``. In addition, various example configuration files can be + found in the ``regional_workflow/tests/baseline_configs`` directory. + + + +.. _SetUpPythonEnv: + +Set up the Python and other Environment Parameters +-------------------------------------------------- +The workflow requires Python 3 with the packages 'PyYAML', 'Jinja2', and 'f90nml' available. This Python environment has already been set up on Level 1 platforms, and it can be activated in the following way (from ``/ufs-srweather-app/regional_workflow/ush``): + +.. code-block:: console + + source ../../env/wflow_.env + +This command will activate the ``regional_workflow`` conda environment. The user should see ``(regional_workflow)`` in front of the Terminal prompt at this point. If this is not the case, activate the regional workflow from the ``ush`` directory by running: + +.. code-block:: console + + conda init + source ~/.bashrc + conda activate regional_workflow + + +.. _GenerateWorkflow: + +Generate the Regional Workflow +------------------------------------------- + +Run the following command from the ``ufs-srweather-app/regional_workflow/ush`` directory to generate the workflow: + +.. code-block:: console + + ./generate_FV3LAM_wflow.sh + +The last line of output from this script, starting with ``*/1 * * * *`` or ``*/3 * * * *``, can be saved and :ref:`used later ` to automatically run portions of the workflow. + +This workflow generation script creates an experiment directory and populates it with all the data needed to run through the workflow. The flowchart in :numref:`Figure %s ` describes the experiment generation process. First, ``generate_FV3LAM_wflow.sh`` runs the ``setup.sh`` script to set the configuration parameters. 
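.. note::

   As an optional, illustrative sanity check (assuming the ``regional_workflow`` conda environment described above is active), the Python packages the workflow requires (PyYAML, Jinja2, and f90nml) can be imported directly before generating the experiment; the printed message is only a placeholder:

   .. code-block:: console

      python -c "import yaml, jinja2, f90nml; print('workflow Python prerequisites found')"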
Second, it copies the time-independent (fix) files and other necessary data input files from their location in the ufs-weather-model directory to the experiment directory (``EXPTDIR``). Third, it copies the weather model executable (``ufs_model``) from the ``bin`` directory to ``EXPTDIR`` and creates the input namelist file ``input.nml`` based on the ``input.nml.FV3`` file in the regional_workflow/ush/templates directory. Lastly, it creates the workflow XML file ``FV3LAM_wflow.xml`` that is executed when running the experiment with the Rocoto workflow manager. + +The ``setup.sh`` script reads three other configuration scripts in order: (1) ``config_default.sh`` (:numref:`Section %s `), (2) ``config.sh`` (:numref:`Section %s `), and (3) ``set_predef_grid_params.sh`` (:numref:`Section %s `). If a parameter is specified differently in these scripts, the file containing the last defined value will be used. + +The generated workflow will appear in ``EXPTDIR``, where ``EXPTDIR=${EXPT_BASEDIR}/${EXPT_SUBDIR}``. These variables were specified in the ``config.sh`` file in :numref:`Step %s `. The settings for these paths can also be viewed in the console output from the ``./generate_FV3LAM_wflow.sh`` script or in the ``log.generate_FV3LAM_wflow`` file, which can be found in ``$EXPTDIR``. + +.. _WorkflowGeneration: + +.. figure:: _static/FV3regional_workflow_gen.png + + *Experiment generation description* + +.. _WorkflowTaskDescription: + +Description of Workflow Tasks +-------------------------------- + +.. note:: + This section gives a general overview of workflow tasks. To begin running the workflow, skip to :numref:`Step %s ` + +:numref:`Figure %s ` illustrates the overall workflow. Individual tasks that make up the workflow are specified in the ``FV3LAM_wflow.xml`` file. :numref:`Table %s ` describes the function of each task. The first three pre-processing tasks; ``MAKE_GRID``, ``MAKE_OROG``, and ``MAKE_SFC_CLIMO`` are optional. If the user stages pre-generated grid, orography, and surface climatology fix files, these three tasks can be skipped by adding the following lines to the ``config.sh`` file before running the ``generate_FV3LAM_wflow.sh`` script: + +.. code-block:: console + + RUN_TASK_MAKE_GRID=”FALSE” + RUN_TASK_MAKE_OROG=”FALSE” + RUN_TASK_MAKE_SFC_CLIMO=”FALSE” + + +.. _WorkflowTasksFig: + +.. figure:: _static/FV3LAM_wflow_flowchart.png + + *Flowchart of the workflow tasks* + + +The ``FV3LAM_wflow.xml`` file runs the specific j-job scripts (``regional_workflow/jobs/JREGIONAL_[task name]``) in the prescribed order when the experiment is launched via the ``launch_FV3LAM_wflow.sh`` script or the ``rocotorun`` command. Each j-job task has its own source script (or "ex-script") named ``exregional_[task name].sh`` in the ``regional_workflow/scripts`` directory. Two database files named ``FV3LAM_wflow.db`` and ``FV3LAM_wflow_lock.db`` are generated and updated by the Rocoto calls. There is usually no need for users to modify these files. To relaunch the workflow from scratch, delete these two ``*.db`` files and then call the launch script repeatedly for each task. + + +.. _WorkflowTasksTable: + +.. table:: Workflow tasks in the SRW App + + +----------------------+------------------------------------------------------------+ + | **Workflow Task** | **Task Description** | + +======================+============================================================+ + | make_grid | Pre-processing task to generate regional grid files. Only | + | | needs to be run once per experiment. 
| + +----------------------+------------------------------------------------------------+ + | make_orog | Pre-processing task to generate orography files. Only | + | | needs to be run once per experiment. | + +----------------------+------------------------------------------------------------+ + | make_sfc_climo | Pre-processing task to generate surface climatology files. | + | | Only needs to be run, at most, once per experiment. | + +----------------------+------------------------------------------------------------+ + | get_extrn_ics | Cycle-specific task to obtain external data for the | + | | initial conditions | + +----------------------+------------------------------------------------------------+ + | get_extrn_lbcs | Cycle-specific task to obtain external data for the | + | | lateral boundary conditions (LBC's) | + +----------------------+------------------------------------------------------------+ + | make_ics | Generate initial conditions from the external data | + +----------------------+------------------------------------------------------------+ + | make_lbcs | Generate LBC's from the external data | + +----------------------+------------------------------------------------------------+ + | run_fcst | Run the forecast model (UFS weather model) | + +----------------------+------------------------------------------------------------+ + | run_post | Run the post-processing tool (UPP) | + +----------------------+------------------------------------------------------------+ + + + +.. _RocotoRun: + +Run the Workflow Using Rocoto +============================= +The information in this section assumes that Rocoto is available on the desired platform. (Note that Rocoto cannot be used when running the workflow within a container.) If Rocoto is not available, it is still possible to run the workflow using stand-alone scripts according to the process outlined in :numref:`Section %s `. There are two main ways to run the workflow with Rocoto: (1) with the ``launch_FV3LAM_wflow.sh`` script, and (2) by manually calling the ``rocotorun`` command. Users can also automate the workflow using a crontab. + +Optionally, an environment variable can be set to navigate to the ``$EXPTDIR`` more easily. If the login shell is bash, it can be set as follows: + +.. code-block:: console + + export EXPTDIR=// + +If the login shell is csh/tcsh, it can be set using: + +.. code-block:: console + + setenv EXPTDIR /path-to-experiment/directory + + +Launch the Rocoto Workflow Using a Script +----------------------------------------------- + +To run Rocoto using the ``launch_FV3LAM_wflow.sh`` script provided, simply call it without any arguments: + +.. code-block:: console + + cd $EXPTDIR + ./launch_FV3LAM_wflow.sh + +This script creates a log file named ``log.launch_FV3LAM_wflow`` in ``$EXPTDIR`` or appends information to it if the file already exists. The launch script also creates the ``log/FV3LAM_wflow.log`` file, which shows Rocoto task information. Check the end of the log files periodically to see how the experiment is progressing: + +.. code-block:: console + + tail -n 40 log.launch_FV3LAM_wflow + +In order to launch additional tasks in the workflow, call the launch script again; this action will need to be repeated until all tasks in the workflow have been launched. To (re)launch the workflow and check its progress on a single line, run: + +.. 
code-block:: console + + ./launch_FV3LAM_wflow.sh; tail -n 40 log.launch_FV3LAM_wflow + +This will output the last 40 lines of the log file, which list the status of the workflow tasks (e.g., SUCCEEDED, DEAD, RUNNING, SUBMITTING, QUEUED). The number 40 can be changed according to the user's preferences. The output will look like this: + +.. code-block:: console + + CYCLE TASK JOBID STATE EXIT STATUS TRIES DURATION + ====================================================================================================== + 202006170000 make_grid druby://hfe01:33728 SUBMITTING - 0 0.0 + 202006170000 make_orog - - - - - + 202006170000 make_sfc_climo - - - - - + 202006170000 get_extrn_ics druby://hfe01:33728 SUBMITTING - 0 0.0 + 202006170000 get_extrn_lbcs druby://hfe01:33728 SUBMITTING - 0 0.0 + 202006170000 make_ics - - - - - + 202006170000 make_lbcs - - - - - + 202006170000 run_fcst - - - - - + 202006170000 run_post_00 - - - - - + 202006170000 run_post_01 - - - - - + 202006170000 run_post_02 - - - - - + 202006170000 run_post_03 - - - - - + 202006170000 run_post_04 - - - - - + 202006170000 run_post_05 - - - - - + 202006170000 run_post_06 - - - - - + + Summary of workflow status: + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + 0 out of 1 cycles completed. + Workflow status: IN PROGRESS + +Error messages for each specific task can be found in the task log files located in ``$EXPTDIR/log``. + +If everything goes smoothly, you will eventually get the following workflow status table as follows: + +.. code-block:: console + + CYCLE TASK JOBID STATE EXIT STATUS TRIES DURATION + ====================================================================================================== + 202006170000 make_grid 8854765 SUCCEEDED 0 1 6.0 + 202006170000 make_orog 8854809 SUCCEEDED 0 1 27.0 + 202006170000 make_sfc_climo 8854849 SUCCEEDED 0 1 36.0 + 202006170000 get_extrn_ics 8854763 SUCCEEDED 0 1 54.0 + 202006170000 get_extrn_lbcs 8854764 SUCCEEDED 0 1 61.0 + 202006170000 make_ics 8854914 SUCCEEDED 0 1 119.0 + 202006170000 make_lbcs 8854913 SUCCEEDED 0 1 98.0 + 202006170000 run_fcst 8854992 SUCCEEDED 0 1 655.0 + 202006170000 run_post_00 8855459 SUCCEEDED 0 1 6.0 + 202006170000 run_post_01 8855460 SUCCEEDED 0 1 6.0 + 202006170000 run_post_02 8855461 SUCCEEDED 0 1 6.0 + 202006170000 run_post_03 8855462 SUCCEEDED 0 1 6.0 + 202006170000 run_post_04 8855463 SUCCEEDED 0 1 6.0 + 202006170000 run_post_05 8855464 SUCCEEDED 0 1 6.0 + 202006170000 run_post_06 8855465 SUCCEEDED 0 1 6.0 + +If all the tasks complete successfully, the workflow status in the log file will indicate “SUCCESS." Otherwise, the workflow status will indicate “FAILURE." + + +Launch the Rocoto Workflow Manually +--------------------------------------- + +Load Rocoto +^^^^^^^^^^^^^^^^ + +Instead of running the ``./launch_FV3LAM_wflow.sh`` script, users can load Rocoto and any other required modules. This gives the user more control over the process and allows them to view experiment progress more easily. On Level 1 systems, the Rocoto modules are loaded automatically in :numref:`Step %s `. For most other systems, a variant on the following commands will be necessary to load the Rocoto module: + +.. code-block:: console + + module use + module load rocoto + +Some systems may require a version number (e.g., ``module load rocoto/1.3.3``) + +Run the Rocoto Workflow +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +After loading Rocoto, call ``rocotorun`` from the experiment directory to launch the workflow tasks. This will start any tasks that do not have a dependency. 
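.. note::

   If a task is not submitted when expected, the ``rocotocheck`` utility that ships with Rocoto (assumed to be available alongside ``rocotorun`` and ``rocotostat``) can report why a given task is being held back. The cycle and task name below are illustrative values taken from the out-of-the-box case:

   .. code-block:: console

      rocotocheck -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10 -c 201906150000 -t make_grid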
+If the experiment fails, the ``rocotostat`` command will indicate which task failed. Users can look at the log file in the ``log`` subdirectory for the failed task to determine what caused the failure. For example, if the ``make_grid`` task failed, users can open the ``make_grid.log`` file to see what caused the problem:
+
+.. code-block:: console
+
+   cd $EXPTDIR/log
+   vi make_grid.log
+
+.. note::
+
+   If users have the `Slurm workload manager `_ on their system, they can run the ``squeue`` command in lieu of ``rocotostat`` to check what jobs are currently running.
+
+.. _Automate:
+
+Automated Option
+----------------------
+For automatic resubmission of the workflow at regular intervals (e.g., every minute), the user can add a crontab entry using the ``crontab -e`` command. As mentioned in :numref:`Section %s `, the last line of output from ``./generate_FV3LAM_wflow.sh`` (starting with ``*/1 * * * *`` or ``*/3 * * * *``) can be pasted into the crontab file. It can also be found in the ``$EXPTDIR/log.generate_FV3LAM_wflow`` file. The crontab entry should resemble the following:
+
+.. code-block:: console
+
+   */3 * * * * cd <path/to/experiment/directory> && /apps/rocoto/1.3.3/bin/rocotorun -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10
+
+where ``<path/to/experiment/directory>`` is changed to correspond to the user's ``$EXPTDIR``, and ``/apps/rocoto/1.3.3/bin/rocotorun`` corresponds to the location of the ``rocotorun`` command on the user's system. The number ``3`` can be changed to a different positive integer and simply means that the workflow will be resubmitted every three minutes.
+
+.. hint::
+
+   * On NOAA Cloud instances, ``*/1 * * * *`` is the preferred option for cron jobs because compute nodes will shut down if they remain idle too long. If the compute node shuts down, it can take 15-20 minutes to start up a new one.
+   * On other NOAA HPC systems, admins discourage the ``*/1 * * * *`` option due to load problems. ``*/3 * * * *`` is the preferred option for cron jobs on non-Cloud systems.
+
+To check the experiment progress:
+
+.. code-block:: console
+
+   cd $EXPTDIR
+   rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10
+
+After finishing the experiment, open the crontab using ``crontab -e`` and delete the crontab entry.
+
+.. note::
+
+   On Orion, *cron* is only available on the orion-login-1 node, so users will need to work on that node when running *cron* jobs on Orion.
+
+The workflow run is complete when all tasks have “SUCCEEDED”, and the ``rocotostat`` command outputs the following:
+
+.. 
code-block:: console + + CYCLE TASK JOBID STATE EXIT STATUS TRIES DURATION + ========================================================================================================== + 201906150000 make_grid 4953154 SUCCEEDED 0 1 5.0 + 201906150000 make_orog 4953176 SUCCEEDED 0 1 26.0 + 201906150000 make_sfc_climo 4953179 SUCCEEDED 0 1 33.0 + 201906150000 get_extrn_ics 4953155 SUCCEEDED 0 1 2.0 + 201906150000 get_extrn_lbcs 4953156 SUCCEEDED 0 1 2.0 + 201906150000 make_ics 4953184 SUCCEEDED 0 1 16.0 + 201906150000 make_lbcs 4953185 SUCCEEDED 0 1 71.0 + 201906150000 run_fcst 4953196 SUCCEEDED 0 1 1035.0 + 201906150000 run_post_f000 4953244 SUCCEEDED 0 1 5.0 + 201906150000 run_post_f001 4953245 SUCCEEDED 0 1 4.0 + ... + 201906150000 run_post_f048 4953381 SUCCEEDED 0 1 7.0 + +.. _PlotOutput: + +Plot the Output +=============== +Two python scripts are provided to generate plots from the :term:`FV3`-LAM post-processed :term:`GRIB2` output. Information on how to generate the graphics can be found in :numref:`Chapter %s `. diff --git a/docs/UsersGuide/source/CodeReposAndDirs.rst b/docs/UsersGuide/source/CodeReposAndDirs.rst deleted file mode 100644 index e52f5512c0..0000000000 --- a/docs/UsersGuide/source/CodeReposAndDirs.rst +++ /dev/null @@ -1,261 +0,0 @@ -.. _CodeReposAndDirs: - -========================================= -Code Repositories and Directory Structure -========================================= -This chapter describes the code repositories that comprise the UFS SRW Application, -without describing any of the components in detail. - -.. _HierarchicalRepoStr: - -Hierarchical Repository Structure -================================= -The umbrella repository for the UFS SRW Application is named ufs-srweather-app and is -available on GitHub at https://github.com/ufs-community/ufs-srweather-app. An umbrella -repository is defined as a repository that houses external code, called "externals," from -additional repositories. The UFS SRW Application includes the ``manage_externals`` tools -along with a configuration file called ``Externals.cfg``, which describes the external -repositories associated with this umbrella repo (see :numref:`Table %s `). - -.. _top_level_repos: - -.. table:: List of top-level repositories that comprise the UFS SRW Application. 
- - +---------------------------------+---------------------------------------------------------+ - | **Repository Description** | **Authoritative repository URL** | - +=================================+=========================================================+ - | Umbrella repository for the UFS | https://github.com/ufs-community/ufs-srweather-app | - | Short-Range Weather Application | | - +---------------------------------+---------------------------------------------------------+ - | Repository for | https://github.com/ufs-community/ufs-weather-model | - | the UFS Weather Model | | - +---------------------------------+---------------------------------------------------------+ - | Repository for the regional | https://github.com/ufs-community/regional_workflow | - | workflow | | - +---------------------------------+---------------------------------------------------------+ - | Repository for UFS utilities, | https://github.com/ufs-community/UFS_UTILS | - | including pre-processing, | | - | chgres_cube, and more | | - +---------------------------------+---------------------------------------------------------+ - | Repository for the Unified Post | https://github.com/NOAA-EMC/UPP | - | Processor (UPP) | | - +---------------------------------+---------------------------------------------------------+ - -The UFS Weather Model contains a number of sub-repositories used by the model as -documented `here `_. - -Note that the prerequisite libraries (including NCEP Libraries and external libraries) are not -included in the UFS SRW Application repository. The source code for these components resides in -the repositories `NCEPLIBS `_ and `NCEPLIBS-external -`_. - -These external components are already built on the preconfigured platforms listed `here -`_. -However, they must be cloned and built on other platforms according to the instructions provided -in the wiki pages of those repositories: https://github.com/NOAA-EMC/NCEPLIBS/wiki and -https://github.com/NOAA-EMC/NCEPLIBS-external/wiki. - -.. _TopLevelDirStructure: - -Directory Structure -=================== -The directory structure for the SRW Application is determined by the ``local_path`` settings in -the ``Externals.cfg`` file, which is in the directory where the umbrella repository has -been cloned. After ``manage_externals/checkout_externals`` is run, the specific GitHub repositories -that are described in :numref:`Table %s ` are cloned into the target -subdirectories shown below. The directories that will be created later by running the -scripts are presented in parentheses. Some directories have been removed for brevity. - -.. 
code-block:: console - - ufs-srweather-app - ├── (bin) - ├── (build) - ├── docs - │ └── UsersGuide - ├── (include) - ├── (lib) - ├── manage_externals - ├── regional_workflow - │ ├── docs - │ │ └── UsersGuide - │ ├── (fix) - │ ├── jobs - │ ├── modulefiles - │ ├── scripts - │ ├── tests - │ │ └── baseline_configs - │ └── ush - │ ├── Python - │ ├── rocoto - │ ├── templates - │ └── wrappers - ├── (share) - └── src - ├── UPP - │ ├── parm - │ └── sorc - │ └── ncep_post.fd - ├── UFS_UTILS - │ ├── sorc - │ │ ├── chgres_cube.fd - │ │ ├── fre-nctools.fd - | │ ├── grid_tools.fd - │ │ ├── orog_mask_tools.fd - │ │ └── sfc_climo_gen.fd - │ └── ush - └── ufs_weather_model - └── FV3 - ├── atmos_cubed_sphere - └── ccpp - -Regional Workflow Sub-Directories ---------------------------------- -Under the ``regional_workflow`` directory shown in :numref:`TopLevelDirStructure` there are -a number of sub-directories that are created when the regional workflow is cloned. The -contents of these sub-directories are described in :numref:`Table %s `. - -.. _Subdirectories: - -.. table:: Sub-directories of the regional workflow. - - +-------------------------+---------------------------------------------------------+ - | **Directory Name** | **Description** | - +=========================+=========================================================+ - | docs | Users' Guide Documentation | - +-------------------------+---------------------------------------------------------+ - | jobs | J-job scripts launched by Rocoto | - +-------------------------+---------------------------------------------------------+ - | modulefiles | Files used to load modules needed for building and | - | | running the workflow | - +-------------------------+---------------------------------------------------------+ - | scripts | Run scripts launched by the J-jobs | - +-------------------------+---------------------------------------------------------+ - | tests | Baseline experiment configuration | - +-------------------------+---------------------------------------------------------+ - | ush | Utility scripts used by the workflow | - +-------------------------+---------------------------------------------------------+ - -.. _ExperimentDirSection: - -Experiment Directory Structure -============================== -When the ``generate_FV3LAM_wflow.sh`` script is run, the user-defined experimental directory -``EXPTDIR=/path-to/ufs-srweather-app/../expt_dirs/${EXPT_SUBDIR}`` is created, where ``EXPT_SUBDIR`` -is specified in the ``config.sh`` file. The contents of the ``EXPTDIR`` directory, before the -workflow is run, is shown in :numref:`Table %s `. - -.. _ExptDirStructure: - -.. table:: Files and sub-directory initially created in the experimental directory. 
- :widths: 33 67 - - +---------------------------+-------------------------------------------------------------------------------------------------------+ - | **File Name** | **Description** | - +===========================+=======================================================================================================+ - | config.sh | User-specified configuration file, see :numref:`Section %s ` | - +---------------------------+-------------------------------------------------------------------------------------------------------+ - | data_table | Cycle-independent input file (empty) | - +---------------------------+-------------------------------------------------------------------------------------------------------+ - | field_table | Tracers in the `forecast model | - | | `_ | - +---------------------------+-------------------------------------------------------------------------------------------------------+ - | FV3LAM_wflow.xml | Rocoto XML file to run the workflow | - +---------------------------+-------------------------------------------------------------------------------------------------------+ - | input.nml | Namelist for the `UFS Weather model | - | | `_ | - +---------------------------+-------------------------------------------------------------------------------------------------------+ - | launch_FV3LAM_wflow.sh | Symlink to the shell script of | - | | ``ufs-srweather-app/regional_workflow/ush/launch_FV3LAM_wflow.sh`` | - | | that can be used to (re)launch the Rocoto workflow. | - | | Each time this script is called, it appends to a log | - | | file named ``log.launch_FV3LAM_wflow``. | - +---------------------------+-------------------------------------------------------------------------------------------------------+ - | log.generate_FV3LAM_wflow | Log of the output from the experiment generation script | - | | ``generate_FV3LAM_wflow.sh`` | - +---------------------------+-------------------------------------------------------------------------------------------------------+ - | nems.configure | See `NEMS configuration file | - | | `_ | - +---------------------------+-------------------------------------------------------------------------------------------------------+ - | suite_{CCPP}.xml | CCPP suite definition file used by the forecast model | - +---------------------------+-------------------------------------------------------------------------------------------------------+ - | var_defns.sh | Shell script defining the experiment parameters. It contains all | - | | of the primary parameters specified in the default and | - | | user-specified configuration files plus many secondary parameters | - | | that are derived from the primary ones by the experiment | - | | generation script. This file is sourced by various other scripts | - | | in order to make all the experiment variables available to these | - | | scripts. | - +---------------------------+-------------------------------------------------------------------------------------------------------+ - | YYYYMMDDHH | Cycle directory (empty) | - +---------------------------+-------------------------------------------------------------------------------------------------------+ - -In addition, the *community* mode creates the ``fix_am`` and ``fix_lam`` directories in ``EXPTDIR``. -The ``fix_lam`` directory is initially empty but will contain some *fix* (time-independent) files -after the grid, orography, and/or surface climatology generation tasks are run. - -.. _FixDirectories: - -.. 
table:: Description of the fix directories - - +-------------------------+----------------------------------------------------------+ - | **Directory Name** | **Description** | - +=========================+==========================================================+ - | fix_am | Directory containing the global `fix` (time-independent) | - | | data files. The experiment generation script copies | - | | these files from a machine-dependent system directory. | - +-------------------------+----------------------------------------------------------+ - | fix_lam | Directory containing the regional fix (time-independent) | - | | data files that describe the regional grid, orography, | - | | and various surface climatology fields as well as | - | | symlinks to pre-generated files. | - +-------------------------+----------------------------------------------------------+ - -Once the workflow is launched with the ``launch_FV3LAM_wflow.sh`` script, a log file named -``log.launch_FV3LAM_wflow`` will be created (or appended to it if it already exists) in ``EXPTDIR``. -Once the ``make_grid``, ``make_orog``, and ``make_sfc_climo`` tasks and the ``get_extrn_ics`` -and ``get_extrn_lbc`` tasks for the YYYYMMDDHH cycle have completed successfully, new files and -sub-directories are created, as described in :numref:`Table %s `. - -.. _CreatedByWorkflow: - -.. table:: New directories and files created when the workflow is launched. - :widths: 30 70 - - +---------------------------+--------------------------------------------------------------------+ - | **Directory/file Name** | **Description** | - +===========================+====================================================================+ - | YYYYMMDDHH | This is updated when the first cycle-specific workflow tasks are | - | | run, which are ``get_extrn_ics`` and ``get_extrn_lbcs`` (they are | - | | launched simultaneously for each cycle in the experiment). We | - | | refer to this as a “cycle directory”. Cycle directories are | - | | created to contain cycle-specific files for each cycle that the | - | | experiment runs. If ``DATE_FIRST_CYCL`` and ``DATE_LAST_CYCL`` | - | | were different, and/or ``CYCL_HRS`` contained more than one | - | | element in the ``config.sh`` file, then more than one cycle | - | | directory would be created under the experiment directory. | - +---------------------------+--------------------------------------------------------------------+ - | grid | Directory generated by the ``make_grid`` task containing grid | - | | files for the experiment | - +---------------------------+--------------------------------------------------------------------+ - | log | Contains log files generated by the overall workflow and its | - | | various tasks. Look in these files to trace why a task may have | - | | failed. | - +---------------------------+--------------------------------------------------------------------+ - | orog | Directory generated by the ``make_orog`` task containing the | - | | orography files for the experiment | - +---------------------------+--------------------------------------------------------------------+ - | sfc_climo | Directory generated by the ``make_sfc_climo`` task containing the | - | | surface climatology files for the experiment | - +---------------------------+--------------------------------------------------------------------+ - | FV3LAM_wflow.db | Database files that are generated when Rocoto is called (by the | - | FV3LAM_wflow_lock.db | launch script) to launch the workflow. 
| - +---------------------------+--------------------------------------------------------------------+ - | log.launch_FV3LAM_wflow | This is the log file to which the launch script | - | | ``launch_FV3LAM_wflow.sh`` appends its output each time it is | - | | called. Take a look at the last 30–50 lines of this file to check | - | | the status of the workflow. | - +---------------------------+--------------------------------------------------------------------+ - -The output files for an experiment are described in :numref:`Section %s `. -The workflow tasks are described in :numref:`Section %s `). diff --git a/docs/UsersGuide/source/Components.rst b/docs/UsersGuide/source/Components.rst new file mode 100644 index 0000000000..f2f17c149a --- /dev/null +++ b/docs/UsersGuide/source/Components.rst @@ -0,0 +1,80 @@ +.. _Components: + +============================ +SRW Application Components +============================ + +The SRW Application assembles a variety of components, including: + +* Pre-processor Utilities & Initial Conditions +* UFS Weather Forecast Model +* Unified Post-Processor +* Visualization Examples +* Build System and Workflow + +These components are documented within this User's Guide and supported through a `community forum `_. + +.. _Utils: + +Pre-processor Utilities and Initial Conditions +============================================== + +The SRW Application includes a number of pre-processing utilities that initialize and prepare the model. Since the SRW App provides forecast predictions over a limited area (rather than globally), it is necessary to first generate a regional grid (``regional_esg_grid/make_hgrid``) along with :term:`orography` (``orog``) and surface climatology (``sfc_climo_gen``) files on that grid. Grids include a strip, or "halo," of six cells that surround the regional grid and feed in lateral boundary condition data. Since different grid and orography files require different numbers of halo cells, additional utilities handle topography filtering and shave the number of halo points (based on downstream workflow component requirements). The pre-processing software ``chgres_cube`` is used to convert the raw external model data into initial and lateral boundary condition files in netCDF format. These are needed as input to the FV3-LAM. Additional information about the UFS pre-processor utilities can be found in the `UFS_UTILS User's Guide `_. + +The SRW Application can be initialized from a range of operational initial condition files. It is possible to initialize the model from the Global Forecast System (:term:`GFS`), North American Mesoscale (:term:`NAM`) Forecast System, Rapid Refresh (:term:`RAP`), and High-Resolution Rapid Refresh (:term:`HRRR`) files in Gridded Binary v2 (:term:`GRIB2`) format. GFS files also come in :term:`NEMSIO` format for past dates. + +.. WARNING:: + For GFS data, dates prior to 1 January 2018 may work but are not guaranteed. Public archives of model data can be accessed through the `National Centers for Environmental Information `_ (NCEI) or through the `NOAA Operational Model Archive and Distribution System `_ (NOMADS). Raw external model data may be pre-staged on disk by the user. + + +Forecast Model +============== + +The prognostic atmospheric model in the UFS SRW Application is the Finite-Volume Cubed-Sphere +(:term:`FV3`) dynamical core configured with a Limited Area Model (:term:`LAM`) capability :cite:`BlackEtAl2021`. The dynamical core is the computational part of a model that solves the equations of fluid motion. 
A User’s Guide for the UFS :term:`Weather Model` is `here `__. + +Supported model resolutions in this release include 3-, 13-, and 25-km predefined Contiguous U.S. (:term:`CONUS`) domains, each with 64 vertical levels. Preliminary tools for users to define their own domain are also available in the release with full, formal support of these tools to be provided in future releases. The Extended Schmidt Gnomonic (ESG) grid is used with the FV3-LAM, which features relatively uniform grid cells across the entirety of the domain. Additional information about the FV3 dynamical core can be found `here `__ and on the `NOAA Geophysical Fluid Dynamics Laboratory website `_. + +Interoperable atmospheric physics, along with various land surface model options, are supported through the Common Community Physics Package (:term:`CCPP`), described `here `__. Atmospheric physics are a set of numerical methods describing small-scale processes such as clouds, turbulence, radiation, and their interactions. There will be four physics options supported for the v2.0 release. The first is the FV3_RRFS_v1beta physics suite, which is being tested for use in the future operational implementation of the Rapid Refresh Forecast System (RRFS) planned for 2023-2024, and the second is an updated version of the physics suite used in the operational Global Forecast System (GFS) v16. Additionally, FV3_WoFS and FV3_HRRR will be supported. A scientific description of the CCPP parameterizations and suites can be found in the `CCPP Scientific Documentation `_, and CCPP technical aspects are described in the `CCPP Technical Documentation `_. The model namelist has many settings beyond the physics options that can optimize various aspects of the model for use with each of the supported suites. + +The SRW App supports the use of both :term:`GRIB2` and :term:`NEMSIO` input data. The UFS Weather Model ingests initial and lateral boundary condition files produced by :term:`chgres_cube` and outputs files in netCDF format on a specific projection (e.g., Lambert Conformal) in the horizontal direction and model levels in the vertical direction. + +Post-processor +============== + +The SRW Application is distributed with the Unified Post Processor (:term:`UPP`) included in the workflow as a way to convert the netCDF output on the native model grid to :term:`GRIB2` format on standard isobaric vertical coordinates. The UPP can also be used to compute a variety of useful diagnostic fields, as described in the `UPP User’s Guide `__. + +Output from UPP can be used with visualization, plotting, and verification packages or in +further downstream post-processing (e.g., statistical post-processing techniques). + +Visualization Example +===================== +A Python script is provided to create basic visualization of the model output. The script +is designed to output graphics in PNG format for 14 standard meteorological variables +when using the pre-defined :term:`CONUS` domain. A difference plotting script is also included to visually compare two runs for the same domain and resolution. These scripts are provided only as an example for users familiar with Python. They may be used to perform a visual check to verify that the application is producing reasonable results. + +After running ``manage_externals/checkout_externals``, the visualization scripts will be available in the ``ufs-srweather-app/regional_workflow/ush/Python`` directory. 
Usage information and instructions are described in :numref:`Chapter %s ` and are also included at the top of the script. + +Build System and Workflow +========================= + +The SRW Application has a portable build system and a user-friendly, modular, and +expandable workflow framework. + +An umbrella CMake-based build system is used for building the components necessary for running the end-to-end SRW Application, including the UFS Weather Model and the pre- and post-processing software. Additional libraries necessary for the application (e.g., :term:`NCEPLIBS-external` and :term:`NCEPLIBS`) are not included in the SRW Application build system but are available pre-built on pre-configured platforms. On other systems, they can be installed via the HPC-Stack (see :numref:`Chapter %s: Installing the HPC-Stack `). There is a small set of system libraries and utilities that are assumed to be present on the target computer: the CMake build software, a Fortran, C, and C++ compiler, and an :term:`MPI` library. + +Once built, the provided experiment generator script can be used to create a Rocoto-based +workflow file that will run each task in the system in the proper sequence (see `Rocoto documentation +`_). If Rocoto and/or a batch system is not present on the available platform, the individual components can be run in a stand-alone, command line fashion with provided run scripts. The generated namelist for the atmospheric model can be modified in order to vary settings such as forecast starting and ending dates, forecast length hours, the :term:`CCPP` physics suite, integration time step, history file output frequency, and more. It also allows for configuration of other elements of the workflow; for example, whether to run some or all of the pre-processing, forecast model, and post-processing steps. + +This SRW Application release has been tested on a variety of platforms widely used by +researchers, such as the NOAA Research and Development High-Performance Computing Systems +(RDHPCS), including Hera, Orion, and Jet; NOAA’s Weather and Climate Operational +Supercomputing System (WCOSS); the National Center for Atmospheric Research (NCAR) Cheyenne +system; the National Severe Storms Laboratory (NSSL) HPC machine, Odin; the National Science Foundation Stampede2 system; and generic Linux and macOS systems using Intel and GNU compilers. Four `levels of support `_ have been defined for the SRW Application, including pre-configured (Level 1), configurable (Level 2), limited test platforms (Level 3), and build only platforms (Level 4). Each level is further described below. + +On pre-configured (Level 1) computational platforms, all the required libraries for building the SRW Application are available in a central place. That means bundled libraries (NCEPLIBS) and third-party libraries (NCEPLIBS-external) have both been built. The SRW Application is expected to build and run out-of-the-box on these pre-configured platforms. + +A few additional computational platforms are considered configurable for the SRW Application release. Configurable platforms (Level 2) are platforms where all of the required libraries for building the SRW Application are expected to install successfully but are not available in a central location. Applications and models are expected to build and run once the required bundled libraries (e.g., NCEPLIBS) and third-party libraries (e.g., NCEPLIBS-external) are built. 
+ +Limited-Test (Level 3) and Build-Only (Level 4) computational platforms are those in which the developers have built the code but little or no pre-release testing has been conducted, respectively. A complete description of the levels of support, along with a list of preconfigured and configurable platforms can be found in the `SRW Application wiki page `_. diff --git a/docs/UsersGuide/source/ConfigNewPlatform.rst b/docs/UsersGuide/source/ConfigNewPlatform.rst index 381ffb98cb..9e6f719851 100644 --- a/docs/UsersGuide/source/ConfigNewPlatform.rst +++ b/docs/UsersGuide/source/ConfigNewPlatform.rst @@ -4,7 +4,7 @@ Configuring a New Platform ========================== -The UFS SRW Application has been designed to work primarily on a number of Level 1 and 2 support platforms, as specified `here `_. However, it is also designed with flexibility in mind, so that any sufficiently up-to-date machine with a UNIX-based operating system should be capable of running the application. A full list of prerequisites for installing the UFS SRW App and running the Graduate Student Test can be found in :numref:`Section %s `. +The UFS SRW Application has been designed to work primarily on a number of Level 1 and 2 support platforms, as specified `here `__. However, it is also designed with flexibility in mind, so that any sufficiently up-to-date machine with a UNIX-based operating system should be capable of running the application. A full list of prerequisites for installing the UFS SRW App and running the Graduate Student Test can be found in :numref:`Section %s `. The first step to installing on a new machine is to install :term:`NCEPLIBS` (https://github.com/NOAA-EMC/NCEPLIBS), the NCEP libraries package, which is a set of libraries created and maintained by NCEP and EMC that are used in many parts of the UFS. NCEPLIBS comes with a large number of prerequisites (see :numref:`Section %s ` for more info), but the only required software prior to starting the installation process are as follows: @@ -57,7 +57,7 @@ However, it is also possible to install these utilities via Macports (https://ww Installing NCEPLIBS-external ============================ -In order to facilitate the installation of NCEPLIBS (and therefore, the SRW and other UFS applications) on new platforms, EMC maintains a one-stop package containing most of the prerequisite libraries and software necessary for installing NCEPLIBS. This package is known as NCEPLIBS-external, and is maintained in a git repository at https://github.com/NOAA-EMC/NCEPLIBS-external. Instructions for installing these will depend on your platform, but generally so long as all the above-mentioned prerequisites have been installed you can follow the proceeding instructions verbatim (in bash; a csh-based shell will require different commands). Some examples for installing on specific platforms can be found in the `NCEPLIBS-external/doc directory `. +In order to facilitate the installation of NCEPLIBS (and therefore, the SRW App and other UFS applications) on new platforms, EMC maintains a one-stop package containing most of the prerequisite libraries and software necessary for installing NCEPLIBS. This package is known as NCEPLIBS-external, and is maintained in a git repository at https://github.com/NOAA-EMC/NCEPLIBS-external. 
Instructions for installing these will depend on your platform, but generally so long as all the above-mentioned prerequisites have been installed you can follow the proceeding instructions verbatim (in bash; a csh-based shell will require different commands). Some examples for installing on specific platforms can be found in the `NCEPLIBS-external/doc directory `. These instructions will install the NCEPLIBS-external in the current directory tree, so be sure you are in the desired location before starting. @@ -126,8 +126,8 @@ Further information on including prerequisite libraries, as well as other helpfu Once the NCEPLIBS package has been successfully installed, you can move on to building the UFS SRW Application. -Building the UFS Short-Range Weather Application (UFS SRW App) -============================================================== +Building the UFS SRW Application +======================================= Building the UFS SRW App is similar to building NCEPLIBS, in that the code is stored in a git repository and is built using CMake software. The first step is to retrieve the code from GitHub, using the variables defined earlier: .. code-block:: console @@ -212,8 +212,9 @@ Once the data has been staged, setting up your experiment on a platform without These are the two ``MACHINE`` settings for generic, non-Rocoto-based platforms; you should choose the one most appropriate for your machine. ``MACOS`` has its own setting due to some differences in how command-line utilities function on Darwin-based operating systems. ``LAYOUT_X=2`` + ``LAYOUT_Y=2`` - These are the settings that control the MPI decomposition when running the weather model. There are default values, but for your machine it is recommended that you specify your own layout to achieve the correct number of MPI processes for your application. In total, your machine should be able to handle ``LAYOUT_X×LAYOUT_Y+WRTCMP_write_tasks_per_group`` tasks. ``WRTCMP_write_tasks_per_group`` is the number of MPI tasks that will be set aside for writing model output, and it is a setting dependent on the domain you have selected. You can find and edit the value of this variable in the file ``regional_workflow/ush/set_predef_grid_params.sh``. + These are the settings that control the MPI decomposition when running the weather model. There are default values, but for your machine it is recommended that you specify your own layout to achieve the correct number of MPI processes for your application. In total, your machine should be able to handle ``LAYOUT_X×LAYOUT_Y+WRTCMP_write_tasks_per_group`` tasks. ``WRTCMP_write_tasks_per_group`` is the number of MPI tasks that will be set aside for writing model output, and it is a setting dependent on the domain you have selected. You can find and edit the value of this variable in the file ``regional_workflow/ush/set_predef_grid_params.sh``. ``RUN_CMD_UTILS="mpirun -np 4"`` This is the run command for MPI-enabled pre-processing utilities. Depending on your machine and your MPI installation, you may need to use a different command for launching an MPI-enabled executable. diff --git a/docs/UsersGuide/source/FAQ.rst b/docs/UsersGuide/source/FAQ.rst index 05313a998c..50d3141006 100644 --- a/docs/UsersGuide/source/FAQ.rst +++ b/docs/UsersGuide/source/FAQ.rst @@ -1,17 +1,26 @@ .. _FAQ: -*** +**** FAQ -*** +**** + +* :ref:`How do I turn on/off the cycle-independent workflow tasks? ` +* :ref:`How do I define an experiment name? ` +* :ref:`How do I change the Physics Suite Definition File (SDF)? 
` +* :ref:`How do I restart a DEAD task? ` +* :ref:`How do I change the grid? ` + +.. _CycleInd: + +=========================================================== +How do I turn on/off the cycle-independent workflow tasks? +=========================================================== -========================================================= -How do I turn On/Off the Cycle-Independent Workflow Tasks -========================================================= The first three pre-processing tasks ``make_grid``, ``make_orog``, and ``make_sfc_climo`` are cycle-independent, meaning that they only need to be run once per experiment. If the grid, orography, and surface climatology files that these tasks generate are already available (e.g. from a previous experiment that used the same grid as the current), then -these tasks can be skipped by having the workflow use those pre-generated files. This +these tasks can be skipped, and the workflow can use those pre-generated files. This can be done by adding the following lines to the ``config.sh`` script before running the ``generate_FV3LAM_wflow.sh`` script: @@ -28,26 +37,36 @@ The ``RUN_TASK_MAKE_GRID``, ``RUN_TASK_MAKE_OROG``, and ``RUN_TASK_MAKE_SFC_CLIM disable their respective tasks, and ``GRID_DIR``, ``OROG_DIR``, and ``SFC_CLIMO_DIR`` specify the directories in which the workflow can find the pre-generated grid, orography, and surface climatology files, respectively (these directories may be the same, i.e. all -three sets of files may be placed in the same location). By default, the ``RUN_TASK_MAKE_...`` +three sets of files may be placed in the same location). By default, the ``RUN_TASK_MAKE_...`` flags are set to ``TRUE`` in ``config_defaults.sh``, i.e. the workflow will by default run the ``make_grid``, ``make_orog``, and ``make_sfc_climo`` tasks. +.. _DefineExptName: + =================================== How do I define an experiment name? =================================== + The name of the experiment is set in the ``config.sh`` file using the variable ``EXPT_SUBDIR``. -See :numref:`Section %s ` for more details. +See :numref:`Section %s ` for more details. + + +.. _ChangePhysics: + +========================================================= +How do I change the Physics Suite Definition File (SDF)? +========================================================= -================================================ -How do I change the Suite Definition File (SDF)? -================================================ The SDF is set in the ``config.sh`` file using the variable ``CCPP_PHYS_SUITE``. When the ``generate_FV3LAM_wflow.sh`` script is run, the SDF file is copied from its location in the forecast model directory to the experiment directory ``EXPTDIR``. +.. _RestartTask: + ============================= How do I restart a DEAD task? ============================= + On platforms that utilize Rocoto workflow software (such as NCAR’s Cheyenne machine), sometimes if something goes wrong with the workflow a task may end up in the DEAD state: @@ -67,15 +86,18 @@ command: .. code-block:: console - rocotorewind -w FV3SAR_wflow.xml -d FV3SAR_wflow.db -v 10 -c 201905200000 -t get_extrn_ics + rocotorewind -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10 -c 201905200000 -t get_extrn_ics where ``-c`` specifies the cycle date (first column of rocotostat output) and ``-t`` represents the task name -(second column of rocotostat output). After using ``rocotorewind``, the next time ``rocotorun`` is used to +(second column of rocotostat output). 
After using ``rocotorewind``, the next time ``rocotorun`` is used to advance the workflow, the job will be resubmitted. +.. _ChangeGrid: + =========================== How do I change the grid? =========================== + To change the predefined grid, you need to modify the ``PREDEF_GRID_NAME`` variable in the ``config.sh`` script which the user has created to generate an experiment configuration and workflow. Users can choose from one of three predefined grids for the SRW Application: diff --git a/docs/UsersGuide/source/Glossary.rst b/docs/UsersGuide/source/Glossary.rst index 622814368a..3873298cfe 100644 --- a/docs/UsersGuide/source/Glossary.rst +++ b/docs/UsersGuide/source/Glossary.rst @@ -7,38 +7,78 @@ Glossary .. glossary:: CCPP - A forecast-model agnostic, vetted collection of codes containing atmospheric physical - parameterizations and suites of parameterizations for use in Numerical Weather Prediction - (NWP) along with a framework that connects the physics to the host forecast model. + The `Common Community Physics Package `_ is a forecast-model agnostic, vetted collection of codes containing atmospheric physical parameterizations and suites of parameterizations for use in Numerical Weather Prediction (NWP) along with a framework that connects the physics to the host forecast model. + + Component + A software element that has a clear function and interface. In Earth system models, components are often single portions of the Earth system (e.g. atmosphere, ocean, or land surface) that are assembled to form a whole. + + Component Repository + A :term:`repository` that contains, at a minimum, source code for a single component. + + Container + `Docker `__ describes a container as "a standard unit of software that packages up code and all its dependencies so the application runs quickly and reliably from one computing environment to another." + + CONUS + Continental United States chgres_cube The preprocessing software used to create initial and boundary condition files to “coldstart” the forecast model. + dynamical core + Global atmospheric model based on fluid dynamics principles, including Euler's equations of motion. + + EPIC + EPIC stands for the `Earth Prediction Innovation Center `__. EPIC seeks to accelerate scientific research and modeling contributions through continuous and sustained community engagement to produce the most accurate and reliable operational modeling system in the world. + FV3 The Finite-Volume Cubed-Sphere dynamical core (dycore). Developed at NOAA's Geophysical Fluid Dynamics Laboratory (GFDL), it is a scalable and flexible dycore capable of both hydrostatic and non-hydrostatic atmospheric simulations. It is the dycore used in the UFS Weather Model. + GFS + `Global Forecast System `_. The GFS is a National Centers for Environmental Prediction (NCEP) weather forecast model that generates data for dozens of atmospheric and land-soil variables, including temperatures, winds, precipitation, soil moisture, and atmospheric ozone concentration. The system couples four separate models (atmosphere, ocean model, land/soil model, and sea ice) that work together to accurately depict weather conditions. + GRIB2 The second version of the World Meterological Organization's (WMO) standard for distributing gridded data. 
+ HPC-Stack + The `HPC-Stack `__ is a repository that provides a unified, shell script-based build system for building the software stack required for numerical weather prediction (NWP) tools such as the `Unified Forecast System (UFS) `__ and the `Joint Effort for Data assimilation Integration (JEDI) `__ framework. + + HRRR + `High Resolution Rapid Refresh `__. The HRRR is a NOAA real-time 3-km resolution, hourly updated, cloud-resolving, convection-allowing atmospheric model, initialized by 3km grids with 3km radar assimilation. Radar data is assimilated in the HRRR every 15 min over a 1-h period adding further detail to that provided by the hourly data assimilation from the 13km radar-enhanced Rapid Refresh. + + IC/LBC + Initial conditions/lateral boundary conditions + + LAM + Limited Area Model, formerly known as the "Stand-Alone Regional Model," or SAR. LAM grids use a regional (rather than global) configuration of the FV3 dynamical core. + + LBC + Lateral boundary conditions. + + MPI + MPI stands for Message Passing Interface. An MPI is a standardized communication system used in parallel programming. It establishes portable and efficient syntax for the exchange of messages and data between multiple processors that are used by a single computer program. An MPI is required for high-performance computing (HPC). + + NAM + `North American Mesoscale Forecast System `_. NAM generates multiple grids (or domains) of weather forecasts over the North American continent at various horizontal resolutions. Each grid contains data for dozens of weather parameters, including temperature, precipitation, lightning, and turbulent kinetic energy. NAM uses additional numerical weather models to generate high-resolution forecasts over fixed regions, and occasionally to follow significant weather events like hurricanes. + NCEP National Centers for Environmental Prediction, an arm of the National Weather Service, - consisting of nine centers. More information can be found at https://www.ncep.noaa.gov. + consisting of nine centers. More information can be found at https://www.ncep.noaa.gov. NCEPLIBS The software libraries created and maintained by :term:`NCEP` that are required for running - :term:`chgres_cube`, the UFS Weather Model, and :term:`UPP`. + :term:`chgres_cube`, the UFS Weather Model, and :term:`UPP`. They are included in the `HPC-Stack `__. NCEPLIBS-external A collection of third-party libraries required to build :term:`NCEPLIBS`, :term:`chgres_cube`, - the UFS Weather Model, and :term:`UPP`. + the UFS Weather Model, and :term:`UPP`. They are included in the `HPC-Stack `__. NCL An interpreted programming language designed specifically for scientific data analysis and - visualization. More information can be found at https://www.ncl.ucar.edu. + visualization. Stands for NCAR Command Language. More information can be found at https://www.ncl.ucar.edu. NEMS The NOAA Environmental Modeling System is a common modeling framework whose purpose is @@ -47,6 +87,18 @@ Glossary NEMSIO A binary format for atmospheric model output from :term:`NCEP`'s Global Forecast System (GFS). + NWP + Numerical Weather Prediction (NWP) takes current observations of weather and processes them with computer models to forecast the future state of the weather. + + Orography + The branch of physical geography dealing with mountains. + + RAP + `Rapid Refresh `__. The continental-scale NOAA hourly-updated assimilation/modeling system operational at NCEP. 
RAP covers North America and is comprised primarily of a numerical forecast model and an analysis/assimilation system to initialize that model. RAP is complemented by the higher-resolution 3km High-Resolution Rapid Refresh (HRRR) model. + + Repository + A central location in which files (e.g., data, code, documentation) are stored and managed. + UFS The Unified Forecast System is a community-based, coupled comprehensive Earth modeling system consisting of several applications (apps). These apps span regional to global @@ -60,10 +112,16 @@ Glossary and boundary condition generation codes used by the UFS Short-Range Weather App are all part of this collection. + Umbrella repository + A repository that houses external code, or “externals,” from additional repositories. + UPP - The Unified Post Processor is software developed at :term:`NCEP` and used operationally to + The `Unified Post Processor `__ is software developed at :term:`NCEP` and used operationally to post-process raw output from a variety of :term:`NCEP`'s NWP models, including the FV3. + Weather Enterprise + Individuals and organizations from public, private, and academic sectors that contribute to the research, development, and production of weather forecast products; primary consumers of these weather forecast products. + Weather Model A prognostic model that can be used for short- and medium-range research and operational forecasts. It can be an atmosphere-only model or an atmospheric diff --git a/docs/UsersGuide/source/Include-HPCInstall.rst b/docs/UsersGuide/source/Include-HPCInstall.rst new file mode 100644 index 0000000000..b467d96d23 --- /dev/null +++ b/docs/UsersGuide/source/Include-HPCInstall.rst @@ -0,0 +1,8 @@ +.. _InstallHPCstack: + +.. include:: ../../../hpc-stack-mod/docs/source/hpc-install.rst + +.. include:: ../../../hpc-stack-mod/docs/source/hpc-prereqs.rst +.. include:: ../../../hpc-stack-mod/docs/source/hpc-parameters.rst +.. include:: ../../../hpc-stack-mod/docs/source/hpc-components.rst +.. include:: ../../../hpc-stack-mod/docs/source/hpc-notes.rst \ No newline at end of file diff --git a/docs/UsersGuide/source/InputOutputFiles.rst b/docs/UsersGuide/source/InputOutputFiles.rst index 2cce0786d2..cfda134b60 100644 --- a/docs/UsersGuide/source/InputOutputFiles.rst +++ b/docs/UsersGuide/source/InputOutputFiles.rst @@ -1,11 +1,12 @@ .. _InputOutputFiles: -====================== +======================= Input and Output Files -====================== +======================= This chapter provides an overview of the input and output files needed by the components -of the UFS SRW Application (:term:`UFS_UTILS`, the UFS :term:`Weather Model`, and :term:`UPP`). -Links to more detailed documentation for each of the components are provided. +of the UFS SRW Application (i.e., :term:`UFS_UTILS`, the UFS :term:`Weather Model`, and the :term:`UPP`). Links to more detailed documentation for each of the components are provided. For SRW App users who want to jump straight to downloading and staging the files, see :numref:`Section %s `. + +.. _Input: Input Files =========== @@ -19,45 +20,30 @@ The external model files needed for initializing the runs can be obtained in a n ways, including: pulled directly from `NOMADS `_; limited data availability), pulled from the NOAA HPSS during the workflow execution (requires user access), or obtained and staged by the user from a different source. The data format for -these files can be :term:`GRIB2` or :term:`NEMSIO`. 
More information on downloading and staging -the external model data can be found in :numref:`Section %s `. Once staged, -the end-to-end application will run the system and write output files to disk. +these files can be :term:`GRIB2` or :term:`NEMSIO`. More information on downloading and setting up +the external model data can be found in :numref:`Section %s `. Once the data is set up, the end-to-end application will run the system and write output files to disk. Pre-processing (UFS_UTILS) -------------------------- -When a user runs the SRW Application as described in the quickstart guide -:numref:`Section %s `, input data for the pre-processing utilities is linked -from a location on disk to your experiment directory by the workflow generation step. The -pre-processing utilities use many different datasets to create grids, and to generate model -input datasets from the external model files. A detailed description of the input files -for the pre-processing utilities can be found `here -`_. +When a user runs the SRW Application as described in the Quick Start Guide :numref:`Chapter %s `, :numref:`Step %s Generate the Forecast Experiment ` links the input data for the pre-processing utilities from a location on disk to the experiment directory. The pre-processing utilities use many different datasets to create grids and to generate model input datasets from the external model files. A detailed description of the input files for the pre-processing utilities can be found `here `__. UFS Weather Model ----------------- -The input files for the weather model include both static (fixed) files and grid and date -specific files (terrain, initial conditions, boundary conditions, etc). The static fix files -must be staged by the user unless you are running on a pre-configured platform, in which case -you can link to the existing copy on that machine. See :numref:`Section %s ` -for more information. The static, grid, and date specific files are linked in the experiment -directory by the workflow scripts. An extensive description of the input files for the weather -model can be found in the `UFS Weather Model User's Guide `_. -The namelists and configuration files for the SRW Application are created from templates by the -workflow, as described in :numref:`Section %s `. +The input files for the weather model include both static (fixed) files and grid- and date-specific files (terrain, initial conditions, boundary conditions, etc). The static fix files +must be staged by the user unless you are running on a Level 1/pre-configured platform, in which case you can link to the existing copy of the data on that machine. See :numref:`Section %s ` for more information. The static, grid, and date-specific files are linked in the experiment directory by the workflow scripts. An extensive description of the input files for the weather model can be found in the `UFS Weather Model User's Guide `__. The namelists and configuration files for the SRW Application are created from templates by the workflow, as described in :numref:`Section %s `. Unified Post Processor (UPP) ---------------------------- Documentation for the UPP input files can be found in the `UPP User's Guide -`_. +`__. .. _WorkflowTemplates: Workflow -------- -The SRW Application uses a series of template files, combined with user selected settings, +The SRW Application uses a series of template files, combined with user-selected settings, to create the required namelists and parameter files needed by the Application. 
These -templates can be reviewed to see what defaults are being used, and where configuration parameters -are assigned from the ``config.sh`` file. +templates can be reviewed to see what defaults are being used and where configuration parameters from the ``config.sh`` file are assigned. List of Template Files ^^^^^^^^^^^^^^^^^^^^^^ @@ -66,7 +52,7 @@ and are shown in :numref:`Table %s `. .. _TemplateFiles: -.. table:: Template files for a regional workflow. +.. table:: Template Files for a Regional Workflow +-----------------------------+-------------------------------------------------------------+ | **File Name** | **Description** | @@ -81,7 +67,7 @@ and are shown in :numref:`Table %s `. +-----------------------------+-------------------------------------------------------------+ | field_table_[CCPP] | Cycle-independent file that the forecast model reads in at | | | the start of each forecast. It specifies the tracers that | - | | the forecast model will advect. A different field_table | + | | the forecast model will advect. A different field_table | | | may be needed for different CCPP suites. | +-----------------------------+-------------------------------------------------------------+ | FV3.input.yml | YAML configuration file containing the forecast model’s | @@ -108,24 +94,14 @@ and are shown in :numref:`Table %s `. | README.xml_templating.md | Instruction of Rocoto XML templating with Jinja. | +-----------------------------+-------------------------------------------------------------+ -Additional information related to the ``diag_table_[CCPP]``, ``field_table_[CCPP]``, ``input.nml.FV3``, -``model_conigure``, and ``nems.configure`` can be found in the `UFS Weather Model User's Guide -`_, +Additional information related to the ``diag_table_[CCPP]``, ``field_table_[CCPP]``, ``input.nml.FV3``, ``model_conigure``, and ``nems.configure`` can be found in the `UFS Weather Model User's Guide `__, while information on the ``regional_grid.nml`` can be found in the `UFS_UTILS User’s Guide -`_. +`_. Migratory Route of the Input Files in the Workflow ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ :numref:`Figure %s ` shows how the case-specific input files in the -``ufs-srweather-app/regional_workflow/ush/templates/`` directory flow to the experiment directory. -The value of ``CCPP_PHYS_SUITE`` is specified in the configuration file ``config.sh``. The template -input files corresponding to ``CCPP_PHYS_SUITE``, such as ``field_table`` and ``nems_configure``, are -copied to the experiment directory ``EXPTDIR`` and the namelist file of the weather model ``input.nml`` -is created from the ``input.nml.FV3`` and ``FV3.input.yml`` files by running the script ``generate_FV3LAM_wflow.sh``. -While running the task ``RUN_FCST`` in the regional workflow as shown in :numref:`Figure %s `, -the ``field_table``, ``nems.configure``, and ``input.nml`` files, located in ``EXPTDIR`` are linked to the -cycle directory ``CYCLE_DIR/``, and ``diag_table`` and ``model_configure`` are copied from the ``templates`` -directory. Finally, these files are updated with the variables specified in ``var_defn.sh``. +``ufs-srweather-app/regional_workflow/ush/templates/`` directory flow to the experiment directory. The value of ``CCPP_PHYS_SUITE`` is specified in the configuration file ``config.sh``. 
The template input files corresponding to ``CCPP_PHYS_SUITE``, such as ``field_table`` and ``nems_configure``, are copied to the experiment directory ``EXPTDIR``, and the namelist file of the weather model ``input.nml`` is created from the ``input.nml.FV3`` and ``FV3.input.yml`` files by running the script ``generate_FV3LAM_wflow.sh``. While running the task ``RUN_FCST`` in the regional workflow as shown in :numref:`Figure %s `, the ``field_table``, ``nems.configure``, and ``input.nml`` files, located in ``EXPTDIR``, are linked to the cycle directory ``CYCLE_DIR/``. Additionally, ``diag_table`` and ``model_configure`` are copied from the ``templates`` directory. Finally, these files are updated with the variables specified in ``var_defn.sh``. .. _MigratoryRoute: @@ -162,52 +138,47 @@ experiment run directory ``EXPTDIR/YYYYMMDDHH/INPUT`` and consist of the followi * ``sfc_data.nc -> sfc_data.tile7.halo0.nc`` These output files are used as inputs for the UFS weather model, and are described in the `Users Guide -`_. +`__. + +.. + COMMENT: Change link above (structure of "latest" is significantly different) UFS Weather Model ----------------- As mentioned previously, the workflow can be run in ‘community’ or ‘nco’ mode, which determines the location and names of the output files. In addition to this option, output can also be in netCDF or NEMSIO format. The output file format is set in the ``model_configure`` files using the -``output_file`` variable. At this time, due to limitations in the post-processing component, only netCDF -format output is recommended for the SRW application. +``output_file`` variable. At this time, due to limitations in the post-processing component, only netCDF format output is recommended for the SRW Application. .. note:: In summary, the fully supported options for this release include running in ‘community’ mode with netCDF format output files. -In this case, the netCDF output files are written to the ``EXPTDIR/YYYYMMDDHH`` directory. The bases of -the file names are specified in the input file ``model_configure`` and are set to the following in the SRW Application: +In this case, the netCDF output files are written to the ``EXPTDIR/YYYYMMDDHH`` directory. The bases of the file names are specified in the input file ``model_configure`` and are set to the following in the SRW Application: * ``dynfHHH.nc`` * ``phyfHHH.nc`` Additional details may be found in the UFS Weather Model `Users Guide -`_. +`__. + Unified Post Processor (UPP) ---------------------------- -Documentation for the UPP output files can be found `here `_. +Documentation for the UPP output files can be found `here `__. -For the SRW Application, the weather model netCDF output files are written to the ``EXPTDIR/YYYYMMDDHH/postprd`` -directory and have the naming convention (file->linked to): +For the SRW Application, the weather model netCDF output files are written to the ``EXPTDIR/YYYYMMDDHH/postprd`` directory and have the naming convention (file->linked to): * ``BGRD3D_{YY}{JJJ}{hh}{mm}f{fhr}00 -> {domain}.t{cyc}z.bgrd3df{fhr}.tmXX.grib2`` * ``BGDAWP_{YY}{JJJ}{hh}{mm}f{fhr}00 -> {domain}.t{cyc}z.bgdawpf{fhr}.tmXX.grib2`` -The default setting for the output file names uses ``rrfs`` for ``{domain}``. This may be overridden by -the user in the ``config.sh`` settings. +The default setting for the output file names uses ``rrfs`` for ``{domain}``. This may be overridden by the user in the ``config.sh`` settings. 
-If you wish to modify the fields or levels that are output from the UPP, you will need to make -modifications to file ``fv3lam.xml``, which resides in the UPP repository distributed with the UFS SRW -Application. Specifically, if the code was cloned in the directory ``ufs-srweather-app``, the file will be -located in ``ufs-srweather-app/src/UPP/parm``. +If you wish to modify the fields or levels that are output from the UPP, you will need to make modifications to file ``fv3lam.xml``, which resides in the UPP repository distributed with the UFS SRW Application. Specifically, if the code was cloned in the directory ``ufs-srweather-app``, the file will be located in ``ufs-srweather-app/src/UPP/parm``. .. note:: This process requires advanced knowledge of which fields can be output for the UFS Weather Model. -Use the directions in the `UPP User's Guide `_ -for details on how to make modifications to the ``fv3lam.xml`` file and for remaking the flat text file that -the UPP reads, which is called ``postxconfig-NT-fv3lam.txt`` (default). +Use the directions in the `UPP User's Guide `__ for details on how to make modifications to the ``fv3lam.xml`` file and for remaking the flat text file that the UPP reads, which is called ``postxconfig-NT-fv3lam.txt`` (default). Once you have created the new flat text file reflecting your changes, you will need to modify your ``config.sh`` to point the workflow to the new text file. In your ``config.sh``, set the following: @@ -215,11 +186,9 @@ Once you have created the new flat text file reflecting your changes, you will n .. code-block:: console USE_CUSTOM_POST_CONFIG_FILE=”TRUE” - CUSTOM_POST_CONFIG_PATH=”/path/to/custom/postxconfig-NT-fv3lam.txt” + CUSTOM_POST_CONFIG_PATH=”” -which tells the workflow to use the custom file located in the user-defined path. The path should -include the filename. If this is set to true and the file path is not found, then an error will occur -when trying to generate the SRW Application workflow. +which tells the workflow to use the custom file located in the user-defined path. The path should include the filename. If this is set to true and the file path is not found, then an error will occur when trying to generate the SRW Application workflow. You may then start your case workflow as usual and the UPP will use the new flat ``*.txt`` file. @@ -227,61 +196,66 @@ You may then start your case workflow as usual and the UPP will use the new flat Downloading and Staging Input Data ================================== -A set of input files, including static (fix) data and raw initial and lateral boundary conditions -(IC/LBCs), are needed to run the SRW Application. +A set of input files, including static (fix) data and raw initial and lateral boundary conditions (:term:`IC/LBC`'s), are needed to run the SRW Application. .. _StaticFixFiles: Static Files ------------ -A set of fix files are necessary to run the SRW Application. Environment variables describe the -location of the static files: ``FIXgsm``, ``TOPO_DIR``, and ``SFC_CLIMO_INPUT_DIR`` are the directories -where the static files are located. If you are on a pre-configured or configurable platform, there is no -need to stage the fixed files manually because they have been prestaged and the paths -are set in ``regional_workflow/ush/setup.sh``. If the user's platform is not defined -in that file, the static files can be pulled individually or as a full tar file from the `FTP data repository -`_ or from `Amazon Web Services (AWS) cloud storage -`_ -and staged on your machine. 
The paths to the staged files must then be set in ``config.sh`` -as follows: +The environment variables ``FIXgsm``, ``TOPO_DIR``, and ``SFC_CLIMO_INPUT_DIR`` indicate the path to +the directories where the static files are located. If you are on a pre-configured or configurable platform (i.e., a Level 1 or 2 platform), there is no need to stage the fixed files manually because they have been prestaged, and the paths are set in ``regional_workflow/ush/setup.sh``. On Level 3 & 4 systems, the static files can be downloaded individually or as a full tar file from the `FTP data repository `__ or from `Amazon Web Services (AWS) cloud storage `__ using the ``wget`` command. Then ``tar -xf `` will extract the compressed file: + +.. code-block:: console + + wget https://ufs-data.s3.amazonaws.com/public_release/ufs-srweather-app-v1.0.0/fix/fix_files.tar.gz + tar -xf fix_files.tar.gz + +The paths to the staged files must then be set in ``config.sh``. Add the following code or alter the variable paths if they are already listed in the ``config.sh`` file: * ``FIXgsm=/path-to/fix/fix_am`` * ``TOPO_DIR=/path-to/fix/fix_am/fix_orog`` * ``SFC_CLIMO_INPUT_DIR=/path-to/fix_am/fix/sfc_climo/`` +.. _InitialConditions: + Initial Condition Formats and Source ------------------------------------ -The SRW Application currently supports raw initial and lateral boundary conditions from numerous models -(i.e., FV3GFS, NAM, RAP, HRRR). The data can be provided in three formats: :term:`NEMSIO`, netCDF, -or :term:`GRIB2`. The SRW Application currently only supports the use of NEMSIO and netCDF input files -from the GFS. - -Environment variables describe what IC/LBC files to use (pre-staged files or files to be automatically -pulled from the NOAA HPSS) and the location of the and IC/LBC files: ``USE_USER_STAGED_EXTRN_FILES`` -is the ``T/F`` flag defining what raw data files to use, ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` is the -directory where the initial conditions are located, and ``EXTRN_MDL_SOURCE_BASEDIR_LBCS`` is the -directory where the lateral boundary conditions are located. - -If you have access to the NOAA HPSS and want to automatically download the IC/LBC files using the -workflow, these environment variables can be left out of the ``config.sh`` file. However, if you do -not have access to the NOAA HPSS and you need to pull and stage the data manually, you will need to -set ``USE_USER_STAGED_EXTRN_FILES`` to ``TRUE`` and then set the paths to the where the IC/LBC files are located. - -A small sample of IC/LBCs is available at the `FTP data repository -`_ or from `AWS cloud storage -`_. +The SRW Application currently supports raw initial and lateral boundary conditions from numerous models (i.e., FV3GFS, NAM, RAP, HRRR). The data can be provided in three formats: :term:`NEMSIO`, netCDF, or :term:`GRIB2`. The SRW Application currently only supports the use of NEMSIO and netCDF input files from the GFS. + +The data required to run the "out-of-the-box" SRW App case described in :numref:`Chapter %s ` is already preinstalled on `Level 1 `__ systems. Users on other systems can find the required IC/LBC data in the `FTP data repository `__ or on `AWS cloud storage `_. + +To add this data to your system, run the following commands from the ``ufs-srweather-app`` directory: + +.. 
code-block:: console + + wget https://ftp.emc.ncep.noaa.gov/EIB/UFS/SRW/v1p0/simple_test_case/gst_model_data.tar.gz + tar -xf gst_model_data.tar.gz + +This will extract the files and place them within a new ``model_data`` directory inside the ``ufs-srweather-app`` directory. + +Then, the paths to ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_SOURCE_BASEDIR_LBCS`` must be set in the ``config.sh`` file. + +.. code-block:: console + + cd + vi config.sh + +Next, in ``config.sh``, set the following environment variables: + +.. code-block:: console + + USE_USER_STAGED_EXTRN_FILES=TRUE + EXTRN_MDL_SOURCE_BASEDIR_ICS= + EXTRN_MDL_SOURCE_BASEDIR_LBCS= + +These environment variables describe what :term:`IC/LBC` files to use (pre-staged files or files to be automatically pulled from the NOAA HPSS) and the location of the IC/LBC files. ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` is the directory where the initial conditions are located, and ``EXTRN_MDL_SOURCE_BASEDIR_LBCS`` is the directory where the lateral boundary conditions are located. Initial and Lateral Boundary Condition Organization --------------------------------------------------- The suggested directory structure and naming convention for the raw input files is described -below. While there is flexibility to modify these settings, this will provide the most reusability -for multiple dates when using the SRW Application workflow. +below. While there is flexibility to modify these settings, this will provide the most reusability for multiple dates when using the SRW Application workflow. -For ease of reusing the ``config.sh`` for multiple dates and cycles, it is recommended to set up -your raw IC/LBC files such that it includes the model name (e.g., FV3GFS, NAM, RAP, HRRR) and -``YYYYMMDDHH``, for example: ``/path-to/model_data/FV3GFS/2019061518``. Since both initial -and lateral boundary condition files are necessary, you can also include an ICS and LBCS directory. -The sample IC/LBCs available at the FTP data repository are structured as follows: +For ease of reusing the ``config.sh`` for multiple dates and cycles, it is recommended to set up your raw :term:`IC/LBC` files so that the directory structure includes the model name (e.g., FV3GFS, NAM, RAP, HRRR) and ``YYYYMMDDHH``, for example: ``/path-to/model_data/FV3GFS/2019061518``. Since both initial and lateral boundary condition files are necessary, you can also include an ICS and LBCS directory.
The sample IC/LBC's available at the FTP data repository are structured as follows: * ``/path-to/model_data/MODEL/YYYYMMDDHH/ICS`` * ``/path-to/model_data/MODEL/YYYYMMDDHH/LBCS`` @@ -289,8 +263,9 @@ The sample IC/LBCs available at the FTP data repository are structured as follow When files are pulled from the NOAA HPSS, the naming convention looks something like: * FV3GFS (GRIB2): ``gfs.t{cycle}z.pgrb2.0p25.f{fhr}`` -* FV3GFS (NEMSIO): ICs: ``gfs.t{cycle}z.atmanl.nemsio`` and ``gfs.t{cycle}z.sfcanl.nemsio``; - LBCs: ``gfs.t{cycle}z.atmf{fhr}.nemsio`` +* FV3GFS (NEMSIO): + * ICs: ``gfs.t{cycle}z.atmanl.nemsio`` and ``gfs.t{cycle}z.sfcanl.nemsio``; + * LBCs: ``gfs.t{cycle}z.atmf{fhr}.nemsio`` * RAP (GRIB2): ``rap.t{cycle}z.wrfprsf{fhr}.grib2`` * HRRR (GRIB2): ``hrrr.t{cycle}z.wrfprsf{fhr}.grib2`` @@ -313,7 +288,7 @@ Doing this allows for the following to be set in the ``config.sh`` regardless of EXTRN_MDL_SOURCE_BASEDIR_LBCS="/path-to/model_data/RAP" EXTRN_MDL_FILES_LBCS=( "rap.wrfprsf03.grib2" "rap.wrfprsf06.grib2" ) -If you choose to forgo the extra ``ICS`` and ``LBCS`` directory, you may also simply either +If you choose to forgo the extra ``ICS`` and ``LBCS`` directory, you may either rename the original files to remove the cycle or modify the ``config.sh`` to set: .. code-block:: console @@ -327,15 +302,14 @@ The default initial and lateral boundary condition files are set to be a severe from 20190615 at 00 UTC. FV3GFS GRIB2 files are the default model and file format. A tar file (``gst_model_data.tar.gz``) containing the model data for this case is available on EMC's FTP data repository at https://ftp.emc.ncep.noaa.gov/EIB/UFS/SRW/v1p0/simple_test_case/. It is -also available on Amazon Web Services (AWS) at -https://ufs-data.s3.amazonaws.com/public_release/ufs-srweather-app-v1.0.0/ic/gst_model_data.tar.gz. +also available on Amazon Web Services (AWS) at https://ufs-data.s3.amazonaws.com/public_release/ufs-srweather-app-v1.0.0/ic/gst_model_data.tar.gz. Running the App for Different Dates ----------------------------------- If users want to run the SRW Application for dates other than 06-15-2019, you will need to make a change in the case to specify the desired data. This is done by modifying the ``config.sh`` ``DATE_FIRST_CYCL``, ``DATE_LAST_CYCL``, and ``CYCL_HRS`` settings. The -forecast length can be modified by changed the ``FCST_LEN_HRS``. In addition, the lateral +forecast length can be modified by changing the ``FCST_LEN_HRS``. In addition, the lateral boundary interval can be specified using the ``LBC_SPEC_INTVL_HRS`` variable. Users will need to ensure that the initial and lateral boundary condition files are available @@ -345,8 +319,7 @@ Staging Initial Conditions Manually ----------------------------------- If users want to run the SRW Application with raw model files for dates other than what are currently available on the preconfigured platforms, they need to stage the data manually. -The data should be placed in ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_SOURCE_BASEDIR_LBCS``. -Raw model files may be available from a number of sources. A few examples are provided here for convenience. +The data should be placed in ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_SOURCE_BASEDIR_LBCS``. The path to these variables can be set in the ``config.sh`` file. Raw model files are available from a number of sources. A few examples are provided here for convenience. 
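To make the manual staging step concrete, the sketch below stages FV3GFS GRIB2 files for the 2019061518 cycle using the directory layout recommended above. This is a hypothetical example only: the file names follow the HPSS-style convention shown earlier, the forecast hours assume a 6-hour lateral boundary interval, and the actual names and download paths depend on which of the sources listed next is used.

.. code-block:: console

   # Hypothetical staging of FV3GFS GRIB2 data for the 2019061518 cycle.
   # Download the gfs.t18z.pgrb2.0p25.f* files from one of the sources listed
   # below first; exact remote paths vary by source and model version.
   BASEDIR=/path-to/model_data/FV3GFS/2019061518
   mkdir -p ${BASEDIR}/ICS ${BASEDIR}/LBCS
   cp gfs.t18z.pgrb2.0p25.f000 ${BASEDIR}/ICS/
   cp gfs.t18z.pgrb2.0p25.f006 gfs.t18z.pgrb2.0p25.f012 ${BASEDIR}/LBCS/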
NOMADS: https://nomads.ncep.noaa.gov/pub/data/nccf/com/{model}/prod, where model may be: @@ -392,7 +365,7 @@ GRIB2 and NEMSIO files your directory structure might look like: /path-to/model_data/FV3GFS/YYYYMMDDHH/ICS and LBCS /path-to/model_data/FV3GFS_nemsio/YYYYMMDDHH/ICS and LBCS -If you want to use GRIB2 format files for FV3GFS you must also set two additional environment +If you want to use GRIB2 format files for FV3GFS you must also set additional environment variables, including: .. code-block:: console @@ -411,6 +384,4 @@ that the users share the same ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_S directories. That way, if raw model input files are already on disk for a given date they do not need to be replicated. -The files in the subdirectories of the ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_SOURCE_BASEDIR_LBCS`` -directories should be write-protected. This prevents these files from being accidentally modified or deleted. -The directories should generally be group writable so the directory can be shared among multiple users. +The files in the subdirectories of the ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_SOURCE_BASEDIR_LBCS`` directories should be write-protected. This prevents these files from being accidentally modified or deleted. The directories should generally be group writable so the directory can be shared among multiple users. diff --git a/docs/UsersGuide/source/Introduction.rst b/docs/UsersGuide/source/Introduction.rst index ac1bedb1b0..1e1766d8f9 100644 --- a/docs/UsersGuide/source/Introduction.rst +++ b/docs/UsersGuide/source/Introduction.rst @@ -1,177 +1,331 @@ .. _Introduction: -============ +============== Introduction -============ - -The Unified Forecast System (:term:`UFS`) is a community-based, coupled, comprehensive Earth modeling system. -It is designed to be the source system for NOAA’s operational numerical weather prediction applications -while enabling research, development, and contribution opportunities for the broader weather enterprise. -For more information about the UFS, visit the UFS Portal at https://ufscommunity.org/. - -The UFS can be configured for multiple applications (see a complete list at -https://ufscommunity.org/science/aboutapps/). The configuration described here is the UFS Short-Range -Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain -and on time scales from less than an hour out to several days. The SRW Application v1.0 release includes a -prognostic atmospheric model, pre- and post-processing, and a community workflow for running the system -end-to-end, which are documented within the User's Guide and supported through a community forum. -Future work will include expanding the capabilities of the application to include data assimilation -(DA) and a verification package (e.g. METplus) as part of the workflow. This documentation provides an -overview of the release components, a description of the supported capabilities, a quick start guide -for running the application, and information on where to find more information and obtain support. - -The SRW App v1.0.0 citation is as follows and should be used when presenting results based on research -conducted with the App. - -UFS Development Team. (2021, March 4). Unified Forecast System (UFS) Short-Range Weather (SRW) Application -(Version v1.0.0). Zenodo. 
https://doi.org/10.5281/zenodo.4534994 +============== + +The Unified Forecast System (:term:`UFS`) is a community-based, coupled, comprehensive Earth modeling system. NOAA’s operational model suite for numerical weather prediction (:term:`NWP`) is quickly transitioning to the UFS from a number of different modeling systems. The UFS enables research, development, and contribution opportunities within the broader :term:`weather enterprise` (e.g. government, industry, and academia). For more information about the UFS, visit the `UFS Portal `__. + +The UFS includes `multiple applications `__ that support different forecast durations and spatial domains. This documentation describes the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. The SRW Application v2.0 release includes a prognostic atmospheric model, pre- and post-processing, and a community workflow for running the system end-to-end. These components are documented within this User's Guide and supported through a `community forum `_. New and improved capabilities for this release include the addition of a verification package (METplus) for both deterministic and ensemble simulations and support for four Stochastically Perturbed Perturbation (SPP) schemes. Future work will expand the capabilities of the application to include data assimilation (DA) and a forecast restart/cycling capability. + +This documentation provides a :ref:`Quick Start Guide ` for running the SRW Application in a container and a :ref:`detailed guide ` for running the SRW App on supported platforms. It also provides an overview of the :ref:`release components ` and details on how to customize or modify different portions of the workflow. + +The SRW App v1.0.0 citation is as follows and should be used when presenting results based on research conducted with the App: + +UFS Development Team. (2021, March 4). Unified Forecast System (UFS) Short-Range Weather (SRW) Application (Version v1.0.0). Zenodo. https://doi.org/10.5281/zenodo.4534994 + +.. + COMMENT: Update version numbers/citation for release! Also update release date for citation! + + +How to Use This Document +======================== + +This guide instructs both novice and experienced users on downloading, building, and running the SRW Application. Please post questions in the `UFS Forum `__. + +.. code-block:: console + + Throughout the guide, this presentation style indicates shell commands and options, + code examples, etc. + +Variables presented as ``AaBbCc123`` in this User's Guide typically refer to variables in scripts, names of files, and directories. + +File paths or code that include angle brackets (e.g., ``build__.env``) indicate that users should insert options appropriate to their SRW App configuration (e.g., ``build_orion_intel.env``). + +.. hint:: + * To get started running the SRW App, see the :ref:`Quick Start Guide ` for beginners or refer to the in-depth chapter on :ref:`Running the Short-Range Weather Application `. + * For background information on the SRW App code repositories and directory structure, see :numref:`Section %s ` below. + * For an outline of SRW App components, see section :numref:`Section %s ` below or refer to :numref:`Chapter %s ` for a more in-depth treatment. + + +.. 
_ComponentsOverview: + +SRW App Components Overview +============================== Pre-processor Utilities and Initial Conditions -============================================== - -The SRW Application includes a number of pre-processing utilities to initialize and prepare the -model for integration. For the limited area model (LAM), it is necessary to first generate a -regional grid ``regional_esg_grid/make_hgrid`` along with orography ``orog`` and surface climatology ``sfc_climo_gen`` -files on that grid. There are additional utilities included to handle the correct number of halo ``shave`` -points and topography filtering ``filter_topo``. The pre-processing software ``chgres_cube`` -is used to convert the raw external model data into initial and lateral boundary condition files in netCDF -format, needed as input to the FV3-LAM. Additional information about the UFS pre-processor utilities can -be found in the `UFS_UTILS User’s Guide `_. - -The SRW Application can be initialized from a range of operational initial condition files. It is -possible to initialize the model from GFS, NAM, RAP, and HRRR files in Gridded Binary v2 (GRIB2) -format and GFS in NEMSIO format for past dates. Please note, for GFS data, dates prior to 1 January 2018 may work but are -not guaranteed. Public archives of model data can be accessed through the `National Centers for -Environmental Information `_ -(NCEI) or through the `NOAA Operational Model Archive and Distribution System `_ -(NOMADS). Raw external model data may be pre-staged on disk by the user. +------------------------------------------------ + +The SRW Application includes a number of pre-processing utilities that initialize and prepare the model. Tasks include generating a regional grid along with :term:`orography` and surface climatology files for that grid. One pre-processing utility converts the raw external model data into initial and lateral boundary condition files in netCDF format. Later, these files are used as input to the atmospheric model (FV3-LAM). Additional information about the pre-processor utilities can be found in :numref:`Chapter %s ` and in the `UFS_UTILS User’s Guide `_. Forecast Model -============== +----------------- + +Atmospheric Model +^^^^^^^^^^^^^^^^^^^^^^ The prognostic atmospheric model in the UFS SRW Application is the Finite-Volume Cubed-Sphere -(:term:`FV3`) dynamical core configured with a Limited Area Model (LAM) capability :cite:`BlackEtAl2020`. -The dynamical core is the computational part of a model that solves the equations of fluid motion. A User’s -Guide for the UFS :term:`Weather Model` is `here `_. - -Supported model resolutions in this release include a 3-, 13-, and 25-km predefined Contiguous -U.S. (CONUS) domain, all with 64 vertical levels. Preliminary tools for users to define their -own domain are also available in the release with full, formal support of these tools to be -provided in future releases. The Extended Schmidt Gnomonic (ESG) grid is used with the FV3-LAM, -which features relatively uniform grid cells across the entirety of the domain. Additional -information about the FV3 dynamical core can be found `here -`_ and on the `NOAA Geophysical -Fluid Dynamics Laboratory website `_. - -Interoperable atmospheric physics, along with the Noah Multi-parameterization (Noah MP) -Land Surface Model options, are supported through the Common Community Physics Package -(:term:`CCPP`; described `here `_). 
-Atmospheric physics are a set of numerical methods describing small-scale processes such -as clouds, turbulence, radiation, and their interactions. There are two physics options -supported for the release. The first is an experimental physics suite being tested for use -in the future operational implementation of the Rapid Refresh Forecast System (RRFS) planned -for 2023-2024, and the second is an updated version of the physics suite used in the operational -Global Forecast System (GFS) v15. A scientific description of the CCPP parameterizations and -suites can be found in the `CCPP Scientific Documentation `_, -and CCPP technical aspects are described in the `CCPP Technical Documentation -`_. The model namelist has many settings -beyond the physics options that can optimize various aspects of the model for use with each -of the supported suites. - -The SRW App supports the use of both GRIB2 and :term:`NEMSIO` input data. The UFS Weather Model -ingests initial and lateral boundary condition files produced by :term:`chgres_cube` and outputs files in -netCDF format on a specific projection (e.g., Lambert Conformal) in the horizontal and model -levels in the vertical. - -Post-processor -============== +(:term:`FV3`) dynamical core configured with a Limited Area Model (LAM) capability (:cite:t:`BlackEtAl2021`). The dynamical core is the computational part of a model that solves the equations of fluid motion. A User’s Guide for the UFS :term:`Weather Model` can be found `here `__. + +Common Community Physics Package +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The `Common Community Physics Package `_ (:term:`CCPP`) supports interoperable atmospheric physics and land surface model options. Atmospheric physics are a set of numerical methods describing small-scale processes such as clouds, turbulence, radiation, and their interactions. The upcoming SRW App release includes four physics suites. + +Data Format +^^^^^^^^^^^^^^^^^^^^^^ + +The SRW App supports the use of external model data in :term:`GRIB2`, :term:`NEMSIO`, and netCDF format when generating initial and boundary conditions. The UFS Weather Model ingests initial and lateral boundary condition files produced by :term:`chgres_cube`. + + +Unified Post-Processor (UPP) +-------------------------------- -The SRW Application is distributed with the Unified Post Processor (:term:`UPP`) included in the -workflow as a way to convert the netCDF output on the native model grid to GRIB2 format on -standard isobaric vertical coordinates. UPP can also be used to compute a variety of useful -diagnostic fields, as described in the `UPP user’s guide `_. +The `Unified Post Processor `__ (:term:`UPP`) processes raw output from a variety of numerical weather prediction (:term:`NWP`) models. In the SRW App, it converts data output from netCDF format to GRIB2 format. The UPP can also be used to compute a variety of useful diagnostic fields, as described in the `UPP User’s Guide `_. Output from the UPP can be used with visualization, plotting, and verification packages, or for further downstream post-processing (e.g., statistical post-processing techniques). -Output from UPP can be used with visualization, plotting, and verification packages, or for -further downstream post-processing, e.g. statistical post-processing techniques. Visualization Example -===================== -A Python script is provided to create basic visualization of the model output. 
The script -is designed to output graphics in PNG format for 14 standard meteorological variables -when using the pre-defined CONUS domain. In addition, a difference plotting script is included -to visually compare two runs for the same domain and resolution. These scripts are provided only -as an example for users familiar with Python, and may be used to do a visual check to verify -that the application is producing reasonable results. - -The scripts are available in the `regional_workflow repository -`_ -under ush/Python. Usage information and instructions are described in -:numref:`Chapter %s ` and are also included at the top of the script. +------------------------- + +This SRW Application release provides Python scripts to create basic visualizations of the model output. :numref:`Chapter %s ` contains usage information and instructions; instructions also appear at the top of the scripts. Build System and Workflow -========================= - -The SRW Application has a portable build system and a user-friendly, modular, and -expandable workflow framework. - -An umbrella CMake-based build system is used for building the components necessary -for running the end-to-end SRW Application: the UFS Weather Model and the pre- and -post-processing software. Additional libraries (:term:`NCEPLIBS-external` and :term:`NCEPLIBS`) necessary -for the application are not included in the SRW Application build system, but are available -pre-built on pre-configured platforms. There is a small set of system libraries and utilities -that are assumed to be present on the target computer: the CMake build software, a Fortran, -C, and C++ compiler, and MPI library. - -Once built, the provided experiment generator script can be used to create a Rocoto-based -workflow file that will run each task in the system (see `Rocoto documentation -`_) in the proper sequence. -If Rocoto and/or a batch system is not present on the available platform, the individual -components can be run in a stand-alone, command line fashion with provided run scripts. The -generated namelist for the atmospheric model can be modified in order to vary settings such -as forecast starting and ending dates, forecast length hours, the CCPP physics suite, -integration time step, history file output frequency, and more. It also allows for configuration -of other elements of the workflow; for example, whether to run some or all of the pre-processing, -forecast model, and post-processing steps. - -This SRW Application release has been tested on a variety of platforms widely used by -researchers, such as the NOAA Research and Development High-Performance Computing Systems -(RDHPCS), including Hera, Orion, and Jet; NOAA’s Weather and Climate Operational -Supercomputing System (WCOSS); the National Center for Atmospheric Research (NCAR) Cheyenne -system; NSSL’s HPC machine, Odin; the National Science Foundation Stampede2 system; and -generic Linux and macOS systems using Intel and GNU compilers. Four `levels of support -`_ -have been defined for the SRW Application, including pre-configured (level 1), configurable -(level 2), limited test platforms (level 3), and build only platforms (level 4). Each -level is further described below. - -For the selected computational platforms that have been pre-configured (level 1), all the -required libraries for building the SRW Application are available in a central place. That -means bundled libraries (NCEPLIBS) and third-party libraries (NCEPLIBS-external) have both -been built. 
The SRW Application is expected to build and run out of the box on these -pre-configured platforms and users can proceed directly to the using the workflow, as -described in the Quick Start (:numref:`Chapter %s `). - -A few additional computational platforms are considered configurable for the SRW -Application release. Configurable platforms (level 2) are platforms where all of -the required libraries for building the SRW Application are expected to install successfully, -but are not available in a central place. Applications and models are expected to build -and run once the required bundled libraries (NCEPLIBS) and third-party libraries (NCEPLIBS-external) -are built. - -Limited-Test (level 3) and Build-Only (level 4) computational platforms are those in which -the developers have built the code but little or no pre-release testing has been conducted, -respectively. A complete description of the levels of support, along with a list of preconfigured -and configurable platforms can be found in the `SRW Application wiki page -`_. - -User Support, Documentation, and Contributing Development -========================================================= - -A forum-based, online `support system `_ with topical sections -provides a centralized location for UFS users and developers to post questions and exchange -information. The forum complements the formal, written documentation, summarized here for ease of -use. +---------------------------- + +The SRW Application has a portable CMake-based build system that packages together all the components required to build the SRW Application. Once built, users can generate a Rocoto-based workflow that will run each task in the proper sequence (see `Rocoto documentation `__ for more on workflow management). Individual components can also be run in a stand-alone, command line fashion. + +The SRW Application allows for configuration of various elements of the workflow. For example, users can modify the parameters of the atmospheric model, such as start and end dates, duration, time step, and the physics suite used for the simulation. + +This SRW Application release has been tested on a variety of platforms widely used by researchers, including NOAA High-Performance Computing (HPC) systems (e.g. Hera, Orion), cloud environments, and generic Linux and macOS systems. Four `levels of support `_ have been defined for the SRW Application. Preconfigured (Level 1) systems already have the required external libraries (HPC-Stack) available in a central location. The SRW Application is expected to build and run out-of-the-box on these systems, and users can :ref:`download the SRW App code ` without first installing prerequisites. On other platforms, the SRW App can be :ref:`run within a container ` that includes the HPC-Stack, or the required libraries will need to be installed as part of the :ref:`SRW Application build ` process. Once these prerequisite libraries are installed, applications and models should build and run successfully. However, users may need to perform additional troubleshooting on Level 3 or 4 systems since little or no pre-release testing has been conducted on these systems. + + + +.. _SRWStructure: + +Code Repositories and Directory Structure +========================================= + +.. 
_HierarchicalRepoStr: + +Hierarchical Repository Structure +----------------------------------- +The :term:`umbrella repository` for the SRW Application is named ``ufs-srweather-app`` and is available on GitHub at https://github.com/ufs-community/ufs-srweather-app. An umbrella repository is a repository that houses external code, called "externals," from additional repositories. The SRW Application includes the ``manage_externals`` tool and a configuration file called ``Externals.cfg``, which describes the external repositories associated with the SRW App umbrella repository (see :numref:`Table %s `). + +.. _top_level_repos: + +.. table:: List of top-level repositories that comprise the UFS SRW Application + + +---------------------------------+---------------------------------------------------------+ + | **Repository Description** | **Authoritative repository URL** | + +=================================+=========================================================+ + | Umbrella repository for the UFS | https://github.com/ufs-community/ufs-srweather-app | + | Short-Range Weather Application | | + +---------------------------------+---------------------------------------------------------+ + | Repository for | https://github.com/ufs-community/ufs-weather-model | + | the UFS Weather Model | | + +---------------------------------+---------------------------------------------------------+ + | Repository for the regional | https://github.com/ufs-community/regional_workflow | + | workflow | | + +---------------------------------+---------------------------------------------------------+ + | Repository for UFS utilities, | https://github.com/ufs-community/UFS_UTILS | + | including pre-processing, | | + | chgres_cube, and more | | + +---------------------------------+---------------------------------------------------------+ + | Repository for the Unified Post | https://github.com/NOAA-EMC/UPP | + | Processor (UPP) | | + +---------------------------------+---------------------------------------------------------+ + +The UFS Weather Model contains a number of sub-repositories, which are documented `here `__. + +Note that the prerequisite libraries (including NCEP Libraries and external libraries) are not included in the UFS SRW Application repository. The `HPC-Stack `__ repository assembles these prerequisite libraries. The HPC-Stack has already been built on `preconfigured (Level 1) platforms `__. However, it must be built on other systems. :numref:`Chapter %s ` contains details on installing the HPC-Stack. + + +.. _TopLevelDirStructure: + +Directory Structure +---------------------- +The ``ufs-srweather-app`` :term:`umbrella repository` structure is determined by the ``local_path`` settings contained within the ``Externals.cfg`` file. After ``manage_externals/checkout_externals`` is run (:numref:`Step %s `), the specific GitHub repositories described in :numref:`Table %s ` are cloned into the target subdirectories shown below. Directories that will be created as part of the build process appear in parentheses and will not be visible until after the build is complete. Some directories have been removed for brevity. + +.. 
code-block:: console + + ufs-srweather-app + ├── (bin) + ├── (build) + ├── docs + │ └── UsersGuide + ├── (include) + ├── (lib) + ├── manage_externals + ├── regional_workflow + │ ├── docs + │ │ └── UsersGuide + │ ├── (fix) + │ ├── jobs + │ ├── modulefiles + │ ├── scripts + │ ├── tests + │ │ └── baseline_configs + │ └── ush + │ ├── Python + │ ├── rocoto + │ ├── templates + │ └── wrappers + ├── (share) + └── src + ├── UPP + │ ├── parm + │ └── sorc + │ └── ncep_post.fd + ├── UFS_UTILS + │ ├── sorc + │ │ ├── chgres_cube.fd + │ │ ├── fre-nctools.fd + | │ ├── grid_tools.fd + │ │ ├── orog_mask_tools.fd + │ │ └── sfc_climo_gen.fd + │ └── ush + └── ufs_weather_model + └── FV3 + ├── atmos_cubed_sphere + └── ccpp + +Regional Workflow Sub-Directories +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +A number of sub-directories are created under the ``regional_workflow`` directory when the regional workflow is cloned (see directory diagram :ref:`above `). :numref:`Table %s ` describes the contents of these sub-directories. + +.. _Subdirectories: + +.. table:: Sub-directories of the regional workflow + + +-------------------------+---------------------------------------------------------+ + | **Directory Name** | **Description** | + +=========================+=========================================================+ + | docs | User's Guide Documentation | + +-------------------------+---------------------------------------------------------+ + | jobs | J-job scripts launched by Rocoto | + +-------------------------+---------------------------------------------------------+ + | modulefiles | Files used to load modules needed for building and | + | | running the workflow | + +-------------------------+---------------------------------------------------------+ + | scripts | Run scripts launched by the J-jobs | + +-------------------------+---------------------------------------------------------+ + | tests | Baseline experiment configuration | + +-------------------------+---------------------------------------------------------+ + | ush | Utility scripts used by the workflow | + +-------------------------+---------------------------------------------------------+ + +.. _ExperimentDirSection: + +Experiment Directory Structure +-------------------------------- +When the user generates an experiment using the ``generate_FV3LAM_wflow.sh`` script (:numref:`Step %s `), a user-defined experimental directory (``EXPTDIR``) is created based on information specified in the ``config.sh`` file. :numref:`Table %s ` shows the contents of the experiment directory before running the experiment workflow. + +.. _ExptDirStructure: + +.. 
table:: Files and sub-directory initially created in the experimental directory + :widths: 33 67 + + +---------------------------+-------------------------------------------------------------------------------------------------------+ + | **File Name** | **Description** | + +===========================+=======================================================================================================+ + | config.sh | User-specified configuration file, see :numref:`Section %s ` | + +---------------------------+-------------------------------------------------------------------------------------------------------+ + | data_table | Cycle-independent input file (empty) | + +---------------------------+-------------------------------------------------------------------------------------------------------+ + | field_table | Tracers in the `forecast model | + | | `_ | + +---------------------------+-------------------------------------------------------------------------------------------------------+ + | FV3LAM_wflow.xml | Rocoto XML file to run the workflow | + +---------------------------+-------------------------------------------------------------------------------------------------------+ + | input.nml | Namelist for the `UFS Weather model | + | | `_ | + +---------------------------+-------------------------------------------------------------------------------------------------------+ + | launch_FV3LAM_wflow.sh | Symlink to the shell script of | + | | ``ufs-srweather-app/regional_workflow/ush/launch_FV3LAM_wflow.sh`` | + | | that can be used to (re)launch the Rocoto workflow. | + | | Each time this script is called, it appends to a log | + | | file named ``log.launch_FV3LAM_wflow``. | + +---------------------------+-------------------------------------------------------------------------------------------------------+ + | log.generate_FV3LAM_wflow | Log of the output from the experiment generation script | + | | ``generate_FV3LAM_wflow.sh`` | + +---------------------------+-------------------------------------------------------------------------------------------------------+ + | nems.configure | See `NEMS configuration file | + | | `_ | + +---------------------------+-------------------------------------------------------------------------------------------------------+ + | suite_{CCPP}.xml | CCPP suite definition file used by the forecast model | + +---------------------------+-------------------------------------------------------------------------------------------------------+ + | var_defns.sh | Shell script defining the experiment parameters. It contains all | + | | of the primary parameters specified in the default and | + | | user-specified configuration files plus many secondary parameters | + | | that are derived from the primary ones by the experiment | + | | generation script. This file is sourced by various other scripts | + | | in order to make all the experiment variables available to these | + | | scripts. | + +---------------------------+-------------------------------------------------------------------------------------------------------+ + | YYYYMMDDHH | Cycle directory (empty) | + +---------------------------+-------------------------------------------------------------------------------------------------------+ + +In addition, running the SRW App in *community* mode creates the ``fix_am`` and ``fix_lam`` directories in ``EXPTDIR``. 
The ``fix_lam`` directory is initially empty but will contain some *fix* (time-independent) files after the grid, orography, and/or surface climatology generation tasks are run. + +.. _FixDirectories: + +.. table:: Description of the fix directories + + +-------------------------+----------------------------------------------------------+ + | **Directory Name** | **Description** | + +=========================+==========================================================+ + | fix_am | Directory containing the global fix (time-independent) | + | | data files. The experiment generation script copies | + | | these files from a machine-dependent system directory. | + +-------------------------+----------------------------------------------------------+ + | fix_lam | Directory containing the regional fix (time-independent) | + | | data files that describe the regional grid, orography, | + | | and various surface climatology fields as well as | + | | symlinks to pre-generated files. | + +-------------------------+----------------------------------------------------------+ + +Once the workflow is launched with the ``launch_FV3LAM_wflow.sh`` script, a log file named +``log.launch_FV3LAM_wflow`` will be created (unless it already exists) in ``EXPTDIR``. The first several workflow tasks (i.e., ``make_grid``, ``make_orog``, ``make_sfc_climo``, ``get_extrn_ics``, and ``get_extrn_lbc``) are preprocessing tasks, which result in the creation of new files and +sub-directories, described in :numref:`Table %s `. + +.. _CreatedByWorkflow: + +.. table:: New directories and files created when the workflow is launched + :widths: 30 70 + + +---------------------------+--------------------------------------------------------------------+ + | **Directory/File Name** | **Description** | + +===========================+====================================================================+ + | YYYYMMDDHH | This is a “cycle directory” that is updated when the first | + | | cycle-specific workflow tasks (``get_extrn_ics`` and | + | | ``get_extrn_lbcs``) are run. These tasks are launched | + | | simultaneously for each cycle in the experiment. Cycle directories | + | | are created to contain cycle-specific files for each cycle that | + | | the experiment runs. If ``DATE_FIRST_CYCL`` and ``DATE_LAST_CYCL`` | + | | are different, and/or if ``CYCL_HRS`` contains more than one | + | | element in the ``config.sh`` file, more than one cycle directory | + | | will be created under the experiment directory. | + +---------------------------+--------------------------------------------------------------------+ + | grid | Directory generated by the ``make_grid`` task to store grid files | + | | for the experiment | + +---------------------------+--------------------------------------------------------------------+ + | log | Contains log files generated by the overall workflow and by its | + | | various tasks. Look in these files to trace why a task may have | + | | failed. 
| + +---------------------------+--------------------------------------------------------------------+ + | orog | Directory generated by the ``make_orog`` task containing the | + | | orography files for the experiment | + +---------------------------+--------------------------------------------------------------------+ + | sfc_climo | Directory generated by the ``make_sfc_climo`` task containing the | + | | surface climatology files for the experiment | + +---------------------------+--------------------------------------------------------------------+ + | FV3LAM_wflow.db | Database files that are generated when Rocoto is called (by the | + | FV3LAM_wflow_lock.db | launch script) to launch the workflow. | + +---------------------------+--------------------------------------------------------------------+ + | log.launch_FV3LAM_wflow | The ``launch_FV3LAM_wflow.sh`` script appends its output to this | + | | log file each time it is called. Take a look at the last 30–50 | + | | lines of this file to check the status of the workflow. | + +---------------------------+--------------------------------------------------------------------+ + +The output files for an experiment are described in :numref:`Section %s `. +The workflow tasks are described in :numref:`Section %s `). + + +User Support, Documentation, and Contributions to Development +=============================================================== + +A forum-based, online `support system `_ organized by topic provides a centralized location for UFS users and developers to post questions and exchange information. A list of available documentation is shown in :numref:`Table %s `. @@ -182,15 +336,17 @@ A list of available documentation is shown in :numref:`Table %s ` need to be followed. +utilities, model code, and infrastructure. Users can post issues in the related GitHub repositories to report bugs or to announce upcoming contributions to the code base. For code to be accepted in the authoritative repositories, users must follow the code management rules of each UFS component repository, which are outlined in the respective User's Guides listed in :numref:`Table %s `. Future Direction -================ +================= -Users can expect to see incremental improvements and additional capabilities in upcoming -releases of the SRW Application to enhance research opportunities and support operational -forecast implementations. Planned advancements include: +Users can expect to see incremental improvements and additional capabilities in upcoming releases of the SRW Application to enhance research opportunities and support operational forecast implementations. Planned enhancements include: * A more extensive set of supported developmental physics suites. -* A larger number of pre-defined domains/resolutions and a fully supported capability to create a user-defined domain. -* Inclusion of data assimilation, cycling, and ensemble capabilities. -* A verification package (i.e., METplus) integrated into the workflow. -* Inclusion of stochastic perturbation techniques. +* A larger number of pre-defined domains/resolutions and a *fully supported* capability to create a user-defined domain. +* Add user-defined vertical levels (number and distribution). +* Inclusion of data assimilation and forecast restart/cycling capabilities. -In addition to the above list, other improvements will be addressed in future releases. 
- -How to Use This Document -======================== - -This guide instructs both novice and experienced users on downloading, -building and running the SRW Application. Please post questions in the -UFS forum at https://forums.ufscommunity.org/. - -.. code-block:: console - - Throughout the guide, this presentation style indicates shell - commands and options, code examples, etc. +.. bibliography:: references.bib -.. note:: - Variables presented as ``AaBbCc123`` in this document typically refer to variables - in scripts, names of files and directories. -.. bibliography:: references.bib diff --git a/docs/UsersGuide/source/Quickstart.rst b/docs/UsersGuide/source/Quickstart.rst index 24630b65ab..23c3f0aeea 100644 --- a/docs/UsersGuide/source/Quickstart.rst +++ b/docs/UsersGuide/source/Quickstart.rst @@ -1,320 +1,345 @@ -.. _Quickstart: - -==================== -Workflow Quick Start -==================== -To build and run the out-of-the-box case of the UFS Short-Range Weather (SRW) Application the user -must get the source code for multiple components, including: the regional workflow, the UFS_UTILS -pre-processor utilities, the UFS Weather Model, and the Unified Post Processor (UPP). Once the UFS -SRW Application umbrella repository is cloned, obtaining the necessary external repositories is -simplified by the use of ``manage_externals``. The out-of-the-box case uses a predefined 25-km -CONUS grid (RRFS_CONUS_25km), the GFS version 15.2 physics suite (FV3_GFS_v15p2 CCPP), and -FV3-based GFS raw external model data for initialization. +.. _QuickstartC: -.. note:: +==================================== +Container-Based Quick Start Guide +==================================== - The steps described in this chapter are applicable to preconfigured (Level 1) machines where - all of the required libraries for building community releases of UFS models and applications - are available in a central place (i.e. the bundled libraries (NCEPLIBS) and third-party - libraries (NCEPLIBS-external) have both been built). The Level 1 platforms are listed `here - `_. - For more information on compiling NCEPLIBS-external and NCEPLIBS, please refer to the - NCEPLIBS-external `wiki `_. +This Quick Start Guide will help users to build and run the "out-of-the-box" case for the Unified Forecast System (:term:`UFS`) Short-Range Weather (SRW) Application using a `Singularity `__ :term:`container`. The container approach provides a uniform environment in which to build and run the SRW App. Normally, the details of building and running the SRW App vary from system to system due to the many possible combinations of operating systems, compilers, :term:`MPI`’s, and package versions available. Installation via Singularity container reduces this variability and allows for a smoother SRW App build experience. However, the container is not compatible with the `Rocoto workflow manager `__, so users must run each task in the workflow manually. Additionally, the Singularity container can only run on a single compute node, which makes the container-based approach inadequate for large experiments. It is an excellent starting point for running the "out-of-the-box" SRW App case and other small experiments. However, the :ref:`non-container approach ` may be more appropriate for those users who desire additional customizability or more compute power, particularly if they already have experience running the SRW App. +The "out-of-the-box" SRW App case described in this User's Guide builds a weather forecast for June 15-16, 2019.
Multiple convective weather events during these two days produced over 200 filtered storm reports. Severe weather was clustered in two areas: the Upper Midwest through the Ohio Valley and the Southern Great Plains. This forecast uses a predefined 25-km Continental United States (:term:`CONUS`) grid (RRFS_CONUS_25km), the Global Forecast System (:term:`GFS`) version 16 physics suite (FV3_GFS_v16 :term:`CCPP`), and :term:`FV3`-based GFS raw external model data for initialization. -Download the UFS SRW Application Code -===================================== -The necessary source code is publicly available on GitHub. To clone the release branch of the repository: +.. attention:: -.. code-block:: console + All UFS applications support `four platform levels `_. The steps described in this chapter will work most smoothly on preconfigured (Level 1) systems. However, this guide can serve as a starting point for running the SRW App on other systems, too. - git clone -b ufs-v1.0.0 https://github.com/ufs-community/ufs-srweather-app.git - cd ufs-srweather-app +.. _DownloadCodeC: -Then, check out the submodules for the SRW application: +Building the UFS SRW Application +=========================================== -.. code-block:: console +Prerequisites: Install Singularity +------------------------------------ - ./manage_externals/checkout_externals +To build and run the SRW App using a Singularity container, first install the Singularity package according to the `Singularity Installation Guide `_. This will include the installation of dependencies and the installation of the Go programming language. SingularityCE Version 3.7 or above is recommended. -The ``checkout_externals`` script uses the configuration file ``Externals.cfg`` in the top level directory -and will clone the regional workflow, pre-processing utilities, UFS Weather Model, and UPP source code -into the appropriate directories under your ``regional_workflow`` and ``src`` directories. +.. warning:: + Docker containers can only be run with root privileges, and users cannot have root privileges on HPC's. Therefore, it is not possible to build the SRW, which uses the HPC-Stack, inside a Docker container on an HPC system. A Docker image may be pulled, but it must be run inside a container such as Singularity. -.. _SetUpBuild: +Working in the Cloud +----------------------- -Set up the Build Environment -============================ -Instructions for loading the proper modules and/or setting the correct environment variables can be -found in the ``env/`` directory in files named ``build__.env``. -The commands in these files can be directly copy-pasted to the command line or the file can be sourced. -You may need to modify certain variables such as the path to NCEP libraries for your individual platform, -or use ``setenv`` rather than ``export`` depending on your environment: +For those working on non-cloud-based systems, skip to :numref:`Step %s `. Users building the SRW App using NOAA's Cloud resources must complete a few additional steps to ensure that the SRW App builds and runs correctly. -.. code-block:: console +On NOAA Cloud systems, certain environment variables must be set *before* building the container: + +.. 
code-block:: - $ ls -l env/ - -rw-rw-r-- 1 user ral 1062 Apr 27 10:09 build_cheyenne_gnu.env - -rw-rw-r-- 1 user ral 1061 Apr 27 10:09 build_cheyenne_intel.env - -rw-rw-r-- 1 user ral 1023 Apr 27 10:09 build_hera_intel.env - -rw-rw-r-- 1 user ral 1017 Apr 27 10:09 build_jet_intel.env + sudo su + export SINGULARITY_CACHEDIR=/lustre/cache + export SINGULARITY_TEMPDIR=/lustre/tmp -Build the Executables -===================== -Build the executables as follows: +If the ``cache`` and ``tmp`` directories do not exist already, they must be created with a ``mkdir`` command. -.. code-block:: console +.. note:: + ``/lustre`` is a fast but non-persistent file system used on NOAA cloud systems. To retain work completed in this directory, `tar the files `__ and move them to the ``/contrib`` directory, which is much slower but persistent. - mkdir build - cd build +.. _WorkOnHPC: -Run ``cmake`` to set up the ``Makefile``, then run ``make``: +Working on HPC Systems +-------------------------- + +Those *not* working on HPC systems may skip to the :ref:`next step `. +On HPC systems (including NOAA's Cloud platforms), allocate a compute node on which to run the SRW App. On NOAA's Cloud platforms, the following commands will allocate a compute node: .. code-block:: console - cmake .. -DCMAKE_INSTALL_PREFIX=.. - make -j 4 >& build.out & + salloc -N 1 + module load gnu openmpi + mpirun -n 1 hostname + ssh + +The third command will output a hostname. Replace ```` in the last command with the output from the third command. After "ssh-ing" to the compute node in the last command, build and run the SRW App from that node. -Output from the build will be in the ``ufs-srweather-app/build/build.out`` file. -When the build completes, you should see the forecast model executable ``NEMS.exe`` and eleven -pre- and post-processing executables in the ``ufs-srweather-app/bin`` directory which are -described in :numref:`Table %s `. +The appropriate commands on other Level 1 platforms will vary, and users should consult the `documentation `__ for those platforms. In general, the allocation command will follow one of these two patterns depending on whether the system uses the Slurm or PBS resource manager respectively: -Generate the Workflow Experiment -================================ -Generating the workflow experiment requires three steps: +.. code-block:: console + + salloc -N 1 -n -A -t